Skip to content

Commit 3edd762

Browse files
Merge pull request #285 from Dictionarry-Hub/dev
2 parents 3a0deb1 + cf67a1c commit 3edd762

File tree

29 files changed

+834
-262
lines changed

29 files changed

+834
-262
lines changed

Dockerfile

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,28 @@
11
# Dockerfile
22
FROM python:3.9-slim
33
WORKDIR /app
4-
# Install git and gosu for user switching
5-
RUN apt-get update && apt-get install -y git gosu && rm -rf /var/lib/apt/lists/*
4+
# Install git, gosu, and PowerShell Core
5+
RUN apt-get update && apt-get install -y \
6+
git \
7+
gosu \
8+
wget \
9+
ca-certificates \
10+
libicu-dev \
11+
&& wget -O /tmp/powershell.tar.gz https://github.com/PowerShell/PowerShell/releases/download/v7.4.0/powershell-7.4.0-linux-x64.tar.gz \
12+
&& mkdir -p /opt/microsoft/powershell/7 \
13+
&& tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 \
14+
&& chmod +x /opt/microsoft/powershell/7/pwsh \
15+
&& ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh \
16+
&& rm /tmp/powershell.tar.gz \
17+
&& rm -rf /var/lib/apt/lists/*
618
# Copy pre-built files from dist directory
719
COPY dist/backend/app ./app
20+
COPY dist/backend/scripts ./app/scripts
821
COPY dist/static ./app/static
922
COPY dist/requirements.txt .
1023
RUN pip install --no-cache-dir -r requirements.txt
24+
# Ensure scripts are executable
25+
RUN chmod +x /app/scripts/*.ps1 || true
1126
# Copy and setup entrypoint script
1227
COPY entrypoint.sh /entrypoint.sh
1328
RUN chmod +x /entrypoint.sh

backend/Dockerfile

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,21 @@
11
FROM python:3.9
WORKDIR /app
# Install PowerShell Core (pwsh) so the backend can run the .ps1 helper
# scripts under /app/scripts; libicu is a PowerShell runtime dependency.
RUN apt-get update && apt-get install -y \
    wget \
    ca-certificates \
    libicu-dev \
    && wget -O /tmp/powershell.tar.gz https://github.com/PowerShell/PowerShell/releases/download/v7.4.0/powershell-7.4.0-linux-x64.tar.gz \
    && mkdir -p /opt/microsoft/powershell/7 \
    && tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 \
    && chmod +x /opt/microsoft/powershell/7/pwsh \
    && ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh \
    && rm /tmp/powershell.tar.gz \
    && rm -rf /var/lib/apt/lists/*
# Copy requirements first so dependency install is cached across code changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
# Ensure scripts are executable ("|| true" keeps the build green when no
# .ps1 scripts are present)
RUN chmod +x /app/scripts/*.ps1 || true
# NOTE(review): this comment previously claimed "Use gunicorn with 10-minute
# timeout", but the CMD below starts the app directly with the Python
# interpreter — confirm which server is actually intended.
CMD ["python", "-m", "app.main"]

backend/app/data/__init__.py

Lines changed: 52 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
test_regex_pattern, test_format_conditions,
88
check_delete_constraints, filename_to_display)
99
from ..db import add_format_to_renames, remove_format_from_renames, is_format_in_renames
10+
from .cache import data_cache
1011

1112
logger = logging.getLogger(__name__)
1213
logger.setLevel(logging.INFO)
@@ -16,43 +17,19 @@
1617
@bp.route('/<string:category>', methods=['GET'])
1718
def retrieve_all(category):
1819
try:
19-
directory = get_category_directory(category)
20-
files = [f for f in os.listdir(directory) if f.endswith('.yml')]
21-
logger.debug(f"Found {len(files)} files in {category}")
22-
23-
if not files:
24-
return jsonify([]), 200
25-
26-
result = []
27-
errors = 0
28-
for file_name in files:
29-
file_path = os.path.join(directory, file_name)
30-
try:
31-
content = load_yaml_file(file_path)
32-
# Add metadata for custom formats
33-
if category == 'custom_format':
34-
content['metadata'] = {
35-
'includeInRename':
36-
is_format_in_renames(content['name'])
20+
# Use cache instead of reading from disk
21+
items = data_cache.get_all(category)
22+
23+
# Add metadata for custom formats
24+
if category == 'custom_format':
25+
for item in items:
26+
if 'content' in item and 'name' in item['content']:
27+
item['content']['metadata'] = {
28+
'includeInRename': is_format_in_renames(item['content']['name'])
3729
}
38-
result.append({
39-
"file_name":
40-
file_name,
41-
"content":
42-
content,
43-
"modified_date":
44-
get_file_modified_date(file_path)
45-
})
46-
except yaml.YAMLError:
47-
errors += 1
48-
result.append({
49-
"file_name": file_name,
50-
"error": "Failed to parse YAML"
51-
})
52-
53-
logger.info(
54-
f"Processed {len(files)} {category} files ({errors} errors)")
55-
return jsonify(result), 200
30+
31+
logger.info(f"Retrieved {len(items)} {category} items from cache")
32+
return jsonify(items), 200
5633

5734
except ValueError as ve:
5835
logger.error(ve)
@@ -127,6 +104,10 @@ def handle_item(category, name):
127104

128105
# Then delete the file
129106
os.remove(file_path)
107+
108+
# Update cache
109+
data_cache.remove_item(category, file_name)
110+
130111
return jsonify(
131112
{"message": f"Successfully deleted {file_name}"}), 200
132113
except OSError as e:
@@ -226,32 +207,62 @@ def handle_item(category, name):
226207
return jsonify({"error": "An unexpected error occurred"}), 500
227208

228209

210+
@bp.route('/regex/verify', methods=['POST'])
def verify_regex():
    """Verify a regex pattern using .NET regex engine via PowerShell"""
    try:
        payload = request.get_json()

        # Guard clauses: reject requests with no body or no pattern.
        if not payload:
            return jsonify({"error": "No JSON data provided"}), 400

        pattern = payload.get('pattern')
        if not pattern:
            return jsonify({"error": "Pattern is required"}), 400

        # Imported lazily so the pwsh-backed helper is only loaded on demand.
        from .utils import verify_dotnet_regex

        is_valid, detail = verify_dotnet_regex(pattern)

        # An invalid pattern is still a successful verification request,
        # so both outcomes answer with HTTP 200.
        if not is_valid:
            return jsonify({"valid": False, "error": detail}), 200
        return jsonify({"valid": True, "message": "Pattern is valid"}), 200

    except Exception as e:
        logger.exception("Error verifying regex pattern")
        return jsonify({"valid": False, "error": str(e)}), 500
234+
235+
229236
@bp.route('/<string:category>/test', methods=['POST'])
230237
def run_tests(category):
231238
logger.info(f"Received test request for category: {category}")
232239

233240
try:
234241
data = request.get_json()
235242
if not data:
236-
logger.warning("Rejected test request - no JSON data provided")
243+
logger.warning("Test request rejected: no JSON data")
237244
return jsonify({"error": "No JSON data provided"}), 400
238245

239246
tests = data.get('tests', [])
240247
if not tests:
241-
logger.warning("Rejected test request - no test cases provided")
248+
logger.warning("Test request rejected: no tests provided")
242249
return jsonify({"error":
243250
"At least one test case is required"}), 400
244251

245252
if category == 'regex_pattern':
246253
pattern = data.get('pattern')
247-
logger.info(f"Processing regex test request - Pattern: {pattern}")
248254

249255
if not pattern:
250-
logger.warning("Rejected test request - missing pattern")
256+
logger.warning("Test request rejected: missing pattern")
251257
return jsonify({"error": "Pattern is required"}), 400
252258

253259
success, message, updated_tests = test_regex_pattern(
254260
pattern, tests)
261+
262+
if success and updated_tests:
263+
passed = sum(1 for t in updated_tests if t.get('passes'))
264+
total = len(updated_tests)
265+
logger.info(f"Tests completed: {passed}/{total} passed")
255266

256267
elif category == 'custom_format':
257268
conditions = data.get('conditions', [])
@@ -274,10 +285,8 @@ def run_tests(category):
274285
return jsonify(
275286
{"error": "Testing not supported for this category"}), 400
276287

277-
logger.info(f"Test execution completed - Success: {success}")
278-
279288
if not success:
280-
logger.warning(f"Test execution failed - {message}")
289+
logger.error(f"Test execution failed: {message}")
281290
return jsonify({"success": False, "message": message}), 400
282291

283292
return jsonify({"success": True, "tests": updated_tests}), 200

backend/app/data/cache.py

Lines changed: 117 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,117 @@
1+
import os
2+
import yaml
3+
import logging
4+
from typing import Dict, List, Any, Optional
5+
from datetime import datetime
6+
import threading
7+
from .utils import get_category_directory, get_file_modified_date, filename_to_display
8+
9+
logger = logging.getLogger(__name__)


class DataCache:
    """Thread-safe, in-memory cache of the YAML items for each data category.

    Items are stored per category, keyed by filename, as dicts of the form
    {'file_name': str, 'modified_date': ..., 'content': dict}.
    """

    def __init__(self):
        # One bucket per supported category; filename -> item record.
        self._cache = {
            'regex_pattern': {},
            'custom_format': {},
            'profile': {}
        }
        # RLock because public methods hold the lock while calling
        # initialize(), which acquires it again.
        self._lock = threading.RLock()
        self._initialized = False

    def initialize(self, force_reload=False):
        """Load all data into memory on startup

        Args:
            force_reload: If True, force a reload even if already initialized
        """
        with self._lock:
            if self._initialized and not force_reload:
                return

            logger.info("Initializing data cache..." if not force_reload else "Reloading data cache...")
            for category in self._cache.keys():
                self._load_category(category)

            self._initialized = True
            logger.info("Data cache initialized successfully" if not force_reload else "Data cache reloaded successfully")

    def _load_category(self, category: str):
        """Load all items from a category into cache

        Per-file parse errors are logged and skipped so one bad file does
        not prevent the rest of the category from loading.
        """
        try:
            directory = get_category_directory(category)
            items = {}

            for filename in os.listdir(directory):
                if not filename.endswith('.yml'):
                    continue

                file_path = os.path.join(directory, filename)
                try:
                    with open(file_path, 'r') as f:
                        content = yaml.safe_load(f)
                        if content:
                            # Store with metadata
                            items[filename] = {
                                'file_name': filename,
                                'modified_date': get_file_modified_date(file_path),
                                'content': content
                            }
                except Exception as e:
                    logger.error(f"Error loading {file_path}: {e}")

            self._cache[category] = items
            logger.info(f"Loaded {len(items)} items for category {category}")

        except Exception as e:
            logger.error(f"Error loading category {category}: {e}")

    def get_all(self, category: str) -> List[Dict[str, Any]]:
        """Get all items from a category (empty list for unknown categories)."""
        with self._lock:
            if not self._initialized:
                self.initialize()

            return list(self._cache.get(category, {}).values())

    def get_item(self, category: str, name: str) -> Optional[Dict[str, Any]]:
        """Get a specific item by display name, or None if absent.

        Display names use square brackets where filenames use parentheses,
        so '[' / ']' are mapped to '(' / ')' before the lookup.
        """
        with self._lock:
            if not self._initialized:
                self.initialize()

            # Convert name to filename
            filename = f"{name.replace('[', '(').replace(']', ')')}.yml"
            return self._cache.get(category, {}).get(filename)

    def update_item(self, category: str, filename: str, content: Dict[str, Any]):
        """Insert or update an item in cache after it was written to disk."""
        with self._lock:
            if category in self._cache:
                file_path = os.path.join(get_category_directory(category), filename)
                self._cache[category][filename] = {
                    'file_name': filename,
                    'modified_date': get_file_modified_date(file_path),
                    'content': content
                }
                # Fixed: previously logged a literal "(unknown)" instead of
                # the affected filename.
                logger.debug(f"Updated cache for {category}/{filename}")

    def remove_item(self, category: str, filename: str):
        """Remove an item from cache after its file was deleted."""
        with self._lock:
            if category in self._cache and filename in self._cache[category]:
                del self._cache[category][filename]
                # Fixed: previously logged a literal "(unknown)" instead of
                # the affected filename.
                logger.debug(f"Removed from cache: {category}/{filename}")

    def rename_item(self, category: str, old_filename: str, new_filename: str):
        """Rename an item in cache, preserving its cached content."""
        with self._lock:
            if category in self._cache and old_filename in self._cache[category]:
                item = self._cache[category].pop(old_filename)
                item['file_name'] = new_filename
                self._cache[category][new_filename] = item
                logger.debug(f"Renamed in cache: {category}/{old_filename} -> {new_filename}")


# Global cache instance shared by the route handlers.
data_cache = DataCache()

0 commit comments

Comments
 (0)