
Commit 20bc907

Merge branch 'master' into feature/webvtt-regression-test
2 parents 7babf11 + b1e9efb commit 20bc907

13 files changed: +352 additions, -146 deletions

database.py

Lines changed: 30 additions & 21 deletions
@@ -4,15 +4,15 @@
 import re
 import traceback
 from abc import ABCMeta
-from typing import Any, Dict, Iterator, Tuple, Type, Union
+from typing import Any, Dict, Iterator, Optional, Tuple, Type, Union

 from sqlalchemy import create_engine
-from sqlalchemy.dialects.sqlite.pysqlite import SQLiteDialect_pysqlite
+from sqlalchemy.engine import Dialect
 from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.ext.declarative import DeclarativeMeta, declarative_base
-from sqlalchemy.orm import scoped_session, sessionmaker
-from sqlalchemy.sql.schema import Column, Table
-from sqlalchemy.sql.sqltypes import Enum, SchemaType, TypeDecorator
+from sqlalchemy.orm import (DeclarativeBase, DeclarativeMeta, scoped_session,
+                            sessionmaker)
+from sqlalchemy.pool import StaticPool
+from sqlalchemy.sql.sqltypes import String, TypeDecorator

 from exceptions import EnumParsingException, FailedToSpawnDBSession

@@ -23,7 +23,12 @@ class DeclarativeABCMeta(DeclarativeMeta, ABCMeta):
     pass


-Base = declarative_base(metaclass=DeclarativeMeta)
+class Base(DeclarativeBase):
+    """Base class for all models."""
+
+    pass
+
+
 Base.query = None
 db_engine = None

@@ -43,9 +48,18 @@ def create_session(db_string: str, drop_tables: bool = False) -> scoped_session:
     global db_engine, Base

     try:
-        # In testing, we want to maintain same memory variable
-        if db_engine is None or 'TESTING' not in os.environ or os.environ['TESTING'] == 'False':
-            db_engine = create_engine(db_string, convert_unicode=True)
+        # Only create engine if it doesn't exist
+        # For SQLite in-memory, we must reuse the engine to share the database
+        if db_engine is None:
+            # For SQLite in-memory databases, use StaticPool to share connection
+            if db_string == 'sqlite:///:memory:':
+                db_engine = create_engine(
+                    db_string,
+                    connect_args={"check_same_thread": False},
+                    poolclass=StaticPool
+                )
+            else:
+                db_engine = create_engine(db_string)
         db_session = scoped_session(sessionmaker(bind=db_engine))
         Base.query = db_session.query_property()

@@ -162,32 +176,27 @@ def db_type(cls) -> DeclEnumType:
         return DeclEnumType(cls)


-class DeclEnumType(SchemaType, TypeDecorator):
+class DeclEnumType(TypeDecorator):
     """Declarative enumeration type."""

     cache_ok = True
+    impl = String(50)

     def __init__(self, enum: Any) -> None:
         self.enum = enum
-        self.impl = Enum(
-            *enum.values(),
-            name="ck{0}".format(re.sub('([A-Z])', lambda m: "_" + m.group(1).lower(), enum.__name__))
-        )
-
-    def _set_table(self, table: Column, column: Table) -> None:
-        self.impl._set_table(table, column)
+        super().__init__()

-    def copy(self) -> DeclEnumType:
+    def copy(self, **kwargs: Any) -> DeclEnumType:
         """Get enumeration type of self."""
         return DeclEnumType(self.enum)

-    def process_bind_param(self, value: EnumSymbol, dialect: SQLiteDialect_pysqlite) -> str:
+    def process_bind_param(self, value: Optional[EnumSymbol], dialect: Dialect) -> Optional[str]:
         """Get process bind parameter."""
         if value is None:
             return None
         return value.value

-    def process_result_value(self, value: str, dialect: SQLiteDialect_pysqlite) -> EnumSymbol:
+    def process_result_value(self, value: Optional[str], dialect: Dialect) -> Optional[EnumSymbol]:
         """Get process result value."""
         if value is None:
             return None
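
For context on the engine change above: StaticPool together with check_same_thread=False is the usual SQLAlchemy recipe for sharing a single in-memory SQLite database between threads and scoped sessions, which is why the engine is reused rather than recreated per test run. A minimal, self-contained sketch of that behaviour (not project code; the table name 'example' is made up):

from sqlalchemy import create_engine, text
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.pool import StaticPool

# Single shared connection: every session bound to this engine sees the
# same in-memory database instead of getting a fresh, empty one.
engine = create_engine(
    'sqlite:///:memory:',
    connect_args={"check_same_thread": False},
    poolclass=StaticPool,
)

first = scoped_session(sessionmaker(bind=engine))
first.execute(text("CREATE TABLE example (id INTEGER PRIMARY KEY)"))
first.execute(text("INSERT INTO example (id) VALUES (1)"))
first.commit()

# A second scoped session bound to the same engine reads the same table,
# which is what the test suite relies on when db_engine is reused.
second = scoped_session(sessionmaker(bind=engine))
print(second.execute(text("SELECT COUNT(*) FROM example")).scalar())  # 1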

install/deploy/pre_deploy.sh

Lines changed: 26 additions & 0 deletions
@@ -101,6 +101,32 @@ if ! git diff --quiet 2>/dev/null; then
     git status --short
 fi

+# Check 8: Verify logs directory ownership
+LOGS_DIR="$INSTALL_FOLDER/logs"
+WEB_USER="${WEB_USER:-www-data}"
+if [ -d "$LOGS_DIR" ]; then
+    LOGS_OWNER=$(stat -c '%U' "$LOGS_DIR" 2>/dev/null || echo "unknown")
+    if [ "$LOGS_OWNER" != "$WEB_USER" ]; then
+        echo "WARNING: Logs directory owned by '$LOGS_OWNER', should be '$WEB_USER'"
+        echo "Fixing ownership..."
+        chown -R "$WEB_USER:$WEB_USER" "$LOGS_DIR" 2>/dev/null || {
+            echo "ERROR: Failed to fix logs ownership. Run manually:"
+            echo "  sudo chown -R $WEB_USER:$WEB_USER $LOGS_DIR"
+            exit 1
+        }
+        echo "✓ Logs directory ownership fixed"
+    else
+        echo "✓ Logs directory ownership OK ($WEB_USER)"
+    fi
+else
+    echo "Creating logs directory with correct ownership..."
+    mkdir -p "$LOGS_DIR"
+    chown "$WEB_USER:$WEB_USER" "$LOGS_DIR" 2>/dev/null || {
+        echo "WARNING: Could not set logs ownership (run as root)"
+    }
+    echo "✓ Logs directory created"
+fi
+
 # Export backup directory for other scripts
 echo "$BACKUP_DIR" > /tmp/sp-deploy-backup-dir.txt

log_configuration.py

Lines changed: 41 additions & 13 deletions
@@ -3,9 +3,10 @@
 import logging
 import logging.handlers
 import os
+import sys
 from logging import Logger, StreamHandler
 from logging.handlers import RotatingFileHandler
-from typing import Union
+from typing import Optional, Union


 class LogConfiguration:

@@ -19,21 +20,47 @@ def __init__(self, folder: str, filename: str, debug: bool = False) -> None:
             self._consoleLogger.setLevel(logging.DEBUG)
         else:
             self._consoleLogger.setLevel(logging.INFO)
-        # create a file handler
-        path = os.path.join(folder, 'logs', f'{filename}.log')
-        self._fileLogger = logging.handlers.RotatingFileHandler(path, maxBytes=1024 * 1024, backupCount=20)
-        self._fileLogger.setLevel(logging.DEBUG)
-        # create a logging format
-        formatter = logging.Formatter('[%(name)s][%(levelname)s][%(asctime)s] %(message)s')
-        self._fileLogger.setFormatter(formatter)
+
+        # create a file handler with permission error handling
+        self._fileLogger: Optional[RotatingFileHandler] = None
+        log_dir = os.path.join(folder, 'logs')
+        path = os.path.join(log_dir, f'{filename}.log')
+
+        try:
+            # Ensure logs directory exists
+            os.makedirs(log_dir, exist_ok=True)
+
+            self._fileLogger = logging.handlers.RotatingFileHandler(
+                path, maxBytes=1024 * 1024, backupCount=20
+            )
+            self._fileLogger.setLevel(logging.DEBUG)
+            # create a logging format
+            formatter = logging.Formatter('[%(name)s][%(levelname)s][%(asctime)s] %(message)s')
+            self._fileLogger.setFormatter(formatter)
+        except PermissionError as e:
+            # Log file owned by different user (e.g., root vs www-data)
+            # Fall back to console-only logging rather than crashing
+            print(
+                f"[WARNING] Cannot write to log file {path}: {e}. "
+                f"Falling back to console-only logging. "
+                f"Fix: sudo chown www-data:www-data {log_dir} -R",
+                file=sys.stderr
+            )
+        except OSError as e:
+            # Other filesystem errors (disk full, etc.)
+            print(
+                f"[WARNING] Cannot create log file {path}: {e}. "
+                f"Falling back to console-only logging.",
+                file=sys.stderr
+            )

     @property
-    def file_logger(self) -> RotatingFileHandler:
+    def file_logger(self) -> Optional[RotatingFileHandler]:
         """
         Get file logger.

-        :return: file logger
-        :rtype: logging.handlers.RotatingFileHandler
+        :return: file logger or None if file logging unavailable
+        :rtype: Optional[logging.handlers.RotatingFileHandler]
         """
         return self._fileLogger

@@ -59,7 +86,8 @@ def create_logger(self, name: str) -> Logger:
         logger = logging.getLogger(name)
         logger.setLevel(logging.DEBUG)
         # add the handlers to the logger
-        logger.addHandler(self.file_logger)
-        logger.addHandler(self.console_logger)
+        if self._fileLogger is not None:
+            logger.addHandler(self._fileLogger)
+        logger.addHandler(self._consoleLogger)

         return logger
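
A short usage sketch of the new fallback path, assuming only what the diff shows (the install folder '/var/www/sample-platform' and logger name are placeholders): if the logs directory is not writable, __init__ prints a warning to stderr, file_logger stays None, and create_logger() still returns a working console-only logger instead of raising.

from log_configuration import LogConfiguration

log_config = LogConfiguration('/var/www/sample-platform', 'platform', debug=True)
logger = log_config.create_logger('deploy.check')

# With an unwritable logs directory this logs to the console handler only;
# with correct ownership it also rotates files under <folder>/logs/.
logger.info('log configuration initialised')
print(log_config.file_logger)  # RotatingFileHandler, or None after a PermissionError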

mod_health/controllers.py

Lines changed: 2 additions & 1 deletion
@@ -6,6 +6,7 @@
 from typing import Any, Dict, Optional, Tuple

 from flask import Blueprint, current_app, jsonify
+from sqlalchemy import text

 mod_health = Blueprint('health', __name__)

@@ -20,7 +21,7 @@ def check_database() -> Dict[str, Any]:
     try:
         from database import create_session
         db = create_session(current_app.config['DATABASE_URI'])
-        db.execute('SELECT 1')
+        db.execute(text('SELECT 1'))
         # remove() returns the scoped session's connection to the pool
         db.remove()
         return {'status': 'ok'}
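
The text() wrapper is needed because SQLAlchemy 2.0 no longer accepts plain strings in Session.execute(). A minimal standalone illustration, not tied to this app's session factory:

from sqlalchemy import create_engine, text
from sqlalchemy.orm import Session

engine = create_engine('sqlite:///:memory:')

with Session(engine) as session:
    # session.execute('SELECT 1') raises ArgumentError in SQLAlchemy 2.0;
    # textual SQL has to be declared explicitly with text().
    result = session.execute(text('SELECT 1'))
    print(result.scalar())  # 1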

mod_test/controllers.py

Lines changed: 43 additions & 48 deletions
@@ -1,19 +1,16 @@
 """Logic to find all tests, their progress and details of individual test."""

 import os
-from datetime import datetime
 from typing import Any, Dict, List

 from flask import (Blueprint, Response, abort, g, jsonify, redirect, request,
                    url_for)
-from sqlalchemy import and_, func
-from sqlalchemy.sql import label
+from sqlalchemy import and_

 from decorators import template_renderer
 from exceptions import TestNotFoundException
 from mod_auth.controllers import check_access_rights, login_required
 from mod_auth.models import Role
-from mod_ci.models import GcpInstance
 from mod_customized.models import TestFork
 from mod_home.models import CCExtractorVersion, GeneralData
 from mod_regression.models import (Category, RegressionTestOutput,

@@ -133,58 +130,44 @@ def get_data_for_test(test, title=None) -> Dict[str, Any]:
     if title is None:
         title = f"test {test.id}"

-    hours = 0.00
-    minutes = 0.00
-    queued_tests = 0
-
-    """
-    evaluating estimated time if the test is still in queue
-    estimated time = (number of tests already in queue + 1) * (average time of that platform)
-                     - (time already spend by those tests)
-    calculates time in minutes and hours
-    """
+    # Calculate average runtime for this platform (used when test hasn't started yet)
+    avg_minutes = 0
     if len(test.progress) == 0:
-        var_average = 'average_time_' + test.platform.value
-
-        # get average build and prep time.
-        prep_average_key = 'avg_prep_time_' + test.platform.value
-        average_prep_time = int(float(GeneralData.query.filter(GeneralData.key == prep_average_key).first().value))
-
-        test_progress_last_entry = g.db.query(func.max(TestProgress.test_id)).first()
-        last_test_id = test_progress_last_entry[0] if test_progress_last_entry is not None else 0
-        queued_gcp_instance = g.db.query(GcpInstance.test_id).filter(GcpInstance.test_id < test.id).subquery()
-        queued_gcp_instance_entries = g.db.query(Test.id).filter(
-            and_(Test.id.in_(queued_gcp_instance), Test.platform == test.platform)
-        ).subquery()
-        gcp_instance_test = g.db.query(TestProgress.test_id, label('time', func.group_concat(
-            TestProgress.timestamp))).filter(TestProgress.test_id.in_(queued_gcp_instance_entries)).group_by(
-            TestProgress.test_id).all()
-        number_gcp_instance_test = g.db.query(Test.id).filter(
-            and_(Test.id > last_test_id, Test.id < test.id, Test.platform == test.platform)
-        ).count()
-        average_duration = float(GeneralData.query.filter(GeneralData.key == var_average).first().value)
-        queued_tests = number_gcp_instance_test
-        time_run = 0.00
-        for pr_test in gcp_instance_test:
-            timestamps = pr_test.time.split(',')
-            start = datetime.strptime(timestamps[0], '%Y-%m-%d %H:%M:%S')
-            end = datetime.strptime(timestamps[-1], '%Y-%m-%d %H:%M:%S')
-            time_run += (end - start).total_seconds()
-        # subtracting current running tests
-        total = average_prep_time + average_duration - time_run
-        minutes = (total % 3600) // 60
-        hours = total // 3600
+        try:
+            avg_time_key = 'average_time_' + test.platform.value
+            prep_time_key = 'avg_prep_time_' + test.platform.value
+
+            avg_time_record = GeneralData.query.filter(GeneralData.key == avg_time_key).first()
+            prep_time_record = GeneralData.query.filter(GeneralData.key == prep_time_key).first()
+
+            avg_duration = float(avg_time_record.value) if avg_time_record else 0
+            avg_prep = float(prep_time_record.value) if prep_time_record else 0
+
+            # Total average time in minutes
+            avg_minutes = int((avg_duration + avg_prep) / 60)
+        except (ValueError, AttributeError):
+            avg_minutes = 0

     results = get_test_results(test)

+    # Calculate sample progress for initial page load
+    completed_samples = len(test.results)
+    total_samples = len(test.get_customized_regressiontests())
+    progress_percentage = 0
+    if total_samples > 0:
+        progress_percentage = int((completed_samples / total_samples) * 100)
+
     return {
         'test': test,
         'TestType': TestType,
         'results': results,
         'title': title,
-        'next': queued_tests,
-        'min': minutes,
-        'hr': hours
+        'avg_minutes': avg_minutes,
+        'sample_progress': {
+            'current': completed_samples,
+            'total': total_samples,
+            'percentage': progress_percentage
+        }
     }


@@ -212,11 +195,23 @@ def get_json_data(test_id):
             'message': entry.message
         })

+    # Calculate sample progress from existing TestResult data
+    completed_samples = len(test.results)
+    total_samples = len(test.get_customized_regressiontests())
+    progress_percentage = 0
+    if total_samples > 0:
+        progress_percentage = int((completed_samples / total_samples) * 100)
+
     return jsonify({
         'status': 'success',
         'details': pr_data["progress"],
         'complete': test.finished,
-        'progress_array': progress_array
+        'progress_array': progress_array,
+        'sample_progress': {
+            'current': completed_samples,
+            'total': total_samples,
+            'percentage': progress_percentage
+        }
     })

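
The sample_progress block added to both endpoints is the same three-field payload; a small standalone sketch of the calculation and the resulting shape (the helper name and numbers are illustrative only):

def sample_progress(completed: int, total: int) -> dict:
    # Mirrors the progress maths above: guard against division by zero,
    # then report current/total plus an integer percentage.
    percentage = int((completed / total) * 100) if total > 0 else 0
    return {'current': completed, 'total': total, 'percentage': percentage}

print(sample_progress(12, 40))  # {'current': 12, 'total': 40, 'percentage': 30}
print(sample_progress(0, 0))    # {'current': 0, 'total': 0, 'percentage': 0}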

mypy.ini

Lines changed: 11 additions & 2 deletions
@@ -1,5 +1,14 @@
 [mypy]
-python_version = 3.8
+python_version = 3.10
 ignore_missing_imports = True
-warn_unused_ignores = True
+warn_unused_ignores = False
 exclude = venv*
+
+# Disable errors for SQLAlchemy 2.0 migration
+# These require more extensive refactoring:
+# - attr-defined: Model.query is set dynamically at runtime
+# - var-annotated: DeclEnum columns need proper type annotations
+# - assignment: datetime/date default values with Column types
+# - arg-type: Column types vs primitive types in function calls
+# - index: Test model indexing issues
+disable_error_code = attr-defined, var-annotated, assignment, arg-type, index
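
As an illustration of the first disabled code: mypy only sees the class body of a model, not the runtime assignment Base.query = db_session.query_property() from database.py, so any Model.query access is reported as attr-defined. A hypothetical model sketch (Sample is not a real model in this repository):

from sqlalchemy import Integer
from sqlalchemy.orm import Mapped, mapped_column

from database import Base  # the DeclarativeBase subclass from this commit

class Sample(Base):
    # Hypothetical model, for illustration only.
    __tablename__ = 'sample'
    id: Mapped[int] = mapped_column(Integer, primary_key=True)

# Valid at runtime once create_session() has attached the query property,
# but mypy flags it: type[Sample] has no attribute "query" [attr-defined].
queued = Sample.query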
