Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 6 additions & 2 deletions tools/ci/run_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -270,8 +270,12 @@ def main() -> int:
write_log(results)
try:
write_junit(results)
except Exception as exc: # pragma: no cover - best effort only
print(f"warning: failed to write JUnit report: {exc}", file=sys.stderr)
except OSError as exc: # pragma: no cover - best effort only
print(
f"warning: failed to write JUnit report to {JUNIT_FILE}: {exc}. "
"Ensure the artifacts directory is writable.",
file=sys.stderr,
)
write_summary(results)

failures = sum(1 for result in results if not result.passed)
Expand Down
76 changes: 53 additions & 23 deletions tools/extract_entities.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,26 +14,56 @@
# Regex pattern to match QUAKED comment blocks
quaked_block_pattern = re.compile(r'/\*QUAKED[\s\S]*?\*/', re.MULTILINE)

# Accumulator for every QUAKED comment block found across the source tree.
entity_blocks = []

# Walk through the repository and search for QUAKED blocks in C++ sources.
for root, dirs, files in os.walk(PROJECT_ROOT):
    for file in files:
        if file.endswith(".cpp"):
            file_path = Path(root) / file
            try:
                # errors="ignore" drops undecodable bytes so one bad file
                # cannot abort the whole scan.
                with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
                    content = f.read()
                matches = quaked_block_pattern.findall(content)
                if matches:
                    entity_blocks.extend(matches)
            except Exception as e:
                # Best effort: report the failure and keep scanning.
                print(f"Error reading {file_path}: {e}")

# Write all found blocks to the output file, blank-line separated.
with open(OUTPUT_PATH, "w", encoding="utf-8") as out:
    for block in entity_blocks:
        out.write(block.strip())
        out.write("\n\n")

print(f"{len(entity_blocks)} entity definitions written to {OUTPUT_PATH}.")
def read_cpp_file_for_entities(file_path: Path) -> list[str]:
    """Return every QUAKED comment block found in *file_path*.

    Read failures are reported on stdout and yield an empty list so the
    caller can keep scanning the remaining source files.
    """
    try:
        with open(file_path, "r", encoding="utf-8", errors="ignore") as handle:
            source_text = handle.read()
    except FileNotFoundError:
        message = (
            f"Error reading {file_path}: file not found. Verify the path before re-running."
        )
    except PermissionError as exc:
        message = (
            f"Error reading {file_path}: permission denied ({exc}). Adjust file permissions "
            "or run with appropriate access."
        )
    except OSError as exc:
        message = (
            f"Error reading {file_path}: {exc}. Ensure the file is accessible and readable."
        )
    else:
        return quaked_block_pattern.findall(source_text)

    print(message)
    return []


def collect_entity_blocks(project_root: Path) -> list[str]:
    """Scan every .cpp file under *project_root* and gather QUAKED blocks."""
    blocks: list[str] = []

    # Walk through the repository and search for QUAKED blocks in C++ sources.
    for dirpath, _subdirs, filenames in os.walk(project_root):
        cpp_paths = (
            Path(dirpath) / name for name in filenames if name.endswith(".cpp")
        )
        for cpp_path in cpp_paths:
            # extend() with an empty result is a no-op, so no guard needed.
            blocks.extend(read_cpp_file_for_entities(cpp_path))

    return blocks


def main() -> None:
    """Extract all entity definitions and write them to OUTPUT_PATH."""
    found = collect_entity_blocks(PROJECT_ROOT)

    # Each block is trimmed and separated from the next by a blank line.
    with open(OUTPUT_PATH, "w", encoding="utf-8") as destination:
        destination.writelines(f"{entry.strip()}\n\n" for entry in found)

    print(f"{len(found)} entity definitions written to {OUTPUT_PATH}.")


if __name__ == "__main__":
    main()
14 changes: 12 additions & 2 deletions tools/fix_encoding.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,20 @@ def detect_encoding(path):
def fix_encoding(input_file, output_file):
encoding, raw_bytes = detect_encoding(input_file)

if not encoding:
print(
f"Failed to detect encoding for {input_file}; ensure the file is not empty "
"or specify the expected encoding manually."
)
return

try:
text = raw_bytes.decode(encoding, errors='replace') # Replace illegal chars
except Exception as e:
print(f"Failed to decode using {encoding}: {e}")
except (UnicodeDecodeError, LookupError) as e:
print(
f"Failed to decode {input_file} using detected encoding '{encoding}': {e}. "
"Try converting the file to UTF-8 manually before re-running."
)
return

with open(output_file, 'w', encoding='utf-8') as f:
Expand Down
89 changes: 53 additions & 36 deletions tools/header.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,39 +13,56 @@
# Normalize directory paths to match os.walk format
ALLOWED_DIRS = {os.path.normpath(d) for d in ALLOWED_DIRS}

# Store results: maps each scanned file path to the include lines found in it.
includes_by_file = {}

for root, dirs, files in os.walk('.'):
    # Normalize root path
    norm_root = os.path.normpath(root)
    if norm_root not in ALLOWED_DIRS:
        # Don't descend into subdirs of disallowed dirs
        dirs[:] = []
        continue

    for file in files:
        if file.endswith(SOURCE_EXTENSIONS):
            filepath = os.path.join(root, file)
            includes = []

            try:
                # errors='ignore' keeps one undecodable file from aborting the scan.
                with open(filepath, 'r', encoding='utf-8', errors='ignore') as f:
                    for line in f:
                        stripped = line.strip()
                        # Skip blank lines and comment lines before matching.
                        if stripped.startswith('//') or stripped.startswith('/*') or not stripped:
                            continue
                        if include_pattern.match(stripped):
                            includes.append(stripped)
            except Exception as e:
                # Record the failure inline so it shows up in the report.
                includes.append(f"// Error reading file: {e}")

            includes_by_file[filepath] = includes

# Write results: one section per file, in sorted path order.
with open('includes2.txt', 'w', encoding='utf-8') as out:
    for path, includes in sorted(includes_by_file.items()):
        out.write(f"{path}:\n")
        for inc in includes:
            out.write(f" {inc}\n")
        out.write("\n")
def read_includes_from_file(filepath):
    """Collect the include directives of one source file.

    Unreadable files produce a single commented error entry instead of
    raising, so the scan over the tree can continue.
    """
    collected = []
    try:
        with open(filepath, 'r', encoding='utf-8', errors='ignore') as source:
            for raw_line in source:
                candidate = raw_line.strip()
                # Skip blanks and comment lines before matching.
                if not candidate or candidate.startswith(('//', '/*')):
                    continue
                if include_pattern.match(candidate):
                    collected.append(candidate)
    except FileNotFoundError:
        collected.append(f"// Error reading file: {filepath} not found. Confirm the path is valid.")
    except PermissionError as exc:
        collected.append(
            f"// Error reading file: permission denied ({exc}). Adjust permissions or run with access."
        )
    except OSError as exc:
        collected.append(f"// Error reading file: {exc}. Ensure the file is readable.")
    return collected


def collect_includes(allowed_dirs=ALLOWED_DIRS):
    """Map each source file under the allowed directories to its includes."""
    results = {}
    for root, dirs, files in os.walk('.'):
        if os.path.normpath(root) not in allowed_dirs:
            # Prune traversal: don't descend into subdirs of disallowed dirs.
            del dirs[:]
            continue

        for filename in files:
            if not filename.endswith(SOURCE_EXTENSIONS):
                continue
            filepath = os.path.join(root, filename)
            results[filepath] = read_includes_from_file(filepath)
    return results


def main():
    """Write the collected include report to includes2.txt."""
    report = collect_includes()

    with open('includes2.txt', 'w', encoding='utf-8') as out:
        # One section per file, in deterministic (sorted) path order.
        for path in sorted(report):
            section = [f"{path}:\n"]
            section.extend(f" {inc}\n" for inc in report[path])
            section.append("\n")
            out.writelines(section)


if __name__ == '__main__':
    main()
85 changes: 85 additions & 0 deletions tools/tests/test_error_handling.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
import io
import tempfile
import unittest
from contextlib import redirect_stdout
from pathlib import Path
from unittest.mock import patch

from tools import extract_entities, fix_encoding, header


class FixEncodingTests(unittest.TestCase):
    """Error-path coverage for fix_encoding.fix_encoding."""

    def test_reports_missing_encoding_for_empty_file(self):
        with tempfile.TemporaryDirectory() as tmp_dir:
            source = Path(tmp_dir) / "empty.txt"
            source.write_bytes(b"")
            destination = Path(tmp_dir) / "out.txt"

            captured = io.StringIO()
            with redirect_stdout(captured):
                fix_encoding.fix_encoding(str(source), str(destination))

            self.assertIn("Failed to detect encoding", captured.getvalue())
            self.assertFalse(destination.exists(), "No output should be written when encoding is missing")

    def test_reports_decode_errors(self):
        with tempfile.TemporaryDirectory() as tmp_dir:
            source = Path(tmp_dir) / "data.txt"
            destination = Path(tmp_dir) / "out.txt"

            captured = io.StringIO()
            # Force the decode path to fail with an unknown codec name.
            with patch("tools.fix_encoding.detect_encoding", return_value=("invalid-encoding", b"abc")):
                with redirect_stdout(captured):
                    fix_encoding.fix_encoding(str(source), str(destination))

            self.assertIn("Failed to decode", captured.getvalue())
            self.assertFalse(destination.exists(), "No output should be written when decoding fails")


class ExtractEntitiesTests(unittest.TestCase):
    """Error-path coverage for extract_entities.read_cpp_file_for_entities."""

    def test_handles_missing_cpp_file(self):
        with tempfile.TemporaryDirectory() as tmp_dir:
            absent = Path(tmp_dir) / "missing.cpp"

            captured = io.StringIO()
            with redirect_stdout(captured):
                result = extract_entities.read_cpp_file_for_entities(absent)

            self.assertEqual([], result)
            self.assertIn("file not found", captured.getvalue())

    def test_handles_permission_error(self):
        with tempfile.TemporaryDirectory() as tmp_dir:
            locked = Path(tmp_dir) / "protected.cpp"
            locked.write_text("// content")

            captured = io.StringIO()
            # Simulate an unreadable file regardless of real filesystem perms.
            with patch("builtins.open", side_effect=PermissionError("permission denied")):
                with redirect_stdout(captured):
                    result = extract_entities.read_cpp_file_for_entities(locked)

            self.assertEqual([], result)
            self.assertIn("permission denied", captured.getvalue())


class HeaderTests(unittest.TestCase):
    """Error-path coverage for header.read_includes_from_file."""

    def test_reports_missing_include_file(self):
        entries = header.read_includes_from_file("./nonexistent.cpp")

        self.assertEqual(1, len(entries))
        self.assertIn("not found", entries[0])

    def test_reports_unreadable_include_file(self):
        with tempfile.TemporaryDirectory() as tmp_dir:
            target = Path(tmp_dir) / "data.cpp"
            target.write_text("#include <iostream>\n")

            # Simulate an unreadable file regardless of real filesystem perms.
            with patch("builtins.open", side_effect=PermissionError("no access")):
                entries = header.read_includes_from_file(str(target))

            self.assertEqual(1, len(entries))
            self.assertIn("permission denied", entries[0])


if __name__ == "__main__":
    unittest.main()