Skip to content

Commit 5bee9ee

Browse files
docxology and claude committed
style: apply black formatting and isort import sorting across codebase
Fixes quality-checks CI job by ensuring all Python files in src/, tests/, scripts/, and examples/ pass black --check and isort --check-only. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1 parent 23283dd commit 5bee9ee

File tree

499 files changed

+7665
-7643
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

499 files changed

+7665
-7643
lines changed

examples/core/example_config.py

Lines changed: 8 additions & 8 deletions
Original file line number · Diff line number · Diff line change
@@ -13,8 +13,10 @@
1313
from __future__ import annotations
1414

1515
from pathlib import Path
16+
1617
from metainformant.core import io, paths
17-
from metainformant.core.config import load_mapping_from_file, apply_env_overrides
18+
from metainformant.core.config import apply_env_overrides, load_mapping_from_file
19+
1820

1921
def main():
2022
"""Demonstrate configuration loading with environment overrides."""
@@ -29,16 +31,13 @@ def main():
2931
"log_dir": "output/example_workflow/logs",
3032
"max_memory_gb": 8,
3133
"debug": False,
32-
"analysis": {
33-
"method": "fast",
34-
"quality_threshold": 0.95,
35-
"save_intermediates": True
36-
}
34+
"analysis": {"method": "fast", "quality_threshold": 0.95, "save_intermediates": True},
3735
}
3836

3937
# Save sample config
4038
config_file = output_dir / "sample_config.yaml"
4139
import yaml
40+
4241
with open(config_file, "w") as f:
4342
yaml.dump(sample_config, f, default_flow_style=False)
4443

@@ -113,8 +112,8 @@ def main():
113112
"environment_variables_used": {
114113
"CORE_THREADS": "int (number of threads)",
115114
"CORE_WORK_DIR": "str (working directory path)",
116-
"CORE_LOG_DIR": "str (logging directory path)"
117-
}
115+
"CORE_LOG_DIR": "str (logging directory path)",
116+
},
118117
}
119118

120119
io.dump_json(result_data, result_file)
@@ -125,5 +124,6 @@ def main():
125124
print("\n=== Configuration Example Complete ===")
126125
print("Try setting environment variables and re-running to see overrides in action!")
127126

127+
128128
if __name__ == "__main__":
129129
main()

examples/core/example_io.py

Lines changed: 16 additions & 23 deletions
Original file line number · Diff line number · Diff line change
@@ -14,8 +14,10 @@
1414

1515
import csv
1616
from pathlib import Path
17+
1718
from metainformant.core import io
1819

20+
1921
def main():
2022
"""Demonstrate file I/O operations."""
2123
# Setup output directory
@@ -26,21 +28,13 @@ def main():
2628

2729
# Sample data for demonstration
2830
sample_data = {
29-
"analysis": {
30-
"type": "dna_sequence_analysis",
31-
"timestamp": "2024-12-26T10:00:00Z",
32-
"version": "1.0"
33-
},
31+
"analysis": {"type": "dna_sequence_analysis", "timestamp": "2024-12-26T10:00:00Z", "version": "1.0"},
3432
"sequences": [
3533
{"id": "seq1", "sequence": "ATCGATCG", "length": 8, "gc_content": 0.5},
3634
{"id": "seq2", "sequence": "GCTAGCTA", "length": 8, "gc_content": 0.5},
37-
{"id": "seq3", "sequence": "TTTTAAAA", "length": 8, "gc_content": 0.0}
35+
{"id": "seq3", "sequence": "TTTTAAAA", "length": 8, "gc_content": 0.0},
3836
],
39-
"statistics": {
40-
"total_sequences": 3,
41-
"average_length": 8.0,
42-
"average_gc": 0.333
43-
}
37+
"statistics": {"total_sequences": 3, "average_length": 8.0, "average_gc": 0.333},
4438
}
4539

4640
# 1. JSON file operations
@@ -71,12 +65,9 @@ def main():
7165
# Convert sequence data to CSV format
7266
csv_data = []
7367
for seq in sample_data["sequences"]:
74-
csv_data.append({
75-
"id": seq["id"],
76-
"sequence": seq["sequence"],
77-
"length": seq["length"],
78-
"gc_content": seq["gc_content"]
79-
})
68+
csv_data.append(
69+
{"id": seq["id"], "sequence": seq["sequence"], "length": seq["length"], "gc_content": seq["gc_content"]}
70+
)
8071

8172
csv_file = output_dir / "io_example.csv"
8273
io.write_csv(csv_data, csv_file)
@@ -94,7 +85,7 @@ def main():
9485
{"step": 1, "operation": "load_data", "status": "success", "duration_ms": 150},
9586
{"step": 2, "operation": "validate_data", "status": "success", "duration_ms": 50},
9687
{"step": 3, "operation": "analyze_sequences", "status": "success", "duration_ms": 500},
97-
{"step": 4, "operation": "generate_report", "status": "success", "duration_ms": 200}
88+
{"step": 4, "operation": "generate_report", "status": "success", "duration_ms": 200},
9889
]
9990

10091
jsonl_file = output_dir / "io_example.jsonl"
@@ -127,12 +118,13 @@ def main():
127118
print("\n7. File size comparison...")
128119

129120
import os
121+
130122
sizes = {
131123
"JSON": os.path.getsize(json_file),
132124
"JSON.gz": os.path.getsize(json_gz_file),
133125
"CSV": os.path.getsize(csv_file),
134126
"JSONL": os.path.getsize(jsonl_file),
135-
"JSONL.gz": os.path.getsize(jsonl_gz_file)
127+
"JSONL.gz": os.path.getsize(jsonl_gz_file),
136128
}
137129

138130
print("File sizes (bytes):")
@@ -153,7 +145,7 @@ def main():
153145
"json_gz": str(json_gz_file.relative_to(output_dir)),
154146
"csv": str(csv_file.relative_to(output_dir)),
155147
"jsonl": str(jsonl_file.relative_to(output_dir)),
156-
"jsonl_gz": str(jsonl_gz_file.relative_to(output_dir))
148+
"jsonl_gz": str(jsonl_gz_file.relative_to(output_dir)),
157149
},
158150
"file_sizes_bytes": sizes,
159151
"compression_ratio": compression_ratio,
@@ -163,13 +155,13 @@ def main():
163155
"CSV data handling",
164156
"JSONL (JSON Lines) format",
165157
"Atomic write operations",
166-
"Error handling and validation"
158+
"Error handling and validation",
167159
],
168160
"data_summary": {
169161
"sequences_processed": len(sample_data["sequences"]),
170162
"workflow_steps": len(jsonl_data),
171-
"total_data_points": sum(len(seq["sequence"]) for seq in sample_data["sequences"])
172-
}
163+
"total_data_points": sum(len(seq["sequence"]) for seq in sample_data["sequences"]),
164+
},
173165
}
174166
}
175167

@@ -185,5 +177,6 @@ def main():
185177
print("- Use .gz extension for automatic compression")
186178
print("- All writes are atomic to prevent corruption")
187179

180+
188181
if __name__ == "__main__":
189182
main()

examples/core/example_logging.py

Lines changed: 13 additions & 13 deletions
Original file line number · Diff line number · Diff line change
@@ -14,8 +14,10 @@
1414

1515
import time
1616
from pathlib import Path
17+
1718
from metainformant.core import logging
1819

20+
1921
def main():
2022
"""Demonstrate logging functionality."""
2123
# Setup output directory
@@ -66,7 +68,7 @@ def simulate_analysis_step(step_name: str, duration: float):
6668
("data_validation", 0.05),
6769
("sequence_analysis", 0.2),
6870
("statistics_calculation", 0.15),
69-
("report_generation", 0.1)
71+
("report_generation", 0.1),
7072
]
7173

7274
total_time = 0
@@ -86,15 +88,11 @@ def simulate_analysis_step(step_name: str, duration: float):
8688
"average_length": 450,
8789
"gc_content_range": [0.35, 0.65],
8890
"quality_threshold": 0.95,
89-
"timestamp": "2024-12-26T10:00:00Z"
91+
"timestamp": "2024-12-26T10:00:00Z",
9092
}
9193

9294
logging.log_with_metadata(
93-
file_logger,
94-
"Analysis completed successfully",
95-
analysis_metadata,
96-
level="INFO",
97-
structured=True
95+
file_logger, "Analysis completed successfully", analysis_metadata, level="INFO", structured=True
9896
)
9997

10098
# 6. Environment-based configuration
@@ -135,7 +133,7 @@ def load_sequences():
135133
return ["ATCG" * 25] * 100 # Simulate loading 100 sequences
136134

137135
def calculate_gc_content(sequences):
138-
return sum(seq.count('G') + seq.count('C') for seq in sequences) / sum(len(seq) for seq in sequences)
136+
return sum(seq.count("G") + seq.count("C") for seq in sequences) / sum(len(seq) for seq in sequences)
139137

140138
def find_motifs(sequences, motif="ATCG"):
141139
return sum(seq.count(motif) for seq in sequences)
@@ -152,7 +150,7 @@ def find_motifs(sequences, motif="ATCG"):
152150
print("\n9. Creating logging summary...")
153151

154152
# Read the log file to show what was captured
155-
with open(log_file, 'r') as f:
153+
with open(log_file, "r") as f:
156154
log_lines = f.readlines()
157155

158156
summary = {
@@ -168,23 +166,24 @@ def find_motifs(sequences, motif="ATCG"):
168166
"Environment-based configuration",
169167
"Multiple component loggers",
170168
"Performance timing",
171-
"Workflow step tracking"
169+
"Workflow step tracking",
172170
],
173171
"workflow_simulation": {
174172
"steps_completed": len(workflow_steps),
175173
"total_workflow_time": total_time,
176174
"performance_operations": {
177175
"sequence_loading": load_time,
178176
"gc_content_calculation": gc_time,
179-
"motif_finding": motif_time
180-
}
177+
"motif_finding": motif_time,
178+
},
181179
},
182-
"sample_log_entries": log_lines[-5:] if len(log_lines) >= 5 else log_lines
180+
"sample_log_entries": log_lines[-5:] if len(log_lines) >= 5 else log_lines,
183181
}
184182
}
185183

186184
summary_file = output_dir / "logging_demo_summary.json"
187185
from metainformant.core import io
186+
188187
io.dump_json(summary, summary_file, indent=2)
189188

190189
print(f"✓ Logging summary saved to: {summary_file}")
@@ -199,5 +198,6 @@ def find_motifs(sequences, motif="ATCG"):
199198
print("- Use log_with_metadata() for structured data logging")
200199
print("- Configure logging level via CORE_LOG_LEVEL environment variable")
201200

201+
202202
if __name__ == "__main__":
203203
main()

0 commit comments

Comments (0)