Skip to content

Commit 68b76d0

Browse files
committed
feat: add lint checks
Signed-off-by: Eloy Coto <eloy.coto@acalustra.com>
1 parent 9d5111b commit 68b76d0

File tree

10 files changed

+122
-70
lines changed

10 files changed

+122
-70
lines changed

.github/workflows/ci.yml

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
name: CI
2+
3+
on:
4+
push:
5+
branches: [ master, test ]
6+
pull_request:
7+
branches: [ master ]
8+
9+
jobs:
10+
lint-and-test:
11+
runs-on: ubuntu-latest
12+
strategy:
13+
matrix:
14+
python-version: ["3.13"]
15+
16+
steps:
17+
- uses: actions/checkout@v5
18+
19+
- name: Install uv
20+
uses: astral-sh/setup-uv@v3
21+
with:
22+
version: "latest"
23+
24+
- name: Install dependencies
25+
run: |
26+
uv sync --dev
27+
28+
- name: Lint with ruff
29+
run: |
30+
uv run ruff check .
31+
uv run ruff format --check .

Makefile

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@
22

33
help:
44
@echo "Available targets:"
5+
@echo " check - Run ruff check for lint issue"
6+
@echo " format - Run ruff format"
57
@echo " mcp-run - Run MCP server locally with uv"
68
@echo " run-servers - Run llamastack and lightspeed in a single command for develop"
79

@@ -10,3 +12,10 @@ mcp-run:
1012

1113
run-servers:
1214
podman-compose up llama-stack lightspeed-stack
15+
16+
check:
17+
uv run ruff check . --fix
18+
19+
format:
20+
uv run ruff format
21+

mcp_server.py

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,24 +1,23 @@
1-
from fastmcp import FastMCP
21
import logging
2+
3+
from fastmcp import FastMCP
4+
35
from tools.orchestrator_service import orchestrator_mcp
4-
import tools.get_orchestrator_instances
5-
import tools.orchestrator_creation_workflow_rules
66

77
# Set up logging to see what's happening
8-
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
8+
logging.basicConfig(
9+
level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
10+
)
911
logger = logging.getLogger(__name__)
1012

11-
mcp = FastMCP(
12-
name="Current Date and Time", port=8000
13-
)
13+
mcp = FastMCP(name="Current Date and Time", port=8000)
1414

1515
# Mount the orchestrator service
1616
mcp.mount(orchestrator_mcp, prefix="orchestrator")
1717

1818

19-
2019
if __name__ == "__main__":
2120
mcp.run(
22-
transport="http",
23-
host="0.0.0.0",
24-
)
21+
transport="http",
22+
host="0.0.0.0",
23+
)

pyproject.toml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,3 +38,8 @@ ignore = []
3838
[tool.ruff.format]
3939
quote-style = "double"
4040
indent-style = "space"
41+
42+
[dependency-groups]
43+
dev = [
44+
"ruff>=0.12.9",
45+
]

serverless-workflow/consolidate_schemas.py

Lines changed: 49 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -7,9 +7,8 @@
77

88
import json
99
import os
10-
import re
11-
from typing import Dict, Any, Set
1210
from pathlib import Path
11+
from typing import Any, Dict, Set
1312

1413

1514
class SchemaConsolidator:
@@ -24,7 +23,7 @@ def load_schema(self, filename: str) -> Dict[str, Any]:
2423
if filename not in self.schemas:
2524
file_path = self.schema_dir / filename
2625
try:
27-
with open(file_path, 'r') as f:
26+
with open(file_path, "r") as f:
2827
self.schemas[filename] = json.load(f)
2827
print(f"Loaded schema: {filename}")
3029
except FileNotFoundError:
@@ -34,26 +33,30 @@ def load_schema(self, filename: str) -> Dict[str, Any]:
3433

3534
def extract_ref_filename(self, ref: str) -> str:
3635
"""Extract filename from $ref string."""
37-
# Handle refs like "secrets.json#/secrets" or "common.json#/definitions/metadata"
38-
if '#' in ref:
39-
return ref.split('#')[0]
36+
# Handle refs like "secrets.json#/secrets" or
37+
# "common.json#/definitions/metadata"
38+
if "#" in ref:
39+
return ref.split("#")[0]
4040
return ref
4141

4242
def extract_ref_path(self, ref: str) -> str:
4343
"""Extract JSON path from $ref string."""
44-
# Handle refs like "secrets.json#/secrets" or "common.json#/definitions/metadata"
45-
if '#' in ref:
46-
return ref.split('#')[1]
44+
# Handle refs like "secrets.json#/secrets" or
45+
# "common.json#/definitions/metadata"
46+
if "#" in ref:
47+
return ref.split("#")[1]
4748
return ""
4849

49-
def resolve_external_refs(self, schema: Dict[str, Any], current_file: str = "") -> Dict[str, Any]:
50+
def resolve_external_refs(
51+
self, schema: Dict[str, Any], current_file: str = ""
52+
) -> Dict[str, Any]:
5053
"""Recursively resolve external $ref references."""
5154
if isinstance(schema, dict):
52-
if '$ref' in schema:
53-
ref = schema['$ref']
55+
if "$ref" in schema:
56+
ref = schema["$ref"]
5457

5558
# Skip internal references (starting with #)
56-
if ref.startswith('#'):
59+
if ref.startswith("#"):
5760
return schema
5861

5962
# Extract filename and path
@@ -68,12 +71,15 @@ def resolve_external_refs(self, schema: Dict[str, Any], current_file: str = "")
6871
# Navigate to the specific path in the referenced schema
6972
target = ref_schema
7073
if ref_path:
71-
path_parts = ref_path.strip('/').split('/')
74+
path_parts = ref_path.strip("/").split("/")
7275
for part in path_parts:
7376
if part in target:
7477
target = target[part]
7578
else:
76-
print(f"Warning: Path {ref_path} not found in {ref_file}")
79+
print(
80+
f"Warning: Path {ref_path} not found in "
81+
f"{ref_file}"
82+
)
7783
return schema
7884

7985
# Recursively resolve refs in the target
@@ -100,25 +106,31 @@ def resolve_external_refs(self, schema: Dict[str, Any], current_file: str = "")
100106

101107
def process_schema_definitions(self, schema: Dict[str, Any], filename: str):
102108
"""Extract and merge definitions from a schema into consolidated definitions."""
103-
if 'definitions' in schema:
104-
for def_name, def_value in schema['definitions'].items():
109+
if "definitions" in schema:
110+
for def_name, def_value in schema["definitions"].items():
105111
# Create unique key to avoid conflicts
106112
unique_key = f"{filename.replace('.json', '')}_{def_name}"
107-
self.consolidated_definitions[unique_key] = self.resolve_external_refs(def_value, filename)
113+
self.consolidated_definitions[unique_key] = self.resolve_external_refs(
114+
def_value, filename
115+
)
108116
print(f"Added definition: {unique_key}")
109117

110-
def update_internal_refs(self, schema: Dict[str, Any], filename: str) -> Dict[str, Any]:
118+
def update_internal_refs(
119+
self, schema: Dict[str, Any], filename: str
120+
) -> Dict[str, Any]:
111121
"""Update internal references to point to consolidated definitions."""
112122
if isinstance(schema, dict):
113-
if '$ref' in schema:
114-
ref = schema['$ref']
123+
if "$ref" in schema:
124+
ref = schema["$ref"]
115125

116126
# Handle internal references
117-
if ref.startswith('#/definitions/'):
118-
def_name = ref.replace('#/definitions/', '')
127+
if ref.startswith("#/definitions/"):
128+
def_name = ref.replace("#/definitions/", "")
119129
# Update to use the prefixed definition name
120-
new_ref = f"#/definitions/{filename.replace('.json', '')}_{def_name}"
121-
return {'$ref': new_ref}
130+
new_ref = (
131+
f"#/definitions/{filename.replace('.json', '')}_{def_name}"
132+
)
133+
return {"$ref": new_ref}
122134

123135
return schema
124136
else:
@@ -146,7 +158,7 @@ def consolidate(self, main_schema_file: str = "workflow.json") -> Dict[str, Any]
146158
# First pass: collect all definitions from all referenced schemas
147159
print("\n--- Collecting definitions from all schemas ---")
148160
for filename in os.listdir(self.schema_dir):
149-
if filename.endswith('.json'):
161+
if filename.endswith(".json"):
150162
schema = self.load_schema(filename)
151163
self.process_schema_definitions(schema, filename)
152164

@@ -159,20 +171,22 @@ def consolidate(self, main_schema_file: str = "workflow.json") -> Dict[str, Any]
159171
final_schema = self.update_internal_refs(resolved_main, main_schema_file)
160172

161173
# Add all consolidated definitions
162-
if 'definitions' not in final_schema:
163-
final_schema['definitions'] = {}
174+
if "definitions" not in final_schema:
175+
final_schema["definitions"] = {}
164176

165-
final_schema['definitions'].update(self.consolidated_definitions)
177+
final_schema["definitions"].update(self.consolidated_definitions)
166178

167-
print(f"\n--- Consolidation complete ---")
179+
print("\n--- Consolidation complete ---")
168180
print(f"Total definitions consolidated: {len(self.consolidated_definitions)}")
169181

170182
return final_schema
171183

172-
def save_consolidated_schema(self, consolidated_schema: Dict[str, Any], output_file: str):
184+
def save_consolidated_schema(
185+
self, consolidated_schema: Dict[str, Any], output_file: str
186+
):
173187
"""Save the consolidated schema to a file."""
174188
output_path = output_file
175-
with open(output_path, 'w') as f:
189+
with open(output_path, "w") as f:
176190
json.dump(consolidated_schema, f, indent=2)
177191
print(f"Consolidated schema saved to: {output_path}")
178192

@@ -191,7 +205,9 @@ def main():
191205
consolidated = consolidator.consolidate("workflow.json")
192206

193207
# Save the result
194-
consolidator.save_consolidated_schema(consolidated, "consolidated_workflow_schema.json")
208+
consolidator.save_consolidated_schema(
209+
consolidated, "consolidated_workflow_schema.json"
210+
)
195211
print("\nConsolidation completed successfully!")
196212
except Exception as e:
197213
print(f"Error during consolidation: {e}")

tools/__init__.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
from .orchestrator_creation_workflow_rules import creation_workflow_rules
2+
3+
__all__ = [creation_workflow_rules]

tools/get_orchestrator_instances.py

Lines changed: 0 additions & 21 deletions
This file was deleted.

tools/orchestrator_creation_workflow_rules.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
1-
from .orchestrator_service import orchestrator_mcp
21
import logging
3-
from typing import Dict, List, Any
2+
3+
from .orchestrator_service import orchestrator_mcp
44

55
logger = logging.getLogger(__name__)
66

@@ -299,7 +299,8 @@
299299
- Use `timeouts.eventTimeout` for event waiting limits (ISO 8601 duration format)
300300
- Consider error handling for event timeout scenarios
301301
- Events can carry data accessible in subsequent states via `.eventData`
302-
"""
302+
""" # noqa: E501
303+
303304

304305
@orchestrator_mcp.tool()
305306
def creation_workflow_rules(session_id: str) -> str:

tools/orchestrator_service.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
1-
from fastmcp import FastMCP
21
import logging
32

3+
from fastmcp import FastMCP
4+
45
logger = logging.getLogger(__name__)
56

6-
orchestrator_mcp = FastMCP("Orchestrator Service")
7+
orchestrator_mcp = FastMCP("Orchestrator Service")

uv.lock

Lines changed: 8 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)