Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
86 changes: 84 additions & 2 deletions .github/workflows/repo-architect.yml
Original file line number Diff line number Diff line change
Expand Up @@ -77,14 +77,96 @@ jobs:
run: |
mkdir -p .agent docs/repo_architect

- name: Resolve GitHub Models configuration
env:
GITHUB_TOKEN: ${{ github.token }}
run: |
python - <<'PY'
# Query the GitHub Models catalog and export the preferred/fallback model ids
# to later workflow steps via GITHUB_ENV.  Best-effort: falls back to
# hard-coded defaults when the catalog is unreachable or unparseable.
import json
import os
import urllib.request

# Preference order for the primary model; `secondary` is the favored fallback.
order = [
    "anthropic/claude-sonnet-4.6",
    "anthropic/claude-sonnet-4.5",
    "openai/gpt-4.1",
]
secondary = "google/gemini-3-pro"
available = set()   # model ids advertised by the catalog
catalog_ok = False  # True only once the catalog returned a parseable list
try:
    req = urllib.request.Request(
        "https://models.github.ai/catalog/models",
        headers={
            "Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}",
            "Accept": "application/json",
            "User-Agent": "repo-architect-workflow",
        },
    )
    with urllib.request.urlopen(req, timeout=30) as resp:
        payload = json.loads(resp.read().decode("utf-8"))
    # Tolerate either a bare list or an envelope like {"data": [...]}.
    models = payload.get("data", payload) if isinstance(payload, dict) else payload
    if isinstance(models, list):
        catalog_ok = True
        for item in models:
            if isinstance(item, dict):
                # Tolerate schema drift: accept "id", "name", or "model" keys.
                model_id = item.get("id") or item.get("name") or item.get("model")
                if isinstance(model_id, str) and model_id:
                    available.add(model_id)
except Exception as exc:
    # Network/auth/JSON failures are non-fatal; defaults are used instead.
    print(f"warning: GitHub Models catalog lookup failed; using defaults ({exc})")

def first_available(candidates):
    # First candidate present in the catalog, in candidate order; else None.
    for candidate in candidates:
        if candidate in available:
            return candidate
    return None

def deterministic_available(exclude=None):
    # Alphabetically-first catalog model (optionally excluding one); else None.
    candidates = sorted(m for m in available if m != exclude)
    return candidates[0] if candidates else None

if catalog_ok and available:
    # Primary: preference order, then the secondary, then any catalog model.
    preferred = (
        first_available(order)
        or (secondary if secondary in available else None)
        or deterministic_available()
    )
else:
    # Catalog unavailable: assume the top-priority model exists.
    preferred = order[0]

if catalog_ok and available:
    if secondary in available and secondary != preferred:
        fallback = secondary
    else:
        # Fallback must differ from preferred when any alternative exists;
        # a single-model catalog degenerates to fallback == preferred.
        fallback = (
            first_available([c for c in order if c != preferred])
            or deterministic_available(exclude=preferred)
            or preferred
        )
else:
    fallback = secondary

# Defensive normalisation: guarantee non-empty strings before exporting.
if not isinstance(preferred, str) or not preferred:
    preferred = order[0]
if not isinstance(fallback, str) or not fallback:
    fallback = secondary if secondary != preferred else order[-1]

# Export via the GITHUB_ENV environment file so subsequent steps see the ids.
env_file = os.environ.get("GITHUB_ENV")
if not env_file:
    raise RuntimeError("GITHUB_ENV is not set; this internal workflow step must run inside GitHub Actions with environment-file support.")
with open(env_file, "a", encoding="utf-8") as fh:
    fh.write(f"REPO_ARCHITECT_PREFERRED_MODEL={preferred}\n")
    fh.write(f"REPO_ARCHITECT_FALLBACK_MODEL={fallback}\n")
print(f"selected preferred={preferred} fallback={fallback}")
PY

- name: Run repo architect
env:
GITHUB_TOKEN: ${{ github.token }}
GITHUB_REPO: ${{ github.repository }}
GITHUB_BASE_BRANCH: ${{ github.event.repository.default_branch }}
REPO_ARCHITECT_BRANCH_SUFFIX: ${{ github.run_id }}-${{ github.run_attempt }}
# NOTE(review): removed the hard-coded REPO_ARCHITECT_*_MODEL entries that
# were here (openai/gpt-5.4 / openai/gpt-4.1).  Step-level `env:` takes
# precedence over variables written to GITHUB_ENV, so they silently discarded
# the output of the "Resolve GitHub Models configuration" step above and
# disagreed with the new defaults in repo_architect.py.
run: |
MODE="${{ github.event.inputs.mode }}"
MODEL="${{ github.event.inputs.github_model }}"
Expand Down
98 changes: 90 additions & 8 deletions repo_architect.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,8 @@
}

# Model selection defaults
DEFAULT_PREFERRED_MODEL = "openai/gpt-5.4"
DEFAULT_FALLBACK_MODEL = "openai/gpt-4.1"
DEFAULT_PREFERRED_MODEL = "anthropic/claude-sonnet-4.6"
DEFAULT_FALLBACK_MODEL = "google/gemini-3-pro"
# Substrings in HTTP error bodies that indicate the model itself is unavailable (not a transient error)
_MODEL_UNAVAILABLE_SIGNALS = frozenset({
"unknown_model", "model_not_found", "unsupported_model", "unsupported model",
Expand Down Expand Up @@ -697,7 +697,7 @@ def build_analysis(root: pathlib.Path) -> Dict[str, Any]:
# -----------------------------

def enrich_with_github_models(config: Config, analysis: Dict[str, Any]) -> Dict[str, Any]:
preferred = config.preferred_model or config.github_model
preferred = config.github_model or config.preferred_model
fallback = config.fallback_model
meta: Dict[str, Any] = {
"enabled": False,
Expand Down Expand Up @@ -975,7 +975,7 @@ def build_parse_errors_plan(config: Config, analysis: Dict[str, Any]) -> Optiona
errors = analysis.get("parse_error_files", [])
if not errors:
return None
preferred = config.preferred_model or config.github_model
preferred = config.github_model or config.preferred_model
if not config.github_token or not preferred:
return None
fallback = config.fallback_model
Expand Down Expand Up @@ -1042,7 +1042,7 @@ def build_import_cycles_plan(config: Config, analysis: Dict[str, Any]) -> Option
cycles = analysis.get("cycles", [])
if not cycles:
return None
preferred = config.preferred_model or config.github_model
preferred = config.github_model or config.preferred_model
if not config.github_token or not preferred:
return None
fallback = config.fallback_model
Expand Down Expand Up @@ -1128,7 +1128,7 @@ def build_entrypoint_consolidation_plan(config: Config, analysis: Dict[str, Any]
backend_eps = clusters.get("backend_servers", [])
if len(backend_eps) < _ENTRYPOINT_CONSOLIDATION_THRESHOLD:
return None
preferred = config.preferred_model or config.github_model
preferred = config.github_model or config.preferred_model
if not config.github_token or not preferred:
return None
fallback = config.fallback_model
Expand Down Expand Up @@ -1482,14 +1482,96 @@ def workflow_yaml(secret_env_names: Sequence[str], cron: str, github_model: Opti
run: |
mkdir -p .agent docs/repo_architect

- name: Resolve GitHub Models configuration
env:
GITHUB_TOKEN: ${{{{ github.token }}}}
run: |
python - <<'PY'
import json
import os
import urllib.request

order = [
"anthropic/claude-sonnet-4.6",
"anthropic/claude-sonnet-4.5",
"openai/gpt-4.1",
]
secondary = "google/gemini-3-pro"
available = set()
catalog_ok = False
try:
req = urllib.request.Request(
"https://models.github.ai/catalog/models",
headers={{
"Authorization": f"Bearer {{os.environ['GITHUB_TOKEN']}}",
"Accept": "application/json",
"User-Agent": "repo-architect-workflow",
}},
)
with urllib.request.urlopen(req, timeout=30) as resp:
payload = json.loads(resp.read().decode("utf-8"))
models = payload.get("data", payload) if isinstance(payload, dict) else payload
if isinstance(models, list):
catalog_ok = True
for item in models:
if isinstance(item, dict):
model_id = item.get("id") or item.get("name") or item.get("model")
if isinstance(model_id, str) and model_id:
available.add(model_id)
except Exception as exc:
print(f"warning: GitHub Models catalog lookup failed; using defaults ({{exc}})")

def first_available(candidates):
for candidate in candidates:
if candidate in available:
return candidate
return None

def deterministic_available(exclude=None):
candidates = sorted(m for m in available if m != exclude)
return candidates[0] if candidates else None

if catalog_ok and available:
preferred = (
first_available(order)
or (secondary if secondary in available else None)
or deterministic_available()
)
else:
preferred = order[0]

if catalog_ok and available:
if secondary in available and secondary != preferred:
fallback = secondary
else:
fallback = (
first_available([c for c in order if c != preferred])
or deterministic_available(exclude=preferred)
or preferred
)
else:
fallback = secondary

if not isinstance(preferred, str) or not preferred:
preferred = order[0]
if not isinstance(fallback, str) or not fallback:
fallback = secondary if secondary != preferred else order[-1]

env_file = os.environ.get("GITHUB_ENV")
if not env_file:
raise RuntimeError("GITHUB_ENV is not set; this internal workflow step must run inside GitHub Actions with environment-file support.")
with open(env_file, "a", encoding="utf-8") as fh:
fh.write(f"REPO_ARCHITECT_PREFERRED_MODEL={{preferred}}\\n")
fh.write(f"REPO_ARCHITECT_FALLBACK_MODEL={{fallback}}\\n")
print(f"selected preferred={{preferred}} fallback={{fallback}}")
PY

- name: Run repo architect
env:
GITHUB_TOKEN: ${{{{ github.token }}}}
GITHUB_REPO: ${{{{ github.repository }}}}
GITHUB_BASE_BRANCH: ${{{{ github.event.repository.default_branch }}}}
REPO_ARCHITECT_BRANCH_SUFFIX: ${{{{ github.run_id }}}}-${{{{ github.run_attempt }}}}
# NOTE(review): hard-coded REPO_ARCHITECT_*_MODEL env entries removed from the
# generated workflow — step-level `env:` overrides GITHUB_ENV, which made the
# generated "Resolve GitHub Models configuration" step's output dead on arrival.
{extra_env} run: |
MODE="${{{{ github.event.inputs.mode }}}}"
MODEL="${{{{ github.event.inputs.github_model }}}}"
Expand Down
69 changes: 67 additions & 2 deletions tests/test_repo_architect.py
Original file line number Diff line number Diff line change
Expand Up @@ -250,7 +250,72 @@ def side_effect(token: str, model: str, messages: list) -> dict:


# ---------------------------------------------------------------------------
# 3. Syntax validation of generated Python (ast.parse gate)
# 3. Model configuration behaviour
# ---------------------------------------------------------------------------

class TestModelConfiguration(unittest.TestCase):
    """Model selection: env-derived defaults and explicit override precedence."""

    def test_build_config_uses_env_models_when_github_model_blank(self) -> None:
        # With GITHUB_MODEL unset, build_config should pick up the
        # REPO_ARCHITECT_* variables written by the workflow's resolve step.
        env = dict(os.environ)
        env.pop("GITHUB_MODEL", None)
        env["REPO_ARCHITECT_PREFERRED_MODEL"] = "anthropic/claude-sonnet-4.6"
        env["REPO_ARCHITECT_FALLBACK_MODEL"] = "google/gemini-3-pro"
        # Patch git-root discovery so no real repository is required.
        with patch.object(ra, "discover_git_root", return_value=pathlib.Path("/tmp/repo")):
            with patch.dict(os.environ, env, clear=True):
                config = ra.build_config(ra.parse_args([]))
        self.assertIsNone(config.github_model)
        self.assertEqual(config.preferred_model, "anthropic/claude-sonnet-4.6")
        self.assertEqual(config.fallback_model, "google/gemini-3-pro")

    def test_github_model_override_takes_precedence_over_preferred(self) -> None:
        # An explicit github_model must win over the env-resolved preferred
        # model when enrich_with_github_models selects what to call.
        analysis = {
            "architecture_score": 0.8,
            "cycles": [],
            "parse_error_files": [],
            "entrypoint_paths": [],
            "roadmap": [],
        }
        response = {"choices": [{"message": {"content": "ok"}}], "model": "openai/manual-override"}
        with tempfile.TemporaryDirectory() as tmp:
            root = _make_git_root(tmp)
            config = _make_config(
                root,
                github_token="tok",
                github_model="openai/manual-override",
                preferred_model="anthropic/claude-sonnet-4.6",
                fallback_model="google/gemini-3-pro",
            )
            # Stub the model call so no network traffic occurs.
            with patch.object(
                ra,
                "call_models_with_fallback_or_none",
                return_value=(response, "openai/manual-override", None, False),
            ) as mocked_call:
                meta = ra.enrich_with_github_models(config, analysis)
            self.assertEqual(mocked_call.call_args.args[1], "openai/manual-override")
            self.assertEqual(mocked_call.call_args.args[2], "google/gemini-3-pro")
            self.assertEqual(meta["requested_model"], "openai/manual-override")
            self.assertEqual(meta["actual_model"], "openai/manual-override")

    def test_workflow_yaml_resolves_models_via_catalog_and_keeps_blank_override_logic(self) -> None:
        # The generated workflow must contain the catalog-resolution step,
        # the model preference order, and the blank-override shell logic.
        workflow = ra.workflow_yaml([], "17 * * * *", None)
        self.assertIn("Resolve GitHub Models configuration", workflow)
        self.assertIn("https://models.github.ai/catalog/models", workflow)
        self.assertIn("catalog_ok = False", workflow)
        self.assertIn('"anthropic/claude-sonnet-4.6"', workflow)
        self.assertIn('"anthropic/claude-sonnet-4.5"', workflow)
        self.assertIn('"openai/gpt-4.1"', workflow)
        self.assertIn('secondary = "google/gemini-3-pro"', workflow)
        self.assertIn("def deterministic_available(exclude=None):", workflow)
        self.assertIn("or deterministic_available()", workflow)
        self.assertIn("or deterministic_available(exclude=preferred)", workflow)
        self.assertIn("or preferred", workflow)
        self.assertIn("REPO_ARCHITECT_PREFERRED_MODEL={preferred}", workflow)
        self.assertIn("REPO_ARCHITECT_FALLBACK_MODEL={fallback}", workflow)
        self.assertIn('if [ -n "$MODEL" ]; then EXTRA_ARGS="$EXTRA_ARGS --github-model $MODEL"; fi', workflow)
        self.assertIn("models: read", workflow)


# ---------------------------------------------------------------------------
# 4. Syntax validation of generated Python (ast.parse gate)
# ---------------------------------------------------------------------------

class TestSyntaxValidationGate(unittest.TestCase):
Expand Down Expand Up @@ -313,7 +378,7 @@ def test_build_parse_errors_plan_accepts_valid_fix(self) -> None:


# ---------------------------------------------------------------------------
# 4. Campaign aggregation behaviour
# 5. Campaign aggregation behaviour
# ---------------------------------------------------------------------------

class TestCampaignAggregation(unittest.TestCase):
Expand Down