Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 6 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,7 @@ Docker, ensure that you upload the PDF file to the OSA folder before building th
/app/OSA/... or just use volume mounting to access the file.

The --generate-workflows option is intended to create customizable CI/CD pipelines for Python repositories. For detailed
documentation, see the [Workflow Generator README](./osa_tool/workflow/README.md).
documentation, see the [Workflow Generator README](./osa_tool/operations/codebase/workflow_generation/README.md).

### Configuration

Expand All @@ -208,9 +208,12 @@ documentation, see the [Workflow Generator README](./osa_tool/workflow/README.md
| `--no-fork` | Avoid creating a fork of the target repository | `False` |
| `--no-pull-request` | Avoid creating a pull request for the target repository | `False` |

Also OSA supports custom configuration via TOML files. Use the `--config-file` option to specify a path to custom configuration file. If no custom configuration file is provided, OSA will use the default configuration.
OSA also supports custom configuration via TOML files. Use the `--config-file` option to specify the path to a custom
configuration file. If no custom configuration file is provided, OSA will use the default configuration.

By default, OSA uses a single model for all tasks (specified via `--model`). If you want to use different models for different types of tasks, disable the `--use-single-model` flag and specify models for each task type (`--model-docstring`, `--model-readme`, `--model-validation`, `--model-general`).
By default, OSA uses a single model for all tasks (specified via `--model`). If you want to use different models for
different types of tasks, disable the `--use-single-model` flag and specify models for each task type (
`--model-docstring`, `--model-readme`, `--model-validation`, `--model-general`).

To learn how to work with the interactive CLI and view descriptions of all available keys, visit
the [CLI usage guide](./osa_tool/scheduler/README.md).
Expand Down
5 changes: 4 additions & 1 deletion docs/core/operations/OPERATIONS.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,9 @@ This document is auto-generated. Do not edit manually.

| Name | Priority | Intents | Scopes | Args Schema | Executor | Method |
|------|----------|---------|--------|-------------|----------|--------|
| `generate_report` | 5 | new_task | full_repo, analysis | — | `ReportGenerator` | `build_pdf` |
| `generate_report` | 5 | new_task | full_repo, analysis | — | `ReportGenerator` | `run` |
| `validate_doc` | 10 | new_task | full_repo, analysis | — | `DocValidator` | `run` |
| `validate_paper` | 15 | new_task | full_repo, analysis | — | `PaperValidator` | `run` |
| `convert_notebooks` | 30 | new_task | full_repo, codebase | ConvertNotebooksArgs | `NotebookConverter` | `convert_notebooks` |
| `translate_dirs` | 40 | new_task | full_repo, codebase | — | `RepositoryStructureTranslator` | `rename_directories_and_files` |
| `generate_docstrings` | 50 | new_task, feedback | full_repo, codebase | GenerateDocstringsArgs | `DocstringsGenerator` | `run` |
Expand All @@ -16,4 +18,5 @@ This document is auto-generated. Do not edit manually.
| `generate_readme` | 70 | new_task, feedback | full_repo, docs | — | `ReadmeAgent` | `generate_readme` |
| `translate_readme` | 75 | new_task, feedback | full_repo, docs | TranslateReadmeArgs | `ReadmeTranslator` | `translate_readme` |
| `generate_about` | 80 | new_task | full_repo, docs | — | `AboutGenerator` | `generate_about_content` |
| `generate_workflows` | 85 | new_task, feedback | full_repo, codebase | GenerateWorkflowsArgs | `WorkflowsExecutor` | `generate` |
| `organize` | 90 | new_task | full_repo, codebase | — | `RepoOrganizer` | `organize` |
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,8 @@ def generate(self) -> dict:
pyproject_path = self.repo_path / "pyproject.toml"

old_context = self._get_existing_context(req_file_path, pyproject_path)
if old_context:
self._add_event(EventKind.ANALYZED, mode="existing-context")

# Scan with notebooks
try:
Expand Down
115 changes: 115 additions & 0 deletions osa_tool/operations/codebase/workflow_generation/workflow_executor.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
from typing import List

from osa_tool.config.settings import ConfigManager
from osa_tool.core.models.event import EventKind, OperationEvent
from osa_tool.scheduler.workflow_manager import WorkflowManager
from osa_tool.utils.logger import logger


class WorkflowsExecutor:
    """
    Executor for CI/CD workflow generation in the agentic pipeline.

    Bridges WorkflowManager to the Operation/Executor pattern,
    bypassing the legacy Plan-based config path.
    """

    def __init__(
        self,
        config_manager: ConfigManager,
        workflow_manager: WorkflowManager,
        include_black: bool = True,
        include_tests: bool = True,
        include_pep8: bool = True,
        include_autopep8: bool = False,
        include_fix_pep8: bool = False,
        include_pypi: bool = False,
        pep8_tool: str = "flake8",
        use_poetry: bool = False,
        include_codecov: bool = True,
        python_versions: List[str] | None = None,
        branches: List[str] | None = None,
    ):
        """
        Args:
            config_manager: Configuration manager the workflow settings are applied to.
            workflow_manager: Platform-specific workflow manager used for detection
                and generation.
            include_black: Generate the Black formatter job.
            include_tests: Generate the unit-tests job.
            include_pep8: Generate the PEP 8 check job.
            include_autopep8: Generate the autopep8 auto-fix job.
            include_fix_pep8: Generate the fix-pep8 slash-command job.
            include_pypi: Generate the PyPI publish job.
            pep8_tool: Tool used for PEP 8 checking (e.g. "flake8").
            use_poetry: Use Poetry for PyPI packaging.
            include_codecov: Include the Codecov coverage upload step.
            python_versions: Python versions to test against; defaults to
                ["3.9", "3.10"] when None.
            branches: Branches that trigger the workflows; defaults to
                ["main", "master"] when None.
        """
        self.config_manager = config_manager
        self.workflow_manager = workflow_manager
        # Events emitted during generation; returned to the pipeline by generate().
        self.events: list[OperationEvent] = []
        # Requested settings as passed by the caller; jobs that already exist in
        # the repository are filtered out later in _skip_existing_jobs().
        self._requested = {
            "generate_workflows": True,
            "include_black": include_black,
            "include_tests": include_tests,
            "include_pep8": include_pep8,
            "include_autopep8": include_autopep8,
            "include_fix_pep8": include_fix_pep8,
            "include_pypi": include_pypi,
            "pep8_tool": pep8_tool,
            "use_poetry": use_poetry,
            "include_codecov": include_codecov,
            "python_versions": python_versions or ["3.9", "3.10"],
            "branches": branches or ["main", "master"],
        }

    def generate(self) -> dict:
        """
        Generate CI/CD workflow files for the repository.

        Skips entirely when no Python code is detected, and skips individual
        jobs that already exist in the repository.

        Returns:
            Dict with a "result" payload ("generated" flag and, on an attempt,
            the effective "settings") and the list of emitted "events".
        """
        if not self.workflow_manager.has_python_code():
            logger.info("No Python code detected. Skipping workflow generation.")
            self.events.append(
                OperationEvent(
                    kind=EventKind.SKIPPED,
                    target="workflows",
                    data={"reason": "no_python_code"},
                )
            )
            return {"result": {"generated": False}, "events": self.events}

        logger.debug("Requested workflow settings: %s", self._requested)
        effective = self._skip_existing_jobs(self._requested)
        logger.debug("Effective workflow settings after filtering: %s", effective)
        # Push the effective settings into the config, then run generation.
        WorkflowManager.apply_workflow_settings(self.config_manager, effective)
        success = self.workflow_manager.generate_workflow(self.config_manager)

        if success:
            enabled = [k for k, v in effective.items() if k.startswith("include_") and v is True]
            logger.info("CI/CD workflow generation succeeded. Enabled jobs: %s", enabled)
            self.events.append(
                OperationEvent(
                    kind=EventKind.GENERATED,
                    target="workflows",
                    data={
                        "include_black": effective.get("include_black"),
                        "include_tests": effective.get("include_tests"),
                        "include_pep8": effective.get("include_pep8"),
                        "include_autopep8": effective.get("include_autopep8"),
                        "include_fix_pep8": effective.get("include_fix_pep8"),
                        "include_pypi": effective.get("include_pypi"),
                    },
                )
            )
        else:
            logger.error("CI/CD workflow generation failed. Check previous log messages for details.")
            self.events.append(
                OperationEvent(
                    kind=EventKind.FAILED,
                    target="workflows",
                    data={"reason": "generation_error"},
                )
            )

        return {"result": {"generated": success, "settings": effective}, "events": self.events}

    def _skip_existing_jobs(self, settings: dict) -> dict:
        """Disable generation for jobs that already exist in the repository.

        Args:
            settings: Requested workflow settings.

        Returns:
            A copy of ``settings`` with keys set to False for every job whose
            name is already present in the repository's existing workflows.
            Emits a SKIPPED event per disabled job.
        """
        result = dict(settings)
        for key, job_names in self.workflow_manager.job_name_for_key.items():
            if key not in result:
                continue
            # job_name_for_key values may be a single name or a list of names.
            names = [job_names] if isinstance(job_names, str) else job_names
            if any(job in self.workflow_manager.existing_jobs for job in names):
                result[key] = False
                logger.warning("Skipping '%s' workflow: job already exists in the repository.", key)
                self.events.append(
                    OperationEvent(
                        kind=EventKind.SKIPPED,
                        target=key,
                        data={"reason": "already_exists"},
                    )
                )
        return result
85 changes: 45 additions & 40 deletions osa_tool/operations/docs/community_docs_generation/docs_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,54 +25,59 @@ def generate_documentation(config_manager: ConfigManager, metadata: RepositoryMe
events: list[OperationEvent] = []
generated_files: list[str] = []
contributing = ContributingBuilder(config_manager, metadata)
contributing.build()
events.append(OperationEvent(kind=EventKind.GENERATED, target="CONTRIBUTING"))
generated_files.append("CONTRIBUTING.md")

community = CommunityTemplateBuilder(config_manager, metadata)
community.build_code_of_conduct()
events.append(OperationEvent(kind=EventKind.GENERATED, target="CODE_OF_CONDUCT"))
generated_files.append("CODE_OF_CONDUCT.md")

community.build_security()
events.append(OperationEvent(kind=EventKind.GENERATED, target="SECURITY"))
generated_files.append("SECURITY.md")
try:
contributing.build()
events.append(OperationEvent(kind=EventKind.GENERATED, target="CONTRIBUTING"))
generated_files.append("CONTRIBUTING.md")
except Exception as e:
logger.error("Failed to generate CONTRIBUTING: %s", repr(e), exc_info=True)
events.append(OperationEvent(kind=EventKind.FAILED, target="CONTRIBUTING", data={"error": repr(e)}))

if config_manager.get_git_settings().host in ["github", "gitlab"]:
community.build_pull_request()
community.build_bug_issue()
community.build_documentation_issue()
community.build_feature_issue()
try:
community.build_code_of_conduct()
events.append(OperationEvent(kind=EventKind.GENERATED, target="CODE_OF_CONDUCT"))
generated_files.append("CODE_OF_CONDUCT.md")
except Exception as e:
logger.error("Failed to generate CODE_OF_CONDUCT: %s", repr(e), exc_info=True)
events.append(OperationEvent(kind=EventKind.FAILED, target="CODE_OF_CONDUCT", data={"error": repr(e)}))

events.extend(
[
OperationEvent(kind=EventKind.GENERATED, target="PULL_REQUEST_TEMPLATE"),
OperationEvent(kind=EventKind.GENERATED, target="ISSUE_TEMPLATE:bug"),
OperationEvent(kind=EventKind.GENERATED, target="ISSUE_TEMPLATE:documentation"),
OperationEvent(kind=EventKind.GENERATED, target="ISSUE_TEMPLATE:feature"),
]
)
generated_files.extend(
[
"PULL_REQUEST_TEMPLATE.md",
"BUG_ISSUE.md",
"DOCUMENTATION_ISSUE.md",
"FEATURE_ISSUE.md",
]
)
try:
community.build_security()
events.append(OperationEvent(kind=EventKind.GENERATED, target="SECURITY"))
generated_files.append("SECURITY.md")
except Exception as e:
logger.error("Failed to generate SECURITY: %s", repr(e), exc_info=True)
events.append(OperationEvent(kind=EventKind.FAILED, target="SECURITY", data={"error": repr(e)}))

if config_manager.get_git_settings().host == "gitlab":
community.build_vulnerability_disclosure()
if config_manager.get_git_settings().host in ["github", "gitlab"]:
for method, target, filename in [
(community.build_pull_request, "PULL_REQUEST_TEMPLATE", "PULL_REQUEST_TEMPLATE.md"),
(community.build_bug_issue, "ISSUE_TEMPLATE:bug", "BUG_ISSUE.md"),
(community.build_documentation_issue, "ISSUE_TEMPLATE:documentation", "DOCUMENTATION_ISSUE.md"),
(community.build_feature_issue, "ISSUE_TEMPLATE:feature", "FEATURE_ISSUE.md"),
]:
try:
method()
events.append(OperationEvent(kind=EventKind.GENERATED, target=target))
generated_files.append(filename)
except Exception as e:
logger.error("Failed to generate %s: %s", target, repr(e), exc_info=True)
events.append(OperationEvent(kind=EventKind.FAILED, target=target, data={"error": repr(e)}))

events.append(
OperationEvent(
kind=EventKind.GENERATED,
target="VULNERABILITY_DISCLOSURE",
if config_manager.get_git_settings().host == "gitlab":
try:
community.build_vulnerability_disclosure()
events.append(OperationEvent(kind=EventKind.GENERATED, target="VULNERABILITY_DISCLOSURE"))
generated_files.append("Vulnerability_Disclosure.md")
except Exception as e:
logger.error("Failed to generate VULNERABILITY_DISCLOSURE: %s", repr(e), exc_info=True)
events.append(
OperationEvent(kind=EventKind.FAILED, target="VULNERABILITY_DISCLOSURE", data={"error": repr(e)})
)
)
generated_files.append("Vulnerability_Disclosure.md")

logger.info("All additional documentation successfully generated.")
logger.info("Additional documentation generation completed.")

return {
"result": {
Expand Down
37 changes: 37 additions & 0 deletions osa_tool/operations/operations_catalog.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from osa_tool.operations.codebase.notebook_conversion.notebook_converter import NotebookConverter
from osa_tool.operations.codebase.organization.repo_organizer import RepoOrganizer
from osa_tool.operations.codebase.requirements_generation.requirements_generation import RequirementsGenerator
from osa_tool.operations.codebase.workflow_generation.workflow_executor import WorkflowsExecutor
from osa_tool.operations.docs.about_generation.about_generator import AboutGenerator
from osa_tool.operations.docs.community_docs_generation.docs_run import generate_documentation
from osa_tool.operations.docs.community_docs_generation.license_generation import LicenseCompiler
Expand Down Expand Up @@ -229,6 +230,42 @@ class GenerateAboutOperation(Operation):
executor_dependencies = ["config_manager", "git_agent"]


class GenerateWorkflowsArgs(BaseModel):
    """Arguments schema for the `generate_workflows` operation.

    Each ``include_*`` flag toggles generation of one CI job; `pep8_tool`,
    `use_poetry`, `python_versions`, and `branches` tune how the generated
    workflows are configured.
    """

    include_black: bool = Field(True, description="Generate Black code formatter workflow.")
    include_tests: bool = Field(True, description="Generate unit tests workflow.")
    include_pep8: bool = Field(True, description="Generate PEP 8 compliance check workflow.")
    include_autopep8: bool = Field(False, description="Generate autopep8 auto-fix workflow.")
    include_fix_pep8: bool = Field(False, description="Generate fix-pep8 slash-command workflow.")
    include_pypi: bool = Field(False, description="Generate PyPI publish workflow.")
    pep8_tool: Literal["flake8", "pylint"] = Field("flake8", description="Tool for PEP 8 checking.")
    use_poetry: bool = Field(False, description="Use Poetry for PyPI packaging.")
    include_codecov: bool = Field(True, description="Include Codecov coverage upload step.")
    # default_factory avoids a shared mutable default across model instances.
    python_versions: List[str] = Field(
        default_factory=lambda: ["3.9", "3.10"],
        description="Python versions to test against. Example: ['3.10', '3.11', '3.12']",
    )
    branches: List[str] = Field(
        default_factory=lambda: ["main", "master"],
        description="Git branches to trigger workflows on.",
    )


class GenerateWorkflowsOperation(Operation):
    """Catalog entry wiring the `generate_workflows` operation to its executor.

    Declares metadata (intents, scopes, priority), the argument schema, and the
    executor class/method the scheduler invokes.
    """

    name = "generate_workflows"
    description = "Generate CI/CD workflow files (GitHub Actions / GitLab CI) for the repository."

    supported_intents = ["new_task", "feedback"]
    supported_scopes = ["full_repo", "codebase"]
    # Runs late in the pipeline (85), after docs generation, before `organize` (90).
    priority = 85

    args_schema = GenerateWorkflowsArgs
    args_policy = "auto"

    executor = WorkflowsExecutor
    executor_method = "generate"
    # Dependencies injected into WorkflowsExecutor.__init__ by the pipeline.
    executor_dependencies = ["config_manager", "workflow_manager"]


class OrganizeRepositoryOperation(Operation):
name = "organize"
description = (
Expand Down
36 changes: 31 additions & 5 deletions osa_tool/scheduler/workflow_manager.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,21 @@
import os
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Optional

import yaml

from osa_tool.config.settings import ConfigManager
from osa_tool.scheduler.plan import Plan
from osa_tool.core.git.metadata import RepositoryMetadata
from osa_tool.operations.codebase.workflow_generation.workflow_generator import (
GitHubWorkflowGenerator,
GitLabWorkflowGenerator,
)
from osa_tool.scheduler.plan import Plan
from osa_tool.tools.repository_analysis.sourcerank import SourceRank
from osa_tool.utils.arguments_parser import get_keys_from_group_in_yaml
from osa_tool.utils.logger import logger
from osa_tool.utils.utils import parse_folder_name
from osa_tool.workflow.workflow_generator import GitHubWorkflowGenerator, GitLabWorkflowGenerator


class WorkflowManager(ABC):
Expand Down Expand Up @@ -71,12 +75,21 @@ def has_python_code(self) -> bool:
"""
Checks whether the repository contains Python code.

First checks the repository metadata language field. If that is absent or
does not mention Python, falls back to counting ``.py`` files on disk.

Returns:
True if Python code is present, False otherwise.
"""
if not self.metadata.language:
return False
return "Python" in self.metadata.language
if self.metadata.language and "Python" in self.metadata.language:
return True

py_count = sum(1 for _ in Path(self.base_path).rglob("*.py"))
if py_count > 0:
logger.info("Metadata did not report Python, but found %d .py file(s) on disk.", py_count)
return True

return False

def build_actual_plan(self, sourcerank: SourceRank) -> dict:
"""
Expand Down Expand Up @@ -122,6 +135,19 @@ def build_actual_plan(self, sourcerank: SourceRank) -> dict:

return result_plan

@staticmethod
def apply_workflow_settings(config_manager: ConfigManager, settings: dict) -> None:
    """
    Apply workflow settings directly from a dict, bypassing the legacy Plan.
    Used by the agentic pipeline.

    Args:
        config_manager: Configuration manager to update.
        settings: Dict of workflow settings keys and values.
    """
    # model_copy(update=...) returns a copy with only the given keys replaced,
    # leaving all other workflow settings untouched.
    config_manager.config.workflows = config_manager.config.workflows.model_copy(update=settings)
    logger.info("Config successfully updated with workflow settings")

def update_workflow_config(self, config_manager: ConfigManager, plan: Plan) -> None:
"""
Update workflow configuration settings in the config loader based on the given plan.
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "osa_tool"
version = "0.2.8"
version = "0.2.9"
description = "Tool that just makes your open source project better!"
requires-python = ">=3.11,<4.0"
authors = [
Expand Down
Loading