
Commit 228f419

Add reformatted compile_check for ruff verification
Signed-off-by: sduvvuri1603 <[email protected]>
1 parent 3f0cf50 commit 228f419

File tree: 4 files changed (+37, -46 lines)
.github/scripts/compile_check/__init__.py

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+"""compile_check GitHub scripts package."""

.github/scripts/compile_check/compile_check.py

Lines changed: 30 additions & 36 deletions
@@ -1,6 +1,5 @@
 #!/usr/bin/env python3
-"""
-Compile and dependency validation tool for Kubeflow Pipelines components.
+"""Compile and dependency validation tool for Kubeflow Pipelines components.
 
 This script discovers component and pipeline modules based on the presence of
 `metadata.yaml` files, validates declared dependencies, and ensures each target
@@ -27,9 +26,7 @@
     SpecifierSet = None  # type: ignore[assignment]
 
 from kfp import compiler as pipeline_compiler
-from kfp.dsl import base_component
-from kfp.dsl import graph_component
-
+from kfp.dsl import base_component, graph_component
 
 REPO_ROOT = Path(__file__).resolve().parents[1]
 
@@ -48,26 +45,29 @@ class MetadataTarget:
 
 @dataclass
 class ValidationResult:
+    """Stores the outcome of validating a single metadata target."""
+
     target: MetadataTarget
     success: bool
     compiled_objects: List[str] = field(default_factory=list)
    warnings: List[str] = field(default_factory=list)
     errors: List[str] = field(default_factory=list)
 
     def add_error(self, message: str) -> None:
+        """Record a validation error and mark the result unsuccessful."""
         logging.error(message)
         self.errors.append(message)
         self.success = False
 
     def add_warning(self, message: str) -> None:
+        """Record a non-fatal validation warning."""
         logging.warning(message)
         self.warnings.append(message)
 
 
 def parse_args(argv: Optional[Sequence[str]] = None) -> argparse.Namespace:
-    parser = argparse.ArgumentParser(
-        description="Compile Kubeflow components and pipelines."
-    )
+    """Parse command-line arguments for the compile check tool."""
+    parser = argparse.ArgumentParser(description="Compile Kubeflow components and pipelines.")
     parser.add_argument(
         "--tier",
         choices=["core", "all"],
@@ -99,6 +99,7 @@ def parse_args(argv: Optional[Sequence[str]] = None) -> argparse.Namespace:
 
 
 def configure_logging(verbose: bool) -> None:
+    """Configure logging verbosity for the script."""
     level = logging.DEBUG if verbose else logging.INFO
     logging.basicConfig(
         level=level,
@@ -129,18 +130,21 @@ def should_include_target(
     metadata: Dict,
     include_flagless: bool,
 ) -> bool:
+    """Return whether a metadata entry should be considered for compilation."""
     ci_config = metadata.get("ci") or {}
     if "compile_check" in ci_config:
         return bool(ci_config["compile_check"])
     return include_flagless
 
 
 def build_module_import_path(module_path: Path) -> str:
+    """Convert a module path to a dotted import path."""
     relative = module_path.relative_to(REPO_ROOT)
     return ".".join(relative.with_suffix("").parts)
 
 
 def load_metadata(metadata_path: Path) -> Dict:
+    """Load and validate metadata from the given path."""
     with metadata_path.open("r", encoding="utf-8") as handle:
         data = yaml.safe_load(handle) or {}
     if not isinstance(data, dict):
@@ -153,15 +157,14 @@ def create_targets(
     include_flagless: bool,
     path_filters: Sequence[str],
 ) -> List[MetadataTarget]:
+    """Build MetadataTarget objects from discovered metadata files."""
     normalized_filters = [Path(p).resolve() for p in path_filters]
     targets: List[MetadataTarget] = []
 
     for metadata_path, tier, target_kind in discovered:
         if normalized_filters:
             absolute_metadata_dir = metadata_path.parent.resolve()
-            if not any(
-                absolute_metadata_dir.is_relative_to(f) for f in normalized_filters
-            ):
+            if not any(absolute_metadata_dir.is_relative_to(f) for f in normalized_filters):
                 continue
 
         try:
@@ -174,9 +177,7 @@
             logging.debug("Skipping %s (compile_check disabled).", metadata_path)
             continue
 
-        module_filename = (
-            "component.py" if target_kind == "component" else "pipeline.py"
-        )
+        module_filename = "component.py" if target_kind == "component" else "pipeline.py"
         module_path = metadata_path.with_name(module_filename)
         if not module_path.exists():
             logging.error(
@@ -200,24 +201,22 @@ def create_targets(
     return targets
 
 
-def find_objects(
-    module, target_kind: str
-) -> List[Tuple[str, base_component.BaseComponent]]:
+def find_objects(module, target_kind: str) -> List[Tuple[str, base_component.BaseComponent]]:
+    """Locate pipeline or component factory functions within a module."""
     found: List[Tuple[str, base_component.BaseComponent]] = []
     for attr_name in dir(module):
         attr = getattr(module, attr_name)
         if target_kind == "pipeline":
             if isinstance(attr, graph_component.GraphComponent):
                 found.append((attr_name, attr))
         else:
-            if isinstance(attr, base_component.BaseComponent) and not isinstance(
-                attr, graph_component.GraphComponent
-            ):
+            if isinstance(attr, base_component.BaseComponent) and not isinstance(attr, graph_component.GraphComponent):
                 found.append((attr_name, attr))
     return found
 
 
 def validate_dependencies(metadata: Dict, result: ValidationResult) -> None:
+    """Validate dependency metadata declared for a target."""
     dependencies = metadata.get("dependencies") or {}
     if not isinstance(dependencies, dict):
         result.add_error("`dependencies` must be a mapping.")
@@ -244,25 +243,21 @@ def validate_dependencies(metadata: Dict, result: ValidationResult) -> None:
         if not name:
             result.add_error(f"{label} is missing a `name` field.")
         if not version:
-            result.add_error(
-                f"{label} for {name or '<unknown>'} is missing a `version` field."
-            )
+            result.add_error(f"{label} for {name or '<unknown>'} is missing a `version` field.")
         elif SpecifierSet is not None:
             try:
                 SpecifierSet(str(version))
             except Exception as exc:
                 result.add_error(
-                    f"{label} for {name or '<unknown>'} has an invalid version specifier "
-                    f"{version!r}: {exc}"
+                    f"{label} for {name or '<unknown>'} has an invalid version specifier {version!r}: {exc}"
                 )
         else:
-            result.add_warning(
-                "packaging module not available; skipping validation for dependency versions."
-            )
+            result.add_warning("packaging module not available; skipping validation for dependency versions.")
     return
 
 
 def compile_pipeline(obj: graph_component.GraphComponent, output_dir: Path) -> Path:
+    """Compile a pipeline function and return the output path."""
     output_path = output_dir / f"{obj.name or 'pipeline'}.json"
     pipeline_compiler.Compiler().compile(
         pipeline_func=obj,
@@ -272,12 +267,14 @@ def compile_pipeline(obj: graph_component.GraphComponent, output_dir: Path) -> Path:
 
 
 def compile_component(obj: base_component.BaseComponent, output_dir: Path) -> Path:
+    """Compile a component function and return the output path."""
     output_path = output_dir / f"{obj.name or 'component'}.yaml"
     obj.component_spec.save_to_component_yaml(str(output_path))
     return output_path
 
 
 def validate_target(target: MetadataTarget) -> ValidationResult:
+    """Validate a single metadata target by compiling exposed objects."""
     result = ValidationResult(target=target, success=True)
     validate_dependencies(target.metadata, result)
     if not result.success and result.errors:
@@ -289,16 +286,13 @@ def validate_target(target: MetadataTarget) -> ValidationResult:
         module = importlib.import_module(target.module_import)
     except Exception:
         result.add_error(
-            f"Failed to import module {target.module_import} defined in {target.module_path}.\n"
-            f"{traceback.format_exc()}"
+            f"Failed to import module {target.module_import} defined in {target.module_path}.\n{traceback.format_exc()}"
        )
         return result
 
     objects = find_objects(module, target.target_kind)
     if not objects:
-        result.add_error(
-            f"No {target.target_kind} objects discovered in module {target.module_import}."
-        )
+        result.add_error(f"No {target.target_kind} objects discovered in module {target.module_import}.")
         return result
 
     with tempfile.TemporaryDirectory() as temp_dir:
@@ -329,6 +323,7 @@ def validate_target(target: MetadataTarget) -> ValidationResult:
 
 
 def run_validation(args: argparse.Namespace) -> int:
+    """Validate all discovered metadata targets."""
     configure_logging(args.verbose)
     sys.path.insert(0, str(REPO_ROOT))
 
@@ -354,9 +349,7 @@ def run_validation(args: argparse.Namespace) -> int:
             logging.info(
                 "✓ %s compiled successfully (%s)",
                 target.metadata.get("name", target.module_import),
-                ", ".join(result.compiled_objects)
-                if result.compiled_objects
-                else "no output",
+                ", ".join(result.compiled_objects) if result.compiled_objects else "no output",
             )
         else:
             logging.error(
@@ -381,6 +374,7 @@ def run_validation(args: argparse.Namespace) -> int:
 
 
 def main(argv: Optional[Sequence[str]] = None) -> int:
+    """Entrypoint for running compile checks via the CLI."""
     args = parse_args(argv)
     try:
         return run_validation(args)
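
For reference, the reworked script keeps an importable entrypoint: main() accepts an explicit argv sequence and returns an exit code. Below is a minimal sketch of driving it in-process, assuming the .github/scripts directory (which now contains the compile_check package __init__.py) is placed on sys.path; only the --tier flag is confirmed by this diff.

    # Minimal sketch: run the compile check in-process instead of via the shell.
    # Assumption: ".github/scripts" is the package root to add to sys.path; the
    # tests rely on the same layout after dropping their own bootstrap code.
    import sys

    sys.path.insert(0, ".github/scripts")

    from compile_check import compile_check

    # main() takes an optional argv sequence (see its signature in the diff);
    # "--tier core" restricts validation to core-tier targets.
    exit_code = compile_check.main(["--tier", "core"])
    print(f"compile check exited with {exit_code}")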

.github/scripts/compile_check/tests/test_compile_check.py

Lines changed: 3 additions & 6 deletions
@@ -4,6 +4,7 @@
 from __future__ import annotations
 
 import argparse
+import sys
 import tempfile
 import textwrap
 import unittest
@@ -12,12 +13,6 @@
 
 import yaml
 
-import sys
-
-SCRIPTS_ROOT = Path(__file__).resolve().parents[2]
-if str(SCRIPTS_ROOT) not in sys.path:
-    sys.path.insert(0, str(SCRIPTS_ROOT))
-
 from compile_check import compile_check
 
 
@@ -38,12 +33,14 @@ class CompileCheckTestCase(unittest.TestCase):
     """Unit tests covering compile_check’s discovery, dependency, and compile paths."""
 
     def setUp(self) -> None:
+        """Create a temporary repository root for each test case."""
         self._tempdir = tempfile.TemporaryDirectory()
         self.repo_root = Path(self._tempdir.name)
         self.original_repo_root = compile_check.REPO_ROOT
         compile_check.REPO_ROOT = self.repo_root
 
     def tearDown(self) -> None:
+        """Clean up temporary directories and restore global state."""
         compile_check.REPO_ROOT = self.original_repo_root
         self._tempdir.cleanup()
 
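
The removed SCRIPTS_ROOT bootstrap is exactly what the new package __init__.py replaces: with .github/scripts on the import path, `from compile_check import compile_check` resolves as an ordinary package import. The setUp/tearDown pair also shows the fixture pattern the suite uses, swapping the module-level REPO_ROOT for a temp directory. A hypothetical test built on that base class might look like the sketch below (test and fixture names are illustrative, not from the diff):

    # Hypothetical test case using the temp-dir REPO_ROOT fixture from setUp().
    import textwrap


    class LoadMetadataTest(CompileCheckTestCase):
        def test_load_metadata_returns_mapping(self) -> None:
            # self.repo_root is the temporary directory installed by setUp().
            metadata_path = self.repo_root / "components" / "demo" / "metadata.yaml"
            metadata_path.parent.mkdir(parents=True)
            metadata_path.write_text(
                textwrap.dedent(
                    """\
                    name: demo
                    ci:
                      compile_check: true
                    """
                ),
                encoding="utf-8",
            )
            data = compile_check.load_metadata(metadata_path)
            self.assertEqual(data["name"], "demo")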

__init__.py

Lines changed: 3 additions & 4 deletions
@@ -1,5 +1,4 @@
-"""
-Kubeflow Pipelines Components
+"""Kubeflow Pipelines Components
 
 A collection of reusable components and pipelines for Kubeflow Pipelines.
 
@@ -20,5 +19,5 @@
     components = import_module("kubeflow.pipelines.components.components")
     pipelines = import_module("kubeflow.pipelines.components.pipelines")
 else:
-    from . import components  # type: ignore[F401]  # re-export for consumers
-    from . import pipelines  # type: ignore[F401]
+    from . import components as components
+    from . import pipelines as pipelines
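
The final hunk swaps suppression comments for the redundant-alias re-export idiom. The old `# type: ignore[F401]` comments mixed tools up: F401 is a flake8/ruff code, so a mypy-style type: ignore never silenced it (that would need `# noqa: F401`). Writing `from . import components as components` instead marks the name as an intentional public re-export, which both ruff's unused-import rule and type checkers recognize in an __init__.py. A tiny sketch of the same idiom in a hypothetical package:

    # mypackage/__init__.py — hypothetical example of explicit re-exports.
    # The redundant alias ("x as x") tells linters and type checkers that the
    # import exists for consumers of the package, not that it is unused.
    from .engine import Engine as Engine
    from .config import load_config as load_config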
