from pathlib import Path
from typing import NamedTuple

import kfp
from kfp.components import load_component_from_file, create_component_from_func

test_data_dir = Path(__file__).parent / 'test_data'

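# Load reusable components from their YAML specifications; each loaded
# component is a factory that creates a pipeline task when called.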
producer_op = load_component_from_file(
    str(test_data_dir / 'produce_2.component.yaml'))
processor_op = load_component_from_file(
    str(test_data_dir / 'process_2_2.component.yaml'))
consumer_op = load_component_from_file(
    str(test_data_dir / 'consume_2.component.yaml'))


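# The output names below are special: the KFP UI picks up artifacts named
# mlpipeline_ui_metadata and mlpipeline_metrics and renders them as custom
# visualizations and run metrics.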
def metadata_and_metrics() -> NamedTuple(
    "Outputs",
    [("mlpipeline_ui_metadata", "UI_metadata"),
     ("mlpipeline_metrics", "Metrics")],
):
    metadata = {
        "outputs": [{
            "storage": "inline",
            "source": "*this should be bold*",
            "type": "markdown",
        }]
    }
    metrics = {
        "metrics": [
            {
                "name": "train-accuracy",
                "numberValue": 0.9,
            },
            {
                "name": "test-accuracy",
                "numberValue": 0.7,
            },
        ]
    }
    # Imports live inside the function body because only the body is shipped
    # to the container when this function becomes a lightweight component.
    from collections import namedtuple
    import json

    return namedtuple("output",
                      ["mlpipeline_ui_metadata", "mlpipeline_metrics"])(
                          json.dumps(metadata), json.dumps(metrics))


@kfp.dsl.pipeline()
def artifact_passing_pipeline():
    producer_task = producer_op()
    processor_task = processor_op(producer_task.outputs['output_1'],
                                  producer_task.outputs['output_2'])
    consumer_task = consumer_op(processor_task.outputs['output_1'],
                                processor_task.outputs['output_2'])

    markdown_task = create_component_from_func(func=metadata_and_metrics)()
    # This line is only needed so that compiling with dsl-compile works.
    kfp.dsl.get_pipeline_conf().data_passing_method = (
        volume_based_data_passing_method)


from kubernetes.client.models import V1Volume, V1PersistentVolumeClaimVolumeSource
from kfp.dsl import data_passing_methods

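# Route inter-task artifacts through a shared PersistentVolumeClaim: each task
# mounts the 'data' volume and exchanges files under path_prefix.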
volume_based_data_passing_method = data_passing_methods.KubernetesVolume(
    volume=V1Volume(
        name='data',
        persistent_volume_claim=V1PersistentVolumeClaimVolumeSource(
            claim_name='data-volume',
        ),
    ),
    path_prefix='artifact_data/',
)
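
# A minimal compile sketch, assuming the KFP v1 SDK compiler; the __main__
# guard and the output filename are illustrative, not part of the sample above.
if __name__ == '__main__':
    import kfp.compiler
    kfp.compiler.Compiler().compile(artifact_passing_pipeline,
                                    'artifact_passing_pipeline.yaml')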