19 | 19 | from util.extract_types import get_expected_power_columns |
20 | 20 | from util.train_types import ModelOutputType, FeatureGroups, is_single_source_feature_group, all_feature_groups, default_trainers |
21 | 21 | from util.loader import load_json, DEFAULT_PIPELINE, load_pipeline_metadata, get_pipeline_path, get_model_group_path, list_pipelines, list_model_names, load_metadata, load_csv, get_preprocess_folder, get_general_filename, load_machine_spec |
22 | | -from util.saver import save_json, save_csv, save_train_args |
| 22 | +from util.saver import save_json, save_csv, save_train_args, _pipeline_model_metadata_filename |
23 | 23 | from util.config import ERROR_KEY, model_toppath |
24 | 24 | from util import get_valid_feature_group_from_queries, PowerSourceMap |
25 | 25 | from train.prom.prom_query import _range_queries |
26 | 26 | from train.exporter import exporter |
27 | 27 | from train import load_class |
28 | 28 | from train.profiler.node_type_index import NodeTypeIndexCollection, NodeTypeSpec, generate_spec |
29 | 29 |
30 | | -from cmd_plot import ts_plot, feature_power_plot, summary_plot |
| 30 | +from cmd_plot import ts_plot, feature_power_plot, summary_plot, metadata_plot |
31 | 31 | from cmd_util import extract_time, save_query_results, get_validate_df, summary_validation, get_extractor, check_ot_fg, get_pipeline, assert_train, get_isolator, UTC_OFFSET_TIMEDELTA |
32 | 32 |
33 | 33 | import threading |
@@ -597,9 +597,11 @@ def estimate(args): |
597 | 597 | - `preprocess` plots time series of usage and power metrics for both AbsPower and DynPower |
598 | 598 | - `estimate` passes all arguments to `estimate` function, and plots the predicted time series and correlation between usage and power metrics |
599 | 599 | - `error` passes all arguments to `estimate` function, and plots the summary of prediction error |
| 600 | + - `metadata` plots pipeline model metadata
600 | 601 | - --energy-source : specify target energy sources (use comma(,) as delimiter) |
601 | 602 | - --extractor : specify extractor to get preprocessed data of AbsPower model linked to the input data |
602 | 603 | - --isolator : specify isolator to get preprocessed data of DynPower model linked to the input data |
| 604 | +- --pipeline_name : specify pipeline name |
603 | 605 | """ |
604 | 606 |
605 | 607 | def plot(args): |
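As a point of reference for the new `metadata` target and the `--pipeline_name` option documented above, the sketch below loads the same per-model metadata the plot command works from, using loader helpers this file already imports. Rooting the lookup at `model_toppath` and falling back to `DEFAULT_PIPELINE` are illustrative assumptions; the CLI resolves its own data path and pipeline name from the parsed arguments.

```python
# Minimal sketch (not part of this commit): inspect the pipeline metadata that the
# new `metadata` plot target renders. The root path and pipeline name are assumptions.
from util import PowerSourceMap
from util.config import model_toppath
from util.loader import DEFAULT_PIPELINE, get_pipeline_path, load_pipeline_metadata

pipeline_path = get_pipeline_path(model_toppath, pipeline_name=DEFAULT_PIPELINE)
for energy_source in PowerSourceMap.keys():
    for output_type in ("AbsPower", "DynPower"):
        metadata_df = load_pipeline_metadata(pipeline_path, energy_source, output_type)
        if metadata_df is None:
            continue  # no metadata exported for this energy source / output type
        print(energy_source, output_type, len(metadata_df), "models")
```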
@@ -685,6 +687,12 @@ def plot(args): |
685 | 687 | for energy_source in energy_sources: |
686 | 688 | data_filename = get_general_filename(args.target_data, energy_source, fg, ot, args.extractor, args.isolator) |
687 | 689 | summary_plot(args, energy_source, summary_df, output_folder, data_filename) |
| 690 | + elif args.target_data == "metadata": |
| 691 | + for energy_source in energy_sources: |
| 692 | + data_filename = _pipeline_model_metadata_filename(energy_source, ot.name) |
| 693 | + pipeline_path = get_pipeline_path(data_path, pipeline_name=pipeline_name) |
| 694 | + model_metadata_df = load_pipeline_metadata(pipeline_path, energy_source, ot.name) |
| 695 | + metadata_plot(args, energy_source, model_metadata_df, output_folder, data_filename) |
688 | 696 |
689 | 697 | """ |
690 | 698 | export |
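`metadata_plot` itself lives in `cmd_plot.py` and is not shown in this diff. The following is only a guess at the kind of chart such a helper could draw from the loaded dataframe; the function name, the `model_name` column, and using `mae` as the error column (the usual `ERROR_KEY`) are assumptions rather than details taken from this commit.

```python
import os
import matplotlib.pyplot as plt

def metadata_plot_sketch(energy_source, metadata_df, output_folder, name):
    # Hypothetical stand-in for cmd_plot.metadata_plot: one bar per trained model,
    # ordered by reported error. Column names are assumed, not read from this commit.
    if metadata_df is None or metadata_df.empty:
        return
    plot_df = metadata_df.sort_values(by="mae")
    fig, ax = plt.subplots(figsize=(12, 5))
    ax.bar(plot_df["model_name"], plot_df["mae"])
    ax.set_ylabel("mae")
    ax.set_title("{} model metadata ({})".format(energy_source, name))
    ax.tick_params(axis="x", labelrotation=90)
    fig.tight_layout()
    fig.savefig(os.path.join(output_folder, name + ".png"))
    plt.close(fig)
```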
@@ -732,7 +740,15 @@ def export(args): |
732 | 740 | pipeline_name = args.pipeline_name |
733 | 741 | pipeline_path = get_pipeline_path(data_path, pipeline_name=pipeline_name) |
734 | 742 |
735 | | - exporter.export(data_path, pipeline_path, output_path, publisher=args.publisher, collect_date=collect_date, inputs=inputs) |
| 743 | + local_export_path = exporter.export(data_path, pipeline_path, output_path, publisher=args.publisher, collect_date=collect_date, inputs=inputs) |
| 744 | + args.target_data = "metadata" |
| 745 | + |
| 746 | + args.output = local_export_path |
| 747 | + args.output_type = "AbsPower" |
| 748 | + args.energy_source = ",".join(PowerSourceMap.keys()) |
| 749 | + plot(args) |
| 750 | + args.output_type = "DynPower" |
| 751 | + plot(args) |
736 | 752 |
737 | 753 | """ |
738 | 754 | plot_scenario |
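The export path above reuses the already-parsed argument namespace: it rewrites `target_data`, `output`, `output_type`, and `energy_source` in place and calls `plot()` once per output type, so the exported directory also gets metadata plots without a second CLI invocation. Below is a condensed sketch of that delegation, assuming it runs inside this module where `plot` and `PowerSourceMap` are available; the loop over output types is just a compact rewrite of the two calls shown in the hunk.

```python
def plot_exported_metadata(args, local_export_path):
    # Sketch of the delegation pattern used by export() above: mutate the shared
    # argparse Namespace, then let plot() render metadata for every energy source
    # and both model output types. Note that args is modified in place.
    args.target_data = "metadata"
    args.output = local_export_path
    args.energy_source = ",".join(PowerSourceMap.keys())
    for output_type in ("AbsPower", "DynPower"):
        args.output_type = output_type
        plot(args)
```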