diff --git a/phase_stat_providers/carbon/intensity/entsoe/provider.py b/phase_stat_providers/carbon/intensity/entsoe/provider.py
new file mode 100644
index 000000000..c200920c4
--- /dev/null
+++ b/phase_stat_providers/carbon/intensity/entsoe/provider.py
@@ -0,0 +1,52 @@
+import os
+
+# from phase_stat_providers.base import BasePhaseStatProvider
+
+
+# NOTE(review): was misnamed CarbonIntensityStaticProvider (copy/paste from
+# the sibling static provider); renamed to match this module.
+class CarbonIntensityEntsoeProvider(BasePhaseStatProvider):
+    """Phase stat provider that derives carbon intensity from the ENTSO-E API (WIP)."""
+
+    def __init__(self, value):
+        self._value = value
+        self._data = []
+
+    def input(self, metric, detail_name, phase, value, type, max_value, min_value, unit, created_at):
+        # Buffer one measurement row; output() consumes the buffer later.
+        self._data.append({
+            'metric': metric,
+            'detail_name': detail_name,
+            'phase': phase,
+            'value': value,
+            'type': type,
+            'max_value': max_value,
+            'min_value': min_value,
+            'unit': unit,
+            'created_at': created_at,
+        })
+
+    def output(self, run_id=None):
+        # NOTE(review): run_id was referenced but never defined; the caller must pass it in.
+        for idx, data in enumerate(self._data):
+            # TODO: Query the API and get an average value
+            from entsoe import EntsoeRawClient
+            # import pandas as pd
+
+            # client = EntsoeRawClient(api_key=)
+
+            # start = pd.Timestamp('20171201', tz='Europe/Brussels')
+            # end = pd.Timestamp('20180101', tz='Europe/Brussels')
+            # country_code = 'DE'  # Germany
+            # country_code_from = 'FR'  # France
+            # country_code_to = 'DE_LU'  # Germany-Luxembourg
+            # type_marketagreement_type = 'A01'
+            # contract_marketagreement_type = 'A01'
+            # process_type = 'A51'
+
+            # # methods that return XML
+
+            # TODO(review): get_intensity / transformToMicrogram are not defined anywhere yet.
+            intensity = get_intensity(start=data['phase']['start'], end=data['phase']['end'])
+            intensity = transformToMicrogram(intensity)
+            yield (run_id, 'carbon_intensity_entsoe', '[SYSTEM]', f"{idx:03}_{data['phase']['name']}", intensity, 'MEAN', None, None, 'ugCO2e')
diff --git a/phase_stat_providers/carbon/intensity/static/provider.py b/phase_stat_providers/carbon/intensity/static/provider.py
new file mode 100644
index 000000000..40cc8a62d
--- /dev/null
+++ b/phase_stat_providers/carbon/intensity/static/provider.py
@@ -0,0 +1,30 @@
+import os
+
+# from phase_stat_providers.base import BasePhaseStatProvider
+
+
+class CarbonIntensityStaticProvider(BasePhaseStatProvider):
+    """Phase stat provider that reports one fixed (static) carbon intensity value."""
+
+    def __init__(self, value):
+        self._value = value
+        self._data = []
+
+    def input(self, metric, detail_name, phase, value, type, max_value, min_value, unit, created_at):
+        # Buffer one measurement row; output() consumes the buffer later.
+        self._data.append({
+            'metric': metric,
+            'detail_name': detail_name,
+            'phase': phase,
+            'value': value,
+            'type': type,
+            'max_value': max_value,
+            'min_value': min_value,
+            'unit': unit,
+            'created_at': created_at,
+        })
+
+    def output(self, run_id=None):
+        # NOTE(review): run_id was referenced but never defined; the caller must pass it in.
+        for idx, data in enumerate(self._data):
+            yield (run_id, 'carbon_intensity_static', '[SYSTEM]', f"{idx:03}_{data['phase']['name']}", self._value, 'MEAN', None, None, 'ugCO2e')
diff --git a/tools/phase_stats.py b/tools/phase_stats.py
index b36358d29..c2964049b 100644
--- a/tools/phase_stats.py
+++ b/tools/phase_stats.py
@@ -100,11 +100,16 @@ def build_and_store_phase_stats(run_id, sci=None):
                     power_min = (min_value * 10**6) / ((phase['end'] - phase['start']) / value_count)
                     csv_buffer.write(generate_csv_line(run_id, f"{metric.replace('_energy_', '_power_')}", detail_name, f"{idx:03}_{phase['name']}", power_sum, 'MEAN', power_max, power_min, 'mW'))
 
+                # -------------- REFACTOR START
                 if metric.endswith('_machine'):
                     machine_co2 = (value_sum / 3_600) * config['sci']['I']
                     csv_buffer.write(generate_csv_line(run_id, f"{metric.replace('_energy_', '_co2_')}", detail_name, f"{idx:03}_{phase['name']}", machine_co2, 'TOTAL', None, None, 'ug'))
-
+                # -------------- REFACTOR END
                 else:
                     csv_buffer.write(generate_csv_line(run_id, metric, detail_name, f"{idx:03}_{phase['name']}", value_sum, 'TOTAL', max_value, min_value, unit))
+
+
+            # -------------- REFACTOR START
+
             # after going through detail metrics, create cumulated ones
             if network_io_bytes_total: # build the network energy
@@ -132,4 +137,7 @@ def build_and_store_phase_stats(run_id, sci=None):
             csv_buffer.write(generate_csv_line(run_id, 'software_carbon_intensity_global', '[SYSTEM]', f"{idx:03}_{phase['name']}", (machine_co2 + embodied_carbon_share_ug) / sci['R'], 'TOTAL', None, None, f"ugCO2e/{sci['R_d']}"))
+    # -------------- REFACTOR END
+
+
     csv_buffer.seek(0) # Reset buffer position to the beginning
     DB().copy_from(
         csv_buffer,