3 changes: 2 additions & 1 deletion ci/run_chargeback_tests.yml
@@ -1,12 +1,13 @@
---
- name: "Verify all the applicable projects, endpoints, pods & services for cloudkitty"
hosts: "{{ cifmw_target_hook_host | default('localhost') }}"
gather_facts: no
gather_facts: true
ignore_errors: true
environment:
KUBECONFIG: "{{ cifmw_openshift_kubeconfig }}"
PATH: "{{ cifmw_path }}"
vars_files:
- vars/common.yml
- vars/osp18_env.yml
vars:
common_pod_status_str: "Running"
9 changes: 9 additions & 0 deletions roles/telemetry_chargeback/defaults/main.yml
@@ -1,2 +1,11 @@
---
openstack_cmd: "openstack"

output_file_local: "{{ role_path }}/files/loki_synth_data.json"
ck_py_script_path: "{{ role_path }}/files/gen_synth_loki_data.py"
ck_data_template_path: "{{ role_path }}/files/loki_data_templ.j2"
ck_days: 30
ck_step: 300

# Output directory for test artifacts
logs_dir: "/home/zuul/ci-framework-data/tests/feature-verification-tests"
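These defaults map directly onto the generator's CLI: ck_days and ck_step become --days and --step, while ck_py_script_path, ck_data_template_path, and output_file_local are the script, template, and output paths passed in the task below. A hedged example of shrinking the synthetic window at run time, assuming ci/run_chargeback_tests.yml is the playbook that pulls in the telemetry_chargeback role and that an inventory is already configured:

    # One week of data at a 10-minute step instead of the 30-day / 300 s defaults.
    ansible-playbook ci/run_chargeback_tests.yml -e ck_days=7 -e ck_step=600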
166 changes: 166 additions & 0 deletions roles/telemetry_chargeback/files/gen_synth_loki_data.py
@@ -0,0 +1,166 @@
import logging
import argparse
from datetime import datetime, timezone, timedelta
from pathlib import Path
from typing import Union
from jinja2 import Template


# --- Configure logging with a default level that can be changed ---
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s',
datefmt='%Y-%m-%dT%H:%M:%S+00:00'
)
logger = logging.getLogger()


def _format_timestamp(epoch_seconds: float) -> str:
"""
Converts an epoch timestamp into a human-readable UTC string.

Args:
epoch_seconds (float): The timestamp in seconds since the epoch.

Returns:
str: The formatted datetime string (e.g., "2023-10-26T14:30:00+00:00").
"""
try:
dt_object = datetime.fromtimestamp(epoch_seconds, tz=timezone.utc)
return dt_object.isoformat()
except (ValueError, TypeError):
logger.warning(f"Invalid epoch value provided: {epoch_seconds}")
return "INVALID_TIMESTAMP"


def generate_loki_data(
template_path: Path,
output_path: Path,
start_time: datetime,
end_time: datetime,
time_step_seconds: int
):
"""
Generates synthetic Loki log data by first preparing a data list
and then rendering it with a single template.

Args:
template_path (Path): Path to the main log template file.
output_path (Path): Path for the generated output JSON file.
start_time (datetime): The start time for data generation.
end_time (datetime): The end time for data generation.
time_step_seconds (int): The duration of each log entry in seconds.
"""

# --- Step 1: Generate the data structure first ---
logger.info(
f"Generating data from {start_time.strftime('%Y-%m-%d')} to "
f"{end_time.strftime('%Y-%m-%d')} with a {time_step_seconds}s step."
)
start_epoch = int(start_time.timestamp())
end_epoch = int(end_time.timestamp())
logger.debug(f"Time range in epoch seconds: {start_epoch} to {end_epoch}")

log_data_list = [] # This list will hold all our data points

# Loop through the time range and generate data points
for current_epoch in range(start_epoch, end_epoch, time_step_seconds):
end_of_step_epoch = current_epoch + time_step_seconds - 1

# Prepare replacement values
nanoseconds = int(current_epoch * 1_000_000_000)
start_str = _format_timestamp(current_epoch)
end_str = _format_timestamp(end_of_step_epoch)

        logger.debug(f"Processing epoch: {current_epoch} -> "
                     f"nanoseconds: {nanoseconds}")

# Create a dictionary for this time step and add it to the list
log_data_list.append({
"nanoseconds": nanoseconds,
"start_time": start_str,
"end_time": end_str
})

logger.info(f"Generated {len(log_data_list)} data points to be rendered.")

# --- Step 2: Load template and render ---
try:
logger.info(f"Loading main template from: {template_path}")
template_content = template_path.read_text()
template = Template(template_content,
trim_blocks=True, lstrip_blocks=True)

except FileNotFoundError as e:
logger.error(f"Error loading template file: {e}. Aborting.")
raise # Re-raise the exception to be caught in main()

# --- Render the template in one pass with all the data ---
logger.info("Rendering final output...")
# The template expects a variable named 'log_data'
final_output = template.render(log_data=log_data_list)

# --- Step 3: Write the final string to the file ---
try:
with output_path.open('w') as f_out:
f_out.write(final_output)
logger.info(f"Successfully generated synthetic data to "
f"'{output_path}'")
except IOError as e:
logger.error(f"Failed to write to output file '{output_path}': {e}")
except Exception as e:
logger.error(f"An unexpected error occurred during file write: {e}")


def main():
"""Main entry point for the script."""
parser = argparse.ArgumentParser(
description="Generate synthetic Loki log data from "
"a single main template.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
# --- Required File Path Arguments ---
parser.add_argument("-o", "--output", required=True,
help="Path to the output file.")
# --- Only one template argument is needed now ---
parser.add_argument("--template", required=True, help="Path to the main "
"log template file (e.g., loki_main.tmpl).")

# --- Optional Generation Arguments ---
parser.add_argument("--days", type=int, default=30, help="How many days "
"of data to generate, ending today.")
parser.add_argument("--step", type=int, default=300, help="Time step in "
"seconds for each log entry.")

# --- Optional Utility Arguments ---
parser.add_argument("--debug", action="store_true", help="Enable debug "
"level for verbose output.")

args = parser.parse_args()

if args.debug:
logger.setLevel(logging.DEBUG)
logger.debug("Debug mode enabled.")

# Define the time range for data generation
end_time_utc = datetime.now(timezone.utc)
start_time_utc = end_time_utc - timedelta(days=args.days)
logger.debug(f"Time range calculated: {start_time_utc} to {end_time_utc}")

# Run the generator
try:
generate_loki_data(
template_path=Path(args.template),
output_path=Path(args.output),
start_time=start_time_utc,
end_time=end_time_utc,
time_step_seconds=args.step
)
except FileNotFoundError:
logger.error("Process aborted because template file was not found.")
except Exception as e:
logger.critical(f"A critical, unhandled error stopped the script: {e}")


if __name__ == "__main__":
main()
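For reference, a minimal sketch of invoking the generator by hand with the role's default paths and the argparse flags defined above (run from the repository root; the paths are the ones set in defaults/main.yml):

    python3 roles/telemetry_chargeback/files/gen_synth_loki_data.py \
        --template roles/telemetry_chargeback/files/loki_data_templ.j2 \
        -o roles/telemetry_chargeback/files/loki_synth_data.json \
        --days 30 --step 300 --debug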
16 changes: 16 additions & 0 deletions roles/telemetry_chargeback/files/loki_data_templ.j2
@@ -0,0 +1,16 @@
{"streams": [{ "stream": { "service": "cloudkitty" }, "values": [
{%- for item in log_data %}
[
"{{ item.nanoseconds }}",
"{\"start\": \"{{ item.start_time }}\", \"end\": \"{{ item.end_time }}\", \"type\": \"ceilometer_image_size\", \"unit\": \"MiB\", \"description\": null, \"qty\": 20.6875, \"price\": 0.0206875, \"groupby\": {\"id\": \"cd65d30f-8b94-4fa3-95dc-e3b429f479b2\", \"project_id\": \"0030775de80e4d84a4fd0d73e0a1b3a7\", \"user_id\": null, \"week_of_the_year\": \"37\", \"day_of_the_year\": \"258\", \"month\": \"9\", \"year\": \"2025\"}, \"metadata\": {\"container_format\": \"bare\", \"disk_format\": \"qcow2\"}}"
],
[
"{{ item.nanoseconds }}",
"{\"start\": \"{{ item.start_time }}\", \"end\": \"{{ item.end_time }}\", \"type\": \"instance\", \"unit\": \"instance\", \"description\": null, \"qty\": 1.0, \"price\": 0.3, \"groupby\": {\"id\": \"de168c31-ed44-4a1a-a079-51bd238a91d6\", \"project_id\": \"9cf5bcfc61a24682acc448af2d062ad2\", \"user_id\": \"c29ab6e886354bbd88ee9899e62d1d40\", \"week_of_the_year\": \"37\", \"day_of_the_year\": \"258\", \"month\": \"9\", \"year\": \"2025\"}, \"metadata\": {\"flavor_name\": \"m1.tiny\", \"flavor_id\": \"1\", \"vcpus\": \"\"}}"
]
{#- This logic adds a comma after every pair, *except* for the very last one. #}
{%- if not loop.last -%}
,
{%- endif -%}
{%- endfor %}
]}]}
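Each loop iteration contributes two entries to the values array of the single cloudkitty stream, one ceilometer_image_size record and one instance record, both keyed to the same nanosecond timestamp. Schematically, the rendered output has this shape (field values abbreviated; the angle-bracket placeholders stand for the per-step values the script supplies):

    {"streams": [{ "stream": { "service": "cloudkitty" }, "values": [
      ["<nanoseconds>", "{\"start\": \"<start_time>\", \"end\": \"<end_time>\", \"type\": \"ceilometer_image_size\", ...}"],
      ["<nanoseconds>", "{\"start\": \"<start_time>\", \"end\": \"<end_time>\", \"type\": \"instance\", ...}"],
      ...
    ]}]}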
43 changes: 43 additions & 0 deletions roles/telemetry_chargeback/tasks/gen_synth_loki_data.yml
@@ -0,0 +1,43 @@
- name: Define Synthetic Data Variables
ansible.builtin.set_fact:
output_file_remote: "{{ logs_dir }}/gen_loki_synth_data.log"

- name: Check for preexisting output file
ansible.builtin.stat:
path: "{{ output_file_local }}"
register: file_preexists

- name: TEST Generate Synthetic Data
ansible.builtin.command:
cmd: >
python3 "{{ ck_py_script_path }}"
--template "{{ ck_data_template_path }}"
-o "{{ output_file_local }}"
--days "{{ ck_days }}"
--step "{{ ck_step }}"
register: script_output
when: not file_preexists.stat.exists | bool
changed_when: script_output.rc == 0

- name: Read the content of the file
ansible.builtin.slurp:
src: "{{ output_file_local }}"
register: slurped_file

- name: TEST Validate JSON format of synthetic data file
ansible.builtin.assert:
that:
# This filter will trigger a task failure if the string isn't valid JSON
- slurped_file.content | b64decode | from_json is defined
fail_msg: "The file does not contain valid JSON format."
success_msg: "JSON format validated successfully."

- name: Print output_file_remote path
ansible.builtin.debug:
    msg: "Synthetic data file: {{ output_file_remote }}"

- name: Copy output file to remote host
ansible.builtin.copy:
src: "{{ output_file_local }}"
dest: "{{ output_file_remote }}"
mode: '0644'
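Taken together, the tasks above generate the file only when it does not already exist, assert that it parses as JSON, and then copy it to logs_dir on the target host. A rough local-shell equivalent of that flow, useful for debugging the generator outside Ansible (paths taken from the role defaults; not part of the role itself):

    OUT=roles/telemetry_chargeback/files/loki_synth_data.json
    # Mirror the stat guard: only generate when the file is missing.
    [ -f "$OUT" ] || python3 roles/telemetry_chargeback/files/gen_synth_loki_data.py \
        --template roles/telemetry_chargeback/files/loki_data_templ.j2 -o "$OUT"
    # Same check the assert performs: fail if the file is not valid JSON.
    python3 -m json.tool "$OUT" > /dev/null && echo "JSON format validated successfully."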
3 changes: 3 additions & 0 deletions roles/telemetry_chargeback/tasks/main.yml
@@ -1,3 +1,6 @@
---
- name: "Validate Chargeback Feature"
ansible.builtin.include_tasks: "chargeback_tests.yml"

- name: "Generate Synthetic Data"
ansible.builtin.include_tasks: "gen_synth_loki_data.yml"
3 changes: 3 additions & 0 deletions run_gen_script.sh
@@ -0,0 +1,3 @@
#!/bin/bash
cd /Users/ayefimov/sandbox/gen_ck_data
python3 gen_synth_loki_data.py --template loki_data_templ.j2 -o modified_template.json