diff --git a/README.md b/README.md
index 34f36db..d3fbab0 100644
--- a/README.md
+++ b/README.md
@@ -39,16 +39,15 @@ export OPENFGA_STORE_ID="your-store-id-here"
 
 #### Authentication Tokens
 
-Generate impersonated Heimdall JWTs for service calls using the provided helper script:
+A Heimdall JWT secret is needed to use the `!jwt` macro in playbooks. Assign
+it to a shell variable so it can be passed to the mock data tool as a
+command-line argument. No `export` step is needed, as the variable is only
+used to populate arguments for the mock data tool's shell invocation.
 
 ```bash
-PROJECTS_TOKEN="$(./scripts/mock-heimdall-jwt.sh lfx-v2-project-service "clients@m2m_helper")"
-COMMITTEES_TOKEN="$(./scripts/mock-heimdall-jwt.sh lfx-v2-committee-service "clients@m2m_helper")"
-export PROJECTS_TOKEN COMMITTEES_TOKEN
+JWT_RSA_SECRET="$(kubectl get secret/heimdall-signer-cert -n lfx -o json | jq -r '.data["signer.pem"]' | base64 --decode)"
 ```
 
-*Note: in the future we may replace this with a YAML `!jwt` macro, and pass in the just the signing key as an environment variable.*
-
 ## Usage
 
 ### Running Mock Data Generation
@@ -58,14 +57,16 @@ Use uv to run the mock data tool (uv will automatically manage Python versions a
 ```bash
 # Test the script (uv will create the virtual environment automatically).
 uv run lfx-v2-mockdata --help
+
 # Load some data!
-uv run lfx-v2-mockdata -t playbooks/projects/{root_project_access,base_projects,extra_projects} playbooks/committees/base_committees
+uv run lfx-v2-mockdata --jwt-rsa-secret "$JWT_RSA_SECRET" -t playbooks/projects/{root_project_access,base_projects,extra_projects} playbooks/committees/base_committees
 ```
 
 **Important Notes:**
 
 - **Order matters!** Playbook directories run in the order specified on the command line.
 - Within each directory, playbooks execute in alphabetical order.
 - Dependencies between playbooks should be considered when organizing execution order. Multiple passes are made to allow `!ref` calls to be resolved, but the right order will improve performance and help avoid max-retry errors.
+- The `!jwt` macro will attempt to detect the JWKS key ID from the endpoint at `http://lfx-platform-heimdall.lfx.svc.cluster.local:4457/.well-known/jwks`. If this URL is not accessible from the execution environment, you must pass an explicit JWT key ID using the `--jwt-key-id` argument.
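If the in-cluster JWKS endpoint is not reachable from where the tool runs, the key ID can be looked up out of band and then supplied via `--jwt-key-id`. A minimal sketch of that lookup, assuming the Heimdall JWKS endpoint has been exposed locally (for example with a `kubectl port-forward`; the localhost URL below is illustrative only):

```python
# Hypothetical helper: fetch the Heimdall JWKS and print the first key ID so
# it can be passed to the mock data tool via --jwt-key-id.
import requests

# Assumes the in-cluster endpoint has been forwarded to localhost; adjust as needed.
JWKS_URL = "http://localhost:4457/.well-known/jwks"

response = requests.get(JWKS_URL, timeout=10)
response.raise_for_status()
keys = response.json().get("keys", [])
if not keys:
    raise SystemExit("No keys found in JWKS response")

# The mock data tool uses the "kid" of the first JWKS key for the JWT header.
print(keys[0]["kid"])
```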
### Wiping Existing Data diff --git a/playbooks/committees/base_committees/buf_committees.yaml b/playbooks/committees/base_committees/buf_committees.yaml index 3ef2f29..647520c 100644 --- a/playbooks/committees/base_committees/buf_committees.yaml +++ b/playbooks/committees/base_committees/buf_committees.yaml @@ -14,7 +14,7 @@ buf_committees: url: {{ environ.COMMITTEES_URL | default("http://lfx-v2-committee-service.lfx.svc.cluster.local:8080/committees") }} method: POST headers: - Authorization: Bearer {{ environ.COMMITTEES_TOKEN | default("-") }} + Authorization: !jwt bearer=true,aud=lfx-v2-committee-service,principal=clients@m2m_helper steps: - json: name: Governing Board @@ -60,9 +60,9 @@ buf_board_members: url: !sub "{{ environ.COMMITTEES_URL | default('http://lfx-v2-committee-service.lfx.svc.cluster.local:8080/committees') }}/${ buf_committees.steps[?json.name == 'Governing Board']._response.uid | [0] }/members?v=1" method: POST headers: - Authorization: Bearer {{ environ.COMMITTEES_TOKEN | default("-") }} + Authorization: !jwt bearer=true,aud=lfx-v2-committee-service,principal=clients@m2m_helper steps: - {% for i in range(1) %} + {% for i in range(8) %} - json: appointed_by: Membership Entitlement email: {{ fake.ascii_company_email() }} @@ -71,17 +71,19 @@ buf_board_members: organization: name: {{ fake.company() }} website: {{ fake.url() }} - {% if i == 0 %} role: + {% if i == 0 %} name: Chair - {% endif %} + {% else %} + name: None + {% endif %} status: Active voting: {% set start_date = fake.date_this_year() %} # 365 days later {% set end_date = start_date + timedelta(days=365) %} - start_date: "{{ start_date.isoformat().replace('+00:00', 'Z') }}" - end_date: "{{ end_date.isoformat().replace('+00:00', 'Z') }}" + start_date: "{{ start_date.isoformat() }}" + end_date: "{{ end_date.isoformat() }}" status: {{ [ "Alternate Voting Rep", "Voting Rep", diff --git a/playbooks/projects/base_projects/1_tlf.yaml b/playbooks/projects/base_projects/1_tlf.yaml index 22c7e2a..8fb6885 100644 --- a/playbooks/projects/base_projects/1_tlf.yaml +++ b/playbooks/projects/base_projects/1_tlf.yaml @@ -7,7 +7,7 @@ base_projects: url: {{ environ.PROJECTS_URL | default("http://lfx-v2-project-service.lfx.svc.cluster.local:8080/projects") }} method: POST headers: - Authorization: Bearer {{ environ.PROJECTS_TOKEN | default("-") }} + Authorization: !jwt bearer=true,aud=lfx-v2-project-service,principal=clients@m2m_helper steps: - json: slug: tlf diff --git a/playbooks/projects/base_projects/2_incorporated.yaml b/playbooks/projects/base_projects/2_incorporated.yaml index 6606c58..0ba0e28 100644 --- a/playbooks/projects/base_projects/2_incorporated.yaml +++ b/playbooks/projects/base_projects/2_incorporated.yaml @@ -10,10 +10,10 @@ extra_incorporated: url: {{ environ.PROJECTS_URL | default("http://lfx-v2-project-service.lfx.svc.cluster.local:8080/projects") }} method: POST headers: - Authorization: Bearer {{ environ.PROJECTS_TOKEN | default("-") }} + Authorization: !jwt bearer=true,aud=lfx-v2-project-service,principal=clients@m2m_helper steps: {% for outer in range(12) %} - {% set project_name = generate_name(style='capital') %} + {% set project_name = fake.catch_phrase().title() %} {% set outer_stage = ["Active", "Formation - Engaged"] | random %} - json: slug: extra_inc_{{ outer }} @@ -21,8 +21,8 @@ extra_incorporated: {{ project_name + ([" Foundation", " Project", ""] | random) }} description: >- {{ project_name }} - {{ lorem.get_sentence().lower() }} - {{ lorem.get_sentence() }} + {{ fake.sentence().lower() }} + 
{{ fake.sentence() }} public: {{ outer_stage == "Active" }} parent_uid: !ref "root_project.steps[0]._response" legal_entity_name: *extra_inc_{{ outer }}_name @@ -30,7 +30,7 @@ extra_incorporated: repository_url: https://example.com/extra_inc_{{ outer }} stage: {{ outer_stage }} {% for inner in range([0, 0, 1, 2] | random) %} - {% set inner_project_name = generate_name(style='capital') %} + {% set inner_project_name = fake.catch_phrase().title() %} {% set inner_stage = ["Active", "Formation - Engaged"] | random %} - json: slug: extra_inc_{{ outer }}_{{ inner }} diff --git a/playbooks/projects/base_projects/3_umbrellas.yaml b/playbooks/projects/base_projects/3_umbrellas.yaml index 02572a2..4f602bf 100644 --- a/playbooks/projects/base_projects/3_umbrellas.yaml +++ b/playbooks/projects/base_projects/3_umbrellas.yaml @@ -7,7 +7,7 @@ sample_umbrella_buf: url: {{ environ.PROJECTS_URL | default("http://lfx-v2-project-service.lfx.svc.cluster.local:8080/projects") }} method: POST headers: - Authorization: Bearer {{ environ.PROJECTS_TOKEN | default("-") }} + Authorization: !jwt bearer=true,aud=lfx-v2-project-service,principal=clients@m2m_helper steps: - json: slug: buf @@ -26,14 +26,14 @@ sample_umbrella_buf: stage: Active website_url: https://buf-foundation.example/ {% for buf_child in range(25) %} - {% set buf_child_name = generate_name(style='capital') %} + {% set buf_child_name = fake.catch_phrase().title() %} - json: slug: {{ buf_child_name | lower | replace(" ", "-") }} name: {{ buf_child_name }} Project description: >- {{ buf_child_name }} - {{ lorem.get_sentence().lower() }} - {{ lorem.get_sentence() }} + {{ fake.sentence().lower() }} + {{ fake.sentence() }} public: true # Children's parent is the umbrella; legal parent is Linux Foundation. parent_uid: !ref "sample_umbrella_buf.steps[0]._response.uid" @@ -69,14 +69,14 @@ sample_umbrella_iubp: repository_url: https://example.com/iubp-consortium website_url: https://iubp.example/ {% for iubp_child in range(15) %} - {% set iubp_child_name = generate_name(style='capital') %} + {% set iubp_child_name = fake.catch_phrase().title() %} - json: slug: {{ iubp_child_name | lower | replace(" ", "-") }} name: {{ iubp_child_name }} Project description: >- {{ iubp_child_name }} - {{ lorem.get_sentence().lower() }} - {{ lorem.get_sentence() }} + {{ fake.sentence().lower() }} + {{ fake.sentence() }} public: true # Of course, despite looking like valid Series LLC names, these are of # course FAKE and for testing only. 
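The `Authorization: !jwt ...` headers in the playbook hunks above are resolved at render time into signed bearer tokens. For reference, here is a minimal sketch of an equivalent token minted directly with PyJWT, mirroring the claim layout the tool uses; it assumes `JWT_RSA_SECRET` is exported in the environment, and the `kid` value is a placeholder.

```python
# Minimal sketch: mint a token equivalent to
#   !jwt bearer=true,aud=lfx-v2-committee-service,principal=clients@m2m_helper
# Assumes JWT_RSA_SECRET holds the Heimdall signer key in PEM form; the kid
# below is illustrative only.
import os
import time
import uuid

import jwt  # PyJWT
from cryptography.hazmat.primitives import serialization

private_key = serialization.load_pem_private_key(
    os.environ["JWT_RSA_SECRET"].encode(), password=None
)

now = int(time.time())
principal = "clients@m2m_helper"
payload = {
    "aud": "lfx-v2-committee-service",
    "iss": "heimdall",
    "sub": principal.replace("clients@", ""),
    "principal": principal,
    "exp": now + 300,  # 5-minute expiry.
    "nbf": now,
    "jti": str(uuid.uuid4()),
}

token = jwt.encode(payload, private_key, algorithm="PS256", headers={"kid": "example-kid"})
print(f"Bearer {token}")  # bearer=true prepends the "Bearer " prefix.
```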
diff --git a/playbooks/projects/extra_projects/n_depth.yaml b/playbooks/projects/extra_projects/n_depth.yaml index 03063d0..dd059c4 100644 --- a/playbooks/projects/extra_projects/n_depth.yaml +++ b/playbooks/projects/extra_projects/n_depth.yaml @@ -15,29 +15,29 @@ n_depth: url: {{ environ.PROJECTS_URL | default("http://lfx-v2-project-service.lfx.svc.cluster.local:8080/projects") }} method: POST headers: - Authorization: Bearer {{ environ.PROJECTS_TOKEN | default("-") }} + Authorization: !jwt bearer=true,aud=lfx-v2-project-service,principal=clients@m2m_helper steps: - {% set project_name = generate_name(style='capital') %} + {% set project_name = fake.catch_phrase().title() %} - json: slug: depth_test_0 name: {{ project_name + " Foundation" }} description: >- {{ project_name }} - {{ lorem.get_sentence().lower() }} - {{ lorem.get_sentence() }} + {{ fake.sentence().lower() }} + {{ fake.sentence() }} public: true parent_uid: !ref "n_depth_tlf_lookup.steps[0]._response" legal_parent_uid: !ref "n_depth_tlf_lookup.steps[0]._response" stage: Active {% for depth in range(1, 15) %} - {% set project_name = generate_name(style='capital') %} + {% set project_name = fake.catch_phrase().title() %} - json: slug: depth_test_{{ depth }} name: {{ project_name + " Project" }} description: >- {{ project_name }} - {{ lorem.get_sentence().lower() }} - {{ lorem.get_sentence() }} + {{ fake.sentence().lower() }} + {{ fake.sentence() }} public: true parent_uid: !ref "n_depth.steps[?json.slug == 'depth_test_{{ depth - 1 }}']._response.uid | [0]" legal_parent_uid: !ref "n_depth_tlf_lookup.steps[0]._response" diff --git a/playbooks/projects/recreate_root_project/root.yaml b/playbooks/projects/recreate_root_project/root.yaml index e8f4eac..4f61ab2 100644 --- a/playbooks/projects/recreate_root_project/root.yaml +++ b/playbooks/projects/recreate_root_project/root.yaml @@ -12,7 +12,7 @@ recreate_root_project_slug: key: slug/ROOT steps: - raw: &root_project_uid - "{{ environ.PROJECTS_ROOT_UID | default(uuid()) }}" + "{{ environ.PROJECTS_ROOT_UID | default(fake.uuid4()) }}" recreate_root_project: type: nats-kv-put diff --git a/playbooks/v1_meetings/umbrella_board_meeting/board_meeting.yaml b/playbooks/v1_meetings/umbrella_board_meeting/board_meeting.yaml new file mode 100644 index 0000000..2bb1436 --- /dev/null +++ b/playbooks/v1_meetings/umbrella_board_meeting/board_meeting.yaml @@ -0,0 +1,772 @@ +# Copyright The Linux Foundation and each contributor to LFX. +# SPDX-License-Identifier: MIT +# +# This scheduled meeting, its past meetings, registrants, attendees, and +# artifacts all re-use a lot of variables and data, therefore it didn't make as +# much sense to to split the playbooks across multiple files. +--- +buf_board_meeting_project_lookup: + type: nats-request + params: + subject: lfx.projects-api.slug_to_uid + steps: + - raw: buf + +{# This *must* match the range size used for the BUF board committee member +list. #} +{% set board_member_count = 8 -%} + +# +# The scheduled meeting series itself. +# + +{# Total past meetings is used to define the initial meeting time, AND in the + loop that creates the past meetings. The logic is hardcoded to assume a + once-per-month meeting. #} +{% set total_past_meetings = 3 -%} +{% set zoom_host_user = fake.pystr(min_chars=22, max_chars=22) %} + +buf_board_meeting_create: + type: nats-publish + params: + # Create the meeting in OpenSearch (via indexer). 
+ subject: lfx.index.v1_meeting + steps: + - json: + action: created + headers: + authorization: !jwt bearer=true,aud=lfx-v2-project-service,principal={{ fake.user_name() }},email={{ fake.email() }} + x-on-behalf-of: !jwt bearer=true,aud=lfx-v2-project-service,principal=clients@m2m_helper + data: + {% set meeting_id = fake.pyint(min_value=10000000000, max_value=99999999999) %} + {# Calculate initial meeting time based on `total_past_meetings`. #} + {% set previous_meeting = fake.date_time_between( + start_date='-28d', + end_date='-14d', + ) %} + {% set meeting_start_time = ( + previous_meeting - timedelta(days=30 * (total_past_meetings - 1)) + ).replace(minute=0, second=0, microsecond=0) %} + {% set meeting_duration = 60 %} + {# Our logic only supports recurrence patterns of weeks 1 through 4, + not "last week of the month", aka week "-1", so ensure we don't + start on a month-day later than 28. #} + {% if meeting_start_time.day > 28 %} + {% set meeting_start_time = meeting_start_time.replace(day=28) %} + {% endif %} + {# Ensure it's on a weekday. #} + {% if meeting_start_time.weekday() in (5, 6) %} + {% set meeting_start_time = meeting_start_time - timedelta(days=2) %} + {% endif %} + {# Map Python weekday (0 = Monday, 6 = Sunday) to Zoom (1 = Sunday, 7 = Saturday). #} + {% set zoom_week_day = (meeting_start_time.weekday() + 1) % 7 + 1 %} + {% set week_of_month = (meeting_start_time.day / 7) | round(0, 'ceil') | int %} + topic: Monthly Board Meeting + visibility: public + agenda: "Monthly meeting for the Big Umbrella Foundation board. {{ fake.sentence() }}" + # This is rewritten as an array to incorporate the "mappings" one-to-many table. + committees: + - uid: !ref "buf_committees.steps[?json.name == 'Governing Board']._response.uid | [0]" + filters: [] + restricted: false + password: {{ fake.uuid4() }} + recurrence: + # 100 years from initial meeting. + end_date_time: "{{ (meeting_start_time + timedelta(days=365*100)).isoformat() }}Z" + repeat_interval: "1" + type: "3" + monthly_week: "{{ week_of_month }}" + monthly_week_day: "{{ zoom_week_day }}" + zoom_ai_enabled: true + # 6-digit numeric passcode. + host_key: "{{ fake.pyint(min_value=100000, max_value=999999) }}" + transcript_enabled: true + early_join_time: "10" + recording_enabled: true + duration: "{{ meeting_duration }}" + meeting_id: "{{ meeting_id }}" + recording_access: public + concurrent_zoom_user_enabled: true + # TODO. + #updated_occurrences: + # - duration: "60" + # recurrence: + # repeat_interval: "2" + # end_date_time: "2124-10-07T13:00:00Z" + # type: "2" + # weekly_days: "3" + # old_occurrence_id: "1762275600" + # timezone: "" + # topic: Monthly Board Meeting + # all_following: true + # new_occurrence_id: "1762275600" + # agenda: "" + require_ai_summary_approval: true + use_new_invite_email_address: true + # TODO + #ics_additional_uids: + # - 98457927869:2025-11-04T17:00:00Z + created_at: &board_meeting_created_at + "{{ fake.date_time_between(start_date='-1y', end_date='-90d').isoformat() }}Z" + meeting_type: Board + join_url: "https://zoom.example/j/{{ meeting_id }}?pwd={{ fake.pystr() }}" + transcript_access: public + start_time: "{{ meeting_start_time.isoformat() }}Z" + # Note, proj_id (sfid) in the source data will have been stripped by lfx-v1-sync-helper. + project_uid: !ref "buf_board_meeting_project_lookup.steps[0]._response" + # Note, this is later than `created_at`. 
+ modified_at: "{{ fake.date_time_between(start_date='-90d', end_date='-30d').isoformat() }}Z" + last_end_time: "{{ (meeting_start_time + timedelta(days=365*100, minutes=meeting_duration+60)).timestamp() | int }}" + user_id: "{{ zoom_host_user }}" + last_mailing_list_members_sync_job_status: none + # Use "America/Los_Angeles" 50% of the time, else random. + {% if fake.pybool() %} + timezone: "America/Los_Angeles" + {% else %} + timezone: "{{ fake.timezone() }}" + {% endif %} + last_bulk_registrant_job_status: none + ai_summary_access: public + # 6-digit numeric passcode. + passcode: "{{ fake.pyint(min_value=100000, max_value=999999) }}" + +buf_board_meeting_access_update: + type: nats-publish + params: + # Add meeting relationships in OpenFGA (via fga-sync). + subject: lfx.update_access.v1_meeting + steps: + - json: !ref buf_board_meeting_create.steps[0].json.data + +# +# Participants for the scheduled meeting itself. +# +# Note, there is no "v1_meeting_settings" object, since in v2 that only holds +# organizers, which don't exist in v1. +# + +{% set member_has_username = fake.pylist(nb_elements=board_member_count, variable_nb_elements=False, value_types=["bool"]) %} + +buf_board_meeting_committee_participants_create: + type: nats-publish + params: + # Create the registered participant in OpenSearch (via indexer). + subject: lfx.index.v1_meeting_registrant + steps: + # Count matches the count in buf_board_members. + {% for i in range(board_member_count) %} + - json: + action: created + headers: + authorization: !jwt bearer=true,aud=lfx-v2-project-service,principal={{ fake.user_name() }},email={{ fake.email() }} + x-on-behalf-of: !jwt bearer=true,aud=lfx-v2-project-service,principal=clients@m2m_helper + data: + registrant_id: "{{ fake.uuid4() }}" + meeting_id: "{{ meeting_id }}" + committee: !ref "buf_committees.steps[?json.name == 'Governing Board']._response.uid | [0]" + org: !ref "buf_board_members.steps[{{ i }}].json.organization.name" + org_is_project_member: {{ fake.pybool() }} + email: !ref "buf_board_members.steps[{{ i }}].json.email" + last_invite_delivery_successful: true + last_invite_received_time: "{{ fake.date_time_between(start_date='-30d', end_date='now').isoformat() }}Z" + # TODO. + profile_picture: "https://lfx-cdn-prod.s3.amazonaws.com/users/avatar/a.png" + host: {{ fake.pybool() }} + last_invite_bounced: false + {% if fake.pybool() %} + job_title: "-" + {% else %} + job_title: "{{ fake.job() }}" + {% endif %} + {% if fake.pybool() %} + created_at: *board_meeting_created_at + {% else %} + created_at: "{{ fake.date_time_between(start_date='-90d', end_date='-60d').isoformat() }}Z" + {% endif %} + org_is_member: {{ fake.pybool() }} + last_invite_received_message_id: "{{ fake.uuid4().replace('-', '')[0:40] }}" + modified_at: "{{ fake.date_time_between(start_date='-60d', end_date='-30d').isoformat() }}Z" + # 50% chance to be empty, else use the local part of their email as a + # mock LFID username. `user_id` (sfid) is stripped by lfx-v1-sync-helper. + {% if member_has_username[i] %} + # TODO to parse from email local part. + username: {{ fake.user_name() }} + {% else %} + username: "" + {% endif %} + last_name: !ref "buf_board_members.steps[{{ i }}].json.last_name" + # faker emails SHOULD be all lowercase at present. 
+ case_sensitive_email: !ref buf_board_members.steps[{{ i }}].json.email + first_name: !ref buf_board_members.steps[{{ i }}].json.first_name + last_invite_delivered_time: "{{ fake.date_time_between(start_date='-60d', end_date='now').isoformat() }}Z" + type: committee + {% endfor %} + +buf_board_meeting_committee_participants_access_update: + type: nats-publish + params: + # Create the registered participant in OpenFGA (via fga-sync). + subject: lfx.put_registrant.v1_meeting + steps: + {% for i in range(board_member_count) %} + {% if member_has_username[i] %} + {# Only send access update if the member has a username! #} + - json: !ref buf_board_meeting_committee_participants_create.steps[{{ i }}].json.data + {% endif %} + {% endfor %} + +# +# Loop over each past meeting. +# + +{% set ns = namespace(occurrence_start_time=meeting_start_time) %} +{# Loop over the number of past monthly meetings to create #} +{% for past_meeting_index in range(total_past_meetings) -%} +{# Note, incrementing occurrence_start_time happpens at the END of the loop. #} +{% set occurrence_actual_start = fake.date_time_between( + start_date=ns.occurrence_start_time - timedelta(minutes=10), + end_date=ns.occurrence_start_time + timedelta(minutes=2), +) %} +{% set occurrence_actual_end = fake.date_time_between( + start_date=occurrence_actual_start + timedelta(minutes=meeting_duration-15), + end_date=ns.occurrence_start_time + timedelta(minutes=meeting_duration+15), +) %} +{% set occurrence_created = fake.date_time_between( + start_date=occurrence_actual_start, + end_date=occurrence_actual_start + timedelta(seconds=15), +) %} +{% set occurrence_modified = fake.date_time_between( + start_date=occurrence_created, + end_date=occurrence_actual_end, +) %} +{% set occurrence_id = (ns.occurrence_start_time.timestamp() * 1000) | int -%} +{% set session_uuid = b64encode(fake.binary(length=16)) %} + +buf_board_past_meeting_{{ past_meeting_index }}_create: + type: nats-publish + params: + # Create the past meeting in OpenSearch (via indexer). + subject: lfx.index.v1_past_meeting + steps: + - json: + action: created + headers: + authorization: !jwt bearer=true,aud=lfx-v2-project-service,principal={{ fake.user_name() }},email={{ fake.email() }} + x-on-behalf-of: !jwt bearer=true,aud=lfx-v2-project-service,principal=clients@m2m_helper + data: + topic: Monthly Board Meeting + artifacts: null + visibility: public + agenda: "Monthly meeting for the Big Umbrella Foundation board. 
{{ fake.sentence() }}" + committees: + - uid: !ref "buf_committees.steps[?json.name == 'Governing Board']._response.uid | [0]" + filters: + - voting_rep + - alt_voting_rep + - observer + - emeritus + restricted: true + is_manually_created: false + project_slug: buf + scheduled_end_time: "{{ (ns.occurrence_start_time + timedelta(minutes=meeting_duration)).isoformat() }}Z" + recurrence: !ref buf_board_meeting_create.steps[0].json.data.recurrence + zoom_ai_enabled: true + transcript_enabled: true + early_join_time: "10" + sessions: + - end_time: "{{ occurrence_actual_end.isoformat() }}Z" + uuid: "{{ session_uuid }}" + start_time: "{{ occurrence_actual_start.isoformat() }}Z" + recording_enabled: true + meeting_and_occurrence_id: "{{ meeting_id }}-{{ occurrence_id }}" + duration: "{{ meeting_duration }}" + meeting_id: "{{ meeting_id }}" + recording_access: public + scheduled_start_time: "{{ ns.occurrence_start_time.isoformat() }}Z" + require_ai_summary_approval: true + created_at: "{{ occurrence_created.isoformat() }}Z" + meeting_type: "" + occurrence_id: "{{ occurrence_id }}" + transcript_access: public + recording_password: "" + # Note, proj_id (sfid) in the source data will have been stripped by + # lfx-v1-sync-helper. We'll still keep project_slug, though. + project_uid: !ref "buf_board_meeting_project_lookup.steps[0]._response" + modified_at: "{{ occurrence_modified.isoformat() }}Z" + timezone: !ref buf_board_meeting_create.steps[0].json.data.timezone + ai_summary_access: public + # TODO ... what is this? (it didn't necesesarily match recurrence.type in my test data) + type: "3" + +buf_board_past_meeting_{{ past_meeting_index }}_access_update: + type: nats-publish + params: + # Add past meeting relationships in OpenFGA (via fga-sync). + subject: lfx.update_access.v1_past_meeting + steps: + - json: !ref buf_board_past_meeting_{{ past_meeting_index }}_create.steps[0].json.data + +# +# Meeting artifacts (recording, transcript, summary) for each past meeting. +# + +{% set recording_create_time = fake.date_time_between( + start_date=occurrence_actual_end, + end_date=occurrence_actual_end + timedelta(minutes=30), +) %} +{% set recording_modify_time = fake.date_time_between( + start_date=recording_create_time, + end_date=recording_create_time + timedelta(minutes=5), +) %} +{% set audio_only_size = fake.pyint(min_value=60000000, max_value=120000000) %} +{% set timeline_size = fake.pyint(min_value=2000000, max_value=5000000) %} +{% set shared_screen_with_speaker_view_size = fake.pyint(min_value=100000000, max_value=800000000) %} +{% set chat_file_size = fake.pyint(min_value=10, max_value=200) %} +{% set audio_transcript_size = fake.pyint(min_value=80000, max_value=200000) %} +{% set total_recording_size = ( + audio_only_size + + timeline_size + + shared_screen_with_speaker_view_size + + chat_file_size + + audio_transcript_size +) %} +{# The "share" object's size doesn't include the transcript, based on observed + data. #} +{% set total_size_without_transcript = ( + audio_only_size + + timeline_size + + shared_screen_with_speaker_view_size + + chat_file_size +) -%} + +buf_board_past_meeting_{{ past_meeting_index }}_recording_create: + type: nats-publish + params: + # Create the past meeting recording in OpenSearch (via indexer). 
+ subject: lfx.index.v1_past_meeting_recording + steps: + - json: + action: created + headers: + authorization: !jwt bearer=true,aud=lfx-v2-project-service,principal={{ fake.user_name() }},email={{ fake.email() }} + x-on-behalf-of: !jwt bearer=true,aud=lfx-v2-project-service,principal=clients@m2m_helper + data: + # This is a filtered version of the v1 recordings object. + created_at: "{{ recording_create_time.isoformat() }}Z" + total_size: "{{ total_recording_size }}" + topic: Monthly Board Meeting + occurrence_id: "{{ occurrence_id }}" + # `visibility` is for the meeting and should not be used for this object. + # visibility: public + recording_files: + {% set recording_hash = fake.pystr(min_chars=90, max_chars=120) %} + - play_url: https://zoom.example/rec/play/{{ recording_hash }} + file_type: M4A + meeting_id: "{{ session_uuid }}" + recording_type: audio_only + download_url: https://zoom.example/rec/webhook_download/{{ recording_hash }}/{{ fake.pystr(min_chars=340, max_chars=350) }} + file_extension: M4A + id: {{ fake.uuid4() }} + file_size: "{{ audio_only_size }}" + status: completed + recording_start: "{{ occurrence_actual_start.isoformat() }}Z" + recording_end: "{{ occurrence_actual_end.isoformat() }}Z" + {% set recording_hash = fake.pystr(min_chars=90, max_chars=120) %} + - play_url: https://zoom.example/rec/play/{{ recording_hash }} + file_type: MP4 + meeting_id: "{{ session_uuid }}" + recording_type: shared_screen_with_speaker_view + download_url: https://zoom.example/rec/webhook_download/{{ recording_hash }}/{{ fake.pystr(min_chars=340, max_chars=350) }} + file_extension: MP4 + id: {{ fake.uuid4() }} + file_size: "{{ shared_screen_with_speaker_view_size }}" + status: completed + recording_start: "{{ occurrence_actual_start.isoformat() }}Z" + recording_end: "{{ occurrence_actual_end.isoformat() }}Z" + {% set recording_hash = fake.pystr(min_chars=90, max_chars=120) %} + - play_url: https://zoom.example/rec/play/{{ recording_hash }} + meeting_id: "{{ session_uuid }}" + recording_type: chat_file + file_type: CHAT + download_url: https://zoom.example/rec/webhook_download/{{ recording_hash }}/{{ fake.pystr(min_chars=340, max_chars=350) }} + file_extension: TXT + id: {{ fake.uuid4() }} + file_size: "{{ chat_file_size }}" + recording_start: "{{ occurrence_actual_start.isoformat() }}Z" + status: completed + recording_end: "{{ occurrence_actual_end.isoformat() }}Z" + # `transcript_access` is filtered from the recording object. + # transcript_access: public + start_time: "" + host_id: {{ zoom_host_user }} + # Note, proj_id (sfid) in the source data will have been stripped by lfx-v1-sync-helper. + project_uid: !ref "buf_board_meeting_project_lookup.steps[0]._response" + modified_at: "{{ recording_modify_time.isoformat() }}Z" + project_slug: buf + transcript_enabled: true + host_email: itx+example999@lfx.linuxfoundation.org + sessions: + {% set share_hash = fake.pystr(min_chars=80, max_chars=90) %} + - total_size: "{{ total_size_without_transcript }}" + start_time: "{{ occurrence_actual_start.isoformat() }}Z" + password: "" + uuid: "{{ session_uuid }}" + share_url: https://zoom.example/rec/share/{{ share_hash }} + recording_count: "5" + meeting_and_occurrence_id: "{{ meeting_id }}-{{ occurrence_id }}" + meeting_id: "{{ meeting_id }}" + recording_access: public + +buf_board_past_meeting_{{ past_meeting_index }}_recording_access_update: + type: nats-publish + params: + # Add past meeting recording relationships in OpenFGA (via fga-sync). 
+ subject: lfx.update_access.v1_past_meeting_recording + steps: + - json: + # Only include the fields needed for access control + id: !ref buf_board_past_meeting_{{ past_meeting_index }}_recording_create.steps[0].json.data.meeting_and_occurrence_id + meeting_and_occurrence_id: !ref buf_board_past_meeting_{{ past_meeting_index }}_recording_create.steps[0].json.data.meeting_and_occurrence_id + recording_access: public + participants: + {% for i in range(board_member_count) %} + {% if member_has_username[i] %} + - lf_sso: !ref "buf_board_meeting_committee_participants_create.steps[{{ i }}].json.data.username" + host: !ref "buf_board_meeting_committee_participants_create.steps[{{ i }}].json.data.host" + is_invited: true + is_attended: !ref "buf_board_past_meeting_{{ past_meeting_index }}_participants_create.steps[{{ i }}].json.data.is_attended" + {% endif %} + {% endfor %} + +buf_board_past_meeting_{{ past_meeting_index }}_transcript_create: + type: nats-publish + params: + # Create the past meeting transcript in OpenSearch (via indexer). + subject: lfx.index.v1_past_meeting_transcript + steps: + - json: + action: created + headers: + authorization: !jwt bearer=true,aud=lfx-v2-project-service,principal={{ fake.user_name() }},email={{ fake.email() }} + x-on-behalf-of: !jwt bearer=true,aud=lfx-v2-project-service,principal=clients@m2m_helper + data: + # This is a filtered version of the v1 recordings object. + created_at: "{{ recording_create_time.isoformat() }}Z" + total_size: "{{ total_recording_size }}" + topic: Monthly Board Meeting + occurrence_id: "{{ occurrence_id }}" + # `visibility` is for the meeting and should not be used for this object. + # visibility: public + recording_files: + {% set recording_hash = fake.pystr(min_chars=90, max_chars=120) %} + - play_url: "" + file_type: TIMELINE + meeting_id: "{{ session_uuid }}" + recording_type: timeline + download_url: https://zoom.example/rec/webhook_download/{{ recording_hash }}/{{ fake.pystr(min_chars=340, max_chars=350) }} + file_extension: JSON + id: {{ fake.uuid4() }} + file_size: "{{ timeline_size }}" + recording_end: "{{ occurrence_actual_end.isoformat() }}Z" + recording_start: "{{ occurrence_actual_start.isoformat() }}Z" + status: completed + {% set recording_hash = fake.pystr(min_chars=90, max_chars=120) %} + - play_url: https://zoom.example/rec/play/{{ recording_hash }} + meeting_id: "{{ session_uuid }}" + file_type: TRANSCRIPT + recording_type: audio_transcript + download_url: https://zoom.example/rec/webhook_download/{{ recording_hash }}/{{ fake.pystr(min_chars=340, max_chars=350) }} + file_extension: VTT + id: {{ fake.uuid4() }} + file_size: "{{ audio_transcript_size }}" + recording_start: "{{ occurrence_actual_start.isoformat() }}Z" + status: completed + recording_end: "{{ occurrence_actual_end.isoformat() }}Z" + transcript_access: public + start_time: "" + host_id: {{ zoom_host_user }} + # Note, proj_id (sfid) in the source data will have been stripped by lfx-v1-sync-helper. 
+ project_uid: !ref "buf_board_meeting_project_lookup.steps[0]._response" + modified_at: "{{ recording_modify_time.isoformat() }}Z" + project_slug: buf + transcript_enabled: true + host_email: itx+example999@lfx.linuxfoundation.org + sessions: + {% set share_hash = fake.pystr(min_chars=80, max_chars=90) %} + - total_size: "{{ total_size_without_transcript }}" + start_time: "{{ occurrence_actual_start.isoformat() }}Z" + password: "" + uuid: "{{ session_uuid }}" + share_url: https://zoom.example/rec/share/{{ share_hash }} + recording_count: "5" + meeting_and_occurrence_id: "{{ meeting_id }}-{{ occurrence_id }}" + meeting_id: "{{ meeting_id }}" + # `recording_access` is filtered from the transcript object. + # recording_access: public + +buf_board_past_meeting_{{ past_meeting_index }}_transcript_access_update: + type: nats-publish + params: + # Add past meeting transcript relationships in OpenFGA (via fga-sync). + subject: lfx.update_access.v1_past_meeting_transcript + steps: + - json: + # Only include the fields needed for access control + id: !ref buf_board_past_meeting_{{ past_meeting_index }}_transcript_create.steps[0].json.data.meeting_and_occurrence_id + meeting_and_occurrence_id: !ref buf_board_past_meeting_{{ past_meeting_index }}_transcript_create.steps[0].json.data.meeting_and_occurrence_id + transcript_access: public + participants: + {% for i in range(board_member_count) %} + {% if member_has_username[i] %} + - lf_sso: !ref "buf_board_meeting_committee_participants_create.steps[{{ i }}].json.data.username" + host: !ref "buf_board_meeting_committee_participants_create.steps[{{ i }}].json.data.host" + is_invited: true + is_attended: !ref "buf_board_past_meeting_{{ past_meeting_index }}_participants_create.steps[{{ i }}].json.data.is_attended" + {% endif %} + {% endfor %} + +{% set summary_create_time = fake.date_time_between( + start_date=occurrence_actual_end, + end_date=occurrence_actual_end + timedelta(minutes=10), +) %} + +buf_board_past_meeting_{{ past_meeting_index }}_summary_create: + type: nats-publish + params: + # Create the registered participant in OpenSearch (via indexer). 
+ subject: lfx.index.v1_meeting_summary + steps: + - json: + action: created + headers: + authorization: !jwt bearer=true,aud=lfx-v2-project-service,principal={{ fake.user_name() }},email={{ fake.email() }} + x-on-behalf-of: !jwt bearer=true,aud=lfx-v2-project-service,principal=clients@m2m_helper + data: + summary_details: + {% for i in range(fake.pyint(min_value=8, max_value=12)) %} + - summary: >- + {% for i in range(fake.pyint(min_value=3, max_value=6)) -%} + {{ fake.sentence() }} + {% endfor %} + label: {{ fake.catch_phrase() }} + {% endfor %} + requires_approval: true + zoom_meeting_uuid: {{ session_uuid }} + edited_next_steps: null + approved: false + summary_created_time: "{{ summary_create_time.isoformat() }}Z" + password: {{ fake.uuid4() }} + summary_last_modified_time: "{{ summary_create_time.isoformat() }}Z" + summary_start_time: "{{ occurrence_actual_start.isoformat() }}Z" + summary_title: Meeting Summary for Monthly Board Meeting + zoom_meeting_topic: Monthly Board Meeting + meeting_and_occurrence_id: "{{ meeting_id }}-{{ occurrence_id }}" + id: {{ fake.uuid4() }} + meeting_id: "{{ meeting_id }}" + edited_summary_details: null + email_sent: true + created_at: "{{ summary_create_time.isoformat() }}Z" + edited_summary_overview: "" + next_steps: + {% for i in range(fake.pyint(min_value=8, max_value=11)) %} + - "{{ fake.first_name() }}: {{ fake.bs() }} and {{ fake.bs() }}" + {% endfor %} + occurrence_id: "{{ occurrence_id }}" + summary_end_time: "{{ occurrence_actual_end.isoformat() }}Z" + modified_at: "{{ summary_create_time.isoformat() }}Z" + zoom_meeting_host_email: itx+example999@lfx.linuxfoundation.org + # TODO. + zoom_webhook_event: '{"event":"event.name","event_ts":9999,"payload":{"key":"value"}}' + zoom_meeting_host_id: "{{ zoom_host_user }}" + summary_overview: >- + The Board meeting {{ fake.sentence(nb_words=6).lower() -}} + {% for i in range(fake.pyint(min_value=2, max_value=5)) -%} + {{ fake.sentence() -}} + {% endfor %} + +buf_board_past_meetings_{{ past_meeting_index }}_summary_access_update: + type: nats-publish + params: + # The past meeting AI summary (to fga-sync). + subject: lfx.update_access.v1_past_meeting_summary + steps: + - json: + # Only include the fields needed for access control + id: !ref buf_board_past_meeting_{{ past_meeting_index }}_summary_create.steps[0].json.data.id + meeting_and_occurrence_id: !ref buf_board_past_meeting_{{ past_meeting_index }}_summary_create.steps[0].json.data.meeting_and_occurrence_id + summary_access: public + participants: + {% for i in range(board_member_count) %} + {% if member_has_username[i] %} + - lf_sso: !ref "buf_board_meeting_committee_participants_create.steps[{{ i }}].json.data.username" + host: !ref "buf_board_meeting_committee_participants_create.steps[{{ i }}].json.data.host" + is_invited: true + is_attended: !ref "buf_board_past_meeting_{{ past_meeting_index }}_participants_create.steps[{{ i }}].json.data.is_attended" + {% endif %} + {% endfor %} + +# +# Add participants for past meeting occurrence #{{ past_meeting_index }}. +# + +buf_board_past_meeting_{{ past_meeting_index }}_participants_create: + type: nats-publish + params: + # Create the registered participant in OpenSearch (via indexer). 
+ subject: lfx.index.v1_past_meeting_participant + steps: + {% for i in range(board_member_count) -%} + {# 50-50 chance of this participant also being an attender #} + {% set is_attended = fake.pybool() %} + {% if is_attended %} + {% set attendee_join_time = fake.date_time_between( + start_date=occurrence_actual_start, + end_date=occurrence_actual_start + timedelta(minutes=meeting_duration//2), + ) %} + {% set attendee_leave_time = fake.date_time_between( + start_date=occurrence_actual_end - timedelta(minutes=meeting_duration//2), + end_date=occurrence_actual_end, + ) %} + {% endif %} + - json: + action: created + headers: + authorization: !jwt bearer=true,aud=lfx-v2-project-service,principal={{ fake.user_name() }},email={{ fake.email() }} + x-on-behalf-of: !jwt bearer=true,aud=lfx-v2-project-service,principal=clients@m2m_helper + data: + # v2 attributes for convenience. + is_invited: true + is_attended: {{ is_attended }} + # The remaining attributes are a combination of synced ITX "attendees" + # and "invitees" table data. + org: !ref "buf_board_members.steps[{{ i }}].json.organization.name" + committee_role: !ref "buf_board_members.steps[{{ i }}].json.role.name" + org_is_project_member: !ref "buf_board_meeting_committee_participants_create.steps[{{ i }}].json.data.org_is_project_member" + registrant_id: !ref "buf_board_meeting_committee_participants_create.steps[{{ i }}].json.data.registrant_id" + invitee_id: {{ fake.uuid4() }} + email: !ref "buf_board_members.steps[{{ i }}].json.email" + first_name: !ref "buf_board_members.steps[{{ i }}].json.first_name" + last_name: !ref "buf_board_members.steps[{{ i }}].json.last_name" + job_title: !ref "buf_board_meeting_committee_participants_create.steps[{{ i }}].json.data.job_title" + project_slug: buf + profile_picture: !ref "buf_board_meeting_committee_participants_create.steps[{{ i }}].json.data.profile_picture" + committee_voting_status: !ref "buf_board_members.steps[{{ i }}].json.voting.status" + meeting_id: "{{ meeting_id }}" + meeting_and_occurrence_id: "{{ meeting_id }}-{{ occurrence_id }}" + created_at: "{{ occurrence_created.isoformat() }}Z" + occurrence_id: "{{ occurrence_id }}" + org_is_member: false + # Note, proj_id (sfid) in the source data will have been stripped by lfx-v1-sync-helper. + project_uid: !ref "buf_board_meeting_project_lookup.steps[0]._response" + {% if is_attended %} + modified_at: "{{ (attendee_leave_time + timedelta(seconds=3)).isoformat() }}Z" + {% else %} + modified_at: "{{ occurrence_modified.isoformat() }}Z" + {% endif %} + # Note, lf_user_id (sfid) is stripped by lfx-v1-sync-helper. + lf_sso: !ref "buf_board_meeting_committee_participants_create.steps[{{ i }}].json.data.username" + committee_id: !ref "buf_committees.steps[?json.name == 'Governing Board']._response.uid | [0]" + {% if is_attended %} + # name is captured for attendees only (first/last are captured for invitees). + name: !sub "${ buf_board_members.steps[{{ i }}].json.first_name } ${ buf_board_members.steps[{{ i }}].json.last_name }" + # more attendee-specific fields: + sessions: + - leave_time: "{{ attendee_leave_time.isoformat() }}Z" + leave_reason: 'left the meeting. Reason : left the meeting' + join_time: "{{ attendee_join_time.isoformat() }}Z" + participant_uuid: "{{ fake.uuid4().upper() }}" + # TODO: is this actually random, or should it match something? + id: "{{ fake.uuid4() }}" + # This seems wrong in the observed source data (it's false even though + # committee_voting_status is set!). Keeping as-is for fidelity. 
+ is_committee_member: false + is_verified: true + is_unknown: false + AverageAttendance: "0" + {% endif %} + {% endfor %} + +buf_board_past_meeting_{{ past_meeting_index }}_participants_access_update: + type: nats-publish + params: + # Create the past-meeting participant in OpenFGA (via fga-sync). + subject: lfx.put_participant.v1_past_meeting + steps: + {% for i in range(board_member_count) %} + {% if member_has_username[i] %} + {# Only send access update if the member has a username! #} + - json: !ref buf_board_past_meeting_{{ past_meeting_index }}_participants_create.steps[{{ i }}].json.data + {% endif %} + {% endfor %} + +# +# Add 1 to 2 guests to this occurrence (indexer only; no fga-sync). +# + +buf_board_past_meeting_{{ past_meeting_index }}_participant_guests_create: + type: nats-publish + params: + subject: lfx.index.v1_past_meeting_participant + steps: + {% for i in range(fake.pyint(min_value=1, max_value=2)) -%} + {% set attendee_join_time = fake.date_time_between( + start_date=occurrence_actual_start, + end_date=occurrence_actual_start + timedelta(minutes=meeting_duration//2), + ) %} + {% set attendee_leave_time = fake.date_time_between( + start_date=occurrence_actual_end - timedelta(minutes=meeting_duration//2), + end_date=occurrence_actual_end, + ) %} + - json: + action: created + headers: + authorization: !jwt bearer=true,aud=lfx-v2-project-service,principal={{ fake.user_name() }},email={{ fake.email() }} + x-on-behalf-of: !jwt bearer=true,aud=lfx-v2-project-service,principal=clients@m2m_helper + data: + # v2 attributes for convenience. + is_invited: false + is_attended: true + # The remaining attributes only come from the ITX "attendees" table, so + # some things are missing (like first/last names). + org: "" + committee_role: "" + org_is_project_member: false + registrant_id: "" + email: "" + name: "{{ fake.name() }} ({{ fake.company() }})" + project_slug: buf + profile_picture: "" + is_committee_member: false + committee_voting_status: "" + sessions: + - leave_time: "{{ attendee_leave_time.isoformat() }}Z" + leave_reason: 'left the meeting. Reason : left the meeting' + join_time: "{{ attendee_join_time.isoformat() }}Z" + participant_uuid: "{{ fake.uuid4().upper() }}" + meeting_and_occurrence_id: "{{ meeting_id }}-{{ occurrence_id }}" + id: "{{ fake.uuid4() }}" + meeting_id: "{{ meeting_id }}" + is_verified: false + job_title: "" + is_unknown: false + AverageAttendance: "0" + created_at: "{{ (attendee_join_time + timedelta(seconds=3)).isoformat() }}Z" + occurrence_id: "{{ occurrence_id }}" + org_is_member: false + # Note, proj_id (sfid) in the source data will have been stripped by lfx-v1-sync-helper. + project_uid: !ref "buf_board_meeting_project_lookup.steps[0]._response" + modified_at: "{{ (attendee_leave_time + timedelta(seconds=3)).isoformat() }}Z" + # Note, lf_user_id (sfid) is stripped by lfx-v1-sync-helper. + lf_sso: "" + committee_id: "" + {% endfor %} + +{# Find the next past meeting time based on the same day-of-week and + week-of-month as the previous meeting. #} +{% set weeks_to_next_month = ((32 - ns.occurrence_start_time.day) / 7) | round(0, 'ceil') | int %} +{% set ns.occurrence_start_time = ns.occurrence_start_time + timedelta(days=7 * weeks_to_next_month) %} +{# Now, adjust to the correct week in the month. 
#} +{% set week_offset = ((ns.occurrence_start_time.day / 7) | round(0, 'ceil') | int) - week_of_month %} +{% set ns.occurrence_start_time = ns.occurrence_start_time - timedelta(days=7 * week_offset) %} + +{%- endfor %} diff --git a/pyproject.toml b/pyproject.toml index c5ded48..6f9862c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,14 +6,14 @@ name = "lfx-v2-mockdata" version = "0.1.0" requires-python = "~=3.12.5" dependencies = [ + "cryptography>=42.0.0", "faker>=37.12.0", "jinja2>=3.1.6", "jmespath>=1.0.1", - "names-generator>=0.2.0", "nats-py>=2.9.0", "pydantic>=2.10.5", + "pyjwt>=2.10.0", "python-dotenv>=1.0.1", - "python-lorem>=1.3.0.post3", "pyyaml>=6.0.2", "requests>=2.32.4", "structlog>=24.4.0", diff --git a/src/lfx_v2_mockdata/__init__.py b/src/lfx_v2_mockdata/__init__.py index 17700e8..23a45d3 100644 --- a/src/lfx_v2_mockdata/__init__.py +++ b/src/lfx_v2_mockdata/__init__.py @@ -30,21 +30,23 @@ import os import re import sys -import uuid +import time +from base64 import b64encode from collections import OrderedDict from http import HTTPMethod from typing import Any import jmespath -import lorem +import jwt import nats import requests import structlog import yaml +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa from dotenv import load_dotenv from faker import Faker from jinja2 import Environment, FileSystemLoader, select_autoescape -from names_generator import generate_name from nats.aio.client import Client as NatsClient from nats.errors import TimeoutError from nats.js import JetStreamContext @@ -70,6 +72,8 @@ class UploadMockDataArgs(BaseModel): dry_run: bool = False upload: bool = False force: bool = False + jwt_rsa_secret: str | None = None + jwt_key_id: str | None = None jmespath_context: contextvars.ContextVar[dict[str, Any]] = contextvars.ContextVar( @@ -85,6 +89,11 @@ class UploadMockDataArgs(BaseModel): nats_client: None | NatsClient = None jetstream_client: None | JetStreamContext = None +# JWT caching variables. +_jwt_cache: dict[str, tuple[str, float]] = {} # Cache JWT tokens with expiry. +_jwks_kid_cache: str = "" # Cache JWKS key ID. +JWT_CACHE_TTL = 240 # Cache JWT tokens for 4 minutes (1 minute before expiry). + # NATS configuration. NATS_URL = os.getenv("NATS_URL", "nats://nats:4222") WAIT_TIMEOUT = 10 # seconds @@ -222,11 +231,165 @@ def replace_placeholder(match): return result +class JWTGenerator(yaml.YAMLObject): + """JWTGenerator represents a parsed !jwt YAML tag. + + The !jwt tag generates a JWT token with the specified claims. + Arguments are passed as key=value pairs separated by commas. + + Example: + !jwt aud=lfx-v2-project-service,principal=clients@m2m_helper,\\ + email=test@example.com + """ + + def __init__(self, args_string): + self.args_string = args_string + self.parsed_args = self._parse_args(args_string) + + def __repr__(self): + return f"JWTGenerator({repr(self.args_string)})" + + def __str__(self): + return str(self.generate_jwt()) + + def _parse_args(self, args_string: str) -> dict[str, str]: + """Parse key=value,key=value arguments into a dictionary.""" + args_dict: dict[str, str] = {} + if not args_string: + return args_dict + + # Split by comma and parse key=value pairs. 
+ for pair in args_string.split(","): + pair = pair.strip() + if "=" not in pair: + continue + key, value = pair.split("=", 1) + args_dict[key.strip()] = value.strip() + return args_dict + + def generate_jwt(self) -> str: + """Generate a JWT token based on the provided arguments.""" + cli_args = args.get() + + # Check if we have the RSA secret. + if not cli_args.jwt_rsa_secret: + raise ValueError("JWT RSA secret not provided via --jwt-rsa-secret") + + # Required arguments. + audience = self.parsed_args.get("aud") + principal = self.parsed_args.get("principal") + + if not audience: + raise ValueError("JWT 'aud' (audience) argument is required") + if not principal: + raise ValueError("JWT 'principal' argument is required") + + # Optional email. + email = self.parsed_args.get("email") + + # Create cache key based on the JWT arguments. + bearer = self.parsed_args.get("bearer") + cache_key = f"{audience}|{principal}|{email or ''}|{bearer or ''}" + + # Check if we have a cached JWT that's still valid. + now = time.time() + if cache_key in _jwt_cache: + cached_token, cache_time = _jwt_cache[cache_key] + if now - cache_time < JWT_CACHE_TTL: + return cached_token + + # Get or fetch the key ID. + key_id = self._get_key_id(cli_args) + + # Create JWT payload. + now_int = int(now) + payload = { + "aud": audience, + "iss": "heimdall", + "sub": principal.replace("clients@", ""), # Remove clients@ prefix. + "principal": principal, + "exp": now_int + 300, # 5 minutes expiry. + "nbf": now_int, # Valid from now. + "jti": fake.uuid4(), # Unique JWT ID. + } + + # Add email to payload if provided. + if email: + payload["email"] = email + + # Load the RSA private key. + try: + loaded_key = serialization.load_pem_private_key( + cli_args.jwt_rsa_secret.encode(), password=None + ) + # Ensure we have an RSA private key for PS256 algorithm. + if not isinstance(loaded_key, rsa.RSAPrivateKey): + raise ValueError( + "JWT signing requires an RSA private key for PS256 algorithm" + ) + private_key = loaded_key + except Exception as e: + raise ValueError(f"Failed to load RSA private key: {e}") from e + + # Generate the JWT. + token = jwt.encode( + payload, + private_key, + algorithm="PS256", + headers={"kid": key_id}, + ) + + if bearer and bearer.lower() in ("true", "t", "1", "yes", "y"): + token = f"Bearer {token}" + + # Cache the generated token. + _jwt_cache[cache_key] = (token, now) + + return token + + def _get_key_id(self, cli_args) -> str: + """Get the JWT key ID from command line argument or JWKS endpoint.""" + global _jwks_kid_cache + + # Use command line argument if provided. + if cli_args.jwt_key_id: + return cli_args.jwt_key_id + + # Check if we have a cached JWKS key ID. + if _jwks_kid_cache != "": + return _jwks_kid_cache + + # Fetch from Heimdall JWKS endpoint. + jwks_url = ( + "http://lfx-platform-heimdall.lfx.svc.cluster.local:4457/.well-known/jwks" + ) + try: + response = requests.get(jwks_url, timeout=10) + response.raise_for_status() + jwks_data = response.json() + + if "keys" not in jwks_data or not jwks_data["keys"]: + raise ValueError("No keys found in JWKS response") + + key_id = jwks_data["keys"][0].get("kid") + if not key_id: + raise ValueError("No key ID found in first JWKS key") + + # Cache the fetched key ID. + _jwks_kid_cache = key_id + return key_id + except Exception as e: + raise ValueError( + f"Failed to fetch key ID from JWKS endpoint: {e}. " + "Consider using --jwt-key-id argument." 
+ ) from e + + class JMESPathEncoder(json.JSONEncoder): """Extend the default JSON encoder for JMESPath macros. - Supports both the JMESPath (!ref) and JMESPathSubstitution (!sub) - macros. + Supports JMESPath (!ref), JMESPathSubstitution (!sub), and JWTGenerator + (!jwt) macros. """ def default(self, obj): @@ -234,6 +397,8 @@ def default(self, obj): return obj.evaluate() if isinstance(obj, JMESPathSubstitution): return obj.evaluate() + if isinstance(obj, JWTGenerator): + return obj.generate_jwt() # Handle all other types (or raise a TypeError). return super().default(obj) @@ -315,6 +480,22 @@ def yaml_include(loader, node): return yaml.safe_load(out_data) +def yaml_jwt(loader, node): + """Convert !jwt YAML tag to JWTGenerator object. + + This function is registered with the YAML loader via add_constructor(). + """ + return JWTGenerator(node.value) + + +def jwt_yaml(dumper, data): + """Represent JWTGenerator object as a !jwt YAML tag. + + This function is registered with the YAML dumper via add_representer(). + """ + return dumper.represent_scalar("!jwt", data.args_string) + + def yaml_render(template_dir, yaml_file): """Setup Jinja2 and render and parse a YAML file.""" logger.info("Loading template", template_dir=template_dir, yaml_file=yaml_file) @@ -331,17 +512,15 @@ def yaml_render(template_dir, yaml_file): ), ) # Add helper functions to the Jinja2 environment. + env.globals["b64encode"] = lambda s: b64encode(s).decode() env.globals["environ"] = dict(os.environ) env.globals["fake"] = fake - env.globals["generate_name"] = generate_name - env.globals["lorem"] = lorem env.globals["timedelta"] = datetime.timedelta env.globals["now_z"] = ( lambda: datetime.datetime.now(datetime.UTC) .isoformat("T") .replace("+00:00", "Z") ) - env.globals["uuid"] = lambda: str(uuid.uuid4()) # Store the environment in the context for use by the !include # constructor/macro and remaining YAML files in this context/directory. jinja_env.set(env) @@ -972,6 +1151,19 @@ def parse_args() -> UploadMockDataArgs: action="store_true", help="keep running steps after a failure", ) + parser.add_argument( + "--jwt-rsa-secret", + dest="jwt_rsa_secret", + help="RSA private key in PEM format for JWT signing", + ) + parser.add_argument( + "--jwt-key-id", + dest="jwt_key_id", + help=( + "JWT key ID (kid) for JWT header. If not provided, will " + "fetch from Heimdall JWKS endpoint" + ), + ) # Parse arguments and convert to Pydantic model. 
     parsed_args = parser.parse_args()
     return UploadMockDataArgs(
@@ -981,14 +1173,18 @@ def parse_args() -> UploadMockDataArgs:
         dry_run=parsed_args.dry_run,
         upload=parsed_args.upload,
         force=parsed_args.force,
+        jwt_rsa_secret=parsed_args.jwt_rsa_secret,
+        jwt_key_id=parsed_args.jwt_key_id,
     )


 yaml.SafeLoader.add_constructor("!include", yaml_include)
 yaml.SafeLoader.add_constructor("!ref", yaml_ref)
 yaml.SafeLoader.add_constructor("!sub", yaml_sub)
+yaml.SafeLoader.add_constructor("!jwt", yaml_jwt)
 yaml.add_representer(JMESPath, ref_yaml)
 yaml.add_representer(JMESPathSubstitution, sub_yaml)
+yaml.add_representer(JWTGenerator, jwt_yaml)

 jmespath_context.set({})
 args.set(UploadMockDataArgs(template_dirs=[]))
diff --git a/uv.lock b/uv.lock
index 0f744d0..4929f0c 100644
--- a/uv.lock
+++ b/uv.lock
@@ -20,6 +20,29 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927, upload-time = "2024-12-14T13:52:36.114Z" },
 ]

+[[package]]
+name = "cffi"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pycparser", marker = "implementation_name != 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" },
+    { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" },
+    { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" },
+    { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" },
+    { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" },
+    { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" },
+    { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" },
+    { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" },
+    { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" },
+    { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" },
+    { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" },
+]
+
 [[package]]
 name = "charset-normalizer"
 version = "3.4.1"
@@ -43,12 +66,44 @@ wheels = [
 ]

 [[package]]
-name = "cmdkit"
-version = "2.7.7"
+name = "cryptography"
+version = "46.0.3"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a9/8d/0483d6018a5293ef74889ee132b243d34c324be19378acb93341bbefb43b/cmdkit-2.7.7.tar.gz", hash = "sha256:afe20b60ee60658e299dba465248b74c1801bfe24f891ef745b3a6810251c280", size = 23225, upload-time = "2024-12-28T21:21:57.08Z" }
+dependencies = [
+    { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/23/62/c82680c31dc2dcfa55918a28720d2e5695cbb4e778b621416d12883203cd/cmdkit-2.7.7-py3-none-any.whl", hash = "sha256:9dfb1ae63329d0ce1970886206ef14a9773d5ebcc15608c96df119bc82abf023", size = 26819, upload-time = "2024-12-28T21:21:54.931Z" },
+    { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" },
+    { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
+    { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
+    { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
+    { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
+    { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
+    { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
+    { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
+    { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
+    { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" },
+    { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" },
+    { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" },
+    { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" },
+    { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
+    { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
+    { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
+    { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
+    { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
+    { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
+    { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
+    { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" },
+    { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" },
+    { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
 ]

 [[package]]
@@ -107,14 +162,14 @@ name = "lfx-v2-mockdata"
 version = "0.1.0"
 source = { editable = "." }
 dependencies = [
+    { name = "cryptography" },
     { name = "faker" },
     { name = "jinja2" },
     { name = "jmespath" },
-    { name = "names-generator" },
     { name = "nats-py" },
     { name = "pydantic" },
+    { name = "pyjwt" },
     { name = "python-dotenv" },
-    { name = "python-lorem" },
     { name = "pyyaml" },
     { name = "requests" },
     { name = "structlog" },
@@ -133,14 +188,14 @@ dev = [

 [package.metadata]
 requires-dist = [
+    { name = "cryptography", specifier = ">=42.0.0" },
     { name = "faker", specifier = ">=37.12.0" },
     { name = "jinja2", specifier = ">=3.1.6" },
     { name = "jmespath", specifier = ">=1.0.1" },
-    { name = "names-generator", specifier = ">=0.2.0" },
     { name = "nats-py", specifier = ">=2.9.0" },
     { name = "pydantic", specifier = ">=2.10.5" },
+    { name = "pyjwt", specifier = ">=2.10.0" },
     { name = "python-dotenv", specifier = ">=1.0.1" },
-    { name = "python-lorem", specifier = ">=1.3.0.post3" },
     { name = "pyyaml", specifier = ">=6.0.2" },
     { name = "requests", specifier = ">=2.32.4" },
     { name = "structlog", specifier = ">=24.4.0" },
@@ -204,18 +259,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
 ]

-[[package]]
-name = "names-generator"
-version = "0.2.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "cmdkit" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/fd/de/60c26d1488db2ffdb050d2c0e6007998c2afd90760a4d7b23d32cde5b446/names_generator-0.2.0.tar.gz", hash = "sha256:de60e354b8ca8f2d1ce0816079d3ba569417371867b94328021f03c9dd47a5be", size = 27106, upload-time = "2024-04-16T03:38:58.399Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/bf/3d/7210fc4b1999771b875d96ffa30cf4384c7fe8f0a7d9874df4b753b7703e/names_generator-0.2.0-py3-none-any.whl", hash = "sha256:693e8e48e9a54a2a8f714b01d1bd70945743a15d4587059e7e4e16efc361be11", size = 26699, upload-time = "2024-04-16T03:38:54.288Z" },
-]
-
 [[package]]
 name = "nats-py"
 version = "2.11.0"
@@ -231,6 +274,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
 ]

+[[package]]
+name = "pycparser"
+version = "2.23"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" },
+]
+
 [[package]]
 name = "pydantic"
 version = "2.10.5"
@@ -271,21 +323,21 @@ wheels = [
 ]

 [[package]]
-name = "python-dotenv"
-version = "1.0.1"
+name = "pyjwt"
+version = "2.10.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115, upload-time = "2024-01-23T06:33:00.505Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload-time = "2024-01-23T06:32:58.246Z" },
+    { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" },
 ]

 [[package]]
-name = "python-lorem"
-version = "1.3.0.post3"
+name = "python-dotenv"
+version = "1.0.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a5/ff/da026761ffdd68f5c9415ff54c4adaefaa409c422b41d18ed8fa6a59bff5/python_lorem-1.3.0.post3.tar.gz", hash = "sha256:570d532a179783e024864b2799651f748a3e26ded7ee6d694b67f429f7bca6fd", size = 11664, upload-time = "2024-06-26T03:28:57.137Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115, upload-time = "2024-01-23T06:33:00.505Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/b4/10/e1533d7aa598cdf73772929ea7e1ef3d62af2260c88b616862b2b2dac974/python_lorem-1.3.0.post3-cp312-none-any.whl", hash = "sha256:6df337cc3c5fe2869f328df669164ce3633cf5347d6a7b92c86a96ac4295b30d", size = 9111, upload-time = "2024-06-26T03:28:55.192Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload-time = "2024-01-23T06:32:58.246Z" },
 ]

 [[package]]