diff --git a/.gitattributes b/.gitattributes index 1923c3f051..6c1891dd04 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,237 @@ # Always check-out / check-in files with LF line endings. * text=auto eol=lf + +# Do not edit below this line. Edits will be overwritten by gen_gitattributes.sh + +catalog/internal/server/openapi/api.go linguist-generated=true +catalog/internal/server/openapi/api_model_catalog_service.go linguist-generated=true +catalog/internal/server/openapi/error.go linguist-generated=true +catalog/internal/server/openapi/helpers.go linguist-generated=true +catalog/internal/server/openapi/impl.go linguist-generated=true +catalog/internal/server/openapi/logger.go linguist-generated=true +catalog/internal/server/openapi/routers.go linguist-generated=true +catalog/pkg/openapi/api_model_catalog_service.go linguist-generated=true +catalog/pkg/openapi/client.go linguist-generated=true +catalog/pkg/openapi/configuration.go linguist-generated=true +catalog/pkg/openapi/model_artifact_type_query_param.go linguist-generated=true +catalog/pkg/openapi/model_base_model.go linguist-generated=true +catalog/pkg/openapi/model_base_resource.go linguist-generated=true +catalog/pkg/openapi/model_base_resource_dates.go linguist-generated=true +catalog/pkg/openapi/model_base_resource_list.go linguist-generated=true +catalog/pkg/openapi/model_catalog_artifact.go linguist-generated=true +catalog/pkg/openapi/model_catalog_artifact_list.go linguist-generated=true +catalog/pkg/openapi/model_catalog_label.go linguist-generated=true +catalog/pkg/openapi/model_catalog_label_list.go linguist-generated=true +catalog/pkg/openapi/model_catalog_metrics_artifact.go linguist-generated=true +catalog/pkg/openapi/model_catalog_model.go linguist-generated=true +catalog/pkg/openapi/model_catalog_model_artifact.go linguist-generated=true +catalog/pkg/openapi/model_catalog_model_list.go linguist-generated=true +catalog/pkg/openapi/model_catalog_source.go linguist-generated=true 
+catalog/pkg/openapi/model_catalog_source_list.go linguist-generated=true +catalog/pkg/openapi/model_error.go linguist-generated=true +catalog/pkg/openapi/model_filter_option.go linguist-generated=true +catalog/pkg/openapi/model_filter_option_range.go linguist-generated=true +catalog/pkg/openapi/model_filter_options_list.go linguist-generated=true +catalog/pkg/openapi/model_metadata_bool_value.go linguist-generated=true +catalog/pkg/openapi/model_metadata_double_value.go linguist-generated=true +catalog/pkg/openapi/model_metadata_int_value.go linguist-generated=true +catalog/pkg/openapi/model_metadata_proto_value.go linguist-generated=true +catalog/pkg/openapi/model_metadata_string_value.go linguist-generated=true +catalog/pkg/openapi/model_metadata_struct_value.go linguist-generated=true +catalog/pkg/openapi/model_metadata_value.go linguist-generated=true +catalog/pkg/openapi/model_order_by_field.go linguist-generated=true +catalog/pkg/openapi/model_sort_order.go linguist-generated=true +catalog/pkg/openapi/response.go linguist-generated=true +catalog/pkg/openapi/utils.go linguist-generated=true +clients/python/src/mr_openapi/__init__.py linguist-generated=true +clients/python/src/mr_openapi/api/model_registry_service_api.py linguist-generated=true +clients/python/src/mr_openapi/api_client.py linguist-generated=true +clients/python/src/mr_openapi/configuration.py linguist-generated=true +clients/python/src/mr_openapi/exceptions.py linguist-generated=true +clients/python/src/mr_openapi/models/__init__.py linguist-generated=true +clients/python/src/mr_openapi/models/artifact.py linguist-generated=true +clients/python/src/mr_openapi/models/artifact_create.py linguist-generated=true +clients/python/src/mr_openapi/models/artifact_list.py linguist-generated=true +clients/python/src/mr_openapi/models/artifact_state.py linguist-generated=true +clients/python/src/mr_openapi/models/artifact_type_query_param.py linguist-generated=true 
+clients/python/src/mr_openapi/models/artifact_update.py linguist-generated=true +clients/python/src/mr_openapi/models/base_artifact.py linguist-generated=true +clients/python/src/mr_openapi/models/base_model.py linguist-generated=true +clients/python/src/mr_openapi/models/base_resource.py linguist-generated=true +clients/python/src/mr_openapi/models/base_resource_create.py linguist-generated=true +clients/python/src/mr_openapi/models/base_resource_dates.py linguist-generated=true +clients/python/src/mr_openapi/models/base_resource_list.py linguist-generated=true +clients/python/src/mr_openapi/models/base_resource_update.py linguist-generated=true +clients/python/src/mr_openapi/models/data_set.py linguist-generated=true +clients/python/src/mr_openapi/models/data_set_create.py linguist-generated=true +clients/python/src/mr_openapi/models/data_set_update.py linguist-generated=true +clients/python/src/mr_openapi/models/doc_artifact.py linguist-generated=true +clients/python/src/mr_openapi/models/doc_artifact_create.py linguist-generated=true +clients/python/src/mr_openapi/models/doc_artifact_update.py linguist-generated=true +clients/python/src/mr_openapi/models/error.py linguist-generated=true +clients/python/src/mr_openapi/models/execution_state.py linguist-generated=true +clients/python/src/mr_openapi/models/experiment.py linguist-generated=true +clients/python/src/mr_openapi/models/experiment_create.py linguist-generated=true +clients/python/src/mr_openapi/models/experiment_list.py linguist-generated=true +clients/python/src/mr_openapi/models/experiment_run.py linguist-generated=true +clients/python/src/mr_openapi/models/experiment_run_create.py linguist-generated=true +clients/python/src/mr_openapi/models/experiment_run_list.py linguist-generated=true +clients/python/src/mr_openapi/models/experiment_run_state.py linguist-generated=true +clients/python/src/mr_openapi/models/experiment_run_status.py linguist-generated=true 
+clients/python/src/mr_openapi/models/experiment_run_update.py linguist-generated=true +clients/python/src/mr_openapi/models/experiment_state.py linguist-generated=true +clients/python/src/mr_openapi/models/experiment_update.py linguist-generated=true +clients/python/src/mr_openapi/models/inference_service.py linguist-generated=true +clients/python/src/mr_openapi/models/inference_service_create.py linguist-generated=true +clients/python/src/mr_openapi/models/inference_service_list.py linguist-generated=true +clients/python/src/mr_openapi/models/inference_service_state.py linguist-generated=true +clients/python/src/mr_openapi/models/inference_service_update.py linguist-generated=true +clients/python/src/mr_openapi/models/metadata_bool_value.py linguist-generated=true +clients/python/src/mr_openapi/models/metadata_double_value.py linguist-generated=true +clients/python/src/mr_openapi/models/metadata_int_value.py linguist-generated=true +clients/python/src/mr_openapi/models/metadata_proto_value.py linguist-generated=true +clients/python/src/mr_openapi/models/metadata_string_value.py linguist-generated=true +clients/python/src/mr_openapi/models/metadata_struct_value.py linguist-generated=true +clients/python/src/mr_openapi/models/metadata_value.py linguist-generated=true +clients/python/src/mr_openapi/models/metric.py linguist-generated=true +clients/python/src/mr_openapi/models/metric_create.py linguist-generated=true +clients/python/src/mr_openapi/models/metric_list.py linguist-generated=true +clients/python/src/mr_openapi/models/metric_update.py linguist-generated=true +clients/python/src/mr_openapi/models/model_artifact.py linguist-generated=true +clients/python/src/mr_openapi/models/model_artifact_create.py linguist-generated=true +clients/python/src/mr_openapi/models/model_artifact_list.py linguist-generated=true +clients/python/src/mr_openapi/models/model_artifact_update.py linguist-generated=true +clients/python/src/mr_openapi/models/model_version.py 
linguist-generated=true +clients/python/src/mr_openapi/models/model_version_create.py linguist-generated=true +clients/python/src/mr_openapi/models/model_version_list.py linguist-generated=true +clients/python/src/mr_openapi/models/model_version_state.py linguist-generated=true +clients/python/src/mr_openapi/models/model_version_update.py linguist-generated=true +clients/python/src/mr_openapi/models/order_by_field.py linguist-generated=true +clients/python/src/mr_openapi/models/parameter.py linguist-generated=true +clients/python/src/mr_openapi/models/parameter_create.py linguist-generated=true +clients/python/src/mr_openapi/models/parameter_type.py linguist-generated=true +clients/python/src/mr_openapi/models/parameter_update.py linguist-generated=true +clients/python/src/mr_openapi/models/registered_model.py linguist-generated=true +clients/python/src/mr_openapi/models/registered_model_create.py linguist-generated=true +clients/python/src/mr_openapi/models/registered_model_list.py linguist-generated=true +clients/python/src/mr_openapi/models/registered_model_state.py linguist-generated=true +clients/python/src/mr_openapi/models/registered_model_update.py linguist-generated=true +clients/python/src/mr_openapi/models/serve_model.py linguist-generated=true +clients/python/src/mr_openapi/models/serve_model_create.py linguist-generated=true +clients/python/src/mr_openapi/models/serve_model_list.py linguist-generated=true +clients/python/src/mr_openapi/models/serve_model_update.py linguist-generated=true +clients/python/src/mr_openapi/models/serving_environment.py linguist-generated=true +clients/python/src/mr_openapi/models/serving_environment_create.py linguist-generated=true +clients/python/src/mr_openapi/models/serving_environment_list.py linguist-generated=true +clients/python/src/mr_openapi/models/serving_environment_update.py linguist-generated=true +clients/python/src/mr_openapi/models/sort_order.py linguist-generated=true +clients/python/src/mr_openapi/rest.py 
linguist-generated=true +internal/converter/generated/embedmd_openapi_converter.gen.go linguist-generated=true +internal/converter/generated/openapi_converter.gen.go linguist-generated=true +internal/converter/generated/openapi_embedmd_converter.gen.go linguist-generated=true +internal/converter/generated/openapi_reconciler.gen.go linguist-generated=true +internal/db/schema/artifact.gen.go linguist-generated=true +internal/db/schema/artifactproperty.gen.go linguist-generated=true +internal/db/schema/association.gen.go linguist-generated=true +internal/db/schema/attribution.gen.go linguist-generated=true +internal/db/schema/context.gen.go linguist-generated=true +internal/db/schema/contextproperty.gen.go linguist-generated=true +internal/db/schema/event.gen.go linguist-generated=true +internal/db/schema/eventpath.gen.go linguist-generated=true +internal/db/schema/execution.gen.go linguist-generated=true +internal/db/schema/executionproperty.gen.go linguist-generated=true +internal/db/schema/mlmdenv.gen.go linguist-generated=true +internal/db/schema/parentcontext.gen.go linguist-generated=true +internal/db/schema/parenttype.gen.go linguist-generated=true +internal/db/schema/schema_migrations.gen.go linguist-generated=true +internal/db/schema/type.gen.go linguist-generated=true +internal/db/schema/typeproperty.gen.go linguist-generated=true +internal/server/openapi/api.go linguist-generated=true +internal/server/openapi/api_model_registry_service.go linguist-generated=true +internal/server/openapi/error.go linguist-generated=true +internal/server/openapi/helpers.go linguist-generated=true +internal/server/openapi/impl.go linguist-generated=true +internal/server/openapi/logger.go linguist-generated=true +internal/server/openapi/routers.go linguist-generated=true +pkg/openapi/api_model_registry_service.go linguist-generated=true +pkg/openapi/client.go linguist-generated=true +pkg/openapi/configuration.go linguist-generated=true +pkg/openapi/model_artifact.go 
linguist-generated=true +pkg/openapi/model_artifact_create.go linguist-generated=true +pkg/openapi/model_artifact_list.go linguist-generated=true +pkg/openapi/model_artifact_state.go linguist-generated=true +pkg/openapi/model_artifact_type_query_param.go linguist-generated=true +pkg/openapi/model_artifact_update.go linguist-generated=true +pkg/openapi/model_base_artifact.go linguist-generated=true +pkg/openapi/model_base_model.go linguist-generated=true +pkg/openapi/model_base_resource.go linguist-generated=true +pkg/openapi/model_base_resource_create.go linguist-generated=true +pkg/openapi/model_base_resource_dates.go linguist-generated=true +pkg/openapi/model_base_resource_list.go linguist-generated=true +pkg/openapi/model_base_resource_update.go linguist-generated=true +pkg/openapi/model_data_set.go linguist-generated=true +pkg/openapi/model_data_set_create.go linguist-generated=true +pkg/openapi/model_data_set_list.go linguist-generated=true +pkg/openapi/model_data_set_update.go linguist-generated=true +pkg/openapi/model_doc_artifact.go linguist-generated=true +pkg/openapi/model_doc_artifact_create.go linguist-generated=true +pkg/openapi/model_doc_artifact_update.go linguist-generated=true +pkg/openapi/model_error.go linguist-generated=true +pkg/openapi/model_execution_state.go linguist-generated=true +pkg/openapi/model_experiment.go linguist-generated=true +pkg/openapi/model_experiment_create.go linguist-generated=true +pkg/openapi/model_experiment_list.go linguist-generated=true +pkg/openapi/model_experiment_run.go linguist-generated=true +pkg/openapi/model_experiment_run_create.go linguist-generated=true +pkg/openapi/model_experiment_run_list.go linguist-generated=true +pkg/openapi/model_experiment_run_state.go linguist-generated=true +pkg/openapi/model_experiment_run_status.go linguist-generated=true +pkg/openapi/model_experiment_run_update.go linguist-generated=true +pkg/openapi/model_experiment_state.go linguist-generated=true 
+pkg/openapi/model_experiment_update.go linguist-generated=true +pkg/openapi/model_inference_service.go linguist-generated=true +pkg/openapi/model_inference_service_create.go linguist-generated=true +pkg/openapi/model_inference_service_list.go linguist-generated=true +pkg/openapi/model_inference_service_state.go linguist-generated=true +pkg/openapi/model_inference_service_update.go linguist-generated=true +pkg/openapi/model_metadata_bool_value.go linguist-generated=true +pkg/openapi/model_metadata_double_value.go linguist-generated=true +pkg/openapi/model_metadata_int_value.go linguist-generated=true +pkg/openapi/model_metadata_proto_value.go linguist-generated=true +pkg/openapi/model_metadata_string_value.go linguist-generated=true +pkg/openapi/model_metadata_struct_value.go linguist-generated=true +pkg/openapi/model_metadata_value.go linguist-generated=true +pkg/openapi/model_metric.go linguist-generated=true +pkg/openapi/model_metric_create.go linguist-generated=true +pkg/openapi/model_metric_list.go linguist-generated=true +pkg/openapi/model_metric_update.go linguist-generated=true +pkg/openapi/model_model_artifact.go linguist-generated=true +pkg/openapi/model_model_artifact_create.go linguist-generated=true +pkg/openapi/model_model_artifact_list.go linguist-generated=true +pkg/openapi/model_model_artifact_update.go linguist-generated=true +pkg/openapi/model_model_version.go linguist-generated=true +pkg/openapi/model_model_version_create.go linguist-generated=true +pkg/openapi/model_model_version_list.go linguist-generated=true +pkg/openapi/model_model_version_state.go linguist-generated=true +pkg/openapi/model_model_version_update.go linguist-generated=true +pkg/openapi/model_order_by_field.go linguist-generated=true +pkg/openapi/model_parameter.go linguist-generated=true +pkg/openapi/model_parameter_create.go linguist-generated=true +pkg/openapi/model_parameter_type.go linguist-generated=true +pkg/openapi/model_parameter_update.go linguist-generated=true 
+pkg/openapi/model_registered_model.go linguist-generated=true +pkg/openapi/model_registered_model_create.go linguist-generated=true +pkg/openapi/model_registered_model_list.go linguist-generated=true +pkg/openapi/model_registered_model_state.go linguist-generated=true +pkg/openapi/model_registered_model_update.go linguist-generated=true +pkg/openapi/model_serve_model.go linguist-generated=true +pkg/openapi/model_serve_model_create.go linguist-generated=true +pkg/openapi/model_serve_model_list.go linguist-generated=true +pkg/openapi/model_serve_model_update.go linguist-generated=true +pkg/openapi/model_serving_environment.go linguist-generated=true +pkg/openapi/model_serving_environment_create.go linguist-generated=true +pkg/openapi/model_serving_environment_list.go linguist-generated=true +pkg/openapi/model_serving_environment_update.go linguist-generated=true +pkg/openapi/model_sort_order.go linguist-generated=true +pkg/openapi/response.go linguist-generated=true +pkg/openapi/utils.go linguist-generated=true diff --git a/.github/dependabot.yml b/.github/dependabot.yml index fc59b5934e..ca1b891dae 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -17,6 +17,8 @@ updates: - "/jobs/async-upload" schedule: interval: "weekly" + exclude-paths: + - "jobs/async-upload/requirements.txt" - package-ecosystem: "docker" directory: "/" schedule: diff --git a/.github/workflows/async-upload-test.yml b/.github/workflows/async-upload-test.yml index 09d6100709..fdf65c515a 100644 --- a/.github/workflows/async-upload-test.yml +++ b/.github/workflows/async-upload-test.yml @@ -16,7 +16,7 @@ on: - "jobs/async-upload/**" - ".github/workflows/**" -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read env: @@ -39,7 +39,7 @@ jobs: run: working-directory: jobs/async-upload steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Python uses: actions/setup-python@v6 with: @@ -66,7 
+66,7 @@ jobs: run: working-directory: jobs/async-upload steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Python uses: actions/setup-python@v6 with: @@ -89,7 +89,7 @@ jobs: working-directory: jobs/async-upload steps: - name: Check out the repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: fetch-depth: 0 - name: Set up Python diff --git a/.github/workflows/build-and-push-async-upload.yml b/.github/workflows/build-and-push-async-upload.yml index 4a5abcd8fb..1a5d59f597 100644 --- a/.github/workflows/build-and-push-async-upload.yml +++ b/.github/workflows/build-and-push-async-upload.yml @@ -11,6 +11,10 @@ on: - '!**.gitignore' - '!**.md' - '!**.txt' + - '.github/workflows/build-and-push-async-upload.yml' # self + +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID + contents: read env: IMG_REGISTRY: quay.io @@ -27,7 +31,7 @@ jobs: packages: write steps: - name: Checkout repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -62,3 +66,5 @@ jobs: ${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_NAME }}:main cache-from: type=gha cache-to: type=gha,mode=max + provenance: mode=max # pay attention no secrets are passed as build arguments: https://docs.docker.com/build/ci/github-actions/attestations/#default-provenance:~:text=don%27t%20support%20attestations.-,Warning,-If%20you%27re%20using + sbom: true diff --git a/.github/workflows/build-and-push-csi-image.yml b/.github/workflows/build-and-push-csi-image.yml index eb2c3f738a..4d958a85d8 100644 --- a/.github/workflows/build-and-push-csi-image.yml +++ b/.github/workflows/build-and-push-csi-image.yml @@ -13,6 +13,8 @@ on: - '.github/ISSUE_TEMPLATE/**' - '.github/dependabot.yml' - 'docs/**' +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID\ + contents: read env: IMG_REGISTRY: quay.io IMG_ORG: 
opendatahub @@ -24,7 +26,8 @@ jobs: build-csi-image: runs-on: ubuntu-latest permissions: - contents: read + actions: read # anchore/sbom-action for syft + contents: write # anchore/sbom-action for syft packages: write steps: # Assign context variable for various action contexts (tag, main, CI) @@ -35,7 +38,7 @@ jobs: if: github.head_ref == '' && github.ref == 'refs/heads/main' run: echo "BUILD_CONTEXT=main" >> $GITHUB_ENV # checkout branch - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 # set image version - name: Set main-branch environment if: env.BUILD_CONTEXT == 'main' @@ -66,6 +69,18 @@ jobs: env: IMG: "${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_REPO }}" run: IMG=${{ env.IMG }} IMG_VERSION=${{ env.VERSION }} make image/push + - name: Generate SBOM + uses: anchore/sbom-action@v0 + with: + image: "${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_REPO }}:${{ env.VERSION }}" + format: spdx-json # default, but making sure of the format + artifact-name: "model-registry-server-${{ env.VERSION }}-sbom.spdx.json" + output-file: "model-registry-server-${{ env.VERSION }}-sbom.spdx.json" # pin the file to use it later below + - name: Install Cosign + uses: sigstore/cosign-installer@v3 + - name: Attach SBOM to Image + run: | + cosign attach sbom --sbom model-registry-server-${{ env.VERSION }}-sbom.spdx.json "${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_REPO }}:${{ env.VERSION }}" # Tag latest and main - name: Tag Latest if: env.BUILD_CONTEXT == 'main' && env.PUSH_IMAGE == 'true' diff --git a/.github/workflows/build-and-push-image.yml b/.github/workflows/build-and-push-image.yml index 5efff349de..2796c0683a 100644 --- a/.github/workflows/build-and-push-image.yml +++ b/.github/workflows/build-and-push-image.yml @@ -18,8 +18,9 @@ env: PUSH_IMAGE: true DOCKER_USER: ${{ secrets.QUAY_USERNAME }} DOCKER_PWD: ${{ secrets.QUAY_PASSWORD }} + PLATFORMS: linux/arm64,linux/amd64 -permissions: # default workflow permission, overridden for specific 
job where required +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read jobs: @@ -27,7 +28,8 @@ jobs: uses: ./.github/workflows/prepare.yml build-image: permissions: - contents: read + actions: read # anchore/sbom-action for syft + contents: write # anchore/sbom-action for syft packages: write runs-on: ubuntu-latest needs: prepare @@ -40,7 +42,13 @@ jobs: if: github.head_ref == '' && github.ref == 'refs/heads/main' run: echo "BUILD_CONTEXT=main" >> $GITHUB_ENV # checkout branch - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 + # Set up QEMU for multi-architecture builds + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + # Set up Docker Buildx + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 # set image version - name: Set main-branch environment if: env.BUILD_CONTEXT == 'main' @@ -55,23 +63,24 @@ jobs: - name: Build and Push Image shell: bash run: ./scripts/build_deploy.sh - - name: Tag Latest - if: env.BUILD_CONTEXT == 'main' - shell: bash - env: - IMG: "${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_REPO }}" - BUILD_IMAGE: false # image is already built in "Build and Push Image" step + - name: Generate SBOM + uses: anchore/sbom-action@v0 + with: + image: "${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_REPO }}:${{ env.VERSION }}" + format: spdx-json # default, but making sure of the format + artifact-name: "model-registry-server-${{ env.VERSION }}-sbom.spdx.json" + output-file: "model-registry-server-${{ env.VERSION }}-sbom.spdx.json" # pin the file to use it later below + - name: Install Cosign + uses: sigstore/cosign-installer@v3 + - name: Attach SBOM to Image run: | - docker tag ${{ env.IMG }}:$VERSION ${{ env.IMG }}:latest - # BUILD_IMAGE=false skip the build, just push the tag made above - VERSION=latest ./scripts/build_deploy.sh - - name: Tag Main + cosign attach sbom --sbom model-registry-server-${{ env.VERSION }}-sbom.spdx.json "${{ 
env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_REPO }}:${{ env.VERSION }}" + - name: Tag Latest and Main if: env.BUILD_CONTEXT == 'main' shell: bash env: IMG: "${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_REPO }}" - BUILD_IMAGE: false # image is already built in "Build and Push Image" step run: | - docker tag ${{ env.IMG }}:$VERSION ${{ env.IMG }}:main - # BUILD_IMAGE=false skip the build, just push the tag made above - VERSION=main ./scripts/build_deploy.sh + # Create manifest tags for multi-arch images + docker buildx imagetools create -t ${{ env.IMG }}:latest ${{ env.IMG }}:$VERSION + docker buildx imagetools create -t ${{ env.IMG }}:main ${{ env.IMG }}:$VERSION diff --git a/.github/workflows/build-and-push-testops-image.yml b/.github/workflows/build-and-push-testops-image.yml new file mode 100644 index 0000000000..84578117ba --- /dev/null +++ b/.github/workflows/build-and-push-testops-image.yml @@ -0,0 +1,122 @@ +name: Build and Push testops container image + +on: + push: + branches: + - 'main' + - 'stable' + - 'stable-2.x' + paths: + - 'Dockerfile.testops' + - 'clients/python/**' + - 'api/**' + - 'manifests/**' + - 'scripts/**' + - '!LICENSE*' + - '!**.gitignore' + - '!**.md' + - '!**.txt' + - '.github/workflows/build-and-push-testops-image.yml' # self + +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID + contents: read + +env: + IMG_REGISTRY: quay.io + IMG_ORG: opendatahub + IMG_NAME: model-registry-testops + REGISTRY_USER: ${{ secrets.QUAY_USERNAME }} + REGISTRY_PWD: ${{ secrets.QUAY_PASSWORD }} + +jobs: + build-and-push: + runs-on: ubuntu-latest + if: github.repository == 'opendatahub-io/model-registry' + permissions: + contents: read + packages: write + steps: + - name: Checkout repository + uses: actions/checkout@v5.0.0 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ 
env.IMG_REGISTRY }} + username: ${{ env.REGISTRY_USER }} + password: ${{ env.REGISTRY_PWD }} + + - name: Set branch-based environment + run: | + commit_sha=${{ github.sha }} + branch_name=${GITHUB_REF#refs/heads/} + tag=${branch_name}-${commit_sha:0:7} + # Calculate expiration date (7 days from now) + expiry_date=$(date -d '+7 days' -u +%Y-%m-%dT%H:%M:%SZ) + + echo "VERSION=${tag}" >> $GITHUB_ENV + echo "BRANCH_NAME=${branch_name}" >> $GITHUB_ENV + echo "EXPIRY_DATE=${expiry_date}" >> $GITHUB_ENV + + - name: Set tag environment # this is for v* tag image build + if: startsWith(github.ref, 'refs/tags/v') + run: | + echo "VERSION=${{ github.ref_name }}" >> $GITHUB_ENV + + - name: Build and push SHA-based image (expires in 7 days) + uses: docker/build-push-action@v6 + with: + context: . + file: ./Dockerfile.testops + push: true + tags: | + ${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_NAME }}:${{ env.VERSION }} + labels: | + org.opencontainers.image.source=https://github.com/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} + org.opencontainers.image.created=${{ github.event.repository.updated_at }} + quay.expires-after=7d + cache-from: type=gha + cache-to: type=gha,mode=max + provenance: mode=max + sbom: true + + - name: Build and push persistent branch tags (main) + if: github.ref == 'refs/heads/main' + uses: docker/build-push-action@v6 + with: + context: . 
+ file: ./Dockerfile.testops + push: true + tags: | + ${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_NAME }}:${{ env.BRANCH_NAME }} + ${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_NAME }}:latest + labels: | + org.opencontainers.image.source=https://github.com/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} + org.opencontainers.image.created=${{ github.event.repository.updated_at }} + cache-from: type=gha + cache-to: type=gha,mode=max + provenance: mode=max + sbom: true + + - name: Build and push persistent branch tags (non-main) + if: github.ref != 'refs/heads/main' + uses: docker/build-push-action@v6 + with: + context: . + file: ./Dockerfile.testops + push: true + tags: | + ${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_NAME }}:${{ env.BRANCH_NAME }} + labels: | + org.opencontainers.image.source=https://github.com/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} + org.opencontainers.image.created=${{ github.event.repository.updated_at }} + cache-from: type=gha + cache-to: type=gha,mode=max + provenance: mode=max + sbom: true diff --git a/.github/workflows/build-and-push-ui-images-standalone.yml b/.github/workflows/build-and-push-ui-images-standalone.yml index 09a357724e..e180e878fe 100644 --- a/.github/workflows/build-and-push-ui-images-standalone.yml +++ b/.github/workflows/build-and-push-ui-images-standalone.yml @@ -13,6 +13,8 @@ on: - '!**.gitignore' - '!**.md' - '!**.txt' +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID + contents: read env: IMG_REGISTRY: ghcr.io IMG_ORG: kubeflow @@ -27,7 +29,7 @@ jobs: packages: write steps: - name: Checkout repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -76,4 +78,6 @@ jobs: DEPLOYMENT_MODE=standalone STYLE_THEME=mui-theme cache-from: type=gha - cache-to: type=gha,mode=max \ No newline at end of file + 
cache-to: type=gha,mode=max + provenance: mode=max # pay attention no secrets are passed as build arguments: https://docs.docker.com/build/ci/github-actions/attestations/#default-provenance:~:text=don%27t%20support%20attestations.-,Warning,-If%20you%27re%20using + sbom: true diff --git a/.github/workflows/build-and-push-ui-images.yml b/.github/workflows/build-and-push-ui-images.yml index 19e1d3743c..2d4766fe83 100644 --- a/.github/workflows/build-and-push-ui-images.yml +++ b/.github/workflows/build-and-push-ui-images.yml @@ -13,6 +13,8 @@ on: - '!**.gitignore' - '!**.md' - '!**.txt' +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID + contents: read env: IMG_REGISTRY: ghcr.io IMG_ORG: kubeflow @@ -27,7 +29,7 @@ jobs: packages: write steps: - name: Checkout repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -76,4 +78,6 @@ jobs: DEPLOYMENT_MODE=kubeflow STYLE_THEME=mui-theme cache-from: type=gha - cache-to: type=gha,mode=max \ No newline at end of file + cache-to: type=gha,mode=max + provenance: mode=max # pay attention no secrets are passed as build arguments: https://docs.docker.com/build/ci/github-actions/attestations/#default-provenance:~:text=don%27t%20support%20attestations.-,Warning,-If%20you%27re%20using + sbom: true diff --git a/.github/workflows/build-image-pr.yml b/.github/workflows/build-image-pr.yml index 467f3572bc..5a225deb75 100644 --- a/.github/workflows/build-image-pr.yml +++ b/.github/workflows/build-image-pr.yml @@ -12,7 +12,7 @@ on: - "docs/**" - "clients/python/**" -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read env: @@ -25,7 +25,7 @@ jobs: build-and-test-image: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Generate Tag shell: bash id: tags @@ -39,7 +39,7 @@ jobs: IMG_VERSION: ${{ 
steps.tags.outputs.tag }} run: make image/build - name: Start Kind Cluster - uses: helm/kind-action@v1.12.0 + uses: helm/kind-action@v1.13.0 with: node_image: "kindest/node:v1.27.11" - name: Remove AppArmor profile for mysql in KinD on GHA # https://github.com/kubeflow/manifests/issues/2507 @@ -58,6 +58,18 @@ jobs: echo "Deploying operator from model-registry-operator branch ${BRANCH}" kubectl apply -k "https://github.com/opendatahub-io/model-registry-operator.git/config/default?ref=${BRANCH}" kubectl set env -n model-registry-operator-system deployment/model-registry-operator-controller-manager REST_IMAGE="${IMG}" + kubectl wait --for=condition=Available=true -n model-registry-operator-system deployment/model-registry-operator-controller-manager --timeout=5m + - name: Display about MR Operator in KinD cluster status in any case + if: always() + run: | + kubectl get deployments -n model-registry-operator-system -o wide + kubectl get pods -n model-registry-operator-system -o wide --show-labels + kubectl events -A + kubectl describe -n model-registry-operator-system deployment/model-registry-operator-controller-manager + for p in $(kubectl get pods -n model-registry-operator-system -o name); do + echo "===== Logs for $p =====" + kubectl logs -n model-registry-operator-system $p + done - name: Create Test Registry run: | kubectl apply -k "https://github.com/opendatahub-io/model-registry-operator.git/config/samples/mysql?ref=${BRANCH}" @@ -66,8 +78,14 @@ jobs: run: | kubectl wait --for=condition=Available=true deployment/model-registry-db --timeout=5m kubectl wait --for=condition=Available=true modelregistries/modelregistry-sample --timeout=5m + - name: Display KinD cluster status in any case + if: always() + run: | kubectl get deployments -o wide kubectl get pods -o wide + kubectl events + kubectl get modelregistries/modelregistry-sample -o wide + kubectl describe modelregistries/modelregistry-sample - name: Set up Python uses: actions/setup-python@v6 with: diff --git 
a/.github/workflows/build-image-ui-pr.yml b/.github/workflows/build-image-ui-pr.yml index 3c9243b12b..f941073ee6 100644 --- a/.github/workflows/build-image-ui-pr.yml +++ b/.github/workflows/build-image-ui-pr.yml @@ -11,9 +11,8 @@ on: - "!**.md" - "!**.txt" -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read - packages: read env: IMG_ORG: kubeflow @@ -22,10 +21,13 @@ env: BRANCH: ${{ github.base_ref }} jobs: build-image: + permissions: + contents: read + packages: read runs-on: ubuntu-latest steps: # checkout branch - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Build UI Image shell: bash run: ./scripts/build_deploy.sh diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d1724931b3..e575bd92f5 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -14,7 +14,7 @@ on: - ".github/dependabot.yml" - "docs/**" -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read jobs: @@ -25,15 +25,17 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Setup Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: "1.24" + go-version: "1.25.3" - name: Build run: make build/compile - - name: Unit tests + - name: Registry unit tests run: make test-cover + - name: Catalog unit tests + run: make -C catalog test-cover - name: Upload coverage to Codecov uses: codecov/codecov-action@v4.5.0 with: diff --git a/.github/workflows/check-db-schema-structs.yaml b/.github/workflows/check-db-schema-structs.yaml index 41dba3b0df..cc3f990e44 100644 --- a/.github/workflows/check-db-schema-structs.yaml +++ b/.github/workflows/check-db-schema-structs.yaml @@ -7,18 +7,18 @@ on: tags: - 'v*' -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read 
jobs: check-mysql-schema-structs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Setup Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: "1.24.4" + go-version: "1.25.3" - name: Generate MySQL DB schema structs run: make gen/gorm/mysql - name: Check if there are uncommitted file changes @@ -34,11 +34,11 @@ jobs: check-postgres-schema-structs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Setup Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: "1.24.4" + go-version: "1.25.3" - name: Generate PostgreSQL DB schema structs run: make gen/gorm/postgres - name: Check if there are uncommitted file changes diff --git a/.github/workflows/check-gitattributes.yaml b/.github/workflows/check-gitattributes.yaml new file mode 100644 index 0000000000..24cd4d424e --- /dev/null +++ b/.github/workflows/check-gitattributes.yaml @@ -0,0 +1,13 @@ +name: Validate .gitattributes file +on: pull_request + +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID + contents: read + +jobs: + validate: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - name: Validate OpenAPI spec + run: ./scripts/gen_gitattributes.sh --check diff --git a/.github/workflows/check-openapi-spec-pr.yaml b/.github/workflows/check-openapi-spec-pr.yaml index 23da5e19e7..7a63e635c9 100644 --- a/.github/workflows/check-openapi-spec-pr.yaml +++ b/.github/workflows/check-openapi-spec-pr.yaml @@ -5,14 +5,14 @@ on: - ".github/workflows/**" - "api/openapi/model-registry.yaml" -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read jobs: validate: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Validate OpenAPI spec run: | make openapi/validate diff --git a/.github/workflows/constraints.txt 
b/.github/workflows/constraints.txt index a958db4014..39d4981e92 100644 --- a/.github/workflows/constraints.txt +++ b/.github/workflows/constraints.txt @@ -1,6 +1,6 @@ -pip==23.3.2 -nox==2023.4.22 -nox-poetry==1.0.3 -poetry==1.8.3 -poetry-plugin-export==1.8.0 -virtualenv==20.24.6 +pip==25.3 +nox==2025.11.12 +nox-poetry==1.2.0 +poetry==2.0.1 +poetry-plugin-export==1.9.0 +virtualenv==20.35.4 diff --git a/.github/workflows/controller-test.yml b/.github/workflows/controller-test.yml index a0f589298a..e57be6d30a 100644 --- a/.github/workflows/controller-test.yml +++ b/.github/workflows/controller-test.yml @@ -20,7 +20,7 @@ on: - "pkg/openapi/**" - "go.mod" -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read env: @@ -30,15 +30,30 @@ jobs: name: Run on Ubuntu runs-on: ubuntu-latest steps: + - name: Generate tag + shell: bash + id: tags + run: | + commit_sha=${{ github.event.after }} + tag="${BRANCH:-main}"-${commit_sha:0:7} + echo "tag=${tag}" >> $GITHUB_OUTPUT + - name: Clone the code - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Setup Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: '~1.22' + go-version: "1.25.3" - name: Running Tests run: | go mod tidy - make controller/test \ No newline at end of file + make controller/test + + - name: Build controller + shell: bash + env: + IMG_REPO: model-registry/controller + IMG_VERSION: ${{ steps.tags.outputs.tag }} + run: make image/build diff --git a/.github/workflows/csi-test.yml b/.github/workflows/csi-test.yml index e01fa2d013..8b16160291 100644 --- a/.github/workflows/csi-test.yml +++ b/.github/workflows/csi-test.yml @@ -21,7 +21,7 @@ on: # csi build depends on base go.mod https://github.com/kubeflow/model-registry/issues/311 - "go.mod" -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read env: @@ -35,7 +35,7 @@ jobs: 
build-and-test-csi-image: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Generate tag shell: bash @@ -46,7 +46,12 @@ jobs: echo "tag=${tag}" >> $GITHUB_OUTPUT - name: Install network tools - run: sudo apt-get update && sudo apt-get install -y netcat-openbsd + run: sudo apt-get update && sudo apt-get install -y netcat-openbsd jq + + - name: Install Helm + uses: azure/setup-helm@v4 + with: + version: 'v3.14.0' - name: Build local model registry image shell: bash @@ -64,9 +69,9 @@ jobs: run: make image/build - name: Start KinD cluster - uses: helm/kind-action@v1.12.0 + uses: helm/kind-action@v1.13.0 with: - node_image: "kindest/node:v1.27.11" + node_image: "kindest/node:v1.31.0" - name: Remove AppArmor profile for mysql in KinD on GHA # https://github.com/kubeflow/manifests/issues/2507 run: | diff --git a/.github/workflows/first-time-contributor-pr.yml b/.github/workflows/first-time-contributor-pr.yml index 72161428b9..87312d8117 100644 --- a/.github/workflows/first-time-contributor-pr.yml +++ b/.github/workflows/first-time-contributor-pr.yml @@ -1,49 +1,85 @@ -# goal: this is meant to remind maintainers/approvers to add labels to ensure all tests are executed before merging -# and avoid merging without realizing that required tests has not been run; complementary to bulletpoint in template: https://github.com/kubeflow/model-registry/blame/de5f225d96a4daeca77506d233082b1c4ea5afa3/.github/pull_request_template.md#L21 -name: Welcome first-time contributors (Beta) +name: Welcome first-time contributors (Beta2) + on: - pull_request_target: + pull_request: + types: + - opened + - synchronize + - reopened + issues: types: - opened -permissions: - pull-requests: write -# do NOT: add actions/checkout to this flow, add-third party scripts, or auto-trigger CI jobs +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID + contents: read + jobs: welcome: runs-on: ubuntu-latest + permissions: 
+ pull-requests: write + issues: write steps: - - name: Check contributor status - id: check_1st_time_contrib - uses: actions/github-script@v8 - with: - script: | - const { data: pr } = await github.rest.pulls.get({ - owner: context.repo.owner, - repo: context.repo.repo, - pull_number: context.issue.number - }); - const isFirstTimeContributor = pr.user.contributions === 0; - console.log(`First-time contributor status: ${isFirstTimeContributor}`); - core.setOutput('isFirstTimeContributor', isFirstTimeContributor.toString()); - - - name: Log contributor status (isFirstTimeContributor true) - if: steps.check_1st_time_contrib.outputs.isFirstTimeContributor == 'true' - run: echo "First-time contributor status is ${{ steps.check_1st_time_contrib.outputs.isFirstTimeContributor }}" - - - name: Log contributor status (isFirstTimeContributor false) - if: steps.check_1st_time_contrib.outputs.isFirstTimeContributor == 'false' - run: echo "First-time contributor status is ${{ steps.check_1st_time_contrib.outputs.isFirstTimeContributor }}" - - - name: Add a comment to the PR if first time contributor - if: steps.check_1st_time_contrib.outputs.isFirstTimeContributor == 'true' - uses: actions/github-script@v8 + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Install PyYAML + run: pip3 install pyyaml + + - name: Extract approvers from OWNERS file + id: set-approvers + run: | + python3 << 'EOF' + import yaml + import os + + with open('OWNERS', 'r') as f: + data = yaml.safe_load(f) + + approvers = data.get('approvers', []) + result = ' '.join([f'@{approver}' for approver in approvers]) + + with open(os.environ['GITHUB_OUTPUT'], 'a') as f: + f.write(f'approvers={result}\n') + + print(f'Extracted approvers: {result}') + EOF + - name: Log approvers + run: | + echo "Approvers: ${{ steps.set-approvers.outputs.approvers }}" + + - name: Welcome first-time contributors message + uses: actions/first-interaction@v3 + continue-on-error: true with: - script: | - 
github.rest.issues.createComment({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - body: "Maintainers: let's ensure the label `ok-to-test` has been maintained and all the tests have been executed before merging.

Thank you for your first Pull Request! 🎉🎉" - }) -# do NOT: add actions/checkout to this flow, add-third party scripts, or auto-trigger CI jobs + issue_message: | + 🎉 **Welcome to the Kubeflow Model Registry!** 🎉 + Thanks for opening your first issue! We're happy to have you as part of our community 🚀 + + **Here's what happens next:** + - If you'd like to contribute to this issue, check out our [Contributing Guide](https://github.com/kubeflow/model-registry/blob/main/CONTRIBUTING.md) for repo-specific guidelines and the [Kubeflow Contributor Guide](https://www.kubeflow.org/docs/about/contributing/) for general community standards + - Our team will review your issue soon! + + **Join the community:** + - **Slack**: Join our [Slack channels](https://www.kubeflow.org/docs/about/community/#slack-channels) + - **Meetings**: Attend the [Kubeflow](https://www.kubeflow.org/docs/about/community/#list-of-available-meetings) online calls + + Feel free to ask questions in the comments if you need any help or clarification! + Thanks again for contributing to Kubeflow! 🙏 + pr_message: | + 🎉 **Welcome to the Kubeflow Model Registry!** 🎉 + Thanks for opening your first PR! We're happy to have you as part of our community 🚀 + + **Here's what happens next:** + - If you haven't already, please check out our [Contributing Guide](https://github.com/kubeflow/model-registry/blob/main/CONTRIBUTING.md) for repo-specific guidelines and the [Kubeflow Contributor Guide](https://www.kubeflow.org/docs/about/contributing/) for general community standards + - Our team will review your PR soon! + + **Join the community:** + - **Slack**: Join our [Slack channels](https://www.kubeflow.org/docs/about/community/#slack-channels) + - **Meetings**: Attend the [Kubeflow](https://www.kubeflow.org/docs/about/community/#list-of-available-meetings) online calls + + Feel free to ask questions in the comments if you need any help or clarification! + Thanks again for contributing to Kubeflow! 
🙏 + + Note for: ${{ steps.set-approvers.outputs.approvers }} + Kindly ensure the label `ok-to-test` has been added to the PR, and all the tests have been executed before merging! diff --git a/.github/workflows/go-mod-tidy-diff-check.yml b/.github/workflows/go-mod-tidy-diff-check.yml new file mode 100644 index 0000000000..1771f16f6e --- /dev/null +++ b/.github/workflows/go-mod-tidy-diff-check.yml @@ -0,0 +1,37 @@ +name: Go Mod Tidy Diff Check + +on: + push: + branches: + - main + pull_request: + branches: + - main + +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID + contents: read + +jobs: + tidy-check: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v6 + + - name: Set up Go + uses: actions/setup-go@v6 + with: + go-version: "1.25.3" + + - name: Cache Go modules + uses: actions/cache@v4 + with: + path: ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + + - name: Run go mod tidy and check for changes + run: | + go mod tidy + git diff --exit-code go.mod go.sum diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index 16dbac1afa..e167534049 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -2,6 +2,9 @@ name: "Pull Request Labeler" on: - pull_request_target +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID + contents: read + jobs: labeler: permissions: diff --git a/.github/workflows/prepare.yml b/.github/workflows/prepare.yml index 9da561f5dc..46352e0430 100644 --- a/.github/workflows/prepare.yml +++ b/.github/workflows/prepare.yml @@ -1,18 +1,18 @@ on: workflow_call -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read jobs: prepare: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Setup Go - uses: 
actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: "1.24" + go-version: "1.25.3" - name: Prepare run: make clean build/prepare - name: Check if there are uncommitted file changes diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index eeed37f623..329ba4b9a2 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -4,6 +4,8 @@ on: tags: - py-v* workflow_dispatch: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID + contents: read jobs: release: name: Release @@ -14,7 +16,7 @@ jobs: FORCE_COLOR: "1" steps: - name: Check out the repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: fetch-depth: 0 - name: Set up Python diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml index dbf0d61d02..a9d28c1441 100644 --- a/.github/workflows/python-tests.yml +++ b/.github/workflows/python-tests.yml @@ -14,7 +14,7 @@ on: - ".github/dependabot.yml" - "docs/**" -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read jobs: @@ -31,7 +31,7 @@ jobs: FORCE_COLOR: "1" steps: - name: Check out the repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v6 with: @@ -53,6 +53,7 @@ jobs: run: | pipx install --pip-args=--constraint=${{ github.workspace }}/.github/workflows/constraints.txt poetry poetry --version + pipx inject poetry poetry-plugin-export - name: Install Nox run: | pipx install --pip-args=--constraint=${{ github.workspace }}/.github/workflows/constraints.txt nox @@ -61,12 +62,7 @@ jobs: - name: Nox lint working-directory: clients/python run: | - if [[ ${{ matrix.session }} == "mypy" ]]; then - nox --python=${{ matrix.python }} ||\ - echo "::error title='mypy failure'::Check the logs for more details" - else - nox --python=${{ matrix.python }} - fi 
+ nox --python=${{ matrix.python }} check-autogen: name: Check autogenerated code is in sync @@ -78,7 +74,7 @@ jobs: nodejs: ["20"] steps: - name: Check out the repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: fetch-depth: 0 - name: Set up Python @@ -86,7 +82,7 @@ jobs: with: python-version: ${{ matrix.python }} - name: Set up Node.js - uses: actions/setup-node@v5 + uses: actions/setup-node@v6 with: node-version: ${{ matrix.nodejs }} - name: Upgrade pip @@ -118,6 +114,9 @@ jobs: test: name: E2E Py ${{ matrix.python }} K8s ${{ matrix.kubernetes-version }} DB ${{ matrix.manifest-db }} runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: read # to use dorny/paths-filter (below) strategy: fail-fast: false matrix: @@ -149,7 +148,7 @@ jobs: DEPLOY_MANIFEST_DB: "${{ matrix.manifest-db }}" steps: - name: Check out the repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v6 with: @@ -171,6 +170,7 @@ jobs: run: | pipx install --pip-args=--constraint=${{ github.workspace }}/.github/workflows/constraints.txt poetry poetry --version + pipx inject poetry poetry-plugin-export - name: Install Nox run: | pipx install --pip-args=--constraint=${{ github.workspace }}/.github/workflows/constraints.txt nox @@ -198,7 +198,7 @@ jobs: IMG_VERSION: ${{ steps.tags.outputs.tag }} run: make image/build - name: Start Kind Cluster - uses: helm/kind-action@v1.12.0 + uses: helm/kind-action@v1.13.0 with: node_image: kindest/node:${{ matrix.kubernetes-version }} cluster_name: chart-testing-py-${{ matrix.python }} @@ -237,13 +237,21 @@ jobs: files: coverage.xml fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} - - name: Nox test fuzz (main only) - if: github.ref == 'refs/heads/main' + - name: Check if OpenAPI files changed + id: check-openapi-changed + if: github.event_name == 'pull_request' + uses: dorny/paths-filter@v3 + with: + filters: | + openapi: + - 'api/**' + 
- name: Nox test fuzz (main only, or PRs with OpenAPI changes) + if: (github.event_name == 'push' && github.ref == 'refs/heads/main') || (github.event_name == 'pull_request' && steps.check-openapi-changed.outputs.openapi == 'true') working-directory: clients/python run: | - kubectl port-forward -n ${{ env.MR_NAMESPACE }} service/model-registry-service 8080:8080 & - kubectl port-forward -n minio svc/minio 9000:9000 & - kubectl port-forward service/distribution-registry-test-service 5001:5001 & + kubectl port-forward -n ${{ env.MR_NAMESPACE }} service/model-registry-service 8080:8080 > /dev/null 2>&1 & + kubectl port-forward -n minio svc/minio 9000:9000 > /dev/null 2>&1 & + kubectl port-forward service/distribution-registry-test-service 5001:5001 > /dev/null 2>&1 & sleep 2 nox --python=${{ matrix.python }} --session=fuzz @@ -260,7 +268,7 @@ jobs: FORCE_COLOR: "1" steps: - name: Check out the repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v6 with: @@ -282,6 +290,7 @@ jobs: run: | pipx install --pip-args=--constraint=${{ github.workspace }}/.github/workflows/constraints.txt poetry poetry --version + pipx inject poetry poetry-plugin-export - name: Install Nox run: | pipx install --pip-args=--constraint=${{ github.workspace }}/.github/workflows/constraints.txt nox @@ -293,12 +302,12 @@ jobs: nox --python=${{ matrix.python }} poetry build - name: Upload dist - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: py-dist path: clients/python/dist - name: Upload documentation - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: py-docs path: clients/python/docs/_build diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 9f4ec72191..7a139b4e9e 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -14,8 +14,8 @@ on: push: branches: [ "main" ] -# Declare default 
permissions as read only. -permissions: read-all +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID + contents: read jobs: analysis: @@ -26,18 +26,17 @@ jobs: security-events: write # Needed to publish results and get a badge (see publish_results below). id-token: write - # Uncomment the permissions below if installing in a private repository. - # contents: read - # actions: read + contents: read + actions: read steps: - name: "Checkout code" - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.0 with: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@05b42c624433fc40578a4040d5cf5e36ddca8cde # v2.4.2 + uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3 with: results_file: results.sarif results_format: sarif @@ -59,7 +58,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. - name: "Upload artifact" - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: SARIF file path: results.sarif @@ -67,6 +66,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. 
- name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2 + uses: github/codeql-action/upload-sarif@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8 with: sarif_file: results.sarif diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index 634790f5d0..d47b0ef332 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -9,6 +9,9 @@ on: schedule: - cron: "04 4 * * *" +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID + contents: read + jobs: stale: runs-on: ubuntu-22.04 diff --git a/.github/workflows/test-fuzz.yml b/.github/workflows/test-fuzz.yml index f3d7ab7732..b5dd618498 100644 --- a/.github/workflows/test-fuzz.yml +++ b/.github/workflows/test-fuzz.yml @@ -7,7 +7,7 @@ on: required: true type: number -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read env: @@ -19,10 +19,18 @@ env: jobs: test-fuzz: + name: Test Fuzz DB ${{ matrix.manifest-db }} runs-on: ubuntu-latest defaults: run: working-directory: clients/python + strategy: + fail-fast: false + matrix: + manifest-db: ["db", "postgres"] # subdirectory of manifests/kustomize/overlays to select which database: 'db' (MySQL) or 'postgres' + kubernetes-version: ["v1.31.0"] + env: + DEPLOY_MANIFEST_DB: "${{ matrix.manifest-db }}" steps: - name: Get PR details id: pr @@ -45,7 +53,7 @@ jobs: } - name: Checkout PR - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: ref: ${{ fromJson(steps.pr.outputs.result).sha }} @@ -57,13 +65,30 @@ jobs: - name: Install Poetry run: | pipx install poetry + - name: Start Kind Cluster + uses: helm/kind-action@v1.13.0 + with: + node_image: kindest/node:${{ matrix.kubernetes-version }} + cluster_name: py-fuzz-${{ matrix.kubernetes-version }} + kubectl_version: ${{ matrix.kubernetes-version }} - name: Remove AppArmor profile for mysql in KinD on GHA # 
https://github.com/kubeflow/manifests/issues/2507 run: | set -x sudo apparmor_parser -R /etc/apparmor.d/usr.sbin.mysqld + - name: Generate Tag + shell: bash + id: tags + run: | + commit_sha=${{ fromJson(steps.pr.outputs.result).sha }} + tag=fuzz-${commit_sha:0:7} + echo "tag=${tag}" | tee -a $GITHUB_OUTPUT + - name: Run Fuzz Tests + env: + IMG_VERSION: ${{ steps.tags.outputs.tag }} + CLUSTER_NAME: py-fuzz-${{ matrix.kubernetes-version }} run: | echo "Starting fuzz tests..." make test-fuzz diff --git a/.github/workflows/trivy-image-scanning.yaml b/.github/workflows/trivy-image-scanning.yaml index 0e56c9d9cf..e829bce857 100644 --- a/.github/workflows/trivy-image-scanning.yaml +++ b/.github/workflows/trivy-image-scanning.yaml @@ -4,13 +4,15 @@ on: schedule: - cron: '0 0 * * 1' # Every Monday at 00:00 -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read - security-events: write jobs: image-scanning: runs-on: ubuntu-latest + permissions: + contents: read + security-events: write strategy: matrix: IMAGE: [ @@ -22,7 +24,7 @@ jobs: ] steps: - name: Checkout code - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Sanitize image name for SARIF filename run: | @@ -43,7 +45,7 @@ jobs: timeout: 30m0s - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@v3 + uses: github/codeql-action/upload-sarif@v4 if: always() with: sarif_file: 'trivy-results-${{ env.SANITIZED_IMAGE_NAME }}.sarif' diff --git a/.github/workflows/ui-bff-build.yml b/.github/workflows/ui-bff-build.yml index 29867956d2..46dd17a715 100644 --- a/.github/workflows/ui-bff-build.yml +++ b/.github/workflows/ui-bff-build.yml @@ -19,17 +19,17 @@ on: - "!**.gitignore" - "!**.md" -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: 
actions/checkout@v6 - name: Setup Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: go-version: "1.24.6" @@ -38,7 +38,7 @@ jobs: run: make clean - name: Lint - uses: golangci/golangci-lint-action@v8 + uses: golangci/golangci-lint-action@v9 with: version: v2.1.0 working-directory: clients/ui/bff/ diff --git a/.github/workflows/ui-frontend-build.yml b/.github/workflows/ui-frontend-build.yml index 1f84a98852..8f7e3689ef 100644 --- a/.github/workflows/ui-frontend-build.yml +++ b/.github/workflows/ui-frontend-build.yml @@ -19,17 +19,17 @@ on: - "!**.gitignore" - "!**.md" -permissions: +permissions: # set contents: read at top-level, per OpenSSF ScoreCard rule TokenPermissionsID contents: read jobs: test-and-build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Node.js - uses: actions/setup-node@v5 + uses: actions/setup-node@v6 with: node-version: "20" diff --git a/.gitignore b/.gitignore index 8740d95808..4705a4f32e 100644 --- a/.gitignore +++ b/.gitignore @@ -58,3 +58,10 @@ istio-* # Python venv/ .python-version + +# Claude code +CLAUDE.md +.claude/ + +**/go.work +**/go.work.sum diff --git a/Dockerfile b/Dockerfile index 186d21ced2..77ae7e334e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,11 +1,11 @@ # Build the model-registry binary -FROM --platform=$BUILDPLATFORM registry.access.redhat.com/ubi9/go-toolset:1.24 AS common +FROM --platform=$BUILDPLATFORM registry.access.redhat.com/ubi9/go-toolset:1.25 AS common ARG TARGETOS ARG TARGETARCH WORKDIR /workspace # Copy the Go Modules manifests and workspace file -COPY ["go.mod", "go.sum", "go.work", "go.work.sum", "./"] +COPY ["go.mod", "go.sum", "./"] COPY ["pkg/openapi/go.mod", "pkg/openapi/"] COPY ["catalog/pkg/openapi/go.mod", "catalog/pkg/openapi/"] # cache deps before building and copying source so that we don't need to re-download as much diff --git a/Dockerfile.odh b/Dockerfile.odh index b86c966d70..821d5814b6 100644 --- a/Dockerfile.odh +++ 
b/Dockerfile.odh @@ -1,9 +1,9 @@ # Build the model-registry binary -FROM registry.access.redhat.com/ubi9/go-toolset:1.24 AS builder +FROM registry.access.redhat.com/ubi9/go-toolset:1.25 AS builder WORKDIR /workspace # Copy the Go Modules manifests and workspace file -COPY ["go.mod", "go.sum", "go.work", "./"] +COPY ["go.mod", "go.sum", "./"] COPY ["pkg/openapi/go.mod", "pkg/openapi/"] # cache deps before building and copying source so that we don't need to re-download as much # and so that source changes don't invalidate our downloaded layer diff --git a/Dockerfile.testops b/Dockerfile.testops new file mode 100644 index 0000000000..b1af5d168d --- /dev/null +++ b/Dockerfile.testops @@ -0,0 +1,47 @@ +FROM registry.access.redhat.com/ubi9/python-312 + +ARG TARGETARCH +# Switch to root to install packages and create user +USER 0 + +# Install skopeo +RUN dnf update -y && \ + dnf install -y skopeo && \ + dnf clean all + +# Create odh user +RUN useradd -m -s /bin/bash odh + +# Set working directory +WORKDIR /home/odh + +# Copy project files +COPY clients/python clients/python +COPY api/ api/ +COPY manifests/ manifests/ +COPY scripts/ scripts/ + +# Download kubectl binary +RUN ARCH="${TARGETARCH:-amd64}" && \ + KUBECTL_VERSION=$(curl -L -s https://dl.k8s.io/release/stable.txt) && \ + curl -fsSLO "https://dl.k8s.io/release/${KUBECTL_VERSION}/bin/linux/${ARCH}/kubectl" && \ + curl -fsSLO "https://dl.k8s.io/release/${KUBECTL_VERSION}/bin/linux/${ARCH}/kubectl.sha256" && \ + echo "$( /dev/null)) } endif +bin/goimports: + GOBIN=$(PROJECT_PATH)/bin ${GO} install golang.org/x/tools/cmd/goimports@latest + .PHONY: clean/deps clean/deps: rm -Rf bin/* @@ -220,6 +223,11 @@ deps: bin/golangci-lint bin/goverter bin/openapi-generator-cli bin/envtest vendor: ${GO} mod vendor +.PHONY: update/worksum +update/worksum: + ${GO} clean --modcache + ${GO} mod download + # WARNING: DO NOT DELETE THIS TARGET, USED BY Dockerfile!!! 
.PHONY: build/prepare build/prepare: gen vet lint diff --git a/OWNERS b/OWNERS index 06065d7c1f..70e22f4896 100644 --- a/OWNERS +++ b/OWNERS @@ -7,6 +7,7 @@ approvers: - adysenrothman - dhirajsb +- fege - isinyaaa - lampajr - nehachopra27 @@ -16,10 +17,12 @@ approvers: - tonyxrmdavidson - Al-Pragliola - pboyd +- Crazyglue options: {} reviewers: - adysenrothman - dhirajsb +- fege - isinyaaa - lampajr - nehachopra27 @@ -29,3 +32,4 @@ reviewers: - tonyxrmdavidson - Al-Pragliola - pboyd +- Crazyglue diff --git a/README.md b/README.md index 5d583f28d3..ac200d0059 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ Model registry provides a central repository for model developers to store and m 8. [UI](clients/ui/README.md) ## Pre-requisites: -- go >= 1.24 +- go >= 1.25 - protoc v24.3 - [Protocol Buffers v24.3 Release](https://github.com/protocolbuffers/protobuf/releases/tag/v24.3) - npm >= 10.2.0 - [Installing Node.js and npm](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm) - Java >= 11.0 diff --git a/RELEASE.md b/RELEASE.md index 791247c418..40d195a509 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -72,7 +72,7 @@ git checkout -b mr_maintainer-$TDATE-upstreamSync pushd manifests/kustomize/base && kustomize edit set image ghcr.io/kubeflow/model-registry/server=ghcr.io/kubeflow/model-registry/server:$VVERSION && popd pushd manifests/kustomize/options/csi && kustomize edit set image ghcr.io/kubeflow/model-registry/storage-initializer=ghcr.io/kubeflow/model-registry/storage-initializer:$VVERSION && popd pushd manifests/kustomize/options/ui/base && kustomize edit set image model-registry-ui=ghcr.io/kubeflow/model-registry/ui:$VVERSION && popd -pushd manifests/kustomize/options/catalog && kustomize edit set image ghcr.io/kubeflow/model-registry/server=ghcr.io/kubeflow/model-registry/server:$VVERSION && popd +pushd manifests/kustomize/options/catalog/base && kustomize edit set image 
ghcr.io/kubeflow/model-registry/server=ghcr.io/kubeflow/model-registry/server:$VVERSION && popd git add . git commit -s @@ -89,8 +89,7 @@ git push --set-upstream origin mr_maintainer-$TDATE-upstreamSync - optional. if you create the tag from local git (see point below); await GHA complete that push Container images to docker.io or any other KF registry: https://github.com/kubeflow/model-registry/actions - create [the Release from GitHub](https://github.com/kubeflow/model-registry/releases/new), ⚠️ select the _release branch_ ⚠️ , input the _new tag_
(in this example the tag is created from GitHub; alternatively, you could just do the tag manually by checking out the release branch locally--remember to pull!!--and issuing the tag from local machine). -Encouraging in upstream to mark it as a non-production release (in the github screen). -Encouraging to use the "alpha" version policy of KF in the beginning of the release markdown (see previous pre-releases). +- encouraging to use the "alpha" version policy of KF in the beginning of the release markdown (see previous releases). It is helpful to prefix this in the release notes: diff --git a/api/openapi/catalog.yaml b/api/openapi/catalog.yaml index d4693c0dbb..6479d1012f 100644 --- a/api/openapi/catalog.yaml +++ b/api/openapi/catalog.yaml @@ -10,6 +10,32 @@ servers: - url: "https://localhost:8080" - url: "http://localhost:8080" paths: + /api/model_catalog/v1alpha1/labels: + summary: Path used to get the list of catalog labels. + description: >- + The REST endpoint/path used to list zero or more `CatalogLabel` entities. + get: + summary: List All CatalogLabels + tags: + - ModelCatalogService + parameters: + - $ref: "#/components/parameters/pageSize" + - $ref: "#/components/parameters/labelOrderBy" + - $ref: "#/components/parameters/sortOrder" + - $ref: "#/components/parameters/nextPageToken" + responses: + "200": + $ref: "#/components/responses/CatalogLabelListResponse" + "400": + $ref: "#/components/responses/BadRequest" + "401": + $ref: "#/components/responses/Unauthorized" + "404": + $ref: "#/components/responses/NotFound" + "500": + $ref: "#/components/responses/InternalServerError" + operationId: findLabels + description: Gets a list of all `CatalogLabel` entities. /api/model_catalog/v1alpha1/models: description: >- The REST endpoint/path used to list zero or more `CatalogModel` entities from all `CatalogSources`. @@ -20,20 +46,63 @@ paths: parameters: - name: source description: |- - Filter models by source. 
This parameter is currently required and - may only be specified once. + Filter models by source. Multiple values can be separated by commas + to filter by multiple sources (OR logic). For example: + ?source=huggingface,local will return models from either + huggingface OR local sources. schema: - type: string + type: array + items: + type: string + style: form + explode: true in: query - required: true + required: false - name: q description: Free-form keyword search used to filter the response. schema: type: string in: query required: false + - name: sourceLabel + description: |- + Filter models by the label associated with the source. Multiple + values can be separated by commas. If one of the values is the + string `null`, then models from every source without a label will + be returned. + schema: + type: array + items: + type: string + in: query + required: false + - $ref: "#/components/parameters/filterQuery" - $ref: "#/components/parameters/pageSize" - - $ref: "#/components/parameters/orderBy" + - name: orderBy + style: form + explode: true + examples: + orderBy: + value: ID + description: |- + Specifies the order by criteria for listing entities. + + Supported values are: + - CREATE_TIME + - LAST_UPDATE_TIME + - ID + - NAME + - ACCURACY + + The `ACCURACY` sort will sort by the `overall_average` property in any linked metrics artifact. + + In addition, models can be sorted by properties. 
For example: + - `provider.string_value` sorts by provider name + - `artifacts.ifeval.double_value` sorts by the min/max value a property called ifeval across all associated artifacts + schema: + $ref: "#/components/schemas/OrderByField" + in: query + required: false - $ref: "#/components/parameters/sortOrder" - $ref: "#/components/parameters/nextPageToken" responses: @@ -48,6 +117,22 @@ paths: "500": $ref: "#/components/responses/InternalServerError" operationId: findModels + /api/model_catalog/v1alpha1/models/filter_options: + description: Lists options for `filterQuery` when listing models. + get: + summary: Lists fields and available options that can be used in `filterQuery` on the list models endpoint. + tags: + - ModelCatalogService + responses: + "200": + $ref: "#/components/responses/FilterOptionsResponse" + "400": + $ref: "#/components/responses/BadRequest" + "401": + $ref: "#/components/responses/Unauthorized" + "500": + $ref: "#/components/responses/InternalServerError" + operationId: findModelsFilterOptions /api/model_catalog/v1alpha1/sources: summary: Path used to get the list of catalog sources. description: >- @@ -107,14 +192,14 @@ paths: required: true /api/model_catalog/v1alpha1/sources/{source_id}/models/{model_name}/artifacts: description: >- - The REST endpoint/path used to list `CatalogModelArtifacts`. + The REST endpoint/path used to list `CatalogArtifacts`. get: - summary: List CatalogModelArtifacts. + summary: List CatalogArtifacts. 
tags: - ModelCatalogService responses: "200": - $ref: "#/components/responses/CatalogModelArtifactListResponse" + $ref: "#/components/responses/CatalogArtifactListResponse" "401": $ref: "#/components/responses/Unauthorized" "404": @@ -135,16 +220,20 @@ paths: type: string in: path required: true + - $ref: "#/components/parameters/artifactType" + - $ref: "#/components/parameters/artifact_type" + - $ref: "#/components/parameters/artifactFilterQuery" + - $ref: "#/components/parameters/pageSize" + - $ref: "#/components/parameters/artifactOrderBy" + - $ref: "#/components/parameters/sortOrder" + - $ref: "#/components/parameters/nextPageToken" components: schemas: ArtifactTypeQueryParam: description: Supported artifact types for querying. enum: - model-artifact - - doc-artifact - - dataset-artifact - - metric - - parameter + - metrics-artifact type: string BaseModel: type: object @@ -202,6 +291,35 @@ components: type: object additionalProperties: $ref: "#/components/schemas/MetadataValue" + BaseResource: + allOf: + - type: object + properties: + customProperties: + description: User provided custom properties which are not defined by its type. + type: object + additionalProperties: + $ref: "#/components/schemas/MetadataValue" + description: + description: |- + An optional description about the resource. + type: string + externalId: + description: |- + The external id that come from the clients’ system. This field is optional. + If set, it must be unique among all resources within a database instance. + type: string + name: + description: |- + The client provided name of the artifact. This field is optional. If set, + it must be unique among all the artifacts of the same artifact type within + a database instance and cannot be changed once set. + type: string + id: + format: int64 + description: The unique server generated id of the resource. 
+ type: string + - $ref: "#/components/schemas/BaseResourceDates" BaseResourceDates: description: Common timestamp fields for resources type: object @@ -234,6 +352,83 @@ components: format: int32 description: Number of items in result list. type: integer + CatalogArtifact: + description: A single artifact in the catalog API. + oneOf: + - $ref: "#/components/schemas/CatalogModelArtifact" + - $ref: "#/components/schemas/CatalogMetricsArtifact" + discriminator: + propertyName: artifactType + mapping: + model-artifact: "#/components/schemas/CatalogModelArtifact" + metrics-artifact: "#/components/schemas/CatalogMetricsArtifact" + CatalogArtifactList: + description: List of CatalogArtifact entities. + allOf: + - type: object + properties: + items: + description: Array of `CatalogArtifact` entities. + type: array + items: + $ref: "#/components/schemas/CatalogArtifact" + required: + - items + - $ref: "#/components/schemas/BaseResourceList" + CatalogLabel: + description: A catalog label. Labels are used to categorize catalog sources. Represented as a flexible map of string key-value pairs with a required 'name' field. + type: object + required: + - name + properties: + name: + type: string + nullable: true + description: The unique name identifier for the label. + displayName: + type: string + description: An optional human-readable name to show in place of `name`. + additionalProperties: + type: string + example: + name: huggingface + displayName: HuggingFace Hub + description: HuggingFace models with full support and legal indemnification. + CatalogLabelList: + description: List of CatalogLabel entities. + allOf: + - type: object + properties: + items: + description: Array of `CatalogLabel` entities. + type: array + items: + $ref: "#/components/schemas/CatalogLabel" + required: + - items + - $ref: "#/components/schemas/BaseResourceList" + CatalogMetricsArtifact: + description: A metadata Artifact Entity. 
+ allOf: + - type: object + required: + - artifactType + - metricsType + properties: + artifactType: + type: string + default: metrics-artifact + metricsType: + type: string + enum: + - performance-metrics + - accuracy-metrics + customProperties: + description: User provided custom properties which are not defined by its type. + type: object + additionalProperties: + $ref: "#/components/schemas/MetadataValue" + - $ref: "#/components/schemas/BaseResource" CatalogModel: description: A model in the model catalog. allOf: @@ -248,38 +443,29 @@ components: source_id: type: string description: ID of the source this model belongs to. - - $ref: "#/components/schemas/BaseResourceDates" - $ref: "#/components/schemas/BaseModel" + - $ref: "#/components/schemas/BaseResource" CatalogModelArtifact: - description: A single artifact for a catalog model. + description: A Catalog Model Artifact Entity. allOf: - type: object required: + - artifactType - uri properties: + artifactType: + type: string + default: model-artifact uri: type: string format: uri - description: URI where the artifact can be retrieved. + description: URI where the model can be retrieved. customProperties: description: User provided custom properties which are not defined by its type. type: object additionalProperties: $ref: "#/components/schemas/MetadataValue" - - $ref: "#/components/schemas/BaseResourceDates" - CatalogModelArtifactList: - description: List of CatalogModel entities. - allOf: - - type: object - properties: - items: - description: Array of `CatalogModelArtifact` entities. - type: array - items: - $ref: "#/components/schemas/CatalogModelArtifact" - required: - - items - - $ref: "#/components/schemas/BaseResourceList" + - $ref: "#/components/schemas/BaseResource" CatalogModelList: description: List of CatalogModel entities. 
allOf: @@ -298,6 +484,7 @@ components: required: - id - name + - labels type: object properties: id: @@ -310,6 +497,32 @@ components: description: Whether the catalog source is enabled. type: boolean default: true + labels: + description: Labels for the catalog source. + type: array + items: + type: string + includedModels: + description: |- + Optional allow-list of models that are eligible for this source. Entries can be + exact model names or patterns that use `*` as a wildcard. When provided, only + models matching at least one pattern are considered. + + Pattern matching is case-insensitive, so `Granite/*` will match `granite/model`, + `Granite/model`, and `GRANITE/model`. + type: array + items: + type: string + excludedModels: + description: |- + Optional block-list of models that should be removed from the catalog even if + they match `includedModels`. Patterns support the `*` wildcard. + + Pattern matching is case-insensitive, so `*-beta` will match `Model-Beta`, + `model-beta`, and `MODEL-BETA`. + type: array + items: + type: string CatalogSourceList: description: List of CatalogSource entities. allOf: @@ -334,6 +547,42 @@ components: message: description: Error message type: string + FilterOption: + type: object + required: + - type + properties: + type: + type: string + description: The data type of the filter option + enum: + - string + - number + values: + type: array + description: Known values of the property for string types with a small number of possible options. + items: {} + range: + $ref: "#/components/schemas/FilterOptionRange" + FilterOptionRange: + type: object + description: Min and max values for number types. + properties: + min: + type: number + format: double + max: + type: number + format: double + FilterOptionsList: + description: List of FilterOptions + type: object + properties: + filters: + type: object + description: A single filter option. 
+ additionalProperties: + $ref: "#/components/schemas/FilterOption" MetadataBoolValue: description: A bool property value. type: object @@ -442,7 +691,8 @@ components: string_value: my_value metadataType: MetadataStringValue OrderByField: - description: Supported fields for ordering result entities. + description: |- + Supported fields for ordering result entities. enum: - CREATE_TIME - LAST_UPDATE_TIME @@ -462,12 +712,18 @@ components: schema: $ref: "#/components/schemas/Error" description: Bad Request parameters - CatalogModelArtifactListResponse: + CatalogArtifactListResponse: + content: + application/json: + schema: + $ref: "#/components/schemas/CatalogArtifactList" + description: A response containing a list of CatalogArtifact entities. + CatalogLabelListResponse: content: application/json: schema: - $ref: "#/components/schemas/CatalogModelArtifactList" - description: A response containing a list of CatalogModelArtifact entities. + $ref: "#/components/schemas/CatalogLabelList" + description: A response containing a list of CatalogLabel entities. CatalogModelListResponse: content: application/json: @@ -498,6 +754,12 @@ components: schema: $ref: "#/components/schemas/Error" description: Conflict with current state of target resource + FilterOptionsResponse: + content: + application/json: + schema: + $ref: "#/components/schemas/FilterOptionsList" + description: A response containing options for a `filterQuery` parameter. InternalServerError: content: application/json: @@ -529,6 +791,80 @@ components: $ref: "#/components/schemas/Error" description: Unprocessable Entity error parameters: + filterQuery: + examples: + filterQuery: + value: "name='my-model' AND state='LIVE'" + name: filterQuery + description: | + A SQL-like query string to filter the list of entities. The query supports rich filtering capabilities with automatic type inference. 
+ + **Supported Operators:** + - Comparison: `=`, `!=`, `<>`, `>`, `<`, `>=`, `<=` + - Pattern matching: `LIKE`, `ILIKE` (case-insensitive) + - Set membership: `IN` + - Logical: `AND`, `OR` + - Grouping: `()` for complex expressions + + **Data Types:** + - Strings: `"value"` or `'value'` + - Numbers: `42`, `3.14`, `1e-5` + - Booleans: `true`, `false` (case-insensitive) + + **Property Access:** + - Standard properties: `name`, `id`, `state`, `createTimeSinceEpoch` + - Custom properties: Any user-defined property name + - Escaped properties: Use backticks for special characters: `` `custom-property` `` + - Type-specific access: `property.string_value`, `property.double_value`, `property.int_value`, `property.bool_value` + + **Examples:** + - Basic: `name = "my-model"` + - Comparison: `accuracy > 0.95` + - Pattern: `name LIKE "%tensorflow%"` + - Complex: `(name = "model-a" OR name = "model-b") AND state = "LIVE"` + - Custom property: `framework.string_value = "pytorch"` + - Escaped property: `` `mlflow.source.type` = "notebook" `` + schema: + type: string + in: query + required: false + artifactFilterQuery: + examples: + artifactFilterQuery: + value: "name='my-artifact' AND uri LIKE '%s3%'" + name: filterQuery + description: | + A SQL-like query string to filter catalog artifacts. The query supports rich filtering capabilities with automatic type inference. 
+ + **Supported Operators:** + - Comparison: `=`, `!=`, `<>`, `>`, `<`, `>=`, `<=` + - Pattern matching: `LIKE`, `ILIKE` (case-insensitive) + - Set membership: `IN` + - Logical: `AND`, `OR` + - Grouping: `()` for complex expressions + + **Data Types:** + - Strings: `"value"` or `'value'` + - Numbers: `42`, `3.14`, `1e-5` + - Booleans: `true`, `false` (case-insensitive) + + **Property Access (Artifacts):** + - Standard properties: `name`, `id`, `uri`, `artifactType`, `createTimeSinceEpoch` + - Custom properties: Any user-defined property name in `customProperties` + - Escaped properties: Use backticks for special characters: `` `custom-property` `` + - Type-specific access: `property.string_value`, `property.double_value`, `property.int_value`, `property.bool_value` + + **Examples:** + - Basic: `name = "my-artifact"` + - Comparison: `ttft_mean > 90` + - Pattern: `uri LIKE "%s3.amazonaws.com%"` + - Complex: `(artifactType = "model-artifact" OR artifactType = "metrics-artifact") AND name LIKE "%pytorch%"` + - Custom property: `format.string_value = "pytorch"` + - Escaped property: `` `custom-key` = "value" `` + schema: + type: string + in: query + required: false orderBy: style: form explode: true @@ -541,6 +877,103 @@ components: $ref: "#/components/schemas/OrderByField" in: query required: false + artifactOrderBy: + style: form + explode: true + examples: + standardField: + value: ID + summary: Order by standard field + customPropertyDouble: + value: mmlu.double_value + summary: Order by custom double property + customPropertyString: + value: framework_type.string_value + summary: Order by custom string property + customPropertyInt: + value: hardware_count.int_value + summary: Order by custom integer property + name: orderBy + description: | + Specifies the order by criteria for listing artifacts. 
+ + **Standard Fields:** + - `ID` - Order by artifact ID + - `NAME` - Order by artifact name + - `CREATE_TIME` - Order by creation timestamp + - `LAST_UPDATE_TIME` - Order by last update timestamp + + **Custom Property Ordering:** + + Artifacts can be ordered by custom properties using the format: `<property_name>.<value_type>` + + Supported value types: + - `double_value` - For numeric (floating-point) properties + - `int_value` - For integer properties + - `string_value` - For string properties + + Examples: + - `mmlu.double_value` - Order by the 'mmlu' benchmark score + - `accuracy.double_value` - Order by accuracy metric + - `framework_type.string_value` - Order by framework type + - `hardware_count.int_value` - Order by hardware count + - `ttft_mean.double_value` - Order by time-to-first-token mean + + **Behavior:** + - If an invalid value type is specified (e.g., `accuracy.invalid_type`), an error is returned + - If an invalid format is used (e.g., `accuracy` without `.value_type`), it falls back to ID ordering + - If a property doesn't exist, it falls back to ID ordering + - Artifacts with the specified property are ordered first (by the property value), followed by artifacts without the property (ordered by ID) + - Empty property names (e.g., `.double_value`) return an error + schema: + type: string + in: query + required: false + labelOrderBy: + style: form + explode: true + examples: + labelOrderBy: + value: name + name: orderBy + description: | + Specifies the key to order catalog labels by. You can provide any string key + that may exist in the label maps. Labels that contain the specified key will + be sorted by that key's value. Labels that don't contain the key will maintain + their original order and appear after labels that do contain the key. 
+ schema: + type: string + in: query + required: false + artifactType: + style: form + explode: true + examples: + artifactType: + value: model-artifact + name: artifactType + description: "Specifies the artifact type for listing artifacts." + schema: + type: array + items: + $ref: "#/components/schemas/ArtifactTypeQueryParam" + in: query + required: false + artifact_type: + deprecated: true + style: form + explode: true + examples: + artifact_type: + value: model-artifact + name: artifact_type + description: "Specifies the artifact type for listing artifacts." + schema: + type: array + items: + $ref: "#/components/schemas/ArtifactTypeQueryParam" + in: query + required: false id: name: id description: The ID of resource. @@ -578,43 +1011,6 @@ components: type: string in: query required: false - filterQuery: - examples: - filterQuery: - value: "name='my-model' AND state='LIVE'" - name: filterQuery - description: | - A SQL-like query string to filter the list of entities. The query supports rich filtering capabilities with automatic type inference. 
- - **Supported Operators:** - - Comparison: `=`, `!=`, `<>`, `>`, `<`, `>=`, `<=` - - Pattern matching: `LIKE`, `ILIKE` (case-insensitive) - - Set membership: `IN` - - Logical: `AND`, `OR` - - Grouping: `()` for complex expressions - - **Data Types:** - - Strings: `"value"` or `'value'` - - Numbers: `42`, `3.14`, `1e-5` - - Booleans: `true`, `false` (case-insensitive) - - **Property Access:** - - Standard properties: `name`, `id`, `state`, `createTimeSinceEpoch` - - Custom properties: Any user-defined property name - - Escaped properties: Use backticks for special characters: `` `custom-property` `` - - Type-specific access: `property.string_value`, `property.double_value`, `property.int_value`, `property.bool_value` - - **Examples:** - - Basic: `name = "my-model"` - - Comparison: `accuracy > 0.95` - - Pattern: `name LIKE "%tensorflow%"` - - Complex: `(name = "model-a" OR name = "model-b") AND state = "LIVE"` - - Custom property: `framework.string_value = "pytorch"` - - Escaped property: `` `mlflow.source.type` = "notebook" `` - schema: - type: string - in: query - required: false pageSize: examples: pageSize: @@ -644,18 +1040,6 @@ components: $ref: "#/components/schemas/SortOrder" in: query required: false - artifactType: - style: form - explode: true - examples: - artifactType: - value: model-artifact - name: artifactType - description: "Specifies the artifact type for listing artifacts." 
- schema: - $ref: "#/components/schemas/ArtifactTypeQueryParam" - in: query - required: false stepIds: style: form explode: true diff --git a/api/openapi/model-registry.yaml b/api/openapi/model-registry.yaml index 69ccbd0a61..86f7702d15 100644 --- a/api/openapi/model-registry.yaml +++ b/api/openapi/model-registry.yaml @@ -2996,6 +2996,18 @@ components: $ref: "#/components/schemas/OrderByField" in: query required: false + artifactType: + style: form + explode: true + examples: + artifactType: + value: model-artifact + name: artifactType + description: "Specifies the artifact type for listing artifacts." + schema: + $ref: "#/components/schemas/ArtifactTypeQueryParam" + in: query + required: false id: name: id description: The ID of resource. @@ -3099,18 +3111,6 @@ components: $ref: "#/components/schemas/SortOrder" in: query required: false - artifactType: - style: form - explode: true - examples: - artifactType: - value: model-artifact - name: artifactType - description: "Specifies the artifact type for listing artifacts." - schema: - $ref: "#/components/schemas/ArtifactTypeQueryParam" - in: query - required: false stepIds: style: form explode: true diff --git a/api/openapi/src/catalog.yaml b/api/openapi/src/catalog.yaml index ef1baeba6d..6b38af4a9d 100644 --- a/api/openapi/src/catalog.yaml +++ b/api/openapi/src/catalog.yaml @@ -20,20 +20,63 @@ paths: parameters: - name: source description: |- - Filter models by source. This parameter is currently required and - may only be specified once. + Filter models by source. Multiple values can be separated by commas + to filter by multiple sources (OR logic). For example: + ?source=huggingface,local will return models from either + huggingface OR local sources. schema: - type: string + type: array + items: + type: string + style: form + explode: true in: query - required: true + required: false - name: q description: Free-form keyword search used to filter the response. 
schema: type: string in: query required: false + - name: sourceLabel + description: |- + Filter models by the label associated with the source. Multiple + values can be separated by commas. If one of the values is the + string `null`, then models from every source without a label will + be returned. + schema: + type: array + items: + type: string + in: query + required: false + - $ref: "#/components/parameters/filterQuery" - $ref: "#/components/parameters/pageSize" - - $ref: "#/components/parameters/orderBy" + - name: orderBy + style: form + explode: true + examples: + orderBy: + value: ID + description: |- + Specifies the order by criteria for listing entities. + + Supported values are: + - CREATE_TIME + - LAST_UPDATE_TIME + - ID + - NAME + - ACCURACY + + The `ACCURACY` sort will sort by the `overall_average` property in any linked metrics artifact. + + In addition, models can be sorted by properties. For example: + - `provider.string_value` sorts by provider name + - `artifacts.ifeval.double_value` sorts by the min/max value a property called ifeval across all associated artifacts + schema: + $ref: "#/components/schemas/OrderByField" + in: query + required: false - $ref: "#/components/parameters/sortOrder" - $ref: "#/components/parameters/nextPageToken" responses: @@ -48,6 +91,48 @@ paths: "500": $ref: "#/components/responses/InternalServerError" operationId: findModels + /api/model_catalog/v1alpha1/models/filter_options: + description: Lists options for `filterQuery` when listing models. + get: + summary: Lists fields and available options that can be used in `filterQuery` on the list models endpoint. 
+ tags: + - ModelCatalogService + responses: + "200": + $ref: "#/components/responses/FilterOptionsResponse" + "400": + $ref: "#/components/responses/BadRequest" + "401": + $ref: "#/components/responses/Unauthorized" + "500": + $ref: "#/components/responses/InternalServerError" + operationId: findModelsFilterOptions + /api/model_catalog/v1alpha1/labels: + summary: Path used to get the list of catalog labels. + description: >- + The REST endpoint/path used to list zero or more `CatalogLabel` entities. + get: + summary: List All CatalogLabels + tags: + - ModelCatalogService + parameters: + - $ref: "#/components/parameters/pageSize" + - $ref: "#/components/parameters/labelOrderBy" + - $ref: "#/components/parameters/sortOrder" + - $ref: "#/components/parameters/nextPageToken" + responses: + "200": + $ref: "#/components/responses/CatalogLabelListResponse" + "400": + $ref: "#/components/responses/BadRequest" + "401": + $ref: "#/components/responses/Unauthorized" + "404": + $ref: "#/components/responses/NotFound" + "500": + $ref: "#/components/responses/InternalServerError" + operationId: findLabels + description: Gets a list of all `CatalogLabel` entities. /api/model_catalog/v1alpha1/sources: summary: Path used to get the list of catalog sources. description: >- @@ -107,14 +192,14 @@ paths: required: true /api/model_catalog/v1alpha1/sources/{source_id}/models/{model_name}/artifacts: description: >- - The REST endpoint/path used to list `CatalogModelArtifacts`. + The REST endpoint/path used to list `CatalogArtifacts`. get: - summary: List CatalogModelArtifacts. + summary: List CatalogArtifacts. 
tags: - ModelCatalogService responses: "200": - $ref: "#/components/responses/CatalogModelArtifactListResponse" + $ref: "#/components/responses/CatalogArtifactListResponse" "401": $ref: "#/components/responses/Unauthorized" "404": @@ -135,8 +220,66 @@ paths: type: string in: path required: true + - $ref: "#/components/parameters/artifactType" + - $ref: "#/components/parameters/artifact_type" + - $ref: "#/components/parameters/artifactFilterQuery" + - $ref: "#/components/parameters/pageSize" + - $ref: "#/components/parameters/artifactOrderBy" + - $ref: "#/components/parameters/sortOrder" + - $ref: "#/components/parameters/nextPageToken" components: schemas: + CatalogArtifact: + description: A single artifact in the catalog API. + oneOf: + - $ref: "#/components/schemas/CatalogModelArtifact" + - $ref: "#/components/schemas/CatalogMetricsArtifact" + discriminator: + propertyName: artifactType + mapping: + model-artifact: "#/components/schemas/CatalogModelArtifact" + metrics-artifact: "#/components/schemas/CatalogMetricsArtifact" + CatalogArtifactList: + description: List of CatalogArtifact entities. + allOf: + - type: object + properties: + items: + description: Array of `CatalogArtifact` entities. + type: array + items: + $ref: "#/components/schemas/CatalogArtifact" + required: + - items + - $ref: "#/components/schemas/BaseResourceList" + ArtifactTypeQueryParam: + description: Supported artifact types for querying. + enum: + - model-artifact + - metrics-artifact + type: string + CatalogMetricsArtifact: + description: A metadata Artifact Entity. + allOf: + - type: object + required: + - artifactType + - metricsType + properties: + artifactType: + type: string + default: metrics-artifact + metricsType: + type: string + enum: + - performance-metrics + - accuracy-metrics + customProperties: + description: User provided custom properties which are not defined by its type. 
+ type: object + additionalProperties: + $ref: "#/components/schemas/MetadataValue" + - $ref: "#/components/schemas/BaseResource" CatalogModel: description: A model in the model catalog. allOf: @@ -151,48 +294,71 @@ components: source_id: type: string description: ID of the source this model belongs to. - - $ref: "#/components/schemas/BaseResourceDates" - $ref: "#/components/schemas/BaseModel" + - $ref: "#/components/schemas/BaseResource" CatalogModelArtifact: - description: A single artifact for a catalog model. + description: A Catalog Model Artifact Entity. allOf: - type: object required: + - artifactType - uri properties: + artifactType: + type: string + default: model-artifact uri: type: string format: uri - description: URI where the artifact can be retrieved. + description: URI where the model can be retrieved. customProperties: description: User provided custom properties which are not defined by its type. type: object additionalProperties: $ref: "#/components/schemas/MetadataValue" - - $ref: "#/components/schemas/BaseResourceDates" - CatalogModelArtifactList: + - $ref: "#/components/schemas/BaseResource" + CatalogModelList: description: List of CatalogModel entities. allOf: - type: object properties: items: - description: Array of `CatalogModelArtifact` entities. + description: Array of `CatalogModel` entities. type: array items: - $ref: "#/components/schemas/CatalogModelArtifact" + $ref: "#/components/schemas/CatalogModel" required: - items - $ref: "#/components/schemas/BaseResourceList" - CatalogModelList: - description: List of CatalogModel entities. + CatalogLabel: + description: A catalog label. Labels are used to categorize catalog sources. Represented as a flexible map of string key-value pairs with a required 'name' field. + type: object + required: + - name + properties: + name: + type: string + nullable: true + description: The unique name identifier for the label. 
+ displayName: + type: string + description: An optional human-readable name to show in place of `name`. + additionalProperties: + type: string + example: + name: huggingface + displayName: HuggingFace Hub + description: HuggingFace models with full support and legal indemnification. + CatalogLabelList: + description: List of CatalogLabel entities. allOf: - type: object properties: items: - description: Array of `CatalogModel` entities. + description: Array of `CatalogLabel` entities. type: array items: - $ref: "#/components/schemas/CatalogModel" + $ref: "#/components/schemas/CatalogLabel" required: - items - $ref: "#/components/schemas/BaseResourceList" @@ -201,6 +367,7 @@ components: required: - id - name + - labels type: object properties: id: @@ -213,6 +380,32 @@ components: description: Whether the catalog source is enabled. type: boolean default: true + labels: + description: Labels for the catalog source. + type: array + items: + type: string + includedModels: + description: |- + Optional allow-list of models that are eligible for this source. Entries can be + exact model names or patterns that use `*` as a wildcard. When provided, only + models matching at least one pattern are considered. + + Pattern matching is case-insensitive, so `Granite/*` will match `granite/model`, + `Granite/model`, and `GRANITE/model`. + type: array + items: + type: string + excludedModels: + description: |- + Optional block-list of models that should be removed from the catalog even if + they match `includedModels`. Patterns support the `*` wildcard. + + Pattern matching is case-insensitive, so `*-beta` will match `Model-Beta`, + `model-beta`, and `MODEL-BETA`. + type: array + items: + type: string CatalogSourceList: description: List of CatalogSource entities. 
allOf: @@ -224,8 +417,45 @@ components: items: $ref: "#/components/schemas/CatalogSource" - $ref: "#/components/schemas/BaseResourceList" + FilterOption: + type: object + required: + - type + properties: + type: + type: string + description: The data type of the filter option + enum: + - string + - number + values: + type: array + description: Known values of the property for string types with a small number of possible options. + items: {} + range: + $ref: "#/components/schemas/FilterOptionRange" + FilterOptionRange: + type: object + description: Min and max values for number types. + properties: + min: + type: number + format: double + max: + type: number + format: double + FilterOptionsList: + description: List of FilterOptions + type: object + properties: + filters: + type: object + description: A single filter option. + additionalProperties: + $ref: "#/components/schemas/FilterOption" OrderByField: - description: Supported fields for ordering result entities. + description: |- + Supported fields for ordering result entities. enum: - CREATE_TIME - LAST_UPDATE_TIME @@ -234,12 +464,18 @@ components: type: string responses: - CatalogModelArtifactListResponse: + CatalogArtifactListResponse: + content: + application/json: + schema: + $ref: "#/components/schemas/CatalogArtifactList" + description: A response containing a list of CatalogArtifact entities. + CatalogLabelListResponse: content: application/json: schema: - $ref: "#/components/schemas/CatalogModelArtifactList" - description: A response containing a list of CatalogModelArtifact entities. + $ref: "#/components/schemas/CatalogLabelList" + description: A response containing a list of CatalogLabel entities. CatalogModelListResponse: content: application/json: @@ -264,8 +500,92 @@ components: schema: $ref: "#/components/schemas/CatalogSource" description: A response containing a `CatalogSource` entity. 
+ FilterOptionsResponse: + content: + application/json: + schema: + $ref: "#/components/schemas/FilterOptionsList" + description: A response containing options for a `filterQuery` parameter. parameters: + filterQuery: + examples: + filterQuery: + value: "name='my-model' AND state='LIVE'" + name: filterQuery + description: | + A SQL-like query string to filter catalog models. The query supports rich filtering capabilities with automatic type inference. + + **Supported Operators:** + - Comparison: `=`, `!=`, `<>`, `>`, `<`, `>=`, `<=` + - Pattern matching: `LIKE`, `ILIKE` (case-insensitive) + - Set membership: `IN` + - Logical: `AND`, `OR` + - Grouping: `()` for complex expressions + + **Data Types:** + - Strings: `"value"` or `'value'` + - Numbers: `42`, `3.14`, `1e-5` + - Booleans: `true`, `false` (case-insensitive) + + **Property Access:** + - Standard properties: `name`, `id`, `state`, `createTimeSinceEpoch` + - Custom properties: Any user-defined property name + - Escaped properties: Use backticks for special characters: `` `custom-property` `` + - Type-specific access: `property.string_value`, `property.double_value`, `property.int_value`, `property.bool_value` + - **Related artifact properties**: `artifacts.` to filter models by their artifact properties + + **Examples:** + - Basic: `name = "my-model"` + - Comparison: `accuracy > 0.95` + - Pattern: `name LIKE "%tensorflow%"` + - Complex: `(name = "model-a" OR name = "model-b") AND state = "LIVE"` + - Custom property: `framework.string_value = "pytorch"` + - Escaped property: `` `mlflow.source.type` = "notebook" `` + - **Artifact filtering**: `artifacts.ttft_mean >= 90` - filter models that have artifacts with ttft_mean >= 90 + - **Artifact custom property**: `artifacts.format = "pytorch"` - filter models by artifact custom properties + - **Combined filtering**: `name = "llm-model" AND artifacts.performance_score > 0.95` - combine model and artifact filters + schema: + type: string + in: query + required: false 
+ artifactFilterQuery: + examples: + artifactFilterQuery: + value: "name='my-artifact' AND uri LIKE '%s3%'" + name: filterQuery + description: | + A SQL-like query string to filter catalog artifacts. The query supports rich filtering capabilities with automatic type inference. + + **Supported Operators:** + - Comparison: `=`, `!=`, `<>`, `>`, `<`, `>=`, `<=` + - Pattern matching: `LIKE`, `ILIKE` (case-insensitive) + - Set membership: `IN` + - Logical: `AND`, `OR` + - Grouping: `()` for complex expressions + + **Data Types:** + - Strings: `"value"` or `'value'` + - Numbers: `42`, `3.14`, `1e-5` + - Booleans: `true`, `false` (case-insensitive) + + **Property Access (Artifacts):** + - Standard properties: `name`, `id`, `uri`, `artifactType`, `createTimeSinceEpoch` + - Custom properties: Any user-defined property name in `customProperties` + - Escaped properties: Use backticks for special characters: `` `custom-property` `` + - Type-specific access: `property.string_value`, `property.double_value`, `property.int_value`, `property.bool_value` + + **Examples:** + - Basic: `name = "my-artifact"` + - Comparison: `ttft_mean > 90` + - Pattern: `uri LIKE "%s3.amazonaws.com%"` + - Complex: `(artifactType = "model-artifact" OR artifactType = "metrics-artifact") AND name LIKE "%pytorch%"` + - Custom property: `format.string_value = "pytorch"` + - Escaped property: `` `custom-key` = "value" `` + schema: + type: string + in: query + required: false orderBy: style: form explode: true @@ -278,4 +598,101 @@ components: $ref: "#/components/schemas/OrderByField" in: query required: false + artifactOrderBy: + style: form + explode: true + examples: + standardField: + value: ID + summary: Order by standard field + customPropertyDouble: + value: mmlu.double_value + summary: Order by custom double property + customPropertyString: + value: framework_type.string_value + summary: Order by custom string property + customPropertyInt: + value: hardware_count.int_value + summary: Order by custom 
integer property + name: orderBy + description: | + Specifies the order by criteria for listing artifacts. + + **Standard Fields:** + - `ID` - Order by artifact ID + - `NAME` - Order by artifact name + - `CREATE_TIME` - Order by creation timestamp + - `LAST_UPDATE_TIME` - Order by last update timestamp + + **Custom Property Ordering:** + + Artifacts can be ordered by custom properties using the format: `<property_name>.<value_type>` + + Supported value types: + - `double_value` - For numeric (floating-point) properties + - `int_value` - For integer properties + - `string_value` - For string properties + + Examples: + - `mmlu.double_value` - Order by the 'mmlu' benchmark score + - `accuracy.double_value` - Order by accuracy metric + - `framework_type.string_value` - Order by framework type + - `hardware_count.int_value` - Order by hardware count + - `ttft_mean.double_value` - Order by time-to-first-token mean + + **Behavior:** + - If an invalid value type is specified (e.g., `accuracy.invalid_type`), an error is returned + - If an invalid format is used (e.g., `accuracy` without `.value_type`), it falls back to ID ordering + - If a property doesn't exist, it falls back to ID ordering + - Artifacts with the specified property are ordered first (by the property value), followed by artifacts without the property (ordered by ID) + - Empty property names (e.g., `.double_value`) return an error + schema: + type: string + in: query + required: false + labelOrderBy: + style: form + explode: true + examples: + labelOrderBy: + value: name + name: orderBy + description: | + Specifies the key to order catalog labels by. You can provide any string key + that may exist in the label maps. Labels that contain the specified key will + be sorted by that key's value. Labels that don't contain the key will maintain + their original order and appear after labels that do contain the key. 
+ schema: + type: string + in: query + required: false + artifactType: + style: form + explode: true + examples: + artifactType: + value: model-artifact + name: artifactType + description: "Specifies the artifact type for listing artifacts." + schema: + type: array + items: + $ref: "#/components/schemas/ArtifactTypeQueryParam" + in: query + required: false + artifact_type: + deprecated: true + style: form + explode: true + examples: + artifact_type: + value: model-artifact + name: artifact_type + description: "Specifies the artifact type for listing artifacts." + schema: + type: array + items: + $ref: "#/components/schemas/ArtifactTypeQueryParam" + in: query + required: false tags: [] diff --git a/api/openapi/src/lib/common.yaml b/api/openapi/src/lib/common.yaml index 0e4e32e84a..c5098cf7c8 100644 --- a/api/openapi/src/lib/common.yaml +++ b/api/openapi/src/lib/common.yaml @@ -1,14 +1,5 @@ components: schemas: - ArtifactTypeQueryParam: - description: Supported artifact types for querying. - enum: - - model-artifact - - doc-artifact - - dataset-artifact - - metric - - parameter - type: string BaseModel: type: object properties: @@ -79,6 +70,35 @@ components: description: Output only. Last update time of the resource since epoch in millisecond since epoch. type: string readOnly: true + BaseResource: + allOf: + - type: object + properties: + customProperties: + description: User provided custom properties which are not defined by its type. + type: object + additionalProperties: + $ref: "#/components/schemas/MetadataValue" + description: + description: |- + An optional description about the resource. + type: string + externalId: + description: |- + The external id that come from the clients’ system. This field is optional. + If set, it must be unique among all resources within a database instance. + type: string + name: + description: |- + The client provided name of the artifact. This field is optional. 
If set, + it must be unique among all the artifacts of the same artifact type within + a database instance and cannot be changed once set. + type: string + id: + format: int64 + description: The unique server generated id of the resource. + type: string + - $ref: "#/components/schemas/BaseResourceDates" BaseResourceList: required: - nextPageToken @@ -370,18 +390,6 @@ components: $ref: "#/components/schemas/SortOrder" in: query required: false - artifactType: - style: form - explode: true - examples: - artifactType: - value: model-artifact - name: artifactType - description: "Specifies the artifact type for listing artifacts." - schema: - $ref: "#/components/schemas/ArtifactTypeQueryParam" - in: query - required: false stepIds: style: form explode: true diff --git a/api/openapi/src/model-registry.yaml b/api/openapi/src/model-registry.yaml index dd780ed143..13ff4af00a 100644 --- a/api/openapi/src/model-registry.yaml +++ b/api/openapi/src/model-registry.yaml @@ -1660,6 +1660,15 @@ components: - ABANDONED - REFERENCE type: string + ArtifactTypeQueryParam: + description: Supported artifact types for querying. + enum: + - model-artifact + - doc-artifact + - dataset-artifact + - metric + - parameter + type: string ArtifactUpdate: description: An Artifact to be updated. oneOf: @@ -1676,35 +1685,6 @@ components: dataset-artifact: "#/components/schemas/DataSetUpdate" metric: "#/components/schemas/MetricUpdate" parameter: "#/components/schemas/ParameterUpdate" - BaseResource: - allOf: - - type: object - properties: - customProperties: - description: User provided custom properties which are not defined by its type. - type: object - additionalProperties: - $ref: "#/components/schemas/MetadataValue" - description: - description: |- - An optional description about the resource. - type: string - externalId: - description: |- - The external id that come from the clients’ system. This field is optional. - If set, it must be unique among all resources within a database instance. 
- type: string - name: - description: |- - The client provided name of the artifact. This field is optional. If set, - it must be unique among all the artifacts of the same artifact type within - a database instance and cannot be changed once set. - type: string - id: - format: int64 - description: The unique server generated id of the resource. - type: string - - $ref: "#/components/schemas/BaseResourceDates" BaseResourceCreate: type: object properties: @@ -2731,6 +2711,18 @@ components: $ref: "#/components/schemas/OrderByField" in: query required: false + artifactType: + style: form + explode: true + examples: + artifactType: + value: model-artifact + name: artifactType + description: "Specifies the artifact type for listing artifacts." + schema: + $ref: "#/components/schemas/ArtifactTypeQueryParam" + in: query + required: false securitySchemes: {} links: # Artifact diff --git a/catalog/Makefile b/catalog/Makefile index 785c6db020..c298ec644a 100644 --- a/catalog/Makefile +++ b/catalog/Makefile @@ -5,6 +5,7 @@ GENQLIENT_CONFIG := internal/catalog/genqlient/genqlient.yaml GENQLIENT_OUTPUT := internal/catalog/genqlient/generated.go GENQLIENT_SOURCES := $(wildcard internal/catalog/genqlient/queries/*.graphql) GRAPHQL_SCHEMA := internal/catalog/genqlient/queries/schema.graphql +GO ?= "$(shell which go)" .PHONY: gen/openapi-server gen/openapi-server: internal/server/openapi/api_model_catalog_service.go @@ -18,8 +19,8 @@ gen/openapi: pkg/openapi/client.go pkg/openapi/client.go: ../api/openapi/catalog.yaml ${OPENAPI_GENERATOR} generate \ -i ../api/openapi/catalog.yaml -g go -o pkg/openapi --package-name openapi \ - --ignore-file-override ./.openapi-generator-ignore --additional-properties=isGoSubmodule=true,enumClassPrefix=true,useOneOfDiscriminatorLookup=true - gofmt -w pkg/openapi + --ignore-file-override ./.openapi-generator-ignore --additional-properties=isGoSubmodule=true,enumClassPrefix=true,useOneOfDiscriminatorLookup=true,generateUnmarshalJSON=false + 
$(PROJECT_BIN)/goimports -w pkg/openapi .PHONY: gen/graphql gen/graphql: $(GENQLIENT_OUTPUT) @@ -42,3 +43,16 @@ clean-graphql: .PHONY: clean-internal-server-openapi clean-internal-server-openapi: while IFS= read -r file; do rm -f "internal/server/openapi/$$file"; done < internal/server/openapi/.openapi-generator/FILES + +.PHONY: test +test: + ${GO} test ./... + +.PHONY: test-nocache +test-nocache: + ${GO} test ./... -count=1 + +.PHONY: test-cover +test-cover: + ${GO} test ./... -coverprofile=coverage.txt + ${GO} tool cover -html=coverage.txt -o coverage.html diff --git a/catalog/README.md b/catalog/README.md index 84dd25e2b8..bf196f2da1 100644 --- a/catalog/README.md +++ b/catalog/README.md @@ -78,7 +78,7 @@ catalogs: ## Development ### Prerequisites -- Go >= 1.24 +- Go >= 1.25 - Java >= 11.0 (for OpenAPI generation) - Node.js >= 20.0.0 (for GraphQL schema downloads) @@ -122,7 +122,7 @@ type CatalogSourceProvider interface { 2. Register your provider: ```go -catalog.RegisterCatalogType("my-catalog", func(source *CatalogSourceConfig) (CatalogSourceProvider, error) { +catalog.RegisterCatalogType("my-catalog", func(source *Source) (CatalogSourceProvider, error) { return NewMyCatalogProvider(source) }) ``` @@ -145,4 +145,4 @@ The catalog service is designed to complement the main Model Registry service by - Unified metadata aggregation - Read-only access to distributed model catalogs -For complete Model Registry documentation, see the [main README](../README.md). \ No newline at end of file +For complete Model Registry documentation, see the [main README](../README.md). 
diff --git a/catalog/cmd/catalog.go b/catalog/cmd/catalog.go index f4cb8da07c..51bc5d35bf 100644 --- a/catalog/cmd/catalog.go +++ b/catalog/cmd/catalog.go @@ -1,44 +1,102 @@ package cmd import ( + "context" "fmt" "net/http" + "reflect" + "time" "github.com/golang/glog" "github.com/kubeflow/model-registry/catalog/internal/catalog" + "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/catalog/internal/db/service" "github.com/kubeflow/model-registry/catalog/internal/server/openapi" + "github.com/kubeflow/model-registry/internal/datastore" + "github.com/kubeflow/model-registry/internal/datastore/embedmd" "github.com/spf13/cobra" ) var catalogCfg = struct { - ListenAddress string - ConfigPath string + ListenAddress string + ConfigPath []string + PerformanceMetricsPath []string }{ - ListenAddress: "0.0.0.0:8080", - ConfigPath: "sources.yaml", + ListenAddress: "0.0.0.0:8080", + ConfigPath: []string{"sources.yaml"}, + PerformanceMetricsPath: []string{}, } var CatalogCmd = &cobra.Command{ Use: "catalog", Short: "Catalog API server", - Long: `Launch the API server for the model catalog`, - RunE: runCatalogServer, + Long: `Launch the API server for the model catalog. 
Use PostgreSQL's + environment variables + (https://www.postgresql.org/docs/current/libpq-envars.html) to + configure the database connection.`, + RunE: runCatalogServer, } func init() { - CatalogCmd.Flags().StringVarP(&catalogCfg.ListenAddress, "listen", "l", catalogCfg.ListenAddress, "Address to listen on") - CatalogCmd.Flags().StringVar(&catalogCfg.ConfigPath, "catalogs-path", catalogCfg.ConfigPath, "Path to catalog source configuration file") + fs := CatalogCmd.Flags() + fs.StringVarP(&catalogCfg.ListenAddress, "listen", "l", catalogCfg.ListenAddress, "Address to listen on") + fs.StringSliceVar(&catalogCfg.ConfigPath, "catalogs-path", catalogCfg.ConfigPath, "Path to catalog source configuration file") + fs.StringSliceVar(&catalogCfg.PerformanceMetricsPath, "performance-metrics", catalogCfg.PerformanceMetricsPath, "Path to performance metrics data directory") } func runCatalogServer(cmd *cobra.Command, args []string) error { - sources, err := catalog.LoadCatalogSources(catalogCfg.ConfigPath) + ds, err := datastore.NewConnector("embedmd", &embedmd.EmbedMDConfig{ + DatabaseType: "postgres", // We only support postgres right now + DatabaseDSN: "", // Empty DSN, see https://www.postgresql.org/docs/current/libpq-envars.html + }) + if err != nil { + return fmt.Errorf("error creating datastore: %w", err) + } + + repoSet, err := ds.Connect(service.DatastoreSpec()) + if err != nil { + return fmt.Errorf("error initializing datastore: %v", err) + } + + services := service.NewServices( + getRepo[models.CatalogModelRepository](repoSet), + getRepo[models.CatalogArtifactRepository](repoSet), + getRepo[models.CatalogModelArtifactRepository](repoSet), + getRepo[models.CatalogMetricsArtifactRepository](repoSet), + getRepo[models.PropertyOptionsRepository](repoSet), + ) + + loader := catalog.NewLoader(services, catalogCfg.ConfigPath) + + perfLoader, err := catalog.NewPerformanceMetricsLoader(catalogCfg.PerformanceMetricsPath, services.CatalogModelRepository, 
services.CatalogMetricsArtifactRepository, repoSet.TypeMap()) + if err != nil { + return fmt.Errorf("error initializing performance metrics: %v", err) + } + loader.RegisterEventHandler(perfLoader.Load) + + poRefresher := models.NewPropertyOptionsRefresher(context.Background(), services.PropertyOptionsRepository, time.Second) + loader.RegisterEventHandler(func(ctx context.Context, record catalog.ModelProviderRecord) error { + poRefresher.Trigger() + return nil + }) + + err = loader.Start(context.Background()) if err != nil { return fmt.Errorf("error loading catalog sources: %v", err) } - svc := openapi.NewModelCatalogServiceAPIService(sources) + svc := openapi.NewModelCatalogServiceAPIService(catalog.NewDBCatalog(services, loader.Sources), loader.Sources, loader.Labels) ctrl := openapi.NewModelCatalogServiceAPIController(svc) glog.Infof("Catalog API server listening on %s", catalogCfg.ListenAddress) return http.ListenAndServe(catalogCfg.ListenAddress, openapi.NewRouter(ctrl)) } + +func getRepo[T any](repoSet datastore.RepoSet) T { + repo, err := repoSet.Repository(reflect.TypeFor[T]()) + if err != nil { + panic(fmt.Sprintf("unable to get repository: %v", err)) + } + + return repo.(T) +} diff --git a/catalog/internal/catalog/catalog.go b/catalog/internal/catalog/catalog.go index ebb2f1383e..437fa28660 100644 --- a/catalog/internal/catalog/catalog.go +++ b/catalog/internal/catalog/catalog.go @@ -2,204 +2,48 @@ package catalog import ( "context" - "fmt" - "os" - "path/filepath" - "sync" - - "github.com/golang/glog" - "k8s.io/apimachinery/pkg/util/yaml" model "github.com/kubeflow/model-registry/catalog/pkg/openapi" ) type ListModelsParams struct { - Query string - OrderBy model.OrderByField - SortOrder model.SortOrder -} - -// CatalogSourceProvider is implemented by catalog source types, e.g. 
YamlCatalog -type CatalogSourceProvider interface { + Query string + FilterQuery string + SourceIDs []string + SourceLabels []string + PageSize int32 + OrderBy model.OrderByField + SortOrder model.SortOrder + NextPageToken *string +} + +type ListArtifactsParams struct { + FilterQuery string + PageSize int32 + OrderBy string + SortOrder model.SortOrder + NextPageToken *string + ArtifactTypesFilter []string +} + +// APIProvider implements the API endpoints. +type APIProvider interface { // GetModel returns model metadata for a single model by its name. If // nothing is found with the name provided it returns nil, without an // error. - GetModel(ctx context.Context, name string) (*model.CatalogModel, error) + GetModel(ctx context.Context, modelName string, sourceID string) (*model.CatalogModel, error) // ListModels returns all models according to the parameters. If // nothing suitable is found, it returns an empty list. + // If sourceIDs is provided, filter models by source IDs. If not provided, return all models. ListModels(ctx context.Context, params ListModelsParams) (model.CatalogModelList, error) // GetArtifacts returns all artifacts for a particular model. If no // model is found with that name, it returns nil. If the model is // found, but has no artifacts, an empty list is returned. - GetArtifacts(ctx context.Context, name string) (*model.CatalogModelArtifactList, error) -} - -// CatalogSourceConfig is a single entry from the catalog sources YAML file. -type CatalogSourceConfig struct { - model.CatalogSource `json:",inline"` - - // Catalog type to use, must match one of the registered types - Type string `json:"type"` - - // Properties used for configuring the catalog connection based on catalog implementation - Properties map[string]any `json:"properties,omitempty"` -} - -// sourceConfig is the structure for the catalog sources YAML file. 
-type sourceConfig struct { - Catalogs []CatalogSourceConfig `json:"catalogs"` -} - -type CatalogTypeRegisterFunc func(source *CatalogSourceConfig) (CatalogSourceProvider, error) - -var registeredCatalogTypes = make(map[string]CatalogTypeRegisterFunc, 0) - -func RegisterCatalogType(catalogType string, callback CatalogTypeRegisterFunc) error { - if _, exists := registeredCatalogTypes[catalogType]; exists { - return fmt.Errorf("catalog type %s already exists", catalogType) - } - registeredCatalogTypes[catalogType] = callback - return nil -} - -type CatalogSource struct { - Provider CatalogSourceProvider - Metadata model.CatalogSource -} - -type SourceCollection struct { - sourcesMu sync.RWMutex - sources map[string]CatalogSource -} - -func NewSourceCollection(sources map[string]CatalogSource) *SourceCollection { - return &SourceCollection{sources: sources} -} - -func (sc *SourceCollection) All() map[string]CatalogSource { - sc.sourcesMu.RLock() - defer sc.sourcesMu.RUnlock() - - return sc.sources -} - -func (sc *SourceCollection) Get(name string) (src CatalogSource, ok bool) { - sc.sourcesMu.RLock() - defer sc.sourcesMu.RUnlock() - - src, ok = sc.sources[name] - return -} - -func (sc *SourceCollection) load(path string) error { - // Get absolute path of the catalog config file - absConfigPath, err := filepath.Abs(path) - if err != nil { - return fmt.Errorf("failed to get absolute path for %s: %v", path, err) - } - - // Get the directory of the config file to resolve relative paths - configDir := filepath.Dir(absConfigPath) - - // Save current working directory - originalWd, err := os.Getwd() - if err != nil { - return fmt.Errorf("failed to get current working directory: %v", err) - } - - // Change to the config directory to make relative paths work - if err := os.Chdir(configDir); err != nil { - return fmt.Errorf("failed to change to config directory %s: %v", configDir, err) - } - - // Ensure we restore the original working directory when we're done - defer func() { 
- if err := os.Chdir(originalWd); err != nil { - glog.Errorf("failed to restore original working directory %s: %v", originalWd, err) - } - }() - - config := sourceConfig{} - bytes, err := os.ReadFile(absConfigPath) - if err != nil { - return err - } - - if err = yaml.UnmarshalStrict(bytes, &config); err != nil { - return err - } - - sources := make(map[string]CatalogSource, len(config.Catalogs)) - for _, catalogConfig := range config.Catalogs { - // If enabled is explicitly set to false, skip - hasEnabled := catalogConfig.HasEnabled() - if hasEnabled && *catalogConfig.Enabled == false { - continue - } - // If not explicitly set, default to enabled - if !hasEnabled { - t := true - catalogConfig.CatalogSource.Enabled = &t - } - - catalogType := catalogConfig.Type - glog.Infof("reading config type %s...", catalogType) - registerFunc, ok := registeredCatalogTypes[catalogType] - if !ok { - return fmt.Errorf("catalog type %s not registered", catalogType) - } - id := catalogConfig.GetId() - if len(id) == 0 { - return fmt.Errorf("invalid catalog id %s", id) - } - if _, exists := sources[id]; exists { - return fmt.Errorf("duplicate catalog id %s", id) - } - provider, err := registerFunc(&catalogConfig) - if err != nil { - return fmt.Errorf("error reading catalog type %s with id %s: %v", catalogType, id, err) - } - - sources[id] = CatalogSource{ - Provider: provider, - Metadata: catalogConfig.CatalogSource, - } - - glog.Infof("loaded config %s of type %s", id, catalogType) - } - - sc.sourcesMu.Lock() - defer sc.sourcesMu.Unlock() - sc.sources = sources - - return nil -} - -func LoadCatalogSources(path string) (*SourceCollection, error) { - sc := &SourceCollection{} - err := sc.load(path) - if err != nil { - return nil, err - } - - go func() { - changes, err := getMonitor().Path(path) - if err != nil { - glog.Errorf("unable to watch sources file: %v", err) - // Not fatal, we just won't get automatic updates. 
- } - - for range changes { - glog.Infof("Reloading sources %s", path) - - err = sc.load(path) - if err != nil { - glog.Errorf("unable to load sources: %v", err) - } - } - }() + GetArtifacts(ctx context.Context, modelName string, sourceID string, params ListArtifactsParams) (model.CatalogArtifactList, error) - return sc, nil + // GetFilterOptions returns all available filter options for models. + // This includes field names, data types, and available values or ranges. + GetFilterOptions(ctx context.Context) (*model.FilterOptionsList, error) } diff --git a/catalog/internal/catalog/catalog_test.go b/catalog/internal/catalog/catalog_test.go index d2b9269c9b..c398dce05d 100644 --- a/catalog/internal/catalog/catalog_test.go +++ b/catalog/internal/catalog/catalog_test.go @@ -1,11 +1,18 @@ package catalog import ( + "context" + "fmt" "reflect" "sort" "testing" + "time" - model "github.com/kubeflow/model-registry/catalog/pkg/openapi" + dbmodels "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/catalog/internal/db/service" + apimodels "github.com/kubeflow/model-registry/catalog/pkg/openapi" + "github.com/kubeflow/model-registry/internal/apiutils" + mrmodels "github.com/kubeflow/model-registry/internal/db/models" ) func TestLoadCatalogSources(t *testing.T) { @@ -21,24 +28,33 @@ func TestLoadCatalogSources(t *testing.T) { { name: "test-catalog-sources", args: args{catalogsPath: "testdata/test-catalog-sources.yaml"}, - want: []string{"catalog1", "catalog3", "catalog4"}, + want: []string{"catalog1"}, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, err := LoadCatalogSources(tt.args.catalogsPath) + // Create mock services + services := service.NewServices( + &MockCatalogModelRepository{}, + &MockCatalogArtifactRepository{}, + &MockCatalogModelArtifactRepository{}, + &MockCatalogMetricsArtifactRepository{}, + &MockPropertyOptionsRepository{}, + ) + loader := NewLoader(services, 
[]string{tt.args.catalogsPath}) + err := loader.Start(context.Background()) if (err != nil) != tt.wantErr { - t.Errorf("LoadCatalogSources() error = %v, wantErr %v", err, tt.wantErr) + t.Errorf("NewLoader().Start() error = %v, wantErr %v", err, tt.wantErr) return } - gotKeys := make([]string, 0, len(got.All())) - for k := range got.All() { + gotKeys := make([]string, 0, len(loader.Sources.All())) + for k := range loader.Sources.All() { gotKeys = append(gotKeys, k) } sort.Strings(gotKeys) if !reflect.DeepEqual(gotKeys, tt.want) { - t.Errorf("LoadCatalogSources() got = %v, want %v", got, tt.want) + t.Errorf("NewLoader().Start() got = %v, want %v", gotKeys, tt.want) } }) } @@ -52,27 +68,18 @@ func TestLoadCatalogSourcesEnabledDisabled(t *testing.T) { tests := []struct { name string args args - want map[string]model.CatalogSource + want map[string]apimodels.CatalogSource wantErr bool }{ { name: "test-catalog-sources-enabled-and-disabled", args: args{catalogsPath: "testdata/test-catalog-sources.yaml"}, - want: map[string]model.CatalogSource{ + want: map[string]apimodels.CatalogSource{ "catalog1": { Id: "catalog1", Name: "Catalog 1", Enabled: &trueValue, - }, - "catalog3": { - Id: "catalog3", - Name: "Catalog 3", - Enabled: &trueValue, - }, - "catalog4": { - Id: "catalog4", - Name: "Catalog 4", - Enabled: &trueValue, + Labels: []string{}, }, }, wantErr: false, @@ -80,23 +87,694 @@ func TestLoadCatalogSourcesEnabledDisabled(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, err := LoadCatalogSources(tt.args.catalogsPath) + // Create mock services + services := service.NewServices( + &MockCatalogModelRepository{}, + &MockCatalogArtifactRepository{}, + &MockCatalogModelArtifactRepository{}, + &MockCatalogMetricsArtifactRepository{}, + &MockPropertyOptionsRepository{}, + ) + loader := NewLoader(services, []string{tt.args.catalogsPath}) + err := loader.Start(context.Background()) if (err != nil) != tt.wantErr { - 
t.Errorf("LoadCatalogSources() error = %v, wantErr %v", err, tt.wantErr) + t.Errorf("NewLoader().Start() error = %v, wantErr %v", err, tt.wantErr) + return + } + if err != nil { return } + + if !reflect.DeepEqual(loader.Sources.All(), tt.want) { + t.Errorf("NewLoader().Start() got metadata = %#v, want %#v", loader.Sources.All(), tt.want) + } + }) + } +} + +func TestLabelsValidation(t *testing.T) { + // Create mock services + services := service.NewServices( + &MockCatalogModelRepository{}, + &MockCatalogArtifactRepository{}, + &MockCatalogModelArtifactRepository{}, + &MockCatalogMetricsArtifactRepository{}, + &MockPropertyOptionsRepository{}, + ) + + tests := []struct { + name string + config *sourceConfig + wantErr bool + errMsg string + }{ + { + name: "valid labels with name field", + config: &sourceConfig{ + Catalogs: []Source{}, + Labels: []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One"}, + {"name": "labelNameTwo", "displayName": "Label Name Two"}, + }, + }, + wantErr: false, + }, + { + name: "invalid label missing name field", + config: &sourceConfig{ + Catalogs: []Source{}, + Labels: []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One"}, + {"displayName": "Label Name Two"}, // Missing "name" + }, + }, + wantErr: true, + errMsg: "invalid label at index 1: missing required 'name' field", + }, + { + name: "invalid label with empty name", + config: &sourceConfig{ + Catalogs: []Source{}, + Labels: []map[string]any{ + {"name": "", "displayName": "Empty Name"}, + }, + }, + wantErr: true, + errMsg: "invalid label at index 0: missing required 'name' field", + }, + { + name: "duplicate label names within same origin", + config: &sourceConfig{ + Catalogs: []Source{}, + Labels: []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One 1"}, + {"name": "labelNameTwo", "displayName": "Label Name Two"}, + {"name": "labelNameOne", "displayName": "Label Name One 2"}, + }, + }, + wantErr: true, + errMsg: 
"duplicate label name 'labelNameOne' within the same origin", + }, + { + name: "nil labels should not error", + config: &sourceConfig{ + Catalogs: []Source{}, + Labels: nil, + }, + wantErr: false, + }, + { + name: "empty labels array should not error", + config: &sourceConfig{ + Catalogs: []Source{}, + Labels: []map[string]any{}, + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + loader := NewLoader(services, []string{}) + err := loader.updateLabels("test-path", tt.config) + + if tt.wantErr { + if err == nil { + t.Errorf("updateLabels() expected error but got none") + return + } + if tt.errMsg != "" && err.Error() != tt.errMsg { + t.Errorf("updateLabels() error = %v, want %v", err.Error(), tt.errMsg) + } + } else { + if err != nil { + t.Errorf("updateLabels() unexpected error = %v", err) + } + } + }) + } +} + +func TestCatalogSourceLabelsDefaultToEmptySlice(t *testing.T) { + type args struct { + catalogsPath string + } + tests := []struct { + name string + args args + want func(sources map[string]apimodels.CatalogSource) bool + }{ + { + name: "labels-default-to-empty-slice", + args: args{catalogsPath: "testdata/test-catalog-sources.yaml"}, + want: func(sources map[string]apimodels.CatalogSource) bool { + // Verify that all loaded catalog sources have labels defaulting to empty slice + for _, source := range sources { + if source.Labels == nil { + return false // Labels should not be nil + } + if len(source.Labels) != 0 { + return false // Labels should be empty slice, not nil and not containing elements + } + } + return len(sources) > 0 // Ensure we actually loaded some sources to test + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Create mock services + services := service.NewServices( + &MockCatalogModelRepository{}, + &MockCatalogArtifactRepository{}, + &MockCatalogModelArtifactRepository{}, + &MockCatalogMetricsArtifactRepository{}, + &MockPropertyOptionsRepository{}, + ) 
+ loader := NewLoader(services, []string{tt.args.catalogsPath}) + err := loader.Start(context.Background()) if err != nil { + t.Errorf("NewLoader().Start() error = %v", err) return } - gotMetadata := make(map[string]model.CatalogSource) - for id, source := range got.All() { - gotMetadata[id] = source.Metadata + sources := loader.Sources.All() + if !tt.want(sources) { + t.Errorf("Labels validation failed for sources: %#v", sources) } - if !reflect.DeepEqual(gotMetadata, tt.want) { - t.Errorf("LoadCatalogSources() got metadata = %#v, want %#v", gotMetadata, tt.want) + // Explicitly verify each source has empty labels slice + for id, source := range sources { + if source.Labels == nil { + t.Errorf("Source %s has nil Labels, expected empty slice", id) + } else if len(source.Labels) != 0 { + t.Errorf("Source %s has non-empty Labels %v, expected empty slice", id, source.Labels) + } } }) } } + +func TestLoadCatalogSourcesWithMockRepositories(t *testing.T) { + // Create mock repositories with tracking capabilities + mockModelRepo := &MockCatalogModelRepository{} + mockArtifactRepo := &MockCatalogArtifactRepository{} + mockModelArtifactRepo := &MockCatalogModelArtifactRepository{} + mockMetricsArtifactRepo := &MockCatalogMetricsArtifactRepository{} + + services := service.NewServices( + mockModelRepo, + mockArtifactRepo, + mockModelArtifactRepo, + mockMetricsArtifactRepo, + &MockPropertyOptionsRepository{}, + ) + + // Register a test provider that will create some test data + testProviderName := "test-provider" + RegisterModelProvider(testProviderName, func(ctx context.Context, source *Source, reldir string) (<-chan ModelProviderRecord, error) { + ch := make(chan ModelProviderRecord, 1) + + // Create a test model + modelName := "test-model" + model := &dbmodels.CatalogModelImpl{ + Attributes: &dbmodels.CatalogModelAttributes{ + Name: &modelName, + }, + } + + // Create test artifacts + modelArtifactName := "model-artifact" + metricsArtifactName := "metrics-artifact" + + 
artifacts := []dbmodels.CatalogArtifact{ + { + CatalogModelArtifact: &dbmodels.CatalogModelArtifactImpl{ + Attributes: &dbmodels.CatalogModelArtifactAttributes{ + Name: &modelArtifactName, + }, + }, + }, + { + CatalogMetricsArtifact: &dbmodels.CatalogMetricsArtifactImpl{ + Attributes: &dbmodels.CatalogMetricsArtifactAttributes{ + Name: &metricsArtifactName, + }, + }, + }, + } + + ch <- ModelProviderRecord{ + Model: model, + Artifacts: artifacts, + } + close(ch) + + return ch, nil + }) + + // Create test config content (use in-memory instead of file) + testConfig := &sourceConfig{ + Catalogs: []Source{ + { + CatalogSource: apimodels.CatalogSource{ + Id: "test-catalog", + Name: "Test Catalog", + Enabled: apiutils.Of(true), + }, + Type: testProviderName, + Properties: map[string]any{ + "test": "property", + }, + }, + }, + } + + // Create a loader and test the database update directly + l := NewLoader(services, []string{}) + ctx := context.Background() + + err := l.updateDatabase(ctx, "test-path", testConfig) + if err != nil { + t.Fatalf("updateDatabase() error = %v", err) + } + + // Wait a bit for the goroutine to process + time.Sleep(100 * time.Millisecond) + + // Verify that the model was saved + if len(mockModelRepo.SavedModels) != 1 { + t.Errorf("Expected 1 model to be saved, got %d", len(mockModelRepo.SavedModels)) + } + + if len(mockModelRepo.SavedModels) > 0 { + savedModel := mockModelRepo.SavedModels[0] + if savedModel.GetAttributes() == nil || savedModel.GetAttributes().Name == nil { + t.Error("Saved model should have attributes with name") + } else if *savedModel.GetAttributes().Name != "test-model" { + t.Errorf("Expected model name 'test-model', got '%s'", *savedModel.GetAttributes().Name) + } + } + + // Verify that artifacts were saved + if len(mockModelArtifactRepo.SavedArtifacts) != 1 { + t.Errorf("Expected 1 model artifact to be saved, got %d", len(mockModelArtifactRepo.SavedArtifacts)) + } + + if len(mockMetricsArtifactRepo.SavedMetrics) != 1 { + 
t.Errorf("Expected 1 metrics artifact to be saved, got %d", len(mockMetricsArtifactRepo.SavedMetrics)) + } +} + +func TestLoadCatalogSourcesWithRepositoryErrors(t *testing.T) { + // Create a mock repository that fails on save + mockModelRepo := &MockCatalogModelRepositoryWithErrors{ + shouldFailSave: true, + } + mockArtifactRepo := &MockCatalogArtifactRepository{} + mockModelArtifactRepo := &MockCatalogModelArtifactRepository{} + mockMetricsArtifactRepo := &MockCatalogMetricsArtifactRepository{} + + services := service.NewServices( + mockModelRepo, + mockArtifactRepo, + mockModelArtifactRepo, + mockMetricsArtifactRepo, + &MockPropertyOptionsRepository{}, + ) + + // Register a test provider + testProviderName := "test-error-provider" + RegisterModelProvider(testProviderName, func(ctx context.Context, source *Source, reldir string) (<-chan ModelProviderRecord, error) { + ch := make(chan ModelProviderRecord, 1) + + modelName := "test-model" + model := &dbmodels.CatalogModelImpl{ + Attributes: &dbmodels.CatalogModelAttributes{ + Name: &modelName, + }, + } + + ch <- ModelProviderRecord{ + Model: model, + Artifacts: []dbmodels.CatalogArtifact{}, + } + close(ch) + + return ch, nil + }) + + testConfig := &sourceConfig{ + Catalogs: []Source{ + { + CatalogSource: apimodels.CatalogSource{ + Id: "test-catalog", + Name: "Test Catalog", + Enabled: apiutils.Of(true), + }, + Type: testProviderName, + }, + }, + } + + l := NewLoader(services, []string{}) + ctx := context.Background() + + // This should not return an error even if repository operations fail + // (errors are logged but don't stop the loading process) + err := l.updateDatabase(ctx, "test-path", testConfig) + if err != nil { + t.Fatalf("updateDatabase() should not fail even with repository errors, got error = %v", err) + } + + // Wait for processing + time.Sleep(100 * time.Millisecond) + + // Verify that no models were saved due to the error + if len(mockModelRepo.SavedModels) != 0 { + t.Errorf("Expected 0 models to be 
saved due to error, got %d", len(mockModelRepo.SavedModels)) + } +} + +func TestMockRepositoryBehavior(t *testing.T) { + mockRepo := &MockCatalogModelRepository{} + + // Test Save operation + modelName := "test-model" + model := &dbmodels.CatalogModelImpl{ + Attributes: &dbmodels.CatalogModelAttributes{ + Name: &modelName, + }, + } + + savedModel, err := mockRepo.Save(model) + if err != nil { + t.Fatalf("Save() error = %v", err) + } + + if savedModel.GetID() == nil { + t.Error("Saved model should have an ID") + } + + if *savedModel.GetID() != 1 { + t.Errorf("Expected ID 1, got %d", *savedModel.GetID()) + } + + // Test GetByID operation + retrievedModel, err := mockRepo.GetByID(1) + if err != nil { + t.Fatalf("GetByID() error = %v", err) + } + + if retrievedModel.GetAttributes().Name == nil || *retrievedModel.GetAttributes().Name != modelName { + t.Errorf("Retrieved model name mismatch, expected %s", modelName) + } + + // Test GetByName operation + retrievedModel, err = mockRepo.GetByName(modelName) + if err != nil { + t.Fatalf("GetByName() error = %v", err) + } + + if retrievedModel.GetID() == nil || *retrievedModel.GetID() != 1 { + t.Error("Retrieved model should have ID 1") + } + + // Test List operation + listWrapper, err := mockRepo.List(dbmodels.CatalogModelListOptions{}) + if err != nil { + t.Fatalf("List() error = %v", err) + } + + if len(listWrapper.Items) != 1 { + t.Errorf("Expected 1 item in list, got %d", len(listWrapper.Items)) + } + + // Test not found scenarios + _, err = mockRepo.GetByID(999) + if err == nil { + t.Error("GetByID() should return error for non-existent ID") + } + + _, err = mockRepo.GetByName("non-existent") + if err == nil { + t.Error("GetByName() should return error for non-existent name") + } +} + +// MockCatalogModelRepositoryWithErrors is a mock that can simulate errors +type MockCatalogModelRepositoryWithErrors struct { + MockCatalogModelRepository + shouldFailSave bool +} + +func (m *MockCatalogModelRepositoryWithErrors) 
Save(model dbmodels.CatalogModel) (dbmodels.CatalogModel, error) { + if m.shouldFailSave { + return nil, fmt.Errorf("simulated save error") + } + return m.MockCatalogModelRepository.Save(model) +} + +// MockCatalogModelRepository mocks the CatalogModelRepository interface. +type MockCatalogModelRepository struct { + SavedModels []dbmodels.CatalogModel + NextID int32 +} + +func (m *MockCatalogModelRepository) GetByID(id int32) (dbmodels.CatalogModel, error) { + for _, model := range m.SavedModels { + if model.GetID() != nil && *model.GetID() == id { + return model, nil + } + } + return nil, &MockNotFoundError{Entity: "CatalogModel", ID: id} +} + +func (m *MockCatalogModelRepository) List(listOptions dbmodels.CatalogModelListOptions) (*mrmodels.ListWrapper[dbmodels.CatalogModel], error) { + return &mrmodels.ListWrapper[dbmodels.CatalogModel]{ + Items: m.SavedModels, + PageSize: int32(len(m.SavedModels)), + Size: int32(len(m.SavedModels)), + }, nil +} + +func (m *MockCatalogModelRepository) GetByName(name string) (dbmodels.CatalogModel, error) { + for _, model := range m.SavedModels { + if model.GetAttributes() != nil && model.GetAttributes().Name != nil && *model.GetAttributes().Name == name { + return model, nil + } + } + return nil, &MockNotFoundError{Entity: "CatalogModel", ID: 0} +} + +func (m *MockCatalogModelRepository) Save(model dbmodels.CatalogModel) (dbmodels.CatalogModel, error) { + m.NextID++ + id := m.NextID + + // Create a new model with assigned ID + savedModel := &dbmodels.CatalogModelImpl{ + ID: &id, + TypeID: model.GetTypeID(), + Attributes: model.GetAttributes(), + Properties: model.GetProperties(), + CustomProperties: model.GetCustomProperties(), + } + + m.SavedModels = append(m.SavedModels, savedModel) + return savedModel, nil +} + +// MockCatalogModelArtifactRepository mocks the CatalogModelArtifactRepository interface. 
+type MockCatalogModelArtifactRepository struct { + SavedArtifacts []dbmodels.CatalogModelArtifact + NextID int32 +} + +func (m *MockCatalogModelArtifactRepository) GetByID(id int32) (dbmodels.CatalogModelArtifact, error) { + for _, artifact := range m.SavedArtifacts { + if artifact.GetID() != nil && *artifact.GetID() == id { + return artifact, nil + } + } + return nil, &MockNotFoundError{Entity: "CatalogModelArtifact", ID: id} +} + +func (m *MockCatalogModelArtifactRepository) List(listOptions dbmodels.CatalogModelArtifactListOptions) (*mrmodels.ListWrapper[dbmodels.CatalogModelArtifact], error) { + return &mrmodels.ListWrapper[dbmodels.CatalogModelArtifact]{ + Items: m.SavedArtifacts, + PageSize: int32(len(m.SavedArtifacts)), + Size: int32(len(m.SavedArtifacts)), + }, nil +} + +func (m *MockCatalogModelArtifactRepository) Save(modelArtifact dbmodels.CatalogModelArtifact, parentResourceID *int32) (dbmodels.CatalogModelArtifact, error) { + m.NextID++ + id := m.NextID + + // Create a new artifact with assigned ID + savedArtifact := &dbmodels.CatalogModelArtifactImpl{ + ID: &id, + TypeID: modelArtifact.GetTypeID(), + Attributes: modelArtifact.GetAttributes(), + Properties: modelArtifact.GetProperties(), + CustomProperties: modelArtifact.GetCustomProperties(), + } + + m.SavedArtifacts = append(m.SavedArtifacts, savedArtifact) + return savedArtifact, nil +} + +// MockCatalogMetricsArtifactRepository mocks the CatalogMetricsArtifactRepository interface. 
+type MockCatalogMetricsArtifactRepository struct { + SavedMetrics []dbmodels.CatalogMetricsArtifact + NextID int32 +} + +func (m *MockCatalogMetricsArtifactRepository) GetByID(id int32) (dbmodels.CatalogMetricsArtifact, error) { + for _, metrics := range m.SavedMetrics { + if metrics.GetID() != nil && *metrics.GetID() == id { + return metrics, nil + } + } + return nil, &MockNotFoundError{Entity: "CatalogMetricsArtifact", ID: id} +} + +func (m *MockCatalogMetricsArtifactRepository) List(listOptions dbmodels.CatalogMetricsArtifactListOptions) (*mrmodels.ListWrapper[dbmodels.CatalogMetricsArtifact], error) { + return &mrmodels.ListWrapper[dbmodels.CatalogMetricsArtifact]{ + Items: m.SavedMetrics, + PageSize: int32(len(m.SavedMetrics)), + Size: int32(len(m.SavedMetrics)), + }, nil +} + +func (m *MockCatalogMetricsArtifactRepository) Save(metricsArtifact dbmodels.CatalogMetricsArtifact, parentResourceID *int32) (dbmodels.CatalogMetricsArtifact, error) { + m.NextID++ + id := m.NextID + + // Create a new metrics artifact with assigned ID + savedMetrics := &dbmodels.CatalogMetricsArtifactImpl{ + ID: &id, + TypeID: metricsArtifact.GetTypeID(), + Attributes: metricsArtifact.GetAttributes(), + Properties: metricsArtifact.GetProperties(), + CustomProperties: metricsArtifact.GetCustomProperties(), + } + + m.SavedMetrics = append(m.SavedMetrics, savedMetrics) + return savedMetrics, nil +} + +func (m *MockCatalogMetricsArtifactRepository) BatchSave(metricsArtifacts []dbmodels.CatalogMetricsArtifact, parentResourceID *int32) ([]dbmodels.CatalogMetricsArtifact, error) { + savedArtifacts := make([]dbmodels.CatalogMetricsArtifact, len(metricsArtifacts)) + + for i, metricsArtifact := range metricsArtifacts { + m.NextID++ + id := m.NextID + + // Create a new metrics artifact with assigned ID + savedMetrics := &dbmodels.CatalogMetricsArtifactImpl{ + ID: &id, + TypeID: metricsArtifact.GetTypeID(), + Attributes: metricsArtifact.GetAttributes(), + Properties: 
metricsArtifact.GetProperties(), + CustomProperties: metricsArtifact.GetCustomProperties(), + } + + m.SavedMetrics = append(m.SavedMetrics, savedMetrics) + savedArtifacts[i] = savedMetrics + } + + return savedArtifacts, nil +} + +// MockCatalogArtifactRepository mocks the CatalogArtifactRepository interface. +type MockCatalogArtifactRepository struct { + SavedArtifacts []dbmodels.CatalogArtifact + NextID int32 +} + +func (m *MockCatalogArtifactRepository) GetByID(id int32) (dbmodels.CatalogArtifact, error) { + for _, artifact := range m.SavedArtifacts { + // Check both model and metrics artifacts for the ID + if artifact.CatalogModelArtifact != nil && artifact.CatalogModelArtifact.GetID() != nil && *artifact.CatalogModelArtifact.GetID() == id { + return artifact, nil + } + if artifact.CatalogMetricsArtifact != nil && artifact.CatalogMetricsArtifact.GetID() != nil && *artifact.CatalogMetricsArtifact.GetID() == id { + return artifact, nil + } + } + return dbmodels.CatalogArtifact{}, &MockNotFoundError{Entity: "CatalogArtifact", ID: id} +} + +func (m *MockCatalogArtifactRepository) List(listOptions dbmodels.CatalogArtifactListOptions) (*mrmodels.ListWrapper[dbmodels.CatalogArtifact], error) { + return &mrmodels.ListWrapper[dbmodels.CatalogArtifact]{ + Items: m.SavedArtifacts, + PageSize: int32(len(m.SavedArtifacts)), + Size: int32(len(m.SavedArtifacts)), + }, nil +} + +func (m *MockCatalogArtifactRepository) DeleteByParentID(artifactType string, parentResourceID int32) error { + // Simple mock implementation - could be enhanced to actually filter and delete + return nil +} + +// MockNotFoundError represents an error when an entity is not found. +type MockNotFoundError struct { + Entity string + ID int32 +} + +func (e *MockNotFoundError) Error() string { + return fmt.Sprintf("%s with ID %d not found", e.Entity, e.ID) +} + +// MockPropertyOptionsRepository mocks the PropertyOptionsRepository interface. 
+type MockPropertyOptionsRepository struct { + RefreshCalls []dbmodels.PropertyOptionType + ListCalls []struct { + Type dbmodels.PropertyOptionType + TypeID int32 + } + MockOptions map[dbmodels.PropertyOptionType]map[int32][]dbmodels.PropertyOption +} + +func NewMockPropertyOptionsRepository() *MockPropertyOptionsRepository { + return &MockPropertyOptionsRepository{ + RefreshCalls: make([]dbmodels.PropertyOptionType, 0), + ListCalls: make([]struct { + Type dbmodels.PropertyOptionType + TypeID int32 + }, 0), + MockOptions: make(map[dbmodels.PropertyOptionType]map[int32][]dbmodels.PropertyOption), + } +} + +func (m *MockPropertyOptionsRepository) Refresh(t dbmodels.PropertyOptionType) error { + m.RefreshCalls = append(m.RefreshCalls, t) + return nil +} + +func (m *MockPropertyOptionsRepository) List(t dbmodels.PropertyOptionType, typeID int32) ([]dbmodels.PropertyOption, error) { + m.ListCalls = append(m.ListCalls, struct { + Type dbmodels.PropertyOptionType + TypeID int32 + }{Type: t, TypeID: typeID}) + + if typeMap, exists := m.MockOptions[t]; exists { + if options, exists := typeMap[typeID]; exists { + return options, nil + } + } + + // Return empty slice by default + return []dbmodels.PropertyOption{}, nil +} + +// SetMockOptions allows tests to set up mock data for specific types and typeIDs. 
+func (m *MockPropertyOptionsRepository) SetMockOptions(t dbmodels.PropertyOptionType, typeID int32, options []dbmodels.PropertyOption) { + if m.MockOptions[t] == nil { + m.MockOptions[t] = make(map[int32][]dbmodels.PropertyOption) + } + m.MockOptions[t][typeID] = options +} diff --git a/catalog/internal/catalog/db_catalog.go b/catalog/internal/catalog/db_catalog.go new file mode 100644 index 0000000000..14523164a4 --- /dev/null +++ b/catalog/internal/catalog/db_catalog.go @@ -0,0 +1,559 @@ +package catalog + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "sort" + "strconv" + + "github.com/kubeflow/model-registry/catalog/internal/db/models" + dbmodels "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/catalog/internal/db/service" + apimodels "github.com/kubeflow/model-registry/catalog/pkg/openapi" + "github.com/kubeflow/model-registry/internal/apiutils" + "github.com/kubeflow/model-registry/internal/converter" + mrmodels "github.com/kubeflow/model-registry/internal/db/models" + "github.com/kubeflow/model-registry/pkg/api" + "github.com/kubeflow/model-registry/pkg/openapi" +) + +type dbCatalogImpl struct { + catalogModelRepository dbmodels.CatalogModelRepository + catalogArtifactRepository dbmodels.CatalogArtifactRepository + propertyOptionsRepository dbmodels.PropertyOptionsRepository + sources *SourceCollection +} + +func NewDBCatalog(services service.Services, sources *SourceCollection) APIProvider { + return &dbCatalogImpl{ + catalogArtifactRepository: services.CatalogArtifactRepository, + catalogModelRepository: services.CatalogModelRepository, + propertyOptionsRepository: services.PropertyOptionsRepository, + sources: sources, + } +} + +func (d *dbCatalogImpl) GetModel(ctx context.Context, modelName string, sourceID string) (*apimodels.CatalogModel, error) { + modelsList, err := d.catalogModelRepository.List(dbmodels.CatalogModelListOptions{ + Name: &modelName, + SourceIDs: &[]string{sourceID}, + 
}) + if err != nil { + return nil, err + } + + if len(modelsList.Items) == 0 { + return nil, fmt.Errorf("no models found for name=%v: %w", modelName, api.ErrNotFound) + } + + if len(modelsList.Items) > 1 { + return nil, fmt.Errorf("multiple models found for name=%v: %w", modelName, api.ErrNotFound) + } + + model := mapDBModelToAPIModel(modelsList.Items[0]) + + return &model, nil +} + +func (d *dbCatalogImpl) ListModels(ctx context.Context, params ListModelsParams) (apimodels.CatalogModelList, error) { + pageSize := int32(params.PageSize) + orderBy := string(params.OrderBy) + sortOrder := string(params.SortOrder) + + // Use consistent defaults to match pagination logic + if orderBy == "" { + orderBy = mrmodels.DefaultOrderBy + } else if orderBy == "ACCURACY" { + orderBy = "artifacts.overall_average.double_value" + } + + if sortOrder == "" { + sortOrder = mrmodels.DefaultSortOrder + } + + nextPageToken := params.NextPageToken + + var queryPtr *string + if params.Query != "" { + queryPtr = ¶ms.Query + } + + sourceIDs := params.SourceIDs + if len(sourceIDs) == 0 && len(params.SourceLabels) > 0 { + sources := d.sources.ByLabel(params.SourceLabels) + if len(sources) == 0 { + // No matching sources, so no matching models. 
+ return apimodels.CatalogModelList{ + Items: make([]apimodels.CatalogModel, 0), + PageSize: pageSize, + }, nil + } + + sourceIDs = make([]string, len(sources)) + for i, source := range sources { + sourceIDs[i] = source.Id + } + } + + modelsList, err := d.catalogModelRepository.List(dbmodels.CatalogModelListOptions{ + SourceIDs: &sourceIDs, + Query: queryPtr, + Pagination: mrmodels.Pagination{ + FilterQuery: ¶ms.FilterQuery, + PageSize: &pageSize, + OrderBy: &orderBy, + SortOrder: &sortOrder, + NextPageToken: nextPageToken, + }, + }) + if err != nil { + return apimodels.CatalogModelList{}, err + } + + modelList := &apimodels.CatalogModelList{ + Items: make([]apimodels.CatalogModel, 0, len(modelsList.Items)), + } + + for _, model := range modelsList.Items { + modelList.Items = append(modelList.Items, mapDBModelToAPIModel(model)) + } + + modelList.NextPageToken = modelsList.NextPageToken + modelList.PageSize = pageSize + modelList.Size = int32(len(modelsList.Items)) + + return *modelList, nil +} + +func (d *dbCatalogImpl) GetArtifacts(ctx context.Context, modelName string, sourceID string, params ListArtifactsParams) (apimodels.CatalogArtifactList, error) { + pageSize := int32(params.PageSize) + + // Use consistent defaults to match pagination logic + orderBy := string(params.OrderBy) + if orderBy == "" { + orderBy = mrmodels.DefaultOrderBy + } + + sortOrder := string(params.SortOrder) + if sortOrder == "" { + sortOrder = mrmodels.DefaultSortOrder + } + + nextPageToken := params.NextPageToken + + m, err := d.GetModel(ctx, modelName, sourceID) + if err != nil { + if errors.Is(err, api.ErrNotFound) { + return apimodels.CatalogArtifactList{}, fmt.Errorf("invalid model name '%s' for source '%s': %w", modelName, sourceID, api.ErrBadRequest) + } + return apimodels.CatalogArtifactList{}, err + } + + parentResourceID, err := strconv.ParseInt(*m.Id, 10, 32) + if err != nil { + return apimodels.CatalogArtifactList{}, err + } + + parentResourceID32 := int32(parentResourceID) + 
+ var filterQueryPtr *string + if params.FilterQuery != "" { + filterQueryPtr = ¶ms.FilterQuery + } + + artifactsList, err := d.catalogArtifactRepository.List(dbmodels.CatalogArtifactListOptions{ + ParentResourceID: &parentResourceID32, + ArtifactTypesFilter: params.ArtifactTypesFilter, + Pagination: mrmodels.Pagination{ + FilterQuery: filterQueryPtr, + PageSize: &pageSize, + OrderBy: &orderBy, + SortOrder: &sortOrder, + NextPageToken: nextPageToken, + }, + }) + if err != nil { + return apimodels.CatalogArtifactList{}, err + } + + artifactList := &apimodels.CatalogArtifactList{ + Items: make([]apimodels.CatalogArtifact, 0), + } + + for _, artifact := range artifactsList.Items { + mappedArtifact, err := mapDBArtifactToAPIArtifact(artifact) + if err != nil { + return apimodels.CatalogArtifactList{}, err + } + artifactList.Items = append(artifactList.Items, mappedArtifact) + } + + artifactList.NextPageToken = artifactsList.NextPageToken + artifactList.PageSize = pageSize + artifactList.Size = int32(len(artifactList.Items)) + + return *artifactList, nil +} + +func (d *dbCatalogImpl) GetFilterOptions(ctx context.Context) (*apimodels.FilterOptionsList, error) { + contextProperties, err := d.propertyOptionsRepository.List(models.ContextPropertyOptionType, 0) + if err != nil { + return nil, err + } + artifactProperties, err := d.propertyOptionsRepository.List(models.ArtifactPropertyOptionType, 0) + if err != nil { + return nil, err + } + + // Build FilterOptionsList + options := make(map[string]apimodels.FilterOption, len(contextProperties)+len(artifactProperties)) + + for _, prop := range contextProperties { + // Skip internal/technical fields that shouldn't be exposed as filters + switch prop.Name { + case "source_id", "logo", "license_link": + continue + } + + option := dbPropToAPIOption(prop) + if option != nil { + options[prop.FullName("")] = *option + } + } + + for _, prop := range artifactProperties { + // Skip internal/technical fields that shouldn't be exposed as 
filters + switch prop.Name { + case "metricsType", "model_id": + continue + } + option := dbPropToAPIOption(prop) + if option != nil { + options[prop.FullName("artifacts")] = *option + } + } + + return &apimodels.FilterOptionsList{ + Filters: &options, + }, nil +} + +func dbPropToAPIOption(prop dbmodels.PropertyOption) *apimodels.FilterOption { + var option apimodels.FilterOption + + switch prop.ValueField() { + case dbmodels.StringValueField: + if len(prop.StringValue) == 0 { + return nil + } + option.Type = "string" + sort.Strings(prop.StringValue) + option.Values = anySlice(prop.StringValue) + + case dbmodels.ArrayValueField: + if len(prop.ArrayValue) == 0 { + return nil + } + option.Type = "string" + sort.Strings(prop.ArrayValue) + option.Values = anySlice(prop.ArrayValue) + + case dbmodels.IntValueField: + if prop.MinIntValue == nil || prop.MaxIntValue == nil { + return nil + } + + option.Type = "number" + option.Range = &apimodels.FilterOptionRange{ + Min: apiutils.Of(float64(*prop.MinIntValue)), + Max: apiutils.Of(float64(*prop.MaxIntValue)), + } + + case dbmodels.DoubleValueField: + if prop.MinDoubleValue == nil || prop.MaxDoubleValue == nil { + return nil + } + + option.Type = "number" + option.Range = &apimodels.FilterOptionRange{ + Min: prop.MinDoubleValue, + Max: prop.MaxDoubleValue, + } + } + + return &option +} + +func anySlice[T any](s []T) []any { + as := make([]any, len(s)) + for i, v := range s { + as[i] = v + } + return as +} + +func mapDBModelToAPIModel(m dbmodels.CatalogModel) apimodels.CatalogModel { + res := apimodels.CatalogModel{} + + id := strconv.FormatInt(int64(*m.GetID()), 10) + res.Id = &id + + if m.GetAttributes() != nil { + res.Name = *m.GetAttributes().Name + res.ExternalId = m.GetAttributes().ExternalID + + if m.GetAttributes().CreateTimeSinceEpoch != nil { + createTimeSinceEpoch := strconv.FormatInt(*m.GetAttributes().CreateTimeSinceEpoch, 10) + res.CreateTimeSinceEpoch = &createTimeSinceEpoch + } + if 
m.GetAttributes().LastUpdateTimeSinceEpoch != nil { + lastUpdateTimeSinceEpoch := strconv.FormatInt(*m.GetAttributes().LastUpdateTimeSinceEpoch, 10) + res.LastUpdateTimeSinceEpoch = &lastUpdateTimeSinceEpoch + } + } + + if m.GetProperties() != nil { + for _, prop := range *m.GetProperties() { + switch prop.Name { + case "source_id": + if prop.StringValue != nil { + res.SourceId = prop.StringValue + } + case "description": + if prop.StringValue != nil { + res.Description = prop.StringValue + } + case "library_name": + if prop.StringValue != nil { + res.LibraryName = prop.StringValue + } + case "license_link": + if prop.StringValue != nil { + res.LicenseLink = prop.StringValue + } + case "license": + if prop.StringValue != nil { + res.License = prop.StringValue + } + case "logo": + if prop.StringValue != nil { + res.Logo = prop.StringValue + } + case "maturity": + if prop.StringValue != nil { + res.Maturity = prop.StringValue + } + case "provider": + if prop.StringValue != nil { + res.Provider = prop.StringValue + } + case "readme": + if prop.StringValue != nil { + res.Readme = prop.StringValue + } + case "language": + if prop.StringValue != nil { + var languages []string + if err := json.Unmarshal([]byte(*prop.StringValue), &languages); err == nil { + res.Language = languages + } + } + case "tasks": + if prop.StringValue != nil { + var tasks []string + if err := json.Unmarshal([]byte(*prop.StringValue), &tasks); err == nil { + res.Tasks = tasks + } + } + } + } + } + + // Map custom properties + if m.GetCustomProperties() != nil && len(*m.GetCustomProperties()) > 0 { + customProps := make(map[string]apimodels.MetadataValue, len(*m.GetCustomProperties())) + for _, prop := range *m.GetCustomProperties() { + if prop.StringValue != nil { + customProps[prop.Name] = apimodels.MetadataStringValueAsMetadataValue( + apimodels.NewMetadataStringValue(*prop.StringValue, "MetadataStringValue"), + ) + } + } + if len(customProps) > 0 { + res.CustomProperties = customProps + } + } + 
+ return res +} + +func mapDBArtifactToAPIArtifact(a dbmodels.CatalogArtifact) (apimodels.CatalogArtifact, error) { + if a.CatalogModelArtifact != nil { + return mapToModelArtifact(a.CatalogModelArtifact) + } else if a.CatalogMetricsArtifact != nil { + metricsTypeValue := string(a.CatalogMetricsArtifact.GetAttributes().MetricsType) + return mapToMetricsArtifact(a.CatalogMetricsArtifact, metricsTypeValue) + } + + return apimodels.CatalogArtifact{}, fmt.Errorf("invalid catalog artifact type: %v", a) +} + +func mapToModelArtifact(a dbmodels.CatalogModelArtifact) (apimodels.CatalogArtifact, error) { + catalogModelArtifact := &apimodels.CatalogModelArtifact{ + ArtifactType: dbmodels.CatalogModelArtifactType, + } + + if a.GetID() != nil { + id := strconv.FormatInt(int64(*a.GetID()), 10) + catalogModelArtifact.Id = &id + } + + if a.GetAttributes() != nil { + attrs := a.GetAttributes() + + catalogModelArtifact.Name = attrs.Name + catalogModelArtifact.ExternalId = attrs.ExternalID + + if attrs.URI != nil { + catalogModelArtifact.Uri = *attrs.URI + } + + if attrs.CreateTimeSinceEpoch != nil { + createTime := strconv.FormatInt(*attrs.CreateTimeSinceEpoch, 10) + catalogModelArtifact.CreateTimeSinceEpoch = &createTime + } + + if attrs.LastUpdateTimeSinceEpoch != nil { + updateTime := strconv.FormatInt(*attrs.LastUpdateTimeSinceEpoch, 10) + catalogModelArtifact.LastUpdateTimeSinceEpoch = &updateTime + } + } + + if a.GetProperties() != nil { + for _, prop := range *a.GetProperties() { + switch prop.Name { + case "description": + if prop.StringValue != nil { + catalogModelArtifact.Description = prop.StringValue + } + case "artifactType": + if prop.StringValue != nil { + catalogModelArtifact.ArtifactType = *prop.StringValue + } + } + } + } + + // Map custom properties + if a.GetCustomProperties() != nil && len(*a.GetCustomProperties()) > 0 { + customPropsMap, err := converter.MapEmbedMDCustomProperties(*a.GetCustomProperties()) + if err != nil { + return 
apimodels.CatalogArtifact{}, fmt.Errorf("error mapping custom properties: %w", err) + } + + catalogCustomProps := convertMetadataValueMap(customPropsMap) + catalogModelArtifact.CustomProperties = catalogCustomProps + } + + return apimodels.CatalogArtifact{ + CatalogModelArtifact: catalogModelArtifact, + }, nil +} + +func mapToMetricsArtifact(a dbmodels.CatalogMetricsArtifact, metricsType string) (apimodels.CatalogArtifact, error) { + catalogMetricsArtifact := &apimodels.CatalogMetricsArtifact{ + ArtifactType: dbmodels.CatalogMetricsArtifactType, + MetricsType: metricsType, + } + + if a.GetID() != nil { + id := strconv.FormatInt(int64(*a.GetID()), 10) + catalogMetricsArtifact.Id = &id + } + + if a.GetAttributes() != nil { + attrs := a.GetAttributes() + + catalogMetricsArtifact.Name = attrs.Name + catalogMetricsArtifact.ExternalId = attrs.ExternalID + + if attrs.CreateTimeSinceEpoch != nil { + createTime := strconv.FormatInt(*attrs.CreateTimeSinceEpoch, 10) + catalogMetricsArtifact.CreateTimeSinceEpoch = &createTime + } + + if attrs.LastUpdateTimeSinceEpoch != nil { + updateTime := strconv.FormatInt(*attrs.LastUpdateTimeSinceEpoch, 10) + catalogMetricsArtifact.LastUpdateTimeSinceEpoch = &updateTime + } + } + + if a.GetProperties() != nil { + for _, prop := range *a.GetProperties() { + switch prop.Name { + case "description": + if prop.StringValue != nil { + catalogMetricsArtifact.Description = prop.StringValue + } + } + } + } + + // Map custom properties + if a.GetCustomProperties() != nil && len(*a.GetCustomProperties()) > 0 { + customPropsMap, err := converter.MapEmbedMDCustomProperties(*a.GetCustomProperties()) + if err != nil { + return apimodels.CatalogArtifact{}, fmt.Errorf("error mapping custom properties: %w", err) + } + + catalogCustomProps := convertMetadataValueMap(customPropsMap) + catalogMetricsArtifact.CustomProperties = catalogCustomProps + + } + + return apimodels.CatalogArtifact{ + CatalogMetricsArtifact: catalogMetricsArtifact, + }, nil +} + +// 
convertMetadataValueMap converts from pkg/openapi.MetadataValue to catalog/pkg/openapi.MetadataValue +func convertMetadataValueMap(source map[string]openapi.MetadataValue) map[string]apimodels.MetadataValue { + result := make(map[string]apimodels.MetadataValue) + + for key, value := range source { + catalogValue := apimodels.MetadataValue{} + + if value.MetadataStringValue != nil { + catalogValue.MetadataStringValue = &apimodels.MetadataStringValue{ + StringValue: value.MetadataStringValue.StringValue, + MetadataType: value.MetadataStringValue.MetadataType, + } + } else if value.MetadataIntValue != nil { + catalogValue.MetadataIntValue = &apimodels.MetadataIntValue{ + IntValue: value.MetadataIntValue.IntValue, + MetadataType: value.MetadataIntValue.MetadataType, + } + } else if value.MetadataDoubleValue != nil { + catalogValue.MetadataDoubleValue = &apimodels.MetadataDoubleValue{ + DoubleValue: value.MetadataDoubleValue.DoubleValue, + MetadataType: value.MetadataDoubleValue.MetadataType, + } + } else if value.MetadataBoolValue != nil { + catalogValue.MetadataBoolValue = &apimodels.MetadataBoolValue{ + BoolValue: value.MetadataBoolValue.BoolValue, + MetadataType: value.MetadataBoolValue.MetadataType, + } + } else if value.MetadataStructValue != nil { + catalogValue.MetadataStructValue = &apimodels.MetadataStructValue{ + StructValue: value.MetadataStructValue.StructValue, + MetadataType: value.MetadataStructValue.MetadataType, + } + } + + result[key] = catalogValue + } + + return result +} diff --git a/catalog/internal/catalog/db_catalog_filterquery_test.go b/catalog/internal/catalog/db_catalog_filterquery_test.go new file mode 100644 index 0000000000..4bf91127f7 --- /dev/null +++ b/catalog/internal/catalog/db_catalog_filterquery_test.go @@ -0,0 +1,1253 @@ +package catalog + +import ( + "regexp" + "strings" + "testing" + + "github.com/DATA-DOG/go-sqlmock" + catalogfilter "github.com/kubeflow/model-registry/catalog/internal/db/filter" + 
"github.com/kubeflow/model-registry/internal/db/filter"
+	"github.com/kubeflow/model-registry/internal/db/schema"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"gorm.io/driver/postgres"
+	"gorm.io/gorm"
+	"gorm.io/gorm/logger"
+)
+
+// SQLCapture captures SQL queries and arguments generated by GORM
+// NOTE(review): in this file the struct is returned by
+// setupMockGORMWithCapture but never populated by any helper here, and the
+// test call sites discard it with `_` — consider removing it or wiring it up.
+type SQLCapture struct {
+	Queries []string
+	Args    [][]any
+}
+
+// setupMockGORMWithCapture creates a mock PostgreSQL GORM DB that captures SQL queries
+// The returned *gorm.DB runs in DryRun mode, so queries are compiled to SQL
+// text and bind variables without ever reaching the sqlmock connection.
+func setupMockGORMWithCapture(t *testing.T) (*gorm.DB, sqlmock.Sqlmock, *SQLCapture) {
+	mockDB, mock, err := sqlmock.New(sqlmock.QueryMatcherOption(sqlmock.QueryMatcherRegexp))
+	require.NoError(t, err)
+
+	// Configure GORM with PostgreSQL and the mock database
+	dialector := postgres.New(postgres.Config{
+		Conn:                 mockDB,
+		PreferSimpleProtocol: true, // Disable prepared statements for easier testing
+	})
+
+	db, err := gorm.Open(dialector, &gorm.Config{
+		Logger: logger.Default.LogMode(logger.Silent),
+		DryRun: true, // Enable DryRun mode to capture SQL without executing
+	})
+	require.NoError(t, err)
+
+	capture := &SQLCapture{
+		Queries: make([]string, 0),
+		Args:    make([][]any, 0),
+	}
+
+	return db, mock, capture
+}
+
+// captureQuerySQL builds the query and captures the generated SQL using DryRun mode
+// It finalizes the query with Find (the terminal call that makes GORM compile
+// the statement) and returns the SQL text plus its ordered bind variables.
+// NOTE(review): the t and db parameters are unused in the body; they appear to
+// be kept for call-site symmetry — consider removing them.
+func captureQuerySQL(t *testing.T, db *gorm.DB, query *gorm.DB) (string, []any) {
+	// Execute the query in dry run mode to generate SQL
+	stmt := query.Find(&[]schema.Context{}).Statement
+	if stmt != nil {
+		return stmt.SQL.String(), stmt.Vars
+	}
+
+	return "", []any{}
+}
+
+// TestFilterQueryToSQLGeneration drives the catalog filter-query parser and
+// query builder over a table of filter expressions and asserts that the
+// PostgreSQL SQL generated in DryRun mode contains the expected fragments and
+// bind arguments (argument order is not asserted here).
+func TestFilterQueryToSQLGeneration(t *testing.T) {
+	tests := []struct {
+		name         string
+		filterQuery  string
+		expectedSQL  []string // SQL fragments that should be present
+		expectedArgs []any    // Expected query arguments (flattened)
+		shouldError  bool
+		description  string
+	}{
+		{
+			name:        "Simple name equality filter",
+			filterQuery: `name = "test-model"`,
+			expectedSQL: []string{
+				`"Context".name = $`,
+
}, + expectedArgs: []any{"test-model"}, + description: "Core property filters should query the main entity table directly with PostgreSQL quoting", + }, + { + name: "Custom property string filter", + filterQuery: `framework.string_value = "PyTorch"`, + expectedSQL: []string{ + "JOIN", + `"ContextProperty"`, + "prop_1", + `prop_1.context_id = "Context".id`, + `prop_1.name = $`, + `prop_1.string_value = $`, + }, + expectedArgs: []any{"framework", "PyTorch"}, + description: "Custom properties should require JOIN with property table using PostgreSQL syntax", + }, + { + name: "Custom property with type inference", + filterQuery: `accuracy > 0.95`, + expectedSQL: []string{ + "JOIN", + `"ContextProperty"`, + "prop_1", + `prop_1.context_id = "Context".id`, + `prop_1.name = $`, + `prop_1.double_value > $`, + }, + expectedArgs: []any{"accuracy", 0.95}, + description: "Numeric values should infer double_value type", + }, + { + name: "LIKE pattern matching", + filterQuery: `name LIKE "%model%"`, + expectedSQL: []string{ + `"Context".name LIKE $`, + }, + expectedArgs: []any{"%model%"}, + description: "LIKE operator should be preserved in SQL", + }, + { + name: "Case-insensitive ILIKE (PostgreSQL native)", + filterQuery: `name ILIKE "%MODEL%"`, + expectedSQL: []string{ + `"Context".name ILIKE $`, + }, + expectedArgs: []any{"%MODEL%"}, + description: "ILIKE should use PostgreSQL's native ILIKE operator", + }, + { + name: "IN clause with multiple values", + filterQuery: `license IN ('MIT','Apache-2.0','GPL')`, + expectedSQL: []string{ + "JOIN", + `"ContextProperty"`, + "prop_1", + `prop_1.name = $`, + `prop_1.string_value IN ($3,$4,$5)`, + }, + expectedArgs: []any{"license", "MIT", "Apache-2.0", "GPL"}, + description: "IN clause should generate proper PostgreSQL placeholder syntax", + }, + { + name: "Multiple filters with AND", + filterQuery: `provider.string_value = "HuggingFace" AND framework.string_value = "PyTorch"`, + expectedSQL: []string{ + "JOIN", + `"ContextProperty"`, + 
"prop_1", + `prop_1.name = $`, + `prop_1.string_value = $`, + "prop_2", + `prop_2.name = $`, + `prop_2.string_value = $`, + }, + expectedArgs: []any{"provider", "HuggingFace", "framework", "PyTorch"}, + description: "Multiple custom properties should create separate JOINs", + }, + { + name: "OR condition with parentheses", + filterQuery: `(framework.string_value = "PyTorch" OR framework.string_value = "TensorFlow")`, + expectedSQL: []string{ + "EXISTS", + `"ContextProperty"`, + `"ContextProperty".context_id = "Context".id`, + `"ContextProperty".name = $`, + `"ContextProperty".string_value`, + "OR", + }, + expectedArgs: []any{"framework", "PyTorch", "framework", "TensorFlow"}, + description: "OR conditions should use EXISTS subqueries with properly qualified column names", + }, + { + name: "OR condition with LIKE on array properties (bug fix test)", + filterQuery: `(language LIKE '%"en"%' OR language LIKE '%"it"%')`, + expectedSQL: []string{ + "EXISTS", + `"ContextProperty"`, + `"ContextProperty".context_id = "Context".id`, + `"ContextProperty".name = $`, + `"ContextProperty".string_value LIKE`, + "OR", + }, + expectedArgs: []any{"language", `%"en"%`, "language", `%"it"%`}, + description: "OR conditions with LIKE should generate valid SQL with table-qualified columns (fixes 'invalid field' error)", + }, + { + name: "Mixed core and custom properties", + filterQuery: `name = "test-model" AND accuracy.double_value > 0.9`, + expectedSQL: []string{ + `"Context".name = $`, + "JOIN", + `"ContextProperty"`, + "prop_1", + `prop_1.name = $`, + `prop_1.double_value > $`, + }, + expectedArgs: []any{"test-model", "accuracy", 0.9}, + description: "Mixed property types should combine direct and JOIN conditions", + }, + { + name: "Numeric comparisons", + filterQuery: `accuracy.double_value >= 0.95 AND priority.int_value < 10`, + expectedSQL: []string{ + `prop_1.double_value >= $`, + `prop_2.int_value < $`, + }, + expectedArgs: []any{"accuracy", 0.95, "priority", int64(10)}, + 
description: "Numeric comparisons should preserve operator types", + }, + { + name: "Boolean property filter", + filterQuery: `is_validated.bool_value = true`, + expectedSQL: []string{ + `prop_1.bool_value = $`, + }, + expectedArgs: []any{"is_validated", true}, + description: "Boolean values should be handled correctly", + }, + { + name: "String with quotes and special characters", + filterQuery: `description = "Model's \"best\" version (v1.0)"`, + expectedSQL: []string{ + "JOIN", + `"ContextProperty"`, + `prop_1.string_value = $`, + }, + expectedArgs: []any{"description", `Model's "best" version (v1.0)`}, + description: "Special characters should be properly escaped and parameterized", + }, + { + name: "Equals for a JSON array", + filterQuery: `language = "en"`, + expectedSQL: []string{ + `prop_1.string_value IS JSON ARRAY`, + `prop_1.string_value::jsonb ?| array[$`, + }, + expectedArgs: []any{"language", "en"}, + description: "Equals on JSON arrays should search inside the array", + }, + { + name: "Not equals for a JSON array", + filterQuery: `language != "en"`, + expectedSQL: []string{ + `prop_1.string_value IS NOT JSON ARRAY`, + `NOT prop_1.string_value::jsonb ?| array[$`, + }, + expectedArgs: []any{"language", "en"}, + description: "Not equals on JSON arrays should search inside the array", + }, + { + name: "IN for a JSON array", + filterQuery: `language IN ("en","it")`, + expectedSQL: []string{ + `prop_1.string_value IS JSON ARRAY`, + `prop_1.string_value::jsonb ?| array[$`, + }, + expectedArgs: []any{"language", "en", "it"}, + description: "IN on JSON arrays should search inside the array", + }, + { + name: "Complex OR with model, custom, and artifact properties", + filterQuery: `(name = "gpt" OR provider.string_value = "OpenAI" OR artifacts.format = "onnx")`, + expectedSQL: []string{ + `"Context".name = $`, + "OR", + "EXISTS", + `"ContextProperty"`, + `"ContextProperty".string_value = $`, + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + 
"artprop_",
+				".string_value = $",
+			},
+			expectedArgs: []any{"gpt", "provider", "OpenAI", "format", "onnx"},
+			description:  "Complex 3-way OR with core, custom, and artifact properties should all use properly qualified columns",
+		},
+		{
+			name:        "Mixed AND/OR with artifacts and custom properties",
+			filterQuery: `(language LIKE '%"en"%' OR language LIKE '%"fr"%') AND (artifacts.ttft_mean < 100 OR provider.string_value = "Meta")`,
+			expectedSQL: []string{
+				"EXISTS",
+				`"ContextProperty"`,
+				`"ContextProperty".string_value LIKE`,
+				"OR",
+				`"Attribution"`,
+				`"Artifact"`,
+				`"ArtifactProperty"`,
+				"artprop_",
+				".int_value <",
+				".double_value <",
+			},
+			description: "Complex mixed AND/OR with both custom and artifact properties should properly qualify all columns",
+		},
+		{
+			name:        "Invalid syntax should error",
+			filterQuery: `invalid syntax here`,
+			shouldError: true,
+			description: "Malformed queries should return parsing errors",
+		},
+		{
+			name:        "Unmatched parentheses should error",
+			filterQuery: `name = "test" AND (framework = "pytorch"`,
+			shouldError: true,
+			description: "Syntax errors should be caught during parsing",
+		},
+	}
+
+	// Table runner: parse -> build -> inspect the DryRun-generated SQL.
+	// Nothing is executed against the mock connection.
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			// Parse the filterQuery
+			filterExpr, err := filter.Parse(tt.filterQuery)
+
+			if tt.shouldError {
+				assert.Error(t, err, "Expected parsing error for: %s", tt.filterQuery)
+				return
+			}
+
+			require.NoError(t, err, "Failed to parse filterQuery: %s", tt.filterQuery)
+			require.NotNil(t, filterExpr, "FilterExpression should not be nil")
+
+			// Create a query builder for catalog models (Context entities)
+			queryBuilder := filter.NewQueryBuilderForRestEntity(
+				filter.RestEntityType(catalogfilter.RestEntityCatalogModel),
+				catalogfilter.NewCatalogEntityMappings(), // Use catalog-specific mappings
+			)
+
+			// Create mock PostgreSQL GORM DB to capture generated SQL
+			mockDB, sqlMock, _ := setupMockGORMWithCapture(t)
+			defer func() {
+				// Expectation failures are only logged: in DryRun mode no SQL
+				// is actually sent, so unmet expectations are not fatal here.
+				if err := sqlMock.ExpectationsWereMet(); err != nil {
+					t.Logf("SQL mock expectations not met: %v", err)
+				}
+			}()
+
+			// Build the query - this will generate SQL but not execute it
+			baseQuery := mockDB.Model(&schema.Context{}).Where("type_id = ?", 1)
+			resultQuery := queryBuilder.BuildQuery(baseQuery, filterExpr)
+
+			// Capture the generated SQL using DryRun mode
+			generatedSQL, queryArgs := captureQuerySQL(t, mockDB, resultQuery)
+			require.NotEmpty(t, generatedSQL, "Should have captured generated SQL")
+
+			t.Logf("Generated SQL: %s", generatedSQL)
+			t.Logf("Query args: %v", queryArgs)
+
+			for _, expectedFragment := range tt.expectedSQL {
+				assert.Contains(t, generatedSQL, expectedFragment,
+					"Generated SQL should contain fragment: %s\nFull SQL: %s",
+					expectedFragment, generatedSQL)
+			}
+
+			// Verify arguments if specified
+			if len(tt.expectedArgs) > 0 {
+				// Check that all expected args are present (order may vary due to JOINs)
+				for _, expectedArg := range tt.expectedArgs {
+					found := false
+					for _, actualArg := range queryArgs {
+						if actualArg == expectedArg {
+							found = true
+							break
+						}
+					}
+					assert.True(t, found, "Expected argument %v not found in actual args: %v",
+						expectedArg, queryArgs)
+				}
+			}
+
+			t.Logf("✅ %s", tt.description)
+		})
+	}
+}
+
+// TestPostgreSQLSpecificFeatures checks PostgreSQL-only SQL shapes (native
+// ILIKE, parameterized JSON strings, Unicode values) in the generated SQL.
+func TestPostgreSQLSpecificFeatures(t *testing.T) {
+	tests := []struct {
+		name        string
+		filterQuery string
+		expectedSQL []string
+		description string
+	}{
+		{
+			name:        "ILIKE with PostgreSQL native support",
+			filterQuery: `name ILIKE "%pytorch%"`,
+			expectedSQL: []string{
+				`"Context".name ILIKE $`,
+			},
+			description: "PostgreSQL should use native ILIKE instead of UPPER() workaround",
+		},
+		{
+			name:        "JSON property query (if supported)",
+			filterQuery: `metadata.string_value = "{\"version\": \"1.0\"}"`,
+			expectedSQL: []string{
+				`prop_1.string_value = $`,
+			},
+			description: "JSON strings should be properly parameterized",
+		},
+		{
+			name:        "Unicode content",
+			filterQuery: `description.string_value = "模型描述"`,
+
expectedSQL: []string{ + `prop_1.string_value = $`, + }, + description: "Unicode characters should be handled correctly", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Parse and build query + filterExpr, err := filter.Parse(tt.filterQuery) + require.NoError(t, err) + + queryBuilder := filter.NewQueryBuilderForRestEntity( + filter.RestEntityType(catalogfilter.RestEntityCatalogModel), + catalogfilter.NewCatalogEntityMappings(), + ) + + mockDB, sqlMock, _ := setupMockGORMWithCapture(t) + defer func() { + if err := sqlMock.ExpectationsWereMet(); err != nil { + t.Logf("SQL mock expectations not met: %v", err) + } + }() + + baseQuery := mockDB.Model(&schema.Context{}).Where("type_id = ?", 1) + resultQuery := queryBuilder.BuildQuery(baseQuery, filterExpr) + + // Capture the generated SQL using DryRun mode + generatedSQL, queryArgs := captureQuerySQL(t, mockDB, resultQuery) + if generatedSQL != "" { + t.Logf("Generated SQL: %s", generatedSQL) + t.Logf("Query args: %v", queryArgs) + + for _, expectedFragment := range tt.expectedSQL { + assert.Contains(t, generatedSQL, expectedFragment, + "Generated SQL should contain: %s", expectedFragment) + } + } + + t.Logf("✅ %s", tt.description) + }) + } +} + +func TestFilterQuerySQLInjectionPrevention(t *testing.T) { + tests := []struct { + name string + filterQuery string + description string + }{ + { + name: "SQL injection in string value", + filterQuery: `name = "'; DROP TABLE \"Context\"; --"`, + description: "SQL injection attempts should be safely parameterized", + }, + { + name: "SQL injection in property name", + filterQuery: `malicious'; DROP TABLE "Context"; -- = "value"`, + description: "Property names should be validated and escaped", + }, + { + name: "SQL injection via ILIKE pattern", + filterQuery: `name ILIKE "'; DROP TABLE \"Context\"; --"`, + description: "ILIKE patterns should be parameterized", + }, + { + name: "PostgreSQL-specific injection attempts", + filterQuery: `name = "'; 
CREATE FUNCTION malicious() RETURNS void AS $$ DROP TABLE \"Context\"; $$ LANGUAGE sql; --"`, + description: "PostgreSQL function injection should be prevented", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Parse the filterQuery + filterExpr, err := filter.Parse(tt.filterQuery) + + // Some injection attempts might fail at parse time, which is fine + if err != nil { + t.Logf("Query rejected at parse time (good): %v", err) + return + } + + if filterExpr == nil { + return + } + + // Create query builder + queryBuilder := filter.NewQueryBuilderForRestEntity( + filter.RestEntityType(catalogfilter.RestEntityCatalogModel), + catalogfilter.NewCatalogEntityMappings(), + ) + + // Create mock PostgreSQL GORM DB + mockDB, sqlMock, _ := setupMockGORMWithCapture(t) + defer func() { + if err := sqlMock.ExpectationsWereMet(); err != nil { + t.Logf("SQL mock expectations not met: %v", err) + } + }() + + // Build the query + baseQuery := mockDB.Model(&schema.Context{}).Where("type_id = ?", 1) + resultQuery := queryBuilder.BuildQuery(baseQuery, filterExpr) + + // Capture the generated SQL using DryRun mode + generatedSQL, _ := captureQuerySQL(t, mockDB, resultQuery) + + // Verify that dangerous SQL is not present in the generated query + if generatedSQL != "" { + + // Check for dangerous SQL patterns + dangerousPatterns := []string{ + "DROP TABLE", + "DELETE FROM", + "INSERT INTO", + "UPDATE.*SET", + "CREATE FUNCTION", + "--", + ";", + } + + for _, pattern := range dangerousPatterns { + matched, _ := regexp.MatchString(pattern, generatedSQL) + assert.False(t, matched, + "Generated SQL should not contain dangerous pattern '%s': %s", + pattern, generatedSQL) + } + + // Verify that all user input is parameterized (using $ placeholders) + // PostgreSQL uses $1, $2, etc. 
for parameters + parameterizedCount := len(regexp.MustCompile(`\$\d+`).FindAllString(generatedSQL, -1)) + assert.Greater(t, parameterizedCount, 0, + "Query should use parameterized queries with $ placeholders") + + t.Logf("✅ %s - SQL safely parameterized with %d parameters", tt.description, parameterizedCount) + t.Logf("Generated SQL: %s", generatedSQL) + } + }) + } +} + +func TestComplexFilterQueryGeneration(t *testing.T) { + tests := []struct { + name string + filterQuery string + expectedJoins int // Expected number of property table joins + expectedExists int // Expected number of EXISTS subqueries + description string + }{ + { + name: "Multiple custom properties with AND", + filterQuery: `framework.string_value = "PyTorch" AND license.string_value = "MIT" AND provider.string_value = "HuggingFace"`, + expectedJoins: 3, + description: "Each custom property should create a separate JOIN", + }, + { + name: "Complex OR with parentheses", + filterQuery: `(framework.string_value = "PyTorch" OR framework.string_value = "TensorFlow") AND license.string_value = "MIT"`, + expectedExists: 2, // OR condition uses 2 EXISTS (one for each OR branch) + expectedJoins: 1, // AND condition uses JOIN + description: "OR conditions should use EXISTS, AND conditions use JOINs", + }, + { + name: "Nested logical conditions", + filterQuery: `((accuracy.double_value > 0.9 AND f1_score.double_value > 0.85) OR (precision.double_value > 0.95)) AND framework.string_value = "PyTorch"`, + description: "Deeply nested conditions should be handled correctly", + }, + { + name: "Mixed property types and operators", + filterQuery: `name ILIKE "%model%" AND accuracy.double_value >= 0.9 AND is_public.bool_value = true AND tags.string_value IN ('nlp','computer-vision')`, + description: "Should handle core properties, custom properties, and different data types with PostgreSQL syntax", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Parse the filterQuery + filterExpr, 
err := filter.Parse(tt.filterQuery) + require.NoError(t, err, "Failed to parse filterQuery: %s", tt.filterQuery) + require.NotNil(t, filterExpr) + + // Create query builder + queryBuilder := filter.NewQueryBuilderForRestEntity( + filter.RestEntityType(catalogfilter.RestEntityCatalogModel), + catalogfilter.NewCatalogEntityMappings(), + ) + + // Create mock PostgreSQL GORM DB + mockDB, sqlMock, _ := setupMockGORMWithCapture(t) + defer func() { + if err := sqlMock.ExpectationsWereMet(); err != nil { + t.Logf("SQL mock expectations not met: %v", err) + } + }() + + // Build the query + baseQuery := mockDB.Model(&schema.Context{}).Where("type_id = ?", 1) + resultQuery := queryBuilder.BuildQuery(baseQuery, filterExpr) + + // Capture the generated SQL using DryRun mode + generatedSQL, _ := captureQuerySQL(t, mockDB, resultQuery) + + // Analyze the generated SQL + if generatedSQL != "" { + t.Logf("Generated SQL: %s", generatedSQL) + + // Count JOINs if expected + if tt.expectedJoins > 0 { + joinCount := strings.Count(generatedSQL, "JOIN") + assert.Equal(t, tt.expectedJoins, joinCount, + "Expected %d JOINs, found %d in: %s", + tt.expectedJoins, joinCount, generatedSQL) + } + + // Count EXISTS if expected + if tt.expectedExists > 0 { + existsCount := strings.Count(generatedSQL, "EXISTS") + assert.Equal(t, tt.expectedExists, existsCount, + "Expected %d EXISTS clauses, found %d in: %s", + tt.expectedExists, existsCount, generatedSQL) + } + + // Verify PostgreSQL-specific features + assert.Contains(t, generatedSQL, `"`, "Should use PostgreSQL identifier quoting") + + // Verify parameterized queries + paramCount := len(regexp.MustCompile(`\$\d+`).FindAllString(generatedSQL, -1)) + assert.Greater(t, paramCount, 0, "Should use PostgreSQL parameter placeholders") + + // Verify query is valid SQL (no syntax errors) + assert.NotContains(t, generatedSQL, "ERROR") + assert.NotContains(t, generatedSQL, "INVALID") + } + + t.Logf("✅ %s", tt.description) + }) + } +} + +func 
TestArtifactFilteringCapability(t *testing.T) { + tests := []struct { + name string + filterQuery string + expectedSQL []string + expectedArgs []any + description string + }{ + { + name: "Basic artifact property filter - numeric", + filterQuery: `artifacts.ttft_mean >= 90.0`, + expectedSQL: []string{ + "EXISTS", + `"Attribution"`, + "attr_", // Attribution alias + "ON", + ".context_id = \"Context\".id", + `"Artifact"`, + "art_", // Artifact alias + ".id = ", + ".artifact_id", + `"ArtifactProperty"`, + "artprop_", // ArtifactProperty alias + ".artifact_id = ", + ".name = $", + ".double_value >= $", + }, + expectedArgs: []any{"ttft_mean", float64(90.0)}, + description: "Should JOIN through Attribution to filter models by artifact numeric properties", + }, + { + name: "Artifact custom property - string", + filterQuery: `artifacts.format = "pytorch"`, + expectedSQL: []string{ + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + ".name = $", + ".string_value = $", + }, + expectedArgs: []any{"format", "pytorch"}, + description: "Should filter models by artifact string custom properties", + }, + { + name: "Artifact property with explicit type", + filterQuery: `artifacts.performance_score.double_value > 0.95`, + expectedSQL: []string{ + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + ".name = $", + ".double_value > $", + }, + expectedArgs: []any{"performance_score", 0.95}, + description: "Should handle explicit type specification for artifact properties", + }, + { + name: "Combined model and artifact filters", + filterQuery: `name = "llm-model" AND artifacts.performance_score > 0.95`, + expectedSQL: []string{ + `"Context".name = $`, + "AND", + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + ".name = $", + ".double_value > $", + }, + expectedArgs: []any{"llm-model", "performance_score", 0.95}, + description: "Should combine model properties and artifact properties in a single query", + }, + { + name: 
"Multiple artifact property filters", + filterQuery: `artifacts.ttft_mean >= 90.0 AND artifacts.tpot_mean <= 50.0`, + expectedSQL: []string{ + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + ".name = $", + ".double_value >= $", + ".name = $", + ".double_value <= $", + }, + // Args order: JOIN property names first, then WHERE value conditions + expectedArgs: []any{"ttft_mean", "tpot_mean", float64(90), float64(50)}, + description: "Should handle multiple artifact property filters in a single EXISTS with multiple property JOINs on the SAME artifact", + }, + { + name: "Artifact property with LIKE", + filterQuery: `artifacts.model_format LIKE "%.onnx"`, + expectedSQL: []string{ + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + ".string_value LIKE $", + }, + expectedArgs: []any{"model_format", "%.onnx"}, + description: "Should support pattern matching on artifact properties", + }, + { + name: "Artifact property with IN clause", + filterQuery: `artifacts.device IN ('cuda','rocm')`, + expectedSQL: []string{ + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + ".string_value IN ($", + }, + expectedArgs: []any{"device", "cuda", "rocm"}, + description: "Should handle IN clause for artifact properties", + }, + { + name: "Complex query with model, custom, and artifact properties", + filterQuery: `name LIKE "%llama%" AND provider.string_value = "Meta" AND artifacts.ttft_mean < 100.0`, + expectedSQL: []string{ + `"Context".name LIKE $`, + "EXISTS", + `"ContextProperty"`, + ".name = $", + ".string_value = $", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + ".name = $", + ".double_value < $", + }, + expectedArgs: []any{"%llama%", "provider", "Meta", "ttft_mean", float64(100.0)}, + description: "Should handle complex queries mixing core properties, custom properties, and artifact properties", + }, + { + name: "Artifact boolean property", + filterQuery: `artifacts.is_quantized.bool_value = true`, + 
expectedSQL: []string{ + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + ".bool_value = $", + }, + expectedArgs: []any{"is_quantized", true}, + description: "Should handle boolean artifact properties", + }, + { + name: "Artifact integer property", + filterQuery: `artifacts.batch_size.int_value = 32`, + expectedSQL: []string{ + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + ".int_value = $", + }, + expectedArgs: []any{"batch_size", int64(32)}, + description: "Should handle integer artifact properties", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Parse the filterQuery + filterExpr, err := filter.Parse(tt.filterQuery) + require.NoError(t, err, "Failed to parse filterQuery: %s", tt.filterQuery) + require.NotNil(t, filterExpr, "FilterExpression should not be nil") + + // Create a query builder for catalog models (Context entities) + queryBuilder := filter.NewQueryBuilderForRestEntity( + filter.RestEntityType(catalogfilter.RestEntityCatalogModel), + catalogfilter.NewCatalogEntityMappings(), + ) + + // Create mock PostgreSQL GORM DB to capture generated SQL + mockDB, sqlMock, _ := setupMockGORMWithCapture(t) + defer func() { + if err := sqlMock.ExpectationsWereMet(); err != nil { + t.Logf("SQL mock expectations not met: %v", err) + } + }() + + // Build the query - this will generate SQL but not execute it + baseQuery := mockDB.Model(&schema.Context{}).Where("type_id = ?", 1) + resultQuery := queryBuilder.BuildQuery(baseQuery, filterExpr) + + // Capture the generated SQL using DryRun mode + generatedSQL, queryArgs := captureQuerySQL(t, mockDB, resultQuery) + require.NotEmpty(t, generatedSQL, "Should have captured generated SQL") + + t.Logf("Generated SQL: %s", generatedSQL) + t.Logf("Query args: %v", queryArgs) + + // Verify all expected SQL fragments are present + for _, expectedFragment := range tt.expectedSQL { + assert.Contains(t, generatedSQL, expectedFragment, + "Generated SQL 
should contain fragment: %s\nFull SQL: %s", + expectedFragment, generatedSQL) + } + + // Verify arguments if specified - ORDER MATTERS for SQL placeholder mapping + if len(tt.expectedArgs) > 0 { + // Args must be in exact order to match SQL placeholders ($1, $2, etc.) + // This is critical for combined artifact filters where JOIN args come before WHERE args + require.Equal(t, len(tt.expectedArgs), len(queryArgs)-1, // -1 for type_id + "Argument count mismatch (excluding type_id)") + + // Compare args starting from index 1 (skip type_id at index 0) + for i, expectedArg := range tt.expectedArgs { + actualArg := queryArgs[i+1] // +1 to skip type_id + assert.Equal(t, expectedArg, actualArg, + "Argument at position %d should be %v but was %v. Full args: %v", + i+1, expectedArg, actualArg, queryArgs) + } + } + + t.Logf("✅ %s", tt.description) + }) + } +} + +func TestArtifactFilteringSQLInjectionPrevention(t *testing.T) { + tests := []struct { + name string + filterQuery string + description string + }{ + { + name: "SQL injection in artifact property value", + filterQuery: `artifacts.format = "'; DROP TABLE \"Artifact\"; --"`, + description: "SQL injection attempts in artifact values should be safely parameterized", + }, + { + name: "SQL injection in artifact property name", + filterQuery: `artifacts.malicious'; DROP TABLE "Artifact"; -- = "value"`, + description: "Malicious artifact property names should be validated", + }, + { + name: "Complex injection via artifact filter", + filterQuery: `artifacts.test = "value' OR '1'='1"`, + description: "Classic OR injection should be prevented in artifact filters", + }, + { + name: "PostgreSQL-specific injection in artifact query", + filterQuery: `artifacts.property = "'; CREATE FUNCTION attack() RETURNS void AS $$ DROP TABLE \"Context\"; $$ LANGUAGE sql; --"`, + description: "PostgreSQL function injection should be prevented in artifact queries", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // 
Parse the filterQuery + filterExpr, err := filter.Parse(tt.filterQuery) + + // Some injection attempts might fail at parse time, which is fine + if err != nil { + t.Logf("Query rejected at parse time (good): %v", err) + return + } + + if filterExpr == nil { + return + } + + // Create query builder + queryBuilder := filter.NewQueryBuilderForRestEntity( + filter.RestEntityType(catalogfilter.RestEntityCatalogModel), + catalogfilter.NewCatalogEntityMappings(), + ) + + // Create mock PostgreSQL GORM DB + mockDB, sqlMock, _ := setupMockGORMWithCapture(t) + defer func() { + if err := sqlMock.ExpectationsWereMet(); err != nil { + t.Logf("SQL mock expectations not met: %v", err) + } + }() + + // Build the query + baseQuery := mockDB.Model(&schema.Context{}).Where("type_id = ?", 1) + resultQuery := queryBuilder.BuildQuery(baseQuery, filterExpr) + + // Capture the generated SQL using DryRun mode + generatedSQL, _ := captureQuerySQL(t, mockDB, resultQuery) + + // Verify that dangerous SQL is not present in the generated query + if generatedSQL != "" { + t.Logf("Generated SQL: %s", generatedSQL) + + // Check for dangerous SQL patterns + dangerousPatterns := []string{ + "DROP TABLE", + "DELETE FROM", + "INSERT INTO", + "UPDATE.*SET", + "CREATE FUNCTION", + "--", + ";", + } + + for _, pattern := range dangerousPatterns { + matched, _ := regexp.MatchString(pattern, generatedSQL) + assert.False(t, matched, + "Generated SQL should not contain dangerous pattern '%s': %s", + pattern, generatedSQL) + } + + // Verify that all user input is parameterized (using $ placeholders) + parameterizedCount := len(regexp.MustCompile(`\$\d+`).FindAllString(generatedSQL, -1)) + assert.Greater(t, parameterizedCount, 0, + "Query should use parameterized queries with $ placeholders") + + t.Logf("✅ %s - SQL safely parameterized with %d parameters", tt.description, parameterizedCount) + } + }) + } +} + +func TestArtifactFilteringEdgeCases(t *testing.T) { + tests := []struct { + name string + filterQuery 
string + expectedSQL []string + shouldError bool + description string + }{ + { + name: "Artifact property with special characters in name", + filterQuery: "`artifacts.custom-metric` > 100", + expectedSQL: []string{ + "EXISTS", + `"ArtifactProperty"`, + ".name = $", + }, + description: "Should handle escaped artifact property names with special characters", + }, + { + name: "Nested model and artifact conditions with parentheses", + filterQuery: `(name = "model-a" OR name = "model-b") AND artifacts.score > 0.9`, + expectedSQL: []string{ + `"Context".name = $`, + "OR", + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + }, + description: "Should handle complex nested conditions with both model and artifact filters", + }, + { + name: "Multiple artifact properties with OR condition", + filterQuery: `artifacts.format = "onnx" OR artifacts.format = "pytorch"`, + expectedSQL: []string{ + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + "artprop_", + ".string_value = $", + "OR", + }, + description: "OR conditions on artifact properties should use EXISTS subqueries with aliased columns", + }, + { + name: "OR condition combining model and artifact properties", + filterQuery: `(name = "model-a" OR artifacts.score > 0.9)`, + expectedSQL: []string{ + `"Context".name = $`, + "OR", + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + "artprop_", + ".double_value > $", + }, + description: "OR combining core properties and artifact properties should generate proper EXISTS subqueries", + }, + { + name: "Complex OR with custom properties and artifact properties", + filterQuery: `(provider.string_value = "Meta" OR artifacts.format = "onnx") AND language LIKE '%"en"%'`, + expectedSQL: []string{ + "EXISTS", + `"ContextProperty"`, + `"ContextProperty".string_value = $`, + "OR", + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + "artprop_", + ".string_value = $", + "JOIN", + `"ContextProperty"`, + }, + 
description: "Complex OR with custom and artifact properties should properly qualify all columns with aliases", + }, + { + name: "Nested OR conditions with artifacts", + filterQuery: `((artifacts.ttft_mean < 100 OR artifacts.tpot_mean < 50) AND name LIKE "%llama%")`, + expectedSQL: []string{ + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + "artprop_", + ".int_value <", + ".double_value <", + "OR", + `"Context".name LIKE $`, + }, + description: "Nested OR conditions with artifact properties should work correctly", + }, + { + name: "OR condition with artifact LIKE patterns", + filterQuery: `(artifacts.model_format LIKE "%.onnx" OR artifacts.model_format LIKE "%.pytorch")`, + expectedSQL: []string{ + "EXISTS", + `"Attribution"`, + `"Artifact"`, + `"ArtifactProperty"`, + "artprop_", + ".string_value LIKE $", + "OR", + }, + description: "OR with artifact LIKE patterns should properly qualify columns with aliases", + }, + { + name: "Artifact property comparison operators", + filterQuery: `artifacts.memory_mb >= 1024 AND artifacts.latency_ms <= 100`, + expectedSQL: []string{ + "EXISTS", + `"ArtifactProperty"`, + // Integer literals now query both int_value and double_value + ".int_value >= $", + "OR", + ".double_value >= $", + ".int_value <= $", + ".double_value <= $", + }, + description: "Should handle various comparison operators on artifact properties (queries both int and double columns for integer literals)", + }, + { + name: "Artifact property with NULL-like string", + filterQuery: `artifacts.status = "null"`, + expectedSQL: []string{ + "EXISTS", + `"ArtifactProperty"`, + ".string_value = $", + }, + description: "Should handle string 'null' as a regular value, not SQL NULL", + }, + { + name: "Very long artifact property name", + filterQuery: `artifacts.this_is_a_very_long_property_name_that_should_still_work_correctly = "test"`, + expectedSQL: []string{ + "EXISTS", + `"ArtifactProperty"`, + ".name = $", + }, + description: "Should handle long 
artifact property names", + }, + { + name: "Artifact property with Unicode value", + filterQuery: `artifacts.description = "模型描述"`, + expectedSQL: []string{ + "EXISTS", + `"ArtifactProperty"`, + ".string_value = $", + }, + description: "Should handle Unicode characters in artifact property values", + }, + { + name: "Mixed case-sensitive and case-insensitive artifact queries", + filterQuery: `artifacts.format = "ONNX" AND artifacts.provider ILIKE "%hugging%"`, + expectedSQL: []string{ + "EXISTS", + `"ArtifactProperty"`, + ".string_value = $", + "UPPER(", + ".string_value) LIKE UPPER(", + }, + description: "Should handle both exact and case-insensitive matching on artifact properties (ILIKE uses UPPER for cross-DB compatibility)", + }, + { + name: "Bug fix: multiple artifact filters must match SAME artifact", + filterQuery: `artifacts.hardware_type LIKE "H200" AND artifacts.ttft_p95 < 50`, + expectedSQL: []string{ + "EXISTS", + `"Attribution"`, + `"Artifact"`, + // Both property JOINs should reference the same artifact (art_X) + "artprop_", + ".artifact_id = art_", + // Both conditions should be in the WHERE clause + ".string_value LIKE $", + ".double_value < $", + }, + description: "Multiple artifact property filters with AND should generate a SINGLE EXISTS with multiple property JOINs ensuring BOTH conditions match the SAME artifact (not different artifacts)", + }, + { + name: "Integer literal queries both int_value and double_value", + filterQuery: `artifacts.count = 100`, + expectedSQL: []string{ + "EXISTS", + `"ArtifactProperty"`, + // Integer literals query BOTH columns with OR + ".int_value = $", + "OR", + ".double_value = $", + }, + description: "Integer literals without explicit type should query both int_value and double_value columns to prevent silent query failures", + }, + { + name: "Explicit int_value type only queries int column", + filterQuery: `artifacts.count.int_value = 100`, + expectedSQL: []string{ + "EXISTS", + `"ArtifactProperty"`, + 
".int_value = $", + }, + description: "Explicit .int_value type specification should only query int_value column", + }, + { + name: "Explicit double_value type only queries double column", + filterQuery: `artifacts.score.double_value = 95.5`, + expectedSQL: []string{ + "EXISTS", + `"ArtifactProperty"`, + ".double_value = $", + }, + description: "Explicit .double_value type specification should only query double_value column", + }, + { + name: "Float literal only queries double column", + filterQuery: `artifacts.score = 95.5`, + expectedSQL: []string{ + "EXISTS", + `"ArtifactProperty"`, + ".double_value = $", + }, + description: "Float literals should only query double_value column (no ambiguity)", + }, + { + name: "Range query with integer literals", + filterQuery: `artifacts.priority >= 1 AND artifacts.priority <= 5`, + expectedSQL: []string{ + "EXISTS", + `"ArtifactProperty"`, + // Both conditions should have OR clauses + ".int_value >= $", + "OR", + ".double_value >= $", + ".int_value <= $", + ".double_value <= $", + }, + description: "Range queries with integer literals should check both columns to find values stored in either format", + }, + { + name: "Empty Artifact Property", + filterQuery: `artifacts.`, + shouldError: true, + description: "Should error on empty artifact property queries", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Parse the filterQuery + filterExpr, err := filter.Parse(tt.filterQuery) + + if tt.shouldError { + assert.Error(t, err, "Expected parsing error for: %s", tt.filterQuery) + return + } + + require.NoError(t, err, "Failed to parse filterQuery: %s", tt.filterQuery) + require.NotNil(t, filterExpr, "FilterExpression should not be nil") + + // Create a query builder for catalog models + queryBuilder := filter.NewQueryBuilderForRestEntity( + filter.RestEntityType(catalogfilter.RestEntityCatalogModel), + catalogfilter.NewCatalogEntityMappings(), + ) + + // Create mock PostgreSQL GORM DB + mockDB, 
sqlMock, _ := setupMockGORMWithCapture(t) + defer func() { + if err := sqlMock.ExpectationsWereMet(); err != nil { + t.Logf("SQL mock expectations not met: %v", err) + } + }() + + // Build the query + baseQuery := mockDB.Model(&schema.Context{}).Where("type_id = ?", 1) + resultQuery := queryBuilder.BuildQuery(baseQuery, filterExpr) + + // Capture the generated SQL + generatedSQL, queryArgs := captureQuerySQL(t, mockDB, resultQuery) + require.NotEmpty(t, generatedSQL, "Should have captured generated SQL") + + t.Logf("Generated SQL: %s", generatedSQL) + t.Logf("Query args: %v", queryArgs) + + // Verify expected SQL fragments + for _, expectedFragment := range tt.expectedSQL { + assert.Contains(t, generatedSQL, expectedFragment, + "Generated SQL should contain fragment: %s\nFull SQL: %s", + expectedFragment, generatedSQL) + } + + t.Logf("✅ %s", tt.description) + }) + } +} diff --git a/catalog/internal/catalog/db_catalog_test.go b/catalog/internal/catalog/db_catalog_test.go new file mode 100644 index 0000000000..6f6c47190b --- /dev/null +++ b/catalog/internal/catalog/db_catalog_test.go @@ -0,0 +1,1191 @@ +package catalog + +import ( + "context" + "fmt" + "os" + "strconv" + "testing" + + "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/catalog/internal/db/service" + model "github.com/kubeflow/model-registry/catalog/pkg/openapi" + "github.com/kubeflow/model-registry/internal/apiutils" + mr_models "github.com/kubeflow/model-registry/internal/db/models" + "github.com/kubeflow/model-registry/internal/db/schema" + "github.com/kubeflow/model-registry/internal/testutils" + "github.com/kubeflow/model-registry/pkg/api" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gorm.io/gorm" +) + +func TestMain(m *testing.M) { + os.Exit(testutils.TestMainPostgresHelper(m)) +} + +func TestDBCatalog(t *testing.T) { + // Setup test database + sharedDB, cleanup := testutils.SetupPostgresWithMigrations(t, 
service.DatastoreSpec()) + defer cleanup() + + // Get type IDs + catalogModelTypeID := getCatalogModelTypeIDForDBTest(t, sharedDB) + modelArtifactTypeID := getCatalogModelArtifactTypeIDForDBTest(t, sharedDB) + metricsArtifactTypeID := getCatalogMetricsArtifactTypeIDForDBTest(t, sharedDB) + + // Create repositories + catalogModelRepo := service.NewCatalogModelRepository(sharedDB, catalogModelTypeID) + catalogArtifactRepo := service.NewCatalogArtifactRepository(sharedDB, map[string]int32{ + service.CatalogModelArtifactTypeName: modelArtifactTypeID, + service.CatalogMetricsArtifactTypeName: metricsArtifactTypeID, + }) + modelArtifactRepo := service.NewCatalogModelArtifactRepository(sharedDB, modelArtifactTypeID) + metricsArtifactRepo := service.NewCatalogMetricsArtifactRepository(sharedDB, metricsArtifactTypeID) + + svcs := service.NewServices( + catalogModelRepo, + catalogArtifactRepo, + modelArtifactRepo, + metricsArtifactRepo, + service.NewPropertyOptionsRepository(sharedDB), + ) + + // Create DB catalog instance + dbCatalog := NewDBCatalog(svcs, nil) + ctx := context.Background() + + t.Run("TestNewDBCatalog", func(t *testing.T) { + catalog := NewDBCatalog(svcs, nil) + require.NotNil(t, catalog) + + // Verify it implements the interface + var _ APIProvider = catalog + }) + + t.Run("TestGetModel_Success", func(t *testing.T) { + // Create test model + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-get-model"), + ExternalID: apiutils.Of("test-get-model-ext"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("test-source-id")}, + {Name: "description", StringValue: apiutils.Of("Test model description")}, + }, + } + + savedModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Test GetModel + retrievedModel, err := dbCatalog.GetModel(ctx, "test-get-model", "test-source-id") + 
require.NoError(t, err) + require.NotNil(t, retrievedModel) + + assert.Equal(t, "test-get-model", retrievedModel.Name) + assert.Equal(t, strconv.FormatInt(int64(*savedModel.GetID()), 10), *retrievedModel.Id) + assert.Equal(t, "test-get-model-ext", *retrievedModel.ExternalId) + assert.Equal(t, "test-source-id", *retrievedModel.SourceId) + assert.Equal(t, "Test model description", *retrievedModel.Description) + }) + + t.Run("TestGetModel_NotFound", func(t *testing.T) { + // Test with non-existent model + _, err := dbCatalog.GetModel(ctx, "non-existent-model", "test-source-id") + require.Error(t, err) + assert.Contains(t, err.Error(), "no models found") + assert.ErrorIs(t, err, api.ErrNotFound) + }) + + t.Run("TestListModels_Success", func(t *testing.T) { + // Create test models + sourceIDs := []string{"list-test-source"} + + model1 := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("list-test-model-1"), + ExternalID: apiutils.Of("list-test-1"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("list-test-source")}, + {Name: "description", StringValue: apiutils.Of("First test model")}, + }, + } + + model2 := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("list-test-model-2"), + ExternalID: apiutils.Of("list-test-2"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("list-test-source")}, + {Name: "description", StringValue: apiutils.Of("Second test model")}, + }, + } + + _, err := catalogModelRepo.Save(model1) + require.NoError(t, err) + _, err = catalogModelRepo.Save(model2) + require.NoError(t, err) + + // Test ListModels + params := ListModelsParams{ + SourceIDs: sourceIDs, + PageSize: 10, + OrderBy: model.ORDERBYFIELD_CREATE_TIME, + SortOrder: model.SORTORDER_ASC, + NextPageToken: apiutils.Of(""), 
+ } + + result, err := dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + + assert.GreaterOrEqual(t, len(result.Items), 2, "Should return at least 2 models") + assert.Equal(t, int32(10), result.PageSize) + assert.GreaterOrEqual(t, result.Size, int32(2)) + + // Verify models are properly mapped + modelNames := make(map[string]bool) + for _, model := range result.Items { + modelNames[model.Name] = true + // Verify required fields are present + assert.NotEmpty(t, *model.Id) + assert.NotEmpty(t, *model.SourceId) + } + + // Should contain our test models + foundCount := 0 + if modelNames["list-test-model-1"] { + foundCount++ + } + if modelNames["list-test-model-2"] { + foundCount++ + } + assert.GreaterOrEqual(t, foundCount, 2, "Should find our test models") + }) + + t.Run("TestListModels_WithPagination", func(t *testing.T) { + // Test pagination + sourceIDs := []string{"pagination-test-source"} + + // Create multiple models + for i := 0; i < 5; i++ { + model := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of(fmt.Sprintf("pagination-test-model-%d", i)), + ExternalID: apiutils.Of(fmt.Sprintf("pagination-test-%d", i)), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("pagination-test-source")}, + }, + } + _, err := catalogModelRepo.Save(model) + require.NoError(t, err) + } + + params := ListModelsParams{ + SourceIDs: sourceIDs, + PageSize: 3, + OrderBy: model.ORDERBYFIELD_CREATE_TIME, + SortOrder: model.SORTORDER_ASC, + NextPageToken: apiutils.Of(""), + } + + result, err := dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + + assert.LessOrEqual(t, len(result.Items), 3, "Should respect page size") + assert.Equal(t, int32(3), result.PageSize) + }) + + t.Run("TestListModels_WithQuery", func(t *testing.T) { + // Create test models with different properties for query filtering + sourceIDs := []string{"query-test-source"} + 
+ model1 := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("BERT-base-model"), + ExternalID: apiutils.Of("bert-base-1"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("query-test-source")}, + {Name: "description", StringValue: apiutils.Of("BERT base model for NLP tasks")}, + {Name: "provider", StringValue: apiutils.Of("Hugging Face")}, + {Name: "tasks", StringValue: apiutils.Of(`["text-classification", "question-answering"]`)}, + }, + } + + model2 := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("GPT-3.5-turbo"), + ExternalID: apiutils.Of("gpt-35-turbo-1"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("query-test-source")}, + {Name: "description", StringValue: apiutils.Of("OpenAI GPT model for text generation")}, + {Name: "provider", StringValue: apiutils.Of("OpenAI")}, + {Name: "tasks", StringValue: apiutils.Of(`["text-generation", "conversational"]`)}, + }, + } + + model3 := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("ResNet-50-image"), + ExternalID: apiutils.Of("resnet-50-1"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("query-test-source")}, + {Name: "description", StringValue: apiutils.Of("Deep learning model for image classification")}, + {Name: "provider", StringValue: apiutils.Of("PyTorch")}, + {Name: "tasks", StringValue: apiutils.Of(`["image-classification", "computer-vision"]`)}, + }, + } + + _, err := catalogModelRepo.Save(model1) + require.NoError(t, err) + _, err = catalogModelRepo.Save(model2) + require.NoError(t, err) + _, err = catalogModelRepo.Save(model3) + require.NoError(t, err) + + // Test query filtering by name + params 
:= ListModelsParams{ + Query: "BERT", + SourceIDs: sourceIDs, + PageSize: 10, + OrderBy: model.ORDERBYFIELD_CREATE_TIME, + SortOrder: model.SORTORDER_ASC, + NextPageToken: apiutils.Of(""), + } + + result, err := dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + + assert.Equal(t, int32(1), result.Size, "Should return 1 model matching 'BERT'") + assert.Contains(t, result.Items[0].Name, "BERT", "Should contain BERT model") + + // Test query filtering by description + params.Query = "NLP" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + + assert.Equal(t, int32(1), result.Size, "Should return 1 model with 'NLP' in description") + assert.Contains(t, result.Items[0].Name, "BERT", "Should contain BERT model") + + // Test query filtering by provider + params.Query = "OpenAI" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + + assert.Equal(t, int32(1), result.Size, "Should return 1 model from 'OpenAI' provider") + assert.Contains(t, result.Items[0].Name, "GPT", "Should contain GPT model") + + // Test query filtering that should match multiple models + params.Query = "model" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + + assert.GreaterOrEqual(t, result.Size, int32(3), "Should return at least 3 models matching 'model'") + + // Test query that should return no results + params.Query = "nonexistent" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + + assert.Equal(t, int32(0), result.Size, "Should return 0 models for nonexistent query") + + // Test query filtering by tasks - text-classification + params.Query = "text-classification" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + + assert.Equal(t, int32(1), result.Size, "Should return 1 model with 'text-classification' task") + assert.Contains(t, result.Items[0].Name, "BERT", "Should contain BERT model") + + // Test query filtering by tasks - image-classification + 
params.Query = "image-classification" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + + assert.Equal(t, int32(1), result.Size, "Should return 1 model with 'image-classification' task") + assert.Contains(t, result.Items[0].Name, "ResNet", "Should contain ResNet model") + + // Test query filtering by tasks - conversational + params.Query = "conversational" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + + assert.Equal(t, int32(1), result.Size, "Should return 1 model with 'conversational' task") + assert.Contains(t, result.Items[0].Name, "GPT", "Should contain GPT model") + + // Test query filtering by tasks - partial match on "classification" + params.Query = "classification" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + + assert.Equal(t, int32(2), result.Size, "Should return 2 models with 'classification' in their tasks") + + // Test query filtering by tasks - computer-vision + params.Query = "computer-vision" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + + assert.Equal(t, int32(1), result.Size, "Should return 1 model with 'computer-vision' task") + assert.Contains(t, result.Items[0].Name, "ResNet", "Should contain ResNet model") + }) + + t.Run("TestListModels_FilterQuery", func(t *testing.T) { + // Create test models with diverse properties for filterQuery testing + sourceIDs := []string{"filterquery-test-source"} + + model1 := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("TensorFlow-ResNet50"), + ExternalID: apiutils.Of("tf-resnet50-1"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("filterquery-test-source")}, + {Name: "description", StringValue: apiutils.Of("Deep learning model for image classification using TensorFlow")}, + {Name: "provider", StringValue: apiutils.Of("Google")}, + {Name: "framework", 
StringValue: apiutils.Of("TensorFlow")}, + {Name: "tasks", StringValue: apiutils.Of(`["image-classification", "computer-vision"]`)}, + {Name: "accuracy", StringValue: apiutils.Of("0.95")}, + }, + } + + model2 := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("PyTorch-BERT"), + ExternalID: apiutils.Of("pt-bert-1"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("filterquery-test-source")}, + {Name: "description", StringValue: apiutils.Of("BERT model for natural language processing using PyTorch")}, + {Name: "provider", StringValue: apiutils.Of("Hugging Face")}, + {Name: "framework", StringValue: apiutils.Of("PyTorch")}, + {Name: "tasks", StringValue: apiutils.Of(`["text-classification", "question-answering"]`)}, + {Name: "accuracy", StringValue: apiutils.Of("0.92")}, + }, + } + + model3 := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("Scikit-learn-LogisticRegression"), + ExternalID: apiutils.Of("sk-lr-1"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("filterquery-test-source")}, + {Name: "description", StringValue: apiutils.Of("Traditional machine learning model for classification")}, + {Name: "provider", StringValue: apiutils.Of("Scikit-learn")}, + {Name: "framework", StringValue: apiutils.Of("Scikit-learn")}, + {Name: "tasks", StringValue: apiutils.Of(`["classification", "regression"]`)}, + {Name: "accuracy", StringValue: apiutils.Of("0.88")}, + }, + } + + _, err := catalogModelRepo.Save(model1) + require.NoError(t, err) + _, err = catalogModelRepo.Save(model2) + require.NoError(t, err) + _, err = catalogModelRepo.Save(model3) + require.NoError(t, err) + + // Test: Basic name filtering with exact match + params := ListModelsParams{ + FilterQuery: "name = \"TensorFlow-ResNet50\"", + 
SourceIDs: sourceIDs, + PageSize: 10, + OrderBy: model.ORDERBYFIELD_NAME, + SortOrder: model.SORTORDER_ASC, + NextPageToken: apiutils.Of(""), + } + + result, err := dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + assert.Equal(t, int32(1), result.Size, "Should return 1 model with exact name match") + assert.Equal(t, "TensorFlow-ResNet50", result.Items[0].Name) + + // Test: LIKE pattern matching + params.FilterQuery = "name LIKE \"%Tensor%\"" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + assert.Equal(t, int32(1), result.Size, "Should return 1 model with LIKE pattern match") + assert.Contains(t, result.Items[0].Name, "Tensor") + + // Test: case-insensitive pattern matching with ILIKE + params.FilterQuery = "name ILIKE \"%tensor%\"" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + assert.Equal(t, int32(1), result.Size, "Should return 1 model with case-insensitive ILIKE match") + assert.Contains(t, result.Items[0].Name, "Tensor") + + // Test: OR logic + params.FilterQuery = "name = \"TensorFlow-ResNet50\" OR name = \"PyTorch-BERT\"" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + assert.Equal(t, int32(2), result.Size, "Should return 2 models with OR logic") + + // Verify we got the expected models + modelNames := make(map[string]bool) + for _, item := range result.Items { + modelNames[item.Name] = true + } + assert.True(t, modelNames["TensorFlow-ResNet50"], "Should contain TensorFlow model") + assert.True(t, modelNames["PyTorch-BERT"], "Should contain PyTorch model") + + // Test: AND logic + params.FilterQuery = "name LIKE \"%Tensor%\" AND name LIKE \"%ResNet%\"" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + assert.Equal(t, int32(1), result.Size, "Should return 1 model with AND logic") + assert.Equal(t, "TensorFlow-ResNet50", result.Items[0].Name) + + // Test: Custom property filtering + params.FilterQuery = "framework.string_value = 
\"PyTorch\"" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + assert.Equal(t, int32(1), result.Size, "Should return 1 model with PyTorch framework") + assert.Equal(t, "PyTorch-BERT", result.Items[0].Name) + + // Test: Custom property filtering with LIKE + params.FilterQuery = "provider.string_value LIKE \"%Google%\"" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + assert.Equal(t, int32(1), result.Size, "Should return 1 model with Google provider") + assert.Equal(t, "TensorFlow-ResNet50", result.Items[0].Name) + + // Test: Numeric comparison + params.FilterQuery = "accuracy.string_value > \"0.90\"" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + assert.Equal(t, int32(2), result.Size, "Should return 2 models with accuracy > 0.90") + + // Verify we got the expected models (TensorFlow and PyTorch) + modelNames = make(map[string]bool) + for _, item := range result.Items { + modelNames[item.Name] = true + } + assert.True(t, modelNames["TensorFlow-ResNet50"], "Should contain TensorFlow model") + assert.True(t, modelNames["PyTorch-BERT"], "Should contain PyTorch model") + + // Test: Complex query with multiple conditions + params.FilterQuery = "(framework.string_value = \"TensorFlow\" OR framework.string_value = \"PyTorch\") AND accuracy.string_value > \"0.90\"" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + assert.Equal(t, int32(2), result.Size, "Should return 2 models with complex query") + + // Test: No matches + params.FilterQuery = "name = \"NonExistentModel\"" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + assert.Equal(t, int32(0), result.Size, "Should return 0 models for non-existent name") + + // Test: Empty filterQuery should return all models + params.FilterQuery = "" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + assert.Equal(t, int32(3), result.Size, "Should return all 3 models with 
empty filterQuery") + + // Test: Combined with regular query parameter + params.Query = "BERT" + params.FilterQuery = "framework.string_value = \"PyTorch\"" + result, err = dbCatalog.ListModels(ctx, params) + require.NoError(t, err) + assert.Equal(t, int32(1), result.Size, "Should return 1 model matching both query and filterQuery") + assert.Equal(t, "PyTorch-BERT", result.Items[0].Name) + + // Test: Invalid filterQuery syntax should return error + params.Query = "" + params.FilterQuery = "invalid syntax here" + _, err = dbCatalog.ListModels(ctx, params) + require.Error(t, err, "Should return error for invalid filterQuery syntax") + assert.Contains(t, err.Error(), "invalid filter query", "Error should mention invalid filter query") + }) + + t.Run("TestGetArtifacts_Success", func(t *testing.T) { + // Create test model + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("artifact-test-model"), + ExternalID: apiutils.Of("artifact-test-model-ext"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("artifact-test-source")}, + }, + } + + savedModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create test artifacts + modelArtifact := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(modelArtifactTypeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("test-model-artifact"), + ExternalID: apiutils.Of("test-model-artifact-ext"), + URI: apiutils.Of("s3://test/model.bin"), + ArtifactType: apiutils.Of(models.CatalogModelArtifactType), + }, + } + + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("test-metrics-artifact"), + ExternalID: apiutils.Of("test-metrics-artifact-ext"), + MetricsType: models.MetricsTypeAccuracy, + ArtifactType: 
apiutils.Of("metrics-artifact"), + }, + } + + savedModelArt, err := modelArtifactRepo.Save(modelArtifact, savedModel.GetID()) + require.NoError(t, err) + savedMetricsArt, err := metricsArtifactRepo.Save(metricsArtifact, savedModel.GetID()) + require.NoError(t, err) + + // Test GetArtifacts + params := ListArtifactsParams{ + PageSize: 10, + OrderBy: string(model.ORDERBYFIELD_CREATE_TIME), + SortOrder: model.SORTORDER_ASC, + NextPageToken: apiutils.Of(""), + } + + result, err := dbCatalog.GetArtifacts(ctx, "artifact-test-model", "artifact-test-source", params) + require.NoError(t, err) + + assert.GreaterOrEqual(t, len(result.Items), 2, "Should return at least 2 artifacts") + assert.Equal(t, int32(10), result.PageSize) + + // Verify both types of artifacts are returned + var modelArtifactFound, metricsArtifactFound bool + artifactIDs := make(map[string]bool) + + for _, artifact := range result.Items { + if artifact.CatalogModelArtifact != nil { + modelArtifactFound = true + artifactIDs[*artifact.CatalogModelArtifact.Id] = true + assert.Equal(t, "model-artifact", artifact.CatalogModelArtifact.ArtifactType) + } + if artifact.CatalogMetricsArtifact != nil { + metricsArtifactFound = true + artifactIDs[*artifact.CatalogMetricsArtifact.Id] = true + assert.Equal(t, "metrics-artifact", artifact.CatalogMetricsArtifact.ArtifactType) + } + } + + assert.True(t, modelArtifactFound, "Should find model artifact") + assert.True(t, metricsArtifactFound, "Should find metrics artifact") + + // Verify our specific artifacts are in the results + modelArtifactIDStr := strconv.FormatInt(int64(*savedModelArt.GetID()), 10) + metricsArtifactIDStr := strconv.FormatInt(int64(*savedMetricsArt.GetID()), 10) + assert.True(t, artifactIDs[modelArtifactIDStr], "Should contain our model artifact") + assert.True(t, artifactIDs[metricsArtifactIDStr], "Should contain our metrics artifact") + }) + + t.Run("TestGetArtifacts_ModelNotFound", func(t *testing.T) { + // Test with non-existent model + params := 
ListArtifactsParams{ + PageSize: 10, + OrderBy: string(model.ORDERBYFIELD_CREATE_TIME), + SortOrder: model.SORTORDER_ASC, + NextPageToken: apiutils.Of(""), + } + + _, err := dbCatalog.GetArtifacts(ctx, "non-existent-model", "test-source", params) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid model name") + }) + + t.Run("TestGetArtifacts_WithCustomProperties", func(t *testing.T) { + // Create model + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("custom-props-model"), + ExternalID: apiutils.Of("custom-props-model-ext"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("custom-props-source")}, + }, + } + + savedModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create artifact with custom properties + customProps := []mr_models.Properties{ + {Name: "custom_prop_1", StringValue: apiutils.Of("value_1")}, + {Name: "custom_prop_2", StringValue: apiutils.Of("value_2")}, + } + + artifactWithProps := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(modelArtifactTypeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("artifact-with-props"), + ExternalID: apiutils.Of("artifact-with-props-ext"), + URI: apiutils.Of("s3://test/props.bin"), + ArtifactType: apiutils.Of(models.CatalogModelArtifactType), + }, + CustomProperties: &customProps, + } + + _, err = modelArtifactRepo.Save(artifactWithProps, savedModel.GetID()) + require.NoError(t, err) + + // Get artifacts and verify custom properties + params := ListArtifactsParams{ + PageSize: 10, + OrderBy: string(model.ORDERBYFIELD_CREATE_TIME), + SortOrder: model.SORTORDER_ASC, + NextPageToken: apiutils.Of(""), + } + + result, err := dbCatalog.GetArtifacts(ctx, "custom-props-model", "custom-props-source", params) + require.NoError(t, err) + + // Find our artifact and check custom properties + 
found := false + for _, artifact := range result.Items { + if artifact.CatalogModelArtifact != nil && + artifact.CatalogModelArtifact.Name != nil && + *artifact.CatalogModelArtifact.Name == "artifact-with-props" { + + found = true + assert.NotNil(t, artifact.CatalogModelArtifact.CustomProperties) + + // Verify custom properties are present and properly converted + customPropsMap := artifact.CatalogModelArtifact.CustomProperties + assert.Contains(t, customPropsMap, "custom_prop_1") + assert.Contains(t, customPropsMap, "custom_prop_2") + + // Verify the values are properly converted to MetadataValue + prop1 := customPropsMap["custom_prop_1"] + assert.NotNil(t, prop1.MetadataStringValue) + assert.Equal(t, "value_1", prop1.MetadataStringValue.StringValue) + + break + } + } + assert.True(t, found, "Should find artifact with custom properties") + }) + + t.Run("TestMappingFunctions", func(t *testing.T) { + t.Run("TestMapCatalogModelToCatalogModel", func(t *testing.T) { + // Create a catalog model with various properties + catalogModel := &models.CatalogModelImpl{ + ID: apiutils.Of(int32(123)), + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("mapping-test-model"), + ExternalID: apiutils.Of("mapping-test-ext"), + CreateTimeSinceEpoch: apiutils.Of(int64(1234567890)), + LastUpdateTimeSinceEpoch: apiutils.Of(int64(1234567891)), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("test-source")}, + {Name: "description", StringValue: apiutils.Of("Test description")}, + {Name: "library_name", StringValue: apiutils.Of("pytorch")}, + {Name: "language", StringValue: apiutils.Of("[\"python\", \"go\"]")}, + {Name: "tasks", StringValue: apiutils.Of("[\"classification\", \"regression\"]")}, + }, + } + + result := mapDBModelToAPIModel(catalogModel) + + assert.Equal(t, "123", *result.Id) + assert.Equal(t, "mapping-test-model", result.Name) + assert.Equal(t, "mapping-test-ext", 
*result.ExternalId) + assert.Equal(t, "test-source", *result.SourceId) + assert.Equal(t, "Test description", *result.Description) + assert.Equal(t, "pytorch", *result.LibraryName) + assert.Equal(t, "1234567890", *result.CreateTimeSinceEpoch) + assert.Equal(t, "1234567891", *result.LastUpdateTimeSinceEpoch) + + // Verify JSON arrays are properly parsed + assert.Equal(t, []string{"python", "go"}, result.Language) + assert.Equal(t, []string{"classification", "regression"}, result.Tasks) + }) + + t.Run("TestMapCatalogArtifactToCatalogArtifact", func(t *testing.T) { + // Test model artifact mapping + var catalogModelArtifact models.CatalogModelArtifact = &models.CatalogModelArtifactImpl{ + ID: apiutils.Of(int32(456)), + TypeID: apiutils.Of(int32(modelArtifactTypeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("test-model-artifact"), + ExternalID: apiutils.Of("test-model-artifact-ext"), + URI: apiutils.Of("s3://test/model.bin"), + }, + } + + catalogArtifact := models.CatalogArtifact{ + CatalogModelArtifact: catalogModelArtifact, + } + + result, err := mapDBArtifactToAPIArtifact(catalogArtifact) + require.NoError(t, err) + + assert.NotNil(t, result.CatalogModelArtifact) + assert.Nil(t, result.CatalogMetricsArtifact) + assert.Equal(t, "456", *result.CatalogModelArtifact.Id) + assert.Equal(t, "test-model-artifact", *result.CatalogModelArtifact.Name) + assert.Equal(t, "s3://test/model.bin", result.CatalogModelArtifact.Uri) + + // Test metrics artifact mapping + var catalogMetricsArtifact models.CatalogMetricsArtifact = &models.CatalogMetricsArtifactImpl{ + ID: apiutils.Of(int32(789)), + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("test-metrics-artifact"), + ExternalID: apiutils.Of("test-metrics-artifact-ext"), + MetricsType: models.MetricsTypePerformance, + }, + } + + catalogArtifact2 := models.CatalogArtifact{ + CatalogMetricsArtifact: catalogMetricsArtifact, + 
} + + result2, err := mapDBArtifactToAPIArtifact(catalogArtifact2) + require.NoError(t, err) + + assert.Nil(t, result2.CatalogModelArtifact) + assert.NotNil(t, result2.CatalogMetricsArtifact) + assert.Equal(t, "789", *result2.CatalogMetricsArtifact.Id) + assert.Equal(t, "test-metrics-artifact", *result2.CatalogMetricsArtifact.Name) + assert.Equal(t, "performance-metrics", result2.CatalogMetricsArtifact.MetricsType) + }) + + t.Run("TestMapCatalogArtifact_EmptyArtifact", func(t *testing.T) { + // Test with empty catalog artifact + emptyCatalogArtifact := models.CatalogArtifact{} + + _, err := mapDBArtifactToAPIArtifact(emptyCatalogArtifact) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid catalog artifact type") + }) + }) + + t.Run("TestErrorHandling", func(t *testing.T) { + t.Run("TestGetArtifacts_InvalidModelID", func(t *testing.T) { + // Create a model with invalid ID format for testing + // This would be an edge case where the ID isn't a valid integer + + // We can't easily test this directly since IDs are generated as integers + // But we can test the error case by mocking a scenario + + // For now, let's test a scenario where the model exists but has some issue + params := ListArtifactsParams{ + PageSize: 10, + OrderBy: string(model.ORDERBYFIELD_CREATE_TIME), + SortOrder: model.SORTORDER_ASC, + NextPageToken: apiutils.Of(""), + } + + _, err := dbCatalog.GetArtifacts(ctx, "non-existent-model", "test-source", params) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid model name") + }) + }) + + t.Run("TestGetFilterOptions", func(t *testing.T) { + // Create models with various properties for filter options testing + model1 := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("filter-options-model-1"), + ExternalID: apiutils.Of("filter-opt-1"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: 
apiutils.Of("filter-test-source")}, + {Name: "license", StringValue: apiutils.Of("MIT")}, + {Name: "provider", StringValue: apiutils.Of("HuggingFace")}, + {Name: "maturity", StringValue: apiutils.Of("stable")}, + {Name: "library_name", StringValue: apiutils.Of("transformers")}, + {Name: "language", StringValue: apiutils.Of(`["python", "rust"]`)}, + {Name: "tasks", StringValue: apiutils.Of(`["text-classification", "token-classification"]`)}, + }, + } + + model2 := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("filter-options-model-2"), + ExternalID: apiutils.Of("filter-opt-2"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("filter-test-source")}, + {Name: "license", StringValue: apiutils.Of("Apache-2.0")}, + {Name: "provider", StringValue: apiutils.Of("OpenAI")}, + {Name: "maturity", StringValue: apiutils.Of("experimental")}, + {Name: "library_name", StringValue: apiutils.Of("openai")}, + {Name: "language", StringValue: apiutils.Of(`["python", "javascript"]`)}, + {Name: "tasks", StringValue: apiutils.Of(`["text-generation", "conversational"]`)}, + {Name: "readme", StringValue: apiutils.Of("This is a very long readme that exceeds 100 characters and should be excluded from filter options because it's too verbose for filtering purposes.")}, + }, + } + + model3 := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("filter-options-model-3"), + ExternalID: apiutils.Of("filter-opt-3"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("filter-test-source")}, + {Name: "license", StringValue: apiutils.Of("MIT")}, + {Name: "provider", StringValue: apiutils.Of("PyTorch")}, + {Name: "maturity", StringValue: apiutils.Of("stable")}, + {Name: "language", StringValue: apiutils.Of(`["python"]`)}, + {Name: "tasks", 
StringValue: apiutils.Of(`["image-classification"]`)}, + {Name: "logo", StringValue: apiutils.Of("https://example.com/logo.png")}, + {Name: "license_link", StringValue: apiutils.Of("https://example.com/license")}, + }, + } + + _, err := catalogModelRepo.Save(model1) + require.NoError(t, err) + _, err = catalogModelRepo.Save(model2) + require.NoError(t, err) + _, err = catalogModelRepo.Save(model3) + require.NoError(t, err) + + require.NoError(t, dbCatalog.(*dbCatalogImpl).propertyOptionsRepository.Refresh(models.ContextPropertyOptionType)) + require.NoError(t, dbCatalog.(*dbCatalogImpl).propertyOptionsRepository.Refresh(models.ArtifactPropertyOptionType)) + + // Test GetFilterOptions + filterOptions, err := dbCatalog.GetFilterOptions(ctx) + require.NoError(t, err) + require.NotNil(t, filterOptions) + require.NotNil(t, filterOptions.Filters) + + filters := *filterOptions.Filters + + // Should include short properties + assert.Contains(t, filters, "license") + assert.Contains(t, filters, "provider") + assert.Contains(t, filters, "maturity") + assert.Contains(t, filters, "library_name") + assert.Contains(t, filters, "language") + assert.Contains(t, filters, "tasks") + + // Should exclude internal/verbose fields + assert.NotContains(t, filters, "source_id", "source_id should be excluded") + assert.NotContains(t, filters, "logo", "logo should be excluded") + assert.NotContains(t, filters, "license_link", "license_link should be excluded") + assert.NotContains(t, filters, "readme", "readme should be excluded (too long)") + + licenseFilter := filters["license"] + assert.Equal(t, "string", licenseFilter.Type) + assert.NotNil(t, licenseFilter.Values) + assert.GreaterOrEqual(t, len(licenseFilter.Values), 2, "Should have at least MIT and Apache-2.0") + + // Convert to string slice for easier checking + licenseValues := make([]string, 0) + for _, v := range licenseFilter.Values { + if strVal, ok := v.(string); ok { + licenseValues = append(licenseValues, strVal) + } + } + 
assert.Contains(t, licenseValues, "MIT") + assert.Contains(t, licenseValues, "Apache-2.0") + + // Verify provider filter options + providerFilter := filters["provider"] + assert.Equal(t, "string", providerFilter.Type) + providerValues := make([]string, 0) + for _, v := range providerFilter.Values { + if strVal, ok := v.(string); ok { + providerValues = append(providerValues, strVal) + } + } + assert.Contains(t, providerValues, "HuggingFace") + assert.Contains(t, providerValues, "OpenAI") + assert.Contains(t, providerValues, "PyTorch") + + // Verify JSON array fields are properly parsed and expanded + languageFilter := filters["language"] + assert.Equal(t, "string", languageFilter.Type) + languageValues := make([]string, 0) + for _, v := range languageFilter.Values { + if strVal, ok := v.(string); ok { + languageValues = append(languageValues, strVal) + } + } + // Should contain individual values from JSON arrays + assert.Contains(t, languageValues, "python") + assert.Contains(t, languageValues, "rust") + assert.Contains(t, languageValues, "javascript") + + // Verify tasks are properly expanded + tasksFilter := filters["tasks"] + assert.Equal(t, "string", tasksFilter.Type) + tasksValues := make([]string, 0) + for _, v := range tasksFilter.Values { + if strVal, ok := v.(string); ok { + tasksValues = append(tasksValues, strVal) + } + } + assert.Contains(t, tasksValues, "text-classification") + assert.Contains(t, tasksValues, "token-classification") + assert.Contains(t, tasksValues, "text-generation") + assert.Contains(t, tasksValues, "conversational") + assert.Contains(t, tasksValues, "image-classification") + + // Verify no duplicates + pythonCount := 0 + for _, v := range languageValues { + if v == "python" { + pythonCount++ + } + } + assert.Equal(t, 1, pythonCount, "python should appear only once (deduplicated)") + + // Verify maturity options + maturityFilter := filters["maturity"] + maturityValues := make([]string, 0) + for _, v := range maturityFilter.Values { + 
if strVal, ok := v.(string); ok { + maturityValues = append(maturityValues, strVal) + } + } + assert.Contains(t, maturityValues, "stable") + assert.Contains(t, maturityValues, "experimental") + }) + + t.Run("TestGetArtifacts_WithFilterQuery", func(t *testing.T) { + // Create test model + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("filterquery-artifact-test-model"), + ExternalID: apiutils.Of("filterquery-artifact-test-model-ext"), + }, + Properties: &[]mr_models.Properties{ + {Name: "source_id", StringValue: apiutils.Of("filterquery-test-source")}, + }, + } + + savedModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create multiple test artifacts with different properties + artifact1 := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(modelArtifactTypeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("pytorch-model-artifact"), + ExternalID: apiutils.Of("pytorch-model-artifact-ext"), + URI: apiutils.Of("s3://bucket/pytorch/model.bin"), + ArtifactType: apiutils.Of(models.CatalogModelArtifactType), + }, + CustomProperties: &[]mr_models.Properties{ + {Name: "format", StringValue: apiutils.Of("pytorch")}, + {Name: "model_size", DoubleValue: apiutils.Of(float64(500))}, + }, + } + + artifact2 := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(modelArtifactTypeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("onnx-model-artifact"), + ExternalID: apiutils.Of("onnx-model-artifact-ext"), + URI: apiutils.Of("https://huggingface.co/models/onnx/model.onnx"), + ArtifactType: apiutils.Of(models.CatalogModelArtifactType), + }, + CustomProperties: &[]mr_models.Properties{ + {Name: "format", StringValue: apiutils.Of("onnx")}, + {Name: "model_size", DoubleValue: apiutils.Of(float64(1500))}, + }, + } + + artifact3 := &models.CatalogMetricsArtifactImpl{ + TypeID: 
apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("accuracy-metrics"), + ExternalID: apiutils.Of("accuracy-metrics-ext"), + MetricsType: models.MetricsTypeAccuracy, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + CustomProperties: &[]mr_models.Properties{ + {Name: "overall_average", DoubleValue: apiutils.Of(float64(0.95))}, + }, + } + + _, err = modelArtifactRepo.Save(artifact1, savedModel.GetID()) + require.NoError(t, err) + _, err = modelArtifactRepo.Save(artifact2, savedModel.GetID()) + require.NoError(t, err) + _, err = metricsArtifactRepo.Save(artifact3, savedModel.GetID()) + require.NoError(t, err) + + // Test cases + tests := []struct { + name string + filterQuery string + expectedCount int32 + expectedNames []string + shouldError bool + }{ + { + name: "Filter by URI pattern - s3", + filterQuery: `uri LIKE "%s3%"`, + expectedCount: 1, + expectedNames: []string{"pytorch-model-artifact"}, + }, + { + name: "Filter by custom property format", + filterQuery: `format.string_value = "onnx"`, + expectedCount: 1, + expectedNames: []string{"onnx-model-artifact"}, + }, + { + name: "Filter by numeric custom property", + filterQuery: `model_size.double_value > 1000`, + expectedCount: 1, + expectedNames: []string{"onnx-model-artifact"}, + }, + { + name: "Complex filter with AND", + filterQuery: `uri LIKE "%huggingface%" AND format.string_value = "onnx"`, + expectedCount: 1, + expectedNames: []string{"onnx-model-artifact"}, + }, + { + name: "Filter by name pattern", + filterQuery: `name LIKE "%pytorch%"`, + expectedCount: 1, + expectedNames: []string{"pytorch-model-artifact"}, + }, + { + name: "Filter with OR condition", + filterQuery: `format.string_value = "pytorch" OR format.string_value = "onnx"`, + expectedCount: 2, + expectedNames: []string{"pytorch-model-artifact", "onnx-model-artifact"}, + }, + { + name: "Filter with no matches", + filterQuery: `name = "non-existent-artifact"`, + 
expectedCount: 0, + expectedNames: []string{}, + }, + { + name: "Empty filterQuery returns all artifacts", + filterQuery: "", + expectedCount: 3, + expectedNames: []string{"pytorch-model-artifact", "onnx-model-artifact", "accuracy-metrics"}, + }, + { + name: "Invalid filterQuery syntax", + filterQuery: "invalid syntax here", + shouldError: true, + }, + { + name: "Inferred int type - should match double values (dual-column query)", + filterQuery: `model_size > 400`, + expectedCount: 2, + expectedNames: []string{"pytorch-model-artifact", "onnx-model-artifact"}, + }, + { + name: "Explicit double_value with integer literal", + filterQuery: `model_size.double_value > 400`, + expectedCount: 2, + expectedNames: []string{"pytorch-model-artifact", "onnx-model-artifact"}, + }, + { + name: "Explicit double_value with float literal", + filterQuery: `model_size.double_value > 400.0`, + expectedCount: 2, + expectedNames: []string{"pytorch-model-artifact", "onnx-model-artifact"}, + }, + { + name: "Explicit int_value with integer literal", + filterQuery: `model_size.int_value > 400`, + expectedCount: 0, // Data is stored as double, so int_value query returns nothing + expectedNames: []string{}, + }, + { + name: "Explicit string_value with string literal", + filterQuery: `format.string_value = "onnx"`, + expectedCount: 1, + expectedNames: []string{"onnx-model-artifact"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + params := ListArtifactsParams{ + FilterQuery: tt.filterQuery, + PageSize: 10, + OrderBy: string(model.ORDERBYFIELD_CREATE_TIME), + SortOrder: model.SORTORDER_ASC, + NextPageToken: apiutils.Of(""), + } + + result, err := dbCatalog.GetArtifacts(ctx, "filterquery-artifact-test-model", "filterquery-test-source", params) + + if tt.shouldError { + require.Error(t, err, "Expected error for invalid filter query") + assert.Contains(t, err.Error(), "invalid filter query", "Error should mention invalid filter query") + return + } + + 
require.NoError(t, err) + assert.Equal(t, tt.expectedCount, result.Size, "Expected %d artifacts but got %d", tt.expectedCount, result.Size) + + // Verify artifact names + actualNames := make([]string, 0) + for _, artifact := range result.Items { + if artifact.CatalogModelArtifact != nil && artifact.CatalogModelArtifact.Name != nil { + actualNames = append(actualNames, *artifact.CatalogModelArtifact.Name) + } + if artifact.CatalogMetricsArtifact != nil && artifact.CatalogMetricsArtifact.Name != nil { + actualNames = append(actualNames, *artifact.CatalogMetricsArtifact.Name) + } + } + assert.ElementsMatch(t, tt.expectedNames, actualNames, "Artifact names should match expected") + }) + } + }) +} + +func getCatalogModelTypeIDForDBTest(t *testing.T, db *gorm.DB) int32 { + var typeRecord schema.Type + err := db.Where("name = ?", service.CatalogModelTypeName).First(&typeRecord).Error + if err != nil { + require.NoError(t, err, "Failed to query CatalogModel type") + } + return typeRecord.ID +} + +func getCatalogModelArtifactTypeIDForDBTest(t *testing.T, db *gorm.DB) int32 { + var typeRecord schema.Type + err := db.Where("name = ?", service.CatalogModelArtifactTypeName).First(&typeRecord).Error + if err != nil { + require.NoError(t, err, "Failed to query CatalogModelArtifact type") + } + return typeRecord.ID +} + +func getCatalogMetricsArtifactTypeIDForDBTest(t *testing.T, db *gorm.DB) int32 { + var typeRecord schema.Type + err := db.Where("name = ?", service.CatalogMetricsArtifactTypeName).First(&typeRecord).Error + if err != nil { + require.NoError(t, err, "Failed to query CatalogMetricsArtifact type") + } + return typeRecord.ID +} diff --git a/catalog/internal/catalog/hf_catalog.go b/catalog/internal/catalog/hf_catalog.go index 76f36c3e8c..0ae837b749 100644 --- a/catalog/internal/catalog/hf_catalog.go +++ b/catalog/internal/catalog/hf_catalog.go @@ -18,13 +18,13 @@ type hfCatalogImpl struct { baseURL string } -var _ CatalogSourceProvider = &hfCatalogImpl{} +var _ 
APIProvider = &hfCatalogImpl{} const ( defaultHuggingFaceURL = "https://huggingface.co" ) -func (h *hfCatalogImpl) GetModel(ctx context.Context, name string) (*openapi.CatalogModel, error) { +func (h *hfCatalogImpl) GetModel(ctx context.Context, modelName string, sourceID string) (*openapi.CatalogModel, error) { // TODO: Implement HuggingFace model retrieval return nil, fmt.Errorf("HuggingFace model retrieval not yet implemented") } @@ -39,16 +39,25 @@ func (h *hfCatalogImpl) ListModels(ctx context.Context, params ListModelsParams) }, nil } -func (h *hfCatalogImpl) GetArtifacts(ctx context.Context, name string) (*openapi.CatalogModelArtifactList, error) { +func (h *hfCatalogImpl) GetArtifacts(ctx context.Context, modelName string, sourceID string, params ListArtifactsParams) (openapi.CatalogArtifactList, error) { // TODO: Implement HuggingFace model artifacts retrieval // For now, return empty list to satisfy interface - return &openapi.CatalogModelArtifactList{ - Items: []openapi.CatalogModelArtifact{}, + return openapi.CatalogArtifactList{ + Items: []openapi.CatalogArtifact{}, PageSize: 0, Size: 0, }, nil } +func (h *hfCatalogImpl) GetFilterOptions(ctx context.Context) (*openapi.FilterOptionsList, error) { + // TODO: Implement HuggingFace filter options retrieval + // For now, return empty options to satisfy interface + emptyFilters := make(map[string]openapi.FilterOption) + return &openapi.FilterOptionsList{ + Filters: &emptyFilters, + }, nil +} + // validateCredentials checks if the HuggingFace API credentials are valid func (h *hfCatalogImpl) validateCredentials(ctx context.Context) error { glog.Infof("Validating HuggingFace API credentials") @@ -82,7 +91,7 @@ func (h *hfCatalogImpl) validateCredentials(ctx context.Context) error { } // newHfCatalog creates a new HuggingFace catalog source -func newHfCatalog(source *CatalogSourceConfig) (CatalogSourceProvider, error) { +func newHfCatalog(source *Source, reldir string) (APIProvider, error) { apiKey, ok := 
source.Properties["apiKey"].(string) if !ok || apiKey == "" { return nil, fmt.Errorf("missing or invalid 'apiKey' property for HuggingFace catalog") @@ -117,9 +126,3 @@ func newHfCatalog(source *CatalogSourceConfig) (CatalogSourceProvider, error) { glog.Infof("HuggingFace catalog source configured successfully") return h, nil } - -func init() { - if err := RegisterCatalogType("hf", newHfCatalog); err != nil { - panic(err) - } -} diff --git a/catalog/internal/catalog/hf_catalog_test.go b/catalog/internal/catalog/hf_catalog_test.go index 564d8b38f1..e21b384ed7 100644 --- a/catalog/internal/catalog/hf_catalog_test.go +++ b/catalog/internal/catalog/hf_catalog_test.go @@ -11,7 +11,7 @@ import ( ) func TestNewHfCatalog_MissingAPIKey(t *testing.T) { - source := &CatalogSourceConfig{ + source := &Source{ CatalogSource: openapi.CatalogSource{ Id: "test_hf", Name: "Test HF", @@ -22,7 +22,7 @@ func TestNewHfCatalog_MissingAPIKey(t *testing.T) { }, } - _, err := newHfCatalog(source) + _, err := newHfCatalog(source, "") if err == nil { t.Fatal("Expected error for missing API key, got nil") } @@ -52,7 +52,7 @@ func TestNewHfCatalog_WithValidCredentials(t *testing.T) { })) defer server.Close() - source := &CatalogSourceConfig{ + source := &Source{ CatalogSource: openapi.CatalogSource{ Id: "test_hf", Name: "Test HF", @@ -65,7 +65,7 @@ func TestNewHfCatalog_WithValidCredentials(t *testing.T) { }, } - catalog, err := newHfCatalog(source) + catalog, err := newHfCatalog(source, "") if err != nil { t.Fatalf("Failed to create HF catalog: %v", err) } @@ -76,7 +76,7 @@ func TestNewHfCatalog_WithValidCredentials(t *testing.T) { ctx := context.Background() // Test GetModel - should return not implemented error - model, err := hfCatalog.GetModel(ctx, "test-model") + model, err := hfCatalog.GetModel(ctx, "test-model", "") if err == nil { t.Fatal("Expected not implemented error, got nil") } @@ -99,11 +99,11 @@ func TestNewHfCatalog_WithValidCredentials(t *testing.T) { } // Test GetArtifacts - 
should return empty list - artifacts, err := hfCatalog.GetArtifacts(ctx, "test-model") + artifacts, err := hfCatalog.GetArtifacts(ctx, "test-model", "", ListArtifactsParams{}) if err != nil { t.Fatalf("Failed to get artifacts: %v", err) } - if artifacts == nil { + if artifacts.Items == nil { t.Fatal("Expected artifacts list, got nil") } if len(artifacts.Items) != 0 { @@ -118,7 +118,7 @@ func TestNewHfCatalog_InvalidCredentials(t *testing.T) { })) defer server.Close() - source := &CatalogSourceConfig{ + source := &Source{ CatalogSource: openapi.CatalogSource{ Id: "test_hf", Name: "Test HF", @@ -130,7 +130,7 @@ func TestNewHfCatalog_InvalidCredentials(t *testing.T) { }, } - _, err := newHfCatalog(source) + _, err := newHfCatalog(source, "") if err == nil { t.Fatal("Expected error for invalid credentials, got nil") } @@ -147,7 +147,7 @@ func TestNewHfCatalog_DefaultConfiguration(t *testing.T) { })) defer server.Close() - source := &CatalogSourceConfig{ + source := &Source{ CatalogSource: openapi.CatalogSource{ Id: "test_hf", Name: "Test HF", @@ -159,7 +159,7 @@ func TestNewHfCatalog_DefaultConfiguration(t *testing.T) { }, } - catalog, err := newHfCatalog(source) + catalog, err := newHfCatalog(source, "") if err != nil { t.Fatalf("Failed to create HF catalog with defaults: %v", err) } diff --git a/catalog/internal/catalog/labels.go b/catalog/internal/catalog/labels.go new file mode 100644 index 0000000000..3842448d6f --- /dev/null +++ b/catalog/internal/catalog/labels.go @@ -0,0 +1,157 @@ +package catalog + +import ( + "fmt" + "sync" +) + +type LabelCollection struct { + mu sync.RWMutex + + // origins keeps track of which labels came from which origin (file path). + // Each origin maps to the indices of labels it contributed. 
+ origins map[string][]int + + // labels stores all unique labels + labels []map[string]*string +} + +func NewLabelCollection() *LabelCollection { + return &LabelCollection{ + origins: map[string][]int{}, + labels: []map[string]*string{}, + } +} + +// Merge adds labels from one origin (ordinarily, a file path), completely +// replacing anything that was previously from that origin. +// Returns an error if: +// - duplicate label names exist within newLabels +// - a label name conflicts with an existing label from a different origin +func (lc *LabelCollection) Merge(origin string, newLabels []map[string]any) error { + newLabelNames := make(map[any]bool, len(newLabels)) + for _, newLabel := range newLabels { + if name, ok := newLabel["name"]; ok { + _, isStr := name.(string) + if !isStr && name != nil { + return fmt.Errorf("unknown name type: %v", name) + } + + if newLabelNames[name] { + return fmt.Errorf("duplicate label name '%v' within the same origin", name) + } + newLabelNames[name] = true + } + } + + lc.mu.Lock() + defer lc.mu.Unlock() + + // Build a map of existing label names from OTHER origins (excluding this origin) + // This allows us to validate BEFORE mutating state + oldIndices, originExists := lc.origins[origin] + existingNamesFromOtherOrigins := make(map[any]bool) + for i, label := range lc.labels { + // Skip labels from this origin (they will be replaced) + isFromThisOrigin := false + if originExists { + for _, idx := range oldIndices { + if i == idx { + isFromThisOrigin = true + break + } + } + } + + if !isFromThisOrigin { + if name, ok := label["name"]; ok { + if name == nil { + existingNamesFromOtherOrigins[nil] = true + } else { + existingNamesFromOtherOrigins[*name] = true + } + } + } + } + + // Validate conflicts and prepare labels to add in a single pass + labelsToAdd := make([]map[string]*string, 0, len(newLabels)) + for _, newLabel := range newLabels { + // Check for conflicts with other origins + if name, ok := newLabel["name"]; ok { + if 
existingNamesFromOtherOrigins[name] { + return fmt.Errorf("label with name '%s' already exists from another origin", name) + } + } + + // Verify that every key is a string, except for name which can be nil + newLabelSP := make(map[string]*string, len(newLabel)) + newLabelSP["name"] = nil // default + for k, v := range newLabel { + if k == "name" && v == nil { + continue + } + + str, ok := v.(string) + if !ok { + return fmt.Errorf("label '%v' has invalid property %q, type %T", newLabel["name"], k, v) + } + newLabelSP[k] = &str + } + + labelsToAdd = append(labelsToAdd, newLabelSP) + } + + // All validation passed, now proceed with mutation + // Remove labels that were previously set for this origin + if originExists { + // Mark old labels for removal by setting them to nil + for _, idx := range oldIndices { + if idx < len(lc.labels) { + lc.labels[idx] = nil + } + } + } + + // Compact the slice by removing nil entries + compacted := make([]map[string]*string, 0, len(lc.labels)) + for _, label := range lc.labels { + if label != nil { + compacted = append(compacted, label) + } + } + lc.labels = compacted + + // Add the validated new labels + newIndices := make([]int, 0, len(labelsToAdd)) + for _, newLabel := range labelsToAdd { + lc.labels = append(lc.labels, newLabel) + newIndices = append(newIndices, len(lc.labels)-1) + } + + if len(newIndices) > 0 { + lc.origins[origin] = newIndices + } else { + delete(lc.origins, origin) + } + + return nil +} + +func (lc *LabelCollection) All() []map[string]any { + lc.mu.RLock() + defer lc.mu.RUnlock() + + result := make([]map[string]any, len(lc.labels)) + for i := range lc.labels { + result[i] = make(map[string]any, len(lc.labels[i])) + for k, v := range lc.labels[i] { + if v == nil { + result[i][k] = nil + } else { + result[i][k] = *v + } + } + } + return result +} diff --git a/catalog/internal/catalog/labels_test.go b/catalog/internal/catalog/labels_test.go new file mode 100644 index 0000000000..f6e725981f --- /dev/null +++ 
b/catalog/internal/catalog/labels_test.go @@ -0,0 +1,333 @@ +package catalog + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestLabelCollection_NewLabelCollection(t *testing.T) { + lc := NewLabelCollection() + assert.NotNil(t, lc) + assert.Empty(t, lc.All()) +} + +func TestLabelCollection_Merge_AddLabels(t *testing.T) { + lc := NewLabelCollection() + + labels1 := []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One"}, + {"name": "labelNameTwo", "displayName": "Label Name Two"}, + {"name": nil, "displayName": "Null Label"}, + } + + err := lc.Merge("source1", labels1) + assert.NoError(t, err) + + all := lc.All() + assert.Len(t, all, 3) + assert.Contains(t, all, labels1[0]) + assert.Contains(t, all, labels1[1]) + assert.Contains(t, all, labels1[2]) +} + +func TestLabelCollection_Merge_ReplaceLabels(t *testing.T) { + lc := NewLabelCollection() + + // Add initial labels from source1 + labels1 := []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One"}, + {"name": "labelNameTwo", "displayName": "Label Name Two"}, + } + err := lc.Merge("source1", labels1) + assert.NoError(t, err) + + // Replace labels from source1 + labels2 := []map[string]any{ + {"name": "labelNameThree", "displayName": "Label Name Three"}, + } + err = lc.Merge("source1", labels2) + assert.NoError(t, err) + + all := lc.All() + assert.Len(t, all, 1, "Should only have the new label from source1") + assert.Equal(t, "labelNameThree", all[0]["name"]) +} + +func TestLabelCollection_Merge_MultipleOrigins(t *testing.T) { + lc := NewLabelCollection() + + labels1 := []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One"}, + } + labels2 := []map[string]any{ + {"name": "labelNameTwo", "displayName": "Label Name Two"}, + } + + err := lc.Merge("source1", labels1) + assert.NoError(t, err) + err = lc.Merge("source2", labels2) + assert.NoError(t, err) + + all := lc.All() + assert.Len(t, all, 2) + assert.Contains(t, all, 
labels1[0]) + assert.Contains(t, all, labels2[0]) +} + +func TestLabelCollection_Merge_DuplicateNameWithinSameOrigin(t *testing.T) { + lc := NewLabelCollection() + + // Try to add labels with duplicate names in the same batch + labels := []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One"}, + {"name": "labelNameTwo", "displayName": "Label Name Two"}, + {"name": "labelNameOne", "displayName": "Label Name One"}, // Duplicate! + } + err := lc.Merge("source1", labels) + assert.Error(t, err) + assert.Contains(t, err.Error(), "duplicate label name 'labelNameOne' within the same origin") + + // Verify no labels were added (transaction-like behavior) + all := lc.All() + assert.Len(t, all, 0) +} + +func TestLabelCollection_Merge_DuplicateNullNameWithinSameOrigin(t *testing.T) { + lc := NewLabelCollection() + + // Try to add labels with duplicate names in the same batch + labels := []map[string]any{ + {"name": nil, "displayName": "Label Name Null"}, + {"name": "labelNameTwo", "displayName": "Label Name Two"}, + {"name": nil, "displayName": "Label Name Null"}, // Duplicate! 
+ } + err := lc.Merge("source1", labels) + assert.Error(t, err) + assert.Contains(t, err.Error(), "duplicate label name '' within the same origin") + + // Verify no labels were added (transaction-like behavior) + all := lc.All() + assert.Len(t, all, 0) +} + +func TestLabelCollection_Merge_DuplicateNameFromDifferentOrigins(t *testing.T) { + lc := NewLabelCollection() + + // Add label from source1 + labels1 := []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One from source1"}, + } + err := lc.Merge("source1", labels1) + assert.NoError(t, err) + + // Try to add a label with the same name from source2 + labels2 := []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One from source2"}, + } + err = lc.Merge("source2", labels2) + assert.Error(t, err) + assert.Contains(t, err.Error(), "label with name 'labelNameOne' already exists from another origin") + + // Verify only the first label exists + all := lc.All() + assert.Len(t, all, 1) + assert.Equal(t, "Label Name One from source1", all[0]["displayName"]) +} + +func TestLabelCollection_Merge_SameOriginCanReplaceSameName(t *testing.T) { + lc := NewLabelCollection() + + // Add label from source1 + labels1 := []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One Version 1"}, + } + err := lc.Merge("source1", labels1) + assert.NoError(t, err) + + // Replace with same name from same source - should work + labels2 := []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One Version 2"}, + } + err = lc.Merge("source1", labels2) + assert.NoError(t, err) + + // Verify the label was updated + all := lc.All() + assert.Len(t, all, 1) + assert.Equal(t, "Label Name One Version 2", all[0]["displayName"]) +} + +func TestLabelCollection_Merge_SameOriginWithIdenticalLabels(t *testing.T) { + lc := NewLabelCollection() + + // Add labels from source1 + labels := []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One"}, + {"name": 
"labelNameTwo", "displayName": "Label Name Two"}, + } + err := lc.Merge("source1", labels) + assert.NoError(t, err) + + // Verify labels exist + all := lc.All() + assert.Len(t, all, 2) + + // Merge again with IDENTICAL labels (no changes) + // This should work - labels should be replaced with identical copies + err = lc.Merge("source1", labels) + assert.NoError(t, err) + + // Verify labels still exist (not removed due to deep equality check) + all = lc.All() + assert.Len(t, all, 2, "Labels should still exist after re-merging identical data") + assert.Contains(t, all, labels[0]) + assert.Contains(t, all, labels[1]) +} + +func TestLabelCollection_Merge_RollbackOnValidationFailure(t *testing.T) { + lc := NewLabelCollection() + + // Add initial labels from source1 + labels1 := []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One"}, + {"name": "labelNameTwo", "displayName": "Label Name Two"}, + } + err := lc.Merge("source1", labels1) + assert.NoError(t, err) + + // Verify initial state + all := lc.All() + assert.Len(t, all, 2) + + // Try to update source1 with invalid labels (duplicates within same batch) + invalidLabels := []map[string]any{ + {"name": "enterprise", "displayName": "Enterprise"}, + {"name": "enterprise", "displayName": "Enterprise Duplicate"}, // Duplicate! 
+ } + err = lc.Merge("source1", invalidLabels) + assert.Error(t, err) + assert.Contains(t, err.Error(), "duplicate label name 'enterprise' within the same origin") + + // Verify that the original labels are STILL THERE (rollback behavior) + all = lc.All() + assert.Len(t, all, 2, "Original labels should remain after failed validation") + assert.Contains(t, all, labels1[0]) + assert.Contains(t, all, labels1[1]) + assert.Equal(t, "Label Name One", all[0]["displayName"]) + assert.Equal(t, "Label Name Two", all[1]["displayName"]) +} + +func TestLabelCollection_Merge_RollbackOnCrossOriginConflict(t *testing.T) { + lc := NewLabelCollection() + + // Add labels from source1 + labels1 := []map[string]any{ + {"name": "source1-label", "displayName": "Source 1 Label"}, + } + err := lc.Merge("source1", labels1) + assert.NoError(t, err) + + // Add labels from source2 + labels2 := []map[string]any{ + {"name": "source2-label", "displayName": "Source 2 Label"}, + } + err = lc.Merge("source2", labels2) + assert.NoError(t, err) + + // Verify both sources have their labels + all := lc.All() + assert.Len(t, all, 2) + + // Try to update source1 with a label that conflicts with source2 + conflictingLabels := []map[string]any{ + {"name": "source2-label", "displayName": "Trying to steal source2 label"}, + } + err = lc.Merge("source1", conflictingLabels) + assert.Error(t, err) + assert.Contains(t, err.Error(), "label with name 'source2-label' already exists from another origin") + + // Verify that source1's original labels are STILL THERE (rollback behavior) + all = lc.All() + assert.Len(t, all, 2, "Both original labels should remain after failed validation") + + // Find source1's label + var source1Label map[string]any + for _, label := range all { + if label["name"] == "source1-label" { + source1Label = label + break + } + } + assert.NotNil(t, source1Label, "Source1 label should still exist") + assert.Equal(t, "Source 1 Label", source1Label["displayName"]) +} + +func 
TestLabelCollection_Merge_Deduplicate(t *testing.T) { + lc := NewLabelCollection() + + // Same label from two sources - should fail because of name conflict + label := map[string]any{"name": "labelNameOne", "displayName": "Label Name One"} + + err := lc.Merge("source1", []map[string]any{label}) + assert.NoError(t, err) + + // This should fail because a label with the same name already exists + err = lc.Merge("source2", []map[string]any{label}) + assert.Error(t, err) + assert.Contains(t, err.Error(), "label with name 'labelNameOne' already exists from another origin") +} + +func TestLabelCollection_Merge_EmptyLabels(t *testing.T) { + lc := NewLabelCollection() + + labels1 := []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One"}, + } + err := lc.Merge("source1", labels1) + assert.NoError(t, err) + + // Clear labels from source1 by merging empty slice + err = lc.Merge("source1", []map[string]any{}) + assert.NoError(t, err) + + all := lc.All() + assert.Empty(t, all, "Labels from source1 should be removed") +} + +func TestLabelCollection_Merge_UpdateOrigin(t *testing.T) { + lc := NewLabelCollection() + + // Add labels from source1 + labels1 := []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One"}, + {"name": "labelNameTwo", "displayName": "Label Name Two"}, + } + err := lc.Merge("source1", labels1) + assert.NoError(t, err) + + // Add labels from source2 + labels2 := []map[string]any{ + {"name": "labelNameThree", "displayName": "Label Name Three"}, + } + err = lc.Merge("source2", labels2) + assert.NoError(t, err) + + // Update source1 with different labels + labels3 := []map[string]any{ + {"name": "labelNameFour", "displayName": "Label Name Four"}, + } + err = lc.Merge("source1", labels3) + assert.NoError(t, err) + + all := lc.All() + assert.Len(t, all, 2, "Should have labelNameFour from source1 and labelNameThree from source2") + + // Verify source2 labels are still there + assert.Contains(t, all, labels2[0]) + // Verify 
new source1 labels are there + assert.Contains(t, all, labels3[0]) + // Verify old source1 labels are gone + assert.NotContains(t, all, labels1[0]) + assert.NotContains(t, all, labels1[1]) +} diff --git a/catalog/internal/catalog/loader.go b/catalog/internal/catalog/loader.go new file mode 100644 index 0000000000..3cb5b56634 --- /dev/null +++ b/catalog/internal/catalog/loader.go @@ -0,0 +1,367 @@ +package catalog + +import ( + "context" + "errors" + "fmt" + "os" + "path/filepath" + "sync" + + "github.com/golang/glog" + dbmodels "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/catalog/internal/db/service" + apimodels "github.com/kubeflow/model-registry/catalog/pkg/openapi" + "github.com/kubeflow/model-registry/internal/apiutils" + mrmodels "github.com/kubeflow/model-registry/internal/db/models" + "k8s.io/apimachinery/pkg/util/yaml" +) + +// ModelProviderRecord contains one model and its associated artifacts. +type ModelProviderRecord struct { + Model dbmodels.CatalogModel + Artifacts []dbmodels.CatalogArtifact +} + +// ModelProviderFunc emits models and related data in the channel it returns. It is +// expected to spawn a goroutine and return immediately. The returned channel must +// close when the goroutine ends. The goroutine should end when the context is +// canceled, but may end sooner. +type ModelProviderFunc func(ctx context.Context, source *Source, reldir string) (<-chan ModelProviderRecord, error) + +var registeredModelProviders = map[string]ModelProviderFunc{} + +func RegisterModelProvider(name string, callback ModelProviderFunc) error { + if _, exists := registeredModelProviders[name]; exists { + return fmt.Errorf("provider type %s already exists", name) + } + registeredModelProviders[name] = callback + return nil +} + +// LoaderEventHandler is the definition of a function called after a model is loaded. 
+type LoaderEventHandler func(ctx context.Context, record ModelProviderRecord) error + +// sourceConfig is the structure for the catalog sources YAML file. +type sourceConfig struct { + Catalogs []Source `json:"catalogs"` + Labels []map[string]any `json:"labels,omitempty"` +} + +// Source is a single entry from the catalog sources YAML file. +type Source struct { + apimodels.CatalogSource `json:",inline"` + + // Catalog type to use, must match one of the registered types + Type string `json:"type"` + + // Properties used for configuring the catalog connection based on catalog implementation + Properties map[string]any `json:"properties,omitempty"` +} + +type Loader struct { + // Sources contains current source information loaded from the configuration files. + Sources *SourceCollection + + // Labels contains current labels loaded from the configuration files. + Labels *LabelCollection + + paths []string + services service.Services + closersMu sync.Mutex + closers map[string]func() + handlers []LoaderEventHandler +} + +func NewLoader(services service.Services, paths []string) *Loader { + return &Loader{ + Sources: NewSourceCollection(), + Labels: NewLabelCollection(), + paths: paths, + services: services, + closers: map[string]func(){}, + } +} + +// RegisterEventHandler adds a function that will be called for every +// successfully processed record. This should be called before Start. +// +// Handlers are called in the order they are registered. +func (l *Loader) RegisterEventHandler(fn LoaderEventHandler) { + l.handlers = append(l.handlers, fn) +} + +// Start processes the sources YAML files. Background goroutines will be +// stopped when the context is canceled. 
+func (l *Loader) Start(ctx context.Context) error { + for _, path := range l.paths { + err := l.loadOne(ctx, path) + if err != nil { + return fmt.Errorf("%s: %w", path, err) + } + + go func(path string) { + changes, err := getMonitor().Path(ctx, path) + if err != nil { + glog.Errorf("unable to watch sources file (%s): %v", path, err) + // Not fatal, we just won't get automatic updates. + } + + for range changes { + glog.Infof("Reloading sources %s", path) + + err = l.loadOne(ctx, path) + if err != nil { + glog.Errorf("unable to load sources: %v", err) + } + } + }(path) + } + + return nil +} + +// loadOne processes (or re-processes) a sources config file. +func (l *Loader) loadOne(ctx context.Context, path string) error { + // Get absolute path of the catalog config file + path, err := filepath.Abs(path) + if err != nil { + return fmt.Errorf("failed to get absolute path for %s: %v", path, err) + } + + config, err := l.read(path) + if err != nil { + return err + } + + err = l.updateSources(path, config) + if err != nil { + return err + } + + err = l.updateLabels(path, config) + if err != nil { + return err + } + + return l.updateDatabase(ctx, path, config) +} + +func (l *Loader) read(path string) (*sourceConfig, error) { + config := &sourceConfig{} + bytes, err := os.ReadFile(path) + if err != nil { + return nil, err + } + + if err = yaml.UnmarshalStrict(bytes, &config); err != nil { + return nil, err + } + + enabledSources := make([]Source, 0, len(config.Catalogs)) + + // Remove disabled sources and explicitly set enabled on the others. 
+ for _, source := range config.Catalogs { + // If enabled is explicitly set to false, skip + if source.HasEnabled() && *source.Enabled == false { + continue + } + // If not explicitly set, default to enabled + source.CatalogSource.Enabled = apiutils.Of(true) + + // Default to an empty labels list + if source.Labels == nil { + source.Labels = []string{} + } + + enabledSources = append(enabledSources, source) + } + config.Catalogs = enabledSources + + return config, nil +} + +func (l *Loader) updateSources(path string, config *sourceConfig) error { + sources := make(map[string]apimodels.CatalogSource, len(config.Catalogs)) + + for _, source := range config.Catalogs { + glog.Infof("reading config type %s...", source.Type) + id := source.GetId() + if len(id) == 0 { + return fmt.Errorf("invalid source: missing id") + } + if _, exists := sources[id]; exists { + return fmt.Errorf("invalid source: duplicate id %s", id) + } + + // Validate includedModels/excludedModels patterns early + if err := ValidateSourceFilters(source.IncludedModels, source.ExcludedModels); err != nil { + return fmt.Errorf("invalid source %s: %w", id, err) + } + + sources[id] = source.CatalogSource + + glog.Infof("loaded source %s of type %s", id, source.Type) + } + + return l.Sources.Merge(path, sources) +} + +func (l *Loader) updateLabels(path string, config *sourceConfig) error { + // Merge labels from config into the label collection + if config.Labels == nil { + // No labels in config, but we still need to clear any previous labels from this origin + return l.Labels.Merge(path, []map[string]any{}) + } + + // Validate that each label has a required "name" field + for i, label := range config.Labels { + if name, ok := label["name"]; !ok || name == "" { + return fmt.Errorf("invalid label at index %d: missing required 'name' field", i) + } + } + + return l.Labels.Merge(path, config.Labels) +} + +func (l *Loader) updateDatabase(ctx context.Context, path string, config *sourceConfig) error { + ctx, 
cancel := context.WithCancel(ctx) + + l.closersMu.Lock() + if l.closers[path] != nil { + l.closers[path]() + } + l.closers[path] = cancel + l.closersMu.Unlock() + + records := l.readProviderRecords(ctx, path, config) + + go func() { + for record := range records { + attr := record.Model.GetAttributes() + if attr == nil || attr.Name == nil { + continue + } + + glog.Infof("Loading model %s with %d artifact(s)", *attr.Name, len(record.Artifacts)) + + model, err := l.services.CatalogModelRepository.Save(record.Model) + if err != nil { + glog.Errorf("%s: unable to save: %v", *attr.Name, err) + continue + } + + modelID := model.GetID() + if modelID == nil { + glog.Errorf("%s: model has no ID after save") + continue + } + + // Remove artifacts that existed before. + err = l.services.CatalogArtifactRepository.DeleteByParentID(service.CatalogModelArtifactTypeName, *modelID) + if err != nil { + glog.Errorf("%s: unable to remove old catalog model artifacts: %v", err) + } + err = l.services.CatalogArtifactRepository.DeleteByParentID(service.CatalogMetricsArtifactTypeName, *modelID) + if err != nil { + glog.Errorf("%s: unable to remove old catalog model artifacts: %v", err) + } + + for i, artifact := range record.Artifacts { + switch { + case artifact.CatalogModelArtifact != nil: + _, err = l.services.CatalogModelArtifactRepository.Save(artifact.CatalogModelArtifact, modelID) + case artifact.CatalogMetricsArtifact != nil: + _, err = l.services.CatalogMetricsArtifactRepository.Save(artifact.CatalogMetricsArtifact, modelID) + default: + err = errors.New("unknown artifact type") + } + + if err != nil { + glog.Errorf("%s, artifact %d: %v", *attr.Name, i, err) + } + } + + for _, handler := range l.handlers { + handler(ctx, record) + } + } + }() + + return nil +} + +// readProviderRecords calls the provider for every configured source and +// merges the returned channels together. The returned channel is closed when +// the last provider channel is closed. 
+func (l *Loader) readProviderRecords(ctx context.Context, path string, config *sourceConfig) <-chan ModelProviderRecord { + configDir := filepath.Dir(path) + + ch := make(chan ModelProviderRecord) + var wg sync.WaitGroup + + for _, source := range config.Catalogs { + glog.Infof("Reading models from %s source %s", source.Type, source.Id) + + registerFunc, ok := registeredModelProviders[source.Type] + if !ok { + glog.Errorf("catalog type %s not registered", source.Type) + continue + } + + records, err := registerFunc(ctx, &source, configDir) + if err != nil { + glog.Errorf("error reading catalog type %s with id %s: %v", source.Type, source.Id, err) + continue + } + + wg.Add(1) + go func() { + defer wg.Done() + for r := range records { + // Set source_id on every returned model. + l.setModelSourceID(r.Model, source.Id) + + ch <- r + } + }() + } + + go func() { + defer close(ch) + wg.Wait() + }() + + return ch +} + +func (l *Loader) setModelSourceID(model dbmodels.CatalogModel, sourceID string) { + if model == nil { + return + } + + // Add a source_id property to the model's properties list.. the hard + // way, because we use pointers to slices for some reason. + + props := model.GetProperties() + if props == nil { + if modelImpl, ok := model.(*dbmodels.CatalogModelImpl); ok { + newProps := make([]mrmodels.Properties, 0, 1) + modelImpl.Properties = &newProps + props = &newProps + } else { + // Can't do anything with this. 
// ModelFilter decides whether a model name passes a source's include/exclude
// glob patterns.
type ModelFilter struct {
	included []*compiledPattern
	excluded []*compiledPattern
}

// compiledPattern keeps the trimmed original glob alongside its compiled regexp.
type compiledPattern struct {
	raw string
	re  *regexp.Regexp
}

// newCompiledPattern converts one glob (only '*' is special) into a
// case-insensitive, fully anchored regexp. field and idx only feed error
// messages.
func newCompiledPattern(field string, idx int, raw string) (*compiledPattern, error) {
	trimmed := strings.TrimSpace(raw)
	if trimmed == "" {
		return nil, fmt.Errorf("%s[%d]: pattern cannot be empty", field, idx)
	}

	// Quote each literal segment and stitch them back together with '.*'
	// wherever a '*' appeared.
	segments := strings.Split(trimmed, "*")
	for i, segment := range segments {
		segments[i] = regexp.QuoteMeta(segment)
	}
	expr := "(?i)^" + strings.Join(segments, ".*") + "$"

	re, err := regexp.Compile(expr)
	if err != nil {
		return nil, fmt.Errorf("%s[%d]: invalid pattern %q: %w", field, idx, trimmed, err)
	}

	return &compiledPattern{raw: trimmed, re: re}, nil
}

// compilePatterns compiles every entry of patterns; an empty list yields nil.
func compilePatterns(field string, patterns []string) ([]*compiledPattern, error) {
	if len(patterns) == 0 {
		return nil, nil
	}

	result := make([]*compiledPattern, len(patterns))
	for i, pattern := range patterns {
		compiled, err := newCompiledPattern(field, i, pattern)
		if err != nil {
			return nil, err
		}
		result[i] = compiled
	}
	return result, nil
}

// ValidateSourceFilters validates that the includedModels and excludedModels
// patterns are valid (non-empty, compilable, non-conflicting). Useful for
// early validation at configuration load time without constructing the full
// ModelFilter.
func ValidateSourceFilters(included, excluded []string) error {
	if err := detectConflictingPatterns(included, excluded); err != nil {
		return err
	}
	if _, err := compilePatterns("includedModels", included); err != nil {
		return err
	}
	_, err := compilePatterns("excludedModels", excluded)
	return err
}

// NewModelFilter builds a ModelFilter from the provided include/exclude
// pattern lists. When both lists are empty it returns (nil, nil); a nil
// filter allows every name.
func NewModelFilter(included, excluded []string) (*ModelFilter, error) {
	if err := ValidateSourceFilters(included, excluded); err != nil {
		return nil, err
	}

	inc, err := compilePatterns("includedModels", included)
	if err != nil {
		return nil, err
	}

	exc, err := compilePatterns("excludedModels", excluded)
	if err != nil {
		return nil, err
	}

	if len(inc) == 0 && len(exc) == 0 {
		return nil, nil
	}

	return &ModelFilter{included: inc, excluded: exc}, nil
}

// detectConflictingPatterns rejects any pattern listed verbatim (after
// trimming) in both lists.
func detectConflictingPatterns(included, excluded []string) error {
	if len(included) == 0 || len(excluded) == 0 {
		return nil
	}

	seen := make(map[string]int, len(included))
	for i, pattern := range included {
		seen[strings.TrimSpace(pattern)] = i
	}

	for j, pattern := range excluded {
		trimmed := strings.TrimSpace(pattern)
		if i, dup := seen[trimmed]; dup {
			return fmt.Errorf("pattern %q is defined in both includedModels[%d] and excludedModels[%d]", trimmed, i, j)
		}
	}
	return nil
}

// Allows returns true if the provided model name passes the include/exclude
// rules. A nil receiver allows everything.
func (f *ModelFilter) Allows(name string) bool {
	if f == nil {
		return true
	}

	if len(f.included) > 0 && !matchesAny(f.included, name) {
		return false
	}

	return !matchesAny(f.excluded, name)
}

// matchesAny reports whether any compiled pattern matches name.
func matchesAny(patterns []*compiledPattern, name string) bool {
	for _, p := range patterns {
		if p.re.MatchString(name) {
			return true
		}
	}
	return false
}
+ } + + excluded := append([]string{}, source.ExcludedModels...) + if len(extraExcluded) > 0 { + excluded = append(excluded, extraExcluded...) + } + + filter, err := NewModelFilter(included, excluded) + if err != nil { + return nil, fmt.Errorf("invalid include/exclude configuration for source %s: %w", source.Id, err) + } + + return filter, nil +} diff --git a/catalog/internal/catalog/model_filter_test.go b/catalog/internal/catalog/model_filter_test.go new file mode 100644 index 0000000000..d3d32e9205 --- /dev/null +++ b/catalog/internal/catalog/model_filter_test.go @@ -0,0 +1,109 @@ +package catalog + +import ( + "testing" + + apimodels "github.com/kubeflow/model-registry/catalog/pkg/openapi" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestModelFilterAllows(t *testing.T) { + filter, err := NewModelFilter([]string{"Granite/*"}, []string{"Granite/beta-*"}) + require.NoError(t, err) + + assert.True(t, filter.Allows("Granite/3-1-instruct")) + assert.False(t, filter.Allows("Granite/beta-release")) + assert.False(t, filter.Allows("Other/model")) + + // Test case-insensitive matching + assert.True(t, filter.Allows("granite/3-1-instruct")) + assert.True(t, filter.Allows("GRANITE/3-1-instruct")) + assert.False(t, filter.Allows("granite/beta-release")) + + allowAll, err := NewModelFilter([]string{"*"}, nil) + require.NoError(t, err) + assert.True(t, allowAll.Allows("anything/goes")) +} + +func TestModelFilterConflictsAndValidation(t *testing.T) { + _, err := NewModelFilter([]string{"Granite/*"}, []string{"Granite/*"}) + require.Error(t, err) + assert.Contains(t, err.Error(), "pattern \"Granite/*\"") + + _, err = NewModelFilter([]string{""}, nil) + require.Error(t, err) + assert.Contains(t, err.Error(), "pattern cannot be empty") +} + +func TestNewModelFilterFromSourceMergesLegacy(t *testing.T) { + source := &Source{ + CatalogSource: apimodels.CatalogSource{ + Id: "test", + Name: "Test source", + Labels: []string{}, + 
IncludedModels: []string{"Granite/*"}, + }, + } + + filter, err := NewModelFilterFromSource(source, nil, []string{"Legacy/*"}) + require.NoError(t, err) + + assert.True(t, filter.Allows("Granite/model")) + assert.False(t, filter.Allows("Legacy/model")) +} + +func TestModelFilterWithWildcardInMiddle(t *testing.T) { + // Test that wildcards match across the entire name + filter, err := NewModelFilter(nil, []string{"*deprecated*", "*old*"}) + require.NoError(t, err) + + assert.True(t, filter.Allows("Granite/empty-stable")) + assert.False(t, filter.Allows("Mistral/empty-deprecated")) + assert.False(t, filter.Allows("DeepSeek/empty-old-v1")) + assert.False(t, filter.Allows("Foo/old")) + assert.False(t, filter.Allows("Bar/deprecated")) + + // Test that */pattern* requires the pattern immediately after / + filter2, err := NewModelFilter(nil, []string{"*/deprecated", "*/old*"}) + require.NoError(t, err) + + assert.True(t, filter2.Allows("Mistral/empty-deprecated")) // doesn't match */deprecated (no immediate match after /) + assert.False(t, filter2.Allows("Foo/deprecated")) // matches */deprecated + assert.False(t, filter2.Allows("Bar/old-model")) // matches */old* +} + +func TestValidateSourceFilters(t *testing.T) { + t.Run("no filters", func(t *testing.T) { + err := ValidateSourceFilters(nil, nil) + assert.NoError(t, err) + }) + + t.Run("valid patterns", func(t *testing.T) { + err := ValidateSourceFilters([]string{"Granite/*", "Meta/*"}, []string{"*-beta"}) + assert.NoError(t, err) + }) + + t.Run("conflicting patterns", func(t *testing.T) { + err := ValidateSourceFilters([]string{"Granite/*"}, []string{"Granite/*"}) + require.Error(t, err) + assert.Contains(t, err.Error(), "Granite/*") + }) + + t.Run("empty pattern in includedModels", func(t *testing.T) { + err := ValidateSourceFilters([]string{"Granite/*", ""}, nil) + require.Error(t, err) + assert.Contains(t, err.Error(), "pattern cannot be empty") + }) + + t.Run("whitespace-only pattern", func(t *testing.T) { + err := 
ValidateSourceFilters([]string{" "}, nil) + require.Error(t, err) + assert.Contains(t, err.Error(), "pattern cannot be empty") + }) + + t.Run("valid glob patterns", func(t *testing.T) { + err := ValidateSourceFilters([]string{"valid/*"}, nil) + assert.NoError(t, err) // Our conversion always produces valid regex + }) +} diff --git a/catalog/internal/catalog/monitor.go b/catalog/internal/catalog/monitor.go index 8159e5d430..9a30dc7c6d 100644 --- a/catalog/internal/catalog/monitor.go +++ b/catalog/internal/catalog/monitor.go @@ -1,13 +1,16 @@ package catalog import ( + "context" "fmt" "hash/crc32" "io" "os" "path/filepath" + "slices" "sync" "sync/atomic" + "time" "github.com/fsnotify/fsnotify" "github.com/golang/glog" @@ -37,6 +40,11 @@ type monitor struct { recordsMu sync.RWMutex records map[string]map[string]*monitorRecord + + // How long to wait after receiving an event before processing it (this + // exists to avoid processing partially written files). Defaults to 1 + // second. + Pause time.Duration } var _monitor *monitor @@ -69,6 +77,7 @@ func newMonitor() (*monitor, error) { m := &monitor{ watcher: watcher, records: map[string]map[string]*monitorRecord{}, + Pause: time.Second, } go m.monitor() @@ -115,8 +124,8 @@ func (m *monitor) Close() { // change. The file does not need to exist before calling this method, however // the provided path should only be a file or a symlink (not a directory, // device, etc.). The returned channel will be closed when the monitor is -// closed. -func (m *monitor) Path(p string) (<-chan struct{}, error) { +// closed or when the context is canceled. +func (m *monitor) Path(ctx context.Context, p string) (<-chan struct{}, error) { absPath, err := filepath.Abs(p) if err != nil { return nil, fmt.Errorf("abs: %w", err) @@ -149,6 +158,24 @@ func (m *monitor) Path(p string) (<-chan struct{}, error) { } m.records[dir][base].updateHash(filepath.Join(dir, base)) + go func() { + // Wait for the context to close, then clean up. 
+ <-ctx.Done() + + m.recordsMu.Lock() + defer m.recordsMu.Unlock() + + if m.records == nil { + // Closed + return + } + + if n := slices.Index(m.records[dir][base].channels, ch); n >= 0 { + m.records[dir][base].channels = slices.Delete(m.records[dir][base].channels, n, n+1) + } + close(ch) + }() + return ch, nil } @@ -180,6 +207,9 @@ func (m *monitor) monitor() { continue } + // Pause briefly to avoid processing partially written files. + time.Sleep(m.Pause) + func() { m.recordsMu.RLock() defer m.recordsMu.RUnlock() @@ -217,22 +247,26 @@ type monitorRecord struct { // updateHash recalculates the hash and returns true if it has changed. func (mr *monitorRecord) updateHash(path string) bool { - newHash := mr.calculateHash(path) + newHash, err := mr.calculateHash(path) + if err != nil { + // If we can't read the file (e.g., broken symlink), don't trigger an event + return false + } oldHash := atomic.SwapUint32(&mr.hash, newHash) return oldHash != newHash } -func (monitorRecord) calculateHash(path string) uint32 { +func (monitorRecord) calculateHash(path string) (uint32, error) { fh, err := os.Open(path) if err != nil { - return 0 + return 0, err } defer fh.Close() h := crc32.NewIEEE() _, err = io.Copy(h, fh) if err != nil { - return 0 + return 0, err } - return h.Sum32() + return h.Sum32(), nil } diff --git a/catalog/internal/catalog/monitor_test.go b/catalog/internal/catalog/monitor_test.go index 6dc2117631..f26189d035 100644 --- a/catalog/internal/catalog/monitor_test.go +++ b/catalog/internal/catalog/monitor_test.go @@ -18,6 +18,7 @@ func TestMonitor(t *testing.T) { if !assert.NoError(err) { return } + mon.Pause = 0 tmpDir := t.TempDir() fileA := filepath.Join(tmpDir, "a") @@ -31,8 +32,8 @@ func TestMonitor(t *testing.T) { return watchMonitor(ch) } - a := _watchMonitor(mon.Path(fileA)) - b := _watchMonitor(mon.Path(fileB)) + a := _watchMonitor(mon.Path(t.Context(), fileA)) + b := _watchMonitor(mon.Path(t.Context(), fileB)) updateFile(t, fileA) a.AssertCount(t, 1) 
@@ -78,6 +79,7 @@ func TestMonitorSymlinks(t *testing.T) { if !assert.NoError(err) { return } + mon.Pause = 0 defer mon.Close() // Watch the files on the published path. @@ -88,8 +90,8 @@ func TestMonitorSymlinks(t *testing.T) { return watchMonitor(ch) } - a := _watchMonitor(mon.Path(filepath.Join(tmpDir, "a"))) - b := _watchMonitor(mon.Path(filepath.Join(tmpDir, "b"))) + a := _watchMonitor(mon.Path(t.Context(), filepath.Join(tmpDir, "a"))) + b := _watchMonitor(mon.Path(t.Context(), filepath.Join(tmpDir, "b"))) // Set up a directory structure with symlinks like k8s does for mounted // configmaps. diff --git a/catalog/internal/catalog/performance_metrics.go b/catalog/internal/catalog/performance_metrics.go new file mode 100644 index 0000000000..988af7fc45 --- /dev/null +++ b/catalog/internal/catalog/performance_metrics.go @@ -0,0 +1,631 @@ +package catalog + +import ( + "bufio" + "bytes" + "context" + "encoding/json" + "fmt" + "os" + "path/filepath" + "strings" + "time" + + "github.com/golang/glog" + dbmodels "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/catalog/internal/db/service" + "github.com/kubeflow/model-registry/internal/apiutils" + "github.com/kubeflow/model-registry/internal/db/models" +) + +// metadataJSON represents the minimal structure needed from metadata.json files +// Only the ID field is needed to look up existing models +type metadataJSON struct { + ID string `json:"id"` // Maps to model name for lookup + OverallAccuracy *float64 `json:"overall_accuracy"` // Overall accuracy score for the model +} + +// parseMetadataJSON parses JSON data into metadataJSON struct, extracting only the ID field +func parseMetadataJSON(data []byte) (metadataJSON, error) { + var metadata metadataJSON + if err := json.Unmarshal(data, &metadata); err != nil { + return metadataJSON{}, fmt.Errorf("failed to unmarshal JSON: %v", err) + } + + if metadata.ID == "" { + return metadataJSON{}, fmt.Errorf("missing required 
'id' field in metadata") + } + + return metadata, nil +} + +// evaluationRecord represents a single evaluation result from evaluations.ndjson +// Only minimal fields needed for association are explicitly defined +// evaluationRecords will be merged into a single accuracy-metrics artifact +type evaluationRecord struct { + // Core fields needed to associate evaluation with model + ModelID string `json:"model_id"` + Benchmark string `json:"benchmark"` + + // CustomProperties captures all other fields dynamically + CustomProperties map[string]interface{} `json:"-"` +} + +// UnmarshalJSON implements custom JSON unmarshaling to capture all undefined fields as CustomProperties +func (er *evaluationRecord) UnmarshalJSON(data []byte) error { + // First unmarshal into a generic map to get all fields + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return err + } + + // Extract the core fields + if modelID, ok := raw["model_id"].(string); ok { + er.ModelID = modelID + } + if benchmark, ok := raw["benchmark"].(string); ok { + er.Benchmark = benchmark + } + + // Initialize CustomProperties if nil + if er.CustomProperties == nil { + er.CustomProperties = make(map[string]interface{}) + } + + // Copy all fields to CustomProperties, including the core ones + for key, value := range raw { + er.CustomProperties[key] = value + } + + return nil +} + +// performanceRecord represents a single performance result from performance.ndjson +// Only minimal fields needed for association are explicitly defined +type performanceRecord struct { + // Core fields needed to associate performance data with model + ID string `json:"id"` + ModelID string `json:"model_id"` + + // CustomProperties captures remaining fields dynamically + CustomProperties map[string]interface{} `json:"-"` +} + +// UnmarshalJSON implements custom JSON unmarshaling to capture all undefined fields as CustomProperties +func (pr *performanceRecord) UnmarshalJSON(data []byte) error { + // 
First unmarshal into a generic map to get all fields + var raw map[string]interface{} + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.UseNumber() + if err := decoder.Decode(&raw); err != nil { + return err + } + + // Extract the core fields + if id, ok := raw["id"].(string); ok { + pr.ID = id + } + if modelID, ok := raw["model_id"].(string); ok { + pr.ModelID = modelID + } + + // Initialize CustomProperties if nil + if pr.CustomProperties == nil { + pr.CustomProperties = make(map[string]interface{}) + } + + // Copy all fields to CustomProperties, including the core ones + for key, value := range raw { + pr.CustomProperties[key] = value + } + + return nil +} + +type PerformanceMetricsLoader struct { + path []string + modelRepo dbmodels.CatalogModelRepository + metricsArtifactRepo dbmodels.CatalogMetricsArtifactRepository + modelTypeID int32 + metricsArtifactTypeID int32 + // Cache of model ID -> directory path mapping to avoid repeated directory scans + modelDirCache map[string]string +} + +func NewPerformanceMetricsLoader(path []string, modelRepo dbmodels.CatalogModelRepository, metricsArtifactRepo dbmodels.CatalogMetricsArtifactRepository, typeMap map[string]int32) (*PerformanceMetricsLoader, error) { + if len(path) == 0 { + glog.Info("No performance metrics path provided, skipping performance metrics loading") + return nil, nil + } + + // Check if path exists + for _, p := range path { + if _, err := os.Stat(p); os.IsNotExist(err) { + glog.Warningf("Performance metrics path %s does not exist, skipping performance metrics loading", p) + return nil, nil + } + } + + glog.Infof("Loading performance metrics data from %s", path) + + // Get the TypeID for CatalogModel from the type map + modelTypeID, exists := typeMap[service.CatalogModelTypeName] + if !exists { + return nil, fmt.Errorf("CatalogModel type not found in type map") + } + glog.V(2).Infof("Using catalog model type ID: %d", modelTypeID) + + // Get the TypeID for CatalogMetricsArtifact from the 
type map + metricsArtifactTypeID, exists := typeMap[service.CatalogMetricsArtifactTypeName] + if !exists { + return nil, fmt.Errorf("CatalogMetricsArtifact type not found in type map") + } + glog.V(2).Infof("Using metrics artifact type ID: %d", metricsArtifactTypeID) + + loader := &PerformanceMetricsLoader{ + path: path, + modelRepo: modelRepo, + metricsArtifactRepo: metricsArtifactRepo, + modelTypeID: modelTypeID, + metricsArtifactTypeID: metricsArtifactTypeID, + modelDirCache: make(map[string]string), + } + + // Build the model directory cache once during initialization + if err := loader.buildModelDirCache(); err != nil { + return nil, fmt.Errorf("failed to build model directory cache: %v", err) + } + + return loader, nil +} + +// buildModelDirCache scans directories once and builds a cache of model ID -> directory path +func (pml *PerformanceMetricsLoader) buildModelDirCache() error { + modelCount := 0 + for _, rootPath := range pml.path { + err := filepath.Walk(rootPath, func(dirPath string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + // Skip if not a directory + if !info.IsDir() { + return nil + } + + // Check if this directory contains metadata.json + metadataPath := filepath.Join(dirPath, "metadata.json") + if _, err := os.Stat(metadataPath); os.IsNotExist(err) { + return nil // Skip directories without metadata.json + } + + // Read and parse metadata.json to extract the model ID + metadataData, err := os.ReadFile(metadataPath) + if err != nil { + glog.Warningf("Failed to read metadata file %s: %v", metadataPath, err) + return nil // Continue with other directories + } + + // Parse metadata to extract the model ID for lookup + metadata, err := parseMetadataJSON(metadataData) + if err != nil { + glog.Warningf("Failed to parse metadata file %s: %v", metadataPath, err) + return nil // Continue with other directories + } + + // Add to cache + pml.modelDirCache[metadata.ID] = dirPath + modelCount++ + glog.V(3).Infof("Cached model 
directory: %s -> %s", metadata.ID, dirPath)

			return nil
		})

		if err != nil {
			return fmt.Errorf("failed to walk directory %s: %v", rootPath, err)
		}
	}

	glog.Infof("Built model directory cache (%d models indexed)", modelCount)

	return nil
}

// Load attaches performance/accuracy metrics artifacts to the given model.
// A nil receiver (produced by NewPerformanceMetricsLoader when metrics are
// disabled) is a no-op. Models with no cached metrics directory are skipped
// silently.
func (pml *PerformanceMetricsLoader) Load(ctx context.Context, record ModelProviderRecord) error {
	if pml == nil {
		return nil
	}

	attrs := record.Model.GetAttributes()
	if attrs == nil || attrs.Name == nil {
		return nil
	}

	modelName := *attrs.Name
	glog.Infof("Loading performance metrics for %s", modelName)

	// The cache is keyed by the metadata.json "id"; the model's Name is
	// assumed to equal that id — TODO confirm against the ingest pipeline.
	dirPath, found := pml.modelDirCache[modelName]
	if !found {
		glog.V(2).Infof("No performance metrics directory found for model %s", modelName)
		return nil
	}

	glog.V(2).Infof("Found cached directory for model %s: %s", modelName, dirPath)

	// Process this specific model directory using the cached path
	artifactsCreated, err := processModelDirectory(dirPath, pml.modelRepo, pml.metricsArtifactRepo, pml.modelTypeID, pml.metricsArtifactTypeID)
	if err != nil {
		return fmt.Errorf("failed to process metrics for model %s: %v", modelName, err)
	}

	if artifactsCreated > 0 {
		glog.Infof("Loaded %d performance metrics artifacts for model %s", artifactsCreated, modelName)
	}

	return nil
}

// processModelDirectory processes a single model directory containing
// metadata.json and metric files. Metrics are only attached to models that
// already exist in the database; unknown models are skipped without error.
// Returns the number of artifacts created and any error encountered.
// (modelTypeID is currently unused here but kept for interface stability.)
func processModelDirectory(dirPath string, modelRepo dbmodels.CatalogModelRepository, metricsArtifactRepo dbmodels.CatalogMetricsArtifactRepository, modelTypeID int32, metricsArtifactTypeID int32) (int, error) {
	// Read and parse metadata.json to extract the model ID
	metadataPath := filepath.Join(dirPath, "metadata.json")
	metadataData, err := os.ReadFile(metadataPath)
	if err != nil {
		return 0, fmt.Errorf("failed to read metadata file %s: %v", metadataPath, err)
	}

	metadata, err := parseMetadataJSON(metadataData)
	if err != nil {
		return 0, fmt.Errorf("failed to parse metadata file %s: %v", metadataPath, err)
	}

	// Check if the model already exists - only process metrics for existing models
	existingModel, err := modelRepo.GetByName(metadata.ID)
	if err != nil {
		return 0, fmt.Errorf("failed to check for existing model: %v", err)
	}
	if existingModel == nil {
		glog.V(2).Infof("Model %s does not exist in database, skipping metrics processing", metadata.ID)
		return 0, nil
	}

	modelID := *existingModel.GetID()
	glog.V(2).Infof("Found existing model %s with ID %d, processing metrics", metadata.ID, modelID)

	// Use batch processing for all artifacts
	return processModelArtifactsBatch(dirPath, modelID, metadata.ID, metadata.OverallAccuracy, metricsArtifactRepo, metricsArtifactTypeID)
}

// processModelArtifactsBatch processes all metric artifacts for a model in
// batch: parse both NDJSON files, load existing artifacts with ONE query,
// diff in memory, then insert the missing artifacts with ONE BatchSave.
// Returns the number of artifacts created.
func processModelArtifactsBatch(dirPath string, modelID int32, modelName string, overallAccuracy *float64, metricsArtifactRepo dbmodels.CatalogMetricsArtifactRepository, metricsArtifactTypeID int32) (int, error) {
	var evaluationRecords []evaluationRecord
	var performanceRecords []performanceRecord

	// Parse evaluation metrics if the file exists; parse failures are logged
	// and treated as "no records" rather than aborting the model.
	evaluationsPath := filepath.Join(dirPath, "evaluations.ndjson")
	if _, err := os.Stat(evaluationsPath); err == nil {
		records, err := parseEvaluationFile(evaluationsPath)
		if err != nil {
			glog.Errorf("Failed to parse evaluations file for %s: %v", modelName, err)
		} else {
			evaluationRecords = records
		}
	}

	// Parse performance metrics if the file exists (same best-effort policy).
	performancePath := filepath.Join(dirPath, "performance.ndjson")
	if _, err := os.Stat(performancePath); err == nil {
		records, err := parsePerformanceFile(performancePath)
		if err != nil {
			glog.Errorf("Failed to parse performance file for %s: %v", modelName, err)
		} else {
			performanceRecords = records
		}
	}

	totalRecords := len(evaluationRecords) + len(performanceRecords)
	if totalRecords == 0 {
		return 0, nil
	}

	// Single DB query to get ALL existing artifacts for this model.
	existingArtifactsList, err := metricsArtifactRepo.List(dbmodels.CatalogMetricsArtifactListOptions{
		ParentResourceID: &modelID,
	})
	if err != nil {
		return 0, fmt.Errorf("failed to load existing artifacts for model: %v", err)
	}

	// Build in-memory set for O(1) lookups by external_id.
	existingArtifactsMap := make(map[string]bool, existingArtifactsList.Size)
	for _, artifact := range existingArtifactsList.Items {
		if artifact.GetAttributes() != nil && artifact.GetAttributes().ExternalID != nil {
			existingArtifactsMap[*artifact.GetAttributes().ExternalID] = true
		}
	}

	artifactsToInsert := make([]*dbmodels.CatalogMetricsArtifactImpl, 0, totalRecords)

	// All evaluation records merge into a single accuracy-metrics artifact,
	// identified by a deterministic external ID.
	if len(evaluationRecords) > 0 {
		externalID := fmt.Sprintf("accuracy-metrics-model-%d", modelID)
		if !existingArtifactsMap[externalID] {
			artifact := createAccuracyMetricsArtifact(evaluationRecords, modelID, metricsArtifactTypeID, overallAccuracy, nil, nil)
			artifactsToInsert = append(artifactsToInsert, artifact)
		} else {
			glog.V(2).Infof("Accuracy metrics artifact already exists, skipping")
		}
	}

	// Performance records map 1:1 to artifacts keyed by the record's own ID.
	for _, perfRecord := range performanceRecords {
		if !existingArtifactsMap[perfRecord.ID] {
			artifact := createPerformanceArtifact(perfRecord, modelID, metricsArtifactTypeID, nil, nil)
			artifactsToInsert = append(artifactsToInsert, artifact)
		} else {
			glog.V(2).Infof("Performance artifact %s already exists, skipping", perfRecord.ID)
		}
	}

	if len(artifactsToInsert) == 0 {
		glog.V(2).Infof("All artifacts already exist for model %s, nothing to insert", modelName)
		return 0, nil
	}

	// Convert to the interface slice BatchSave expects.
	artifactsToSave := make([]dbmodels.CatalogMetricsArtifact, len(artifactsToInsert))
	for i, artifact := range artifactsToInsert {
		artifactsToSave[i] = artifact
	}

	savedArtifacts, err := metricsArtifactRepo.BatchSave(artifactsToSave, &modelID)
	if err != nil {
		return 0, fmt.Errorf("failed to batch save artifacts: %v", err)
	}

	return len(savedArtifacts), nil
}

// parseEvaluationFile reads and parses an evaluations.ndjson file.
// Blank lines are skipped; malformed lines are logged and skipped.
func parseEvaluationFile(filePath string) ([]evaluationRecord, error) {
	file, err := os.Open(filePath)
	if err != nil {
		return nil, fmt.Errorf("failed to open evaluation file %s: %v", filePath, err)
	}
	defer file.Close()

	scanner := bufio.NewScanner(file)
	// FIX: bufio.Scanner's default 64KiB token limit would fail the whole
	// file on a single long NDJSON line; allow lines up to 1MiB.
	scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024)
	evaluationRecords := []evaluationRecord{}

	for scanner.Scan() {
		line := scanner.Text()
		if strings.TrimSpace(line) == "" {
			continue
		}

		var evalRecord evaluationRecord
		if err := json.Unmarshal([]byte(line), &evalRecord); err != nil {
			glog.Errorf("Failed to parse evaluation record: %v", err)
			continue
		}

		evaluationRecords = append(evaluationRecords, evalRecord)
	}

	if err := scanner.Err(); err != nil {
		return nil, fmt.Errorf("error reading evaluation file: %v", err)
	}

	return evaluationRecords, nil
}

// parsePerformanceFile reads and parses a performance.ndjson file
func parsePerformanceFile(filePath string) ([]performanceRecord, error) {
	file, err := os.Open(filePath)
	if err != nil {
		return nil, fmt.Errorf("failed to open performance file %s: %v", filePath, err)
	}
	defer file.Close()

	scanner := bufio.NewScanner(file)
	// FIX: same 64KiB default-token-limit concern as parseEvaluationFile.
	scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024)
performanceRecords := []performanceRecord{} + + for scanner.Scan() { + line := scanner.Text() + if strings.TrimSpace(line) == "" { + continue + } + + var perfRecord performanceRecord + if err := json.Unmarshal([]byte(line), &perfRecord); err != nil { + glog.Errorf("Failed to parse performance record: %v", err) + continue + } + + performanceRecords = append(performanceRecords, perfRecord) + } + + if err := scanner.Err(); err != nil { + return nil, fmt.Errorf("error reading performance file: %v", err) + } + + return performanceRecords, nil +} + +// createAccuracyMetricsArtifact creates a single metrics artifact from all evaluation records +func createAccuracyMetricsArtifact(evalRecords []evaluationRecord, modelID int32, typeID int32, overallAccuracy *float64, existingID *int32, existingCreateTime *int64) *dbmodels.CatalogMetricsArtifactImpl { + artifactName := fmt.Sprintf("accuracy-metrics-model-%d", modelID) + externalID := fmt.Sprintf("accuracy-metrics-model-%d", modelID) + + // Use existing create time if provided, otherwise find from evaluation records + createTime := existingCreateTime + var updateTime *int64 + + for _, evalRecord := range evalRecords { + if existingCreateTime == nil { + if createdAtFloat, ok := evalRecord.CustomProperties["created_at"].(float64); ok { + createdAt := int64(createdAtFloat) + if createTime == nil || createdAt < *createTime { + createTime = &createdAt + } + } + } + if updatedAtFloat, ok := evalRecord.CustomProperties["updated_at"].(float64); ok { + updatedAt := int64(updatedAtFloat) + if updateTime == nil || updatedAt > *updateTime { + updateTime = &updatedAt + } + } + delete(evalRecord.CustomProperties, "updated_at") + delete(evalRecord.CustomProperties, "created_at") + } + + // Properties can be empty or contain general metadata + properties := []models.Properties{} + + // Create custom properties - simple mapping of benchmark_name to score_value + customProperties := []models.Properties{} + + for _, evalRecord := range 
evalRecords { + // Add the benchmark score as a named property (e.g., "aime24": 63.3333) + if score, ok := evalRecord.CustomProperties["score"].(float64); ok { + customProperties = append(customProperties, models.Properties{ + Name: evalRecord.Benchmark, + DoubleValue: &score, + }) + } + } + + // Add overall_average custom property from metadata.json overall_accuracy field + if overallAccuracy != nil { + customProperties = append(customProperties, models.Properties{ + Name: "overall_average", + DoubleValue: overallAccuracy, + }) + } + + // Create the metrics artifact with metricsType set to accuracy-metrics + metricsArtifact := &dbmodels.CatalogMetricsArtifactImpl{ + ID: existingID, // Use existing ID if updating + TypeID: &typeID, + Attributes: &dbmodels.CatalogMetricsArtifactAttributes{ + Name: &artifactName, + ExternalID: &externalID, + CreateTimeSinceEpoch: createTime, + LastUpdateTimeSinceEpoch: updateTime, + MetricsType: dbmodels.MetricsTypeAccuracy, + }, + Properties: &properties, + CustomProperties: &customProperties, + } + + return metricsArtifact +} + +// createPerformanceArtifact creates a metrics artifact from performance record +func createPerformanceArtifact(perfRecord performanceRecord, modelID int32, typeID int32, existingID *int32, existingCreateTime *int64) *dbmodels.CatalogMetricsArtifactImpl { + // Create artifact name (must be unique per artifact) + artifactName := fmt.Sprintf("performance-%s", perfRecord.ID) + + // Use existing create time if provided, otherwise extract from custom properties + createTime := existingCreateTime + var updateTime *int64 + + if existingCreateTime == nil { + if createdAtNum, ok := perfRecord.CustomProperties["created_at"].(json.Number); ok { + createdAt, err := createdAtNum.Int64() + if err == nil { + createTime = &createdAt + } else { + glog.Warningf("%s: invalid created_at value: %v", artifactName, err) + } + } + } + if createTime == nil { + createTime = apiutils.Of(time.Now().UnixMilli()) + } + + if 
updatedAtNum, ok := perfRecord.CustomProperties["updated_at"].(json.Number); ok { + updatedAt, err := updatedAtNum.Int64() + if err == nil { + updateTime = &updatedAt + } else { + glog.Warningf("%s: invalid updated_at value: %v", artifactName, err) + } + } + if updateTime == nil { + updateTime = apiutils.Of(time.Now().UnixMilli()) + } + delete(perfRecord.CustomProperties, "updated_at") + delete(perfRecord.CustomProperties, "created_at") + + // Properties can be empty - all data goes in custom properties + properties := []models.Properties{} + + // Create custom properties - simple mapping of all performance data + customProperties := []models.Properties{} + + // Add all fields from the performance record as custom properties + for key, value := range perfRecord.CustomProperties { + prop := models.Properties{Name: key} + + // Handle different value types + switch v := value.(type) { + case string: + prop.StringValue = &v + case float64: + prop.DoubleValue = &v + case int64: + prop.SetInt64Value(v) + case int: + intVal := int32(v) + prop.IntValue = &intVal + case bool: + prop.BoolValue = &v + case json.Number: + if n, err := v.Int64(); err == nil { + prop.SetInt64Value(n) + } else if f, err := v.Float64(); err == nil { + prop.DoubleValue = &f + } else { + // This shouldn't happen, but convert it to a string if it does. 
				strVal := v.String()
				prop.StringValue = &strVal
			}
		default:
			// Convert other types to string representation
			strVal := fmt.Sprintf("%v", v)
			prop.StringValue = &strVal
		}

		customProperties = append(customProperties, prop)
	}

	// Create the metrics artifact with metricsType set to performance-metrics
	metricsArtifact := &dbmodels.CatalogMetricsArtifactImpl{
		ID:     existingID, // Use existing ID if updating
		TypeID: &typeID,
		Attributes: &dbmodels.CatalogMetricsArtifactAttributes{
			Name:                     &artifactName,
			ExternalID:               &perfRecord.ID,
			CreateTimeSinceEpoch:     createTime,
			LastUpdateTimeSinceEpoch: updateTime,
			MetricsType:              dbmodels.MetricsTypePerformance,
		},
		Properties:       &properties,
		CustomProperties: &customProperties,
	}

	return metricsArtifact
}
diff --git a/catalog/internal/catalog/performance_metrics_test.go b/catalog/internal/catalog/performance_metrics_test.go
new file mode 100644
index 0000000000..0bf8120786
--- /dev/null
+++ b/catalog/internal/catalog/performance_metrics_test.go
@@ -0,0 +1,867 @@
package catalog

import (
	"encoding/json"
	"testing"
)

// TestParseMetadataJSON verifies that parseMetadataJSON extracts the required
// "id" field across a range of well-formed inputs and rejects malformed ones.
func TestParseMetadataJSON(t *testing.T) {
	tests := []struct {
		name     string
		jsonData string
		want     metadataJSON
		wantErr  bool
	}{
		{
			name: "complete metadata with all core fields",
			jsonData: `{
				"id": "test-model-123",
				"description": "A test model for unit testing",
				"readme": "# Test Model\nThis is a test model.",
				"maturity": "stable",
				"languages": ["python", "go"],
				"tasks": ["classification", "regression"],
				"provider_name": "test-provider",
				"logo": "https://example.com/logo.png",
				"license": "MIT",
				"license_link": "https://opensource.org/licenses/MIT",
				"library_name": "test-library",
				"created_at": 1609459200,
				"updated_at": 1609545600
			}`,
			want: metadataJSON{
				ID: "test-model-123",
			},
			wantErr: false,
		},
		{
			name: "minimal metadata with only required fields",
			jsonData: `{
				"id": "minimal-model"
			}`,
			want: metadataJSON{
				ID: "minimal-model",
			},
			wantErr: false,
		},
		{
			name: "metadata with custom properties",
			jsonData: `{
				"id": "custom-model",
				"description": "Model with custom properties",
				"custom_field_string": "custom value",
				"custom_field_number": 42,
				"custom_field_float": 3.14,
				"custom_field_bool": true,
				"custom_field_array": ["item1", "item2"],
				"custom_field_object": {"nested": "value"}
			}`,
			want: metadataJSON{
				ID: "custom-model",
			},
			wantErr: false,
		},
		{
			name: "metadata with mixed core and custom fields",
			jsonData: `{
				"id": "mixed-model",
				"description": "Mixed fields model",
				"languages": ["python"],
				"custom_version": "1.0.0",
				"custom_tags": ["ml", "ai"],
				"custom_config": {
					"batch_size": 32,
					"learning_rate": 0.001
				}
			}`,
			want: metadataJSON{
				ID: "mixed-model",
			},
			wantErr: false,
		},
		{
			name: "empty arrays and objects",
			jsonData: `{
				"id": "empty-arrays-model",
				"languages": [],
				"tasks": [],
				"custom_empty_array": [],
				"custom_empty_object": {}
			}`,
			want: metadataJSON{
				ID: "empty-arrays-model",
			},
			wantErr: false,
		},
		{
			name: "zero timestamps",
			jsonData: `{
				"id": "zero-timestamps-model",
				"created_at": 0,
				"updated_at": 0
			}`,
			want: metadataJSON{
				ID: "zero-timestamps-model",
			},
			wantErr: false,
		},
		{
			name: "null values in custom properties",
			jsonData: `{
				"id": "null-values-model",
				"custom_null_field": null,
				"custom_string": "not null"
			}`,
			want: metadataJSON{
				ID: "null-values-model",
			},
			wantErr: false,
		},
		{
			name:     "invalid JSON",
			jsonData: `{"id": "invalid-json", "description":}`,
			want:     metadataJSON{},
			wantErr:  true,
		},
		{
			name:     "empty JSON object",
			jsonData: `{}`,
			want:     metadataJSON{},
			wantErr:  true, // Should error because ID is required
		},
		{
			name:     "missing ID field",
			jsonData: `{"description": "has description but no id"}`,
			want:     metadataJSON{},
			wantErr:  true, // Should error because ID is required
		},
		{
			name: "JSON with type mismatches should fail",
			jsonData: `{
				"id": 123,
				"languages": "not-an-array",
				"created_at": "not-a-number"
			}`,
			want:    metadataJSON{},
			wantErr: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := parseMetadataJSON([]byte(tt.jsonData))

			if (err != nil) != tt.wantErr {
				t.Errorf("parseMetadataJSON() error = %v, wantErr %v", err, tt.wantErr)
				return
			}

			if err != nil {
				return // If we expected an error and got one, we're done
			}

			// Compare all fields
			if got.ID != tt.want.ID {
				t.Errorf("parseMetadataJSON() ID = %v, want %v", got.ID, tt.want.ID)
			}
		})
	}
}

// TestParseMetadataJSON_EdgeCases checks non-object top-level JSON values,
// all of which must be rejected.
func TestParseMetadataJSON_EdgeCases(t *testing.T) {
	tests := []struct {
		name     string
		jsonData string
		wantErr  bool
	}{
		{
			name:     "null JSON",
			jsonData: `null`,
			wantErr:  true, // Should error because ID will be empty
		},
		{
			name:     "array instead of object",
			jsonData: `["not", "an", "object"]`,
			wantErr:  true,
		},
		{
			name:     "string instead of object",
			jsonData: `"not an object"`,
			wantErr:  true,
		},
		{
			name:     "number instead of object",
			jsonData: `42`,
			wantErr:  true,
		},
		{
			name:     "boolean instead of object",
			jsonData: `true`,
			wantErr:  true,
		},
		{
			name:     "empty string",
			jsonData: ``,
			wantErr:  true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			_, err := parseMetadataJSON([]byte(tt.jsonData))

			if (err != nil) != tt.wantErr {
				t.Errorf("parseMetadataJSON() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}

func TestParseMetadataJSON_OnlyIDMatters(t *testing.T) {
	// Test that only the ID field is extracted, other fields are ignored
	jsonData := `{
		"id": "test-id",
		"custom_field": "ignored"
	}`

	metadata, err := parseMetadataJSON([]byte(jsonData))
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}

	// Verify that only the ID field is populated
	if metadata.ID != "test-id" {
		t.Errorf("ID = %v, want %v", metadata.ID, "test-id")
	}
}

// TestOverallAccuracyToOverallAverage covers the flow of metadata.json's
// overall_accuracy into the accuracy artifact's overall_average property.
func TestOverallAccuracyToOverallAverage(t *testing.T) {
	t.Run("parse overall_accuracy from metadata", func(t *testing.T) {
		tests := []struct {
			name      string
			jsonData  string
			wantNil   bool
			wantValue float64
		}{
			{
				name:      "overall_accuracy present",
				jsonData:  `{"id": "model-1", "overall_accuracy": 85.5}`,
				wantNil:   false,
				wantValue: 85.5,
			},
			{
				name:      "overall_accuracy is zero",
				jsonData:  `{"id": "model-2", "overall_accuracy": 0}`,
				wantNil:   false,
				wantValue: 0.0,
			},
			{
				name:     "overall_accuracy is null",
				jsonData: `{"id": "model-3", "overall_accuracy": null}`,
				wantNil:  true,
			},
			{
				name:     "overall_accuracy missing",
				jsonData: `{"id": "model-4"}`,
				wantNil:  true,
			},
		}

		for _, tt := range tests {
			t.Run(tt.name, func(t *testing.T) {
				metadata, err := parseMetadataJSON([]byte(tt.jsonData))
				if err != nil {
					t.Fatalf("parseMetadataJSON() error = %v", err)
				}

				if tt.wantNil {
					if metadata.OverallAccuracy != nil {
						t.Errorf("OverallAccuracy = %v, want nil", *metadata.OverallAccuracy)
					}
				} else {
					if metadata.OverallAccuracy == nil {
						t.Errorf("OverallAccuracy = nil, want %v", tt.wantValue)
					} else if *metadata.OverallAccuracy != tt.wantValue {
						t.Errorf("OverallAccuracy = %v, want %v", *metadata.OverallAccuracy, tt.wantValue)
					}
				}
			})
		}
	})

	t.Run("artifact has overall_average when overall_accuracy provided", func(t *testing.T) {
		overallAccuracy := 87.5
		evalRecords := []evaluationRecord{
			{Benchmark: "mmlu", CustomProperties: map[string]interface{}{"score": 90.0}},
		}

		artifact := createAccuracyMetricsArtifact(evalRecords, 1, 100, &overallAccuracy, nil, nil)

		found := false
		for _, prop := range *artifact.CustomProperties {
			if prop.Name == "overall_average" && prop.DoubleValue != nil {
				if *prop.DoubleValue != overallAccuracy {
					t.Errorf("overall_average = %v, want %v", *prop.DoubleValue, overallAccuracy)
				}
				found = true
				break
			}
		}
		if !found {
			t.Error("overall_average custom property not found in artifact")
		}
	})

	t.Run("artifact has no overall_average when overall_accuracy is nil", func(t *testing.T) {
		evalRecords := []evaluationRecord{
			{Benchmark: "mmlu", CustomProperties: map[string]interface{}{"score": 90.0}},
		}

		artifact := createAccuracyMetricsArtifact(evalRecords, 1, 100, nil, nil, nil)

		for _, prop := range *artifact.CustomProperties {
			if prop.Name == "overall_average" {
				t.Error("overall_average should not exist when overall_accuracy is nil")
			}
		}
	})
}

// TestEvaluationRecordUnmarshalJSON checks core-field extraction and the
// pass-through of all fields (numbers as float64) into CustomProperties.
func TestEvaluationRecordUnmarshalJSON(t *testing.T) {
	tests := []struct {
		name             string
		jsonData         string
		wantModelID      string
		wantBenchmark    string
		wantCustomProps  map[string]interface{}
		wantErr          bool
		checkCustomProps bool
	}{
		{
			name: "complete evaluation record",
			jsonData: `{
				"model_id": "test-model-123",
				"benchmark": "aime24",
				"score": 63.3333,
				"created_at": 1609459200,
				"updated_at": 1609545600
			}`,
			wantModelID:   "test-model-123",
			wantBenchmark: "aime24",
			wantCustomProps: map[string]interface{}{
				"model_id":   "test-model-123",
				"benchmark":  "aime24",
				"score":      63.3333,
				"created_at": float64(1609459200),
				"updated_at": float64(1609545600),
			},
			wantErr:          false,
			checkCustomProps: true,
		},
		{
			name: "minimal evaluation record with only core fields",
			jsonData: `{
				"model_id": "minimal-model",
				"benchmark": "test-benchmark"
			}`,
			wantModelID:   "minimal-model",
			wantBenchmark: "test-benchmark",
			wantCustomProps: map[string]interface{}{
				"model_id":  "minimal-model",
				"benchmark": "test-benchmark",
			},
			wantErr:          false,
			checkCustomProps: true,
		},
		{
			name: "evaluation record with custom properties",
			jsonData: `{
				"model_id": "custom-model",
				"benchmark": "custom-bench",
				"score": 95.5,
				"custom_field_string": "custom value",
				"custom_field_number": 42,
				"custom_field_float": 3.14,
				"custom_field_bool": true
			}`,
			wantModelID:   "custom-model",
			wantBenchmark: "custom-bench",
			wantCustomProps: map[string]interface{}{
				"model_id":            "custom-model",
				"benchmark":           "custom-bench",
				"score":               95.5,
				"custom_field_string": "custom value",
				"custom_field_number": float64(42),
				"custom_field_float":  3.14,
				"custom_field_bool":   true,
			},
			wantErr:          false,
			checkCustomProps: true,
		},
		{
			name: "evaluation record with nested objects",
			jsonData: `{
				"model_id": "nested-model",
				"benchmark": "nested-bench",
				"custom_object": {
					"nested_key": "nested_value",
					"nested_number": 123
				},
				"custom_array": ["item1", "item2", "item3"]
			}`,
			wantModelID:      "nested-model",
			wantBenchmark:    "nested-bench",
			wantErr:          false,
			checkCustomProps: false, // Don't check deep equality for complex nested structures
		},
		{
			name: "evaluation record with null values",
			jsonData: `{
				"model_id": "null-model",
				"benchmark": "null-bench",
				"null_field": null,
				"score": 50.0
			}`,
			wantModelID:   "null-model",
			wantBenchmark: "null-bench",
			wantCustomProps: map[string]interface{}{
				"model_id":   "null-model",
				"benchmark":  "null-bench",
				"null_field": nil,
				"score":      50.0,
			},
			wantErr:          false,
			checkCustomProps: true,
		},
		{
			name: "evaluation record missing core fields",
			jsonData: `{
				"score": 75.5,
				"created_at": 1609459200
			}`,
			wantModelID:      "",
			wantBenchmark:    "",
			wantErr:          false,
			checkCustomProps: false,
		},
		{
			name: "evaluation record with wrong type for core fields",
			jsonData: `{
				"model_id": 123,
				"benchmark": 456,
				"score": 85.0
			}`,
			wantModelID:      "",
			wantBenchmark:    "",
			wantErr:          false,
			checkCustomProps: false,
		},
		{
			name:             "empty JSON object",
			jsonData:         `{}`,
			wantModelID:      "",
			wantBenchmark:    "",
			wantErr:          false,
			checkCustomProps: false,
		},
		{
			name:             "invalid JSON",
			jsonData:         `{"model_id": "invalid", "benchmark":}`,
			wantErr:          true,
			checkCustomProps: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var er evaluationRecord
			err := er.UnmarshalJSON([]byte(tt.jsonData))

			if (err != nil) != tt.wantErr {
				t.Errorf("evaluationRecord.UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr)
				return
			}

			if err != nil {
				return // If we expected an error and got one, we're done
			}

			// Check core fields
			if er.ModelID != tt.wantModelID {
				t.Errorf("ModelID = %v, want %v", er.ModelID, tt.wantModelID)
			}
			if er.Benchmark != tt.wantBenchmark {
				t.Errorf("Benchmark = %v, want %v", er.Benchmark, tt.wantBenchmark)
			}

			// Check CustomProperties
			if er.CustomProperties == nil {
				t.Error("CustomProperties should not be nil")
			}

			// Optionally check custom properties in detail
			if tt.checkCustomProps {
				if len(er.CustomProperties) != len(tt.wantCustomProps) {
					t.Errorf("CustomProperties length = %v, want %v", len(er.CustomProperties), len(tt.wantCustomProps))
				}
				for key, wantValue := range tt.wantCustomProps {
					gotValue, exists := er.CustomProperties[key]
					if !exists {
						t.Errorf("CustomProperties missing key %v", key)
						continue
					}
					if gotValue != wantValue {
						t.Errorf("CustomProperties[%v] = %v (type %T), want %v (type %T)",
							key, gotValue, gotValue, wantValue, wantValue)
					}
				}
			}
		})
	}
}

// TestPerformanceRecordUnmarshalJSON mirrors the evaluation-record tests but
// accounts for numbers arriving as json.Number (UseNumber decoding).
func TestPerformanceRecordUnmarshalJSON(t *testing.T) {
	tests := []struct {
		name             string
		jsonData         string
		wantID           string
		wantModelID      string
		wantCustomProps  map[string]interface{}
		wantErr          bool
		checkCustomProps bool
	}{
		{
			name: "complete performance record",
			jsonData: `{
				"id": "perf-123",
				"model_id": "test-model-456",
				"throughput": 1000.5,
				"latency_p50": 10.5,
				"latency_p95": 25.3,
				"latency_p99": 50.1,
				"created_at": 1609459200,
				"updated_at": 1609545600
			}`,
			wantID:      "perf-123",
			wantModelID: "test-model-456",
			wantCustomProps: map[string]interface{}{
				"id":          "perf-123",
				"model_id":    "test-model-456",
				"throughput":  1000.5,
				"latency_p50": 10.5,
				"latency_p95": 25.3,
				"latency_p99": 50.1,
				"created_at":  float64(1609459200),
				"updated_at":  float64(1609545600),
			},
			wantErr:          false,
			checkCustomProps: true,
		},
		{
			name: "minimal performance record with only core fields",
			jsonData: `{
				"id": "minimal-perf",
				"model_id": "minimal-model"
			}`,
			wantID:      "minimal-perf",
			wantModelID: "minimal-model",
			wantCustomProps: map[string]interface{}{
				"id":       "minimal-perf",
				"model_id": "minimal-model",
			},
			wantErr:          false,
			checkCustomProps: true,
		},
		{
			name: "performance record with custom properties",
			jsonData: `{
				"id": "custom-perf",
				"model_id": "custom-model",
				"throughput": 500.0,
				"custom_field_string": "custom value",
				"custom_field_number": 42,
				"custom_field_float": 3.14,
				"custom_field_bool": true
			}`,
			wantID:      "custom-perf",
			wantModelID: "custom-model",
			wantCustomProps: map[string]interface{}{
				"id":                  "custom-perf",
				"model_id":            "custom-model",
				"throughput":          500.0,
				"custom_field_string": "custom value",
				"custom_field_number": float64(42),
				"custom_field_float":  3.14,
				"custom_field_bool":   true,
			},
			wantErr:          false,
			checkCustomProps: true,
		},
		{
			name: "performance record with nested objects and arrays",
			jsonData: `{
				"id": "nested-perf",
				"model_id": "nested-model",
				"custom_object": {
					"nested_key": "nested_value",
					"nested_number": 123
				},
				"custom_array": ["item1", "item2", "item3"]
			}`,
			wantID:           "nested-perf",
			wantModelID:      "nested-model",
			wantErr:          false,
			checkCustomProps: false, // Don't check deep equality for complex nested structures
		},
		{
			name: "performance record with null values",
			jsonData: `{
				"id": "null-perf",
				"model_id": "null-model",
				"null_field": null,
				"throughput": 250.0
			}`,
			wantID:      "null-perf",
			wantModelID: "null-model",
			wantCustomProps: map[string]interface{}{
				"id":         "null-perf",
				"model_id":   "null-model",
				"null_field": nil,
				"throughput": 250.0,
			},
			wantErr:          false,
			checkCustomProps: true,
		},
		{
			name: "performance record missing core fields",
			jsonData: `{
				"throughput": 100.0,
				"latency_p50": 5.0
			}`,
			wantID:           "",
			wantModelID:      "",
			wantErr:          false,
			checkCustomProps: false,
		},
		{
			name: "performance record with wrong type for core fields",
			jsonData: `{
				"id": 123,
				"model_id": 456,
				"throughput": 500.0
			}`,
			wantID:           "",
			wantModelID:      "",
			wantErr:          false,
			checkCustomProps: false,
		},
		{
			name: "performance record with zero values",
			jsonData: `{
				"id": "zero-perf",
				"model_id": "zero-model",
				"throughput": 0,
				"latency_p50": 0.0,
				"created_at": 0
			}`,
			wantID:      "zero-perf",
			wantModelID: "zero-model",
			wantCustomProps: map[string]interface{}{
				"id":          "zero-perf",
				"model_id":    "zero-model",
				"throughput":  float64(0),
				"latency_p50": 0.0,
				"created_at":  float64(0),
			},
			wantErr:          false,
			checkCustomProps: true,
		},
		{
			name:             "empty JSON object",
			jsonData:         `{}`,
			wantID:           "",
			wantModelID:      "",
			wantErr:          false,
			checkCustomProps: false,
		},
		{
			name:             "invalid JSON",
			jsonData:         `{"id": "invalid", "model_id":}`,
			wantErr:          true,
			checkCustomProps: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var pr performanceRecord
			err := pr.UnmarshalJSON([]byte(tt.jsonData))

			if (err != nil) != tt.wantErr {
				t.Errorf("performanceRecord.UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr)
				return
			}

			if err != nil {
				return // If we expected an error and got one, we're done
			}

			// Check core fields
			if pr.ID != tt.wantID {
				t.Errorf("ID = %v, want %v", pr.ID, tt.wantID)
			}
			if pr.ModelID != tt.wantModelID {
				t.Errorf("ModelID = %v, want %v", pr.ModelID, tt.wantModelID)
			}

			// Check CustomProperties
			if pr.CustomProperties == nil {
				t.Error("CustomProperties should not be nil")
			}

			// Optionally check custom properties in detail
			if tt.checkCustomProps {
				if len(pr.CustomProperties) != len(tt.wantCustomProps) {
					t.Errorf("CustomProperties length = %v, want %v", len(pr.CustomProperties), len(tt.wantCustomProps))
				}
				for key, wantValue := range tt.wantCustomProps {
					gotValue, exists := pr.CustomProperties[key]
					if !exists {
						t.Errorf("CustomProperties missing key %v", key)
						continue
					}

					// Translate json.Number values
					if jsonNumber, ok := gotValue.(json.Number); ok {
						var newValue any
						switch wantValue.(type) {
						case float64:
							newValue, err = jsonNumber.Float64()
						case int, int32, int64:
							newValue, err = jsonNumber.Int64()
						}
						if err == nil {
							gotValue = newValue
						}
					}

					if gotValue != wantValue {
						t.Errorf("CustomProperties[%v] = %v (type %T), want %v (type %T)",
							key, gotValue, gotValue, wantValue, wantValue)
					}
				}
			}
		})
	}
}

func TestEvaluationRecordUnmarshalJSON_CoreFieldsInCustomProperties(t *testing.T) {
	// Test that core fields are included in CustomProperties
	jsonData := `{
		"model_id": "test-model",
		"benchmark": "test-benchmark",
		"score": 90.5
	}`

	var er evaluationRecord
	err := er.UnmarshalJSON([]byte(jsonData))
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}

	// Verify core fields are in CustomProperties
	if er.CustomProperties["model_id"] != "test-model" {
		t.Errorf("CustomProperties[model_id] = %v, want %v", er.CustomProperties["model_id"], "test-model")
	}
	if er.CustomProperties["benchmark"] != "test-benchmark" {
		t.Errorf("CustomProperties[benchmark] = %v, want %v", er.CustomProperties["benchmark"], "test-benchmark")
	}
	if er.CustomProperties["score"] != 90.5 {
		t.Errorf("CustomProperties[score] = %v, want %v", er.CustomProperties["score"], 90.5)
	}
}

func TestPerformanceRecordUnmarshalJSON_CoreFieldsInCustomProperties(t *testing.T) {
	// Test that core fields are included in CustomProperties
	jsonData := `{
		"id": "perf-id",
		"model_id": "test-model",
		"throughput": 1000.0
	}`

	var pr performanceRecord
	err := pr.UnmarshalJSON([]byte(jsonData))
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}

	// Verify core fields are in CustomProperties
	if pr.CustomProperties["id"] != "perf-id" {
		t.Errorf("CustomProperties[id] = %v, want %v", pr.CustomProperties["id"], "perf-id")
	}
	if pr.CustomProperties["model_id"] != "test-model" {
		t.Errorf("CustomProperties[model_id] = %v, want %v", pr.CustomProperties["model_id"], "test-model")
	}
	// Numbers decode as json.Number here, so convert before comparing.
	if v, _ := pr.CustomProperties["throughput"].(json.Number).Float64(); v != 1000.0 {
		t.Errorf("CustomProperties[throughput] = %v, want %v", pr.CustomProperties["throughput"], 1000.0)
	}
}

// TestUnmarshalJSON_EdgeCases runs the same non-object inputs against both
// record types; note `null` is accepted (decodes to an empty map).
func TestUnmarshalJSON_EdgeCases(t *testing.T) {
	tests := []struct {
		name     string
		jsonData string
		wantErr  bool
	}{
		{
			name:     "null JSON for evaluationRecord",
			jsonData: `null`,
			wantErr:  false, // null JSON unmarshals to empty map, not an error
		},
		{
			name:     "array instead of object for evaluationRecord",
			jsonData: `["not", "an", "object"]`,
			wantErr:  true,
		},
		{
			name:     "string instead of object for evaluationRecord",
			jsonData: `"not an object"`,
			wantErr:  true,
		},
		{
			name:     "number instead of object",
			jsonData: `42`,
			wantErr:  true,
		},
		{
			name:     "boolean instead of object",
			jsonData: `true`,
			wantErr:  true,
		},
		{
			name:     "empty string",
			jsonData: ``,
			wantErr:  true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name+" (evaluationRecord)", func(t *testing.T) {
			var er evaluationRecord
			err := er.UnmarshalJSON([]byte(tt.jsonData))

			if (err != nil) != tt.wantErr {
				t.Errorf("evaluationRecord.UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr)
			}
		})

		t.Run(tt.name+" (performanceRecord)", func(t *testing.T) {
			var pr performanceRecord
			err := pr.UnmarshalJSON([]byte(tt.jsonData))

			if (err != nil) != tt.wantErr {
				t.Errorf("performanceRecord.UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
diff --git a/catalog/internal/catalog/sources.go b/catalog/internal/catalog/sources.go
new file mode 100644
index 0000000000..5c92432888
--- /dev/null
+++
b/catalog/internal/catalog/sources.go @@ -0,0 +1,105 @@ +package catalog + +import ( + "fmt" + "maps" + "slices" + "strings" + "sync" + + model "github.com/kubeflow/model-registry/catalog/pkg/openapi" +) + +type SourceCollection struct { + mu sync.RWMutex + + // origins keeps track of where a source came from by some name (intended to be a file path). + origins map[string][]string + + sources map[string]model.CatalogSource +} + +func NewSourceCollection() *SourceCollection { + return &SourceCollection{ + origins: map[string][]string{}, + sources: map[string]model.CatalogSource{}, + } +} + +// Merge adds sources from one origin (ordinarily, a file path--but any unique +// string will do), completely replacing anything that was previously from that +// origin. +func (sc *SourceCollection) Merge(origin string, sources map[string]model.CatalogSource) error { + sc.mu.Lock() + defer sc.mu.Unlock() + + // Remove everything that was set before for this origin so that + // unreferenced sources are deleted. + for _, id := range sc.origins[origin] { + delete(sc.sources, id) + } + sc.origins[origin] = slices.Collect(maps.Keys(sources)) + + for sourceID, source := range sources { + // Everything was deleted above, so if there's a source that + // already exists it must have come from another origin (file). + if _, exists := sc.sources[sourceID]; exists { + return fmt.Errorf("source %s exists from multiple origins", sourceID) + } + + sc.sources[sourceID] = source + } + + return nil +} + +func (sc *SourceCollection) All() map[string]model.CatalogSource { + sc.mu.RLock() + defer sc.mu.RUnlock() + + return sc.sources +} + +func (sc *SourceCollection) Get(name string) (src model.CatalogSource, ok bool) { + sc.mu.RLock() + defer sc.mu.RUnlock() + + src, ok = sc.sources[name] + return +} + +// ByLabel returns sources that have any of the labels provided. The matching +// is case insensitive. +// +// If a label is "null", every source without a label is returned. 
+func (sc *SourceCollection) ByLabel(labels []string) []model.CatalogSource { + sc.mu.RLock() + defer sc.mu.RUnlock() + + labelMap := make(map[string]struct{}, len(labels)) + for _, label := range labels { + labelMap[strings.ToLower(label)] = struct{}{} + } + + matches := map[string]model.CatalogSource{} + + if _, hasNull := labelMap["null"]; hasNull { + for _, source := range sc.sources { + if len(source.Labels) == 0 { + matches[source.Id] = source + } + } + } + +OUTER: + for _, source := range sc.sources { + for _, label := range source.Labels { + if _, match := labelMap[strings.ToLower(label)]; match { + matches[source.Id] = source + continue OUTER + } + } + } + + return slices.Collect(maps.Values(matches)) +} diff --git a/catalog/internal/catalog/sources_test.go b/catalog/internal/catalog/sources_test.go new file mode 100644 index 0000000000..906d340b89 --- /dev/null +++ b/catalog/internal/catalog/sources_test.go @@ -0,0 +1,330 @@ +package catalog + +import ( + "reflect" + "sort" + "testing" + + model "github.com/kubeflow/model-registry/catalog/pkg/openapi" + "github.com/kubeflow/model-registry/internal/apiutils" +) + +func TestSourceCollection_ByLabel(t *testing.T) { + // Create test sources with various labels + sources := map[string]model.CatalogSource{ + "source1": { + Id: "source1", + Name: "Source 1", + Enabled: apiutils.Of(true), + Labels: []string{"frontend", "production"}, + }, + "source2": { + Id: "source2", + Name: "Source 2", + Enabled: apiutils.Of(true), + Labels: []string{"Backend", "Development"}, // Mixed case to test case insensitivity + }, + "source3": { + Id: "source3", + Name: "Source 3", + Enabled: apiutils.Of(false), + Labels: []string{"analytics", "PRODUCTION"}, // Mixed case + }, + "source4": { + Id: "source4", + Name: "Source 4", + Enabled: apiutils.Of(true), + Labels: []string{"testing", "staging"}, + }, + "source5": { + Id: "source5", + Name: "Source 5", + Enabled: apiutils.Of(true), + Labels: []string{}, // No labels + }, + } + + 
tests := []struct { + name string + labels []string + expectedSources []string // IDs of expected sources + }{ + { + name: "single label match", + labels: []string{"frontend"}, + expectedSources: []string{"source1"}, + }, + { + name: "case insensitive match", + labels: []string{"FRONTEND"}, + expectedSources: []string{"source1"}, + }, + { + name: "multiple labels - any match", + labels: []string{"frontend", "backend"}, + expectedSources: []string{"source1", "source2"}, + }, + { + name: "case insensitive multiple labels", + labels: []string{"FRONTEND", "backend"}, + expectedSources: []string{"source1", "source2"}, + }, + { + name: "production label case insensitive", + labels: []string{"production"}, + expectedSources: []string{"source1", "source3"}, + }, + { + name: "no matching labels", + labels: []string{"nonexistent"}, + expectedSources: nil, + }, + { + name: "empty labels input", + labels: []string{}, + expectedSources: nil, + }, + { + name: "multiple different labels", + labels: []string{"analytics", "testing"}, + expectedSources: []string{"source3", "source4"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Create a new SourceCollection and populate it + sc := NewSourceCollection() + err := sc.Merge("test-origin", sources) + if err != nil { + t.Fatalf("Failed to merge sources: %v", err) + } + + // Call ByLabel + result := sc.ByLabel(tt.labels) + + if tt.expectedSources == nil && result != nil { + t.Errorf("ByLabel() = %v, want %v", result, tt.expectedSources) + return + } + + // Extract IDs from result for comparison + var resultIDs []string + for _, source := range result { + resultIDs = append(resultIDs, source.Id) + } + + // Sort both slices for comparison + sort.Strings(resultIDs) + sort.Strings(tt.expectedSources) + + if !reflect.DeepEqual(resultIDs, tt.expectedSources) { + t.Errorf("ByLabel() = %v, want %v", resultIDs, tt.expectedSources) + } + + // Verify that the returned sources are complete objects, not just IDs + 
for _, source := range result { + if source.Name == "" { + t.Errorf("Returned source %s has empty name", source.Id) + } + if source.Labels == nil { + t.Errorf("Returned source %s has nil labels", source.Id) + } + } + }) + } +} + +func TestSourceCollection_ByLabel_EmptyCollection(t *testing.T) { + sc := NewSourceCollection() + + tests := []struct { + name string + labels []string + }{ + { + name: "empty collection with regular labels", + labels: []string{"frontend"}, + }, + { + name: "empty collection with null label", + labels: []string{"null"}, + }, + { + name: "empty collection with empty labels", + labels: []string{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := sc.ByLabel(tt.labels) + if len(result) != 0 { + t.Errorf("ByLabel() on empty collection should return empty slice, got %d items", len(result)) + } + }) + } +} + +func TestSourceCollection_ByLabel_NilLabels(t *testing.T) { + sc := NewSourceCollection() + + // Add a source with nil labels (edge case) + sources := map[string]model.CatalogSource{ + "source1": { + Id: "source1", + Name: "Source 1", + Enabled: apiutils.Of(true), + Labels: nil, // nil labels + }, + } + + err := sc.Merge("test-origin", sources) + if err != nil { + t.Fatalf("Failed to merge sources: %v", err) + } + + tests := []struct { + name string + labels []string + expectedCount int + }{ + { + name: "search with regular label on source with nil labels", + labels: []string{"frontend"}, + expectedCount: 0, + }, + { + name: "search with null label on source with nil labels", + labels: []string{"null"}, + expectedCount: 1, // Should return all sources including those with nil labels + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := sc.ByLabel(tt.labels) + if len(result) != tt.expectedCount { + t.Errorf("ByLabel() = %d sources, want %d", len(result), tt.expectedCount) + } + }) + } +} + +func TestSourceCollection_ByLabel_NullBehavior(t *testing.T) { + // Create 
test sources with various label configurations to test "null" behavior + sources := map[string]model.CatalogSource{ + "source_with_labels": { + Id: "source_with_labels", + Name: "Source With Labels", + Enabled: apiutils.Of(true), + Labels: []string{"frontend", "production"}, + }, + "source_empty_labels": { + Id: "source_empty_labels", + Name: "Source Empty Labels", + Enabled: apiutils.Of(true), + Labels: []string{}, // Empty labels slice + }, + "source_nil_labels": { + Id: "source_nil_labels", + Name: "Source Nil Labels", + Enabled: apiutils.Of(true), + Labels: nil, // Nil labels + }, + "source_another_with_labels": { + Id: "source_another_with_labels", + Name: "Another Source With Labels", + Enabled: apiutils.Of(false), + Labels: []string{"backend", "testing"}, + }, + } + + tests := []struct { + name string + labels []string + expectedSources []string // IDs of expected sources + description string + }{ + { + name: "null label returns sources without labels", + labels: []string{"null"}, + expectedSources: []string{"source_empty_labels", "source_nil_labels"}, + description: "Should return sources with empty or nil labels when searching for 'null'", + }, + { + name: "null label case insensitive", + labels: []string{"NULL"}, + expectedSources: []string{"source_empty_labels", "source_nil_labels"}, + description: "Should be case insensitive for 'null' label", + }, + { + name: "null label mixed case", + labels: []string{"Null"}, + expectedSources: []string{"source_empty_labels", "source_nil_labels"}, + description: "Should handle mixed case 'null' label", + }, + { + name: "null with other labels", + labels: []string{"null", "frontend"}, + expectedSources: []string{"source_empty_labels", "source_nil_labels", "source_with_labels"}, + description: "Should return sources without labels AND sources with matching labels", + }, + { + name: "null with multiple other labels", + labels: []string{"null", "frontend", "backend"}, + expectedSources: []string{"source_empty_labels", 
"source_nil_labels", "source_with_labels", "source_another_with_labels"}, + description: "Should return sources without labels AND sources with any matching labels", + }, + { + name: "multiple nulls should work same as single null", + labels: []string{"null", "NULL"}, + expectedSources: []string{"source_empty_labels", "source_nil_labels"}, + description: "Multiple 'null' variants should behave same as single 'null'", + }, + { + name: "null only matches unlabeled sources", + labels: []string{"null", "nonexistent"}, + expectedSources: []string{"source_empty_labels", "source_nil_labels"}, + description: "Should return sources without labels even when other labels don't match", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Create a new SourceCollection and populate it + sc := NewSourceCollection() + err := sc.Merge("test-origin", sources) + if err != nil { + t.Fatalf("Failed to merge sources: %v", err) + } + + // Call ByLabel + result := sc.ByLabel(tt.labels) + + // Extract IDs from result for comparison + var resultIDs []string + for _, source := range result { + resultIDs = append(resultIDs, source.Id) + } + + // Sort both slices for comparison + sort.Strings(resultIDs) + sort.Strings(tt.expectedSources) + + if !reflect.DeepEqual(resultIDs, tt.expectedSources) { + t.Errorf("ByLabel(%v) = %v, want %v\nDescription: %s", tt.labels, resultIDs, tt.expectedSources, tt.description) + } + + // Verify that each returned source is a complete object + for _, source := range result { + if source.Id == "" { + t.Errorf("Returned source has empty ID") + } + if source.Name == "" { + t.Errorf("Returned source %s has empty name", source.Id) + } + } + }) + } +} diff --git a/catalog/internal/catalog/testdata/dev-catalog-sources.yaml b/catalog/internal/catalog/testdata/dev-catalog-sources.yaml new file mode 100644 index 0000000000..1e3b2a4e51 --- /dev/null +++ b/catalog/internal/catalog/testdata/dev-catalog-sources.yaml @@ -0,0 +1,23 @@ +catalogs: + - 
name: "Organization AI Models" + id: organization_ai_models + type: yaml + enabled: true + properties: + yamlCatalogPath: dev-organization-models.yaml + labels: + - Organization AI + - name: "Validated AI Models" + id: validated_ai_models + type: yaml + enabled: true + properties: + yamlCatalogPath: dev-validated-models.yaml + labels: + - Validated AI + - name: "Community and Custom Models" + id: community_custom_models + type: yaml + enabled: true + properties: + yamlCatalogPath: dev-community-models.yaml diff --git a/catalog/internal/catalog/testdata/dev-community-models.yaml b/catalog/internal/catalog/testdata/dev-community-models.yaml new file mode 100644 index 0000000000..ac8b15285c --- /dev/null +++ b/catalog/internal/catalog/testdata/dev-community-models.yaml @@ -0,0 +1,155 @@ +source: Community +models: + - name: open-models/falcon-mini-2b + provider: Open Models Foundation + logo: data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9InB1cnBsZSIgeDE9IjAlIiB5MT0iMCUiIHgyPSIxMDAlIiB5Mj0iMTAwJSI+CiAgICAgIDxzdG9wIG9mZnNldD0iMCUiIHN0eWxlPSJzdG9wLWNvbG9yOiM5QjU5QjY7c3RvcC1vcGFjaXR5OjEiIC8+CiAgICAgIDxzdG9wIG9mZnNldD0iMTAwJSIgc3R5bGU9InN0b3AtY29sb3I6IzhFNDRBRDtzdG9wLW9wYWNpdHk6MSIgLz4KICAgIDwvbGluZWFyR3JhZGllbnQ+CiAgPC9kZWZzPgogIDxyZWN0IHg9IjI1IiB5PSIzNSIgd2lkdGg9IjMwIiBoZWlnaHQ9IjMwIiBmaWxsPSJ1cmwoI3B1cnBsZSkiIHRyYW5zZm9ybT0icm90YXRlKC0xNSA0MCA1MCkiLz4KICA8cmVjdCB4PSI0NSIgeT0iMzUiIHdpZHRoPSIzMCIgaGVpZ2h0PSIzMCIgZmlsbD0idXJsKCNwdXJwbGUpIiBvcGFjaXR5PSIwLjciIHRyYW5zZm9ybT0icm90YXRlKDE1IDYwIDUwKSIvPgo8L3N2Zz4K + description: |- + (DEMO) Falcon-Mini-2B is a lightweight, community-contributed 2 billion parameter model optimized + for edge deployment and resource-constrained environments. Sed do eiusmod tempor incididunt. + readme: |- + # Falcon-Mini-2B + + **Model Summary:** + Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
This is a community-contributed model designed for efficient inference. + + - **Developers:** Open Models Foundation Community + - **Release Date**: October 5th, 2024 + - **License:** [MIT License](https://opensource.org/licenses/MIT) + + **Features:** + * Lightweight architecture + * Fast inference + * Edge-optimized + * Multilingual support + + Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris. + language: ["en", "es", "fr"] + license: mit + licenseLink: https://opensource.org/licenses/MIT + libraryName: transformers + tasks: + - text-generation + createTimeSinceEpoch: "1728086400000" + lastUpdateTimeSinceEpoch: "1728086400000" + artifacts: + - uri: oci://registry.example.com/open-models/falcon-mini-2b:v1.0 + createTimeSinceEpoch: "1728086400000" + lastUpdateTimeSinceEpoch: "1728086400000" + + - name: quantum-research/sentiment-analyzer-base + provider: Quantum Research Labs + logo: data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9Im9yYW5nZSIgeDE9IjAlIiB5MT0iMCUiIHgyPSIxMDAlIiB5Mj0iMTAwJSI+CiAgICAgIDxzdG9wIG9mZnNldD0iMCUiIHN0eWxlPSJzdG9wLWNvbG9yOiNGMzlDMTI7c3RvcC1vcGFjaXR5OjEiIC8+CiAgICAgIDxzdG9wIG9mZnNldD0iMTAwJSIgc3R5bGU9InN0b3AtY29sb3I6I0U2N0UyMjtzdG9wLW9wYWNpdHk6MSIgLz4KICAgIDwvbGluZWFyR3JhZGllbnQ+CiAgPC9kZWZzPgogIDxwb2x5Z29uIHBvaW50cz0iNTAsMjAgNjEsNDAgODMsNDMgNjYsNjAgNzAsODIgNTAsNzAgMzAsODIgMzQsNjAgMTcsNDMgMzksNDAiIGZpbGw9InVybCgjb3JhbmdlKSIvPgo8L3N2Zz4K + description: |- + (DEMO) Sentiment-Analyzer-Base is a specialized model for sentiment analysis and emotion detection + trained on diverse social media and customer feedback datasets. Duis aute irure dolor. + readme: |- + # Sentiment-Analyzer-Base + + **Model Summary:** + Excepteur sint occaecat cupidatat non proident. This model specializes in sentiment classification tasks. 
+ + - **Developers:** Quantum Research Labs + - **Release Date**: September 20th, 2024 + - **License:** [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) + + **Capabilities:** + * Sentiment classification (positive/negative/neutral) + * Emotion detection + * Social media analysis + * Customer feedback processing + + Sunt in culpa qui officia deserunt mollit anim id est laborum. + language: ["en"] + license: apache-2.0 + licenseLink: https://www.apache.org/licenses/LICENSE-2.0.txt + libraryName: transformers + tasks: + - text-classification + - sentiment-analysis + createTimeSinceEpoch: "1726790400000" + lastUpdateTimeSinceEpoch: "1726790400000" + artifacts: + - uri: oci://registry.example.com/quantum-research/sentiment-analyzer:v2.0 + createTimeSinceEpoch: "1726790400000" + lastUpdateTimeSinceEpoch: "1726790400000" + + - name: indie-ai/creative-writer-3b + provider: Indie AI Collective + logo: data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9InRlYWwiIHgxPSIwJSIgeTE9IjAlIiB4Mj0iMTAwJSIgeTI9IjEwMCUiPgogICAgICA8c3RvcCBvZmZzZXQ9IjAlIiBzdHlsZT0ic3RvcC1jb2xvcjojMTZBMDg1O3N0b3Atb3BhY2l0eToxIiAvPgogICAgICA8c3RvcCBvZmZzZXQ9IjEwMCUiIHN0eWxlPSJzdG9wLWNvbG9yOiMxMzhENzU7c3RvcC1vcGFjaXR5OjEiIC8+CiAgICA8L2xpbmVhckdyYWRpZW50PgogIDwvZGVmcz4KICA8cG9seWdvbiBwb2ludHM9IjUwLDE1IDgwLDMwIDgwLDcwIDUwLDg1IDIwLDcwIDIwLDMwIiBmaWxsPSJ1cmwoI3RlYWwpIiBzdHJva2U9IiMxMTdBNjUiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxjaXJjbGUgY3g9IjUwIiBjeT0iNTAiIHI9IjE1IiBmaWxsPSJ3aGl0ZSIgb3BhY2l0eT0iMC4zIi8+Cjwvc3ZnPgo= + description: |- + (DEMO) Creative-Writer-3B is an experimental model fine-tuned for creative writing tasks including + story generation, poetry, and narrative completion. Nemo enim ipsam voluptatem quia voluptas. + readme: |- + # Creative-Writer-3B + + **Model Summary:** + Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus. 
This experimental model focuses on creative text generation. + + - **Developers:** Indie AI Collective + - **Release Date**: August 12th, 2024 + - **License:** [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/) + + **Creative Capabilities:** + * Story and narrative generation + * Poetry composition + * Creative writing assistance + * Character development + * Plot ideation + + Itaque earum rerum hic tenetur a sapiente delectus. + + **Note:** This is an experimental community model and outputs should be reviewed for quality. + language: ["en"] + license: cc-by-sa-4.0 + licenseLink: https://creativecommons.org/licenses/by-sa/4.0/ + libraryName: transformers + tasks: + - text-generation + createTimeSinceEpoch: "1723420800000" + lastUpdateTimeSinceEpoch: "1723420800000" + artifacts: + - uri: oci://registry.example.com/indie-ai/creative-writer-3b:experimental + createTimeSinceEpoch: "1723420800000" + lastUpdateTimeSinceEpoch: "1723420800000" + + - name: alpha-labs/translation-mini-1b + provider: Alpha Translation Labs + logo: data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9InJlZCIgeDE9IjAlIiB5MT0iMCUiIHgyPSIxMDAlIiB5Mj0iMTAwJSI+CiAgICAgIDxzdG9wIG9mZnNldD0iMCUiIHN0eWxlPSJzdG9wLWNvbG9yOiNFNzRDM0M7c3RvcC1vcGFjaXR5OjEiIC8+CiAgICAgIDxzdG9wIG9mZnNldD0iMTAwJSIgc3R5bGU9InN0b3AtY29sb3I6I0MwMzkyQjtzdG9wLW9wYWNpdHk6MSIgLz4KICAgIDwvbGluZWFyR3JhZGllbnQ+CiAgPC9kZWZzPgogIDxwYXRoIGQ9Ik0gNTAgMTAgTCA2MCA0MCBMIDcwIDcwIEwgNTAgNjAgTCAzMCA3MCBMIDQwIDQwIFoiIGZpbGw9InVybCgjcmVkKSIvPgogIDxjaXJjbGUgY3g9IjUwIiBjeT0iMzAiIHI9IjUiIGZpbGw9IndoaXRlIi8+CiAgPHBhdGggZD0iTSAzMCA3MCBRIDQwIDgwIDUwIDc1IFEgNjAgODAgNzAgNzAiIGZpbGw9IiNGMzlDMTIiLz4KPC9zdmc+Cg== + description: |- + (DEMO) Translation-Mini-1B is a compact multilingual translation model supporting 20+ language pairs, + optimized for low-latency translation tasks. 
Quis autem vel eum iure reprehenderit. + readme: |- + # Translation-Mini-1B + + **Model Summary:** + Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit. A community-contributed translation model. + + - **Developers:** Alpha Translation Labs + - **Release Date**: July 30th, 2024 + - **License:** [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) + + **Supported Languages:** + English, Spanish, French, German, Italian, Portuguese, Dutch, Russian, Chinese, Japanese, Korean, Arabic, Hindi, Turkish, Polish, Vietnamese, Thai, Indonesian, Swedish, Danish + + **Features:** + * Fast translation (< 50ms latency) + * 20+ language pairs + * Optimized for short text + * API-friendly + + Omnis voluptas assumenda est, omnis dolor repellendus. + + **Limitations:** + Best suited for short texts (< 512 tokens). For longer documents, consider using larger models. + language: ["multilingual"] + license: apache-2.0 + licenseLink: https://www.apache.org/licenses/LICENSE-2.0.txt + libraryName: transformers + tasks: + - translation + createTimeSinceEpoch: "1722297600000" + lastUpdateTimeSinceEpoch: "1722297600000" + artifacts: + - uri: oci://registry.example.com/alpha-labs/translation-mini-1b:v1.5 + createTimeSinceEpoch: "1722297600000" + lastUpdateTimeSinceEpoch: "1722297600000" diff --git a/catalog/internal/catalog/testdata/dev-organization-models.yaml b/catalog/internal/catalog/testdata/dev-organization-models.yaml new file mode 100644 index 0000000000..d2c9e93308 --- /dev/null +++ b/catalog/internal/catalog/testdata/dev-organization-models.yaml @@ -0,0 +1,395 @@ +source: Organization AI +models: + - name: acme-ai/neural-7b-instruct + provider: Acme AI Labs + logo: 
data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9ImdyYWQxIiB4MT0iMCUiIHkxPSIwJSIgeDI9IjEwMCUiIHkyPSIxMDAlIj4KICAgICAgPHN0b3Agb2Zmc2V0PSIwJSIgc3R5bGU9InN0b3AtY29sb3I6IzRBOTBFMjtzdG9wLW9wYWNpdHk6MSIgLz4KICAgICAgPHN0b3Agb2Zmc2V0PSIxMDAlIiBzdHlsZT0ic3RvcC1jb2xvcjojMzU3QUJEO3N0b3Atb3BhY2l0eToxIiAvPgogICAgPC9saW5lYXJHcmFkaWVudD4KICA8L2RlZnM+CiAgPGNpcmNsZSBjeD0iNTAiIGN5PSIyMCIgcj0iNiIgZmlsbD0idXJsKCNncmFkMSkiLz4KICA8Y2lyY2xlIGN4PSIzMCIgY3k9IjQwIiByPSI2IiBmaWxsPSJ1cmwoI2dyYWQxKSIvPgogIDxjaXJjbGUgY3g9IjcwIiBjeT0iNDAiIHI9IjYiIGZpbGw9InVybCgjZ3JhZDEpIi8+CiAgPGNpcmNsZSBjeD0iMjAiIGN5PSI3MCIgcj0iNiIgZmlsbD0idXJsKCNncmFkMSkiLz4KICA8Y2lyY2xlIGN4PSI1MCIgY3k9IjcwIiByPSI2IiBmaWxsPSJ1cmwoI2dyYWQxKSIvPgogIDxjaXJjbGUgY3g9IjgwIiBjeT0iNzAiIHI9IjYiIGZpbGw9InVybCgjZ3JhZDEpIi8+CiAgPGxpbmUgeDE9IjUwIiB5MT0iMjYiIHgyPSIzMCIgeTI9IjM0IiBzdHJva2U9IiM0QTkwRTIiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxsaW5lIHgxPSI1MCIgeTE9IjI2IiB4Mj0iNzAiIHkyPSIzNCIgc3Ryb2tlPSIjNEE5MEUyIiBzdHJva2Utd2lkdGg9IjIiLz4KICA8bGluZSB4MT0iMzAiIHkxPSI0NiIgeDI9IjIwIiB5Mj0iNjQiIHN0cm9rZT0iIzRBOTBFMiIgc3Ryb2tlLXdpZHRoPSIyIi8+CiAgPGxpbmUgeDE9IjMwIiB5MT0iNDYiIHgyPSI1MCIgeTI9IjY0IiBzdHJva2U9IiM0QTkwRTIiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxsaW5lIHgxPSI3MCIgeTE9IjQ2IiB4Mj0iNTAiIHkyPSI2NCIgc3Ryb2tlPSIjNEE5MEUyIiBzdHJva2Utd2lkdGg9IjIiLz4KICA8bGluZSB4MT0iNzAiIHkxPSI0NiIgeDI9IjgwIiB5Mj0iNjQiIHN0cm9rZT0iIzRBOTBFMiIgc3Ryb2tlLXdpZHRoPSIyIi8+Cjwvc3ZnPgo= + description: |- + (DEMO) Neural-7B-Instruct is a 7 billion parameter instruction-tuned language model designed for + general-purpose text generation tasks. This model excels at following complex instructions, + question answering, and multi-turn conversations. + readme: |- + # Neural-7B-Instruct + + **Model Summary:** + Neural-7B-Instruct is a 7B parameter instruction-tuned model fine-tuned from Neural-7B-Base. 
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. + + - **Developers:** Acme AI Labs + - **Release Date**: January 15th, 2025 + - **License:** [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) + + **Supported Languages:** + English, Spanish, French, German, Italian, Portuguese, Dutch, Russian, Chinese, Japanese, Korean + + **Intended Use:** + Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat: + * Question answering + * Text summarization + * Content generation + * Dialogue systems + * Information extraction + + **Installation:** + + ```shell + pip install transformers torch + ``` + + **Generation Example:** + + ```python + from transformers import AutoModelForCausalLM, AutoTokenizer + + model_path = "acme-ai/neural-7b-instruct" + tokenizer = AutoTokenizer.from_pretrained(model_path) + model = AutoModelForCausalLM.from_pretrained(model_path, device_map="auto") + + messages = [{"role": "user", "content": "Explain machine learning in simple terms."}] + input_ids = tokenizer.apply_chat_template(messages, return_tensors="pt") + + outputs = model.generate(input_ids, max_new_tokens=256) + print(tokenizer.decode(outputs[0])) + ``` + + **Model Architecture:** + Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur: + - Transformer architecture with multi-head attention + - 32 layers, 4096 hidden dimensions + - 32 attention heads with grouped-query attention + - Rotary positional embeddings (RoPE) + - SwiGLU activation functions + + **Limitations:** + Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. The model may produce biased or factually incorrect outputs and should be used with appropriate safety measures. 
+ language: ["en", "es", "fr", "de", "it", "pt", "nl", "ru", "zh", "ja", "ko"] + license: apache-2.0 + licenseLink: https://www.apache.org/licenses/LICENSE-2.0.txt + libraryName: transformers + tasks: + - text-generation + - question-answering + createTimeSinceEpoch: "1736899200000" + lastUpdateTimeSinceEpoch: "1736899200000" + artifacts: + - uri: oci://registry.example.com/acme-ai/neural-7b-instruct:v1.0 + createTimeSinceEpoch: "1736899200000" + lastUpdateTimeSinceEpoch: "1736899200000" + + - name: stellar-labs/quantum-13b-base + provider: Stellar Labs + logo: data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9ImdyYWQxIiB4MT0iMCUiIHkxPSIwJSIgeDI9IjEwMCUiIHkyPSIxMDAlIj4KICAgICAgPHN0b3Agb2Zmc2V0PSIwJSIgc3R5bGU9InN0b3AtY29sb3I6IzRBOTBFMjtzdG9wLW9wYWNpdHk6MSIgLz4KICAgICAgPHN0b3Agb2Zmc2V0PSIxMDAlIiBzdHlsZT0ic3RvcC1jb2xvcjojMzU3QUJEO3N0b3Atb3BhY2l0eToxIiAvPgogICAgPC9saW5lYXJHcmFkaWVudD4KICA8L2RlZnM+CiAgPGNpcmNsZSBjeD0iNTAiIGN5PSIyMCIgcj0iNiIgZmlsbD0idXJsKCNncmFkMSkiLz4KICA8Y2lyY2xlIGN4PSIzMCIgY3k9IjQwIiByPSI2IiBmaWxsPSJ1cmwoI2dyYWQxKSIvPgogIDxjaXJjbGUgY3g9IjcwIiBjeT0iNDAiIHI9IjYiIGZpbGw9InVybCgjZ3JhZDEpIi8+CiAgPGNpcmNsZSBjeD0iMjAiIGN5PSI3MCIgcj0iNiIgZmlsbD0idXJsKCNncmFkMSkiLz4KICA8Y2lyY2xlIGN4PSI1MCIgY3k9IjcwIiByPSI2IiBmaWxsPSJ1cmwoI2dyYWQxKSIvPgogIDxjaXJjbGUgY3g9IjgwIiBjeT0iNzAiIHI9IjYiIGZpbGw9InVybCgjZ3JhZDEpIi8+CiAgPGxpbmUgeDE9IjUwIiB5MT0iMjYiIHgyPSIzMCIgeTI9IjM0IiBzdHJva2U9IiM0QTkwRTIiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxsaW5lIHgxPSI1MCIgeTE9IjI2IiB4Mj0iNzAiIHkyPSIzNCIgc3Ryb2tlPSIjNEE5MEUyIiBzdHJva2Utd2lkdGg9IjIiLz4KICA8bGluZSB4MT0iMzAiIHkxPSI0NiIgeDI9IjIwIiB5Mj0iNjQiIHN0cm9rZT0iIzRBOTBFMiIgc3Ryb2tlLXdpZHRoPSIyIi8+CiAgPGxpbmUgeDE9IjMwIiB5MT0iNDYiIHgyPSI1MCIgeTI9IjY0IiBzdHJva2U9IiM0QTkwRTIiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxsaW5lIHgxPSI3MCIgeTE9IjQ2IiB4Mj0iNTAiIHkyPSI2NCIgc3Ryb2tlPSIjNEE5MEUyIiBzdHJva2Utd2lkdGg9IjIiLz4KICA8bGluZSB4MT0iNzAiIHkxPS
I0NiIgeDI9IjgwIiB5Mj0iNjQiIHN0cm9rZT0iIzRBOTBFMiIgc3Ryb2tlLXdpZHRoPSIyIi8+Cjwvc3ZnPgo= + description: |- + (DEMO) Quantum-13B-Base is a foundational language model with 13 billion parameters trained + from scratch on a diverse corpus. Sed ut perspiciatis unde omnis iste natus error sit + voluptatem accusantium doloremque laudantium. + readme: |- + # Quantum-13B-Base + + **Model Summary:** + Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quantum-13B-Base is a pre-trained transformer model designed as a foundation for fine-tuning on specific downstream tasks. + + - **Developers:** Stellar Labs Research + - **Release Date**: November 3rd, 2024 + - **License:** [MIT License](https://opensource.org/licenses/MIT) + + **Training Data:** + Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. + + **Key Features:** + * 13 billion parameters + * Context length of 8192 tokens + * Trained on 2.5 trillion tokens + * Multi-lingual capabilities + * Efficient inference optimizations + + **Usage Example:** + + ```python + from transformers import AutoModel, AutoTokenizer + + model = AutoModel.from_pretrained("stellar-labs/quantum-13b-base") + tokenizer = AutoTokenizer.from_pretrained("stellar-labs/quantum-13b-base") + + text = "Your input text here" + inputs = tokenizer(text, return_tensors="pt") + outputs = model(**inputs) + ``` + + **Performance Metrics:** + Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. 
+ language: ["en", "es", "fr", "de", "zh", "ja"] + license: mit + licenseLink: https://opensource.org/licenses/MIT + libraryName: transformers + tasks: + - feature-extraction + - fill-mask + createTimeSinceEpoch: "1730678400000" + lastUpdateTimeSinceEpoch: "1730678400000" + artifacts: + - uri: oci://registry.example.com/stellar-labs/quantum-13b-base:v2.1 + createTimeSinceEpoch: "1730678400000" + lastUpdateTimeSinceEpoch: "1730678400000" + + - name: neural-dynamics/code-pilot-3b + provider: Neural Dynamics + logo: data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9ImdyYWQxIiB4MT0iMCUiIHkxPSIwJSIgeDI9IjEwMCUiIHkyPSIxMDAlIj4KICAgICAgPHN0b3Agb2Zmc2V0PSIwJSIgc3R5bGU9InN0b3AtY29sb3I6IzRBOTBFMjtzdG9wLW9wYWNpdHk6MSIgLz4KICAgICAgPHN0b3Agb2Zmc2V0PSIxMDAlIiBzdHlsZT0ic3RvcC1jb2xvcjojMzU3QUJEO3N0b3Atb3BhY2l0eToxIiAvPgogICAgPC9saW5lYXJHcmFkaWVudD4KICA8L2RlZnM+CiAgPGNpcmNsZSBjeD0iNTAiIGN5PSIyMCIgcj0iNiIgZmlsbD0idXJsKCNncmFkMSkiLz4KICA8Y2lyY2xlIGN4PSIzMCIgY3k9IjQwIiByPSI2IiBmaWxsPSJ1cmwoI2dyYWQxKSIvPgogIDxjaXJjbGUgY3g9IjcwIiBjeT0iNDAiIHI9IjYiIGZpbGw9InVybCgjZ3JhZDEpIi8+CiAgPGNpcmNsZSBjeD0iMjAiIGN5PSI3MCIgcj0iNiIgZmlsbD0idXJsKCNncmFkMSkiLz4KICA8Y2lyY2xlIGN4PSI1MCIgY3k9IjcwIiByPSI2IiBmaWxsPSJ1cmwoI2dyYWQxKSIvPgogIDxjaXJjbGUgY3g9IjgwIiBjeT0iNzAiIHI9IjYiIGZpbGw9InVybCgjZ3JhZDEpIi8+CiAgPGxpbmUgeDE9IjUwIiB5MT0iMjYiIHgyPSIzMCIgeTI9IjM0IiBzdHJva2U9IiM0QTkwRTIiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxsaW5lIHgxPSI1MCIgeTE9IjI2IiB4Mj0iNzAiIHkyPSIzNCIgc3Ryb2tlPSIjNEE5MEUyIiBzdHJva2Utd2lkdGg9IjIiLz4KICA8bGluZSB4MT0iMzAiIHkxPSI0NiIgeDI9IjIwIiB5Mj0iNjQiIHN0cm9rZT0iIzRBOTBFMiIgc3Ryb2tlLXdpZHRoPSIyIi8+CiAgPGxpbmUgeDE9IjMwIiB5MT0iNDYiIHgyPSI1MCIgeTI9IjY0IiBzdHJva2U9IiM0QTkwRTIiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxsaW5lIHgxPSI3MCIgeTE9IjQ2IiB4Mj0iNTAiIHkyPSI2NCIgc3Ryb2tlPSIjNEE5MEUyIiBzdHJva2Utd2lkdGg9IjIiLz4KICA8bGluZSB4MT0iNzAiIHkxPSI0NiIgeDI9IjgwIiB5Mj0iNjQiIHN0cm9rZT0iIzRBOTBFMiI
gc3Ryb2tlLXdpZHRoPSIyIi8+Cjwvc3ZnPgo= + description: |- + (DEMO) Code-Pilot-3B is a specialized 3 billion parameter model optimized for code generation, + code completion, and programming-related tasks. Ut enim ad minima veniam, quis nostrum + exercitationem ullam corporis suscipit laboriosam. + readme: |- + # Code-Pilot-3B + + **Model Summary:** + At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident. + + - **Developers:** Neural Dynamics + - **Release Date**: December 20th, 2024 + - **License:** [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) + + **Supported Programming Languages:** + Python, JavaScript, TypeScript, Java, C++, Go, Rust, Ruby, PHP, Swift, Kotlin, C# + + **Capabilities:** + * Code completion + * Code generation from natural language + * Bug detection and fixing + * Code documentation generation + * Test generation + + **Example Usage:** + + ```python + from transformers import AutoModelForCausalLM, AutoTokenizer + + model = AutoModelForCausalLM.from_pretrained("neural-dynamics/code-pilot-3b") + tokenizer = AutoTokenizer.from_pretrained("neural-dynamics/code-pilot-3b") + + prompt = "# Write a function to calculate fibonacci numbers\\ndef fibonacci(n):" + inputs = tokenizer(prompt, return_tensors="pt") + outputs = model.generate(**inputs, max_length=200) + print(tokenizer.decode(outputs[0])) + ``` + + **Training Details:** + Similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. + + **Benchmarks:** + Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. 
+ language: ["en"] + license: apache-2.0 + licenseLink: https://www.apache.org/licenses/LICENSE-2.0.txt + libraryName: transformers + tasks: + - text-generation + - code-generation + createTimeSinceEpoch: "1734652800000" + lastUpdateTimeSinceEpoch: "1734652800000" + artifacts: + - uri: oci://registry.example.com/neural-dynamics/code-pilot-3b:latest + createTimeSinceEpoch: "1734652800000" + lastUpdateTimeSinceEpoch: "1734652800000" + + - name: acme-ai/multimodal-vision-7b + provider: Acme AI Labs + logo: data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9ImdyYWQxIiB4MT0iMCUiIHkxPSIwJSIgeDI9IjEwMCUiIHkyPSIxMDAlIj4KICAgICAgPHN0b3Agb2Zmc2V0PSIwJSIgc3R5bGU9InN0b3AtY29sb3I6IzRBOTBFMjtzdG9wLW9wYWNpdHk6MSIgLz4KICAgICAgPHN0b3Agb2Zmc2V0PSIxMDAlIiBzdHlsZT0ic3RvcC1jb2xvcjojMzU3QUJEO3N0b3Atb3BhY2l0eToxIiAvPgogICAgPC9saW5lYXJHcmFkaWVudD4KICA8L2RlZnM+CiAgPGNpcmNsZSBjeD0iNTAiIGN5PSIyMCIgcj0iNiIgZmlsbD0idXJsKCNncmFkMSkiLz4KICA8Y2lyY2xlIGN4PSIzMCIgY3k9IjQwIiByPSI2IiBmaWxsPSJ1cmwoI2dyYWQxKSIvPgogIDxjaXJjbGUgY3g9IjcwIiBjeT0iNDAiIHI9IjYiIGZpbGw9InVybCgjZ3JhZDEpIi8+CiAgPGNpcmNsZSBjeD0iMjAiIGN5PSI3MCIgcj0iNiIgZmlsbD0idXJsKCNncmFkMSkiLz4KICA8Y2lyY2xlIGN4PSI1MCIgY3k9IjcwIiByPSI2IiBmaWxsPSJ1cmwoI2dyYWQxKSIvPgogIDxjaXJjbGUgY3g9IjgwIiBjeT0iNzAiIHI9IjYiIGZpbGw9InVybCgjZ3JhZDEpIi8+CiAgPGxpbmUgeDE9IjUwIiB5MT0iMjYiIHgyPSIzMCIgeTI9IjM0IiBzdHJva2U9IiM0QTkwRTIiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxsaW5lIHgxPSI1MCIgeTE9IjI2IiB4Mj0iNzAiIHkyPSIzNCIgc3Ryb2tlPSIjNEE5MEUyIiBzdHJva2Utd2lkdGg9IjIiLz4KICA8bGluZSB4MT0iMzAiIHkxPSI0NiIgeDI9IjIwIiB5Mj0iNjQiIHN0cm9rZT0iIzRBOTBFMiIgc3Ryb2tlLXdpZHRoPSIyIi8+CiAgPGxpbmUgeDE9IjMwIiB5MT0iNDYiIHgyPSI1MCIgeTI9IjY0IiBzdHJva2U9IiM0QTkwRTIiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxsaW5lIHgxPSI3MCIgeTE9IjQ2IiB4Mj0iNTAiIHkyPSI2NCIgc3Ryb2tlPSIjNEE5MEUyIiBzdHJva2Utd2lkdGg9IjIiLz4KICA8bGluZSB4MT0iNzAiIHkxPSI0NiIgeDI9IjgwIiB5Mj0iNjQiIHN0cm9rZT0iIzRBOTBFMiIgc3Ryb2tlL
XdpZHRoPSIyIi8+Cjwvc3ZnPgo= + description: |- + (DEMO) Multimodal-Vision-7B is a 7 billion parameter vision-language model capable of understanding + and generating text based on visual inputs. Temporibus autem quibusdam et aut officiis + debitis aut rerum necessitatibus saepe eveniet. + readme: |- + # Multimodal-Vision-7B + + **Model Summary:** + Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat. + + - **Developers:** Acme AI Labs Multimodal Team + - **Release Date**: February 1st, 2025 + - **License:** [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) + + **Capabilities:** + * Image captioning and description + * Visual question answering + * Image-to-text generation + * Chart and diagram interpretation + * Document understanding + + **Input Formats:** + Supports JPG, PNG, WebP, and PDF formats up to 4K resolution + + **Usage Example:** + + ```python + from transformers import AutoProcessor, AutoModelForVision2Seq + from PIL import Image + + model = AutoModelForVision2Seq.from_pretrained("acme-ai/multimodal-vision-7b") + processor = AutoProcessor.from_pretrained("acme-ai/multimodal-vision-7b") + + image = Image.open("example.jpg") + prompt = "Describe this image in detail:" + + inputs = processor(images=image, text=prompt, return_tensors="pt") + outputs = model.generate(**inputs, max_new_tokens=200) + print(processor.decode(outputs[0])) + ``` + + **Architecture:** + Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur. + + **Performance:** + On vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati. 
+ language: ["en", "es", "fr", "de", "zh"] + license: apache-2.0 + licenseLink: https://www.apache.org/licenses/LICENSE-2.0.txt + libraryName: transformers + tasks: + - image-to-text + - visual-question-answering + createTimeSinceEpoch: "1738368000000" + lastUpdateTimeSinceEpoch: "1738368000000" + artifacts: + - uri: oci://registry.example.com/acme-ai/multimodal-vision-7b:v1.0 + createTimeSinceEpoch: "1738368000000" + lastUpdateTimeSinceEpoch: "1738368000000" + + - name: stellar-labs/reasoning-1b-chat + provider: Stellar Labs + logo: data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9ImdyYWQxIiB4MT0iMCUiIHkxPSIwJSIgeDI9IjEwMCUiIHkyPSIxMDAlIj4KICAgICAgPHN0b3Agb2Zmc2V0PSIwJSIgc3R5bGU9InN0b3AtY29sb3I6IzRBOTBFMjtzdG9wLW9wYWNpdHk6MSIgLz4KICAgICAgPHN0b3Agb2Zmc2V0PSIxMDAlIiBzdHlsZT0ic3RvcC1jb2xvcjojMzU3QUJEO3N0b3Atb3BhY2l0eToxIiAvPgogICAgPC9saW5lYXJHcmFkaWVudD4KICA8L2RlZnM+CiAgPGNpcmNsZSBjeD0iNTAiIGN5PSIyMCIgcj0iNiIgZmlsbD0idXJsKCNncmFkMSkiLz4KICA8Y2lyY2xlIGN4PSIzMCIgY3k9IjQwIiByPSI2IiBmaWxsPSJ1cmwoI2dyYWQxKSIvPgogIDxjaXJjbGUgY3g9IjcwIiBjeT0iNDAiIHI9IjYiIGZpbGw9InVybCgjZ3JhZDEpIi8+CiAgPGNpcmNsZSBjeD0iMjAiIGN5PSI3MCIgcj0iNiIgZmlsbD0idXJsKCNncmFkMSkiLz4KICA8Y2lyY2xlIGN4PSI1MCIgY3k9IjcwIiByPSI2IiBmaWxsPSJ1cmwoI2dyYWQxKSIvPgogIDxjaXJjbGUgY3g9IjgwIiBjeT0iNzAiIHI9IjYiIGZpbGw9InVybCgjZ3JhZDEpIi8+CiAgPGxpbmUgeDE9IjUwIiB5MT0iMjYiIHgyPSIzMCIgeTI9IjM0IiBzdHJva2U9IiM0QTkwRTIiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxsaW5lIHgxPSI1MCIgeTE9IjI2IiB4Mj0iNzAiIHkyPSIzNCIgc3Ryb2tlPSIjNEE5MEUyIiBzdHJva2Utd2lkdGg9IjIiLz4KICA8bGluZSB4MT0iMzAiIHkxPSI0NiIgeDI9IjIwIiB5Mj0iNjQiIHN0cm9rZT0iIzRBOTBFMiIgc3Ryb2tlLXdpZHRoPSIyIi8+CiAgPGxpbmUgeDE9IjMwIiB5MT0iNDYiIHgyPSI1MCIgeTI9IjY0IiBzdHJva2U9IiM0QTkwRTIiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxsaW5lIHgxPSI3MCIgeTE9IjQ2IiB4Mj0iNTAiIHkyPSI2NCIgc3Ryb2tlPSIjNEE5MEUyIiBzdHJva2Utd2lkdGg9IjIiLz4KICA8bGluZSB4MT0iNzAiIHkxPSI0NiIgeDI9IjgwIiB5Mj0iNjQiIH
N0cm9rZT0iIzRBOTBFMiIgc3Ryb2tlLXdpZHRoPSIyIi8+Cjwvc3ZnPgo= + description: |- + (DEMO) Reasoning-1B-Chat is an efficient 1 billion parameter chat model optimized for + conversational AI and reasoning tasks. Sed ut perspiciatis unde omnis iste natus + error sit voluptatem accusantium. + readme: |- + # Reasoning-1B-Chat + + **Model Summary:** + Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Reasoning-1B-Chat is designed for resource-constrained environments while maintaining strong performance. + + - **Developers:** Stellar Labs + - **Release Date**: January 10th, 2025 + - **License:** [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) + + **Key Features:** + * Lightweight 1B parameter architecture + * Fast inference speed + * Low memory footprint (< 2GB) + * Optimized for edge deployment + * Strong logical reasoning capabilities + + **Supported Use Cases:** + Doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo: + * Customer service chatbots + * Personal assistants + * Educational tutoring systems + * Task-oriented dialogue + + **Example:** + + ```python + from transformers import AutoModelForCausalLM, AutoTokenizer + + model = AutoModelForCausalLM.from_pretrained("stellar-labs/reasoning-1b-chat") + tokenizer = AutoTokenizer.from_pretrained("stellar-labs/reasoning-1b-chat") + + messages = [ + {"role": "user", "content": "What is the capital of France?"} + ] + + inputs = tokenizer.apply_chat_template(messages, return_tensors="pt") + outputs = model.generate(inputs, max_new_tokens=100) + print(tokenizer.decode(outputs[0])) + ``` + + **Quantization Support:** + Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt: + * INT8 quantization + * INT4 quantization + * GGUF format support + + **Deployment:** + 
Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt. + language: ["en", "es", "fr", "de"] + license: apache-2.0 + licenseLink: https://www.apache.org/licenses/LICENSE-2.0.txt + libraryName: transformers + tasks: + - conversational + - text-generation + createTimeSinceEpoch: "1736467200000" + lastUpdateTimeSinceEpoch: "1736467200000" + artifacts: + - uri: oci://registry.example.com/stellar-labs/reasoning-1b-chat:v1.2 + createTimeSinceEpoch: "1736467200000" + lastUpdateTimeSinceEpoch: "1736467200000" + + - name: neural-dynamics/embeddings-large + provider: Neural Dynamics + logo: data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9ImdyYWQxIiB4MT0iMCUiIHkxPSIwJSIgeDI9IjEwMCUiIHkyPSIxMDAlIj4KICAgICAgPHN0b3Agb2Zmc2V0PSIwJSIgc3R5bGU9InN0b3AtY29sb3I6IzRBOTBFMjtzdG9wLW9wYWNpdHk6MSIgLz4KICAgICAgPHN0b3Agb2Zmc2V0PSIxMDAlIiBzdHlsZT0ic3RvcC1jb2xvcjojMzU3QUJEO3N0b3Atb3BhY2l0eToxIiAvPgogICAgPC9saW5lYXJHcmFkaWVudD4KICA8L2RlZnM+CiAgPGNpcmNsZSBjeD0iNTAiIGN5PSIyMCIgcj0iNiIgZmlsbD0idXJsKCNncmFkMSkiLz4KICA8Y2lyY2xlIGN4PSIzMCIgY3k9IjQwIiByPSI2IiBmaWxsPSJ1cmwoI2dyYWQxKSIvPgogIDxjaXJjbGUgY3g9IjcwIiBjeT0iNDAiIHI9IjYiIGZpbGw9InVybCgjZ3JhZDEpIi8+CiAgPGNpcmNsZSBjeD0iMjAiIGN5PSI3MCIgcj0iNiIgZmlsbD0idXJsKCNncmFkMSkiLz4KICA8Y2lyY2xlIGN4PSI1MCIgY3k9IjcwIiByPSI2IiBmaWxsPSJ1cmwoI2dyYWQxKSIvPgogIDxjaXJjbGUgY3g9IjgwIiBjeT0iNzAiIHI9IjYiIGZpbGw9InVybCgjZ3JhZDEpIi8+CiAgPGxpbmUgeDE9IjUwIiB5MT0iMjYiIHgyPSIzMCIgeTI9IjM0IiBzdHJva2U9IiM0QTkwRTIiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxsaW5lIHgxPSI1MCIgeTE9IjI2IiB4Mj0iNzAiIHkyPSIzNCIgc3Ryb2tlPSIjNEE5MEUyIiBzdHJva2Utd2lkdGg9IjIiLz4KICA8bGluZSB4MT0iMzAiIHkxPSI0NiIgeDI9IjIwIiB5Mj0iNjQiIHN0cm9rZT0iIzRBOTBFMiIgc3Ryb2tlLXdpZHRoPSIyIi8+CiAgPGxpbmUgeDE9IjMwIiB5MT0iNDYiIHgyPSI1MCIgeTI9IjY0IiBzdHJva2U9IiM0QTkwRTIiIHN0cm9rZS13aWR0aD0iMiIvPgogIDxsaW5lIHgxPSI3MCI
geTE9IjQ2IiB4Mj0iNTAiIHkyPSI2NCIgc3Ryb2tlPSIjNEE5MEUyIiBzdHJva2Utd2lkdGg9IjIiLz4KICA8bGluZSB4MT0iNzAiIHkxPSI0NiIgeDI9IjgwIiB5Mj0iNjQiIHN0cm9rZT0iIzRBOTBFMiIgc3Ryb2tlLXdpZHRoPSIyIi8+Cjwvc3ZnPgo= + description: |- + (DEMO) Embeddings-Large is a state-of-the-art embedding model designed for semantic search, + similarity matching, and retrieval-augmented generation (RAG) applications. Ut aut + reiciendis voluptatibus maiores alias consequatur. + readme: |- + # Embeddings-Large + + **Model Summary:** + At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatem deleniti atque corrupti quos dolores et quas molestias excepturi sint. + + - **Developers:** Neural Dynamics Embedding Team + - **Release Date**: September 15th, 2024 + - **License:** [MIT License](https://opensource.org/licenses/MIT) + + **Specifications:** + * Embedding dimension: 1024 + * Maximum sequence length: 512 tokens + * Supports 50+ languages + * Normalized embeddings for cosine similarity + + **Applications:** + Cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi: + * Semantic search engines + * Document retrieval + * Clustering and classification + * Recommendation systems + * RAG pipelines + + **Usage:** + + ```python + from sentence_transformers import SentenceTransformer + + model = SentenceTransformer('neural-dynamics/embeddings-large') + + # Encode sentences + sentences = [ + "This is an example sentence", + "Each sentence is converted to a vector" + ] + + embeddings = model.encode(sentences) + + # Compute similarity + from sklearn.metrics.pairwise import cosine_similarity + similarity = cosine_similarity([embeddings[0]], [embeddings[1]]) + print(f"Similarity: {similarity[0][0]:.4f}") + ``` + + **Performance Benchmarks:** + Id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, cum soluta nobis est eligendi optio. 
+ + **Integration:** + Compatible with popular vector databases including Weaviate, Qdrant, Milvus, and Pinecone. + language: ["multilingual"] + license: mit + licenseLink: https://opensource.org/licenses/MIT + libraryName: sentence-transformers + tasks: + - feature-extraction + - sentence-similarity + createTimeSinceEpoch: "1726358400000" + lastUpdateTimeSinceEpoch: "1726358400000" + artifacts: + - uri: oci://registry.example.com/neural-dynamics/embeddings-large:v3.0 + createTimeSinceEpoch: "1726358400000" + lastUpdateTimeSinceEpoch: "1726358400000" diff --git a/catalog/internal/catalog/testdata/dev-validated-models.yaml b/catalog/internal/catalog/testdata/dev-validated-models.yaml new file mode 100644 index 0000000000..14ace5720e --- /dev/null +++ b/catalog/internal/catalog/testdata/dev-validated-models.yaml @@ -0,0 +1,1627 @@ +source: Validated AI +models: + - name: certified/production-llm-8b + provider: Validation Authority + logo: data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9ImdyYWQyIiB4MT0iMCUiIHkxPSIwJSIgeDI9IjEwMCUiIHkyPSIxMDAlIj4KICAgICAgPHN0b3Agb2Zmc2V0PSIwJSIgc3R5bGU9InN0b3AtY29sb3I6IzI3QUU2MDtzdG9wLW9wYWNpdHk6MSIgLz4KICAgICAgPHN0b3Agb2Zmc2V0PSIxMDAlIiBzdHlsZT0ic3RvcC1jb2xvcjojMjI5OTU0O3N0b3Atb3BhY2l0eToxIiAvPgogICAgPC9saW5lYXJHcmFkaWVudD4KICA8L2RlZnM+CiAgPHBhdGggZD0iTSA1MCAxMCBMIDgwIDMwIEwgODAgNzAgTCA1MCA5MCBMIDIwIDcwIEwgMjAgMzAgWiIgZmlsbD0idXJsKCNncmFkMikiIHN0cm9rZT0iIzFFODQ0OSIgc3Ryb2tlLXdpZHRoPSIyIi8+CiAgPHBhdGggZD0iTSAzNSA1MCBMIDQ1IDYwIEwgNjUgMzUiIHN0cm9rZT0id2hpdGUiIHN0cm9rZS13aWR0aD0iNiIgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIiBzdHJva2UtbGluZWpvaW49InJvdW5kIiBmaWxsPSJub25lIi8+Cjwvc3ZnPgo= + description: |- + (DEMO) Production-LLM-8B is an enterprise-validated 8 billion parameter language model that has + undergone rigorous testing for production deployment. 
Sed do eiusmod tempor incididunt + ut labore et dolore magna aliqua. + readme: |- + # Production-LLM-8B (Validated) + + **Model Summary:** + Lorem ipsum dolor sit amet, consectetur adipiscing elit. This model has been extensively tested and validated for enterprise production environments. + + - **Developers:** Validation Authority in collaboration with Acme AI Labs + - **Validation Date**: January 20th, 2025 + - **License:** [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) + - **Validation Status:** ✓ Certified for Production Use + + **Validation Testing:** + Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat: + * Security assessment completed + * Bias and fairness evaluation + * Performance benchmarking + * Resource utilization profiling + * Compliance verification (SOC 2, ISO 27001) + + **Production Features:** + * Guaranteed inference latency SLAs + * Comprehensive monitoring and logging + * Multi-GPU and distributed inference support + * Enterprise-grade error handling + * Automated failover capabilities + + **Deployment:** + + ```python + from transformers import AutoModelForCausalLM, AutoTokenizer + + model = AutoModelForCausalLM.from_pretrained( + "certified/production-llm-8b", + torch_dtype="auto", + device_map="auto" + ) + tokenizer = AutoTokenizer.from_pretrained("certified/production-llm-8b") + + messages = [{"role": "user", "content": "Analyze this customer feedback..."}] + inputs = tokenizer.apply_chat_template(messages, return_tensors="pt") + outputs = model.generate(inputs, max_new_tokens=512, temperature=0.7) + ``` + + **Validation Metrics:** + Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur: + * Accuracy: 94.2% on validation benchmarks + * Average latency: < 100ms (p95) + * Throughput: 500 tokens/sec on standard hardware + * Memory efficiency: 12GB VRAM required + + **Security:** + Excepteur sint occaecat cupidatat non proident, 
sunt in culpa qui officia deserunt mollit anim id est laborum. Model includes built-in guardrails and content filtering. + + **Support:** + Enterprise support available with 24/7 SLA and dedicated technical assistance. + language: ["en", "es", "fr", "de", "it", "pt"] + license: apache-2.0 + licenseLink: https://www.apache.org/licenses/LICENSE-2.0.txt + maturity: Production + libraryName: transformers + tasks: + - text-generation + - question-answering + - summarization + createTimeSinceEpoch: "1737331200000" + lastUpdateTimeSinceEpoch: "1737331200000" + customProperties: + validated: + string_value: "" + metadataType: MetadataStringValue + artifacts: + - uri: oci://registry.example.com/certified/production-llm-8b:validated-v1.0 + createTimeSinceEpoch: "1737331200000" + lastUpdateTimeSinceEpoch: "1737331200000" + customProperties: + validation_status: + string_value: "certified" + metadataType: MetadataStringValue + validation_date: + string_value: "2025-01-20" + metadataType: MetadataStringValue + - artifactType: metrics-artifact + metricsType: performance-metrics + createTimeSinceEpoch: "1736899200000" + lastUpdateTimeSinceEpoch: "1736899200000" + customProperties: + config_id: + string_value: "prod-8b-h100-2gpu-chatbot" + metadataType: MetadataStringValue + scenario_id: + string_value: "prod-llm-8b-perf-1" + metadataType: MetadataStringValue + use_case: + string_value: "chatbot" + metadataType: MetadataStringValue + hardware_type: + string_value: "H100" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + ttft_mean: + double_value: 85.2 + metadataType: MetadataDoubleValue + ttft_p90: + double_value: 102.5 + metadataType: MetadataDoubleValue + ttft_p95: + double_value: 108.3 + metadataType: MetadataDoubleValue + ttft_p99: + double_value: 125.7 + metadataType: MetadataDoubleValue + e2e_mean: + double_value: 6850.3 + metadataType: MetadataDoubleValue + e2e_p90: + double_value: 9100.2 + metadataType: 
MetadataDoubleValue + e2e_p95: + double_value: 9720.5 + metadataType: MetadataDoubleValue + e2e_p99: + double_value: 10850.8 + metadataType: MetadataDoubleValue + tps_mean: + double_value: 1105.4 + metadataType: MetadataDoubleValue + tps_p90: + double_value: 2088.6 + metadataType: MetadataDoubleValue + tps_p95: + double_value: 3125.2 + metadataType: MetadataDoubleValue + tps_p99: + double_value: 7452.3 + metadataType: MetadataDoubleValue + itl_mean: + double_value: 26.8 + metadataType: MetadataDoubleValue + itl_p90: + double_value: 27.4 + metadataType: MetadataDoubleValue + itl_p95: + double_value: 27.6 + metadataType: MetadataDoubleValue + itl_p99: + double_value: 28.1 + metadataType: MetadataDoubleValue + requests_per_second: + double_value: 4.0 + metadataType: MetadataDoubleValue + mean_input_tokens: + double_value: 512.3 + metadataType: MetadataDoubleValue + mean_output_tokens: + double_value: 256.1 + metadataType: MetadataDoubleValue + framework_type: + string_value: "vllm" + metadataType: MetadataStringValue + framework_version: + string_value: "v0.8.4" + metadataType: MetadataStringValue + deployment_type: + string_value: "production" + metadataType: MetadataStringValue + + - artifactType: metrics-artifact + metricsType: performance-metrics + createTimeSinceEpoch: "1736899200000" + lastUpdateTimeSinceEpoch: "1736899200000" + customProperties: + config_id: + string_value: "prod-8b-h100-2gpu-rag" + metadataType: MetadataStringValue + scenario_id: + string_value: "prod-llm-8b-perf-2" + metadataType: MetadataStringValue + use_case: + string_value: "rag" + metadataType: MetadataStringValue + hardware_type: + string_value: "H100" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + ttft_mean: + double_value: 245.6 + metadataType: MetadataDoubleValue + ttft_p90: + double_value: 285.3 + metadataType: MetadataDoubleValue + ttft_p95: + double_value: 302.1 + metadataType: MetadataDoubleValue + ttft_p99: + 
double_value: 325.4 + metadataType: MetadataDoubleValue + e2e_mean: + double_value: 10250.7 + metadataType: MetadataDoubleValue + e2e_p90: + double_value: 13680.4 + metadataType: MetadataDoubleValue + e2e_p95: + double_value: 14520.8 + metadataType: MetadataDoubleValue + e2e_p99: + double_value: 16100.2 + metadataType: MetadataDoubleValue + tps_mean: + double_value: 485.2 + metadataType: MetadataDoubleValue + tps_p90: + double_value: 902.5 + metadataType: MetadataDoubleValue + tps_p95: + double_value: 1342.8 + metadataType: MetadataDoubleValue + tps_p99: + double_value: 3185.6 + metadataType: MetadataDoubleValue + itl_mean: + double_value: 20.5 + metadataType: MetadataDoubleValue + itl_p90: + double_value: 21.2 + metadataType: MetadataDoubleValue + itl_p95: + double_value: 21.4 + metadataType: MetadataDoubleValue + itl_p99: + double_value: 21.9 + metadataType: MetadataDoubleValue + requests_per_second: + double_value: 1.0 + metadataType: MetadataDoubleValue + mean_input_tokens: + double_value: 4096.5 + metadataType: MetadataDoubleValue + mean_output_tokens: + double_value: 512.2 + metadataType: MetadataDoubleValue + framework_type: + string_value: "vllm" + metadataType: MetadataStringValue + framework_version: + string_value: "v0.8.4" + metadataType: MetadataStringValue + deployment_type: + string_value: "production" + metadataType: MetadataStringValue + + - artifactType: metrics-artifact + metricsType: performance-metrics + createTimeSinceEpoch: "1736899200000" + lastUpdateTimeSinceEpoch: "1736899200000" + customProperties: + config_id: + string_value: "prod-8b-h200-1gpu-chatbot" + metadataType: MetadataStringValue + scenario_id: + string_value: "prod-llm-8b-perf-3" + metadataType: MetadataStringValue + use_case: + string_value: "chatbot" + metadataType: MetadataStringValue + hardware_type: + string_value: "H200" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + ttft_mean: + double_value: 78.5 + metadataType: 
MetadataDoubleValue + ttft_p90: + double_value: 94.2 + metadataType: MetadataDoubleValue + ttft_p95: + double_value: 99.8 + metadataType: MetadataDoubleValue + ttft_p99: + double_value: 115.3 + metadataType: MetadataDoubleValue + e2e_mean: + double_value: 6240.5 + metadataType: MetadataDoubleValue + e2e_p90: + double_value: 8320.8 + metadataType: MetadataDoubleValue + e2e_p95: + double_value: 8890.2 + metadataType: MetadataDoubleValue + e2e_p99: + double_value: 9920.5 + metadataType: MetadataDoubleValue + tps_mean: + double_value: 1225.7 + metadataType: MetadataDoubleValue + tps_p90: + double_value: 2312.4 + metadataType: MetadataDoubleValue + tps_p95: + double_value: 3458.9 + metadataType: MetadataDoubleValue + tps_p99: + double_value: 8245.6 + metadataType: MetadataDoubleValue + itl_mean: + double_value: 24.3 + metadataType: MetadataDoubleValue + itl_p90: + double_value: 24.9 + metadataType: MetadataDoubleValue + itl_p95: + double_value: 25.1 + metadataType: MetadataDoubleValue + itl_p99: + double_value: 25.6 + metadataType: MetadataDoubleValue + requests_per_second: + double_value: 5.0 + metadataType: MetadataDoubleValue + mean_input_tokens: + double_value: 512.1 + metadataType: MetadataDoubleValue + mean_output_tokens: + double_value: 256.3 + metadataType: MetadataDoubleValue + framework_type: + string_value: "vllm" + metadataType: MetadataStringValue + framework_version: + string_value: "v0.8.4" + metadataType: MetadataStringValue + deployment_type: + string_value: "production" + metadataType: MetadataStringValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1736899200000" + lastUpdateTimeSinceEpoch: "1736899200000" + customProperties: + benchmark: + string_value: "mmlu" + metadataType: MetadataStringValue + score: + double_value: 75.8 + metadataType: MetadataDoubleValue + score_metric: + string_value: "accuracy_percent" + metadataType: MetadataStringValue + hardware_type: + string_value: "H100" + metadataType: 
MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1736899200000" + lastUpdateTimeSinceEpoch: "1736899200000" + customProperties: + benchmark: + string_value: "hellaswag" + metadataType: MetadataStringValue + score: + double_value: 68.5 + metadataType: MetadataDoubleValue + score_metric: + string_value: "accuracy_percent" + metadataType: MetadataStringValue + hardware_type: + string_value: "H100" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1736899200000" + lastUpdateTimeSinceEpoch: "1736899200000" + customProperties: + benchmark: + string_value: "arc" + metadataType: MetadataStringValue + score: + double_value: 72.3 + metadataType: MetadataDoubleValue + score_metric: + string_value: "accuracy_percent" + metadataType: MetadataStringValue + hardware_type: + string_value: "H100" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1736899200000" + lastUpdateTimeSinceEpoch: "1736899200000" + customProperties: + benchmark: + string_value: "truthfulqa" + metadataType: MetadataStringValue + score: + double_value: 65.8 + metadataType: MetadataDoubleValue + score_metric: + string_value: "accuracy_percent" + metadataType: MetadataStringValue + hardware_type: + string_value: "H100" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1736899200000" + lastUpdateTimeSinceEpoch: "1736899200000" + customProperties: + benchmark: + string_value: "winogrande" + metadataType: MetadataStringValue + score: + double_value: 
77.2 + metadataType: MetadataDoubleValue + score_metric: + string_value: "accuracy_percent" + metadataType: MetadataStringValue + hardware_type: + string_value: "H100" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1736899200000" + lastUpdateTimeSinceEpoch: "1736899200000" + customProperties: + benchmark: + string_value: "bbh" + metadataType: MetadataStringValue + score: + double_value: 70.4 + metadataType: MetadataDoubleValue + score_metric: + string_value: "accuracy_percent" + metadataType: MetadataStringValue + hardware_type: + string_value: "H100" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + + - name: certified/secure-embeddings-v2 + provider: Validation Authority + logo: data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9ImdyYWQyIiB4MT0iMCUiIHkxPSIwJSIgeDI9IjEwMCUiIHkyPSIxMDAlIj4KICAgICAgPHN0b3Agb2Zmc2V0PSIwJSIgc3R5bGU9InN0b3AtY29sb3I6IzI3QUU2MDtzdG9wLW9wYWNpdHk6MSIgLz4KICAgICAgPHN0b3Agb2Zmc2V0PSIxMDAlIiBzdHlsZT0ic3RvcC1jb2xvcjojMjI5OTU0O3N0b3Atb3BhY2l0eToxIiAvPgogICAgPC9saW5lYXJHcmFkaWVudD4KICA8L2RlZnM+CiAgPHBhdGggZD0iTSA1MCAxMCBMIDgwIDMwIEwgODAgNzAgTCA1MCA5MCBMIDIwIDcwIEwgMjAgMzAgWiIgZmlsbD0idXJsKCNncmFkMikiIHN0cm9rZT0iIzFFODQ0OSIgc3Ryb2tlLXdpZHRoPSIyIi8+CiAgPHBhdGggZD0iTSAzNSA1MCBMIDQ1IDYwIEwgNjUgMzUiIHN0cm9rZT0id2hpdGUiIHN0cm9rZS13aWR0aD0iNiIgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIiBzdHJva2UtbGluZWpvaW49InJvdW5kIiBmaWxsPSJub25lIi8+Cjwvc3ZnPgo= + description: |- + (DEMO) Secure-Embeddings-V2 is a validated embedding model optimized for enterprise search + and retrieval with enhanced security features. Nemo enim ipsam voluptatem quia + voluptas sit aspernatur aut odit aut fugit. 
+ readme: |- + # Secure-Embeddings-V2 (Validated) + + **Model Summary:** + Sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. This embedding model has been validated for secure enterprise deployment with data privacy guarantees. + + - **Developers:** Validation Authority & Neural Dynamics + - **Validation Date**: December 5th, 2024 + - **License:** [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) + - **Validation Status:** ✓ Security Certified + + **Security Features:** + * Data encryption at rest and in transit + * No telemetry or external data transmission + * Differential privacy training + * Adversarial robustness testing + * GDPR and CCPA compliant + + **Specifications:** + Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet: + * Embedding dimension: 768 + * Max sequence length: 512 tokens + * Normalized embeddings + * Language support: 95+ languages + + **Enterprise Deployment:** + + ```python + from sentence_transformers import SentenceTransformer + + # Load model with security configurations + model = SentenceTransformer('certified/secure-embeddings-v2') + + # Configure for air-gapped deployment + model.eval() + model.requires_grad = False + + documents = [ + "Confidential business document 1", + "Sensitive customer data record" + ] + + # Generate embeddings locally without external calls + embeddings = model.encode( + documents, + show_progress_bar=False, + normalize_embeddings=True + ) + ``` + + **Validation Testing:** + Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae: + * Security audit passed + * Privacy compliance verified + * Performance benchmarking complete + * Adversarial attack resistance tested + + **Use Cases:** + * Enterprise document search + * Secure RAG implementations + * Confidential data retrieval + * Regulatory compliant AI systems + + **Deployment Options:** + Itaque earum rerum hic 
tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat. + language: ["multilingual"] + license: apache-2.0 + licenseLink: https://www.apache.org/licenses/LICENSE-2.0.txt + maturity: Production + libraryName: sentence-transformers + tasks: + - feature-extraction + - sentence-similarity + createTimeSinceEpoch: "1733356800000" + lastUpdateTimeSinceEpoch: "1733356800000" + customProperties: + validated: + string_value: "" + metadataType: MetadataStringValue + artifacts: + - uri: oci://registry.example.com/certified/secure-embeddings-v2:validated + createTimeSinceEpoch: "1733356800000" + lastUpdateTimeSinceEpoch: "1733356800000" + customProperties: + security_certification: + string_value: "passed" + metadataType: MetadataStringValue + compliance: + string_value: "GDPR,CCPA,SOC2" + metadataType: MetadataStringValue + - artifactType: metrics-artifact + metricsType: performance-metrics + createTimeSinceEpoch: "1726790400000" + lastUpdateTimeSinceEpoch: "1726790400000" + customProperties: + config_id: + string_value: "sec-emb-h200-1gpu-batch32" + metadataType: MetadataStringValue + scenario_id: + string_value: "sec-emb-v2-perf-1" + metadataType: MetadataStringValue + use_case: + string_value: "embedding" + metadataType: MetadataStringValue + hardware_type: + string_value: "H200" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + ttft_mean: + double_value: 12.5 + metadataType: MetadataDoubleValue + ttft_p90: + double_value: 15.2 + metadataType: MetadataDoubleValue + ttft_p95: + double_value: 16.8 + metadataType: MetadataDoubleValue + ttft_p99: + double_value: 19.4 + metadataType: MetadataDoubleValue + e2e_mean: + double_value: 45.3 + metadataType: MetadataDoubleValue + e2e_p90: + double_value: 52.8 + metadataType: MetadataDoubleValue + e2e_p95: + double_value: 56.2 + metadataType: MetadataDoubleValue + e2e_p99: + double_value: 62.5 + 
metadataType: MetadataDoubleValue + tps_mean: + double_value: 8500.2 + metadataType: MetadataDoubleValue + tps_p90: + double_value: 10200.5 + metadataType: MetadataDoubleValue + tps_p95: + double_value: 11500.8 + metadataType: MetadataDoubleValue + tps_p99: + double_value: 13200.4 + metadataType: MetadataDoubleValue + itl_mean: + double_value: 1.4 + metadataType: MetadataDoubleValue + itl_p90: + double_value: 1.6 + metadataType: MetadataDoubleValue + itl_p95: + double_value: 1.7 + metadataType: MetadataDoubleValue + itl_p99: + double_value: 1.9 + metadataType: MetadataDoubleValue + requests_per_second: + double_value: 32.0 + metadataType: MetadataDoubleValue + mean_input_tokens: + double_value: 128.0 + metadataType: MetadataDoubleValue + mean_output_tokens: + double_value: 1024.0 + metadataType: MetadataDoubleValue + framework_type: + string_value: "sentence-transformers" + metadataType: MetadataStringValue + framework_version: + string_value: "v2.3.1" + metadataType: MetadataStringValue + deployment_type: + string_value: "production" + metadataType: MetadataStringValue + + - artifactType: metrics-artifact + metricsType: performance-metrics + createTimeSinceEpoch: "1726790400000" + lastUpdateTimeSinceEpoch: "1726790400000" + customProperties: + config_id: + string_value: "sec-emb-a100-1gpu-batch64" + metadataType: MetadataStringValue + scenario_id: + string_value: "sec-emb-v2-perf-2" + metadataType: MetadataStringValue + use_case: + string_value: "embedding" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-80" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + ttft_mean: + double_value: 18.7 + metadataType: MetadataDoubleValue + ttft_p90: + double_value: 22.4 + metadataType: MetadataDoubleValue + ttft_p95: + double_value: 24.6 + metadataType: MetadataDoubleValue + ttft_p99: + double_value: 28.2 + metadataType: MetadataDoubleValue + e2e_mean: + double_value: 72.8 + metadataType: 
MetadataDoubleValue + e2e_p90: + double_value: 84.5 + metadataType: MetadataDoubleValue + e2e_p95: + double_value: 89.6 + metadataType: MetadataDoubleValue + e2e_p99: + double_value: 98.3 + metadataType: MetadataDoubleValue + tps_mean: + double_value: 7200.5 + metadataType: MetadataDoubleValue + tps_p90: + double_value: 8650.2 + metadataType: MetadataDoubleValue + tps_p95: + double_value: 9750.8 + metadataType: MetadataDoubleValue + tps_p99: + double_value: 11200.3 + metadataType: MetadataDoubleValue + itl_mean: + double_value: 1.1 + metadataType: MetadataDoubleValue + itl_p90: + double_value: 1.3 + metadataType: MetadataDoubleValue + itl_p95: + double_value: 1.4 + metadataType: MetadataDoubleValue + itl_p99: + double_value: 1.6 + metadataType: MetadataDoubleValue + requests_per_second: + double_value: 64.0 + metadataType: MetadataDoubleValue + mean_input_tokens: + double_value: 128.0 + metadataType: MetadataDoubleValue + mean_output_tokens: + double_value: 1024.0 + metadataType: MetadataDoubleValue + framework_type: + string_value: "sentence-transformers" + metadataType: MetadataStringValue + framework_version: + string_value: "v2.3.1" + metadataType: MetadataStringValue + deployment_type: + string_value: "production" + metadataType: MetadataStringValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1726790400000" + lastUpdateTimeSinceEpoch: "1726790400000" + customProperties: + benchmark: + string_value: "mteb_retrieval" + metadataType: MetadataStringValue + score: + double_value: 88.4 + metadataType: MetadataDoubleValue + score_metric: + string_value: "ndcg@10" + metadataType: MetadataStringValue + hardware_type: + string_value: "H200" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1726790400000" + lastUpdateTimeSinceEpoch: "1726790400000" + customProperties: + 
benchmark: + string_value: "mteb_classification" + metadataType: MetadataStringValue + score: + double_value: 85.2 + metadataType: MetadataDoubleValue + score_metric: + string_value: "accuracy_percent" + metadataType: MetadataStringValue + hardware_type: + string_value: "H200" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1726790400000" + lastUpdateTimeSinceEpoch: "1726790400000" + customProperties: + benchmark: + string_value: "mteb_clustering" + metadataType: MetadataStringValue + score: + double_value: 82.6 + metadataType: MetadataDoubleValue + score_metric: + string_value: "v_measure" + metadataType: MetadataStringValue + hardware_type: + string_value: "H200" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1726790400000" + lastUpdateTimeSinceEpoch: "1726790400000" + customProperties: + benchmark: + string_value: "mteb_sts" + metadataType: MetadataStringValue + score: + double_value: 90.1 + metadataType: MetadataDoubleValue + score_metric: + string_value: "spearman_correlation" + metadataType: MetadataStringValue + hardware_type: + string_value: "H200" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1726790400000" + lastUpdateTimeSinceEpoch: "1726790400000" + customProperties: + benchmark: + string_value: "beir_avg" + metadataType: MetadataStringValue + score: + double_value: 87.3 + metadataType: MetadataDoubleValue + score_metric: + string_value: "ndcg@10" + metadataType: MetadataStringValue + hardware_type: + string_value: "H200" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: 
MetadataIntValue + + - name: certified/analytics-forecaster-5b + provider: Validation Authority + logo: data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9ImdyYWQyIiB4MT0iMCUiIHkxPSIwJSIgeDI9IjEwMCUiIHkyPSIxMDAlIj4KICAgICAgPHN0b3Agb2Zmc2V0PSIwJSIgc3R5bGU9InN0b3AtY29sb3I6IzI3QUU2MDtzdG9wLW9wYWNpdHk6MSIgLz4KICAgICAgPHN0b3Agb2Zmc2V0PSIxMDAlIiBzdHlsZT0ic3RvcC1jb2xvcjojMjI5OTU0O3N0b3Atb3BhY2l0eToxIiAvPgogICAgPC9saW5lYXJHcmFkaWVudD4KICA8L2RlZnM+CiAgPHBhdGggZD0iTSA1MCAxMCBMIDgwIDMwIEwgODAgNzAgTCA1MCA5MCBMIDIwIDcwIEwgMjAgMzAgWiIgZmlsbD0idXJsKCNncmFkMikiIHN0cm9rZT0iIzFFODQ0OSIgc3Ryb2tlLXdpZHRoPSIyIi8+CiAgPHBhdGggZD0iTSAzNSA1MCBMIDQ1IDYwIEwgNjUgMzUiIHN0cm9rZT0id2hpdGUiIHN0cm9rZS13aWR0aD0iNiIgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIiBzdHJva2UtbGluZWpvaW49InJvdW5kIiBmaWxsPSJub25lIi8+Cjwvc3ZnPgo= + description: |- + (DEMO) Analytics-Forecaster-5B is a specialized 5 billion parameter model validated for + business analytics, time-series forecasting, and data insights generation. Quis autem + vel eum iure reprehenderit qui in ea voluptate velit. + readme: |- + # Analytics-Forecaster-5B (Validated) + + **Model Summary:** + Esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur. This model specializes in analytical tasks and has been validated for business intelligence applications. 
+ + - **Developers:** Validation Authority & Stellar Labs + - **Validation Date**: November 18th, 2024 + - **License:** [MIT License](https://opensource.org/licenses/MIT) + - **Validation Status:** ✓ Analytics Certified + + **Capabilities:** + * Time-series forecasting + * Trend analysis and prediction + * Anomaly detection + * Business metric interpretation + * Natural language to SQL conversion + * Data visualization recommendations + + **Validation Criteria:** + At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti: + * Forecasting accuracy: 92% on benchmark datasets + * SQL generation correctness: 96% + * Anomaly detection F1-score: 0.89 + * Response time: < 200ms for standard queries + + **Usage Example:** + + ```python + from transformers import AutoModelForCausalLM, AutoTokenizer + + model = AutoModelForCausalLM.from_pretrained("certified/analytics-forecaster-5b") + tokenizer = AutoTokenizer.from_pretrained("certified/analytics-forecaster-5b") + + # Forecasting task + prompt = \"\"\"Given the following sales data: + Q1 2024: $1.2M + Q2 2024: $1.5M + Q3 2024: $1.8M + + Forecast Q4 2024 sales and explain the trend:\"\"\" + + inputs = tokenizer(prompt, return_tensors="pt") + outputs = model.generate(**inputs, max_new_tokens=300) + print(tokenizer.decode(outputs[0])) + ``` + + **Integration:** + Similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga: + * REST API endpoints available + * Jupyter notebook integration + * Power BI and Tableau connectors + * Custom dashboard integrations + + **Supported Data Formats:** + * CSV, JSON, Parquet + * SQL databases (PostgreSQL, MySQL, etc.) + * Time-series databases (InfluxDB, TimescaleDB) + * Cloud data warehouses (BigQuery, Snowflake compatible) + + **Performance:** + Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus. 
+ language: ["en"] + license: mit + licenseLink: https://opensource.org/licenses/MIT + maturity: Production + libraryName: transformers + tasks: + - text-generation + - table-question-answering + createTimeSinceEpoch: "1731888000000" + lastUpdateTimeSinceEpoch: "1731888000000" + customProperties: + validated: + string_value: "" + metadataType: MetadataStringValue + artifacts: + - uri: oci://registry.example.com/certified/analytics-forecaster-5b:v2.0-validated + createTimeSinceEpoch: "1731888000000" + lastUpdateTimeSinceEpoch: "1731888000000" + customProperties: + validation_type: + string_value: "analytics" + metadataType: MetadataStringValue + accuracy_score: + string_value: "92.4" + metadataType: MetadataStringValue + - artifactType: metrics-artifact + metricsType: performance-metrics + createTimeSinceEpoch: "1720656000000" + lastUpdateTimeSinceEpoch: "1720656000000" + customProperties: + config_id: + string_value: "analytics-5b-a100-2gpu-forecasting" + metadataType: MetadataStringValue + scenario_id: + string_value: "analytics-5b-perf-1" + metadataType: MetadataStringValue + use_case: + string_value: "time_series_forecasting" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-80" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + ttft_mean: + double_value: 95.3 + metadataType: MetadataDoubleValue + ttft_p90: + double_value: 115.8 + metadataType: MetadataDoubleValue + ttft_p95: + double_value: 122.4 + metadataType: MetadataDoubleValue + ttft_p99: + double_value: 138.6 + metadataType: MetadataDoubleValue + e2e_mean: + double_value: 7420.5 + metadataType: MetadataDoubleValue + e2e_p90: + double_value: 9850.2 + metadataType: MetadataDoubleValue + e2e_p95: + double_value: 10520.8 + metadataType: MetadataDoubleValue + e2e_p99: + double_value: 11750.4 + metadataType: MetadataDoubleValue + tps_mean: + double_value: 945.3 + metadataType: MetadataDoubleValue + tps_p90: + double_value: 1785.6 + 
metadataType: MetadataDoubleValue + tps_p95: + double_value: 2668.2 + metadataType: MetadataDoubleValue + tps_p99: + double_value: 6358.5 + metadataType: MetadataDoubleValue + itl_mean: + double_value: 28.9 + metadataType: MetadataDoubleValue + itl_p90: + double_value: 29.6 + metadataType: MetadataDoubleValue + itl_p95: + double_value: 29.8 + metadataType: MetadataDoubleValue + itl_p99: + double_value: 30.4 + metadataType: MetadataDoubleValue + requests_per_second: + double_value: 3.0 + metadataType: MetadataDoubleValue + mean_input_tokens: + double_value: 1024.0 + metadataType: MetadataDoubleValue + mean_output_tokens: + double_value: 512.0 + metadataType: MetadataDoubleValue + framework_type: + string_value: "pytorch" + metadataType: MetadataStringValue + framework_version: + string_value: "v2.1.0" + metadataType: MetadataStringValue + deployment_type: + string_value: "production" + metadataType: MetadataStringValue + + - artifactType: metrics-artifact + metricsType: performance-metrics + createTimeSinceEpoch: "1720656000000" + lastUpdateTimeSinceEpoch: "1720656000000" + customProperties: + config_id: + string_value: "analytics-5b-h100-2gpu-analytics" + metadataType: MetadataStringValue + scenario_id: + string_value: "analytics-5b-perf-2" + metadataType: MetadataStringValue + use_case: + string_value: "business_analytics" + metadataType: MetadataStringValue + hardware_type: + string_value: "H100" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + ttft_mean: + double_value: 82.4 + metadataType: MetadataDoubleValue + ttft_p90: + double_value: 100.2 + metadataType: MetadataDoubleValue + ttft_p95: + double_value: 106.8 + metadataType: MetadataDoubleValue + ttft_p99: + double_value: 121.5 + metadataType: MetadataDoubleValue + e2e_mean: + double_value: 6850.3 + metadataType: MetadataDoubleValue + e2e_p90: + double_value: 9100.5 + metadataType: MetadataDoubleValue + e2e_p95: + double_value: 9720.2 + metadataType: 
MetadataDoubleValue + e2e_p99: + double_value: 10850.8 + metadataType: MetadataDoubleValue + tps_mean: + double_value: 1085.6 + metadataType: MetadataDoubleValue + tps_p90: + double_value: 2048.3 + metadataType: MetadataDoubleValue + tps_p95: + double_value: 3062.5 + metadataType: MetadataDoubleValue + tps_p99: + double_value: 7305.2 + metadataType: MetadataDoubleValue + itl_mean: + double_value: 26.6 + metadataType: MetadataDoubleValue + itl_p90: + double_value: 27.2 + metadataType: MetadataDoubleValue + itl_p95: + double_value: 27.5 + metadataType: MetadataDoubleValue + itl_p99: + double_value: 28.0 + metadataType: MetadataDoubleValue + requests_per_second: + double_value: 4.0 + metadataType: MetadataDoubleValue + mean_input_tokens: + double_value: 2048.0 + metadataType: MetadataDoubleValue + mean_output_tokens: + double_value: 1024.0 + metadataType: MetadataDoubleValue + framework_type: + string_value: "pytorch" + metadataType: MetadataStringValue + framework_version: + string_value: "v2.1.0" + metadataType: MetadataStringValue + deployment_type: + string_value: "production" + metadataType: MetadataStringValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1720656000000" + lastUpdateTimeSinceEpoch: "1720656000000" + customProperties: + benchmark: + string_value: "smape" + metadataType: MetadataStringValue + score: + double_value: 92.4 + metadataType: MetadataDoubleValue + score_metric: + string_value: "accuracy_percent" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-80" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1720656000000" + lastUpdateTimeSinceEpoch: "1720656000000" + customProperties: + benchmark: + string_value: "mase" + metadataType: MetadataStringValue + score: + double_value: 0.82 + metadataType: MetadataDoubleValue + 
score_metric: + string_value: "error_ratio" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-80" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1720656000000" + lastUpdateTimeSinceEpoch: "1720656000000" + customProperties: + benchmark: + string_value: "rmse" + metadataType: MetadataStringValue + score: + double_value: 15.3 + metadataType: MetadataDoubleValue + score_metric: + string_value: "root_mean_squared_error" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-80" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1720656000000" + lastUpdateTimeSinceEpoch: "1720656000000" + customProperties: + benchmark: + string_value: "mae" + metadataType: MetadataStringValue + score: + double_value: 11.8 + metadataType: MetadataDoubleValue + score_metric: + string_value: "mean_absolute_error" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-80" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1720656000000" + lastUpdateTimeSinceEpoch: "1720656000000" + customProperties: + benchmark: + string_value: "mape" + metadataType: MetadataStringValue + score: + double_value: 8.5 + metadataType: MetadataDoubleValue + score_metric: + string_value: "mean_absolute_percentage_error" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-80" + metadataType: MetadataStringValue + hardware_count: + int_value: "2" + metadataType: MetadataIntValue + + - name: certified/compliance-assistant-3b + provider: Validation Authority + logo: 
data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIiB3aWR0aD0iMTAwIiBoZWlnaHQ9IjEwMCI+CiAgPGRlZnM+CiAgICA8bGluZWFyR3JhZGllbnQgaWQ9ImdyYWQyIiB4MT0iMCUiIHkxPSIwJSIgeDI9IjEwMCUiIHkyPSIxMDAlIj4KICAgICAgPHN0b3Agb2Zmc2V0PSIwJSIgc3R5bGU9InN0b3AtY29sb3I6IzI3QUU2MDtzdG9wLW9wYWNpdHk6MSIgLz4KICAgICAgPHN0b3Agb2Zmc2V0PSIxMDAlIiBzdHlsZT0ic3RvcC1jb2xvcjojMjI5OTU0O3N0b3Atb3BhY2l0eToxIiAvPgogICAgPC9saW5lYXJHcmFkaWVudD4KICA8L2RlZnM+CiAgPHBhdGggZD0iTSA1MCAxMCBMIDgwIDMwIEwgODAgNzAgTCA1MCA5MCBMIDIwIDcwIEwgMjAgMzAgWiIgZmlsbD0idXJsKCNncmFkMikiIHN0cm9rZT0iIzFFODQ0OSIgc3Ryb2tlLXdpZHRoPSIyIi8+CiAgPHBhdGggZD0iTSAzNSA1MCBMIDQ1IDYwIEwgNjUgMzUiIHN0cm9rZT0id2hpdGUiIHN0cm9rZS13aWR0aD0iNiIgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIiBzdHJva2UtbGluZWpvaW49InJvdW5kIiBmaWxsPSJub25lIi8+Cjwvc3ZnPgo= + description: |- + (DEMO) Compliance-Assistant-3B is a lightweight 3 billion parameter model validated for + regulatory compliance checking, policy interpretation, and audit assistance. Omnis + voluptas assumenda est, omnis dolor repellendus. + readme: |- + # Compliance-Assistant-3B (Validated) + + **Model Summary:** + Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint. This model has been specifically validated for compliance and regulatory use cases. 
+ + - **Developers:** Validation Authority + - **Validation Date**: January 28th, 2025 + - **License:** [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) + - **Validation Status:** ✓ Compliance Certified + + **Compliance Domains:** + * Financial regulations (SOX, Basel III, GDPR) + * Healthcare compliance (HIPAA, HITECH) + * Data privacy (CCPA, GDPR, PIPEDA) + * Industry standards (ISO, SOC 2) + * Employment law and HR policies + + **Validated Capabilities:** + Itaque earum rerum hic tenetur a sapiente delectus: + * Policy document interpretation + * Compliance gap analysis + * Regulatory change impact assessment + * Audit trail generation + * Risk scoring and classification + + **Example Usage:** + + ```python + from transformers import pipeline + + compliance_assistant = pipeline( + "text-generation", + model="certified/compliance-assistant-3b", + device_map="auto" + ) + + query = \"\"\"Review the following data handling practice for GDPR compliance: + User data is stored for 5 years after account deletion and shared with + third-party analytics providers.\"\"\" + + result = compliance_assistant( + query, + max_new_tokens=400, + temperature=0.3 # Lower temperature for factual responses + ) + + print(result[0]['generated_text']) + ``` + + **Validation Testing:** + Ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat: + * Legal expert review completed + * Accuracy on compliance questions: 94.8% + * False positive rate: < 2% + * Regulatory knowledge currency: Updated monthly + + **Safety Features:** + * Output validation against regulatory databases + * Confidence scoring for recommendations + * Citation and source tracking + * Explainable AI outputs for audit trails + + **Important Notice:** + Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur. This model provides assistance and should not replace qualified legal counsel. 
+ language: ["en"] + license: apache-2.0 + licenseLink: https://www.apache.org/licenses/LICENSE-2.0.txt + maturity: Production + libraryName: transformers + tasks: + - text-generation + - question-answering + - text-classification + createTimeSinceEpoch: "1738022400000" + lastUpdateTimeSinceEpoch: "1738022400000" + customProperties: + validated: + string_value: "" + metadataType: MetadataStringValue + artifacts: + - uri: oci://registry.example.com/certified/compliance-assistant-3b:validated-v1.1 + createTimeSinceEpoch: "1738022400000" + lastUpdateTimeSinceEpoch: "1738022400000" + customProperties: + compliance_domains: + string_value: "GDPR,HIPAA,SOX,CCPA" + metadataType: MetadataStringValue + last_regulatory_update: + string_value: "2025-01-28" + metadataType: MetadataStringValue + - artifactType: metrics-artifact + metricsType: performance-metrics + createTimeSinceEpoch: "1724889600000" + lastUpdateTimeSinceEpoch: "1724889600000" + customProperties: + config_id: + string_value: "compliance-3b-a100-1gpu-chatbot" + metadataType: MetadataStringValue + scenario_id: + string_value: "compliance-3b-perf-1" + metadataType: MetadataStringValue + use_case: + string_value: "compliance_checking" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-40" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + ttft_mean: + double_value: 62.5 + metadataType: MetadataDoubleValue + ttft_p90: + double_value: 75.8 + metadataType: MetadataDoubleValue + ttft_p95: + double_value: 80.2 + metadataType: MetadataDoubleValue + ttft_p99: + double_value: 92.4 + metadataType: MetadataDoubleValue + e2e_mean: + double_value: 4850.3 + metadataType: MetadataDoubleValue + e2e_p90: + double_value: 6450.8 + metadataType: MetadataDoubleValue + e2e_p95: + double_value: 6890.2 + metadataType: MetadataDoubleValue + e2e_p99: + double_value: 7680.5 + metadataType: MetadataDoubleValue + tps_mean: + double_value: 1520.8 + metadataType: 
MetadataDoubleValue + tps_p90: + double_value: 2870.4 + metadataType: MetadataDoubleValue + tps_p95: + double_value: 4292.6 + metadataType: MetadataDoubleValue + tps_p99: + double_value: 10254.3 + metadataType: MetadataDoubleValue + itl_mean: + double_value: 18.9 + metadataType: MetadataDoubleValue + itl_p90: + double_value: 19.4 + metadataType: MetadataDoubleValue + itl_p95: + double_value: 19.6 + metadataType: MetadataDoubleValue + itl_p99: + double_value: 20.1 + metadataType: MetadataDoubleValue + requests_per_second: + double_value: 6.0 + metadataType: MetadataDoubleValue + mean_input_tokens: + double_value: 512.0 + metadataType: MetadataDoubleValue + mean_output_tokens: + double_value: 256.0 + metadataType: MetadataDoubleValue + framework_type: + string_value: "vllm" + metadataType: MetadataStringValue + framework_version: + string_value: "v0.8.4" + metadataType: MetadataStringValue + deployment_type: + string_value: "production" + metadataType: MetadataStringValue + + - artifactType: metrics-artifact + metricsType: performance-metrics + createTimeSinceEpoch: "1724889600000" + lastUpdateTimeSinceEpoch: "1724889600000" + customProperties: + config_id: + string_value: "compliance-3b-h200-1gpu-policy" + metadataType: MetadataStringValue + scenario_id: + string_value: "compliance-3b-perf-2" + metadataType: MetadataStringValue + use_case: + string_value: "policy_interpretation" + metadataType: MetadataStringValue + hardware_type: + string_value: "H200" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + ttft_mean: + double_value: 55.3 + metadataType: MetadataDoubleValue + ttft_p90: + double_value: 67.2 + metadataType: MetadataDoubleValue + ttft_p95: + double_value: 71.5 + metadataType: MetadataDoubleValue + ttft_p99: + double_value: 82.8 + metadataType: MetadataDoubleValue + e2e_mean: + double_value: 4320.5 + metadataType: MetadataDoubleValue + e2e_p90: + double_value: 5750.2 + metadataType: MetadataDoubleValue 
+ e2e_p95: + double_value: 6145.8 + metadataType: MetadataDoubleValue + e2e_p99: + double_value: 6850.4 + metadataType: MetadataDoubleValue + tps_mean: + double_value: 1685.4 + metadataType: MetadataDoubleValue + tps_p90: + double_value: 3182.5 + metadataType: MetadataDoubleValue + tps_p95: + double_value: 4758.2 + metadataType: MetadataDoubleValue + tps_p99: + double_value: 11358.6 + metadataType: MetadataDoubleValue + itl_mean: + double_value: 16.8 + metadataType: MetadataDoubleValue + itl_p90: + double_value: 17.2 + metadataType: MetadataDoubleValue + itl_p95: + double_value: 17.4 + metadataType: MetadataDoubleValue + itl_p99: + double_value: 17.9 + metadataType: MetadataDoubleValue + requests_per_second: + double_value: 8.0 + metadataType: MetadataDoubleValue + mean_input_tokens: + double_value: 1024.0 + metadataType: MetadataDoubleValue + mean_output_tokens: + double_value: 512.0 + metadataType: MetadataDoubleValue + framework_type: + string_value: "vllm" + metadataType: MetadataStringValue + framework_version: + string_value: "v0.8.4" + metadataType: MetadataStringValue + deployment_type: + string_value: "production" + metadataType: MetadataStringValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1724889600000" + lastUpdateTimeSinceEpoch: "1724889600000" + customProperties: + benchmark: + string_value: "legal_qa" + metadataType: MetadataStringValue + score: + double_value: 82.5 + metadataType: MetadataDoubleValue + score_metric: + string_value: "accuracy_percent" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-40" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1724889600000" + lastUpdateTimeSinceEpoch: "1724889600000" + customProperties: + benchmark: + string_value: "policy_classification" + metadataType: MetadataStringValue + score: + 
double_value: 88.3 + metadataType: MetadataDoubleValue + score_metric: + string_value: "f1_score" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-40" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1724889600000" + lastUpdateTimeSinceEpoch: "1724889600000" + customProperties: + benchmark: + string_value: "regulatory_extraction" + metadataType: MetadataStringValue + score: + double_value: 85.7 + metadataType: MetadataDoubleValue + score_metric: + string_value: "precision" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-40" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1724889600000" + lastUpdateTimeSinceEpoch: "1724889600000" + customProperties: + benchmark: + string_value: "compliance_detection" + metadataType: MetadataStringValue + score: + double_value: 91.2 + metadataType: MetadataDoubleValue + score_metric: + string_value: "recall" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-40" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: accuracy-metrics + createTimeSinceEpoch: "1724889600000" + lastUpdateTimeSinceEpoch: "1724889600000" + customProperties: + benchmark: + string_value: "gdpr_adherence" + metadataType: MetadataStringValue + score: + double_value: 94.8 + metadataType: MetadataDoubleValue + score_metric: + string_value: "compliance_score" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-40" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + + - artifactType: metrics-artifact + metricsType: 
accuracy-metrics + createTimeSinceEpoch: "1724889600000" + lastUpdateTimeSinceEpoch: "1724889600000" + customProperties: + benchmark: + string_value: "sox_compliance" + metadataType: MetadataStringValue + score: + double_value: 92.4 + metadataType: MetadataDoubleValue + score_metric: + string_value: "compliance_score" + metadataType: MetadataStringValue + hardware_type: + string_value: "A100-40" + metadataType: MetadataStringValue + hardware_count: + int_value: "1" + metadataType: MetadataIntValue + + + + diff --git a/catalog/internal/catalog/testdata/test-yaml-catalog.yaml b/catalog/internal/catalog/testdata/test-yaml-catalog.yaml index 624112f23e..f0685bbb7a 100644 --- a/catalog/internal/catalog/testdata/test-yaml-catalog.yaml +++ b/catalog/internal/catalog/testdata/test-yaml-catalog.yaml @@ -325,6 +325,14 @@ models: lastUpdateTimeSinceEpoch: "1734637721000" artifacts: - uri: oci://registry.redhat.io/rhelai1/granite-8b-code-base:1.3-1732870892 + - artifactType: metrics-artifact + createTimeSinceEpoch: "1733514949000" + lastUpdateTimeSinceEpoch: "1734637721000" + customProperties: + x: + int_value: 1 + y: + double_value: 2.1 - name: rhelai1/granite-8b-code-instruct provider: IBM description: |- @@ -673,8 +681,10 @@ models: customProperties: foo: string_value: bar + metadataType: MetadataStringValue baz: string_value: qux + metadataType: MetadataStringValue - name: model-with-no-artifacts provider: Test description: A model used for testing the GetArtifacts method when no artifacts are present. 
diff --git a/catalog/internal/catalog/testdata/testdb.cnf b/catalog/internal/catalog/testdata/testdb.cnf new file mode 100644 index 0000000000..873958cd70 --- /dev/null +++ b/catalog/internal/catalog/testdata/testdb.cnf @@ -0,0 +1,5 @@ +[mysqld] +character-set-server = utf8mb4 +collation-server = utf8mb4_general_ci + +!includedir /etc/mysql/conf.d/ diff --git a/catalog/internal/catalog/yaml_catalog.go b/catalog/internal/catalog/yaml_catalog.go index 1c15f326ca..cf1da5223b 100644 --- a/catalog/internal/catalog/yaml_catalog.go +++ b/catalog/internal/catalog/yaml_catalog.go @@ -2,253 +2,416 @@ package catalog import ( "context" + "encoding/json" "fmt" - "math" "os" "path/filepath" - "sort" "strconv" - "strings" - "sync" "k8s.io/apimachinery/pkg/util/yaml" "github.com/golang/glog" - model "github.com/kubeflow/model-registry/catalog/pkg/openapi" + dbmodels "github.com/kubeflow/model-registry/catalog/internal/db/models" + apimodels "github.com/kubeflow/model-registry/catalog/pkg/openapi" + "github.com/kubeflow/model-registry/internal/db/models" ) -type yamlModel struct { - model.CatalogModel `yaml:",inline"` - Artifacts []*model.CatalogModelArtifact `yaml:"artifacts"` -} +const ( + yamlCatalogPathKey = "yamlCatalogPath" + excludedModelsKey = "excludedModels" +) -type yamlCatalog struct { - Source string `yaml:"source"` - Models []yamlModel `yaml:"models"` +// convertMetadataValueToProperty converts a MetadataValue to a Properties object +// This helper eliminates code duplication when converting custom properties +func convertMetadataValueToProperty(key string, value apimodels.MetadataValue) models.Properties { + // Handle different MetadataValue types + if value.MetadataStringValue != nil { + return models.NewStringProperty(key, value.MetadataStringValue.StringValue, true) + } else if value.MetadataIntValue != nil { + // MetadataIntValue.IntValue is a string, need to convert to int32 + if intVal, err := strconv.ParseInt(value.MetadataIntValue.IntValue, 10, 32); err == 
nil { + return models.NewIntProperty(key, int32(intVal), true) + } else { + // If parsing fails, store as string + return models.NewStringProperty(key, value.MetadataIntValue.IntValue, true) + } + } else if value.MetadataDoubleValue != nil { + return models.NewDoubleProperty(key, value.MetadataDoubleValue.DoubleValue, true) + } else if value.MetadataBoolValue != nil { + return models.NewBoolProperty(key, value.MetadataBoolValue.BoolValue, true) + } else { + // For complex types, serialize to JSON + if jsonBytes, err := json.Marshal(value); err == nil { + return models.NewStringProperty(key, string(jsonBytes), true) + } + // Fallback to empty string if JSON marshaling fails + return models.NewStringProperty(key, "", true) + } } -type yamlCatalogImpl struct { - modelsLock sync.RWMutex - models map[string]*yamlModel +// convertCustomProperties converts a map of custom properties to a slice of Properties +func convertCustomProperties(customProps *map[string]apimodels.MetadataValue) []models.Properties { + if customProps == nil { + return nil + } + + var properties []models.Properties + for key, value := range *customProps { + properties = append(properties, convertMetadataValueToProperty(key, value)) + } + return properties } -var _ CatalogSourceProvider = &yamlCatalogImpl{} +func init() { + if err := RegisterModelProvider("yaml", newYamlModelProvider); err != nil { + panic(err) + } +} -func (y *yamlCatalogImpl) GetModel(ctx context.Context, name string) (*model.CatalogModel, error) { - y.modelsLock.RLock() - defer y.modelsLock.RUnlock() +type yamlModel struct { + apimodels.CatalogModel `yaml:",inline"` + Artifacts []*yamlArtifact `yaml:"artifacts"` +} - ym := y.models[name] - if ym == nil { - return nil, nil - } - cp := ym.CatalogModel - return &cp, nil +type yamlArtifact struct { + apimodels.CatalogArtifact } -func (y *yamlCatalogImpl) ListModels(ctx context.Context, params ListModelsParams) (model.CatalogModelList, error) { - y.modelsLock.RLock() - defer 
y.modelsLock.RUnlock() +// convertModelAttributes converts basic model attributes and timestamps +func (ym *yamlModel) convertModelAttributes() *dbmodels.CatalogModelAttributes { + attrs := &dbmodels.CatalogModelAttributes{ + Name: &ym.Name, + } - var filteredModels []*model.CatalogModel - for _, ym := range y.models { - cm := ym.CatalogModel - if params.Query != "" { - query := strings.ToLower(params.Query) - // Check if query matches name, description, tasks, provider, or libraryName - if !strings.Contains(strings.ToLower(cm.Name), query) && - !strings.Contains(strings.ToLower(cm.GetDescription()), query) && - !strings.Contains(strings.ToLower(cm.GetProvider()), query) && - !strings.Contains(strings.ToLower(cm.GetLibraryName()), query) { + // Convert timestamps + if ym.CreateTimeSinceEpoch != nil { + if createTime, err := strconv.ParseInt(*ym.CreateTimeSinceEpoch, 10, 64); err == nil { + attrs.CreateTimeSinceEpoch = &createTime + } + } - // Check tasks - foundInTasks := false - for _, task := range cm.GetTasks() { // Use GetTasks() for nil safety - if strings.Contains(strings.ToLower(task), query) { - foundInTasks = true - break - } - } - if !foundInTasks { - continue // Skip if no match in any searchable field - } - } + if ym.LastUpdateTimeSinceEpoch != nil { + if updateTime, err := strconv.ParseInt(*ym.LastUpdateTimeSinceEpoch, 10, 64); err == nil { + attrs.LastUpdateTimeSinceEpoch = &updateTime } - filteredModels = append(filteredModels, &cm) } - // Sort the filtered models - sort.Slice(filteredModels, func(i, j int) bool { - a := filteredModels[i] - b := filteredModels[j] + return attrs +} - var less bool - switch params.OrderBy { - case model.ORDERBYFIELD_CREATE_TIME: - // Convert CreateTimeSinceEpoch (string) to int64 for comparison - // Handle potential nil or conversion errors by treating as 0 - aTime, _ := strconv.ParseInt(a.GetCreateTimeSinceEpoch(), 10, 64) - bTime, _ := strconv.ParseInt(b.GetCreateTimeSinceEpoch(), 10, 64) - less = aTime < bTime - 
case model.ORDERBYFIELD_LAST_UPDATE_TIME: - // Convert LastUpdateTimeSinceEpoch (string) to int64 for comparison - // Handle potential nil or conversion errors by treating as 0 - aTime, _ := strconv.ParseInt(a.GetLastUpdateTimeSinceEpoch(), 10, 64) - bTime, _ := strconv.ParseInt(b.GetLastUpdateTimeSinceEpoch(), 10, 64) - less = aTime < bTime - case model.ORDERBYFIELD_NAME: - fallthrough - default: - // Fallback to name sort if an unknown sort field is provided - less = strings.Compare(a.Name, b.Name) < 0 - } +// convertModelProperties converts model properties to regular and custom properties +func (ym *yamlModel) convertModelProperties() ([]models.Properties, []models.Properties) { + var properties []models.Properties + var customProperties []models.Properties - if params.SortOrder == model.SORTORDER_DESC { - return !less - } - return less - }) + // Regular properties + if ym.Description != nil { + properties = append(properties, models.NewStringProperty("description", *ym.Description, false)) + } + if ym.Readme != nil { + properties = append(properties, models.NewStringProperty("readme", *ym.Readme, false)) + } + if ym.Maturity != nil { + properties = append(properties, models.NewStringProperty("maturity", *ym.Maturity, false)) + } + if ym.Provider != nil { + properties = append(properties, models.NewStringProperty("provider", *ym.Provider, false)) + } + if ym.Logo != nil { + properties = append(properties, models.NewStringProperty("logo", *ym.Logo, false)) + } + if ym.License != nil { + properties = append(properties, models.NewStringProperty("license", *ym.License, false)) + } + if ym.LicenseLink != nil { + properties = append(properties, models.NewStringProperty("license_link", *ym.LicenseLink, false)) + } + if ym.LibraryName != nil { + properties = append(properties, models.NewStringProperty("library_name", *ym.LibraryName, false)) + } + if ym.SourceId != nil { + properties = append(properties, models.NewStringProperty("source_id", *ym.SourceId, false)) + } - 
count := len(filteredModels) - if count > math.MaxInt32 { - count = math.MaxInt32 + // Convert array properties as struct properties + if ym.Language == nil { + ym.Language = []string{} + } + if languageJSON, err := json.Marshal(ym.Language); err == nil { + properties = append(properties, models.NewStringProperty("language", string(languageJSON), false)) } - list := model.CatalogModelList{ - Items: make([]model.CatalogModel, count), - PageSize: int32(count), - Size: int32(count), + if ym.Tasks == nil { + ym.Tasks = []string{} } - for i := range list.Items { - list.Items[i] = *filteredModels[i] + if tasksJSON, err := json.Marshal(ym.Tasks); err == nil { + properties = append(properties, models.NewStringProperty("tasks", string(tasksJSON), false)) } - return list, nil // Return the struct value directly + + // Convert custom properties from the YAML model + if customProps := convertCustomProperties(&ym.CustomProperties); customProps != nil { + customProperties = append(customProperties, customProps...) 
+ } + + return properties, customProperties } -func (y *yamlCatalogImpl) GetArtifacts(ctx context.Context, name string) (*model.CatalogModelArtifactList, error) { - y.modelsLock.RLock() - defer y.modelsLock.RUnlock() +// convertModelArtifact converts a CatalogModelArtifact to database format +func convertModelArtifact(artifact *apimodels.CatalogModelArtifact) *dbmodels.CatalogArtifact { + modelArtifact := &dbmodels.CatalogModelArtifactImpl{} - ym := y.models[name] - if ym == nil { - return nil, nil + // Set basic attributes + attrs := &dbmodels.CatalogModelArtifactAttributes{ + URI: &artifact.Uri, } - count := len(ym.Artifacts) - if count > math.MaxInt32 { - count = math.MaxInt32 + // Convert timestamps + if artifact.CreateTimeSinceEpoch != nil { + if createTime, err := strconv.ParseInt(*artifact.CreateTimeSinceEpoch, 10, 64); err == nil { + attrs.CreateTimeSinceEpoch = &createTime + } } + if artifact.LastUpdateTimeSinceEpoch != nil { + if updateTime, err := strconv.ParseInt(*artifact.LastUpdateTimeSinceEpoch, 10, 64); err == nil { + attrs.LastUpdateTimeSinceEpoch = &updateTime + } + } + + modelArtifact.Attributes = attrs - list := model.CatalogModelArtifactList{ - Items: make([]model.CatalogModelArtifact, count), - PageSize: int32(count), - Size: int32(count), + var artifactProperties []models.Properties + artifactProperties = append(artifactProperties, models.NewStringProperty("uri", artifact.Uri, false)) + + // Convert custom properties using helper function + if customProps := convertCustomProperties(&artifact.CustomProperties); customProps != nil { + modelArtifact.CustomProperties = &customProps } - for i := range list.Items { - list.Items[i] = *ym.Artifacts[i] + + modelArtifact.Properties = &artifactProperties + + return &dbmodels.CatalogArtifact{ + CatalogModelArtifact: modelArtifact, } - return &list, nil } -func isModelExcluded(modelName string, patterns []string) bool { - for _, pattern := range patterns { - if strings.HasSuffix(pattern, "*") { - if 
strings.HasPrefix(modelName, strings.TrimSuffix(pattern, "*")) { - return true - } - } else if modelName == pattern { - return true +// convertMetricsArtifact converts a CatalogMetricsArtifact to database format +func convertMetricsArtifact(artifact *apimodels.CatalogMetricsArtifact) *dbmodels.CatalogArtifact { + metricsArtifact := &dbmodels.CatalogMetricsArtifactImpl{} + + // Set basic attributes + attrs := &dbmodels.CatalogMetricsArtifactAttributes{ + MetricsType: dbmodels.MetricsType(artifact.MetricsType), + } + + // Convert timestamps + if artifact.CreateTimeSinceEpoch != nil { + if createTime, err := strconv.ParseInt(*artifact.CreateTimeSinceEpoch, 10, 64); err == nil { + attrs.CreateTimeSinceEpoch = &createTime + } + } + if artifact.LastUpdateTimeSinceEpoch != nil { + if updateTime, err := strconv.ParseInt(*artifact.LastUpdateTimeSinceEpoch, 10, 64); err == nil { + attrs.LastUpdateTimeSinceEpoch = &updateTime } } - return false -} -func (y *yamlCatalogImpl) load(path string, excludedModelsList []string) error { - bytes, err := os.ReadFile(path) - if err != nil { - return fmt.Errorf("failed to read %s file: %v", yamlCatalogPath, err) + metricsArtifact.Attributes = attrs + + // Handle properties + var artifactProperties []models.Properties + artifactProperties = append(artifactProperties, models.NewStringProperty("metricsType", artifact.MetricsType, false)) + + // Convert custom properties using helper function + if customProps := convertCustomProperties(&artifact.CustomProperties); customProps != nil { + metricsArtifact.CustomProperties = &customProps + } + + metricsArtifact.Properties = &artifactProperties + + return &dbmodels.CatalogArtifact{ + CatalogMetricsArtifact: metricsArtifact, } +} + +func (ym *yamlModel) ToModelProviderRecord() ModelProviderRecord { + model := dbmodels.CatalogModelImpl{} + artifacts := make([]dbmodels.CatalogArtifact, len(ym.Artifacts)) - var contents yamlCatalog - if err = yaml.UnmarshalStrict(bytes, &contents); err != nil { - 
return fmt.Errorf("failed to parse %s file: %v", yamlCatalogPath, err) + // Convert model attributes + model.Attributes = ym.convertModelAttributes() + + // Convert model properties + properties, customProperties := ym.convertModelProperties() + if len(properties) > 0 { + model.Properties = &properties + } + if len(customProperties) > 0 { + model.CustomProperties = &customProperties } - models := make(map[string]*yamlModel) - for i := range contents.Models { - modelName := contents.Models[i].Name - if isModelExcluded(modelName, excludedModelsList) { - continue + // Convert artifacts + for j := range ym.Artifacts { + if ym.Artifacts[j].CatalogModelArtifact != nil { + artifacts[j] = *convertModelArtifact(ym.Artifacts[j].CatalogModelArtifact) + } else if ym.Artifacts[j].CatalogMetricsArtifact != nil { + artifacts[j] = *convertMetricsArtifact(ym.Artifacts[j].CatalogMetricsArtifact) } - models[modelName] = &contents.Models[i] } - y.modelsLock.Lock() - defer y.modelsLock.Unlock() - y.models = models - - return nil + return ModelProviderRecord{ + Model: &model, + Artifacts: artifacts, + } } -const yamlCatalogPath = "yamlCatalogPath" - -func newYamlCatalog(source *CatalogSourceConfig) (CatalogSourceProvider, error) { - yamlModelFile, exists := source.Properties[yamlCatalogPath].(string) - if !exists || yamlModelFile == "" { - return nil, fmt.Errorf("missing %s string property", yamlCatalogPath) +func (a *yamlArtifact) UnmarshalJSON(buf []byte) error { + // This is very similar to generated code to unmarshal a + // CatalogArtifact, but this version properly handles artifacts without + // an artifactType, which is important for backwards compatibility. + var yat struct { + ArtifactType string `json:"artifactType"` } - yamlModelFile, err := filepath.Abs(yamlModelFile) + err := json.Unmarshal(buf, &yat) if err != nil { - return nil, fmt.Errorf("abs: %w", err) + return err } - // Excluded models is an optional source property. 
- var excludedModels []string - if excludedModelsData, ok := source.Properties["excludedModels"]; ok { - excludedModelsList, ok := excludedModelsData.([]any) - if !ok { - return nil, fmt.Errorf("'excludedModels' property should be a list") - } - excludedModels = make([]string, len(excludedModelsList)) - for i, v := range excludedModelsList { - excludedModels[i], ok = v.(string) - if !ok { - return nil, fmt.Errorf("invalid entry in 'excludedModels' list, expected a string") - } + switch yat.ArtifactType { + case "model-artifact", "": + err = json.Unmarshal(buf, &a.CatalogArtifact.CatalogModelArtifact) + if a.CatalogArtifact.CatalogModelArtifact != nil { + // Ensure artifactType is set even if it wasn't initially. + a.CatalogArtifact.CatalogModelArtifact.ArtifactType = "model-artifact" } + case "metrics-artifact": + err = json.Unmarshal(buf, &a.CatalogArtifact.CatalogMetricsArtifact) + default: + return fmt.Errorf("unknown artifactType: %s", yat.ArtifactType) } - p := &yamlCatalogImpl{ - models: make(map[string]*yamlModel), - } - err = p.load(yamlModelFile, excludedModels) + return err +} + +type yamlCatalog struct { + Source string `yaml:"source"` + Models []yamlModel `yaml:"models"` +} + +type yamlModelProvider struct { + path string + filter *ModelFilter +} + +func (p *yamlModelProvider) Models(ctx context.Context) (<-chan ModelProviderRecord, error) { + // read the catalog and report errors + catalog, err := p.read() if err != nil { return nil, err } + ch := make(chan ModelProviderRecord) go func() { - changes, err := getMonitor().Path(yamlModelFile) + defer close(ch) + + // Send the initial list right away. + p.emit(ctx, catalog, ch) + + // Watch for changes + changes, err := getMonitor().Path(ctx, p.path) if err != nil { + // Not fatal, we still have the inital load, but there + // won't be any updates. glog.Errorf("unable to watch YAML catalog file: %v", err) - // Not fatal, we just won't get automatic updates. 
+ return } - for range changes { - glog.Infof("Reloading YAML catalog %s", yamlModelFile) + for { + select { + case <-ctx.Done(): + return + case <-changes: + glog.Infof("Reloading YAML catalog %s", p.path) + + catalog, err = p.read() + if err != nil { + glog.Errorf("unable to load YAML catalog: %v", err) + continue + } - err = p.load(yamlModelFile, excludedModels) - if err != nil { - glog.Errorf("unable to load YAML catalog: %v", err) + p.emit(ctx, catalog, ch) } } }() - return p, nil + return ch, nil } -func init() { - if err := RegisterCatalogType("yaml", newYamlCatalog); err != nil { - panic(err) +func (p *yamlModelProvider) read() (*yamlCatalog, error) { + buf, err := os.ReadFile(p.path) + if err != nil { + return nil, fmt.Errorf("failed to read %s file: %v", yamlCatalogPathKey, err) } + + var catalog yamlCatalog + if err = yaml.UnmarshalStrict(buf, &catalog); err != nil { + return nil, fmt.Errorf("failed to parse %s file: %v", yamlCatalogPathKey, err) + } + + return &catalog, nil +} + +func (p *yamlModelProvider) emit(ctx context.Context, catalog *yamlCatalog, out chan<- ModelProviderRecord) { + done := ctx.Done() + for _, model := range catalog.Models { + if !p.filter.Allows(model.Name) { + continue + } + + select { + case out <- model.ToModelProviderRecord(): + case <-done: + return + } + } +} + +func newYamlModelProvider(ctx context.Context, source *Source, reldir string) (<-chan ModelProviderRecord, error) { + p := &yamlModelProvider{} + + path, exists := source.Properties[yamlCatalogPathKey].(string) + if !exists || path == "" { + return nil, fmt.Errorf("missing %s string property", yamlCatalogPathKey) + } + + if filepath.IsAbs(path) { + p.path = path + } else { + p.path = filepath.Join(reldir, path) + } + + var legacyExcluded []string + if raw, exists := source.Properties[excludedModelsKey]; exists { + values, ok := raw.([]any) + if !ok { + return nil, fmt.Errorf("%q property should be a list", excludedModelsKey) + } + + legacyExcluded = make([]string, 
len(values)) + for i, value := range values { + nameStr, ok := value.(string) + if !ok { + return nil, fmt.Errorf("%s: invalid list: index %d: wanted string, got %T", excludedModelsKey, i, value) + } + legacyExcluded[i] = nameStr + } + } + + filter, err := NewModelFilterFromSource(source, nil, legacyExcluded) + if err != nil { + return nil, err + } + p.filter = filter + + return p.Models(ctx) } diff --git a/catalog/internal/catalog/yaml_catalog_test.go b/catalog/internal/catalog/yaml_catalog_test.go index b0cc491e34..3d7c0590fd 100644 --- a/catalog/internal/catalog/yaml_catalog_test.go +++ b/catalog/internal/catalog/yaml_catalog_test.go @@ -2,206 +2,646 @@ package catalog import ( "context" + "encoding/json" + "fmt" + "os" + "path/filepath" + "slices" + "strings" "testing" + "time" model "github.com/kubeflow/model-registry/catalog/pkg/openapi" + "github.com/kubeflow/model-registry/internal/apiutils" + "github.com/kubeflow/model-registry/internal/db/models" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) -func TestYAMLCatalogGetModel(t *testing.T) { - assert := assert.New(t) - provider := testYAMLProvider(t, "testdata/test-yaml-catalog.yaml") +func TestYamlModelToModelProviderRecord(t *testing.T) { + tests := []struct { + name string + yamlModel yamlModel + expectError bool + validateFunc func(t *testing.T, record ModelProviderRecord) + }{ + { + name: "complete model with all properties", + yamlModel: yamlModel{ + CatalogModel: model.CatalogModel{ + Name: "test-model", + Description: apiutils.Of("Test model description"), + Readme: apiutils.Of("# Test Model\nThis is a test model."), + Maturity: apiutils.Of("Generally Available"), + Language: []string{"en", "fr"}, + Tasks: []string{"text-generation", "nlp"}, + Provider: apiutils.Of("IBM"), + Logo: apiutils.Of("https://example.com/logo.png"), + License: apiutils.Of("apache-2.0"), + LicenseLink: apiutils.Of("https://www.apache.org/licenses/LICENSE-2.0"), + LibraryName: 
apiutils.Of("transformers"), + SourceId: apiutils.Of("test-source"), + CreateTimeSinceEpoch: apiutils.Of("1678886400000"), + LastUpdateTimeSinceEpoch: apiutils.Of("1681564800000"), + CustomProperties: map[string]model.MetadataValue{ + "custom_key": { + MetadataStringValue: &model.MetadataStringValue{ + StringValue: "custom_value", + MetadataType: "MetadataStringValue", + }, + }, + }, + }, + Artifacts: []*yamlArtifact{ + { + CatalogArtifact: model.CatalogArtifact{ + CatalogModelArtifact: &model.CatalogModelArtifact{ + ArtifactType: "model-artifact", + Uri: "https://example.com/model.tar.gz", + CreateTimeSinceEpoch: apiutils.Of("1678886400000"), + LastUpdateTimeSinceEpoch: apiutils.Of("1681564800000"), + CustomProperties: map[string]model.MetadataValue{ + "model_size": { + MetadataStringValue: &model.MetadataStringValue{ + StringValue: "2GB", + MetadataType: "MetadataStringValue", + }, + }, + "accuracy": { + MetadataDoubleValue: &model.MetadataDoubleValue{ + DoubleValue: 0.95, + MetadataType: "MetadataDoubleValue", + }, + }, + }, + }, + }, + }, + { + CatalogArtifact: model.CatalogArtifact{ + CatalogMetricsArtifact: &model.CatalogMetricsArtifact{ + ArtifactType: "metrics-artifact", + MetricsType: "evaluation-metrics", + CreateTimeSinceEpoch: apiutils.Of("1678886400000"), + LastUpdateTimeSinceEpoch: apiutils.Of("1681564800000"), + CustomProperties: map[string]model.MetadataValue{ + "framework": { + MetadataStringValue: &model.MetadataStringValue{ + StringValue: "scikit-learn", + MetadataType: "MetadataStringValue", + }, + }, + }, + }, + }, + }, + }, + }, + validateFunc: func(t *testing.T, record ModelProviderRecord) { + require.NotNil(t, record.Model) - model, err := provider.GetModel(context.Background(), "rhelai1/granite-8b-code-base") - if assert.NoError(err) { - assert.Equal("rhelai1/granite-8b-code-base", model.Name) + attrs := record.Model.GetAttributes() + require.NotNil(t, attrs) + assert.Equal(t, int64(1678886400000), *attrs.CreateTimeSinceEpoch) + 
assert.Equal(t, int64(1681564800000), *attrs.LastUpdateTimeSinceEpoch) - newLogo := "foobar" - model.Logo = &newLogo + // Check regular properties (spec-defined properties) + regularProps := record.Model.GetProperties() + require.NotNil(t, regularProps) - model2, err := provider.GetModel(context.Background(), "rhelai1/granite-8b-code-base") - if assert.NoError(err) { - assert.NotEqual(model2.Logo, model.Logo, "changes to one returned object should not affect other return values") - } - } + regularPropMap := make(map[string]models.Properties) + for _, prop := range *regularProps { + regularPropMap[prop.Name] = prop + } - notFound, err := provider.GetModel(context.Background(), "foo") - assert.NoError(err) - assert.Nil(notFound) -} + // Validate spec-defined properties are regular properties + assert.Contains(t, regularPropMap, "description") + assert.Equal(t, "Test model description", *regularPropMap["description"].StringValue) + assert.False(t, regularPropMap["description"].IsCustomProperty) + + assert.Contains(t, regularPropMap, "readme") + assert.Equal(t, "# Test Model\nThis is a test model.", *regularPropMap["readme"].StringValue) + assert.False(t, regularPropMap["readme"].IsCustomProperty) + + assert.Contains(t, regularPropMap, "maturity") + assert.Equal(t, "Generally Available", *regularPropMap["maturity"].StringValue) + assert.False(t, regularPropMap["maturity"].IsCustomProperty) + + assert.Contains(t, regularPropMap, "provider") + assert.Equal(t, "IBM", *regularPropMap["provider"].StringValue) + assert.False(t, regularPropMap["provider"].IsCustomProperty) + + assert.Contains(t, regularPropMap, "logo") + assert.Equal(t, "https://example.com/logo.png", *regularPropMap["logo"].StringValue) + assert.False(t, regularPropMap["logo"].IsCustomProperty) + + assert.Contains(t, regularPropMap, "license") + assert.Equal(t, "apache-2.0", *regularPropMap["license"].StringValue) + assert.False(t, regularPropMap["license"].IsCustomProperty) + + assert.Contains(t, 
regularPropMap, "license_link") + assert.Equal(t, "https://www.apache.org/licenses/LICENSE-2.0", *regularPropMap["license_link"].StringValue) + assert.False(t, regularPropMap["license_link"].IsCustomProperty) + + assert.Contains(t, regularPropMap, "library_name") + assert.Equal(t, "transformers", *regularPropMap["library_name"].StringValue) + assert.False(t, regularPropMap["library_name"].IsCustomProperty) + + assert.Contains(t, regularPropMap, "source_id") + assert.Equal(t, "test-source", *regularPropMap["source_id"].StringValue) + assert.False(t, regularPropMap["source_id"].IsCustomProperty) + + // Validate array properties are JSON encoded as regular properties + assert.Contains(t, regularPropMap, "language") + var languages []string + err := json.Unmarshal([]byte(*regularPropMap["language"].StringValue), &languages) + require.NoError(t, err) + assert.Equal(t, []string{"en", "fr"}, languages) + assert.False(t, regularPropMap["language"].IsCustomProperty) + + assert.Contains(t, regularPropMap, "tasks") + var tasks []string + err = json.Unmarshal([]byte(*regularPropMap["tasks"].StringValue), &tasks) + require.NoError(t, err) + assert.Equal(t, []string{"text-generation", "nlp"}, tasks) + assert.False(t, regularPropMap["tasks"].IsCustomProperty) + + // Check custom properties + customProps := record.Model.GetCustomProperties() + require.NotNil(t, customProps) + + customPropMap := make(map[string]models.Properties) + for _, prop := range *customProps { + customPropMap[prop.Name] = prop + } + + // Validate truly custom properties + assert.Contains(t, customPropMap, "custom_key") + assert.Equal(t, "custom_value", *customPropMap["custom_key"].StringValue) + assert.True(t, customPropMap["custom_key"].IsCustomProperty) + + // Validate artifacts + assert.Len(t, record.Artifacts, 2) + + // Validate ModelArtifact + modelArtifact := record.Artifacts[0] + require.NotNil(t, modelArtifact.CatalogModelArtifact) + assert.Nil(t, modelArtifact.CatalogMetricsArtifact) + + // Check 
CatalogModelArtifact attributes + modelAttrs := modelArtifact.CatalogModelArtifact.GetAttributes() + assert.Equal(t, "https://example.com/model.tar.gz", *modelAttrs.URI) + assert.Equal(t, int64(1678886400000), *modelAttrs.CreateTimeSinceEpoch) + assert.Equal(t, int64(1681564800000), *modelAttrs.LastUpdateTimeSinceEpoch) + + // Check CatalogModelArtifact regular properties + modelArtifactProps := modelArtifact.CatalogModelArtifact.GetProperties() + require.NotNil(t, modelArtifactProps) + modelArtifactPropMap := make(map[string]models.Properties) + for _, prop := range *modelArtifactProps { + modelArtifactPropMap[prop.Name] = prop + } + assert.Contains(t, modelArtifactPropMap, "uri") + assert.Equal(t, "https://example.com/model.tar.gz", *modelArtifactPropMap["uri"].StringValue) + assert.False(t, modelArtifactPropMap["uri"].IsCustomProperty) + + // Check CatalogModelArtifact custom properties + modelArtifactCustomProps := modelArtifact.CatalogModelArtifact.GetCustomProperties() + require.NotNil(t, modelArtifactCustomProps) + modelArtifactCustomPropMap := make(map[string]models.Properties) + for _, prop := range *modelArtifactCustomProps { + modelArtifactCustomPropMap[prop.Name] = prop + } + assert.Contains(t, modelArtifactCustomPropMap, "model_size") + assert.Equal(t, "2GB", *modelArtifactCustomPropMap["model_size"].StringValue) + assert.True(t, modelArtifactCustomPropMap["model_size"].IsCustomProperty) + assert.Contains(t, modelArtifactCustomPropMap, "accuracy") + assert.Equal(t, 0.95, *modelArtifactCustomPropMap["accuracy"].DoubleValue) + assert.True(t, modelArtifactCustomPropMap["accuracy"].IsCustomProperty) + + // Validate CatalogMetricsArtifact + metricsArtifact := record.Artifacts[1] + require.NotNil(t, metricsArtifact.CatalogMetricsArtifact) + assert.Nil(t, metricsArtifact.CatalogModelArtifact) -func TestYAMLCatalogGetArtifacts(t *testing.T) { - assert := assert.New(t) - provider := testYAMLProvider(t, "testdata/test-yaml-catalog.yaml") - - // Test case 1: 
Model with artifacts - artifacts, err := provider.GetArtifacts(context.Background(), "rhelai1/granite-8b-code-base") - if assert.NoError(err) { - assert.NotNil(artifacts) - assert.Equal(int32(1), artifacts.Size) - assert.Equal(int32(1), artifacts.PageSize) - assert.Len(artifacts.Items, 1) - assert.Equal("oci://registry.redhat.io/rhelai1/granite-8b-code-base:1.3-1732870892", artifacts.Items[0].Uri) + // Check CatalogMetricsArtifact attributes + metricsAttrs := metricsArtifact.CatalogMetricsArtifact.GetAttributes() + assert.Equal(t, "evaluation-metrics", string(metricsAttrs.MetricsType)) + assert.Equal(t, int64(1678886400000), *metricsAttrs.CreateTimeSinceEpoch) + assert.Equal(t, int64(1681564800000), *metricsAttrs.LastUpdateTimeSinceEpoch) + + // Check CatalogMetricsArtifact regular properties + metricsArtifactProps := metricsArtifact.CatalogMetricsArtifact.GetProperties() + require.NotNil(t, metricsArtifactProps) + metricsArtifactPropMap := make(map[string]models.Properties) + for _, prop := range *metricsArtifactProps { + metricsArtifactPropMap[prop.Name] = prop + } + assert.Contains(t, metricsArtifactPropMap, "metricsType") + assert.Equal(t, "evaluation-metrics", *metricsArtifactPropMap["metricsType"].StringValue) + assert.False(t, metricsArtifactPropMap["metricsType"].IsCustomProperty) + + // Check CatalogMetricsArtifact custom properties + metricsArtifactCustomProps := metricsArtifact.CatalogMetricsArtifact.GetCustomProperties() + require.NotNil(t, metricsArtifactCustomProps) + metricsArtifactCustomPropMap := make(map[string]models.Properties) + for _, prop := range *metricsArtifactCustomProps { + metricsArtifactCustomPropMap[prop.Name] = prop + } + assert.Contains(t, metricsArtifactCustomPropMap, "framework") + assert.Equal(t, "scikit-learn", *metricsArtifactCustomPropMap["framework"].StringValue) + assert.True(t, metricsArtifactCustomPropMap["framework"].IsCustomProperty) + }, + }, + { + name: "minimal model with only required fields", + yamlModel: yamlModel{ 
+ CatalogModel: model.CatalogModel{ + Name: "minimal-model", + }, + }, + validateFunc: func(t *testing.T, record ModelProviderRecord) { + require.NotNil(t, record.Model) + + attrs := record.Model.GetAttributes() + require.NotNil(t, attrs) + assert.Nil(t, attrs.CreateTimeSinceEpoch) + assert.Nil(t, attrs.LastUpdateTimeSinceEpoch) + + // Should have no regular properties for minimal model + regularProps := record.Model.GetProperties() + if regularProps != nil { + *regularProps = slices.DeleteFunc(*regularProps, func(p models.Properties) bool { + switch p.Name { + case "language", "tasks": + return true + } + return false + }) + assert.Empty(t, *regularProps) + } + + // Should have no custom properties for minimal model + customProps := record.Model.GetCustomProperties() + if customProps != nil { + assert.Empty(t, *customProps) + } + + // Should have no artifacts for minimal model + assert.Empty(t, record.Artifacts) + }, + }, + { + name: "model with only ModelArtifact", + yamlModel: yamlModel{ + CatalogModel: model.CatalogModel{ + Name: "model-with-artifact", + }, + Artifacts: []*yamlArtifact{ + { + CatalogArtifact: model.CatalogArtifact{ + CatalogModelArtifact: &model.CatalogModelArtifact{ + ArtifactType: "model-artifact", + Uri: "s3://bucket/model.bin", + }, + }, + }, + }, + }, + validateFunc: func(t *testing.T, record ModelProviderRecord) { + require.NotNil(t, record.Model) + assert.Len(t, record.Artifacts, 1) + + artifact := record.Artifacts[0] + require.NotNil(t, artifact.CatalogModelArtifact) + assert.Nil(t, artifact.CatalogMetricsArtifact) + + attrs := artifact.CatalogModelArtifact.GetAttributes() + assert.Equal(t, "s3://bucket/model.bin", *attrs.URI) + assert.Nil(t, attrs.CreateTimeSinceEpoch) + assert.Nil(t, attrs.LastUpdateTimeSinceEpoch) + + // Check regular properties + props := artifact.CatalogModelArtifact.GetProperties() + require.NotNil(t, props) + assert.Len(t, *props, 1) + assert.Equal(t, "uri", (*props)[0].Name) + assert.Equal(t, 
"s3://bucket/model.bin", *(*props)[0].StringValue) + assert.False(t, (*props)[0].IsCustomProperty) + + // Should have no custom properties + customProps := artifact.CatalogModelArtifact.GetCustomProperties() + if customProps != nil { + assert.Empty(t, *customProps) + } + }, + }, + { + name: "model with only MetricsArtifact", + yamlModel: yamlModel{ + CatalogModel: model.CatalogModel{ + Name: "model-with-metrics", + }, + Artifacts: []*yamlArtifact{ + { + CatalogArtifact: model.CatalogArtifact{ + CatalogMetricsArtifact: &model.CatalogMetricsArtifact{ + ArtifactType: "metrics-artifact", + MetricsType: "performance-metrics", + }, + }, + }, + }, + }, + validateFunc: func(t *testing.T, record ModelProviderRecord) { + require.NotNil(t, record.Model) + assert.Len(t, record.Artifacts, 1) + + artifact := record.Artifacts[0] + assert.Nil(t, artifact.CatalogModelArtifact) + require.NotNil(t, artifact.CatalogMetricsArtifact) + + attrs := artifact.CatalogMetricsArtifact.GetAttributes() + assert.Equal(t, "performance-metrics", string(attrs.MetricsType)) + assert.Nil(t, attrs.CreateTimeSinceEpoch) + assert.Nil(t, attrs.LastUpdateTimeSinceEpoch) + + // Check regular properties + props := artifact.CatalogMetricsArtifact.GetProperties() + require.NotNil(t, props) + assert.Len(t, *props, 1) + assert.Equal(t, "metricsType", (*props)[0].Name) + assert.Equal(t, "performance-metrics", *(*props)[0].StringValue) + assert.False(t, (*props)[0].IsCustomProperty) + + // Should have no custom properties + customProps := artifact.CatalogMetricsArtifact.GetCustomProperties() + if customProps != nil { + assert.Empty(t, *customProps) + } + }, + }, + { + name: "artifacts with invalid timestamps", + yamlModel: yamlModel{ + CatalogModel: model.CatalogModel{ + Name: "model-with-invalid-artifact-timestamps", + }, + Artifacts: []*yamlArtifact{ + { + CatalogArtifact: model.CatalogArtifact{ + CatalogModelArtifact: &model.CatalogModelArtifact{ + ArtifactType: "model-artifact", + Uri: 
"https://example.com/model.bin", + CreateTimeSinceEpoch: apiutils.Of("invalid-timestamp"), + LastUpdateTimeSinceEpoch: apiutils.Of("also-invalid"), + }, + }, + }, + }, + }, + validateFunc: func(t *testing.T, record ModelProviderRecord) { + require.NotNil(t, record.Model) + assert.Len(t, record.Artifacts, 1) + + artifact := record.Artifacts[0] + require.NotNil(t, artifact.CatalogModelArtifact) + + attrs := artifact.CatalogModelArtifact.GetAttributes() + assert.Equal(t, "https://example.com/model.bin", *attrs.URI) + // Invalid timestamps should be ignored (not set) + assert.Nil(t, attrs.CreateTimeSinceEpoch) + assert.Nil(t, attrs.LastUpdateTimeSinceEpoch) + }, + }, + { + name: "model with invalid timestamps", + yamlModel: yamlModel{ + CatalogModel: model.CatalogModel{ + Name: "invalid-timestamp-model", + CreateTimeSinceEpoch: apiutils.Of("invalid-timestamp"), + LastUpdateTimeSinceEpoch: apiutils.Of("also-invalid"), + }, + }, + validateFunc: func(t *testing.T, record ModelProviderRecord) { + require.NotNil(t, record.Model) + + attrs := record.Model.GetAttributes() + require.NotNil(t, attrs) + assert.Equal(t, "invalid-timestamp-model", *attrs.Name) + // Invalid timestamps should be ignored (not set) + assert.Nil(t, attrs.CreateTimeSinceEpoch) + assert.Nil(t, attrs.LastUpdateTimeSinceEpoch) + }, + }, } - // Test case 2: Model with no artifacts - noArtifactsModel, err := provider.GetArtifacts(context.Background(), "model-with-no-artifacts") - if assert.NoError(err) { - assert.NotNil(noArtifactsModel) - assert.Equal(int32(0), noArtifactsModel.Size) - assert.Equal(int32(0), noArtifactsModel.PageSize) - assert.Len(noArtifactsModel.Items, 0) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + record := tt.yamlModel.ToModelProviderRecord() + tt.validateFunc(t, record) + }) } +} + +func TestNewYamlModelProviderAbsolutePath(t *testing.T) { + // Create a temporary YAML file + tempDir, err := os.MkdirTemp("", "yaml_catalog_test") + require.NoError(t, err) + defer 
os.RemoveAll(tempDir) + + yamlContent := ` +source: test +models: + - name: test-model + description: A test model +` - // Test case 3: Model not found - notFoundArtifacts, err := provider.GetArtifacts(context.Background(), "non-existent-model") - assert.NoError(err) - assert.Nil(notFoundArtifacts) + absolutePath := filepath.Join(tempDir, "catalog.yaml") + err = os.WriteFile(absolutePath, []byte(yamlContent), 0644) + require.NoError(t, err) + + // Test that absolute paths work correctly (bug fix) + t.Run("absolute path works correctly", func(t *testing.T) { + source := &Source{ + Properties: map[string]interface{}{ + yamlCatalogPathKey: absolutePath, // Use absolute path to existing file + }, + } + + // Use a different reldir - this shouldn't matter for absolute paths + reldir := "/some/other/directory" + + _, err := newYamlModelProvider(t.Context(), source, reldir) + + // This should succeed because we provided a valid absolute path + require.NoError(t, err, "Absolute path should work without reldir being prepended") + }) + + // Test that relative paths work correctly + t.Run("relative path works correctly", func(t *testing.T) { + // Create the expected structure for relative path + reldir := tempDir + relativePath := "catalog.yaml" + + source := &Source{ + Properties: map[string]interface{}{ + yamlCatalogPathKey: relativePath, + }, + } + + _, err := newYamlModelProvider(t.Context(), source, reldir) + + // This should work because filepath.Join(reldir, relativePath) points to our file + require.NoError(t, err, "Relative paths should work correctly") + }) } -func TestYAMLCatalogListModels(t *testing.T) { - assert := assert.New(t) - provider := testYAMLProvider(t, "testdata/test-list-models-catalog.yaml") - ctx := context.Background() - - // Test case 1: List all models, default sort (by name ascending) - models, err := provider.ListModels(ctx, ListModelsParams{}) - if assert.NoError(err) { - assert.NotNil(models) - assert.Equal(int32(6), models.Size) - 
assert.Equal(int32(6), models.PageSize) - assert.Len(models.Items, 6) - assert.Equal("Z-model", models.Items[0].Name) // Z-model should be first due to string comparison for alphabetical sort - assert.Equal("another-model-alpha", models.Items[1].Name) - assert.Equal("model-alpha", models.Items[2].Name) - assert.Equal("model-beta", models.Items[3].Name) - assert.Equal("model-gamma", models.Items[4].Name) - assert.Equal("model-with-no-tasks", models.Items[5].Name) - } +func TestYamlModelProviderFiltersApplied(t *testing.T) { + catalogPath := writeMiniCatalog(t, []string{"Granite/alpha", "Granite/beta-release", "DeepSeek/v1"}) - // Test case 2: List all models, sort by name ascending - models, err = provider.ListModels(ctx, ListModelsParams{OrderBy: model.ORDERBYFIELD_NAME, SortOrder: model.SORTORDER_ASC}) - if assert.NoError(err) { - assert.Equal(int32(6), models.Size) - assert.Equal("Z-model", models.Items[0].Name) - assert.Equal("another-model-alpha", models.Items[1].Name) + source := &Source{ + CatalogSource: model.CatalogSource{ + Id: "test", + Name: "Test source", + Labels: []string{}, + IncludedModels: []string{"Granite/*"}, + ExcludedModels: []string{"Granite/beta-*"}, + }, } - // Test case 3: List all models, sort by name descending - models, err = provider.ListModels(ctx, ListModelsParams{OrderBy: model.ORDERBYFIELD_NAME, SortOrder: model.SORTORDER_DESC}) - if assert.NoError(err) { - assert.Equal(int32(6), models.Size) - assert.Equal("model-with-no-tasks", models.Items[0].Name) - assert.Equal("model-gamma", models.Items[1].Name) - } + filter, err := NewModelFilterFromSource(source, nil, nil) + require.NoError(t, err) - // Test case 4: List all models, sort by created (CreateTimeSinceEpoch) ascending - models, err = provider.ListModels(ctx, ListModelsParams{OrderBy: model.ORDERBYFIELD_CREATE_TIME, SortOrder: model.SORTORDER_ASC}) - if assert.NoError(err) { - assert.Equal(int32(6), models.Size) - assert.Equal("model-with-no-tasks", models.Items[0].Name) // Jan 
1, 2023 - assert.Equal("model-gamma", models.Items[1].Name) // Feb 1, 2023 - } + names := collectNamesWithFilter(t, catalogPath, filter) + assert.ElementsMatch(t, []string{"Granite/alpha"}, names) +} - // Test case 5: List all models, sort by published (CreateTimeSinceEpoch) descending - models, err = provider.ListModels(ctx, ListModelsParams{OrderBy: model.ORDERBYFIELD_CREATE_TIME, SortOrder: model.SORTORDER_DESC}) - if assert.NoError(err) { - assert.Equal(int32(6), models.Size) - assert.Equal("Z-model", models.Items[0].Name) // Aug 2, 2023 - assert.Equal("another-model-alpha", models.Items[1].Name) // May 16, 2023 +func TestYamlModelProviderLegacyExcludesMerged(t *testing.T) { + catalogPath := writeMiniCatalog(t, []string{"Granite/alpha", "Granite/beta-release", "DeepSeek/v1"}) + + source := &Source{ + CatalogSource: model.CatalogSource{ + Id: "test", + Name: "Test source", + Labels: []string{}, + ExcludedModels: []string{"Granite/beta-*"}, + }, + Properties: map[string]any{ + yamlCatalogPathKey: catalogPath, + excludedModelsKey: []any{"DeepSeek/v1"}, + }, } - // Test case 6: Filter by query "model" (should match all 6 models) - models, err = provider.ListModels(ctx, ListModelsParams{Query: "model"}) - if assert.NoError(err) { - assert.Equal(int32(6), models.Size) - assert.Equal("Z-model", models.Items[0].Name) - assert.Equal("another-model-alpha", models.Items[1].Name) - assert.Equal("model-alpha", models.Items[2].Name) - assert.Equal("model-beta", models.Items[3].Name) - assert.Equal("model-gamma", models.Items[4].Name) - assert.Equal("model-with-no-tasks", models.Items[5].Name) + ctx, cancel := context.WithCancel(context.Background()) + t.Cleanup(cancel) + + records, err := newYamlModelProvider(ctx, source, filepath.Dir(catalogPath)) + require.NoError(t, err) + + names := collectRecordsFromChannel(t, records, 1, cancel) + assert.Equal(t, []string{"Granite/alpha"}, names) +} + +func TestYamlModelProviderInvalidPattern(t *testing.T) { + catalogPath := 
writeMiniCatalog(t, []string{"Granite/alpha"}) + + source := &Source{ + CatalogSource: model.CatalogSource{ + Id: "test", + Name: "Test source", + Labels: []string{}, + IncludedModels: []string{""}, + }, + Properties: map[string]any{ + yamlCatalogPathKey: catalogPath, + }, } - // Test case 7: Filter by query "text" (should match model-alpha, another-model-alpha) - models, err = provider.ListModels(ctx, ListModelsParams{Query: "text"}) - if assert.NoError(err) { - assert.Equal(int32(2), models.Size) - assert.Equal("another-model-alpha", models.Items[0].Name) // Alphabetical order - assert.Equal("model-alpha", models.Items[1].Name) + _, err := newYamlModelProvider(context.Background(), source, filepath.Dir(catalogPath)) + require.Error(t, err) + assert.Contains(t, err.Error(), "pattern cannot be empty") +} + +func writeMiniCatalog(t *testing.T, modelNames []string) string { + t.Helper() + + dir := t.TempDir() + path := filepath.Join(dir, "catalog.yaml") + + var b strings.Builder + b.WriteString("source: Test\nmodels:\n") + for _, name := range modelNames { + b.WriteString(fmt.Sprintf(" - name: %s\n", name)) } - // Test case 8: Filter by query "nlp" (should match model-alpha, model-gamma, another-model-alpha) - models, err = provider.ListModels(ctx, ListModelsParams{Query: "nlp"}) - if assert.NoError(err) { - assert.Equal(int32(3), models.Size) - assert.Equal("another-model-alpha", models.Items[0].Name) - assert.Equal("model-alpha", models.Items[1].Name) - assert.Equal("model-gamma", models.Items[2].Name) + err := os.WriteFile(path, []byte(b.String()), 0o644) + require.NoError(t, err) + + return path +} + +func collectNamesWithFilter(t *testing.T, catalogPath string, filter *ModelFilter) []string { + t.Helper() + + provider := &yamlModelProvider{ + path: catalogPath, + filter: filter, } - // Test case 9: Filter by query "IBM" (should match model-alpha, model-gamma) - models, err = provider.ListModels(ctx, ListModelsParams{Query: "IBM"}) - if assert.NoError(err) { - 
assert.Equal(int32(2), models.Size) - assert.Equal("model-alpha", models.Items[0].Name) - assert.Equal("model-gamma", models.Items[1].Name) + catalog, err := provider.read() + require.NoError(t, err) + + out := make(chan ModelProviderRecord, len(catalog.Models)) + provider.emit(context.Background(), catalog, out) + close(out) + + var names []string + for record := range out { + names = append(names, modelNameFromRecord(t, record)) } - // Test case 10: Filter by query "transformers" (should match model-alpha) - models, err = provider.ListModels(ctx, ListModelsParams{Query: "transformers"}) - if assert.NoError(err) { - assert.Equal(int32(1), models.Size) - assert.Equal("model-alpha", models.Items[0].Name) + return names +} + +func collectRecordsFromChannel(t *testing.T, records <-chan ModelProviderRecord, expected int, cancel context.CancelFunc) []string { + t.Helper() + + names := make([]string, 0, expected) + timeout := time.After(2 * time.Second) + + for len(names) < expected { + select { + case record, ok := <-records: + if !ok { + t.Fatalf("channel closed before receiving %d records", expected) + } + names = append(names, modelNameFromRecord(t, record)) + case <-timeout: + t.Fatalf("timed out waiting for %d records", expected) + } } - // Test case 11: Filter by query "nonexistent" (should return empty list) - models, err = provider.ListModels(ctx, ListModelsParams{Query: "nonexistent"}) - assert.NoError(err) - assert.NotNil(models) - assert.Equal(int32(0), models.Size) - assert.Equal(int32(0), models.PageSize) - assert.Len(models.Items, 0) - - // Test case 12: Empty catalog - emptyProvider := testYAMLProvider(t, "testdata/empty-catalog.yaml") // Assuming an empty-catalog.yaml exists or will be created - emptyModels, err := emptyProvider.ListModels(ctx, ListModelsParams{}) - assert.NoError(err) - assert.NotNil(emptyModels) - assert.Equal(int32(0), emptyModels.Size) - assert.Equal(int32(0), emptyModels.PageSize) - assert.Len(emptyModels.Items, 0) - - // Test case 
13: Test with excluded models - excludedProvider := testYAMLProviderWithExclusions(t, "testdata/test-list-models-catalog.yaml", []any{ - "model-alpha", - }) - excludedModels, err := excludedProvider.ListModels(ctx, ListModelsParams{}) - if assert.NoError(err) { - assert.NotNil(excludedModels) - assert.Equal(int32(5), excludedModels.Size) - for _, m := range excludedModels.Items { - assert.NotEqual("model-alpha", m.Name) + cancel() + + select { + case _, ok := <-records: + if ok { + t.Fatalf("received more than %d records", expected) } + case <-time.After(500 * time.Millisecond): + t.Fatalf("channel did not close after cancellation") } -} -func testYAMLProvider(t *testing.T, path string) CatalogSourceProvider { - return testYAMLProviderWithExclusions(t, path, nil) + return names } -func testYAMLProviderWithExclusions(t *testing.T, path string, excludedModels []any) CatalogSourceProvider { - properties := map[string]any{ - yamlCatalogPath: path, - } - if excludedModels != nil { - properties["excludedModels"] = excludedModels - } - provider, err := newYamlCatalog(&CatalogSourceConfig{ - Properties: properties, - }) - if err != nil { - t.Fatalf("newYamlCatalog(%s) with exclusions failed: %v", path, err) - } - return provider +func modelNameFromRecord(t *testing.T, record ModelProviderRecord) string { + t.Helper() + + require.NotNil(t, record.Model) + attrs := record.Model.GetAttributes() + require.NotNil(t, attrs) + require.NotNil(t, attrs.Name) + return *attrs.Name } diff --git a/catalog/internal/db/filter/entity_mappings.go b/catalog/internal/db/filter/entity_mappings.go new file mode 100644 index 0000000000..c0430f1106 --- /dev/null +++ b/catalog/internal/db/filter/entity_mappings.go @@ -0,0 +1,121 @@ +package filter + +import ( + "strings" + + "github.com/kubeflow/model-registry/internal/db/filter" +) + +// CatalogRestEntityType represents catalog-specific REST API entity types +type CatalogRestEntityType string + +const ( + RestEntityCatalogModel 
CatalogRestEntityType = "CatalogModel" + RestEntityCatalogArtifact CatalogRestEntityType = "CatalogArtifact" +) + +// catalogEntityMappings implements EntityMappingFunctions for the catalog package +type catalogEntityMappings struct{} + +// NewCatalogEntityMappings creates a new instance of catalog entity mappings +func NewCatalogEntityMappings() filter.EntityMappingFunctions { + return &catalogEntityMappings{} +} + +// GetMLMDEntityType maps catalog REST entity types to their underlying MLMD entity type +func (c *catalogEntityMappings) GetMLMDEntityType(restEntityType filter.RestEntityType) filter.EntityType { + switch restEntityType { + case filter.RestEntityType(RestEntityCatalogArtifact): + return filter.EntityTypeArtifact + default: + return filter.EntityTypeContext + } +} + +// GetPropertyDefinitionForRestEntity returns property definition for a catalog REST entity type +func (c *catalogEntityMappings) GetPropertyDefinitionForRestEntity(restEntityType filter.RestEntityType, propertyName string) filter.PropertyDefinition { + // Check if this is a well-known property for catalog entities + if restEntityType == filter.RestEntityType(RestEntityCatalogModel) { + if _, isWellKnown := catalogModelProperties[propertyName]; isWellKnown { + // Use the well-known property definition + return catalogModelProperties[propertyName] + } + + // Check if this is a property path referencing a related artifact + // Format: artifacts. or artifacts.customProperties. 
+ if strings.HasPrefix(propertyName, "artifacts.") { + // Extract the artifact property path (everything after "artifacts.") + artifactPropertyPath := strings.TrimPrefix(propertyName, "artifacts.") + + // Return a RelatedEntity property definition + // ValueType is left empty to allow runtime type inference from the value + return filter.PropertyDefinition{ + Location: filter.RelatedEntity, + ValueType: "", // Empty to enable runtime type inference + Column: artifactPropertyPath, + RelatedEntityType: filter.RelatedEntityArtifact, + RelatedProperty: artifactPropertyPath, + JoinTable: "Attribution", // Join through Attribution table + } + } + } + + if restEntityType == filter.RestEntityType(RestEntityCatalogArtifact) { + if _, isWellKnown := catalogArtifactProperties[propertyName]; isWellKnown { + // Use the well-known property definition + return catalogArtifactProperties[propertyName] + } + } + + // Not a well-known property for this entity type, treat as custom + return filter.PropertyDefinition{ + Location: filter.Custom, + ValueType: filter.StringValueType, // Default, will be inferred at runtime + Column: propertyName, // Use the property name as-is for custom properties + } +} + +// IsChildEntity returns true if the catalog REST entity type uses prefixed names (parentId:name) +func (c *catalogEntityMappings) IsChildEntity(entityType filter.RestEntityType) bool { + return false +} + +// catalogModelProperties defines the allowed properties for CatalogModel entities +var catalogModelProperties = map[string]filter.PropertyDefinition{ + // Common Context properties + "id": {Location: filter.EntityTable, ValueType: filter.IntValueType, Column: "id"}, + "name": {Location: filter.EntityTable, ValueType: filter.StringValueType, Column: "name"}, + "externalId": {Location: filter.EntityTable, ValueType: filter.StringValueType, Column: "external_id"}, + "createTimeSinceEpoch": {Location: filter.EntityTable, ValueType: filter.IntValueType, Column: 
"create_time_since_epoch"}, + "lastUpdateTimeSinceEpoch": {Location: filter.EntityTable, ValueType: filter.IntValueType, Column: "last_update_time_since_epoch"}, + + // CatalogModel-specific properties stored in ContextProperty table + "source_id": {Location: filter.PropertyTable, ValueType: filter.StringValueType, Column: "source_id"}, + "description": {Location: filter.PropertyTable, ValueType: filter.StringValueType, Column: "description"}, + "owner": {Location: filter.PropertyTable, ValueType: filter.StringValueType, Column: "owner"}, + "state": {Location: filter.PropertyTable, ValueType: filter.StringValueType, Column: "state"}, + "language": {Location: filter.PropertyTable, ValueType: filter.ArrayValueType, Column: "language"}, + "library_name": {Location: filter.PropertyTable, ValueType: filter.StringValueType, Column: "library_name"}, + "license_link": {Location: filter.PropertyTable, ValueType: filter.StringValueType, Column: "license_link"}, + "license": {Location: filter.PropertyTable, ValueType: filter.StringValueType, Column: "license"}, + "logo": {Location: filter.PropertyTable, ValueType: filter.StringValueType, Column: "logo"}, + "maturity": {Location: filter.PropertyTable, ValueType: filter.StringValueType, Column: "maturity"}, + "provider": {Location: filter.PropertyTable, ValueType: filter.StringValueType, Column: "provider"}, + "readme": {Location: filter.PropertyTable, ValueType: filter.StringValueType, Column: "readme"}, + "tasks": {Location: filter.PropertyTable, ValueType: filter.ArrayValueType, Column: "tasks"}, +} + +// catalogArtifactProperties defines the allowed properties for CatalogArtifact entities +var catalogArtifactProperties = map[string]filter.PropertyDefinition{ + // Common Artifact properties + "id": {Location: filter.EntityTable, ValueType: filter.IntValueType, Column: "id"}, + "name": {Location: filter.EntityTable, ValueType: filter.StringValueType, Column: "name"}, + "externalId": {Location: filter.EntityTable, ValueType: 
filter.StringValueType, Column: "external_id"}, + "createTimeSinceEpoch": {Location: filter.EntityTable, ValueType: filter.IntValueType, Column: "create_time_since_epoch"}, + "lastUpdateTimeSinceEpoch": {Location: filter.EntityTable, ValueType: filter.IntValueType, Column: "last_update_time_since_epoch"}, + "uri": {Location: filter.EntityTable, ValueType: filter.StringValueType, Column: "uri"}, + "state": {Location: filter.EntityTable, ValueType: filter.StringValueType, Column: "state"}, + + // Artifact type (stored in type_id but we can filter by string representation) + "artifactType": {Location: filter.PropertyTable, ValueType: filter.StringValueType, Column: "artifactType"}, +} diff --git a/catalog/internal/db/models/catalog_artifact.go b/catalog/internal/db/models/catalog_artifact.go new file mode 100644 index 0000000000..958d6a1169 --- /dev/null +++ b/catalog/internal/db/models/catalog_artifact.go @@ -0,0 +1,34 @@ +package models + +import ( + "github.com/kubeflow/model-registry/catalog/internal/db/filter" + dbfilter "github.com/kubeflow/model-registry/internal/db/filter" + "github.com/kubeflow/model-registry/internal/db/models" +) + +type CatalogArtifactListOptions struct { + models.Pagination + Name *string + ExternalID *string + ParentResourceID *int32 + ArtifactType *string + ArtifactTypesFilter []string +} + +// GetRestEntityType implements the FilterApplier interface +// This enables advanced filtering support for catalog artifacts +func (c *CatalogArtifactListOptions) GetRestEntityType() dbfilter.RestEntityType { + return dbfilter.RestEntityType(filter.RestEntityCatalogArtifact) +} + +// CatalogArtifact is a discriminated union that can hold different catalog artifact types +type CatalogArtifact struct { + CatalogModelArtifact CatalogModelArtifact + CatalogMetricsArtifact CatalogMetricsArtifact +} + +type CatalogArtifactRepository interface { + GetByID(id int32) (CatalogArtifact, error) + List(listOptions CatalogArtifactListOptions) 
(*models.ListWrapper[CatalogArtifact], error) + DeleteByParentID(artifactType string, parentResourceID int32) error +} diff --git a/catalog/internal/db/models/catalog_metrics_artifact.go b/catalog/internal/db/models/catalog_metrics_artifact.go new file mode 100644 index 0000000000..b0064f3979 --- /dev/null +++ b/catalog/internal/db/models/catalog_metrics_artifact.go @@ -0,0 +1,49 @@ +package models + +import ( + "github.com/kubeflow/model-registry/internal/db/filter" + "github.com/kubeflow/model-registry/internal/db/models" +) + +type MetricsType string + +const ( + MetricsTypePerformance MetricsType = "performance-metrics" + MetricsTypeAccuracy MetricsType = "accuracy-metrics" + CatalogMetricsArtifactType = "metrics-artifact" +) + +type CatalogMetricsArtifactListOptions struct { + models.Pagination + Name *string + ExternalID *string + ParentResourceID *int32 +} + +// GetRestEntityType implements the FilterApplier interface +func (c *CatalogMetricsArtifactListOptions) GetRestEntityType() filter.RestEntityType { + return filter.RestEntityModelArtifact // Reusing existing filter type +} + +type CatalogMetricsArtifactAttributes struct { + Name *string + ArtifactType *string + MetricsType MetricsType + ExternalID *string + CreateTimeSinceEpoch *int64 + LastUpdateTimeSinceEpoch *int64 +} + +type CatalogMetricsArtifact interface { + models.Entity[CatalogMetricsArtifactAttributes] +} + +type CatalogMetricsArtifactImpl = models.BaseEntity[CatalogMetricsArtifactAttributes] + +type CatalogMetricsArtifactRepository interface { + GetByID(id int32) (CatalogMetricsArtifact, error) + List(listOptions CatalogMetricsArtifactListOptions) (*models.ListWrapper[CatalogMetricsArtifact], error) + Save(metricsArtifact CatalogMetricsArtifact, parentResourceID *int32) (CatalogMetricsArtifact, error) + // BatchSave inserts multiple metrics artifacts in a single batch operation + BatchSave(metricsArtifacts []CatalogMetricsArtifact, parentResourceID *int32) ([]CatalogMetricsArtifact, error) 
+} diff --git a/catalog/internal/db/models/catalog_model.go b/catalog/internal/db/models/catalog_model.go new file mode 100644 index 0000000000..1769e7d259 --- /dev/null +++ b/catalog/internal/db/models/catalog_model.go @@ -0,0 +1,40 @@ +package models + +import ( + catalogfilter "github.com/kubeflow/model-registry/catalog/internal/db/filter" + "github.com/kubeflow/model-registry/internal/db/filter" + "github.com/kubeflow/model-registry/internal/db/models" +) + +type CatalogModelListOptions struct { + models.Pagination + Name *string + ExternalID *string + SourceIDs *[]string + Query *string +} + +// GetRestEntityType implements the FilterApplier interface +func (c *CatalogModelListOptions) GetRestEntityType() filter.RestEntityType { + return filter.RestEntityType(catalogfilter.RestEntityCatalogModel) +} + +type CatalogModelAttributes struct { + Name *string + ExternalID *string + CreateTimeSinceEpoch *int64 + LastUpdateTimeSinceEpoch *int64 +} + +type CatalogModel interface { + models.Entity[CatalogModelAttributes] +} + +type CatalogModelImpl = models.BaseEntity[CatalogModelAttributes] + +type CatalogModelRepository interface { + GetByID(id int32) (CatalogModel, error) + GetByName(name string) (CatalogModel, error) + List(listOptions CatalogModelListOptions) (*models.ListWrapper[CatalogModel], error) + Save(model CatalogModel) (CatalogModel, error) +} diff --git a/catalog/internal/db/models/catalog_model_artifact.go b/catalog/internal/db/models/catalog_model_artifact.go new file mode 100644 index 0000000000..c39b3b2488 --- /dev/null +++ b/catalog/internal/db/models/catalog_model_artifact.go @@ -0,0 +1,41 @@ +package models + +import ( + "github.com/kubeflow/model-registry/internal/db/filter" + "github.com/kubeflow/model-registry/internal/db/models" +) + +const CatalogModelArtifactType = "model-artifact" + +type CatalogModelArtifactListOptions struct { + models.Pagination + Name *string + ExternalID *string + ParentResourceID *int32 +} + +// GetRestEntityType 
implements the FilterApplier interface +func (c *CatalogModelArtifactListOptions) GetRestEntityType() filter.RestEntityType { + return filter.RestEntityModelArtifact // Reusing existing filter type +} + +type CatalogModelArtifactAttributes struct { + Name *string + URI *string + ArtifactType *string + ExternalID *string + CreateTimeSinceEpoch *int64 + LastUpdateTimeSinceEpoch *int64 +} + +type CatalogModelArtifact interface { + models.Entity[CatalogModelArtifactAttributes] +} + +type CatalogModelArtifactImpl = models.BaseEntity[CatalogModelArtifactAttributes] + +type CatalogModelArtifactRepository interface { + GetByID(id int32) (CatalogModelArtifact, error) + List(listOptions CatalogModelArtifactListOptions) (*models.ListWrapper[CatalogModelArtifact], error) + Save(modelArtifact CatalogModelArtifact, parentResourceID *int32) (CatalogModelArtifact, error) +} diff --git a/catalog/internal/db/models/property_options.go b/catalog/internal/db/models/property_options.go new file mode 100644 index 0000000000..946cb1251d --- /dev/null +++ b/catalog/internal/db/models/property_options.go @@ -0,0 +1,127 @@ +package models + +import ( + "context" + "strings" + "sync" + "time" + + "github.com/golang/glog" +) + +type PropertyOptionType int + +const ( + ContextPropertyOptionType PropertyOptionType = iota + ArtifactPropertyOptionType +) + +type PropertyOption struct { + TypeID int32 `json:"type_id"` + Name string `json:"name"` + IsCustomProperty bool `json:"is_custom_property"` + StringValue []string `json:"string_value"` + ArrayValue []string `json:"array_value"` + MinDoubleValue *float64 `json:"min_double_value"` + MaxDoubleValue *float64 `json:"max_double_value"` + MinIntValue *int64 `json:"min_int_value"` + MaxIntValue *int64 `json:"max_int_value"` +} + +const ( + StringValueField = "string_value" + DoubleValueField = "double_value" + IntValueField = "int_value" + ArrayValueField = "array_value" +) + +// ValueField returns string_value, array_value, double_value or int_value 
+// depending on which fields are non-nil. +func (po *PropertyOption) ValueField() string { + switch { + case po.MinDoubleValue != nil || po.MaxDoubleValue != nil: + return DoubleValueField + case po.MinIntValue != nil || po.MaxIntValue != nil: + return IntValueField + case len(po.ArrayValue) > 0: + return ArrayValueField + } + + return StringValueField +} + +// FullName returns the complete name of the property to pass to filterQuery. Prefix is optional. +func (po *PropertyOption) FullName(prefix string) string { + parts := make([]string, 0, 3) + + if prefix != "" { + parts = append(parts, prefix) + } + + parts = append(parts, po.Name) + + if po.IsCustomProperty { + parts = append(parts, po.ValueField()) + } + + return strings.Join(parts, ".") +} + +type PropertyOptionsRepository interface { + // Refresh rebuilds the materialized view. + Refresh(t PropertyOptionType) error + // List returns all the options for a type. If typeID is 0, all types are returned. + List(t PropertyOptionType, typeID int32) ([]PropertyOption, error) +} + +// PropertyOptionsRefresher refreshes the materialized views after a short +// delay to prevent unnecessary duplicate refreshes. 
+type PropertyOptionsRefresher struct { + ticker *time.Ticker + delay time.Duration + mu sync.Mutex +} + +func NewPropertyOptionsRefresher(ctx context.Context, repo PropertyOptionsRepository, delay time.Duration) *PropertyOptionsRefresher { + ticker := time.NewTicker(time.Hour) + ticker.Stop() + + r := &PropertyOptionsRefresher{ + ticker: ticker, + delay: delay, + } + go r.bg(ctx, repo) + return r +} + +func (r *PropertyOptionsRefresher) Trigger() { + r.mu.Lock() + defer r.mu.Unlock() + r.ticker.Reset(r.delay) +} + +func (r *PropertyOptionsRefresher) bg(ctx context.Context, repo PropertyOptionsRepository) { + done := ctx.Done() + for { + select { + case <-done: + return + case <-r.ticker.C: + // Fallthrough + } + + r.mu.Lock() + r.ticker.Stop() + r.mu.Unlock() + + err := repo.Refresh(ContextPropertyOptionType) + if err != nil { + glog.Warningf("Failed to refresh context property options: %v", err) + } + + err = repo.Refresh(ArtifactPropertyOptionType) + if err != nil { + glog.Warningf("Failed to refresh artifact property options: %v", err) + } + } +} diff --git a/catalog/internal/db/schema/artifact_property_options.go b/catalog/internal/db/schema/artifact_property_options.go new file mode 100644 index 0000000000..cec1edf3d4 --- /dev/null +++ b/catalog/internal/db/schema/artifact_property_options.go @@ -0,0 +1,23 @@ +package schema + +import "github.com/lib/pq" + +const TableNameArtifactPropertyOption = "artifact_property_options" + +// ArtifactPropertyOption mapped from materialized view +type ArtifactPropertyOption struct { + TypeID int32 `gorm:"column:type_id;not null" json:"type_id"` + Name string `gorm:"column:name;not null" json:"name"` + IsCustomProperty bool `gorm:"column:is_custom_property;not null" json:"is_custom_property"` + StringValue *pq.StringArray `gorm:"column:string_value;type:text[]" json:"string_value"` + ArrayValue *pq.StringArray `gorm:"column:array_value;type:text[]" json:"array_value"` + MinDoubleValue *float64 
`gorm:"column:min_double_value" json:"min_double_value"` + MaxDoubleValue *float64 `gorm:"column:max_double_value" json:"max_double_value"` + MinIntValue *int64 `gorm:"column:min_int_value" json:"min_int_value"` + MaxIntValue *int64 `gorm:"column:max_int_value" json:"max_int_value"` +} + +// TableName ArtifactPropertyOption's table name +func (*ArtifactPropertyOption) TableName() string { + return TableNameArtifactPropertyOption +} diff --git a/catalog/internal/db/schema/context_property_options.go b/catalog/internal/db/schema/context_property_options.go new file mode 100644 index 0000000000..43862f4b67 --- /dev/null +++ b/catalog/internal/db/schema/context_property_options.go @@ -0,0 +1,23 @@ +package schema + +import "github.com/lib/pq" + +const TableNameContextPropertyOption = "context_property_options" + +// ContextPropertyOption mapped from materialized view +type ContextPropertyOption struct { + TypeID int32 `gorm:"column:type_id;not null" json:"type_id"` + Name string `gorm:"column:name;not null" json:"name"` + IsCustomProperty bool `gorm:"column:is_custom_property;not null" json:"is_custom_property"` + StringValue *pq.StringArray `gorm:"column:string_value;type:text[]" json:"string_value"` + ArrayValue *pq.StringArray `gorm:"column:array_value;type:text[]" json:"array_value"` + MinDoubleValue *float64 `gorm:"column:min_double_value" json:"min_double_value"` + MaxDoubleValue *float64 `gorm:"column:max_double_value" json:"max_double_value"` + MinIntValue *int64 `gorm:"column:min_int_value" json:"min_int_value"` + MaxIntValue *int64 `gorm:"column:max_int_value" json:"max_int_value"` +} + +// TableName ContextPropertyOption's table name +func (*ContextPropertyOption) TableName() string { + return TableNameContextPropertyOption +} diff --git a/catalog/internal/db/service/catalog_artifact.go b/catalog/internal/db/service/catalog_artifact.go new file mode 100644 index 0000000000..c6882e90f9 --- /dev/null +++ b/catalog/internal/db/service/catalog_artifact.go @@ 
-0,0 +1,453 @@ +package service + +import ( + "errors" + "fmt" + "strings" + + "github.com/golang/glog" + catalogfilter "github.com/kubeflow/model-registry/catalog/internal/db/filter" + "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/internal/datastore" + "github.com/kubeflow/model-registry/internal/db/dbutil" + dbmodels "github.com/kubeflow/model-registry/internal/db/models" + "github.com/kubeflow/model-registry/internal/db/schema" + "github.com/kubeflow/model-registry/internal/db/scopes" + "github.com/kubeflow/model-registry/internal/db/service" + "github.com/kubeflow/model-registry/internal/db/utils" + "github.com/kubeflow/model-registry/pkg/api" + "gorm.io/gorm" +) + +var ErrCatalogArtifactNotFound = errors.New("catalog artifact by id not found") + +// isValidPropertyName validates basic property name constraints +func isValidPropertyName(name string) bool { + // Empty names are not valid + if name == "" { + return false + } + // Check length (reasonable limit to prevent abuse) + if len(name) > 255 { + return false + } + + return true +} + +type CatalogArtifactRepositoryImpl struct { + db *gorm.DB + idToName map[int32]string + nameToID datastore.ArtifactTypeMap +} + +func NewCatalogArtifactRepository(db *gorm.DB, artifactTypes datastore.ArtifactTypeMap) models.CatalogArtifactRepository { + idToName := make(map[int32]string, len(artifactTypes)) + for name, id := range artifactTypes { + idToName[id] = name + } + + return &CatalogArtifactRepositoryImpl{ + db: db, + nameToID: artifactTypes, + idToName: idToName, + } +} + +func (r *CatalogArtifactRepositoryImpl) GetByID(id int32) (models.CatalogArtifact, error) { + artifact := &schema.Artifact{} + properties := []schema.ArtifactProperty{} + + if err := r.db.Where("id = ?", id).First(artifact).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return models.CatalogArtifact{}, fmt.Errorf("%w: %v", ErrCatalogArtifactNotFound, err) + } + return 
models.CatalogArtifact{}, fmt.Errorf("error getting catalog artifact by id: %w", err) + } + + if err := r.db.Where("artifact_id = ?", artifact.ID).Find(&properties).Error; err != nil { + return models.CatalogArtifact{}, fmt.Errorf("error getting properties by artifact id: %w", err) + } + + // Use the same logic as mapDataLayerToCatalogArtifact to handle artifact types + mappedArtifact, err := r.mapDataLayerToCatalogArtifact(*artifact, properties) + if err != nil { + return models.CatalogArtifact{}, fmt.Errorf("error mapping catalog artifact: %w", err) + } + + return mappedArtifact, nil +} + +// List retrieves catalog artifacts with support for filtering, pagination, and custom property ordering. +// +// The method handles ordering in the following priority: +// 1. NAME - Special catalog-specific ordering +// 2. Standard columns (ID, CREATE_TIME, LAST_UPDATE_TIME) - Uses allowed column map +// 3. Custom properties (e.g., accuracy.double_value) - Dynamic property-based ordering +// 4. Fallback to ID ordering for invalid or unrecognized inputs +func (r *CatalogArtifactRepositoryImpl) List(listOptions models.CatalogArtifactListOptions) (*dbmodels.ListWrapper[models.CatalogArtifact], error) { + list := dbmodels.ListWrapper[models.CatalogArtifact]{ + PageSize: listOptions.GetPageSize(), + } + + artifacts := []models.CatalogArtifact{} + artifactsArt := []schema.Artifact{} + + query := r.db.Model(&schema.Artifact{}) + + // Apply filters similar to the internal artifact service + if listOptions.Name != nil { + // Name is not prefixed with the parent resource id to allow for filtering by name only + // Parent resource Id is used later to filter by Attribution.context_id + query = query.Where("name LIKE ?", fmt.Sprintf("%%:%s", *listOptions.Name)) + } else if listOptions.ExternalID != nil { + query = query.Where("external_id = ?", listOptions.ExternalID) + } + + // Filter by artifact type(s) if specified + if len(listOptions.ArtifactTypesFilter) > 0 { + // Handle multiple 
artifact types + typeIDs := []int32{} + for _, artifactType := range listOptions.ArtifactTypesFilter { + // Handle "null" string as invalid artifact type + if artifactType == "null" || artifactType == "" { + return nil, fmt.Errorf("invalid artifact type: empty or null value provided: %w", api.ErrBadRequest) + } + typeID, err := r.getTypeIDFromArtifactType(artifactType) + if err != nil { + return nil, fmt.Errorf("invalid catalog artifact type %s: %w", artifactType, err) + } + typeIDs = append(typeIDs, typeID) + } + query = query.Where("type_id IN ?", typeIDs) + } else if listOptions.ArtifactType != nil { + // Handle single artifact type for backward compatibility + // Handle "null" string as invalid artifact type + if *listOptions.ArtifactType == "null" || *listOptions.ArtifactType == "" { + return nil, fmt.Errorf("invalid artifact type: empty or null value provided: %w", api.ErrBadRequest) + } + typeID, err := r.getTypeIDFromArtifactType(*listOptions.ArtifactType) + if err != nil { + return nil, fmt.Errorf("invalid catalog artifact type %s: %w", *listOptions.ArtifactType, err) + } + query = query.Where("type_id = ?", typeID) + } else { + // Only include catalog artifact types + catalogTypeIDs := []int32{} + for _, typeID := range r.nameToID { + catalogTypeIDs = append(catalogTypeIDs, typeID) + } + query = query.Where("type_id IN ?", catalogTypeIDs) + } + + // Apply parent resource filtering if specified + if listOptions.ParentResourceID != nil { + // Proper GORM JOIN: Use helper that respects naming strategy + query = query.Joins(utils.BuildAttributionJoin(query)). + Where(utils.GetColumnRef(query, &schema.Attribution{}, "context_id")+" = ?", listOptions.ParentResourceID). 
+ Select(utils.GetTableName(query, &schema.Artifact{}) + ".*") // Explicitly select from Artifact table to avoid ambiguity + } + + // Apply advanced filter query if supported + var err error + query, err = service.ApplyFilterQuery(query, &listOptions, catalogfilter.NewCatalogEntityMappings()) + if err != nil { + return nil, err + } + + orderBy := listOptions.GetOrderBy() + sortOrder := listOptions.GetSortOrder() + nextPageToken := listOptions.GetNextPageToken() + pageSize := listOptions.GetPageSize() + + // Handle NAME ordering specially (catalog-specific) to avoid string-to-integer cast issues + if orderBy == "NAME" { + artifactTable := utils.GetTableName(query, &schema.Artifact{}) + query = ApplyNameOrdering(query, artifactTable, sortOrder, nextPageToken, pageSize) + } else if _, isAllowedColumn := CatalogOrderByColumns[orderBy]; isAllowedColumn { + // Handle standard allowed columns (ID, CREATE_TIME, LAST_UPDATE_TIME) + pagination := &dbmodels.Pagination{ + PageSize: &pageSize, + OrderBy: &orderBy, + SortOrder: &sortOrder, + NextPageToken: &nextPageToken, + } + + // Use catalog-specific allowed columns + query = query.Scopes(scopes.PaginateWithOptions(artifactsArt, pagination, r.db, "Artifact", CatalogOrderByColumns)) + } else { + // Assume it's a custom property ordering (e.g., accuracy.double_value, timestamp.string_value) + query, err = r.applyCustomOrdering(query, &listOptions) + if err != nil { + return nil, err + } + } + + if err := query.Find(&artifactsArt).Error; err != nil { + // Sanitize database errors to avoid exposing internal details to users + err = dbutil.SanitizeDatabaseError(err) + return nil, fmt.Errorf("error listing catalog artifacts: %w", err) + } + + hasMore := false + if pageSize > 0 { + hasMore = len(artifactsArt) > int(pageSize) + if hasMore { + artifactsArt = artifactsArt[:len(artifactsArt)-1] // Remove the extra item used for hasMore detection + } + } + + // Map each artifact with its properties + for _, artifactArt := range 
artifactsArt { + properties := []schema.ArtifactProperty{} + if err := r.db.Where("artifact_id = ?", artifactArt.ID).Find(&properties).Error; err != nil { + return nil, fmt.Errorf("error getting properties by artifact id: %w", err) + } + + artifact, err := r.mapDataLayerToCatalogArtifact(artifactArt, properties) + if err != nil { + return nil, fmt.Errorf("error mapping catalog artifact: %w", err) + } + artifacts = append(artifacts, artifact) + } + + // Handle pagination token - generate token when there are more pages + if hasMore && len(artifactsArt) > 0 { + // Use the last artifact to generate pagination token + lastArtifact := artifactsArt[len(artifactsArt)-1] + nextToken := r.createPaginationToken(lastArtifact, listOptions) + listOptions.NextPageToken = &nextToken + } else { + listOptions.NextPageToken = nil + } + + list.Items = artifacts + list.NextPageToken = listOptions.GetNextPageToken() + list.Size = int32(len(artifacts)) + + return &list, nil +} + +// getTypeIDFromArtifactType maps catalog artifact type strings to their corresponding type IDs +func (r *CatalogArtifactRepositoryImpl) getTypeIDFromArtifactType(artifactType string) (int32, error) { + switch artifactType { + case "model-artifact": + return r.nameToID[CatalogModelArtifactTypeName], nil + case "metrics-artifact": + return r.nameToID[CatalogMetricsArtifactTypeName], nil + default: + return 0, fmt.Errorf("unsupported catalog artifact type: %s", artifactType) + } +} + +func (r *CatalogArtifactRepositoryImpl) mapDataLayerToCatalogArtifact(artifact schema.Artifact, properties []schema.ArtifactProperty) (models.CatalogArtifact, error) { + artToReturn := models.CatalogArtifact{} + + typeName := r.idToName[artifact.TypeID] + + switch typeName { + case CatalogModelArtifactTypeName: + artToReturn.CatalogModelArtifact = mapDataLayerToCatalogModelArtifact(artifact, properties) + case CatalogMetricsArtifactTypeName: + artToReturn.CatalogMetricsArtifact = mapDataLayerToCatalogMetricsArtifact(artifact, 
properties) + default: + return models.CatalogArtifact{}, fmt.Errorf("invalid catalog artifact type: %s=%d (expected: %v)", typeName, artifact.TypeID, r.idToName) + } + + return artToReturn, nil +} + +// createPaginationToken generates a pagination token based on the last artifact and ordering +func (r *CatalogArtifactRepositoryImpl) createPaginationToken(artifact schema.Artifact, listOptions models.CatalogArtifactListOptions) string { + orderBy := listOptions.GetOrderBy() + + // Handle NAME ordering (catalog-specific) + if orderBy == "NAME" { + return CreateNamePaginationToken(artifact.ID, artifact.Name) + } + + // Handle custom property ordering + sortValueQuery, column, err := r.sortValueQuery(&listOptions) + if err != nil { + // If there's an error in the sort value query (e.g., invalid value type), + // fall back to ID ordering for the token + // Note: This shouldn't normally happen as the error would be caught earlier in List() + glog.Warningf("Error in sortValueQuery during pagination token creation: %v", err) + } else if sortValueQuery != nil { + artifactTable := utils.GetTableName(r.db, &schema.Artifact{}) + sortValueQuery = sortValueQuery.Where(artifactTable+".id=?", artifact.ID) + + var result struct { + IntValue *int64 `gorm:"int_value"` + DoubleValue *float64 `gorm:"double_value"` + StringValue *string `gorm:"string_value"` + } + err := sortValueQuery.Scan(&result).Error + if err != nil { + // Log warning and fall back to default + glog.Warningf("Failed to get sort value for pagination token: %v", err) + } else { + switch column { + case "int_value": + return scopes.CreateNextPageToken(artifact.ID, result.IntValue) + case "double_value": + return scopes.CreateNextPageToken(artifact.ID, result.DoubleValue) + case "string_value": + return scopes.CreateNextPageToken(artifact.ID, result.StringValue) + } + } + } + + // Standard ordering fields + value := "" + switch orderBy { + case "ID": + value = fmt.Sprintf("%d", artifact.ID) + case "CREATE_TIME": + 
value = fmt.Sprintf("%d", artifact.CreateTimeSinceEpoch) + case "LAST_UPDATE_TIME": + value = fmt.Sprintf("%d", artifact.LastUpdateTimeSinceEpoch) + default: + // Default to ID ordering + value = fmt.Sprintf("%d", artifact.ID) + } + + return scopes.CreateNextPageToken(artifact.ID, value) +} + +// sortValueQuery returns a query that will produce the value to sort on for +// the List response. The returned string is the column name. +// +// If the sort does not require a subquery, sortValueQuery returns nil, "". +// If the format is correct but the value type is invalid, returns nil, "" and an error. +func (r *CatalogArtifactRepositoryImpl) sortValueQuery(listOptions *models.CatalogArtifactListOptions, extraColumns ...any) (*gorm.DB, string, error) { + db := r.db + artifactTable := utils.GetTableName(db, &schema.Artifact{}) + + query := db.Table(artifactTable) + + orderBy := strings.Split(listOptions.GetOrderBy(), ".") + + var valueColumn string + + // Handle . e.g. accuracy.double_value, timestamp.string_value + if len(orderBy) == 2 { + propertyName := orderBy[0] + valueColumn = orderBy[1] + + switch valueColumn { + case "int_value", "double_value", "string_value": + // OK - valid value type + default: + // Invalid value type - return error immediately + return nil, "", fmt.Errorf("invalid custom property value type '%s': must be one of 'int_value', 'double_value', or 'string_value': %w", valueColumn, api.ErrBadRequest) + } + + if !isValidPropertyName(propertyName) { + return nil, "", fmt.Errorf("invalid custom property name '%s': %w", propertyName, api.ErrBadRequest) + } + + propertyTable := utils.GetTableName(db, &schema.ArtifactProperty{}) + query = query. + Select(fmt.Sprintf("max(%s.%s) AS %s", propertyTable, valueColumn, valueColumn), extraColumns...). 
+ Joins(fmt.Sprintf("LEFT JOIN %s ON %s.id=%s.artifact_id AND %s.name=?", propertyTable, artifactTable, propertyTable, propertyTable), propertyName) + + return query, valueColumn, nil + } + + // Standard sort will work (not a custom property format) + return nil, "", nil +} + +// applyCustomOrdering applies custom ordering logic for non-standard orderBy field +func (r *CatalogArtifactRepositoryImpl) applyCustomOrdering(query *gorm.DB, listOptions *models.CatalogArtifactListOptions) (*gorm.DB, error) { + db := r.db + artifactTable := utils.GetTableName(db, &schema.Artifact{}) + orderBy := listOptions.GetOrderBy() + + // Handle NAME ordering specially (catalog-specific) + if orderBy == "NAME" { + return ApplyNameOrdering(query, artifactTable, listOptions.GetSortOrder(), listOptions.GetNextPageToken(), listOptions.GetPageSize()), nil + } + + subquery, sortColumn, err := r.sortValueQuery(listOptions, artifactTable+".id") + if err != nil { + // Error in custom property format (e.g., invalid value type) + return nil, err + } + if subquery == nil { + // Fall back to standard pagination with catalog-specific allowed columns + // If the orderBy is not in CatalogOrderByColumns, PaginateWithOptions will default to ID ordering + // This handles invalid custom property formats (e.g., "accuracy" without ".double_value") + pageSize := listOptions.GetPageSize() + sortOrder := listOptions.GetSortOrder() + nextPageToken := listOptions.GetNextPageToken() + pagination := &dbmodels.Pagination{ + PageSize: &pageSize, + OrderBy: &orderBy, + SortOrder: &sortOrder, + NextPageToken: &nextPageToken, + } + return query.Scopes(scopes.PaginateWithOptions([]schema.Artifact{}, pagination, r.db, "Artifact", CatalogOrderByColumns)), nil + } + subquery = subquery.Group(artifactTable + ".id") + + // Join the main query with the subquery + query = query. + Joins(fmt.Sprintf("LEFT JOIN (?) 
sort_value ON %s.id=sort_value.id", artifactTable), subquery) + + // Apply sorting order + sortOrder := listOptions.GetSortOrder() + if sortOrder != "ASC" { + sortOrder = "DESC" + } + query = query.Order(fmt.Sprintf("sort_value.%s %s NULLS LAST, %s.id", sortColumn, sortOrder, artifactTable)) + + // Handle cursor-based pagination with nextPageToken + nextPageToken := listOptions.GetNextPageToken() + if nextPageToken != "" { + // Parse the cursor from the token + if cursor, err := scopes.DecodeCursor(nextPageToken); err == nil { + // Apply WHERE clause for cursor-based pagination + query = r.applyCursorPagination(query, cursor, sortColumn, sortOrder) + } + // If token parsing fails, fall back to no cursor (first page) + } + + // Apply pagination limit + pageSize := listOptions.GetPageSize() + if pageSize > 0 { + query = query.Limit(int(pageSize) + 1) // +1 to detect if there are more pages + } + + return query, nil +} + +// applyCursorPagination applies WHERE clause for cursor-based pagination with custom property sorting +func (r *CatalogArtifactRepositoryImpl) applyCursorPagination(query *gorm.DB, cursor *scopes.Cursor, sortColumn, sortOrder string) *gorm.DB { + artifactTable := utils.GetTableName(query, &schema.Artifact{}) + + // Handle NULL values in cursor + if cursor.Value == "" { + // Items without the sort value will be sorted to the bottom, just use ID-based pagination. + return query.Where(fmt.Sprintf("sort_value.%s IS NULL AND %s.id > ?", sortColumn, artifactTable), cursor.ID) + } + + cmp := "<" + if sortOrder == "ASC" { + cmp = ">" + } + + // Note that we sort ID ASCENDING as a tie-breaker, so ">" is correct below. + return query.Where(fmt.Sprintf("(sort_value.%s %s ? OR (sort_value.%s = ? AND %s.id > ?) 
OR sort_value.%s IS NULL)", sortColumn, cmp, sortColumn, artifactTable, sortColumn), + cursor.Value, cursor.Value, cursor.ID) +} + +func (r *CatalogArtifactRepositoryImpl) DeleteByParentID(artifactTypeName string, parentResourceID int32) error { + typeID, ok := r.nameToID[artifactTypeName] + if !ok { + return fmt.Errorf("unknown artifact type name: %s", artifactTypeName) + } + + return r.db.Exec(`DELETE FROM "Artifact" WHERE id IN (SELECT artifact_id from "Attribution" INNER JOIN "Artifact" artifact ON artifact.id=artifact_id where context_id=? and type_id=?)`, parentResourceID, typeID).Error +} diff --git a/catalog/internal/db/service/catalog_artifact_test.go b/catalog/internal/db/service/catalog_artifact_test.go new file mode 100644 index 0000000000..629ef63944 --- /dev/null +++ b/catalog/internal/db/service/catalog_artifact_test.go @@ -0,0 +1,1706 @@ +package service_test + +import ( + "fmt" + "testing" + + "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/catalog/internal/db/service" + "github.com/kubeflow/model-registry/internal/apiutils" + dbmodels "github.com/kubeflow/model-registry/internal/db/models" + "github.com/kubeflow/model-registry/internal/testutils" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// Test constants for custom property names and value types +const ( + testPropertyAccuracy = "accuracy" + testPropertyTimestamp = "timestamp" + testPropertyVersion = "version" + testPropertyScore = "score" + + testValueTypeDouble = "double_value" + testValueTypeString = "string_value" + testValueTypeInt = "int_value" + + testSortOrderASC = "ASC" + testSortOrderDESC = "DESC" +) + +func TestCatalogArtifactRepository(t *testing.T) { + sharedDB, cleanup := testutils.SetupPostgresWithMigrations(t, service.DatastoreSpec()) + defer cleanup() + + // Get the catalog artifact type IDs + modelArtifactTypeID := getCatalogModelArtifactTypeID(t, sharedDB) + metricsArtifactTypeID := 
getCatalogMetricsArtifactTypeID(t, sharedDB) + + // Create unified artifact repository with both types + artifactTypeMap := map[string]int32{ + service.CatalogModelArtifactTypeName: modelArtifactTypeID, + service.CatalogMetricsArtifactTypeName: metricsArtifactTypeID, + } + repo := service.NewCatalogArtifactRepository(sharedDB, artifactTypeMap) + + // Also get CatalogModel type ID for creating parent entities + catalogModelTypeID := getCatalogModelTypeID(t, sharedDB) + catalogModelRepo := service.NewCatalogModelRepository(sharedDB, catalogModelTypeID) + modelArtifactRepo := service.NewCatalogModelArtifactRepository(sharedDB, modelArtifactTypeID) + metricsArtifactRepo := service.NewCatalogMetricsArtifactRepository(sharedDB, metricsArtifactTypeID) + + // Create shared test data + catalogModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-artifacts"), + ExternalID: apiutils.Of("catalog-model-artifacts-ext-123"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + t.Run("GetByID_ModelArtifact", func(t *testing.T) { + // Create a model artifact using the specific repository + modelArtifact := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(modelArtifactTypeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("test-model-artifact-getbyid"), + ExternalID: apiutils.Of("model-art-getbyid-ext-123"), + URI: apiutils.Of("s3://test-bucket/model.bin"), + ArtifactType: apiutils.Of(models.CatalogModelArtifactType), + }, + } + savedModelArtifact, err := modelArtifactRepo.Save(modelArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + + // Retrieve using unified repository + retrieved, err := repo.GetByID(*savedModelArtifact.GetID()) + require.NoError(t, err) + + // Verify it's a model artifact + assert.NotNil(t, retrieved.CatalogModelArtifact) + assert.Nil(t, 
retrieved.CatalogMetricsArtifact) + assert.Equal(t, "test-model-artifact-getbyid", *retrieved.CatalogModelArtifact.GetAttributes().Name) + assert.Equal(t, "model-art-getbyid-ext-123", *retrieved.CatalogModelArtifact.GetAttributes().ExternalID) + assert.Equal(t, "s3://test-bucket/model.bin", *retrieved.CatalogModelArtifact.GetAttributes().URI) + }) + + t.Run("GetByID_MetricsArtifact", func(t *testing.T) { + // Create a metrics artifact using the specific repository + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("test-metrics-artifact-getbyid"), + ExternalID: apiutils.Of("metrics-art-getbyid-ext-123"), + MetricsType: models.MetricsTypeAccuracy, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + } + savedMetricsArtifact, err := metricsArtifactRepo.Save(metricsArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + + // Retrieve using unified repository + retrieved, err := repo.GetByID(*savedMetricsArtifact.GetID()) + require.NoError(t, err) + + // Verify it's a metrics artifact + assert.Nil(t, retrieved.CatalogModelArtifact) + assert.NotNil(t, retrieved.CatalogMetricsArtifact) + assert.Equal(t, "test-metrics-artifact-getbyid", *retrieved.CatalogMetricsArtifact.GetAttributes().Name) + assert.Equal(t, "metrics-art-getbyid-ext-123", *retrieved.CatalogMetricsArtifact.GetAttributes().ExternalID) + assert.Equal(t, models.MetricsTypeAccuracy, retrieved.CatalogMetricsArtifact.GetAttributes().MetricsType) + }) + + t.Run("GetByID_NotFound", func(t *testing.T) { + nonExistentID := int32(99999) + _, err := repo.GetByID(nonExistentID) + require.Error(t, err) + assert.Contains(t, err.Error(), "catalog artifact by id not found") + }) + + t.Run("List_AllArtifacts", func(t *testing.T) { + // Create test artifacts of both types + modelArtifact1 := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(modelArtifactTypeID)), + 
Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("test-model-artifact-list-1"), + ExternalID: apiutils.Of("model-list-1-ext"), + URI: apiutils.Of("s3://test/model1.bin"), + ArtifactType: apiutils.Of(models.CatalogModelArtifactType), + }, + } + + modelArtifact2 := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(modelArtifactTypeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("test-model-artifact-list-2"), + ExternalID: apiutils.Of("model-list-2-ext"), + URI: apiutils.Of("s3://test/model2.bin"), + ArtifactType: apiutils.Of(models.CatalogModelArtifactType), + }, + } + + metricsArtifact1 := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("test-metrics-artifact-list-1"), + ExternalID: apiutils.Of("metrics-list-1-ext"), + MetricsType: models.MetricsTypeAccuracy, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + } + + // Save artifacts + savedModelArt1, err := modelArtifactRepo.Save(modelArtifact1, savedCatalogModel.GetID()) + require.NoError(t, err) + savedModelArt2, err := modelArtifactRepo.Save(modelArtifact2, savedCatalogModel.GetID()) + require.NoError(t, err) + savedMetricsArt1, err := metricsArtifactRepo.Save(metricsArtifact1, savedCatalogModel.GetID()) + require.NoError(t, err) + + // List all artifacts for the parent resource + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedCatalogModel.GetID(), + } + + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Should return all 3 artifacts (2 model + 1 metrics) + assert.GreaterOrEqual(t, len(result.Items), 3, "Should return at least the 3 artifacts we created") + + // Verify we got both types + var modelArtifactCount, metricsArtifactCount int + artifactIDs := make(map[int32]bool) + + for _, artifact := range result.Items { + if artifact.CatalogModelArtifact 
!= nil { + modelArtifactCount++ + artifactIDs[*artifact.CatalogModelArtifact.GetID()] = true + } else if artifact.CatalogMetricsArtifact != nil { + metricsArtifactCount++ + artifactIDs[*artifact.CatalogMetricsArtifact.GetID()] = true + } + } + + assert.GreaterOrEqual(t, modelArtifactCount, 2, "Should have at least 2 model artifacts") + assert.GreaterOrEqual(t, metricsArtifactCount, 1, "Should have at least 1 metrics artifact") + + // Verify our specific artifacts are in the results + assert.True(t, artifactIDs[*savedModelArt1.GetID()], "Should contain first model artifact") + assert.True(t, artifactIDs[*savedModelArt2.GetID()], "Should contain second model artifact") + assert.True(t, artifactIDs[*savedMetricsArt1.GetID()], "Should contain metrics artifact") + }) + + t.Run("List_FilterByArtifactType_ModelArtifact", func(t *testing.T) { + // Filter by model artifact type only + artifactType := "model-artifact" + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedCatalogModel.GetID(), + ArtifactType: &artifactType, + } + + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // All results should be model artifacts + for _, artifact := range result.Items { + assert.NotNil(t, artifact.CatalogModelArtifact, "Should only return model artifacts") + assert.Nil(t, artifact.CatalogMetricsArtifact, "Should not return metrics artifacts") + } + }) + + t.Run("List_FilterByArtifactType_MetricsArtifact", func(t *testing.T) { + // Filter by metrics artifact type only + artifactType := "metrics-artifact" + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedCatalogModel.GetID(), + ArtifactType: &artifactType, + } + + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // All results should be metrics artifacts + for _, artifact := range result.Items { + assert.Nil(t, artifact.CatalogModelArtifact, "Should not return model artifacts") + 
assert.NotNil(t, artifact.CatalogMetricsArtifact, "Should only return metrics artifacts") + } + }) + + t.Run("List_FilterByExternalID", func(t *testing.T) { + // Create artifact with specific external ID for filtering + testArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("external-id-filter-test"), + ExternalID: apiutils.Of("unique-external-id-123"), + MetricsType: models.MetricsTypePerformance, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + } + savedArtifact, err := metricsArtifactRepo.Save(testArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + + // Filter by external ID + externalID := "unique-external-id-123" + listOptions := models.CatalogArtifactListOptions{ + ExternalID: &externalID, + } + + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + assert.Len(t, result.Items, 1, "Should find exactly one artifact with the external ID") + + // Verify it's the correct artifact + artifact := result.Items[0] + assert.NotNil(t, artifact.CatalogMetricsArtifact) + assert.Equal(t, *savedArtifact.GetID(), *artifact.CatalogMetricsArtifact.GetID()) + assert.Equal(t, "unique-external-id-123", *artifact.CatalogMetricsArtifact.GetAttributes().ExternalID) + }) + + t.Run("List_WithPagination", func(t *testing.T) { + // Create multiple artifacts for pagination testing + for i := 0; i < 5; i++ { + artifact := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(modelArtifactTypeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of(fmt.Sprintf("pagination-test-%d", i)), + ExternalID: apiutils.Of(fmt.Sprintf("pagination-ext-%d", i)), + URI: apiutils.Of(fmt.Sprintf("s3://test/pagination-%d.bin", i)), + ArtifactType: apiutils.Of(models.CatalogModelArtifactType), + }, + } + _, err := modelArtifactRepo.Save(artifact, savedCatalogModel.GetID()) + require.NoError(t, 
err) + } + + // Test pagination + pageSize := int32(3) + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedCatalogModel.GetID(), + Pagination: dbmodels.Pagination{ + PageSize: &pageSize, + OrderBy: apiutils.Of("ID"), + }, + } + + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + assert.LessOrEqual(t, len(result.Items), 3, "Should respect page size limit") + assert.GreaterOrEqual(t, len(result.Items), 1, "Should return at least one item") + }) + + t.Run("List_InvalidArtifactType", func(t *testing.T) { + // Test with invalid artifact type + invalidType := "invalid-artifact-type" + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedCatalogModel.GetID(), + ArtifactType: &invalidType, + } + + _, err := repo.List(listOptions) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid catalog artifact type") + assert.Contains(t, err.Error(), "invalid-artifact-type") + }) + + t.Run("List_WithCustomProperties", func(t *testing.T) { + // Create artifacts with custom properties + customProps := []dbmodels.Properties{ + { + Name: "custom_prop_1", + StringValue: apiutils.Of("custom_value_1"), + }, + { + Name: "custom_prop_2", + StringValue: apiutils.Of("custom_value_2"), + }, + } + + artifactWithCustomProps := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(modelArtifactTypeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("artifact-with-custom-props"), + ExternalID: apiutils.Of("custom-props-ext"), + URI: apiutils.Of("s3://test/custom-props.bin"), + ArtifactType: apiutils.Of(models.CatalogModelArtifactType), + }, + CustomProperties: &customProps, + } + + savedArtifact, err := modelArtifactRepo.Save(artifactWithCustomProps, savedCatalogModel.GetID()) + require.NoError(t, err) + + // Retrieve using unified repository + retrieved, err := repo.GetByID(*savedArtifact.GetID()) + require.NoError(t, err) + + // Verify custom properties are 
preserved + require.NotNil(t, retrieved.CatalogModelArtifact) + assert.NotNil(t, retrieved.CatalogModelArtifact.GetCustomProperties()) + + customPropsMap := make(map[string]string) + for _, prop := range *retrieved.CatalogModelArtifact.GetCustomProperties() { + if prop.StringValue != nil { + customPropsMap[prop.Name] = *prop.StringValue + } + } + + assert.Equal(t, "custom_value_1", customPropsMap["custom_prop_1"]) + assert.Equal(t, "custom_value_2", customPropsMap["custom_prop_2"]) + }) + + t.Run("MappingErrors", func(t *testing.T) { + // Test error handling for invalid type mapping + // This would typically happen if there's data inconsistency in the database + + // We can't easily test this without directly manipulating the database + // but we can test the GetByID with an artifact that has an unknown type + // by temporarily modifying the repository's type mapping + + // Create a repository with incomplete type mapping + incompleteTypeMap := map[string]int32{ + service.CatalogModelArtifactTypeName: modelArtifactTypeID, + // Missing CatalogMetricsArtifactTypeName intentionally + } + incompleteRepo := service.NewCatalogArtifactRepository(sharedDB, incompleteTypeMap) + + // Create a metrics artifact first using the complete repo + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("test-mapping-error"), + ExternalID: apiutils.Of("mapping-error-ext"), + MetricsType: models.MetricsTypeAccuracy, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + } + savedMetricsArtifact, err := metricsArtifactRepo.Save(metricsArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + + // Try to retrieve using incomplete repo - should get mapping error + _, err = incompleteRepo.GetByID(*savedMetricsArtifact.GetID()) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid catalog artifact type") + }) + + t.Run("TestNameOrdering", 
func(t *testing.T) { + // Create a new model for this test + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-model-for-name-ordering"), + ExternalID: apiutils.Of("test-model-name-ordering-ext"), + }, + } + savedTestModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create artifacts with various names (including null) + testArtifacts := []struct { + name *string + desc string + }{ + {apiutils.Of("zebra-artifact"), "zebra"}, + {apiutils.Of("alpha-artifact"), "alpha"}, + {apiutils.Of("beta-artifact"), "beta"}, + {nil, "null-name"}, // Artifact with no name (like real model artifacts) + {apiutils.Of("gamma-artifact"), "gamma"}, + } + + for _, artifact := range testArtifacts { + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: artifact.name, + ExternalID: apiutils.Of(fmt.Sprintf("name-test-%s", artifact.desc)), + MetricsType: models.MetricsTypePerformance, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + } + _, err := metricsArtifactRepo.Save(metricsArtifact, savedTestModel.GetID()) + require.NoError(t, err) + } + + // Test NAME ordering ASC + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("NAME"), + SortOrder: apiutils.Of("ASC"), + }, + } + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Extract artifact names (including nulls) + var foundArtifacts []struct { + name *string + desc string + } + for _, artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil { + name := artifact.CatalogMetricsArtifact.GetAttributes().Name + foundArtifacts = append(foundArtifacts, struct { + name *string + desc string + }{name, fmt.Sprintf("%v", 
name)}) + } + } + + require.GreaterOrEqual(t, len(foundArtifacts), 5, "Should find all test artifacts") + + // Find positions of named artifacts + var alphaIdx, betaIdx, gammaIdx, zebraIdx, nullIdx int = -1, -1, -1, -1, -1 + for i, artifact := range foundArtifacts { + if artifact.name != nil { + switch *artifact.name { + case "alpha-artifact": + alphaIdx = i + case "beta-artifact": + betaIdx = i + case "gamma-artifact": + gammaIdx = i + case "zebra-artifact": + zebraIdx = i + } + } else { + nullIdx = i + } + } + + // Verify ASC ordering: alpha < beta < gamma < zebra, and null at the end + require.NotEqual(t, -1, alphaIdx, "alpha-artifact not found") + require.NotEqual(t, -1, betaIdx, "beta-artifact not found") + require.NotEqual(t, -1, gammaIdx, "gamma-artifact not found") + require.NotEqual(t, -1, zebraIdx, "zebra-artifact not found") + require.NotEqual(t, -1, nullIdx, "null-name artifact not found") + + assert.Less(t, alphaIdx, betaIdx, "alpha should come before beta in ASC") + assert.Less(t, betaIdx, gammaIdx, "beta should come before gamma in ASC") + assert.Less(t, gammaIdx, zebraIdx, "gamma should come before zebra in ASC") + assert.Less(t, zebraIdx, nullIdx, "named artifacts should come before null in ASC") + + // Test NAME ordering DESC + listOptions = models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("NAME"), + SortOrder: apiutils.Of("DESC"), + }, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Extract artifact names from DESC results + foundArtifacts = []struct { + name *string + desc string + }{} + for _, artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil { + name := artifact.CatalogMetricsArtifact.GetAttributes().Name + foundArtifacts = append(foundArtifacts, struct { + name *string + desc string + }{name, fmt.Sprintf("%v", name)}) + } + } + + // Find positions in DESC order + alphaIdx, 
betaIdx, gammaIdx, zebraIdx, nullIdx = -1, -1, -1, -1, -1 + for i, artifact := range foundArtifacts { + if artifact.name != nil { + switch *artifact.name { + case "alpha-artifact": + alphaIdx = i + case "beta-artifact": + betaIdx = i + case "gamma-artifact": + gammaIdx = i + case "zebra-artifact": + zebraIdx = i + } + } else { + nullIdx = i + } + } + + // Verify DESC ordering: In SQL DESC, NULL comes first, then zebra > gamma > beta > alpha + assert.Less(t, nullIdx, zebraIdx, "null should come first in DESC (SQL default behavior)") + assert.Less(t, zebraIdx, gammaIdx, "zebra should come before gamma in DESC") + assert.Less(t, gammaIdx, betaIdx, "gamma should come before beta in DESC") + assert.Less(t, betaIdx, alphaIdx, "beta should come before alpha in DESC") + }) + + t.Run("TestNameOrderingPagination", func(t *testing.T) { + // Create a new model for this test + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-model-for-name-pagination"), + ExternalID: apiutils.Of("test-model-name-pagination-ext"), + }, + } + savedTestModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create artifacts with sequential names for pagination testing + artifactNames := []string{ + "artifact-alpha", + "artifact-beta", + "artifact-gamma", + "artifact-delta", + "artifact-epsilon", + } + + for i, name := range artifactNames { + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of(name), + ExternalID: apiutils.Of(fmt.Sprintf("pagination-test-%d", i)), + MetricsType: models.MetricsTypePerformance, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + } + _, err := metricsArtifactRepo.Save(metricsArtifact, savedTestModel.GetID()) + require.NoError(t, err) + } + + // Test pagination with NAME ordering (ASC) + pageSize := 
int32(2) + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("NAME"), + SortOrder: apiutils.Of("ASC"), + PageSize: &pageSize, + }, + } + + // First page + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Filter to only our test artifacts + var page1Artifacts []string + for _, artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil && artifact.CatalogMetricsArtifact.GetAttributes().Name != nil { + name := *artifact.CatalogMetricsArtifact.GetAttributes().Name + if name == "artifact-alpha" || name == "artifact-beta" || name == "artifact-gamma" || name == "artifact-delta" || name == "artifact-epsilon" { + page1Artifacts = append(page1Artifacts, name) + } + } + } + + require.LessOrEqual(t, len(page1Artifacts), 2, "First page should have at most 2 artifacts") + require.GreaterOrEqual(t, len(page1Artifacts), 1, "First page should have at least 1 artifact") + assert.NotNil(t, result.NextPageToken, "Should have next page token") + + // Verify first page ordering + if len(page1Artifacts) >= 2 { + assert.Less(t, page1Artifacts[0], page1Artifacts[1], "First page should be ordered") + } + + // Second page + listOptions.Pagination.NextPageToken = &result.NextPageToken + result2, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result2) + + var page2Artifacts []string + for _, artifact := range result2.Items { + if artifact.CatalogMetricsArtifact != nil && artifact.CatalogMetricsArtifact.GetAttributes().Name != nil { + name := *artifact.CatalogMetricsArtifact.GetAttributes().Name + if name == "artifact-alpha" || name == "artifact-beta" || name == "artifact-gamma" || name == "artifact-delta" || name == "artifact-epsilon" { + page2Artifacts = append(page2Artifacts, name) + } + } + } + + require.GreaterOrEqual(t, len(page2Artifacts), 1, "Second page should have at least 1 artifact") + + 
// Verify second page ordering + if len(page2Artifacts) >= 2 { + assert.Less(t, page2Artifacts[0], page2Artifacts[1], "Second page should be ordered") + } + + // Verify no overlap between pages + for _, name1 := range page1Artifacts { + for _, name2 := range page2Artifacts { + assert.NotEqual(t, name1, name2, "Pages should not have overlapping artifacts") + } + } + + // Verify page 2 comes after page 1 + if len(page1Artifacts) > 0 && len(page2Artifacts) > 0 { + assert.Less(t, page1Artifacts[len(page1Artifacts)-1], page2Artifacts[0], "Page 2 should continue where page 1 ended") + } + + // Test DESC pagination + listOptions = models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("NAME"), + SortOrder: apiutils.Of("DESC"), + PageSize: &pageSize, + }, + } + + resultDesc, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, resultDesc) + + var pageDescArtifacts []string + expectedNames := map[string]bool{ + "artifact-alpha": true, + "artifact-beta": true, + "artifact-gamma": true, + "artifact-delta": true, + "artifact-epsilon": true, + } + for _, artifact := range resultDesc.Items { + if artifact.CatalogMetricsArtifact != nil && + artifact.CatalogMetricsArtifact.GetAttributes().Name != nil { + name := *artifact.CatalogMetricsArtifact.GetAttributes().Name + if expectedNames[name] { + pageDescArtifacts = append(pageDescArtifacts, name) + } + } + } + + require.GreaterOrEqual(t, len(pageDescArtifacts), 1, "DESC first page should have at least 1 artifact") + + // Verify DESC ordering + if len(pageDescArtifacts) >= 2 { + assert.Greater(t, pageDescArtifacts[0], pageDescArtifacts[1], "DESC page should be reverse ordered") + } + }) + + t.Run("TestCustomPropertyOrdering_DoubleValue", func(t *testing.T) { + // Create a new model for this test + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ 
+ Name: apiutils.Of("test-model-custom-property-ordering"), + ExternalID: apiutils.Of("test-model-custom-property-ext"), + }, + } + savedTestModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create artifacts with custom properties (accuracy as double_value) + testArtifacts := []struct { + name string + accuracy float64 + }{ + {"artifact-high-accuracy", 0.95}, + {"artifact-low-accuracy", 0.75}, + {"artifact-medium-accuracy", 0.85}, + {"artifact-perfect-accuracy", 0.99}, + {"artifact-poor-accuracy", 0.60}, + } + + for _, tc := range testArtifacts { + customProps := []dbmodels.Properties{ + { + Name: testPropertyAccuracy, + DoubleValue: apiutils.Of(tc.accuracy), + }, + } + + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of(tc.name), + ExternalID: apiutils.Of(fmt.Sprintf("custom-prop-test-%s", tc.name)), + MetricsType: models.MetricsTypeAccuracy, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + CustomProperties: &customProps, + } + _, err := metricsArtifactRepo.Save(metricsArtifact, savedTestModel.GetID()) + require.NoError(t, err) + } + + // Test ordering by accuracy.double_value ASC + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of( + fmt.Sprintf("%s.%s", testPropertyAccuracy, testValueTypeDouble), + ), + SortOrder: apiutils.Of(testSortOrderASC), + }, + } + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Extract artifacts with accuracy property + var foundArtifacts []struct { + name string + accuracy float64 + } + expectedArtifactNames := map[string]bool{ + "artifact-high-accuracy": true, + "artifact-low-accuracy": true, + "artifact-medium-accuracy": true, + "artifact-perfect-accuracy": true, + "artifact-poor-accuracy": true, + } + for _, 
artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil { + name := artifact.CatalogMetricsArtifact.GetAttributes().Name + if name != nil && expectedArtifactNames[*name] { + // Get accuracy from custom properties + if artifact.CatalogMetricsArtifact.GetCustomProperties() != nil { + for _, prop := range *artifact.CatalogMetricsArtifact.GetCustomProperties() { + if prop.Name == testPropertyAccuracy && prop.DoubleValue != nil { + foundArtifacts = append(foundArtifacts, struct { + name string + accuracy float64 + }{*name, *prop.DoubleValue}) + } + } + } + } + } + } + + require.GreaterOrEqual(t, len(foundArtifacts), 5, "Should find all test artifacts") + + // Verify ASC ordering by accuracy + for i := 0; i < len(foundArtifacts)-1; i++ { + assert.LessOrEqual(t, foundArtifacts[i].accuracy, foundArtifacts[i+1].accuracy, + fmt.Sprintf("Artifact %s (%.2f) should come before or equal to %s (%.2f) in ASC order", + foundArtifacts[i].name, foundArtifacts[i].accuracy, + foundArtifacts[i+1].name, foundArtifacts[i+1].accuracy)) + } + + // Test ordering by accuracy.double_value DESC + listOptions = models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of( + fmt.Sprintf("%s.%s", testPropertyAccuracy, testValueTypeDouble), + ), + SortOrder: apiutils.Of(testSortOrderDESC), + }, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Extract artifacts again + foundArtifacts = []struct { + name string + accuracy float64 + }{} + for _, artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil { + name := artifact.CatalogMetricsArtifact.GetAttributes().Name + if name != nil && expectedArtifactNames[*name] { + // Get accuracy from custom properties + if artifact.CatalogMetricsArtifact.GetCustomProperties() != nil { + for _, prop := range *artifact.CatalogMetricsArtifact.GetCustomProperties() { + if prop.Name == 
testPropertyAccuracy && prop.DoubleValue != nil { + foundArtifacts = append(foundArtifacts, struct { + name string + accuracy float64 + }{*name, *prop.DoubleValue}) + } + } + } + } + } + } + + require.GreaterOrEqual(t, len(foundArtifacts), 5, "Should find all test artifacts") + + // Verify DESC ordering by accuracy + for i := 0; i < len(foundArtifacts)-1; i++ { + assert.GreaterOrEqual(t, foundArtifacts[i].accuracy, foundArtifacts[i+1].accuracy, + fmt.Sprintf("Artifact %s (%.2f) should come after or equal to %s (%.2f) in DESC order", + foundArtifacts[i].name, foundArtifacts[i].accuracy, + foundArtifacts[i+1].name, foundArtifacts[i+1].accuracy)) + } + }) + + t.Run("TestCustomPropertyOrdering_StringValue", func(t *testing.T) { + // Create a new model for this test + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-model-string-property-ordering"), + ExternalID: apiutils.Of("test-model-string-property-ext"), + }, + } + savedTestModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create artifacts with custom properties (timestamp as string_value) + testArtifacts := []struct { + name string + timestamp string + }{ + {"artifact-2024-01-15", "2024-01-15"}, + {"artifact-2024-01-10", "2024-01-10"}, + {"artifact-2024-01-20", "2024-01-20"}, + {"artifact-2024-01-05", "2024-01-05"}, + {"artifact-2024-01-25", "2024-01-25"}, + } + + for _, tc := range testArtifacts { + customProps := []dbmodels.Properties{ + { + Name: testPropertyTimestamp, + StringValue: apiutils.Of(tc.timestamp), + }, + } + + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of(tc.name), + ExternalID: apiutils.Of(fmt.Sprintf("string-prop-test-%s", tc.name)), + MetricsType: models.MetricsTypePerformance, + ArtifactType: 
apiutils.Of("metrics-artifact"), + }, + CustomProperties: &customProps, + } + _, err := metricsArtifactRepo.Save(metricsArtifact, savedTestModel.GetID()) + require.NoError(t, err) + } + + // Test ordering by timestamp.string_value ASC + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of( + fmt.Sprintf("%s.%s", testPropertyTimestamp, testValueTypeString), + ), + SortOrder: apiutils.Of(testSortOrderASC), + }, + } + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Extract artifacts with timestamp property + var foundArtifacts []struct { + name string + timestamp string + } + for _, artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil { + name := artifact.CatalogMetricsArtifact.GetAttributes().Name + if name != nil && (*name == "artifact-2024-01-15" || *name == "artifact-2024-01-10" || *name == "artifact-2024-01-20" || *name == "artifact-2024-01-05" || *name == "artifact-2024-01-25") { + // Get timestamp from custom properties + if artifact.CatalogMetricsArtifact.GetCustomProperties() != nil { + for _, prop := range *artifact.CatalogMetricsArtifact.GetCustomProperties() { + if prop.Name == "timestamp" && prop.StringValue != nil { + foundArtifacts = append(foundArtifacts, struct { + name string + timestamp string + }{*name, *prop.StringValue}) + } + } + } + } + } + } + + require.GreaterOrEqual(t, len(foundArtifacts), 5, "Should find all test artifacts") + + // Verify ASC ordering by timestamp + for i := 0; i < len(foundArtifacts)-1; i++ { + assert.LessOrEqual(t, foundArtifacts[i].timestamp, foundArtifacts[i+1].timestamp, + fmt.Sprintf("Artifact %s (%s) should come before or equal to %s (%s) in ASC order", + foundArtifacts[i].name, foundArtifacts[i].timestamp, + foundArtifacts[i+1].name, foundArtifacts[i+1].timestamp)) + } + + // Test ordering by timestamp.string_value DESC + listOptions = 
models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("timestamp.string_value"), + SortOrder: apiutils.Of("DESC"), + }, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Extract artifacts again + foundArtifacts = []struct { + name string + timestamp string + }{} + for _, artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil { + name := artifact.CatalogMetricsArtifact.GetAttributes().Name + if name != nil && (*name == "artifact-2024-01-15" || *name == "artifact-2024-01-10" || *name == "artifact-2024-01-20" || *name == "artifact-2024-01-05" || *name == "artifact-2024-01-25") { + // Get timestamp from custom properties + if artifact.CatalogMetricsArtifact.GetCustomProperties() != nil { + for _, prop := range *artifact.CatalogMetricsArtifact.GetCustomProperties() { + if prop.Name == "timestamp" && prop.StringValue != nil { + foundArtifacts = append(foundArtifacts, struct { + name string + timestamp string + }{*name, *prop.StringValue}) + } + } + } + } + } + } + + require.GreaterOrEqual(t, len(foundArtifacts), 5, "Should find all test artifacts") + + // Verify DESC ordering by timestamp + for i := 0; i < len(foundArtifacts)-1; i++ { + assert.GreaterOrEqual(t, foundArtifacts[i].timestamp, foundArtifacts[i+1].timestamp, + fmt.Sprintf("Artifact %s (%s) should come after or equal to %s (%s) in DESC order", + foundArtifacts[i].name, foundArtifacts[i].timestamp, + foundArtifacts[i+1].name, foundArtifacts[i+1].timestamp)) + } + }) + + t.Run("TestCustomPropertyOrdering_IntValue", func(t *testing.T) { + // Create a new model for this test + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-model-int-property-ordering"), + ExternalID: apiutils.Of("test-model-int-property-ext"), + }, + } + savedTestModel, err := 
catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create artifacts with custom properties (version as int_value) + testArtifacts := []struct { + name string + version int32 + }{ + {"artifact-v3", 3}, + {"artifact-v1", 1}, + {"artifact-v5", 5}, + {"artifact-v2", 2}, + {"artifact-v4", 4}, + } + + for _, tc := range testArtifacts { + customProps := []dbmodels.Properties{ + { + Name: "version", + IntValue: apiutils.Of(tc.version), + }, + } + + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of(tc.name), + ExternalID: apiutils.Of(fmt.Sprintf("int-prop-test-%s", tc.name)), + MetricsType: models.MetricsTypePerformance, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + CustomProperties: &customProps, + } + _, err := metricsArtifactRepo.Save(metricsArtifact, savedTestModel.GetID()) + require.NoError(t, err) + } + + // Test ordering by version.int_value ASC + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("version.int_value"), + SortOrder: apiutils.Of("ASC"), + }, + } + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Extract artifacts with version property + var foundArtifacts []struct { + name string + version int32 + } + for _, artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil { + name := artifact.CatalogMetricsArtifact.GetAttributes().Name + if name != nil && (*name == "artifact-v3" || *name == "artifact-v1" || *name == "artifact-v5" || *name == "artifact-v2" || *name == "artifact-v4") { + // Get version from custom properties + if artifact.CatalogMetricsArtifact.GetCustomProperties() != nil { + for _, prop := range *artifact.CatalogMetricsArtifact.GetCustomProperties() { + if prop.Name == "version" && prop.IntValue != nil { + 
foundArtifacts = append(foundArtifacts, struct { + name string + version int32 + }{*name, *prop.IntValue}) + } + } + } + } + } + } + + require.GreaterOrEqual(t, len(foundArtifacts), 5, "Should find all test artifacts") + + // Verify ASC ordering by version + for i := 0; i < len(foundArtifacts)-1; i++ { + assert.LessOrEqual(t, foundArtifacts[i].version, foundArtifacts[i+1].version, + fmt.Sprintf("Artifact %s (%d) should come before or equal to %s (%d) in ASC order", + foundArtifacts[i].name, foundArtifacts[i].version, + foundArtifacts[i+1].name, foundArtifacts[i+1].version)) + } + + // Test ordering by version.int_value DESC + listOptions = models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("version.int_value"), + SortOrder: apiutils.Of("DESC"), + }, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Extract artifacts again + foundArtifacts = []struct { + name string + version int32 + }{} + for _, artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil { + name := artifact.CatalogMetricsArtifact.GetAttributes().Name + if name != nil && (*name == "artifact-v3" || *name == "artifact-v1" || *name == "artifact-v5" || *name == "artifact-v2" || *name == "artifact-v4") { + // Get version from custom properties + if artifact.CatalogMetricsArtifact.GetCustomProperties() != nil { + for _, prop := range *artifact.CatalogMetricsArtifact.GetCustomProperties() { + if prop.Name == "version" && prop.IntValue != nil { + foundArtifacts = append(foundArtifacts, struct { + name string + version int32 + }{*name, *prop.IntValue}) + } + } + } + } + } + } + + require.GreaterOrEqual(t, len(foundArtifacts), 5, "Should find all test artifacts") + + // Verify DESC ordering by version + for i := 0; i < len(foundArtifacts)-1; i++ { + assert.GreaterOrEqual(t, foundArtifacts[i].version, foundArtifacts[i+1].version, + fmt.Sprintf("Artifact 
%s (%d) should come after or equal to %s (%d) in DESC order", + foundArtifacts[i].name, foundArtifacts[i].version, + foundArtifacts[i+1].name, foundArtifacts[i+1].version)) + } + }) + + t.Run("TestCustomPropertyOrderingWithPagination", func(t *testing.T) { + // Create a new model for this test + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-model-custom-pagination-unique"), + ExternalID: apiutils.Of("test-model-custom-pagination-unique-ext"), + }, + } + savedTestModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create artifacts with custom properties for pagination testing + for i := 1; i <= 10; i++ { + customProps := []dbmodels.Properties{ + { + Name: "score", + DoubleValue: apiutils.Of(float64(i) * 10.0), + }, + } + + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of(fmt.Sprintf("pagination-artifact-unique-%d", i)), + ExternalID: apiutils.Of(fmt.Sprintf("pagination-unique-ext-%d", i)), + MetricsType: models.MetricsTypeAccuracy, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + CustomProperties: &customProps, + } + _, err := metricsArtifactRepo.Save(metricsArtifact, savedTestModel.GetID()) + require.NoError(t, err) + } + + // Test pagination with custom property ordering + pageSize := int32(3) + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("score.double_value"), + SortOrder: apiutils.Of("ASC"), + PageSize: &pageSize, + }, + } + + // First page + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Filter to only our test artifacts + var page1Scores []float64 + for _, artifact := range result.Items { + if 
artifact.CatalogMetricsArtifact != nil && artifact.CatalogMetricsArtifact.GetAttributes().Name != nil { + name := *artifact.CatalogMetricsArtifact.GetAttributes().Name + if len(name) > 27 && name[:27] == "pagination-artifact-unique-" { + if artifact.CatalogMetricsArtifact.GetCustomProperties() != nil { + for _, prop := range *artifact.CatalogMetricsArtifact.GetCustomProperties() { + if prop.Name == "score" && prop.DoubleValue != nil { + page1Scores = append(page1Scores, *prop.DoubleValue) + } + } + } + } + } + } + + require.LessOrEqual(t, len(page1Scores), 3, "First page should have at most 3 artifacts") + require.GreaterOrEqual(t, len(page1Scores), 1, "First page should have at least 1 artifact") + + // Verify first page is ordered + for i := 0; i < len(page1Scores)-1; i++ { + assert.LessOrEqual(t, page1Scores[i], page1Scores[i+1], "First page should be ordered") + } + + if result.NextPageToken != "" { + // Second page + listOptions.Pagination.NextPageToken = &result.NextPageToken + result2, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result2) + + var page2Scores []float64 + for _, artifact := range result2.Items { + if artifact.CatalogMetricsArtifact != nil && artifact.CatalogMetricsArtifact.GetAttributes().Name != nil { + name := *artifact.CatalogMetricsArtifact.GetAttributes().Name + if len(name) > 27 && name[:27] == "pagination-artifact-unique-" { + if artifact.CatalogMetricsArtifact.GetCustomProperties() != nil { + for _, prop := range *artifact.CatalogMetricsArtifact.GetCustomProperties() { + if prop.Name == "score" && prop.DoubleValue != nil { + page2Scores = append(page2Scores, *prop.DoubleValue) + } + } + } + } + } + } + + require.GreaterOrEqual(t, len(page2Scores), 1, "Second page should have at least 1 artifact") + + // Verify second page is ordered + for i := 0; i < len(page2Scores)-1; i++ { + assert.LessOrEqual(t, page2Scores[i], page2Scores[i+1], "Second page should be ordered") + } + + // Verify page 2 comes after 
page 1 + if len(page1Scores) > 0 && len(page2Scores) > 0 { + assert.Less(t, page1Scores[len(page1Scores)-1], page2Scores[0], "Page 2 should continue where page 1 ended") + } + + // Verify no overlap between pages + for _, score1 := range page1Scores { + for _, score2 := range page2Scores { + assert.NotEqual(t, score1, score2, "Pages should not have overlapping scores") + } + } + } + }) + + t.Run("TestEmptyPropertyName_Error", func(t *testing.T) { + // Create a new model for this test + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-model-invalid-property-name"), + ExternalID: apiutils.Of("test-model-invalid-property-name-ext"), + }, + } + savedTestModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create an artifact + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("test-artifact"), + ExternalID: apiutils.Of("test-artifact-ext"), + MetricsType: models.MetricsTypeAccuracy, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + } + _, err = metricsArtifactRepo.Save(metricsArtifact, savedTestModel.GetID()) + require.NoError(t, err) + + // Test with empty property name - should return error + testCases := []struct { + name string + orderBy string + expectedErr string + }{ + { + name: "Empty property name", + orderBy: fmt.Sprintf(".%s", testValueTypeDouble), + expectedErr: "invalid custom property name", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of(tc.orderBy), + SortOrder: apiutils.Of("ASC"), + }, + } + _, err := repo.List(listOptions) + require.Error(t, err, "Should return error for invalid property name") 
+ assert.Contains(t, err.Error(), tc.expectedErr, "Error message should mention invalid property name") + }) + } + + // Test that various property names work (they fallback to ID if non-existent) + // Property names can contain any characters - they're user-defined metadata + validTestCases := []string{ + fmt.Sprintf("%s.%s", testPropertyAccuracy, testValueTypeDouble), + fmt.Sprintf("model_%s.%s", testPropertyAccuracy, testValueTypeDouble), + fmt.Sprintf("model-%s.%s", testPropertyAccuracy, testValueTypeDouble), + fmt.Sprintf("v1.%s.%s", testPropertyAccuracy, testValueTypeDouble), + fmt.Sprintf("%s_v2.%s", testPropertyAccuracy, testValueTypeDouble), + fmt.Sprintf("my %s.%s", testPropertyAccuracy, testValueTypeDouble), // spaces are allowed + fmt.Sprintf("%s@special.%s", testPropertyAccuracy, testValueTypeDouble), // special chars allowed + } + + for _, validOrderBy := range validTestCases { + t.Run("PropertyName: "+validOrderBy, func(t *testing.T) { + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of(validOrderBy), + SortOrder: apiutils.Of("ASC"), + }, + } + _, err := repo.List(listOptions) + // These should not error - non-existent properties just fallback to ID ordering + require.NoError(t, err, "Should not error for property name: "+validOrderBy) + }) + } + }) + + t.Run("TestInvalidCustomPropertyValueType_Error", func(t *testing.T) { + // Create a new model for this test + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-model-invalid-value-type"), + ExternalID: apiutils.Of("test-model-invalid-value-type-ext"), + }, + } + savedTestModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create an artifact + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: 
&models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("test-artifact"), + ExternalID: apiutils.Of("test-artifact-ext"), + MetricsType: models.MetricsTypeAccuracy, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + } + _, err = metricsArtifactRepo.Save(metricsArtifact, savedTestModel.GetID()) + require.NoError(t, err) + + // Test with invalid value type - should return error + testCases := []struct { + name string + orderBy string + expectedErr string + }{ + { + name: "Invalid value type: int_valueeee", + orderBy: fmt.Sprintf("%s.int_valueeee", + testPropertyAccuracy), + expectedErr: "invalid custom property value type 'int_valueeee'", + }, + { + name: "Invalid value type: double_val", + orderBy: fmt.Sprintf("%s.double_val", + testPropertyScore), + expectedErr: "invalid custom property value type 'double_val'", + }, + { + name: "Invalid value type: str_value", + orderBy: fmt.Sprintf("%s.str_value", + testPropertyTimestamp), + expectedErr: "invalid custom property value type 'str_value'", + }, + { + name: "Invalid value type: random", + orderBy: "property.random", + expectedErr: "invalid custom property value type 'random'", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of(tc.orderBy), + SortOrder: apiutils.Of("ASC"), + }, + } + _, err := repo.List(listOptions) + require.Error(t, err, "Should return error for invalid value type") + assert.Contains(t, err.Error(), tc.expectedErr, "Error message should mention the invalid value type") + }) + } + }) + + t.Run("TestInvalidCustomPropertyFormat_FallbackToID", func(t *testing.T) { + // Create a new model for this test + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-model-invalid-property-format"), + ExternalID: 
apiutils.Of("test-model-invalid-property-ext"), + }, + } + savedTestModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create some artifacts + for i := 1; i <= 3; i++ { + customProps := []dbmodels.Properties{ + { + Name: "accuracy", + DoubleValue: apiutils.Of(float64(i) * 0.1), + }, + } + + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of(fmt.Sprintf("invalid-format-artifact-%d", i)), + ExternalID: apiutils.Of(fmt.Sprintf("invalid-format-ext-%d", i)), + MetricsType: models.MetricsTypeAccuracy, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + CustomProperties: &customProps, + } + _, err := metricsArtifactRepo.Save(metricsArtifact, savedTestModel.GetID()) + require.NoError(t, err) + } + + // Test with invalid format (no .double_value suffix) - should fallback to ID ordering + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("accuracy"), // Invalid: missing .double_value + SortOrder: apiutils.Of("ASC"), + }, + } + result, err := repo.List(listOptions) + require.NoError(t, err, "Should not error on invalid custom property format") + require.NotNil(t, result) + + // Extract our test artifacts + var foundIDs []int32 + for _, artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil { + name := artifact.CatalogMetricsArtifact.GetAttributes().Name + if name != nil && len(*name) > 24 && (*name)[:24] == "invalid-format-artifact-" { + foundIDs = append(foundIDs, *artifact.CatalogMetricsArtifact.GetID()) + } + } + } + + require.GreaterOrEqual(t, len(foundIDs), 3, "Should find all test artifacts") + + // Verify it's ordered by ID (ascending) since it fell back to default + for i := 0; i < len(foundIDs)-1; i++ { + assert.Less(t, foundIDs[i], foundIDs[i+1], + "Should be ordered by ID 
(fallback) when custom property format is invalid") + } + + // Test with another invalid format (random string) - should also fallback to ID ordering + listOptions = models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("nonexistent_property"), + SortOrder: apiutils.Of("ASC"), + }, + } + result, err = repo.List(listOptions) + require.NoError(t, err, "Should not error on nonexistent property") + require.NotNil(t, result) + + // Should still return results, just ordered by ID + foundIDs = []int32{} + for _, artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil { + name := artifact.CatalogMetricsArtifact.GetAttributes().Name + if name != nil && len(*name) > 24 && (*name)[:24] == "invalid-format-artifact-" { + foundIDs = append(foundIDs, *artifact.CatalogMetricsArtifact.GetID()) + } + } + } + + require.GreaterOrEqual(t, len(foundIDs), 3, "Should still find all test artifacts") + + // Verify it's ordered by ID + for i := 0; i < len(foundIDs)-1; i++ { + assert.Less(t, foundIDs[i], foundIDs[i+1], + "Should be ordered by ID (fallback) when property doesn't exist") + } + }) + + t.Run("TestCustomPropertyOrdering_WithAndWithoutProperty", func(t *testing.T) { + // Create a new model for this test + testModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-model-mixed-properties"), + ExternalID: apiutils.Of("test-model-mixed-properties-ext"), + }, + } + savedTestModel, err := catalogModelRepo.Save(testModel) + require.NoError(t, err) + + // Create artifacts: some WITH accuracy property, some WITHOUT + testArtifacts := []struct { + name string + accuracy *float64 // nil means no property + }{ + {"artifact-with-high-accuracy", apiutils.Of(0.95)}, + {"artifact-without-property-1", nil}, // No accuracy property + {"artifact-with-low-accuracy", apiutils.Of(0.60)}, + 
{"artifact-without-property-2", nil}, // No accuracy property + {"artifact-with-medium-accuracy", apiutils.Of(0.80)}, + {"artifact-without-property-3", nil}, // No accuracy property + } + + artifactIDMap := make(map[string]int32) + for _, tc := range testArtifacts { + var customProps *[]dbmodels.Properties + if tc.accuracy != nil { + customProps = &[]dbmodels.Properties{ + { + Name: "accuracy", + DoubleValue: tc.accuracy, + }, + } + } + + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + TypeID: apiutils.Of(int32(metricsArtifactTypeID)), + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of(tc.name), + ExternalID: apiutils.Of(fmt.Sprintf("mixed-prop-test-%s", tc.name)), + MetricsType: models.MetricsTypeAccuracy, + ArtifactType: apiutils.Of("metrics-artifact"), + }, + CustomProperties: customProps, + } + saved, err := metricsArtifactRepo.Save(metricsArtifact, savedTestModel.GetID()) + require.NoError(t, err) + artifactIDMap[tc.name] = *saved.GetID() + } + + // Test ordering by accuracy.double_value ASC + // Expected order: + // 1. artifact-with-low-accuracy (0.60) + // 2. artifact-with-medium-accuracy (0.80) + // 3. artifact-with-high-accuracy (0.95) + // 4. artifact-without-property-1 (ordered by ID) + // 5. artifact-without-property-2 (ordered by ID) + // 6. 
artifact-without-property-3 (ordered by ID) + listOptions := models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("accuracy.double_value"), + SortOrder: apiutils.Of("ASC"), + }, + } + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Extract our test artifacts in order + var orderedArtifacts []struct { + name string + accuracy *float64 + id int32 + } + for _, artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil { + name := artifact.CatalogMetricsArtifact.GetAttributes().Name + if name != nil { + // Check if it's one of our test artifacts + if expectedID, exists := artifactIDMap[*name]; exists { + var accuracy *float64 + if artifact.CatalogMetricsArtifact.GetCustomProperties() != nil { + for _, prop := range *artifact.CatalogMetricsArtifact.GetCustomProperties() { + if prop.Name == "accuracy" && prop.DoubleValue != nil { + accuracy = prop.DoubleValue + break + } + } + } + orderedArtifacts = append(orderedArtifacts, struct { + name string + accuracy *float64 + id int32 + }{*name, accuracy, expectedID}) + } + } + } + } + + require.Equal(t, 6, len(orderedArtifacts), "Should find all 6 test artifacts") + + // Verify ordering: + // 1. 
First should be artifacts WITH accuracy property, ordered by accuracy ASC + withPropertyCount := 0 + for i := 0; i < len(orderedArtifacts); i++ { + if orderedArtifacts[i].accuracy != nil { + withPropertyCount++ + // Verify ascending order among artifacts with property + if i > 0 && orderedArtifacts[i-1].accuracy != nil { + assert.LessOrEqual(t, *orderedArtifacts[i-1].accuracy, *orderedArtifacts[i].accuracy, + "Artifacts with accuracy should be ordered by accuracy ASC") + } + } else { + // Once we hit artifacts without property, all remaining should be without property + for j := i; j < len(orderedArtifacts); j++ { + assert.Nil(t, orderedArtifacts[j].accuracy, + "Artifacts without property should come after artifacts with property") + } + break + } + } + + assert.Equal(t, 3, withPropertyCount, "Should have 3 artifacts with accuracy property first") + + // 2. Verify artifacts WITHOUT property are ordered by ID + withoutPropertyArtifacts := orderedArtifacts[withPropertyCount:] + for i := 0; i < len(withoutPropertyArtifacts)-1; i++ { + assert.Less(t, withoutPropertyArtifacts[i].id, withoutPropertyArtifacts[i+1].id, + "Artifacts without property should be ordered by ID") + } + + // Test with DESC order + listOptions = models.CatalogArtifactListOptions{ + ParentResourceID: savedTestModel.GetID(), + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("accuracy.double_value"), + SortOrder: apiutils.Of("DESC"), + }, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Extract artifacts again + orderedArtifacts = []struct { + name string + accuracy *float64 + id int32 + }{} + for _, artifact := range result.Items { + if artifact.CatalogMetricsArtifact != nil { + name := artifact.CatalogMetricsArtifact.GetAttributes().Name + if name != nil { + if expectedID, exists := artifactIDMap[*name]; exists { + var accuracy *float64 + if artifact.CatalogMetricsArtifact.GetCustomProperties() != nil { + for _, prop := range 
*artifact.CatalogMetricsArtifact.GetCustomProperties() { + if prop.Name == "accuracy" && prop.DoubleValue != nil { + accuracy = prop.DoubleValue + break + } + } + } + orderedArtifacts = append(orderedArtifacts, struct { + name string + accuracy *float64 + id int32 + }{*name, accuracy, expectedID}) + } + } + } + } + + require.Equal(t, 6, len(orderedArtifacts), "Should find all 6 test artifacts") + + // Verify DESC ordering: + // Expected order: + // 1. artifact-with-high-accuracy (0.95) + // 2. artifact-with-medium-accuracy (0.80) + // 3. artifact-with-low-accuracy (0.60) + // 4. artifact-without-property-1 (ordered by ID) + // 5. artifact-without-property-2 (ordered by ID) + // 6. artifact-without-property-3 (ordered by ID) + withPropertyCount = 0 + for i := 0; i < len(orderedArtifacts); i++ { + if orderedArtifacts[i].accuracy != nil { + withPropertyCount++ + // Verify descending order among artifacts with property + if i > 0 && orderedArtifacts[i-1].accuracy != nil { + assert.GreaterOrEqual(t, *orderedArtifacts[i-1].accuracy, *orderedArtifacts[i].accuracy, + "Artifacts with accuracy should be ordered by accuracy DESC") + } + } else { + // Once we hit artifacts without property, all remaining should be without property + for j := i; j < len(orderedArtifacts); j++ { + assert.Nil(t, orderedArtifacts[j].accuracy, + "Artifacts without property should come after artifacts with property (DESC)") + } + break + } + } + + assert.Equal(t, 3, withPropertyCount, "Should have 3 artifacts with accuracy property first (DESC)") + + // Verify artifacts WITHOUT property are ordered by ID in DESC too + withoutPropertyArtifacts = orderedArtifacts[withPropertyCount:] + for i := 0; i < len(withoutPropertyArtifacts)-1; i++ { + assert.Less(t, withoutPropertyArtifacts[i].id, withoutPropertyArtifacts[i+1].id, + "Artifacts without property should be ordered by ID (even in DESC mode)") + } + }) +} diff --git a/catalog/internal/db/service/catalog_common.go 
b/catalog/internal/db/service/catalog_common.go new file mode 100644 index 0000000000..306dd902be --- /dev/null +++ b/catalog/internal/db/service/catalog_common.go @@ -0,0 +1,72 @@ +package service + +import ( + "fmt" + + "github.com/kubeflow/model-registry/internal/db/scopes" + "gorm.io/gorm" +) + +// CatalogOrderByColumns includes NAME in addition to standard columns. +// This is specific to catalog and not available in model registry. +var CatalogOrderByColumns = map[string]string{ + "ID": "id", + "CREATE_TIME": "create_time_since_epoch", + "LAST_UPDATE_TIME": "last_update_time_since_epoch", + "NAME": "name", + "id": "id", // default fallback +} + +// CreateNamePaginationToken creates a pagination token for NAME ordering. +// If name is nil, it falls back to using the entity ID. +func CreateNamePaginationToken(entityID int32, name *string) string { + if name != nil { + return scopes.CreateNextPageToken(entityID, name) + } + // Fallback to ID if name is nil + return scopes.CreateNextPageToken(entityID, fmt.Sprintf("%d", entityID)) +} + +// ApplyNameOrdering applies NAME-based ordering with cursor pagination to a query. +// This handles the catalog-specific NAME ordering which requires string comparison +// in WHERE clauses (not integer casting like standard pagination). +// +// Parameters: +// - query: The GORM query to modify +// - tableName: The table name to use in SQL (e.g., "Context" or "Artifact") +// - sortOrder: The sort order ("ASC" or "DESC") +// - nextPageToken: Optional pagination token for cursor-based pagination +// - pageSize: The page size (0 means no limit) +// +// Returns the modified query with NAME ordering and pagination applied. 
// ApplyNameOrdering applies NAME-based ordering with cursor pagination to a
// query. Catalog NAME ordering needs a string comparison in the cursor WHERE
// clause, not the integer casting used by standard pagination.
//
// Parameters:
//   - query: the GORM query to modify
//   - tableName: table name used in raw SQL (e.g. "Context" or "Artifact")
//   - sortOrder: "ASC" or "DESC" (anything else is treated as ASC)
//   - nextPageToken: optional cursor token; an undecodable token is ignored
//   - pageSize: page size (0 means no limit)
//
// Returns the query with ordering, cursor filter, and limit applied.
func ApplyNameOrdering(query *gorm.DB, tableName string, sortOrder string, nextPageToken string, pageSize int32) *gorm.DB {
	// Normalize sort order: only the literal "DESC" flips the direction.
	order := "ASC"
	if sortOrder == "DESC" {
		order = "DESC"
	}

	// Order by name with id ASC as a deterministic tie-breaker.
	// NOTE(review): tableName is interpolated into raw SQL via Sprintf; it
	// must only ever come from trusted callers, never from user input.
	query = query.Order(fmt.Sprintf("%s.name %s, %s.id ASC", tableName, order, tableName))

	// Cursor pagination: resume strictly after the (name, id) pair encoded in
	// the token. A token that fails to decode is silently ignored, which
	// restarts the listing from the beginning.
	if nextPageToken != "" {
		if cursor, err := scopes.DecodeCursor(nextPageToken); err == nil {
			// Cursor pagination based on name (string comparison)
			cmp := ">"
			if order == "DESC" {
				cmp = "<"
			}
			// (name > cursor) OR (name = cursor AND id > cursor.ID). The id
			// comparison stays ">" in both directions because ids are always
			// ordered ASC within equal names.
			query = query.Where(fmt.Sprintf("(%s.name %s ? OR (%s.name = ? AND %s.id > ?))", tableName, cmp, tableName, tableName),
				cursor.Value, cursor.Value, cursor.ID)
		}
	}

	// Fetch one extra row so callers can detect whether another page exists.
	if pageSize > 0 {
		query = query.Limit(int(pageSize) + 1) // +1 to detect if there are more pages
	}

	return query
}

package service

import (
	"errors"
	"fmt"
	"math"

	"github.com/kubeflow/model-registry/catalog/internal/db/models"
	"github.com/kubeflow/model-registry/internal/apiutils"
	dbmodels "github.com/kubeflow/model-registry/internal/db/models"
	"github.com/kubeflow/model-registry/internal/db/schema"
	"github.com/kubeflow/model-registry/internal/db/service"
	"github.com/kubeflow/model-registry/internal/db/utils"
	"gorm.io/gorm"
)

// ErrCatalogMetricsArtifactNotFound is returned when a metrics artifact lookup
// by id (or by name, via lookupMetricsArtifactByName) finds no row.
var ErrCatalogMetricsArtifactNotFound = errors.New("catalog metrics artifact by id not found")

// CatalogMetricsArtifactRepositoryImpl is a thin wrapper around the shared
// GenericRepository, specialized for catalog metrics artifacts.
type CatalogMetricsArtifactRepositoryImpl struct {
	*service.GenericRepository[models.CatalogMetricsArtifact, schema.Artifact, schema.ArtifactProperty, *models.CatalogMetricsArtifactListOptions]
}

// NewCatalogMetricsArtifactRepository wires the generic repository with the
// catalog-metrics-specific mapping functions and list filters. typeID is the
// MLMD type id assigned to the CatalogMetricsArtifact type.
func NewCatalogMetricsArtifactRepository(db *gorm.DB, typeID int32) models.CatalogMetricsArtifactRepository {
	config := service.GenericRepositoryConfig[models.CatalogMetricsArtifact, schema.Artifact, schema.ArtifactProperty, *models.CatalogMetricsArtifactListOptions]{
		DB:                      db,
		TypeID:                  typeID,
		EntityToSchema:          mapCatalogMetricsArtifactToArtifact,
		SchemaToEntity:          mapDataLayerToCatalogMetricsArtifact,
		EntityToProperties:      mapCatalogMetricsArtifactToArtifactProperties,
		NotFoundError:           ErrCatalogMetricsArtifactNotFound,
		EntityName:              "catalog metrics artifact",
		PropertyFieldName:       "artifact_id",
		ApplyListFilters:        applyCatalogMetricsArtifactListFilters,
		IsNewEntity:             func(entity models.CatalogMetricsArtifact) bool { return entity.GetID() == nil },
		HasCustomProperties:     func(entity models.CatalogMetricsArtifact) bool { return entity.GetCustomProperties() != nil },
		PreserveHistoricalTimes: true, // Catalog preserves timestamps from YAML source data
	}

	return &CatalogMetricsArtifactRepositoryImpl{
		GenericRepository: service.NewGenericRepository(config),
	}
}

// List adapts the value-typed list options to the pointer-typed generic List.
func (r *CatalogMetricsArtifactRepositoryImpl) List(listOptions models.CatalogMetricsArtifactListOptions) (*dbmodels.ListWrapper[models.CatalogMetricsArtifact], error) {
	return r.GenericRepository.List(&listOptions)
}

// Save validates the metrics artifact and persists it, optionally attributing
// it to parentResourceID. A new artifact (nil ID) whose name already exists
// for this type adopts the existing row's ID, turning the save into an update.
func (r *CatalogMetricsArtifactRepositoryImpl) Save(ma models.CatalogMetricsArtifact, parentResourceID *int32) (models.CatalogMetricsArtifact, error) {
	config := r.GetConfig()
	// Default the type id from the repository config when the caller left it unset.
	if ma.GetTypeID() == nil {
		if config.TypeID > 0 {
			ma.SetTypeID(config.TypeID)
		}
	}

	attr := ma.GetAttributes()
	if attr == nil {
		return ma, fmt.Errorf("invalid artifact: nil attributes")
	}

	// Deduplicate by name: reuse the ID of an existing artifact with the same
	// name so repeated imports update rather than duplicate. A not-found
	// result is the normal "this is new" path and is not an error.
	if ma.GetID() == nil && attr.Name != nil {
		existing, err := r.lookupMetricsArtifactByName(*attr.Name)
		if err != nil {
			if !errors.Is(err, ErrCatalogMetricsArtifactNotFound) {
				return ma, fmt.Errorf("error finding existing metrics artifact named %s: %w", *attr.Name, err)
			}
		} else {
			ma.SetID(existing.ID)
		}
	}

	// Only the known metrics types are accepted.
	switch attr.MetricsType {
	case models.MetricsTypeAccuracy, models.MetricsTypePerformance:
		// OK
	default:
		return ma, fmt.Errorf("invalid artifact: unknown metrics type: %s", attr.MetricsType)
	}

	return r.GenericRepository.Save(ma, parentResourceID)
}

// BatchSave inserts many metrics artifacts (plus their properties and, when
// parentResourceID is set, their attributions) in a single transaction using
// batched INSERTs. Unlike Save, it does NOT deduplicate by name — callers are
// expected to pass only new artifacts.
func (r *CatalogMetricsArtifactRepositoryImpl) BatchSave(artifacts []models.CatalogMetricsArtifact, parentResourceID *int32) ([]models.CatalogMetricsArtifact, error) {
	numArtifacts := len(artifacts)
	if numArtifacts == 0 {
		return artifacts, nil
	}

	config := r.GetConfig()

	// Pre-allocate schema artifacts slice
	schemaArtifacts := make([]schema.Artifact, numArtifacts)

	// Validate, prepare, and convert all artifacts in one pass
	for i, ma := range artifacts {
		if ma.GetTypeID() == nil {
			// NOTE(review): config.TypeID is already int32, so the int32()
			// conversion and the math.MaxInt32 bound look redundant here and
			// are inconsistent with Save's plain `config.TypeID > 0` check —
			// confirm whether both paths should match.
			if config.TypeID > 0 && config.TypeID < math.MaxInt32 {
				ma.SetTypeID(int32(config.TypeID))
			}
		}

		attr := ma.GetAttributes()
		if attr == nil {
			return nil, fmt.Errorf("invalid artifact at index %d: nil attributes", i)
		}

		// Same metrics-type whitelist as Save.
		switch attr.MetricsType {
		case models.MetricsTypeAccuracy, models.MetricsTypePerformance:
			// OK
		default:
			return nil, fmt.Errorf("invalid artifact at index %d: unknown metrics type: %s", i, attr.MetricsType)
		}

		schemaArtifacts[i] = mapCatalogMetricsArtifactToArtifact(ma)
		artifacts[i] = ma
	}

	// Execute all batch operations in a single transaction
	err := config.DB.Transaction(func(tx *gorm.DB) error {
		// Batch insert artifacts (batch size of 100).
		// NOTE(review): the ID collection below relies on CreateInBatches
		// back-filling auto-increment primary keys into schemaArtifacts —
		// verify this holds for the configured dialect.
		if err := tx.CreateInBatches(&schemaArtifacts, 100).Error; err != nil {
			return fmt.Errorf("failed to batch insert artifacts: %w", err)
		}

		// Pre-allocate slices for properties and attributions
		// Estimate ~10 properties per artifact on average
		// NOTE(review): the estimate above is not applied — allProperties is
		// created with zero capacity; either drop the comment or pre-allocate.
		allProperties := []schema.ArtifactProperty{}
		var allAttributions []schema.Attribution
		if parentResourceID != nil {
			allAttributions = make([]schema.Attribution, 0, numArtifacts)
		}

		// Collect all properties and attributions
		for i, schemaArtifact := range schemaArtifacts {
			artifactID := schemaArtifact.ID
			// Propagate the DB-assigned ID back to the caller's entity.
			artifacts[i].SetID(artifactID)

			// Collect properties
			properties := mapCatalogMetricsArtifactToArtifactProperties(artifacts[i], artifactID)
			allProperties = append(allProperties, properties...)

			// Collect attribution if parentResourceID is provided
			if parentResourceID != nil {
				allAttributions = append(allAttributions, schema.Attribution{
					ContextID:  *parentResourceID,
					ArtifactID: artifactID,
				})
			}
		}

		// Batch insert all properties
		if len(allProperties) > 0 {
			if err := tx.CreateInBatches(&allProperties, 100).Error; err != nil {
				return fmt.Errorf("failed to batch insert properties: %w", err)
			}
		}

		// Batch insert all attributions
		if len(allAttributions) > 0 {
			if err := tx.CreateInBatches(&allAttributions, 100).Error; err != nil {
				return fmt.Errorf("failed to batch insert attributions: %w", err)
			}
		}

		return nil
	})

	if err != nil {
		return nil, err
	}

	return artifacts, nil
}

// lookupMetricsArtifactByName fetches the artifact row with the given exact
// name and this repository's type id. Wraps gorm.ErrRecordNotFound in the
// configured NotFoundError so callers can errors.Is against it.
func (r *CatalogMetricsArtifactRepositoryImpl) lookupMetricsArtifactByName(name string) (*schema.Artifact, error) {
	var entity schema.Artifact

	config := r.GetConfig()

	if err := config.DB.Where("name = ? AND type_id = ?", name, config.TypeID).First(&entity).Error; err != nil {
		if errors.Is(err, gorm.ErrRecordNotFound) {
			return nil, fmt.Errorf("%w: %v", config.NotFoundError, err)
		}
		return nil, fmt.Errorf("error getting %s by name: %w", config.EntityName, err)
	}

	return &entity, nil
}

// applyCatalogMetricsArtifactListFilters narrows a list query by name,
// external id, and/or parent context attribution. Name and ExternalID are
// mutually exclusive, with Name taking precedence.
func applyCatalogMetricsArtifactListFilters(query *gorm.DB, listOptions *models.CatalogMetricsArtifactListOptions) *gorm.DB {
	if listOptions.Name != nil {
		// Suffix match on "...:<name>" — presumably stored names are prefixed
		// ("<parent>:<name>"). NOTE(review): '%' or '_' inside the name act as
		// unescaped LIKE wildcards; confirm names can never contain them.
		query = query.Where("name LIKE ?", fmt.Sprintf("%%:%s", *listOptions.Name))
	} else if listOptions.ExternalID != nil {
		query = query.Where("external_id = ?", listOptions.ExternalID)
	}

	if listOptions.ParentResourceID != nil {
		// Restrict to artifacts attributed to the given context (catalog model).
		query = query.Joins(utils.BuildAttributionJoin(query)).
			Where(utils.GetColumnRef(query, &schema.Attribution{}, "context_id")+" = ?", listOptions.ParentResourceID)
	}

	return query
}

// mapCatalogMetricsArtifactToArtifact converts the domain entity to the
// Artifact schema row. A nil entity maps to the zero row.
func mapCatalogMetricsArtifactToArtifact(catalogMetricsArtifact models.CatalogMetricsArtifact) schema.Artifact {
	if catalogMetricsArtifact == nil {
		return schema.Artifact{}
	}

	artifact := schema.Artifact{
		ID:     apiutils.ZeroIfNil(catalogMetricsArtifact.GetID()),
		TypeID: apiutils.ZeroIfNil(catalogMetricsArtifact.GetTypeID()),
	}

	if catalogMetricsArtifact.GetAttributes() != nil {
		artifact.Name = catalogMetricsArtifact.GetAttributes().Name
		artifact.ExternalID = catalogMetricsArtifact.GetAttributes().ExternalID
		artifact.CreateTimeSinceEpoch = apiutils.ZeroIfNil(catalogMetricsArtifact.GetAttributes().CreateTimeSinceEpoch)
		artifact.LastUpdateTimeSinceEpoch = apiutils.ZeroIfNil(catalogMetricsArtifact.GetAttributes().LastUpdateTimeSinceEpoch)
	}

	return artifact
}

// mapCatalogMetricsArtifactToArtifactProperties flattens the entity's
// attributes and property maps into ArtifactProperty rows for artifactID.
// MetricsType is persisted as a non-custom "metricsType" string property
// (the inverse of mapDataLayerToCatalogMetricsArtifact).
func mapCatalogMetricsArtifactToArtifactProperties(catalogMetricsArtifact models.CatalogMetricsArtifact, artifactID int32) []schema.ArtifactProperty {
	if catalogMetricsArtifact == nil {
		return []schema.ArtifactProperty{}
	}

	properties := []schema.ArtifactProperty{}

	// Add the metricsType as a property
	if catalogMetricsArtifact.GetAttributes() != nil {
		metricsTypeProp := dbmodels.Properties{
			Name:        "metricsType",
			StringValue: apiutils.Of(string(catalogMetricsArtifact.GetAttributes().MetricsType)),
		}
		properties = append(properties, service.MapPropertiesToArtifactProperty(metricsTypeProp, artifactID, false))
	}

	if catalogMetricsArtifact.GetProperties() != nil {
		for _, prop := range *catalogMetricsArtifact.GetProperties() {
			properties = append(properties, service.MapPropertiesToArtifactProperty(prop, artifactID, false))
		}
	}

	if catalogMetricsArtifact.GetCustomProperties() != nil {
		for _, prop := range *catalogMetricsArtifact.GetCustomProperties() {
			properties = append(properties, service.MapPropertiesToArtifactProperty(prop, artifactID, true))
		}
	}

	return properties
}

// mapDataLayerToCatalogMetricsArtifact rebuilds the domain entity from an
// Artifact row and its property rows. The reserved non-custom "metricsType"
// property is lifted into the MetricsType attribute rather than kept in the
// Properties list; all other rows are split by IsCustomProperty.
func mapDataLayerToCatalogMetricsArtifact(artifact schema.Artifact, artProperties []schema.ArtifactProperty) models.CatalogMetricsArtifact {
	catalogMetricsArtifact := models.CatalogMetricsArtifactImpl{
		ID:     &artifact.ID,
		TypeID: &artifact.TypeID,
		Attributes: &models.CatalogMetricsArtifactAttributes{
			Name:                     artifact.Name,
			ArtifactType:             apiutils.Of(models.CatalogMetricsArtifactType),
			ExternalID:               artifact.ExternalID,
			CreateTimeSinceEpoch:     &artifact.CreateTimeSinceEpoch,
			LastUpdateTimeSinceEpoch: &artifact.LastUpdateTimeSinceEpoch,
		},
	}

	customProperties := []dbmodels.Properties{}
	properties := []dbmodels.Properties{}

	for _, prop := range artProperties {
		mappedProperty := service.MapArtifactPropertyToProperties(prop)

		// Extract metricsType from properties and set it as an attribute
		if mappedProperty.Name == "metricsType" && !prop.IsCustomProperty {
			if mappedProperty.StringValue != nil {
				catalogMetricsArtifact.Attributes.MetricsType = models.MetricsType(*mappedProperty.StringValue)
			}
		} else if prop.IsCustomProperty {
			customProperties = append(customProperties, mappedProperty)
		} else {
			properties = append(properties, mappedProperty)
		}
	}

	catalogMetricsArtifact.CustomProperties = &customProperties
	catalogMetricsArtifact.Properties = &properties

	return &catalogMetricsArtifact
}
catalog metrics artifact + catalogMetricsArtifact := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("test-catalog-metrics-artifact"), + ExternalID: apiutils.Of("catalog-metrics-ext-123"), + MetricsType: models.MetricsTypeAccuracy, + }, + Properties: &[]dbmodels.Properties{ + { + Name: "description", + StringValue: apiutils.Of("Test catalog metrics artifact description"), + }, + }, + CustomProperties: &[]dbmodels.Properties{ + { + Name: "custom-metrics-prop", + StringValue: apiutils.Of("custom-metrics-value"), + }, + }, + } + + saved, err := repo.Save(catalogMetricsArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved) + require.NotNil(t, saved.GetID()) + assert.Equal(t, "test-catalog-metrics-artifact", *saved.GetAttributes().Name) + assert.Equal(t, "catalog-metrics-ext-123", *saved.GetAttributes().ExternalID) + assert.Equal(t, models.MetricsTypeAccuracy, saved.GetAttributes().MetricsType) + + // Test updating the same catalog metrics artifact + catalogMetricsArtifact.ID = saved.GetID() + catalogMetricsArtifact.GetAttributes().Name = apiutils.Of("updated-catalog-metrics-artifact") + catalogMetricsArtifact.GetAttributes().MetricsType = models.MetricsTypePerformance + // Preserve CreateTimeSinceEpoch from the saved entity + catalogMetricsArtifact.GetAttributes().CreateTimeSinceEpoch = saved.GetAttributes().CreateTimeSinceEpoch + + updated, err := repo.Save(catalogMetricsArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, updated) + assert.Equal(t, *saved.GetID(), *updated.GetID()) + assert.Equal(t, "updated-catalog-metrics-artifact", *updated.GetAttributes().Name) + assert.Equal(t, models.MetricsTypePerformance, updated.GetAttributes().MetricsType) + }) + + t.Run("TestGetByID", func(t *testing.T) { + // First create a catalog model + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: 
apiutils.Of("test-catalog-model-for-getbyid-metrics"), + ExternalID: apiutils.Of("catalog-model-getbyid-metrics-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Create a catalog metrics artifact to retrieve + catalogMetricsArtifact := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("get-test-catalog-metrics-artifact"), + ExternalID: apiutils.Of("get-catalog-metrics-ext-123"), + MetricsType: models.MetricsTypeAccuracy, + }, + } + + saved, err := repo.Save(catalogMetricsArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved.GetID()) + + // Test retrieving by ID + retrieved, err := repo.GetByID(*saved.GetID()) + require.NoError(t, err) + require.NotNil(t, retrieved) + assert.Equal(t, *saved.GetID(), *retrieved.GetID()) + assert.Equal(t, "get-test-catalog-metrics-artifact", *retrieved.GetAttributes().Name) + assert.Equal(t, "get-catalog-metrics-ext-123", *retrieved.GetAttributes().ExternalID) + assert.Equal(t, models.MetricsTypeAccuracy, retrieved.GetAttributes().MetricsType) + + // Test retrieving non-existent ID + _, err = repo.GetByID(99999) + assert.ErrorIs(t, err, service.ErrCatalogMetricsArtifactNotFound) + }) + + t.Run("TestList", func(t *testing.T) { + // Create a catalog model for the artifacts + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-list-metrics"), + ExternalID: apiutils.Of("catalog-model-list-metrics-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Create multiple catalog metrics artifacts for listing + testArtifacts := []*models.CatalogMetricsArtifactImpl{ + { + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("list-catalog-metrics-artifact-1"), + ExternalID: apiutils.Of("list-catalog-metrics-ext-1"), + MetricsType: 
models.MetricsTypeAccuracy, + }, + }, + { + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("list-catalog-metrics-artifact-2"), + ExternalID: apiutils.Of("list-catalog-metrics-ext-2"), + MetricsType: models.MetricsTypePerformance, + }, + }, + { + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("list-catalog-metrics-artifact-3"), + ExternalID: apiutils.Of("list-catalog-metrics-ext-3"), + MetricsType: models.MetricsTypePerformance, + }, + }, + } + + // Save all test artifacts + var savedArtifacts []models.CatalogMetricsArtifact + for _, artifact := range testArtifacts { + saved, err := repo.Save(artifact, savedCatalogModel.GetID()) + require.NoError(t, err) + savedArtifacts = append(savedArtifacts, saved) + } + + // Test listing all artifacts + listOptions := models.CatalogMetricsArtifactListOptions{} + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + assert.GreaterOrEqual(t, len(result.Items), 3) // At least our 3 test artifacts + + // Test filtering by name + nameFilter := "list-catalog-metrics-artifact-1" + listOptions = models.CatalogMetricsArtifactListOptions{ + Name: &nameFilter, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + if len(result.Items) > 0 { + assert.Equal(t, 1, len(result.Items)) + assert.Equal(t, "list-catalog-metrics-artifact-1", *result.Items[0].GetAttributes().Name) + } + + // Test filtering by external ID + externalIDFilter := "list-catalog-metrics-ext-2" + listOptions = models.CatalogMetricsArtifactListOptions{ + ExternalID: &externalIDFilter, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + if len(result.Items) > 0 { + assert.Equal(t, 1, len(result.Items)) + assert.Equal(t, "list-catalog-metrics-ext-2", *result.Items[0].GetAttributes().ExternalID) + } + + // Test filtering by parent resource ID (catalog model) + listOptions = 
models.CatalogMetricsArtifactListOptions{ + ParentResourceID: savedCatalogModel.GetID(), + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + assert.GreaterOrEqual(t, len(result.Items), 3) // Should find our 3 test artifacts + }) + + t.Run("TestListWithPropertiesAndCustomProperties", func(t *testing.T) { + // Create a catalog model + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-props-metrics"), + ExternalID: apiutils.Of("catalog-model-props-metrics-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Create a catalog metrics artifact with both properties and custom properties + catalogMetricsArtifact := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("props-test-catalog-metrics-artifact"), + ExternalID: apiutils.Of("props-catalog-metrics-ext-123"), + MetricsType: models.MetricsTypeAccuracy, + }, + Properties: &[]dbmodels.Properties{ + { + Name: "version", + StringValue: apiutils.Of("1.0.0"), + }, + { + Name: "value", + DoubleValue: apiutils.Of(0.95), + }, + }, + CustomProperties: &[]dbmodels.Properties{ + { + Name: "team", + StringValue: apiutils.Of("catalog-metrics-team"), + }, + { + Name: "is_validated", + BoolValue: apiutils.Of(true), + }, + }, + } + + saved, err := repo.Save(catalogMetricsArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved) + + // Retrieve and verify properties + retrieved, err := repo.GetByID(*saved.GetID()) + require.NoError(t, err) + require.NotNil(t, retrieved) + + // Check that metricsType is properly set + assert.Equal(t, models.MetricsTypeAccuracy, retrieved.GetAttributes().MetricsType) + + // Check regular properties + require.NotNil(t, retrieved.GetProperties()) + assert.Len(t, *retrieved.GetProperties(), 2) + + // Check custom properties + 
require.NotNil(t, retrieved.GetCustomProperties()) + assert.Len(t, *retrieved.GetCustomProperties(), 2) + + // Verify specific properties exist + properties := *retrieved.GetProperties() + var foundVersion, foundValue bool + for _, prop := range properties { + switch prop.Name { + case "version": + foundVersion = true + assert.Equal(t, "1.0.0", *prop.StringValue) + case "value": + foundValue = true + assert.Equal(t, 0.95, *prop.DoubleValue) + } + } + assert.True(t, foundVersion, "Should find version property") + assert.True(t, foundValue, "Should find value property") + + // Verify custom properties + customProperties := *retrieved.GetCustomProperties() + var foundTeam, foundIsValidated bool + for _, prop := range customProperties { + switch prop.Name { + case "team": + foundTeam = true + assert.Equal(t, "catalog-metrics-team", *prop.StringValue) + case "is_validated": + foundIsValidated = true + assert.Equal(t, true, *prop.BoolValue) + } + } + assert.True(t, foundTeam, "Should find team custom property") + assert.True(t, foundIsValidated, "Should find is_validated custom property") + }) + + t.Run("TestSaveWithoutParentResource", func(t *testing.T) { + // Test creating a catalog metrics artifact without parent resource attribution + catalogMetricsArtifact := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("standalone-catalog-metrics-artifact"), + ExternalID: apiutils.Of("standalone-catalog-metrics-ext"), + MetricsType: models.MetricsTypeAccuracy, + }, + Properties: &[]dbmodels.Properties{ + { + Name: "description", + StringValue: apiutils.Of("Standalone catalog metrics artifact without parent"), + }, + }, + } + + saved, err := repo.Save(catalogMetricsArtifact, nil) + require.NoError(t, err) + require.NotNil(t, saved) + require.NotNil(t, saved.GetID()) + assert.Equal(t, "standalone-catalog-metrics-artifact", *saved.GetAttributes().Name) + assert.Equal(t, models.MetricsTypeAccuracy, 
saved.GetAttributes().MetricsType) + + // Verify it can be retrieved + retrieved, err := repo.GetByID(*saved.GetID()) + require.NoError(t, err) + assert.Equal(t, "standalone-catalog-metrics-artifact", *retrieved.GetAttributes().Name) + assert.Equal(t, models.MetricsTypeAccuracy, retrieved.GetAttributes().MetricsType) + }) + + t.Run("TestListOrdering", func(t *testing.T) { + // Create a catalog model + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-ordering-metrics"), + ExternalID: apiutils.Of("catalog-model-ordering-metrics-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Create artifacts sequentially with time delays to ensure deterministic ordering + artifact1 := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("time-test-catalog-metrics-artifact-1"), + ExternalID: apiutils.Of("time-catalog-metrics-ext-1"), + MetricsType: models.MetricsTypeAccuracy, + }, + } + saved1, err := repo.Save(artifact1, savedCatalogModel.GetID()) + require.NoError(t, err) + + // Small delay to ensure different timestamps + time.Sleep(10 * time.Millisecond) + + artifact2 := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("time-test-catalog-metrics-artifact-2"), + ExternalID: apiutils.Of("time-catalog-metrics-ext-2"), + MetricsType: models.MetricsTypePerformance, + }, + } + saved2, err := repo.Save(artifact2, savedCatalogModel.GetID()) + require.NoError(t, err) + + // Test ordering by CREATE_TIME + listOptions := models.CatalogMetricsArtifactListOptions{ + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("CREATE_TIME"), + }, + } + + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Find our test artifacts in the results + var foundArtifact1, foundArtifact2 
models.CatalogMetricsArtifact + var index1, index2 = -1, -1 + + for i, item := range result.Items { + if *item.GetID() == *saved1.GetID() { + foundArtifact1 = item + index1 = i + } + if *item.GetID() == *saved2.GetID() { + foundArtifact2 = item + index2 = i + } + } + + // Verify both artifacts were found and artifact1 comes before artifact2 (ascending order) + require.NotEqual(t, -1, index1, "Artifact 1 should be found in results") + require.NotEqual(t, -1, index2, "Artifact 2 should be found in results") + assert.Less(t, index1, index2, "Artifact 1 should come before Artifact 2 when ordered by CREATE_TIME") + assert.Less(t, *foundArtifact1.GetAttributes().CreateTimeSinceEpoch, *foundArtifact2.GetAttributes().CreateTimeSinceEpoch, "Artifact 1 should have earlier create time") + }) + + t.Run("TestMetricsTypeField", func(t *testing.T) { + // Test various metrics types + metricsTypes := []models.MetricsType{models.MetricsTypeAccuracy, models.MetricsTypePerformance} + + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-metrics-types"), + ExternalID: apiutils.Of("catalog-model-metrics-types-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + for i, metricsType := range metricsTypes { + artifact := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of(fmt.Sprintf("metrics-type-test-%d", i)), + ExternalID: apiutils.Of(fmt.Sprintf("metrics-type-ext-%d", i)), + MetricsType: metricsType, + }, + } + + saved, err := repo.Save(artifact, savedCatalogModel.GetID()) + require.NoError(t, err) + assert.Equal(t, metricsType, saved.GetAttributes().MetricsType) + + // Verify retrieval preserves metricsType + retrieved, err := repo.GetByID(*saved.GetID()) + require.NoError(t, err) + assert.Equal(t, metricsType, retrieved.GetAttributes().MetricsType) + } + }) + + 
t.Run("TestSaveWithTypeIDSetting", func(t *testing.T) { + // Create a catalog model + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-typeid-metrics"), + ExternalID: apiutils.Of("catalog-model-typeid-metrics-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Test creating artifact without explicit type_id (should be set automatically) + catalogMetricsArtifact := &models.CatalogMetricsArtifactImpl{ + // Intentionally not setting TypeID to test auto-setting + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("typeid-test-metrics-artifact"), + ExternalID: apiutils.Of("typeid-metrics-ext-123"), + MetricsType: models.MetricsTypeAccuracy, + }, + } + + saved, err := repo.Save(catalogMetricsArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved) + require.NotNil(t, saved.GetTypeID()) + assert.Equal(t, int32(typeID), *saved.GetTypeID()) + assert.Equal(t, "typeid-test-metrics-artifact", *saved.GetAttributes().Name) + + // Test with explicitly set type_id (should not be overridden) + explicitTypeID := int32(typeID) + catalogMetricsArtifact2 := &models.CatalogMetricsArtifactImpl{ + TypeID: &explicitTypeID, + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("explicit-typeid-metrics-artifact"), + ExternalID: apiutils.Of("explicit-typeid-metrics-ext-123"), + MetricsType: models.MetricsTypePerformance, + }, + } + + saved2, err := repo.Save(catalogMetricsArtifact2, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved2) + require.NotNil(t, saved2.GetTypeID()) + assert.Equal(t, explicitTypeID, *saved2.GetTypeID()) + }) + + t.Run("TestSaveWithNameMatching", func(t *testing.T) { + // Create a catalog model + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: 
apiutils.Of("test-catalog-model-for-name-matching-metrics"), + ExternalID: apiutils.Of("catalog-model-name-match-metrics-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Create initial metrics artifact + artifactName := "name-matching-metrics-artifact" + catalogMetricsArtifact1 := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of(artifactName), + ExternalID: apiutils.Of("name-match-metrics-ext-123"), + MetricsType: models.MetricsTypeAccuracy, + }, + Properties: &[]dbmodels.Properties{ + { + Name: "accuracy", + StringValue: apiutils.Of("0.95"), + }, + }, + } + + saved1, err := repo.Save(catalogMetricsArtifact1, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved1) + originalID := *saved1.GetID() + assert.Equal(t, artifactName, *saved1.GetAttributes().Name) + assert.Equal(t, models.MetricsTypeAccuracy, saved1.GetAttributes().MetricsType) + + // Create second artifact with same name (should update existing) + catalogMetricsArtifact2 := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of(artifactName), // Same name + ExternalID: apiutils.Of("name-match-metrics-ext-456"), + MetricsType: models.MetricsTypePerformance, // Different metrics type + }, + Properties: &[]dbmodels.Properties{ + { + Name: "latency", + StringValue: apiutils.Of("50ms"), + }, + }, + } + + saved2, err := repo.Save(catalogMetricsArtifact2, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved2) + + // Should have same ID (updated existing) + assert.Equal(t, originalID, *saved2.GetID()) + assert.Equal(t, artifactName, *saved2.GetAttributes().Name) + assert.Equal(t, models.MetricsTypePerformance, saved2.GetAttributes().MetricsType) + assert.Equal(t, "name-match-metrics-ext-456", *saved2.GetAttributes().ExternalID) + + // Verify by retrieving from database + retrieved, 
err := repo.GetByID(originalID) + require.NoError(t, err) + assert.Equal(t, models.MetricsTypePerformance, retrieved.GetAttributes().MetricsType) + assert.Equal(t, "name-match-metrics-ext-456", *retrieved.GetAttributes().ExternalID) + + // Verify properties were updated + require.NotNil(t, retrieved.GetProperties()) + properties := *retrieved.GetProperties() + var foundLatency bool + for _, prop := range properties { + if prop.Name == "latency" { + foundLatency = true + assert.Equal(t, "50ms", *prop.StringValue) + break + } + } + assert.True(t, foundLatency, "Should find updated latency property") + + // Test that artifact with different name creates new entity + catalogMetricsArtifact3 := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("different-name-metrics-artifact"), + ExternalID: apiutils.Of("different-name-metrics-ext-789"), + MetricsType: models.MetricsTypeAccuracy, + }, + } + + saved3, err := repo.Save(catalogMetricsArtifact3, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved3) + + // Should have different ID (new entity) + assert.NotEqual(t, originalID, *saved3.GetID()) + assert.Equal(t, "different-name-metrics-artifact", *saved3.GetAttributes().Name) + }) + + t.Run("TestSaveWithNameMatchingNoExistingName", func(t *testing.T) { + // Create a catalog model + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-no-match-metrics"), + ExternalID: apiutils.Of("catalog-model-no-match-metrics-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Test saving artifact when no existing artifact with same name exists + catalogMetricsArtifact := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("unique-metrics-artifact-name"), + ExternalID: apiutils.Of("unique-metrics-ext-123"), 
+ MetricsType: models.MetricsTypeAccuracy, + }, + } + + saved, err := repo.Save(catalogMetricsArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved) + require.NotNil(t, saved.GetID()) + assert.Equal(t, "unique-metrics-artifact-name", *saved.GetAttributes().Name) + assert.Equal(t, models.MetricsTypeAccuracy, saved.GetAttributes().MetricsType) + }) + + t.Run("TestSaveWithInvalidMetricsType", func(t *testing.T) { + // Create a catalog model + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-invalid-metrics"), + ExternalID: apiutils.Of("catalog-model-invalid-metrics-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Test saving artifact with invalid metrics type (should fail) + catalogMetricsArtifact := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of("invalid-metrics-type-artifact"), + ExternalID: apiutils.Of("invalid-metrics-ext-123"), + MetricsType: models.MetricsType("invalid-type"), + }, + } + + _, err = repo.Save(catalogMetricsArtifact, savedCatalogModel.GetID()) + require.Error(t, err) + assert.Contains(t, err.Error(), "unknown metrics type") + }) +} + +// Helper function to get or create CatalogMetricsArtifact type ID +func getCatalogMetricsArtifactTypeID(t *testing.T, db *gorm.DB) int32 { + var typeRecord schema.Type + err := db.Where("name = ?", service.CatalogMetricsArtifactTypeName).First(&typeRecord).Error + if err != nil { + require.NoError(t, err, "Failed to query CatalogMetricsArtifact type") + } + + return typeRecord.ID +} diff --git a/catalog/internal/db/service/catalog_model.go b/catalog/internal/db/service/catalog_model.go new file mode 100644 index 0000000000..7be94889ca --- /dev/null +++ b/catalog/internal/db/service/catalog_model.go @@ -0,0 +1,422 @@ +package service + +import ( + "errors" + "fmt" + 
"strings" + + "github.com/golang/glog" + "github.com/kubeflow/model-registry/catalog/internal/db/filter" + "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/internal/db/dbutil" + dbmodels "github.com/kubeflow/model-registry/internal/db/models" + "github.com/kubeflow/model-registry/internal/db/schema" + "github.com/kubeflow/model-registry/internal/db/scopes" + "github.com/kubeflow/model-registry/internal/db/service" + "github.com/kubeflow/model-registry/internal/db/utils" + "gorm.io/gorm" +) + +var ErrCatalogModelNotFound = errors.New("catalog model by id not found") + +type CatalogModelRepositoryImpl struct { + *service.GenericRepository[models.CatalogModel, schema.Context, schema.ContextProperty, *models.CatalogModelListOptions] +} + +func NewCatalogModelRepository(db *gorm.DB, typeID int32) models.CatalogModelRepository { + r := &CatalogModelRepositoryImpl{} + + r.GenericRepository = service.NewGenericRepository(service.GenericRepositoryConfig[models.CatalogModel, schema.Context, schema.ContextProperty, *models.CatalogModelListOptions]{ + DB: db, + TypeID: typeID, + EntityToSchema: mapCatalogModelToContext, + SchemaToEntity: mapDataLayerToCatalogModel, + EntityToProperties: mapCatalogModelToContextProperties, + NotFoundError: ErrCatalogModelNotFound, + EntityName: "catalog model", + PropertyFieldName: "context_id", + ApplyListFilters: applyCatalogModelListFilters, + CreatePaginationToken: r.createPaginationToken, + ApplyCustomOrdering: r.applyCustomOrdering, + IsNewEntity: func(entity models.CatalogModel) bool { return entity.GetID() == nil }, + HasCustomProperties: func(entity models.CatalogModel) bool { return entity.GetCustomProperties() != nil }, + EntityMappingFuncs: filter.NewCatalogEntityMappings(), + PreserveHistoricalTimes: true, // Catalog preserves timestamps from YAML source data + }) + + return r +} + +func (r *CatalogModelRepositoryImpl) Save(model models.CatalogModel) (models.CatalogModel, error) 
{ + config := r.GetConfig() + if model.GetTypeID() == nil { + if config.TypeID > 0 { + model.SetTypeID(config.TypeID) + } + } + + attr := model.GetAttributes() + if model.GetID() == nil && attr != nil && attr.Name != nil { + existing, err := r.lookupModelByName(*attr.Name) + if err != nil { + if !errors.Is(err, ErrCatalogModelNotFound) { + return nil, fmt.Errorf("error finding existing model named %s: %w", *attr.Name, err) + } + } else { + model.SetID(existing.ID) + } + } + + return r.GenericRepository.Save(model, nil) +} + +// ApplyStandardPagination overrides the base implementation to use catalog-specific allowed columns +func (r *CatalogModelRepositoryImpl) ApplyStandardPagination(query *gorm.DB, listOptions *models.CatalogModelListOptions, entities any) *gorm.DB { + pageSize := listOptions.GetPageSize() + orderBy := listOptions.GetOrderBy() + sortOrder := listOptions.GetSortOrder() + nextPageToken := listOptions.GetNextPageToken() + + pagination := &dbmodels.Pagination{ + PageSize: &pageSize, + OrderBy: &orderBy, + SortOrder: &sortOrder, + NextPageToken: &nextPageToken, + } + + // Use catalog-specific allowed columns (includes NAME) + return query.Scopes(scopes.PaginateWithOptions(entities, pagination, r.GetConfig().DB, "Context", CatalogOrderByColumns)) +} + +func (r *CatalogModelRepositoryImpl) List(listOptions models.CatalogModelListOptions) (*dbmodels.ListWrapper[models.CatalogModel], error) { + return r.GenericRepository.List(&listOptions) +} + +func (r *CatalogModelRepositoryImpl) GetByName(name string) (models.CatalogModel, error) { + var zeroEntity models.CatalogModel + entity, err := r.lookupModelByName(name) + if err != nil { + return zeroEntity, err + } + + config := r.GetConfig() + + // Query properties + var properties []schema.ContextProperty + if err := config.DB.Where(config.PropertyFieldName+" = ?", entity.ID).Find(&properties).Error; err != nil { + // Sanitize database errors to avoid exposing internal details to users + err = 
dbutil.SanitizeDatabaseError(err) + return zeroEntity, fmt.Errorf("error getting properties by %s id: %w", config.EntityName, err) + } + + // Map to domain model + return config.SchemaToEntity(*entity, properties), nil +} + +func (r *CatalogModelRepositoryImpl) lookupModelByName(name string) (*schema.Context, error) { + var entity schema.Context + + config := r.GetConfig() + + if err := config.DB.Where("name = ? AND type_id = ?", name, config.TypeID).First(&entity).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return nil, fmt.Errorf("%w: %v", config.NotFoundError, err) + } + // Sanitize database errors to avoid exposing internal details to users + err = dbutil.SanitizeDatabaseError(err) + return nil, fmt.Errorf("error getting %s by name: %w", config.EntityName, err) + } + + return &entity, nil +} + +func applyCatalogModelListFilters(query *gorm.DB, listOptions *models.CatalogModelListOptions) *gorm.DB { + contextTable := utils.GetTableName(query.Statement.DB, &schema.Context{}) + + if listOptions.Name != nil { + query = query.Where(fmt.Sprintf("%s.name LIKE ?", contextTable), listOptions.Name) + } else if listOptions.ExternalID != nil { + query = query.Where(fmt.Sprintf("%s.external_id = ?", contextTable), listOptions.ExternalID) + } + + if listOptions.Query != nil && *listOptions.Query != "" { + queryPattern := fmt.Sprintf("%%%s%%", strings.ToLower(*listOptions.Query)) + propertyTable := utils.GetTableName(query.Statement.DB, &schema.ContextProperty{}) + + // Search in name (context table) + nameCondition := fmt.Sprintf("LOWER(%s.name) LIKE ?", contextTable) + + // Search in description, provider, libraryName properties + propertyCondition := fmt.Sprintf("EXISTS (SELECT 1 FROM %s cp WHERE cp.context_id = %s.id AND cp.name IN (?, ?, ?) 
AND LOWER(cp.string_value) LIKE ?)", + propertyTable, contextTable) + + // Search in tasks (assuming tasks are stored as comma-separated or multiple properties) + tasksCondition := fmt.Sprintf("EXISTS (SELECT 1 FROM %s cp WHERE cp.context_id = %s.id AND cp.name = ? AND LOWER(cp.string_value) LIKE ?)", + propertyTable, contextTable) + + query = query.Where(fmt.Sprintf("(%s OR %s OR %s)", nameCondition, propertyCondition, tasksCondition), + queryPattern, // for name + "description", "provider", "libraryName", queryPattern, // for properties + "tasks", queryPattern, // for tasks + ) + } + + // Filter out empty strings from SourceIDs, for some reason it's passed if no sources are specified + var nonEmptySourceIDs []string + if listOptions.SourceIDs != nil { + for _, sourceID := range *listOptions.SourceIDs { + if sourceID != "" { + nonEmptySourceIDs = append(nonEmptySourceIDs, sourceID) + } + } + } + + if len(nonEmptySourceIDs) > 0 { + propertyTable := utils.GetTableName(query.Statement.DB, &schema.ContextProperty{}) + + joinClause := fmt.Sprintf("JOIN %s cp ON cp.context_id = %s.id", propertyTable, contextTable) + query = query.Joins(joinClause). + Where("cp.name = ? 
AND cp.string_value IN ?", "source_id", nonEmptySourceIDs) + } + + return query +} + +func mapCatalogModelToContext(model models.CatalogModel) schema.Context { + attrs := model.GetAttributes() + context := schema.Context{} + + if typeID := model.GetTypeID(); typeID != nil { + context.TypeID = *typeID + } + + if model.GetID() != nil { + context.ID = *model.GetID() + } + + if attrs != nil { + if attrs.Name != nil { + context.Name = *attrs.Name + } + context.ExternalID = attrs.ExternalID + if attrs.CreateTimeSinceEpoch != nil { + context.CreateTimeSinceEpoch = *attrs.CreateTimeSinceEpoch + } + if attrs.LastUpdateTimeSinceEpoch != nil { + context.LastUpdateTimeSinceEpoch = *attrs.LastUpdateTimeSinceEpoch + } + } + + return context +} + +func mapCatalogModelToContextProperties(model models.CatalogModel, contextID int32) []schema.ContextProperty { + var properties []schema.ContextProperty + + if model.GetProperties() != nil { + for _, prop := range *model.GetProperties() { + properties = append(properties, service.MapPropertiesToContextProperty(prop, contextID, false)) + } + } + + if model.GetCustomProperties() != nil { + for _, prop := range *model.GetCustomProperties() { + properties = append(properties, service.MapPropertiesToContextProperty(prop, contextID, true)) + } + } + + return properties +} + +func mapDataLayerToCatalogModel(modelCtx schema.Context, propertiesCtx []schema.ContextProperty) models.CatalogModel { + catalogModel := &models.CatalogModelImpl{ + ID: &modelCtx.ID, + TypeID: &modelCtx.TypeID, + Attributes: &models.CatalogModelAttributes{ + Name: &modelCtx.Name, + ExternalID: modelCtx.ExternalID, + CreateTimeSinceEpoch: &modelCtx.CreateTimeSinceEpoch, + LastUpdateTimeSinceEpoch: &modelCtx.LastUpdateTimeSinceEpoch, + }, + } + + properties := []dbmodels.Properties{} + customProperties := []dbmodels.Properties{} + + for _, prop := range propertiesCtx { + mappedProperty := service.MapContextPropertyToProperties(prop) + + if prop.IsCustomProperty { + 
customProperties = append(customProperties, mappedProperty) + } else { + properties = append(properties, mappedProperty) + } + } + + catalogModel.Properties = &properties + catalogModel.CustomProperties = &customProperties + + return catalogModel +} + +// applyCustomOrdering applies custom ordering logic for non-standard orderBy field +func (r *CatalogModelRepositoryImpl) applyCustomOrdering(query *gorm.DB, listOptions *models.CatalogModelListOptions) *gorm.DB { + + db := r.GetConfig().DB + contextTable := utils.GetTableName(db, &schema.Context{}) + orderBy := listOptions.GetOrderBy() + + // Handle NAME ordering specially (catalog-specific) + if orderBy == "NAME" { + return ApplyNameOrdering(query, contextTable, listOptions.GetSortOrder(), listOptions.GetNextPageToken(), listOptions.GetPageSize()) + } + + subquery, sortColumn := r.sortValueQuery(listOptions, contextTable+".id") + if subquery == nil { + // Fall back to standard pagination with catalog-specific allowed columns + return r.ApplyStandardPagination(query, listOptions, []models.CatalogModel{}) + } + subquery = subquery.Group(contextTable + ".id") + + // Join the main query with the subquery + query = query. + Joins(fmt.Sprintf("LEFT JOIN (?) 
sort_value ON %s.id=sort_value.id", contextTable), subquery) + + // Apply sorting order + sortOrder := listOptions.GetSortOrder() + if sortOrder != "ASC" { + sortOrder = "DESC" + } + query = query.Order(fmt.Sprintf("sort_value.%s %s NULLS LAST, %s.id", sortColumn, sortOrder, contextTable)) + + // Handle cursor-based pagination with nextPageToken + nextPageToken := listOptions.GetNextPageToken() + if nextPageToken != "" { + // Parse the cursor from the token + if cursor, err := scopes.DecodeCursor(nextPageToken); err == nil { + // Apply WHERE clause for cursor-based pagination with ACCURACY + query = r.applyCursorPagination(query, cursor, sortColumn, sortOrder) + } + // If token parsing fails, fall back to no cursor (first page) + } + + // Apply pagination limit + pageSize := listOptions.GetPageSize() + if pageSize > 0 { + query = query.Limit(int(pageSize) + 1) // +1 to detect if there are more pages + } + + return query +} + +// applyCursorPagination applies WHERE clause for cursor-based pagination with ACCURACY sorting +func (r *CatalogModelRepositoryImpl) applyCursorPagination(query *gorm.DB, cursor *scopes.Cursor, sortColumn, sortOrder string) *gorm.DB { + contextTable := utils.GetTableName(query, &schema.Context{}) + + // Handle NULL values in cursor + if cursor.Value == "" { + // Items without the sort value will be sorted to the bottom, just use ID-based pagination. + return query.Where(fmt.Sprintf("sort_value.%s IS NULL AND %s.id > ?", sortColumn, contextTable), cursor.ID) + } + + cmp := "<" + if sortOrder == "ASC" { + cmp = ">" + } + + // Note that we sort ID ASCENDING as a tie-breaker, so ">" is correct below. + return query.Where(fmt.Sprintf("(sort_value.%s %s ? OR (sort_value.%s = ? AND %s.id > ?) 
OR sort_value.%s IS NULL)", sortColumn, cmp, sortColumn, contextTable, sortColumn), + cursor.Value, cursor.Value, cursor.ID) +} + +func (r *CatalogModelRepositoryImpl) createPaginationToken(lastItem schema.Context, listOptions *models.CatalogModelListOptions) string { + // Handle NAME ordering (catalog-specific) + if listOptions.GetOrderBy() == "NAME" { + return CreateNamePaginationToken(lastItem.ID, &lastItem.Name) + } + + sortValueQuery, column := r.sortValueQuery(listOptions) + if sortValueQuery != nil { + contextTable := utils.GetTableName(r.GetConfig().DB, &schema.Context{}) + sortValueQuery = sortValueQuery.Where(contextTable+".id=?", lastItem.ID) + + var result struct { + IntValue *int64 `gorm:"int_value"` + DoubleValue *float64 `gorm:"double_value"` + StringValue *string `gorm:"string_value"` + } + err := sortValueQuery.Scan(&result).Error + if err != nil { + glog.Warningf("Failed to get sort value: %v", err) + } else { + switch column { + case "int_value": + return scopes.CreateNextPageToken(lastItem.ID, result.IntValue) + case "double_value": + return scopes.CreateNextPageToken(lastItem.ID, result.DoubleValue) + case "string_value": + fallthrough + default: + return scopes.CreateNextPageToken(lastItem.ID, result.StringValue) + } + } + } + + return r.CreateDefaultPaginationToken(lastItem, listOptions) +} + +// sortValueQuery returns a query that will produce the value to sort on for +// the List response. The returned string is the column name. +// +// If the sort does not require a subquery, sortValueQuery returns nil. +func (r *CatalogModelRepositoryImpl) sortValueQuery(listOptions *models.CatalogModelListOptions, extraColumns ...any) (*gorm.DB, string) { + db := r.GetConfig().DB + contextTable := utils.GetTableName(db, &schema.Context{}) + + query := db.Table(contextTable). 
+ Where(contextTable+".type_id=?", r.GetConfig().TypeID) + + orderBy := strings.Split(listOptions.GetOrderBy(), ".") + + var valueColumn string + + switch { + case len(orderBy) == 3 && orderBy[0] == "artifacts": + // artifacts.. e.g. artifacts.ttft_p90.double_value + + attributionTable := utils.GetTableName(db, &schema.Attribution{}) + propertyTable := utils.GetTableName(db, &schema.ArtifactProperty{}) + + aggFn := "max" + if listOptions.GetSortOrder() == "ASC" { + aggFn = "min" + } + valueColumn = orderBy[2] + + query = query. + Select(fmt.Sprintf("%s(%s.%s) AS %s", aggFn, propertyTable, valueColumn, valueColumn), extraColumns...). + Joins(fmt.Sprintf("LEFT JOIN %s ON %s.id=%s.context_id", attributionTable, contextTable, attributionTable)). + Joins(fmt.Sprintf("LEFT JOIN %s ON %s.artifact_id=%s.artifact_id AND %s.name=?", propertyTable, attributionTable, propertyTable, propertyTable), orderBy[1]) + case len(orderBy) == 2: + // . e.g. provider.string_value + propertyTable := utils.GetTableName(db, &schema.ContextProperty{}) + valueColumn = orderBy[1] + query = query. + Select(fmt.Sprintf("max(%s.%s) AS %s", propertyTable, valueColumn, valueColumn), extraColumns...). + Joins(fmt.Sprintf("LEFT JOIN %s ON %s.id=%s.context_id AND %s.name=?", propertyTable, contextTable, propertyTable, propertyTable), orderBy[0]) + default: + // Standard sort will work + return nil, "" + } + + // The query is built, but verify that the value column is valid before + // returning it. 
+ switch valueColumn { + case "int_value", "double_value", "string_value": + // OK + default: + return nil, "" + } + + return query, valueColumn +} diff --git a/catalog/internal/db/service/catalog_model_artifact.go b/catalog/internal/db/service/catalog_model_artifact.go new file mode 100644 index 0000000000..cf517f4285 --- /dev/null +++ b/catalog/internal/db/service/catalog_model_artifact.go @@ -0,0 +1,172 @@ +package service + +import ( + "errors" + "fmt" + + "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/internal/apiutils" + dbmodels "github.com/kubeflow/model-registry/internal/db/models" + "github.com/kubeflow/model-registry/internal/db/schema" + "github.com/kubeflow/model-registry/internal/db/service" + "github.com/kubeflow/model-registry/internal/db/utils" + "gorm.io/gorm" +) + +var ErrCatalogModelArtifactNotFound = errors.New("catalog model artifact by id not found") + +type CatalogModelArtifactRepositoryImpl struct { + *service.GenericRepository[models.CatalogModelArtifact, schema.Artifact, schema.ArtifactProperty, *models.CatalogModelArtifactListOptions] +} + +func NewCatalogModelArtifactRepository(db *gorm.DB, typeID int32) models.CatalogModelArtifactRepository { + config := service.GenericRepositoryConfig[models.CatalogModelArtifact, schema.Artifact, schema.ArtifactProperty, *models.CatalogModelArtifactListOptions]{ + DB: db, + TypeID: typeID, + EntityToSchema: mapCatalogModelArtifactToArtifact, + SchemaToEntity: mapDataLayerToCatalogModelArtifact, + EntityToProperties: mapCatalogModelArtifactToArtifactProperties, + NotFoundError: ErrCatalogModelArtifactNotFound, + EntityName: "catalog model artifact", + PropertyFieldName: "artifact_id", + ApplyListFilters: applyCatalogModelArtifactListFilters, + IsNewEntity: func(entity models.CatalogModelArtifact) bool { return entity.GetID() == nil }, + HasCustomProperties: func(entity models.CatalogModelArtifact) bool { return entity.GetCustomProperties() != nil 
}, + PreserveHistoricalTimes: true, // Catalog preserves timestamps from YAML source data + } + + return &CatalogModelArtifactRepositoryImpl{ + GenericRepository: service.NewGenericRepository(config), + } +} + +func (r *CatalogModelArtifactRepositoryImpl) Save(modelArtifact models.CatalogModelArtifact, parentResourceID *int32) (models.CatalogModelArtifact, error) { + config := r.GetConfig() + if modelArtifact.GetTypeID() == nil { + if config.TypeID > 0 { + modelArtifact.SetTypeID(config.TypeID) + } + } + + attr := modelArtifact.GetAttributes() + if modelArtifact.GetID() == nil && attr != nil && attr.Name != nil { + existing, err := r.lookupModelArtifactByName(*attr.Name) + if err != nil { + if !errors.Is(err, ErrCatalogModelArtifactNotFound) { + return nil, fmt.Errorf("error finding existing model artifact named %s: %w", *attr.Name, err) + } + } else { + modelArtifact.SetID(existing.ID) + } + } + + return r.GenericRepository.Save(modelArtifact, parentResourceID) +} + +func (r *CatalogModelArtifactRepositoryImpl) List(listOptions models.CatalogModelArtifactListOptions) (*dbmodels.ListWrapper[models.CatalogModelArtifact], error) { + return r.GenericRepository.List(&listOptions) +} + +func (r *CatalogModelArtifactRepositoryImpl) lookupModelArtifactByName(name string) (*schema.Artifact, error) { + var entity schema.Artifact + + config := r.GetConfig() + + if err := config.DB.Where("name = ? 
AND type_id = ?", name, config.TypeID).First(&entity).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return nil, fmt.Errorf("%w: %v", config.NotFoundError, err) + } + return nil, fmt.Errorf("error getting %s by name: %w", config.EntityName, err) + } + + return &entity, nil +} + +func applyCatalogModelArtifactListFilters(query *gorm.DB, listOptions *models.CatalogModelArtifactListOptions) *gorm.DB { + if listOptions.Name != nil { + query = query.Where("name LIKE ?", fmt.Sprintf("%%:%s", *listOptions.Name)) + } else if listOptions.ExternalID != nil { + query = query.Where("external_id = ?", listOptions.ExternalID) + } + + if listOptions.ParentResourceID != nil { + query = query.Joins(utils.BuildAttributionJoin(query)). + Where(utils.GetColumnRef(query, &schema.Attribution{}, "context_id")+" = ?", listOptions.ParentResourceID) + } + + return query +} + +func mapCatalogModelArtifactToArtifact(catalogModelArtifact models.CatalogModelArtifact) schema.Artifact { + if catalogModelArtifact == nil { + return schema.Artifact{} + } + + artifact := schema.Artifact{ + ID: apiutils.ZeroIfNil(catalogModelArtifact.GetID()), + TypeID: apiutils.ZeroIfNil(catalogModelArtifact.GetTypeID()), + } + + if catalogModelArtifact.GetAttributes() != nil { + artifact.Name = catalogModelArtifact.GetAttributes().Name + artifact.URI = catalogModelArtifact.GetAttributes().URI + artifact.ExternalID = catalogModelArtifact.GetAttributes().ExternalID + artifact.CreateTimeSinceEpoch = apiutils.ZeroIfNil(catalogModelArtifact.GetAttributes().CreateTimeSinceEpoch) + artifact.LastUpdateTimeSinceEpoch = apiutils.ZeroIfNil(catalogModelArtifact.GetAttributes().LastUpdateTimeSinceEpoch) + } + + return artifact +} + +func mapCatalogModelArtifactToArtifactProperties(catalogModelArtifact models.CatalogModelArtifact, artifactID int32) []schema.ArtifactProperty { + if catalogModelArtifact == nil { + return []schema.ArtifactProperty{} + } + + properties := []schema.ArtifactProperty{} + + if 
catalogModelArtifact.GetProperties() != nil { + for _, prop := range *catalogModelArtifact.GetProperties() { + properties = append(properties, service.MapPropertiesToArtifactProperty(prop, artifactID, false)) + } + } + + if catalogModelArtifact.GetCustomProperties() != nil { + for _, prop := range *catalogModelArtifact.GetCustomProperties() { + properties = append(properties, service.MapPropertiesToArtifactProperty(prop, artifactID, true)) + } + } + + return properties +} + +func mapDataLayerToCatalogModelArtifact(artifact schema.Artifact, artProperties []schema.ArtifactProperty) models.CatalogModelArtifact { + catalogModelArtifact := models.CatalogModelArtifactImpl{ + ID: &artifact.ID, + TypeID: &artifact.TypeID, + Attributes: &models.CatalogModelArtifactAttributes{ + Name: artifact.Name, + URI: artifact.URI, + ArtifactType: apiutils.Of(models.CatalogModelArtifactType), + ExternalID: artifact.ExternalID, + CreateTimeSinceEpoch: &artifact.CreateTimeSinceEpoch, + LastUpdateTimeSinceEpoch: &artifact.LastUpdateTimeSinceEpoch, + }, + } + + customProperties := []dbmodels.Properties{} + properties := []dbmodels.Properties{} + + for _, prop := range artProperties { + if prop.IsCustomProperty { + customProperties = append(customProperties, service.MapArtifactPropertyToProperties(prop)) + } else { + properties = append(properties, service.MapArtifactPropertyToProperties(prop)) + } + } + + catalogModelArtifact.CustomProperties = &customProperties + catalogModelArtifact.Properties = &properties + + return &catalogModelArtifact +} diff --git a/catalog/internal/db/service/catalog_model_artifact_test.go b/catalog/internal/db/service/catalog_model_artifact_test.go new file mode 100644 index 0000000000..1f8ac0a531 --- /dev/null +++ b/catalog/internal/db/service/catalog_model_artifact_test.go @@ -0,0 +1,642 @@ +package service_test + +import ( + "fmt" + "testing" + "time" + + "github.com/kubeflow/model-registry/catalog/internal/db/models" + 
"github.com/kubeflow/model-registry/catalog/internal/db/service" + "github.com/kubeflow/model-registry/internal/apiutils" + dbmodels "github.com/kubeflow/model-registry/internal/db/models" + "github.com/kubeflow/model-registry/internal/db/schema" + "github.com/kubeflow/model-registry/internal/testutils" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gorm.io/gorm" +) + +func TestCatalogModelArtifactRepository(t *testing.T) { + sharedDB, cleanup := testutils.SetupPostgresWithMigrations(t, service.DatastoreSpec()) + defer cleanup() + + // Get the CatalogModelArtifact type ID + typeID := getCatalogModelArtifactTypeID(t, sharedDB) + repo := service.NewCatalogModelArtifactRepository(sharedDB, typeID) + + // Also get CatalogModel type ID for creating parent entities + catalogModelTypeID := getCatalogModelTypeID(t, sharedDB) + catalogModelRepo := service.NewCatalogModelRepository(sharedDB, catalogModelTypeID) + + t.Run("TestSave", func(t *testing.T) { + // First create a catalog model for attribution + catalogModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-artifact"), + ExternalID: apiutils.Of("catalog-model-ext-123"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Test creating a new catalog model artifact + catalogModelArtifact := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(typeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("test-catalog-model-artifact"), + ExternalID: apiutils.Of("catalog-artifact-ext-123"), + URI: apiutils.Of("s3://catalog-bucket/model.pkl"), + }, + Properties: &[]dbmodels.Properties{ + { + Name: "description", + StringValue: apiutils.Of("Test catalog model artifact description"), + }, + }, + CustomProperties: &[]dbmodels.Properties{ + { + Name: "custom-catalog-prop", + StringValue: 
apiutils.Of("custom-catalog-value"), + }, + }, + } + + saved, err := repo.Save(catalogModelArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved) + require.NotNil(t, saved.GetID()) + assert.Equal(t, "test-catalog-model-artifact", *saved.GetAttributes().Name) + assert.Equal(t, "catalog-artifact-ext-123", *saved.GetAttributes().ExternalID) + assert.Equal(t, "s3://catalog-bucket/model.pkl", *saved.GetAttributes().URI) + + // Test updating the same catalog model artifact + catalogModelArtifact.ID = saved.GetID() + catalogModelArtifact.GetAttributes().Name = apiutils.Of("updated-catalog-model-artifact") + catalogModelArtifact.GetAttributes().URI = apiutils.Of("s3://catalog-bucket/updated-model.pkl") + // Preserve CreateTimeSinceEpoch from the saved entity + catalogModelArtifact.GetAttributes().CreateTimeSinceEpoch = saved.GetAttributes().CreateTimeSinceEpoch + + updated, err := repo.Save(catalogModelArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, updated) + assert.Equal(t, *saved.GetID(), *updated.GetID()) + assert.Equal(t, "updated-catalog-model-artifact", *updated.GetAttributes().Name) + assert.Equal(t, "s3://catalog-bucket/updated-model.pkl", *updated.GetAttributes().URI) + }) + + t.Run("TestGetByID", func(t *testing.T) { + // First create a catalog model + catalogModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-getbyid"), + ExternalID: apiutils.Of("catalog-model-getbyid-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Create a catalog model artifact to retrieve + catalogModelArtifact := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(typeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("get-test-catalog-model-artifact"), + ExternalID: 
apiutils.Of("get-catalog-artifact-ext-123"), + URI: apiutils.Of("s3://catalog-bucket/get-model.pkl"), + }, + } + + saved, err := repo.Save(catalogModelArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved.GetID()) + + // Test retrieving by ID + retrieved, err := repo.GetByID(*saved.GetID()) + require.NoError(t, err) + require.NotNil(t, retrieved) + assert.Equal(t, *saved.GetID(), *retrieved.GetID()) + assert.Equal(t, "get-test-catalog-model-artifact", *retrieved.GetAttributes().Name) + assert.Equal(t, "get-catalog-artifact-ext-123", *retrieved.GetAttributes().ExternalID) + assert.Equal(t, "s3://catalog-bucket/get-model.pkl", *retrieved.GetAttributes().URI) + + // Test retrieving non-existent ID + _, err = repo.GetByID(99999) + assert.ErrorIs(t, err, service.ErrCatalogModelArtifactNotFound) + }) + + t.Run("TestList", func(t *testing.T) { + // Create a catalog model for the artifacts + catalogModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-list"), + ExternalID: apiutils.Of("catalog-model-list-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Create multiple catalog model artifacts for listing + testArtifacts := []*models.CatalogModelArtifactImpl{ + { + TypeID: apiutils.Of(int32(typeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("list-catalog-artifact-1"), + ExternalID: apiutils.Of("list-catalog-artifact-ext-1"), + URI: apiutils.Of("s3://catalog-bucket/list-model-1.pkl"), + }, + }, + { + TypeID: apiutils.Of(int32(typeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("list-catalog-artifact-2"), + ExternalID: apiutils.Of("list-catalog-artifact-ext-2"), + URI: apiutils.Of("s3://catalog-bucket/list-model-2.pkl"), + }, + }, + { + TypeID: apiutils.Of(int32(typeID)), + Attributes: 
&models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("list-catalog-artifact-3"), + ExternalID: apiutils.Of("list-catalog-artifact-ext-3"), + URI: apiutils.Of("s3://catalog-bucket/list-model-3.pkl"), + }, + }, + } + + // Save all test artifacts + var savedArtifacts []models.CatalogModelArtifact + for _, artifact := range testArtifacts { + saved, err := repo.Save(artifact, savedCatalogModel.GetID()) + require.NoError(t, err) + savedArtifacts = append(savedArtifacts, saved) + } + + // Test listing all artifacts + listOptions := models.CatalogModelArtifactListOptions{} + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + assert.GreaterOrEqual(t, len(result.Items), 3) // At least our 3 test artifacts + + // Test filtering by name + nameFilter := "list-catalog-artifact-1" + listOptions = models.CatalogModelArtifactListOptions{ + Name: &nameFilter, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + if len(result.Items) > 0 { + assert.Equal(t, 1, len(result.Items)) + assert.Equal(t, "list-catalog-artifact-1", *result.Items[0].GetAttributes().Name) + } + + // Test filtering by external ID + externalIDFilter := "list-catalog-artifact-ext-2" + listOptions = models.CatalogModelArtifactListOptions{ + ExternalID: &externalIDFilter, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + if len(result.Items) > 0 { + assert.Equal(t, 1, len(result.Items)) + assert.Equal(t, "list-catalog-artifact-ext-2", *result.Items[0].GetAttributes().ExternalID) + } + + // Test filtering by parent resource ID (catalog model) + listOptions = models.CatalogModelArtifactListOptions{ + ParentResourceID: savedCatalogModel.GetID(), + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + assert.GreaterOrEqual(t, len(result.Items), 3) // Should find our 3 test artifacts + }) + + 
t.Run("TestListWithPropertiesAndCustomProperties", func(t *testing.T) { + // Create a catalog model + catalogModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-props"), + ExternalID: apiutils.Of("catalog-model-props-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Create a catalog model artifact with both properties and custom properties + catalogModelArtifact := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(typeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("props-test-catalog-artifact"), + ExternalID: apiutils.Of("props-catalog-artifact-ext-123"), + URI: apiutils.Of("s3://catalog-bucket/props-model.pkl"), + }, + Properties: &[]dbmodels.Properties{ + { + Name: "version", + StringValue: apiutils.Of("1.0.0"), + }, + { + Name: "size_bytes", + IntValue: apiutils.Of(int32(2048000)), + }, + }, + CustomProperties: &[]dbmodels.Properties{ + { + Name: "team", + StringValue: apiutils.Of("catalog-ml-team"), + }, + { + Name: "is_public", + BoolValue: apiutils.Of(true), + }, + }, + } + + saved, err := repo.Save(catalogModelArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved) + + // Retrieve and verify properties + retrieved, err := repo.GetByID(*saved.GetID()) + require.NoError(t, err) + require.NotNil(t, retrieved) + + // Check regular properties + require.NotNil(t, retrieved.GetProperties()) + assert.Len(t, *retrieved.GetProperties(), 2) + + // Check custom properties + require.NotNil(t, retrieved.GetCustomProperties()) + assert.Len(t, *retrieved.GetCustomProperties(), 2) + + // Verify specific properties exist + properties := *retrieved.GetProperties() + var foundVersion, foundSizeBytes bool + for _, prop := range properties { + switch prop.Name { + case "version": + foundVersion = true + assert.Equal(t, 
"1.0.0", *prop.StringValue) + case "size_bytes": + foundSizeBytes = true + assert.Equal(t, int32(2048000), *prop.IntValue) + } + } + assert.True(t, foundVersion, "Should find version property") + assert.True(t, foundSizeBytes, "Should find size_bytes property") + + // Verify custom properties + customProperties := *retrieved.GetCustomProperties() + var foundTeam, foundIsPublic bool + for _, prop := range customProperties { + switch prop.Name { + case "team": + foundTeam = true + assert.Equal(t, "catalog-ml-team", *prop.StringValue) + case "is_public": + foundIsPublic = true + assert.Equal(t, true, *prop.BoolValue) + } + } + assert.True(t, foundTeam, "Should find team custom property") + assert.True(t, foundIsPublic, "Should find is_public custom property") + }) + + t.Run("TestSaveWithoutParentResource", func(t *testing.T) { + // Test creating a catalog model artifact without parent resource attribution + catalogModelArtifact := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(typeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("standalone-catalog-artifact"), + ExternalID: apiutils.Of("standalone-catalog-artifact-ext"), + URI: apiutils.Of("s3://catalog-bucket/standalone-model.pkl"), + }, + Properties: &[]dbmodels.Properties{ + { + Name: "description", + StringValue: apiutils.Of("Standalone catalog artifact without parent"), + }, + }, + } + + saved, err := repo.Save(catalogModelArtifact, nil) + require.NoError(t, err) + require.NotNil(t, saved) + require.NotNil(t, saved.GetID()) + assert.Equal(t, "standalone-catalog-artifact", *saved.GetAttributes().Name) + assert.Equal(t, "s3://catalog-bucket/standalone-model.pkl", *saved.GetAttributes().URI) + + // Verify it can be retrieved + retrieved, err := repo.GetByID(*saved.GetID()) + require.NoError(t, err) + assert.Equal(t, "standalone-catalog-artifact", *retrieved.GetAttributes().Name) + }) + + t.Run("TestListOrdering", func(t *testing.T) { + // Create a catalog model + 
catalogModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-ordering"), + ExternalID: apiutils.Of("catalog-model-ordering-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Create artifacts sequentially with time delays to ensure deterministic ordering + artifact1 := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(typeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("time-test-catalog-artifact-1"), + ExternalID: apiutils.Of("time-catalog-artifact-ext-1"), + URI: apiutils.Of("s3://catalog-bucket/time-model-1.pkl"), + }, + } + saved1, err := repo.Save(artifact1, savedCatalogModel.GetID()) + require.NoError(t, err) + + // Small delay to ensure different timestamps + time.Sleep(10 * time.Millisecond) + + artifact2 := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(typeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("time-test-catalog-artifact-2"), + ExternalID: apiutils.Of("time-catalog-artifact-ext-2"), + URI: apiutils.Of("s3://catalog-bucket/time-model-2.pkl"), + }, + } + saved2, err := repo.Save(artifact2, savedCatalogModel.GetID()) + require.NoError(t, err) + + // Test ordering by CREATE_TIME + listOptions := models.CatalogModelArtifactListOptions{ + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("CREATE_TIME"), + }, + } + + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Find our test artifacts in the results + var foundArtifact1, foundArtifact2 models.CatalogModelArtifact + var index1, index2 = -1, -1 + + for i, item := range result.Items { + if *item.GetID() == *saved1.GetID() { + foundArtifact1 = item + index1 = i + } + if *item.GetID() == *saved2.GetID() { + foundArtifact2 = item + index2 = i + } + } + + // Verify both artifacts 
were found and artifact1 comes before artifact2 (ascending order) + require.NotEqual(t, -1, index1, "Artifact 1 should be found in results") + require.NotEqual(t, -1, index2, "Artifact 2 should be found in results") + assert.Less(t, index1, index2, "Artifact 1 should come before Artifact 2 when ordered by CREATE_TIME") + assert.Less(t, *foundArtifact1.GetAttributes().CreateTimeSinceEpoch, *foundArtifact2.GetAttributes().CreateTimeSinceEpoch, "Artifact 1 should have earlier create time") + }) + + t.Run("TestListPagination", func(t *testing.T) { + // Create a catalog model + catalogModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-pagination"), + ExternalID: apiutils.Of("catalog-model-pagination-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Create multiple artifacts for pagination testing + for i := 0; i < 5; i++ { + artifact := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(typeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of(fmt.Sprintf("pagination-artifact-%d", i)), + ExternalID: apiutils.Of(fmt.Sprintf("pagination-artifact-ext-%d", i)), + URI: apiutils.Of(fmt.Sprintf("s3://catalog-bucket/pagination-model-%d.pkl", i)), + }, + } + _, err := repo.Save(artifact, savedCatalogModel.GetID()) + require.NoError(t, err) + } + + // Test pagination with page size + pageSize := int32(2) + listOptions := models.CatalogModelArtifactListOptions{ + ParentResourceID: savedCatalogModel.GetID(), + Pagination: dbmodels.Pagination{ + PageSize: &pageSize, + OrderBy: apiutils.Of("ID"), + }, + } + + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + assert.LessOrEqual(t, len(result.Items), 2, "Should respect page size limit") + assert.GreaterOrEqual(t, len(result.Items), 1, "Should return at least one item") + 
}) + + t.Run("TestSaveWithTypeIDSetting", func(t *testing.T) { + // Create a catalog model + catalogModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-typeid"), + ExternalID: apiutils.Of("catalog-model-typeid-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Test creating artifact without explicit type_id (should be set automatically) + catalogModelArtifact := &models.CatalogModelArtifactImpl{ + // Intentionally not setting TypeID to test auto-setting + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("typeid-test-artifact"), + ExternalID: apiutils.Of("typeid-artifact-ext-123"), + URI: apiutils.Of("s3://catalog-bucket/typeid-model.pkl"), + }, + } + + saved, err := repo.Save(catalogModelArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved) + require.NotNil(t, saved.GetTypeID()) + assert.Equal(t, int32(typeID), *saved.GetTypeID()) + assert.Equal(t, "typeid-test-artifact", *saved.GetAttributes().Name) + + // Test with explicitly set type_id (should not be overridden) + explicitTypeID := int32(typeID) + catalogModelArtifact2 := &models.CatalogModelArtifactImpl{ + TypeID: &explicitTypeID, + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("explicit-typeid-artifact"), + ExternalID: apiutils.Of("explicit-typeid-ext-123"), + URI: apiutils.Of("s3://catalog-bucket/explicit-model.pkl"), + }, + } + + saved2, err := repo.Save(catalogModelArtifact2, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved2) + require.NotNil(t, saved2.GetTypeID()) + assert.Equal(t, explicitTypeID, *saved2.GetTypeID()) + }) + + t.Run("TestSaveWithNameMatching", func(t *testing.T) { + // Create a catalog model + catalogModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + 
Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-name-matching"), + ExternalID: apiutils.Of("catalog-model-name-match-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Create initial artifact + artifactName := "name-matching-artifact" + catalogModelArtifact1 := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(typeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of(artifactName), + ExternalID: apiutils.Of("name-match-ext-123"), + URI: apiutils.Of("s3://catalog-bucket/original.pkl"), + }, + Properties: &[]dbmodels.Properties{ + { + Name: "version", + StringValue: apiutils.Of("1.0.0"), + }, + }, + } + + saved1, err := repo.Save(catalogModelArtifact1, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved1) + originalID := *saved1.GetID() + assert.Equal(t, artifactName, *saved1.GetAttributes().Name) + assert.Equal(t, "s3://catalog-bucket/original.pkl", *saved1.GetAttributes().URI) + + // Create second artifact with same name (should update existing) + catalogModelArtifact2 := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(typeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of(artifactName), // Same name + ExternalID: apiutils.Of("name-match-ext-456"), + URI: apiutils.Of("s3://catalog-bucket/updated.pkl"), + }, + Properties: &[]dbmodels.Properties{ + { + Name: "version", + StringValue: apiutils.Of("2.0.0"), + }, + }, + } + + saved2, err := repo.Save(catalogModelArtifact2, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved2) + + // Should have same ID (updated existing) + assert.Equal(t, originalID, *saved2.GetID()) + assert.Equal(t, artifactName, *saved2.GetAttributes().Name) + assert.Equal(t, "s3://catalog-bucket/updated.pkl", *saved2.GetAttributes().URI) + assert.Equal(t, "name-match-ext-456", *saved2.GetAttributes().ExternalID) + + 
// Verify by retrieving from database + retrieved, err := repo.GetByID(originalID) + require.NoError(t, err) + assert.Equal(t, "s3://catalog-bucket/updated.pkl", *retrieved.GetAttributes().URI) + assert.Equal(t, "name-match-ext-456", *retrieved.GetAttributes().ExternalID) + + // Verify properties were updated + require.NotNil(t, retrieved.GetProperties()) + properties := *retrieved.GetProperties() + var foundVersion bool + for _, prop := range properties { + if prop.Name == "version" { + foundVersion = true + assert.Equal(t, "2.0.0", *prop.StringValue) + break + } + } + assert.True(t, foundVersion, "Should find updated version property") + + // Test that artifact with different name creates new entity + catalogModelArtifact3 := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(typeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("different-name-artifact"), + ExternalID: apiutils.Of("different-name-ext-789"), + URI: apiutils.Of("s3://catalog-bucket/different.pkl"), + }, + } + + saved3, err := repo.Save(catalogModelArtifact3, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved3) + + // Should have different ID (new entity) + assert.NotEqual(t, originalID, *saved3.GetID()) + assert.Equal(t, "different-name-artifact", *saved3.GetAttributes().Name) + }) + + t.Run("TestSaveWithNameMatchingNoExistingName", func(t *testing.T) { + // Create a catalog model + catalogModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model-for-no-match"), + ExternalID: apiutils.Of("catalog-model-no-match-ext"), + }, + } + savedCatalogModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Test saving artifact when no existing artifact with same name exists + catalogModelArtifact := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(typeID)), + Attributes: 
&models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("unique-artifact-name"), + ExternalID: apiutils.Of("unique-ext-123"), + URI: apiutils.Of("s3://catalog-bucket/unique.pkl"), + }, + } + + saved, err := repo.Save(catalogModelArtifact, savedCatalogModel.GetID()) + require.NoError(t, err) + require.NotNil(t, saved) + require.NotNil(t, saved.GetID()) + assert.Equal(t, "unique-artifact-name", *saved.GetAttributes().Name) + assert.Equal(t, "s3://catalog-bucket/unique.pkl", *saved.GetAttributes().URI) + }) +} + +// Helper function to get the CatalogModelArtifact type ID (fails the test if the type row does not exist) +func getCatalogModelArtifactTypeID(t *testing.T, db *gorm.DB) int32 { + var typeRecord schema.Type + err := db.Where("name = ?", service.CatalogModelArtifactTypeName).First(&typeRecord).Error + if err != nil { + require.NoError(t, err, "Failed to query CatalogModelArtifact type") + } + + return typeRecord.ID +} diff --git a/catalog/internal/db/service/catalog_model_test.go b/catalog/internal/db/service/catalog_model_test.go new file mode 100644 index 0000000000..80070f6dfd --- /dev/null +++ b/catalog/internal/db/service/catalog_model_test.go @@ -0,0 +1,982 @@ +package service_test + +import ( + "fmt" + "strings" + "testing" + + "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/catalog/internal/db/service" + "github.com/kubeflow/model-registry/internal/apiutils" + dbmodels "github.com/kubeflow/model-registry/internal/db/models" + "github.com/kubeflow/model-registry/internal/db/schema" + "github.com/kubeflow/model-registry/internal/testutils" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gorm.io/gorm" +) + +func TestCatalogModelRepository(t *testing.T) { + sharedDB, cleanup := testutils.SetupPostgresWithMigrations(t, service.DatastoreSpec()) + defer cleanup() + + // Create or get the CatalogModel type ID + typeID := getCatalogModelTypeID(t, sharedDB) + repo := service.NewCatalogModelRepository(sharedDB, 
typeID) + + t.Run("TestSave", func(t *testing.T) { + // Test creating a new catalog model + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-catalog-model"), + ExternalID: apiutils.Of("catalog-ext-123"), + }, + Properties: &[]dbmodels.Properties{ + { + Name: "description", + StringValue: apiutils.Of("Test catalog model description"), + }, + }, + CustomProperties: &[]dbmodels.Properties{ + { + Name: "custom-prop", + StringValue: apiutils.Of("custom-value"), + }, + }, + } + + saved, err := repo.Save(catalogModel) + require.NoError(t, err) + require.NotNil(t, saved) + require.NotNil(t, saved.GetID()) + assert.Equal(t, "test-catalog-model", *saved.GetAttributes().Name) + assert.Equal(t, "catalog-ext-123", *saved.GetAttributes().ExternalID) + + // Test updating the same model + catalogModel.ID = saved.GetID() + catalogModel.GetAttributes().Name = apiutils.Of("updated-catalog-model") + // Preserve CreateTimeSinceEpoch from the saved entity + catalogModel.GetAttributes().CreateTimeSinceEpoch = saved.GetAttributes().CreateTimeSinceEpoch + + updated, err := repo.Save(catalogModel) + require.NoError(t, err) + require.NotNil(t, updated) + assert.Equal(t, *saved.GetID(), *updated.GetID()) + assert.Equal(t, "updated-catalog-model", *updated.GetAttributes().Name) + }) + + t.Run("TestGetByID", func(t *testing.T) { + // First create a model to retrieve + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("get-test-catalog-model"), + ExternalID: apiutils.Of("get-catalog-ext-123"), + }, + } + + saved, err := repo.Save(catalogModel) + require.NoError(t, err) + require.NotNil(t, saved.GetID()) + + // Test retrieving by ID + retrieved, err := repo.GetByID(*saved.GetID()) + require.NoError(t, err) + require.NotNil(t, retrieved) + assert.Equal(t, *saved.GetID(), *retrieved.GetID()) + assert.Equal(t, "get-test-catalog-model", *retrieved.GetAttributes().Name) + 
assert.Equal(t, "get-catalog-ext-123", *retrieved.GetAttributes().ExternalID) + + // Test retrieving non-existent ID + _, err = repo.GetByID(99999) + assert.ErrorIs(t, err, service.ErrCatalogModelNotFound) + }) + + t.Run("TestList", func(t *testing.T) { + // Create multiple models for listing + testModels := []*models.CatalogModelImpl{ + { + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("list-catalog-model-1"), + ExternalID: apiutils.Of("list-catalog-ext-1"), + }, + }, + { + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("list-catalog-model-2"), + ExternalID: apiutils.Of("list-catalog-ext-2"), + }, + }, + } + + // Save all test models + var savedModels []models.CatalogModel + for _, model := range testModels { + saved, err := repo.Save(model) + require.NoError(t, err) + savedModels = append(savedModels, saved) + } + + // Test listing all models + listOptions := models.CatalogModelListOptions{} + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + assert.GreaterOrEqual(t, len(result.Items), 2) // At least our 2 test models + + // Test filtering by name + nameFilter := "list-catalog-model-1" + listOptions = models.CatalogModelListOptions{ + Name: &nameFilter, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + assert.Equal(t, 1, len(result.Items)) + assert.Equal(t, "list-catalog-model-1", *result.Items[0].GetAttributes().Name) + + // Test filtering by external ID + externalIDFilter := "list-catalog-ext-2" + listOptions = models.CatalogModelListOptions{ + ExternalID: &externalIDFilter, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + assert.Equal(t, 1, len(result.Items)) + assert.Equal(t, "list-catalog-ext-2", *result.Items[0].GetAttributes().ExternalID) + }) + + t.Run("TestGetByName", func(t *testing.T) { + // First create a model to retrieve by name + catalogModel := &models.CatalogModelImpl{ + 
Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("get-by-name-test-model"), + ExternalID: apiutils.Of("get-by-name-ext-123"), + }, + } + + saved, err := repo.Save(catalogModel) + require.NoError(t, err) + require.NotNil(t, saved.GetID()) + + // Test retrieving by name + retrieved, err := repo.GetByName("get-by-name-test-model") + require.NoError(t, err) + require.NotNil(t, retrieved) + assert.Equal(t, *saved.GetID(), *retrieved.GetID()) + assert.Equal(t, "get-by-name-test-model", *retrieved.GetAttributes().Name) + assert.Equal(t, "get-by-name-ext-123", *retrieved.GetAttributes().ExternalID) + + // Test retrieving non-existent name + _, err = repo.GetByName("non-existent-model") + assert.ErrorIs(t, err, service.ErrCatalogModelNotFound) + }) + + t.Run("TestUpdateWithID", func(t *testing.T) { + // First create a model + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("update-test-model"), + ExternalID: apiutils.Of("update-ext-123"), + }, + Properties: &[]dbmodels.Properties{ + { + Name: "version", + StringValue: apiutils.Of("1.0.0"), + }, + }, + CustomProperties: &[]dbmodels.Properties{ + { + Name: "environment", + StringValue: apiutils.Of("dev"), + }, + }, + } + + saved, err := repo.Save(catalogModel) + require.NoError(t, err) + require.NotNil(t, saved.GetID()) + + // Update the model with ID specified + updateModel := &models.CatalogModelImpl{ + ID: saved.GetID(), // Specify the ID for update + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("updated-test-model"), + ExternalID: apiutils.Of("updated-ext-456"), + CreateTimeSinceEpoch: saved.GetAttributes().CreateTimeSinceEpoch, // Preserve create time + }, + Properties: &[]dbmodels.Properties{ + { + Name: "version", + StringValue: apiutils.Of("2.0.0"), // Updated version + }, + { + Name: "description", + StringValue: apiutils.Of("Updated description"), // New property + }, + }, + CustomProperties: &[]dbmodels.Properties{ + { + 
Name: "environment", + StringValue: apiutils.Of("prod"), // Updated environment + }, + }, + } + + updated, err := repo.Save(updateModel) + require.NoError(t, err) + require.NotNil(t, updated) + + // Verify the update + assert.Equal(t, *saved.GetID(), *updated.GetID()) // Same ID + assert.Equal(t, "updated-test-model", *updated.GetAttributes().Name) + assert.Equal(t, "updated-ext-456", *updated.GetAttributes().ExternalID) + + // Verify properties were updated + require.NotNil(t, updated.GetProperties()) + assert.Len(t, *updated.GetProperties(), 2) + + // Verify custom properties were updated + require.NotNil(t, updated.GetCustomProperties()) + assert.Len(t, *updated.GetCustomProperties(), 1) + }) + + t.Run("TestUpdateWithName", func(t *testing.T) { + // First create a model + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("update-by-name-model"), + ExternalID: apiutils.Of("update-by-name-ext-123"), + }, + Properties: &[]dbmodels.Properties{ + { + Name: "status", + StringValue: apiutils.Of("draft"), + }, + }, + } + + saved, err := repo.Save(catalogModel) + require.NoError(t, err) + require.NotNil(t, saved.GetID()) + + // Update the model without specifying ID (should lookup by name) + updateModel := &models.CatalogModelImpl{ + // No ID specified - should trigger name lookup in Save method + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("update-by-name-model"), // Same name to trigger lookup + ExternalID: apiutils.Of("updated-by-name-ext-789"), + }, + Properties: &[]dbmodels.Properties{ + { + Name: "status", + StringValue: apiutils.Of("published"), // Updated status + }, + { + Name: "category", + StringValue: apiutils.Of("ml-model"), // New property + }, + }, + } + + updated, err := repo.Save(updateModel) + require.NoError(t, err) + require.NotNil(t, updated) + + // Verify the update happened (same ID, updated fields) + assert.Equal(t, *saved.GetID(), *updated.GetID()) // Should have same 
ID from lookup + assert.Equal(t, "update-by-name-model", *updated.GetAttributes().Name) + assert.Equal(t, "updated-by-name-ext-789", *updated.GetAttributes().ExternalID) + + // Verify properties were updated + require.NotNil(t, updated.GetProperties()) + assert.Len(t, *updated.GetProperties(), 2) + }) + + t.Run("TestListWithPropertiesAndCustomProperties", func(t *testing.T) { + // Create a model with both properties and custom properties + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("props-test-catalog-model"), + ExternalID: apiutils.Of("props-catalog-ext-123"), + }, + Properties: &[]dbmodels.Properties{ + { + Name: "version", + StringValue: apiutils.Of("1.0.0"), + }, + { + Name: "priority", + IntValue: apiutils.Of(int32(5)), + }, + }, + CustomProperties: &[]dbmodels.Properties{ + { + Name: "team", + StringValue: apiutils.Of("ml-team"), + }, + { + Name: "active", + BoolValue: apiutils.Of(true), + }, + }, + } + + saved, err := repo.Save(catalogModel) + require.NoError(t, err) + require.NotNil(t, saved) + + // Retrieve and verify properties + retrieved, err := repo.GetByID(*saved.GetID()) + require.NoError(t, err) + require.NotNil(t, retrieved) + + // Check regular properties + require.NotNil(t, retrieved.GetProperties()) + assert.Len(t, *retrieved.GetProperties(), 2) + + // Check custom properties + require.NotNil(t, retrieved.GetCustomProperties()) + assert.Len(t, *retrieved.GetCustomProperties(), 2) + }) + + t.Run("TestAccuracySorting", func(t *testing.T) { + // Get the CatalogMetricsArtifact type ID for creating accuracy metrics + metricsTypeID := getCatalogMetricsArtifactTypeID(t, sharedDB) + metricsRepo := service.NewCatalogMetricsArtifactRepository(sharedDB, metricsTypeID) + + // Create test models with different accuracy scores + testModels := []struct { + name string + accuracy *float64 // nil means no accuracy score + }{ + {"high-accuracy-model", apiutils.Of(95.5)}, + {"medium-accuracy-model", 
apiutils.Of(75.0)}, + {"low-accuracy-model", apiutils.Of(45.2)}, + {"no-accuracy-model", nil}, + {"zero-accuracy-model", apiutils.Of(0.0)}, + } + + var savedModels []models.CatalogModel + for _, testModel := range testModels { + // Create the model + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of(testModel.name), + ExternalID: apiutils.Of(testModel.name + "-ext"), + }, + } + + savedModel, err := repo.Save(catalogModel) + require.NoError(t, err) + savedModels = append(savedModels, savedModel) + + // Create accuracy metrics artifact if accuracy score is provided + if testModel.accuracy != nil { + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of(fmt.Sprintf("accuracy-metrics-%s", testModel.name)), + ExternalID: apiutils.Of(fmt.Sprintf("accuracy-metrics-%s", testModel.name)), + MetricsType: models.MetricsTypeAccuracy, + }, + CustomProperties: &[]dbmodels.Properties{ + { + Name: "overall_average", + DoubleValue: testModel.accuracy, + }, + { + Name: "benchmark1", + DoubleValue: apiutils.Of(*testModel.accuracy + 1.0), // Individual benchmark score + }, + { + Name: "benchmark2", + DoubleValue: apiutils.Of(*testModel.accuracy - 1.0), // Individual benchmark score + }, + }, + } + + _, err := metricsRepo.Save(metricsArtifact, savedModel.GetID()) + require.NoError(t, err) + } + } + + // Test ACCURACY sorting DESC (default) + listOptions := models.CatalogModelListOptions{ + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("artifacts.overall_average.double_value"), + SortOrder: apiutils.Of("DESC"), + }, + } + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Verify DESC order: high accuracy first, then medium, low, zero, then models without accuracy scores + var accuracyModelsFound []string + for _, model := range result.Items { + name := *model.GetAttributes().Name + if name == 
"high-accuracy-model" || name == "medium-accuracy-model" || + name == "low-accuracy-model" || name == "zero-accuracy-model" || name == "no-accuracy-model" { + accuracyModelsFound = append(accuracyModelsFound, name) + } + } + + // We should have found all our test models + require.GreaterOrEqual(t, len(accuracyModelsFound), 5) + + // Check that high accuracy comes before medium accuracy + highIdx := findIndex(accuracyModelsFound, "high-accuracy-model") + mediumIdx := findIndex(accuracyModelsFound, "medium-accuracy-model") + lowIdx := findIndex(accuracyModelsFound, "low-accuracy-model") + zeroIdx := findIndex(accuracyModelsFound, "zero-accuracy-model") + noAccIdx := findIndex(accuracyModelsFound, "no-accuracy-model") + + require.NotEqual(t, -1, highIdx, "high-accuracy-model not found in results") + require.NotEqual(t, -1, mediumIdx, "medium-accuracy-model not found in results") + require.NotEqual(t, -1, lowIdx, "low-accuracy-model not found in results") + require.NotEqual(t, -1, zeroIdx, "zero-accuracy-model not found in results") + require.NotEqual(t, -1, noAccIdx, "no-accuracy-model not found in results") + + // Verify DESC ordering: high > medium > low > zero > no-accuracy + assert.Less(t, highIdx, mediumIdx, "high accuracy model should come before medium accuracy") + assert.Less(t, mediumIdx, lowIdx, "medium accuracy model should come before low accuracy") + assert.Less(t, lowIdx, zeroIdx, "low accuracy model should come before zero accuracy") + assert.Less(t, zeroIdx, noAccIdx, "zero accuracy model should come before no accuracy") + + // Test ACCURACY sorting ASC + listOptions = models.CatalogModelListOptions{ + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("artifacts.overall_average.double_value"), + SortOrder: apiutils.Of("ASC"), + }, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Find our test models in ASC results + accuracyModelsFound = []string{} + for _, model := range result.Items { + 
name := *model.GetAttributes().Name + if name == "high-accuracy-model" || name == "medium-accuracy-model" || + name == "low-accuracy-model" || name == "zero-accuracy-model" || name == "no-accuracy-model" { + accuracyModelsFound = append(accuracyModelsFound, name) + } + } + + // Get indices for ASC order + highIdxAsc := findIndex(accuracyModelsFound, "high-accuracy-model") + mediumIdxAsc := findIndex(accuracyModelsFound, "medium-accuracy-model") + lowIdxAsc := findIndex(accuracyModelsFound, "low-accuracy-model") + zeroIdxAsc := findIndex(accuracyModelsFound, "zero-accuracy-model") + noAccIdxAsc := findIndex(accuracyModelsFound, "no-accuracy-model") + + // Verify ASC ordering: zero < low < medium < high, with no-accuracy still last + assert.Less(t, zeroIdxAsc, lowIdxAsc, "zero accuracy model should come before low accuracy in ASC") + assert.Less(t, lowIdxAsc, mediumIdxAsc, "low accuracy model should come before medium accuracy in ASC") + assert.Less(t, mediumIdxAsc, highIdxAsc, "medium accuracy model should come before high accuracy in ASC") + assert.Less(t, highIdxAsc, noAccIdxAsc, "models with accuracy should come before models without accuracy in ASC") + + // Test fallback to standard sorting for non-ACCURACY orderBy + listOptions = models.CatalogModelListOptions{ + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("ID"), + SortOrder: apiutils.Of("ASC"), + }, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + // Should not error and should return results (detailed verification not needed since we're testing fallback) + assert.Greater(t, len(result.Items), 0) + }) + + t.Run("TestAccuracySortingPagination", func(t *testing.T) { + // Get the CatalogMetricsArtifact type ID for creating accuracy metrics + metricsTypeID := getCatalogMetricsArtifactTypeID(t, sharedDB) + metricsRepo := service.NewCatalogMetricsArtifactRepository(sharedDB, metricsTypeID) + + // Create 5 test models with accuracy scores for pagination 
testing + // Use unique names to avoid interference with other tests + testModels := []struct { + name string + accuracy float64 + }{ + {"pagination-test-model-a", 95.0}, // Should be first in DESC order + {"pagination-test-model-b", 85.0}, + {"pagination-test-model-c", 75.0}, + {"pagination-test-model-d", 65.0}, + {"pagination-test-model-e", 55.0}, // Should be last in DESC order + } + + var savedModels []models.CatalogModel + for _, testModel := range testModels { + // Create the model + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of(testModel.name), + ExternalID: apiutils.Of(testModel.name + "-ext"), + }, + } + + savedModel, err := repo.Save(catalogModel) + require.NoError(t, err) + savedModels = append(savedModels, savedModel) + + // Create accuracy metrics artifact + metricsArtifact := &models.CatalogMetricsArtifactImpl{ + Attributes: &models.CatalogMetricsArtifactAttributes{ + Name: apiutils.Of(fmt.Sprintf("accuracy-metrics-%s", testModel.name)), + ExternalID: apiutils.Of(fmt.Sprintf("accuracy-metrics-%s", testModel.name)), + MetricsType: models.MetricsTypeAccuracy, + }, + CustomProperties: &[]dbmodels.Properties{ + { + Name: "overall_average", + DoubleValue: &testModel.accuracy, + }, + }, + } + + _, err = metricsRepo.Save(metricsArtifact, savedModel.GetID()) + require.NoError(t, err) + } + + // Test pagination by collecting all pages + // This approach is more robust and less sensitive to test interference + listOptions := models.CatalogModelListOptions{ + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("artifacts.overall_average.double_value"), + SortOrder: apiutils.Of("DESC"), + PageSize: apiutils.Of(int32(2)), + }, + } + + // Collect all our test models across pages + var allPaginatedModels []models.CatalogModel + var pageCount int + currentToken := (*string)(nil) + + for { + pageCount++ + if currentToken != nil { + listOptions.Pagination.NextPageToken = currentToken + } + + page, err := 
repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, page) + assert.Equal(t, int32(2), page.PageSize) + + // Filter to only include our test models + for _, model := range page.Items { + if strings.HasPrefix(*model.GetAttributes().Name, "pagination-test-model-") { + allPaginatedModels = append(allPaginatedModels, model) + } + } + + // Stop if no more pages or we've collected all our test models + if page.NextPageToken == "" || len(allPaginatedModels) >= 5 { + if page.NextPageToken == "" { + t.Logf("Pagination completed in %d pages", pageCount) + } + break + } + currentToken = &page.NextPageToken + + // Safety check to prevent infinite loop + if pageCount > 10 { + t.Fatal("Too many pages, might be an infinite loop") + } + } + + // Verify we collected all our test models + assert.GreaterOrEqual(t, len(allPaginatedModels), 5, "Should have found all pagination test models") + + // Extract names and verify ordering (DESC by accuracy) + var modelNames []string + for _, model := range allPaginatedModels { + if strings.HasPrefix(*model.GetAttributes().Name, "pagination-test-model-") { + modelNames = append(modelNames, *model.GetAttributes().Name) + } + } + + // Check that pagination preserved the correct ordering + // In DESC order: a(95.0) -> b(85.0) -> c(75.0) -> d(65.0) -> e(55.0) + expectedOrder := []string{ + "pagination-test-model-a", // 95.0 (highest) + "pagination-test-model-b", // 85.0 + "pagination-test-model-c", // 75.0 + "pagination-test-model-d", // 65.0 + "pagination-test-model-e", // 55.0 (lowest) + } + + // Verify our test models appear in correct order (allowing for other models in between) + lastIndex := -1 + for _, expectedModel := range expectedOrder { + foundIndex := -1 + for i, actualModel := range modelNames { + if actualModel == expectedModel { + foundIndex = i + break + } + } + assert.NotEqual(t, -1, foundIndex, "Should find model %s", expectedModel) + if foundIndex != -1 { + assert.Greater(t, foundIndex, lastIndex, "Model %s should 
appear after previous models in DESC order", expectedModel) + lastIndex = foundIndex + } + } + + // Test ASC pagination briefly to verify token generation works in both directions + listOptions = models.CatalogModelListOptions{ + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("artifacts.overall_average.double_value"), + SortOrder: apiutils.Of("ASC"), + PageSize: apiutils.Of(int32(3)), + }, + } + + pageAsc, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, pageAsc) + + // Just verify that ASC pagination works and generates tokens when there are more results + if len(pageAsc.Items) == 3 { + assert.NotEmpty(t, pageAsc.NextPageToken, "Should have next page token in ASC order when page is full") + } + }) + + t.Run("TestNameOrdering", func(t *testing.T) { + // Create test models with specific names for ordering + testModels := []string{ + "zebra-model", + "alpha-model", + "beta-model", + "gamma-model", + "delta-model", + } + + var savedModels []models.CatalogModel + for _, name := range testModels { + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of(name), + ExternalID: apiutils.Of(name + "-ext"), + }, + } + + savedModel, err := repo.Save(catalogModel) + require.NoError(t, err) + savedModels = append(savedModels, savedModel) + } + + // Test NAME ordering ASC + listOptions := models.CatalogModelListOptions{ + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("NAME"), + SortOrder: apiutils.Of("ASC"), + }, + } + result, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Extract our test model names from results + var foundNames []string + for _, model := range result.Items { + name := *model.GetAttributes().Name + if name == "zebra-model" || name == "alpha-model" || name == "beta-model" || + name == "gamma-model" || name == "delta-model" { + foundNames = append(foundNames, name) + } + } + + // Verify we found all our test models + 
require.GreaterOrEqual(t, len(foundNames), 5, "Should find all test models") + + // Verify ASC ordering: alpha < beta < delta < gamma < zebra + alphaIdx := findIndex(foundNames, "alpha-model") + betaIdx := findIndex(foundNames, "beta-model") + deltaIdx := findIndex(foundNames, "delta-model") + gammaIdx := findIndex(foundNames, "gamma-model") + zebraIdx := findIndex(foundNames, "zebra-model") + + require.NotEqual(t, -1, alphaIdx, "alpha-model not found") + require.NotEqual(t, -1, betaIdx, "beta-model not found") + require.NotEqual(t, -1, deltaIdx, "delta-model not found") + require.NotEqual(t, -1, gammaIdx, "gamma-model not found") + require.NotEqual(t, -1, zebraIdx, "zebra-model not found") + + assert.Less(t, alphaIdx, betaIdx, "alpha should come before beta in ASC") + assert.Less(t, betaIdx, deltaIdx, "beta should come before delta in ASC") + assert.Less(t, deltaIdx, gammaIdx, "delta should come before gamma in ASC") + assert.Less(t, gammaIdx, zebraIdx, "gamma should come before zebra in ASC") + + // Test NAME ordering DESC + listOptions = models.CatalogModelListOptions{ + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("NAME"), + SortOrder: apiutils.Of("DESC"), + }, + } + result, err = repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, result) + + // Extract our test model names from DESC results + foundNames = []string{} + for _, model := range result.Items { + name := *model.GetAttributes().Name + if name == "zebra-model" || name == "alpha-model" || name == "beta-model" || + name == "gamma-model" || name == "delta-model" { + foundNames = append(foundNames, name) + } + } + + // Verify DESC ordering: zebra > gamma > delta > beta > alpha + alphaIdxDesc := findIndex(foundNames, "alpha-model") + betaIdxDesc := findIndex(foundNames, "beta-model") + deltaIdxDesc := findIndex(foundNames, "delta-model") + gammaIdxDesc := findIndex(foundNames, "gamma-model") + zebraIdxDesc := findIndex(foundNames, "zebra-model") + + assert.Less(t, zebraIdxDesc, 
gammaIdxDesc, "zebra should come before gamma in DESC") + assert.Less(t, gammaIdxDesc, deltaIdxDesc, "gamma should come before delta in DESC") + assert.Less(t, deltaIdxDesc, betaIdxDesc, "delta should come before beta in DESC") + assert.Less(t, betaIdxDesc, alphaIdxDesc, "beta should come before alpha in DESC") + }) + + t.Run("TestNameOrderingPagination", func(t *testing.T) { + // Create models with sequential names for pagination testing + testModels := []string{ + "page-test-model-01", + "page-test-model-02", + "page-test-model-03", + "page-test-model-04", + "page-test-model-05", + } + + for _, name := range testModels { + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of(name), + ExternalID: apiutils.Of(name + "-ext"), + }, + } + + _, err := repo.Save(catalogModel) + require.NoError(t, err) + } + + // Test pagination with NAME ordering + listOptions := models.CatalogModelListOptions{ + Pagination: dbmodels.Pagination{ + OrderBy: apiutils.Of("NAME"), + SortOrder: apiutils.Of("ASC"), + PageSize: apiutils.Of(int32(2)), + }, + } + + // Collect all our test models across pages + var allPaginatedModels []string + var pageCount int + currentToken := (*string)(nil) + + for { + pageCount++ + if currentToken != nil { + listOptions.Pagination.NextPageToken = currentToken + } + + page, err := repo.List(listOptions) + require.NoError(t, err) + require.NotNil(t, page) + assert.Equal(t, int32(2), page.PageSize) + + // Filter to only include our test models + for _, model := range page.Items { + name := *model.GetAttributes().Name + if strings.HasPrefix(name, "page-test-model-") { + allPaginatedModels = append(allPaginatedModels, name) + } + } + + // Stop if no more pages or we've collected all our test models + if page.NextPageToken == "" || len(allPaginatedModels) >= 5 { + if page.NextPageToken == "" { + t.Logf("NAME pagination completed in %d pages", pageCount) + } + break + } + currentToken = &page.NextPageToken + + 
// Safety check to prevent infinite loop + if pageCount > 10 { + t.Fatal("Too many pages, might be an infinite loop") + } + } + + // Verify we collected all our test models + assert.GreaterOrEqual(t, len(allPaginatedModels), 5, "Should have found all page-test models") + + // Verify ordering is maintained across pages + expectedOrder := []string{ + "page-test-model-01", + "page-test-model-02", + "page-test-model-03", + "page-test-model-04", + "page-test-model-05", + } + + // Verify our test models appear in correct order + lastIndex := -1 + for _, expectedModel := range expectedOrder { + foundIndex := findIndex(allPaginatedModels, expectedModel) + assert.NotEqual(t, -1, foundIndex, "Should find model %s", expectedModel) + if foundIndex != -1 { + assert.Greater(t, foundIndex, lastIndex, "Model %s should appear after previous models", expectedModel) + lastIndex = foundIndex + } + } + }) +} + +// Helper function to get or create CatalogModel type ID +func getCatalogModelTypeID(t *testing.T, db *gorm.DB) int32 { + var typeRecord schema.Type + err := db.Where("name = ?", service.CatalogModelTypeName).First(&typeRecord).Error + if err != nil { + require.NoError(t, err, "Failed to query CatalogModel type") + } + + return typeRecord.ID +} + +// Helper function to find index of string in slice +func findIndex(slice []string, target string) int { + for i, item := range slice { + if item == target { + return i + } + } + return -1 +} + +func TestCatalogModelRepository_TimestampPreservation(t *testing.T) { + sharedDB, cleanup := testutils.SetupPostgresWithMigrations(t, service.DatastoreSpec()) + defer cleanup() + + typeID := getCatalogModelTypeID(t, sharedDB) + repo := service.NewCatalogModelRepository(sharedDB, typeID) + + t.Run("Preserve historical timestamps from YAML catalog", func(t *testing.T) { + // Simulate loading a model from YAML with historical timestamps + historicalCreateTime := int64(1739776988000) // From YAML example + historicalUpdateTime := 
int64(1746720264000) // From YAML example + + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("yaml-loaded-model"), + ExternalID: apiutils.Of("yaml-model-123"), + CreateTimeSinceEpoch: &historicalCreateTime, + LastUpdateTimeSinceEpoch: &historicalUpdateTime, + }, + Properties: &[]dbmodels.Properties{ + { + Name: "description", + StringValue: apiutils.Of("Model loaded from YAML"), + }, + }, + } + + // Save the model - timestamps should be preserved + saved, err := repo.Save(catalogModel) + require.NoError(t, err) + require.NotNil(t, saved) + require.NotNil(t, saved.GetID()) + + // Verify historical timestamps were preserved + savedAttrs := saved.GetAttributes() + require.NotNil(t, savedAttrs.CreateTimeSinceEpoch) + require.NotNil(t, savedAttrs.LastUpdateTimeSinceEpoch) + assert.Equal(t, historicalCreateTime, *savedAttrs.CreateTimeSinceEpoch, + "CreateTimeSinceEpoch should be preserved from YAML") + assert.Equal(t, historicalUpdateTime, *savedAttrs.LastUpdateTimeSinceEpoch, + "LastUpdateTimeSinceEpoch should be preserved from YAML") + + // Reload from database to verify persistence + retrieved, err := repo.GetByID(*saved.GetID()) + require.NoError(t, err) + retrievedAttrs := retrieved.GetAttributes() + assert.Equal(t, historicalCreateTime, *retrievedAttrs.CreateTimeSinceEpoch, + "CreateTimeSinceEpoch should persist in database") + assert.Equal(t, historicalUpdateTime, *retrievedAttrs.LastUpdateTimeSinceEpoch, + "LastUpdateTimeSinceEpoch should persist in database") + }) + + t.Run("Auto-generate timestamps for API-created models", func(t *testing.T) { + // Model created via API without timestamps + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("api-created-model"), + ExternalID: apiutils.Of("api-model-456"), + // No timestamps set - should be auto-generated + }, + Properties: &[]dbmodels.Properties{ + { + Name: "description", + StringValue: 
apiutils.Of("Model created via API"), + }, + }, + } + + // Save the model - timestamps should be auto-generated + saved, err := repo.Save(catalogModel) + require.NoError(t, err) + require.NotNil(t, saved) + + // Verify timestamps were auto-generated (non-zero) + savedAttrs := saved.GetAttributes() + require.NotNil(t, savedAttrs.CreateTimeSinceEpoch) + require.NotNil(t, savedAttrs.LastUpdateTimeSinceEpoch) + assert.Greater(t, *savedAttrs.CreateTimeSinceEpoch, int64(0), + "CreateTimeSinceEpoch should be auto-generated") + assert.Greater(t, *savedAttrs.LastUpdateTimeSinceEpoch, int64(0), + "LastUpdateTimeSinceEpoch should be auto-generated") + }) + + t.Run("Update existing model from YAML preserves CreateTime", func(t *testing.T) { + // First save: Create model with historical timestamps + historicalCreateTime := int64(1739776988000) + firstUpdateTime := int64(1746720264000) + + catalogModel := &models.CatalogModelImpl{ + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("yaml-updated-model"), + ExternalID: apiutils.Of("yaml-updated-123"), + CreateTimeSinceEpoch: &historicalCreateTime, + LastUpdateTimeSinceEpoch: &firstUpdateTime, + }, + } + + saved, err := repo.Save(catalogModel) + require.NoError(t, err) + savedID := saved.GetID() + + // Second save: Update the model with new LastUpdateTime (simulating catalog reload) + newerUpdateTime := int64(1750000000000) + catalogModel.ID = savedID + catalogModel.GetAttributes().LastUpdateTimeSinceEpoch = &newerUpdateTime + + updated, err := repo.Save(catalogModel) + require.NoError(t, err) + + // Verify CreateTime is preserved but LastUpdateTime is updated + updatedAttrs := updated.GetAttributes() + assert.Equal(t, historicalCreateTime, *updatedAttrs.CreateTimeSinceEpoch, + "CreateTimeSinceEpoch should be preserved on update") + assert.Equal(t, newerUpdateTime, *updatedAttrs.LastUpdateTimeSinceEpoch, + "LastUpdateTimeSinceEpoch should be updated") + }) +} diff --git 
a/catalog/internal/db/service/property_options.go b/catalog/internal/db/service/property_options.go new file mode 100644 index 0000000000..9b0e98c037 --- /dev/null +++ b/catalog/internal/db/service/property_options.go @@ -0,0 +1,244 @@ +package service + +import ( + "fmt" + + "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/catalog/internal/db/schema" + "github.com/lib/pq" + "gorm.io/gorm" +) + +var _ models.PropertyOptionsRepository = (*PropertyOptionsRepositoryImpl)(nil) + +type PropertyOptionsRepositoryImpl struct { + db *gorm.DB +} + +func NewPropertyOptionsRepository(db *gorm.DB) models.PropertyOptionsRepository { + return &PropertyOptionsRepositoryImpl{ + db: db, + } +} + +func (r *PropertyOptionsRepositoryImpl) Refresh(t models.PropertyOptionType) error { + if r.db.Name() != "postgres" { + return nil + } + + var viewName string + switch t { + case models.ContextPropertyOptionType: + viewName = schema.TableNameContextPropertyOption + case models.ArtifactPropertyOptionType: + viewName = schema.TableNameArtifactPropertyOption + default: + return fmt.Errorf("invalid property option type: %d", t) + } + + sql := fmt.Sprintf("REFRESH MATERIALIZED VIEW %s", viewName) + if err := r.db.Exec(sql).Error; err != nil { + return fmt.Errorf("error refreshing materialized view %s: %w", viewName, err) + } + + return nil +} + +func (r *PropertyOptionsRepositoryImpl) List(t models.PropertyOptionType, typeID int32) ([]models.PropertyOption, error) { + if r.db.Name() != "postgres" { + return []models.PropertyOption{}, nil + } + + switch t { + case models.ContextPropertyOptionType: + return r.listContextPropertyOptions(typeID) + case models.ArtifactPropertyOptionType: + return r.listArtifactPropertyOptions(typeID) + default: + return nil, fmt.Errorf("invalid property option type: %d", t) + } +} + +func (r *PropertyOptionsRepositoryImpl) listContextPropertyOptions(typeID int32) ([]models.PropertyOption, error) { + q := r.db + if 
typeID > 0 { + q = q.Where("type_id = ?", typeID) + } + q = q.Order("name") + + var contextOptions []schema.ContextPropertyOption + if err := q.Find(&contextOptions).Error; err != nil { + return nil, fmt.Errorf("error querying context property options: %w", err) + } + + return convertSchemaToPropertyOptions(contextOptions), nil +} + +func (r *PropertyOptionsRepositoryImpl) listArtifactPropertyOptions(typeID int32) ([]models.PropertyOption, error) { + q := r.db + if typeID > 0 { + q = q.Where("type_id = ?", typeID) + } + q = q.Order("name") + + var artifactOptions []schema.ArtifactPropertyOption + if err := q.Find(&artifactOptions).Error; err != nil { + return nil, fmt.Errorf("error querying artifact property options: %w", err) + } + + return convertSchemaToPropertyOptions(artifactOptions), nil +} + +// Helper function to convert schema types to models.PropertyOption +// This works for both ContextPropertyOption and ArtifactPropertyOption since they have identical structure +func convertSchemaToPropertyOptions[T interface { + schema.ContextPropertyOption | schema.ArtifactPropertyOption +}](options []T) []models.PropertyOption { + result := make([]models.PropertyOption, len(options)) + + for i, option := range options { + var stringValue, arrayValue []string + + // Convert pq.StringArray to []string for StringValue + if stringVal := getStringValue(option); stringVal != nil { + stringValue = []string(*stringVal) + } + + // Convert pq.StringArray to []string for ArrayValue + if arrVal := getArrayValue(option); arrVal != nil { + arrayValue = []string(*arrVal) + } + + result[i] = models.PropertyOption{ + TypeID: getTypeID(option), + Name: getName(option), + IsCustomProperty: getIsCustomProperty(option), + StringValue: stringValue, + ArrayValue: arrayValue, + MinDoubleValue: getMinDoubleValue(option), + MaxDoubleValue: getMaxDoubleValue(option), + MinIntValue: getMinIntValue(option), + MaxIntValue: getMaxIntValue(option), + } + } + + return result +} + +// Helper 
functions to extract fields from schema types +func getStringValue[T interface { + schema.ContextPropertyOption | schema.ArtifactPropertyOption +}](option T) *pq.StringArray { + switch v := any(option).(type) { + case schema.ContextPropertyOption: + return v.StringValue + case schema.ArtifactPropertyOption: + return v.StringValue + default: + return nil + } +} + +func getArrayValue[T interface { + schema.ContextPropertyOption | schema.ArtifactPropertyOption +}](option T) *pq.StringArray { + switch v := any(option).(type) { + case schema.ContextPropertyOption: + return v.ArrayValue + case schema.ArtifactPropertyOption: + return v.ArrayValue + default: + return nil + } +} + +func getTypeID[T interface { + schema.ContextPropertyOption | schema.ArtifactPropertyOption +}](option T) int32 { + switch v := any(option).(type) { + case schema.ContextPropertyOption: + return v.TypeID + case schema.ArtifactPropertyOption: + return v.TypeID + default: + return 0 + } +} + +func getName[T interface { + schema.ContextPropertyOption | schema.ArtifactPropertyOption +}](option T) string { + switch v := any(option).(type) { + case schema.ContextPropertyOption: + return v.Name + case schema.ArtifactPropertyOption: + return v.Name + default: + return "" + } +} + +func getIsCustomProperty[T interface { + schema.ContextPropertyOption | schema.ArtifactPropertyOption +}](option T) bool { + switch v := any(option).(type) { + case schema.ContextPropertyOption: + return v.IsCustomProperty + case schema.ArtifactPropertyOption: + return v.IsCustomProperty + default: + return false + } +} + +func getMinDoubleValue[T interface { + schema.ContextPropertyOption | schema.ArtifactPropertyOption +}](option T) *float64 { + switch v := any(option).(type) { + case schema.ContextPropertyOption: + return v.MinDoubleValue + case schema.ArtifactPropertyOption: + return v.MinDoubleValue + default: + return nil + } +} + +func getMaxDoubleValue[T interface { + schema.ContextPropertyOption | 
schema.ArtifactPropertyOption +}](option T) *float64 { + switch v := any(option).(type) { + case schema.ContextPropertyOption: + return v.MaxDoubleValue + case schema.ArtifactPropertyOption: + return v.MaxDoubleValue + default: + return nil + } +} + +func getMinIntValue[T interface { + schema.ContextPropertyOption | schema.ArtifactPropertyOption +}](option T) *int64 { + switch v := any(option).(type) { + case schema.ContextPropertyOption: + return v.MinIntValue + case schema.ArtifactPropertyOption: + return v.MinIntValue + default: + return nil + } +} + +func getMaxIntValue[T interface { + schema.ContextPropertyOption | schema.ArtifactPropertyOption +}](option T) *int64 { + switch v := any(option).(type) { + case schema.ContextPropertyOption: + return v.MaxIntValue + case schema.ArtifactPropertyOption: + return v.MaxIntValue + default: + return nil + } +} diff --git a/catalog/internal/db/service/property_options_test.go b/catalog/internal/db/service/property_options_test.go new file mode 100644 index 0000000000..a225ae4f90 --- /dev/null +++ b/catalog/internal/db/service/property_options_test.go @@ -0,0 +1,229 @@ +package service_test + +import ( + "testing" + + "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/catalog/internal/db/service" + "github.com/kubeflow/model-registry/internal/apiutils" + dbmodels "github.com/kubeflow/model-registry/internal/db/models" + "github.com/kubeflow/model-registry/internal/testutils" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestPropertyOptionsRepository(t *testing.T) { + sharedDB, cleanup := testutils.SetupPostgresWithMigrations(t, service.DatastoreSpec()) + defer cleanup() + + repo := service.NewPropertyOptionsRepository(sharedDB) + + // Get necessary type IDs for creating test data + catalogModelTypeID := getCatalogModelTypeID(t, sharedDB) + modelArtifactTypeID := getCatalogModelArtifactTypeID(t, sharedDB) + + // Create test 
repositories for setting up data + catalogModelRepo := service.NewCatalogModelRepository(sharedDB, catalogModelTypeID) + artifactRepo := service.NewCatalogModelArtifactRepository(sharedDB, modelArtifactTypeID) + + t.Run("Refresh_ContextPropertyOptions", func(t *testing.T) { + // Test refreshing context property options materialized view + err := repo.Refresh(models.ContextPropertyOptionType) + assert.NoError(t, err) + }) + + t.Run("Refresh_ArtifactPropertyOptions", func(t *testing.T) { + // Test refreshing artifact property options materialized view + err := repo.Refresh(models.ArtifactPropertyOptionType) + assert.NoError(t, err) + }) + + t.Run("Refresh_InvalidType", func(t *testing.T) { + // Test error handling for invalid property option type + err := repo.Refresh(models.PropertyOptionType(999)) + assert.Error(t, err) + assert.Contains(t, err.Error(), "invalid property option type") + }) + + t.Run("List_ContextPropertyOptions_SharedTestEnvironment", func(t *testing.T) { + // Refresh the view first + err := repo.Refresh(models.ContextPropertyOptionType) + require.NoError(t, err) + + // List context property options for the test type ID + options, err := repo.List(models.ContextPropertyOptionType, catalogModelTypeID) + assert.NoError(t, err) + assert.NotNil(t, options) + // In shared test environment, other tests may have created data already + // Just verify the function works and returns valid data if any exists + for _, option := range options { + assert.Equal(t, catalogModelTypeID, option.TypeID) + assert.NotEmpty(t, option.Name) + } + }) + + t.Run("List_ArtifactPropertyOptions_SharedTestEnvironment", func(t *testing.T) { + // Refresh the view first + err := repo.Refresh(models.ArtifactPropertyOptionType) + require.NoError(t, err) + + // List artifact property options for the test type ID + options, err := repo.List(models.ArtifactPropertyOptionType, modelArtifactTypeID) + assert.NoError(t, err) + assert.NotNil(t, options) + // In shared test environment, other 
tests may have created data already + // Just verify the function works and returns valid data if any exists + for _, option := range options { + assert.Equal(t, modelArtifactTypeID, option.TypeID) + assert.NotEmpty(t, option.Name) + } + }) + + t.Run("List_NonExistentTypeID", func(t *testing.T) { + // Test with a type ID that doesn't exist - should return empty results + nonExistentTypeID := int32(99999) + + // Test context property options + options, err := repo.List(models.ContextPropertyOptionType, nonExistentTypeID) + assert.NoError(t, err) + assert.NotNil(t, options) + assert.Len(t, options, 0) + + // Test artifact property options + options, err = repo.List(models.ArtifactPropertyOptionType, nonExistentTypeID) + assert.NoError(t, err) + assert.NotNil(t, options) + assert.Len(t, options, 0) + }) + + t.Run("List_InvalidType", func(t *testing.T) { + // Test error handling for invalid property option type + options, err := repo.List(models.PropertyOptionType(999), catalogModelTypeID) + assert.Error(t, err) + assert.Nil(t, options) + assert.Contains(t, err.Error(), "invalid property option type") + }) + + t.Run("List_ContextPropertyOptions_WithData", func(t *testing.T) { + // Create a catalog model with properties to populate the materialized view + catalogModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-model-for-context-properties"), + ExternalID: apiutils.Of("context-props-test-123"), + }, + CustomProperties: &[]dbmodels.Properties{ + { + Name: "custom_prop_1", + StringValue: apiutils.Of("value1"), + }, + { + Name: "version_number", + IntValue: apiutils.Of(int32(1)), + }, + { + Name: "accuracy", + DoubleValue: apiutils.Of(0.95), + }, + }, + } + + savedModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + require.NotNil(t, savedModel) + + // Refresh the materialized view to include our new data + err = 
repo.Refresh(models.ContextPropertyOptionType) + require.NoError(t, err) + + // List context property options + options, err := repo.List(models.ContextPropertyOptionType, catalogModelTypeID) + assert.NoError(t, err) + assert.NotNil(t, options) + + // We should have at least some property options now + // The exact number depends on what properties were created + if len(options) > 0 { + // Verify the structure of returned options + for _, option := range options { + assert.Equal(t, catalogModelTypeID, option.TypeID) + assert.NotEmpty(t, option.Name) + // At least one of the value fields should be populated + hasValue := len(option.StringValue) > 0 || + len(option.ArrayValue) > 0 || + option.MinDoubleValue != nil || + option.MaxDoubleValue != nil || + option.MinIntValue != nil || + option.MaxIntValue != nil + assert.True(t, hasValue, "Option should have at least one value field populated") + } + } + }) + + t.Run("List_ArtifactPropertyOptions_WithData", func(t *testing.T) { + // First create a catalog model as parent + catalogModel := &models.CatalogModelImpl{ + TypeID: apiutils.Of(int32(catalogModelTypeID)), + Attributes: &models.CatalogModelAttributes{ + Name: apiutils.Of("test-model-for-artifact-properties"), + ExternalID: apiutils.Of("artifact-props-test-123"), + }, + } + savedModel, err := catalogModelRepo.Save(catalogModel) + require.NoError(t, err) + + // Create an artifact with properties + artifact := &models.CatalogModelArtifactImpl{ + TypeID: apiutils.Of(int32(modelArtifactTypeID)), + Attributes: &models.CatalogModelArtifactAttributes{ + Name: apiutils.Of("test-artifact-with-properties"), + ExternalID: apiutils.Of("artifact-props-test-456"), + URI: apiutils.Of("s3://bucket/model.pkl"), + }, + CustomProperties: &[]dbmodels.Properties{ + { + Name: "model_type", + StringValue: apiutils.Of("classification"), + }, + { + Name: "file_size", + IntValue: apiutils.Of(int32(1024)), + }, + { + Name: "validation_accuracy", + DoubleValue: apiutils.Of(0.92), + }, + }, + 
} + + savedArtifact, err := artifactRepo.Save(artifact, savedModel.GetID()) + require.NoError(t, err) + require.NotNil(t, savedArtifact) + + // Refresh the materialized view to include our new data + err = repo.Refresh(models.ArtifactPropertyOptionType) + require.NoError(t, err) + + // List artifact property options + options, err := repo.List(models.ArtifactPropertyOptionType, modelArtifactTypeID) + assert.NoError(t, err) + assert.NotNil(t, options) + + // We should have some property options now + if len(options) > 0 { + // Verify the structure of returned options + for _, option := range options { + assert.Equal(t, modelArtifactTypeID, option.TypeID) + assert.NotEmpty(t, option.Name) + // At least one of the value fields should be populated + hasValue := len(option.StringValue) > 0 || + len(option.ArrayValue) > 0 || + option.MinDoubleValue != nil || + option.MaxDoubleValue != nil || + option.MinIntValue != nil || + option.MaxIntValue != nil + assert.True(t, hasValue, "Option should have at least one value field populated") + } + } + }) +} diff --git a/catalog/internal/db/service/service_test.go b/catalog/internal/db/service/service_test.go new file mode 100644 index 0000000000..0dddcad52d --- /dev/null +++ b/catalog/internal/db/service/service_test.go @@ -0,0 +1,12 @@ +package service + +import ( + "os" + "testing" + + "github.com/kubeflow/model-registry/internal/testutils" +) + +func TestMain(m *testing.M) { + os.Exit(testutils.TestMainPostgresHelper(m)) +} diff --git a/catalog/internal/db/service/spec.go b/catalog/internal/db/service/spec.go new file mode 100644 index 0000000000..a27dc20482 --- /dev/null +++ b/catalog/internal/db/service/spec.go @@ -0,0 +1,63 @@ +package service + +import ( + "github.com/kubeflow/model-registry/catalog/internal/db/models" + "github.com/kubeflow/model-registry/internal/datastore" +) + +const ( + CatalogModelTypeName = "kf.CatalogModel" + CatalogModelArtifactTypeName = "kf.CatalogModelArtifact" + CatalogMetricsArtifactTypeName = 
"kf.CatalogMetricsArtifact" +) + +func DatastoreSpec() *datastore.Spec { + return datastore.NewSpec(). + AddContext(CatalogModelTypeName, datastore.NewSpecType(NewCatalogModelRepository). + AddString("source_id"). + AddString("description"). + AddString("owner"). + AddString("state"). + AddStruct("language"). + AddString("library_name"). + AddString("license_link"). + AddString("license"). + AddString("logo"). + AddString("maturity"). + AddString("provider"). + AddString("readme"). + AddStruct("tasks"), + ). + AddArtifact(CatalogModelArtifactTypeName, datastore.NewSpecType(NewCatalogModelArtifactRepository). + AddString("uri"), + ). + AddArtifact(CatalogMetricsArtifactTypeName, datastore.NewSpecType(NewCatalogMetricsArtifactRepository). + AddString("metricsType"), + ). + AddOther(NewCatalogArtifactRepository). + AddOther(NewPropertyOptionsRepository) +} + +type Services struct { + CatalogModelRepository models.CatalogModelRepository + CatalogArtifactRepository models.CatalogArtifactRepository + CatalogModelArtifactRepository models.CatalogModelArtifactRepository + CatalogMetricsArtifactRepository models.CatalogMetricsArtifactRepository + PropertyOptionsRepository models.PropertyOptionsRepository +} + +func NewServices( + catalogModelRepository models.CatalogModelRepository, + catalogArtifactRepository models.CatalogArtifactRepository, + catalogModelArtifactRepository models.CatalogModelArtifactRepository, + catalogMetricsArtifactRepository models.CatalogMetricsArtifactRepository, + propertyOptionsRepository models.PropertyOptionsRepository, +) Services { + return Services{ + CatalogModelRepository: catalogModelRepository, + CatalogArtifactRepository: catalogArtifactRepository, + CatalogModelArtifactRepository: catalogModelArtifactRepository, + CatalogMetricsArtifactRepository: catalogMetricsArtifactRepository, + PropertyOptionsRepository: propertyOptionsRepository, + } +} diff --git a/catalog/internal/db/service/testdata/testdb.cnf 
b/catalog/internal/db/service/testdata/testdb.cnf new file mode 100644 index 0000000000..873958cd70 --- /dev/null +++ b/catalog/internal/db/service/testdata/testdb.cnf @@ -0,0 +1,5 @@ +[mysqld] +character-set-server = utf8mb4 +collation-server = utf8mb4_general_ci + +!includedir /etc/mysql/conf.d/ diff --git a/catalog/internal/server/openapi/.openapi-generator/FILES b/catalog/internal/server/openapi/.openapi-generator/FILES index d408ed0d97..344857afc8 100644 --- a/catalog/internal/server/openapi/.openapi-generator/FILES +++ b/catalog/internal/server/openapi/.openapi-generator/FILES @@ -6,15 +6,23 @@ impl.go logger.go model_artifact_type_query_param.go model_base_model.go +model_base_resource.go model_base_resource_dates.go model_base_resource_list.go +model_catalog_artifact.go +model_catalog_artifact_list.go +model_catalog_label.go +model_catalog_label_list.go +model_catalog_metrics_artifact.go model_catalog_model.go model_catalog_model_artifact.go -model_catalog_model_artifact_list.go model_catalog_model_list.go model_catalog_source.go model_catalog_source_list.go model_error.go +model_filter_option.go +model_filter_option_range.go +model_filter_options_list.go model_metadata_bool_value.go model_metadata_double_value.go model_metadata_int_value.go diff --git a/catalog/internal/server/openapi/api.go b/catalog/internal/server/openapi/api.go index 5a58ecb98c..bc4b112ec3 100644 --- a/catalog/internal/server/openapi/api.go +++ b/catalog/internal/server/openapi/api.go @@ -1,3 +1,5 @@ +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + /* * Model Catalog REST API * @@ -6,8 +8,6 @@ * API version: v1alpha1 */ -// Generated by: OpenAPI Generator (https://openapi-generator.tech). DO NOT EDIT. 
- package openapi import ( @@ -21,7 +21,9 @@ import ( // The ModelCatalogServiceAPIRouter implementation should parse necessary information from the http request, // pass the data to a ModelCatalogServiceAPIServicer to perform the required actions, then write the service results to the http response. type ModelCatalogServiceAPIRouter interface { + FindLabels(http.ResponseWriter, *http.Request) FindModels(http.ResponseWriter, *http.Request) + FindModelsFilterOptions(http.ResponseWriter, *http.Request) FindSources(http.ResponseWriter, *http.Request) GetModel(http.ResponseWriter, *http.Request) GetAllModelArtifacts(http.ResponseWriter, *http.Request) @@ -32,8 +34,10 @@ type ModelCatalogServiceAPIRouter interface { // while the service implementation can be ignored with the .openapi-generator-ignore file // and updated with the logic required for the API. type ModelCatalogServiceAPIServicer interface { - FindModels(context.Context, string, string, string, model.OrderByField, model.SortOrder, string) (ImplResponse, error) + FindLabels(context.Context, string, string, model.SortOrder, string) (ImplResponse, error) + FindModels(context.Context, []string, string, []string, string, string, model.OrderByField, model.SortOrder, string) (ImplResponse, error) + FindModelsFilterOptions(context.Context) (ImplResponse, error) FindSources(context.Context, string, string, model.OrderByField, model.SortOrder, string) (ImplResponse, error) GetModel(context.Context, string, string) (ImplResponse, error) - GetAllModelArtifacts(context.Context, string, string) (ImplResponse, error) + GetAllModelArtifacts(context.Context, string, string, []model.ArtifactTypeQueryParam, []model.ArtifactTypeQueryParam, string, string, string, model.SortOrder, string) (ImplResponse, error) } diff --git a/catalog/internal/server/openapi/api_model_catalog_service.go b/catalog/internal/server/openapi/api_model_catalog_service.go index 5e26d534d4..ad9ec46744 100644 --- 
a/catalog/internal/server/openapi/api_model_catalog_service.go +++ b/catalog/internal/server/openapi/api_model_catalog_service.go @@ -1,3 +1,5 @@ +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + /* * Model Catalog REST API * @@ -6,8 +8,6 @@ * API version: v1alpha1 */ -// Generated by: OpenAPI Generator (https://openapi-generator.tech). DO NOT EDIT. - package openapi import ( @@ -36,7 +36,7 @@ func WithModelCatalogServiceAPIErrorHandler(h ErrorHandler) ModelCatalogServiceA } // NewModelCatalogServiceAPIController creates a default api controller -func NewModelCatalogServiceAPIController(s ModelCatalogServiceAPIServicer, opts ...ModelCatalogServiceAPIOption) Router { +func NewModelCatalogServiceAPIController(s ModelCatalogServiceAPIServicer, opts ...ModelCatalogServiceAPIOption) *ModelCatalogServiceAPIController { controller := &ModelCatalogServiceAPIController{ service: s, errorHandler: DefaultErrorHandler, @@ -52,22 +52,80 @@ func NewModelCatalogServiceAPIController(s ModelCatalogServiceAPIServicer, opts // Routes returns all the api routes for the ModelCatalogServiceAPIController func (c *ModelCatalogServiceAPIController) Routes() Routes { return Routes{ + "FindLabels": Route{ + "FindLabels", + strings.ToUpper("Get"), + "/api/model_catalog/v1alpha1/labels", + c.FindLabels, + }, "FindModels": Route{ + "FindModels", strings.ToUpper("Get"), "/api/model_catalog/v1alpha1/models", c.FindModels, }, + "FindModelsFilterOptions": Route{ + "FindModelsFilterOptions", + strings.ToUpper("Get"), + "/api/model_catalog/v1alpha1/models/filter_options", + c.FindModelsFilterOptions, + }, "FindSources": Route{ + "FindSources", strings.ToUpper("Get"), "/api/model_catalog/v1alpha1/sources", c.FindSources, }, "GetModel": Route{ + "GetModel", strings.ToUpper("Get"), "/api/model_catalog/v1alpha1/sources/{source_id}/models/*", c.GetModel, }, "GetAllModelArtifacts": Route{ + "GetAllModelArtifacts", + strings.ToUpper("Get"), + 
"/api/model_catalog/v1alpha1/sources/{source_id}/models/{model_name}/artifacts", + c.GetAllModelArtifacts, + }, + } +} + +// OrderedRoutes returns all the api routes in a deterministic order for the ModelCatalogServiceAPIController +func (c *ModelCatalogServiceAPIController) OrderedRoutes() []Route { + return []Route{ + Route{ + "FindLabels", + strings.ToUpper("Get"), + "/api/model_catalog/v1alpha1/labels", + c.FindLabels, + }, + Route{ + "FindModels", + strings.ToUpper("Get"), + "/api/model_catalog/v1alpha1/models", + c.FindModels, + }, + Route{ + "FindModelsFilterOptions", + strings.ToUpper("Get"), + "/api/model_catalog/v1alpha1/models/filter_options", + c.FindModelsFilterOptions, + }, + Route{ + "FindSources", + strings.ToUpper("Get"), + "/api/model_catalog/v1alpha1/sources", + c.FindSources, + }, + Route{ + "GetModel", + strings.ToUpper("Get"), + "/api/model_catalog/v1alpha1/sources/{source_id}/models/*", + c.GetModel, + }, + Route{ + "GetAllModelArtifacts", strings.ToUpper("Get"), "/api/model_catalog/v1alpha1/sources/{source_id}/models/{model_name}/artifacts", c.GetAllModelArtifacts, @@ -75,47 +133,209 @@ func (c *ModelCatalogServiceAPIController) Routes() Routes { } } +// FindLabels - List All CatalogLabels +func (c *ModelCatalogServiceAPIController) FindLabels(w http.ResponseWriter, r *http.Request) { + query, err := parseQuery(r.URL.RawQuery) + if err != nil { + c.errorHandler(w, r, &ParsingError{Err: err}, nil) + return + } + var pageSizeParam string + if query.Has("pageSize") { + param := query.Get("pageSize") + + pageSizeParam = param + } else { + } + var orderByParam string + if query.Has("orderBy") { + param := query.Get("orderBy") + + orderByParam = param + } else { + } + var sortOrderParam model.SortOrder + if query.Has("sortOrder") { + param := model.SortOrder(query.Get("sortOrder")) + + sortOrderParam = param + } else { + } + var nextPageTokenParam string + if query.Has("nextPageToken") { + param := query.Get("nextPageToken") + + nextPageTokenParam 
= param + } else { + } + result, err := c.service.FindLabels(r.Context(), pageSizeParam, orderByParam, sortOrderParam, nextPageTokenParam) + // If an error occurred, encode the error with the status code + if err != nil { + c.errorHandler(w, r, err, &result) + return + } + // If no error, encode the body and the result code + _ = EncodeJSONResponse(result.Body, &result.Code, w) +} + // FindModels - Search catalog models across sources. func (c *ModelCatalogServiceAPIController) FindModels(w http.ResponseWriter, r *http.Request) { - query := r.URL.Query() - sourceParam := query.Get("source") - qParam := query.Get("q") - pageSizeParam := query.Get("pageSize") - orderByParam := query.Get("orderBy") - sortOrderParam := query.Get("sortOrder") - nextPageTokenParam := query.Get("nextPageToken") - result, err := c.service.FindModels(r.Context(), sourceParam, qParam, pageSizeParam, model.OrderByField(orderByParam), model.SortOrder(sortOrderParam), nextPageTokenParam) + query, err := parseQuery(r.URL.RawQuery) + if err != nil { + c.errorHandler(w, r, &ParsingError{Err: err}, nil) + return + } + var sourceParam []string + if query.Has("source") { + sourceParam = strings.Split(query.Get("source"), ",") + } + var qParam string + if query.Has("q") { + param := query.Get("q") + + qParam = param + } else { + } + var sourceLabelParam []string + if query.Has("sourceLabel") { + sourceLabelParam = strings.Split(query.Get("sourceLabel"), ",") + } + var filterQueryParam string + if query.Has("filterQuery") { + param := query.Get("filterQuery") + + filterQueryParam = param + } else { + } + var pageSizeParam string + if query.Has("pageSize") { + param := query.Get("pageSize") + + pageSizeParam = param + } else { + } + var orderByParam model.OrderByField + if query.Has("orderBy") { + param := model.OrderByField(query.Get("orderBy")) + + orderByParam = param + } else { + } + var sortOrderParam model.SortOrder + if query.Has("sortOrder") { + param := model.SortOrder(query.Get("sortOrder")) + 
+ sortOrderParam = param + } else { + } + var nextPageTokenParam string + if query.Has("nextPageToken") { + param := query.Get("nextPageToken") + + nextPageTokenParam = param + } else { + } + result, err := c.service.FindModels(r.Context(), sourceParam, qParam, sourceLabelParam, filterQueryParam, pageSizeParam, orderByParam, sortOrderParam, nextPageTokenParam) + // If an error occurred, encode the error with the status code + if err != nil { + c.errorHandler(w, r, err, &result) + return + } + // If no error, encode the body and the result code + _ = EncodeJSONResponse(result.Body, &result.Code, w) +} + +// FindModelsFilterOptions - Lists fields and available options that can be used in `filterQuery` on the list models endpoint. +func (c *ModelCatalogServiceAPIController) FindModelsFilterOptions(w http.ResponseWriter, r *http.Request) { + result, err := c.service.FindModelsFilterOptions(r.Context()) // If an error occurred, encode the error with the status code if err != nil { c.errorHandler(w, r, err, &result) return } // If no error, encode the body and the result code - EncodeJSONResponse(result.Body, &result.Code, w) + _ = EncodeJSONResponse(result.Body, &result.Code, w) } // FindSources - List All CatalogSources func (c *ModelCatalogServiceAPIController) FindSources(w http.ResponseWriter, r *http.Request) { - query := r.URL.Query() - nameParam := query.Get("name") - pageSizeParam := query.Get("pageSize") - orderByParam := query.Get("orderBy") - sortOrderParam := query.Get("sortOrder") - nextPageTokenParam := query.Get("nextPageToken") - result, err := c.service.FindSources(r.Context(), nameParam, pageSizeParam, model.OrderByField(orderByParam), model.SortOrder(sortOrderParam), nextPageTokenParam) + query, err := parseQuery(r.URL.RawQuery) + if err != nil { + c.errorHandler(w, r, &ParsingError{Err: err}, nil) + return + } + var nameParam string + if query.Has("name") { + param := query.Get("name") + + nameParam = param + } else { + } + var pageSizeParam string + 
if query.Has("pageSize") { + param := query.Get("pageSize") + + pageSizeParam = param + } else { + } + var orderByParam model.OrderByField + if query.Has("orderBy") { + param := model.OrderByField(query.Get("orderBy")) + + orderByParam = param + } else { + } + var sortOrderParam model.SortOrder + if query.Has("sortOrder") { + param := model.SortOrder(query.Get("sortOrder")) + + sortOrderParam = param + } else { + } + var nextPageTokenParam string + if query.Has("nextPageToken") { + param := query.Get("nextPageToken") + + nextPageTokenParam = param + } else { + } + result, err := c.service.FindSources(r.Context(), nameParam, pageSizeParam, orderByParam, sortOrderParam, nextPageTokenParam) // If an error occurred, encode the error with the status code if err != nil { c.errorHandler(w, r, err, &result) return } // If no error, encode the body and the result code - EncodeJSONResponse(result.Body, &result.Code, w) + _ = EncodeJSONResponse(result.Body, &result.Code, w) } // GetModel - Get a `CatalogModel`. 
func (c *ModelCatalogServiceAPIController) GetModel(w http.ResponseWriter, r *http.Request) { sourceIdParam := chi.URLParam(r, "source_id") + if sourceIdParam == "" { + c.errorHandler(w, r, &RequiredError{"source_id"}, nil) + return + } modelNameParam := chi.URLParam(r, "*") + if modelNameParam == "" { + c.errorHandler(w, r, &RequiredError{"*"}, nil) + return + } + + // Special handling for getModel to delegate /artifacts requests to getAllModelArtifacts + // The wildcard /* pattern catches /artifacts requests, but we want those to go to GetAllModelArtifacts + if strings.HasSuffix(r.URL.Path, "/artifacts") { + // Extract the model name by removing the /artifacts suffix + modelName := strings.TrimSuffix(modelNameParam, "/artifacts") + + // Add the model_name parameter to the route context so GetAllModelArtifacts can access it + chi.RouteContext(r.Context()).URLParams.Add("model_name", modelName) + + // Call the GetAllModelArtifacts handler directly + c.GetAllModelArtifacts(w, r) + return + } + result, err := c.service.GetModel(r.Context(), sourceIdParam, modelNameParam) // If an error occurred, encode the error with the status code if err != nil { @@ -123,19 +343,93 @@ func (c *ModelCatalogServiceAPIController) GetModel(w http.ResponseWriter, r *ht return } // If no error, encode the body and the result code - EncodeJSONResponse(result.Body, &result.Code, w) + _ = EncodeJSONResponse(result.Body, &result.Code, w) } -// GetAllModelArtifacts - List CatalogModelArtifacts. +// GetAllModelArtifacts - List CatalogArtifacts. 
func (c *ModelCatalogServiceAPIController) GetAllModelArtifacts(w http.ResponseWriter, r *http.Request) { + query, err := parseQuery(r.URL.RawQuery) + if err != nil { + c.errorHandler(w, r, &ParsingError{Err: err}, nil) + return + } sourceIdParam := chi.URLParam(r, "source_id") + if sourceIdParam == "" { + c.errorHandler(w, r, &RequiredError{"source_id"}, nil) + return + } modelNameParam := chi.URLParam(r, "model_name") - result, err := c.service.GetAllModelArtifacts(r.Context(), sourceIdParam, modelNameParam) + if modelNameParam == "" { + c.errorHandler(w, r, &RequiredError{"model_name"}, nil) + return + } + var artifactTypeParam []model.ArtifactTypeQueryParam + if query.Has("artifactType") { + paramSplits := strings.Split(query.Get("artifactType"), ",") + artifactTypeParam = make([]model.ArtifactTypeQueryParam, 0, len(paramSplits)) + for _, param := range paramSplits { + paramEnum, err := model.NewArtifactTypeQueryParamFromValue(param) + if err != nil { + c.errorHandler(w, r, &ParsingError{Param: "artifactType", Err: err}, nil) + return + } + artifactTypeParam = append(artifactTypeParam, *paramEnum) + } + } + var artifactType2Param []model.ArtifactTypeQueryParam + if query.Has("artifact_type") { + paramSplits := strings.Split(query.Get("artifact_type"), ",") + artifactType2Param = make([]model.ArtifactTypeQueryParam, 0, len(paramSplits)) + for _, param := range paramSplits { + paramEnum, err := model.NewArtifactTypeQueryParamFromValue(param) + if err != nil { + c.errorHandler(w, r, &ParsingError{Param: "artifact_type", Err: err}, nil) + return + } + artifactType2Param = append(artifactType2Param, *paramEnum) + } + } + var filterQueryParam string + if query.Has("filterQuery") { + param := query.Get("filterQuery") + + filterQueryParam = param + } else { + } + var pageSizeParam string + if query.Has("pageSize") { + param := query.Get("pageSize") + + pageSizeParam = param + } else { + } + var orderByParam string + if query.Has("orderBy") { + param := 
query.Get("orderBy") + + orderByParam = param + } else { + } + var sortOrderParam model.SortOrder + if query.Has("sortOrder") { + param := model.SortOrder(query.Get("sortOrder")) + + sortOrderParam = param + } else { + } + var nextPageTokenParam string + if query.Has("nextPageToken") { + param := query.Get("nextPageToken") + + nextPageTokenParam = param + } else { + } + result, err := c.service.GetAllModelArtifacts(r.Context(), sourceIdParam, modelNameParam, artifactTypeParam, artifactType2Param, filterQueryParam, pageSizeParam, orderByParam, sortOrderParam, nextPageTokenParam) // If an error occurred, encode the error with the status code if err != nil { c.errorHandler(w, r, err, &result) return } // If no error, encode the body and the result code - EncodeJSONResponse(result.Body, &result.Code, w) + _ = EncodeJSONResponse(result.Body, &result.Code, w) } diff --git a/catalog/internal/server/openapi/api_model_catalog_service_service.go b/catalog/internal/server/openapi/api_model_catalog_service_service.go index 37f2cc52a0..1788cc40ba 100644 --- a/catalog/internal/server/openapi/api_model_catalog_service_service.go +++ b/catalog/internal/server/openapi/api_model_catalog_service_service.go @@ -8,87 +8,220 @@ import ( "net/http" "net/url" "slices" + "strconv" "strings" "github.com/kubeflow/model-registry/catalog/internal/catalog" model "github.com/kubeflow/model-registry/catalog/pkg/openapi" + "github.com/kubeflow/model-registry/pkg/api" ) // ModelCatalogServiceAPIService is a service that implements the logic for the ModelCatalogServiceAPIServicer // This service should implement the business logic for every endpoint for the ModelCatalogServiceAPI s.coreApi. // Include any external packages or services that will be required by this service. 
type ModelCatalogServiceAPIService struct { - sources *catalog.SourceCollection + provider catalog.APIProvider + sources *catalog.SourceCollection + labels *catalog.LabelCollection } // GetAllModelArtifacts retrieves all model artifacts for a given model from the specified source. -func (m *ModelCatalogServiceAPIService) GetAllModelArtifacts(ctx context.Context, sourceID string, name string) (ImplResponse, error) { - source, ok := m.sources.Get(sourceID) - if !ok { - return notFound("Unknown source"), nil +func (m *ModelCatalogServiceAPIService) GetAllModelArtifacts(ctx context.Context, sourceID string, modelName string, artifactType []model.ArtifactTypeQueryParam, artifactType2 []model.ArtifactTypeQueryParam, filterQuery string, pageSize string, orderBy string, sortOrder model.SortOrder, nextPageToken string) (ImplResponse, error) { + // Handle multiple artifact_type parameters (snake case - deprecated, will be removed in future) + for _, v := range artifactType2 { + if v != "" { + artifactType = append(artifactType, v) + } + } + + if newName, err := url.PathUnescape(modelName); err == nil { + modelName = newName + } + + var err error + pageSizeInt := int32(10) + + if pageSize != "" { + parsed, err := strconv.ParseInt(pageSize, 10, 32) + if err != nil { + return Response(http.StatusBadRequest, err), err + } + pageSizeInt = int32(parsed) } - if newName, err := url.PathUnescape(name); err == nil { - name = newName + // Handle multiple artifact types + var artifactTypesFilter []string + + if len(artifactType) > 0 { + // Convert slice of ArtifactTypeQueryParam to slice of strings + artifactTypesFilter = make([]string, len(artifactType)) + for i, at := range artifactType { + artifactTypesFilter[i] = string(at) + } } - artifacts, err := source.Provider.GetArtifacts(ctx, name) + artifacts, err := m.provider.GetArtifacts(ctx, modelName, sourceID, catalog.ListArtifactsParams{ + FilterQuery: filterQuery, + ArtifactTypesFilter: artifactTypesFilter, + PageSize: pageSizeInt, + 
OrderBy: orderBy, + SortOrder: sortOrder, + NextPageToken: &nextPageToken, + }) if err != nil { - return Response(http.StatusInternalServerError, err), err + statusCode := api.ErrToStatus(err) + var errorMsg string + if errors.Is(err, api.ErrBadRequest) { + // Use the original error message which should be more specific + errorMsg = err.Error() + } else if errors.Is(err, api.ErrNotFound) { + errorMsg = fmt.Sprintf("No model found '%s' in source '%s'", modelName, sourceID) + } else { + errorMsg = err.Error() + } + return ErrorResponse(statusCode, errors.New(errorMsg)), err } return Response(http.StatusOK, artifacts), nil } -func (m *ModelCatalogServiceAPIService) FindModels(ctx context.Context, sourceID string, q string, pageSize string, orderBy model.OrderByField, sortOrder model.SortOrder, nextPageToken string) (ImplResponse, error) { - source, ok := m.sources.Get(sourceID) - if !ok { - return notFound("Unknown source"), errors.New("Unknown source") +func (m *ModelCatalogServiceAPIService) FindLabels(ctx context.Context, pageSize string, orderBy string, sortOrder model.SortOrder, nextPageToken string) (ImplResponse, error) { + labels := m.labels.All() + if len(labels) > math.MaxInt32 { + err := errors.New("too many registered labels") + return ErrorResponse(http.StatusInternalServerError, err), err } - p, err := newPaginator[model.CatalogModel](pageSize, orderBy, sortOrder, nextPageToken) + // Wrap labels to make them sortable + sortableLabels := make([]sortableLabel, len(labels)) + for i, label := range labels { + sortableLabels[i] = sortableLabel{ + data: label, + index: i, // Keep original index for stable sort + id: generateLabelID(i), + } + } + + // Create paginator - use empty OrderByField since we don't use it for labels + paginator, err := newPaginator[sortableLabel](pageSize, model.OrderByField(""), sortOrder, nextPageToken) if err != nil { return ErrorResponse(http.StatusBadRequest, err), err } + // Create comparison function for labels using the string 
key + cmpFunc := genLabelCmpFunc(orderBy, sortOrder) + slices.SortStableFunc(sortableLabels, cmpFunc) + + // Paginate the sorted labels + pagedSortableLabels, next := paginator.Paginate(sortableLabels) + + // Convert map[string]string to model.CatalogLabel + pagedLabels := make([]model.CatalogLabel, len(pagedSortableLabels)) + for i, sl := range pagedSortableLabels { + // Extract the "name" field (required) + name, ok := sl.data["name"] + if !ok || name == "" { + err := fmt.Errorf("internal error: label at index %d missing required name field", i) + return ErrorResponse(http.StatusInternalServerError, err), err + } + + // Create CatalogLabel with name (which may be null) + var label *model.CatalogLabel + if nameStr, ok := name.(string); ok { + label = model.NewCatalogLabel(*model.NewNullableString(&nameStr)) + } else { + label = model.NewCatalogLabel(*model.NewNullableString(nil)) + } + + // Add all other properties to AdditionalProperties + label.AdditionalProperties = make(map[string]any) + for key, value := range sl.data { + if key != "name" { + label.AdditionalProperties[key] = value + } + } + + pagedLabels[i] = *label + } + + res := model.CatalogLabelList{ + PageSize: paginator.PageSize, + Items: pagedLabels, + Size: int32(len(pagedLabels)), // Number of items in current page, not total + NextPageToken: next.Token(), + } + return Response(http.StatusOK, res), nil +} + +func (m *ModelCatalogServiceAPIService) FindModels(ctx context.Context, sourceIDs []string, q string, sourceLabels []string, filterQuery string, pageSize string, orderBy model.OrderByField, sortOrder model.SortOrder, nextPageToken string) (ImplResponse, error) { + var err error + pageSizeInt := int32(10) + + if pageSize != "" { + parsed, err := strconv.ParseInt(pageSize, 10, 32) + if err != nil { + return Response(http.StatusBadRequest, err), err + } + pageSizeInt = int32(parsed) + } + + if len(sourceIDs) == 1 && sourceIDs[0] == "" { + sourceIDs = nil + } + if len(sourceLabels) == 1 && 
sourceLabels[0] == "" { + sourceLabels = nil + } + + if len(sourceIDs) > 0 && len(sourceLabels) > 0 { + err := fmt.Errorf("source and sourceLabel cannot be used together") + return Response(http.StatusBadRequest, err), err + } + listModelsParams := catalog.ListModelsParams{ - Query: q, - OrderBy: p.OrderBy, - SortOrder: p.SortOrder, + Query: q, + FilterQuery: filterQuery, + SourceIDs: sourceIDs, + SourceLabels: sourceLabels, + PageSize: pageSizeInt, + OrderBy: orderBy, + SortOrder: sortOrder, + NextPageToken: &nextPageToken, } - models, err := source.Provider.ListModels(ctx, listModelsParams) + models, err := m.provider.ListModels(ctx, listModelsParams) if err != nil { return ErrorResponse(http.StatusInternalServerError, err), err } - page, next := p.Paginate(models.Items) - - models.Items = page - models.PageSize = p.PageSize - models.NextPageToken = next.Token() - return Response(http.StatusOK, models), nil } -func (m *ModelCatalogServiceAPIService) GetModel(ctx context.Context, sourceID string, name string) (ImplResponse, error) { - if name, ok := strings.CutSuffix(name, "/artifacts"); ok { - return m.GetAllModelArtifacts(ctx, sourceID, name) +func (m *ModelCatalogServiceAPIService) FindModelsFilterOptions(ctx context.Context) (ImplResponse, error) { + filterOptions, err := m.provider.GetFilterOptions(ctx) + if err != nil { + return ErrorResponse(http.StatusInternalServerError, err), err } - source, ok := m.sources.Get(sourceID) - if !ok { - return notFound("Unknown source"), nil - } + return Response(http.StatusOK, filterOptions), nil +} - if newName, err := url.PathUnescape(name); err == nil { - name = newName +func (m *ModelCatalogServiceAPIService) GetModel(ctx context.Context, sourceID, modelName string) (ImplResponse, error) { + if newName, err := url.PathUnescape(modelName); err == nil { + modelName = newName } - model, err := source.Provider.GetModel(ctx, name) + model, err := m.provider.GetModel(ctx, modelName, sourceID) if err != nil { - return 
Response(http.StatusInternalServerError, err), err + statusCode := api.ErrToStatus(err) + var errorMsg string + if errors.Is(err, api.ErrNotFound) { + errorMsg = fmt.Sprintf("No model found '%s' in source '%s'", modelName, sourceID) + } else { + errorMsg = err.Error() + } + return ErrorResponse(statusCode, errors.New(errorMsg)), err } + if model == nil { return notFound("Unknown model or version"), nil } @@ -113,11 +246,11 @@ func (m *ModelCatalogServiceAPIService) FindSources(ctx context.Context, name st name = strings.ToLower(name) for _, v := range sources { - if !strings.Contains(strings.ToLower(v.Metadata.Name), name) { + if !strings.Contains(strings.ToLower(v.Name), name) { continue } - items = append(items, v.Metadata) + items = append(items, v) } cmpFunc, err := genCatalogCmpFunc(orderBy, sortOrder) @@ -126,14 +259,12 @@ func (m *ModelCatalogServiceAPIService) FindSources(ctx context.Context, name st } slices.SortStableFunc(items, cmpFunc) - total := int32(len(items)) - pagedItems, next := paginator.Paginate(items) res := model.CatalogSourceList{ PageSize: paginator.PageSize, Items: pagedItems, - Size: total, + Size: int32(len(pagedItems)), // Number of items in current page, not total NextPageToken: next.Token(), } return Response(http.StatusOK, res), nil @@ -164,12 +295,95 @@ func genCatalogCmpFunc(orderBy model.OrderByField, sortOrder model.SortOrder) (f } } +// generateLabelID creates a stable, unique ID for a label based on its index +func generateLabelID(index int) string { + return strconv.Itoa(index) +} + +// sortableLabel wraps a label map to make it sortable +type sortableLabel struct { + data map[string]any + index int // Original position for stable sort when key is missing + id string // Stable ID for pagination +} + +// SortValue implements the Sortable interface for labels +func (sl sortableLabel) SortValue(field model.OrderByField) string { + // Return ID for pagination purposes + if field == model.ORDERBYFIELD_ID { + return sl.id + } + // 
For other fields, labels use string keys directly in genLabelCmpFunc + return "" +} + +// genLabelCmpFunc generates a comparison function for sorting labels by a string key +func genLabelCmpFunc(orderByKey string, sortOrder model.SortOrder) func(sortableLabel, sortableLabel) int { + multiplier := 1 + switch model.SortOrder(strings.ToUpper(string(sortOrder))) { + case model.SORTORDER_DESC: + multiplier = -1 + case model.SORTORDER_ASC, "": + multiplier = 1 + } + + return func(a, b sortableLabel) int { + // If no orderBy key specified, maintain original order + if orderByKey == "" { + if a.index < b.index { + return -1 + } + if a.index > b.index { + return 1 + } + return 0 + } + + // Get values for the orderBy key + aValRaw, aHasKey := a.data[orderByKey] + bValRaw, bHasKey := b.data[orderByKey] + + var aVal string + if aHasKey { + aVal, aHasKey = aValRaw.(string) + } + var bVal string + if bHasKey { + bVal, bHasKey = bValRaw.(string) + } + + // If both have the key, compare their values + if aHasKey && bHasKey { + return multiplier * strings.Compare(aVal, bVal) + } + + // If only one has the key, put it first + if aHasKey && !bHasKey { + return -1 // a comes first + } + if !aHasKey && bHasKey { + return 1 // b comes first + } + + // If neither has the key, maintain original order + if a.index < b.index { + return -1 + } + if a.index > b.index { + return 1 + } + return 0 + } +} + var _ ModelCatalogServiceAPIServicer = &ModelCatalogServiceAPIService{} // NewModelCatalogServiceAPIService creates a default api service -func NewModelCatalogServiceAPIService(sources *catalog.SourceCollection) ModelCatalogServiceAPIServicer { +func NewModelCatalogServiceAPIService(provider catalog.APIProvider, sources *catalog.SourceCollection, labels *catalog.LabelCollection) ModelCatalogServiceAPIServicer { return &ModelCatalogServiceAPIService{ - sources: sources, + provider: provider, + sources: sources, + labels: labels, } } diff --git 
a/catalog/internal/server/openapi/api_model_catalog_service_service_test.go b/catalog/internal/server/openapi/api_model_catalog_service_service_test.go index 400001a4c6..f71941b26c 100644 --- a/catalog/internal/server/openapi/api_model_catalog_service_service_test.go +++ b/catalog/internal/server/openapi/api_model_catalog_service_service_test.go @@ -46,6 +46,7 @@ func TestFindModels(t *testing.T) { name string sourceID string mockModels map[string]*model.CatalogModel + filterQuery string q string pageSize string orderBy model.OrderByField @@ -181,15 +182,20 @@ func TestFindModels(t *testing.T) { }, }, { - name: "Invalid source ID", - sourceID: "unknown-source", - mockModels: map[string]*model.CatalogModel{}, - q: "", - pageSize: "10", - orderBy: model.ORDERBYFIELD_ID, - sortOrder: model.SORTORDER_ASC, - expectedStatus: http.StatusNotFound, - expectedModelList: nil, + name: "Invalid source ID", + sourceID: "unknown-source", + mockModels: map[string]*model.CatalogModel{}, + q: "", + pageSize: "10", + orderBy: model.ORDERBYFIELD_ID, + sortOrder: model.SORTORDER_ASC, + expectedStatus: http.StatusOK, // Changed from http.StatusNotFound to http.StatusOK with an empty list -- now the source ID is just a field in the CatalogModel + expectedModelList: &model.CatalogModelList{ + Items: []model.CatalogModel{}, + Size: 0, + PageSize: 10, + NextPageToken: "", + }, }, { name: "Invalid pageSize string", @@ -210,12 +216,19 @@ func TestFindModels(t *testing.T) { mockModels: map[string]*model.CatalogModel{ "modelA": modelA, }, - q: "", - pageSize: "10", - orderBy: "UNSUPPORTED_FIELD", - sortOrder: model.SORTORDER_ASC, - expectedStatus: http.StatusBadRequest, - expectedModelList: nil, + q: "", + pageSize: "10", + orderBy: "UNSUPPORTED_FIELD", + sortOrder: model.SORTORDER_ASC, + expectedStatus: http.StatusOK, // Changed from http.StatusBadRequest to http.StatusOK -- in model registry we fallback to ID if the order by field is unsupported + expectedModelList: &model.CatalogModelList{ + 
Items: []model.CatalogModel{ + *modelA, + }, + Size: 1, + PageSize: 10, + NextPageToken: "", + }, }, { name: "Unsupported sortOrder field", @@ -223,12 +236,19 @@ func TestFindModels(t *testing.T) { mockModels: map[string]*model.CatalogModel{ "modelA": modelA, }, - q: "", - pageSize: "10", - orderBy: model.ORDERBYFIELD_ID, - sortOrder: "UNSUPPORTED_ORDER", - expectedStatus: http.StatusBadRequest, - expectedModelList: nil, + q: "", + pageSize: "10", + orderBy: model.ORDERBYFIELD_ID, + sortOrder: "UNSUPPORTED_ORDER", + expectedStatus: http.StatusOK, // Changed from http.StatusBadRequest to http.StatusOK -- in model registry we fallback to ASC if the sort order field is unsupported + expectedModelList: &model.CatalogModelList{ + Items: []model.CatalogModel{ + *modelA, + }, + Size: 1, + PageSize: 10, + NextPageToken: "", + }, }, { name: "Empty models in source", @@ -269,20 +289,27 @@ func TestFindModels(t *testing.T) { for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { // Create mock source collection - sources := catalog.NewSourceCollection(map[string]catalog.CatalogSource{ - "source1": { - Metadata: model.CatalogSource{Id: "source1", Name: "Test Source 1"}, - Provider: &mockModelProvider{ - models: tc.mockModels, - }, + sources := catalog.NewSourceCollection() + sources.Merge("", + map[string]model.CatalogSource{ + "source1": model.CatalogSource{Id: "source1", Name: "Test Source 1"}, }, - }) - service := NewModelCatalogServiceAPIService(sources) + ) + + sourceLabels := catalog.NewLabelCollection() + + provider := &mockModelProvider{ + models: tc.mockModels, + } + + service := NewModelCatalogServiceAPIService(provider, sources, sourceLabels) resp, err := service.FindModels( context.Background(), - tc.sourceID, + []string{tc.sourceID}, tc.q, + []string{""}, + tc.filterQuery, tc.pageSize, tc.orderBy, tc.sortOrder, @@ -292,7 +319,7 @@ func TestFindModels(t *testing.T) { assert.Equal(t, tc.expectedStatus, resp.Code) if tc.expectedStatus != http.StatusOK { - 
assert.NotNil(t, err) + assert.Error(t, err) return } @@ -317,7 +344,7 @@ func TestFindSources(t *testing.T) { trueValue := true testCases := []struct { name string - catalogs map[string]catalog.CatalogSource + catalogs map[string]model.CatalogSource nameFilter string pageSize string orderBy model.OrderByField @@ -327,10 +354,11 @@ func TestFindSources(t *testing.T) { expectedSize int32 expectedItems int checkSorting bool + expectedLabels int }{ { name: "Empty catalog list", - catalogs: map[string]catalog.CatalogSource{}, + catalogs: map[string]model.CatalogSource{}, nameFilter: "", pageSize: "10", orderBy: model.ORDERBYFIELD_ID, @@ -341,10 +369,8 @@ func TestFindSources(t *testing.T) { }, { name: "Single catalog", - catalogs: map[string]catalog.CatalogSource{ - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog1": model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, }, nameFilter: "", pageSize: "10", @@ -356,16 +382,10 @@ func TestFindSources(t *testing.T) { }, { name: "Multiple catalogs with no filter", - catalogs: map[string]catalog.CatalogSource{ - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, - }, - "catalog2": { - Metadata: model.CatalogSource{Id: "catalog2", Name: "Test Catalog 2", Enabled: &trueValue}, - }, - "catalog3": { - Metadata: model.CatalogSource{Id: "catalog3", Name: "Another Catalog", Enabled: &trueValue}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog1": model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, + "catalog2": model.CatalogSource{Id: "catalog2", Name: "Test Catalog 2", Enabled: &trueValue}, + "catalog3": model.CatalogSource{Id: "catalog3", Name: "Another Catalog", Enabled: &trueValue}, }, nameFilter: "", pageSize: "10", @@ -377,16 +397,10 @@ func TestFindSources(t *testing.T) { }, { name: 
"Filter by name", - catalogs: map[string]catalog.CatalogSource{ - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, - }, - "catalog2": { - Metadata: model.CatalogSource{Id: "catalog2", Name: "Test Catalog 2", Enabled: &trueValue}, - }, - "catalog3": { - Metadata: model.CatalogSource{Id: "catalog3", Name: "Another Catalog", Enabled: &trueValue}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog1": model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, + "catalog2": model.CatalogSource{Id: "catalog2", Name: "Test Catalog 2", Enabled: &trueValue}, + "catalog3": model.CatalogSource{Id: "catalog3", Name: "Another Catalog", Enabled: &trueValue}, }, nameFilter: "Test", pageSize: "10", @@ -398,16 +412,10 @@ func TestFindSources(t *testing.T) { }, { name: "Filter by name case insensitive", - catalogs: map[string]catalog.CatalogSource{ - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, - }, - "catalog2": { - Metadata: model.CatalogSource{Id: "catalog2", Name: "Test Catalog 2", Enabled: &trueValue}, - }, - "catalog3": { - Metadata: model.CatalogSource{Id: "catalog3", Name: "Another Catalog", Enabled: &trueValue}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog1": model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, + "catalog2": model.CatalogSource{Id: "catalog2", Name: "Test Catalog 2", Enabled: &trueValue}, + "catalog3": model.CatalogSource{Id: "catalog3", Name: "Another Catalog", Enabled: &trueValue}, }, nameFilter: "test", pageSize: "10", @@ -419,34 +427,24 @@ func TestFindSources(t *testing.T) { }, { name: "Pagination - limit results", - catalogs: map[string]catalog.CatalogSource{ - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, - }, - "catalog2": { - Metadata: model.CatalogSource{Id: "catalog2", Name: "Test Catalog 2", 
Enabled: &trueValue}, - }, - "catalog3": { - Metadata: model.CatalogSource{Id: "catalog3", Name: "Another Catalog", Enabled: &trueValue}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog1": model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, + "catalog2": model.CatalogSource{Id: "catalog2", Name: "Test Catalog 2", Enabled: &trueValue}, + "catalog3": model.CatalogSource{Id: "catalog3", Name: "Another Catalog", Enabled: &trueValue}, }, nameFilter: "", pageSize: "2", orderBy: model.ORDERBYFIELD_ID, sortOrder: model.SORTORDER_ASC, expectedStatus: http.StatusOK, - expectedSize: 3, // Total size should be 3 - expectedItems: 2, // But only 2 items returned due to page size + expectedSize: 2, // Size is the number of items in current page + expectedItems: 2, // 2 items returned due to page size }, { name: "Default page size", - catalogs: map[string]catalog.CatalogSource{ - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, - }, - "catalog2": { - Metadata: model.CatalogSource{Id: "catalog2", Name: "Test Catalog 2", Enabled: &trueValue}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog1": model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, + "catalog2": model.CatalogSource{Id: "catalog2", Name: "Test Catalog 2", Enabled: &trueValue}, }, nameFilter: "", pageSize: "", // Empty to test default @@ -458,10 +456,8 @@ func TestFindSources(t *testing.T) { }, { name: "Invalid page size", - catalogs: map[string]catalog.CatalogSource{ - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog1": model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Enabled: &trueValue}, }, nameFilter: "", pageSize: "invalid", @@ -471,16 +467,10 @@ func TestFindSources(t *testing.T) { }, { name: "Sort by ID ascending", - catalogs: 
map[string]catalog.CatalogSource{ - "catalog2": { - Metadata: model.CatalogSource{Id: "catalog2", Name: "B Catalog", Enabled: &trueValue}, - }, - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "A Catalog", Enabled: &trueValue}, - }, - "catalog3": { - Metadata: model.CatalogSource{Id: "catalog3", Name: "C Catalog", Enabled: &trueValue}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog2": model.CatalogSource{Id: "catalog2", Name: "B Catalog", Enabled: &trueValue}, + "catalog1": model.CatalogSource{Id: "catalog1", Name: "A Catalog", Enabled: &trueValue}, + "catalog3": model.CatalogSource{Id: "catalog3", Name: "C Catalog", Enabled: &trueValue}, }, nameFilter: "", pageSize: "10", @@ -493,16 +483,10 @@ func TestFindSources(t *testing.T) { }, { name: "Sort by ID descending", - catalogs: map[string]catalog.CatalogSource{ - "catalog2": { - Metadata: model.CatalogSource{Id: "catalog2", Name: "B Catalog", Enabled: &trueValue}, - }, - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "A Catalog", Enabled: &trueValue}, - }, - "catalog3": { - Metadata: model.CatalogSource{Id: "catalog3", Name: "C Catalog", Enabled: &trueValue}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog2": model.CatalogSource{Id: "catalog2", Name: "B Catalog", Enabled: &trueValue}, + "catalog1": model.CatalogSource{Id: "catalog1", Name: "A Catalog", Enabled: &trueValue}, + "catalog3": model.CatalogSource{Id: "catalog3", Name: "C Catalog", Enabled: &trueValue}, }, nameFilter: "", pageSize: "10", @@ -515,16 +499,10 @@ func TestFindSources(t *testing.T) { }, { name: "Sort by name ascending", - catalogs: map[string]catalog.CatalogSource{ - "catalog2": { - Metadata: model.CatalogSource{Id: "catalog2", Name: "B Catalog", Enabled: &trueValue}, - }, - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "A Catalog", Enabled: &trueValue}, - }, - "catalog3": { - Metadata: model.CatalogSource{Id: "catalog3", Name: "C Catalog", Enabled: 
&trueValue}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog2": model.CatalogSource{Id: "catalog2", Name: "B Catalog", Enabled: &trueValue}, + "catalog1": model.CatalogSource{Id: "catalog1", Name: "A Catalog", Enabled: &trueValue}, + "catalog3": model.CatalogSource{Id: "catalog3", Name: "C Catalog", Enabled: &trueValue}, }, nameFilter: "", pageSize: "10", @@ -537,16 +515,10 @@ func TestFindSources(t *testing.T) { }, { name: "Sort by name descending", - catalogs: map[string]catalog.CatalogSource{ - "catalog2": { - Metadata: model.CatalogSource{Id: "catalog2", Name: "B Catalog", Enabled: &trueValue}, - }, - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "A Catalog", Enabled: &trueValue}, - }, - "catalog3": { - Metadata: model.CatalogSource{Id: "catalog3", Name: "C Catalog"}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog2": model.CatalogSource{Id: "catalog2", Name: "B Catalog", Enabled: &trueValue}, + "catalog1": model.CatalogSource{Id: "catalog1", Name: "A Catalog", Enabled: &trueValue}, + "catalog3": model.CatalogSource{Id: "catalog3", Name: "C Catalog"}, }, nameFilter: "", pageSize: "10", @@ -559,10 +531,8 @@ func TestFindSources(t *testing.T) { }, { name: "Invalid sort order", - catalogs: map[string]catalog.CatalogSource{ - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1"}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog1": model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1"}, }, nameFilter: "", pageSize: "10", @@ -572,10 +542,8 @@ func TestFindSources(t *testing.T) { }, { name: "Invalid order by field", - catalogs: map[string]catalog.CatalogSource{ - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1"}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog1": model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1"}, }, nameFilter: "", pageSize: "10", @@ -585,16 +553,10 @@ func TestFindSources(t *testing.T) { }, { name: 
"Default sort order (ID ascending)", - catalogs: map[string]catalog.CatalogSource{ - "catalog2": { - Metadata: model.CatalogSource{Id: "catalog2", Name: "B Catalog"}, - }, - "catalog1": { - Metadata: model.CatalogSource{Id: "catalog1", Name: "A Catalog"}, - }, - "catalog3": { - Metadata: model.CatalogSource{Id: "catalog3", Name: "C Catalog"}, - }, + catalogs: map[string]model.CatalogSource{ + "catalog2": model.CatalogSource{Id: "catalog2", Name: "B Catalog"}, + "catalog1": model.CatalogSource{Id: "catalog1", Name: "A Catalog"}, + "catalog3": model.CatalogSource{Id: "catalog3", Name: "C Catalog"}, }, nameFilter: "", pageSize: "10", @@ -605,13 +567,33 @@ func TestFindSources(t *testing.T) { expectedItems: 3, checkSorting: true, }, + { + name: "Labels should be returned if set", + catalogs: map[string]model.CatalogSource{ + "catalog1": model.CatalogSource{Id: "catalog1", Name: "Test Catalog 1", Labels: []string{"label1", "label2"}}, + "catalog2": model.CatalogSource{Id: "catalog2", Name: "Test Catalog 2", Labels: []string{"label3", "label4"}}, + "catalog3": model.CatalogSource{Id: "catalog3", Name: "Test Catalog 3", Labels: []string{"label5", "label6"}}, + }, + nameFilter: "", + pageSize: "10", + orderBy: model.ORDERBYFIELD_ID, + sortOrder: model.SORTORDER_ASC, + expectedStatus: http.StatusOK, + expectedSize: 3, + expectedItems: 3, + checkSorting: true, + expectedLabels: 6, + }, } // Run test cases for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { // Create service with test catalogs - service := NewModelCatalogServiceAPIService(catalog.NewSourceCollection(tc.catalogs)) + sources := catalog.NewSourceCollection() + sources.Merge("", tc.catalogs) + sourceLabels := catalog.NewLabelCollection() + service := NewModelCatalogServiceAPIService(&mockModelProvider{}, sources, sourceLabels) // Call FindSources resp, err := service.FindSources( @@ -688,6 +670,309 @@ func TestFindSources(t *testing.T) { } } } + + labels := make([]string, 0) + for _, item := range 
sourceList.Items { + labels = append(labels, item.Labels...) + } + assert.Equal(t, tc.expectedLabels, len(labels)) + }) + } +} + +func TestFindLabels(t *testing.T) { + testCases := []struct { + name string + labels []map[string]any + pageSize string + orderBy string + sortOrder model.SortOrder + nextPageToken string + expectedStatus int + expectedSize int32 + expectedItems int + checkSorting bool + checkOrderByKey string + expectNextToken bool + }{ + { + name: "Empty labels list", + labels: []map[string]any{}, + pageSize: "10", + orderBy: "", + sortOrder: model.SORTORDER_ASC, + expectedStatus: http.StatusOK, + expectedSize: 0, + expectedItems: 0, + expectNextToken: false, + }, + { + name: "Single label", + labels: []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One"}, + }, + pageSize: "10", + orderBy: "", + sortOrder: model.SORTORDER_ASC, + expectedStatus: http.StatusOK, + expectedSize: 1, + expectedItems: 1, + expectNextToken: false, + }, + { + name: "Multiple labels", + labels: []map[string]any{ + {"name": "labelNameOne", "displayName": "Label Name One"}, + {"name": "community", "displayName": "Community Models"}, + {"name": "enterprise", "displayName": "Enterprise"}, + }, + pageSize: "10", + orderBy: "", + sortOrder: model.SORTORDER_ASC, + expectedStatus: http.StatusOK, + expectedSize: 3, + expectedItems: 3, + expectNextToken: false, + }, + { + name: "Pagination - first page", + labels: []map[string]any{ + {"name": "label1", "displayName": "Label 1"}, + {"name": "label2", "displayName": "Label 2"}, + {"name": "label3", "displayName": "Label 3"}, + {"name": "label4", "displayName": "Label 4"}, + }, + pageSize: "2", + orderBy: "", + sortOrder: model.SORTORDER_ASC, + expectedStatus: http.StatusOK, + expectedSize: 2, + expectedItems: 2, + expectNextToken: true, + }, + { + name: "Pagination - last page", + labels: []map[string]any{ + {"name": "label1", "displayName": "Label 1"}, + {"name": "label2", "displayName": "Label 2"}, + {"name": 
"label3", "displayName": "Label 3"}, + }, + pageSize: "10", + orderBy: "", + sortOrder: model.SORTORDER_ASC, + expectedStatus: http.StatusOK, + expectedSize: 3, + expectedItems: 3, + expectNextToken: false, + }, + { + name: "Sort by name ascending", + labels: []map[string]any{ + {"name": "zebra", "displayName": "Zebra"}, + {"name": "alpha", "displayName": "Alpha"}, + {"name": "beta", "displayName": "Beta"}, + }, + pageSize: "10", + orderBy: "name", + sortOrder: model.SORTORDER_ASC, + expectedStatus: http.StatusOK, + expectedSize: 3, + expectedItems: 3, + checkSorting: true, + checkOrderByKey: "name", + expectNextToken: false, + }, + { + name: "Sort by name descending", + labels: []map[string]any{ + {"name": "alpha", "displayName": "Alpha"}, + {"name": "beta", "displayName": "Beta"}, + {"name": "zebra", "displayName": "Zebra"}, + }, + pageSize: "10", + orderBy: "name", + sortOrder: model.SORTORDER_DESC, + expectedStatus: http.StatusOK, + expectedSize: 3, + expectedItems: 3, + checkSorting: true, + checkOrderByKey: "name", + expectNextToken: false, + }, + { + name: "Sort by displayName", + labels: []map[string]any{ + {"name": "label1", "displayName": "Zebra Display"}, + {"name": "label2", "displayName": "Alpha Display"}, + {"name": "label3", "displayName": "Beta Display"}, + }, + pageSize: "10", + orderBy: "displayName", + sortOrder: model.SORTORDER_ASC, + expectedStatus: http.StatusOK, + expectedSize: 3, + expectedItems: 3, + checkSorting: true, + checkOrderByKey: "displayName", + expectNextToken: false, + }, + { + name: "Labels with missing sort key maintain order", + labels: []map[string]any{ + {"name": "has-priority", "priority": "high"}, + {"name": "no-priority-1"}, + {"name": "also-has-priority", "priority": "low"}, + {"name": "no-priority-2"}, + }, + pageSize: "10", + orderBy: "priority", + sortOrder: model.SORTORDER_ASC, + expectedStatus: http.StatusOK, + expectedSize: 4, + expectedItems: 4, + expectNextToken: false, + }, + { + name: "Default page size", + 
labels: []map[string]any{ + {"name": "label1"}, + {"name": "label2"}, + }, + pageSize: "", + orderBy: "", + sortOrder: "", + expectedStatus: http.StatusOK, + expectedSize: 2, + expectedItems: 2, + expectNextToken: false, + }, + { + name: "Invalid page size", + labels: []map[string]any{ + {"name": "label1"}, + }, + pageSize: "invalid", + orderBy: "", + sortOrder: "", + expectedStatus: http.StatusBadRequest, + }, + { + name: "Page size exactly matches items", + labels: []map[string]any{ + {"name": "label1"}, + {"name": "label2"}, + {"name": "label3"}, + }, + pageSize: "3", + orderBy: "", + sortOrder: "", + expectedStatus: http.StatusOK, + expectedSize: 3, + expectedItems: 3, + expectNextToken: false, + }, + } + + // Run test cases + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Create service with test labels + sources := catalog.NewSourceCollection() + labelCollection := catalog.NewLabelCollection() + labelCollection.Merge("test-source", tc.labels) + + service := NewModelCatalogServiceAPIService(&mockModelProvider{}, sources, labelCollection) + + // Call FindLabels + resp, err := service.FindLabels( + context.Background(), + tc.pageSize, + tc.orderBy, + tc.sortOrder, + tc.nextPageToken, + ) + + // Check response status + assert.Equal(t, tc.expectedStatus, resp.Code) + + // If we expect an error, we don't need to check the response body + if tc.expectedStatus != http.StatusOK { + assert.NotNil(t, err) + return + } + + // For successful responses, check the response body + require.NotNil(t, resp.Body) + + // Type assertion to access the CatalogLabelList + labelList, ok := resp.Body.(model.CatalogLabelList) + require.True(t, ok, "Response body should be a CatalogLabelList") + + // Check the size matches expected (should be current page size) + assert.Equal(t, tc.expectedSize, labelList.Size) + + // Check the number of items matches expected + assert.Equal(t, tc.expectedItems, len(labelList.Items)) + + // Check that page size is set correctly 
+ if tc.pageSize == "" { + // Default page size should be 10 + assert.Equal(t, int32(10), labelList.PageSize) + } else if pageSizeInt, err := strconv.ParseInt(tc.pageSize, 10, 32); err == nil { + assert.Equal(t, int32(pageSizeInt), labelList.PageSize) + } + + // Check next page token + if tc.expectNextToken { + assert.NotEmpty(t, labelList.NextPageToken, "Should have next page token") + } else { + assert.Empty(t, labelList.NextPageToken, "Should not have next page token") + } + + // Check sorting if required + if tc.checkSorting && len(labelList.Items) > 1 && tc.checkOrderByKey != "" { + for i := 0; i < len(labelList.Items)-1; i++ { + // Get value from either Name field or AdditionalProperties + var val1, val2 *string + var ok1, ok2 bool + + if tc.checkOrderByKey == "name" { + val1 = labelList.Items[i].Name.Get() + val2 = labelList.Items[i+1].Name.Get() + ok1 = val1 != nil + ok2 = val2 != nil + } else { + var v1, v2 interface{} + v1, ok1 = labelList.Items[i].AdditionalProperties[tc.checkOrderByKey] + v2, ok2 = labelList.Items[i+1].AdditionalProperties[tc.checkOrderByKey] + if ok1 { + val1, _ = v1.(*string) + ok1 = val1 != nil + } + if ok2 { + val2, _ = v2.(*string) + ok2 = val2 != nil + } + } + + // Skip if either doesn't have the key + if !ok1 || !ok2 { + continue + } + + if tc.sortOrder == model.SORTORDER_DESC { + assert.GreaterOrEqual(t, + *val1, + *val2, + "Labels should be sorted by %s in descending order", tc.checkOrderByKey) + } else { + assert.LessOrEqual(t, + *val1, + *val2, + "Labels should be sorted by %s in ascending order", tc.checkOrderByKey) + } + } + } }) } } @@ -695,11 +980,11 @@ func TestFindSources(t *testing.T) { // Define a mock model provider type mockModelProvider struct { models map[string]*model.CatalogModel - artifacts map[string][]model.CatalogModelArtifact + artifacts map[string][]model.CatalogArtifact } // Implement GetModel method for the mock provider -func (m *mockModelProvider) GetModel(ctx context.Context, name string) 
(*model.CatalogModel, error) { +func (m *mockModelProvider) GetModel(ctx context.Context, name string, sourceID string) (*model.CatalogModel, error) { model, exists := m.models[name] if !exists { return nil, nil @@ -741,30 +1026,49 @@ func (m *mockModelProvider) ListModels(ctx context.Context, params catalog.ListM return cmp < 0 }) - items := make([]model.CatalogModel, len(filteredModels)) - for i, mdl := range filteredModels { + totalSize := int32(len(filteredModels)) + pageSize := params.PageSize + if pageSize <= 0 { + pageSize = 10 + } + + // Apply pagination - limit items to page size + endIndex := int(pageSize) + if endIndex > len(filteredModels) { + endIndex = len(filteredModels) + } + + pagedModels := filteredModels[:endIndex] + items := make([]model.CatalogModel, len(pagedModels)) + for i, mdl := range pagedModels { items[i] = *mdl } + nextPageToken := "" + if len(filteredModels) > int(pageSize) { + lastItem := pagedModels[len(pagedModels)-1] + nextPageToken = (&stringCursor{Value: lastItem.Name, ID: lastItem.Name}).String() + } + return model.CatalogModelList{ Items: items, - Size: int32(len(items)), - PageSize: int32(len(items)), // Mock returns all filtered items as one "page" - NextPageToken: "", + Size: totalSize, + PageSize: pageSize, + NextPageToken: nextPageToken, }, nil } -func (m *mockModelProvider) GetArtifacts(ctx context.Context, name string) (*model.CatalogModelArtifactList, error) { +func (m *mockModelProvider) GetArtifacts(ctx context.Context, name string, sourceID string, params catalog.ListArtifactsParams) (model.CatalogArtifactList, error) { artifacts, exists := m.artifacts[name] if !exists { - return &model.CatalogModelArtifactList{ - Items: []model.CatalogModelArtifact{}, + return model.CatalogArtifactList{ + Items: []model.CatalogArtifact{}, Size: 0, PageSize: 0, // Or a default page size if applicable NextPageToken: "", }, nil } - return &model.CatalogModelArtifactList{ + return model.CatalogArtifactList{ Items: artifacts, Size: 
int32(len(artifacts)), PageSize: int32(len(artifacts)), @@ -772,26 +1076,30 @@ func (m *mockModelProvider) GetArtifacts(ctx context.Context, name string) (*mod }, nil } +func (m *mockModelProvider) GetFilterOptions(ctx context.Context) (*model.FilterOptionsList, error) { + emptyFilters := make(map[string]model.FilterOption) + return &model.FilterOptionsList{Filters: &emptyFilters}, nil +} + func TestGetModel(t *testing.T) { testCases := []struct { name string - sources map[string]catalog.CatalogSource + sources map[string]model.CatalogSource sourceID string modelName string expectedStatus int expectedModel *model.CatalogModel + provider catalog.APIProvider }{ { name: "Existing model in source", - sources: map[string]catalog.CatalogSource{ - "source1": { - Metadata: model.CatalogSource{Id: "source1", Name: "Test Source"}, - Provider: &mockModelProvider{ - models: map[string]*model.CatalogModel{ - "test-model": { - Name: "test-model", - }, - }, + sources: map[string]model.CatalogSource{ + "source1": model.CatalogSource{Id: "source1", Name: "Test Source"}, + }, + provider: &mockModelProvider{ + models: map[string]*model.CatalogModel{ + "test-model": { + Name: "test-model", }, }, }, @@ -804,10 +1112,11 @@ func TestGetModel(t *testing.T) { }, { name: "Non-existing source", - sources: map[string]catalog.CatalogSource{ - "source1": { - Metadata: model.CatalogSource{Id: "source1", Name: "Test Source"}, - }, + sources: map[string]model.CatalogSource{ + "source1": model.CatalogSource{Id: "source1", Name: "Test Source"}, + }, + provider: &mockModelProvider{ + models: map[string]*model.CatalogModel{}, }, sourceID: "source2", modelName: "test-model", @@ -816,13 +1125,11 @@ func TestGetModel(t *testing.T) { }, { name: "Existing source, non-existing model", - sources: map[string]catalog.CatalogSource{ - "source1": { - Metadata: model.CatalogSource{Id: "source1", Name: "Test Source"}, - Provider: &mockModelProvider{ - models: map[string]*model.CatalogModel{}, - }, - }, + sources: 
map[string]model.CatalogSource{ + "source1": model.CatalogSource{Id: "source1", Name: "Test Source"}, + }, + provider: &mockModelProvider{ + models: map[string]*model.CatalogModel{}, }, sourceID: "source1", modelName: "test-model", @@ -831,15 +1138,13 @@ func TestGetModel(t *testing.T) { }, { name: "Model name with an escaped slash and version", - sources: map[string]catalog.CatalogSource{ - "source1": { - Metadata: model.CatalogSource{Id: "source1", Name: "Test Source"}, - Provider: &mockModelProvider{ - models: map[string]*model.CatalogModel{ - "some/model:v1.0.0": { - Name: "some/model:v1.0.0", - }, - }, + sources: map[string]model.CatalogSource{ + "source1": model.CatalogSource{Id: "source1", Name: "Test Source"}, + }, + provider: &mockModelProvider{ + models: map[string]*model.CatalogModel{ + "some/model:v1.0.0": { + Name: "some/model:v1.0.0", }, }, }, @@ -855,7 +1160,10 @@ func TestGetModel(t *testing.T) { for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { // Create service with test sources - service := NewModelCatalogServiceAPIService(catalog.NewSourceCollection(tc.sources)) + sources := catalog.NewSourceCollection() + sources.Merge("", tc.sources) + sourceLabels := catalog.NewLabelCollection() + service := NewModelCatalogServiceAPIService(tc.provider, sources, sourceLabels) // Call GetModel resp, _ := service.GetModel( @@ -888,26 +1196,29 @@ func TestGetModel(t *testing.T) { func TestGetAllModelArtifacts(t *testing.T) { testCases := []struct { name string - sources map[string]catalog.CatalogSource + sources map[string]model.CatalogSource sourceID string modelName string expectedStatus int - expectedArtifacts []model.CatalogModelArtifact + expectedArtifacts []model.CatalogArtifact + provider catalog.APIProvider }{ { name: "Existing artifacts for model in source", - sources: map[string]catalog.CatalogSource{ - "source1": { - Metadata: model.CatalogSource{Id: "source1", Name: "Test Source"}, - Provider: &mockModelProvider{ - artifacts: 
map[string][]model.CatalogModelArtifact{ - "test-model": { - { - Uri: "s3://bucket/artifact1", - }, - { - Uri: "s3://bucket/artifact2", - }, + sources: map[string]model.CatalogSource{ + "source1": model.CatalogSource{Id: "source1", Name: "Test Source"}, + }, + provider: &mockModelProvider{ + artifacts: map[string][]model.CatalogArtifact{ + "test-model": { + { + CatalogModelArtifact: &model.CatalogModelArtifact{ + Uri: "s3://bucket/artifact1", + }, + }, + { + CatalogModelArtifact: &model.CatalogModelArtifact{ + Uri: "s3://bucket/artifact2", }, }, }, @@ -916,54 +1227,67 @@ func TestGetAllModelArtifacts(t *testing.T) { sourceID: "source1", modelName: "test-model", expectedStatus: http.StatusOK, - expectedArtifacts: []model.CatalogModelArtifact{ + expectedArtifacts: []model.CatalogArtifact{ { - Uri: "s3://bucket/artifact1", + CatalogModelArtifact: &model.CatalogModelArtifact{ + Uri: "s3://bucket/artifact1", + }, }, { - Uri: "s3://bucket/artifact2", + CatalogModelArtifact: &model.CatalogModelArtifact{ + Uri: "s3://bucket/artifact2", + }, }, }, }, { name: "Non-existing source", - sources: map[string]catalog.CatalogSource{ - "source1": { - Metadata: model.CatalogSource{Id: "source1", Name: "Test Source"}, - }, + sources: map[string]model.CatalogSource{ + "source1": model.CatalogSource{Id: "source1", Name: "Test Source"}, + }, + provider: &mockModelProvider{ + artifacts: map[string][]model.CatalogArtifact{}, }, sourceID: "source2", modelName: "test-model", - expectedStatus: http.StatusNotFound, - expectedArtifacts: nil, + expectedStatus: http.StatusOK, // Changed from http.StatusNotFound to http.StatusOK -- having the same behavior as the model registry + expectedArtifacts: []model.CatalogArtifact{}, }, { name: "Existing source, no artifacts for model", - sources: map[string]catalog.CatalogSource{ - "source1": { - Metadata: model.CatalogSource{Id: "source1", Name: "Test Source"}, - Provider: &mockModelProvider{ - artifacts: map[string][]model.CatalogModelArtifact{}, - }, - 
}, + sources: map[string]model.CatalogSource{ + "source1": model.CatalogSource{Id: "source1", Name: "Test Source"}, + }, + provider: &mockModelProvider{ + artifacts: map[string][]model.CatalogArtifact{}, }, sourceID: "source1", modelName: "test-model", expectedStatus: http.StatusOK, - expectedArtifacts: []model.CatalogModelArtifact{}, // Should be an empty slice, not nil + expectedArtifacts: []model.CatalogArtifact{}, // Should be an empty slice, not nil }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { // Create service with test sources - service := NewModelCatalogServiceAPIService(catalog.NewSourceCollection(tc.sources)) + sources := catalog.NewSourceCollection() + sources.Merge("", tc.sources) + sourceLabels := catalog.NewLabelCollection() + service := NewModelCatalogServiceAPIService(tc.provider, sources, sourceLabels) // Call GetAllModelArtifacts resp, _ := service.GetAllModelArtifacts( context.Background(), tc.sourceID, tc.modelName, + []model.ArtifactTypeQueryParam{}, + []model.ArtifactTypeQueryParam{}, + "", + "10", + string(model.ORDERBYFIELD_CREATE_TIME), + model.SORTORDER_ASC, + "", ) // Check response status @@ -978,8 +1302,8 @@ func TestGetAllModelArtifacts(t *testing.T) { require.NotNil(t, resp.Body) // Type assertion to access the list of artifacts - artifactList, ok := resp.Body.(*model.CatalogModelArtifactList) - require.True(t, ok, "Response body should be a CatalogModelArtifactList") + artifactList, ok := resp.Body.(model.CatalogArtifactList) + require.True(t, ok, "Response body should be a CatalogArtifactList") // Check the artifacts assert.Equal(t, tc.expectedArtifacts, artifactList.Items) @@ -987,3 +1311,45 @@ func TestGetAllModelArtifacts(t *testing.T) { }) } } + +func TestFindModelsFilterOptions(t *testing.T) { + testCases := []struct { + name string + provider catalog.APIProvider + expectedStatus int + expectedError bool + }{ + { + name: "Successfully retrieve filter options", + provider: &mockModelProvider{ + models: 
map[string]*model.CatalogModel{}, + }, + expectedStatus: http.StatusOK, + expectedError: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + sources := catalog.NewSourceCollection() + sourceLabels := catalog.NewLabelCollection() + service := NewModelCatalogServiceAPIService(tc.provider, sources, sourceLabels) + + resp, err := service.FindModelsFilterOptions(context.Background()) + + assert.Equal(t, tc.expectedStatus, resp.Code) + + if tc.expectedError { + assert.Error(t, err) + return + } + require.NotNil(t, resp.Body) + + // Type assertion to access the FilterOptionsList + filterOptions, ok := resp.Body.(*model.FilterOptionsList) + require.True(t, ok, "Response body should be a FilterOptionsList") + + require.NotNil(t, filterOptions.Filters) + }) + } +} diff --git a/catalog/internal/server/openapi/error.go b/catalog/internal/server/openapi/error.go index e7cfcc12cb..628fe717e9 100644 --- a/catalog/internal/server/openapi/error.go +++ b/catalog/internal/server/openapi/error.go @@ -1,3 +1,5 @@ +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + /* * Model Catalog REST API * @@ -6,8 +8,6 @@ * API version: v1alpha1 */ -// Generated by: OpenAPI Generator (https://openapi-generator.tech). DO NOT EDIT. 
- package openapi import ( @@ -21,7 +21,8 @@ var ErrTypeAssertionError = errors.New("unable to assert type") // ParsingError indicates that an error has occurred when parsing request parameters type ParsingError struct { - Err error + Param string + Err error } func (e *ParsingError) Unwrap() error { @@ -29,7 +30,11 @@ func (e *ParsingError) Unwrap() error { } func (e *ParsingError) Error() string { - return e.Err.Error() + if e.Param == "" { + return e.Err.Error() + } + + return e.Param + ": " + e.Err.Error() } // RequiredError indicates that an error has occurred when parsing request parameters @@ -47,15 +52,21 @@ type ErrorHandler func(w http.ResponseWriter, r *http.Request, err error, result // DefaultErrorHandler defines the default logic on how to handle errors from the controller. Any errors from parsing // request params will return a StatusBadRequest. Otherwise, the error code originating from the servicer will be used. -func DefaultErrorHandler(w http.ResponseWriter, r *http.Request, err error, result *ImplResponse) { - if _, ok := err.(*ParsingError); ok { +func DefaultErrorHandler(w http.ResponseWriter, _ *http.Request, err error, result *ImplResponse) { + var parsingErr *ParsingError + if ok := errors.As(err, &parsingErr); ok { // Handle parsing errors - EncodeJSONResponse(ErrorResponse(http.StatusBadRequest, err).Body, func(i int) *int { return &i }(http.StatusBadRequest), w) - } else if _, ok := err.(*RequiredError); ok { + _ = EncodeJSONResponse(ErrorResponse(http.StatusBadRequest, err).Body, func(i int) *int { return &i }(http.StatusBadRequest), w) + return + } + + var requiredErr *RequiredError + if ok := errors.As(err, &requiredErr); ok { // Handle missing required errors - EncodeJSONResponse(ErrorResponse(http.StatusBadRequest, err).Body, func(i int) *int { return &i }(http.StatusUnprocessableEntity), w) - } else { - // Handle all other errors - EncodeJSONResponse(result.Body, &result.Code, w) + _ = 
EncodeJSONResponse(ErrorResponse(http.StatusBadRequest, err).Body, func(i int) *int { return &i }(http.StatusUnprocessableEntity), w) + return } + + // Handle all other errors + _ = EncodeJSONResponse(result.Body, &result.Code, w) } diff --git a/catalog/internal/server/openapi/helpers.go b/catalog/internal/server/openapi/helpers.go index b6bff9d4ad..39d5ee9c23 100644 --- a/catalog/internal/server/openapi/helpers.go +++ b/catalog/internal/server/openapi/helpers.go @@ -1,3 +1,5 @@ +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + /* * Model Catalog REST API * @@ -6,17 +8,28 @@ * API version: v1alpha1 */ -// Generated by: OpenAPI Generator (https://openapi-generator.tech). DO NOT EDIT. - package openapi import ( + "encoding/json" + "errors" + "io" + "mime/multipart" "net/http" + "net/url" + "os" "reflect" + "strconv" + "strings" + "time" model "github.com/kubeflow/model-registry/pkg/openapi" ) +const errMsgRequiredMissing = "required parameter is missing" +const errMsgMinValueConstraint = "provided parameter is not respecting minimum value constraint" +const errMsgMaxValueConstraint = "provided parameter is not respecting maximum value constraint" + // Response return a ImplResponse struct filled func Response(code int, body interface{}) ImplResponse { return ImplResponse{ @@ -64,7 +77,7 @@ func AssertRecurseValueRequired[T any](value reflect.Value, callback func(T) err // If it is a slice we continue recursion case reflect.Slice: - for i := 0; i < value.Len(); i += 1 { + for i := 0; i < value.Len(); i++ { if err := AssertRecurseValueRequired(value.Index(i), callback); err != nil { return err } @@ -72,3 +85,284 @@ func AssertRecurseValueRequired[T any](value reflect.Value, callback func(T) err } return nil } + +// EncodeJSONResponse uses the json encoder to write an interface to the http response with an optional status code +func EncodeJSONResponse(i interface{}, status *int, w http.ResponseWriter) error { + wHeader := 
w.Header() + + f, ok := i.(*os.File) + if ok { + data, err := io.ReadAll(f) + if err != nil { + return err + } + wHeader.Set("Content-Type", http.DetectContentType(data)) + wHeader.Set("Content-Disposition", "attachment; filename="+f.Name()) + if status != nil { + w.WriteHeader(*status) + } else { + w.WriteHeader(http.StatusOK) + } + _, err = w.Write(data) + return err + } + wHeader.Set("Content-Type", "application/json; charset=UTF-8") + + if status != nil { + w.WriteHeader(*status) + } else { + w.WriteHeader(http.StatusOK) + } + + if i != nil { + return json.NewEncoder(w).Encode(i) + } + + return nil +} + +// ReadFormFileToTempFile reads file data from a request form and writes it to a temporary file +func ReadFormFileToTempFile(r *http.Request, key string) (*os.File, error) { + _, fileHeader, err := r.FormFile(key) + if err != nil { + return nil, err + } + + return readFileHeaderToTempFile(fileHeader) +} + +// ReadFormFilesToTempFiles reads files array data from a request form and writes it to a temporary files +func ReadFormFilesToTempFiles(r *http.Request, key string) ([]*os.File, error) { + if err := r.ParseMultipartForm(32 << 20); err != nil { + return nil, err + } + + files := make([]*os.File, 0, len(r.MultipartForm.File[key])) + + for _, fileHeader := range r.MultipartForm.File[key] { + file, err := readFileHeaderToTempFile(fileHeader) + if err != nil { + return nil, err + } + + files = append(files, file) + } + + return files, nil +} + +// readFileHeaderToTempFile reads multipart.FileHeader and writes it to a temporary file +func readFileHeaderToTempFile(fileHeader *multipart.FileHeader) (*os.File, error) { + formFile, err := fileHeader.Open() + if err != nil { + return nil, err + } + + defer formFile.Close() + + // Use .* as suffix, because the asterisk is a placeholder for the random value, + // and the period allows consumers of this file to remove the suffix to obtain the original file name + file, err := os.CreateTemp("", fileHeader.Filename+".*") + 
if err != nil { + return nil, err + } + + defer file.Close() + + _, err = io.Copy(file, formFile) + if err != nil { + return nil, err + } + + return file, nil +} + +func parseTimes(param string) ([]time.Time, error) { + splits := strings.Split(param, ",") + times := make([]time.Time, 0, len(splits)) + for _, v := range splits { + t, err := parseTime(v) + if err != nil { + return nil, err + } + times = append(times, t) + } + return times, nil +} + +// parseTime will parses a string parameter into a time.Time using the RFC3339 format +func parseTime(param string) (time.Time, error) { + if param == "" { + return time.Time{}, nil + } + return time.Parse(time.RFC3339, param) +} + +type Number interface { + ~int32 | ~int64 | ~float32 | ~float64 +} + +type ParseString[T Number | string | bool] func(v string) (T, error) + +// parseFloat64 parses a string parameter to an float64. +func parseFloat64(param string) (float64, error) { + if param == "" { + return 0, nil + } + + return strconv.ParseFloat(param, 64) +} + +// parseFloat32 parses a string parameter to an float32. +func parseFloat32(param string) (float32, error) { + if param == "" { + return 0, nil + } + + v, err := strconv.ParseFloat(param, 32) + return float32(v), err +} + +// parseInt64 parses a string parameter to an int64. +func parseInt64(param string) (int64, error) { + if param == "" { + return 0, nil + } + + return strconv.ParseInt(param, 10, 64) +} + +// parseInt32 parses a string parameter to an int32. +func parseInt32(param string) (int32, error) { + if param == "" { + return 0, nil + } + + val, err := strconv.ParseInt(param, 10, 32) + return int32(val), err +} + +// parseBool parses a string parameter to an bool. 
+func parseBool(param string) (bool, error) { + if param == "" { + return false, nil + } + + return strconv.ParseBool(param) +} + +type Operation[T Number | string | bool] func(actual string) (T, bool, error) + +func WithRequire[T Number | string | bool](parse ParseString[T]) Operation[T] { + var empty T + return func(actual string) (T, bool, error) { + if actual == "" { + return empty, false, errors.New(errMsgRequiredMissing) + } + + v, err := parse(actual) + return v, false, err + } +} + +func WithDefaultOrParse[T Number | string | bool](def T, parse ParseString[T]) Operation[T] { + return func(actual string) (T, bool, error) { + if actual == "" { + return def, true, nil + } + + v, err := parse(actual) + return v, false, err + } +} + +func WithParse[T Number | string | bool](parse ParseString[T]) Operation[T] { + return func(actual string) (T, bool, error) { + v, err := parse(actual) + return v, false, err + } +} + +type Constraint[T Number | string | bool] func(actual T) error + +func WithMinimum[T Number](expected T) Constraint[T] { + return func(actual T) error { + if actual < expected { + return errors.New(errMsgMinValueConstraint) + } + + return nil + } +} + +func WithMaximum[T Number](expected T) Constraint[T] { + return func(actual T) error { + if actual > expected { + return errors.New(errMsgMaxValueConstraint) + } + + return nil + } +} + +// parseNumericParameter parses a numeric parameter to its respective type. 
+func parseNumericParameter[T Number](param string, fn Operation[T], checks ...Constraint[T]) (T, error) { + v, ok, err := fn(param) + if err != nil { + return 0, err + } + + if !ok { + for _, check := range checks { + if err := check(v); err != nil { + return 0, err + } + } + } + + return v, nil +} + +// parseBoolParameter parses a string parameter to a bool +func parseBoolParameter(param string, fn Operation[bool]) (bool, error) { + v, _, err := fn(param) + return v, err +} + +// parseNumericArrayParameter parses a string parameter containing array of values to its respective type. +func parseNumericArrayParameter[T Number](param, delim string, required bool, fn Operation[T], checks ...Constraint[T]) ([]T, error) { + if param == "" { + if required { + return nil, errors.New(errMsgRequiredMissing) + } + + return nil, nil + } + + str := strings.Split(param, delim) + values := make([]T, len(str)) + + for i, s := range str { + v, ok, err := fn(s) + if err != nil { + return nil, err + } + + if !ok { + for _, check := range checks { + if err := check(v); err != nil { + return nil, err + } + } + } + + values[i] = v + } + + return values, nil +} + +// parseQuery parses query parameters and returns an error if any malformed value pairs are encountered. +func parseQuery(rawQuery string) (url.Values, error) { + return url.ParseQuery(rawQuery) +} diff --git a/catalog/internal/server/openapi/impl.go b/catalog/internal/server/openapi/impl.go index 23096a314a..36a7634744 100644 --- a/catalog/internal/server/openapi/impl.go +++ b/catalog/internal/server/openapi/impl.go @@ -1,3 +1,5 @@ +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + /* * Model Catalog REST API * @@ -6,8 +8,6 @@ * API version: v1alpha1 */ -// Generated by: OpenAPI Generator (https://openapi-generator.tech). DO NOT EDIT. 
- package openapi // ImplResponse defines an implementation response with error code and the associated body diff --git a/catalog/internal/server/openapi/logger.go b/catalog/internal/server/openapi/logger.go index e8b38e6977..fb3ebe69d0 100644 --- a/catalog/internal/server/openapi/logger.go +++ b/catalog/internal/server/openapi/logger.go @@ -1,3 +1,5 @@ +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + /* * Model Catalog REST API * @@ -6,13 +8,12 @@ * API version: v1alpha1 */ -// Generated by: OpenAPI Generator (https://openapi-generator.tech). DO NOT EDIT. - package openapi import ( - "github.com/go-chi/chi/v5/middleware" "net/http" + + "github.com/go-chi/chi/v5/middleware" ) func Logger(inner http.Handler) http.Handler { diff --git a/catalog/internal/server/openapi/routers.go b/catalog/internal/server/openapi/routers.go index f2c4ceec33..8d5a657775 100644 --- a/catalog/internal/server/openapi/routers.go +++ b/catalog/internal/server/openapi/routers.go @@ -1,3 +1,5 @@ +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + /* * Model Catalog REST API * @@ -6,30 +8,20 @@ * API version: v1alpha1 */ -// Generated by: OpenAPI Generator (https://openapi-generator.tech). DO NOT EDIT. 
- package openapi import ( - "encoding/json" - "errors" - "io" - "mime/multipart" "net/http" - "os" - "strconv" - "strings" "github.com/go-chi/chi/v5" - "github.com/go-chi/chi/v5/middleware" "github.com/go-chi/cors" - "github.com/golang/glog" ) //lint:file-ignore U1000 Ignore all unused code, it's generated // A Route defines the parameters for an api endpoint type Route struct { + Name string Method string Pattern string HandlerFunc http.HandlerFunc @@ -41,16 +33,13 @@ type Routes map[string]Route // Router defines the required methods for retrieving api routes type Router interface { Routes() Routes + OrderedRoutes() []Route } -const errMsgRequiredMissing = "required parameter is missing" -const errMsgMinValueConstraint = "provided parameter is not respecting minimum value constraint" -const errMsgMaxValueConstraint = "provided parameter is not respecting maximum value constraint" - // NewRouter creates a new router for any number of api routers func NewRouter(routers ...Router) chi.Router { router := chi.NewRouter() - router.Use(middleware.Logger) + router.Use(Logger) router.Use(cors.Handler(cors.Options{ AllowedOrigins: []string{"https://*", "http://*"}, AllowedMethods: []string{"GET", "POST", "PUT", "DELETE", "OPTIONS"}, @@ -60,251 +49,11 @@ func NewRouter(routers ...Router) chi.Router { MaxAge: 300, // Maximum value not ignored by any of major browsers })) for _, api := range routers { - for _, route := range api.Routes() { - handler := route.HandlerFunc + for _, route := range api.OrderedRoutes() { + var handler http.Handler = route.HandlerFunc router.Method(route.Method, route.Pattern, handler) } } return router } - -// EncodeJSONResponse uses the json encoder to write an interface to the http response with an optional status code -func EncodeJSONResponse(i interface{}, status *int, w http.ResponseWriter) { - w.Header().Set("Content-Type", "application/json; charset=UTF-8") - if status != nil { - w.WriteHeader(*status) - } else { - 
w.WriteHeader(http.StatusOK) - } - - if i != nil { - if err := json.NewEncoder(w).Encode(i); err != nil { - // FIXME: is it too late to inform the client of an error at this point?? - glog.Errorf("error encoding JSON response: %v", err) - } - } -} - -// ReadFormFileToTempFile reads file data from a request form and writes it to a temporary file -func ReadFormFileToTempFile(r *http.Request, key string) (*os.File, error) { - _, fileHeader, err := r.FormFile(key) - if err != nil { - return nil, err - } - - return readFileHeaderToTempFile(fileHeader) -} - -// ReadFormFilesToTempFiles reads files array data from a request form and writes it to a temporary files -func ReadFormFilesToTempFiles(r *http.Request, key string) ([]*os.File, error) { - if err := r.ParseMultipartForm(32 << 20); err != nil { - return nil, err - } - - files := make([]*os.File, 0, len(r.MultipartForm.File[key])) - - for _, fileHeader := range r.MultipartForm.File[key] { - file, err := readFileHeaderToTempFile(fileHeader) - if err != nil { - return nil, err - } - - files = append(files, file) - } - - return files, nil -} - -// readFileHeaderToTempFile reads multipart.FileHeader and writes it to a temporary file -func readFileHeaderToTempFile(fileHeader *multipart.FileHeader) (*os.File, error) { - formFile, err := fileHeader.Open() - if err != nil { - return nil, err - } - - //nolint:errcheck - defer formFile.Close() - - fileBytes, err := io.ReadAll(formFile) - if err != nil { - return nil, err - } - - file, err := os.CreateTemp("", fileHeader.Filename) - if err != nil { - return nil, err - } - - //nolint:errcheck - defer file.Close() - - // FIXME: return values are ignored!!! - _, _ = file.Write(fileBytes) - - return file, nil -} - -type Number interface { - ~int32 | ~int64 | ~float32 | ~float64 -} - -type ParseString[T Number | string | bool] func(v string) (T, error) - -// parseFloat64 parses a string parameter to an float64. 
-func parseFloat64(param string) (float64, error) { - if param == "" { - return 0, nil - } - - return strconv.ParseFloat(param, 64) -} - -// parseFloat32 parses a string parameter to an float32. -func parseFloat32(param string) (float32, error) { - if param == "" { - return 0, nil - } - - v, err := strconv.ParseFloat(param, 32) - return float32(v), err -} - -// parseInt64 parses a string parameter to an int64. -func parseInt64(param string) (int64, error) { - if param == "" { - return 0, nil - } - - return strconv.ParseInt(param, 10, 64) -} - -// parseInt32 parses a string parameter to an int32. -func parseInt32(param string) (int32, error) { - if param == "" { - return 0, nil - } - - val, err := strconv.ParseInt(param, 10, 32) - return int32(val), err -} - -// parseBool parses a string parameter to an bool. -func parseBool(param string) (bool, error) { - if param == "" { - return false, nil - } - - return strconv.ParseBool(param) -} - -type Operation[T Number | string | bool] func(actual string) (T, bool, error) - -func WithRequire[T Number | string | bool](parse ParseString[T]) Operation[T] { - var empty T - return func(actual string) (T, bool, error) { - if actual == "" { - return empty, false, errors.New(errMsgRequiredMissing) - } - - v, err := parse(actual) - return v, false, err - } -} - -func WithDefaultOrParse[T Number | string | bool](def T, parse ParseString[T]) Operation[T] { - return func(actual string) (T, bool, error) { - if actual == "" { - return def, true, nil - } - - v, err := parse(actual) - return v, false, err - } -} - -func WithParse[T Number | string | bool](parse ParseString[T]) Operation[T] { - return func(actual string) (T, bool, error) { - v, err := parse(actual) - return v, false, err - } -} - -type Constraint[T Number | string | bool] func(actual T) error - -func WithMinimum[T Number](expected T) Constraint[T] { - return func(actual T) error { - if actual < expected { - return errors.New(errMsgMinValueConstraint) - } - - return nil - } 
-} - -func WithMaximum[T Number](expected T) Constraint[T] { - return func(actual T) error { - if actual > expected { - return errors.New(errMsgMaxValueConstraint) - } - - return nil - } -} - -// parseNumericParameter parses a numeric parameter to its respective type. -func parseNumericParameter[T Number](param string, fn Operation[T], checks ...Constraint[T]) (T, error) { - v, ok, err := fn(param) - if err != nil { - return 0, err - } - - if !ok { - for _, check := range checks { - if err := check(v); err != nil { - return 0, err - } - } - } - - return v, nil -} - -// parseBoolParameter parses a string parameter to a bool -func parseBoolParameter(param string, fn Operation[bool]) (bool, error) { - v, _, err := fn(param) - return v, err -} - -// parseNumericArrayParameter parses a string parameter containing array of values to its respective type. -func parseNumericArrayParameter[T Number](param, delim string, required bool, fn Operation[T], checks ...Constraint[T]) ([]T, error) { - if param == "" { - if required { - return nil, errors.New(errMsgRequiredMissing) - } - - return nil, nil - } - - str := strings.Split(param, delim) - values := make([]T, len(str)) - - for i, s := range str { - v, ok, err := fn(s) - if err != nil { - return nil, err - } - - if !ok { - for _, check := range checks { - if err := check(v); err != nil { - return nil, err - } - } - } - - values[i] = v - } - - return values, nil -} diff --git a/catalog/internal/server/openapi/type_asserts.go b/catalog/internal/server/openapi/type_asserts.go index 6bed459987..6b866aae0b 100644 --- a/catalog/internal/server/openapi/type_asserts.go +++ b/catalog/internal/server/openapi/type_asserts.go @@ -36,6 +36,11 @@ func AssertBaseModelRequired(obj model.BaseModel) error { return nil } +// AssertBaseResourceConstraints checks if the values respects the defined constraints +func AssertBaseResourceConstraints(obj model.BaseResource) error { + return nil +} + // AssertBaseResourceDatesConstraints checks if the 
values respects the defined constraints func AssertBaseResourceDatesConstraints(obj model.BaseResourceDates) error { return nil @@ -67,18 +72,23 @@ func AssertBaseResourceListRequired(obj model.BaseResourceList) error { return nil } -// AssertCatalogModelArtifactConstraints checks if the values respects the defined constraints -func AssertCatalogModelArtifactConstraints(obj model.CatalogModelArtifact) error { +// AssertBaseResourceRequired checks if the required fields are not zero-ed +func AssertBaseResourceRequired(obj model.BaseResource) error { return nil } -// AssertCatalogModelArtifactListConstraints checks if the values respects the defined constraints -func AssertCatalogModelArtifactListConstraints(obj model.CatalogModelArtifactList) error { +// AssertCatalogArtifactListConstraints checks if the values respects the defined constraints +func AssertCatalogArtifactListConstraints(obj model.CatalogArtifactList) error { + for _, el := range obj.Items { + if err := AssertCatalogArtifactConstraints(el); err != nil { + return err + } + } return nil } -// AssertCatalogModelArtifactListRequired checks if the required fields are not zero-ed -func AssertCatalogModelArtifactListRequired(obj model.CatalogModelArtifactList) error { +// AssertCatalogArtifactListRequired checks if the required fields are not zero-ed +func AssertCatalogArtifactListRequired(obj model.CatalogArtifactList) error { elements := map[string]interface{}{ "nextPageToken": obj.NextPageToken, "pageSize": obj.PageSize, @@ -92,17 +102,84 @@ func AssertCatalogModelArtifactListRequired(obj model.CatalogModelArtifactList) } for _, el := range obj.Items { - if err := AssertCatalogModelArtifactRequired(el); err != nil { + if err := AssertCatalogArtifactRequired(el); err != nil { return err } } return nil } +// AssertCatalogLabelConstraints checks if the values respects the defined constraints +func AssertCatalogLabelConstraints(obj model.CatalogLabel) error { + return nil +} + +// 
AssertCatalogLabelListConstraints checks if the values respects the defined constraints +func AssertCatalogLabelListConstraints(obj model.CatalogLabelList) error { + return nil +} + +// AssertCatalogLabelListRequired checks if the required fields are not zero-ed +func AssertCatalogLabelListRequired(obj model.CatalogLabelList) error { + elements := map[string]interface{}{ + "nextPageToken": obj.NextPageToken, + "pageSize": obj.PageSize, + "size": obj.Size, + "items": obj.Items, + } + for name, el := range elements { + if isZero := IsZeroValue(el); isZero { + return &RequiredError{Field: name} + } + } + + return nil +} + +// AssertCatalogLabelRequired checks if the required fields are not zero-ed +func AssertCatalogLabelRequired(obj model.CatalogLabel) error { + elements := map[string]interface{}{ + "name": obj.Name, + } + for name, el := range elements { + if isZero := IsZeroValue(el); isZero { + return &RequiredError{Field: name} + } + } + + return nil +} + +// AssertCatalogMetricsArtifactConstraints checks if the values respects the defined constraints +func AssertCatalogMetricsArtifactConstraints(obj model.CatalogMetricsArtifact) error { + return nil +} + +// AssertCatalogMetricsArtifactRequired checks if the required fields are not zero-ed +func AssertCatalogMetricsArtifactRequired(obj model.CatalogMetricsArtifact) error { + elements := map[string]interface{}{ + "artifactType": obj.ArtifactType, + "metricsType": obj.MetricsType, + } + for name, el := range elements { + if isZero := IsZeroValue(el); isZero { + return &RequiredError{Field: name} + } + } + + return nil +} + +// AssertCatalogModelArtifactConstraints checks if the values respects the defined constraints +func AssertCatalogModelArtifactConstraints(obj model.CatalogModelArtifact) error { + return nil +} + // AssertCatalogModelArtifactRequired checks if the required fields are not zero-ed func AssertCatalogModelArtifactRequired(obj model.CatalogModelArtifact) error { elements := map[string]interface{}{ 
- "uri": obj.Uri, + "artifactType": obj.ArtifactType, + "uri": obj.Uri, } for name, el := range elements { if isZero := IsZeroValue(el); isZero { @@ -120,6 +197,11 @@ func AssertCatalogModelConstraints(obj model.CatalogModel) error { // AssertCatalogModelListConstraints checks if the values respects the defined constraints func AssertCatalogModelListConstraints(obj model.CatalogModelList) error { + for _, el := range obj.Items { + if err := AssertCatalogModelConstraints(el); err != nil { + return err + } + } return nil } @@ -166,6 +248,11 @@ func AssertCatalogSourceConstraints(obj model.CatalogSource) error { // AssertCatalogSourceListConstraints checks if the values respects the defined constraints func AssertCatalogSourceListConstraints(obj model.CatalogSourceList) error { + for _, el := range obj.Items { + if err := AssertCatalogSourceConstraints(el); err != nil { + return err + } + } return nil } @@ -193,8 +280,9 @@ func AssertCatalogSourceListRequired(obj model.CatalogSourceList) error { // AssertCatalogSourceRequired checks if the required fields are not zero-ed func AssertCatalogSourceRequired(obj model.CatalogSource) error { elements := map[string]interface{}{ - "id": obj.Id, - "name": obj.Name, + "id": obj.Id, + "name": obj.Name, + "labels": obj.Labels, } for name, el := range elements { if isZero := IsZeroValue(el); isZero { @@ -225,6 +313,26 @@ func AssertErrorRequired(obj model.Error) error { return nil } +// AssertFilterOptionRangeConstraints checks if the values respects the defined constraints +func AssertFilterOptionRangeConstraints(obj model.FilterOptionRange) error { + return nil +} + +// AssertFilterOptionRangeRequired checks if the required fields are not zero-ed +func AssertFilterOptionRangeRequired(obj model.FilterOptionRange) error { + return nil +} + +// AssertFilterOptionsListConstraints checks if the values respects the defined constraints +func AssertFilterOptionsListConstraints(obj model.FilterOptionsList) error { + return nil +} + +// 
AssertFilterOptionsListRequired checks if the required fields are not zero-ed +func AssertFilterOptionsListRequired(obj model.FilterOptionsList) error { + return nil +} + // AssertMetadataBoolValueConstraints checks if the values respects the defined constraints func AssertMetadataBoolValueConstraints(obj model.MetadataBoolValue) error { return nil diff --git a/catalog/internal/server/openapi/type_asserts_overrides.go b/catalog/internal/server/openapi/type_asserts_overrides.go new file mode 100644 index 0000000000..770455a945 --- /dev/null +++ b/catalog/internal/server/openapi/type_asserts_overrides.go @@ -0,0 +1,36 @@ +package openapi + +import ( + model "github.com/kubeflow/model-registry/catalog/pkg/openapi" +) + +// AssertCatalogArtifactRequired checks if the required fields are not zero-ed +func AssertCatalogArtifactRequired(obj model.CatalogArtifact) error { + // CatalogArtifact has no required fields but the openapi code gen + // checks the fields from CatalogModelArtifact, which doesn't compile. 
+ return nil +} + +// AssertCatalogArtifactConstraints checks if the values respects the defined constraints +func AssertCatalogArtifactConstraints(obj model.CatalogArtifact) error { + return nil +} + +// AssertFilterOptionRequired checks if the required fields are not zero-ed +func AssertFilterOptionRequired(obj model.FilterOption) error { + elements := map[string]interface{}{ + "type": obj.Type, + } + for name, el := range elements { + if isZero := IsZeroValue(el); isZero { + return &RequiredError{Field: name} + } + } + + if obj.Range != nil { + if err := AssertFilterOptionRangeRequired(*obj.Range); err != nil { + return err + } + } + return nil +} diff --git a/catalog/pkg/openapi/.openapi-generator/FILES b/catalog/pkg/openapi/.openapi-generator/FILES index 3ef4963c87..ced04c21e1 100644 --- a/catalog/pkg/openapi/.openapi-generator/FILES +++ b/catalog/pkg/openapi/.openapi-generator/FILES @@ -3,15 +3,23 @@ client.go configuration.go model_artifact_type_query_param.go model_base_model.go +model_base_resource.go model_base_resource_dates.go model_base_resource_list.go +model_catalog_artifact.go +model_catalog_artifact_list.go +model_catalog_label.go +model_catalog_label_list.go +model_catalog_metrics_artifact.go model_catalog_model.go model_catalog_model_artifact.go -model_catalog_model_artifact_list.go model_catalog_model_list.go model_catalog_source.go model_catalog_source_list.go model_error.go +model_filter_option.go +model_filter_option_range.go +model_filter_options_list.go model_metadata_bool_value.go model_metadata_double_value.go model_metadata_int_value.go diff --git a/catalog/pkg/openapi/.openapi-generator/VERSION b/catalog/pkg/openapi/.openapi-generator/VERSION index 73a86b1970..6328c5424a 100644 --- a/catalog/pkg/openapi/.openapi-generator/VERSION +++ b/catalog/pkg/openapi/.openapi-generator/VERSION @@ -1 +1 @@ -7.0.1 \ No newline at end of file +7.17.0 diff --git a/catalog/pkg/openapi/api_model_catalog_service.go 
b/catalog/pkg/openapi/api_model_catalog_service.go index a0218789a9..62cfa9dae8 100644 --- a/catalog/pkg/openapi/api_model_catalog_service.go +++ b/catalog/pkg/openapi/api_model_catalog_service.go @@ -16,25 +16,211 @@ import ( "io" "net/http" "net/url" + "reflect" "strings" ) // ModelCatalogServiceAPIService ModelCatalogServiceAPI service type ModelCatalogServiceAPIService service +type ApiFindLabelsRequest struct { + ctx context.Context + ApiService *ModelCatalogServiceAPIService + pageSize *string + orderBy *string + sortOrder *SortOrder + nextPageToken *string +} + +// Number of entities in each page. +func (r ApiFindLabelsRequest) PageSize(pageSize string) ApiFindLabelsRequest { + r.pageSize = &pageSize + return r +} + +// Specifies the key to order catalog labels by. You can provide any string key that may exist in the label maps. Labels that contain the specified key will be sorted by that key's value. Labels that don't contain the key will maintain their original order and appear after labels that do contain the key. +func (r ApiFindLabelsRequest) OrderBy(orderBy string) ApiFindLabelsRequest { + r.orderBy = &orderBy + return r +} + +// Specifies the sort order for listing entities, defaults to ASC. +func (r ApiFindLabelsRequest) SortOrder(sortOrder SortOrder) ApiFindLabelsRequest { + r.sortOrder = &sortOrder + return r +} + +// Token to use to retrieve next page of results. +func (r ApiFindLabelsRequest) NextPageToken(nextPageToken string) ApiFindLabelsRequest { + r.nextPageToken = &nextPageToken + return r +} + +func (r ApiFindLabelsRequest) Execute() (*CatalogLabelList, *http.Response, error) { + return r.ApiService.FindLabelsExecute(r) +} + +/* +FindLabels List All CatalogLabels + +Gets a list of all `CatalogLabel` entities. + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ @return ApiFindLabelsRequest +*/ +func (a *ModelCatalogServiceAPIService) FindLabels(ctx context.Context) ApiFindLabelsRequest { + return ApiFindLabelsRequest{ + ApiService: a, + ctx: ctx, + } +} + +// Execute executes the request +// +// @return CatalogLabelList +func (a *ModelCatalogServiceAPIService) FindLabelsExecute(r ApiFindLabelsRequest) (*CatalogLabelList, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodGet + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *CatalogLabelList + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ModelCatalogServiceAPIService.FindLabels") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/api/model_catalog/v1alpha1/labels" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if r.pageSize != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "pageSize", r.pageSize, "form", "") + } + if r.orderBy != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "orderBy", r.orderBy, "form", "") + } + if r.sortOrder != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "sortOrder", r.sortOrder, "form", "") + } + if r.nextPageToken != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "nextPageToken", r.nextPageToken, "form", "") + } + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = 
localVarHTTPHeaderAccept + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + if localVarHTTPResponse.StatusCode == 400 { + var v Error + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 401 { + var v Error + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 404 { + var v Error + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + 
return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 500 { + var v Error + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + type ApiFindModelsRequest struct { ctx context.Context ApiService *ModelCatalogServiceAPIService - source *string + source *[]string q *string + sourceLabel *[]string + filterQuery *string pageSize *string orderBy *OrderByField sortOrder *SortOrder nextPageToken *string } -// Filter models by source. This parameter is currently required and may only be specified once. -func (r ApiFindModelsRequest) Source(source string) ApiFindModelsRequest { +// Filter models by source. Multiple values can be separated by commas to filter by multiple sources (OR logic). For example: ?source=huggingface,local will return models from either huggingface OR local sources. +func (r ApiFindModelsRequest) Source(source []string) ApiFindModelsRequest { r.source = &source return r } @@ -45,13 +231,25 @@ func (r ApiFindModelsRequest) Q(q string) ApiFindModelsRequest { return r } +// Filter models by the label associated with the source. Multiple values can be separated by commas. If one of the values is the string `null`, then models from every source without a label will be returned. 
+func (r ApiFindModelsRequest) SourceLabel(sourceLabel []string) ApiFindModelsRequest { + r.sourceLabel = &sourceLabel + return r +} + +// A SQL-like query string to filter the list of entities. The query supports rich filtering capabilities with automatic type inference. **Supported Operators:** - Comparison: `=`, `!=`, `<>`, `>`, `<`, `>=`, `<=` - Pattern matching: `LIKE`, `ILIKE` (case-insensitive) - Set membership: `IN` - Logical: `AND`, `OR` - Grouping: `()` for complex expressions **Data Types:** - Strings: `\"value\"` or `'value'` - Numbers: `42`, `3.14`, `1e-5` - Booleans: `true`, `false` (case-insensitive) **Property Access:** - Standard properties: `name`, `id`, `state`, `createTimeSinceEpoch` - Custom properties: Any user-defined property name - Escaped properties: Use backticks for special characters: `` `custom-property` `` - Type-specific access: `property.string_value`, `property.double_value`, `property.int_value`, `property.bool_value` **Examples:** - Basic: `name = \"my-model\"` - Comparison: `accuracy > 0.95` - Pattern: `name LIKE \"%tensorflow%\"` - Complex: `(name = \"model-a\" OR name = \"model-b\") AND state = \"LIVE\"` - Custom property: `framework.string_value = \"pytorch\"` - Escaped property: `` `mlflow.source.type` = \"notebook\" `` +func (r ApiFindModelsRequest) FilterQuery(filterQuery string) ApiFindModelsRequest { + r.filterQuery = &filterQuery + return r +} + // Number of entities in each page. func (r ApiFindModelsRequest) PageSize(pageSize string) ApiFindModelsRequest { r.pageSize = &pageSize return r } -// Specifies the order by criteria for listing entities. +// Specifies the order by criteria for listing entities. Supported values are: - CREATE_TIME - LAST_UPDATE_TIME - ID - NAME - ACCURACY The `ACCURACY` sort will sort by the `overall_average` property in any linked metrics artifact. In addition, models can be sorted by properties. 
For example: - `provider.string_value` sorts by provider name - `artifacts.ifeval.double_value` sorts by the min/max value a property called ifeval across all associated artifacts func (r ApiFindModelsRequest) OrderBy(orderBy OrderByField) ApiFindModelsRequest { r.orderBy = &orderBy return r @@ -107,25 +305,46 @@ func (a *ModelCatalogServiceAPIService) FindModelsExecute(r ApiFindModelsRequest localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if r.source == nil { - return localVarReturnValue, nil, reportError("source is required and must be specified") - } - parameterAddToHeaderOrQuery(localVarQueryParams, "source", r.source, "") + if r.source != nil { + t := *r.source + if reflect.TypeOf(t).Kind() == reflect.Slice { + s := reflect.ValueOf(t) + for i := 0; i < s.Len(); i++ { + parameterAddToHeaderOrQuery(localVarQueryParams, "source", s.Index(i).Interface(), "form", "multi") + } + } else { + parameterAddToHeaderOrQuery(localVarQueryParams, "source", t, "form", "multi") + } + } if r.q != nil { - parameterAddToHeaderOrQuery(localVarQueryParams, "q", r.q, "") + parameterAddToHeaderOrQuery(localVarQueryParams, "q", r.q, "form", "") + } + if r.sourceLabel != nil { + t := *r.sourceLabel + if reflect.TypeOf(t).Kind() == reflect.Slice { + s := reflect.ValueOf(t) + for i := 0; i < s.Len(); i++ { + parameterAddToHeaderOrQuery(localVarQueryParams, "sourceLabel", s.Index(i).Interface(), "form", "multi") + } + } else { + parameterAddToHeaderOrQuery(localVarQueryParams, "sourceLabel", t, "form", "multi") + } + } + if r.filterQuery != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "filterQuery", r.filterQuery, "form", "") } if r.pageSize != nil { - parameterAddToHeaderOrQuery(localVarQueryParams, "pageSize", r.pageSize, "") + parameterAddToHeaderOrQuery(localVarQueryParams, "pageSize", r.pageSize, "form", "") } if r.orderBy != nil { - parameterAddToHeaderOrQuery(localVarQueryParams, "orderBy", 
r.orderBy, "") + parameterAddToHeaderOrQuery(localVarQueryParams, "orderBy", r.orderBy, "form", "") } if r.sortOrder != nil { - parameterAddToHeaderOrQuery(localVarQueryParams, "sortOrder", r.sortOrder, "") + parameterAddToHeaderOrQuery(localVarQueryParams, "sortOrder", r.sortOrder, "form", "") } if r.nextPageToken != nil { - parameterAddToHeaderOrQuery(localVarQueryParams, "nextPageToken", r.nextPageToken, "") + parameterAddToHeaderOrQuery(localVarQueryParams, "nextPageToken", r.nextPageToken, "form", "") } // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -224,6 +443,136 @@ func (a *ModelCatalogServiceAPIService) FindModelsExecute(r ApiFindModelsRequest return localVarReturnValue, localVarHTTPResponse, nil } +type ApiFindModelsFilterOptionsRequest struct { + ctx context.Context + ApiService *ModelCatalogServiceAPIService +} + +func (r ApiFindModelsFilterOptionsRequest) Execute() (*FilterOptionsList, *http.Response, error) { + return r.ApiService.FindModelsFilterOptionsExecute(r) +} + +/* +FindModelsFilterOptions Lists fields and available options that can be used in `filterQuery` on the list models endpoint. + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ @return ApiFindModelsFilterOptionsRequest +*/ +func (a *ModelCatalogServiceAPIService) FindModelsFilterOptions(ctx context.Context) ApiFindModelsFilterOptionsRequest { + return ApiFindModelsFilterOptionsRequest{ + ApiService: a, + ctx: ctx, + } +} + +// Execute executes the request +// +// @return FilterOptionsList +func (a *ModelCatalogServiceAPIService) FindModelsFilterOptionsExecute(r ApiFindModelsFilterOptionsRequest) (*FilterOptionsList, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodGet + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *FilterOptionsList + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ModelCatalogServiceAPIService.FindModelsFilterOptions") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/api/model_catalog/v1alpha1/models/filter_options" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + 
return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + if localVarHTTPResponse.StatusCode == 400 { + var v Error + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 401 { + var v Error + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 500 { + var v Error + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return 
localVarReturnValue, localVarHTTPResponse, nil +} + type ApiFindSourcesRequest struct { ctx context.Context ApiService *ModelCatalogServiceAPIService @@ -306,19 +655,19 @@ func (a *ModelCatalogServiceAPIService) FindSourcesExecute(r ApiFindSourcesReque localVarFormParams := url.Values{} if r.name != nil { - parameterAddToHeaderOrQuery(localVarQueryParams, "name", r.name, "") + parameterAddToHeaderOrQuery(localVarQueryParams, "name", r.name, "form", "") } if r.pageSize != nil { - parameterAddToHeaderOrQuery(localVarQueryParams, "pageSize", r.pageSize, "") + parameterAddToHeaderOrQuery(localVarQueryParams, "pageSize", r.pageSize, "form", "") } if r.orderBy != nil { - parameterAddToHeaderOrQuery(localVarQueryParams, "orderBy", r.orderBy, "") + parameterAddToHeaderOrQuery(localVarQueryParams, "orderBy", r.orderBy, "form", "") } if r.sortOrder != nil { - parameterAddToHeaderOrQuery(localVarQueryParams, "sortOrder", r.sortOrder, "") + parameterAddToHeaderOrQuery(localVarQueryParams, "sortOrder", r.sortOrder, "form", "") } if r.nextPageToken != nil { - parameterAddToHeaderOrQuery(localVarQueryParams, "nextPageToken", r.nextPageToken, "") + parameterAddToHeaderOrQuery(localVarQueryParams, "nextPageToken", r.nextPageToken, "form", "") } // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -418,18 +767,68 @@ func (a *ModelCatalogServiceAPIService) FindSourcesExecute(r ApiFindSourcesReque } type ApiGetAllModelArtifactsRequest struct { - ctx context.Context - ApiService *ModelCatalogServiceAPIService - sourceId string - modelName string + ctx context.Context + ApiService *ModelCatalogServiceAPIService + sourceId string + modelName string + artifactType *[]ArtifactTypeQueryParam + artifactType2 *[]ArtifactTypeQueryParam + filterQuery *string + pageSize *string + orderBy *string + sortOrder *SortOrder + nextPageToken *string } -func (r ApiGetAllModelArtifactsRequest) Execute() (*CatalogModelArtifactList, *http.Response, error) { +// Specifies the 
artifact type for listing artifacts. +func (r ApiGetAllModelArtifactsRequest) ArtifactType(artifactType []ArtifactTypeQueryParam) ApiGetAllModelArtifactsRequest { + r.artifactType = &artifactType + return r +} + +// Specifies the artifact type for listing artifacts. +// Deprecated +func (r ApiGetAllModelArtifactsRequest) ArtifactType2(artifactType2 []ArtifactTypeQueryParam) ApiGetAllModelArtifactsRequest { + r.artifactType2 = &artifactType2 + return r +} + +// A SQL-like query string to filter catalog artifacts. The query supports rich filtering capabilities with automatic type inference. **Supported Operators:** - Comparison: `=`, `!=`, `<>`, `>`, `<`, `>=`, `<=` - Pattern matching: `LIKE`, `ILIKE` (case-insensitive) - Set membership: `IN` - Logical: `AND`, `OR` - Grouping: `()` for complex expressions **Data Types:** - Strings: `\"value\"` or `'value'` - Numbers: `42`, `3.14`, `1e-5` - Booleans: `true`, `false` (case-insensitive) **Property Access (Artifacts):** - Standard properties: `name`, `id`, `uri`, `artifactType`, `createTimeSinceEpoch` - Custom properties: Any user-defined property name in `customProperties` - Escaped properties: Use backticks for special characters: `` `custom-property` `` - Type-specific access: `property.string_value`, `property.double_value`, `property.int_value`, `property.bool_value` **Examples:** - Basic: `name = \"my-artifact\"` - Comparison: `ttft_mean > 90` - Pattern: `uri LIKE \"%s3.amazonaws.com%\"` - Complex: `(artifactType = \"model-artifact\" OR artifactType = \"metrics-artifact\") AND name LIKE \"%pytorch%\"` - Custom property: `format.string_value = \"pytorch\"` - Escaped property: `` `custom-key` = \"value\" `` +func (r ApiGetAllModelArtifactsRequest) FilterQuery(filterQuery string) ApiGetAllModelArtifactsRequest { + r.filterQuery = &filterQuery + return r +} + +// Number of entities in each page. 
+func (r ApiGetAllModelArtifactsRequest) PageSize(pageSize string) ApiGetAllModelArtifactsRequest { + r.pageSize = &pageSize + return r +} + +// Specifies the order by criteria for listing artifacts. **Standard Fields:** - `ID` - Order by artifact ID - `NAME` - Order by artifact name - `CREATE_TIME` - Order by creation timestamp - `LAST_UPDATE_TIME` - Order by last update timestamp **Custom Property Ordering:** Artifacts can be ordered by custom properties using the format: `<property_name>.<value_type>` Supported value types: - `double_value` - For numeric (floating-point) properties - `int_value` - For integer properties - `string_value` - For string properties Examples: - `mmlu.double_value` - Order by the 'mmlu' benchmark score - `accuracy.double_value` - Order by accuracy metric - `framework_type.string_value` - Order by framework type - `hardware_count.int_value` - Order by hardware count - `ttft_mean.double_value` - Order by time-to-first-token mean **Behavior:** - If an invalid value type is specified (e.g., `accuracy.invalid_type`), an error is returned - If an invalid format is used (e.g., `accuracy` without `.value_type`), it falls back to ID ordering - If a property doesn't exist, it falls back to ID ordering - Artifacts with the specified property are ordered first (by the property value), followed by artifacts without the property (ordered by ID) - Empty property names (e.g., `.double_value`) return an error +func (r ApiGetAllModelArtifactsRequest) OrderBy(orderBy string) ApiGetAllModelArtifactsRequest { + r.orderBy = &orderBy + return r +} + +// Specifies the sort order for listing entities, defaults to ASC. +func (r ApiGetAllModelArtifactsRequest) SortOrder(sortOrder SortOrder) ApiGetAllModelArtifactsRequest { + r.sortOrder = &sortOrder + return r +} + +// Token to use to retrieve next page of results. 
+func (r ApiGetAllModelArtifactsRequest) NextPageToken(nextPageToken string) ApiGetAllModelArtifactsRequest { + r.nextPageToken = &nextPageToken + return r +} + +func (r ApiGetAllModelArtifactsRequest) Execute() (*CatalogArtifactList, *http.Response, error) { return r.ApiService.GetAllModelArtifactsExecute(r) } /* -GetAllModelArtifacts List CatalogModelArtifacts. +GetAllModelArtifacts List CatalogArtifacts. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @param sourceId A unique identifier for a `CatalogSource`. @@ -447,13 +846,13 @@ func (a *ModelCatalogServiceAPIService) GetAllModelArtifacts(ctx context.Context // Execute executes the request // -// @return CatalogModelArtifactList -func (a *ModelCatalogServiceAPIService) GetAllModelArtifactsExecute(r ApiGetAllModelArtifactsRequest) (*CatalogModelArtifactList, *http.Response, error) { +// @return CatalogArtifactList +func (a *ModelCatalogServiceAPIService) GetAllModelArtifactsExecute(r ApiGetAllModelArtifactsRequest) (*CatalogArtifactList, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} formFiles []formFile - localVarReturnValue *CatalogModelArtifactList + localVarReturnValue *CatalogArtifactList ) localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ModelCatalogServiceAPIService.GetAllModelArtifacts") @@ -469,6 +868,43 @@ func (a *ModelCatalogServiceAPIService) GetAllModelArtifactsExecute(r ApiGetAllM localVarQueryParams := url.Values{} localVarFormParams := url.Values{} + if r.artifactType != nil { + t := *r.artifactType + if reflect.TypeOf(t).Kind() == reflect.Slice { + s := reflect.ValueOf(t) + for i := 0; i < s.Len(); i++ { + parameterAddToHeaderOrQuery(localVarQueryParams, "artifactType", s.Index(i).Interface(), "form", "multi") + } + } else { + parameterAddToHeaderOrQuery(localVarQueryParams, "artifactType", t, "form", "multi") + } + } + if 
r.artifactType2 != nil { + t := *r.artifactType2 + if reflect.TypeOf(t).Kind() == reflect.Slice { + s := reflect.ValueOf(t) + for i := 0; i < s.Len(); i++ { + parameterAddToHeaderOrQuery(localVarQueryParams, "artifact_type", s.Index(i).Interface(), "form", "multi") + } + } else { + parameterAddToHeaderOrQuery(localVarQueryParams, "artifact_type", t, "form", "multi") + } + } + if r.filterQuery != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "filterQuery", r.filterQuery, "form", "") + } + if r.pageSize != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "pageSize", r.pageSize, "form", "") + } + if r.orderBy != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "orderBy", r.orderBy, "form", "") + } + if r.sortOrder != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "sortOrder", r.sortOrder, "form", "") + } + if r.nextPageToken != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "nextPageToken", r.nextPageToken, "form", "") + } // to determine the Content-Type header localVarHTTPContentTypes := []string{} diff --git a/catalog/pkg/openapi/client.go b/catalog/pkg/openapi/client.go index fcd8e7ead0..e730c7efa7 100644 --- a/catalog/pkg/openapi/client.go +++ b/catalog/pkg/openapi/client.go @@ -34,8 +34,8 @@ import ( ) var ( - jsonCheck = regexp.MustCompile(`(?i:(?:application|text)/(?:vnd\.[^;]+\+)?json)`) - xmlCheck = regexp.MustCompile(`(?i:(?:application|text)/xml)`) + JsonCheck = regexp.MustCompile(`(?i:(?:application|text)/(?:[^;]+\+)?json)`) + XmlCheck = regexp.MustCompile(`(?i:(?:application|text)/(?:[^;]+\+)?xml)`) queryParamSplit = regexp.MustCompile(`(^|&)([^&]+)`) queryDescape = strings.NewReplacer("%5B", "[", "%5D", "]") ) @@ -126,6 +126,10 @@ func typeCheckParameter(obj interface{}, expected string, name string) error { func parameterValueToString(obj interface{}, key string) string { if reflect.TypeOf(obj).Kind() != reflect.Ptr { + if actualObj, ok := obj.(interface{ GetActualInstanceValue() interface{} }); ok { + 
return fmt.Sprintf("%v", actualObj.GetActualInstanceValue()) + } + return fmt.Sprintf("%v", obj) } var param, ok = obj.(MappedNullable) @@ -141,7 +145,7 @@ func parameterValueToString(obj interface{}, key string) string { // parameterAddToHeaderOrQuery adds the provided object to the request header or url query // supporting deep object syntax -func parameterAddToHeaderOrQuery(headerOrQueryParams interface{}, keyPrefix string, obj interface{}, collectionType string) { +func parameterAddToHeaderOrQuery(headerOrQueryParams interface{}, keyPrefix string, obj interface{}, style string, collectionType string) { var v = reflect.ValueOf(obj) var value = "" if v == reflect.ValueOf(nil) { @@ -157,11 +161,11 @@ func parameterAddToHeaderOrQuery(headerOrQueryParams interface{}, keyPrefix stri if err != nil { return } - parameterAddToHeaderOrQuery(headerOrQueryParams, keyPrefix, dataMap, collectionType) + parameterAddToHeaderOrQuery(headerOrQueryParams, keyPrefix, dataMap, style, collectionType) return } if t, ok := obj.(time.Time); ok { - parameterAddToHeaderOrQuery(headerOrQueryParams, keyPrefix, t.Format(time.RFC3339), collectionType) + parameterAddToHeaderOrQuery(headerOrQueryParams, keyPrefix, t.Format(time.RFC3339Nano), style, collectionType) return } value = v.Type().String() + " value" @@ -173,7 +177,11 @@ func parameterAddToHeaderOrQuery(headerOrQueryParams interface{}, keyPrefix stri var lenIndValue = indValue.Len() for i := 0; i < lenIndValue; i++ { var arrayValue = indValue.Index(i) - parameterAddToHeaderOrQuery(headerOrQueryParams, keyPrefix, arrayValue.Interface(), collectionType) + var keyPrefixForCollectionType = keyPrefix + if style == "deepObject" { + keyPrefixForCollectionType = keyPrefix + "[" + strconv.Itoa(i) + "]" + } + parameterAddToHeaderOrQuery(headerOrQueryParams, keyPrefixForCollectionType, arrayValue.Interface(), style, collectionType) } return @@ -185,14 +193,14 @@ func parameterAddToHeaderOrQuery(headerOrQueryParams interface{}, keyPrefix stri 
iter := indValue.MapRange() for iter.Next() { k, v := iter.Key(), iter.Value() - parameterAddToHeaderOrQuery(headerOrQueryParams, fmt.Sprintf("%s[%s]", keyPrefix, k.String()), v.Interface(), collectionType) + parameterAddToHeaderOrQuery(headerOrQueryParams, fmt.Sprintf("%s[%s]", keyPrefix, k.String()), v.Interface(), style, collectionType) } return case reflect.Interface: fallthrough case reflect.Ptr: - parameterAddToHeaderOrQuery(headerOrQueryParams, keyPrefix, v.Elem().Interface(), collectionType) + parameterAddToHeaderOrQuery(headerOrQueryParams, keyPrefix, v.Elem().Interface(), style, collectionType) return case reflect.Int, reflect.Int8, reflect.Int16, @@ -440,7 +448,6 @@ func (c *APIClient) decode(v interface{}, b []byte, contentType string) (err err return } _, err = f.Seek(0, io.SeekStart) - err = os.Remove(f.Name()) return } if f, ok := v.(**os.File); ok { @@ -453,16 +460,15 @@ func (c *APIClient) decode(v interface{}, b []byte, contentType string) (err err return } _, err = (*f).Seek(0, io.SeekStart) - err = os.Remove((*f).Name()) return } - if xmlCheck.MatchString(contentType) { + if XmlCheck.MatchString(contentType) { if err = xml.Unmarshal(b, v); err != nil { return err } return nil } - if jsonCheck.MatchString(contentType) { + if JsonCheck.MatchString(contentType) { if actualObj, ok := v.(interface{ GetActualInstance() interface{} }); ok { // oneOf, anyOf schemas if unmarshalObj, ok := actualObj.(interface{ UnmarshalJSON([]byte) error }); ok { // make sure it has UnmarshalJSON defined if err = unmarshalObj.UnmarshalJSON(b); err != nil { @@ -499,18 +505,6 @@ func addFile(w *multipart.Writer, fieldName, path string) error { return err } -// Prevent trying to import "fmt" -func reportError(format string, a ...interface{}) error { - return fmt.Errorf(format, a...) 
-} - -// A wrapper for strict JSON decoding -func newStrictDecoder(data []byte) *json.Decoder { - dec := json.NewDecoder(bytes.NewBuffer(data)) - dec.DisallowUnknownFields() - return dec -} - // Set request body from an interface{} func setBody(body interface{}, contentType string) (bodyBuf *bytes.Buffer, err error) { if bodyBuf == nil { @@ -527,9 +521,9 @@ func setBody(body interface{}, contentType string) (bodyBuf *bytes.Buffer, err e _, err = bodyBuf.WriteString(s) } else if s, ok := body.(*string); ok { _, err = bodyBuf.WriteString(*s) - } else if jsonCheck.MatchString(contentType) { + } else if JsonCheck.MatchString(contentType) { err = json.NewEncoder(bodyBuf).Encode(body) - } else if xmlCheck.MatchString(contentType) { + } else if XmlCheck.MatchString(contentType) { var bs []byte bs, err = xml.Marshal(body) if err == nil { diff --git a/catalog/pkg/openapi/go.mod b/catalog/pkg/openapi/go.mod index b9464b623b..61c3d581c0 100644 --- a/catalog/pkg/openapi/go.mod +++ b/catalog/pkg/openapi/go.mod @@ -1,3 +1,3 @@ module github.com/kubeflow/model-registry/catalog/pkg/openapi -go 1.24 +go 1.24.6 diff --git a/catalog/pkg/openapi/model_artifact_type_query_param.go b/catalog/pkg/openapi/model_artifact_type_query_param.go index a5d0590d5b..041c91f8e8 100644 --- a/catalog/pkg/openapi/model_artifact_type_query_param.go +++ b/catalog/pkg/openapi/model_artifact_type_query_param.go @@ -21,19 +21,13 @@ type ArtifactTypeQueryParam string // List of ArtifactTypeQueryParam const ( ARTIFACTTYPEQUERYPARAM_MODEL_ARTIFACT ArtifactTypeQueryParam = "model-artifact" - ARTIFACTTYPEQUERYPARAM_DOC_ARTIFACT ArtifactTypeQueryParam = "doc-artifact" - ARTIFACTTYPEQUERYPARAM_DATASET_ARTIFACT ArtifactTypeQueryParam = "dataset-artifact" - ARTIFACTTYPEQUERYPARAM_METRIC ArtifactTypeQueryParam = "metric" - ARTIFACTTYPEQUERYPARAM_PARAMETER ArtifactTypeQueryParam = "parameter" + ARTIFACTTYPEQUERYPARAM_METRICS_ARTIFACT ArtifactTypeQueryParam = "metrics-artifact" ) // All allowed values of 
ArtifactTypeQueryParam enum var AllowedArtifactTypeQueryParamEnumValues = []ArtifactTypeQueryParam{ "model-artifact", - "doc-artifact", - "dataset-artifact", - "metric", - "parameter", + "metrics-artifact", } func (v *ArtifactTypeQueryParam) UnmarshalJSON(src []byte) error { diff --git a/catalog/pkg/openapi/model_base_model.go b/catalog/pkg/openapi/model_base_model.go index 654d7fe392..83f5e13dc0 100644 --- a/catalog/pkg/openapi/model_base_model.go +++ b/catalog/pkg/openapi/model_base_model.go @@ -39,7 +39,7 @@ type BaseModel struct { LicenseLink *string `json:"licenseLink,omitempty"` LibraryName *string `json:"libraryName,omitempty"` // User provided custom properties which are not defined by its type. - CustomProperties *map[string]MetadataValue `json:"customProperties,omitempty"` + CustomProperties map[string]MetadataValue `json:"customProperties,omitempty"` } // NewBaseModel instantiates a new BaseModel object @@ -385,14 +385,14 @@ func (o *BaseModel) GetCustomProperties() map[string]MetadataValue { var ret map[string]MetadataValue return ret } - return *o.CustomProperties + return o.CustomProperties } // GetCustomPropertiesOk returns a tuple with the CustomProperties field value if set, nil otherwise // and a boolean to check if the value has been set. -func (o *BaseModel) GetCustomPropertiesOk() (*map[string]MetadataValue, bool) { +func (o *BaseModel) GetCustomPropertiesOk() (map[string]MetadataValue, bool) { if o == nil || IsNil(o.CustomProperties) { - return nil, false + return map[string]MetadataValue{}, false } return o.CustomProperties, true } @@ -408,7 +408,7 @@ func (o *BaseModel) HasCustomProperties() bool { // SetCustomProperties gets a reference to the given map[string]MetadataValue and assigns it to the CustomProperties field. 
func (o *BaseModel) SetCustomProperties(v map[string]MetadataValue) { - o.CustomProperties = &v + o.CustomProperties = v } func (o BaseModel) MarshalJSON() ([]byte, error) { diff --git a/catalog/pkg/openapi/model_base_resource.go b/catalog/pkg/openapi/model_base_resource.go new file mode 100644 index 0000000000..43938ac14c --- /dev/null +++ b/catalog/pkg/openapi/model_base_resource.go @@ -0,0 +1,347 @@ +/* +Model Catalog REST API + +REST API for Model Registry to create and manage ML model metadata + +API version: v1alpha1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" +) + +// checks if the BaseResource type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &BaseResource{} + +// BaseResource struct for BaseResource +type BaseResource struct { + // Output only. Create time of the resource in millisecond since epoch. + CreateTimeSinceEpoch *string `json:"createTimeSinceEpoch,omitempty"` + // Output only. Last update time of the resource since epoch in millisecond since epoch. + LastUpdateTimeSinceEpoch *string `json:"lastUpdateTimeSinceEpoch,omitempty"` + // User provided custom properties which are not defined by its type. + CustomProperties map[string]MetadataValue `json:"customProperties,omitempty"` + // An optional description about the resource. + Description *string `json:"description,omitempty"` + // The external id that come from the clients’ system. This field is optional. If set, it must be unique among all resources within a database instance. + ExternalId *string `json:"externalId,omitempty"` + // The client provided name of the artifact. This field is optional. If set, it must be unique among all the artifacts of the same artifact type within a database instance and cannot be changed once set. + Name *string `json:"name,omitempty"` + // The unique server generated id of the resource. 
+ Id *string `json:"id,omitempty"` +} + +// NewBaseResource instantiates a new BaseResource object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewBaseResource() *BaseResource { + this := BaseResource{} + return &this +} + +// NewBaseResourceWithDefaults instantiates a new BaseResource object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewBaseResourceWithDefaults() *BaseResource { + this := BaseResource{} + return &this +} + +// GetCreateTimeSinceEpoch returns the CreateTimeSinceEpoch field value if set, zero value otherwise. +func (o *BaseResource) GetCreateTimeSinceEpoch() string { + if o == nil || IsNil(o.CreateTimeSinceEpoch) { + var ret string + return ret + } + return *o.CreateTimeSinceEpoch +} + +// GetCreateTimeSinceEpochOk returns a tuple with the CreateTimeSinceEpoch field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BaseResource) GetCreateTimeSinceEpochOk() (*string, bool) { + if o == nil || IsNil(o.CreateTimeSinceEpoch) { + return nil, false + } + return o.CreateTimeSinceEpoch, true +} + +// HasCreateTimeSinceEpoch returns a boolean if a field has been set. +func (o *BaseResource) HasCreateTimeSinceEpoch() bool { + if o != nil && !IsNil(o.CreateTimeSinceEpoch) { + return true + } + + return false +} + +// SetCreateTimeSinceEpoch gets a reference to the given string and assigns it to the CreateTimeSinceEpoch field. +func (o *BaseResource) SetCreateTimeSinceEpoch(v string) { + o.CreateTimeSinceEpoch = &v +} + +// GetLastUpdateTimeSinceEpoch returns the LastUpdateTimeSinceEpoch field value if set, zero value otherwise. 
+func (o *BaseResource) GetLastUpdateTimeSinceEpoch() string { + if o == nil || IsNil(o.LastUpdateTimeSinceEpoch) { + var ret string + return ret + } + return *o.LastUpdateTimeSinceEpoch +} + +// GetLastUpdateTimeSinceEpochOk returns a tuple with the LastUpdateTimeSinceEpoch field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BaseResource) GetLastUpdateTimeSinceEpochOk() (*string, bool) { + if o == nil || IsNil(o.LastUpdateTimeSinceEpoch) { + return nil, false + } + return o.LastUpdateTimeSinceEpoch, true +} + +// HasLastUpdateTimeSinceEpoch returns a boolean if a field has been set. +func (o *BaseResource) HasLastUpdateTimeSinceEpoch() bool { + if o != nil && !IsNil(o.LastUpdateTimeSinceEpoch) { + return true + } + + return false +} + +// SetLastUpdateTimeSinceEpoch gets a reference to the given string and assigns it to the LastUpdateTimeSinceEpoch field. +func (o *BaseResource) SetLastUpdateTimeSinceEpoch(v string) { + o.LastUpdateTimeSinceEpoch = &v +} + +// GetCustomProperties returns the CustomProperties field value if set, zero value otherwise. +func (o *BaseResource) GetCustomProperties() map[string]MetadataValue { + if o == nil || IsNil(o.CustomProperties) { + var ret map[string]MetadataValue + return ret + } + return o.CustomProperties +} + +// GetCustomPropertiesOk returns a tuple with the CustomProperties field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BaseResource) GetCustomPropertiesOk() (map[string]MetadataValue, bool) { + if o == nil || IsNil(o.CustomProperties) { + return map[string]MetadataValue{}, false + } + return o.CustomProperties, true +} + +// HasCustomProperties returns a boolean if a field has been set. 
+func (o *BaseResource) HasCustomProperties() bool { + if o != nil && !IsNil(o.CustomProperties) { + return true + } + + return false +} + +// SetCustomProperties gets a reference to the given map[string]MetadataValue and assigns it to the CustomProperties field. +func (o *BaseResource) SetCustomProperties(v map[string]MetadataValue) { + o.CustomProperties = v +} + +// GetDescription returns the Description field value if set, zero value otherwise. +func (o *BaseResource) GetDescription() string { + if o == nil || IsNil(o.Description) { + var ret string + return ret + } + return *o.Description +} + +// GetDescriptionOk returns a tuple with the Description field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BaseResource) GetDescriptionOk() (*string, bool) { + if o == nil || IsNil(o.Description) { + return nil, false + } + return o.Description, true +} + +// HasDescription returns a boolean if a field has been set. +func (o *BaseResource) HasDescription() bool { + if o != nil && !IsNil(o.Description) { + return true + } + + return false +} + +// SetDescription gets a reference to the given string and assigns it to the Description field. +func (o *BaseResource) SetDescription(v string) { + o.Description = &v +} + +// GetExternalId returns the ExternalId field value if set, zero value otherwise. +func (o *BaseResource) GetExternalId() string { + if o == nil || IsNil(o.ExternalId) { + var ret string + return ret + } + return *o.ExternalId +} + +// GetExternalIdOk returns a tuple with the ExternalId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BaseResource) GetExternalIdOk() (*string, bool) { + if o == nil || IsNil(o.ExternalId) { + return nil, false + } + return o.ExternalId, true +} + +// HasExternalId returns a boolean if a field has been set. 
+func (o *BaseResource) HasExternalId() bool { + if o != nil && !IsNil(o.ExternalId) { + return true + } + + return false +} + +// SetExternalId gets a reference to the given string and assigns it to the ExternalId field. +func (o *BaseResource) SetExternalId(v string) { + o.ExternalId = &v +} + +// GetName returns the Name field value if set, zero value otherwise. +func (o *BaseResource) GetName() string { + if o == nil || IsNil(o.Name) { + var ret string + return ret + } + return *o.Name +} + +// GetNameOk returns a tuple with the Name field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BaseResource) GetNameOk() (*string, bool) { + if o == nil || IsNil(o.Name) { + return nil, false + } + return o.Name, true +} + +// HasName returns a boolean if a field has been set. +func (o *BaseResource) HasName() bool { + if o != nil && !IsNil(o.Name) { + return true + } + + return false +} + +// SetName gets a reference to the given string and assigns it to the Name field. +func (o *BaseResource) SetName(v string) { + o.Name = &v +} + +// GetId returns the Id field value if set, zero value otherwise. +func (o *BaseResource) GetId() string { + if o == nil || IsNil(o.Id) { + var ret string + return ret + } + return *o.Id +} + +// GetIdOk returns a tuple with the Id field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BaseResource) GetIdOk() (*string, bool) { + if o == nil || IsNil(o.Id) { + return nil, false + } + return o.Id, true +} + +// HasId returns a boolean if a field has been set. +func (o *BaseResource) HasId() bool { + if o != nil && !IsNil(o.Id) { + return true + } + + return false +} + +// SetId gets a reference to the given string and assigns it to the Id field. 
+func (o *BaseResource) SetId(v string) { + o.Id = &v +} + +func (o BaseResource) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o BaseResource) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.CreateTimeSinceEpoch) { + toSerialize["createTimeSinceEpoch"] = o.CreateTimeSinceEpoch + } + if !IsNil(o.LastUpdateTimeSinceEpoch) { + toSerialize["lastUpdateTimeSinceEpoch"] = o.LastUpdateTimeSinceEpoch + } + if !IsNil(o.CustomProperties) { + toSerialize["customProperties"] = o.CustomProperties + } + if !IsNil(o.Description) { + toSerialize["description"] = o.Description + } + if !IsNil(o.ExternalId) { + toSerialize["externalId"] = o.ExternalId + } + if !IsNil(o.Name) { + toSerialize["name"] = o.Name + } + if !IsNil(o.Id) { + toSerialize["id"] = o.Id + } + return toSerialize, nil +} + +type NullableBaseResource struct { + value *BaseResource + isSet bool +} + +func (v NullableBaseResource) Get() *BaseResource { + return v.value +} + +func (v *NullableBaseResource) Set(val *BaseResource) { + v.value = val + v.isSet = true +} + +func (v NullableBaseResource) IsSet() bool { + return v.isSet +} + +func (v *NullableBaseResource) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableBaseResource(val *BaseResource) *NullableBaseResource { + return &NullableBaseResource{value: val, isSet: true} +} + +func (v NullableBaseResource) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableBaseResource) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/catalog/pkg/openapi/model_base_resource_list.go b/catalog/pkg/openapi/model_base_resource_list.go index 829e14c9c0..ac240bbd30 100644 --- a/catalog/pkg/openapi/model_base_resource_list.go +++ b/catalog/pkg/openapi/model_base_resource_list.go @@ -27,6 +27,8 @@ type BaseResourceList struct 
{ Size int32 `json:"size"` } +type _BaseResourceList BaseResourceList + // NewBaseResourceList instantiates a new BaseResourceList object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments diff --git a/catalog/pkg/openapi/model_catalog_artifact.go b/catalog/pkg/openapi/model_catalog_artifact.go new file mode 100644 index 0000000000..cd61bc3907 --- /dev/null +++ b/catalog/pkg/openapi/model_catalog_artifact.go @@ -0,0 +1,153 @@ +/* +Model Catalog REST API + +REST API for Model Registry to create and manage ML model metadata + +API version: v1alpha1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" + "fmt" +) + +// CatalogArtifact - A single artifact in the catalog API. +type CatalogArtifact struct { + CatalogMetricsArtifact *CatalogMetricsArtifact + CatalogModelArtifact *CatalogModelArtifact +} + +// CatalogMetricsArtifactAsCatalogArtifact is a convenience function that returns CatalogMetricsArtifact wrapped in CatalogArtifact +func CatalogMetricsArtifactAsCatalogArtifact(v *CatalogMetricsArtifact) CatalogArtifact { + return CatalogArtifact{ + CatalogMetricsArtifact: v, + } +} + +// CatalogModelArtifactAsCatalogArtifact is a convenience function that returns CatalogModelArtifact wrapped in CatalogArtifact +func CatalogModelArtifactAsCatalogArtifact(v *CatalogModelArtifact) CatalogArtifact { + return CatalogArtifact{ + CatalogModelArtifact: v, + } +} + +// Unmarshal JSON data into one of the pointers in the struct +func (dst *CatalogArtifact) UnmarshalJSON(data []byte) error { + var err error + // use discriminator value to speed up the lookup + var jsonDict map[string]interface{} + err = newStrictDecoder(data).Decode(&jsonDict) + if err != nil { + return fmt.Errorf("failed to unmarshal JSON into map for the discriminator lookup") + } + + // check if the discriminator 
value is 'metrics-artifact' + if jsonDict["artifactType"] == "metrics-artifact" { + // try to unmarshal JSON data into CatalogMetricsArtifact + err = json.Unmarshal(data, &dst.CatalogMetricsArtifact) + if err == nil { + return nil // data stored in dst.CatalogMetricsArtifact, return on the first match + } else { + dst.CatalogMetricsArtifact = nil + return fmt.Errorf("failed to unmarshal CatalogArtifact as CatalogMetricsArtifact: %s", err.Error()) + } + } + + // check if the discriminator value is 'model-artifact' + if jsonDict["artifactType"] == "model-artifact" { + // try to unmarshal JSON data into CatalogModelArtifact + err = json.Unmarshal(data, &dst.CatalogModelArtifact) + if err == nil { + return nil // data stored in dst.CatalogModelArtifact, return on the first match + } else { + dst.CatalogModelArtifact = nil + return fmt.Errorf("failed to unmarshal CatalogArtifact as CatalogModelArtifact: %s", err.Error()) + } + } + + return nil +} + +// Marshal data from the first non-nil pointers in the struct to JSON +func (src CatalogArtifact) MarshalJSON() ([]byte, error) { + if src.CatalogMetricsArtifact != nil { + return json.Marshal(&src.CatalogMetricsArtifact) + } + + if src.CatalogModelArtifact != nil { + return json.Marshal(&src.CatalogModelArtifact) + } + + return nil, nil // no data in oneOf schemas +} + +// Get the actual instance +func (obj *CatalogArtifact) GetActualInstance() interface{} { + if obj == nil { + return nil + } + if obj.CatalogMetricsArtifact != nil { + return obj.CatalogMetricsArtifact + } + + if obj.CatalogModelArtifact != nil { + return obj.CatalogModelArtifact + } + + // all schemas are nil + return nil +} + +// Get the actual instance value +func (obj CatalogArtifact) GetActualInstanceValue() interface{} { + if obj.CatalogMetricsArtifact != nil { + return *obj.CatalogMetricsArtifact + } + + if obj.CatalogModelArtifact != nil { + return *obj.CatalogModelArtifact + } + + // all schemas are nil + return nil +} + +type 
NullableCatalogArtifact struct { + value *CatalogArtifact + isSet bool +} + +func (v NullableCatalogArtifact) Get() *CatalogArtifact { + return v.value +} + +func (v *NullableCatalogArtifact) Set(val *CatalogArtifact) { + v.value = val + v.isSet = true +} + +func (v NullableCatalogArtifact) IsSet() bool { + return v.isSet +} + +func (v *NullableCatalogArtifact) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableCatalogArtifact(val *CatalogArtifact) *NullableCatalogArtifact { + return &NullableCatalogArtifact{value: val, isSet: true} +} + +func (v NullableCatalogArtifact) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableCatalogArtifact) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/catalog/pkg/openapi/model_catalog_artifact_list.go b/catalog/pkg/openapi/model_catalog_artifact_list.go new file mode 100644 index 0000000000..8dd348d010 --- /dev/null +++ b/catalog/pkg/openapi/model_catalog_artifact_list.go @@ -0,0 +1,202 @@ +/* +Model Catalog REST API + +REST API for Model Registry to create and manage ML model metadata + +API version: v1alpha1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" +) + +// checks if the CatalogArtifactList type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &CatalogArtifactList{} + +// CatalogArtifactList List of CatalogModel entities. +type CatalogArtifactList struct { + // Token to use to retrieve next page of results. + NextPageToken string `json:"nextPageToken"` + // Maximum number of resources to return in the result. + PageSize int32 `json:"pageSize"` + // Number of items in result list. + Size int32 `json:"size"` + // Array of `CatalogArtifact` entities. 
+ Items []CatalogArtifact `json:"items"` +} + +type _CatalogArtifactList CatalogArtifactList + +// NewCatalogArtifactList instantiates a new CatalogArtifactList object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewCatalogArtifactList(nextPageToken string, pageSize int32, size int32, items []CatalogArtifact) *CatalogArtifactList { + this := CatalogArtifactList{} + this.NextPageToken = nextPageToken + this.PageSize = pageSize + this.Size = size + this.Items = items + return &this +} + +// NewCatalogArtifactListWithDefaults instantiates a new CatalogArtifactList object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewCatalogArtifactListWithDefaults() *CatalogArtifactList { + this := CatalogArtifactList{} + return &this +} + +// GetNextPageToken returns the NextPageToken field value +func (o *CatalogArtifactList) GetNextPageToken() string { + if o == nil { + var ret string + return ret + } + + return o.NextPageToken +} + +// GetNextPageTokenOk returns a tuple with the NextPageToken field value +// and a boolean to check if the value has been set. +func (o *CatalogArtifactList) GetNextPageTokenOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.NextPageToken, true +} + +// SetNextPageToken sets field value +func (o *CatalogArtifactList) SetNextPageToken(v string) { + o.NextPageToken = v +} + +// GetPageSize returns the PageSize field value +func (o *CatalogArtifactList) GetPageSize() int32 { + if o == nil { + var ret int32 + return ret + } + + return o.PageSize +} + +// GetPageSizeOk returns a tuple with the PageSize field value +// and a boolean to check if the value has been set. 
+func (o *CatalogArtifactList) GetPageSizeOk() (*int32, bool) { + if o == nil { + return nil, false + } + return &o.PageSize, true +} + +// SetPageSize sets field value +func (o *CatalogArtifactList) SetPageSize(v int32) { + o.PageSize = v +} + +// GetSize returns the Size field value +func (o *CatalogArtifactList) GetSize() int32 { + if o == nil { + var ret int32 + return ret + } + + return o.Size +} + +// GetSizeOk returns a tuple with the Size field value +// and a boolean to check if the value has been set. +func (o *CatalogArtifactList) GetSizeOk() (*int32, bool) { + if o == nil { + return nil, false + } + return &o.Size, true +} + +// SetSize sets field value +func (o *CatalogArtifactList) SetSize(v int32) { + o.Size = v +} + +// GetItems returns the Items field value +func (o *CatalogArtifactList) GetItems() []CatalogArtifact { + if o == nil { + var ret []CatalogArtifact + return ret + } + + return o.Items +} + +// GetItemsOk returns a tuple with the Items field value +// and a boolean to check if the value has been set. 
+func (o *CatalogArtifactList) GetItemsOk() ([]CatalogArtifact, bool) { + if o == nil { + return nil, false + } + return o.Items, true +} + +// SetItems sets field value +func (o *CatalogArtifactList) SetItems(v []CatalogArtifact) { + o.Items = v +} + +func (o CatalogArtifactList) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o CatalogArtifactList) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + toSerialize["nextPageToken"] = o.NextPageToken + toSerialize["pageSize"] = o.PageSize + toSerialize["size"] = o.Size + toSerialize["items"] = o.Items + return toSerialize, nil +} + +type NullableCatalogArtifactList struct { + value *CatalogArtifactList + isSet bool +} + +func (v NullableCatalogArtifactList) Get() *CatalogArtifactList { + return v.value +} + +func (v *NullableCatalogArtifactList) Set(val *CatalogArtifactList) { + v.value = val + v.isSet = true +} + +func (v NullableCatalogArtifactList) IsSet() bool { + return v.isSet +} + +func (v *NullableCatalogArtifactList) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableCatalogArtifactList(val *CatalogArtifactList) *NullableCatalogArtifactList { + return &NullableCatalogArtifactList{value: val, isSet: true} +} + +func (v NullableCatalogArtifactList) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableCatalogArtifactList) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/catalog/pkg/openapi/model_catalog_label.go b/catalog/pkg/openapi/model_catalog_label.go new file mode 100644 index 0000000000..95f2a6af54 --- /dev/null +++ b/catalog/pkg/openapi/model_catalog_label.go @@ -0,0 +1,163 @@ +/* +Model Catalog REST API + +REST API for Model Registry to create and manage ML model metadata + +API version: v1alpha1 +*/ + +// Code generated by OpenAPI Generator 
(https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" +) + +// checks if the CatalogLabel type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &CatalogLabel{} + +// CatalogLabel A catalog label. Labels are used to categorize catalog sources. Represented as a flexible map of string key-value pairs with a required 'name' field. +type CatalogLabel struct { + // The unique name identifier for the label. + Name NullableString `json:"name"` + // An optional human-readable name to show in place of `name`. + DisplayName *string `json:"displayName,omitempty"` + AdditionalProperties map[string]interface{} +} + +type _CatalogLabel CatalogLabel + +// NewCatalogLabel instantiates a new CatalogLabel object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewCatalogLabel(name NullableString) *CatalogLabel { + this := CatalogLabel{} + this.Name = name + return &this +} + +// NewCatalogLabelWithDefaults instantiates a new CatalogLabel object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewCatalogLabelWithDefaults() *CatalogLabel { + this := CatalogLabel{} + return &this +} + +// GetName returns the Name field value +// If the value is explicit nil, the zero value for string will be returned +func (o *CatalogLabel) GetName() string { + if o == nil || o.Name.Get() == nil { + var ret string + return ret + } + + return *o.Name.Get() +} + +// GetNameOk returns a tuple with the Name field value +// and a boolean to check if the value has been set. 
+// NOTE: If the value is an explicit nil, `nil, true` will be returned +func (o *CatalogLabel) GetNameOk() (*string, bool) { + if o == nil { + return nil, false + } + return o.Name.Get(), o.Name.IsSet() +} + +// SetName sets field value +func (o *CatalogLabel) SetName(v string) { + o.Name.Set(&v) +} + +// GetDisplayName returns the DisplayName field value if set, zero value otherwise. +func (o *CatalogLabel) GetDisplayName() string { + if o == nil || IsNil(o.DisplayName) { + var ret string + return ret + } + return *o.DisplayName +} + +// GetDisplayNameOk returns a tuple with the DisplayName field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogLabel) GetDisplayNameOk() (*string, bool) { + if o == nil || IsNil(o.DisplayName) { + return nil, false + } + return o.DisplayName, true +} + +// HasDisplayName returns a boolean if a field has been set. +func (o *CatalogLabel) HasDisplayName() bool { + if o != nil && !IsNil(o.DisplayName) { + return true + } + + return false +} + +// SetDisplayName gets a reference to the given string and assigns it to the DisplayName field. 
+func (o *CatalogLabel) SetDisplayName(v string) { + o.DisplayName = &v +} + +func (o CatalogLabel) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o CatalogLabel) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + toSerialize["name"] = o.Name.Get() + if !IsNil(o.DisplayName) { + toSerialize["displayName"] = o.DisplayName + } + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + + return toSerialize, nil +} + +type NullableCatalogLabel struct { + value *CatalogLabel + isSet bool +} + +func (v NullableCatalogLabel) Get() *CatalogLabel { + return v.value +} + +func (v *NullableCatalogLabel) Set(val *CatalogLabel) { + v.value = val + v.isSet = true +} + +func (v NullableCatalogLabel) IsSet() bool { + return v.isSet +} + +func (v *NullableCatalogLabel) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableCatalogLabel(val *CatalogLabel) *NullableCatalogLabel { + return &NullableCatalogLabel{value: val, isSet: true} +} + +func (v NullableCatalogLabel) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableCatalogLabel) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/catalog/pkg/openapi/model_catalog_model_artifact_list.go b/catalog/pkg/openapi/model_catalog_label_list.go similarity index 54% rename from catalog/pkg/openapi/model_catalog_model_artifact_list.go rename to catalog/pkg/openapi/model_catalog_label_list.go index 8fe1a36199..4e1581e54b 100644 --- a/catalog/pkg/openapi/model_catalog_model_artifact_list.go +++ b/catalog/pkg/openapi/model_catalog_label_list.go @@ -14,27 +14,29 @@ import ( "encoding/json" ) -// checks if the CatalogModelArtifactList type satisfies the MappedNullable interface at compile time -var _ MappedNullable = &CatalogModelArtifactList{} +// checks if the CatalogLabelList 
type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &CatalogLabelList{} -// CatalogModelArtifactList List of CatalogModel entities. -type CatalogModelArtifactList struct { +// CatalogLabelList List of CatalogLabel entities. +type CatalogLabelList struct { // Token to use to retrieve next page of results. NextPageToken string `json:"nextPageToken"` // Maximum number of resources to return in the result. PageSize int32 `json:"pageSize"` // Number of items in result list. Size int32 `json:"size"` - // Array of `CatalogModelArtifact` entities. - Items []CatalogModelArtifact `json:"items"` + // Array of `CatalogLabel` entities. + Items []CatalogLabel `json:"items"` } -// NewCatalogModelArtifactList instantiates a new CatalogModelArtifactList object +type _CatalogLabelList CatalogLabelList + +// NewCatalogLabelList instantiates a new CatalogLabelList object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed -func NewCatalogModelArtifactList(nextPageToken string, pageSize int32, size int32, items []CatalogModelArtifact) *CatalogModelArtifactList { - this := CatalogModelArtifactList{} +func NewCatalogLabelList(nextPageToken string, pageSize int32, size int32, items []CatalogLabel) *CatalogLabelList { + this := CatalogLabelList{} this.NextPageToken = nextPageToken this.PageSize = pageSize this.Size = size @@ -42,16 +44,16 @@ func NewCatalogModelArtifactList(nextPageToken string, pageSize int32, size int3 return &this } -// NewCatalogModelArtifactListWithDefaults instantiates a new CatalogModelArtifactList object +// NewCatalogLabelListWithDefaults instantiates a new CatalogLabelList object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set -func 
NewCatalogModelArtifactListWithDefaults() *CatalogModelArtifactList { - this := CatalogModelArtifactList{} +func NewCatalogLabelListWithDefaults() *CatalogLabelList { + this := CatalogLabelList{} return &this } // GetNextPageToken returns the NextPageToken field value -func (o *CatalogModelArtifactList) GetNextPageToken() string { +func (o *CatalogLabelList) GetNextPageToken() string { if o == nil { var ret string return ret @@ -62,7 +64,7 @@ func (o *CatalogModelArtifactList) GetNextPageToken() string { // GetNextPageTokenOk returns a tuple with the NextPageToken field value // and a boolean to check if the value has been set. -func (o *CatalogModelArtifactList) GetNextPageTokenOk() (*string, bool) { +func (o *CatalogLabelList) GetNextPageTokenOk() (*string, bool) { if o == nil { return nil, false } @@ -70,12 +72,12 @@ func (o *CatalogModelArtifactList) GetNextPageTokenOk() (*string, bool) { } // SetNextPageToken sets field value -func (o *CatalogModelArtifactList) SetNextPageToken(v string) { +func (o *CatalogLabelList) SetNextPageToken(v string) { o.NextPageToken = v } // GetPageSize returns the PageSize field value -func (o *CatalogModelArtifactList) GetPageSize() int32 { +func (o *CatalogLabelList) GetPageSize() int32 { if o == nil { var ret int32 return ret @@ -86,7 +88,7 @@ func (o *CatalogModelArtifactList) GetPageSize() int32 { // GetPageSizeOk returns a tuple with the PageSize field value // and a boolean to check if the value has been set. 
-func (o *CatalogModelArtifactList) GetPageSizeOk() (*int32, bool) { +func (o *CatalogLabelList) GetPageSizeOk() (*int32, bool) { if o == nil { return nil, false } @@ -94,12 +96,12 @@ func (o *CatalogModelArtifactList) GetPageSizeOk() (*int32, bool) { } // SetPageSize sets field value -func (o *CatalogModelArtifactList) SetPageSize(v int32) { +func (o *CatalogLabelList) SetPageSize(v int32) { o.PageSize = v } // GetSize returns the Size field value -func (o *CatalogModelArtifactList) GetSize() int32 { +func (o *CatalogLabelList) GetSize() int32 { if o == nil { var ret int32 return ret @@ -110,7 +112,7 @@ func (o *CatalogModelArtifactList) GetSize() int32 { // GetSizeOk returns a tuple with the Size field value // and a boolean to check if the value has been set. -func (o *CatalogModelArtifactList) GetSizeOk() (*int32, bool) { +func (o *CatalogLabelList) GetSizeOk() (*int32, bool) { if o == nil { return nil, false } @@ -118,14 +120,14 @@ func (o *CatalogModelArtifactList) GetSizeOk() (*int32, bool) { } // SetSize sets field value -func (o *CatalogModelArtifactList) SetSize(v int32) { +func (o *CatalogLabelList) SetSize(v int32) { o.Size = v } // GetItems returns the Items field value -func (o *CatalogModelArtifactList) GetItems() []CatalogModelArtifact { +func (o *CatalogLabelList) GetItems() []CatalogLabel { if o == nil { - var ret []CatalogModelArtifact + var ret []CatalogLabel return ret } @@ -134,7 +136,7 @@ func (o *CatalogModelArtifactList) GetItems() []CatalogModelArtifact { // GetItemsOk returns a tuple with the Items field value // and a boolean to check if the value has been set. 
-func (o *CatalogModelArtifactList) GetItemsOk() ([]CatalogModelArtifact, bool) { +func (o *CatalogLabelList) GetItemsOk() ([]CatalogLabel, bool) { if o == nil { return nil, false } @@ -142,11 +144,11 @@ func (o *CatalogModelArtifactList) GetItemsOk() ([]CatalogModelArtifact, bool) { } // SetItems sets field value -func (o *CatalogModelArtifactList) SetItems(v []CatalogModelArtifact) { +func (o *CatalogLabelList) SetItems(v []CatalogLabel) { o.Items = v } -func (o CatalogModelArtifactList) MarshalJSON() ([]byte, error) { +func (o CatalogLabelList) MarshalJSON() ([]byte, error) { toSerialize, err := o.ToMap() if err != nil { return []byte{}, err @@ -154,7 +156,7 @@ func (o CatalogModelArtifactList) MarshalJSON() ([]byte, error) { return json.Marshal(toSerialize) } -func (o CatalogModelArtifactList) ToMap() (map[string]interface{}, error) { +func (o CatalogLabelList) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} toSerialize["nextPageToken"] = o.NextPageToken toSerialize["pageSize"] = o.PageSize @@ -163,38 +165,38 @@ func (o CatalogModelArtifactList) ToMap() (map[string]interface{}, error) { return toSerialize, nil } -type NullableCatalogModelArtifactList struct { - value *CatalogModelArtifactList +type NullableCatalogLabelList struct { + value *CatalogLabelList isSet bool } -func (v NullableCatalogModelArtifactList) Get() *CatalogModelArtifactList { +func (v NullableCatalogLabelList) Get() *CatalogLabelList { return v.value } -func (v *NullableCatalogModelArtifactList) Set(val *CatalogModelArtifactList) { +func (v *NullableCatalogLabelList) Set(val *CatalogLabelList) { v.value = val v.isSet = true } -func (v NullableCatalogModelArtifactList) IsSet() bool { +func (v NullableCatalogLabelList) IsSet() bool { return v.isSet } -func (v *NullableCatalogModelArtifactList) Unset() { +func (v *NullableCatalogLabelList) Unset() { v.value = nil v.isSet = false } -func NewNullableCatalogModelArtifactList(val *CatalogModelArtifactList) 
*NullableCatalogModelArtifactList { - return &NullableCatalogModelArtifactList{value: val, isSet: true} +func NewNullableCatalogLabelList(val *CatalogLabelList) *NullableCatalogLabelList { + return &NullableCatalogLabelList{value: val, isSet: true} } -func (v NullableCatalogModelArtifactList) MarshalJSON() ([]byte, error) { +func (v NullableCatalogLabelList) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } -func (v *NullableCatalogModelArtifactList) UnmarshalJSON(src []byte) error { +func (v *NullableCatalogLabelList) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) } diff --git a/catalog/pkg/openapi/model_catalog_metrics_artifact.go b/catalog/pkg/openapi/model_catalog_metrics_artifact.go new file mode 100644 index 0000000000..c4f254caf8 --- /dev/null +++ b/catalog/pkg/openapi/model_catalog_metrics_artifact.go @@ -0,0 +1,405 @@ +/* +Model Catalog REST API + +REST API for Model Registry to create and manage ML model metadata + +API version: v1alpha1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" +) + +// checks if the CatalogMetricsArtifact type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &CatalogMetricsArtifact{} + +// CatalogMetricsArtifact A metadata Artifact Entity. +type CatalogMetricsArtifact struct { + // User provided custom properties which are not defined by its type. + CustomProperties map[string]MetadataValue `json:"customProperties,omitempty"` + // An optional description about the resource. + Description *string `json:"description,omitempty"` + // The external id that come from the clients’ system. This field is optional. If set, it must be unique among all resources within a database instance. + ExternalId *string `json:"externalId,omitempty"` + // The client provided name of the artifact. This field is optional. 
If set, it must be unique among all the artifacts of the same artifact type within a database instance and cannot be changed once set. + Name *string `json:"name,omitempty"` + // The unique server generated id of the resource. + Id *string `json:"id,omitempty"` + // Output only. Create time of the resource in millisecond since epoch. + CreateTimeSinceEpoch *string `json:"createTimeSinceEpoch,omitempty"` + // Output only. Last update time of the resource since epoch in millisecond since epoch. + LastUpdateTimeSinceEpoch *string `json:"lastUpdateTimeSinceEpoch,omitempty"` + ArtifactType string `json:"artifactType"` + MetricsType string `json:"metricsType"` +} + +type _CatalogMetricsArtifact CatalogMetricsArtifact + +// NewCatalogMetricsArtifact instantiates a new CatalogMetricsArtifact object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewCatalogMetricsArtifact(artifactType string, metricsType string) *CatalogMetricsArtifact { + this := CatalogMetricsArtifact{} + this.ArtifactType = artifactType + this.MetricsType = metricsType + return &this +} + +// NewCatalogMetricsArtifactWithDefaults instantiates a new CatalogMetricsArtifact object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewCatalogMetricsArtifactWithDefaults() *CatalogMetricsArtifact { + this := CatalogMetricsArtifact{} + var artifactType string = "metrics-artifact" + this.ArtifactType = artifactType + return &this +} + +// GetCustomProperties returns the CustomProperties field value if set, zero value otherwise. 
+func (o *CatalogMetricsArtifact) GetCustomProperties() map[string]MetadataValue { + if o == nil || IsNil(o.CustomProperties) { + var ret map[string]MetadataValue + return ret + } + return o.CustomProperties +} + +// GetCustomPropertiesOk returns a tuple with the CustomProperties field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogMetricsArtifact) GetCustomPropertiesOk() (map[string]MetadataValue, bool) { + if o == nil || IsNil(o.CustomProperties) { + return map[string]MetadataValue{}, false + } + return o.CustomProperties, true +} + +// HasCustomProperties returns a boolean if a field has been set. +func (o *CatalogMetricsArtifact) HasCustomProperties() bool { + if o != nil && !IsNil(o.CustomProperties) { + return true + } + + return false +} + +// SetCustomProperties gets a reference to the given map[string]MetadataValue and assigns it to the CustomProperties field. +func (o *CatalogMetricsArtifact) SetCustomProperties(v map[string]MetadataValue) { + o.CustomProperties = v +} + +// GetDescription returns the Description field value if set, zero value otherwise. +func (o *CatalogMetricsArtifact) GetDescription() string { + if o == nil || IsNil(o.Description) { + var ret string + return ret + } + return *o.Description +} + +// GetDescriptionOk returns a tuple with the Description field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogMetricsArtifact) GetDescriptionOk() (*string, bool) { + if o == nil || IsNil(o.Description) { + return nil, false + } + return o.Description, true +} + +// HasDescription returns a boolean if a field has been set. +func (o *CatalogMetricsArtifact) HasDescription() bool { + if o != nil && !IsNil(o.Description) { + return true + } + + return false +} + +// SetDescription gets a reference to the given string and assigns it to the Description field. 
+func (o *CatalogMetricsArtifact) SetDescription(v string) { + o.Description = &v +} + +// GetExternalId returns the ExternalId field value if set, zero value otherwise. +func (o *CatalogMetricsArtifact) GetExternalId() string { + if o == nil || IsNil(o.ExternalId) { + var ret string + return ret + } + return *o.ExternalId +} + +// GetExternalIdOk returns a tuple with the ExternalId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogMetricsArtifact) GetExternalIdOk() (*string, bool) { + if o == nil || IsNil(o.ExternalId) { + return nil, false + } + return o.ExternalId, true +} + +// HasExternalId returns a boolean if a field has been set. +func (o *CatalogMetricsArtifact) HasExternalId() bool { + if o != nil && !IsNil(o.ExternalId) { + return true + } + + return false +} + +// SetExternalId gets a reference to the given string and assigns it to the ExternalId field. +func (o *CatalogMetricsArtifact) SetExternalId(v string) { + o.ExternalId = &v +} + +// GetName returns the Name field value if set, zero value otherwise. +func (o *CatalogMetricsArtifact) GetName() string { + if o == nil || IsNil(o.Name) { + var ret string + return ret + } + return *o.Name +} + +// GetNameOk returns a tuple with the Name field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogMetricsArtifact) GetNameOk() (*string, bool) { + if o == nil || IsNil(o.Name) { + return nil, false + } + return o.Name, true +} + +// HasName returns a boolean if a field has been set. +func (o *CatalogMetricsArtifact) HasName() bool { + if o != nil && !IsNil(o.Name) { + return true + } + + return false +} + +// SetName gets a reference to the given string and assigns it to the Name field. +func (o *CatalogMetricsArtifact) SetName(v string) { + o.Name = &v +} + +// GetId returns the Id field value if set, zero value otherwise. 
+func (o *CatalogMetricsArtifact) GetId() string { + if o == nil || IsNil(o.Id) { + var ret string + return ret + } + return *o.Id +} + +// GetIdOk returns a tuple with the Id field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogMetricsArtifact) GetIdOk() (*string, bool) { + if o == nil || IsNil(o.Id) { + return nil, false + } + return o.Id, true +} + +// HasId returns a boolean if a field has been set. +func (o *CatalogMetricsArtifact) HasId() bool { + if o != nil && !IsNil(o.Id) { + return true + } + + return false +} + +// SetId gets a reference to the given string and assigns it to the Id field. +func (o *CatalogMetricsArtifact) SetId(v string) { + o.Id = &v +} + +// GetCreateTimeSinceEpoch returns the CreateTimeSinceEpoch field value if set, zero value otherwise. +func (o *CatalogMetricsArtifact) GetCreateTimeSinceEpoch() string { + if o == nil || IsNil(o.CreateTimeSinceEpoch) { + var ret string + return ret + } + return *o.CreateTimeSinceEpoch +} + +// GetCreateTimeSinceEpochOk returns a tuple with the CreateTimeSinceEpoch field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogMetricsArtifact) GetCreateTimeSinceEpochOk() (*string, bool) { + if o == nil || IsNil(o.CreateTimeSinceEpoch) { + return nil, false + } + return o.CreateTimeSinceEpoch, true +} + +// HasCreateTimeSinceEpoch returns a boolean if a field has been set. +func (o *CatalogMetricsArtifact) HasCreateTimeSinceEpoch() bool { + if o != nil && !IsNil(o.CreateTimeSinceEpoch) { + return true + } + + return false +} + +// SetCreateTimeSinceEpoch gets a reference to the given string and assigns it to the CreateTimeSinceEpoch field. +func (o *CatalogMetricsArtifact) SetCreateTimeSinceEpoch(v string) { + o.CreateTimeSinceEpoch = &v +} + +// GetLastUpdateTimeSinceEpoch returns the LastUpdateTimeSinceEpoch field value if set, zero value otherwise. 
+func (o *CatalogMetricsArtifact) GetLastUpdateTimeSinceEpoch() string { + if o == nil || IsNil(o.LastUpdateTimeSinceEpoch) { + var ret string + return ret + } + return *o.LastUpdateTimeSinceEpoch +} + +// GetLastUpdateTimeSinceEpochOk returns a tuple with the LastUpdateTimeSinceEpoch field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogMetricsArtifact) GetLastUpdateTimeSinceEpochOk() (*string, bool) { + if o == nil || IsNil(o.LastUpdateTimeSinceEpoch) { + return nil, false + } + return o.LastUpdateTimeSinceEpoch, true +} + +// HasLastUpdateTimeSinceEpoch returns a boolean if a field has been set. +func (o *CatalogMetricsArtifact) HasLastUpdateTimeSinceEpoch() bool { + if o != nil && !IsNil(o.LastUpdateTimeSinceEpoch) { + return true + } + + return false +} + +// SetLastUpdateTimeSinceEpoch gets a reference to the given string and assigns it to the LastUpdateTimeSinceEpoch field. +func (o *CatalogMetricsArtifact) SetLastUpdateTimeSinceEpoch(v string) { + o.LastUpdateTimeSinceEpoch = &v +} + +// GetArtifactType returns the ArtifactType field value +func (o *CatalogMetricsArtifact) GetArtifactType() string { + if o == nil { + var ret string + return ret + } + + return o.ArtifactType +} + +// GetArtifactTypeOk returns a tuple with the ArtifactType field value +// and a boolean to check if the value has been set. +func (o *CatalogMetricsArtifact) GetArtifactTypeOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.ArtifactType, true +} + +// SetArtifactType sets field value +func (o *CatalogMetricsArtifact) SetArtifactType(v string) { + o.ArtifactType = v +} + +// GetMetricsType returns the MetricsType field value +func (o *CatalogMetricsArtifact) GetMetricsType() string { + if o == nil { + var ret string + return ret + } + + return o.MetricsType +} + +// GetMetricsTypeOk returns a tuple with the MetricsType field value +// and a boolean to check if the value has been set. 
+func (o *CatalogMetricsArtifact) GetMetricsTypeOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.MetricsType, true +} + +// SetMetricsType sets field value +func (o *CatalogMetricsArtifact) SetMetricsType(v string) { + o.MetricsType = v +} + +func (o CatalogMetricsArtifact) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o CatalogMetricsArtifact) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.CustomProperties) { + toSerialize["customProperties"] = o.CustomProperties + } + if !IsNil(o.Description) { + toSerialize["description"] = o.Description + } + if !IsNil(o.ExternalId) { + toSerialize["externalId"] = o.ExternalId + } + if !IsNil(o.Name) { + toSerialize["name"] = o.Name + } + if !IsNil(o.Id) { + toSerialize["id"] = o.Id + } + if !IsNil(o.CreateTimeSinceEpoch) { + toSerialize["createTimeSinceEpoch"] = o.CreateTimeSinceEpoch + } + if !IsNil(o.LastUpdateTimeSinceEpoch) { + toSerialize["lastUpdateTimeSinceEpoch"] = o.LastUpdateTimeSinceEpoch + } + toSerialize["artifactType"] = o.ArtifactType + toSerialize["metricsType"] = o.MetricsType + return toSerialize, nil +} + +type NullableCatalogMetricsArtifact struct { + value *CatalogMetricsArtifact + isSet bool +} + +func (v NullableCatalogMetricsArtifact) Get() *CatalogMetricsArtifact { + return v.value +} + +func (v *NullableCatalogMetricsArtifact) Set(val *CatalogMetricsArtifact) { + v.value = val + v.isSet = true +} + +func (v NullableCatalogMetricsArtifact) IsSet() bool { + return v.isSet +} + +func (v *NullableCatalogMetricsArtifact) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableCatalogMetricsArtifact(val *CatalogMetricsArtifact) *NullableCatalogMetricsArtifact { + return &NullableCatalogMetricsArtifact{value: val, isSet: true} +} + +func (v NullableCatalogMetricsArtifact) MarshalJSON() ([]byte, error) { + return 
json.Marshal(v.value) +} + +func (v *NullableCatalogMetricsArtifact) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/catalog/pkg/openapi/model_catalog_model.go b/catalog/pkg/openapi/model_catalog_model.go index 0ece8c75a8..79ef66fe10 100644 --- a/catalog/pkg/openapi/model_catalog_model.go +++ b/catalog/pkg/openapi/model_catalog_model.go @@ -19,11 +19,7 @@ var _ MappedNullable = &CatalogModel{} // CatalogModel A model in the model catalog. type CatalogModel struct { - // Output only. Create time of the resource in millisecond since epoch. - CreateTimeSinceEpoch *string `json:"createTimeSinceEpoch,omitempty"` - // Output only. Last update time of the resource since epoch in millisecond since epoch. - LastUpdateTimeSinceEpoch *string `json:"lastUpdateTimeSinceEpoch,omitempty"` - // Human-readable description of the model. + // An optional description about the resource. Description *string `json:"description,omitempty"` // Model documentation in Markdown. Readme *string `json:"readme,omitempty"` @@ -43,13 +39,23 @@ type CatalogModel struct { LicenseLink *string `json:"licenseLink,omitempty"` LibraryName *string `json:"libraryName,omitempty"` // User provided custom properties which are not defined by its type. - CustomProperties *map[string]MetadataValue `json:"customProperties,omitempty"` + CustomProperties map[string]MetadataValue `json:"customProperties,omitempty"` + // The external id that come from the clients’ system. This field is optional. If set, it must be unique among all resources within a database instance. + ExternalId *string `json:"externalId,omitempty"` // Name of the model. Must be unique within a source. Name string `json:"name"` + // The unique server generated id of the resource. + Id *string `json:"id,omitempty"` + // Output only. Create time of the resource in millisecond since epoch. + CreateTimeSinceEpoch *string `json:"createTimeSinceEpoch,omitempty"` + // Output only. 
Last update time of the resource since epoch in millisecond since epoch. + LastUpdateTimeSinceEpoch *string `json:"lastUpdateTimeSinceEpoch,omitempty"` // ID of the source this model belongs to. SourceId *string `json:"source_id,omitempty"` } +type _CatalogModel CatalogModel + // NewCatalogModel instantiates a new CatalogModel object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments @@ -68,70 +74,6 @@ func NewCatalogModelWithDefaults() *CatalogModel { return &this } -// GetCreateTimeSinceEpoch returns the CreateTimeSinceEpoch field value if set, zero value otherwise. -func (o *CatalogModel) GetCreateTimeSinceEpoch() string { - if o == nil || IsNil(o.CreateTimeSinceEpoch) { - var ret string - return ret - } - return *o.CreateTimeSinceEpoch -} - -// GetCreateTimeSinceEpochOk returns a tuple with the CreateTimeSinceEpoch field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *CatalogModel) GetCreateTimeSinceEpochOk() (*string, bool) { - if o == nil || IsNil(o.CreateTimeSinceEpoch) { - return nil, false - } - return o.CreateTimeSinceEpoch, true -} - -// HasCreateTimeSinceEpoch returns a boolean if a field has been set. -func (o *CatalogModel) HasCreateTimeSinceEpoch() bool { - if o != nil && !IsNil(o.CreateTimeSinceEpoch) { - return true - } - - return false -} - -// SetCreateTimeSinceEpoch gets a reference to the given string and assigns it to the CreateTimeSinceEpoch field. -func (o *CatalogModel) SetCreateTimeSinceEpoch(v string) { - o.CreateTimeSinceEpoch = &v -} - -// GetLastUpdateTimeSinceEpoch returns the LastUpdateTimeSinceEpoch field value if set, zero value otherwise. 
-func (o *CatalogModel) GetLastUpdateTimeSinceEpoch() string { - if o == nil || IsNil(o.LastUpdateTimeSinceEpoch) { - var ret string - return ret - } - return *o.LastUpdateTimeSinceEpoch -} - -// GetLastUpdateTimeSinceEpochOk returns a tuple with the LastUpdateTimeSinceEpoch field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *CatalogModel) GetLastUpdateTimeSinceEpochOk() (*string, bool) { - if o == nil || IsNil(o.LastUpdateTimeSinceEpoch) { - return nil, false - } - return o.LastUpdateTimeSinceEpoch, true -} - -// HasLastUpdateTimeSinceEpoch returns a boolean if a field has been set. -func (o *CatalogModel) HasLastUpdateTimeSinceEpoch() bool { - if o != nil && !IsNil(o.LastUpdateTimeSinceEpoch) { - return true - } - - return false -} - -// SetLastUpdateTimeSinceEpoch gets a reference to the given string and assigns it to the LastUpdateTimeSinceEpoch field. -func (o *CatalogModel) SetLastUpdateTimeSinceEpoch(v string) { - o.LastUpdateTimeSinceEpoch = &v -} - // GetDescription returns the Description field value if set, zero value otherwise. func (o *CatalogModel) GetDescription() string { if o == nil || IsNil(o.Description) { @@ -458,14 +400,14 @@ func (o *CatalogModel) GetCustomProperties() map[string]MetadataValue { var ret map[string]MetadataValue return ret } - return *o.CustomProperties + return o.CustomProperties } // GetCustomPropertiesOk returns a tuple with the CustomProperties field value if set, nil otherwise // and a boolean to check if the value has been set. 
-func (o *CatalogModel) GetCustomPropertiesOk() (*map[string]MetadataValue, bool) { +func (o *CatalogModel) GetCustomPropertiesOk() (map[string]MetadataValue, bool) { if o == nil || IsNil(o.CustomProperties) { - return nil, false + return map[string]MetadataValue{}, false } return o.CustomProperties, true } @@ -481,7 +423,39 @@ func (o *CatalogModel) HasCustomProperties() bool { // SetCustomProperties gets a reference to the given map[string]MetadataValue and assigns it to the CustomProperties field. func (o *CatalogModel) SetCustomProperties(v map[string]MetadataValue) { - o.CustomProperties = &v + o.CustomProperties = v +} + +// GetExternalId returns the ExternalId field value if set, zero value otherwise. +func (o *CatalogModel) GetExternalId() string { + if o == nil || IsNil(o.ExternalId) { + var ret string + return ret + } + return *o.ExternalId +} + +// GetExternalIdOk returns a tuple with the ExternalId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogModel) GetExternalIdOk() (*string, bool) { + if o == nil || IsNil(o.ExternalId) { + return nil, false + } + return o.ExternalId, true +} + +// HasExternalId returns a boolean if a field has been set. +func (o *CatalogModel) HasExternalId() bool { + if o != nil && !IsNil(o.ExternalId) { + return true + } + + return false +} + +// SetExternalId gets a reference to the given string and assigns it to the ExternalId field. +func (o *CatalogModel) SetExternalId(v string) { + o.ExternalId = &v } // GetName returns the Name field value @@ -508,6 +482,102 @@ func (o *CatalogModel) SetName(v string) { o.Name = v } +// GetId returns the Id field value if set, zero value otherwise. +func (o *CatalogModel) GetId() string { + if o == nil || IsNil(o.Id) { + var ret string + return ret + } + return *o.Id +} + +// GetIdOk returns a tuple with the Id field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *CatalogModel) GetIdOk() (*string, bool) { + if o == nil || IsNil(o.Id) { + return nil, false + } + return o.Id, true +} + +// HasId returns a boolean if a field has been set. +func (o *CatalogModel) HasId() bool { + if o != nil && !IsNil(o.Id) { + return true + } + + return false +} + +// SetId gets a reference to the given string and assigns it to the Id field. +func (o *CatalogModel) SetId(v string) { + o.Id = &v +} + +// GetCreateTimeSinceEpoch returns the CreateTimeSinceEpoch field value if set, zero value otherwise. +func (o *CatalogModel) GetCreateTimeSinceEpoch() string { + if o == nil || IsNil(o.CreateTimeSinceEpoch) { + var ret string + return ret + } + return *o.CreateTimeSinceEpoch +} + +// GetCreateTimeSinceEpochOk returns a tuple with the CreateTimeSinceEpoch field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogModel) GetCreateTimeSinceEpochOk() (*string, bool) { + if o == nil || IsNil(o.CreateTimeSinceEpoch) { + return nil, false + } + return o.CreateTimeSinceEpoch, true +} + +// HasCreateTimeSinceEpoch returns a boolean if a field has been set. +func (o *CatalogModel) HasCreateTimeSinceEpoch() bool { + if o != nil && !IsNil(o.CreateTimeSinceEpoch) { + return true + } + + return false +} + +// SetCreateTimeSinceEpoch gets a reference to the given string and assigns it to the CreateTimeSinceEpoch field. +func (o *CatalogModel) SetCreateTimeSinceEpoch(v string) { + o.CreateTimeSinceEpoch = &v +} + +// GetLastUpdateTimeSinceEpoch returns the LastUpdateTimeSinceEpoch field value if set, zero value otherwise. +func (o *CatalogModel) GetLastUpdateTimeSinceEpoch() string { + if o == nil || IsNil(o.LastUpdateTimeSinceEpoch) { + var ret string + return ret + } + return *o.LastUpdateTimeSinceEpoch +} + +// GetLastUpdateTimeSinceEpochOk returns a tuple with the LastUpdateTimeSinceEpoch field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *CatalogModel) GetLastUpdateTimeSinceEpochOk() (*string, bool) { + if o == nil || IsNil(o.LastUpdateTimeSinceEpoch) { + return nil, false + } + return o.LastUpdateTimeSinceEpoch, true +} + +// HasLastUpdateTimeSinceEpoch returns a boolean if a field has been set. +func (o *CatalogModel) HasLastUpdateTimeSinceEpoch() bool { + if o != nil && !IsNil(o.LastUpdateTimeSinceEpoch) { + return true + } + + return false +} + +// SetLastUpdateTimeSinceEpoch gets a reference to the given string and assigns it to the LastUpdateTimeSinceEpoch field. +func (o *CatalogModel) SetLastUpdateTimeSinceEpoch(v string) { + o.LastUpdateTimeSinceEpoch = &v +} + // GetSourceId returns the SourceId field value if set, zero value otherwise. func (o *CatalogModel) GetSourceId() string { if o == nil || IsNil(o.SourceId) { @@ -550,12 +620,6 @@ func (o CatalogModel) MarshalJSON() ([]byte, error) { func (o CatalogModel) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if !IsNil(o.CreateTimeSinceEpoch) { - toSerialize["createTimeSinceEpoch"] = o.CreateTimeSinceEpoch - } - if !IsNil(o.LastUpdateTimeSinceEpoch) { - toSerialize["lastUpdateTimeSinceEpoch"] = o.LastUpdateTimeSinceEpoch - } if !IsNil(o.Description) { toSerialize["description"] = o.Description } @@ -589,7 +653,19 @@ func (o CatalogModel) ToMap() (map[string]interface{}, error) { if !IsNil(o.CustomProperties) { toSerialize["customProperties"] = o.CustomProperties } + if !IsNil(o.ExternalId) { + toSerialize["externalId"] = o.ExternalId + } toSerialize["name"] = o.Name + if !IsNil(o.Id) { + toSerialize["id"] = o.Id + } + if !IsNil(o.CreateTimeSinceEpoch) { + toSerialize["createTimeSinceEpoch"] = o.CreateTimeSinceEpoch + } + if !IsNil(o.LastUpdateTimeSinceEpoch) { + toSerialize["lastUpdateTimeSinceEpoch"] = o.LastUpdateTimeSinceEpoch + } if !IsNil(o.SourceId) { toSerialize["source_id"] = o.SourceId } diff --git a/catalog/pkg/openapi/model_catalog_model_artifact.go 
b/catalog/pkg/openapi/model_catalog_model_artifact.go index 2f6a089855..cf934793e9 100644 --- a/catalog/pkg/openapi/model_catalog_model_artifact.go +++ b/catalog/pkg/openapi/model_catalog_model_artifact.go @@ -17,24 +17,36 @@ import ( // checks if the CatalogModelArtifact type satisfies the MappedNullable interface at compile time var _ MappedNullable = &CatalogModelArtifact{} -// CatalogModelArtifact A single artifact for a catalog model. +// CatalogModelArtifact A Catalog Model Artifact Entity. type CatalogModelArtifact struct { + // User provided custom properties which are not defined by its type. + CustomProperties map[string]MetadataValue `json:"customProperties,omitempty"` + // An optional description about the resource. + Description *string `json:"description,omitempty"` + // The external id that come from the clients’ system. This field is optional. If set, it must be unique among all resources within a database instance. + ExternalId *string `json:"externalId,omitempty"` + // The client provided name of the artifact. This field is optional. If set, it must be unique among all the artifacts of the same artifact type within a database instance and cannot be changed once set. + Name *string `json:"name,omitempty"` + // The unique server generated id of the resource. + Id *string `json:"id,omitempty"` // Output only. Create time of the resource in millisecond since epoch. CreateTimeSinceEpoch *string `json:"createTimeSinceEpoch,omitempty"` // Output only. Last update time of the resource since epoch in millisecond since epoch. LastUpdateTimeSinceEpoch *string `json:"lastUpdateTimeSinceEpoch,omitempty"` - // URI where the artifact can be retrieved. + ArtifactType string `json:"artifactType"` + // URI where the model can be retrieved. Uri string `json:"uri"` - // User provided custom properties which are not defined by its type. 
- CustomProperties *map[string]MetadataValue `json:"customProperties,omitempty"` } +type _CatalogModelArtifact CatalogModelArtifact + // NewCatalogModelArtifact instantiates a new CatalogModelArtifact object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed -func NewCatalogModelArtifact(uri string) *CatalogModelArtifact { +func NewCatalogModelArtifact(artifactType string, uri string) *CatalogModelArtifact { this := CatalogModelArtifact{} + this.ArtifactType = artifactType this.Uri = uri return &this } @@ -44,9 +56,171 @@ func NewCatalogModelArtifact(uri string) *CatalogModelArtifact { // but it doesn't guarantee that properties required by API are set func NewCatalogModelArtifactWithDefaults() *CatalogModelArtifact { this := CatalogModelArtifact{} + var artifactType string = "model-artifact" + this.ArtifactType = artifactType return &this } +// GetCustomProperties returns the CustomProperties field value if set, zero value otherwise. +func (o *CatalogModelArtifact) GetCustomProperties() map[string]MetadataValue { + if o == nil || IsNil(o.CustomProperties) { + var ret map[string]MetadataValue + return ret + } + return o.CustomProperties +} + +// GetCustomPropertiesOk returns a tuple with the CustomProperties field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogModelArtifact) GetCustomPropertiesOk() (map[string]MetadataValue, bool) { + if o == nil || IsNil(o.CustomProperties) { + return map[string]MetadataValue{}, false + } + return o.CustomProperties, true +} + +// HasCustomProperties returns a boolean if a field has been set. 
+func (o *CatalogModelArtifact) HasCustomProperties() bool { + if o != nil && !IsNil(o.CustomProperties) { + return true + } + + return false +} + +// SetCustomProperties gets a reference to the given map[string]MetadataValue and assigns it to the CustomProperties field. +func (o *CatalogModelArtifact) SetCustomProperties(v map[string]MetadataValue) { + o.CustomProperties = v +} + +// GetDescription returns the Description field value if set, zero value otherwise. +func (o *CatalogModelArtifact) GetDescription() string { + if o == nil || IsNil(o.Description) { + var ret string + return ret + } + return *o.Description +} + +// GetDescriptionOk returns a tuple with the Description field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogModelArtifact) GetDescriptionOk() (*string, bool) { + if o == nil || IsNil(o.Description) { + return nil, false + } + return o.Description, true +} + +// HasDescription returns a boolean if a field has been set. +func (o *CatalogModelArtifact) HasDescription() bool { + if o != nil && !IsNil(o.Description) { + return true + } + + return false +} + +// SetDescription gets a reference to the given string and assigns it to the Description field. +func (o *CatalogModelArtifact) SetDescription(v string) { + o.Description = &v +} + +// GetExternalId returns the ExternalId field value if set, zero value otherwise. +func (o *CatalogModelArtifact) GetExternalId() string { + if o == nil || IsNil(o.ExternalId) { + var ret string + return ret + } + return *o.ExternalId +} + +// GetExternalIdOk returns a tuple with the ExternalId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogModelArtifact) GetExternalIdOk() (*string, bool) { + if o == nil || IsNil(o.ExternalId) { + return nil, false + } + return o.ExternalId, true +} + +// HasExternalId returns a boolean if a field has been set. 
+func (o *CatalogModelArtifact) HasExternalId() bool { + if o != nil && !IsNil(o.ExternalId) { + return true + } + + return false +} + +// SetExternalId gets a reference to the given string and assigns it to the ExternalId field. +func (o *CatalogModelArtifact) SetExternalId(v string) { + o.ExternalId = &v +} + +// GetName returns the Name field value if set, zero value otherwise. +func (o *CatalogModelArtifact) GetName() string { + if o == nil || IsNil(o.Name) { + var ret string + return ret + } + return *o.Name +} + +// GetNameOk returns a tuple with the Name field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogModelArtifact) GetNameOk() (*string, bool) { + if o == nil || IsNil(o.Name) { + return nil, false + } + return o.Name, true +} + +// HasName returns a boolean if a field has been set. +func (o *CatalogModelArtifact) HasName() bool { + if o != nil && !IsNil(o.Name) { + return true + } + + return false +} + +// SetName gets a reference to the given string and assigns it to the Name field. +func (o *CatalogModelArtifact) SetName(v string) { + o.Name = &v +} + +// GetId returns the Id field value if set, zero value otherwise. +func (o *CatalogModelArtifact) GetId() string { + if o == nil || IsNil(o.Id) { + var ret string + return ret + } + return *o.Id +} + +// GetIdOk returns a tuple with the Id field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogModelArtifact) GetIdOk() (*string, bool) { + if o == nil || IsNil(o.Id) { + return nil, false + } + return o.Id, true +} + +// HasId returns a boolean if a field has been set. +func (o *CatalogModelArtifact) HasId() bool { + if o != nil && !IsNil(o.Id) { + return true + } + + return false +} + +// SetId gets a reference to the given string and assigns it to the Id field. 
+func (o *CatalogModelArtifact) SetId(v string) { + o.Id = &v +} + // GetCreateTimeSinceEpoch returns the CreateTimeSinceEpoch field value if set, zero value otherwise. func (o *CatalogModelArtifact) GetCreateTimeSinceEpoch() string { if o == nil || IsNil(o.CreateTimeSinceEpoch) { @@ -111,60 +285,52 @@ func (o *CatalogModelArtifact) SetLastUpdateTimeSinceEpoch(v string) { o.LastUpdateTimeSinceEpoch = &v } -// GetUri returns the Uri field value -func (o *CatalogModelArtifact) GetUri() string { +// GetArtifactType returns the ArtifactType field value +func (o *CatalogModelArtifact) GetArtifactType() string { if o == nil { var ret string return ret } - return o.Uri + return o.ArtifactType } -// GetUriOk returns a tuple with the Uri field value +// GetArtifactTypeOk returns a tuple with the ArtifactType field value // and a boolean to check if the value has been set. -func (o *CatalogModelArtifact) GetUriOk() (*string, bool) { +func (o *CatalogModelArtifact) GetArtifactTypeOk() (*string, bool) { if o == nil { return nil, false } - return &o.Uri, true + return &o.ArtifactType, true } -// SetUri sets field value -func (o *CatalogModelArtifact) SetUri(v string) { - o.Uri = v +// SetArtifactType sets field value +func (o *CatalogModelArtifact) SetArtifactType(v string) { + o.ArtifactType = v } -// GetCustomProperties returns the CustomProperties field value if set, zero value otherwise. -func (o *CatalogModelArtifact) GetCustomProperties() map[string]MetadataValue { - if o == nil || IsNil(o.CustomProperties) { - var ret map[string]MetadataValue +// GetUri returns the Uri field value +func (o *CatalogModelArtifact) GetUri() string { + if o == nil { + var ret string return ret } - return *o.CustomProperties + + return o.Uri } -// GetCustomPropertiesOk returns a tuple with the CustomProperties field value if set, nil otherwise +// GetUriOk returns a tuple with the Uri field value // and a boolean to check if the value has been set. 
-func (o *CatalogModelArtifact) GetCustomPropertiesOk() (*map[string]MetadataValue, bool) { - if o == nil || IsNil(o.CustomProperties) { +func (o *CatalogModelArtifact) GetUriOk() (*string, bool) { + if o == nil { return nil, false } - return o.CustomProperties, true -} - -// HasCustomProperties returns a boolean if a field has been set. -func (o *CatalogModelArtifact) HasCustomProperties() bool { - if o != nil && !IsNil(o.CustomProperties) { - return true - } - - return false + return &o.Uri, true } -// SetCustomProperties gets a reference to the given map[string]MetadataValue and assigns it to the CustomProperties field. -func (o *CatalogModelArtifact) SetCustomProperties(v map[string]MetadataValue) { - o.CustomProperties = &v +// SetUri sets field value +func (o *CatalogModelArtifact) SetUri(v string) { + o.Uri = v } func (o CatalogModelArtifact) MarshalJSON() ([]byte, error) { @@ -177,16 +343,29 @@ func (o CatalogModelArtifact) MarshalJSON() ([]byte, error) { func (o CatalogModelArtifact) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} + if !IsNil(o.CustomProperties) { + toSerialize["customProperties"] = o.CustomProperties + } + if !IsNil(o.Description) { + toSerialize["description"] = o.Description + } + if !IsNil(o.ExternalId) { + toSerialize["externalId"] = o.ExternalId + } + if !IsNil(o.Name) { + toSerialize["name"] = o.Name + } + if !IsNil(o.Id) { + toSerialize["id"] = o.Id + } if !IsNil(o.CreateTimeSinceEpoch) { toSerialize["createTimeSinceEpoch"] = o.CreateTimeSinceEpoch } if !IsNil(o.LastUpdateTimeSinceEpoch) { toSerialize["lastUpdateTimeSinceEpoch"] = o.LastUpdateTimeSinceEpoch } + toSerialize["artifactType"] = o.ArtifactType toSerialize["uri"] = o.Uri - if !IsNil(o.CustomProperties) { - toSerialize["customProperties"] = o.CustomProperties - } return toSerialize, nil } diff --git a/catalog/pkg/openapi/model_catalog_model_list.go b/catalog/pkg/openapi/model_catalog_model_list.go index 79b010091b..a3a3543626 100644 --- 
a/catalog/pkg/openapi/model_catalog_model_list.go +++ b/catalog/pkg/openapi/model_catalog_model_list.go @@ -29,6 +29,8 @@ type CatalogModelList struct { Items []CatalogModel `json:"items"` } +type _CatalogModelList CatalogModelList + // NewCatalogModelList instantiates a new CatalogModelList object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments diff --git a/catalog/pkg/openapi/model_catalog_source.go b/catalog/pkg/openapi/model_catalog_source.go index 28617286c8..2e35fc98db 100644 --- a/catalog/pkg/openapi/model_catalog_source.go +++ b/catalog/pkg/openapi/model_catalog_source.go @@ -25,18 +25,27 @@ type CatalogSource struct { Name string `json:"name"` // Whether the catalog source is enabled. Enabled *bool `json:"enabled,omitempty"` + // Labels for the catalog source. + Labels []string `json:"labels"` + // Optional allow-list of models that are eligible for this source. Entries can be exact model names or patterns that use `*` as a wildcard. When provided, only models matching at least one pattern are considered. Pattern matching is case-insensitive, so `Granite/_*` will match `granite/model`, `Granite/model`, and `GRANITE/model`. + IncludedModels []string `json:"includedModels,omitempty"` + // Optional block-list of models that should be removed from the catalog even if they match `includedModels`. Patterns support the `*` wildcard. Pattern matching is case-insensitive, so `*-beta` will match `Model-Beta`, `model-beta`, and `MODEL-BETA`. 
+ ExcludedModels []string `json:"excludedModels,omitempty"` } +type _CatalogSource CatalogSource + // NewCatalogSource instantiates a new CatalogSource object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed -func NewCatalogSource(id string, name string) *CatalogSource { +func NewCatalogSource(id string, name string, labels []string) *CatalogSource { this := CatalogSource{} this.Id = id this.Name = name var enabled bool = true this.Enabled = &enabled + this.Labels = labels return &this } @@ -130,6 +139,94 @@ func (o *CatalogSource) SetEnabled(v bool) { o.Enabled = &v } +// GetLabels returns the Labels field value +func (o *CatalogSource) GetLabels() []string { + if o == nil { + var ret []string + return ret + } + + return o.Labels +} + +// GetLabelsOk returns a tuple with the Labels field value +// and a boolean to check if the value has been set. +func (o *CatalogSource) GetLabelsOk() ([]string, bool) { + if o == nil { + return nil, false + } + return o.Labels, true +} + +// SetLabels sets field value +func (o *CatalogSource) SetLabels(v []string) { + o.Labels = v +} + +// GetIncludedModels returns the IncludedModels field value if set, zero value otherwise. +func (o *CatalogSource) GetIncludedModels() []string { + if o == nil || IsNil(o.IncludedModels) { + var ret []string + return ret + } + return o.IncludedModels +} + +// GetIncludedModelsOk returns a tuple with the IncludedModels field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogSource) GetIncludedModelsOk() ([]string, bool) { + if o == nil || IsNil(o.IncludedModels) { + return nil, false + } + return o.IncludedModels, true +} + +// HasIncludedModels returns a boolean if a field has been set. 
+func (o *CatalogSource) HasIncludedModels() bool { + if o != nil && !IsNil(o.IncludedModels) { + return true + } + + return false +} + +// SetIncludedModels gets a reference to the given []string and assigns it to the IncludedModels field. +func (o *CatalogSource) SetIncludedModels(v []string) { + o.IncludedModels = v +} + +// GetExcludedModels returns the ExcludedModels field value if set, zero value otherwise. +func (o *CatalogSource) GetExcludedModels() []string { + if o == nil || IsNil(o.ExcludedModels) { + var ret []string + return ret + } + return o.ExcludedModels +} + +// GetExcludedModelsOk returns a tuple with the ExcludedModels field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CatalogSource) GetExcludedModelsOk() ([]string, bool) { + if o == nil || IsNil(o.ExcludedModels) { + return nil, false + } + return o.ExcludedModels, true +} + +// HasExcludedModels returns a boolean if a field has been set. +func (o *CatalogSource) HasExcludedModels() bool { + if o != nil && !IsNil(o.ExcludedModels) { + return true + } + + return false +} + +// SetExcludedModels gets a reference to the given []string and assigns it to the ExcludedModels field. 
+func (o *CatalogSource) SetExcludedModels(v []string) { + o.ExcludedModels = v +} + func (o CatalogSource) MarshalJSON() ([]byte, error) { toSerialize, err := o.ToMap() if err != nil { @@ -145,6 +242,13 @@ func (o CatalogSource) ToMap() (map[string]interface{}, error) { if !IsNil(o.Enabled) { toSerialize["enabled"] = o.Enabled } + toSerialize["labels"] = o.Labels + if !IsNil(o.IncludedModels) { + toSerialize["includedModels"] = o.IncludedModels + } + if !IsNil(o.ExcludedModels) { + toSerialize["excludedModels"] = o.ExcludedModels + } return toSerialize, nil } diff --git a/catalog/pkg/openapi/model_catalog_source_list.go b/catalog/pkg/openapi/model_catalog_source_list.go index 8d66226fb7..e4df3cece3 100644 --- a/catalog/pkg/openapi/model_catalog_source_list.go +++ b/catalog/pkg/openapi/model_catalog_source_list.go @@ -29,6 +29,8 @@ type CatalogSourceList struct { Items []CatalogSource `json:"items,omitempty"` } +type _CatalogSourceList CatalogSourceList + // NewCatalogSourceList instantiates a new CatalogSourceList object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments diff --git a/catalog/pkg/openapi/model_error.go b/catalog/pkg/openapi/model_error.go index 70732b7fad..df6578b1f7 100644 --- a/catalog/pkg/openapi/model_error.go +++ b/catalog/pkg/openapi/model_error.go @@ -25,6 +25,8 @@ type Error struct { Message string `json:"message"` } +type _Error Error + // NewError instantiates a new Error object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments diff --git a/catalog/pkg/openapi/model_filter_option.go b/catalog/pkg/openapi/model_filter_option.go new file mode 100644 index 0000000000..7d8091c187 --- /dev/null +++ b/catalog/pkg/openapi/model_filter_option.go @@ -0,0 +1,191 @@ +/* +Model Catalog REST API + +REST API for Model Registry to 
create and manage ML model metadata + +API version: v1alpha1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" +) + +// checks if the FilterOption type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &FilterOption{} + +// FilterOption struct for FilterOption +type FilterOption struct { + // The data type of the filter option + Type string `json:"type"` + // Known values of the property for string types with a small number of possible options. + Values []interface{} `json:"values,omitempty"` + Range *FilterOptionRange `json:"range,omitempty"` +} + +type _FilterOption FilterOption + +// NewFilterOption instantiates a new FilterOption object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewFilterOption(type_ string) *FilterOption { + this := FilterOption{} + this.Type = type_ + return &this +} + +// NewFilterOptionWithDefaults instantiates a new FilterOption object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewFilterOptionWithDefaults() *FilterOption { + this := FilterOption{} + return &this +} + +// GetType returns the Type field value +func (o *FilterOption) GetType() string { + if o == nil { + var ret string + return ret + } + + return o.Type +} + +// GetTypeOk returns a tuple with the Type field value +// and a boolean to check if the value has been set. +func (o *FilterOption) GetTypeOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Type, true +} + +// SetType sets field value +func (o *FilterOption) SetType(v string) { + o.Type = v +} + +// GetValues returns the Values field value if set, zero value otherwise. 
+func (o *FilterOption) GetValues() []interface{} { + if o == nil || IsNil(o.Values) { + var ret []interface{} + return ret + } + return o.Values +} + +// GetValuesOk returns a tuple with the Values field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterOption) GetValuesOk() ([]interface{}, bool) { + if o == nil || IsNil(o.Values) { + return nil, false + } + return o.Values, true +} + +// HasValues returns a boolean if a field has been set. +func (o *FilterOption) HasValues() bool { + if o != nil && !IsNil(o.Values) { + return true + } + + return false +} + +// SetValues gets a reference to the given []interface{} and assigns it to the Values field. +func (o *FilterOption) SetValues(v []interface{}) { + o.Values = v +} + +// GetRange returns the Range field value if set, zero value otherwise. +func (o *FilterOption) GetRange() FilterOptionRange { + if o == nil || IsNil(o.Range) { + var ret FilterOptionRange + return ret + } + return *o.Range +} + +// GetRangeOk returns a tuple with the Range field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterOption) GetRangeOk() (*FilterOptionRange, bool) { + if o == nil || IsNil(o.Range) { + return nil, false + } + return o.Range, true +} + +// HasRange returns a boolean if a field has been set. +func (o *FilterOption) HasRange() bool { + if o != nil && !IsNil(o.Range) { + return true + } + + return false +} + +// SetRange gets a reference to the given FilterOptionRange and assigns it to the Range field. 
+func (o *FilterOption) SetRange(v FilterOptionRange) { + o.Range = &v +} + +func (o FilterOption) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o FilterOption) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + toSerialize["type"] = o.Type + if !IsNil(o.Values) { + toSerialize["values"] = o.Values + } + if !IsNil(o.Range) { + toSerialize["range"] = o.Range + } + return toSerialize, nil +} + +type NullableFilterOption struct { + value *FilterOption + isSet bool +} + +func (v NullableFilterOption) Get() *FilterOption { + return v.value +} + +func (v *NullableFilterOption) Set(val *FilterOption) { + v.value = val + v.isSet = true +} + +func (v NullableFilterOption) IsSet() bool { + return v.isSet +} + +func (v *NullableFilterOption) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableFilterOption(val *FilterOption) *NullableFilterOption { + return &NullableFilterOption{value: val, isSet: true} +} + +func (v NullableFilterOption) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableFilterOption) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/catalog/pkg/openapi/model_filter_option_range.go b/catalog/pkg/openapi/model_filter_option_range.go new file mode 100644 index 0000000000..8e69383b67 --- /dev/null +++ b/catalog/pkg/openapi/model_filter_option_range.go @@ -0,0 +1,160 @@ +/* +Model Catalog REST API + +REST API for Model Registry to create and manage ML model metadata + +API version: v1alpha1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
+ +package openapi + +import ( + "encoding/json" +) + +// checks if the FilterOptionRange type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &FilterOptionRange{} + +// FilterOptionRange Min and max values for number types. +type FilterOptionRange struct { + Min *float64 `json:"min,omitempty"` + Max *float64 `json:"max,omitempty"` +} + +// NewFilterOptionRange instantiates a new FilterOptionRange object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewFilterOptionRange() *FilterOptionRange { + this := FilterOptionRange{} + return &this +} + +// NewFilterOptionRangeWithDefaults instantiates a new FilterOptionRange object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewFilterOptionRangeWithDefaults() *FilterOptionRange { + this := FilterOptionRange{} + return &this +} + +// GetMin returns the Min field value if set, zero value otherwise. +func (o *FilterOptionRange) GetMin() float64 { + if o == nil || IsNil(o.Min) { + var ret float64 + return ret + } + return *o.Min +} + +// GetMinOk returns a tuple with the Min field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterOptionRange) GetMinOk() (*float64, bool) { + if o == nil || IsNil(o.Min) { + return nil, false + } + return o.Min, true +} + +// HasMin returns a boolean if a field has been set. +func (o *FilterOptionRange) HasMin() bool { + if o != nil && !IsNil(o.Min) { + return true + } + + return false +} + +// SetMin gets a reference to the given float64 and assigns it to the Min field. +func (o *FilterOptionRange) SetMin(v float64) { + o.Min = &v +} + +// GetMax returns the Max field value if set, zero value otherwise. 
+func (o *FilterOptionRange) GetMax() float64 { + if o == nil || IsNil(o.Max) { + var ret float64 + return ret + } + return *o.Max +} + +// GetMaxOk returns a tuple with the Max field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterOptionRange) GetMaxOk() (*float64, bool) { + if o == nil || IsNil(o.Max) { + return nil, false + } + return o.Max, true +} + +// HasMax returns a boolean if a field has been set. +func (o *FilterOptionRange) HasMax() bool { + if o != nil && !IsNil(o.Max) { + return true + } + + return false +} + +// SetMax gets a reference to the given float64 and assigns it to the Max field. +func (o *FilterOptionRange) SetMax(v float64) { + o.Max = &v +} + +func (o FilterOptionRange) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o FilterOptionRange) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Min) { + toSerialize["min"] = o.Min + } + if !IsNil(o.Max) { + toSerialize["max"] = o.Max + } + return toSerialize, nil +} + +type NullableFilterOptionRange struct { + value *FilterOptionRange + isSet bool +} + +func (v NullableFilterOptionRange) Get() *FilterOptionRange { + return v.value +} + +func (v *NullableFilterOptionRange) Set(val *FilterOptionRange) { + v.value = val + v.isSet = true +} + +func (v NullableFilterOptionRange) IsSet() bool { + return v.isSet +} + +func (v *NullableFilterOptionRange) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableFilterOptionRange(val *FilterOptionRange) *NullableFilterOptionRange { + return &NullableFilterOptionRange{value: val, isSet: true} +} + +func (v NullableFilterOptionRange) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableFilterOptionRange) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git 
a/catalog/pkg/openapi/model_filter_options_list.go b/catalog/pkg/openapi/model_filter_options_list.go new file mode 100644 index 0000000000..c5862fbe54 --- /dev/null +++ b/catalog/pkg/openapi/model_filter_options_list.go @@ -0,0 +1,125 @@ +/* +Model Catalog REST API + +REST API for Model Registry to create and manage ML model metadata + +API version: v1alpha1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" +) + +// checks if the FilterOptionsList type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &FilterOptionsList{} + +// FilterOptionsList List of FilterOptions +type FilterOptionsList struct { + // A single filter option. + Filters *map[string]FilterOption `json:"filters,omitempty"` +} + +// NewFilterOptionsList instantiates a new FilterOptionsList object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewFilterOptionsList() *FilterOptionsList { + this := FilterOptionsList{} + return &this +} + +// NewFilterOptionsListWithDefaults instantiates a new FilterOptionsList object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewFilterOptionsListWithDefaults() *FilterOptionsList { + this := FilterOptionsList{} + return &this +} + +// GetFilters returns the Filters field value if set, zero value otherwise. +func (o *FilterOptionsList) GetFilters() map[string]FilterOption { + if o == nil || IsNil(o.Filters) { + var ret map[string]FilterOption + return ret + } + return *o.Filters +} + +// GetFiltersOk returns a tuple with the Filters field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *FilterOptionsList) GetFiltersOk() (*map[string]FilterOption, bool) { + if o == nil || IsNil(o.Filters) { + return nil, false + } + return o.Filters, true +} + +// HasFilters returns a boolean if a field has been set. +func (o *FilterOptionsList) HasFilters() bool { + if o != nil && !IsNil(o.Filters) { + return true + } + + return false +} + +// SetFilters gets a reference to the given map[string]FilterOption and assigns it to the Filters field. +func (o *FilterOptionsList) SetFilters(v map[string]FilterOption) { + o.Filters = &v +} + +func (o FilterOptionsList) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o FilterOptionsList) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Filters) { + toSerialize["filters"] = o.Filters + } + return toSerialize, nil +} + +type NullableFilterOptionsList struct { + value *FilterOptionsList + isSet bool +} + +func (v NullableFilterOptionsList) Get() *FilterOptionsList { + return v.value +} + +func (v *NullableFilterOptionsList) Set(val *FilterOptionsList) { + v.value = val + v.isSet = true +} + +func (v NullableFilterOptionsList) IsSet() bool { + return v.isSet +} + +func (v *NullableFilterOptionsList) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableFilterOptionsList(val *FilterOptionsList) *NullableFilterOptionsList { + return &NullableFilterOptionsList{value: val, isSet: true} +} + +func (v NullableFilterOptionsList) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableFilterOptionsList) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/catalog/pkg/openapi/model_metadata_bool_value.go b/catalog/pkg/openapi/model_metadata_bool_value.go index 6387dc62aa..aeb75906e1 100644 --- a/catalog/pkg/openapi/model_metadata_bool_value.go +++ 
b/catalog/pkg/openapi/model_metadata_bool_value.go @@ -23,6 +23,8 @@ type MetadataBoolValue struct { MetadataType string `json:"metadataType"` } +type _MetadataBoolValue MetadataBoolValue + // NewMetadataBoolValue instantiates a new MetadataBoolValue object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments diff --git a/catalog/pkg/openapi/model_metadata_double_value.go b/catalog/pkg/openapi/model_metadata_double_value.go index a896c0300c..0f7656731e 100644 --- a/catalog/pkg/openapi/model_metadata_double_value.go +++ b/catalog/pkg/openapi/model_metadata_double_value.go @@ -23,6 +23,8 @@ type MetadataDoubleValue struct { MetadataType string `json:"metadataType"` } +type _MetadataDoubleValue MetadataDoubleValue + // NewMetadataDoubleValue instantiates a new MetadataDoubleValue object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments diff --git a/catalog/pkg/openapi/model_metadata_int_value.go b/catalog/pkg/openapi/model_metadata_int_value.go index d103797c38..90838106e5 100644 --- a/catalog/pkg/openapi/model_metadata_int_value.go +++ b/catalog/pkg/openapi/model_metadata_int_value.go @@ -23,6 +23,8 @@ type MetadataIntValue struct { MetadataType string `json:"metadataType"` } +type _MetadataIntValue MetadataIntValue + // NewMetadataIntValue instantiates a new MetadataIntValue object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments diff --git a/catalog/pkg/openapi/model_metadata_proto_value.go b/catalog/pkg/openapi/model_metadata_proto_value.go index 343a92c3bf..38cc2ebce6 100644 --- a/catalog/pkg/openapi/model_metadata_proto_value.go +++ b/catalog/pkg/openapi/model_metadata_proto_value.go @@ -26,6 +26,8 @@ type MetadataProtoValue struct { 
MetadataType string `json:"metadataType"` } +type _MetadataProtoValue MetadataProtoValue + // NewMetadataProtoValue instantiates a new MetadataProtoValue object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments diff --git a/catalog/pkg/openapi/model_metadata_string_value.go b/catalog/pkg/openapi/model_metadata_string_value.go index 65f0e29f65..5145325950 100644 --- a/catalog/pkg/openapi/model_metadata_string_value.go +++ b/catalog/pkg/openapi/model_metadata_string_value.go @@ -23,6 +23,8 @@ type MetadataStringValue struct { MetadataType string `json:"metadataType"` } +type _MetadataStringValue MetadataStringValue + // NewMetadataStringValue instantiates a new MetadataStringValue object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments diff --git a/catalog/pkg/openapi/model_metadata_struct_value.go b/catalog/pkg/openapi/model_metadata_struct_value.go index fdcf084d40..461cb3a807 100644 --- a/catalog/pkg/openapi/model_metadata_struct_value.go +++ b/catalog/pkg/openapi/model_metadata_struct_value.go @@ -24,6 +24,8 @@ type MetadataStructValue struct { MetadataType string `json:"metadataType"` } +type _MetadataStructValue MetadataStructValue + // NewMetadataStructValue instantiates a new MetadataStructValue object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments diff --git a/catalog/pkg/openapi/model_metadata_value.go b/catalog/pkg/openapi/model_metadata_value.go index 61caad0e04..d5da8bbb8b 100644 --- a/catalog/pkg/openapi/model_metadata_value.go +++ b/catalog/pkg/openapi/model_metadata_value.go @@ -214,6 +214,36 @@ func (obj *MetadataValue) GetActualInstance() interface{} { return nil } +// Get the actual instance value +func (obj 
MetadataValue) GetActualInstanceValue() interface{} { + if obj.MetadataBoolValue != nil { + return *obj.MetadataBoolValue + } + + if obj.MetadataDoubleValue != nil { + return *obj.MetadataDoubleValue + } + + if obj.MetadataIntValue != nil { + return *obj.MetadataIntValue + } + + if obj.MetadataProtoValue != nil { + return *obj.MetadataProtoValue + } + + if obj.MetadataStringValue != nil { + return *obj.MetadataStringValue + } + + if obj.MetadataStructValue != nil { + return *obj.MetadataStructValue + } + + // all schemas are nil + return nil +} + type NullableMetadataValue struct { value *MetadataValue isSet bool diff --git a/catalog/pkg/openapi/utils.go b/catalog/pkg/openapi/utils.go index 34bc3963c5..1f1b3f00d0 100644 --- a/catalog/pkg/openapi/utils.go +++ b/catalog/pkg/openapi/utils.go @@ -11,7 +11,9 @@ API version: v1alpha1 package openapi import ( + "bytes" "encoding/json" + "fmt" "reflect" "time" ) @@ -320,7 +322,7 @@ func NewNullableTime(val *time.Time) *NullableTime { } func (v NullableTime) MarshalJSON() ([]byte, error) { - return v.value.MarshalJSON() + return json.Marshal(v.value) } func (v *NullableTime) UnmarshalJSON(src []byte) error { @@ -345,3 +347,15 @@ func IsNil(i interface{}) bool { type MappedNullable interface { ToMap() (map[string]interface{}, error) } + +// A wrapper for strict JSON decoding +func newStrictDecoder(data []byte) *json.Decoder { + dec := json.NewDecoder(bytes.NewBuffer(data)) + dec.DisallowUnknownFields() + return dec +} + +// Prevent trying to import "fmt" +func reportError(format string, a ...interface{}) error { + return fmt.Errorf(format, a...) 
+} diff --git a/catalog/scripts/gen_openapi_server.sh b/catalog/scripts/gen_openapi_server.sh index 200f219b35..0745a17621 100755 --- a/catalog/scripts/gen_openapi_server.sh +++ b/catalog/scripts/gen_openapi_server.sh @@ -15,28 +15,40 @@ DST="$PROJECT_ROOT/${2:-internal/server/openapi}" --ignore-file-override "$PROJECT_ROOT"/.openapi-generator-ignore --additional-properties=outputAsLibrary=true,enumClassPrefix=true,router=chi,sourceFolder=,onlyInterfaces=true,isGoSubmodule=true,enumClassPrefix=true,useOneOfDiscriminatorLookup=true,featureCORS=true \ --template-dir "$PROJECT_ROOT"/../templates/go-server -function sed_inplace() { - if [[ $(uname) == "Darwin" ]]; then - # introduce -i parameter for Mac OSX sed compatibility - sed -i '' "$@" - else - sed -i "$@" - fi +# Python-based regex replace function +# Usage: py-re-replace [file2...] +# count=0: replace all occurrences (like sed with /g flag) +# count=1: replace first occurrence only (like sed without /g flag) +# count=N: replace first N occurrences +py-re-replace() { + python3 -c " +import fileinput, re, sys +count, pattern, replacement, filepaths = int(sys.argv[1]), sys.argv[2], sys.argv[3], sys.argv[4:] +for filepath in filepaths: + for line in fileinput.FileInput(filepath, inplace=True, backup=''): + sys.stdout.write(re.sub(pattern, replacement, line, count=count)) +" "$@" } -sed_inplace 's/, orderByParam/, model.OrderByField(orderByParam)/g' "$PROJECT_ROOT"/internal/server/openapi/api_model_catalog_service.go -sed_inplace 's/, sortOrderParam/, model.SortOrder(sortOrderParam)/g' "$PROJECT_ROOT"/internal/server/openapi/api_model_catalog_service.go +py-re-replace 0 'model\.\[\]ArtifactTypeQueryParam' '[]model.ArtifactTypeQueryParam' "$PROJECT_ROOT"/internal/server/openapi/api.go +py-re-replace 0 'model\.\[\]ArtifactType2QueryParam' '[]model.ArtifactTypeQueryParam' "$PROJECT_ROOT"/internal/server/openapi/api.go -sed_inplace 's/"encoding\/json"//' 
"$PROJECT_ROOT"/internal/server/openapi/api_model_catalog_service.go - -sed_inplace 's/github.com\/kubeflow\/model-registry\/pkg\/openapi/github.com\/kubeflow\/model-registry\/catalog\/pkg\/openapi/' \ +py-re-replace 1 'github\.com/kubeflow/model-registry/pkg/openapi' 'github.com/kubeflow/model-registry/catalog/pkg/openapi' \ "$PROJECT_ROOT"/internal/server/openapi/api_model_catalog_service.go \ "$PROJECT_ROOT"/internal/server/openapi/api.go -sed_inplace 's/{\?model_name+}\?/*/' "$PROJECT_ROOT"/internal/server/openapi/api_model_catalog_service.go + +py-re-replace 1 '\{model_name\+\}|model_name\+' '*' "$PROJECT_ROOT"/internal/server/openapi/api_model_catalog_service.go + +echo "Applying patches to generated code" +( + cd "$PROJECT_ROOT/.." + ./bin/goimports -w "$PROJECT_ROOT/internal/server/openapi/api_model_catalog_service.go" + git apply patches/api_model_catalog_service.patch +) echo "Assembling type_assert Go file" ./scripts/gen_type_asserts.sh "$DST" -gofmt -w "$DST" +$PROJECT_ROOT/../bin/goimports -w "$DST" echo "OpenAPI server generation completed" diff --git a/catalog/scripts/gen_type_asserts.py b/catalog/scripts/gen_type_asserts.py index 55d31fed7f..9491a5a829 100644 --- a/catalog/scripts/gen_type_asserts.py +++ b/catalog/scripts/gen_type_asserts.py @@ -1,7 +1,6 @@ import typing as t from pathlib import Path from textwrap import dedent -import sys def get_funcs(models: t.Iterable[Path]) -> t.Iterator[str]: for path in models: @@ -50,7 +49,7 @@ def get_name(func: str) -> str: src = root / "internal/server/openapi" print( - dedent(f""" + dedent(""" /* * Model Registry REST API * diff --git a/catalog/scripts/gen_type_asserts.sh b/catalog/scripts/gen_type_asserts.sh index fb64cb73cd..e5ed01f7ed 100755 --- a/catalog/scripts/gen_type_asserts.sh +++ b/catalog/scripts/gen_type_asserts.sh @@ -5,10 +5,11 @@ set -e ASSERT_FILE_PATH="$1/type_asserts.go" PROJECT_ROOT=$(realpath "$(dirname "$0")"/..) 
-PATCH="${PROJECT_ROOT}/patches/type_asserts.patch" -# AssertMetadataValueRequired from this file generates with the incorrect logic. -rm -f $1/model_metadata_value.go +# These files generate with incorrect logic: +rm -f "$1/model_metadata_value.go" \ + "$1/model_catalog_artifact.go" \ + "$1/model_filter_option.go" python3 "${PROJECT_ROOT}/scripts/gen_type_asserts.py" $1 >"$ASSERT_FILE_PATH" diff --git a/clients/python/Makefile b/clients/python/Makefile index 2f1c39ad90..69c9038ae7 100644 --- a/clients/python/Makefile +++ b/clients/python/Makefile @@ -23,18 +23,19 @@ deploy-latest-mr: $(if $(filter true,$(BUILD_IMAGE)),\ IMG_VERSION=${IMG_VERSION} IMG=${IMG} make image/build ARGS="--load$(if ${DEV_BUILD}, --target dev-build)" && \ ) \ - LOCAL=1 ./scripts/deploy_on_kind.sh - kubectl port-forward -n kubeflow services/model-registry-service 8080:8080 & echo $$! >> .port-forwards.pid + LOCAL=1 IMG=$(IMG):$(IMG_VERSION) ./scripts/deploy_on_kind.sh +# TODO RHOAIENG-30453 align consistency ./scripts/deploy_on_kind.sh uses IMG with :tag, Vs, Makefile(s) and ci/GHA we use IMG without trailing :tag + kubectl port-forward -n kubeflow services/model-registry-service 8080:8080 > /dev/null 2>&1 & echo $$! >> .port-forwards.pid .PHONY: deploy-test-minio deploy-test-minio: cd ../../ && ./scripts/deploy_minio_on_kind.sh - kubectl port-forward -n minio svc/minio 9000:9000 & echo $$! >> .port-forwards.pid + kubectl port-forward -n minio svc/minio 9000:9000 > /dev/null 2>&1 & echo $$! >> .port-forwards.pid .PHONY: deploy-local-registry deploy-local-registry: cd ../../ && ./scripts/deploy_local_kind_registry.sh - kubectl port-forward service/distribution-registry-test-service 5001:5001 & echo $$! >> .port-forwards.pid + kubectl port-forward service/distribution-registry-test-service 5001:5001 > /dev/null 2>&1 & echo $$! 
>> .port-forwards.pid .PHONY: test-e2e test-e2e: deploy-latest-mr deploy-local-registry deploy-test-minio @@ -45,10 +46,13 @@ test-e2e: deploy-latest-mr deploy-local-registry deploy-test-minio .PHONY: test-fuzz test-fuzz: deploy-latest-mr deploy-local-registry deploy-test-minio - @echo "Starting test-fuzz" + @echo "Starting stateless fuzz tests with parallel execution" poetry install --all-extras @set -a; . ../../scripts/manifests/minio/.env; set +a; \ - poetry run pytest --fuzz -v -s --hypothesis-show-statistics + poetry run pytest tests/fuzz_api/model_registry/test_mr_stateless.py tests/fuzz_api/model_catalog/test_catalog_stateless.py --fuzz -n auto -v --hypothesis-show-statistics + @echo "Running stateful fuzz tests sequentially" + @set -a; . ../../scripts/manifests/minio/.env; set +a; \ + poetry run pytest tests/fuzz_api/model_registry/test_mr_stateful.py --fuzz -v --hypothesis-show-statistics @rm -f ../../scripts/manifests/minio/.env $(MAKE) test-e2e-cleanup @exit $$STATUS @@ -88,12 +92,14 @@ test: .PHONY: lint lint: - poetry run ruff check src/model_registry + poetry run ruff check + poetry run mypy . .PHONY: tidy tidy: - poetry run ruff check --fix --unsafe-fixes || true - poetry run black src/mr_openapi + # Fix code issues everywhere, including src/mr_openapi/ (excluded by default in pyproject.toml) + poetry run ruff check --fix-only --unsafe-fixes . 
src/mr_openapi + poetry run ruff format src src/mr_openapi .PHONY: build build: install tidy diff --git a/clients/python/README.md b/clients/python/README.md index 1daab28e93..fe7d2b87ce 100644 --- a/clients/python/README.md +++ b/clients/python/README.md @@ -18,7 +18,7 @@ This library provides a high level interface for interacting with a model regist In your Python environment, you can install the latest version of the Model Registry Python client with: ``` -pip install --pre model-registry +pip install model-registry ``` ### Installing extras @@ -30,7 +30,7 @@ By [installing an extra variant](https://packaging.python.org/en/latest/tutorial the additional dependencies will be managed for you automatically, for instance with: ``` -pip install --pre "model-registry[hf]" +pip install "model-registry[hf]" ``` This step is not required if you already installed the additional dependencies already, for instance with: diff --git a/clients/python/noxfile.py b/clients/python/noxfile.py index f5aa5ae096..21624d94a5 100644 --- a/clients/python/noxfile.py +++ b/clients/python/noxfile.py @@ -34,8 +34,7 @@ def lint(session: Session) -> None: """Lint using ruff.""" session.install("ruff") - # can't check the whole project because of the generated code - session.run("ruff", "check", "src/model_registry", "tests") + session.run("ruff", "check") @session(python=python_versions) @@ -47,7 +46,7 @@ def mypy(session: Session) -> None: "types-python-dateutil", ) - session.run("mypy", "src/model_registry") + session.run("mypy", ".") @session(python=python_versions) @@ -73,9 +72,8 @@ def tests(session: Session) -> None: @session(name="e2e", python=python_versions) def e2e_tests(session: Session) -> None: """Run the test suite.""" - session.install( + packages = [ ".", - "ray", "requests", "pytest", "pytest-asyncio", @@ -87,7 +85,12 @@ def e2e_tests(session: Session) -> None: "olot", "uvloop", "schemathesis", - ) + ] + # Ray requires Python >3.9 + if session.python != "3.9": + 
packages.insert(1, "ray") + + session.install(*packages) try: session.run( "pytest", @@ -105,20 +108,35 @@ def e2e_tests(session: Session) -> None: @session(name="fuzz", python=python_versions) def fuzz_tests(session: Session) -> None: - """Run the fuzzing tests.""" + """Run the fuzzing tests with parallel execution for stateless tests.""" session.install( ".", "requests", "pytest", + "pytest-xdist", "pytest-mock", "uvloop", "olot", "schemathesis", ) + # Run stateless tests in parallel for faster execution session.run( "pytest", + "tests/fuzz_api/model_registry/test_mr_stateless.py", + "tests/fuzz_api/model_catalog/test_catalog_stateless.py", "--fuzz", - "-rA", + "-n", + "auto", + "-v", + "--hypothesis-show-statistics", + ) + # Run stateful tests sequentially (required for state consistency) + session.run( + "pytest", + "tests/fuzz_api/model_registry/test_mr_stateful.py", + "--fuzz", + "-v", + "--hypothesis-show-statistics", ) @session(python=python_versions[0]) def coverage(session: Session) -> None: diff --git a/clients/python/odh_rules.mk b/clients/python/odh_rules.mk index 8a28a76789..58f9245f3b 100644 --- a/clients/python/odh_rules.mk +++ b/clients/python/odh_rules.mk @@ -8,15 +8,15 @@ undeploy-mr-odh: .PHONY: test-e2e-odh test-e2e-odh: - @echo "Ensuring all extras are installed..." - poetry install --all-extras @echo "Running tests..." @set -a; . 
../../scripts/manifests/minio/.env; set +a; \ - export AUTH_TOKEN=$$(kubectl config view --raw -o jsonpath="{.users[?(@.name==\"$$(kubectl config view -o jsonpath="{.contexts[?(@.name==\"$$(kubectl config current-context 2>/dev/null)\")].context.user}" 2>/dev/null)\")].user.token}" 2>/dev/null) && \ + mkdir -p ../../results; \ + export AUTH_TOKEN=$$(kubectl config view --raw -o jsonpath="{.users[?(@.name==\"$$(kubectl config view -o jsonpath="{.contexts[?(@.name==\"$$(kubectl config current-context)\")].context.user}")\")].user.token}") && \ export VERIFY_SSL=False && \ - export MR_NAMESPACE=$$(kubectl get datasciencecluster default-dsc -o jsonpath='{.spec.components.modelregistry.registriesNamespace}' 2>/dev/null) && \ - export MR_URL="https://$$(kubectl get service -n "$$MR_NAMESPACE" model-registry -o jsonpath='{.metadata.annotations.routing\.opendatahub\.io\/external-address-rest}' 2>/dev/null)" && poetry run pytest --e2e -s -rA \ - && rm -f ../../scripts/manifests/minio/.env + export MR_NAMESPACE=$$(kubectl get datasciencecluster default-dsc -o jsonpath='{.spec.components.modelregistry.registriesNamespace}') && \ + export MR_URL="https://$$(kubectl get service -n "$$MR_NAMESPACE" model-registry -o jsonpath='{.metadata.annotations.routing\.opendatahub\.io\/external-address-rest}')" && \ + poetry install --all-extras && poetry run pytest --e2e -svvv -rA --html=../../results/report.html --junit-xml=../../results/xunit_report.xml --self-contained-html && \ + rm -f ../../scripts/manifests/minio/.env .PHONY: test-e2e-port-cleanup test-e2e-port-cleanup: @@ -25,3 +25,15 @@ test-e2e-port-cleanup: kill $$(cat .port-forwards.pid) || true; \ rm -f .port-forwards.pid; \ fi + +.PHONY: test-fuzz-odh +test-fuzz-odh: + @echo "Starting test-fuzz" + poetry install --all-extras + @set -a; . 
../../scripts/manifests/minio/.env; set +a; \ + export VERIFY_SSL=False && \ + export AUTH_TOKEN=$$(kubectl config view --raw -o jsonpath="{.users[?(@.name==\"$$(kubectl config view -o jsonpath="{.contexts[?(@.name==\"$$(kubectl config current-context)\")].context.user}")\")].user.token}") && \ + export MR_NAMESPACE=$$(kubectl get datasciencecluster default-dsc -o jsonpath='{.spec.components.modelregistry.registriesNamespace}') && \ + export MR_URL="https://$$(kubectl get service -n "$$MR_NAMESPACE" model-registry -o jsonpath='{.metadata.annotations.routing\.opendatahub\.io\/external-address-rest}')" && \ + poetry run pytest --fuzz -svvv --hypothesis-show-statistics tests/fuzz_api -rA --html=../../results/report.html --junit-xml=../../results/xunit_report.xml --self-contained-html + @exit $$STATUS \ No newline at end of file diff --git a/clients/python/openapitools.json b/clients/python/openapitools.json index 5c50d6a26f..f052220d14 100644 --- a/clients/python/openapitools.json +++ b/clients/python/openapitools.json @@ -2,6 +2,6 @@ "$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json", "spaces": 2, "generator-cli": { - "version": "7.6.0" + "version": "7.17.0" } } diff --git a/clients/python/patches/asyncio-only.patch b/clients/python/patches/asyncio-only.patch index 0fb2a0a4e9..c82b87cfa2 100644 --- a/clients/python/patches/asyncio-only.patch +++ b/clients/python/patches/asyncio-only.patch @@ -1,16 +1,17 @@ diff --git a/clients/python/src/mr_openapi/configuration.py b/clients/python/src/mr_openapi/configuration.py -index 795d09e8..bab01d69 100644 +index 50b0c1f2..068de622 100644 --- a/clients/python/src/mr_openapi/configuration.py +++ b/clients/python/src/mr_openapi/configuration.py -@@ -17,7 +17,6 @@ import logging - from logging import FileHandler - import sys - from typing import Optional --import urllib3 +@@ -21,8 +21,6 @@ import sys + from typing import Any, ClassVar, Dict, List, Literal, Optional, TypedDict, Union + from 
typing_extensions import NotRequired, Self - import http.client as httplib +-import urllib3 +- -@@ -110,7 +109,6 @@ class Configuration: + JSON_SCHEMA_VALIDATION_KEYWORDS = { + 'multipleOf', 'maximum', 'exclusiveMaximum', +@@ -238,7 +236,6 @@ class Configuration: """Logging Settings """ self.logger["package_logger"] = logging.getLogger("mr_openapi") @@ -18,11 +19,11 @@ index 795d09e8..bab01d69 100644 self.logger_format = '%(asctime)s %(levelname)s %(message)s' """Log format """ -@@ -335,21 +333,6 @@ class Configuration: - else: - return key +@@ -472,21 +469,6 @@ class Configuration: + + return None -- def get_basic_auth_token(self): +- def get_basic_auth_token(self) -> Optional[str]: - """Gets HTTP basic authentication header (string). - - :return: The token for basic HTTP authentication. @@ -37,6 +38,6 @@ index 795d09e8..bab01d69 100644 - basic_auth=username + ':' + password - ).get('authorization') - - def auth_settings(self): + def auth_settings(self)-> AuthSettings: """Gets Auth Settings dict for api client. diff --git a/clients/python/poetry.lock b/clients/python/poetry.lock index db3002b449..1240cef653 100644 --- a/clients/python/poetry.lock +++ b/clients/python/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. 
[[package]] name = "accessible-pygments" @@ -33,98 +33,132 @@ files = [ [[package]] name = "aiohttp" -version = "3.12.15" +version = "3.13.2" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "aiohttp-3.12.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b6fc902bff74d9b1879ad55f5404153e2b33a82e72a95c89cec5eb6cc9e92fbc"}, - {file = "aiohttp-3.12.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:098e92835b8119b54c693f2f88a1dec690e20798ca5f5fe5f0520245253ee0af"}, - {file = "aiohttp-3.12.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40b3fee496a47c3b4a39a731954c06f0bd9bd3e8258c059a4beb76ac23f8e421"}, - {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ce13fcfb0bb2f259fb42106cdc63fa5515fb85b7e87177267d89a771a660b79"}, - {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3beb14f053222b391bf9cf92ae82e0171067cc9c8f52453a0f1ec7c37df12a77"}, - {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c39e87afe48aa3e814cac5f535bc6199180a53e38d3f51c5e2530f5aa4ec58c"}, - {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5f1b4ce5bc528a6ee38dbf5f39bbf11dd127048726323b72b8e85769319ffc4"}, - {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1004e67962efabbaf3f03b11b4c43b834081c9e3f9b32b16a7d97d4708a9abe6"}, - {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8faa08fcc2e411f7ab91d1541d9d597d3a90e9004180edb2072238c085eac8c2"}, - {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fe086edf38b2222328cdf89af0dde2439ee173b8ad7cb659b4e4c6f385b2be3d"}, - {file = 
"aiohttp-3.12.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:79b26fe467219add81d5e47b4a4ba0f2394e8b7c7c3198ed36609f9ba161aecb"}, - {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b761bac1192ef24e16706d761aefcb581438b34b13a2f069a6d343ec8fb693a5"}, - {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e153e8adacfe2af562861b72f8bc47f8a5c08e010ac94eebbe33dc21d677cd5b"}, - {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc49c4de44977aa8601a00edbf157e9a421f227aa7eb477d9e3df48343311065"}, - {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2776c7ec89c54a47029940177e75c8c07c29c66f73464784971d6a81904ce9d1"}, - {file = "aiohttp-3.12.15-cp310-cp310-win32.whl", hash = "sha256:2c7d81a277fa78b2203ab626ced1487420e8c11a8e373707ab72d189fcdad20a"}, - {file = "aiohttp-3.12.15-cp310-cp310-win_amd64.whl", hash = "sha256:83603f881e11f0f710f8e2327817c82e79431ec976448839f3cd05d7afe8f830"}, - {file = "aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117"}, - {file = "aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe"}, - {file = "aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9"}, - {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5"}, - {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728"}, - {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16"}, - {file = 
"aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0"}, - {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b"}, - {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd"}, - {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8"}, - {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50"}, - {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676"}, - {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7"}, - {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7"}, - {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685"}, - {file = "aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b"}, - {file = "aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d"}, - {file = "aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7"}, - {file = "aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444"}, - 
{file = "aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d"}, - {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c"}, - {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0"}, - {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab"}, - {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb"}, - {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545"}, - {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c"}, - {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd"}, - {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f"}, - {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d"}, - {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519"}, - {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea"}, - {file = 
"aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3"}, - {file = "aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1"}, - {file = "aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34"}, - {file = "aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315"}, - {file = "aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd"}, - {file = "aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4"}, - {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7"}, - {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d"}, - {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b"}, - {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d"}, - {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d"}, - {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645"}, - {file = 
"aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461"}, - {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9"}, - {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d"}, - {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693"}, - {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64"}, - {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51"}, - {file = "aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0"}, - {file = "aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84"}, - {file = "aiohttp-3.12.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:691d203c2bdf4f4637792efbbcdcd157ae11e55eaeb5e9c360c1206fb03d4d98"}, - {file = "aiohttp-3.12.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e995e1abc4ed2a454c731385bf4082be06f875822adc4c6d9eaadf96e20d406"}, - {file = "aiohttp-3.12.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bd44d5936ab3193c617bfd6c9a7d8d1085a8dc8c3f44d5f1dcf554d17d04cf7d"}, - {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46749be6e89cd78d6068cdf7da51dbcfa4321147ab8e4116ee6678d9a056a0cf"}, - {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0c643f4d75adea39e92c0f01b3fb83d57abdec8c9279b3078b68a3a52b3933b6"}, - {file = 
"aiohttp-3.12.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a23918fedc05806966a2438489dcffccbdf83e921a1170773b6178d04ade142"}, - {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74bdd8c864b36c3673741023343565d95bfbd778ffe1eb4d412c135a28a8dc89"}, - {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a146708808c9b7a988a4af3821379e379e0f0e5e466ca31a73dbdd0325b0263"}, - {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7011a70b56facde58d6d26da4fec3280cc8e2a78c714c96b7a01a87930a9530"}, - {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3bdd6e17e16e1dbd3db74d7f989e8af29c4d2e025f9828e6ef45fbdee158ec75"}, - {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:57d16590a351dfc914670bd72530fd78344b885a00b250e992faea565b7fdc05"}, - {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc9a0f6569ff990e0bbd75506c8d8fe7214c8f6579cca32f0546e54372a3bb54"}, - {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:536ad7234747a37e50e7b6794ea868833d5220b49c92806ae2d7e8a9d6b5de02"}, - {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f0adb4177fa748072546fb650d9bd7398caaf0e15b370ed3317280b13f4083b0"}, - {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14954a2988feae3987f1eb49c706bff39947605f4b6fa4027c1d75743723eb09"}, - {file = "aiohttp-3.12.15-cp39-cp39-win32.whl", hash = "sha256:b784d6ed757f27574dca1c336f968f4e81130b27595e458e69457e6878251f5d"}, - {file = "aiohttp-3.12.15-cp39-cp39-win_amd64.whl", hash = "sha256:86ceded4e78a992f835209e236617bffae649371c4a50d5e5a3987f237db84b8"}, - {file = "aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2"}, + {file = 
"aiohttp-3.13.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2372b15a5f62ed37789a6b383ff7344fc5b9f243999b0cd9b629d8bc5f5b4155"}, + {file = "aiohttp-3.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7f8659a48995edee7229522984bd1009c1213929c769c2daa80b40fe49a180c"}, + {file = "aiohttp-3.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:939ced4a7add92296b0ad38892ce62b98c619288a081170695c6babe4f50e636"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6315fb6977f1d0dd41a107c527fee2ed5ab0550b7d885bc15fee20ccb17891da"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6e7352512f763f760baaed2637055c49134fd1d35b37c2dedfac35bfe5cf8725"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e09a0a06348a2dd73e7213353c90d709502d9786219f69b731f6caa0efeb46f5"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a09a6d073fb5789456545bdee2474d14395792faa0527887f2f4ec1a486a59d3"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b59d13c443f8e049d9e94099c7e412e34610f1f49be0f230ec656a10692a5802"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:20db2d67985d71ca033443a1ba2001c4b5693fe09b0e29f6d9358a99d4d62a8a"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:960c2fc686ba27b535f9fd2b52d87ecd7e4fd1cf877f6a5cba8afb5b4a8bd204"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6c00dbcf5f0d88796151e264a8eab23de2997c9303dd7c0bf622e23b24d3ce22"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:fed38a5edb7945f4d1bcabe2fcd05db4f6ec7e0e82560088b754f7e08d93772d"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:b395bbca716c38bef3c764f187860e88c724b342c26275bc03e906142fc5964f"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:204ffff2426c25dfda401ba08da85f9c59525cdc42bda26660463dd1cbcfec6f"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:05c4dd3c48fb5f15db31f57eb35374cb0c09afdde532e7fb70a75aede0ed30f6"}, + {file = "aiohttp-3.13.2-cp310-cp310-win32.whl", hash = "sha256:e574a7d61cf10351d734bcddabbe15ede0eaa8a02070d85446875dc11189a251"}, + {file = "aiohttp-3.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:364f55663085d658b8462a1c3f17b2b84a5c2e1ba858e1b79bff7b2e24ad1514"}, + {file = "aiohttp-3.13.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4647d02df098f6434bafd7f32ad14942f05a9caa06c7016fdcc816f343997dd0"}, + {file = "aiohttp-3.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e3403f24bcb9c3b29113611c3c16a2a447c3953ecf86b79775e7be06f7ae7ccb"}, + {file = "aiohttp-3.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:43dff14e35aba17e3d6d5ba628858fb8cb51e30f44724a2d2f0c75be492c55e9"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2a9ea08e8c58bb17655630198833109227dea914cd20be660f52215f6de5613"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53b07472f235eb80e826ad038c9d106c2f653584753f3ddab907c83f49eedead"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e736c93e9c274fce6419af4aac199984d866e55f8a4cec9114671d0ea9688780"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:ff5e771f5dcbc81c64898c597a434f7682f2259e0cd666932a913d53d1341d1a"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3b6fb0c207cc661fa0bf8c66d8d9b657331ccc814f4719468af61034b478592"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:97a0895a8e840ab3520e2288db7cace3a1981300d48babeb50e7425609e2e0ab"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9e8f8afb552297aca127c90cb840e9a1d4bfd6a10d7d8f2d9176e1acc69bad30"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:ed2f9c7216e53c3df02264f25d824b079cc5914f9e2deba94155190ef648ee40"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:99c5280a329d5fa18ef30fd10c793a190d996567667908bef8a7f81f8202b948"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ca6ffef405fc9c09a746cb5d019c1672cd7f402542e379afc66b370833170cf"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:47f438b1a28e926c37632bff3c44df7d27c9b57aaf4e34b1def3c07111fdb782"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9acda8604a57bb60544e4646a4615c1866ee6c04a8edef9b8ee6fd1d8fa2ddc8"}, + {file = "aiohttp-3.13.2-cp311-cp311-win32.whl", hash = "sha256:868e195e39b24aaa930b063c08bb0c17924899c16c672a28a65afded9c46c6ec"}, + {file = "aiohttp-3.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:7fd19df530c292542636c2a9a85854fab93474396a52f1695e799186bbd7f24c"}, + {file = "aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b"}, + {file = "aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc"}, + {file = "aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169"}, + {file = 
"aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248"}, + {file = "aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e"}, + {file = "aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45"}, + {file = "aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be"}, + {file = "aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742"}, + {file = "aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb"}, + {file = 
"aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23"}, + {file = "aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254"}, + {file = "aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a"}, + {file = "aiohttp-3.13.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2475391c29230e063ef53a66669b7b691c9bfc3f1426a0f7bcdf1216bdbac38b"}, + {file = "aiohttp-3.13.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f33c8748abef4d8717bb20e8fb1b3e07c6adacb7fd6beaae971a764cf5f30d61"}, + {file = "aiohttp-3.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ae32f24bbfb7dbb485a24b30b1149e2f200be94777232aeadba3eecece4d0aa4"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d7f02042c1f009ffb70067326ef183a047425bb2ff3bc434ead4dd4a4a66a2b"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:93655083005d71cd6c072cdab54c886e6570ad2c4592139c3fb967bfc19e4694"}, + {file = 
"aiohttp-3.13.2-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0db1e24b852f5f664cd728db140cf11ea0e82450471232a394b3d1a540b0f906"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b009194665bcd128e23eaddef362e745601afa4641930848af4c8559e88f18f9"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c038a8fdc8103cd51dbd986ecdce141473ffd9775a7a8057a6ed9c3653478011"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:66bac29b95a00db411cd758fea0e4b9bdba6d549dfe333f9a945430f5f2cc5a6"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4ebf9cfc9ba24a74cf0718f04aac2a3bbe745902cc7c5ebc55c0f3b5777ef213"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a4b88ebe35ce54205c7074f7302bd08a4cb83256a3e0870c72d6f68a3aaf8e49"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:98c4fb90bb82b70a4ed79ca35f656f4281885be076f3f970ce315402b53099ae"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:ec7534e63ae0f3759df3a1ed4fa6bc8f75082a924b590619c0dd2f76d7043caa"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5b927cf9b935a13e33644cbed6c8c4b2d0f25b713d838743f8fe7191b33829c4"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:88d6c017966a78c5265d996c19cdb79235be5e6412268d7e2ce7dee339471b7a"}, + {file = "aiohttp-3.13.2-cp314-cp314-win32.whl", hash = "sha256:f7c183e786e299b5d6c49fb43a769f8eb8e04a2726a2bd5887b98b5cc2d67940"}, + {file = "aiohttp-3.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:fe242cd381e0fb65758faf5ad96c2e460df6ee5b2de1072fe97e4127927e00b4"}, + {file = "aiohttp-3.13.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = 
"sha256:f10d9c0b0188fe85398c61147bbd2a657d616c876863bfeff43376e0e3134673"}, + {file = "aiohttp-3.13.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:e7c952aefdf2460f4ae55c5e9c3e80aa72f706a6317e06020f80e96253b1accd"}, + {file = "aiohttp-3.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c20423ce14771d98353d2e25e83591fa75dfa90a3c1848f3d7c68243b4fbded3"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e96eb1a34396e9430c19d8338d2ec33015e4a87ef2b4449db94c22412e25ccdf"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:23fb0783bc1a33640036465019d3bba069942616a6a2353c6907d7fe1ccdaf4e"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1a9bea6244a1d05a4e57c295d69e159a5c50d8ef16aa390948ee873478d9a5"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0a3d54e822688b56e9f6b5816fb3de3a3a64660efac64e4c2dc435230ad23bad"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7a653d872afe9f33497215745da7a943d1dc15b728a9c8da1c3ac423af35178e"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:56d36e80d2003fa3fc0207fac644216d8532e9504a785ef9a8fd013f84a42c61"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:78cd586d8331fb8e241c2dd6b2f4061778cc69e150514b39a9e28dd050475661"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:20b10bbfbff766294fe99987f7bb3b74fdd2f1a2905f2562132641ad434dcf98"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9ec49dff7e2b3c85cdeaa412e9d438f0ecd71676fde61ec57027dd392f00c693"}, + {file = 
"aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:94f05348c4406450f9d73d38efb41d669ad6cd90c7ee194810d0eefbfa875a7a"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:fa4dcb605c6f82a80c7f95713c2b11c3b8e9893b3ebd2bc9bde93165ed6107be"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf00e5db968c3f67eccd2778574cf64d8b27d95b237770aa32400bd7a1ca4f6c"}, + {file = "aiohttp-3.13.2-cp314-cp314t-win32.whl", hash = "sha256:d23b5fe492b0805a50d3371e8a728a9134d8de5447dce4c885f5587294750734"}, + {file = "aiohttp-3.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:ff0a7b0a82a7ab905cbda74006318d1b12e37c797eb1b0d4eb3e316cf47f658f"}, + {file = "aiohttp-3.13.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7fbdf5ad6084f1940ce88933de34b62358d0f4a0b6ec097362dcd3e5a65a4989"}, + {file = "aiohttp-3.13.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7c3a50345635a02db61792c85bb86daffac05330f6473d524f1a4e3ef9d0046d"}, + {file = "aiohttp-3.13.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e87dff73f46e969af38ab3f7cb75316a7c944e2e574ff7c933bc01b10def7f5"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2adebd4577724dcae085665f294cc57c8701ddd4d26140504db622b8d566d7aa"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e036a3a645fe92309ec34b918394bb377950cbb43039a97edae6c08db64b23e2"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:23ad365e30108c422d0b4428cf271156dd56790f6dd50d770b8e360e6c5ab2e6"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1f9b2c2d4b9d958b1f9ae0c984ec1dd6b6689e15c75045be8ccb4011426268ca"}, + {file = 
"aiohttp-3.13.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3a92cf4b9bea33e15ecbaa5c59921be0f23222608143d025c989924f7e3e0c07"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:070599407f4954021509193404c4ac53153525a19531051661440644728ba9a7"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:29562998ec66f988d49fb83c9b01694fa927186b781463f376c5845c121e4e0b"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4dd3db9d0f4ebca1d887d76f7cdbcd1116ac0d05a9221b9dad82c64a62578c4d"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d7bc4b7f9c4921eba72677cd9fedd2308f4a4ca3e12fab58935295ad9ea98700"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:dacd50501cd017f8cccb328da0c90823511d70d24a323196826d923aad865901"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8b2f1414f6a1e0683f212ec80e813f4abef94c739fd090b66c9adf9d2a05feac"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04c3971421576ed24c191f610052bcb2f059e395bc2489dd99e397f9bc466329"}, + {file = "aiohttp-3.13.2-cp39-cp39-win32.whl", hash = "sha256:9f377d0a924e5cc94dc620bc6366fc3e889586a7f18b748901cf016c916e2084"}, + {file = "aiohttp-3.13.2-cp39-cp39-win_amd64.whl", hash = "sha256:9c705601e16c03466cb72011bd1af55d68fa65b045356d8f96c216e5f6db0fa5"}, + {file = "aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca"}, ] [package.dependencies] @@ -138,7 +172,7 @@ propcache = ">=0.2.0" yarl = ">=1.17.0,<2.0" [package.extras] -speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "brotlicffi ; platform_python_implementation != \"CPython\""] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "backports.zstd ; 
platform_python_implementation == \"CPython\" and python_version < \"3.14\"", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiohttp-retry" @@ -201,11 +235,12 @@ version = "4.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" -groups = ["dev", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, ] +markers = {main = "extra == \"hf\""} [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} @@ -333,85 +368,38 @@ soupsieve = ">1.2" html5lib = ["html5lib"] lxml = ["lxml"] -[[package]] -name = "black" -version = "25.1.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, - {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, - {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, - {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, - {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, - {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, - {file = 
"black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, - {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, - {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, - {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, - {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, - {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, - {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, - {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, - {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, - {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, - {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, - {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, - {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, - {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, - {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, - {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.10)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - [[package]] name = "boto3" -version = "1.40.25" +version = "1.41.2" description = "The AWS SDK for Python" optional = true python-versions = ">=3.9" groups = ["main"] markers = "extra == \"boto3\"" files = [ - {file = "boto3-1.40.25-py3-none-any.whl", hash = "sha256:d39bc3deb6780d910f00580837b720132055b0604769fd978780865ed3c019ea"}, - {file = "boto3-1.40.25.tar.gz", hash = "sha256:debfa4b2c67492d53629a52c999d71cddc31041a8b62ca1a8b1fb60fb0712ee1"}, + {file = "boto3-1.41.2-py3-none-any.whl", hash = "sha256:edcde82fdae4201aa690e3683f8e5b1a846cf1bbf79d03db4fa8a2f6f46dba9c"}, + {file = "boto3-1.41.2.tar.gz", hash = "sha256:7054fbc61cadab383f40ea6d725013ba6c8f569641dddb14c0055e790280ad6c"}, ] [package.dependencies] -botocore = ">=1.40.25,<1.41.0" +botocore = ">=1.41.2,<1.42.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.13.0,<0.14.0" +s3transfer = ">=0.15.0,<0.16.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.40.25" +version = "1.41.2" description = "Low-level, data-driven core of boto 3." 
optional = true python-versions = ">=3.9" groups = ["main"] markers = "extra == \"boto3\"" files = [ - {file = "botocore-1.40.25-py3-none-any.whl", hash = "sha256:5603ea9955cd31974446f0b5688911a5dad71fbdfbf7457944cda8a83fcf2a9e"}, - {file = "botocore-1.40.25.tar.gz", hash = "sha256:41fd186018a48dc517a4312a8d3085d548cb3fb1f463972134140bf7ee55a397"}, + {file = "botocore-1.41.2-py3-none-any.whl", hash = "sha256:154052dfaa7292212f01c8fab822c76cd10a15a7e164e4c45e4634eb40214b90"}, + {file = "botocore-1.41.2.tar.gz", hash = "sha256:49a3e8f4c1a1759a687941fef8b36efd7bafcf63c1ef74aa75d6497eb4887c9c"}, ] [package.dependencies] @@ -423,7 +411,7 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.27.6)"] +crt = ["awscrt (==0.28.4)"] [[package]] name = "certifi" @@ -436,6 +424,7 @@ files = [ {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] +markers = {main = "extra == \"hf\""} [[package]] name = "charset-normalizer" @@ -443,7 +432,7 @@ version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" -groups = ["main", "dev", "docs"] +groups = ["dev", "docs"] files = [ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, @@ -548,7 +537,7 @@ files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] -markers = {main = "extra == \"olot\""} +markers = {main = "extra == \"hf\" or extra == \"olot\""} [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -568,100 +557,116 @@ markers = {main = "(extra == \"hf\" or extra == \"olot\") and platform_system == [[package]] name = "coverage" -version = "7.10.6" +version = "7.10.7" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "coverage-7.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70e7bfbd57126b5554aa482691145f798d7df77489a177a6bef80de78860a356"}, - {file = "coverage-7.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e41be6f0f19da64af13403e52f2dec38bbc2937af54df8ecef10850ff8d35301"}, - {file = "coverage-7.10.6-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c61fc91ab80b23f5fddbee342d19662f3d3328173229caded831aa0bd7595460"}, - {file = "coverage-7.10.6-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10356fdd33a7cc06e8051413140bbdc6f972137508a3572e3f59f805cd2832fd"}, - {file = "coverage-7.10.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:80b1695cf7c5ebe7b44bf2521221b9bb8cdf69b1f24231149a7e3eb1ae5fa2fb"}, - {file = "coverage-7.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2e4c33e6378b9d52d3454bd08847a8651f4ed23ddbb4a0520227bd346382bbc6"}, - {file = "coverage-7.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c8a3ec16e34ef980a46f60dc6ad86ec60f763c3f2fa0db6d261e6e754f72e945"}, - {file = "coverage-7.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7d79dabc0a56f5af990cc6da9ad1e40766e82773c075f09cc571e2076fef882e"}, - {file = "coverage-7.10.6-cp310-cp310-win32.whl", hash = "sha256:86b9b59f2b16e981906e9d6383eb6446d5b46c278460ae2c36487667717eccf1"}, - {file = "coverage-7.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:e132b9152749bd33534e5bd8565c7576f135f157b4029b975e15ee184325f528"}, - {file = "coverage-7.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c706db3cabb7ceef779de68270150665e710b46d56372455cd741184f3868d8f"}, - {file = "coverage-7.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e0c38dc289e0508ef68ec95834cb5d2e96fdbe792eaccaa1bccac3966bbadcc"}, - {file = "coverage-7.10.6-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:752a3005a1ded28f2f3a6e8787e24f28d6abe176ca64677bcd8d53d6fe2ec08a"}, - {file = "coverage-7.10.6-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:689920ecfd60f992cafca4f5477d55720466ad2c7fa29bb56ac8d44a1ac2b47a"}, - {file = "coverage-7.10.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec98435796d2624d6905820a42f82149ee9fc4f2d45c2c5bc5a44481cc50db62"}, - {file = "coverage-7.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b37201ce4a458c7a758ecc4efa92fa8ed783c66e0fa3c42ae19fc454a0792153"}, - {file = "coverage-7.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2904271c80898663c810a6b067920a61dd8d38341244a3605bd31ab55250dad5"}, - {file = 
"coverage-7.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5aea98383463d6e1fa4e95416d8de66f2d0cb588774ee20ae1b28df826bcb619"}, - {file = "coverage-7.10.6-cp311-cp311-win32.whl", hash = "sha256:e3fb1fa01d3598002777dd259c0c2e6d9d5e10e7222976fc8e03992f972a2cba"}, - {file = "coverage-7.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:f35ed9d945bece26553d5b4c8630453169672bea0050a564456eb88bdffd927e"}, - {file = "coverage-7.10.6-cp311-cp311-win_arm64.whl", hash = "sha256:99e1a305c7765631d74b98bf7dbf54eeea931f975e80f115437d23848ee8c27c"}, - {file = "coverage-7.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5b2dd6059938063a2c9fee1af729d4f2af28fd1a545e9b7652861f0d752ebcea"}, - {file = "coverage-7.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:388d80e56191bf846c485c14ae2bc8898aa3124d9d35903fef7d907780477634"}, - {file = "coverage-7.10.6-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:90cb5b1a4670662719591aa92d0095bb41714970c0b065b02a2610172dbf0af6"}, - {file = "coverage-7.10.6-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:961834e2f2b863a0e14260a9a273aff07ff7818ab6e66d2addf5628590c628f9"}, - {file = "coverage-7.10.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf9a19f5012dab774628491659646335b1928cfc931bf8d97b0d5918dd58033c"}, - {file = "coverage-7.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99c4283e2a0e147b9c9cc6bc9c96124de9419d6044837e9799763a0e29a7321a"}, - {file = "coverage-7.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:282b1b20f45df57cc508c1e033403f02283adfb67d4c9c35a90281d81e5c52c5"}, - {file = "coverage-7.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cdbe264f11afd69841bd8c0d83ca10b5b32853263ee62e6ac6a0ab63895f972"}, - {file = "coverage-7.10.6-cp312-cp312-win32.whl", hash = "sha256:a517feaf3a0a3eca1ee985d8373135cfdedfbba3882a5eab4362bda7c7cf518d"}, - {file 
= "coverage-7.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:856986eadf41f52b214176d894a7de05331117f6035a28ac0016c0f63d887629"}, - {file = "coverage-7.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:acf36b8268785aad739443fa2780c16260ee3fa09d12b3a70f772ef100939d80"}, - {file = "coverage-7.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ffea0575345e9ee0144dfe5701aa17f3ba546f8c3bb48db62ae101afb740e7d6"}, - {file = "coverage-7.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d91d7317cde40a1c249d6b7382750b7e6d86fad9d8eaf4fa3f8f44cf171e80"}, - {file = "coverage-7.10.6-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e23dd5408fe71a356b41baa82892772a4cefcf758f2ca3383d2aa39e1b7a003"}, - {file = "coverage-7.10.6-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0f3f56e4cb573755e96a16501a98bf211f100463d70275759e73f3cbc00d4f27"}, - {file = "coverage-7.10.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db4a1d897bbbe7339946ffa2fe60c10cc81c43fab8b062d3fcb84188688174a4"}, - {file = "coverage-7.10.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d8fd7879082953c156d5b13c74aa6cca37f6a6f4747b39538504c3f9c63d043d"}, - {file = "coverage-7.10.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:28395ca3f71cd103b8c116333fa9db867f3a3e1ad6a084aa3725ae002b6583bc"}, - {file = "coverage-7.10.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:61c950fc33d29c91b9e18540e1aed7d9f6787cc870a3e4032493bbbe641d12fc"}, - {file = "coverage-7.10.6-cp313-cp313-win32.whl", hash = "sha256:160c00a5e6b6bdf4e5984b0ef21fc860bc94416c41b7df4d63f536d17c38902e"}, - {file = "coverage-7.10.6-cp313-cp313-win_amd64.whl", hash = "sha256:628055297f3e2aa181464c3808402887643405573eb3d9de060d81531fa79d32"}, - {file = "coverage-7.10.6-cp313-cp313-win_arm64.whl", hash = "sha256:df4ec1f8540b0bcbe26ca7dd0f541847cc8a108b35596f9f91f59f0c060bfdd2"}, - {file = 
"coverage-7.10.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c9a8b7a34a4de3ed987f636f71881cd3b8339f61118b1aa311fbda12741bff0b"}, - {file = "coverage-7.10.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dd5af36092430c2b075cee966719898f2ae87b636cefb85a653f1d0ba5d5393"}, - {file = "coverage-7.10.6-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b0353b0f0850d49ada66fdd7d0c7cdb0f86b900bb9e367024fd14a60cecc1e27"}, - {file = "coverage-7.10.6-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d6b9ae13d5d3e8aeca9ca94198aa7b3ebbc5acfada557d724f2a1f03d2c0b0df"}, - {file = "coverage-7.10.6-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:675824a363cc05781b1527b39dc2587b8984965834a748177ee3c37b64ffeafb"}, - {file = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:692d70ea725f471a547c305f0d0fc6a73480c62fb0da726370c088ab21aed282"}, - {file = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:851430a9a361c7a8484a36126d1d0ff8d529d97385eacc8dfdc9bfc8c2d2cbe4"}, - {file = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d9369a23186d189b2fc95cc08b8160ba242057e887d766864f7adf3c46b2df21"}, - {file = "coverage-7.10.6-cp313-cp313t-win32.whl", hash = "sha256:92be86fcb125e9bda0da7806afd29a3fd33fdf58fba5d60318399adf40bf37d0"}, - {file = "coverage-7.10.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6b3039e2ca459a70c79523d39347d83b73f2f06af5624905eba7ec34d64d80b5"}, - {file = "coverage-7.10.6-cp313-cp313t-win_arm64.whl", hash = "sha256:3fb99d0786fe17b228eab663d16bee2288e8724d26a199c29325aac4b0319b9b"}, - {file = "coverage-7.10.6-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6008a021907be8c4c02f37cdc3ffb258493bdebfeaf9a839f9e71dfdc47b018e"}, - {file = "coverage-7.10.6-cp314-cp314-macosx_11_0_arm64.whl", hash = 
"sha256:5e75e37f23eb144e78940b40395b42f2321951206a4f50e23cfd6e8a198d3ceb"}, - {file = "coverage-7.10.6-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0f7cb359a448e043c576f0da00aa8bfd796a01b06aa610ca453d4dde09cc1034"}, - {file = "coverage-7.10.6-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c68018e4fc4e14b5668f1353b41ccf4bc83ba355f0e1b3836861c6f042d89ac1"}, - {file = "coverage-7.10.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cd4b2b0707fc55afa160cd5fc33b27ccbf75ca11d81f4ec9863d5793fc6df56a"}, - {file = "coverage-7.10.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cec13817a651f8804a86e4f79d815b3b28472c910e099e4d5a0e8a3b6a1d4cb"}, - {file = "coverage-7.10.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f2a6a8e06bbda06f78739f40bfb56c45d14eb8249d0f0ea6d4b3d48e1f7c695d"}, - {file = "coverage-7.10.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:081b98395ced0d9bcf60ada7661a0b75f36b78b9d7e39ea0790bb4ed8da14747"}, - {file = "coverage-7.10.6-cp314-cp314-win32.whl", hash = "sha256:6937347c5d7d069ee776b2bf4e1212f912a9f1f141a429c475e6089462fcecc5"}, - {file = "coverage-7.10.6-cp314-cp314-win_amd64.whl", hash = "sha256:adec1d980fa07e60b6ef865f9e5410ba760e4e1d26f60f7e5772c73b9a5b0713"}, - {file = "coverage-7.10.6-cp314-cp314-win_arm64.whl", hash = "sha256:a80f7aef9535442bdcf562e5a0d5a5538ce8abe6bb209cfbf170c462ac2c2a32"}, - {file = "coverage-7.10.6-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:0de434f4fbbe5af4fa7989521c655c8c779afb61c53ab561b64dcee6149e4c65"}, - {file = "coverage-7.10.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6e31b8155150c57e5ac43ccd289d079eb3f825187d7c66e755a055d2c85794c6"}, - {file = "coverage-7.10.6-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:98cede73eb83c31e2118ae8d379c12e3e42736903a8afcca92a7218e1f2903b0"}, - {file = 
"coverage-7.10.6-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f863c08f4ff6b64fa8045b1e3da480f5374779ef187f07b82e0538c68cb4ff8e"}, - {file = "coverage-7.10.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b38261034fda87be356f2c3f42221fdb4171c3ce7658066ae449241485390d5"}, - {file = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e93b1476b79eae849dc3872faeb0bf7948fd9ea34869590bc16a2a00b9c82a7"}, - {file = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ff8a991f70f4c0cf53088abf1e3886edcc87d53004c7bb94e78650b4d3dac3b5"}, - {file = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ac765b026c9f33044419cbba1da913cfb82cca1b60598ac1c7a5ed6aac4621a0"}, - {file = "coverage-7.10.6-cp314-cp314t-win32.whl", hash = "sha256:441c357d55f4936875636ef2cfb3bee36e466dcf50df9afbd398ce79dba1ebb7"}, - {file = "coverage-7.10.6-cp314-cp314t-win_amd64.whl", hash = "sha256:073711de3181b2e204e4870ac83a7c4853115b42e9cd4d145f2231e12d670930"}, - {file = "coverage-7.10.6-cp314-cp314t-win_arm64.whl", hash = "sha256:137921f2bac5559334ba66122b753db6dc5d1cf01eb7b64eb412bb0d064ef35b"}, - {file = "coverage-7.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90558c35af64971d65fbd935c32010f9a2f52776103a259f1dee865fe8259352"}, - {file = "coverage-7.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8953746d371e5695405806c46d705a3cd170b9cc2b9f93953ad838f6c1e58612"}, - {file = "coverage-7.10.6-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c83f6afb480eae0313114297d29d7c295670a41c11b274e6bca0c64540c1ce7b"}, - {file = "coverage-7.10.6-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7eb68d356ba0cc158ca535ce1381dbf2037fa8cb5b1ae5ddfc302e7317d04144"}, - {file = 
"coverage-7.10.6-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b15a87265e96307482746d86995f4bff282f14b027db75469c446da6127433b"}, - {file = "coverage-7.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fc53ba868875bfbb66ee447d64d6413c2db91fddcfca57025a0e7ab5b07d5862"}, - {file = "coverage-7.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efeda443000aa23f276f4df973cb82beca682fd800bb119d19e80504ffe53ec2"}, - {file = "coverage-7.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9702b59d582ff1e184945d8b501ffdd08d2cee38d93a2206aa5f1365ce0b8d78"}, - {file = "coverage-7.10.6-cp39-cp39-win32.whl", hash = "sha256:2195f8e16ba1a44651ca684db2ea2b2d4b5345da12f07d9c22a395202a05b23c"}, - {file = "coverage-7.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:f32ff80e7ef6a5b5b606ea69a36e97b219cd9dc799bcf2963018a4d8f788cfbf"}, - {file = "coverage-7.10.6-py3-none-any.whl", hash = "sha256:92c4ecf6bf11b2e85fd4d8204814dc26e6a19f0c9d938c207c5cb0eadfcabbe3"}, - {file = "coverage-7.10.6.tar.gz", hash = "sha256:f644a3ae5933a552a29dbb9aa2f90c677a875f80ebea028e5a52a4f429044b90"}, + {file = "coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a"}, + {file = "coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5"}, + {file = "coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17"}, + {file = "coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b"}, + {file = "coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87"}, + {file = "coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e"}, + {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e"}, + {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df"}, + {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0"}, + {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13"}, + {file = "coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b"}, + {file = "coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807"}, + {file = "coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59"}, + {file = "coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a"}, + {file = "coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699"}, + {file = "coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d"}, + {file = "coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e"}, + {file = 
"coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23"}, + {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab"}, + {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82"}, + {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2"}, + {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61"}, + {file = "coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14"}, + {file = "coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2"}, + {file = "coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a"}, + {file = "coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417"}, + {file = "coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973"}, + {file = "coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c"}, + {file = "coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7"}, + {file = "coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6"}, + {file = "coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59"}, + {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b"}, + {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a"}, + {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb"}, + {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1"}, + {file = "coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256"}, + {file = "coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba"}, + {file = "coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf"}, + {file = "coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d"}, + {file = "coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b"}, + {file = "coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e"}, + {file = "coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b"}, + {file = 
"coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49"}, + {file = "coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911"}, + {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0"}, + {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f"}, + {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c"}, + {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f"}, + {file = "coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698"}, + {file = "coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843"}, + {file = "coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546"}, + {file = "coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c"}, + {file = "coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15"}, + {file = "coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4"}, + {file = "coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0"}, + {file = "coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0"}, + {file = "coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65"}, + {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541"}, + {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6"}, + {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999"}, + {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2"}, + {file = "coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a"}, + {file = "coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb"}, + {file = "coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb"}, + {file = "coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520"}, + {file = "coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32"}, + {file = "coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f"}, + {file = 
"coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a"}, + {file = "coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360"}, + {file = "coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69"}, + {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14"}, + {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe"}, + {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e"}, + {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd"}, + {file = "coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2"}, + {file = "coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681"}, + {file = "coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880"}, + {file = "coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63"}, + {file = "coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2"}, + {file = "coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d"}, + {file = "coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0"}, + {file = "coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699"}, + {file = "coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9"}, + {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f"}, + {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1"}, + {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0"}, + {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399"}, + {file = "coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235"}, + {file = "coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d"}, + {file = "coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a"}, + {file = "coverage-7.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3"}, + {file = "coverage-7.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c"}, + {file = 
"coverage-7.10.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396"}, + {file = "coverage-7.10.7-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40"}, + {file = "coverage-7.10.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594"}, + {file = "coverage-7.10.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a"}, + {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b"}, + {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3"}, + {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0"}, + {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f"}, + {file = "coverage-7.10.7-cp39-cp39-win32.whl", hash = "sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431"}, + {file = "coverage-7.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07"}, + {file = "coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260"}, + {file = "coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239"}, ] [package.dependencies] @@ -684,14 +689,14 @@ files = [ [[package]] name = "eval-type-backport" -version = "0.2.2" +version = "0.3.0" description = "Like 
`typing._eval_type`, but lets older Python versions use newer typing features." optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"}, - {file = "eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"}, + {file = "eval_type_backport-0.3.0-py3-none-any.whl", hash = "sha256:975a10a0fe333c8b6260d7fdb637698c9a16c3a9e3b6eb943fee6a6f67a37fe8"}, + {file = "eval_type_backport-0.3.0.tar.gz", hash = "sha256:1638210401e184ff17f877e9a2fa076b60b5838790f4532a21761cc2be67aea1"}, ] [package.extras] @@ -703,16 +708,31 @@ version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" -groups = ["dev", "docs"] -markers = "python_version < \"3.11\"" +groups = ["main", "dev", "docs"] files = [ {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] +markers = {main = "extra == \"hf\" and python_version < \"3.11\"", dev = "python_version < \"3.11\"", docs = "python_version < \"3.11\""} [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "execnet" +version = "2.1.1" +description = "execnet: rapid multi-Python deployment" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, +] + +[package.extras] +testing = ["hatch", "pre-commit", "pytest", "tox"] + [[package]] name = "filelock" version = "3.13.1" @@ -724,7 +744,7 @@ files = [ {file = 
"filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, ] -markers = {main = "extra == \"hf\"", dev = "python_version < \"3.13\""} +markers = {main = "extra == \"hf\"", dev = "python_full_version > \"3.9.0\""} [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] @@ -869,14 +889,14 @@ tqdm = ["tqdm"] [[package]] name = "furo" -version = "2025.7.19" +version = "2025.9.25" description = "A clean customisable Sphinx documentation theme." optional = false python-versions = ">=3.8" groups = ["docs"] files = [ - {file = "furo-2025.7.19-py3-none-any.whl", hash = "sha256:bdea869822dfd2b494ea84c0973937e35d1575af088b6721a29c7f7878adc9e3"}, - {file = "furo-2025.7.19.tar.gz", hash = "sha256:4164b2cafcf4023a59bb3c594e935e2516f6b9d35e9a5ea83d8f6b43808fe91f"}, + {file = "furo-2025.9.25-py3-none-any.whl", hash = "sha256:2937f68e823b8e37b410c972c371bc2b1d88026709534927158e0cb3fac95afe"}, + {file = "furo-2025.9.25.tar.gz", hash = "sha256:3eac05582768fdbbc2bdfa1cdbcdd5d33cfc8b4bd2051729ff4e026a1d7e0a98"}, ] [package.dependencies] @@ -907,22 +927,23 @@ version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.8" -groups = ["dev", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, ] +markers = {main = "extra == \"hf\""} [[package]] name = "harfile" -version = "0.3.1" +version = "0.4.0" description = "Writer for HTTP Archive (HAR) files" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "harfile-0.3.1-py3-none-any.whl", hash 
= "sha256:27e63c98cba93b94175ac6407a5717badc2cd993daffd5a5bfdc893ebffb56ac"}, - {file = "harfile-0.3.1.tar.gz", hash = "sha256:8998fa4410ad17d7b5671bab36ea058adde835bbb27f6a3da4b423019da584da"}, + {file = "harfile-0.4.0-py3-none-any.whl", hash = "sha256:ddb1483cb30f7549ddc67c0b7fdc6424f1feb19373b67e33e429b02f09bf43a8"}, + {file = "harfile-0.4.0.tar.gz", hash = "sha256:34e2d9ef34101d769566bffab3c420e147776174308bed1a036ed8db600cabde"}, ] [package.extras] @@ -933,21 +954,35 @@ tests = ["coverage (>=7)", "hypothesis (>=6)", "hypothesis-jsonschema (>=0.23.1) [[package]] name = "hf-xet" -version = "1.1.5" +version = "1.2.0" description = "Fast transfer of large files with the Hugging Face Hub." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"hf\" and (platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\")" -files = [ - {file = "hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f52c2fa3635b8c37c7764d8796dfa72706cc4eded19d638331161e82b0792e23"}, - {file = "hf_xet-1.1.5-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:9fa6e3ee5d61912c4a113e0708eaaef987047616465ac7aa30f7121a48fc1af8"}, - {file = "hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc874b5c843e642f45fd85cda1ce599e123308ad2901ead23d3510a47ff506d1"}, - {file = "hf_xet-1.1.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dbba1660e5d810bd0ea77c511a99e9242d920790d0e63c0e4673ed36c4022d18"}, - {file = "hf_xet-1.1.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ab34c4c3104133c495785d5d8bba3b1efc99de52c02e759cf711a91fd39d3a14"}, - {file = "hf_xet-1.1.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:83088ecea236d5113de478acb2339f92c95b4fb0462acaa30621fac02f5a534a"}, - {file = "hf_xet-1.1.5-cp37-abi3-win_amd64.whl", hash = "sha256:73e167d9807d166596b4b2f0b585c6d5bd84a26dea32843665a8b58f6edba245"}, - {file = "hf_xet-1.1.5.tar.gz", hash = 
"sha256:69ebbcfd9ec44fdc2af73441619eeb06b94ee34511bbcf57cd423820090f5694"}, +markers = "extra == \"hf\" and (platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\")" +files = [ + {file = "hf_xet-1.2.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:ceeefcd1b7aed4956ae8499e2199607765fbd1c60510752003b6cc0b8413b649"}, + {file = "hf_xet-1.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b70218dd548e9840224df5638fdc94bd033552963cfa97f9170829381179c813"}, + {file = "hf_xet-1.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d40b18769bb9a8bc82a9ede575ce1a44c75eb80e7375a01d76259089529b5dc"}, + {file = "hf_xet-1.2.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:cd3a6027d59cfb60177c12d6424e31f4b5ff13d8e3a1247b3a584bf8977e6df5"}, + {file = "hf_xet-1.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6de1fc44f58f6dd937956c8d304d8c2dea264c80680bcfa61ca4a15e7b76780f"}, + {file = "hf_xet-1.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f182f264ed2acd566c514e45da9f2119110e48a87a327ca271027904c70c5832"}, + {file = "hf_xet-1.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:293a7a3787e5c95d7be1857358a9130694a9c6021de3f27fa233f37267174382"}, + {file = "hf_xet-1.2.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:10bfab528b968c70e062607f663e21e34e2bba349e8038db546646875495179e"}, + {file = "hf_xet-1.2.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a212e842647b02eb6a911187dc878e79c4aa0aa397e88dd3b26761676e8c1f8"}, + {file = "hf_xet-1.2.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30e06daccb3a7d4c065f34fc26c14c74f4653069bb2b194e7f18f17cbe9939c0"}, + {file = "hf_xet-1.2.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:29c8fc913a529ec0a91867ce3d119ac1aac966e098cf49501800c870328cc090"}, + {file = "hf_xet-1.2.0-cp314-cp314t-musllinux_1_2_aarch64.whl", 
hash = "sha256:66e159cbfcfbb29f920db2c09ed8b660eb894640d284f102ada929b6e3dc410a"}, + {file = "hf_xet-1.2.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9c91d5ae931510107f148874e9e2de8a16052b6f1b3ca3c1b12f15ccb491390f"}, + {file = "hf_xet-1.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:210d577732b519ac6ede149d2f2f34049d44e8622bf14eb3d63bbcd2d4b332dc"}, + {file = "hf_xet-1.2.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:46740d4ac024a7ca9b22bebf77460ff43332868b661186a8e46c227fdae01848"}, + {file = "hf_xet-1.2.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:27df617a076420d8845bea087f59303da8be17ed7ec0cd7ee3b9b9f579dff0e4"}, + {file = "hf_xet-1.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3651fd5bfe0281951b988c0facbe726aa5e347b103a675f49a3fa8144c7968fd"}, + {file = "hf_xet-1.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d06fa97c8562fb3ee7a378dd9b51e343bc5bc8190254202c9771029152f5e08c"}, + {file = "hf_xet-1.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4c1428c9ae73ec0939410ec73023c4f842927f39db09b063b9482dac5a3bb737"}, + {file = "hf_xet-1.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a55558084c16b09b5ed32ab9ed38421e2d87cf3f1f89815764d1177081b99865"}, + {file = "hf_xet-1.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:e6584a52253f72c9f52f9e549d5895ca7a471608495c4ecaa6cc73dba2b24d69"}, + {file = "hf_xet-1.2.0.tar.gz", hash = "sha256:a8c27070ca547293b6890c4bf389f713f80e8c478631432962bb7f4bc0bd7d7f"}, ] [package.extras] @@ -959,11 +994,12 @@ version = "1.0.9" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, ] +markers = {main = "extra == \"hf\""} [package.dependencies] certifi = "*" @@ -981,11 +1017,12 @@ version = "0.28.1" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] +markers = {main = "extra == \"hf\""} [package.dependencies] anyio = "*" @@ -1002,43 +1039,40 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "huggingface-hub" -version = "0.34.4" +version = "1.1.5" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = true -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" groups = ["main"] markers = "extra == \"hf\"" files = [ - {file = "huggingface_hub-0.34.4-py3-none-any.whl", hash = "sha256:9b365d781739c93ff90c359844221beef048403f1bc1f1c123c191257c3c890a"}, - {file = "huggingface_hub-0.34.4.tar.gz", hash = "sha256:a4228daa6fb001be3f4f4bdaf9a0db00e1739235702848df00885c9b5742c85c"}, + {file = "huggingface_hub-1.1.5-py3-none-any.whl", hash = "sha256:e88ecc129011f37b868586bbcfae6c56868cae80cd56a79d61575426a3aa0d7d"}, + {file = "huggingface_hub-1.1.5.tar.gz", hash = "sha256:40ba5c9a08792d888fde6088920a0a71ab3cd9d5e6617c81a797c657f1fd9968"}, ] [package.dependencies] filelock = "*" fsspec = ">=2023.5.0" -hf-xet = {version = ">=1.1.3,<2.0.0", markers = "platform_machine == \"x86_64\" or platform_machine == \"amd64\" or 
platform_machine == \"arm64\" or platform_machine == \"aarch64\""} +hf-xet = {version = ">=1.2.0,<2.0.0", markers = "platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\""} +httpx = ">=0.23.0,<1" packaging = ">=20.9" pyyaml = ">=5.1" -requests = "*" +shellingham = "*" tqdm = ">=4.42.1" +typer-slim = "*" typing-extensions = ">=3.7.4.3" [package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1.3.2)", "fastapi", "gradio (>=4.0.0)", "httpx", "itsdangerous", "jedi", "libcst (>=1.4.0)", "mypy (==1.15.0) ; python_version >= \"3.9\"", "mypy (>=1.14.1,<1.15.0) ; python_version == \"3.8\"", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1.3.2)", "fastapi", "gradio (>=4.0.0)", "httpx", "itsdangerous", "jedi", "libcst (>=1.4.0)", "mypy (==1.15.0) ; python_version >= \"3.9\"", "mypy (>=1.14.1,<1.15.0) ; python_version == \"3.8\"", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +all = ["Jinja2", "Pillow", "authlib (>=1.3.2)", "fastapi", "fastapi", "httpx", "itsdangerous", "jedi", "libcst (>=1.4.0)", "mypy (==1.15.0)", "numpy", "pytest (>=8.4.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures (<16.0)", "pytest-vcr", "pytest-xdist", "ruff 
(>=0.9.0)", "soundfile", "ty", "types-PyYAML", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +dev = ["Jinja2", "Pillow", "authlib (>=1.3.2)", "fastapi", "fastapi", "httpx", "itsdangerous", "jedi", "libcst (>=1.4.0)", "mypy (==1.15.0)", "numpy", "pytest (>=8.4.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures (<16.0)", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", "ty", "types-PyYAML", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -hf-transfer = ["hf-transfer (>=0.1.4)"] -hf-xet = ["hf-xet (>=1.1.2,<2.0.0)"] -inference = ["aiohttp"] -mcp = ["aiohttp", "mcp (>=1.8.0)", "typer"] +hf-xet = ["hf-xet (>=1.1.3,<2.0.0)"] +mcp = ["mcp (>=1.8.0)"] oauth = ["authlib (>=1.3.2)", "fastapi", "httpx", "itsdangerous"] -quality = ["libcst (>=1.4.0)", "mypy (==1.15.0) ; python_version >= \"3.9\"", "mypy (>=1.14.1,<1.15.0) ; python_version == \"3.8\"", "ruff (>=0.9.0)"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -tensorflow-testing = ["keras (<3.0)", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1.3.2)", "fastapi", "gradio (>=4.0.0)", "httpx", "itsdangerous", "jedi", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +quality = ["libcst (>=1.4.0)", "mypy (==1.15.0)", "ruff (>=0.9.0)", "ty"] +testing = ["Jinja2", "Pillow", "authlib (>=1.3.2)", "fastapi", "fastapi", "httpx", "itsdangerous", "jedi", "numpy", "pytest (>=8.4.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures (<16.0)", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] torch = ["safetensors[torch]", "torch"] -typing = ["types-PyYAML", "types-requests", 
"types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] +typing = ["types-PyYAML", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] [[package]] name = "hypothesis" @@ -1143,7 +1177,7 @@ description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "python_version < \"3.10\"" +markers = "python_version == \"3.9\"" files = [ {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, @@ -1442,7 +1476,7 @@ description = "MessagePack serializer" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version < \"3.13\"" +markers = "python_full_version > \"3.9.0\"" files = [ {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, @@ -1612,50 +1646,50 @@ files = [ [[package]] name = "mypy" -version = "1.18.1" +version = "1.18.2" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "mypy-1.18.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2761b6ae22a2b7d8e8607fb9b81ae90bc2e95ec033fd18fa35e807af6c657763"}, - {file = "mypy-1.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b10e3ea7f2eec23b4929a3fabf84505da21034a4f4b9613cda81217e92b74f3"}, - {file = "mypy-1.18.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:261fbfced030228bc0f724d5d92f9ae69f46373bdfd0e04a533852677a11dbea"}, - {file = 
"mypy-1.18.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4dc6b34a1c6875e6286e27d836a35c0d04e8316beac4482d42cfea7ed2527df8"}, - {file = "mypy-1.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1cabb353194d2942522546501c0ff75c4043bf3b63069cb43274491b44b773c9"}, - {file = "mypy-1.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:738b171690c8e47c93569635ee8ec633d2cdb06062f510b853b5f233020569a9"}, - {file = "mypy-1.18.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c903857b3e28fc5489e54042684a9509039ea0aedb2a619469438b544ae1961"}, - {file = "mypy-1.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a0c8392c19934c2b6c65566d3a6abdc6b51d5da7f5d04e43f0eb627d6eeee65"}, - {file = "mypy-1.18.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f85eb7efa2ec73ef63fc23b8af89c2fe5bf2a4ad985ed2d3ff28c1bb3c317c92"}, - {file = "mypy-1.18.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:82ace21edf7ba8af31c3308a61dc72df30500f4dbb26f99ac36b4b80809d7e94"}, - {file = "mypy-1.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a2dfd53dfe632f1ef5d161150a4b1f2d0786746ae02950eb3ac108964ee2975a"}, - {file = "mypy-1.18.1-cp311-cp311-win_amd64.whl", hash = "sha256:320f0ad4205eefcb0e1a72428dde0ad10be73da9f92e793c36228e8ebf7298c0"}, - {file = "mypy-1.18.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:502cde8896be8e638588b90fdcb4c5d5b8c1b004dfc63fd5604a973547367bb9"}, - {file = "mypy-1.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7509549b5e41be279afc1228242d0e397f1af2919a8f2877ad542b199dc4083e"}, - {file = "mypy-1.18.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5956ecaabb3a245e3f34100172abca1507be687377fe20e24d6a7557e07080e2"}, - {file = "mypy-1.18.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:8750ceb014a96c9890421c83f0db53b0f3b8633e2864c6f9bc0a8e93951ed18d"}, - {file = "mypy-1.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fb89ea08ff41adf59476b235293679a6eb53a7b9400f6256272fb6029bec3ce5"}, - {file = "mypy-1.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:2657654d82fcd2a87e02a33e0d23001789a554059bbf34702d623dafe353eabf"}, - {file = "mypy-1.18.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d70d2b5baf9b9a20bc9c730015615ae3243ef47fb4a58ad7b31c3e0a59b5ef1f"}, - {file = "mypy-1.18.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b8367e33506300f07a43012fc546402f283c3f8bcff1dc338636affb710154ce"}, - {file = "mypy-1.18.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:913f668ec50c3337b89df22f973c1c8f0b29ee9e290a8b7fe01cc1ef7446d42e"}, - {file = "mypy-1.18.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a0e70b87eb27b33209fa4792b051c6947976f6ab829daa83819df5f58330c71"}, - {file = "mypy-1.18.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c378d946e8a60be6b6ede48c878d145546fb42aad61df998c056ec151bf6c746"}, - {file = "mypy-1.18.1-cp313-cp313-win_amd64.whl", hash = "sha256:2cd2c1e0f3a7465f22731987fff6fc427e3dcbb4ca5f7db5bbeaff2ff9a31f6d"}, - {file = "mypy-1.18.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ba24603c58e34dd5b096dfad792d87b304fc6470cbb1c22fd64e7ebd17edcc61"}, - {file = "mypy-1.18.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ed36662fb92ae4cb3cacc682ec6656208f323bbc23d4b08d091eecfc0863d4b5"}, - {file = "mypy-1.18.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:040ecc95e026f71a9ad7956fea2724466602b561e6a25c2e5584160d3833aaa8"}, - {file = "mypy-1.18.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:937e3ed86cb731276706e46e03512547e43c391a13f363e08d0fee49a7c38a0d"}, - {file = 
"mypy-1.18.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1f95cc4f01c0f1701ca3b0355792bccec13ecb2ec1c469e5b85a6ef398398b1d"}, - {file = "mypy-1.18.1-cp314-cp314-win_amd64.whl", hash = "sha256:e4f16c0019d48941220ac60b893615be2f63afedaba6a0801bdcd041b96991ce"}, - {file = "mypy-1.18.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e37763af63a8018308859bc83d9063c501a5820ec5bd4a19f0a2ac0d1c25c061"}, - {file = "mypy-1.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:51531b6e94f34b8bd8b01dee52bbcee80daeac45e69ec5c36e25bce51cbc46e6"}, - {file = "mypy-1.18.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dbfdea20e90e9c5476cea80cfd264d8e197c6ef2c58483931db2eefb2f7adc14"}, - {file = "mypy-1.18.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99f272c9b59f5826fffa439575716276d19cbf9654abc84a2ba2d77090a0ba14"}, - {file = "mypy-1.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8c05a7f8c00300a52f3a4fcc95a185e99bf944d7e851ff141bae8dcf6dcfeac4"}, - {file = "mypy-1.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:2fbcecbe5cf213ba294aa8c0b8c104400bf7bb64db82fb34fe32a205da4b3531"}, - {file = "mypy-1.18.1-py3-none-any.whl", hash = "sha256:b76a4de66a0ac01da1be14ecc8ae88ddea33b8380284a9e3eae39d57ebcbe26e"}, - {file = "mypy-1.18.1.tar.gz", hash = "sha256:9e988c64ad3ac5987f43f5154f884747faf62141b7f842e87465b45299eea5a9"}, + {file = "mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c"}, + {file = "mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e"}, + {file = "mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b"}, + {file = 
"mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66"}, + {file = "mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428"}, + {file = "mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed"}, + {file = "mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f"}, + {file = "mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341"}, + {file = "mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d"}, + {file = "mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86"}, + {file = "mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37"}, + {file = "mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8"}, + {file = "mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34"}, + {file = "mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764"}, + {file = "mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893"}, + {file = "mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914"}, + {file = "mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8"}, + {file = "mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074"}, + {file = "mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc"}, + {file = "mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e"}, + {file = "mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986"}, + {file = "mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d"}, + {file = "mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba"}, + {file = "mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544"}, + {file = "mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce"}, + {file = "mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d"}, + {file = "mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c"}, + {file = "mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb"}, + {file = 
"mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075"}, + {file = "mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf"}, + {file = "mypy-1.18.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25a9c8fb67b00599f839cf472713f54249a62efd53a54b565eb61956a7e3296b"}, + {file = "mypy-1.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2b9c7e284ee20e7598d6f42e13ca40b4928e6957ed6813d1ab6348aa3f47133"}, + {file = "mypy-1.18.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6985ed057513e344e43a26cc1cd815c7a94602fb6a3130a34798625bc2f07b6"}, + {file = "mypy-1.18.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22f27105f1525ec024b5c630c0b9f36d5c1cc4d447d61fe51ff4bd60633f47ac"}, + {file = "mypy-1.18.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:030c52d0ea8144e721e49b1f68391e39553d7451f0c3f8a7565b59e19fcb608b"}, + {file = "mypy-1.18.2-cp39-cp39-win_amd64.whl", hash = "sha256:aa5e07ac1a60a253445797e42b8b2963c9675563a94f11291ab40718b016a7a0"}, + {file = "mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e"}, + {file = "mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b"}, ] [package.dependencies] @@ -1725,15 +1759,15 @@ files = [ [[package]] name = "olot" -version = "0.1.8" +version = "0.1.11" description = "oci layers on top" optional = true python-versions = "<4.0,>=3.9" groups = ["main"] markers = "extra == \"olot\"" files = [ - {file = "olot-0.1.8-py3-none-any.whl", hash = "sha256:a4b2e7547429bd09d2d4c868682e1110f310bfa707b95adf1e3533b7afc1c280"}, - {file = "olot-0.1.8.tar.gz", hash = "sha256:2082753e95537308677ec813693a05363b09f9fb7ec27d6a3507037d95321520"}, + {file = "olot-0.1.11-py3-none-any.whl", hash = 
"sha256:a1e6949bb3c559aa4829f854ad20ebe3d72c73815369403c62e2529e5c5ca3bd"}, + {file = "olot-0.1.11.tar.gz", hash = "sha256:2e3be9fa871e5e53ef551ab9906197f7a9f795c6e21739d163041bef40571416"}, ] [package.dependencies] @@ -1765,23 +1799,6 @@ files = [ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] -[[package]] -name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] - [[package]] name = "pluggy" version = "1.5.0" @@ -1913,7 +1930,7 @@ description = "" optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version < \"3.13\"" +markers = "python_full_version > \"3.9.0\"" files = [ {file = "protobuf-6.31.1-cp310-abi3-win32.whl", hash = "sha256:7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9"}, {file = "protobuf-6.31.1-cp310-abi3-win_amd64.whl", hash = "sha256:426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447"}, @@ -1928,21 +1945,21 @@ files = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.12.4" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic-2.11.7-py3-none-any.whl", hash = 
"sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, - {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, + {file = "pydantic-2.12.4-py3-none-any.whl", hash = "sha256:92d3d202a745d46f9be6df459ac5a064fdaa3c1c4cd8adcfa332ccf3c05f871e"}, + {file = "pydantic-2.12.4.tar.gz", hash = "sha256:0f8cb9555000a4b5b617f66bfd2566264c4984b27589d3b845685983e8ea85ac"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.33.2" -typing-extensions = ">=4.12.2" -typing-inspection = ">=0.4.0" +pydantic-core = "2.41.5" +typing-extensions = ">=4.14.1" +typing-inspection = ">=0.4.2" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -1950,115 +1967,137 @@ timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.5" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, - {file = 
"pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = 
"sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, - {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = 
"pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = 
"pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", 
hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = 
"sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = 
"sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, + {file = 
"pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, + {file = 
"pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, +] + +[package.dependencies] +typing-extensions = ">=4.14.1" [[package]] name = "pygments" @@ -2198,14 +2237,14 @@ test = ["black (>=22.1.0)", "flake8 (>=4.0.1)", "pre-commit (>=2.17.0)", "tox (> [[package]] name = "pytest-mock" -version = "3.14.1" +version = "3.15.1" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = 
"pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0"}, - {file = "pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e"}, + {file = "pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d"}, + {file = "pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f"}, ] [package.dependencies] @@ -2230,6 +2269,27 @@ files = [ attrs = ">=19.2.0" pytest = ">=7.4" +[[package]] +name = "pytest-xdist" +version = "3.8.0" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88"}, + {file = "pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1"}, +] + +[package.dependencies] +execnet = ">=2.1" +pytest = ">=7.0.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -2309,41 +2369,32 @@ markers = {main = "extra == \"hf\""} [[package]] name = "ray" -version = "2.49.1" +version = "2.52.1" description = "Ray provides a simple, universal API for building distributed applications." 
optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version < \"3.13\"" -files = [ - {file = "ray-2.49.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f8e12dd7db8215a86ef7183a2c9c22102880e0ecd08f94b1d17ad9e607e4a359"}, - {file = "ray-2.49.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:99086b4bb32038bd63b7575667fdc1425cb751afe0434ed0d158e3d3ee0c726f"}, - {file = "ray-2.49.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:8f39abe1e4ea5e4dde2567e7e7af7b41f7eb53f6a9c3d3d1cb800fb7a3652104"}, - {file = "ray-2.49.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:9384d27059caf86a38cbbcb422ab61b68de87333784bf1b22722d74fdba01ef6"}, - {file = "ray-2.49.1-cp310-cp310-win_amd64.whl", hash = "sha256:e7050b6fc49af1de33dc6e4cd1368e6b408a5d773baf484a54694f93aa8c5cba"}, - {file = "ray-2.49.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:94326db0c83f7f391352b135b37a8eca737c1addf18902ab190be6b8608a8039"}, - {file = "ray-2.49.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:5b1e00086156a1d589664d1ecce3d4589b089cbab09d7b8780360e5b34ae907a"}, - {file = "ray-2.49.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:cb6fde412e634f93333c646b1089e4d3184bc7fcb7fc02818891b281a80240d2"}, - {file = "ray-2.49.1-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:c8a039447c3049e336dbaf9ff16732943aff8bde7d5376390bc5b71eb08cb996"}, - {file = "ray-2.49.1-cp311-cp311-win_amd64.whl", hash = "sha256:0d97b2cd1ffa6b7f9e965d25471584ef172581f1b8d4b8413fcdfb843debe9f6"}, - {file = "ray-2.49.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f7a715855d179c1dd6ae2e8b5f8919638cde379a5b157963a0bd74d1178b8b5a"}, - {file = "ray-2.49.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:f9365de3a9a661ccf089dfaac01c8b68ba00c98443330ef678e0c0248272c722"}, - {file = "ray-2.49.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:cf63b916e399c2a4d484249611a96cee283cef32e544126115a4ad3e872c34eb"}, - {file = 
"ray-2.49.1-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:484064fca02732e0b6f4a09dad0d1fb6abd4ca4b6d9bf7c26ab7a17a2887cd09"}, - {file = "ray-2.49.1-cp312-cp312-win_amd64.whl", hash = "sha256:96bfdc301f38ce626fd638396cd2f52c6e3c6ba751b475f54db17152c4bdf5ab"}, - {file = "ray-2.49.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:5d19e568a8cfbccf128bf34f9ce48bcbd11e9f0b94db190404f6beb55ae495d2"}, - {file = "ray-2.49.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:0fb2e28c80e4599ffec3a34e926b9b012ad1c350f49cdb8d8892dd7ab93b4789"}, - {file = "ray-2.49.1-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:94e1c5068897c63546d09ea263a8844ce163c6d80ce30b1af3adc753708be63c"}, - {file = "ray-2.49.1-cp313-cp313-manylinux2014_x86_64.whl", hash = "sha256:d78ef3a7ae48819d640fc05006bf2a7caab48b15c567a53a79c015f3d0054b3d"}, - {file = "ray-2.49.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:abd7cfa2c1b044f5b6780c9e6e8ce064eb832d09bcc897f33106717911e818ac"}, - {file = "ray-2.49.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:ed6844351456a9745bbf51e58935943d96570f9607d9dc07e86580c2fd193d7f"}, - {file = "ray-2.49.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bdc57394711fdbc020d57bb9bac5d78956d92b963bdf100e55e774ed814361a4"}, - {file = "ray-2.49.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:3d98a7abdf7f40a05eb5211c5e82687d99f93c5f674fb8e90bbd8ee2a6975476"}, - {file = "ray-2.49.1-cp39-cp39-win_amd64.whl", hash = "sha256:a62977cc83d9b38d0e5fbe37596cd4354ec838d3d52427f5d7fb193724c10819"}, -] - -[package.dependencies] -click = ">=7.0" +markers = "python_full_version > \"3.9.0\"" +files = [ + {file = "ray-2.52.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:993194a8be70540e0f819862031bbf19a64401fbe6c31b42065fd313ba466d34"}, + {file = "ray-2.52.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:65bf461fdfe4ffa667c46f9455f8740b2ad6c1fa471b461d5f5cf6b7baf177b5"}, + {file = "ray-2.52.1-cp310-cp310-manylinux2014_x86_64.whl", hash = 
"sha256:b3f9e61b799fb3cc8fd7077a3d2eb676ddfef7db644f6b6a2b657c5c3214cf19"}, + {file = "ray-2.52.1-cp310-cp310-win_amd64.whl", hash = "sha256:24694e60cdc7770b90f123cc578cabb9d1a231c1fe673b5da0027b118de45846"}, + {file = "ray-2.52.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f59e3b2d1a1466ac0778f2c6fac9ccb5f30107d77e3dddd1d60167248d268474"}, + {file = "ray-2.52.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:2b57ef272a2a0a0dbae6d18d70aa541eab620b4fe3b44d50466d3a533c16f9d9"}, + {file = "ray-2.52.1-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:a5a3c268d45060c50cd029979ecc5f1eaaec040b19fa88dd4fe9e927d19ff13e"}, + {file = "ray-2.52.1-cp311-cp311-win_amd64.whl", hash = "sha256:4e8478544fef69a17d865431c0bebdcfeff7c0f76a306f29b73c3bc3cbb0bdb9"}, + {file = "ray-2.52.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:6831592fedf0a122016f5dab4b67d85fa3d4db3b21f588d18834b5c031396d1c"}, + {file = "ray-2.52.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:08eb8f5fd55292ba6bee363a32491136a5e54af54e007f81e0603986fbea41a4"}, + {file = "ray-2.52.1-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:843c0108ad72bb7fc6c23a22e29e6099546a5eaad3ad675c78a146d9080f6ec6"}, + {file = "ray-2.52.1-cp312-cp312-win_amd64.whl", hash = "sha256:8045172ad3fcff62b9dab9a4cd2e0991ad0e27fc814fe625a8d3a120306651d6"}, + {file = "ray-2.52.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:b5bc29548abb0a0a7ae9e6ff3b0ccca2824edaf011a4336e15a32793d574fbfd"}, + {file = "ray-2.52.1-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:e3826aeb4e4399de0c6885bd8be7ce2f629fa0010f0013f1183e0726b3d25e40"}, + {file = "ray-2.52.1-cp313-cp313-manylinux2014_x86_64.whl", hash = "sha256:bbe492c780a39a64bd3d0766cad10d54cf12222df88d287ec2d8f2d52de37c79"}, +] + +[package.dependencies] +click = ">=7.0,<8.3.dev0 || >=8.4.dev0" filelock = "*" jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" @@ -2356,20 +2407,20 @@ requests = "*" adag = ["cupy-cuda12x ; sys_platform != \"darwin\""] air = 
["aiohttp (>=3.7)", "aiohttp_cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "numpy (>=1.20)", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk (>=1.30.0)", "pandas", "pandas (>=1.3)", "prometheus_client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart_open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] all = ["aiohttp (>=3.7)", "aiohttp_cors", "celery", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm_tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.1.1)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk (>=1.30.0)", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus_client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "requests", "scipy", "smart_open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all-cpp = ["aiohttp (>=3.7)", "aiohttp_cors", "celery", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm_tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.1.1)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-exporter-prometheus", 
"opentelemetry-proto", "opentelemetry-sdk (>=1.30.0)", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus_client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.49.1)", "requests", "scipy", "smart_open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all-cpp = ["aiohttp (>=3.7)", "aiohttp_cors", "celery", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm_tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.1.1)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk (>=1.30.0)", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus_client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.52.1)", "requests", "scipy", "smart_open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] cgraph = ["cupy-cuda12x ; sys_platform != \"darwin\""] client = ["grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\""] -cpp = ["ray-cpp (==2.49.1)"] +cpp = ["ray-cpp (==2.52.1)"] data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=9.0.0)"] default = ["aiohttp (>=3.7)", "aiohttp_cors", "colorful", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk (>=1.30.0)", "prometheus_client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version 
< \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart_open", "virtualenv (>=20.0.24,!=20.21.1)"] -llm = ["aiohttp (>=3.7)", "aiohttp_cors", "async-timeout ; python_version < \"3.11\"", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "hf_transfer", "jsonref (>=1.1.0)", "jsonschema", "ninja", "numpy (>=1.20)", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk (>=1.30.0)", "pandas (>=1.3)", "prometheus_client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart_open", "starlette", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "vllm (>=0.10.0)", "watchfiles"] +llm = ["aiohttp (>=3.7)", "aiohttp_cors", "async-timeout ; python_version < \"3.11\"", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "hf_transfer", "jsonref (>=1.1.0)", "jsonschema", "meson", "ninja", "nixl (>=0.6.1)", "numpy (>=1.20)", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk (>=1.30.0)", "pandas (>=1.3)", "prometheus_client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (>=9.0.0)", "pybind11", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart_open", "starlette", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "vllm[audio] (>=0.11.0)", "watchfiles"] observability = ["memray ; sys_platform != \"win32\""] -rllib = ["dm_tree", "fsspec", "gymnasium (==1.1.1)", "lz4", "ormsgpack (==1.7.0)", "pandas", "pyarrow (>=9.0.0)", "pyyaml", "requests", "scipy", "tensorboardX (>=1.9)"] +rllib = ["dm_tree", "fsspec", "gymnasium (==1.1.1)", "lz4", 
"ormsgpack (==1.7.0)", "pandas", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "requests", "scipy", "tensorboardX (>=1.9)"] serve = ["aiohttp (>=3.7)", "aiohttp_cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk (>=1.30.0)", "prometheus_client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart_open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] serve-async-inference = ["aiohttp (>=3.7)", "aiohttp_cors", "celery", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk (>=1.30.0)", "prometheus_client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart_open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] serve-grpc = ["aiohttp (>=3.7)", "aiohttp_cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk (>=1.30.0)", "prometheus_client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart_open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -train = ["fsspec", "pandas", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "tensorboardX (>=1.9)"] -tune = ["fsspec", 
"pandas", "pyarrow (>=9.0.0)", "requests", "tensorboardX (>=1.9)"] +train = ["fsspec", "pandas", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "tensorboardX (>=1.9)"] +tune = ["fsspec", "pandas", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "tensorboardX (>=1.9)"] [[package]] name = "referencing" @@ -2394,7 +2445,7 @@ version = "2.32.5" description = "Python HTTP for Humans." optional = false python-versions = ">=3.9" -groups = ["main", "dev", "docs"] +groups = ["dev", "docs"] files = [ {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, @@ -2613,44 +2664,44 @@ files = [ [[package]] name = "ruff" -version = "0.12.12" +version = "0.14.6" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "ruff-0.12.12-py3-none-linux_armv6l.whl", hash = "sha256:de1c4b916d98ab289818e55ce481e2cacfaad7710b01d1f990c497edf217dafc"}, - {file = "ruff-0.12.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7acd6045e87fac75a0b0cdedacf9ab3e1ad9d929d149785903cff9bb69ad9727"}, - {file = "ruff-0.12.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:abf4073688d7d6da16611f2f126be86523a8ec4343d15d276c614bda8ec44edb"}, - {file = "ruff-0.12.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:968e77094b1d7a576992ac078557d1439df678a34c6fe02fd979f973af167577"}, - {file = "ruff-0.12.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42a67d16e5b1ffc6d21c5f67851e0e769517fb57a8ebad1d0781b30888aa704e"}, - {file = "ruff-0.12.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b216ec0a0674e4b1214dcc998a5088e54eaf39417327b19ffefba1c4a1e4971e"}, - {file = "ruff-0.12.12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:59f909c0fdd8f1dcdbfed0b9569b8bf428cf144bec87d9de298dcd4723f5bee8"}, - {file = "ruff-0.12.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ac93d87047e765336f0c18eacad51dad0c1c33c9df7484c40f98e1d773876f5"}, - {file = "ruff-0.12.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01543c137fd3650d322922e8b14cc133b8ea734617c4891c5a9fccf4bfc9aa92"}, - {file = "ruff-0.12.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afc2fa864197634e549d87fb1e7b6feb01df0a80fd510d6489e1ce8c0b1cc45"}, - {file = "ruff-0.12.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0c0945246f5ad776cb8925e36af2438e66188d2b57d9cf2eed2c382c58b371e5"}, - {file = "ruff-0.12.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0fbafe8c58e37aae28b84a80ba1817f2ea552e9450156018a478bf1fa80f4e4"}, - {file = "ruff-0.12.12-py3-none-musllinux_1_2_armv7l.whl", hash = 
"sha256:b9c456fb2fc8e1282affa932c9e40f5ec31ec9cbb66751a316bd131273b57c23"}, - {file = "ruff-0.12.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5f12856123b0ad0147d90b3961f5c90e7427f9acd4b40050705499c98983f489"}, - {file = "ruff-0.12.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:26a1b5a2bf7dd2c47e3b46d077cd9c0fc3b93e6c6cc9ed750bd312ae9dc302ee"}, - {file = "ruff-0.12.12-py3-none-win32.whl", hash = "sha256:173be2bfc142af07a01e3a759aba6f7791aa47acf3604f610b1c36db888df7b1"}, - {file = "ruff-0.12.12-py3-none-win_amd64.whl", hash = "sha256:e99620bf01884e5f38611934c09dd194eb665b0109104acae3ba6102b600fd0d"}, - {file = "ruff-0.12.12-py3-none-win_arm64.whl", hash = "sha256:2a8199cab4ce4d72d158319b63370abf60991495fb733db96cd923a34c52d093"}, - {file = "ruff-0.12.12.tar.gz", hash = "sha256:b86cd3415dbe31b3b46a71c598f4c4b2f550346d1ccf6326b347cc0c8fd063d6"}, + {file = "ruff-0.14.6-py3-none-linux_armv6l.whl", hash = "sha256:d724ac2f1c240dbd01a2ae98db5d1d9a5e1d9e96eba999d1c48e30062df578a3"}, + {file = "ruff-0.14.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9f7539ea257aa4d07b7ce87aed580e485c40143f2473ff2f2b75aee003186004"}, + {file = "ruff-0.14.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7f6007e55b90a2a7e93083ba48a9f23c3158c433591c33ee2e99a49b889c6332"}, + {file = "ruff-0.14.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8e7b9d73d8728b68f632aa8e824ef041d068d231d8dbc7808532d3629a6bef"}, + {file = "ruff-0.14.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d50d45d4553a3ebcbd33e7c5e0fe6ca4aafd9a9122492de357205c2c48f00775"}, + {file = "ruff-0.14.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:118548dd121f8a21bfa8ab2c5b80e5b4aed67ead4b7567790962554f38e598ce"}, + {file = "ruff-0.14.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:57256efafbfefcb8748df9d1d766062f62b20150691021f8ab79e2d919f7c11f"}, + {file = 
"ruff-0.14.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff18134841e5c68f8e5df1999a64429a02d5549036b394fafbe410f886e1989d"}, + {file = "ruff-0.14.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29c4b7ec1e66a105d5c27bd57fa93203637d66a26d10ca9809dc7fc18ec58440"}, + {file = "ruff-0.14.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167843a6f78680746d7e226f255d920aeed5e4ad9c03258094a2d49d3028b105"}, + {file = "ruff-0.14.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:16a33af621c9c523b1ae006b1b99b159bf5ac7e4b1f20b85b2572455018e0821"}, + {file = "ruff-0.14.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1432ab6e1ae2dc565a7eea707d3b03a0c234ef401482a6f1621bc1f427c2ff55"}, + {file = "ruff-0.14.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c55cfbbe7abb61eb914bfd20683d14cdfb38a6d56c6c66efa55ec6570ee4e71"}, + {file = "ruff-0.14.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:efea3c0f21901a685fff4befda6d61a1bf4cb43de16da87e8226a281d614350b"}, + {file = "ruff-0.14.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:344d97172576d75dc6afc0e9243376dbe1668559c72de1864439c4fc95f78185"}, + {file = "ruff-0.14.6-py3-none-win32.whl", hash = "sha256:00169c0c8b85396516fdd9ce3446c7ca20c2a8f90a77aa945ba6b8f2bfe99e85"}, + {file = "ruff-0.14.6-py3-none-win_amd64.whl", hash = "sha256:390e6480c5e3659f8a4c8d6a0373027820419ac14fa0d2713bd8e6c3e125b8b9"}, + {file = "ruff-0.14.6-py3-none-win_arm64.whl", hash = "sha256:d43c81fbeae52cfa8728d8766bbf46ee4298c888072105815b392da70ca836b2"}, + {file = "ruff-0.14.6.tar.gz", hash = "sha256:6f0c742ca6a7783a736b867a263b9a7a80a45ce9bee391eeda296895f1b4e1cc"}, ] [[package]] name = "s3transfer" -version = "0.13.0" +version = "0.15.0" description = "An Amazon S3 Transfer Manager" optional = true python-versions = ">=3.9" groups = ["main"] markers = "extra == \"boto3\"" files = [ - {file = "s3transfer-0.13.0-py3-none-any.whl", hash = 
"sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be"}, - {file = "s3transfer-0.13.0.tar.gz", hash = "sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177"}, + {file = "s3transfer-0.15.0-py3-none-any.whl", hash = "sha256:6f8bf5caa31a0865c4081186689db1b2534cef721d104eb26101de4b9d6a5852"}, + {file = "s3transfer-0.15.0.tar.gz", hash = "sha256:d36fac8d0e3603eff9b5bfa4282c7ce6feb0301a633566153cbd0b93d11d8379"}, ] [package.dependencies] @@ -2661,21 +2712,21 @@ crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"] [[package]] name = "schemathesis" -version = "4.1.4" +version = "4.3.6" description = "Property-based testing framework for Open API and GraphQL based apps" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "schemathesis-4.1.4-py3-none-any.whl", hash = "sha256:49ab86d79be871a38df0b12712f79e639cf1dc2405fe3dd1c1d2e84c6655ceb7"}, - {file = "schemathesis-4.1.4.tar.gz", hash = "sha256:ab42836b4e082910d7c8456c1e9cb9e3852a7dc8c248ade263ac92453880a2e7"}, + {file = "schemathesis-4.3.6-py3-none-any.whl", hash = "sha256:d4e926371efd4cae80ae36581e7a7a2abd2bf603c183f1c1205c7cfac7dfec79"}, + {file = "schemathesis-4.3.6.tar.gz", hash = "sha256:240ebe8ed5531cc97ba4f3eddba1873e536b6f76f0f1857e174d616f0670ddce"}, ] [package.dependencies] backoff = ">=2.1.2,<3.0" click = ">=8.0,<9" colorama = ">=0.4,<1.0" -harfile = ">=0.3.1,<1.0" +harfile = ">=0.4.0,<1.0" httpx = ">=0.22.0,<1.0" hypothesis = ">=6.108.0,<7" hypothesis-graphql = ">=0.11.1,<1" @@ -2722,6 +2773,19 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", 
"pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = true +python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"hf\"" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + [[package]] name = "six" version = "1.16.0" @@ -2740,11 +2804,12 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" -groups = ["dev", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +markers = {main = "extra == \"hf\""} [[package]] name = "snowballstemmer" @@ -2972,14 +3037,14 @@ test = ["pytest"] [[package]] name = "starlette" -version = "0.47.2" +version = "0.49.1" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.9" groups = ["dev", "docs"] files = [ - {file = "starlette-0.47.2-py3-none-any.whl", hash = "sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b"}, - {file = "starlette-0.47.2.tar.gz", hash = "sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8"}, + {file = "starlette-0.49.1-py3-none-any.whl", hash = "sha256:d92ce9f07e4a3caa3ac13a79523bd18e3bc0042bb8ff2d759a8e7dd0e1859875"}, + {file = "starlette-0.49.1.tar.gz", hash = "sha256:481a43b71e24ed8c43b11ea02f5353d77840e01480881b8cb5a26b8cae64a8cb"}, ] [package.dependencies] @@ -3070,40 +3135,60 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "typer-slim" +version = "0.20.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = true +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"hf\"" +files = [ + {file = "typer_slim-0.20.0-py3-none-any.whl", hash = "sha256:f42a9b7571a12b97dddf364745d29f12221865acef7a2680065f9bb29c7dc89d"}, + {file = "typer_slim-0.20.0.tar.gz", hash = "sha256:9fc6607b3c6c20f5c33ea9590cbeb17848667c51feee27d9e314a579ab07d1a3"}, +] + +[package.dependencies] +click = ">=8.0.0" +typing-extensions = ">=3.7.4.3" + +[package.extras] +standard = ["rich (>=10.11.0)", "shellingham (>=1.3.0)"] + [[package]] name = "types-python-dateutil" -version = "2.9.0.20250822" +version = "2.9.0.20251108" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_python_dateutil-2.9.0.20250822-py3-none-any.whl", hash = "sha256:849d52b737e10a6dc6621d2bd7940ec7c65fcb69e6aa2882acf4e56b2b508ddc"}, - {file = "types_python_dateutil-2.9.0.20250822.tar.gz", hash = "sha256:84c92c34bd8e68b117bff742bc00b692a1e8531262d4507b33afcc9f7716cd53"}, + {file = "types_python_dateutil-2.9.0.20251108-py3-none-any.whl", hash = 
"sha256:a4a537f0ea7126f8ccc2763eec9aa31ac8609e3c8e530eb2ddc5ee234b3cd764"}, + {file = "types_python_dateutil-2.9.0.20251108.tar.gz", hash = "sha256:d8a6687e197f2fa71779ce36176c666841f811368710ab8d274b876424ebfcaa"}, ] [[package]] name = "typing-extensions" -version = "4.14.1" +version = "4.15.0" description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" groups = ["main", "dev", "docs"] files = [ - {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, - {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] [[package]] name = "typing-inspection" -version = "0.4.0" +version = "0.4.2" description = "Runtime typing introspection tools" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, - {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, ] [package.dependencies] @@ -3150,6 +3235,7 @@ files = [ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, {file = "urllib3-1.26.20.tar.gz", hash = 
"sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] +markers = {main = "extra == \"boto3\""} [package.extras] brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] @@ -3178,55 +3264,67 @@ standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.5.0) [[package]] name = "uvloop" -version = "0.21.0" +version = "0.22.1" description = "Fast implementation of asyncio event loop on top of libuv" optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.8.1" groups = ["dev"] files = [ - {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, - {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, - {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, - {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, - {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, - {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, - {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, - {file = 
"uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, - {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, - {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, - {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, - {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, - {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, - {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, - {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, - {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, - {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, - {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, - {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, - {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, - 
{file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, - {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, - {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, - {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, - {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"}, - {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"}, - {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"}, - {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"}, - {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"}, - {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"}, - {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, - {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, - {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, - {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, - {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, - {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, - {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, + {file = "uvloop-0.22.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ef6f0d4cc8a9fa1f6a910230cd53545d9a14479311e87e3cb225495952eb672c"}, + {file = "uvloop-0.22.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7cd375a12b71d33d46af85a3343b35d98e8116134ba404bd657b3b1d15988792"}, + {file = "uvloop-0.22.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac33ed96229b7790eb729702751c0e93ac5bc3bcf52ae9eccbff30da09194b86"}, + {file = "uvloop-0.22.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:481c990a7abe2c6f4fc3d98781cc9426ebd7f03a9aaa7eb03d3bfc68ac2a46bd"}, + {file = "uvloop-0.22.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a592b043a47ad17911add5fbd087c76716d7c9ccc1d64ec9249ceafd735f03c2"}, + {file = "uvloop-0.22.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1489cf791aa7b6e8c8be1c5a080bae3a672791fcb4e9e12249b05862a2ca9cec"}, + {file = "uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9"}, + {file = "uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77"}, + {file = 
"uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21"}, + {file = "uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702"}, + {file = "uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733"}, + {file = "uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473"}, + {file = "uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42"}, + {file = "uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6"}, + {file = "uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370"}, + {file = "uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4"}, + {file = "uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2"}, + {file = "uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0"}, + {file = "uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705"}, + {file = "uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8"}, + {file = 
"uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d"}, + {file = "uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e"}, + {file = "uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e"}, + {file = "uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad"}, + {file = "uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142"}, + {file = "uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74"}, + {file = "uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35"}, + {file = "uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25"}, + {file = "uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6"}, + {file = "uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079"}, + {file = "uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289"}, + {file = "uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3"}, + {file = 
"uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c"}, + {file = "uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21"}, + {file = "uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88"}, + {file = "uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e"}, + {file = "uvloop-0.22.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:80eee091fe128e425177fbd82f8635769e2f32ec9daf6468286ec57ec0313efa"}, + {file = "uvloop-0.22.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:017bd46f9e7b78e81606329d07141d3da446f8798c6baeec124260e22c262772"}, + {file = "uvloop-0.22.1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3e5c6727a57cb6558592a95019e504f605d1c54eb86463ee9f7a2dbd411c820"}, + {file = "uvloop-0.22.1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:57df59d8b48feb0e613d9b1f5e57b7532e97cbaf0d61f7aa9aa32221e84bc4b6"}, + {file = "uvloop-0.22.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:55502bc2c653ed2e9692e8c55cb95b397d33f9f2911e929dc97c4d6b26d04242"}, + {file = "uvloop-0.22.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4a968a72422a097b09042d5fa2c5c590251ad484acf910a651b4b620acd7f193"}, + {file = "uvloop-0.22.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b45649628d816c030dba3c80f8e2689bab1c89518ed10d426036cdc47874dfc4"}, + {file = "uvloop-0.22.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ea721dd3203b809039fcc2983f14608dae82b212288b346e0bfe46ec2fab0b7c"}, + {file = 
"uvloop-0.22.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ae676de143db2b2f60a9696d7eca5bb9d0dd6cc3ac3dad59a8ae7e95f9e1b54"}, + {file = "uvloop-0.22.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:17d4e97258b0172dfa107b89aa1eeba3016f4b1974ce85ca3ef6a66b35cbf659"}, + {file = "uvloop-0.22.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:05e4b5f86e621cf3927631789999e697e58f0d2d32675b67d9ca9eb0bca55743"}, + {file = "uvloop-0.22.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:286322a90bea1f9422a470d5d2ad82d38080be0a29c4dd9b3e6384320a4d11e7"}, + {file = "uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f"}, ] [package.extras] dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] -docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx_rtd_theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=6.1,<7.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=25.3.0,<25.4.0)", "pycodestyle (>=2.11.0,<2.12.0)"] [[package]] name = "watchfiles" @@ -3532,7 +3630,7 @@ description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "python_version < \"3.10\"" +markers = "python_version == \"3.9\"" files = [ {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"}, {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"}, @@ -3550,4 +3648,4 @@ olot = ["olot"] [metadata] lock-version = "2.1" python-versions = ">= 3.9, < 4.0" 
-content-hash = "0b5224086e02f5926521d135054ca8c452375cabafa413c32d84f867c94eebf7" +content-hash = "f4af3060de942411733ce8a88d160cfa4ac489e5d288b44b79dbb0e27458135b" diff --git a/clients/python/pyproject.toml b/clients/python/pyproject.toml index 38e9165cd4..c76345e2b2 100644 --- a/clients/python/pyproject.toml +++ b/clients/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "model-registry" -version = "0.3.1" +version = "0.3.4" description = "Client for Kubeflow Model Registry" authors = ["Isabella Basso do Amaral "] license = "Apache-2.0" @@ -25,9 +25,9 @@ aiohttp-retry = "^2.8.3" # allows for reentrant event loops (used for sync client) nest-asyncio = "^1.6.0" # necessary for modern type annotations using pydantic on 3.9 -eval-type-backport = "^0.2.0" +eval-type-backport = ">=0.2,<0.4" -huggingface-hub = { version = ">=0.20.1,<0.35.0", optional = true } +huggingface-hub = { version = ">=0.20.1,<1.2.0", optional = true } olot = { version = "^0.1.6", optional = true } boto3 = { version = "^1.37.34", optional = true } @@ -50,19 +50,19 @@ pytest = ">=7.4.2,<9.0.0" coverage = { extras = ["toml"], version = "^7.3.2" } pytest-cov = ">=4.1,<8.0" pytest-mock = ">=3.7.0" -ruff = ">=0.5.2,<0.13.0" +pytest-xdist = "^3.5.0" +ruff = ">=0.5.2,<0.15.0" mypy = "^1.7.0" -# atm Ray is only available <3.13, so we will E2E test using Ray in compatible py environments. 
+# E2E test using Ray in compatible py environments: ray = [ - {version = "^2.43.0", python = ">=3.9, <3.13"} + {version = "^2.52.1", python = ">3.9"} ] -uvloop = "^0.21.0" +uvloop = ">=0.21,<0.23" pytest-asyncio = "^1.1.0" requests = "^2.32.2" -black = ">=24.4.2,<26.0.0" types-python-dateutil = "^2.9.0.20240906" pytest-html = "^4.1.1" -schemathesis = ">=4.0.3" +schemathesis = ">=4.0.3,<=4.3.6" [tool.coverage.run] branch = true @@ -79,19 +79,21 @@ show_missing = true requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" -[tool.black] -line-length = 119 - [tool.pytest.ini_options] asyncio_mode = "auto" markers = [ "e2e: end-to-end testing", "fuzz: mark a test as a fuzzing (property-based or randomized) test" ] +# Note: Parallel execution is enabled in noxfile.py for fuzz tests via "-n auto" +# To run fuzz tests manually in parallel: pytest tests/fuzz_api --fuzz -n auto +addopts = "--tb=short --strict-markers" [tool.ruff] target-version = "py39" respect-gitignore = true +line-length = 119 +extend-exclude = ["src/mr_openapi/"] [tool.ruff.lint] select = [ @@ -135,3 +137,13 @@ show_column_numbers = true show_error_codes = true show_error_context = true ignore_missing_imports = true +check_untyped_defs = true +warn_unused_configs = true +warn_redundant_casts = true +warn_unused_ignores = true +warn_no_return = true +exclude_gitignore = true + +[[tool.mypy.overrides]] +module = "mr_openapi.*" +ignore_errors = true diff --git a/clients/python/schemathesis.toml b/clients/python/schemathesis.toml index 4a649b6a90..89479b8661 100644 --- a/clients/python/schemathesis.toml +++ b/clients/python/schemathesis.toml @@ -1,5 +1,5 @@ base-url = "${API_HOST}" - +tls-verify = true [generation] # Don't shrink failing examples to save time -no-shrink = true \ No newline at end of file +no-shrink = true diff --git a/clients/python/src/.openapi-generator/VERSION b/clients/python/src/.openapi-generator/VERSION index 93c8ddab9f..6328c5424a 100644 --- 
a/clients/python/src/.openapi-generator/VERSION +++ b/clients/python/src/.openapi-generator/VERSION @@ -1 +1 @@ -7.6.0 +7.17.0 diff --git a/clients/python/src/model_registry/__init__.py b/clients/python/src/model_registry/__init__.py index 5dff835e46..6ba8b26c0e 100644 --- a/clients/python/src/model_registry/__init__.py +++ b/clients/python/src/model_registry/__init__.py @@ -1,6 +1,6 @@ """Main package for the Kubeflow model registry.""" -__version__ = "0.3.1" +__version__ = "0.3.4" from ._client import ModelRegistry diff --git a/clients/python/src/model_registry/_client.py b/clients/python/src/model_registry/_client.py index 623e351589..e1a7855a20 100644 --- a/clients/python/src/model_registry/_client.py +++ b/clients/python/src/model_registry/_client.py @@ -6,20 +6,20 @@ import inspect import logging import os -from collections.abc import Awaitable, Coroutine, Mapping +from collections.abc import Coroutine, Mapping from dataclasses import asdict from pathlib import Path from typing import ( Any, Callable, TypeVar, - Union, get_args, overload, ) from warnings import warn from model_registry.types.artifacts import ExperimentRunArtifact +from model_registry.types.base import BaseResourceModel from ._experiments import ActiveExperimentRun, RunContext from .core import ModelRegistryAPIClient @@ -46,8 +46,8 @@ save_to_oci_registry, ) -ModelTypes = Union[RegisteredModel, ModelVersion, ModelArtifact, Experiment] -TModel = TypeVar("TModel", bound=ModelTypes) +TModel = TypeVar("TModel", bound=BaseResourceModel) +T = TypeVar("T") logging.basicConfig( format="%(asctime)s.%(msecs)03d - %(name)s:%(levelname)s: %(message)s", @@ -72,7 +72,7 @@ logger = logging.getLogger("model-registry") DEFAULT_USER_TOKEN_ENVVAR = "KF_PIPELINES_SA_TOKEN_PATH" # noqa: S105 -DEFAULT_K8S_SA_TOKEN_PATH = "/var/run/secrets/kubernetes.io/serviceaccount/token" # noqa: S105 +DEFAULT_K8S_SA_TOKEN_PATH = "/var/run/secrets/kubernetes.io/serviceaccount/token" # noqa: S105 class ModelRegistry: @@ -86,11 
+86,11 @@ def __init__( # noqa: C901 author: str, is_secure: bool = True, user_token: str | None = None, - user_token_envvar: str = DEFAULT_USER_TOKEN_ENVVAR, + user_token_envvar: str | None = None, custom_ca: str | None = None, custom_ca_envvar: str | None = None, log_level: int = logging.WARNING, - async_runner: Callable[[Coroutine[Any, Any, Any]], Any] = None, + async_runner: Callable[[Coroutine[Any, Any, T]], T] | None = None, ): """Constructor. @@ -124,28 +124,14 @@ def __init__( # noqa: C901 logger.debug("Setting up reentrant async event loop") nest_asyncio.apply() - if not user_token and user_token_envvar: - logger.info("Reading user token from %s", user_token_envvar) - # /var/run/secrets/kubernetes.io/serviceaccount/token - if sa_token := os.environ.get(user_token_envvar): - if user_token_envvar == DEFAULT_USER_TOKEN_ENVVAR: - logger.info( - f"Sourcing user token from default envvar: {DEFAULT_USER_TOKEN_ENVVAR}" - ) - user_token = Path(sa_token).read_text() - elif Path(DEFAULT_K8S_SA_TOKEN_PATH).exists(): - user_token = Path(DEFAULT_K8S_SA_TOKEN_PATH).read_text() - logger.info("Sourced user token from K8s default path: %s.", DEFAULT_K8S_SA_TOKEN_PATH) - else: + if not user_token: + user_token = self._get_user_token(user_token_envvar) + if not user_token: warn("User access token is missing", stacklevel=2) self.hint_server_address_port(server_address, port) if is_secure: - if ( - not custom_ca - and custom_ca_envvar - and (cert := os.getenv(custom_ca_envvar)) - ): + if not custom_ca and custom_ca_envvar and (cert := os.getenv(custom_ca_envvar)): logger.info( "Using custom CA envvar %s", custom_ca_envvar, @@ -161,12 +147,47 @@ def __init__( # noqa: C901 server_address, port, user_token=user_token, custom_ca=custom_ca ) else: - self._api = ModelRegistryAPIClient.insecure_connection( - server_address, port, user_token - ) + self._api = ModelRegistryAPIClient.insecure_connection(server_address, port, user_token) self._active_experiment_context = 
ThreadSafeVariable(value=RunContext()) self.get_registered_models().page_size(1)._next_page() + @staticmethod + def _get_user_token(user_token_envvar: str | None = None) -> str | None: + sa_token_path: str + user_provided: bool = True + if user_token_envvar: + try: + sa_token_path = os.environ[user_token_envvar] + except KeyError: + msg = f"user_token_envvar is {user_token_envvar!r} but no such env var is set" + raise ValueError(msg) from None + logger.info( + "Reading user token from path: user_token_envvar %r specifies path %r", + user_token_envvar, + sa_token_path, + ) + elif DEFAULT_USER_TOKEN_ENVVAR in os.environ: + sa_token_path = os.environ[DEFAULT_USER_TOKEN_ENVVAR] + logger.info( + "Reading user token from path: The default user token env var value %r specifies path %r", + DEFAULT_USER_TOKEN_ENVVAR, + sa_token_path, + ) + else: + sa_token_path = DEFAULT_K8S_SA_TOKEN_PATH + user_provided = False + logger.info( + "Reading user token from path: No user_token_envvar. Attempting to read from default K8s service account path %r", + DEFAULT_K8S_SA_TOKEN_PATH, + ) + try: + return Path(sa_token_path).read_text() + except OSError as exc: + msg = f"Unable read user token from {sa_token_path!r}" + if user_provided: + raise StoreError(msg) from exc + logger.info(msg) + return None @staticmethod def hint_server_address_port(server_address: str, port: int) -> None: @@ -180,8 +201,16 @@ def hint_server_address_port(server_address: str, port: int) -> None: "Server address protocol is http://, but port is not 80 or ending with 80. You may want to verify the configuration is correct." ) + @overload + def async_runner(self, coro: Coroutine[Any, Any, TModel]) -> TModel: ... + + @overload + def async_runner(self, coro: Coroutine[Any, Any, list[TModel]]) -> list[TModel]: ... + + @overload + def async_runner(self, coro: Coroutine[Any, Any, TModel | None]) -> TModel | None: ... 
- def async_runner(self, coro: Awaitable[TModel]) -> TModel: + def async_runner(self, coro: Coroutine[Any, Any, T]) -> T: if hasattr(self, "_user_async_runner"): return self._user_async_runner(coro) @@ -198,29 +227,19 @@ async def _register_model(self, name: str, **kwargs) -> RegisteredModel: if rm := await self._api.get_registered_model_by_params(name): return rm - return await self._api.upsert_registered_model( - RegisteredModel(name=name, **kwargs) - ) + return await self._api.upsert_registered_model(RegisteredModel(name=name, **kwargs)) - async def _register_new_version( - self, rm: RegisteredModel, version: str, author: str, /, **kwargs - ) -> ModelVersion: + async def _register_new_version(self, rm: RegisteredModel, version: str, author: str, /, **kwargs) -> ModelVersion: assert rm.id is not None, "Registered model must have an ID" if await self._api.get_model_version_by_params(rm.id, version): msg = f"Version {version} already exists" raise StoreError(msg) - return await self._api.upsert_model_version( - ModelVersion(name=version, author=author, **kwargs), rm.id - ) + return await self._api.upsert_model_version(ModelVersion(name=version, author=author, **kwargs), rm.id) - async def _register_model_artifact( - self, mv: ModelVersion, name: str, uri: str, /, **kwargs - ) -> ModelArtifact: + async def _register_model_artifact(self, mv: ModelVersion, name: str, uri: str, /, **kwargs) -> ModelArtifact: assert mv.id is not None, "Model version must have an ID" - return await self._api.upsert_model_version_artifact( - ModelArtifact(name=name, uri=uri, **kwargs), mv.id - ) + return await self._api.upsert_model_version_artifact(ModelArtifact(name=name, uri=uri, **kwargs), mv.id) def upload_artifact_and_register_model( self, @@ -276,9 +295,7 @@ def upload_artifact_and_register_model( raise StoreError(msg) if isinstance(upload_params, S3Params): - destination_uri = self.save_to_s3( - **asdict(upload_params), path=model_files_path - ) + destination_uri = 
self.save_to_s3(**asdict(upload_params), path=model_files_path) elif isinstance(upload_params, OCIParams): dict_params = asdict(upload_params) del dict_params["custom_oci_backend"] @@ -396,14 +413,14 @@ def update(self, model: TModel) -> TModel: if not model.id: msg = "Model must have an ID" raise StoreError(msg) - if not isinstance(model, get_args(ModelTypes)): - msg = f"Model must be one of {get_args(ModelTypes)}" + if not isinstance(model, BaseResourceModel): + msg = f"Model must be an instance of {BaseResourceModel.__name__} or a subclass" raise StoreError(msg) if isinstance(model, RegisteredModel): - return self.async_runner(self._api.upsert_registered_model(model)) + return self.async_runner(self._api.upsert_registered_model(model)) # type: ignore[return-value] if isinstance(model, ModelVersion): - return self.async_runner(self._api.upsert_model_version(model, None)) - return self.async_runner(self._api.upsert_model_artifact(model)) + return self.async_runner(self._api.upsert_model_version(model, None)) # type: ignore[return-value] + return self.async_runner(self._api.upsert_model_artifact(model)) # type: ignore[arg-type,return-value] def register_hf_model( self, @@ -690,26 +707,18 @@ def start_experiment_run( self._validate_nested_run(active_ctx, nested) # Resolve experiment details - exp_name, exp_id = self._resolve_experiment_info( - experiment_name, experiment_id, active_ctx, nested - ) + exp_name, exp_id = self._resolve_experiment_info(experiment_name, experiment_id, active_ctx, nested) # Get or create experiment - experiment = self._get_or_create_experiment( - exp_name, exp_id, owner, description - ) + experiment = self._get_or_create_experiment(exp_name, exp_id, owner, description) # Get or create run - parent_props = ( - self._get_parent_properties(active_ctx, nested_tag) if nested else {} - ) - exp_run = self._get_or_create_run( - experiment, run_name, run_id, run_description, parent_props, nested - ) + parent_props = 
self._get_parent_properties(active_ctx, nested_tag) if nested else {} # type: ignore[arg-type] + exp_run = self._get_or_create_run(experiment, run_name, run_id, run_description, parent_props, nested) # Update context if not nested if not active_ctx.active: - self._set_active_context(experiment.id, exp_name, exp_run.id) + self._set_active_context(experiment.id, exp_name, exp_run.id) # type: ignore[arg-type] return ActiveExperimentRun( thread_safe_ctx=self._active_experiment_context, @@ -768,7 +777,7 @@ def _get_or_create_experiment( exp = self.async_runner( self._api.upsert_experiment( Experiment( - name=exp_name, + name=exp_name, # type: ignore[arg-type] owner=owner, description=description, ) @@ -801,8 +810,8 @@ def _get_or_create_run( if run_name: exp_run = self.async_runner( self._api.get_experiment_run_by_experiment_and_run_name( - run_id=run_id, - **exp_run_args, + run_name=run_name, + **exp_run_args, # type: ignore[arg-type] ) ) elif run_id: @@ -817,7 +826,7 @@ def _get_or_create_run( exp_run = self.async_runner( self._api.upsert_experiment_run( ExperimentRun( - experiment_id=experiment.id, + experiment_id=experiment.id, # type: ignore[arg-type] name=generate_name("run"), description=run_description, custom_properties=parent_props, @@ -825,9 +834,7 @@ def _get_or_create_run( ) ) prefix = "Nested " if nested else "" - print( - f"{prefix}Experiment Run {exp_run.name} created with ID: {exp_run.id}" - ) + print(f"{prefix}Experiment Run {exp_run.name} created with ID: {exp_run.id}") return exp_run @@ -870,7 +877,7 @@ def get_experiment_runs(self, experiment_id: str) -> Pager[ExperimentRun]: ... @overload def get_experiment_runs(self, experiment_name: str) -> Pager[ExperimentRun]: ... 
- @required_args(("experiment_id",), ("experiment_name",)) + @required_args(("experiment_id",), ("experiment_name",)) # type: ignore[misc] def get_experiment_runs( self, experiment_id: str | None = None, experiment_name: str | None = None ) -> Pager[ExperimentRun]: @@ -882,16 +889,8 @@ def get_experiment_runs( def exp_run_list(options: ListOptions) -> list[ExperimentRun]: if experiment_id: - return self.async_runner( - self._api.get_experiment_runs_by_experiment_id( - experiment_id, options - ) - ) - return self.async_runner( - self._api.get_experiment_runs_by_experiment_name( - experiment_name, options - ) - ) + return self.async_runner(self._api.get_experiment_runs_by_experiment_id(experiment_id, options)) + return self.async_runner(self._api.get_experiment_runs_by_experiment_name(experiment_name, options)) # type: ignore[arg-type,type-var] return Pager[ExperimentRun](exp_run_list) @@ -915,7 +914,7 @@ def get_experiment_run_logs( experiment_id: str, ) -> Pager[ExperimentRunArtifact]: ... 
- @required_args( + @required_args( # type: ignore[misc] ("run_id",), ( "run_name", @@ -948,9 +947,7 @@ def get_experiment_run_logs( def exp_run_logs(options: ListOptions) -> list[ExperimentRunArtifact]: if run_id: return self.async_runner( - self._api.get_artifacts_by_experiment_run_params( - run_id=run_id, options=options - ) + self._api.get_artifacts_by_experiment_run_params(run_id=run_id, options=options) ) if run_name and experiment_name: return self.async_runner( @@ -963,10 +960,10 @@ def exp_run_logs(options: ListOptions) -> list[ExperimentRunArtifact]: if run_name and experiment_id: return self.async_runner( self._api.get_artifacts_by_experiment_run_params( - experiment_id=experiment_id, options=options + run_name=run_name, experiment_id=experiment_id, options=options ) ) - return None + return None # type: ignore[return-value] return Pager[ExperimentRunArtifact](exp_run_logs) diff --git a/clients/python/src/model_registry/_experiments.py b/clients/python/src/model_registry/_experiments.py index 26dac39cdf..026d3fb54b 100644 --- a/clients/python/src/model_registry/_experiments.py +++ b/clients/python/src/model_registry/_experiments.py @@ -4,6 +4,7 @@ from contextlib import AbstractContextManager from dataclasses import dataclass from typing import Any, Callable, Literal +from types import TracebackType from model_registry.core import ModelRegistryAPIClient from model_registry.exceptions import StoreError @@ -49,7 +50,7 @@ def __init__( self._thread_safe_ctx = thread_safe_ctx self._exp_run = experiment_run self.info = RunInfo( - id=experiment_run.id, + id=experiment_run.id, # type: ignore[arg-type] name=experiment_run.name, experiment_id=experiment_run.experiment_id, ) @@ -57,25 +58,28 @@ def __init__( self.__async_runner = async_runner self._logs: ExperimentRunArtifactTypes = ExperimentRunArtifactTypes() - def __enter__(self): + def __enter__(self) -> ActiveExperimentRun: return self - def __exit__(self, exc_type, exc_value, traceback): + def __exit__( + 
self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: """Exit the context manager and upsert the logs to the experiment run.""" temp_artifacts: ExperimentRunArtifactTypes = ExperimentRunArtifactTypes() for log in self.get_logs(): server_log = self.__async_runner( - self.__api.upsert_experiment_run_artifact( - experiment_run_id=self.info.id, artifact=log - ) + self.__api.upsert_experiment_run_artifact(experiment_run_id=self.info.id, artifact=log) ) log_type = type(server_log) if log_type is Parameter: - temp_artifacts.params[log.name] = server_log + temp_artifacts.params[log.name] = server_log # type: ignore[index] elif log_type is Metric: - temp_artifacts.metrics[log.name] = server_log + temp_artifacts.metrics[log.name] = server_log # type: ignore[index] elif log_type is DataSet: - temp_artifacts.datasets[log.name] = server_log + temp_artifacts.datasets[log.name] = server_log # type: ignore[index] self._logs = temp_artifacts self._thread_safe_ctx.set(RunContext(active=False)) @@ -133,7 +137,7 @@ def log_metric( value=value, step=step, state=ArtifactState.LIVE, - timestamp=timestamp or str(int(time.time() * 1000)), + timestamp=timestamp or str(int(time.time() * 1000)), # type: ignore[arg-type] description=description, ) @@ -171,7 +175,7 @@ def log_dataset( try: uri = ( upload_to_s3( - s3_auth=s3_auth, + s3_auth=s3_auth, # type: ignore[arg-type] path=file_path, ) if file_path @@ -180,13 +184,13 @@ def log_dataset( except Exception as e: msg = f"Failed to upload dataset to S3: {e}" raise StoreError(msg) from e - self._logs.datasets[name] = DataSet( + self._logs.datasets[name] = DataSet( # type: ignore[index] name=name, uri=uri, source_type=source_type, source=source, - schema=schema, - profile=profile, + schema=schema, # type: ignore[arg-type] + profile=profile, # type: ignore[arg-type] description=description, ) diff --git a/clients/python/src/model_registry/_utils.py 
b/clients/python/src/model_registry/_utils.py index c5fbe82163..c30f2e9bf3 100644 --- a/clients/python/src/model_registry/_utils.py +++ b/clients/python/src/model_registry/_utils.py @@ -93,12 +93,7 @@ def wrapper(*args: object, **kwargs: object) -> object: else: # no break if len(variants) > 1: variations = human_join( - [ - "(" - + human_join([quote(arg) for arg in variant], final="and") - + ")" - for variant in variants - ] + ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants] ) msg = f"Missing required arguments; Expected either {variations} arguments to be given" else: diff --git a/clients/python/src/model_registry/core.py b/clients/python/src/model_registry/core.py index be042ab73f..ea4a0f05e7 100644 --- a/clients/python/src/model_registry/core.py +++ b/clients/python/src/model_registry/core.py @@ -96,9 +96,7 @@ async def get_client(self) -> AsyncIterator[ModelRegistryServiceApi]: finally: await api_client.close() - async def upsert_registered_model( - self, registered_model: RegisteredModel - ) -> RegisteredModel: + async def upsert_registered_model(self, registered_model: RegisteredModel) -> RegisteredModel: """Upsert a registered model. Updates or creates a registered model on the server. 
@@ -111,9 +109,7 @@ async def upsert_registered_model( """ async with self.get_client() as client: if registered_model.id: - rm = await client.update_registered_model( - registered_model.id, registered_model.update() - ) + rm = await client.update_registered_model(registered_model.id, registered_model.update()) else: rm = await client.create_registered_model(registered_model.create()) @@ -157,17 +153,13 @@ async def get_registered_model_by_params( """ async with self.get_client() as client: try: - rm = await client.find_registered_model( - name=name, external_id=external_id - ) + rm = await client.find_registered_model(name=name, external_id=external_id) except mr_exceptions.NotFoundException: return None return RegisteredModel.from_basemodel(rm) - async def get_registered_models( - self, options: ListOptions | None = None - ) -> list[RegisteredModel]: + async def get_registered_models(self, options: ListOptions | None = None) -> list[RegisteredModel]: """Fetch registered models. Args: @@ -177,9 +169,7 @@ async def get_registered_models( Registered models. 
""" async with self.get_client() as client: - rm_list = await client.get_registered_models( - **(options or ListOptions()).as_options() - ) + rm_list = await client.get_registered_models(**(options or ListOptions()).as_options()) if options: options.next_page_token = rm_list.next_page_token @@ -202,22 +192,16 @@ async def upsert_model_version( """ async with self.get_client() as client: if model_version.id: - mv = await client.update_model_version( - model_version.id, model_version.update() - ) + mv = await client.update_model_version(model_version.id, model_version.update()) elif registered_model_id: - mv = await client.create_model_version( - model_version.create(registered_model_id=registered_model_id) - ) + mv = await client.create_model_version(model_version.create(registered_model_id=registered_model_id)) else: msg = f"Registered model ID required for creating a new model version: {model_version}" raise ValueError(msg) return ModelVersion.from_basemodel(mv) - async def get_model_version_by_id( - self, model_version_id: str - ) -> ModelVersion | None: + async def get_model_version_by_id(self, model_version_id: str) -> ModelVersion | None: """Fetch a model version by its ID. Args: @@ -257,9 +241,7 @@ async def get_model_versions( return [ModelVersion.from_basemodel(mv) for mv in mv_list.items or []] @overload - async def get_model_version_by_params( - self, registered_model_id: str, name: str - ): ... + async def get_model_version_by_params(self, registered_model_id: str, name: str): ... @overload async def get_model_version_by_params(self, *, external_id: str): ... @@ -301,9 +283,7 @@ async def get_model_version_by_params( return ModelVersion.from_basemodel(mv) - async def upsert_model_artifact( - self, model_artifact: ModelArtifact - ) -> ModelArtifact: + async def upsert_model_artifact(self, model_artifact: ModelArtifact) -> ModelArtifact: """Upsert a model artifact. Updates or creates a model artifact on the server. 
@@ -319,14 +299,10 @@ async def upsert_model_artifact( if not model_artifact.id: ma = await client.create_model_artifact(model_artifact.create()) else: - ma = await client.update_model_artifact( - model_artifact.id, model_artifact.update() - ) + ma = await client.update_model_artifact(model_artifact.id, model_artifact.update()) return ModelArtifact.from_basemodel(ma) - async def upsert_model_version_artifact( - self, artifact: ArtifactT, model_version_id: str - ) -> ArtifactT: + async def upsert_model_version_artifact(self, artifact: ArtifactT, model_version_id: str) -> ArtifactT: """Creates a model version artifact. Creates a model version artifact on the server. @@ -342,9 +318,7 @@ async def upsert_model_version_artifact( return cast( ArtifactT, Artifact.validate_artifact( - await client.upsert_model_version_artifact( - model_version_id, artifact.wrap() - ) + await client.upsert_model_version_artifact(model_version_id, artifact.wrap()) ), ) @@ -438,9 +412,7 @@ async def get_model_artifacts( models.append(converted) return models - ma_list = await client.get_model_artifacts( - **(options or ListOptions()).as_options() - ) + ma_list = await client.get_model_artifacts(**(options or ListOptions()).as_options()) if options: options.next_page_token = ma_list.next_page_token return [ModelArtifact.from_basemodel(ma) for ma in ma_list.items or []] @@ -479,8 +451,8 @@ async def upsert_experiment(self, experiment: Experiment) -> Experiment: if experiment.id: exp = await client.update_experiment(experiment.id, experiment.update()) elif experiment.name: - if exp := await self.get_experiment_by_name(experiment.name): - exp = await client.update_experiment(exp.id, experiment.update()) + if exp := await self.get_experiment_by_name(experiment.name): # type: ignore[assignment] + exp = await client.update_experiment(exp.id, experiment.update()) # type: ignore[arg-type] else: exp = await client.create_experiment(experiment.create()) return Experiment.from_basemodel(exp) @@ -511,27 
+483,21 @@ async def get_experiment_by_id(self, id: str | int) -> Experiment | None: except mr_exceptions.NotFoundException: return None - return RegisteredModel.from_basemodel(exp) + return RegisteredModel.from_basemodel(exp) # type: ignore[return-value,arg-type] - async def get_experiments( - self, options: ListOptions | None = None - ) -> list[Experiment]: + async def get_experiments(self, options: ListOptions | None = None) -> list[Experiment]: """Fetch experiments. Args: options: Options for listing experiments. """ async with self.get_client() as client: - exp_list = await client.get_experiments( - **(options or ListOptions()).as_options() - ) + exp_list = await client.get_experiments(**(options or ListOptions()).as_options()) if options: options.next_page_token = exp_list.next_page_token return [Experiment.from_basemodel(exp) for exp in exp_list.items or []] - async def upsert_experiment_run( - self, experiment_run: ExperimentRun - ) -> ExperimentRun: + async def upsert_experiment_run(self, experiment_run: ExperimentRun) -> ExperimentRun: """Upsert an experiment run. Updates or creates an experiment run on the server. 
@@ -541,9 +507,7 @@ async def upsert_experiment_run( """ async with self.get_client() as client: if experiment_run.id: - exp_run = await client.create_experiment_run( - experiment_run.id, experiment_run.update() - ) + exp_run = await client.create_experiment_run(experiment_run.id, experiment_run.update()) # type: ignore[arg-type] else: exp_run = await client.create_experiment_run(experiment_run.create()) @@ -569,9 +533,7 @@ async def get_experiment_runs_by_experiment_id( if options: options.next_page_token = exp_runs.next_page_token - return [ - ExperimentRun.from_basemodel(exp_run) for exp_run in exp_runs.items or [] - ] + return [ExperimentRun.from_basemodel(exp_run) for exp_run in exp_runs.items or []] async def get_experiment_runs_by_experiment_name( self, experiment_name: str, options: ListOptions | None = None @@ -597,9 +559,7 @@ async def get_experiment_runs_by_experiment_name( if options: options.next_page_token = exp_runs.next_page_token - return [ - ExperimentRun.from_basemodel(exp_run) for exp_run in exp_runs.items or [] - ] + return [ExperimentRun.from_basemodel(exp_run) for exp_run in exp_runs.items or []] async def get_experiment_run_by_experiment_and_run_id( self, @@ -628,11 +588,11 @@ async def get_experiment_run_by_experiment_and_run_id( msg = "Either experiment_name or experiment_id must be provided" raise ValueError(msg) if not exp: - return None + return None # type: ignore[return-value] exp_run = await client.get_experiment_run(str(run_id)) except mr_exceptions.NotFoundException: - return None + return None # type: ignore[return-value] return ExperimentRun.from_basemodel(exp_run) @@ -663,11 +623,11 @@ async def get_experiment_run_by_experiment_and_run_name( exp = await self.get_experiment_by_id(str(experiment_id)) if not exp: - return None + return None # type: ignore[return-value] - exp_run = await client.get_experiment_run(exp.id) + exp_run = await client.get_experiment_run(exp.id) # type: ignore[arg-type] except 
mr_exceptions.NotFoundException: - return None + return None # type: ignore[return-value] return ExperimentRun.from_basemodel(exp_run) @@ -681,7 +641,7 @@ async def get_experiment_run_by_id(self, id: str) -> ExperimentRun: try: exp_run = await client.get_experiment_run(id) except mr_exceptions.NotFoundException: - return None + return None # type: ignore[return-value] return ExperimentRun.from_basemodel(exp_run) @@ -697,16 +657,14 @@ async def upsert_experiment_run_artifact( artifact: Artifact to upsert. """ async with self.get_client() as client: - return Artifact.validate_artifact( + return Artifact.validate_artifact( # type: ignore[return-value] await client.upsert_experiment_run_artifact( experimentrun_id=experiment_run_id, artifact=artifact.wrap() ) ) @overload - async def get_artifacts_by_experiment_run_params( - self, run_id: str | int, options: ListOptions | None = None - ): ... + async def get_artifacts_by_experiment_run_params(self, run_id: str | int, options: ListOptions | None = None): ... @overload async def get_artifacts_by_experiment_run_params( @@ -724,9 +682,7 @@ async def get_artifacts_by_experiment_run_params( options: ListOptions | None = None, ): ... - @required_args( - ("run_id",), ("run_name", "experiment_name"), ("run_name", "experiment_id") - ) + @required_args(("run_id",), ("run_name", "experiment_name"), ("run_name", "experiment_id")) # type: ignore[misc] async def get_artifacts_by_experiment_run_params( self, run_id: str | int | None = None, @@ -771,18 +727,18 @@ async def get_artifacts_by_experiment_run_params( f"in experiment {experiment_name} within the first 100 runs. " "Please narrow your search by run id." 
) - return [] + return [] # type: ignore[return-value] run_id = run.id logs = await client.get_experiment_run_artifacts( str(run_id), **(options or ListOptions()).as_options() ) except mr_exceptions.NotFoundException: - return [] + return [] # type: ignore[return-value] if options: options.next_page_token = logs.next_page_token - return [Artifact.validate_artifact(log) for log in logs.items or []] + return [Artifact.validate_artifact(log) for log in logs.items or []] # type: ignore[return-value] async def get_artifacts( self, diff --git a/clients/python/src/model_registry/exceptions.py b/clients/python/src/model_registry/exceptions.py index 8fdb5a80e6..c75b3275d9 100644 --- a/clients/python/src/model_registry/exceptions.py +++ b/clients/python/src/model_registry/exceptions.py @@ -24,10 +24,10 @@ class ServerError(StoreError): class DuplicateError(StoreError): """Raised when the user tries to put an object with a conflicting property.""" + class ExperimentRunError(Exception): """Raised when an error occurs while working with an experiment run.""" + class ExperimentRunStoreError(StoreError): """Raised when an error occurs while storing an experiment run.""" - - diff --git a/clients/python/src/model_registry/types/artifacts.py b/clients/python/src/model_registry/types/artifacts.py index e95f1e31ec..1797d9e74f 100644 --- a/clients/python/src/model_registry/types/artifacts.py +++ b/clients/python/src/model_registry/types/artifacts.py @@ -291,7 +291,7 @@ class DataSet(Artifact): digest: str | None = None source_type: str | None = None source: str | None = None - schema: str | None = None + schema: str | None = None # type: ignore[assignment] profile: str | None = None @override @@ -342,7 +342,7 @@ def from_basemodel(cls, source: DataSetBaseModel) -> DataSet: source=source.source, schema=source.var_schema, profile=source.profile, - state=source.state, + state=source.state, # type: ignore[arg-type] custom_properties=cls._unmap_custom_properties(source.custom_properties) if 
source.custom_properties else None, @@ -382,9 +382,7 @@ def update(self, **kwargs) -> MetricUpdate: return MetricUpdate( customProperties=self._map_custom_properties(), timestamp=self.timestamp, - **self._props_as_dict( - exclude=("id", "name", "timestamp", "custom_properties") - ), + **self._props_as_dict(exclude=("id", "name", "timestamp", "custom_properties")), artifactType="metric", **kwargs, ) @@ -412,10 +410,10 @@ def from_basemodel(cls, source: MetricBaseModel) -> Metric: last_update_time_since_epoch=source.last_update_time_since_epoch, experiment_id=source.experiment_id, experiment_run_id=source.experiment_run_id, - value=source.value, + value=source.value, # type: ignore[arg-type] timestamp=source.timestamp, - step=source.step, - state=source.state, + step=source.step, # type: ignore[arg-type] + state=source.state, # type: ignore[arg-type] custom_properties=cls._unmap_custom_properties(source.custom_properties) if source.custom_properties else None, @@ -475,11 +473,11 @@ def from_basemodel(cls, source: ParameterBaseModel) -> Parameter: assert source.parameter_type value = source.value if source.parameter_type is ParameterType.NUMBER: - value = float(value) + value = float(value) # type: ignore[assignment,arg-type] elif source.parameter_type is ParameterType.BOOLEAN: - value = bool(value) + value = bool(value) # type: ignore[assignment] elif source.parameter_type is ParameterType.OBJECT: - value = json.loads(value) + value = json.loads(value) # type: ignore[arg-type] return cls( id=source.id, name=source.name, @@ -489,9 +487,9 @@ def from_basemodel(cls, source: ParameterBaseModel) -> Parameter: last_update_time_since_epoch=source.last_update_time_since_epoch, experiment_id=source.experiment_id, experiment_run_id=source.experiment_run_id, - value=value, + value=value, # type: ignore[arg-type] parameter_type=source.parameter_type, - state=source.state, + state=source.state, # type: ignore[arg-type] 
custom_properties=cls._unmap_custom_properties(source.custom_properties) if source.custom_properties else None, diff --git a/clients/python/src/model_registry/types/base.py b/clients/python/src/model_registry/types/base.py index a1d0b40a6d..ba293600a6 100644 --- a/clients/python/src/model_registry/types/base.py +++ b/clients/python/src/model_registry/types/base.py @@ -87,9 +87,7 @@ def get_meta_value(v: SupportedTypes) -> MetadataValue: return dest @classmethod - def _unmap_custom_properties( - cls, custom_properties: dict[str, MetadataValue] - ) -> dict[str, SupportedTypes]: + def _unmap_custom_properties(cls, custom_properties: dict[str, MetadataValue]) -> dict[str, SupportedTypes]: def get_meta_value(meta: Any) -> SupportedTypes: type_name = meta.metadata_type[8:-5].lower() # Metadata type names are in the format MetadataValue @@ -107,15 +105,9 @@ def get_meta_value(meta: Any) -> SupportedTypes: ) } - def _props_as_dict( - self, exclude: Sequence[str] | None = None, alias: bool = False - ) -> dict[str, Any]: + def _props_as_dict(self, exclude: Sequence[str] | None = None, alias: bool = False) -> dict[str, Any]: exclude = exclude or [] - return { - k: getattr(self, k) - for k in self.model_json_schema(alias).get("properties", {}) - if k not in exclude - } + return {k: getattr(self, k) for k in self.model_json_schema(alias).get("properties", {}) if k not in exclude} def __repr_str__(self, join_str: str) -> str: """Represent the object as a string.""" diff --git a/clients/python/src/model_registry/types/experiments.py b/clients/python/src/model_registry/types/experiments.py index e201423bf5..550083c1b0 100644 --- a/clients/python/src/model_registry/types/experiments.py +++ b/clients/python/src/model_registry/types/experiments.py @@ -44,7 +44,7 @@ class Experiment(BaseResourceModel): owner: str | None = None description: str | None = None external_id: str | None = None - state: ExperimentState | None= None + state: ExperimentState | None = None custom_properties: 
dict[str, Any] | None = None @override diff --git a/clients/python/src/model_registry/types/options.py b/clients/python/src/model_registry/types/options.py index fbc11f6998..4edf61e741 100644 --- a/clients/python/src/model_registry/types/options.py +++ b/clients/python/src/model_registry/types/options.py @@ -45,9 +45,7 @@ def order_by_id(cls, **kwargs) -> ListOptions: return cls(order_by=OrderByField.ID, **kwargs) @classmethod - def artifact_type( - cls, artifact_type: ArtifactTypeQueryParam, **kwargs - ) -> ListOptions: + def artifact_type(cls, artifact_type: ArtifactTypeQueryParam, **kwargs) -> ListOptions: """Return options to filter by artifact type.""" return cls(artifact_type_param=artifact_type, **kwargs) diff --git a/clients/python/src/model_registry/types/pager.py b/clients/python/src/model_registry/types/pager.py index 359b008dd3..4e602ecf49 100644 --- a/clients/python/src/model_registry/types/pager.py +++ b/clients/python/src/model_registry/types/pager.py @@ -20,21 +20,19 @@ class Pager(Generic[T], Iterator[T], AsyncIterator[T]): Assumes that page_fn is a paged function that takes ListOptions and returns a list of items. """ - page_fn: ( - Callable[[ListOptions], list[T]] | Callable[[ListOptions], Awaitable[list[T]]] - ) + page_fn: Callable[[ListOptions], list[T]] | Callable[[ListOptions], Awaitable[list[T]]] options: ListOptions = field(default_factory=ListOptions) def __post_init__(self): self.restart() if asyncio.iscoroutinefunction(self.page_fn): - self.__next__ = NotImplemented + self.__next__ = NotImplemented # type: ignore[method-assign] self.next_page = self._anext_page self.next_item = self._anext_item else: - self.__anext__ = NotImplemented - self.next_page = self._next_page - self.next_item = self._next_item + self.__anext__ = NotImplemented # type: ignore[method-assign] + self.next_page = self._next_page # type: ignore[assignment] + self.next_item = self._next_item # type: ignore[assignment] def restart(self) -> Pager[T]: """Reset the pager. 
@@ -115,9 +113,7 @@ async def _anext_page(self) -> list[T]: return await cast(Awaitable[list[T]], self.page_fn(self.options)) def _needs_fetch(self) -> bool: - return not self._current_page or ( - self._i >= len(self._current_page) and self._start is not None - ) + return not self._current_page or (self._i >= len(self._current_page) and self._start is not None) def _next_item(self) -> T: """Get the next item in the pager. @@ -130,7 +126,7 @@ def _next_item(self) -> T: if self._needs_fetch(): self._current_page = self._next_page() self._i = 0 - if self._current_page is None: # for example when the MR server is empty + if self._current_page is None: # for example when the MR server is empty raise StopIteration if self._i >= len(self._current_page): raise StopIteration @@ -150,7 +146,7 @@ async def _anext_item(self) -> T: if self._needs_fetch(): self._current_page = await self._anext_page() self._i = 0 - if self._current_page is None: # for example when the MR server is empty + if self._current_page is None: # for example when the MR server is empty raise StopAsyncIteration if self._i >= len(self._current_page): raise StopAsyncIteration diff --git a/clients/python/src/model_registry/utils.py b/clients/python/src/model_registry/utils.py index a01619500d..1be8830941 100644 --- a/clients/python/src/model_registry/utils.py +++ b/clients/python/src/model_registry/utils.py @@ -9,19 +9,22 @@ import shutil import tempfile import threading -from contextlib import AbstractContextManager, contextmanager, suppress +from collections.abc import Generator +from contextlib import contextmanager, suppress from dataclasses import asdict, dataclass from pathlib import Path -from subprocess import CalledProcessError -from typing import TYPE_CHECKING, Callable, Protocol, TextIO, TypeVar +from subprocess import CalledProcessError, CompletedProcess +from typing import TYPE_CHECKING, Callable, Generic, Protocol, TextIO, TypeVar, cast from typing_extensions import Literal, overload from 
._utils import required_args from .exceptions import MissingMetadata, StoreError -# Generic return type +# Generic return types T = TypeVar("T") +TPull = TypeVar("TPull", covariant=True) +TPush = TypeVar("TPush", covariant=True) # If we want to forward reference if TYPE_CHECKING: @@ -110,30 +113,30 @@ def s3_uri_from( return f"s3://{bucket}/{path}?endpoint={endpoint}&defaultRegion={region}" -class PullFn(Protocol): +class PullFn(Protocol[TPull]): """Pull function definition.""" - def __call__(self, base: str, dest: Path, **kwargs) -> T: ... # noqa: D102 + def __call__(self, base: str, dest: Path, **kwargs) -> TPull: ... # noqa: D102 -class PushFn(Protocol): +class PushFn(Protocol[TPush]): """Push function definition.""" - def __call__(self, src: Path, oci_ref: str, **kwargs) -> T: ... # noqa: D102 + def __call__(self, src: Path, oci_ref: str, **kwargs) -> TPush: ... # noqa: D102 @dataclass -class BackendDefinition: +class BackendDefinition(Generic[TPull, TPush]): """Holds the 3 core callables for a backend. - is_available() -> bool - - pull(base_image: str, dest_dir: Path, **kwargs) -> T - - push(local_image_path: Path, oci_ref: str, **kwargs) -> T. + - pull(base_image: str, dest_dir: Path, **kwargs) -> TPull + - push(local_image_path: Path, oci_ref: str, **kwargs) -> TPush. """ is_available: Callable[[], bool] - pull: PullFn - push: PushFn + pull: PullFn[TPull] + push: PushFn[TPush] def _kwargs_to_params(kwargs: dict[str, str]) -> list[str]: @@ -151,21 +154,21 @@ def _kwargs_to_params(kwargs: dict[str, str]) -> list[str]: def _get_skopeo_backend( pull_args: list[str] | None = None, push_args: list[str] | None = None -) -> BackendDefinition: +) -> BackendDefinition[CompletedProcess[bytes], CompletedProcess[bytes]]: try: from olot.backend.skopeo import is_skopeo, skopeo_pull, skopeo_push except ImportError as e: msg = "Could not import 'olot.backend.skopeo'. Ensure that 'olot' is installed if you want to use the 'skopeo' backend." 
raise ImportError(msg) from e - def wrapped_pull(base_image: str, dest: Path, **kwargs) -> T: + def wrapped_pull(base_image: str, dest: Path, **kwargs) -> CompletedProcess[bytes]: kwargs = _backend_specific_params("skopeo", "pull", **kwargs) params = _kwargs_to_params(kwargs) params.extend(pull_args or []) return _scrub_errors(lambda: skopeo_pull(base_image, dest, params)) - def wrapped_push(src: Path, oci_ref: str, **kwargs) -> T: + def wrapped_push(src: Path, oci_ref: str, **kwargs) -> CompletedProcess[bytes]: kwargs = _backend_specific_params("skopeo", "push", **kwargs) params = _kwargs_to_params(kwargs) params.extend(push_args or []) @@ -173,27 +176,27 @@ def wrapped_push(src: Path, oci_ref: str, **kwargs) -> T: return _scrub_errors(lambda: skopeo_push(src, oci_ref, params)) return BackendDefinition( - is_available=is_skopeo, pull=wrapped_pull, push=wrapped_push + is_available=is_skopeo, pull=cast(PullFn[CompletedProcess[bytes]], wrapped_pull), push=wrapped_push ) def _get_oras_backend( pull_args: list[str] | None = None, push_args: list[str] | None = None -) -> BackendDefinition: +) -> BackendDefinition[CompletedProcess[bytes], CompletedProcess[bytes]]: try: from olot.backend.oras_cp import is_oras, oras_pull, oras_push except ImportError as e: msg = "Could not import 'olot.backend.oras_cp'. Ensure that 'olot' is installed if you want to use the 'oras_cp' backend." 
raise ImportError(msg) from e - def wrapped_pull(base_image: str, dest: Path, **kwargs) -> T: + def wrapped_pull(base_image: str, dest: Path, **kwargs) -> CompletedProcess[bytes]: kwargs = _backend_specific_params("oras", "pull", **kwargs) params = _kwargs_to_params(kwargs) params.extend(pull_args or []) return _scrub_errors(lambda: oras_pull(base_image, dest, params)) - def wrapped_push(src: Path, oci_ref: str, **kwargs) -> T: + def wrapped_push(src: Path, oci_ref: str, **kwargs) -> CompletedProcess[bytes]: kwargs = _backend_specific_params("oras", "push", **kwargs) params = _kwargs_to_params(kwargs) params.extend(push_args or []) @@ -202,14 +205,12 @@ def wrapped_push(src: Path, oci_ref: str, **kwargs) -> T: return BackendDefinition( is_available=is_oras, - pull=wrapped_pull, + pull=cast(PullFn[CompletedProcess[bytes]], wrapped_pull), push=wrapped_push, ) -def _backend_specific_params( - backend: Literal["skopeo", "oras"], type: Literal["push", "pull"], **kwargs -) -> dict: +def _backend_specific_params(backend: Literal["skopeo", "oras"], type: Literal["push", "pull"], **kwargs) -> dict: """Generate params based on the backend and action. Args: @@ -239,6 +240,8 @@ def _backend_specific_params( prefix = "--from" if type == "pull" else "--to" auth_suffix = "registry-config" else: + # This is defensive code - the Literal type hint makes this unreachable + # but we keep it for runtime safety if the type hint is bypassed msg = f"invalid backend: {backend!r}" raise ValueError(msg) @@ -249,7 +252,7 @@ def _backend_specific_params( return kwargs -def _scrub_errors(func: Callable[[], T]) -> T: +def _scrub_errors(func: Callable[[], CompletedProcess[bytes]]) -> CompletedProcess[bytes]: """Scrub errors of any subprocess command with sensitive data. 
Args: @@ -259,7 +262,7 @@ def _scrub_errors(func: Callable[[], T]) -> T: return func() except (CalledProcessError, Exception) as e: msg = """Problem with command""" - raise RuntimeError(msg, e.returncode, e.stderr) from None + raise RuntimeError(msg, e.returncode, e.stderr) from None # type: ignore[attr-defined] @dataclass @@ -271,10 +274,10 @@ class OCIParams: base_image: str oci_ref: str - dest_dir: str | os.PathLike = None + dest_dir: str | os.PathLike | None = None backend: str = "skopeo" modelcard: os.PathLike | None = None - custom_oci_backend: BackendDefinition = None + custom_oci_backend: BackendDefinition[CompletedProcess[bytes], CompletedProcess[bytes]] | None = None oci_auth_env_var: str | None = None oci_username: str | None = None oci_password: str | None = None @@ -299,7 +302,7 @@ class S3Params: # A dict mapping backend names to their definitions -BackendDict = dict[str, Callable[[], BackendDefinition]] +BackendDict = dict[str, Callable[[], BackendDefinition[CompletedProcess[bytes], CompletedProcess[bytes]]]] DEFAULT_BACKENDS: BackendDict = { "skopeo": _get_skopeo_backend, @@ -311,10 +314,10 @@ def save_to_oci_registry( # noqa: C901 ( complex args >8 ) base_image: str, oci_ref: str, model_files_path: str | os.PathLike, - dest_dir: str | os.PathLike = None, + dest_dir: str | os.PathLike | None = None, backend: str = "skopeo", modelcard: os.PathLike | None = None, - custom_oci_backend: BackendDefinition | None = None, + custom_oci_backend: BackendDefinition[CompletedProcess[bytes], CompletedProcess[bytes]] | None = None, oci_auth_env_var: str | None = None, oci_username: str | None = None, oci_password: str | None = None, @@ -361,7 +364,7 @@ def save_to_oci_registry( # noqa: C901 ( complex args >8 ) if oci_auth_env_var: auth = _validate_env_var(oci_auth_env_var) elif ".dockerconfigjson" in os.environ: - auth = os.environ[".dockerconfigjson"] # noqa: SIM112 + auth = os.environ[".dockerconfigjson"] # noqa: SIM112 elif oci_username and oci_password: auth 
= json.dumps(create_auth_object(oci_ref, oci_username, oci_password)) @@ -391,7 +394,7 @@ def save_to_oci_registry( # noqa: C901 ( complex args >8 ) params["authfile"] = auth_file.name backend_def.pull(base_image, local_image_path, **params) # Extract the absolute path from the files found in the path - files = [file[0] for file in _get_files_from_path(model_files_path)] + files = [file[0] for file in _get_files_from_path(model_files_path)] # type: ignore[arg-type] oci_layers_on_top(local_image_path, files, modelcard) backend_def.push(local_image_path, oci_ref, **params) @@ -402,17 +405,17 @@ def save_to_oci_registry( # noqa: C901 ( complex args >8 ) @overload -def temp_auth_file(auth: str) -> AbstractContextManager[TextIO]: - ... +@contextmanager +def temp_auth_file(auth: str) -> Generator[TextIO, None, None]: ... @overload -def temp_auth_file(auth: None) -> AbstractContextManager[None]: - ... +@contextmanager +def temp_auth_file(auth: None) -> Generator[None, None, None]: ... @contextmanager -def temp_auth_file(auth: str | None) -> AbstractContextManager[TextIO | None]: +def temp_auth_file(auth: str | None) -> Generator[TextIO | None, None, None]: """Create a temporary auth file with optional auth data. If auth is None, yields None. Otherwise creates a temporary JSON file @@ -427,10 +430,12 @@ def temp_auth_file(auth: str | None) -> AbstractContextManager[TextIO | None]: # delete=True, delete_on_close=False with Python 3.12 or later. 
path: str | None = None try: - with tempfile.NamedTemporaryFile(mode="w+", encoding="utf-8", suffix=".json", delete=False) as temp_auth_file: + with tempfile.NamedTemporaryFile( + mode="w+", encoding="utf-8", suffix=".json", delete=False + ) as temp_auth_file: path = temp_auth_file.name temp_auth_file.write(auth) - yield temp_auth_file + yield temp_auth_file # type: ignore[misc] finally: if path is not None: with suppress(OSError): @@ -518,8 +523,8 @@ def _upload_to_s3( # noqa: C901 uri = s3_uri_from( path=path_prefix, bucket=bucket, - endpoint=endpoint_url, - region=region, + endpoint=endpoint_url, # type: ignore[arg-type] + region=region, # type: ignore[arg-type] ) files = _get_files_from_path(path) for absolute_path_filename, relative_path_filename in files: @@ -539,9 +544,9 @@ def _connect_to_s3( access_key_id: str | None = None, secret_access_key: str | None = None, region: str | None = None, - multipart_threshold: int = None, - multipart_chunksize: int = None, - max_pool_connections: int = None, + multipart_threshold: int | None = None, + multipart_chunksize: int | None = None, + max_pool_connections: int | None = None, ) -> tuple[BaseClient, TransferConfig]: """Internal method to connect to Boto3 Client. @@ -562,7 +567,7 @@ def _connect_to_s3( ValueError: If the appropriate values are not supplied. """ try: - from boto3 import client # type: ignore + from boto3 import client from boto3.s3.transfer import TransferConfig from botocore.config import Config @@ -724,7 +729,7 @@ def get_auth_reference(image_path: str) -> str: return repo_path -def create_auth_object(oci_ref: str, username: str, password: str) -> dict[str: dict[str, dict[str, str]]]: +def create_auth_object(oci_ref: str, username: str, password: str) -> dict[str, dict[str, dict[str, str]]]: """Create an auth object for container registry authentication. 
This object can be encoded as json with json.dumps() producing the @@ -768,9 +773,7 @@ def upload_to_s3( transfer_config: The transfer config to use for the upload. If not provided, a new transfer config will be created. """ if s3_client and not transfer_config: - msg = ( - "Both `transfer_config` and `s3_client` must be provided if S3 is provided." - ) + msg = "Both `transfer_config` and `s3_client` must be provided if S3 is provided." raise ValueError(msg) if not s3_client: @@ -790,7 +793,7 @@ def upload_to_s3( ) -class ThreadSafeVariable: +class ThreadSafeVariable(Generic[T]): """Thread safe variable.""" def __init__(self, value: T): diff --git a/clients/python/src/mr_openapi/README.md b/clients/python/src/mr_openapi/README.md index 1186a1387e..5e8b2ccbe7 100644 --- a/clients/python/src/mr_openapi/README.md +++ b/clients/python/src/mr_openapi/README.md @@ -5,12 +5,12 @@ The `mr_openapi` package is automatically generated by the [OpenAPI Generator](h - API version: v1alpha3 - Package version: 1.0.0 -- Generator version: 7.6.0 +- Generator version: 7.17.0 - Build package: org.openapitools.codegen.languages.PythonClientCodegen ## Requirements. -Python 3.7+ +Python 3.9+ ## Installation & Usage @@ -18,10 +18,12 @@ This python library package is generated without supporting files like setup.py To be able to use it, you will need these dependencies in your own package that uses this library: -* urllib3 >= 1.25.3 -* python-dateutil -* aiohttp -* pydantic +* urllib3 >= 2.1.0, < 3.0.0 +* python-dateutil >= 2.8.2 +* aiohttp >= 3.8.4 +* aiohttp-retry >= 2.8.3 +* pydantic >= 2 +* typing-extensions >= 4.7.1 ## Getting Started diff --git a/clients/python/src/mr_openapi/__init__.py b/clients/python/src/mr_openapi/__init__.py index 54aa93b633..31131c4daf 100644 --- a/clients/python/src/mr_openapi/__init__.py +++ b/clients/python/src/mr_openapi/__init__.py @@ -13,97 +13,186 @@ Do not edit the class manually. 
""" # noqa: E501 - __version__ = "1.0.0" +# Define package exports +__all__ = [ + "ModelRegistryServiceApi", + "ApiResponse", + "ApiClient", + "Configuration", + "OpenApiException", + "ApiTypeError", + "ApiValueError", + "ApiKeyError", + "ApiAttributeError", + "ApiException", + "Artifact", + "ArtifactCreate", + "ArtifactList", + "ArtifactState", + "ArtifactTypeQueryParam", + "ArtifactUpdate", + "BaseArtifact", + "BaseModel", + "BaseResource", + "BaseResourceCreate", + "BaseResourceDates", + "BaseResourceList", + "BaseResourceUpdate", + "DataSet", + "DataSetCreate", + "DataSetUpdate", + "DocArtifact", + "DocArtifactCreate", + "DocArtifactUpdate", + "Error", + "ExecutionState", + "Experiment", + "ExperimentCreate", + "ExperimentList", + "ExperimentRun", + "ExperimentRunCreate", + "ExperimentRunList", + "ExperimentRunState", + "ExperimentRunStatus", + "ExperimentRunUpdate", + "ExperimentState", + "ExperimentUpdate", + "InferenceService", + "InferenceServiceCreate", + "InferenceServiceList", + "InferenceServiceState", + "InferenceServiceUpdate", + "MetadataBoolValue", + "MetadataDoubleValue", + "MetadataIntValue", + "MetadataProtoValue", + "MetadataStringValue", + "MetadataStructValue", + "MetadataValue", + "Metric", + "MetricCreate", + "MetricList", + "MetricUpdate", + "ModelArtifact", + "ModelArtifactCreate", + "ModelArtifactList", + "ModelArtifactUpdate", + "ModelVersion", + "ModelVersionCreate", + "ModelVersionList", + "ModelVersionState", + "ModelVersionUpdate", + "OrderByField", + "Parameter", + "ParameterCreate", + "ParameterType", + "ParameterUpdate", + "RegisteredModel", + "RegisteredModelCreate", + "RegisteredModelList", + "RegisteredModelState", + "RegisteredModelUpdate", + "ServeModel", + "ServeModelCreate", + "ServeModelList", + "ServeModelUpdate", + "ServingEnvironment", + "ServingEnvironmentCreate", + "ServingEnvironmentList", + "ServingEnvironmentUpdate", + "SortOrder", +] + # import apis into sdk package -from mr_openapi.api.model_registry_service_api 
import ModelRegistryServiceApi +from mr_openapi.api.model_registry_service_api import ModelRegistryServiceApi as ModelRegistryServiceApi # import ApiClient -from mr_openapi.api_response import ApiResponse -from mr_openapi.api_client import ApiClient -from mr_openapi.configuration import Configuration -from mr_openapi.exceptions import OpenApiException -from mr_openapi.exceptions import ApiTypeError -from mr_openapi.exceptions import ApiValueError -from mr_openapi.exceptions import ApiKeyError -from mr_openapi.exceptions import ApiAttributeError -from mr_openapi.exceptions import ApiException +from mr_openapi.api_response import ApiResponse as ApiResponse +from mr_openapi.api_client import ApiClient as ApiClient +from mr_openapi.configuration import Configuration as Configuration +from mr_openapi.exceptions import OpenApiException as OpenApiException +from mr_openapi.exceptions import ApiTypeError as ApiTypeError +from mr_openapi.exceptions import ApiValueError as ApiValueError +from mr_openapi.exceptions import ApiKeyError as ApiKeyError +from mr_openapi.exceptions import ApiAttributeError as ApiAttributeError +from mr_openapi.exceptions import ApiException as ApiException # import models into sdk package -from mr_openapi.models.artifact import Artifact -from mr_openapi.models.artifact_create import ArtifactCreate -from mr_openapi.models.artifact_list import ArtifactList -from mr_openapi.models.artifact_state import ArtifactState -from mr_openapi.models.artifact_type_query_param import ArtifactTypeQueryParam -from mr_openapi.models.artifact_update import ArtifactUpdate -from mr_openapi.models.base_artifact import BaseArtifact -from mr_openapi.models.base_model import BaseModel -from mr_openapi.models.base_resource import BaseResource -from mr_openapi.models.base_resource_create import BaseResourceCreate -from mr_openapi.models.base_resource_dates import BaseResourceDates -from mr_openapi.models.base_resource_list import BaseResourceList -from 
mr_openapi.models.base_resource_update import BaseResourceUpdate -from mr_openapi.models.data_set import DataSet -from mr_openapi.models.data_set_create import DataSetCreate -from mr_openapi.models.data_set_update import DataSetUpdate -from mr_openapi.models.doc_artifact import DocArtifact -from mr_openapi.models.doc_artifact_create import DocArtifactCreate -from mr_openapi.models.doc_artifact_update import DocArtifactUpdate -from mr_openapi.models.error import Error -from mr_openapi.models.execution_state import ExecutionState -from mr_openapi.models.experiment import Experiment -from mr_openapi.models.experiment_create import ExperimentCreate -from mr_openapi.models.experiment_list import ExperimentList -from mr_openapi.models.experiment_run import ExperimentRun -from mr_openapi.models.experiment_run_create import ExperimentRunCreate -from mr_openapi.models.experiment_run_list import ExperimentRunList -from mr_openapi.models.experiment_run_state import ExperimentRunState -from mr_openapi.models.experiment_run_status import ExperimentRunStatus -from mr_openapi.models.experiment_run_update import ExperimentRunUpdate -from mr_openapi.models.experiment_state import ExperimentState -from mr_openapi.models.experiment_update import ExperimentUpdate -from mr_openapi.models.inference_service import InferenceService -from mr_openapi.models.inference_service_create import InferenceServiceCreate -from mr_openapi.models.inference_service_list import InferenceServiceList -from mr_openapi.models.inference_service_state import InferenceServiceState -from mr_openapi.models.inference_service_update import InferenceServiceUpdate -from mr_openapi.models.metadata_bool_value import MetadataBoolValue -from mr_openapi.models.metadata_double_value import MetadataDoubleValue -from mr_openapi.models.metadata_int_value import MetadataIntValue -from mr_openapi.models.metadata_proto_value import MetadataProtoValue -from mr_openapi.models.metadata_string_value import MetadataStringValue -from 
mr_openapi.models.metadata_struct_value import MetadataStructValue -from mr_openapi.models.metadata_value import MetadataValue -from mr_openapi.models.metric import Metric -from mr_openapi.models.metric_create import MetricCreate -from mr_openapi.models.metric_list import MetricList -from mr_openapi.models.metric_update import MetricUpdate -from mr_openapi.models.model_artifact import ModelArtifact -from mr_openapi.models.model_artifact_create import ModelArtifactCreate -from mr_openapi.models.model_artifact_list import ModelArtifactList -from mr_openapi.models.model_artifact_update import ModelArtifactUpdate -from mr_openapi.models.model_version import ModelVersion -from mr_openapi.models.model_version_create import ModelVersionCreate -from mr_openapi.models.model_version_list import ModelVersionList -from mr_openapi.models.model_version_state import ModelVersionState -from mr_openapi.models.model_version_update import ModelVersionUpdate -from mr_openapi.models.order_by_field import OrderByField -from mr_openapi.models.parameter import Parameter -from mr_openapi.models.parameter_create import ParameterCreate -from mr_openapi.models.parameter_type import ParameterType -from mr_openapi.models.parameter_update import ParameterUpdate -from mr_openapi.models.registered_model import RegisteredModel -from mr_openapi.models.registered_model_create import RegisteredModelCreate -from mr_openapi.models.registered_model_list import RegisteredModelList -from mr_openapi.models.registered_model_state import RegisteredModelState -from mr_openapi.models.registered_model_update import RegisteredModelUpdate -from mr_openapi.models.serve_model import ServeModel -from mr_openapi.models.serve_model_create import ServeModelCreate -from mr_openapi.models.serve_model_list import ServeModelList -from mr_openapi.models.serve_model_update import ServeModelUpdate -from mr_openapi.models.serving_environment import ServingEnvironment -from mr_openapi.models.serving_environment_create import 
ServingEnvironmentCreate -from mr_openapi.models.serving_environment_list import ServingEnvironmentList -from mr_openapi.models.serving_environment_update import ServingEnvironmentUpdate -from mr_openapi.models.sort_order import SortOrder +from mr_openapi.models.artifact import Artifact as Artifact +from mr_openapi.models.artifact_create import ArtifactCreate as ArtifactCreate +from mr_openapi.models.artifact_list import ArtifactList as ArtifactList +from mr_openapi.models.artifact_state import ArtifactState as ArtifactState +from mr_openapi.models.artifact_type_query_param import ArtifactTypeQueryParam as ArtifactTypeQueryParam +from mr_openapi.models.artifact_update import ArtifactUpdate as ArtifactUpdate +from mr_openapi.models.base_artifact import BaseArtifact as BaseArtifact +from mr_openapi.models.base_model import BaseModel as BaseModel +from mr_openapi.models.base_resource import BaseResource as BaseResource +from mr_openapi.models.base_resource_create import BaseResourceCreate as BaseResourceCreate +from mr_openapi.models.base_resource_dates import BaseResourceDates as BaseResourceDates +from mr_openapi.models.base_resource_list import BaseResourceList as BaseResourceList +from mr_openapi.models.base_resource_update import BaseResourceUpdate as BaseResourceUpdate +from mr_openapi.models.data_set import DataSet as DataSet +from mr_openapi.models.data_set_create import DataSetCreate as DataSetCreate +from mr_openapi.models.data_set_update import DataSetUpdate as DataSetUpdate +from mr_openapi.models.doc_artifact import DocArtifact as DocArtifact +from mr_openapi.models.doc_artifact_create import DocArtifactCreate as DocArtifactCreate +from mr_openapi.models.doc_artifact_update import DocArtifactUpdate as DocArtifactUpdate +from mr_openapi.models.error import Error as Error +from mr_openapi.models.execution_state import ExecutionState as ExecutionState +from mr_openapi.models.experiment import Experiment as Experiment +from mr_openapi.models.experiment_create 
import ExperimentCreate as ExperimentCreate +from mr_openapi.models.experiment_list import ExperimentList as ExperimentList +from mr_openapi.models.experiment_run import ExperimentRun as ExperimentRun +from mr_openapi.models.experiment_run_create import ExperimentRunCreate as ExperimentRunCreate +from mr_openapi.models.experiment_run_list import ExperimentRunList as ExperimentRunList +from mr_openapi.models.experiment_run_state import ExperimentRunState as ExperimentRunState +from mr_openapi.models.experiment_run_status import ExperimentRunStatus as ExperimentRunStatus +from mr_openapi.models.experiment_run_update import ExperimentRunUpdate as ExperimentRunUpdate +from mr_openapi.models.experiment_state import ExperimentState as ExperimentState +from mr_openapi.models.experiment_update import ExperimentUpdate as ExperimentUpdate +from mr_openapi.models.inference_service import InferenceService as InferenceService +from mr_openapi.models.inference_service_create import InferenceServiceCreate as InferenceServiceCreate +from mr_openapi.models.inference_service_list import InferenceServiceList as InferenceServiceList +from mr_openapi.models.inference_service_state import InferenceServiceState as InferenceServiceState +from mr_openapi.models.inference_service_update import InferenceServiceUpdate as InferenceServiceUpdate +from mr_openapi.models.metadata_bool_value import MetadataBoolValue as MetadataBoolValue +from mr_openapi.models.metadata_double_value import MetadataDoubleValue as MetadataDoubleValue +from mr_openapi.models.metadata_int_value import MetadataIntValue as MetadataIntValue +from mr_openapi.models.metadata_proto_value import MetadataProtoValue as MetadataProtoValue +from mr_openapi.models.metadata_string_value import MetadataStringValue as MetadataStringValue +from mr_openapi.models.metadata_struct_value import MetadataStructValue as MetadataStructValue +from mr_openapi.models.metadata_value import MetadataValue as MetadataValue +from 
mr_openapi.models.metric import Metric as Metric +from mr_openapi.models.metric_create import MetricCreate as MetricCreate +from mr_openapi.models.metric_list import MetricList as MetricList +from mr_openapi.models.metric_update import MetricUpdate as MetricUpdate +from mr_openapi.models.model_artifact import ModelArtifact as ModelArtifact +from mr_openapi.models.model_artifact_create import ModelArtifactCreate as ModelArtifactCreate +from mr_openapi.models.model_artifact_list import ModelArtifactList as ModelArtifactList +from mr_openapi.models.model_artifact_update import ModelArtifactUpdate as ModelArtifactUpdate +from mr_openapi.models.model_version import ModelVersion as ModelVersion +from mr_openapi.models.model_version_create import ModelVersionCreate as ModelVersionCreate +from mr_openapi.models.model_version_list import ModelVersionList as ModelVersionList +from mr_openapi.models.model_version_state import ModelVersionState as ModelVersionState +from mr_openapi.models.model_version_update import ModelVersionUpdate as ModelVersionUpdate +from mr_openapi.models.order_by_field import OrderByField as OrderByField +from mr_openapi.models.parameter import Parameter as Parameter +from mr_openapi.models.parameter_create import ParameterCreate as ParameterCreate +from mr_openapi.models.parameter_type import ParameterType as ParameterType +from mr_openapi.models.parameter_update import ParameterUpdate as ParameterUpdate +from mr_openapi.models.registered_model import RegisteredModel as RegisteredModel +from mr_openapi.models.registered_model_create import RegisteredModelCreate as RegisteredModelCreate +from mr_openapi.models.registered_model_list import RegisteredModelList as RegisteredModelList +from mr_openapi.models.registered_model_state import RegisteredModelState as RegisteredModelState +from mr_openapi.models.registered_model_update import RegisteredModelUpdate as RegisteredModelUpdate +from mr_openapi.models.serve_model import ServeModel as ServeModel +from 
mr_openapi.models.serve_model_create import ServeModelCreate as ServeModelCreate +from mr_openapi.models.serve_model_list import ServeModelList as ServeModelList +from mr_openapi.models.serve_model_update import ServeModelUpdate as ServeModelUpdate +from mr_openapi.models.serving_environment import ServingEnvironment as ServingEnvironment +from mr_openapi.models.serving_environment_create import ServingEnvironmentCreate as ServingEnvironmentCreate +from mr_openapi.models.serving_environment_list import ServingEnvironmentList as ServingEnvironmentList +from mr_openapi.models.serving_environment_update import ServingEnvironmentUpdate as ServingEnvironmentUpdate +from mr_openapi.models.sort_order import SortOrder as SortOrder diff --git a/clients/python/src/mr_openapi/api/model_registry_service_api.py b/clients/python/src/mr_openapi/api/model_registry_service_api.py index 1235db6451..d47e769c58 100644 --- a/clients/python/src/mr_openapi/api/model_registry_service_api.py +++ b/clients/python/src/mr_openapi/api/model_registry_service_api.py @@ -264,7 +264,6 @@ def _create_artifact_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -273,7 +272,7 @@ def _create_artifact_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -285,7 +284,8 @@ def _create_artifact_serialize( _body_params = artifact_create # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -537,7 
+537,6 @@ def _create_environment_inference_service_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -546,7 +545,7 @@ def _create_environment_inference_service_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -560,7 +559,8 @@ def _create_environment_inference_service_serialize( _body_params = inference_service_create # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -784,7 +784,6 @@ def _create_experiment_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -793,7 +792,7 @@ def _create_experiment_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -805,7 +804,8 @@ def _create_experiment_serialize( _body_params = experiment_create # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -1045,7 +1045,6 @@ def 
_create_experiment_experiment_run_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -1054,7 +1053,7 @@ def _create_experiment_experiment_run_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1068,7 +1067,8 @@ def _create_experiment_experiment_run_serialize( _body_params = experiment_run # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -1301,7 +1301,6 @@ def _create_experiment_run_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -1310,7 +1309,7 @@ def _create_experiment_run_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1322,7 +1321,8 @@ def _create_experiment_run_serialize( _body_params = experiment_run_create # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -1552,7 +1552,6 @@ def _create_inference_service_serialize( 
_headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -1561,7 +1560,7 @@ def _create_inference_service_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1573,7 +1572,8 @@ def _create_inference_service_serialize( _body_params = inference_service_create # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -1819,7 +1819,6 @@ def _create_inference_service_serve_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -1828,7 +1827,7 @@ def _create_inference_service_serve_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1842,7 +1841,8 @@ def _create_inference_service_serve_serialize( _body_params = serve_model_create # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -2072,7 +2072,6 @@ def _create_model_artifact_serialize( _headers, _host_index, ) -> 
RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -2081,7 +2080,7 @@ def _create_model_artifact_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2093,7 +2092,8 @@ def _create_model_artifact_serialize( _body_params = model_artifact_create # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -2320,7 +2320,6 @@ def _create_model_version_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -2329,7 +2328,7 @@ def _create_model_version_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2341,7 +2340,8 @@ def _create_model_version_serialize( _body_params = model_version_create # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -2571,7 +2571,6 @@ def _create_registered_model_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = 
{} @@ -2580,7 +2579,7 @@ def _create_registered_model_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2592,7 +2591,8 @@ def _create_registered_model_serialize( _body_params = registered_model_create # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -2832,7 +2832,6 @@ def _create_registered_model_version_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -2841,7 +2840,7 @@ def _create_registered_model_version_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2855,7 +2854,8 @@ def _create_registered_model_version_serialize( _body_params = model_version # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -3085,7 +3085,6 @@ def _create_serving_environment_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -3094,7 +3093,7 @@ def 
_create_serving_environment_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -3106,7 +3105,8 @@ def _create_serving_environment_serialize( _body_params = serving_environment_create # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -3362,7 +3362,6 @@ def _find_artifact_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -3371,21 +3370,18 @@ def _find_artifact_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) if parent_resource_id is not None: - _query_params.append(("parentResourceId", parent_resource_id)) # process the header parameters @@ -3393,7 +3389,8 @@ def _find_artifact_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting 
_auth_settings: list[str] = ["Bearer"] @@ -3619,7 +3616,6 @@ def _find_experiment_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -3628,17 +3624,15 @@ def _find_experiment_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) # process the header parameters @@ -3646,7 +3640,8 @@ def _find_experiment_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -3894,7 +3889,6 @@ def _find_experiment_run_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -3903,21 +3897,18 @@ def _find_experiment_run_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) if parent_resource_id is not None: - 
_query_params.append(("parentResourceId", parent_resource_id)) # process the header parameters @@ -3925,7 +3916,8 @@ def _find_experiment_run_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -4173,7 +4165,6 @@ def _find_inference_service_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -4182,21 +4173,18 @@ def _find_inference_service_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) if parent_resource_id is not None: - _query_params.append(("parentResourceId", parent_resource_id)) # process the header parameters @@ -4204,7 +4192,8 @@ def _find_inference_service_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -4452,7 +4441,6 @@ def _find_model_artifact_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -4461,21 +4449,18 @@ def _find_model_artifact_serialize( _query_params: 
list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) if parent_resource_id is not None: - _query_params.append(("parentResourceId", parent_resource_id)) # process the header parameters @@ -4483,7 +4468,8 @@ def _find_model_artifact_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -4731,7 +4717,6 @@ def _find_model_version_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -4740,21 +4725,18 @@ def _find_model_version_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) if parent_resource_id is not None: - _query_params.append(("parentResourceId", parent_resource_id)) # process the header parameters @@ -4762,7 +4744,8 @@ def _find_model_version_serialize( # process the body parameter # set the HTTP 
header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -4988,7 +4971,6 @@ def _find_registered_model_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -4997,17 +4979,15 @@ def _find_registered_model_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) # process the header parameters @@ -5015,7 +4995,8 @@ def _find_registered_model_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -5241,7 +5222,6 @@ def _find_serving_environment_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -5250,17 +5230,15 @@ def _find_serving_environment_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: 
Optional[bytes] = None # process the path parameters # process the query parameters if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) # process the header parameters @@ -5268,7 +5246,8 @@ def _find_serving_environment_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -5472,7 +5451,6 @@ def _get_artifact_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -5481,7 +5459,7 @@ def _get_artifact_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -5493,7 +5471,8 @@ def _get_artifact_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -5813,7 +5792,6 @@ def _get_artifacts_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -5822,33 +5800,27 @@ def _get_artifacts_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: 
dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if artifact_type is not None: - _query_params.append(("artifactType", artifact_type.value)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -5856,7 +5828,8 @@ def _get_artifacts_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -6199,7 +6172,6 @@ def _get_environment_inference_services_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -6208,7 +6180,7 @@ def _get_environment_inference_services_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -6216,31 +6188,24 @@ def _get_environment_inference_services_serialize( _path_params["servingenvironmentId"] = servingenvironment_id # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if name is not None: - _query_params.append(("name", 
name)) if external_id is not None: - _query_params.append(("externalId", external_id)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -6248,7 +6213,8 @@ def _get_environment_inference_services_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -6461,7 +6427,6 @@ def _get_experiment_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -6470,7 +6435,7 @@ def _get_experiment_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -6482,7 +6447,8 @@ def _get_experiment_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -6819,7 +6785,6 @@ def _get_experiment_experiment_runs_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -6828,7 +6793,7 @@ def 
_get_experiment_experiment_runs_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -6836,31 +6801,24 @@ def _get_experiment_experiment_runs_serialize( _path_params["experimentId"] = experiment_id # process the query parameters if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -6868,7 +6826,8 @@ def _get_experiment_experiment_runs_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -7081,7 +7040,6 @@ def _get_experiment_run_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -7090,7 +7048,7 @@ def _get_experiment_run_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], 
list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -7102,7 +7060,8 @@ def _get_experiment_run_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -7452,7 +7411,6 @@ def _get_experiment_run_artifacts_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -7461,7 +7419,7 @@ def _get_experiment_run_artifacts_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -7469,35 +7427,27 @@ def _get_experiment_run_artifacts_serialize( _path_params["experimentrunId"] = experimentrun_id # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) if artifact_type is not None: - _query_params.append(("artifactType", artifact_type.value)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -7505,7 +7455,8 @@ def _get_experiment_run_artifacts_serialize( # process the body parameter # set 
the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -7848,7 +7799,6 @@ def _get_experiment_run_metric_history_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -7857,7 +7807,7 @@ def _get_experiment_run_metric_history_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -7865,31 +7815,24 @@ def _get_experiment_run_metric_history_serialize( _path_params["experimentrunId"] = experimentrun_id # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if name is not None: - _query_params.append(("name", name)) if step_ids is not None: - _query_params.append(("stepIds", step_ids)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -7897,7 +7840,8 @@ def _get_experiment_run_metric_history_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting 
_auth_settings: list[str] = ["Bearer"] @@ -8192,7 +8136,6 @@ def _get_experiment_runs_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -8201,29 +8144,24 @@ def _get_experiment_runs_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -8231,7 +8169,8 @@ def _get_experiment_runs_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -8561,7 +8500,6 @@ def _get_experiment_runs_metric_history_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -8570,37 +8508,30 @@ def _get_experiment_runs_metric_history_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} 
_body_params: Optional[bytes] = None # process the path parameters # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if name is not None: - _query_params.append(("name", name)) if step_ids is not None: - _query_params.append(("stepIds", step_ids)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -8608,7 +8539,8 @@ def _get_experiment_runs_metric_history_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -8903,7 +8835,6 @@ def _get_experiments_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -8912,29 +8843,24 @@ def _get_experiments_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if 
next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -8942,7 +8868,8 @@ def _get_experiments_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -9155,7 +9082,6 @@ def _get_inference_service_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -9164,7 +9090,7 @@ def _get_inference_service_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -9176,7 +9102,8 @@ def _get_inference_service_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -9389,7 +9316,6 @@ def _get_inference_service_model_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -9398,7 +9324,7 @@ def _get_inference_service_model_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} 
_body_params: Optional[bytes] = None # process the path parameters @@ -9410,7 +9336,8 @@ def _get_inference_service_model_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -9747,7 +9674,6 @@ def _get_inference_service_serves_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -9756,7 +9682,7 @@ def _get_inference_service_serves_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -9764,31 +9690,24 @@ def _get_inference_service_serves_serialize( _path_params["inferenceserviceId"] = inferenceservice_id # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -9796,7 +9715,8 @@ def _get_inference_service_serves_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = 
self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -10009,7 +9929,6 @@ def _get_inference_service_version_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -10018,7 +9937,7 @@ def _get_inference_service_version_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -10030,7 +9949,8 @@ def _get_inference_service_version_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -10331,7 +10251,6 @@ def _get_inference_services_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -10340,29 +10259,24 @@ def _get_inference_services_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if page_size is not None: - _query_params.append(("pageSize", 
page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -10370,7 +10284,8 @@ def _get_inference_services_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -10583,7 +10498,6 @@ def _get_model_artifact_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -10592,7 +10506,7 @@ def _get_model_artifact_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -10604,7 +10518,8 @@ def _get_model_artifact_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -10905,7 +10820,6 @@ def _get_model_artifacts_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -10914,29 +10828,24 @@ def _get_model_artifacts_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: 
list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -10944,7 +10853,8 @@ def _get_model_artifacts_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -11157,7 +11067,6 @@ def _get_model_version_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -11166,7 +11075,7 @@ def _get_model_version_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -11178,7 +11087,8 @@ def _get_model_version_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # 
authentication setting _auth_settings: list[str] = ["Bearer"] @@ -11528,7 +11438,6 @@ def _get_model_version_artifacts_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -11537,7 +11446,7 @@ def _get_model_version_artifacts_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -11545,35 +11454,27 @@ def _get_model_version_artifacts_serialize( _path_params["modelversionId"] = modelversion_id # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) if artifact_type is not None: - _query_params.append(("artifactType", artifact_type.value)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -11581,7 +11482,8 @@ def _get_model_version_artifacts_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -11876,7 +11778,6 @@ def _get_model_versions_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None 
_collection_formats: dict[str, str] = {} @@ -11885,29 +11786,24 @@ def _get_model_versions_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -11915,7 +11811,8 @@ def _get_model_versions_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -12128,7 +12025,6 @@ def _get_registered_model_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -12137,7 +12033,7 @@ def _get_registered_model_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -12149,7 +12045,8 @@ def _get_registered_model_serialize( # process the body parameter # set the HTTP header 
`Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -12486,7 +12383,6 @@ def _get_registered_model_versions_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -12495,7 +12391,7 @@ def _get_registered_model_versions_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -12503,31 +12399,24 @@ def _get_registered_model_versions_serialize( _path_params["registeredmodelId"] = registeredmodel_id # process the query parameters if name is not None: - _query_params.append(("name", name)) if external_id is not None: - _query_params.append(("externalId", external_id)) if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -12535,7 +12424,8 @@ def _get_registered_model_versions_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: 
list[str] = ["Bearer"] @@ -12830,7 +12720,6 @@ def _get_registered_models_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -12839,29 +12728,24 @@ def _get_registered_models_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -12869,7 +12753,8 @@ def _get_registered_models_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -13088,7 +12973,6 @@ def _get_serving_environment_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -13097,7 +12981,7 @@ def _get_serving_environment_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: 
Optional[bytes] = None # process the path parameters @@ -13109,7 +12993,8 @@ def _get_serving_environment_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -13404,7 +13289,6 @@ def _get_serving_environments_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -13413,29 +13297,24 @@ def _get_serving_environments_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters # process the query parameters if filter_query is not None: - _query_params.append(("filterQuery", filter_query)) if page_size is not None: - _query_params.append(("pageSize", page_size)) if order_by is not None: - _query_params.append(("orderBy", order_by.value)) if sort_order is not None: - _query_params.append(("sortOrder", sort_order.value)) if next_page_token is not None: - _query_params.append(("nextPageToken", next_page_token)) # process the header parameters @@ -13443,7 +13322,8 @@ def _get_serving_environments_serialize( # process the body parameter # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # authentication setting _auth_settings: list[str] = ["Bearer"] @@ -13672,7 +13552,6 @@ def _update_artifact_serialize( _headers, _host_index, ) -> 
RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -13681,7 +13560,7 @@ def _update_artifact_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -13695,7 +13574,8 @@ def _update_artifact_serialize( _body_params = artifact_update # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -13932,7 +13812,6 @@ def _update_experiment_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -13941,7 +13820,7 @@ def _update_experiment_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -13955,7 +13834,8 @@ def _update_experiment_serialize( _body_params = experiment_update # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -14198,7 +14078,6 @@ def _update_experiment_run_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -14207,7 
+14086,7 @@ def _update_experiment_run_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -14221,7 +14100,8 @@ def _update_experiment_run_serialize( _body_params = experiment_run_update # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -14464,7 +14344,6 @@ def _update_inference_service_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -14473,7 +14352,7 @@ def _update_inference_service_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -14487,7 +14366,8 @@ def _update_inference_service_serialize( _body_params = inference_service_update # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -14730,7 +14610,6 @@ def _update_model_artifact_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -14739,7 +14618,7 @@ def 
_update_model_artifact_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -14753,7 +14632,8 @@ def _update_model_artifact_serialize( _body_params = model_artifact_update # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -14990,7 +14870,6 @@ def _update_model_version_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -14999,7 +14878,7 @@ def _update_model_version_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -15013,7 +14892,8 @@ def _update_model_version_serialize( _body_params = model_version_update # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -15256,7 +15136,6 @@ def _update_registered_model_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -15265,7 +15144,7 @@ def _update_registered_model_serialize( _query_params: 
list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -15279,7 +15158,8 @@ def _update_registered_model_serialize( _body_params = registered_model_update # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -15528,7 +15408,6 @@ def _update_serving_environment_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -15537,7 +15416,7 @@ def _update_serving_environment_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -15551,7 +15430,8 @@ def _update_serving_environment_serialize( _body_params = serving_environment_update # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -15800,7 +15680,6 @@ def _upsert_experiment_run_artifact_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -15809,7 +15688,7 @@ def _upsert_experiment_run_artifact_serialize( _query_params: list[tuple[str, 
str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -15823,7 +15702,8 @@ def _upsert_experiment_run_artifact_serialize( _body_params = artifact # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: @@ -16072,7 +15952,6 @@ def _upsert_model_version_artifact_serialize( _headers, _host_index, ) -> RequestSerialized: - _host = None _collection_formats: dict[str, str] = {} @@ -16081,7 +15960,7 @@ def _upsert_model_version_artifact_serialize( _query_params: list[tuple[str, str]] = [] _header_params: dict[str, Optional[str]] = _headers or {} _form_params: list[tuple[str, str]] = [] - _files: dict[str, Union[str, bytes]] = {} + _files: dict[str, Union[str, bytes, list[str], list[bytes], list[tuple[str, bytes]]]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -16095,7 +15974,8 @@ def _upsert_model_version_artifact_serialize( _body_params = artifact # set the HTTP header `Accept` - _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # set the HTTP header `Content-Type` if _content_type: diff --git a/clients/python/src/mr_openapi/api_client.py b/clients/python/src/mr_openapi/api_client.py index b10fb38e7b..32b411f64f 100644 --- a/clients/python/src/mr_openapi/api_client.py +++ b/clients/python/src/mr_openapi/api_client.py @@ -9,11 +9,13 @@ """ # noqa: E501 import datetime +import decimal 
import json import mimetypes import os import re import tempfile +import uuid from enum import Enum from typing import Optional, Union from urllib.parse import quote @@ -59,6 +61,7 @@ class ApiClient: "bool": bool, "date": datetime.date, "datetime": datetime.datetime, + "decimal": decimal.Decimal, "object": object, } _pool = None @@ -198,7 +201,7 @@ def param_serialize( body = self.sanitize_for_serialization(body) # request url - if _host is None: + if _host is None or self.configuration.ignore_operation_servers: url = self.configuration.host + resource_path else: # use server/host defined in path or operation instead @@ -273,10 +276,7 @@ def response_deserialize( match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type) encoding = match.group(1) if match else "utf-8" response_text = response_data.data.decode(encoding) - if response_type in ["bytearray", "str"]: - return_data = self.__deserialize_primitive(response_text, response_type) - else: - return_data = self.deserialize(response_text, response_type) + return_data = self.deserialize(response_text, response_type, content_type) finally: if not 200 <= response_data.status <= 299: raise ApiException.from_response( @@ -300,6 +300,7 @@ def sanitize_for_serialization(self, obj): If obj is str, int, long, float, bool, return directly. If obj is datetime.datetime, datetime.date convert to string in iso8601 format. + If obj is decimal.Decimal return string representation. If obj is list, sanitize each element in the list. If obj is dict, return the dict. If obj is OpenAPI model, return the properties dict. 
@@ -315,12 +316,16 @@ def sanitize_for_serialization(self, obj): return obj.get_secret_value() if isinstance(obj, self.PRIMITIVE_TYPES): return obj + if isinstance(obj, uuid.UUID): + return str(obj) if isinstance(obj, list): return [self.sanitize_for_serialization(sub_obj) for sub_obj in obj] if isinstance(obj, tuple): return tuple(self.sanitize_for_serialization(sub_obj) for sub_obj in obj) if isinstance(obj, (datetime.datetime, datetime.date)): return obj.isoformat() + if isinstance(obj, decimal.Decimal): + return str(obj) if isinstance(obj, dict): obj_dict = obj @@ -330,27 +335,36 @@ def sanitize_for_serialization(self, obj): # and attributes which value is not None. # Convert attribute name to json key in # model definition for request. - if hasattr(obj, "to_dict") and callable(obj.to_dict): - obj_dict = obj.to_dict() - else: - obj_dict = obj.__dict__ + obj_dict = obj.to_dict() if hasattr(obj, "to_dict") and callable(obj.to_dict) else obj.__dict__ + + if isinstance(obj_dict, list): + # here we handle instances that can either be a list or something else, and only became a real list by calling to_dict() + return self.sanitize_for_serialization(obj_dict) return {key: self.sanitize_for_serialization(val) for key, val in obj_dict.items()} - def deserialize(self, response_text, response_type): + def deserialize(self, response_text: str, response_type: str, content_type: Optional[str]): """Deserializes response into an object. :param response: RESTResponse object to be deserialized. :param response_type: class literal for deserialized object, or string of class name. + :param content_type: content type of response. :return: deserialized object. 
""" # fetch data from response object - try: - data = json.loads(response_text) - except ValueError: + if content_type is None: + try: + data = json.loads(response_text) + except ValueError: + data = response_text + elif re.match(r"^application/(json|[\w!#$&.+\-^_]+\+json)\s*(;|$)", content_type, re.IGNORECASE): + data = "" if response_text == "" else json.loads(response_text) + elif re.match(r"^text\/[a-z.+-]+\s*(;|$)", content_type, re.IGNORECASE): data = response_text + else: + raise ApiException(status=0, reason=f"Unsupported content type: {content_type}") return self.__deserialize(data, response_type) @@ -386,12 +400,14 @@ def __deserialize(self, data, klass): if klass in self.PRIMITIVE_TYPES: return self.__deserialize_primitive(data, klass) - if klass == object: + if klass is object: return self.__deserialize_object(data) - if klass == datetime.date: + if klass is datetime.date: return self.__deserialize_date(data) - if klass == datetime.datetime: + if klass is datetime.datetime: return self.__deserialize_datetime(data) + if klass is decimal.Decimal: + return decimal.Decimal(data) if issubclass(klass, Enum): return self.__deserialize_enum(data, klass) return self.__deserialize_model(data, klass) @@ -446,7 +462,7 @@ def parameters_to_url_query(self, params, collection_formats): if k in collection_formats: collection_format = collection_formats[k] if collection_format == "multi": - new_params.extend((k, str(value)) for value in v) + new_params.extend((k, quote(str(value))) for value in v) else: if collection_format == "ssv": delimiter = " " @@ -462,7 +478,10 @@ def parameters_to_url_query(self, params, collection_formats): return "&".join(["=".join(map(str, item)) for item in new_params]) - def files_parameters(self, files: dict[str, Union[str, bytes]]): + def files_parameters( + self, + files: dict[str, Union[str, bytes, list[str], list[bytes], tuple[str, bytes]]], + ): """Builds form parameters. :param files: File parameters. 
@@ -477,6 +496,12 @@ def files_parameters(self, files: dict[str, Union[str, bytes]]): elif isinstance(v, bytes): filename = k filedata = v + elif isinstance(v, tuple): + filename, filedata = v + elif isinstance(v, list): + for file_param in v: + params.extend(self.files_parameters({k: file_param})) + continue else: msg = "Unsupported file value" raise ValueError(msg) diff --git a/clients/python/src/mr_openapi/configuration.py b/clients/python/src/mr_openapi/configuration.py index be890bb13a..7adf45b52d 100644 --- a/clients/python/src/mr_openapi/configuration.py +++ b/clients/python/src/mr_openapi/configuration.py @@ -13,7 +13,9 @@ import logging import sys from logging import FileHandler -from typing import Optional +from typing import Any, ClassVar, Literal, Optional, TypedDict, Union + +from typing_extensions import NotRequired, Self JSON_SCHEMA_VALIDATION_KEYWORDS = { "multipleOf", @@ -28,11 +30,109 @@ "minItems", } +ServerVariablesT = dict[str, str] + +GenericAuthSetting = TypedDict( + "GenericAuthSetting", + { + "type": str, + "in": str, + "key": str, + "value": str, + }, +) + + +OAuth2AuthSetting = TypedDict( + "OAuth2AuthSetting", + { + "type": Literal["oauth2"], + "in": Literal["header"], + "key": Literal["Authorization"], + "value": str, + }, +) + + +APIKeyAuthSetting = TypedDict( + "APIKeyAuthSetting", + { + "type": Literal["api_key"], + "in": str, + "key": str, + "value": Optional[str], + }, +) + + +BasicAuthSetting = TypedDict( + "BasicAuthSetting", + { + "type": Literal["basic"], + "in": Literal["header"], + "key": Literal["Authorization"], + "value": Optional[str], + }, +) + + +BearerFormatAuthSetting = TypedDict( + "BearerFormatAuthSetting", + { + "type": Literal["bearer"], + "in": Literal["header"], + "format": Literal["JWT"], + "key": Literal["Authorization"], + "value": str, + }, +) + + +BearerAuthSetting = TypedDict( + "BearerAuthSetting", + { + "type": Literal["bearer"], + "in": Literal["header"], + "key": Literal["Authorization"], + "value": 
str, + }, +) + + +HTTPSignatureAuthSetting = TypedDict( + "HTTPSignatureAuthSetting", + { + "type": Literal["http-signature"], + "in": Literal["header"], + "key": Literal["Authorization"], + "value": None, + }, +) + + +class AuthSettings(TypedDict, total=False): + Bearer: BearerFormatAuthSetting + + +class HostSettingVariable(TypedDict): + description: str + default_value: str + enum_values: list[str] + + +class HostSetting(TypedDict): + url: str + description: str + variables: NotRequired[dict[str, HostSettingVariable]] + class Configuration: """This class contains various settings of the API client. :param host: Base url. + :param ignore_operation_servers + Boolean to ignore operation servers for the API client. + Config will use `host` as the base url regardless of the operation servers. :param api_key: Dict to store API key(s). Each entry in the dict specifies an API key. The dict key is the name of the security scheme in the OAS specification. @@ -58,26 +158,34 @@ class Configuration: :param verify_ssl: bool - Whether to verify the SSL certificate when making API requests to an HTTPS server. Set to False to disable verification, default=True. + :param retries: Number of retries for API requests. + :param ca_cert_data: verify the peer using concatenated CA certificate data + in PEM (str) or DER (bytes) format. 
:Example: """ - _default = None + _default: ClassVar[Optional[Self]] = None def __init__( self, - host=None, - api_key=None, - api_key_prefix=None, - username=None, - password=None, - access_token=None, - server_index=None, - server_variables=None, - server_operation_index=None, - server_operation_variables=None, - ssl_ca_cert=None, - verify_ssl=True, + host: Optional[str] = None, + api_key: Optional[dict[str, str]] = None, + api_key_prefix: Optional[dict[str, str]] = None, + username: Optional[str] = None, + password: Optional[str] = None, + access_token: Optional[str] = None, + server_index: Optional[int] = None, + server_variables: Optional[ServerVariablesT] = None, + server_operation_index: Optional[dict[int, int]] = None, + server_operation_variables: Optional[dict[int, ServerVariablesT]] = None, + ignore_operation_servers: bool = False, + ssl_ca_cert: Optional[str] = None, + verify_ssl: bool = True, + retries: Optional[int] = None, + ca_cert_data: Optional[Union[str, bytes]] = None, + *, + debug: Optional[bool] = None, ) -> None: """Constructor.""" self._base_path = "https://localhost:8080" if host is None else host @@ -91,6 +199,9 @@ def __init__( self.server_operation_variables = server_operation_variables or {} """Default server variables """ + self.ignore_operation_servers = ignore_operation_servers + """Ignore operation servers + """ self.temp_folder_path = None """Temp file folder for downloading files """ @@ -133,7 +244,10 @@ def __init__( self.logger_file = None """Debug file location """ - self.debug = False + if debug is not None: + self.debug = debug + else: + self.__debug = False """Debug switch """ @@ -145,6 +259,10 @@ def __init__( self.ssl_ca_cert = ssl_ca_cert """Set this to customize the certificate file to verify the peer. """ + self.ca_cert_data = ca_cert_data + """Set this to verify the peer using PEM (str) or DER (bytes) + certificate data. 
+ """ self.cert_file = None """client certificate file """ @@ -173,7 +291,7 @@ def __init__( self.safe_chars_for_path_param = "" """Safe chars for path_param """ - self.retries = None + self.retries = retries """Adding retries to override urllib3 default value 3 """ # Enable client side validation @@ -191,7 +309,7 @@ def __init__( """date format """ - def __deepcopy__(self, memo): + def __deepcopy__(self, memo: dict[int, Any]) -> Self: cls = self.__class__ result = cls.__new__(cls) memo[id(self)] = result @@ -205,11 +323,11 @@ def __deepcopy__(self, memo): result.debug = self.debug return result - def __setattr__(self, name, value): + def __setattr__(self, name: str, value: Any) -> None: object.__setattr__(self, name, value) @classmethod - def set_default(cls, default): + def set_default(cls, default: Optional[Self]) -> None: """Set default instance of configuration. It stores default configuration, which can be @@ -220,7 +338,7 @@ def set_default(cls, default): cls._default = default @classmethod - def get_default_copy(cls): + def get_default_copy(cls) -> Self: """Deprecated. Please use `get_default` instead. Deprecated. Please use `get_default` instead. @@ -230,7 +348,7 @@ def get_default_copy(cls): return cls.get_default() @classmethod - def get_default(cls): + def get_default(cls) -> Self: """Return the default configuration. This method returns newly created, based on default constructor, @@ -240,11 +358,11 @@ def get_default(cls): :return: The configuration object. """ if cls._default is None: - cls._default = Configuration() + cls._default = cls() return cls._default @property - def logger_file(self): + def logger_file(self) -> Optional[str]: """The logger file. If the logger_file is None, then add stream handler and remove file @@ -256,7 +374,7 @@ def logger_file(self): return self.__logger_file @logger_file.setter - def logger_file(self, value): + def logger_file(self, value: Optional[str]) -> None: """The logger file. 
If the logger_file is None, then add stream handler and remove file @@ -275,7 +393,7 @@ def logger_file(self, value): logger.addHandler(self.logger_file_handler) @property - def debug(self): + def debug(self) -> bool: """Debug status. :param value: The debug status, True or False. @@ -284,7 +402,7 @@ def debug(self): return self.__debug @debug.setter - def debug(self, value): + def debug(self, value: bool) -> None: """Debug status. :param value: The debug status, True or False. @@ -306,7 +424,7 @@ def debug(self, value): httplib.HTTPConnection.debuglevel = 0 @property - def logger_format(self): + def logger_format(self) -> str: """The logger format. The logger_formatter will be updated when sets logger_format. @@ -317,7 +435,7 @@ def logger_format(self): return self.__logger_format @logger_format.setter - def logger_format(self, value): + def logger_format(self, value: str) -> None: """The logger format. The logger_formatter will be updated when sets logger_format. @@ -328,7 +446,7 @@ def logger_format(self, value): self.__logger_format = value self.logger_formatter = logging.Formatter(self.__logger_format) - def get_api_key_with_prefix(self, identifier, alias=None): + def get_api_key_with_prefix(self, identifier: str, alias: Optional[str] = None) -> Optional[str]: """Gets API key (with prefix if set). :param identifier: The identifier of apiKey. @@ -343,14 +461,15 @@ def get_api_key_with_prefix(self, identifier, alias=None): if prefix: return f"{prefix} {key}" return key + return None - def auth_settings(self): + def auth_settings(self) -> AuthSettings: """Gets Auth Settings dict for api client. :return: The Auth Settings information dict. """ - auth = {} + auth: AuthSettings = {} if self.access_token is not None: auth["Bearer"] = { "type": "bearer", @@ -361,7 +480,7 @@ def auth_settings(self): } return auth - def to_debug_report(self): + def to_debug_report(self) -> str: """Gets the essential information for debugging. :return: The report for debugging. 
@@ -374,7 +493,7 @@ def to_debug_report(self): "SDK Package Version: 1.0.0" ) - def get_host_settings(self): + def get_host_settings(self) -> list[HostSetting]: """Gets an array of host settings. :return: An array of host settings @@ -390,7 +509,12 @@ def get_host_settings(self): }, ] - def get_host_from_settings(self, index, variables=None, servers=None): + def get_host_from_settings( + self, + index: Optional[int], + variables: Optional[ServerVariablesT] = None, + servers: Optional[list[HostSetting]] = None, + ) -> str: """Gets host URL based on the index and variables :param index: array index of the host settings :param variables: hash of variable and the corresponding value @@ -406,7 +530,7 @@ def get_host_from_settings(self, index, variables=None, servers=None): try: server = servers[index] except IndexError: - msg = f"Invalid index {index} when selecting the host settings. " f"Must be less than {len(servers)}" + msg = f"Invalid index {index} when selecting the host settings. Must be less than {len(servers)}" raise ValueError(msg) url = server["url"] @@ -416,7 +540,7 @@ def get_host_from_settings(self, index, variables=None, servers=None): used_value = variables.get(variable_name, variable["default_value"]) if "enum_values" in variable and used_value not in variable["enum_values"]: - msg = "The variable `{}` in the host URL has invalid value " "{}. Must be {}.".format( + msg = "The variable `{}` in the host URL has invalid value {}. 
Must be {}.".format( variable_name, variables[variable_name], variable["enum_values"] ) raise ValueError(msg) @@ -426,12 +550,12 @@ def get_host_from_settings(self, index, variables=None, servers=None): return url @property - def host(self): + def host(self) -> str: """Return generated host.""" return self.get_host_from_settings(self.server_index, variables=self.server_variables) @host.setter - def host(self, value): + def host(self, value: str) -> None: """Fix base path.""" self._base_path = value self.server_index = None diff --git a/clients/python/src/mr_openapi/exceptions.py b/clients/python/src/mr_openapi/exceptions.py index 9a3f720ab9..b036e9ff9f 100644 --- a/clients/python/src/mr_openapi/exceptions.py +++ b/clients/python/src/mr_openapi/exceptions.py @@ -97,7 +97,6 @@ def __init__(self, msg, path_to_item=None) -> None: class ApiException(OpenApiException): - def __init__( self, status=None, @@ -143,13 +142,20 @@ def from_response( if http_resp.status == 404: raise NotFoundException(http_resp=http_resp, body=body, data=data) + # Added new conditions for 409 and 422 + if http_resp.status == 409: + raise ConflictException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 422: + raise UnprocessableEntityException(http_resp=http_resp, body=body, data=data) + if 500 <= http_resp.status <= 599: raise ServiceException(http_resp=http_resp, body=body, data=data) raise ApiException(http_resp=http_resp, body=body, data=data) def __str__(self): """Custom error messages for exception.""" - error_message = f"({self.status})\n" f"Reason: {self.reason}\n" + error_message = f"({self.status})\nReason: {self.reason}\n" if self.headers: error_message += f"HTTP response headers: {self.headers}\n" @@ -179,6 +185,18 @@ class ServiceException(ApiException): pass +class ConflictException(ApiException): + """Exception for HTTP 409 Conflict.""" + + pass + + +class UnprocessableEntityException(ApiException): + """Exception for HTTP 422 Unprocessable Entity.""" + + 
pass + + def render_path(path_to_item): """Returns a string representation of a path.""" result = "" diff --git a/clients/python/src/mr_openapi/models/__init__.py b/clients/python/src/mr_openapi/models/__init__.py index 9d8a578f74..dc80239897 100644 --- a/clients/python/src/mr_openapi/models/__init__.py +++ b/clients/python/src/mr_openapi/models/__init__.py @@ -12,7 +12,6 @@ Do not edit the class manually. """ # noqa: E501 - # import models into model package from mr_openapi.models.artifact import Artifact from mr_openapi.models.artifact_create import ArtifactCreate diff --git a/clients/python/src/mr_openapi/models/artifact.py b/clients/python/src/mr_openapi/models/artifact.py index 3dce4dfa82..67eec6d4bb 100644 --- a/clients/python/src/mr_openapi/models/artifact.py +++ b/clients/python/src/mr_openapi/models/artifact.py @@ -14,12 +14,7 @@ import pprint from typing import Any -from pydantic import ( - BaseModel, - ConfigDict, - ValidationError, - field_validator, -) +from pydantic import BaseModel, ConfigDict, ValidationError, field_validator from typing_extensions import Self from mr_openapi.models.data_set import DataSet @@ -152,31 +147,6 @@ def from_json(cls, json_str: str) -> Self: instance.actual_instance = Parameter.from_json(json_str) return instance - # check if data type is `DataSet` - if _data_type == "DataSet": - instance.actual_instance = DataSet.from_json(json_str) - return instance - - # check if data type is `DocArtifact` - if _data_type == "DocArtifact": - instance.actual_instance = DocArtifact.from_json(json_str) - return instance - - # check if data type is `Metric` - if _data_type == "Metric": - instance.actual_instance = Metric.from_json(json_str) - return instance - - # check if data type is `ModelArtifact` - if _data_type == "ModelArtifact": - instance.actual_instance = ModelArtifact.from_json(json_str) - return instance - - # check if data type is `Parameter` - if _data_type == "Parameter": - instance.actual_instance = 
Parameter.from_json(json_str) - return instance - # deserialize data into ModelArtifact try: instance.actual_instance = ModelArtifact.from_json(json_str) diff --git a/clients/python/src/mr_openapi/models/artifact_create.py b/clients/python/src/mr_openapi/models/artifact_create.py index 3650d2a7dc..7e19e00cca 100644 --- a/clients/python/src/mr_openapi/models/artifact_create.py +++ b/clients/python/src/mr_openapi/models/artifact_create.py @@ -14,12 +14,7 @@ import pprint from typing import Any -from pydantic import ( - BaseModel, - ConfigDict, - ValidationError, - field_validator, -) +from pydantic import BaseModel, ConfigDict, ValidationError, field_validator from typing_extensions import Self from mr_openapi.models.data_set_create import DataSetCreate @@ -166,31 +161,6 @@ def from_json(cls, json_str: str) -> Self: instance.actual_instance = ParameterCreate.from_json(json_str) return instance - # check if data type is `DataSetCreate` - if _data_type == "DataSetCreate": - instance.actual_instance = DataSetCreate.from_json(json_str) - return instance - - # check if data type is `DocArtifactCreate` - if _data_type == "DocArtifactCreate": - instance.actual_instance = DocArtifactCreate.from_json(json_str) - return instance - - # check if data type is `MetricCreate` - if _data_type == "MetricCreate": - instance.actual_instance = MetricCreate.from_json(json_str) - return instance - - # check if data type is `ModelArtifactCreate` - if _data_type == "ModelArtifactCreate": - instance.actual_instance = ModelArtifactCreate.from_json(json_str) - return instance - - # check if data type is `ParameterCreate` - if _data_type == "ParameterCreate": - instance.actual_instance = ParameterCreate.from_json(json_str) - return instance - # deserialize data into ModelArtifactCreate try: instance.actual_instance = ModelArtifactCreate.from_json(json_str) diff --git a/clients/python/src/mr_openapi/models/artifact_list.py b/clients/python/src/mr_openapi/models/artifact_list.py index 
af93478aa2..1468901d47 100644 --- a/clients/python/src/mr_openapi/models/artifact_list.py +++ b/clients/python/src/mr_openapi/models/artifact_list.py @@ -72,9 +72,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in items (list) _items = [] if self.items: - for _item in self.items: - if _item: - _items.append(_item.to_dict()) + for _item_items in self.items: + if _item_items: + _items.append(_item_items.to_dict()) _dict["items"] = _items return _dict @@ -92,8 +92,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "nextPageToken": obj.get("nextPageToken"), "pageSize": obj.get("pageSize"), "size": obj.get("size"), - "items": ( - [Artifact.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None - ), + "items": [Artifact.from_dict(_item) for _item in obj["items"]] + if obj.get("items") is not None + else None, } ) diff --git a/clients/python/src/mr_openapi/models/artifact_update.py b/clients/python/src/mr_openapi/models/artifact_update.py index b63e67d190..2008322248 100644 --- a/clients/python/src/mr_openapi/models/artifact_update.py +++ b/clients/python/src/mr_openapi/models/artifact_update.py @@ -14,12 +14,7 @@ import pprint from typing import Any -from pydantic import ( - BaseModel, - ConfigDict, - ValidationError, - field_validator, -) +from pydantic import BaseModel, ConfigDict, ValidationError, field_validator from typing_extensions import Self from mr_openapi.models.data_set_update import DataSetUpdate @@ -166,31 +161,6 @@ def from_json(cls, json_str: str) -> Self: instance.actual_instance = ParameterUpdate.from_json(json_str) return instance - # check if data type is `DataSetUpdate` - if _data_type == "DataSetUpdate": - instance.actual_instance = DataSetUpdate.from_json(json_str) - return instance - - # check if data type is `DocArtifactUpdate` - if _data_type == "DocArtifactUpdate": - instance.actual_instance = 
DocArtifactUpdate.from_json(json_str) - return instance - - # check if data type is `MetricUpdate` - if _data_type == "MetricUpdate": - instance.actual_instance = MetricUpdate.from_json(json_str) - return instance - - # check if data type is `ModelArtifactUpdate` - if _data_type == "ModelArtifactUpdate": - instance.actual_instance = ModelArtifactUpdate.from_json(json_str) - return instance - - # check if data type is `ParameterUpdate` - if _data_type == "ParameterUpdate": - instance.actual_instance = ParameterUpdate.from_json(json_str) - return instance - # deserialize data into ModelArtifactUpdate try: instance.actual_instance = ModelArtifactUpdate.from_json(json_str) diff --git a/clients/python/src/mr_openapi/models/base_artifact.py b/clients/python/src/mr_openapi/models/base_artifact.py index dbfbf7f502..03d7d7dfc2 100644 --- a/clients/python/src/mr_openapi/models/base_artifact.py +++ b/clients/python/src/mr_openapi/models/base_artifact.py @@ -115,9 +115,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -132,11 +132,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else 
None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), diff --git a/clients/python/src/mr_openapi/models/base_model.py b/clients/python/src/mr_openapi/models/base_model.py index eb1bd3b612..92f8759358 100644 --- a/clients/python/src/mr_openapi/models/base_model.py +++ b/clients/python/src/mr_openapi/models/base_model.py @@ -101,9 +101,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -128,10 +128,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "license": obj.get("license"), "licenseLink": obj.get("licenseLink"), "libraryName": obj.get("libraryName"), - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, } ) diff --git a/clients/python/src/mr_openapi/models/base_resource.py b/clients/python/src/mr_openapi/models/base_resource.py index b6e8ae04f9..68162e0b19 100644 --- a/clients/python/src/mr_openapi/models/base_resource.py +++ b/clients/python/src/mr_openapi/models/base_resource.py @@ -105,9 +105,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for 
_key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -124,11 +124,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: { "createTimeSinceEpoch": obj.get("createTimeSinceEpoch"), "lastUpdateTimeSinceEpoch": obj.get("lastUpdateTimeSinceEpoch"), - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), diff --git a/clients/python/src/mr_openapi/models/base_resource_create.py b/clients/python/src/mr_openapi/models/base_resource_create.py index 84123a00e3..c703d45903 100644 --- a/clients/python/src/mr_openapi/models/base_resource_create.py +++ b/clients/python/src/mr_openapi/models/base_resource_create.py @@ -81,9 +81,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -98,11 +98,9 @@ def from_dict(cls, obj: dict[str, Any] | None) 
-> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), diff --git a/clients/python/src/mr_openapi/models/base_resource_update.py b/clients/python/src/mr_openapi/models/base_resource_update.py index b690d8575c..b0608b6cd2 100644 --- a/clients/python/src/mr_openapi/models/base_resource_update.py +++ b/clients/python/src/mr_openapi/models/base_resource_update.py @@ -77,9 +77,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -94,11 +94,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), } diff --git a/clients/python/src/mr_openapi/models/data_set.py 
b/clients/python/src/mr_openapi/models/data_set.py index 02d24ae1fa..f5953bb2b0 100644 --- a/clients/python/src/mr_openapi/models/data_set.py +++ b/clients/python/src/mr_openapi/models/data_set.py @@ -77,7 +77,7 @@ class DataSet(BaseModel): default=None, description="The uniform resource identifier of the physical dataset. May be empty if there is no physical dataset.", ) - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -143,9 +143,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -160,11 +160,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), @@ -180,6 +178,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "schema": obj.get("schema"), "profile": obj.get("profile"), "uri": obj.get("uri"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git 
a/clients/python/src/mr_openapi/models/data_set_create.py b/clients/python/src/mr_openapi/models/data_set_create.py index e661dc2e65..3f4f476809 100644 --- a/clients/python/src/mr_openapi/models/data_set_create.py +++ b/clients/python/src/mr_openapi/models/data_set_create.py @@ -58,7 +58,7 @@ class DataSetCreate(BaseModel): default=None, description="The uniform resource identifier of the physical dataset. May be empty if there is no physical dataset.", ) - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -114,9 +114,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -131,11 +131,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), @@ -146,6 +144,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "schema": obj.get("schema"), "profile": obj.get("profile"), "uri": obj.get("uri"), - "state": obj.get("state"), + "state": obj.get("state") if 
obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/data_set_update.py b/clients/python/src/mr_openapi/models/data_set_update.py index ba8d431b2b..c13a5f70ff 100644 --- a/clients/python/src/mr_openapi/models/data_set_update.py +++ b/clients/python/src/mr_openapi/models/data_set_update.py @@ -54,7 +54,7 @@ class DataSetUpdate(BaseModel): default=None, description="The uniform resource identifier of the physical dataset. May be empty if there is no physical dataset.", ) - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -109,9 +109,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -126,11 +126,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "artifactType": obj.get("artifactType") if obj.get("artifactType") is not None else "dataset-artifact", @@ -140,6 +138,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | 
None: "schema": obj.get("schema"), "profile": obj.get("profile"), "uri": obj.get("uri"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/doc_artifact.py b/clients/python/src/mr_openapi/models/doc_artifact.py index 2b93e3f3fb..55ee3d363a 100644 --- a/clients/python/src/mr_openapi/models/doc_artifact.py +++ b/clients/python/src/mr_openapi/models/doc_artifact.py @@ -64,7 +64,7 @@ class DocArtifact(BaseModel): default=None, description="The uniform resource identifier of the physical artifact. May be empty if there is no physical artifact.", ) - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -125,9 +125,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -143,11 +143,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { "artifactType": "doc-artifact", - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": 
obj.get("name"), @@ -158,6 +156,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "experimentRunId": obj.get("experimentRunId"), "artifactType": obj.get("artifactType") if obj.get("artifactType") is not None else "doc-artifact", "uri": obj.get("uri"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/doc_artifact_create.py b/clients/python/src/mr_openapi/models/doc_artifact_create.py index ff24f41790..cb004ad7b5 100644 --- a/clients/python/src/mr_openapi/models/doc_artifact_create.py +++ b/clients/python/src/mr_openapi/models/doc_artifact_create.py @@ -45,7 +45,7 @@ class DocArtifactCreate(BaseModel): default=None, description="The uniform resource identifier of the physical artifact. May be empty if there is no physical artifact.", ) - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -96,9 +96,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -113,16 +113,14 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in 
obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), "artifactType": obj.get("artifactType") if obj.get("artifactType") is not None else "doc-artifact", "uri": obj.get("uri"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/doc_artifact_update.py b/clients/python/src/mr_openapi/models/doc_artifact_update.py index ef86ee4516..0a1fa9e518 100644 --- a/clients/python/src/mr_openapi/models/doc_artifact_update.py +++ b/clients/python/src/mr_openapi/models/doc_artifact_update.py @@ -41,7 +41,7 @@ class DocArtifactUpdate(BaseModel): default=None, description="The uniform resource identifier of the physical artifact. May be empty if there is no physical artifact.", ) - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -91,9 +91,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -108,15 +108,13 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": 
{_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "artifactType": obj.get("artifactType") if obj.get("artifactType") is not None else "doc-artifact", "uri": obj.get("uri"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/experiment.py b/clients/python/src/mr_openapi/models/experiment.py index 7d4fe77d8e..2b6c5cd3a0 100644 --- a/clients/python/src/mr_openapi/models/experiment.py +++ b/clients/python/src/mr_openapi/models/experiment.py @@ -51,7 +51,7 @@ class Experiment(BaseModel): alias="lastUpdateTimeSinceEpoch", ) owner: StrictStr | None = None - state: ExperimentState | None = None + state: ExperimentState | None = ExperimentState.LIVE __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -109,9 +109,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -126,11 +126,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if 
obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), @@ -138,6 +136,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "createTimeSinceEpoch": obj.get("createTimeSinceEpoch"), "lastUpdateTimeSinceEpoch": obj.get("lastUpdateTimeSinceEpoch"), "owner": obj.get("owner"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ExperimentState.LIVE, } ) diff --git a/clients/python/src/mr_openapi/models/experiment_create.py b/clients/python/src/mr_openapi/models/experiment_create.py index 8d71c82e10..c985d77594 100644 --- a/clients/python/src/mr_openapi/models/experiment_create.py +++ b/clients/python/src/mr_openapi/models/experiment_create.py @@ -40,7 +40,7 @@ class ExperimentCreate(BaseModel): description="The client provided name of the experiment. It must be unique among all the Experiments of the same type within a Model Registry instance and cannot be changed once set." 
) owner: StrictStr | None = None - state: ExperimentState | None = None + state: ExperimentState | None = ExperimentState.LIVE __properties: ClassVar[list[str]] = ["customProperties", "description", "externalId", "name", "owner", "state"] model_config = ConfigDict( @@ -83,9 +83,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -100,15 +100,13 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), "owner": obj.get("owner"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ExperimentState.LIVE, } ) diff --git a/clients/python/src/mr_openapi/models/experiment_list.py b/clients/python/src/mr_openapi/models/experiment_list.py index 1d20116051..aadd98f75d 100644 --- a/clients/python/src/mr_openapi/models/experiment_list.py +++ b/clients/python/src/mr_openapi/models/experiment_list.py @@ -72,9 +72,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in items (list) 
_items = [] if self.items: - for _item in self.items: - if _item: - _items.append(_item.to_dict()) + for _item_items in self.items: + if _item_items: + _items.append(_item_items.to_dict()) _dict["items"] = _items return _dict @@ -92,8 +92,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "nextPageToken": obj.get("nextPageToken"), "pageSize": obj.get("pageSize"), "size": obj.get("size"), - "items": ( - [Experiment.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None - ), + "items": [Experiment.from_dict(_item) for _item in obj["items"]] + if obj.get("items") is not None + else None, } ) diff --git a/clients/python/src/mr_openapi/models/experiment_run.py b/clients/python/src/mr_openapi/models/experiment_run.py index 2f8df92ae4..5308e1f719 100644 --- a/clients/python/src/mr_openapi/models/experiment_run.py +++ b/clients/python/src/mr_openapi/models/experiment_run.py @@ -46,8 +46,8 @@ class ExperimentRun(BaseModel): description="End time of the actual experiment run in milliseconds since epoch. 
Different from lastUpdateTimeSinceEpoch, which is registry resource update time.", alias="endTimeSinceEpoch", ) - status: ExperimentRunStatus | None = None - state: ExperimentRunState | None = None + status: ExperimentRunStatus | None = ExperimentRunStatus.RUNNING + state: ExperimentRunState | None = ExperimentRunState.LIVE owner: StrictStr | None = Field(default=None, description="Experiment run owner id or name.") experiment_id: StrictStr = Field( description="ID of the `Experiment` to which this experiment run belongs.", alias="experimentId" @@ -129,9 +129,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -146,17 +146,15 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), "endTimeSinceEpoch": obj.get("endTimeSinceEpoch"), - "status": obj.get("status"), - "state": obj.get("state"), + "status": obj.get("status") if obj.get("status") is not None else ExperimentRunStatus.RUNNING, + "state": obj.get("state") if obj.get("state") is not None else ExperimentRunState.LIVE, 
"owner": obj.get("owner"), "experimentId": obj.get("experimentId"), "startTimeSinceEpoch": obj.get("startTimeSinceEpoch"), diff --git a/clients/python/src/mr_openapi/models/experiment_run_create.py b/clients/python/src/mr_openapi/models/experiment_run_create.py index 2b051fcf4c..32b8f32322 100644 --- a/clients/python/src/mr_openapi/models/experiment_run_create.py +++ b/clients/python/src/mr_openapi/models/experiment_run_create.py @@ -46,8 +46,8 @@ class ExperimentRunCreate(BaseModel): description="End time of the actual experiment run in milliseconds since epoch. Different from lastUpdateTimeSinceEpoch, which is registry resource update time.", alias="endTimeSinceEpoch", ) - status: ExperimentRunStatus | None = None - state: ExperimentRunState | None = None + status: ExperimentRunStatus | None = ExperimentRunStatus.RUNNING + state: ExperimentRunState | None = ExperimentRunState.LIVE owner: StrictStr | None = Field(default=None, description="Experiment run owner id or name.") experiment_id: StrictStr = Field( description="ID of the `Experiment` to which this experiment run belongs.", alias="experimentId" @@ -110,9 +110,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -127,17 +127,15 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else 
None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), "endTimeSinceEpoch": obj.get("endTimeSinceEpoch"), - "status": obj.get("status"), - "state": obj.get("state"), + "status": obj.get("status") if obj.get("status") is not None else ExperimentRunStatus.RUNNING, + "state": obj.get("state") if obj.get("state") is not None else ExperimentRunState.LIVE, "owner": obj.get("owner"), "experimentId": obj.get("experimentId"), "startTimeSinceEpoch": obj.get("startTimeSinceEpoch"), diff --git a/clients/python/src/mr_openapi/models/experiment_run_list.py b/clients/python/src/mr_openapi/models/experiment_run_list.py index 9249bc76b4..3fb83d158e 100644 --- a/clients/python/src/mr_openapi/models/experiment_run_list.py +++ b/clients/python/src/mr_openapi/models/experiment_run_list.py @@ -72,9 +72,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in items (list) _items = [] if self.items: - for _item in self.items: - if _item: - _items.append(_item.to_dict()) + for _item_items in self.items: + if _item_items: + _items.append(_item_items.to_dict()) _dict["items"] = _items return _dict @@ -92,10 +92,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "nextPageToken": obj.get("nextPageToken"), "pageSize": obj.get("pageSize"), "size": obj.get("size"), - "items": ( - [ExperimentRun.from_dict(_item) for _item in obj["items"]] - if obj.get("items") is not None - else None - ), + "items": [ExperimentRun.from_dict(_item) for _item in obj["items"]] + if obj.get("items") is not None + else None, } ) diff --git a/clients/python/src/mr_openapi/models/experiment_run_update.py b/clients/python/src/mr_openapi/models/experiment_run_update.py index b3da1d9ebd..e54b187a0c 100644 --- 
a/clients/python/src/mr_openapi/models/experiment_run_update.py +++ b/clients/python/src/mr_openapi/models/experiment_run_update.py @@ -42,8 +42,8 @@ class ExperimentRunUpdate(BaseModel): description="End time of the actual experiment run in milliseconds since epoch. Different from lastUpdateTimeSinceEpoch, which is registry resource update time.", alias="endTimeSinceEpoch", ) - status: ExperimentRunStatus | None = None - state: ExperimentRunState | None = None + status: ExperimentRunStatus | None = ExperimentRunStatus.RUNNING + state: ExperimentRunState | None = ExperimentRunState.LIVE owner: StrictStr | None = Field(default=None, description="Experiment run owner id or name.") __properties: ClassVar[list[str]] = [ "customProperties", @@ -95,9 +95,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -112,16 +112,14 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "endTimeSinceEpoch": obj.get("endTimeSinceEpoch"), - "status": obj.get("status"), - "state": obj.get("state"), + "status": 
obj.get("status") if obj.get("status") is not None else ExperimentRunStatus.RUNNING, + "state": obj.get("state") if obj.get("state") is not None else ExperimentRunState.LIVE, "owner": obj.get("owner"), } ) diff --git a/clients/python/src/mr_openapi/models/experiment_update.py b/clients/python/src/mr_openapi/models/experiment_update.py index 175c834cbf..ebbc0fa3ca 100644 --- a/clients/python/src/mr_openapi/models/experiment_update.py +++ b/clients/python/src/mr_openapi/models/experiment_update.py @@ -37,7 +37,7 @@ class ExperimentUpdate(BaseModel): alias="externalId", ) owner: StrictStr | None = None - state: ExperimentState | None = None + state: ExperimentState | None = ExperimentState.LIVE __properties: ClassVar[list[str]] = ["customProperties", "description", "externalId", "owner", "state"] model_config = ConfigDict( @@ -80,9 +80,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -97,14 +97,12 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "owner": obj.get("owner"), - "state": 
obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ExperimentState.LIVE, } ) diff --git a/clients/python/src/mr_openapi/models/inference_service.py b/clients/python/src/mr_openapi/models/inference_service.py index 15ed614688..29ada11bc1 100644 --- a/clients/python/src/mr_openapi/models/inference_service.py +++ b/clients/python/src/mr_openapi/models/inference_service.py @@ -57,7 +57,7 @@ class InferenceService(BaseModel): alias="modelVersionId", ) runtime: StrictStr | None = Field(default=None, description="Model runtime.") - desired_state: InferenceServiceState | None = Field(default=None, alias="desiredState") + desired_state: InferenceServiceState | None = Field(default=InferenceServiceState.DEPLOYED, alias="desiredState") registered_model_id: StrictStr = Field( description="ID of the `RegisteredModel` to serve.", alias="registeredModelId" ) @@ -125,9 +125,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -142,11 +142,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": 
obj.get("externalId"), "name": obj.get("name"), @@ -155,7 +153,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "lastUpdateTimeSinceEpoch": obj.get("lastUpdateTimeSinceEpoch"), "modelVersionId": obj.get("modelVersionId"), "runtime": obj.get("runtime"), - "desiredState": obj.get("desiredState"), + "desiredState": obj.get("desiredState") + if obj.get("desiredState") is not None + else InferenceServiceState.DEPLOYED, "registeredModelId": obj.get("registeredModelId"), "servingEnvironmentId": obj.get("servingEnvironmentId"), } diff --git a/clients/python/src/mr_openapi/models/inference_service_create.py b/clients/python/src/mr_openapi/models/inference_service_create.py index 7ef1f8332f..4f325dd8bb 100644 --- a/clients/python/src/mr_openapi/models/inference_service_create.py +++ b/clients/python/src/mr_openapi/models/inference_service_create.py @@ -46,7 +46,7 @@ class InferenceServiceCreate(BaseModel): alias="modelVersionId", ) runtime: StrictStr | None = Field(default=None, description="Model runtime.") - desired_state: InferenceServiceState | None = Field(default=None, alias="desiredState") + desired_state: InferenceServiceState | None = Field(default=InferenceServiceState.DEPLOYED, alias="desiredState") registered_model_id: StrictStr = Field( description="ID of the `RegisteredModel` to serve.", alias="registeredModelId" ) @@ -106,9 +106,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -123,17 +123,17 @@ def 
from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), "modelVersionId": obj.get("modelVersionId"), "runtime": obj.get("runtime"), - "desiredState": obj.get("desiredState"), + "desiredState": obj.get("desiredState") + if obj.get("desiredState") is not None + else InferenceServiceState.DEPLOYED, "registeredModelId": obj.get("registeredModelId"), "servingEnvironmentId": obj.get("servingEnvironmentId"), } diff --git a/clients/python/src/mr_openapi/models/inference_service_list.py b/clients/python/src/mr_openapi/models/inference_service_list.py index 7d629b6754..d5ab2ba638 100644 --- a/clients/python/src/mr_openapi/models/inference_service_list.py +++ b/clients/python/src/mr_openapi/models/inference_service_list.py @@ -72,9 +72,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in items (list) _items = [] if self.items: - for _item in self.items: - if _item: - _items.append(_item.to_dict()) + for _item_items in self.items: + if _item_items: + _items.append(_item_items.to_dict()) _dict["items"] = _items return _dict @@ -92,10 +92,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "nextPageToken": obj.get("nextPageToken"), "pageSize": obj.get("pageSize"), "size": obj.get("size"), - "items": ( - [InferenceService.from_dict(_item) for _item in obj["items"]] - if obj.get("items") is not None - else None - ), + "items": [InferenceService.from_dict(_item) for _item in obj["items"]] + if obj.get("items") is not None + else None, } ) diff 
--git a/clients/python/src/mr_openapi/models/inference_service_update.py b/clients/python/src/mr_openapi/models/inference_service_update.py index 3de9961c7a..f3f2e75276 100644 --- a/clients/python/src/mr_openapi/models/inference_service_update.py +++ b/clients/python/src/mr_openapi/models/inference_service_update.py @@ -42,7 +42,7 @@ class InferenceServiceUpdate(BaseModel): alias="modelVersionId", ) runtime: StrictStr | None = Field(default=None, description="Model runtime.") - desired_state: InferenceServiceState | None = Field(default=None, alias="desiredState") + desired_state: InferenceServiceState | None = Field(default=InferenceServiceState.DEPLOYED, alias="desiredState") __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -92,9 +92,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -109,15 +109,15 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "modelVersionId": obj.get("modelVersionId"), "runtime": obj.get("runtime"), - "desiredState": obj.get("desiredState"), + 
"desiredState": obj.get("desiredState") + if obj.get("desiredState") is not None + else InferenceServiceState.DEPLOYED, } ) diff --git a/clients/python/src/mr_openapi/models/metadata_bool_value.py b/clients/python/src/mr_openapi/models/metadata_bool_value.py index 4bb55a6f10..a516d09013 100644 --- a/clients/python/src/mr_openapi/models/metadata_bool_value.py +++ b/clients/python/src/mr_openapi/models/metadata_bool_value.py @@ -76,8 +76,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { "bool_value": obj.get("bool_value"), - "metadataType": ( - obj.get("metadataType") if obj.get("metadataType") is not None else "MetadataBoolValue" - ), + "metadataType": obj.get("metadataType") + if obj.get("metadataType") is not None + else "MetadataBoolValue", } ) diff --git a/clients/python/src/mr_openapi/models/metadata_double_value.py b/clients/python/src/mr_openapi/models/metadata_double_value.py index d39d11f09d..e48ae64fb5 100644 --- a/clients/python/src/mr_openapi/models/metadata_double_value.py +++ b/clients/python/src/mr_openapi/models/metadata_double_value.py @@ -76,8 +76,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { "double_value": obj.get("double_value"), - "metadataType": ( - obj.get("metadataType") if obj.get("metadataType") is not None else "MetadataDoubleValue" - ), + "metadataType": obj.get("metadataType") + if obj.get("metadataType") is not None + else "MetadataDoubleValue", } ) diff --git a/clients/python/src/mr_openapi/models/metadata_proto_value.py b/clients/python/src/mr_openapi/models/metadata_proto_value.py index 7dceb2338e..b1cec36061 100644 --- a/clients/python/src/mr_openapi/models/metadata_proto_value.py +++ b/clients/python/src/mr_openapi/models/metadata_proto_value.py @@ -78,8 +78,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: { "type": obj.get("type"), "proto_value": obj.get("proto_value"), - "metadataType": ( - obj.get("metadataType") 
if obj.get("metadataType") is not None else "MetadataProtoValue" - ), + "metadataType": obj.get("metadataType") + if obj.get("metadataType") is not None + else "MetadataProtoValue", } ) diff --git a/clients/python/src/mr_openapi/models/metadata_string_value.py b/clients/python/src/mr_openapi/models/metadata_string_value.py index 49f8df564d..0e4ac46069 100644 --- a/clients/python/src/mr_openapi/models/metadata_string_value.py +++ b/clients/python/src/mr_openapi/models/metadata_string_value.py @@ -76,8 +76,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { "string_value": obj.get("string_value"), - "metadataType": ( - obj.get("metadataType") if obj.get("metadataType") is not None else "MetadataStringValue" - ), + "metadataType": obj.get("metadataType") + if obj.get("metadataType") is not None + else "MetadataStringValue", } ) diff --git a/clients/python/src/mr_openapi/models/metadata_struct_value.py b/clients/python/src/mr_openapi/models/metadata_struct_value.py index 7c19f4d06e..271049ec6e 100644 --- a/clients/python/src/mr_openapi/models/metadata_struct_value.py +++ b/clients/python/src/mr_openapi/models/metadata_struct_value.py @@ -76,8 +76,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { "struct_value": obj.get("struct_value"), - "metadataType": ( - obj.get("metadataType") if obj.get("metadataType") is not None else "MetadataStructValue" - ), + "metadataType": obj.get("metadataType") + if obj.get("metadataType") is not None + else "MetadataStructValue", } ) diff --git a/clients/python/src/mr_openapi/models/metadata_value.py b/clients/python/src/mr_openapi/models/metadata_value.py index a5f12684e7..b3fd3f2b95 100644 --- a/clients/python/src/mr_openapi/models/metadata_value.py +++ b/clients/python/src/mr_openapi/models/metadata_value.py @@ -14,12 +14,7 @@ import pprint from typing import Any -from pydantic import ( - BaseModel, - ConfigDict, - ValidationError, - 
field_validator, -) +from pydantic import BaseModel, ConfigDict, ValidationError, field_validator from typing_extensions import Self from mr_openapi.models.metadata_bool_value import MetadataBoolValue diff --git a/clients/python/src/mr_openapi/models/metric.py b/clients/python/src/mr_openapi/models/metric.py index c0c92e7d31..2c94ac4965 100644 --- a/clients/python/src/mr_openapi/models/metric.py +++ b/clients/python/src/mr_openapi/models/metric.py @@ -66,7 +66,7 @@ class Metric(BaseModel): step: StrictInt | None = Field( default=None, description="The step number for multi-step metrics (e.g., training epochs)." ) - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -129,9 +129,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -146,11 +146,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), @@ -163,6 +161,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> 
Self | None: "value": obj.get("value"), "timestamp": obj.get("timestamp"), "step": obj.get("step"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/metric_create.py b/clients/python/src/mr_openapi/models/metric_create.py index c771ce2f40..3d4b47b919 100644 --- a/clients/python/src/mr_openapi/models/metric_create.py +++ b/clients/python/src/mr_openapi/models/metric_create.py @@ -47,7 +47,7 @@ class MetricCreate(BaseModel): step: StrictInt | None = Field( default=None, description="The step number for multi-step metrics (e.g., training epochs)." ) - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -100,9 +100,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -117,11 +117,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), @@ -129,6 
+127,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "value": obj.get("value"), "timestamp": obj.get("timestamp"), "step": obj.get("step"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/metric_list.py b/clients/python/src/mr_openapi/models/metric_list.py index 4c9d954c6b..ed5312ca4a 100644 --- a/clients/python/src/mr_openapi/models/metric_list.py +++ b/clients/python/src/mr_openapi/models/metric_list.py @@ -72,9 +72,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in items (list) _items = [] if self.items: - for _item in self.items: - if _item: - _items.append(_item.to_dict()) + for _item_items in self.items: + if _item_items: + _items.append(_item_items.to_dict()) _dict["items"] = _items return _dict diff --git a/clients/python/src/mr_openapi/models/metric_update.py b/clients/python/src/mr_openapi/models/metric_update.py index b37fb60421..671e59956e 100644 --- a/clients/python/src/mr_openapi/models/metric_update.py +++ b/clients/python/src/mr_openapi/models/metric_update.py @@ -44,7 +44,7 @@ class MetricUpdate(BaseModel): step: StrictInt | None = Field( default=None, description="The step number for multi-step metrics (e.g., training epochs)." 
) - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -96,9 +96,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -113,17 +113,15 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "artifactType": obj.get("artifactType") if obj.get("artifactType") is not None else "metric", "value": obj.get("value"), "timestamp": obj.get("timestamp"), "step": obj.get("step"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/model_artifact.py b/clients/python/src/mr_openapi/models/model_artifact.py index 0ed5955ceb..24c64b3061 100644 --- a/clients/python/src/mr_openapi/models/model_artifact.py +++ b/clients/python/src/mr_openapi/models/model_artifact.py @@ -102,7 +102,7 @@ class ModelArtifact(BaseModel): default=None, description="The uniform resource identifier of the physical 
artifact. May be empty if there is no physical artifact.", ) - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -173,9 +173,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -191,11 +191,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { "artifactType": "model-artifact", - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), @@ -216,6 +214,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "modelSourceId": obj.get("modelSourceId"), "modelSourceName": obj.get("modelSourceName"), "uri": obj.get("uri"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/model_artifact_create.py b/clients/python/src/mr_openapi/models/model_artifact_create.py index 7b79f4521c..625dcf560e 100644 --- a/clients/python/src/mr_openapi/models/model_artifact_create.py +++ 
b/clients/python/src/mr_openapi/models/model_artifact_create.py @@ -23,9 +23,8 @@ class ModelArtifactCreate(BaseModel): - """An ML model artifact.""" # noqa: E501 + """ModelArtifactCreate.""" # noqa: E501 - artifact_type: StrictStr | None = Field(default="model-artifact", alias="artifactType") custom_properties: dict[str, MetadataValue] | None = Field( default=None, description="User provided custom properties which are not defined by its type.", @@ -41,6 +40,7 @@ class ModelArtifactCreate(BaseModel): default=None, description="The client provided name of the artifact. This field is optional. If set, it must be unique among all the artifacts of the same artifact type within a database instance and cannot be changed once set.", ) + artifact_type: StrictStr | None = Field(default="model-artifact", alias="artifactType") model_format_name: StrictStr | None = Field( default=None, description="Name of the model format.", alias="modelFormatName" ) @@ -83,7 +83,7 @@ class ModelArtifactCreate(BaseModel): default=None, description="The uniform resource identifier of the physical artifact. 
May be empty if there is no physical artifact.", ) - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -144,9 +144,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -161,11 +161,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), @@ -181,6 +179,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "modelSourceId": obj.get("modelSourceId"), "modelSourceName": obj.get("modelSourceName"), "uri": obj.get("uri"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/model_artifact_list.py b/clients/python/src/mr_openapi/models/model_artifact_list.py index 62c6a4a231..c8912321de 100644 --- a/clients/python/src/mr_openapi/models/model_artifact_list.py +++ b/clients/python/src/mr_openapi/models/model_artifact_list.py @@ -72,9 +72,9 @@ 
def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in items (list) _items = [] if self.items: - for _item in self.items: - if _item: - _items.append(_item.to_dict()) + for _item_items in self.items: + if _item_items: + _items.append(_item_items.to_dict()) _dict["items"] = _items return _dict @@ -92,10 +92,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "nextPageToken": obj.get("nextPageToken"), "pageSize": obj.get("pageSize"), "size": obj.get("size"), - "items": ( - [ModelArtifact.from_dict(_item) for _item in obj["items"]] - if obj.get("items") is not None - else None - ), + "items": [ModelArtifact.from_dict(_item) for _item in obj["items"]] + if obj.get("items") is not None + else None, } ) diff --git a/clients/python/src/mr_openapi/models/model_artifact_update.py b/clients/python/src/mr_openapi/models/model_artifact_update.py index ad68b4213e..a0507a5c41 100644 --- a/clients/python/src/mr_openapi/models/model_artifact_update.py +++ b/clients/python/src/mr_openapi/models/model_artifact_update.py @@ -79,7 +79,7 @@ class ModelArtifactUpdate(BaseModel): default=None, description="The uniform resource identifier of the physical artifact. 
May be empty if there is no physical artifact.", ) - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -139,9 +139,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -156,11 +156,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "artifactType": obj.get("artifactType") if obj.get("artifactType") is not None else "model-artifact", @@ -175,6 +173,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "modelSourceId": obj.get("modelSourceId"), "modelSourceName": obj.get("modelSourceName"), "uri": obj.get("uri"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/model_version.py b/clients/python/src/mr_openapi/models/model_version.py index 8b42c7030c..67c579cb45 100644 --- a/clients/python/src/mr_openapi/models/model_version.py +++ 
b/clients/python/src/mr_openapi/models/model_version.py @@ -39,7 +39,7 @@ class ModelVersion(BaseModel): name: StrictStr = Field( description="The client provided name of the artifact. This field is optional. If set, it must be unique among all the artifacts of the same artifact type within a database instance and cannot be changed once set." ) - state: ModelVersionState | None = None + state: ModelVersionState | None = ModelVersionState.LIVE author: StrictStr | None = Field(default=None, description="Name of the author.") registered_model_id: StrictStr = Field( description="ID of the `RegisteredModel` to which this version belongs.", alias="registeredModelId" @@ -113,9 +113,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -130,15 +130,13 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ModelVersionState.LIVE, "author": obj.get("author"), "registeredModelId": 
obj.get("registeredModelId"), "id": obj.get("id"), diff --git a/clients/python/src/mr_openapi/models/model_version_create.py b/clients/python/src/mr_openapi/models/model_version_create.py index ec0cbf2d13..eb8d9d39ea 100644 --- a/clients/python/src/mr_openapi/models/model_version_create.py +++ b/clients/python/src/mr_openapi/models/model_version_create.py @@ -39,7 +39,7 @@ class ModelVersionCreate(BaseModel): name: StrictStr = Field( description="The client provided name of the model's version. It must be unique among all the ModelVersions of the same type within a Model Registry instance and cannot be changed once set." ) - state: ModelVersionState | None = None + state: ModelVersionState | None = ModelVersionState.LIVE author: StrictStr | None = Field(default=None, description="Name of the author.") registered_model_id: StrictStr = Field( description="ID of the `RegisteredModel` to which this version belongs.", alias="registeredModelId" @@ -94,9 +94,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -111,15 +111,13 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, 
"description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ModelVersionState.LIVE, "author": obj.get("author"), "registeredModelId": obj.get("registeredModelId"), } diff --git a/clients/python/src/mr_openapi/models/model_version_list.py b/clients/python/src/mr_openapi/models/model_version_list.py index cee6864046..4edf3850e5 100644 --- a/clients/python/src/mr_openapi/models/model_version_list.py +++ b/clients/python/src/mr_openapi/models/model_version_list.py @@ -72,9 +72,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in items (list) _items = [] if self.items: - for _item in self.items: - if _item: - _items.append(_item.to_dict()) + for _item_items in self.items: + if _item_items: + _items.append(_item_items.to_dict()) _dict["items"] = _items return _dict @@ -92,8 +92,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "nextPageToken": obj.get("nextPageToken"), "pageSize": obj.get("pageSize"), "size": obj.get("size"), - "items": ( - [ModelVersion.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None - ), + "items": [ModelVersion.from_dict(_item) for _item in obj["items"]] + if obj.get("items") is not None + else None, } ) diff --git a/clients/python/src/mr_openapi/models/model_version_update.py b/clients/python/src/mr_openapi/models/model_version_update.py index eba671fb7e..a46c26c672 100644 --- a/clients/python/src/mr_openapi/models/model_version_update.py +++ b/clients/python/src/mr_openapi/models/model_version_update.py @@ -36,7 +36,7 @@ class ModelVersionUpdate(BaseModel): description="The external id that come from the clients’ system. This field is optional. 
If set, it must be unique among all resources within a database instance.", alias="externalId", ) - state: ModelVersionState | None = None + state: ModelVersionState | None = ModelVersionState.LIVE author: StrictStr | None = Field(default=None, description="Name of the author.") __properties: ClassVar[list[str]] = ["customProperties", "description", "externalId", "state", "author"] @@ -80,9 +80,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -97,14 +97,12 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ModelVersionState.LIVE, "author": obj.get("author"), } ) diff --git a/clients/python/src/mr_openapi/models/parameter.py b/clients/python/src/mr_openapi/models/parameter.py index 37f400c9a5..80cf8b4573 100644 --- a/clients/python/src/mr_openapi/models/parameter.py +++ b/clients/python/src/mr_openapi/models/parameter.py @@ -62,7 +62,7 @@ class Parameter(BaseModel): artifact_type: StrictStr | None = 
Field(default="parameter", alias="artifactType") value: StrictStr | None = Field(default=None, description="The value of the parameter.") parameter_type: ParameterType | None = Field(default=None, alias="parameterType") - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -124,9 +124,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -141,11 +141,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), @@ -157,6 +155,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "artifactType": obj.get("artifactType") if obj.get("artifactType") is not None else "parameter", "value": obj.get("value"), "parameterType": obj.get("parameterType"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/parameter_create.py 
b/clients/python/src/mr_openapi/models/parameter_create.py index 552cde442f..9083a08b38 100644 --- a/clients/python/src/mr_openapi/models/parameter_create.py +++ b/clients/python/src/mr_openapi/models/parameter_create.py @@ -43,7 +43,7 @@ class ParameterCreate(BaseModel): artifact_type: StrictStr | None = Field(default="parameter", alias="artifactType") value: StrictStr | None = Field(default=None, description="The value of the parameter.") parameter_type: ParameterType | None = Field(default=None, alias="parameterType") - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -95,9 +95,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -112,17 +112,15 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), "artifactType": obj.get("artifactType") if obj.get("artifactType") is not None else "parameter", "value": obj.get("value"), "parameterType": 
obj.get("parameterType"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/parameter_update.py b/clients/python/src/mr_openapi/models/parameter_update.py index f72f77b103..7c9532b1c3 100644 --- a/clients/python/src/mr_openapi/models/parameter_update.py +++ b/clients/python/src/mr_openapi/models/parameter_update.py @@ -40,7 +40,7 @@ class ParameterUpdate(BaseModel): artifact_type: StrictStr | None = Field(default="parameter", alias="artifactType") value: StrictStr | None = Field(default=None, description="The value of the parameter.") parameter_type: ParameterType | None = Field(default=None, alias="parameterType") - state: ArtifactState | None = None + state: ArtifactState | None = ArtifactState.UNKNOWN __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -91,9 +91,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -108,16 +108,14 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), 
"externalId": obj.get("externalId"), "artifactType": obj.get("artifactType") if obj.get("artifactType") is not None else "parameter", "value": obj.get("value"), "parameterType": obj.get("parameterType"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else ArtifactState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/registered_model.py b/clients/python/src/mr_openapi/models/registered_model.py index 0080354a6f..efa1e44191 100644 --- a/clients/python/src/mr_openapi/models/registered_model.py +++ b/clients/python/src/mr_openapi/models/registered_model.py @@ -68,7 +68,7 @@ class RegisteredModel(BaseModel): license_link: StrictStr | None = Field(default=None, description="URL to the license text.", alias="licenseLink") library_name: StrictStr | None = Field(default=None, alias="libraryName") owner: StrictStr | None = None - state: RegisteredModelState | None = None + state: RegisteredModelState | None = RegisteredModelState.LIVE __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -135,9 +135,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -152,11 +152,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: 
MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), @@ -173,6 +171,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "licenseLink": obj.get("licenseLink"), "libraryName": obj.get("libraryName"), "owner": obj.get("owner"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else RegisteredModelState.LIVE, } ) diff --git a/clients/python/src/mr_openapi/models/registered_model_create.py b/clients/python/src/mr_openapi/models/registered_model_create.py index 147970d48f..0aad3bd480 100644 --- a/clients/python/src/mr_openapi/models/registered_model_create.py +++ b/clients/python/src/mr_openapi/models/registered_model_create.py @@ -57,7 +57,7 @@ class RegisteredModelCreate(BaseModel): license_link: StrictStr | None = Field(default=None, description="URL to the license text.", alias="licenseLink") library_name: StrictStr | None = Field(default=None, alias="libraryName") owner: StrictStr | None = None - state: RegisteredModelState | None = None + state: RegisteredModelState | None = RegisteredModelState.LIVE __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -116,9 +116,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -133,11 +133,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> 
Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), @@ -151,6 +149,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "licenseLink": obj.get("licenseLink"), "libraryName": obj.get("libraryName"), "owner": obj.get("owner"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else RegisteredModelState.LIVE, } ) diff --git a/clients/python/src/mr_openapi/models/registered_model_list.py b/clients/python/src/mr_openapi/models/registered_model_list.py index 6ef1a532c4..d269995eb8 100644 --- a/clients/python/src/mr_openapi/models/registered_model_list.py +++ b/clients/python/src/mr_openapi/models/registered_model_list.py @@ -72,9 +72,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in items (list) _items = [] if self.items: - for _item in self.items: - if _item: - _items.append(_item.to_dict()) + for _item_items in self.items: + if _item_items: + _items.append(_item_items.to_dict()) _dict["items"] = _items return _dict @@ -92,10 +92,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "nextPageToken": obj.get("nextPageToken"), "pageSize": obj.get("pageSize"), "size": obj.get("size"), - "items": ( - [RegisteredModel.from_dict(_item) for _item in obj["items"]] - if obj.get("items") is not None - else None - ), + "items": [RegisteredModel.from_dict(_item) for _item in obj["items"]] + if obj.get("items") is not None + else None, } ) diff --git a/clients/python/src/mr_openapi/models/registered_model_update.py 
b/clients/python/src/mr_openapi/models/registered_model_update.py index d7f3d1f9c2..2fc1d1bdae 100644 --- a/clients/python/src/mr_openapi/models/registered_model_update.py +++ b/clients/python/src/mr_openapi/models/registered_model_update.py @@ -54,7 +54,7 @@ class RegisteredModelUpdate(BaseModel): license_link: StrictStr | None = Field(default=None, description="URL to the license text.", alias="licenseLink") library_name: StrictStr | None = Field(default=None, alias="libraryName") owner: StrictStr | None = None - state: RegisteredModelState | None = None + state: RegisteredModelState | None = RegisteredModelState.LIVE __properties: ClassVar[list[str]] = [ "customProperties", "description", @@ -112,9 +112,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -129,11 +129,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "readme": obj.get("readme"), @@ -146,6 +144,6 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "licenseLink": obj.get("licenseLink"), "libraryName": 
obj.get("libraryName"), "owner": obj.get("owner"), - "state": obj.get("state"), + "state": obj.get("state") if obj.get("state") is not None else RegisteredModelState.LIVE, } ) diff --git a/clients/python/src/mr_openapi/models/serve_model.py b/clients/python/src/mr_openapi/models/serve_model.py index 4b95ecef5a..ae734f39f9 100644 --- a/clients/python/src/mr_openapi/models/serve_model.py +++ b/clients/python/src/mr_openapi/models/serve_model.py @@ -51,7 +51,7 @@ class ServeModel(BaseModel): description="Output only. Last update time of the resource since epoch in millisecond since epoch.", alias="lastUpdateTimeSinceEpoch", ) - last_known_state: ExecutionState | None = Field(default=None, alias="lastKnownState") + last_known_state: ExecutionState | None = Field(default=ExecutionState.UNKNOWN, alias="lastKnownState") model_version_id: StrictStr = Field( description="ID of the `ModelVersion` that was served in `InferenceService`.", alias="modelVersionId" ) @@ -112,9 +112,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -129,18 +129,18 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + 
else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), "id": obj.get("id"), "createTimeSinceEpoch": obj.get("createTimeSinceEpoch"), "lastUpdateTimeSinceEpoch": obj.get("lastUpdateTimeSinceEpoch"), - "lastKnownState": obj.get("lastKnownState"), + "lastKnownState": obj.get("lastKnownState") + if obj.get("lastKnownState") is not None + else ExecutionState.UNKNOWN, "modelVersionId": obj.get("modelVersionId"), } ) diff --git a/clients/python/src/mr_openapi/models/serve_model_create.py b/clients/python/src/mr_openapi/models/serve_model_create.py index 95893d3be9..54354cac18 100644 --- a/clients/python/src/mr_openapi/models/serve_model_create.py +++ b/clients/python/src/mr_openapi/models/serve_model_create.py @@ -40,7 +40,7 @@ class ServeModelCreate(BaseModel): default=None, description="The client provided name of the artifact. This field is optional. If set, it must be unique among all the artifacts of the same artifact type within a database instance and cannot be changed once set.", ) - last_known_state: ExecutionState | None = Field(default=None, alias="lastKnownState") + last_known_state: ExecutionState | None = Field(default=ExecutionState.UNKNOWN, alias="lastKnownState") model_version_id: StrictStr = Field( description="ID of the `ModelVersion` that was served in `InferenceService`.", alias="modelVersionId" ) @@ -93,9 +93,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ 
-110,15 +110,15 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), - "lastKnownState": obj.get("lastKnownState"), + "lastKnownState": obj.get("lastKnownState") + if obj.get("lastKnownState") is not None + else ExecutionState.UNKNOWN, "modelVersionId": obj.get("modelVersionId"), } ) diff --git a/clients/python/src/mr_openapi/models/serve_model_list.py b/clients/python/src/mr_openapi/models/serve_model_list.py index 22cb05437c..b7927ac6bc 100644 --- a/clients/python/src/mr_openapi/models/serve_model_list.py +++ b/clients/python/src/mr_openapi/models/serve_model_list.py @@ -72,9 +72,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in items (list) _items = [] if self.items: - for _item in self.items: - if _item: - _items.append(_item.to_dict()) + for _item_items in self.items: + if _item_items: + _items.append(_item_items.to_dict()) _dict["items"] = _items return _dict @@ -92,8 +92,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "nextPageToken": obj.get("nextPageToken"), "pageSize": obj.get("pageSize"), "size": obj.get("size"), - "items": ( - [ServeModel.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None - ), + "items": [ServeModel.from_dict(_item) for _item in obj["items"]] + if obj.get("items") is not None + else None, } ) diff --git a/clients/python/src/mr_openapi/models/serve_model_update.py b/clients/python/src/mr_openapi/models/serve_model_update.py index 
7a12448ac1..2ecd50ce72 100644 --- a/clients/python/src/mr_openapi/models/serve_model_update.py +++ b/clients/python/src/mr_openapi/models/serve_model_update.py @@ -36,7 +36,7 @@ class ServeModelUpdate(BaseModel): description="The external id that come from the clients’ system. This field is optional. If set, it must be unique among all resources within a database instance.", alias="externalId", ) - last_known_state: ExecutionState | None = Field(default=None, alias="lastKnownState") + last_known_state: ExecutionState | None = Field(default=ExecutionState.UNKNOWN, alias="lastKnownState") __properties: ClassVar[list[str]] = ["customProperties", "description", "externalId", "lastKnownState"] model_config = ConfigDict( @@ -79,9 +79,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -96,13 +96,13 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), - "lastKnownState": obj.get("lastKnownState"), + "lastKnownState": obj.get("lastKnownState") + if obj.get("lastKnownState") is not None + else 
ExecutionState.UNKNOWN, } ) diff --git a/clients/python/src/mr_openapi/models/serving_environment.py b/clients/python/src/mr_openapi/models/serving_environment.py index 2d1d7096dd..0a47cdde5a 100644 --- a/clients/python/src/mr_openapi/models/serving_environment.py +++ b/clients/python/src/mr_openapi/models/serving_environment.py @@ -102,9 +102,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -119,11 +119,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), diff --git a/clients/python/src/mr_openapi/models/serving_environment_create.py b/clients/python/src/mr_openapi/models/serving_environment_create.py index 4c449caf86..7453ab0b9f 100644 --- a/clients/python/src/mr_openapi/models/serving_environment_create.py +++ b/clients/python/src/mr_openapi/models/serving_environment_create.py @@ -78,9 +78,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if 
self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -95,11 +95,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), "name": obj.get("name"), diff --git a/clients/python/src/mr_openapi/models/serving_environment_list.py b/clients/python/src/mr_openapi/models/serving_environment_list.py index dcd6ea3bf8..89fa961a6f 100644 --- a/clients/python/src/mr_openapi/models/serving_environment_list.py +++ b/clients/python/src/mr_openapi/models/serving_environment_list.py @@ -72,9 +72,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in items (list) _items = [] if self.items: - for _item in self.items: - if _item: - _items.append(_item.to_dict()) + for _item_items in self.items: + if _item_items: + _items.append(_item_items.to_dict()) _dict["items"] = _items return _dict @@ -92,10 +92,8 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: "nextPageToken": obj.get("nextPageToken"), "pageSize": obj.get("pageSize"), "size": obj.get("size"), - "items": ( - [ServingEnvironment.from_dict(_item) for _item in obj["items"]] - if obj.get("items") is not None - else None - ), + "items": 
[ServingEnvironment.from_dict(_item) for _item in obj["items"]] + if obj.get("items") is not None + else None, } ) diff --git a/clients/python/src/mr_openapi/models/serving_environment_update.py b/clients/python/src/mr_openapi/models/serving_environment_update.py index 365ca82278..a5ea9bab22 100644 --- a/clients/python/src/mr_openapi/models/serving_environment_update.py +++ b/clients/python/src/mr_openapi/models/serving_environment_update.py @@ -77,9 +77,9 @@ def to_dict(self) -> dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each value in custom_properties (dict) _field_dict = {} if self.custom_properties: - for _key in self.custom_properties: - if self.custom_properties[_key]: - _field_dict[_key] = self.custom_properties[_key].to_dict() + for _key_custom_properties in self.custom_properties: + if self.custom_properties[_key_custom_properties]: + _field_dict[_key_custom_properties] = self.custom_properties[_key_custom_properties].to_dict() _dict["customProperties"] = _field_dict return _dict @@ -94,11 +94,9 @@ def from_dict(cls, obj: dict[str, Any] | None) -> Self | None: return cls.model_validate( { - "customProperties": ( - {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} - if obj.get("customProperties") is not None - else None - ), + "customProperties": {_k: MetadataValue.from_dict(_v) for _k, _v in obj["customProperties"].items()} + if obj.get("customProperties") is not None + else None, "description": obj.get("description"), "externalId": obj.get("externalId"), } diff --git a/clients/python/src/mr_openapi/rest.py b/clients/python/src/mr_openapi/rest.py index e5a6116bf3..c4ed8d41e4 100644 --- a/clients/python/src/mr_openapi/rest.py +++ b/clients/python/src/mr_openapi/rest.py @@ -25,7 +25,6 @@ class RESTResponse(io.IOBase): - def __init__(self, resp) -> None: self.response = resp self.status = resp.status @@ -47,42 +46,32 @@ def getheader(self, name, default=None): class RESTClientObject: - 
def __init__(self, configuration) -> None: - # maxsize is number of requests to host that are allowed in parallel - maxsize = configuration.connection_pool_maxsize + self.maxsize = configuration.connection_pool_maxsize - ssl_context = ssl.create_default_context(cafile=configuration.ssl_ca_cert) + self.ssl_context = ssl.create_default_context( + cafile=configuration.ssl_ca_cert, + cadata=configuration.ca_cert_data, + ) if configuration.cert_file: - ssl_context.load_cert_chain(configuration.cert_file, keyfile=configuration.key_file) + self.ssl_context.load_cert_chain(configuration.cert_file, keyfile=configuration.key_file) if not configuration.verify_ssl: - ssl_context.check_hostname = False - ssl_context.verify_mode = ssl.CERT_NONE - - connector = aiohttp.TCPConnector(limit=maxsize, ssl=ssl_context) + self.ssl_context.check_hostname = False + self.ssl_context.verify_mode = ssl.CERT_NONE self.proxy = configuration.proxy self.proxy_headers = configuration.proxy_headers - # https pool manager - self.pool_manager = aiohttp.ClientSession(connector=connector, trust_env=True) - - retries = configuration.retries - self.retry_client: Optional[aiohttp_retry.RetryClient] - if retries is not None: - self.retry_client = aiohttp_retry.RetryClient( - client_session=self.pool_manager, - retry_options=aiohttp_retry.ExponentialRetry( - attempts=retries, factor=0.0, start_timeout=0.0, max_timeout=120.0 - ), - ) - else: - self.retry_client = None + self.retries = configuration.retries + + self.pool_manager: Optional[aiohttp.ClientSession] = None + self.retry_client: Optional[aiohttp_retry.RetryClient] = None - async def close(self): - await self.pool_manager.close() + async def close(self) -> None: + if self.pool_manager: + await self.pool_manager.close() if self.retry_client is not None: await self.retry_client.close() @@ -141,13 +130,18 @@ async def request(self, method, url, headers=None, body=None, post_params=None, if isinstance(v, tuple) and len(v) == 3: data.add_field(k, 
value=v[1], filename=v[0], content_type=v[2]) else: + # Ensures that dict objects are serialized + if isinstance(v, dict): + v = json.dumps(v) + elif isinstance(v, int): + v = str(v) data.add_field(k, v) args["data"] = data - # Pass a `bytes` parameter directly in the body to support + # Pass a `bytes` or `str` parameter directly in the body to support # other content types than Json when `body` argument is provided # in serialized form - elif isinstance(body, bytes): + elif isinstance(body, (str, bytes)): args["data"] = body else: # Cannot generate the request from given parameters @@ -157,10 +151,24 @@ async def request(self, method, url, headers=None, body=None, post_params=None, raise ApiException(status=0, reason=msg) pool_manager: Union[aiohttp.ClientSession, aiohttp_retry.RetryClient] - if self.retry_client is not None and method in ALLOW_RETRY_METHODS: + + # https pool manager + if self.pool_manager is None: + self.pool_manager = aiohttp.ClientSession( + connector=aiohttp.TCPConnector(limit=self.maxsize, ssl=self.ssl_context), + trust_env=True, + ) + pool_manager = self.pool_manager + + if self.retries is not None and method in ALLOW_RETRY_METHODS: + if self.retry_client is None: + self.retry_client = aiohttp_retry.RetryClient( + client_session=self.pool_manager, + retry_options=aiohttp_retry.ExponentialRetry( + attempts=self.retries, factor=2.0, start_timeout=0.1, max_timeout=120.0 + ), + ) pool_manager = self.retry_client - else: - pool_manager = self.pool_manager r = await pool_manager.request(**args) diff --git a/clients/python/templates/configuration.mustache b/clients/python/templates/configuration.mustache index 3ecf1dff50..6450b68ca5 100644 --- a/clients/python/templates/configuration.mustache +++ b/clients/python/templates/configuration.mustache @@ -3,16 +3,22 @@ {{>partial_header}} import copy +import http.client as httplib import logging from logging import FileHandler +from logging import FileHandler {{^asyncio}} import multiprocessing 
{{/asyncio}} import sys -from typing import Optional +from typing import Any, ClassVar, Dict, List, Literal, Optional, TypedDict, Union +from typing_extensions import NotRequired, Self + import urllib3 -import http.client as httplib +{{#hasHttpSignatureMethods}} +from {{packageName}}.signing import HttpSigningConfiguration +{{/hasHttpSignatureMethods}} JSON_SCHEMA_VALIDATION_KEYWORDS = { 'multipleOf', 'maximum', 'exclusiveMaximum', @@ -20,10 +26,137 @@ JSON_SCHEMA_VALIDATION_KEYWORDS = { 'minLength', 'pattern', 'maxItems', 'minItems' } +ServerVariablesT = Dict[str, str] + +GenericAuthSetting = TypedDict( + "GenericAuthSetting", + { + "type": str, + "in": str, + "key": str, + "value": str, + }, +) + + +OAuth2AuthSetting = TypedDict( + "OAuth2AuthSetting", + { + "type": Literal["oauth2"], + "in": Literal["header"], + "key": Literal["Authorization"], + "value": str, + }, +) + + +APIKeyAuthSetting = TypedDict( + "APIKeyAuthSetting", + { + "type": Literal["api_key"], + "in": str, + "key": str, + "value": Optional[str], + }, +) + + +BasicAuthSetting = TypedDict( + "BasicAuthSetting", + { + "type": Literal["basic"], + "in": Literal["header"], + "key": Literal["Authorization"], + "value": Optional[str], + }, +) + + +BearerFormatAuthSetting = TypedDict( + "BearerFormatAuthSetting", + { + "type": Literal["bearer"], + "in": Literal["header"], + "format": Literal["JWT"], + "key": Literal["Authorization"], + "value": str, + }, +) + + +BearerAuthSetting = TypedDict( + "BearerAuthSetting", + { + "type": Literal["bearer"], + "in": Literal["header"], + "key": Literal["Authorization"], + "value": str, + }, +) + + +HTTPSignatureAuthSetting = TypedDict( + "HTTPSignatureAuthSetting", + { + "type": Literal["http-signature"], + "in": Literal["header"], + "key": Literal["Authorization"], + "value": None, + }, +) + + +AuthSettings = TypedDict( + "AuthSettings", + { +{{#authMethods}} +{{#isOAuth}} + "{{name}}": OAuth2AuthSetting, +{{/isOAuth}} +{{#isApiKey}} + "{{name}}": APIKeyAuthSetting, 
+{{/isApiKey}} +{{#isBasic}} + {{#isBasicBasic}} + "{{name}}": BasicAuthSetting, + {{/isBasicBasic}} + {{#isBasicBearer}} + {{#bearerFormat}} + "{{name}}": BearerFormatAuthSetting, + {{/bearerFormat}} + {{^bearerFormat}} + "{{name}}": BearerAuthSetting, + {{/bearerFormat}} + {{/isBasicBearer}} + {{#isHttpSignature}} + "{{name}}": HTTPSignatureAuthSetting, + {{/isHttpSignature}} +{{/isBasic}} +{{/authMethods}} + }, + total=False, +) + + +class HostSettingVariable(TypedDict): + description: str + default_value: str + enum_values: List[str] + + +class HostSetting(TypedDict): + url: str + description: str + variables: NotRequired[Dict[str, HostSettingVariable]] + + class Configuration: """This class contains various settings of the API client. :param host: Base url. + :param ignore_operation_servers + Boolean to ignore operation servers for the API client. + Config will use `host` as the base url regardless of the operation servers. :param api_key: Dict to store API key(s). Each entry in the dict specifies an API key. The dict key is the name of the security scheme in the OAS specification. @@ -53,6 +186,9 @@ class Configuration: :param verify_ssl: bool - Whether to verify the SSL certificate when making API requests to an HTTPS server. Set to False to disable verification, default=True. + :param retries: Number of retries for API requests. + :param ca_cert_data: verify the peer using concatenated CA certificate data + in PEM (str) or DER (bytes) format. 
{{#hasAuthMethods}} :Example: @@ -139,20 +275,31 @@ conf = {{{packageName}}}.Configuration( {{/hasAuthMethods}} """ - _default = None + _default: ClassVar[Optional[Self]] = None - def __init__(self, host=None, - api_key=None, api_key_prefix=None, - username=None, password=None, - access_token=None, + def __init__( + self, + host: Optional[str]=None, + api_key: Optional[Dict[str, str]]=None, + api_key_prefix: Optional[Dict[str, str]]=None, + username: Optional[str]=None, + password: Optional[str]=None, + access_token: Optional[str]=None, {{#hasHttpSignatureMethods}} - signing_info=None, + signing_info: Optional[HttpSigningConfiguration]=None, {{/hasHttpSignatureMethods}} - server_index=None, server_variables=None, - server_operation_index=None, server_operation_variables=None, - ssl_ca_cert=None, - verify_ssl=True, - ) -> None: + server_index: Optional[int]=None, + server_variables: Optional[ServerVariablesT]=None, + server_operation_index: Optional[Dict[int, int]]=None, + server_operation_variables: Optional[Dict[int, ServerVariablesT]]=None, + ignore_operation_servers: bool=False, + ssl_ca_cert: Optional[str]=None, + verify_ssl: bool=True, + retries: Optional[int] = None, + ca_cert_data: Optional[Union[str, bytes]] = None, + *, + debug: Optional[bool] = None, + ) -> None: """Constructor """ self._base_path = "{{{basePath}}}" if host is None else host @@ -166,6 +313,9 @@ conf = {{{packageName}}}.Configuration( self.server_operation_variables = server_operation_variables or {} """Default server variables """ + self.ignore_operation_servers = ignore_operation_servers + """Ignore operation servers + """ self.temp_folder_path = None """Temp file folder for downloading files """ @@ -216,7 +366,10 @@ conf = {{{packageName}}}.Configuration( self.logger_file = None """Debug file location """ - self.debug = False + if debug is not None: + self.debug = debug + else: + self.__debug = False """Debug switch """ @@ -228,6 +381,10 @@ conf = {{{packageName}}}.Configuration( 
self.ssl_ca_cert = ssl_ca_cert """Set this to customize the certificate file to verify the peer. """ + self.ca_cert_data = ca_cert_data + """Set this to verify the peer using PEM (str) or DER (bytes) + certificate data. + """ self.cert_file = None """client certificate file """ @@ -267,7 +424,7 @@ conf = {{{packageName}}}.Configuration( self.safe_chars_for_path_param = '' """Safe chars for path_param """ - self.retries = None + self.retries = retries """Adding retries to override urllib3 default value 3 """ # Enable client side validation @@ -285,7 +442,7 @@ conf = {{{packageName}}}.Configuration( """date format """ - def __deepcopy__(self, memo): + def __deepcopy__(self, memo: Dict[int, Any]) -> Self: cls = self.__class__ result = cls.__new__(cls) memo[id(self)] = result @@ -299,7 +456,7 @@ conf = {{{packageName}}}.Configuration( result.debug = self.debug return result - def __setattr__(self, name, value): + def __setattr__(self, name: str, value: Any) -> None: object.__setattr__(self, name, value) {{#hasHttpSignatureMethods}} if name == "signing_info" and value is not None: @@ -309,7 +466,7 @@ conf = {{{packageName}}}.Configuration( {{/hasHttpSignatureMethods}} @classmethod - def set_default(cls, default): + def set_default(cls, default: Optional[Self]) -> None: """Set default instance of configuration. It stores default configuration, which can be @@ -320,7 +477,7 @@ conf = {{{packageName}}}.Configuration( cls._default = default @classmethod - def get_default_copy(cls): + def get_default_copy(cls) -> Self: """Deprecated. Please use `get_default` instead. Deprecated. Please use `get_default` instead. @@ -330,7 +487,7 @@ conf = {{{packageName}}}.Configuration( return cls.get_default() @classmethod - def get_default(cls): + def get_default(cls) -> Self: """Return the default configuration. This method returns newly created, based on default constructor, @@ -340,11 +497,11 @@ conf = {{{packageName}}}.Configuration( :return: The configuration object. 
""" if cls._default is None: - cls._default = Configuration() + cls._default = cls() return cls._default @property - def logger_file(self): + def logger_file(self) -> Optional[str]: """The logger file. If the logger_file is None, then add stream handler and remove file @@ -356,7 +513,7 @@ conf = {{{packageName}}}.Configuration( return self.__logger_file @logger_file.setter - def logger_file(self, value): + def logger_file(self, value: Optional[str]) -> None: """The logger file. If the logger_file is None, then add stream handler and remove file @@ -375,7 +532,7 @@ conf = {{{packageName}}}.Configuration( logger.addHandler(self.logger_file_handler) @property - def debug(self): + def debug(self) -> bool: """Debug status :param value: The debug status, True or False. @@ -384,7 +541,7 @@ conf = {{{packageName}}}.Configuration( return self.__debug @debug.setter - def debug(self, value): + def debug(self, value: bool) -> None: """Debug status :param value: The debug status, True or False. @@ -406,7 +563,7 @@ conf = {{{packageName}}}.Configuration( httplib.HTTPConnection.debuglevel = 0 @property - def logger_format(self): + def logger_format(self) -> str: """The logger format. The logger_formatter will be updated when sets logger_format. @@ -417,7 +574,7 @@ conf = {{{packageName}}}.Configuration( return self.__logger_format @logger_format.setter - def logger_format(self, value): + def logger_format(self, value: str) -> None: """The logger format. The logger_formatter will be updated when sets logger_format. @@ -428,7 +585,7 @@ conf = {{{packageName}}}.Configuration( self.__logger_format = value self.logger_formatter = logging.Formatter(self.__logger_format) - def get_api_key_with_prefix(self, identifier, alias=None): + def get_api_key_with_prefix(self, identifier: str, alias: Optional[str]=None) -> Optional[str]: """Gets API key (with prefix if set). :param identifier: The identifier of apiKey. 
@@ -445,7 +602,9 @@ conf = {{{packageName}}}.Configuration( else: return key - def get_basic_auth_token(self): + return None + + def get_basic_auth_token(self) -> Optional[str]: """Gets HTTP basic authentication header (string). :return: The token for basic HTTP authentication. @@ -460,12 +619,12 @@ conf = {{{packageName}}}.Configuration( basic_auth=username + ':' + password ).get('authorization') - def auth_settings(self): + def auth_settings(self)-> AuthSettings: """Gets Auth Settings dict for api client. :return: The Auth Settings information dict. """ - auth = {} + auth: AuthSettings = {} {{#authMethods}} {{#isApiKey}} if '{{name}}' in self.api_key{{#vendorExtensions.x-auth-id-alias}} or '{{.}}' in self.api_key{{/vendorExtensions.x-auth-id-alias}}: @@ -523,7 +682,7 @@ conf = {{{packageName}}}.Configuration( {{/authMethods}} return auth - def to_debug_report(self): + def to_debug_report(self) -> str: """Gets the essential information for debugging. :return: The report for debugging. @@ -535,7 +694,7 @@ conf = {{{packageName}}}.Configuration( "SDK Package Version: {{packageVersion}}".\ format(env=sys.platform, pyversion=sys.version) - def get_host_settings(self): + def get_host_settings(self) -> List[HostSetting]: """Gets an array of host settings :return: An array of host settings @@ -570,7 +729,12 @@ conf = {{{packageName}}}.Configuration( {{/servers}} ] - def get_host_from_settings(self, index, variables=None, servers=None): + def get_host_from_settings( + self, + index: Optional[int], + variables: Optional[ServerVariablesT]=None, + servers: Optional[List[HostSetting]]=None, + ) -> str: """Gets host URL based on the index and variables :param index: array index of the host settings :param variables: hash of variable and the corresponding value @@ -610,12 +774,12 @@ conf = {{{packageName}}}.Configuration( return url @property - def host(self): + def host(self) -> str: """Return generated host.""" return self.get_host_from_settings(self.server_index, 
variables=self.server_variables) @host.setter - def host(self, value): + def host(self, value: str) -> None: """Fix base path.""" self._base_path = value self.server_index = None diff --git a/clients/python/tests/REST_bindings_test.py b/clients/python/tests/REST_bindings_test.py index a4f3824e17..ee8a0ed880 100644 --- a/clients/python/tests/REST_bindings_test.py +++ b/clients/python/tests/REST_bindings_test.py @@ -22,7 +22,7 @@ @cleanup async def client(user_token: str, verify_ssl: bool) -> AsyncIterator[ModelRegistryServiceApi]: params = {"verify_ssl": verify_ssl, "access_token": user_token} - config = mr_openapi.Configuration(REGISTRY_URL, **params) + config = mr_openapi.Configuration(REGISTRY_URL, **params) # type: ignore[arg-type] api_client = mr_openapi.ApiClient(config) client = mr_openapi.ModelRegistryServiceApi(api_client) yield client diff --git a/clients/python/tests/conftest.py b/clients/python/tests/conftest.py index a78cbbe2a9..e8bed9989f 100644 --- a/clients/python/tests/conftest.py +++ b/clients/python/tests/conftest.py @@ -12,7 +12,7 @@ from unittest.mock import Mock, patch import pytest -import requests +import requests # type: ignore[import-untyped,unused-ignore] from model_registry import ModelRegistry from model_registry.utils import BackendDefinition, _get_skopeo_backend @@ -83,7 +83,7 @@ def root(request) -> Path: @pytest.fixture(scope="session") def user_token() -> str: - return os.getenv("AUTH_TOKEN", None) + return os.getenv("AUTH_TOKEN", None) # type: ignore[arg-type] @pytest.fixture(scope="session") @@ -98,7 +98,7 @@ def request_headers(user_token: str) -> dict[str, str]: def verify_ssl() -> bool: verify_ssl_env = os.environ.get("VERIFY_SSL") if verify_ssl_env is None: - return None + return None # type: ignore[return-value] return verify_ssl_env.lower() == "true" @@ -168,7 +168,7 @@ def client(user_token: str) -> ModelRegistry: @pytest.fixture @cleanup -def client_attrs() -> dict[str, any]: +def client_attrs() -> dict[str, any]: # type: 
ignore[valid-type] return { "host": REGISTRY_HOST, "port": REGISTRY_PORT, @@ -178,9 +178,10 @@ def client_attrs() -> dict[str, any]: @pytest.fixture(scope="module") -def setup_env_user_token(): +def setup_env_user_token(user_token: str) -> str: # type: ignore[misc] + token_bytes = (user_token or "Token").encode("utf-8") with tempfile.NamedTemporaryFile(delete=False) as token_file: - token_file.write(b"Token") + token_file.write(token_bytes) old_token_path = os.getenv("KF_PIPELINES_SA_TOKEN_PATH") os.environ["KF_PIPELINES_SA_TOKEN_PATH"] = token_file.name @@ -286,9 +287,9 @@ def patch_s3_env(monkeypatch: pytest.MonkeyPatch): bucket = os.getenv("KF_MR_TEST_BUCKET_NAME") or "default" region = "east" - monkeypatch.setenv("AWS_S3_ENDPOINT", s3_endpoint) - monkeypatch.setenv("AWS_ACCESS_KEY_ID", access_key_id) - monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", secret_access_key) + monkeypatch.setenv("AWS_S3_ENDPOINT", s3_endpoint) # type: ignore[arg-type,unused-ignore] + monkeypatch.setenv("AWS_ACCESS_KEY_ID", access_key_id) # type: ignore[arg-type,unused-ignore] + monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", secret_access_key) # type: ignore[arg-type,unused-ignore] monkeypatch.setenv("AWS_DEFAULT_REGION", region) monkeypatch.setenv("AWS_S3_BUCKET", bucket) @@ -317,6 +318,9 @@ def get_mock_custom_oci_backend(): def pull_mock_imple(base_image, dest_dir, **kwargs): pathlib.Path(dest_dir).joinpath("oci-layout").write_text(oci_layout_contents) pathlib.Path(dest_dir).joinpath("index.json").write_text(index_json_contents) + blobs_sha256_dir = pathlib.Path(dest_dir).joinpath("blobs").joinpath("sha256") + blobs_sha256_dir.mkdir(parents=True, exist_ok=True) + blobs_sha256_dir.joinpath("unused-blob").write_text("{}") pull_mock.side_effect = pull_mock_imple return BackendDefinition( diff --git a/clients/python/tests/extras/async_task_runner.py b/clients/python/tests/extras/async_task_runner.py index 3028a0b515..d9a4ada5b8 100644 --- a/clients/python/tests/extras/async_task_runner.py +++ 
b/clients/python/tests/extras/async_task_runner.py @@ -52,7 +52,7 @@ def get_instance(): assert AsyncTaskRunner.__instance is not None return AsyncTaskRunner.__instance - def __init__(self): + def __init__(self) -> None: """Initialize.""" # make sure it is a singleton if AsyncTaskRunner.__instance is not None: @@ -66,7 +66,7 @@ def __init__(self): # register exit handler atexit.register(self._close) - def _close(self): + def _close(self) -> None: """Clean up. Stop the loop if running.""" if self.__io_loop: self.__io_loop.stop() diff --git a/clients/python/tests/fuzz_api/conftest.py b/clients/python/tests/fuzz_api/conftest.py index 845ab796ee..3b1bbf681a 100644 --- a/clients/python/tests/fuzz_api/conftest.py +++ b/clients/python/tests/fuzz_api/conftest.py @@ -5,7 +5,7 @@ from typing import Any import pytest -import requests +import requests # type: ignore[import-untyped,unused-ignore] import schemathesis from schemathesis import Case, Response from schemathesis.generation.stateful.state_machine import APIStateMachine @@ -15,33 +15,60 @@ @pytest.fixture -def generated_schema(request: pytest.FixtureRequest, pytestconfig: pytest.Config) -> BaseOpenAPISchema: +def generated_schema(request: pytest.FixtureRequest, pytestconfig: pytest.Config, + verify_ssl: bool) -> BaseOpenAPISchema: """Generate schema for the API based on the schema_file parameter""" schema_file = getattr(request, "param", "model-registry.yaml") os.environ["API_HOST"] = REGISTRY_URL - config = schemathesis.config.SchemathesisConfig.from_path(f"{pytestconfig.rootpath}/schemathesis.toml") - local_schema_path = f"{pytestconfig.rootpath}/../../api/openapi/{schema_file}" + + # Read and modify schemathesis.toml if verify_ssl is False + toml_path = f"{pytestconfig.rootpath}/schemathesis.toml" + config = schemathesis.config.SchemathesisConfig.from_path(toml_path) + # tls-verify is by default true + if verify_ssl is False: + with open(toml_path) as f: + toml_content = f.read() + + # Replace tls-verify = true with 
tls-verify = false + modified_content = toml_content.replace("tls-verify = true", "tls-verify = false") + + # Write to temporary file + import tempfile + with tempfile.NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as temp_file: + temp_file.write(modified_content) + temp_toml_path = temp_file.name + + config = schemathesis.config.SchemathesisConfig.from_path(temp_toml_path) + + # Clean up temp file later + os.unlink(temp_toml_path) + print(f"Generating schema for {config}") schema = schemathesis.openapi.from_path( - path=local_schema_path, + path=f"{pytestconfig.rootpath}/../../api/openapi/{schema_file}", config=config, ) schema.config.output.sanitization.update(enabled=False) + return schema + @pytest.fixture -def auth_headers(setup_env_user_token): +def auth_headers(user_token: str) -> dict[str, str]: """Provides authorization headers for API requests.""" return { "Content-Type": "application/json", - "Authorization": f"Bearer {setup_env_user_token}" + "Authorization": f"Bearer {user_token}", } + @pytest.fixture -def state_machine(generated_schema: BaseOpenAPISchema, auth_headers: str, pytestconfig: pytest.Config) -> APIStateMachine: +def state_machine(generated_schema: BaseOpenAPISchema, auth_headers: str, pytestconfig: pytest.Config, + verify_ssl: bool) -> APIStateMachine: BaseAPIWorkflow = generated_schema.as_state_machine() class APIWorkflow(BaseAPIWorkflow): # type: ignore headers: dict[str, str] + verify: bool def setup(self) -> None: print("Cleaning up database") @@ -52,22 +79,26 @@ def setup(self) -> None: capture_output=True, check=True ) - self.headers = auth_headers + self.headers = auth_headers # type: ignore[assignment] + self.verify = verify_ssl def before_call(self, case: Case) -> None: print(f"Checking: {case.method} {case.path}") + def get_call_kwargs(self, case: Case) -> dict[str, Any]: - return {"verify": False, "headers": self.headers} + return {"verify": self.verify, "headers": self.headers} def after_call(self, response: 
Response, case: Case) -> None: print(f"{case.method} {case.path} -> {response.status_code},") - return APIWorkflow + + return APIWorkflow # type: ignore[return-value,unused-ignore] @pytest.fixture -def cleanup_artifacts(request: pytest.FixtureRequest, auth_headers: dict): +def cleanup_artifacts(request: pytest.FixtureRequest, auth_headers: dict, verify_ssl: bool): """Cleanup artifacts created during the test.""" created_ids = [] + def register(artifact_id): created_ids.append(artifact_id) @@ -76,17 +107,20 @@ def register(artifact_id): for artifact_id in created_ids: del_url = f"{REGISTRY_URL}/api/model_registry/v1alpha3/artifacts/{artifact_id}" try: - requests.delete(del_url, headers=auth_headers, timeout=DEFAULT_API_TIMEOUT) + requests.delete(del_url, headers=auth_headers, timeout=DEFAULT_API_TIMEOUT, verify=verify_ssl) except Exception as e: print(f"Failed to delete artifact {artifact_id}: {e}") + @pytest.fixture -def artifact_resource(): +def artifact_resource(verify_ssl: bool): """Create an artifact resource for the test.""" + @contextlib.contextmanager def _artifact_resource(auth_headers: dict, payload: dict) -> Generator[str, None, None]: create_endpoint = f"{REGISTRY_URL}/api/model_registry/v1alpha3/artifacts" - resp = requests.post(create_endpoint, headers=auth_headers, json=payload, timeout=DEFAULT_API_TIMEOUT) + resp = requests.post(create_endpoint, headers=auth_headers, json=payload, timeout=DEFAULT_API_TIMEOUT, + verify=verify_ssl) resp.raise_for_status() artifact_id = resp.json()["id"] try: @@ -94,8 +128,9 @@ def _artifact_resource(auth_headers: dict, payload: dict) -> Generator[str, None finally: del_url = f"{REGISTRY_URL}/api/model_registry/v1alpha3/artifacts/{artifact_id}" try: - requests.delete(del_url, headers=auth_headers, timeout=DEFAULT_API_TIMEOUT) + requests.delete(del_url, headers=auth_headers, timeout=DEFAULT_API_TIMEOUT, verify=verify_ssl) except Exception as e: print(f"Failed to delete artifact {artifact_id}: {e}") + return 
_artifact_resource diff --git a/clients/python/tests/fuzz_api/model_catalog/test_catalog_stateless.py b/clients/python/tests/fuzz_api/model_catalog/test_catalog_stateless.py index 766954e262..1da92c1f31 100644 --- a/clients/python/tests/fuzz_api/model_catalog/test_catalog_stateless.py +++ b/clients/python/tests/fuzz_api/model_catalog/test_catalog_stateless.py @@ -10,9 +10,9 @@ deadline=None, ) @pytest.mark.fuzz -def test_catalog_api_stateless(auth_headers: dict, case: schemathesis.Case): +def test_catalog_api_stateless(auth_headers: dict, case: schemathesis.Case, verify_ssl: bool) -> None: """Test the Model Catalog API endpoints. This test uses schemathesis to generate and validate API requests """ - case.call_and_validate(headers=auth_headers) + case.call_and_validate(headers=auth_headers, verify=verify_ssl) diff --git a/clients/python/tests/fuzz_api/model_registry/test_mr_stateless.py b/clients/python/tests/fuzz_api/model_registry/test_mr_stateless.py index e370574621..b83ebf05f3 100644 --- a/clients/python/tests/fuzz_api/model_registry/test_mr_stateless.py +++ b/clients/python/tests/fuzz_api/model_registry/test_mr_stateless.py @@ -1,10 +1,12 @@ +from __future__ import annotations + import logging import secrets import time from typing import Any, Callable import pytest -import requests +import requests # type: ignore[import-untyped,unused-ignore] import schemathesis from hypothesis import HealthCheck, settings @@ -16,6 +18,54 @@ ) +def contains_null_bytes(data: Any) -> bool: + """Check if the data contains null bytes that would cause validation errors.""" + if isinstance(data, str): + return "\x00" in data + if isinstance(data, dict): + # Check both keys and values for null bytes + return any(contains_null_bytes(key) or contains_null_bytes(value) for key, value in data.items()) + if isinstance(data, list): + return any(contains_null_bytes(item) for item in data) + return False + + +def call_and_validate_with_null_byte_handling(case: schemathesis.Case, 
auth_headers: dict, verify_ssl: bool) -> None: + """Execute case validation with proper null byte error handling. + + This utility function handles the common pattern of calling case.call_and_validate() + and catching exceptions related to null bytes in the request. If null bytes are detected + in the query or body and the error is a 400 or CheckFailed, the test is skipped as expected. + Otherwise, the exception is re-raised. + + Args: + case: The Schemathesis test case to validate + auth_headers: Authentication headers for the API + verify_ssl: Whether to verify SSL certificates + + Raises: + Exception: Re-raises any exception that is not related to null byte validation + """ + try: + case.call_and_validate(headers=auth_headers, verify=verify_ssl) + except Exception as e: + request_has_null_bytes = False + if case.query: + request_has_null_bytes = contains_null_bytes(case.query) + if not request_has_null_bytes and hasattr(case, "body") and case.body: + try: + if isinstance(case.body, (str, bytes)): + body_str = case.body.decode("utf-8") if isinstance(case.body, bytes) else case.body + request_has_null_bytes = contains_null_bytes(body_str) + except (UnicodeDecodeError, AttributeError): + pass + if request_has_null_bytes and ("400" in str(e) or "CheckFailed" in str(type(e).__name__)): + logging.info(f"Expected 400 error for null bytes: {case.path}") + pytest.skip("Expected validation error for null bytes") + else: + raise + + # Helper functions for common operations def generate_random_id() -> str: """Generate a random ID between 100000 and 2000000000.""" @@ -30,7 +80,7 @@ def generate_unique_timestamp() -> str: def build_artifact_payload(artifact_type: str, uri_prefix: str, state: str, name: str, - description: str = None, external_id: str = None) -> dict[str, Any]: + description: str | None = None, external_id: str | None = None) -> dict[str, Any]: """Build a payload for creating an artifact based on its type. 
Args: @@ -44,7 +94,7 @@ def build_artifact_payload(artifact_type: str, uri_prefix: str, state: str, name Returns: Dictionary containing the artifact payload """ - payload = { + payload: dict[str, Any] = { "artifactType": artifact_type, "name": name, "state": state, @@ -75,7 +125,7 @@ def build_artifact_payload(artifact_type: str, uri_prefix: str, state: str, name return payload -def validate_artifact_response(response: requests.Response, expected_payload: dict[str, Any]) -> str: +def validate_artifact_response(response: requests.Response, expected_payload: dict[str, Any]) -> str | None: """Validate artifact creation response and return the artifact ID. Args: @@ -89,9 +139,18 @@ def validate_artifact_response(response: requests.Response, expected_payload: di AssertionError: If validation fails """ # Check response status - assert response.status_code in {200, 201}, f"Expected 200 or 201, got {response.status_code}: {response.text}" + assert response.status_code in {200, 201, 404}, f"Expected 200, 201, or 404, got {response.status_code}: {response.text}" response_json = response.json() + + # Handle error responses (404) + if response.status_code == 404: + assert "message" in response_json, "Error response should contain 'message' field" + assert "not found" in response_json["message"].lower(), f"Error message should contain 'not found', got: {response_json['message']}" + # For 404 responses, we don't return an ID since the artifact wasn't created + return None + + # Handle success responses (200, 201) assert response_json.get("id"), "Response body should contain 'id'" # Validate response matches payload @@ -101,12 +160,12 @@ def validate_artifact_response(response: requests.Response, expected_payload: di return response_json["id"] -def create_experiment_and_run(auth_headers: dict[str, str]) -> tuple[str, str]: +def create_experiment_and_run(auth_headers: dict[str, str], verify_tls: bool) -> tuple[str, str]: """Create an experiment and an experiment run. 
Args: auth_headers: Authentication headers for the API - + verify_tls: Verify TLS Returns: Tuple of (experiment_id, experiment_run_id) @@ -123,7 +182,7 @@ def create_experiment_and_run(auth_headers: dict[str, str]) -> tuple[str, str]: f"{REGISTRY_URL}/api/model_registry/v1alpha3/experiments", headers=auth_headers, json=experiment_payload, - timeout=DEFAULT_API_TIMEOUT + timeout=DEFAULT_API_TIMEOUT, verify=verify_tls ) assert exp_response.status_code in {200, 201}, f"Failed to create experiment: {exp_response.text}" experiment_id = exp_response.json()["id"] @@ -139,7 +198,7 @@ def create_experiment_and_run(auth_headers: dict[str, str]) -> tuple[str, str]: f"{REGISTRY_URL}/api/model_registry/v1alpha3/experiment_runs", headers=auth_headers, json=experiment_run_payload, - timeout=DEFAULT_API_TIMEOUT + timeout=DEFAULT_API_TIMEOUT, verify=verify_tls ) assert run_response.status_code in {200, 201}, f"Failed to create experiment run: {run_response.text}" experiment_run_id = run_response.json()["id"] @@ -147,33 +206,46 @@ def create_experiment_and_run(auth_headers: dict[str, str]) -> tuple[str, str]: return experiment_id, experiment_run_id -def cleanup_experiment_and_run(auth_headers: dict[str, str], experiment_id: str, experiment_run_id: str) -> None: +def cleanup_experiment_and_run(auth_headers: dict[str, str], experiment_id: str, experiment_run_id: str, + verify_tls: bool) -> None: """Best effort cleanup of experiment run and experiment. 
Args: auth_headers: Authentication headers for the API experiment_id: ID of the experiment to delete experiment_run_id: ID of the experiment run to delete + verify_tls: Verify TLS """ try: requests.delete( f"{REGISTRY_URL}/api/model_registry/v1alpha3/experiment_runs/{experiment_run_id}", headers=auth_headers, - timeout=DEFAULT_API_TIMEOUT + timeout=DEFAULT_API_TIMEOUT, verify=verify_tls ) requests.delete( f"{REGISTRY_URL}/api/model_registry/v1alpha3/experiments/{experiment_id}", headers=auth_headers, - timeout=DEFAULT_API_TIMEOUT + timeout=DEFAULT_API_TIMEOUT, verify=verify_tls ) except Exception as e: logging.warning(f"Failed to cleanup experiment (id={experiment_id}) and/or experiment run (id={experiment_run_id}): {e}") +# Null byte validation for Model Registry API endpoints specifically +@schemathesis.check +def check_null_byte_validation_mr(ctx, response, case): + """Validate that Model Registry requests with PostgreSQL null byte errors return 400 Bad Request.""" + + # Only check model registry endpoints + if not case.path.startswith("/api/model_registry/"): + return # Skip validation for non-model-registry endpoints + + schema = schemathesis.pytest.from_fixture("generated_schema") -schema = ( +# Base schema with common exclusions +base_schema = ( schema .exclude( path="/api/model_registry/v1alpha3/artifacts/{id}", @@ -188,8 +260,13 @@ def cleanup_experiment_and_run(auth_headers: dict[str, str], experiment_id: str, method="POST" ) ) + +# Split tests by HTTP method AND resource type for maximum parallelization +# This creates many small test functions that can be distributed across workers + +# GET endpoints - split by resource type @pytest.mark.parametrize("generated_schema", ["model-registry.yaml"], indirect=True) -@schema.parametrize() +@base_schema.include(method="GET", path_regex=".*/(registered_model|model_version).*").parametrize() @settings( max_examples=100, deadline=None, @@ -200,17 +277,170 @@ def cleanup_experiment_and_run(auth_headers: dict[str, 
str], experiment_id: str, ], ) @pytest.mark.fuzz -def test_mr_api_stateless(auth_headers: dict, case: schemathesis.Case): - """Test the Model Registry API endpoints. +def test_mr_api_stateless_get_models(auth_headers: dict, case: schemathesis.Case, verify_ssl: bool) -> None: + """Test GET endpoints for RegisteredModels and ModelVersions.""" + call_and_validate_with_null_byte_handling(case, auth_headers, verify_ssl) - This test uses schemathesis to generate and validate API requests - """ - case.call_and_validate(headers=auth_headers) + +@pytest.mark.parametrize("generated_schema", ["model-registry.yaml"], indirect=True) +@base_schema.include(method="GET", path_regex=".*/artifact.*").parametrize() +@settings( + max_examples=100, + deadline=None, + suppress_health_check=[ + HealthCheck.filter_too_much, + HealthCheck.too_slow, + HealthCheck.data_too_large, + ], +) +@pytest.mark.fuzz +def test_mr_api_stateless_get_artifacts(auth_headers: dict, case: schemathesis.Case, verify_ssl: bool) -> None: + """Test GET endpoints for Artifacts and ModelArtifacts.""" + call_and_validate_with_null_byte_handling(case, auth_headers, verify_ssl) + + +@pytest.mark.parametrize("generated_schema", ["model-registry.yaml"], indirect=True) +@base_schema.include(method="GET", path_regex=".*/experiment.*").parametrize() +@settings( + max_examples=100, + deadline=None, + suppress_health_check=[ + HealthCheck.filter_too_much, + HealthCheck.too_slow, + HealthCheck.data_too_large, + ], +) +@pytest.mark.fuzz +def test_mr_api_stateless_get_experiments(auth_headers: dict, case: schemathesis.Case, verify_ssl: bool) -> None: + """Test GET endpoints for Experiments and ExperimentRuns.""" + call_and_validate_with_null_byte_handling(case, auth_headers, verify_ssl) + + +@pytest.mark.parametrize("generated_schema", ["model-registry.yaml"], indirect=True) +@base_schema.include(method="GET", path_regex=".*/inference_service.*|.*/serving_environment.*|.*/serve.*").parametrize() +@settings( + max_examples=100, 
+ deadline=None, + suppress_health_check=[ + HealthCheck.filter_too_much, + HealthCheck.too_slow, + HealthCheck.data_too_large, + ], +) +@pytest.mark.fuzz +def test_mr_api_stateless_get_serving(auth_headers: dict, case: schemathesis.Case, verify_ssl: bool) -> None: + """Test GET endpoints for InferenceServices and ServingEnvironments.""" + call_and_validate_with_null_byte_handling(case, auth_headers, verify_ssl) + + +# POST endpoints - split by resource type +@pytest.mark.parametrize("generated_schema", ["model-registry.yaml"], indirect=True) +@base_schema.include(method="POST", path_regex=".*/(registered_model|model_version).*").parametrize() +@settings( + max_examples=100, + deadline=None, + suppress_health_check=[ + HealthCheck.filter_too_much, + HealthCheck.too_slow, + HealthCheck.data_too_large, + ], +) +@pytest.mark.fuzz +def test_mr_api_stateless_post_models(auth_headers: dict, case: schemathesis.Case, verify_ssl: bool) -> None: + """Test POST endpoints for RegisteredModels and ModelVersions.""" + call_and_validate_with_null_byte_handling(case, auth_headers, verify_ssl) + + +@pytest.mark.parametrize("generated_schema", ["model-registry.yaml"], indirect=True) +@base_schema.include(method="POST", path_regex=".*/artifact.*").parametrize() +@settings( + max_examples=100, + deadline=None, + suppress_health_check=[ + HealthCheck.filter_too_much, + HealthCheck.too_slow, + HealthCheck.data_too_large, + ], +) +@pytest.mark.fuzz +def test_mr_api_stateless_post_artifacts(auth_headers: dict, case: schemathesis.Case, verify_ssl: bool) -> None: + """Test POST endpoints for Artifacts.""" + call_and_validate_with_null_byte_handling(case, auth_headers, verify_ssl) + + +@pytest.mark.parametrize("generated_schema", ["model-registry.yaml"], indirect=True) +@base_schema.include(method="POST", path_regex=".*/experiment.*").parametrize() +@settings( + max_examples=100, + deadline=None, + suppress_health_check=[ + HealthCheck.filter_too_much, + HealthCheck.too_slow, + 
HealthCheck.data_too_large, + ], +) +@pytest.mark.fuzz +def test_mr_api_stateless_post_experiments(auth_headers: dict, case: schemathesis.Case, verify_ssl: bool) -> None: + """Test POST endpoints for Experiments and ExperimentRuns.""" + call_and_validate_with_null_byte_handling(case, auth_headers, verify_ssl) + + +@pytest.mark.parametrize("generated_schema", ["model-registry.yaml"], indirect=True) +@base_schema.include(method="POST", path_regex=".*/inference_service.*|.*/serving_environment.*").parametrize() +@settings( + max_examples=100, + deadline=None, + suppress_health_check=[ + HealthCheck.filter_too_much, + HealthCheck.too_slow, + HealthCheck.data_too_large, + ], +) +@pytest.mark.fuzz +def test_mr_api_stateless_post_serving(auth_headers: dict, case: schemathesis.Case, verify_ssl: bool) -> None: + """Test POST endpoints for InferenceServices and ServingEnvironments.""" + call_and_validate_with_null_byte_handling(case, auth_headers, verify_ssl) + + +# PATCH endpoints - split by resource type +@pytest.mark.parametrize("generated_schema", ["model-registry.yaml"], indirect=True) +@base_schema.include(method="PATCH", path_regex=".*/(registered_model|model_version).*").parametrize() +@settings( + max_examples=100, + deadline=None, + suppress_health_check=[ + HealthCheck.filter_too_much, + HealthCheck.too_slow, + HealthCheck.data_too_large, + ], +) +@pytest.mark.fuzz +def test_mr_api_stateless_patch_models(auth_headers: dict, case: schemathesis.Case, verify_ssl: bool) -> None: + """Test PATCH endpoints for RegisteredModels and ModelVersions.""" + call_and_validate_with_null_byte_handling(case, auth_headers, verify_ssl) + + +@pytest.mark.parametrize("generated_schema", ["model-registry.yaml"], indirect=True) +@base_schema.include(method="PATCH", path_regex=".*/artifact.*|.*/experiment.*|.*/inference_service.*|.*/serving_environment.*").parametrize() +@settings( + max_examples=100, + deadline=None, + suppress_health_check=[ + HealthCheck.filter_too_much, + 
HealthCheck.too_slow, + HealthCheck.data_too_large, + ], +) +@pytest.mark.fuzz +def test_mr_api_stateless_patch_others(auth_headers: dict, case: schemathesis.Case, verify_ssl: bool) -> None: + """Test PATCH endpoints for Artifacts, Experiments, and Serving resources.""" + call_and_validate_with_null_byte_handling(case, auth_headers, verify_ssl) @pytest.mark.fuzz @pytest.mark.parametrize(("artifact_type", "uri_prefix"), ARTIFACT_TYPE_PARAMS) @pytest.mark.parametrize("state", ARTIFACT_STATES) -def test_post_model_version_artifacts(auth_headers: dict, artifact_type: str, uri_prefix: str, state: str, cleanup_artifacts: Callable): +def test_post_model_version_artifacts(auth_headers: dict, artifact_type: str, uri_prefix: str, state: str, + cleanup_artifacts: Callable, verify_ssl: bool): """ Direct test for POST /api/model_registry/v1alpha3/model_versions/{modelversionId}/artifacts. """ @@ -227,24 +457,26 @@ def test_post_model_version_artifacts(auth_headers: dict, artifact_type: str, ur ) # Make the API request - response = requests.post(endpoint, headers=auth_headers, json=payload, timeout=DEFAULT_API_TIMEOUT) + response = requests.post(endpoint, headers=auth_headers, json=payload, timeout=DEFAULT_API_TIMEOUT, verify=verify_ssl) # Validate response and get artifact ID artifact_id = validate_artifact_response(response, payload) - # Cleanup after successful creation - cleanup_artifacts(artifact_id) + # Cleanup after successful creation (only if artifact was created) + if artifact_id is not None: + cleanup_artifacts(artifact_id) @pytest.mark.fuzz @pytest.mark.parametrize(("artifact_type", "uri_prefix"), ARTIFACT_TYPE_PARAMS) @pytest.mark.parametrize("state", ARTIFACT_STATES) -def test_post_experiment_run_artifacts(auth_headers: dict, artifact_type: str, uri_prefix: str, state: str, cleanup_artifacts: Callable): +def test_post_experiment_run_artifacts(auth_headers: dict, artifact_type: str, uri_prefix: str, state: str, + cleanup_artifacts: Callable, verify_ssl: bool): """ 
Direct test for POST /api/model_registry/v1alpha3/experiment_runs/{experimentrunId}/artifacts. """ # Create experiment and experiment run using helper - experiment_id, experiment_run_id = create_experiment_and_run(auth_headers) + experiment_id, experiment_run_id = create_experiment_and_run(auth_headers=auth_headers, verify_tls=verify_ssl) endpoint = f"{REGISTRY_URL}/api/model_registry/v1alpha3/experiment_runs/{experiment_run_id}/artifacts" @@ -258,21 +490,23 @@ def test_post_experiment_run_artifacts(auth_headers: dict, artifact_type: str, u ) # Make the API request - response = requests.post(endpoint, headers=auth_headers, json=payload, timeout=DEFAULT_API_TIMEOUT) + response = requests.post(endpoint, headers=auth_headers, json=payload, timeout=DEFAULT_API_TIMEOUT, verify=verify_ssl) # Validate response and get artifact ID artifact_id = validate_artifact_response(response, payload) - # Cleanup artifacts - cleanup_artifacts(artifact_id) + # Cleanup artifacts (only if artifact was created) + if artifact_id is not None: + cleanup_artifacts(artifact_id) # Cleanup experiment and run - cleanup_experiment_and_run(auth_headers, experiment_id, experiment_run_id) + cleanup_experiment_and_run(auth_headers=auth_headers, experiment_id=experiment_id, experiment_run_id=experiment_run_id, verify_tls=verify_ssl) @pytest.mark.fuzz @pytest.mark.parametrize(("artifact_type", "uri_prefix"), ARTIFACT_TYPE_PARAMS) -def test_patch_artifact(auth_headers: dict, artifact_resource: Callable, artifact_type: str, uri_prefix: str): +def test_patch_artifact(auth_headers: dict, artifact_resource: Callable, artifact_type: str, uri_prefix: str, + verify_ssl: bool): """ Direct test for PATCH /api/model_registry/v1alpha3/artifacts/{id}. 
""" @@ -306,7 +540,7 @@ def test_patch_artifact(auth_headers: dict, artifact_resource: Callable, artifac create_payload["uri"] = "s3://my-test-bucket/datasets/initial-dataset.parquet" create_payload["sourceType"] = "s3" elif artifact_type == "metric": - create_payload["value"] = 0.85 + create_payload["value"] = 0.85 # type: ignore[assignment] create_payload["timestamp"] = "1000000000" elif artifact_type == "parameter": create_payload["value"] = "0.01" @@ -322,11 +556,11 @@ def test_patch_artifact(auth_headers: dict, artifact_resource: Callable, artifac # Add type-specific update properties if needed if artifact_type == "metric": - patch_payload["value"] = 0.99 # Updated metric value + patch_payload["value"] = 0.99 # type: ignore[assignment] # Updated metric value elif artifact_type == "parameter": patch_payload["value"] = "0.001" # Updated parameter value - patch_response = requests.patch(patch_endpoint, headers=auth_headers, json=patch_payload, timeout=DEFAULT_API_TIMEOUT) + patch_response = requests.patch(patch_endpoint, headers=auth_headers, json=patch_payload, timeout=DEFAULT_API_TIMEOUT, verify=verify_ssl) assert patch_response.status_code == 200 patch_response_json = patch_response.json() assert patch_response_json.get("id") == artifact_id diff --git a/clients/python/tests/regression_test.py b/clients/python/tests/regression_test.py index 4faa024792..07b6b76da0 100644 --- a/clients/python/tests/regression_test.py +++ b/clients/python/tests/regression_test.py @@ -1,5 +1,5 @@ import pytest -import requests +import requests # type: ignore[import-untyped,unused-ignore] from model_registry import ModelRegistry from model_registry.types.artifacts import ModelArtifact @@ -44,7 +44,7 @@ def test_get_model_without_user_token(setup_env_user_token, client: ModelRegistr model_format_name="test_format", model_format_version="test_version", version=version, - metadata=metadata, + metadata=metadata, # type: ignore[arg-type] ) assert rm.id assert (_rm := 
client.get_registered_model(name)) diff --git a/clients/python/tests/test_client.py b/clients/python/tests/test_client.py index 9e15049940..15eca5856e 100644 --- a/clients/python/tests/test_client.py +++ b/clients/python/tests/test_client.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock import pytest -import requests +import requests # type: ignore[import-untyped,unused-ignore] from model_registry import ModelRegistry, utils from model_registry.exceptions import StoreError @@ -13,9 +13,9 @@ from model_registry.types.artifacts import DocArtifact -def test_secure_client(): - os.environ["CERT"] = "" - os.environ["KF_PIPELINES_SA_TOKEN_PATH"] = "" +def test_secure_client(monkeypatch): + monkeypatch.delenv("CERT", raising=False) + monkeypatch.delenv("KF_PIPELINES_SA_TOKEN_PATH", raising=False) with pytest.raises(StoreError) as e: ModelRegistry("anything", author="test_author") @@ -122,8 +122,8 @@ def test_register_version_long_name(client: ModelRegistry): version=lorem, ) ma = client.get_model_artifact(name="test_model", version=lorem) - assert ma.uri == "https://acme.org/something" - assert ma.model_format_name == "test_format_name" + assert ma.uri == "https://acme.org/something" # type: ignore[union-attr] + assert ma.model_format_name == "test_format_name" # type: ignore[union-attr] with pytest.raises(Exception): # noqa the focus of this test is the failure case, not to fix on the exception being raised client.register_model( @@ -208,7 +208,7 @@ async def test_update_logical_model_with_labels(client: ModelRegistry): ma.custom_properties = ma_labels client.update(ma) - rm = client.get_registered_model(name) + rm = client.get_registered_model(name) # type: ignore[assignment] assert rm assert rm.custom_properties == rm_labels mv = client.get_model_version(name, version) @@ -324,7 +324,7 @@ async def test_update_existing_model_artifact(client: ModelRegistry): assert response.uri == something_else ma = client.get_model_artifact(name, version) - assert ma.uri == 
something_else + assert ma.uri == something_else # type: ignore[union-attr] @pytest.mark.e2e @@ -339,7 +339,7 @@ async def test_get(client: ModelRegistry): model_format_name="test_format", model_format_version="test_version", version=version, - metadata=metadata, + metadata=metadata, # type: ignore[arg-type] ) assert rm.id @@ -543,7 +543,7 @@ def test_get_model_versions_order_by(client: ModelRegistry): mvs, client.get_model_versions(name).order_by_id(), ): - assert mv.id == by_id.id + assert mv.id == by_id.id # type: ignore[union-attr] i += 1 assert i == models @@ -553,7 +553,7 @@ def test_get_model_versions_order_by(client: ModelRegistry): mvs, client.get_model_versions(name).order_by_creation_time(), ): - assert mv.id == by_creation.id + assert mv.id == by_creation.id # type: ignore[union-attr] i += 1 assert i == models @@ -563,21 +563,21 @@ def test_get_model_versions_order_by(client: ModelRegistry): mvs, client.get_model_versions(name).order_by_update_time(), ): - assert mv.id == by_update.id + assert mv.id == by_update.id # type: ignore[union-attr] i += 1 assert i == models for mv in reversed(mvs): - mv.description = "updated" - client.update(mv) + mv.description = "updated" # type: ignore[union-attr] + client.update(mv) # type: ignore[type-var] i = 0 for mv, by_update in zip( reversed(mvs), client.get_model_versions(name).order_by_update_time(), ): - assert mv.id == by_update.id + assert mv.id == by_update.id # type: ignore[union-attr] i += 1 assert i == models @@ -587,7 +587,7 @@ def test_get_model_versions_order_by(client: ModelRegistry): mvs, client.get_model_versions(name).order_by_update_time().descending(), ): - assert mv.id == by_update.id + assert mv.id == by_update.id # type: ignore[union-attr] i += 1 assert i == models @@ -862,7 +862,7 @@ def test_nested_recursive_store_in_s3( @pytest.mark.e2e def test_custom_async_runner_with_ray( - client_attrs: dict[str, any], client: ModelRegistry, monkeypatch + client_attrs: dict[str, any], client: 
ModelRegistry, user_token: str, monkeypatch # type: ignore[valid-type] ): """Test Ray integration with uvloop event loop policy""" import asyncio @@ -901,6 +901,7 @@ def test_with_ray(): author=client_attrs["author"], is_secure=client_attrs["ssl"], async_runner=atr.run, + user_token=user_token, ) client.register_model( name="test_model", @@ -910,8 +911,8 @@ def test_with_ray(): model_format_name="onnx", ) ma = client.get_model_artifact(name="test_model", version="v1") - assert ma.uri == "https://acme.org/something" - assert ma.model_format_name == "onnx" + assert ma.uri == "https://acme.org/something" # type: ignore[union-attr] + assert ma.model_format_name == "onnx" # type: ignore[union-attr] # Run the Ray test - ray.get is synchronous ray.get(test_with_ray.remote()) @@ -1074,25 +1075,25 @@ async def test_register_model_with_s3_data_connection(client: ModelRegistry): } # Register the model with S3 connection details - rm = client.register_model(**model_params) + rm = client.register_model(**model_params) # type: ignore[arg-type] assert rm.id # Get and verify the registered model rm_by_name = client.get_registered_model(model_params["name"]) - assert rm_by_name.id == rm.id + assert rm_by_name.id == rm.id # type: ignore[union-attr] # Get and verify the model version mv = client.get_model_version(model_params["name"], model_params["version"]) - assert mv.description == "The Model" - assert mv.name == "v1.0" + assert mv.description == "The Model" # type: ignore[union-attr] + assert mv.name == "v1.0" # type: ignore[union-attr] # Get and verify the model artifact ma = client.get_model_artifact(model_params["name"], model_params["version"]) - assert ma.uri == uri - assert ma.model_format_name == "onnx" - assert ma.model_format_version == "1" - assert ma.storage_key == data_connection_name - assert ma.storage_path == s3_path + assert ma.uri == uri # type: ignore[union-attr] + assert ma.model_format_name == "onnx" # type: ignore[union-attr] + assert ma.model_format_version 
== "1" # type: ignore[union-attr] + assert ma.storage_key == data_connection_name # type: ignore[union-attr] + assert ma.storage_path == s3_path # type: ignore[union-attr] @pytest.mark.e2e diff --git a/clients/python/tests/test_core.py b/clients/python/tests/test_core.py index 890261de9e..d2e1504ebb 100644 --- a/clients/python/tests/test_core.py +++ b/clients/python/tests/test_core.py @@ -369,10 +369,7 @@ async def test_page_through_model_version_artifacts( _ = registered_model models = 6 for i in range(models): - if i % 2 == 0: - art = ModelArtifact(name=f"ma{i}", uri="uri") - else: - art = DocArtifact(name=f"ma{i}", uri="uri") + art = ModelArtifact(name=f"ma{i}", uri="uri") if i % 2 == 0 else DocArtifact(name=f"ma{i}", uri="uri") await client.upsert_model_version_artifact(art, str(model_version.id)) pager = Pager( lambda o: client.get_model_version_artifacts(str(model_version.id), o) diff --git a/clients/python/tests/test_experiments.py b/clients/python/tests/test_experiments.py index b23c8316cd..bf5f6a84d9 100644 --- a/clients/python/tests/test_experiments.py +++ b/clients/python/tests/test_experiments.py @@ -28,7 +28,7 @@ def artifacts_list(options: ListOptions) -> list[Artifact]: @pytest.fixture def schema_json(): - schema = {"epochs": {}} + schema: dict[str, dict] = {"epochs": {}} return json.dumps(schema) @@ -40,15 +40,15 @@ def test_start_experiment_run(client: ModelRegistry, schema_json: str): key="rval", value=10, step=4, - timestamp="0", + timestamp="0", # type: ignore[arg-type] description="This is a test metric", ) run.log_dataset( name="dataset_1", source_type="local", uri="s3://datasets/test", - schema=schema_json, - profile="random_profile", + schema=schema_json, # type: ignore[arg-type] + profile="random_profile", # type: ignore[arg-type] ) assert len(run.get_logs()) == 3 @@ -59,10 +59,10 @@ def test_start_experiment_run(client: ModelRegistry, schema_json: str): assert metric assert dataset - assert param.value == 5.75 - assert metric.value == 10 - 
assert metric.step == 4 - assert metric.timestamp == "0" + assert param.value == 5.75 # type: ignore[union-attr] + assert metric.value == 10 # type: ignore[union-attr] + assert metric.step == 4 # type: ignore[union-attr] + assert metric.timestamp == "0" # type: ignore[union-attr] assert metric.description == "This is a test metric" assert metric.name == "rval" @@ -76,10 +76,10 @@ def test_start_experiment_run_with_advanced_scenarios( run.log_param("input1", 5.75) run.log_param("input1", 500) for i in range(10): - run.log_metric(f"metric_{i}", value=i * 1000, step=i, timestamp="0") + run.log_metric(f"metric_{i}", value=i * 1000, step=i, timestamp="0") # type: ignore[arg-type] assert len(run.get_logs()) == 11 - assert run.get_log("params", "input1").value == 500 + assert run.get_log("params", "input1").value == 500 # type: ignore[union-attr] with client.start_experiment_run( experiment_name="Experiment_Test_URI_Provided" @@ -88,10 +88,10 @@ def test_start_experiment_run_with_advanced_scenarios( name="dataset_1", source_type="s3", uri="s3://datasets/test", - schema=schema_json, - profile="random_profile", + schema=schema_json, # type: ignore[arg-type] + profile="random_profile", # type: ignore[arg-type] ) - assert run.get_log("datasets", "dataset_1").uri == "s3://datasets/test" + assert run.get_log("datasets", "dataset_1").uri == "s3://datasets/test" # type: ignore[union-attr] # Test actual model_dir, _ = get_temp_dir_with_models @@ -100,8 +100,8 @@ def test_start_experiment_run_with_advanced_scenarios( run.log_dataset( name="dataset_1", source_type="local", - schema=schema_json, - profile="random_profile", + schema=schema_json, # type: ignore[arg-type] + profile="random_profile", # type: ignore[arg-type] file_path=model_dir, s3_auth=utils.S3Params( endpoint_url=s3_endpoint, @@ -112,7 +112,7 @@ def test_start_experiment_run_with_advanced_scenarios( region=region, ), ) - assert run.get_log("datasets", "dataset_1").uri.startswith("s3://") + assert 
run.get_log("datasets", "dataset_1").uri.startswith("s3://") # type: ignore[union-attr] @pytest.mark.e2e @@ -137,7 +137,7 @@ def test_get_experiment_runs(client: ModelRegistry): runs_by_name = client.get_experiment_runs(experiment_name="Experiment_Test_3") runs_by_id = client.get_experiment_runs(experiment_id=run.info.experiment_id) - assert runs_by_name.next_item().id == runs_by_id.next_item().id + assert runs_by_name.next_item().id == runs_by_id.next_item().id # type: ignore[attr-defined] runs_by_name.restart() runs_by_id.restart() @@ -165,15 +165,15 @@ def test_get_experiment_run_with_artifact_types( name="dataset_1", source_type="local", uri="s3://datasets/test", - schema=schema_json, - profile="random_profile", + schema=schema_json, # type: ignore[arg-type] + profile="random_profile", # type: ignore[arg-type] description="This is a test dataset", ) run.log_metric( key="metric_1", value=10, step=4, - timestamp="0", + timestamp="0", # type: ignore[arg-type] description="This is a test metric", ) run.log_param( @@ -185,12 +185,12 @@ def test_get_experiment_run_with_artifact_types( dataset_log = client.get_experiment_run_logs( run_id=run.info.id, ) - assert dataset_log.next_item().name.endswith("1") + assert dataset_log.next_item().name.endswith("1") # type: ignore[attr-defined] assert dataset_log.next_item() assert dataset_log.next_item() try: # fail if we get a 4th item - dataset_log.next_item() + dataset_log.next_item() # type: ignore[unused-coroutine] pytest.fail("Expected StopIteration") except StopIteration: assert True @@ -220,20 +220,20 @@ def test_start_experiment_run_nested(client: ModelRegistry): ) # Assert logs are correct for artifact in client.get_experiment_run_logs(run_id=run.info.id): - assert artifact.value == 10 - assert "nested" not in artifact.description + assert artifact.value == 10 # type: ignore[union-attr] + assert "nested" not in artifact.description # type: ignore[operator] for artifact in 
client.get_experiment_run_logs(run_id=run2.info.id): - assert artifact.value == 20 - assert "nested" in artifact.description + assert artifact.value == 20 # type: ignore[union-attr] + assert "nested" in artifact.description # type: ignore[operator] exp_run = client.get_experiment_run(run_id=run.info.id) assert exp_run.custom_properties is None assert exp_run.experiment_id == run.info.experiment_id exp_run = client.get_experiment_run(run_id=run2.info.id) - assert "kubeflow.parent_run_id" in exp_run.custom_properties - assert exp_run.custom_properties["kubeflow.parent_run_id"] == run.info.id + assert "kubeflow.parent_run_id" in exp_run.custom_properties # type: ignore[operator] + assert exp_run.custom_properties["kubeflow.parent_run_id"] == run.info.id # type: ignore[index] assert exp_run.experiment_id == run.info.experiment_id assert exp_run.name == run2.info.name assert exp_run.id == run2.info.id @@ -410,11 +410,11 @@ def sql_quote(s): assert param_attributes == expected_param_attributes # Verify values are reasonable for metric in metrics: - assert isinstance(metric.value, (int, float)) - assert metric.value > 0 + assert isinstance(metric.value, (int, float)) # type: ignore[attr-defined] + assert metric.value > 0 # type: ignore[attr-defined] for param in params: - assert param.value is not None + assert param.value is not None # type: ignore[attr-defined] # Test filtering by specific metric names using SQL accuracy_metrics = list(get_artifacts(client, artifact_type="metric", filter_query=f'{run_ids_filter} AND name LIKE "%accuracy%"')) diff --git a/clients/python/tests/test_utils.py b/clients/python/tests/test_utils.py index 4951a12064..6b92a3d7c5 100644 --- a/clients/python/tests/test_utils.py +++ b/clients/python/tests/test_utils.py @@ -203,7 +203,7 @@ def test_save_to_oci_registry_backend_not_found(): with pytest.raises( ValueError, match=f"'{backend}' is not an available backend to use." 
) as e: - save_to_oci_registry("", "", [], "", backend) + save_to_oci_registry("", "", [], "", backend) # type: ignore[arg-type] assert f"'{backend}' is not an available backend to use." in str(e.value) diff --git a/clients/ui/Dockerfile b/clients/ui/Dockerfile index ea4d3db42a..4dbf1f18be 100644 --- a/clients/ui/Dockerfile +++ b/clients/ui/Dockerfile @@ -3,7 +3,7 @@ ARG UI_SOURCE_CODE=./frontend ARG BFF_SOURCE_CODE=./bff # Set the base images for the build stages -ARG NODE_BASE_IMAGE=node:20 +ARG NODE_BASE_IMAGE=node:22 ARG GOLANG_BASE_IMAGE=golang:1.24.6 ARG DISTROLESS_BASE_IMAGE=gcr.io/distroless/static:nonroot diff --git a/clients/ui/Makefile b/clients/ui/Makefile index 16eca7af3d..736f4298dd 100644 --- a/clients/ui/Makefile +++ b/clients/ui/Makefile @@ -32,6 +32,7 @@ dev-install-dependencies: .PHONY: dev-bff dev-bff: + trap 'exit 0' INT; \ cd bff && make run PORT=4000 MOCK_K8S_CLIENT=true MOCK_MR_CLIENT=true MOCK_MR_CATALOG_CLIENT=true DEV_MODE=true DEPLOYMENT_MODE=standalone .PHONY: dev-frontend @@ -39,12 +40,14 @@ dev-frontend: cd frontend && DEPLOYMENT_MODE=standalone STYLE_THEME=mui-theme npm run start:dev .PHONY: dev-start -dev-start: +dev-start: + @trap 'exit 0' INT; \ make -j 2 dev-bff dev-frontend ########### Dev Kubeflow ############ .PHONY: dev-start-kubeflow -dev-start-kubeflow: +dev-start-kubeflow: + @trap 'exit 0' INT; \ make -j 2 dev-bff-kubeflow dev-frontend-kubeflow .PHONY: dev-frontend-kubeflow @@ -57,12 +60,13 @@ dev-bff-kubeflow: ########### Dev Federated ########### .PHONY: dev-start-federated -dev-start-federated: +dev-start-federated: + @trap 'exit 0' INT; \ make -j 2 dev-bff-federated dev-frontend-federated .PHONY: dev-frontend-federated dev-frontend-federated: - cd frontend && AUTH_METHOD=user_token DEPLOYMENT_MODE=federated STYLE_THEME=patternfly npm run start:dev + cd frontend && AUTH_METHOD=user_token DEPLOYMENT_MODE=federated STYLE_THEME=patternfly PORT=9100 npm run start:dev .PHONY: dev-bff-federated dev-bff-federated: diff --git 
a/clients/ui/OWNERS b/clients/ui/OWNERS index 5c600aa275..a0d5f1c037 100644 --- a/clients/ui/OWNERS +++ b/clients/ui/OWNERS @@ -4,3 +4,4 @@ approvers: - Griffin-Sullivan - lucferbux - mturley + - manaswinidas diff --git a/clients/ui/api/openapi/mod-arch.yaml b/clients/ui/api/openapi/mod-arch.yaml index d0ae21e7db..dec29d1d92 100644 --- a/clients/ui/api/openapi/mod-arch.yaml +++ b/clients/ui/api/openapi/mod-arch.yaml @@ -84,7 +84,7 @@ paths: - $ref: "#/components/parameters/kubeflowUserId" responses: "200": - $ref: "#/components/responses/ModelRegistryRespone" + $ref: "#/components/responses/ModelRegistryResponse" "401": $ref: "#/components/responses/Unauthorized" "500": @@ -701,12 +701,21 @@ paths: - $ref: "#/components/parameters/kubeflowUserId" - name: source description: |- - Filter models by source. This parameter is currently required and - may only be specified once. + Filter models by source. This parameter may be included multiple + times to include models from multiple sources. schema: type: string in: query - required: true + required: false + - name: sourceLabel + description: |- + Filter models by the label associated with the source. This + parameter may be included multiple times to include models from + multiple sources. + schema: + type: string + in: query + required: false - name: q description: Free-form keyword search used to filter the response. schema: @@ -717,6 +726,7 @@ paths: - $ref: "#/components/parameters/orderBy" - $ref: "#/components/parameters/sortOrder" - $ref: "#/components/parameters/nextPageToken" + - $ref: "#/components/parameters/filterQuery" responses: "200": $ref: "#/components/responses/CatalogModelListResponse" @@ -729,6 +739,24 @@ paths: "500": $ref: "#/components/responses/InternalServerError" operationId: findModels + /api/v1/model_catalog/models/filter_options: + description: Lists options for `filterQuery` when listing models. 
+ get: + summary: Lists fields and available options that can be used in `filterQuery` on the list models endpoint. + tags: + - ModelCatalogService + parameters: + - $ref: "#/components/parameters/kubeflowUserId" + responses: + "200": + $ref: "#/components/responses/FilterOptionsResponse" + "400": + $ref: "#/components/responses/BadRequest" + "401": + $ref: "#/components/responses/Unauthorized" + "500": + $ref: "#/components/responses/InternalServerError" + operationId: findModelsFilterOptions /api/v1/model_catalog/sources: summary: Path used to get the list of catalog sources. description: >- @@ -803,12 +831,13 @@ paths: type: string in: path required: true - - name: CatalogModelName + - name: CatalogModelName description: A unique identifier for the model. schema: type: string in: path required: true + - $ref: "#/components/parameters/catalogArtifactType" responses: "200": $ref: "#/components/responses/CatalogModelArtifactListResponse" @@ -819,7 +848,180 @@ "500": $ref: "#/components/responses/InternalServerError" operationId: getAllModelArtifacts - # TODO: Admin endpoints for model catalog + + # Model catalog settings endpoints + /api/v1/settings/model_catalog/source_configs: + summary: Path used to manage Model catalog sources. + description: The REST endpoint/path used to list and create model catalog sources. + get: + tags: + - K8SOperation + parameters: + - $ref: "#/components/parameters/kubeflowUserId" + responses: + "200": + $ref: "#/components/responses/CatalogSourceConfigListResponse" + "401": + $ref: "#/components/responses/Unauthorized" + "500": + $ref: "#/components/responses/InternalServerError" + operationId: getCatalogSources + summary: List ALL the catalog sources. + description: Gets a list of all catalog sources. + post: + requestBody: + description: A new catalog source to be created, potentially including credentials/yaml.
+ content: + application/json: + schema: + $ref: "#/components/schemas/CatalogSourceConfigPayload" + required: true + tags: + - K8SOperation + parameters: + - $ref: "#/components/parameters/kubeflowUserId" + responses: + "201": + $ref: "#/components/responses/CatalogSourceConfigResponse" + "400": + $ref: "#/components/responses/BadRequest" + "401": + $ref: "#/components/responses/Unauthorized" + "500": + $ref: "#/components/responses/InternalServerError" + operationId: createCatalogSource + summary: Create a Catalog source + description: Creates a new model catalog source. + /api/v1/settings/model_catalog/source_configs/{sourceId}: + summary: Path used to manage Model catalog sources. + description: >- + The REST endpoint/path used to get, update and delete a single ModelCatalog Source. + # Add GET operation + get: + tags: + - K8SOperation + parameters: + - $ref: "#/components/parameters/kubeflowUserId" + - $ref: "#/components/parameters/sourceId" # Use the correct parameter name + responses: + "200": + $ref: "#/components/responses/CatalogSourceConfigResponse" + "401": + $ref: "#/components/responses/Unauthorized" + "404": + $ref: "#/components/responses/NotFound" + "500": + $ref: "#/components/responses/InternalServerError" + operationId: getCatalogSource + summary: Get a Model Catalog Source + description: Gets the details of a single Model Catalog source potentially including related credentials. + patch: + requestBody: + description: Updated Model Catalog Source information. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/CatalogSourceConfigPayload" + required: true + tags: + - K8SOperation + parameters: + - $ref: "#/components/parameters/kubeflowUserId" + - $ref: "#/components/parameters/sourceId" + responses: + "200": + $ref: "#/components/responses/CatalogSourceConfigResponse" + "400": + $ref: "#/components/responses/BadRequest" + "401": + $ref: "#/components/responses/Unauthorized" + "404": + $ref: "#/components/responses/NotFound" + "500": + $ref: "#/components/responses/InternalServerError" + operationId: updateModelCatalogSource + summary: Update a Model Catalog Sources + description: Updates an existing Model Catalog source. + delete: + tags: + - K8SOperation + parameters: + - $ref: "#/components/parameters/kubeflowUserId" + - $ref: "#/components/parameters/sourceId" + responses: + "204": + description: Successfully deleted + "400": + $ref: "#/components/responses/BadRequest" + "401": + $ref: "#/components/responses/Unauthorized" + "404": + $ref: "#/components/responses/NotFound" + "500": + $ref: "#/components/responses/InternalServerError" + operationId: deleteModelCatalogSource + summary: Delete a Model Catalog source + description: Deletes an existing Model Catalog Source. + + /api/v1/settings/model_catalog/source_preview: + description: >- + The REST endpoint/path used to preview the effects of catalog source configuration changes. + post: + summary: Preview catalog source configuration effects + description: >- + Accepts catalog source definitions and returns a preview of which models would be + included or excluded based on the configuration. Supports multipart form uploads + for YAML catalogs files. + tags: + - ModelCatalogService + parameters: + - name: statusFilter + description: |- + Filter the response to show specific model statuses. 
+ - `all` shows all models (default) + - `included` shows only models that would be included + - `excluded` shows only models that would be excluded + schema: + type: string + enum: + - all + - included + - excluded + default: all + in: query + required: false + - $ref: "#/components/parameters/pageSize" + - $ref: "#/components/parameters/nextPageToken" + requestBody: + description: Catalog source configuration + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/CatalogSourcePreviewRequest" + multipart/form-data: + schema: + type: object + properties: + file: + type: string + format: yaml + description: + type: string + description: + Should include a file named `source.json`, and may optionally + include a YAML catalog. The name referenced in the source + definition should match uploaded file name. + responses: + "200": + $ref: "#/components/responses/CatalogSourcePreviewResponse" + "400": + $ref: "#/components/responses/BadRequest" + "401": + $ref: "#/components/responses/Unauthorized" + "500": + $ref: "#/components/responses/InternalServerError" + operationId: previewCatalogSource components: schemas: @@ -835,6 +1037,7 @@ components: clusterAdmin: type: boolean example: true + ModelRegistry: required: - name @@ -1781,6 +1984,36 @@ components: format: int32 description: Number of items in result list. type: integer + CatalogArtifact: + description: A single artifact in the catalog API. + oneOf: + - $ref: "#/components/schemas/CatalogModelArtifact" + - $ref: "#/components/schemas/CatalogMetricsArtifact" + discriminator: + propertyName: artifactType + mapping: + model-artifact: "#/components/schemas/CatalogModelArtifact" + metrics-artifact: "#/components/schemas/CatalogMetricsArtifact" + CatalogMetricsArtifact: + description: A metadata Artifact Entity. 
+ allOf: + - type: object + required: + - artifactType + properties: + artifactType: + type: string + default: metrics-artifact + metricsType: + type: string + example: "accuracy-metrics" + description: An artifact type representing a collection of related metrics, which will either be "performance-metrics" or "accuracy-metrics". + customProperties: + description: User provided custom properties which are not defined by its type. + type: object + additionalProperties: + $ref: "#/components/schemas/MetadataValue" + - $ref: "#/components/schemas/BaseResourceDates" CatalogModel: description: A model in the model catalog. allOf: @@ -1798,16 +2031,19 @@ - $ref: "#/components/schemas/BaseResourceDates" - $ref: "#/components/schemas/CatalogBaseModel" CatalogModelArtifact: - description: A single artifact for a catalog model. + description: A metadata Artifact Entity. allOf: - type: object required: - - uri + - artifactType properties: + artifactType: + type: string + default: model-artifact uri: type: string format: uri - description: URI where the artifact can be retrieved. + description: URI where the model can be retrieved. customProperties: description: User provided custom properties which are not defined by its type. type: object @@ -1820,10 +2056,10 @@ - type: object properties: items: - description: Array of `CatalogModelArtifact` entities. + description: Array of `CatalogArtifact` entities. type: array items: - $ref: "#/components/schemas/CatalogModelArtifact" + $ref: "#/components/schemas/CatalogArtifact" required: - items - $ref: "#/components/schemas/BaseResourceList" @@ -1857,6 +2093,29 @@ description: Whether the catalog source is enabled. type: boolean default: true + labels: + description: |- + Labels associated with the source.
The catalog source labels can be + used to filter models from sources with the same label (see + `sourceLabel` on `/api/model_catalog/v1alpha1/models`) + type: array + items: + type: string + status: + description: |- + Operational status of a catalog source. + - `available`: The source is functioning correctly and models can be retrieved + - `error`: The source is experiencing issues and cannot provide models + - `disabled`: The source has been intentionally disabled + type: string + enum: + - available + - error + - disabled + error: + description: |- + Error message when status is Failed. Contains details about what went wrong. + type: string CatalogSourceList: description: List of CatalogSource entities. allOf: @@ -1868,6 +2127,184 @@ components: items: $ref: "#/components/schemas/CatalogSource" - $ref: "#/components/schemas/BaseResourceList" + FilterOption: + type: object + required: + - type + properties: + type: + type: string + description: The data type of the filter option + enum: + - string + - number + values: + type: array + description: |- + Known values of the property for string types with a small number of possible options. + items: {} + range: + type: object + description: Min and max values for number types. + properties: + min: + type: number + max: + type: number + FilterOptionsList: + description: List of FilterOptions + type: object + properties: + filters: + type: object + description: A single filter option. + additionalProperties: + $ref: "#/components/schemas/FilterOption" + CatalogSourcePreviewRequest: + description: Request to preview catalog source configuration effects. 
+ type: object + required: + - type + properties: + type: + type: string + description: The type of catalog source (e.g., 'huggingface', 'yaml') + example: yaml + includedModels: + type: array + items: + type: string + excludedModels: + type: array + items: + type: string + properties: + type: object + description: Type-specific configuration properties + additionalProperties: true + CatalogSourcePreviewModel: + description: A model in the preview response with inclusion status. + type: object + required: + - name + - included + properties: + name: + type: string + description: Name of the model + example: microsoft/DialoGPT-medium + included: + type: boolean + description: Whether this model would be included based on the source configuration + CatalogSourcePreviewResult: + description: Preview result for catalog source configuration. + allOf: + - type: object + properties: + items: + description: Array of models with their inclusion status. + type: array + items: + $ref: "#/components/schemas/CatalogSourcePreviewModel" + summary: + description: Summary of the preview results + type: object + properties: + totalModels: + type: integer + description: Total number of models evaluated + example: 1500 + includedModels: + type: integer + description: Number of models that would be included + example: 850 + excludedModels: + type: integer + description: Number of models that would be excluded + example: 650 + required: + - totalModels + - includedModels + - excludedModels + required: + - items + - summary + - $ref: "#/components/schemas/BaseResourceList" + + # Schema for catalog settings + CatalogSourceConfig: + description: A single catalog source configuration + type: object + allOf: + - $ref: "#/components/schemas/CatalogSourceConfigPayload" + CatalogSourceConfigList: + description: List of catalog sources. + type: object + properties: + catalogs: + description: Array of catalog sources. 
+ type: array + items: + $ref: "#/components/schemas/CatalogSourceConfig" + + # schema for model catalog source payload + CatalogSourceConfigPayload: + description: Payload for creating the catalog source + type: object + required: + - name + - id + - type + properties: + name: + type: string + description: The name of the catalog source. + example: Sample Catalog + id: + type: string + description: A unique identifier for a `CatalogSource`. + example: sample_custom_catalog + type: + type: string + example: yaml + enabled: + type: boolean + description: Whether the catalog source is enabled. + default: true + example: true + labels: + description: |- + Labels associated with the source. The catalog source labels can be + used to filter models from sources with the same label (see + `sourceLabel` on `/api/model_catalog/v1alpha1/models`) + type: array + items: + type: string + isDefault: + type: boolean + example: true + yaml: + type: string + description: Complete YAML catalog content for yaml-type sources. + apiKey: + type: string + description: API key for authenticating with the catalog source. + example: "apiKey" + includedModels: + type: array + items: + type: string + example: rhelai1/modelcar-granite-7b-starter + excludedModels: + type: array + items: + type: string + example: model-a:1.0 + description: List of model names that will be excluded. + allowedOrganization: + type: string + description: Name of the organization that needs to ingest from the source. + example: org1 responses: NotFound: @@ -1912,7 +2349,7 @@ components: schema: $ref: "#/components/schemas/Error" description: Conflict - Role Binding with the same name already exists - ModelRegistryRespone: + ModelRegistryResponse: content: application/json: schema: @@ -2184,7 +2621,7 @@ components: application/json: schema: $ref: "#/components/schemas/CatalogModelArtifactList" - description: A response containing a list of CatalogModelArtifact entities.
+ description: A response containing a list of CatalogArtifact entities. CatalogModelListResponse: content: application/json: @@ -2203,12 +2640,38 @@ components: schema: $ref: "#/components/schemas/CatalogSourceList" description: A response containing a list of CatalogSource entities. - CatalogSourceResponse: + FilterOptionsResponse: content: application/json: schema: - $ref: "#/components/schemas/CatalogSource" - description: A response containing a `CatalogSource` entity. + $ref: "#/components/schemas/FilterOptionsList" + description: A response containing options for a `filterQuery` parameter. + CatalogSourcePreviewResponse: + content: + application/json: + schema: + $ref: "#/components/schemas/CatalogSourcePreviewResult" + description: A response containing the preview results of catalog source configuration effects. + + # Catalog settings response + CatalogSourceConfigListResponse: + description: A response containing a list of catalog sources. + content: + application/json: + schema: + type: object + properties: + data: + $ref: "#/components/schemas/CatalogSourceConfigList" + CatalogSourceConfigResponse: + content: + application/json: + schema: + type: object + properties: + data: + $ref: "#/components/schemas/CatalogSourceConfig" + description: A response containing a catalog source. parameters: registeredmodelId: @@ -2351,6 +2814,56 @@ components: type: string in: query required: false + filterQuery: + examples: + filterQuery: + value: "name='my-model' AND state='LIVE'" + name: filterQuery + description: | + A SQL-like query string to filter the list of entities. The query supports rich filtering capabilities with automatic type inference. 
+ + **Supported Operators:** + - Comparison: `=`, `!=`, `<>`, `>`, `<`, `>=`, `<=` + - Pattern matching: `LIKE`, `ILIKE` (case-insensitive) + - Set membership: `IN` + - Logical: `AND`, `OR` + - Grouping: `()` for complex expressions + + **Data Types:** + - Strings: `"value"` or `'value'` + - Numbers: `42`, `3.14`, `1e-5` + - Booleans: `true`, `false` (case-insensitive) + + **Property Access:** + - Standard properties: `name`, `id`, `state`, `createTimeSinceEpoch` + - Custom properties: Any user-defined property name + - Escaped properties: Use backticks for special characters: `` `custom-property` `` + - Type-specific access: `property.string_value`, `property.double_value`, `property.int_value`, `property.bool_value` + + **Examples:** + - Basic: `name = "my-model"` + - Comparison: `accuracy > 0.95` + - Pattern: `name LIKE "%tensorflow%"` + - Complex: `(name = "model-a" OR name = "model-b") AND state = "LIVE"` + - Custom property: `framework.string_value = "pytorch"` + - Escaped property: `` `mlflow.source.type` = "notebook" `` + schema: + type: string + in: query + required: false + catalogArtifactType: + name: artifactType + description: |- + Specifies the artifact types to return. May be specified multiple + times to retrieve multiple artifact types. + schema: + type: string + enum: + - model-artifact + - doc-artifact + - metrics-artifact + in: query + required: false securitySchemes: Bearer: diff --git a/clients/ui/bff/Makefile b/clients/ui/bff/Makefile index d8a605323b..1f17b77501 100644 --- a/clients/ui/bff/Makefile +++ b/clients/ui/bff/Makefile @@ -8,7 +8,7 @@ DEV_MODE_CATALOG_PORT ?= 8081 DEPLOYMENT_MODE ?= kubeflow AUTH_METHOD ?= internal AUTH_TOKEN_HEADER ?= Authorization -AUTH_TOKEN_PREFIX ?= Bearer\ +AUTH_TOKEN_PREFIX ?= Bearer INSECURE_SKIP_VERIFY ?= false #frontend static assets root directory STATIC_ASSETS_DIR ?= ./static @@ -61,6 +61,7 @@ endif .PHONY: run run: fmt vet envtest ## Runs the project. 
+ trap 'exit 0' INT; \ ENVTEST_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)" \ go run ./cmd --port=$(PORT) --auth-method=${AUTH_METHOD} --auth-token-header=$(AUTH_TOKEN_HEADER) --auth-token-prefix="$(AUTH_TOKEN_PREFIX)" --static-assets-dir=$(STATIC_ASSETS_DIR) --mock-k8s-client=$(MOCK_K8S_CLIENT) --mock-mr-client=$(MOCK_MR_CLIENT) --mock-mr-catalog-client=$(MOCK_MR_CATALOG_CLIENT) --dev-mode=$(DEV_MODE) --dev-mode-model-registry-port=$(DEV_MODE_MODEL_REGISTRY_PORT) --dev-mode-catalog-port=$(DEV_MODE_CATALOG_PORT) --deployment-mode=$(DEPLOYMENT_MODE) --log-level=$(LOG_LEVEL) --allowed-origins=$(ALLOWED_ORIGINS) --insecure-skip-verify=$(INSECURE_SKIP_VERIFY) diff --git a/clients/ui/bff/README.md b/clients/ui/bff/README.md index ae511c0fa6..ea84ee000c 100644 --- a/clients/ui/bff/README.md +++ b/clients/ui/bff/README.md @@ -320,6 +320,12 @@ curl -i -H "kubeflow-userid: user@example.com" "http://localhost:4000/api/v1/mod curl -i -H "Authorization: Bearer $TOKEN" "http://localhost:4000/api/v1/model_catalog/sources?namespace=kubeflow" ``` +``` +# GET /api/v1/model_catalog/models/filter_options +curl -i -H "kubeflow-userid: user@example.com" "http://localhost:4000/api/v1/model_catalog/models/filter_options?namespace=kubeflow" +curl -i -H "Authorization: Bearer $TOKEN" "http://localhost:4000/api/v1/model_catalog/models/filter_options?namespace=kubeflow" +``` + ``` # GET /api/v1/model_catalog/sources with name filter curl -i -H "kubeflow-userid: user@example.com" "http://localhost:4000/api/v1/model_catalog/sources?namespace=kubeflow&name=sample-source" diff --git a/clients/ui/bff/go.mod b/clients/ui/bff/go.mod index 946cf97224..4e351c83f3 100644 --- a/clients/ui/bff/go.mod +++ b/clients/ui/bff/go.mod @@ -3,24 +3,24 @@ module github.com/kubeflow/model-registry/ui/bff go 1.24.6 require ( - github.com/brianvoe/gofakeit/v7 v7.3.0 + github.com/brianvoe/gofakeit/v7 v7.7.3 github.com/google/uuid v1.6.0 github.com/julienschmidt/httprouter 
v1.3.0 - github.com/kubeflow/model-registry v0.3.0 - github.com/kubeflow/model-registry/pkg/openapi v0.3.0 - github.com/onsi/ginkgo/v2 v2.25.1 - github.com/onsi/gomega v1.38.0 + github.com/kubeflow/model-registry/pkg/openapi v0.3.2 + github.com/onsi/ginkgo/v2 v2.27.1 + github.com/onsi/gomega v1.38.2 github.com/rs/cors v1.11.1 github.com/stretchr/testify v1.11.0 - k8s.io/api v0.34.1 - k8s.io/apimachinery v0.34.1 - k8s.io/client-go v0.34.1 - sigs.k8s.io/controller-runtime v0.21.0 + k8s.io/api v0.34.2 + k8s.io/apimachinery v0.34.2 + k8s.io/client-go v0.34.2 + sigs.k8s.io/controller-runtime v0.22.4 ) require ( - github.com/Masterminds/semver/v3 v3.3.1 // indirect - github.com/blang/semver/v4 v4.0.0 // indirect + github.com/Masterminds/semver/v3 v3.4.0 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/emicklei/go-restful/v3 v3.12.2 // indirect github.com/evanphx/json-patch/v5 v5.9.11 // indirect @@ -43,6 +43,10 @@ require ( github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/prometheus/client_golang v1.22.0 // indirect + github.com/prometheus/client_model v0.6.2 // indirect + github.com/prometheus/common v0.63.0 // indirect + github.com/prometheus/procfs v0.16.0 // indirect github.com/spf13/pflag v1.0.7 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/x448/float16 v0.8.4 // indirect @@ -51,18 +55,21 @@ require ( go.uber.org/zap v1.27.0 // indirect go.yaml.in/yaml/v2 v2.4.2 // indirect go.yaml.in/yaml/v3 v3.0.4 // indirect + golang.org/x/mod v0.27.0 // indirect golang.org/x/net v0.43.0 // indirect golang.org/x/oauth2 v0.30.0 // indirect + golang.org/x/sync v0.16.0 // indirect golang.org/x/sys v0.35.0 // indirect golang.org/x/term v0.34.0 // indirect 
golang.org/x/text v0.28.0 // indirect golang.org/x/time v0.11.0 // indirect golang.org/x/tools v0.36.0 // indirect + gomodules.xyz/jsonpatch/v2 v2.5.0 // indirect google.golang.org/protobuf v1.36.8 // indirect gopkg.in/evanphx/json-patch.v4 v4.12.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect - k8s.io/apiextensions-apiserver v0.33.0 // indirect + k8s.io/apiextensions-apiserver v0.34.1 // indirect k8s.io/klog/v2 v2.130.1 // indirect k8s.io/kube-openapi v0.0.0-20250710124328-f3f2b991d03b // indirect k8s.io/utils v0.0.0-20250604170112-4c0f3b243397 // indirect diff --git a/clients/ui/bff/go.sum b/clients/ui/bff/go.sum index 1a21f11907..81669c6117 100644 --- a/clients/ui/bff/go.sum +++ b/clients/ui/bff/go.sum @@ -1,11 +1,9 @@ -github.com/Masterminds/semver/v3 v3.3.1 h1:QtNSWtVZ3nBfk8mAOu/B6v7FMJ+NHTIgUPi7rj+4nv4= -github.com/Masterminds/semver/v3 v3.3.1/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= +github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM= -github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= -github.com/brianvoe/gofakeit/v7 v7.3.0 h1:TWStf7/lLpAjKw+bqwzeORo9jvrxToWEwp9b1J2vApQ= -github.com/brianvoe/gofakeit/v7 v7.3.0/go.mod h1:QXuPeBw164PJCzCUZVmgpgHJ3Llj49jSLVkKPMtxtxA= +github.com/brianvoe/gofakeit/v7 v7.7.3 h1:RWOATEGpJ5EVg2nN8nlaEyaV/aB4d6c3GqYrbqQekss= +github.com/brianvoe/gofakeit/v7 v7.7.3/go.mod h1:QXuPeBw164PJCzCUZVmgpgHJ3Llj49jSLVkKPMtxtxA= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod 
h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -20,6 +18,12 @@ github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/fxamacker/cbor/v2 v2.9.0 h1:NpKPmjDBgUfBms6tr6JZkTHtfFGcMKsw3eGcmD/sapM= github.com/fxamacker/cbor/v2 v2.9.0/go.mod h1:vM4b+DJCtHn+zz7h3FFp/hDAI9WNWCsZj23V5ytsSxQ= +github.com/gkampitakis/ciinfo v0.3.2 h1:JcuOPk8ZU7nZQjdUhctuhQofk7BGHuIy0c9Ez8BNhXs= +github.com/gkampitakis/ciinfo v0.3.2/go.mod h1:1NIwaOcFChN4fa/B0hEBdAb6npDlFL8Bwx4dfRLRqAo= +github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZdC4M= +github.com/gkampitakis/go-diff v1.3.2/go.mod h1:LLgOrpqleQe26cte8s36HTWcTmMEur6OPYerdAAS9tk= +github.com/gkampitakis/go-snaps v0.5.15 h1:amyJrvM1D33cPHwVrjo9jQxX8g/7E2wYdZ+01KS3zGE= +github.com/gkampitakis/go-snaps v0.5.15/go.mod h1:HNpx/9GoKisdhw9AFOBT1N7DBs9DiHo/hGheFGBZ+mc= github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/zapr v1.3.0 h1:XGdV8XW8zdwFiwOA2Dryh1gj2KRQyOOoNmBy4EplIcQ= @@ -32,6 +36,8 @@ github.com/go-openapi/swag v0.23.1 h1:lpsStH0n2ittzTnbaSloVZLuB5+fvSY/+hnagBjSNZ github.com/go-openapi/swag v0.23.1/go.mod h1:STZs8TbRvEQQKUA+JZNAm3EWlgaOBGpyFDqQnDHMef0= github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= +github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw= +github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod 
h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/google/gnostic-models v0.7.0 h1:qwTtogB15McXDaNqTZdzPJRHvaVJlAl+HVQnLmJEJxo= @@ -45,6 +51,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/joshdk/go-junit v1.0.0 h1:S86cUKIdwBHWwA6xCmFlf3RTLfVXYQfvanM5Uh+K6GE= +github.com/joshdk/go-junit v1.0.0/go.mod h1:TiiV0PqkaNfFXjEiyjWM3XXrhVyCa1K4Zfga6W52ung= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/julienschmidt/httprouter v1.3.0 h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U= @@ -55,12 +63,14 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kubeflow/model-registry v0.3.0 h1:03w2GX14Y/psOqrF2mWwxKIUxWekJ20igQQvbCKLTas= -github.com/kubeflow/model-registry v0.3.0/go.mod h1:e4xz2RCQRlT/qOBSShmp9OxqhzgLQ6lrMLL4VMRq9xc= -github.com/kubeflow/model-registry/pkg/openapi v0.3.0 h1:ioNr/dm37J6eW63DQAcIHIPZ8zDF4v5GJwMHfs9doUM= -github.com/kubeflow/model-registry/pkg/openapi v0.3.0/go.mod h1:0V0wF5hGlLDSNS+on0MTnEOFiubfVYNc7QhuthKBu+8= +github.com/kubeflow/model-registry/pkg/openapi v0.3.2 h1:t/H+zxHiwcPGUITG/fWHUrTrJwoi9IlVa7vmzZI1eZk= +github.com/kubeflow/model-registry/pkg/openapi v0.3.2/go.mod h1:0V0wF5hGlLDSNS+on0MTnEOFiubfVYNc7QhuthKBu+8= github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4= github.com/mailru/easyjson 
v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU= +github.com/maruel/natural v1.1.1 h1:Hja7XhhmvEFhcByqDoHz9QZbkWey+COd9xWfCfn1ioo= +github.com/maruel/natural v1.1.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg= +github.com/mfridman/tparse v0.18.0 h1:wh6dzOKaIwkUGyKgOntDW4liXSo37qg5AXbIhkMV3vE= +github.com/mfridman/tparse v0.18.0/go.mod h1:gEvqZTuCgEhPbYk/2lS3Kcxg1GmTxxU7kTC8DvP0i/A= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -69,10 +79,10 @@ github.com/modern-go/reflect2 v1.0.3-0.20250322232337-35a7c28c31ee h1:W5t00kpgFd github.com/modern-go/reflect2 v1.0.3-0.20250322232337-35a7c28c31ee/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= -github.com/onsi/ginkgo/v2 v2.25.1 h1:Fwp6crTREKM+oA6Cz4MsO8RhKQzs2/gOIVOUscMAfZY= -github.com/onsi/ginkgo/v2 v2.25.1/go.mod h1:ppTWQ1dh9KM/F1XgpeRqelR+zHVwV81DGRSDnFxK7Sk= -github.com/onsi/gomega v1.38.0 h1:c/WX+w8SLAinvuKKQFh77WEucCnPk4j2OTUr7lt7BeY= -github.com/onsi/gomega v1.38.0/go.mod h1:OcXcwId0b9QsE7Y49u+BTrL4IdKOBOKnD6VQNTJEB6o= +github.com/onsi/ginkgo/v2 v2.27.1 h1:0LJC8MpUSQnfnp4n/3W3GdlmJP3ENGF0ZPzjQGLPP7s= +github.com/onsi/ginkgo/v2 v2.27.1/go.mod h1:wmy3vCqiBjirARfVhAqFpYt8uvX0yaFe+GudAqqcCqA= +github.com/onsi/gomega v1.38.2 h1:eZCjf2xjZAqe+LeWvKb5weQ+NcPwX84kqJ0cZNxok2A= +github.com/onsi/gomega v1.38.2/go.mod h1:W2MJcYxRGV63b418Ai34Ud0hEdTVXq9NW9+Sx6uXf3k= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors 
v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= @@ -100,6 +110,14 @@ github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.11.0 h1:ib4sjIrwZKxE5u/Japgo/7SJV3PvgjGiRNAvTVGqQl8= github.com/stretchr/testify v1.11.0/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= +github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -121,6 +139,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ= +golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc= golang.org/x/net 
v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -132,6 +152,8 @@ golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKl golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -168,22 +190,22 @@ gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -k8s.io/api v0.34.1 h1:jC+153630BMdlFukegoEL8E/yT7aLyQkIVuwhmwDgJM= -k8s.io/api v0.34.1/go.mod h1:SB80FxFtXn5/gwzCoN6QCtPD7Vbu5w2n1S0J5gFfTYk= -k8s.io/apiextensions-apiserver v0.33.0 h1:d2qpYL7Mngbsc1taA4IjJPRJ9ilnsXIrndH+r9IimOs= -k8s.io/apiextensions-apiserver v0.33.0/go.mod h1:VeJ8u9dEEN+tbETo+lFkwaaZPg6uFKLGj5vyNEwwSzc= -k8s.io/apimachinery v0.34.1 h1:dTlxFls/eikpJxmAC7MVE8oOeP1zryV7iRyIjB0gky4= -k8s.io/apimachinery v0.34.1/go.mod 
h1:/GwIlEcWuTX9zKIg2mbw0LRFIsXwrfoVxn+ef0X13lw= -k8s.io/client-go v0.34.1 h1:ZUPJKgXsnKwVwmKKdPfw4tB58+7/Ik3CrjOEhsiZ7mY= -k8s.io/client-go v0.34.1/go.mod h1:kA8v0FP+tk6sZA0yKLRG67LWjqufAoSHA2xVGKw9Of8= +k8s.io/api v0.34.2 h1:fsSUNZhV+bnL6Aqrp6O7lMTy6o5x2C4XLjnh//8SLYY= +k8s.io/api v0.34.2/go.mod h1:MMBPaWlED2a8w4RSeanD76f7opUoypY8TFYkSM+3XHw= +k8s.io/apiextensions-apiserver v0.34.1 h1:NNPBva8FNAPt1iSVwIE0FsdrVriRXMsaWFMqJbII2CI= +k8s.io/apiextensions-apiserver v0.34.1/go.mod h1:hP9Rld3zF5Ay2Of3BeEpLAToP+l4s5UlxiHfqRaRcMc= +k8s.io/apimachinery v0.34.2 h1:zQ12Uk3eMHPxrsbUJgNF8bTauTVR2WgqJsTmwTE/NW4= +k8s.io/apimachinery v0.34.2/go.mod h1:/GwIlEcWuTX9zKIg2mbw0LRFIsXwrfoVxn+ef0X13lw= +k8s.io/client-go v0.34.2 h1:Co6XiknN+uUZqiddlfAjT68184/37PS4QAzYvQvDR8M= +k8s.io/client-go v0.34.2/go.mod h1:2VYDl1XXJsdcAxw7BenFslRQX28Dxz91U9MWKjX97fE= k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk= k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE= k8s.io/kube-openapi v0.0.0-20250710124328-f3f2b991d03b h1:MloQ9/bdJyIu9lb1PzujOPolHyvO06MXG5TUIj2mNAA= k8s.io/kube-openapi v0.0.0-20250710124328-f3f2b991d03b/go.mod h1:UZ2yyWbFTpuhSbFhv24aGNOdoRdJZgsIObGBUaYVsts= k8s.io/utils v0.0.0-20250604170112-4c0f3b243397 h1:hwvWFiBzdWw1FhfY1FooPn3kzWuJ8tmbZBHi4zVsl1Y= k8s.io/utils v0.0.0-20250604170112-4c0f3b243397/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= -sigs.k8s.io/controller-runtime v0.21.0 h1:CYfjpEuicjUecRk+KAeyYh+ouUBn4llGyDYytIGcJS8= -sigs.k8s.io/controller-runtime v0.21.0/go.mod h1:OSg14+F65eWqIu4DceX7k/+QRAbTTvxeQSNSOQpukWM= +sigs.k8s.io/controller-runtime v0.22.4 h1:GEjV7KV3TY8e+tJ2LCTxUTanW4z/FmNB7l327UfMq9A= +sigs.k8s.io/controller-runtime v0.22.4/go.mod h1:+QX1XUpTXN4mLoblf4tqr5CQcyHPAki2HLXqQMY6vh8= sigs.k8s.io/json v0.0.0-20241014173422-cfa47c3a1cc8 h1:gBQPwqORJ8d8/YNZWEjoZs7npUVDpVXUUOFfW6CgAqE= sigs.k8s.io/json v0.0.0-20241014173422-cfa47c3a1cc8/go.mod h1:mdzfpAEoE6DHQEN0uh9ZbOCuHbLK5wOm7dK4ctXE9Tg= 
sigs.k8s.io/randfill v1.0.0 h1:JfjMILfT8A6RbawdsK2JXGBR5AQVfd+9TbzrlneTyrU= diff --git a/clients/ui/bff/go.work b/clients/ui/bff/go.work deleted file mode 100644 index 6e12d03973..0000000000 --- a/clients/ui/bff/go.work +++ /dev/null @@ -1,5 +0,0 @@ -go 1.24.6 - -use ( - . // The BFF module -) diff --git a/clients/ui/bff/go.work.sum b/clients/ui/bff/go.work.sum deleted file mode 100644 index 37e1e187b5..0000000000 --- a/clients/ui/bff/go.work.sum +++ /dev/null @@ -1,1093 +0,0 @@ -cel.dev/expr v0.20.0 h1:OunBvVCfvpWlt4dN7zg3FM6TDkzOePe1+foGJ9AXeeI= -cel.dev/expr v0.20.0/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= -cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY= -cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= -cloud.google.com/go v0.116.0 h1:B3fRrSDkLRt5qSHWe40ERJvhvnQwdZiHu0bJOpldweE= -cloud.google.com/go v0.116.0/go.mod h1:cEPSRWPzZEswwdr9BxE6ChEn01dWlTaF05LiC2Xs70U= -cloud.google.com/go/accessapproval v1.8.2 h1:h4u1MypgeYXTGvnNc1luCBLDN4Kb9Re/gw0Atvoi8HE= -cloud.google.com/go/accessapproval v1.8.2/go.mod h1:aEJvHZtpjqstffVwF/2mCXXSQmpskyzvw6zKLvLutZM= -cloud.google.com/go/accesscontextmanager v1.9.2 h1:P0uVixQft8aacbZ7VDZStNZdrftF24Hk8JkA3kfvfqI= -cloud.google.com/go/accesscontextmanager v1.9.2/go.mod h1:T0Sw/PQPyzctnkw1pdmGAKb7XBA84BqQzH0fSU7wzJU= -cloud.google.com/go/aiplatform v1.69.0 h1:XvBzK8e6/6ufbi/i129Vmn/gVqFwbNPmRQ89K+MGlgc= -cloud.google.com/go/aiplatform v1.69.0/go.mod h1:nUsIqzS3khlnWvpjfJbP+2+h+VrFyYsTm7RNCAViiY8= -cloud.google.com/go/analytics v0.25.2 h1:KgJ5Taxtsnro/co7WIhmAHi5pzYAtvxu8LMqenPAlSo= -cloud.google.com/go/analytics v0.25.2/go.mod h1:th0DIunqrhI1ZWVlT3PH2Uw/9ANX8YHfFDEPqf/+7xM= -cloud.google.com/go/apigateway v1.7.2 h1:TRB5q0vvbT5Yx4bNSCWlqLJFJnhc7tDlCR9ccpo1vzg= -cloud.google.com/go/apigateway v1.7.2/go.mod h1:+weId+9aR9J6GRwDka7jIUSrKEX60XGcikX7dGU8O7M= -cloud.google.com/go/apigeeconnect v1.7.2 h1:GHg0ddEQUZ08C1qC780P5wwY/jaIW8UtxuRQXLLuRXs= 
-cloud.google.com/go/apigeeconnect v1.7.2/go.mod h1:he/SWi3A63fbyxrxD6jb67ak17QTbWjva1TFbT5w8Kw= -cloud.google.com/go/apigeeregistry v0.9.2 h1:fC3ZXEk2QsBxUlZZDZpbBGXC/ZQglCBmHDGgY5aNipg= -cloud.google.com/go/apigeeregistry v0.9.2/go.mod h1:A5n/DwpG5NaP2fcLYGiFA9QfzpQhPRFNATO1gie8KM8= -cloud.google.com/go/appengine v1.9.2 h1:pxAQ//FsyEQsaF9HJduPCOEvj9GV4fvnLARGz1+KDzM= -cloud.google.com/go/appengine v1.9.2/go.mod h1:bK4dvmMG6b5Tem2JFZcjvHdxco9g6t1pwd3y/1qr+3s= -cloud.google.com/go/area120 v0.9.2 h1:LODm6TjW27/LJ4z4fBNJHRb+tlvy0gSu6Vb8j2lfluY= -cloud.google.com/go/area120 v0.9.2/go.mod h1:Ar/KPx51UbrTWGVGgGzFnT7hFYQuk/0VOXkvHdTbQMI= -cloud.google.com/go/artifactregistry v1.16.0 h1:BZpz0x8HCG7hwTkD+GlUwPQVFGOo9w84t8kxQwwc0DA= -cloud.google.com/go/artifactregistry v1.16.0/go.mod h1:LunXo4u2rFtvJjrGjO0JS+Gs9Eco2xbZU6JVJ4+T8Sk= -cloud.google.com/go/asset v1.20.3 h1:/jQBAkZVUbsIczRepDkwaf/K5NcRYvQ6MBiWg5i20fU= -cloud.google.com/go/asset v1.20.3/go.mod h1:797WxTDwdnFAJzbjZ5zc+P5iwqXc13yO9DHhmS6wl+o= -cloud.google.com/go/assuredworkloads v1.12.2 h1:6Y6a4V7CD50qtjvayhu7f5o35UFJP8ade7IbHNfdQEc= -cloud.google.com/go/assuredworkloads v1.12.2/go.mod h1:/WeRr/q+6EQYgnoYrqCVgw7boMoDfjXZZev3iJxs2Iw= -cloud.google.com/go/auth v0.15.0 h1:Ly0u4aA5vG/fsSsxu98qCQBemXtAtJf+95z9HK+cxps= -cloud.google.com/go/auth v0.15.0/go.mod h1:WJDGqZ1o9E9wKIL+IwStfyn/+s59zl4Bi+1KQNVXLZ8= -cloud.google.com/go/auth/oauth2adapt v0.2.7 h1:/Lc7xODdqcEw8IrZ9SvwnlLX6j9FHQM74z6cBk9Rw6M= -cloud.google.com/go/auth/oauth2adapt v0.2.7/go.mod h1:NTbTTzfvPl1Y3V1nPpOgl2w6d/FjO7NNUQaWSox6ZMc= -cloud.google.com/go/automl v1.14.2 h1:RzR5Nx78iaF2FNAfaaQ/7o2b4VuQ17YbOaeK/DLYSW4= -cloud.google.com/go/automl v1.14.2/go.mod h1:mIat+Mf77W30eWQ/vrhjXsXaRh8Qfu4WiymR0hR6Uxk= -cloud.google.com/go/baremetalsolution v1.3.2 h1:rhawlI+9gy/i1ZQbN/qL6FXHGXusWbfr6UoQdcCpybw= -cloud.google.com/go/baremetalsolution v1.3.2/go.mod h1:3+wqVRstRREJV/puwaKAH3Pnn7ByreZG2aFRsavnoBQ= -cloud.google.com/go/batch v1.11.2 
h1:OVhgpMMJc+mrFw51R3C06JKC0D6u125RlEBULpg78No= -cloud.google.com/go/batch v1.11.2/go.mod h1:ehsVs8Y86Q4K+qhEStxICqQnNqH8cqgpCxx89cmU5h4= -cloud.google.com/go/beyondcorp v1.1.2 h1:hzKZf9ScvqTWqR8xGKVvD35ScQuxbMySELvJ0OW1usI= -cloud.google.com/go/beyondcorp v1.1.2/go.mod h1:q6YWSkEsSZTU2WDt1qtz6P5yfv79wgktGtNbd0FJTLI= -cloud.google.com/go/bigquery v1.64.0 h1:vSSZisNyhr2ioJE1OuYBQrnrpB7pIhRQm4jfjc7E/js= -cloud.google.com/go/bigquery v1.64.0/go.mod h1:gy8Ooz6HF7QmA+TRtX8tZmXBKH5mCFBwUApGAb3zI7Y= -cloud.google.com/go/bigtable v1.33.0 h1:2BDaWLRAwXO14DJL/u8crbV2oUbMZkIa2eGq8Yao1bk= -cloud.google.com/go/bigtable v1.33.0/go.mod h1:HtpnH4g25VT1pejHRtInlFPnN5sjTxbQlsYBjh9t5l0= -cloud.google.com/go/billing v1.19.2 h1:shcyz1UkrUxbPsqHL6L84ZdtBZ7yocaFFCxMInTsrNo= -cloud.google.com/go/billing v1.19.2/go.mod h1:AAtih/X2nka5mug6jTAq8jfh1nPye0OjkHbZEZgU59c= -cloud.google.com/go/binaryauthorization v1.9.2 h1:zZX4cvtYSXc5ogOar1w5KA1BLz3j464RPSaR/HhroJ8= -cloud.google.com/go/binaryauthorization v1.9.2/go.mod h1:T4nOcRWi2WX4bjfSRXJkUnpliVIqjP38V88Z10OvEv4= -cloud.google.com/go/certificatemanager v1.9.2 h1:/lO1ejN415kRaiO6DNNCHj0UvQujKP714q3l8gp4lsY= -cloud.google.com/go/certificatemanager v1.9.2/go.mod h1:PqW+fNSav5Xz8bvUnJpATIRo1aaABP4mUg/7XIeAn6c= -cloud.google.com/go/channel v1.19.1 h1:l4XcnfzJ5UGmqZQls0atcpD6ERDps4PLd5hXSyTWFv0= -cloud.google.com/go/channel v1.19.1/go.mod h1:ungpP46l6XUeuefbA/XWpWWnAY3897CSRPXUbDstwUo= -cloud.google.com/go/cloudbuild v1.19.0 h1:Uo0bL251yvyWsNtO3Og9m5Z4S48cgGf3IUX7xzOcl8s= -cloud.google.com/go/cloudbuild v1.19.0/go.mod h1:ZGRqbNMrVGhknIIjwASa6MqoRTOpXIVMSI+Ew5DMPuY= -cloud.google.com/go/clouddms v1.8.2 h1:U53ztLRgTkclaxgmBBles+tv+nNcZ5fhbRbw3b2axFw= -cloud.google.com/go/clouddms v1.8.2/go.mod h1:pe+JSp12u4mYOkwXpSMouyCCuQHL3a6xvWH2FgOcAt4= -cloud.google.com/go/cloudtasks v1.13.2 h1:x6Qw5JyNbH3reL0arUtlYf77kK6OVjZZ//8JCvUkLro= -cloud.google.com/go/cloudtasks v1.13.2/go.mod h1:2pyE4Lhm7xY8GqbZKLnYk7eeuh8L0JwAvXx1ecKxYu8= -cloud.google.com/go/compute 
v1.29.0 h1:Lph6d8oPi38NHkOr6S55Nus/Pbbcp37m/J0ohgKAefs= -cloud.google.com/go/compute v1.29.0/go.mod h1:HFlsDurE5DpQZClAGf/cYh+gxssMhBxBovZDYkEn/Og= -cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= -cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= -cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= -cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= -cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= -cloud.google.com/go/contactcenterinsights v1.15.1 h1:cR/gQMweaG8RIWAlS5Jo1ARi8LUVQJ51t84EUefHeZ8= -cloud.google.com/go/contactcenterinsights v1.15.1/go.mod h1:cFGxDVm/OwEVAHbU9UO4xQCtQFn0RZSrSUcF/oJ0Bbs= -cloud.google.com/go/container v1.42.0 h1:sH9Hj9SoLeP+uKvLXc/04nWyWDiMo4Q85xfb1Nl5sAg= -cloud.google.com/go/container v1.42.0/go.mod h1:YL6lDgCUi3frIWNIFU9qrmF7/6K1EYrtspmFTyyqJ+k= -cloud.google.com/go/containeranalysis v0.13.2 h1:AG2gOcfZJFRiz+3SZCPnxU+gwbzKe++QSX/ej71Lom8= -cloud.google.com/go/containeranalysis v0.13.2/go.mod h1:AiKvXJkc3HiqkHzVIt6s5M81wk+q7SNffc6ZlkTDgiE= -cloud.google.com/go/datacatalog v1.23.0 h1:9F2zIbWNNmtrSkPIyGRQNsIugG5VgVVFip6+tXSdWLg= -cloud.google.com/go/datacatalog v1.23.0/go.mod h1:9Wamq8TDfL2680Sav7q3zEhBJSPBrDxJU8WtPJ25dBM= -cloud.google.com/go/dataflow v0.10.2 h1:o9P5/zR2mOYJmCnfp9/7RprKFZCwmSu3TvemQSmCaFM= -cloud.google.com/go/dataflow v0.10.2/go.mod h1:+HIb4HJxDCZYuCqDGnBHZEglh5I0edi/mLgVbxDf0Ag= -cloud.google.com/go/dataform v0.10.2 h1:t16DoejuOHoxJR88qrpdmFFlCXA9+x5PKrqI9qiDYz0= -cloud.google.com/go/dataform v0.10.2/go.mod h1:oZHwMBxG6jGZCVZqqMx+XWXK+dA/ooyYiyeRbUxI15M= -cloud.google.com/go/datafusion v1.8.2 h1:RPoHvIeXexXwlWhEU6DNgrYCh+C+FR2EXbrnMs2ptpI= -cloud.google.com/go/datafusion v1.8.2/go.mod h1:XernijudKtVG/VEvxtLv08COyVuiYPraSxm+8hd4zXA= -cloud.google.com/go/datalabeling v0.9.2 
h1:UesbU2kYIUWhHUcnFS86ANPbugEq98X9k1whTNcenlc= -cloud.google.com/go/datalabeling v0.9.2/go.mod h1:8me7cCxwV/mZgYWtRAd3oRVGFD6UyT7hjMi+4GRyPpg= -cloud.google.com/go/dataplex v1.19.2 h1:R2xnsZnuWpHi2NmBR0e43GZk2IZcQ1AFEAo1fUI0xsw= -cloud.google.com/go/dataplex v1.19.2/go.mod h1:vsxxdF5dgk3hX8Ens9m2/pMNhQZklUhSgqTghZtF1v4= -cloud.google.com/go/dataproc/v2 v2.10.0 h1:B0b7eLRXzFTzb4UaxkGGidIF23l/Xpyce28m1Q0cHmU= -cloud.google.com/go/dataproc/v2 v2.10.0/go.mod h1:HD16lk4rv2zHFhbm8gGOtrRaFohMDr9f0lAUMLmg1PM= -cloud.google.com/go/dataqna v0.9.2 h1:hrEcid5jK5fEdlYZ0eS8HJoq+ZCTRWSV7Av42V/G994= -cloud.google.com/go/dataqna v0.9.2/go.mod h1:WCJ7pwD0Mi+4pIzFQ+b2Zqy5DcExycNKHuB+VURPPgs= -cloud.google.com/go/datastore v1.20.0 h1:NNpXoyEqIJmZFc0ACcwBEaXnmscUpcG4NkKnbCePmiM= -cloud.google.com/go/datastore v1.20.0/go.mod h1:uFo3e+aEpRfHgtp5pp0+6M0o147KoPaYNaPAKpfh8Ew= -cloud.google.com/go/datastream v1.11.2 h1:vgtrwwPfY7JFEDD0VARJK4qyiApnFnPkFRQVuczYb/w= -cloud.google.com/go/datastream v1.11.2/go.mod h1:RnFWa5zwR5SzHxeZGJOlQ4HKBQPcjGfD219Qy0qfh2k= -cloud.google.com/go/deploy v1.25.0 h1:nYLFG2TSsYMJuengVru5P8iWnA5mNA4rKFV5YoOWQ3M= -cloud.google.com/go/deploy v1.25.0/go.mod h1:h9uVCWxSDanXUereI5WR+vlZdbPJ6XGy+gcfC25v5rM= -cloud.google.com/go/dialogflow v1.60.0 h1:H+Q1SUeVU2La0Y0ZGEaKkhEXg3bj9Ceg5YKcMbyNOEc= -cloud.google.com/go/dialogflow v1.60.0/go.mod h1:PjsrI+d2FI4BlGThxL0+Rua/g9vLI+2A1KL7s/Vo3pY= -cloud.google.com/go/dlp v1.20.0 h1:Wwz1FoZp3pyrTNkS5fncaAccP/AbqzLQuN5WMi3aVYQ= -cloud.google.com/go/dlp v1.20.0/go.mod h1:nrGsA3r8s7wh2Ct9FWu69UjBObiLldNyQda2RCHgdaY= -cloud.google.com/go/documentai v1.35.0 h1:DO4ut86a+Xa0gBq7j3FZJPavnKBNoznrg44csnobqIY= -cloud.google.com/go/documentai v1.35.0/go.mod h1:ZotiWUlDE8qXSUqkJsGMQqVmfTMYATwJEYqbPXTR9kk= -cloud.google.com/go/domains v0.10.2 h1:ekJCkuzbciXyPKkwPwvI+2Ov1GcGJtMXj/fbgilPFqg= -cloud.google.com/go/domains v0.10.2/go.mod h1:oL0Wsda9KdJvvGNsykdalHxQv4Ri0yfdDkIi3bzTUwk= -cloud.google.com/go/edgecontainer v1.4.0 
h1:vpKTEkQPpkl55d6aUU2rzDFvTkMUATvBXfZSlI2KMR0= -cloud.google.com/go/edgecontainer v1.4.0/go.mod h1:Hxj5saJT8LMREmAI9tbNTaBpW5loYiWFyisCjDhzu88= -cloud.google.com/go/errorreporting v0.3.1 h1:E/gLk+rL7u5JZB9oq72iL1bnhVlLrnfslrgcptjJEUE= -cloud.google.com/go/errorreporting v0.3.1/go.mod h1:6xVQXU1UuntfAf+bVkFk6nld41+CPyF2NSPCyXE3Ztk= -cloud.google.com/go/essentialcontacts v1.7.2 h1:a/reGTn7WblM5DgieiLbX6CswHgTneWrA4ZNS5E+1Bg= -cloud.google.com/go/essentialcontacts v1.7.2/go.mod h1:NoCBlOIVteJFJU+HG9dIG/Cc9kt1K9ys9mbOaGPUmPc= -cloud.google.com/go/eventarc v1.15.0 h1:IVU2EOR8P2f6N8eneuwspN122LR87v9G54B+7ihd1TY= -cloud.google.com/go/eventarc v1.15.0/go.mod h1:PAd/pPIZdJtJQFJI1yDEUms1mqohdNuM1BFEVHHlVFg= -cloud.google.com/go/filestore v1.9.2 h1:DYwMNAcF5bELHHMxRdkIWWZ3XicKp+ZpEBy+c6Gt4uY= -cloud.google.com/go/filestore v1.9.2/go.mod h1:I9pM7Hoetq9a7djC1xtmtOeHSUYocna09ZP6x+PG1Xw= -cloud.google.com/go/firestore v1.17.0 h1:iEd1LBbkDZTFsLw3sTH50eyg4qe8eoG6CjocmEXO9aQ= -cloud.google.com/go/firestore v1.17.0/go.mod h1:69uPx1papBsY8ZETooc71fOhoKkD70Q1DwMrtKuOT/Y= -cloud.google.com/go/functions v1.19.2 h1:Cu2Gj1JBBJv9gi89r8LrZNsJhGwePnhttn4Blqw/EYI= -cloud.google.com/go/functions v1.19.2/go.mod h1:SBzWwWuaFDLnUyStDAMEysVN1oA5ECLbP3/PfJ9Uk7Y= -cloud.google.com/go/gkebackup v1.6.2 h1:lWaSgjSonOXe41UhwQjts6lhDZdr5e882LNUTtnjZS0= -cloud.google.com/go/gkebackup v1.6.2/go.mod h1:WsTSWqKJkGan1pkp5dS30oxb+Eaa6cLvxEUxKTUALwk= -cloud.google.com/go/gkeconnect v0.12.0 h1:MuA3/aIuncXkXuUDGdbT7OLnIp7xpFhciuHAnQaoQz4= -cloud.google.com/go/gkeconnect v0.12.0/go.mod h1:zn37LsFiNZxPN4iO7YbUk8l/E14pAJ7KxpoXoxt7Ly0= -cloud.google.com/go/gkehub v0.15.2 h1:CR5MPEP/Ogk5IahCq3O2fKS6TJZQi8mrnrysGHCs0g8= -cloud.google.com/go/gkehub v0.15.2/go.mod h1:8YziTOpwbM8LM3r9cHaOMy2rNgJHXZCrrmGgcau9zbQ= -cloud.google.com/go/gkemulticloud v1.4.1 h1:SvVD2nJTGScEDYygIQ5dI14oFYhgtJx8HazkT3aufEI= -cloud.google.com/go/gkemulticloud v1.4.1/go.mod h1:KRvPYcx53bztNwNInrezdfNF+wwUom8Y3FuJBwhvFpQ= 
-cloud.google.com/go/gsuiteaddons v1.7.2 h1:Rma+a2tCB2PV0Rm87Ywr4P96dCwGIm8vw8gF23ZlYoY= -cloud.google.com/go/gsuiteaddons v1.7.2/go.mod h1:GD32J2rN/4APilqZw4JKmwV84+jowYYMkEVwQEYuAWc= -cloud.google.com/go/iam v1.2.2 h1:ozUSofHUGf/F4tCNy/mu9tHLTaxZFLOUiKzjcgWHGIA= -cloud.google.com/go/iam v1.2.2/go.mod h1:0Ys8ccaZHdI1dEUilwzqng/6ps2YB6vRsjIe00/+6JY= -cloud.google.com/go/iap v1.10.2 h1:rvM+FNIF2wIbwUU8299FhhVGak2f7oOvbW8J/I5oflE= -cloud.google.com/go/iap v1.10.2/go.mod h1:cClgtI09VIfazEK6VMJr6bX8KQfuQ/D3xqX+d0wrUlI= -cloud.google.com/go/ids v1.5.2 h1:EDYZQraE+Eq6BewUQxVRY8b3VUUo/MnjMfzSh1NGjx8= -cloud.google.com/go/ids v1.5.2/go.mod h1:P+ccDD96joXlomfonEdCnyrHvE68uLonc7sJBPVM5T0= -cloud.google.com/go/iot v1.8.2 h1:KMN0wujrPV7q0yfs4rt5CUl9Di8sQhJ0uohJn1h6yaI= -cloud.google.com/go/iot v1.8.2/go.mod h1:UDwVXvRD44JIcMZr8pzpF3o4iPsmOO6fmbaIYCAg1ww= -cloud.google.com/go/kms v1.20.1 h1:og29Wv59uf2FVaZlesaiDAqHFzHaoUyHI3HYp9VUHVg= -cloud.google.com/go/kms v1.20.1/go.mod h1:LywpNiVCvzYNJWS9JUcGJSVTNSwPwi0vBAotzDqn2nc= -cloud.google.com/go/language v1.14.2 h1:rwrIOwcAgPTYbigOaiMSjKCvBy0xHZJbRc7HB/xMECA= -cloud.google.com/go/language v1.14.2/go.mod h1:dviAbkxT9art+2ioL9AM05t+3Ql6UPfMpwq1cDsF+rg= -cloud.google.com/go/lifesciences v0.10.2 h1:eZSaRgBwbnb/oXwCj1SGE0Kp534DuXpg55iYBWgN024= -cloud.google.com/go/lifesciences v0.10.2/go.mod h1:vXDa34nz0T/ibUNoeHnhqI+Pn0OazUTdxemd0OLkyoY= -cloud.google.com/go/managedidentities v1.7.2 h1:oWxuIhIwQC1Vfs1SZi1x389W2TV9uyPsAyZMJgZDND4= -cloud.google.com/go/managedidentities v1.7.2/go.mod h1:t0WKYzagOoD3FNtJWSWcU8zpWZz2i9cw2sKa9RiPx5I= -cloud.google.com/go/maps v1.15.0 h1:bmFHlO6BL/smC6GD45r5j0ChjsyyevuJCSARdOL62TI= -cloud.google.com/go/maps v1.15.0/go.mod h1:ZFqZS04ucwFiHSNU8TBYDUr3wYhj5iBFJk24Ibvpf3o= -cloud.google.com/go/mediatranslation v0.9.2 h1:p37R/k9+L33bUMO87gFyv93MwJ+9nuzVhXM5X+6ULwA= -cloud.google.com/go/mediatranslation v0.9.2/go.mod h1:1xyRoDYN32THzy+QaU62vIMciX0CFexplju9t30XwUc= -cloud.google.com/go/memcache v1.11.2 
h1:GGgC2A9AClJN8VLbMUAPUxj/dNMFwz6Lj01gDxPw7os= -cloud.google.com/go/memcache v1.11.2/go.mod h1:jIzHn79b0m5wbkax2SdlW5vNSbpaEk0yWHbeLpMIYZE= -cloud.google.com/go/metastore v1.14.2 h1:Euc9kLTKS8T6M1JVqQavwDFHu9UtT1//lGXSKjpO3/0= -cloud.google.com/go/metastore v1.14.2/go.mod h1:dk4zOBhZIy3TFOQlI8sbOa+ef0FjAcCHEnd8dO2J+LE= -cloud.google.com/go/monitoring v1.22.0 h1:mQ0040B7dpuRq1+4YiQD43M2vW9HgoVxY98xhqGT+YI= -cloud.google.com/go/monitoring v1.22.0/go.mod h1:hS3pXvaG8KgWTSz+dAdyzPrGUYmi2Q+WFX8g2hqVEZU= -cloud.google.com/go/networkconnectivity v1.15.2 h1:CuBLrRKhPbzXkFGADopQUpMcdY+SSfoy/3RqsMH2pq4= -cloud.google.com/go/networkconnectivity v1.15.2/go.mod h1:N1O01bEk5z9bkkWwXLKcN2T53QN49m/pSpjfUvlHDQY= -cloud.google.com/go/networkmanagement v1.16.0 h1:oT7c2Oo9NT54XjnP4GMNj/HEywrFnBz0u6QLJ2iu8NE= -cloud.google.com/go/networkmanagement v1.16.0/go.mod h1:Yc905R9U5jik5YMt76QWdG5WqzPU4ZsdI/mLnVa62/Q= -cloud.google.com/go/networksecurity v0.10.2 h1://zFZM8XZZs+3Y6QKuLqwD5tZ+B/17KUo/rJpGW2tJs= -cloud.google.com/go/networksecurity v0.10.2/go.mod h1:puU3Gwchd6Y/VTyMkL50GI2RSRMS3KXhcDBY1HSOcck= -cloud.google.com/go/notebooks v1.12.2 h1:BHIH9kf/02wSCcLAVttEXHSFAgSotgRg2y1YjR7VDCc= -cloud.google.com/go/notebooks v1.12.2/go.mod h1:EkLwv8zwr8DUXnvzl944+sRBG+b73HEKzV632YYAGNI= -cloud.google.com/go/optimization v1.7.2 h1:yM4teRB60qyIm8cV4VRW4wepmHbXCoqv3QKGfKzylEQ= -cloud.google.com/go/optimization v1.7.2/go.mod h1:msYgDIh1SGSfq6/KiWJQ/uxMkWq8LekPyn1LAZ7ifNE= -cloud.google.com/go/orchestration v1.11.1 h1:uZOwdQoAamx8+X0UdMqY/lro3/h/Zhb7SnfArufNVcc= -cloud.google.com/go/orchestration v1.11.1/go.mod h1:RFHf4g88Lbx6oKhwFstYiId2avwb6oswGeAQ7Tjjtfw= -cloud.google.com/go/orgpolicy v1.14.1 h1:c1QLoM5v8/aDKgYVCUaC039lD3GPvqAhTVOwsGhIoZQ= -cloud.google.com/go/orgpolicy v1.14.1/go.mod h1:1z08Hsu1mkoH839X7C8JmnrqOkp2IZRSxiDw7W/Xpg4= -cloud.google.com/go/osconfig v1.14.2 h1:iBN87PQc+EGh5QqijM3CuxcibvDWmF+9k0eOJT27FO4= -cloud.google.com/go/osconfig v1.14.2/go.mod 
h1:kHtsm0/j8ubyuzGciBsRxFlbWVjc4c7KdrwJw0+g+pQ= -cloud.google.com/go/oslogin v1.14.2 h1:6ehIKkALrLe9zUHwEmfXRVuSPm3HiUmEnnDRr7yLIo8= -cloud.google.com/go/oslogin v1.14.2/go.mod h1:M7tAefCr6e9LFTrdWRQRrmMeKHbkvc4D9g6tHIjHySA= -cloud.google.com/go/phishingprotection v0.9.2 h1:SaW0IPf/1fflnzomjy7+9EMtReXuxkYpUAf/77m5xL8= -cloud.google.com/go/phishingprotection v0.9.2/go.mod h1:mSCiq3tD8fTJAuXq5QBHFKZqMUy8SfWsbUM9NpzJIRQ= -cloud.google.com/go/policytroubleshooter v1.11.2 h1:sTIH5AQ8tcgmnqrqlZfYWymjMhPh4ZEt4CvQGgG+kzc= -cloud.google.com/go/policytroubleshooter v1.11.2/go.mod h1:1TdeCRv8Qsjcz2qC3wFltg/Mjga4HSpv8Tyr5rzvPsw= -cloud.google.com/go/privatecatalog v0.10.2 h1:01RPfn8IL2//8UHAmImRraTFYM/3gAEiIxudWLWrp+0= -cloud.google.com/go/privatecatalog v0.10.2/go.mod h1:o124dHoxdbO50ImR3T4+x3GRwBSTf4XTn6AatP8MgsQ= -cloud.google.com/go/pubsub v1.45.1 h1:ZC/UzYcrmK12THWn1P72z+Pnp2vu/zCZRXyhAfP1hJY= -cloud.google.com/go/pubsub v1.45.1/go.mod h1:3bn7fTmzZFwaUjllitv1WlsNMkqBgGUb3UdMhI54eCc= -cloud.google.com/go/pubsublite v1.8.2 h1:jLQozsEVr+c6tOU13vDugtnaBSUy/PD5zK6mhm+uF1Y= -cloud.google.com/go/pubsublite v1.8.2/go.mod h1:4r8GSa9NznExjuLPEJlF1VjOPOpgf3IT6k8x/YgaOPI= -cloud.google.com/go/recaptchaenterprise/v2 v2.19.0 h1:J/J7ZeVOX+sqn0hxzkOBfnQfBAzPZt8KaAuQoarQWQM= -cloud.google.com/go/recaptchaenterprise/v2 v2.19.0/go.mod h1:vnbA2SpVPPwKeoFrCQxR+5a0JFRRytwBBG69Zj9pGfk= -cloud.google.com/go/recommendationengine v0.9.2 h1:RHVdmoNBdzgRJXI/3SV+GB5TTv/umsVguiaEvmKOh98= -cloud.google.com/go/recommendationengine v0.9.2/go.mod h1:DjGfWZJ68ZF5ZuNgoTVXgajFAG0yLt4CJOpC0aMK3yw= -cloud.google.com/go/recommender v1.13.2 h1:xDFzlFk5Xp5MXnac468eicKM3MUo6UNdxoYuBMOF1mE= -cloud.google.com/go/recommender v1.13.2/go.mod h1:XJau4M5Re8F4BM+fzF3fqSjxNJuM66fwF68VCy/ngGE= -cloud.google.com/go/redis v1.17.2 h1:QbW264RBH+NSVEQqlDoHfoxcreXK8QRRByTOR2CFbJs= -cloud.google.com/go/redis v1.17.2/go.mod h1:h071xkcTMnJgQnU/zRMOVKNj5J6AttG16RDo+VndoNo= -cloud.google.com/go/resourcemanager v1.10.2 
h1:LpqZZGM0uJiu1YWM878AA8zZ/qOQ/Ngno60Q8RAraAI= -cloud.google.com/go/resourcemanager v1.10.2/go.mod h1:5f+4zTM/ZOTDm6MmPOp6BQAhR0fi8qFPnvVGSoWszcc= -cloud.google.com/go/resourcesettings v1.8.2 h1:ISRX2HZHNS17F/EuIwzPrQwEyIyUJayGuLrS51yt6Wk= -cloud.google.com/go/resourcesettings v1.8.2/go.mod h1:uEgtPiMA+xuBUM4Exu+ZkNpMYP0BLlYeJbyNHfrc+U0= -cloud.google.com/go/retail v1.19.1 h1:FVzvA+VuEdNoMz2WzWZ5KwfG+CX+jSv+SOspyQPLuRs= -cloud.google.com/go/retail v1.19.1/go.mod h1:W48zg0zmt2JMqmJKCuzx0/0XDLtovwzGAeJjmv6VPaE= -cloud.google.com/go/run v1.7.0 h1:GJtHWUgi8CK+YPhmTR3tKBAmDmU9RRMYqiGKCmIgFG8= -cloud.google.com/go/run v1.7.0/go.mod h1:IvJOg2TBb/5a0Qkc6crn5yTy5nkjcgSWQLhgO8QL8PQ= -cloud.google.com/go/scheduler v1.11.2 h1:PfkvJP1qKu9NvFB65Ja/s918bPZWMBcYkg35Ljdw1Oc= -cloud.google.com/go/scheduler v1.11.2/go.mod h1:GZSv76T+KTssX2I9WukIYQuQRf7jk1WI+LOcIEHUUHk= -cloud.google.com/go/secretmanager v1.14.2 h1:2XscWCfy//l/qF96YE18/oUaNJynAx749Jg3u0CjQr8= -cloud.google.com/go/secretmanager v1.14.2/go.mod h1:Q18wAPMM6RXLC/zVpWTlqq2IBSbbm7pKBlM3lCKsmjw= -cloud.google.com/go/security v1.18.2 h1:9Nzp9LGjiDvHqy7X7Q9GrS5lIHN0bI8RvDjkrl4ILO0= -cloud.google.com/go/security v1.18.2/go.mod h1:3EwTcYw8554iEtgK8VxAjZaq2unFehcsgFIF9nOvQmU= -cloud.google.com/go/securitycenter v1.35.2 h1:XkkE+IRE5/88drGPIuvETCSN7dAnWoqJahZzDbP5Hog= -cloud.google.com/go/securitycenter v1.35.2/go.mod h1:AVM2V9CJvaWGZRHf3eG+LeSTSissbufD27AVBI91C8s= -cloud.google.com/go/servicedirectory v1.12.2 h1:W/oZmTUzlWbeSTujRbmG9v7HZyHcorj608tkcD3vVYE= -cloud.google.com/go/servicedirectory v1.12.2/go.mod h1:F0TJdFjqqotiZRlMXgIOzszaplk4ZAmUV8ovHo08M2U= -cloud.google.com/go/shell v1.8.2 h1:lSfdEng3n7zZHzC40BJ4trEMyme3CGnLLnA09MlLQdQ= -cloud.google.com/go/shell v1.8.2/go.mod h1:QQR12T6j/eKvqAQLv6R3ozeoqwJ0euaFSz2qLqG93Bs= -cloud.google.com/go/spanner v1.73.0 h1:0bab8QDn6MNj9lNK6XyGAVFhMlhMU2waePPa6GZNoi8= -cloud.google.com/go/spanner v1.73.0/go.mod h1:mw98ua5ggQXVWwp83yjwggqEmW9t8rjs9Po1ohcUGW4= -cloud.google.com/go/speech 
v1.25.2 h1:rKOXU9LAZTOYHhRNB4gZDekNjJx21TktQpetBa5IzOk= -cloud.google.com/go/speech v1.25.2/go.mod h1:KPFirZlLL8SqPaTtG6l+HHIFHPipjbemv4iFg7rTlYs= -cloud.google.com/go/storage v1.50.0 h1:3TbVkzTooBvnZsk7WaAQfOsNrdoM8QHusXA1cpk6QJs= -cloud.google.com/go/storage v1.50.0/go.mod h1:l7XeiD//vx5lfqE3RavfmU9yvk5Pp0Zhcv482poyafY= -cloud.google.com/go/storagetransfer v1.11.2 h1:hMcP8ECmxedXjPxr2j3Ca45ro/TKEF+1YYjq2p5LMTI= -cloud.google.com/go/storagetransfer v1.11.2/go.mod h1:FcM29aY4EyZ3yVPmW5SxhqUdhjgPBUOFyy4rqiQbias= -cloud.google.com/go/talent v1.7.2 h1:KONR7KX/EXI3pO2cbSIDOBqhBzvgDS71vaMz8k4qRCg= -cloud.google.com/go/talent v1.7.2/go.mod h1:k1sqlDgS9gbc0gMTRuRQpX6C6VB7bGUxSPcoTRWJod8= -cloud.google.com/go/texttospeech v1.10.0 h1:icRAxYDtq3zO1T0YBT/fe8C/7pXoIqfkY4iYr5zG39I= -cloud.google.com/go/texttospeech v1.10.0/go.mod h1:215FpCOyRxxrS7DSb2t7f4ylMz8dXsQg8+Vdup5IhP4= -cloud.google.com/go/tpu v1.7.2 h1:xPBJd7xZgtl3CgrZoaUf7zFPVVj68jmzzGTSzkcsOtQ= -cloud.google.com/go/tpu v1.7.2/go.mod h1:0Y7dUo2LIbDUx0yQ/vnLC6e18FK6NrDfAhYS9wZ/2vs= -cloud.google.com/go/translate v1.12.2 h1:qECivi8O+jFI/vnvN9elK6CME+WAWy56GIBszF+/rNc= -cloud.google.com/go/translate v1.12.2/go.mod h1:jjLVf2SVH2uD+BNM40DYvRRKSsuyKxVvs3YjTW/XSWY= -cloud.google.com/go/video v1.23.2 h1:CGAPOXTJMoZm9PeHkohBlMTy8lqN6VWCNDjp5VODfy8= -cloud.google.com/go/video v1.23.2/go.mod h1:rNOr2pPHWeCbW0QsOwJRIe0ZiuwHpHtumK0xbiYB1Ew= -cloud.google.com/go/videointelligence v1.12.2 h1:ZLElysepw9vfQGAKWfnxdnSnHSKbEn/nU/tmBnCJLfA= -cloud.google.com/go/videointelligence v1.12.2/go.mod h1:8xKGlq0lNVyT8JgTkkCUCpyNJnYYEJVWGdqzv+UcwR8= -cloud.google.com/go/vision/v2 v2.9.2 h1:u4pu3gKps88oUe76WwVPeX9dgWVyyYopZ1s05FwsKEk= -cloud.google.com/go/vision/v2 v2.9.2/go.mod h1:WuxjVQdAy4j4WZqY5Rr655EdAgi8B707Vdb5T8c90uo= -cloud.google.com/go/vmmigration v1.8.2 h1:Hpqv3fZ3Ri1OMhTNVJgxxsTou2ZlRzKbnc1dSybTP5Y= -cloud.google.com/go/vmmigration v1.8.2/go.mod h1:FBejrsr8ZHmJb949BSOyr3D+/yCp9z9Hk0WtsTiHc1Q= -cloud.google.com/go/vmwareengine v1.3.2 
h1:LmkojgSLvsRwU1+c0iiY2XoBkXYKzpArElHC9IDWakg= -cloud.google.com/go/vmwareengine v1.3.2/go.mod h1:JsheEadzT0nfXOGkdnwtS1FhFAnj4g8qhi4rKeLi/AU= -cloud.google.com/go/vpcaccess v1.8.2 h1:nvrkqAjS2sorOu4YGCIXWz+Kk+5aAAdnaMD2tnsqeFg= -cloud.google.com/go/vpcaccess v1.8.2/go.mod h1:4yvYKNjlNjvk/ffgZ0PuEhpzNJb8HybSM1otG2aDxnY= -cloud.google.com/go/webrisk v1.10.2 h1:X7zSwS1mX2bxoZ30Ozh6lqiSLezl7RMBWwp5a3Mkxp4= -cloud.google.com/go/webrisk v1.10.2/go.mod h1:c0ODT2+CuKCYjaeHO7b0ni4CUrJ95ScP5UFl9061Qq8= -cloud.google.com/go/websecurityscanner v1.7.2 h1:8/4rfJXcyxozbfzI0lDFPcPShRE6bJ4HQwgDAG9J4oQ= -cloud.google.com/go/websecurityscanner v1.7.2/go.mod h1:728wF9yz2VCErfBaACA5px2XSYHQgkK812NmHcUsDXA= -cloud.google.com/go/workflows v1.13.2 h1:jYIxrDOVCGvTBHIAVhqQ+P8fhE0trm+Hf2hgL1YzmK0= -cloud.google.com/go/workflows v1.13.2/go.mod h1:l5Wj2Eibqba4BsADIRzPLaevLmIuYF2W+wfFBkRG3vU= -contrib.go.opencensus.io/exporter/zipkin v0.1.2 h1:YqE293IZrKtqPnpwDPH/lOqTWD/s3Iwabycam74JV3g= -contrib.go.opencensus.io/exporter/zipkin v0.1.2/go.mod h1:mP5xM3rrgOjpn79MM8fZbj3gsxcuytSqtH0dxSWW1RE= -dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= -dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= -filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= -filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= -github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs= -github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= -github.com/99designs/gqlgen v0.17.57 h1:Ak4p60BRq6QibxY0lEc0JnQhDurfhxA67sp02lMjmPc= -github.com/99designs/gqlgen v0.17.57/go.mod h1:Jx61hzOSTcR4VJy/HFIgXiQ5rJ0Ypw8DxWLjbYDAUw0= -github.com/99designs/keyring v1.2.1 h1:tYLp1ULvO7i3fI5vE21ReQuj99QFSs7lGm0xWyJo87o= -github.com/99designs/keyring v1.2.1/go.mod h1:fc+wB5KTk9wQ9sDx0kFXB3A0MaeGHM9AwRStKOQ5vOA= 
-github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU= -github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.4.0 h1:rTnT/Jrcm+figWlYz4Ixzt0SJVR2cMC8lvZcimipiEY= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.4.0/go.mod h1:ON4tFdPTwRcgWEaVDrN3584Ef+b7GgSJaXxe5fW9t4M= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2 h1:+5VZ72z0Qan5Bog5C+ZkgSqUbeVUd9wgtHOrIKuc5b8= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2/go.mod h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w= -github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 h1:u/LLAOFgsMv7HmNL4Qufg58y+qElGOt5qv0z1mURkRY= -github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0/go.mod h1:2e8rMJtl2+2j+HXbTBwnyGpm5Nou7KhvSfxOq8JpTag= -github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= -github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= -github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= -github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= -github.com/Azure/go-autorest/autorest v0.11.29 h1:I4+HL/JDvErx2LjyzaVxllw2lRDB5/BT2Bm4g20iqYw= -github.com/Azure/go-autorest/autorest v0.11.29/go.mod h1:ZtEzC4Jy2JDrZLxvWs8LrBWEBycl1hbT1eknI8MtfAs= -github.com/Azure/go-autorest/autorest/adal v0.9.23 h1:Yepx8CvFxwNKpH6ja7RZ+sKX+DWYNldbLiALMC3BTz8= -github.com/Azure/go-autorest/autorest/adal v0.9.23/go.mod h1:5pcMqFkdPhviJdlEy3kC/v1ZLnQl0MH6XA5YCcMhy4c= -github.com/Azure/go-autorest/autorest/azure/auth v0.5.11 h1:P6bYXFoao05z5uhOQzbC3Qd8JqF3jUoocoTeIxkp2cA= -github.com/Azure/go-autorest/autorest/azure/auth v0.5.11/go.mod h1:84w/uV8E37feW2NCJ08uT9VBfjfUHpgLVnG2InYD6cg= -github.com/Azure/go-autorest/autorest/azure/cli v0.4.6 
h1:w77/uPk80ZET2F+AfQExZyEWtn+0Rk/uw17m9fv5Ajc= -github.com/Azure/go-autorest/autorest/azure/cli v0.4.6/go.mod h1:piCfgPho7BiIDdEQ1+g4VmKyD5y+p/XtSNqE6Hc4QD0= -github.com/Azure/go-autorest/autorest/date v0.3.0 h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw= -github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= -github.com/Azure/go-autorest/logger v0.2.1 h1:IG7i4p/mDa2Ce4TRyAO8IHnVhAVF3RFU+ZtXWSmf4Tg= -github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= -github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo= -github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= -github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802 h1:1BDTz0u9nC3//pOCMdNH+CiXJVYJh5UQNCOBG7jbELc= -github.com/ClickHouse/clickhouse-go v1.4.3 h1:iAFMa2UrQdR5bHJ2/yaSLffZkxpcOYQMCUuKeNXGdqc= -github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.26.0 h1:f2Qw/Ehhimh5uO1fayV0QIW7DShEQqhtUfhYc+cBPlw= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.26.0/go.mod h1:2bIszWvQRlJVmJLiuLhukLImRjKPcYdzzsx6darK02A= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 h1:ErKg/3iS1AKcTkf3yixlZ54f9U1rljCkQyEXWUnIUxc= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0/go.mod h1:yAZHSGnqScoU556rBOVkwLze6WP5N+U11RHuWaGVxwY= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.48.1 h1:UQ0AhxogsIRZDkElkblfnwjc3IaltCm2HUMvezQaL7s= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.48.1/go.mod h1:jyqM3eLpJ3IbIFDTKVz2rF9T/xWGW0rIriGwnz8l9Tk= 
-github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1 h1:8nn+rsCvTq9axyEh382S0PFLBeaFwNsT43IrPWzctRU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1/go.mod h1:viRWSEhtMZqz1rhwmOVKkWl6SwmVowfL9O2YR5gI2PE= -github.com/Khan/genqlient v0.8.1 h1:wtOCc8N9rNynRLXN3k3CnfzheCUNKBcvXmVv5zt6WCs= -github.com/Khan/genqlient v0.8.1/go.mod h1:R2G6DzjBvCbhjsEajfRjbWdVglSH/73kSivC9TLWVjU= -github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= -github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= -github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I= -github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c= -github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8= -github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo= -github.com/ahmetb/gen-crd-api-reference-docs v0.3.1-0.20210609063737-0067dc6dcea2 h1:t/ces1/q8tuApSb+T5ajsu3wqkofUT43U1gpDYTPYME= -github.com/ahmetb/gen-crd-api-reference-docs v0.3.1-0.20210609063737-0067dc6dcea2/go.mod h1:TdjdkYhlOifCQWPs1UdTma97kQQMozf5h26hTuG70u8= -github.com/alecthomas/kingpin/v2 v2.4.0 h1:f48lwail6p8zpO1bC4TxtqACaGqHYA22qkHjHpqDjYY= -github.com/alecthomas/kingpin/v2 v2.4.0/go.mod h1:0gyi0zQnjuFk8xrkNKamJoyUo382HRL7ATRpFZCw6tE= -github.com/alecthomas/participle/v2 v2.1.4 h1:W/H79S8Sat/krZ3el6sQMvMaahJ+XcM9WSI2naI7w2U= -github.com/alecthomas/participle/v2 v2.1.4/go.mod h1:8tqVbpTX20Ru4NfYQgZf4mP18eXPTBViyMWiArNEgGI= -github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc= -github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= -github.com/alexflint/go-arg v1.5.1 h1:nBuWUCpuRy0snAG+uIJ6N0UvYxpxA0/ghA/AaHxlT8Y= -github.com/alexflint/go-arg 
v1.5.1/go.mod h1:A7vTJzvjoaSTypg4biM5uYNTkJ27SkNTArtYXnlqVO8= -github.com/alexflint/go-scalar v1.2.0 h1:WR7JPKkeNpnYIOfHRa7ivM21aWAdHD0gEWHCx+WQBRw= -github.com/alexflint/go-scalar v1.2.0/go.mod h1:LoFvNMqS1CPrMVltza4LvnGKhaSpc3oyLEBUZVhhS2o= -github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= -github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= -github.com/antihax/optional v1.0.0 h1:xK2lYat7ZLaVVcIuj82J8kIro4V6kDe0AUDFboUCwcg= -github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI= -github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= -github.com/apache/arrow/go/v10 v10.0.1 h1:n9dERvixoC/1JjDmBcs9FPaEryoANa2sCgVFo6ez9cI= -github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0= -github.com/apache/thrift v0.16.0 h1:qEy6UW60iVOlUy+b9ZR0d5WzUWYGOo4HfopoyBaNmoY= -github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= -github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= -github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= -github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a h1:idn718Q4B6AGu/h5Sxe66HYVdqdGu2l9Iebqhi/AEoA= -github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= -github.com/aws/aws-sdk-go v1.55.6 h1:cSg4pvZ3m8dgYcgqB97MrcdjUmZ1BeMYKUxMMB89IPk= -github.com/aws/aws-sdk-go v1.55.6/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= -github.com/aws/aws-sdk-go-v2 v1.31.0 h1:3V05LbxTSItI5kUqNwhJrrrY1BAXxXt0sN0l72QmG5U= -github.com/aws/aws-sdk-go-v2 v1.31.0/go.mod h1:ztolYtaEUtdpf9Wftr31CJfLVjOnD/CVRkKOOYgF8hA= 
-github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8 h1:tcFliCWne+zOuUfKNRn8JdFBuWPDuISDH08wD2ULkhk= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8/go.mod h1:JTnlBSot91steJeti4ryyu/tLd4Sk84O5W22L7O2EQU= -github.com/aws/aws-sdk-go-v2/config v1.27.36 h1:4IlvHh6Olc7+61O1ktesh0jOcqmq/4WG6C2Aj5SKXy0= -github.com/aws/aws-sdk-go-v2/config v1.27.36/go.mod h1:IiBpC0HPAGq9Le0Xxb1wpAKzEfAQ3XlYgJLYKEVYcfw= -github.com/aws/aws-sdk-go-v2/credentials v1.17.34 h1:gmkk1l/cDGSowPRzkdxYi8edw+gN4HmVK151D/pqGNc= -github.com/aws/aws-sdk-go-v2/credentials v1.17.34/go.mod h1:4R9OEV3tgFMsok4ZeFpExn7zQaZRa9MRGFYnI/xC/vs= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.14 h1:C/d03NAmh8C4BZXhuRNboF/DqhBkBCeDiJDcaqIT5pA= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.14/go.mod h1:7I0Ju7p9mCIdlrfS+JCgqcYD0VXz/N4yozsox+0o078= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.33 h1:fAoVmNGhir6BR+RU0/EI+6+D7abM+MCwWf8v4ip5jNI= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.33/go.mod h1:84XgODVR8uRhmOnUkKGUZKqIMxmjmLOR8Uyp7G/TPwc= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.18 h1:kYQ3H1u0ANr9KEKlGs/jTLrBFPo8P8NaH/w7A01NeeM= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.18/go.mod h1:r506HmK5JDUh9+Mw4CfGJGSSoqIiLCndAuqXuhbv67Y= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.18 h1:Z7IdFUONvTcvS7YuhtVxN99v2cCoHRXOS4mTr0B/pUc= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.18/go.mod h1:DkKMmksZVVyat+Y+r1dEOgJEfUeA7UngIHWeKsi0yNc= -github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 h1:VaRN3TlFdd6KxX1x3ILT5ynH6HvKgqdiXoTxAF4HQcQ= -github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.14 h1:ZSIPAkAsCCjYrhqfw2+lNzWDzxzHXEckFkTePL5RSWQ= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.14/go.mod h1:AyGgqiKv9ECM6IZeNQtdT8NnMvUb3/2wokeq2Fgryto= -github.com/aws/aws-sdk-go-v2/service/ecr v1.17.18 
h1:uiF/RI+Up8H2xdgT2GWa20YzxiKEalHieqNjm6HC3Xk= -github.com/aws/aws-sdk-go-v2/service/ecr v1.17.18/go.mod h1:DQtDYmexqR+z+B6HBCvY7zK/tuXKv6Zy/IwOXOK3eow= -github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.13.17 h1:bcQy5/dcJO8VQD+p0tDoIYdgEC3ch9f1/BNRES7XMug= -github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.13.17/go.mod h1:r1Vuka0kyzqN0sZm4lYTXf0Vhl+o/mTLq6vKpBBZYaQ= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.5 h1:QFASJGfT8wMXtuP3D5CRmMjARHv9ZmzFUMJznHDOY3w= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.5/go.mod h1:QdZ3OmoIjSX+8D1OPAzPxDfjXASbBMDsz9qvtyIhtik= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.18 h1:BBYoNQt2kUZUUK4bIPsKrCcjVPUMNsgQpNAwhznK/zo= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.18/go.mod h1:NS55eQ4YixUJPTC+INxi2/jCqe1y2Uw3rnh9wEOVJxY= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.20 h1:Xbwbmk44URTiHNx6PNo0ujDE6ERlsCKJD3u1zfnzAPg= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.20/go.mod h1:oAfOFzUB14ltPZj1rWwRc3d/6OgD76R8KlvU3EqM9Fg= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.17 h1:HfVVR1vItaG6le+Bpw6P4midjBDMKnjMyZnw9MXYUcE= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.17/go.mod h1:YqMdV+gEKCQ59NrB7rzrJdALeBIsYiVi8Inj3+KcqHI= -github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11 h1:3/gm/JTX9bX8CpzTgIlrtYpB3EVBDxyg/GY/QdcIEZw= -github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11/go.mod h1:fmgDANqTUCxciViKl9hb/zD5LFbvPINFRgWhDbR+vZo= -github.com/aws/aws-sdk-go-v2/service/sso v1.23.0 h1:fHySkG0IGj2nepgGJPmmhZYL9ndnsq1Tvc6MeuVQCaQ= -github.com/aws/aws-sdk-go-v2/service/sso v1.23.0/go.mod h1:XRlMvmad0ZNL+75C5FYdMvbbLkd6qiqz6foR1nA1PXY= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.27.0 h1:cU/OeQPNReyMj1JEBgjE29aclYZYtXcsPMXbTkVGMFk= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.27.0/go.mod h1:FnvDM4sfa+isJ3kDXIzAB9GAwVSzFzSy97uZ3IsHo4E= -github.com/aws/aws-sdk-go-v2/service/sts v1.31.0 
h1:GNVxIHBTi2EgwCxpNiozhNasMOK+ROUA2Z3X+cSBX58= -github.com/aws/aws-sdk-go-v2/service/sts v1.31.0/go.mod h1:yMWe0F+XG0DkRZK5ODZhG7BEFYhLXi2dqGsv6tX0cgI= -github.com/aws/smithy-go v1.21.0 h1:H7L8dtDRk0P1Qm6y0ji7MCYMQObJ5R9CRpyPhRUkLYA= -github.com/aws/smithy-go v1.21.0/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= -github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20221004211355-a250ad2ca1e3 h1:Ted/bR1N6ltMrASdwRhX1BrGYSFg3aeGMlK8GlgkGh4= -github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20221004211355-a250ad2ca1e3/go.mod h1:m06KtrZgOloUaePAQMv+Ha8kRmTnKdozTHZrweepIrw= -github.com/blendle/zapdriver v1.3.1 h1:C3dydBOWYRiOk+B8X9IVZ5IOe+7cl+tGOexN4QqHfpE= -github.com/blendle/zapdriver v1.3.1/go.mod h1:mdXfREi6u5MArG4j9fewC+FGnXaBR+T4Ox4J2u4eHCc= -github.com/bmatcuk/doublestar/v4 v4.6.1 h1:FH9SifrbvJhnlQpztAx++wlkk70QBf0iBWDwNy7PA4I= -github.com/bmatcuk/doublestar/v4 v4.6.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= -github.com/bradleyjkemp/cupaloy/v2 v2.6.0 h1:knToPYa2xtfg42U3I6punFEjaGFKWQRXJwj0JTv4mTs= -github.com/bradleyjkemp/cupaloy/v2 v2.6.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0= -github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500 h1:6lhrsTEnloDPXyeZBvSYvQf8u86jbKehZPVDDlkgDl4= -github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500/go.mod h1:S/7n9copUssQ56c7aAgHqftWO4LTf4xY6CGWt8Bc+3M= -github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= -github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= -github.com/cert-manager/cert-manager v1.16.3 h1:seEF5eidFaeduaCuM85PFEuzH/1X/HOV5Y8zDQrHgpc= -github.com/cert-manager/cert-manager v1.16.3/go.mod h1:6JQ/GAZ6dH+erqS1BbaqorPy8idJzCtWFUmJQBTjo6Q= -github.com/chrismellard/docker-credential-acr-env v0.0.0-20221002210726-e883f69e0206 h1:lG6Usi/kX/JBZzGz1H+nV+KwM97vThQeKunCbS6PutU= -github.com/chrismellard/docker-credential-acr-env v0.0.0-20221002210726-e883f69e0206/go.mod 
h1:1UmFRnmMnVsHwD+ZntmLkoVBB1ZLa6V+XXEbF6hZCxU= -github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI= -github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk= -github.com/client9/misspell v0.3.4 h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI= -github.com/cloudevents/sdk-go/v2 v2.15.2 h1:54+I5xQEnI73RBhWHxbI1XJcqOFOVJN85vb41+8mHUc= -github.com/cloudevents/sdk-go/v2 v2.15.2/go.mod h1:lL7kSWAE/V8VI4Wh0jbL2v/jvqsm6tjmaQBSvxcv4uE= -github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58 h1:F1EaeKL/ta07PY/k9Os/UFtwERei2/XzGemhpGnBKNg= -github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58/go.mod h1:EOBUe0h4xcZ5GoxqC5SDxFQ8gwyZPKQoEzownBlhI80= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f h1:WBZRG4aNOuI15bLRrCgN8fCq8E5Xuty6jGbmSNEvSsU= -github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 h1:Om6kYQYDUk5wWbT0t0q6pvyM49i9XZAv9dDrkDA7gjk= -github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= -github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls= -github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= -github.com/cockroachdb/cockroach-go/v2 v2.1.1 h1:3XzfSMuUT0wBe1a3o5C0eOTcArhmmFAg2Jzh/7hhKqo= -github.com/cockroachdb/cockroach-go/v2 v2.1.1/go.mod h1:7NtUnP6eK+l6k483WSYNrq3Kb23bWV10IRV1TyeSpwM= -github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= -github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= -github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= -github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= -github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= -github.com/containerd/log v0.1.0/go.mod 
h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= -github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= -github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= -github.com/containerd/stargz-snapshotter/estargz v0.14.3 h1:OqlDCK3ZVUO6C3B/5FSkDwbkEETK84kQgEeFwDC+62k= -github.com/containerd/stargz-snapshotter/estargz v0.14.3/go.mod h1:KY//uOCIkSuNAHhJogcZtrNHdKrA99/FCCRjE3HD36o= -github.com/containerd/typeurl/v2 v2.2.0 h1:6NBDbQzr7I5LHgp34xAXYF5DOTQDn05X58lsPEmzLso= -github.com/containerd/typeurl/v2 v2.2.0/go.mod h1:8XOOxnyatxSWuG8OfsZXVnAF4iZfedjS/8UHSPJnX4g= -github.com/coreos/go-oidc v2.3.0+incompatible h1:+5vEsrgprdLjjQ9FzIKAzQz1wwPD+83hQRfUIPh7rO0= -github.com/coreos/go-oidc v2.3.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= -github.com/coreos/go-semver v0.3.1 h1:yi21YpKnrx1gt5R+la8n5WgS0kCrsPp33dmEyHReZr4= -github.com/coreos/go-semver v0.3.1/go.mod h1:irMmmIw/7yzSRPWryHsK7EYSg09caPQL03VsM8rvUec= -github.com/coreos/go-systemd/v22 v22.5.0 h1:RrqgGjYQKalulkV8NGVIfkXQf6YYmOyiJKk8iXXhfZs= -github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= -github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= -github.com/cpuguy83/go-md2man/v2 v2.0.6 h1:XJtiaUW6dEEqVuZiMTn1ldk455QWwEIsMIJlo5vtkx0= -github.com/cznic/mathutil v0.0.0-20180504122225-ca4c9f2c1369 h1:XNT/Zf5l++1Pyg08/HV04ppB0gKxAqtZQBRYiYrUuYk= -github.com/cznic/mathutil v0.0.0-20180504122225-ca4c9f2c1369/go.mod h1:e6NPNENfs9mPDVNRekM7lKScauxd5kXTr1Mfyig6TDM= -github.com/danieljoos/wincred v1.1.2 h1:QLdCxFs1/Yl4zduvBdcHB8goaYk9RARS2SgLLRuAyr0= -github.com/danieljoos/wincred v1.1.2/go.mod h1:GijpziifJoIBfYh+S7BbkdUTU4LfM+QnGqR5Vl2tAx0= -github.com/deepmap/oapi-codegen v1.8.2 h1:SegyeYGcdi0jLLrpbCMoJxnUUn8GBXHsvr4rbzjuhfU= -github.com/deepmap/oapi-codegen 
v1.8.2/go.mod h1:YLgSKSDv/bZQB7N4ws6luhozi3cEdRktEqrX88CvjIw= -github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U= -github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= -github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= -github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= -github.com/docker/cli v24.0.0+incompatible h1:0+1VshNwBQzQAx9lOl+OYCTCEAD8fKs/qeXMx3O0wqM= -github.com/docker/cli v24.0.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8= -github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v28.0.1+incompatible h1:FCHjSRdXhNRFjlHMTv4jUNlIBbTeRjrWfeFuJp7jpo0= -github.com/docker/docker v28.0.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker v28.3.3+incompatible h1:Dypm25kh4rmk49v1eiVbsAtpAsYURjYkaKubwuBdxEI= -github.com/docker/docker v28.3.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker-credential-helpers v0.7.0 h1:xtCHsjxogADNZcdv1pKUHXryefjlVRqWqIhk/uXJp0A= -github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNkFR1R1V12OJRRO5lzt2D1b5X0= -github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= -github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= -github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= -github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= -github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= -github.com/dvsekhvalnov/jose2go 
v1.6.0 h1:Y9gnSnP4qEI0+/uQkHvFXeD2PLPJeXEL+ySMEA2EjTY= -github.com/dvsekhvalnov/jose2go v1.6.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= -github.com/ebitengine/purego v0.8.2 h1:jPPGWs2sZ1UgOSgD2bClL0MJIqu58nOmIcBuXr62z1I= -github.com/ebitengine/purego v0.8.2/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= -github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw= -github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= -github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712 h1:aaQcKT9WumO6JEJcRyTqFVq4XUZiUcKR2/GI31TOcz8= -github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= -github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= -github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw= -github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8= -github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= -github.com/expr-lang/expr v1.17.0 h1:+vpszOyzKLQXC9VF+wA8cVA0tlA984/Wabc/1hF9Whg= -github.com/expr-lang/expr v1.17.0/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4= -github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= -github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/form3tech-oss/jwt-go v3.2.5+incompatible h1:/l4kBbb4/vGSsdtB5nUe8L7B9mImVMaBPw9L/0TBHU8= -github.com/form3tech-oss/jwt-go v3.2.5+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= -github.com/fsouza/fake-gcs-server v1.17.0 h1:OeH75kBZcZa3ZE+zz/mFdJ2btt9FgqfjI7gIh9+5fvk= -github.com/fsouza/fake-gcs-server v1.17.0/go.mod h1:D1rTE4YCyHFNa99oyJJ5HyclvN/0uQR+pM/VdlL83bw= -github.com/gabriel-vasile/mimetype v1.4.1 h1:TRWk7se+TOjCYgRth7+1/OYLNiRNIotknkFtf/dnN7Q= 
-github.com/gabriel-vasile/mimetype v1.4.1/go.mod h1:05Vi0w3Y9c/lNvJOdmIwvrrAhX3rYhfQQCaf9VJcv7M= -github.com/getkin/kin-openapi v0.131.0 h1:NO2UeHnFKRYhZ8wg6Nyh5Cq7dHk4suQQr72a4pMrDxE= -github.com/getkin/kin-openapi v0.131.0/go.mod h1:3OlG51PCYNsPByuiMB0t4fjnNlIDnaEDsjiKUV8nL58= -github.com/go-chi/chi/v5 v5.2.1 h1:KOIHODQj58PmL80G2Eak4WdvUzjSJSm0vG72crDCqb8= -github.com/go-chi/chi/v5 v5.2.1/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops= -github.com/go-chi/chi/v5 v5.2.2 h1:CMwsvRVTbXVytCk1Wd72Zy1LAsAh9GxMmSNWLHCG618= -github.com/go-chi/chi/v5 v5.2.2/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops= -github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4= -github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= -github.com/go-chi/cors v1.2.2 h1:Jmey33TE+b+rB7fT8MUy1u0I4L+NARQlK6LhzKPSyQE= -github.com/go-chi/cors v1.2.2/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= -github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE= -github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA= -github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= -github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= -github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= -github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= -github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= -github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= -github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= -github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo= -github.com/go-sql-driver/mysql v1.9.3/go.mod 
h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= -github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I= -github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= -github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= -github.com/gobuffalo/flect v1.0.3 h1:xeWBM2nui+qnVvNM4S3foBhCAL2XgPU+a7FdpelbTq4= -github.com/gobuffalo/flect v1.0.3/go.mod h1:A5msMlrHtLqh9umBSnvabjsMrCcCpAyzglnDvkbYKHs= -github.com/gobuffalo/here v0.6.0 h1:hYrd0a6gDmWxBM4TnrGw8mQg24iSVoIkHEk7FodQcBI= -github.com/gobuffalo/here v0.6.0/go.mod h1:wAG085dHOYqUpf+Ap+WOdrPTp5IYcDAs/x7PLa8Y5fM= -github.com/goccy/go-json v0.9.11 h1:/pAaQDLHEoCq/5FFmSKBswWmK6H0e8g4159Kc/X/nqk= -github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= -github.com/gocql/gocql v0.0.0-20210515062232-b7ef815b4556 h1:N/MD/sr6o61X+iZBAT2qEUF023s4KbA8RWfKzl0L6MQ= -github.com/gocql/gocql v0.0.0-20210515062232-b7ef815b4556/go.mod h1:DL0ekTmBSTdlNF25Orwt/JMzqIq3EJ4MVa/J/uK64OY= -github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0= -github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= -github.com/gofrs/uuid/v5 v5.3.0 h1:m0mUMr+oVYUdxpMLgSYCZiXe7PuVPnI94+OMeVBNedk= -github.com/gofrs/uuid/v5 v5.3.0/go.mod h1:CDOjlDMVAtN56jqyRUZh58JT31Tiw7/oQyEXZV+9bD8= -github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= -github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang-migrate/migrate/v4 v4.18.3 h1:EYGkoOsvgHHfm5U/naS1RP/6PL/Xv3S4B/swMiAmDLs= -github.com/golang-migrate/migrate/v4 v4.18.3/go.mod 
h1:99BKpIi6ruaaXRM1A77eqZ+FWPQ3cfRa+ZVy5bmWMaY= -github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY= -github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= -github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= -github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= -github.com/golang/glog v1.2.4 h1:CNNw5U8lSiiBk7druxtSHHTsRWcxKoac6kZKm2peBBc= -github.com/golang/glog v1.2.4/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= -github.com/golang/glog v1.2.5 h1:DrW6hGnjIhtvhOIiAKT6Psh/Kd/ldepEa81DKeiRJ5I= -github.com/golang/glog v1.2.5/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= -github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= -github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= -github.com/golang/mock v1.3.1 h1:qGJ6qTW+x6xX/my+8YUVl4WNpX9B7+/l2tRsHGZ7f2s= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= -github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= -github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= -github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= -github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= -github.com/google/cel-go v0.23.2 h1:UdEe3CvQh3Nv+E/j9r1Y//WO0K0cSyD7/y0bzyLIMI4= -github.com/google/cel-go v0.23.2/go.mod h1:52Pb6QsDbC5kvgxvZhiL9QX1oZEkcUF/ZqaPx1J5Wwo= -github.com/google/flatbuffers v2.0.8+incompatible h1:ivUb1cGomAB101ZM1T0nOiWz9pSrTMoa9+EiY7igmkM= -github.com/google/flatbuffers 
v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-containerregistry v0.16.1 h1:rUEt426sR6nyrL3gt+18ibRcvYpKYdpsa5ZW7MA08dQ= -github.com/google/go-containerregistry v0.16.1/go.mod h1:u0qB2l7mvtWVR5kNcbFIhFY1hLbf8eeGapA+vbFDCtQ= -github.com/google/go-containerregistry/pkg/authn/k8schain v0.0.0-20230209165335-3624968304fd h1:hQf//Ak0trkoqnm94i9mw00d7axUwfK92hMxslxNKYc= -github.com/google/go-containerregistry/pkg/authn/k8schain v0.0.0-20230209165335-3624968304fd/go.mod h1:x5fIlj5elU+/eYF60q4eASMQ9kDc+GMFa7UU9M3mFFw= -github.com/google/go-containerregistry/pkg/authn/kubernetes v0.0.0-20230209165335-3624968304fd h1:AQZlI371LcvBYY/7Q55TjxrpZJs6wtEXMw4Wq38XLy8= -github.com/google/go-containerregistry/pkg/authn/kubernetes v0.0.0-20230209165335-3624968304fd/go.mod h1:6pjZpt+0dg+Z0kUEn53qLtD57raiZo/bqWzsuX6dDjo= -github.com/google/go-github/v39 v39.2.0 h1:rNNM311XtPOz5rDdsJXAp2o8F67X9FnROXTvto3aSnQ= -github.com/google/go-github/v39 v39.2.0/go.mod h1:C1s8C5aCC9L+JXIYpJM5GYytdX52vC1bLvHEF1IhBrE= -github.com/google/go-pkcs11 v0.3.0 h1:PVRnTgtArZ3QQqTGtbtjtnIkzl2iY2kt24yqbrf7td8= -github.com/google/go-pkcs11 v0.3.0/go.mod h1:6eQoGcuNJpa7jnd5pMGdkSaQpNDYvPlXWMcjXXThLlY= -github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= -github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= -github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= -github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= -github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= -github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= -github.com/googleapis/enterprise-certificate-proxy 
v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= -github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q= -github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA= -github.com/googleapis/google-cloud-go-testing v0.0.0-20210719221736-1c9a4c676720 h1:zC34cGQu69FG7qzJ3WiKW244WfhDC3xxYMeNOX2gtUQ= -github.com/googleapis/google-cloud-go-testing v0.0.0-20210719221736-1c9a4c676720/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= -github.com/gorilla/handlers v1.4.2 h1:0QniY0USkHQ1RGCLfKxeNHK9bkDHGRYGNDFBCS+YARg= -github.com/gorilla/handlers v1.4.2/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= -github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc= -github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= -github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo= -github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA= -github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79 h1:+ngKgrYPPJrOjhax5N+uePQ0Fh1Z7PheYoUI/0nzkPA= -github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= -github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= -github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= -github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho= -github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= -github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= -github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.1 
h1:e9Rjr40Z98/clHv5Yg79Is0NtosR5LXRvdr7o/6NwbA= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.1/go.mod h1:tIxuGz/9mpox++sgp9fJjHO0+q1X9/UOWd798aAm22M= -github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= -github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= -github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8= -github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4= -github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= -github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= -github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= -github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= -github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= -github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= -github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= -github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= -github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= -github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= 
-github.com/ianlancetaylor/demangle v0.0.0-20240312041847-bd984b5ce465 h1:KwWnWVWCNtNq/ewIX7HIKnELmEx2nDP42yskD/pi7QE= -github.com/ianlancetaylor/demangle v0.0.0-20240312041847-bd984b5ce465/go.mod h1:gx7rwoVhcfuVKG5uya9Hs3Sxj7EIvldVofAWIUtGouw= -github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= -github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= -github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= -github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/influxdata/influxdb-client-go/v2 v2.9.0 h1:1Ejxpt+cpWkadefxd5xvVx7pFgFaafdNp1ItfHzKRW4= -github.com/influxdata/influxdb-client-go/v2 v2.9.0/go.mod h1:x7Jo5UHHl+w8wu8UnGiNobDDHygojXwJX4mx7rXGKMk= -github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839 h1:W9WBk7wlPfJLvMCdtV4zPulc4uCPrlywQOmbFOhgQNU= -github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839/go.mod h1:xaLFMmpvUxqXtVkUJfg9QmT88cDaCJ3ZKgdZ78oO8Qo= -github.com/influxdata/tdigest v0.0.1 h1:XpFptwYmnEKUqmkcDjrzffswZ3nvNeevbUSLPP/ZzIY= -github.com/influxdata/tdigest v0.0.1/go.mod h1:Z0kXnxzbTC2qrx4NaIzYkE1k66+6oEDQTvL95hQFh5Y= -github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= -github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= -github.com/jackc/pgconn v1.14.3 h1:bVoTr12EGANZz66nZPkMInAV/KHD2TxH9npjXXgiB3w= -github.com/jackc/pgconn v1.14.3/go.mod h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM= -github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa h1:s+4MhCQ6YrzisK6hFJUX53drDT4UsSW3DEhKn0ifuHw= -github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds= -github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= -github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= 
-github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= -github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= -github.com/jackc/pgproto3/v2 v2.3.3 h1:1HLSx5H+tXR9pW3in3zaztoEwQYRC9SQaYUHjTSUOag= -github.com/jackc/pgproto3/v2 v2.3.3/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= -github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= -github.com/jackc/pgtype v1.14.0 h1:y+xUdabmyMkJLyApYuPj38mW+aAIqCe5uuBB51rH3Vw= -github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= -github.com/jackc/pgx/v4 v4.18.2 h1:xVpYkNR5pk5bMCZGfClbO962UIqVABcAGt7ha1s/FeU= -github.com/jackc/pgx/v4 v4.18.2/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= -github.com/jackc/pgx/v5 v5.6.0 h1:SWJzexBzPL5jb0GEsrPMLIsi/3jOo7RHlzTjcAeDrPY= -github.com/jackc/pgx/v5 v5.6.0/go.mod h1:DNZ/vlrUnhWCoFGxHAG8U2ljioxukquj7utPDgtQdTw= -github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= -github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= -github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4= -github.com/jessevdk/go-flags v1.6.1/go.mod h1:Mk8T1hIAWpOiJiHa9rJASDK2UGWji0EuPGBnNLMooyc= -github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= -github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= -github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= -github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= -github.com/jmespath/go-jmespath v0.4.1-0.20220621161143-b0104c826a24 h1:liMMTbpW34dhU4az1GN0pTPADwNmvoRSeoZ6PItiqnY= -github.com/jmespath/go-jmespath v0.4.1-0.20220621161143-b0104c826a24/go.mod 
h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= -github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4= -github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= -github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA= -github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024 h1:rBMNdlhTLzJjJSDIjNEXX1Pz3Hmwmz91v+zycvx9PJc= -github.com/k0kubun/pp v2.3.0+incompatible h1:EKhKbi34VQDWJtq+zpsKSEhkHHs9w2P8Izbq8IhLVSo= -github.com/k0kubun/pp v2.3.0+incompatible/go.mod h1:GWse8YhT0p8pT4ir3ZgBbfZild3tgzSScAn6HmfYukg= -github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0 h1:iQTw/8FWTuc7uiaSepXwyf3o52HaUYcV+Tu66S3F5GA= -github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8= -github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= -github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= -github.com/kedacore/keda/v2 v2.16.1 h1:LfYsxfSX8DjetLW8q9qnriImH936POrQJvE+caRoScI= -github.com/kedacore/keda/v2 v2.16.1/go.mod h1:pO2ksUCwSOQ2u3OWqj+jh9Hgf0+26MZug6dF7WWgcAk= -github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= -github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= -github.com/kisielk/errcheck v1.5.0 h1:e8esj/e4R+SAOwFwN+n3zr0nYeCyeweozKfO23MvHzY= -github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= -github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4= -github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= -github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= 
-github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= -github.com/klauspost/cpuid/v2 v2.0.9 h1:lgaqFMSdTdQYdZ04uHyN2d/eKdOMyi2YLSvlQIBFYa4= -github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= -github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= -github.com/kserve/kserve v0.15.0 h1:TK6lN8XVIzc/5KQ76GULEKYiJ2obkTRRI3Z2Iwuld5Y= -github.com/kserve/kserve v0.15.0/go.mod h1:O/SboK/ju1hEUrEMKEFZuANARPW3MB9F4xpqieJP4wk= -github.com/kserve/kserve v0.15.2 h1:uph7Umk331fvBMrHHpTJNDt6+lf3IAuyiskaQw8wIAk= -github.com/kserve/kserve v0.15.2/go.mod h1:IbSh7skZ/01A3G/hRLOv2osAJBAR30I8QAXGuL/ghj0= -github.com/ktrysmt/go-bitbucket v0.6.4 h1:C8dUGp0qkwncKtAnozHCbbqhptefzEd1I0sfnuy9rYQ= -github.com/ktrysmt/go-bitbucket v0.6.4/go.mod h1:9u0v3hsd2rqCHRIpbir1oP7F58uo5dq19sBYvuMoyQ4= -github.com/kubeflow/model-registry/pkg/openapi v0.3.0 h1:ioNr/dm37J6eW63DQAcIHIPZ8zDF4v5GJwMHfs9doUM= -github.com/kubeflow/model-registry/pkg/openapi v0.3.0/go.mod h1:0V0wF5hGlLDSNS+on0MTnEOFiubfVYNc7QhuthKBu+8= -github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= -github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= -github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= -github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= -github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= -github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= -github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= -github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4 
h1:sIXJOMrYnQZJu7OB7ANSF4MYri2fTEGIsRLz6LwI4xE= -github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4/go.mod h1:amey7yeodaJhXSbf/TlLvWiqQfLOSpEk//mLlc+axEk= -github.com/magiconair/properties v1.8.9 h1:nWcCbLq1N2v/cpNsy5WvQ37Fb+YElfq20WJ/a8RkpQM= -github.com/magiconair/properties v1.8.9/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= -github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= -github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= -github.com/markbates/pkger v0.15.1 h1:3MPelV53RnGSW07izx5xGxl4e/sdRD6zqseIk0rMASY= -github.com/markbates/pkger v0.15.1/go.mod h1:0JoVlrol20BSywW79rN3kdFFsE5xYM+rSCQDXbLhiuI= -github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE= -github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= -github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= -github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/microsoft/go-mssqldb v1.0.0 h1:k2p2uuG8T5T/7Hp7/e3vMGTnnR0sU4h8d1CcC71iLHU= -github.com/microsoft/go-mssqldb v1.0.0/go.mod h1:+4wZTUnz/SV6nffv+RRRB/ss8jPng5Sho2SmM1l2ts4= -github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs= -github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= -github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI= 
-github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= -github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= -github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= -github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= -github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ= -github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo= -github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= -github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= -github.com/moby/spdystream v0.5.0 h1:7r0J1Si3QO/kjRitvSLVVFUjxMEb/YLj6S9FF62JBCU= -github.com/moby/spdystream v0.5.0/go.mod h1:xBAYlnt/ay+11ShkdFKNAG7LsyK/tmNBVvVOwrfMgdI= -github.com/moby/sys/mount v0.3.4 h1:yn5jq4STPztkkzSKpZkLcmjue+bZJ0u2AuQY1iNI1Ww= -github.com/moby/sys/mount v0.3.4/go.mod h1:KcQJMbQdJHPlq5lcYT+/CjatWM4PuxKe+XLSVS4J6Os= -github.com/moby/sys/mountinfo v0.7.2 h1:1shs6aH5s4o5H2zQLn796ADW1wMrIwHsyJ2v9KouLrg= -github.com/moby/sys/mountinfo v0.7.2/go.mod h1:1YOa8w8Ih7uW0wALDUgT1dTTSBrZ+HiBLGws92L2RU4= -github.com/moby/sys/reexec v0.1.0 h1:RrBi8e0EBTLEgfruBOFcxtElzRGTEUkeIFaVXgU7wok= -github.com/moby/sys/reexec v0.1.0/go.mod h1:EqjBg8F3X7iZe5pU6nRZnYCMUTXoxsjiIfHup5wYIN8= -github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= -github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= -github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= 
-github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= -github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg= -github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= -github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= -github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= -github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= -github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= -github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= -github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= -github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= -github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= -github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= -github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= -github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= -github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= -github.com/mutecomm/go-sqlcipher/v4 v4.4.0 h1:sV1tWCWGAVlPhNGT95Q+z/txFxuhAYWwHD1afF5bMZg= -github.com/mutecomm/go-sqlcipher/v4 v4.4.0/go.mod h1:PyN04SaWalavxRGH9E8ZftG6Ju7rsPrGmQRjrEaVpiY= -github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU= -github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus= -github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= 
-github.com/nakagami/firebirdsql v0.0.0-20190310045651-3c02a58cfed8 h1:P48LjvUQpTReR3TQRbxSeSBsMXzfK0uol7eRcr7VBYQ= -github.com/nakagami/firebirdsql v0.0.0-20190310045651-3c02a58cfed8/go.mod h1:86wM1zFnC6/uDBfZGNwB65O+pR2OFi5q/YQaEUid1qA= -github.com/neo4j/neo4j-go-driver v1.8.1-0.20200803113522-b626aa943eba h1:fhFP5RliM2HW/8XdcO5QngSfFli9GcRIpMXvypTQt6E= -github.com/neo4j/neo4j-go-driver v1.8.1-0.20200803113522-b626aa943eba/go.mod h1:ncO5VaFWh0Nrt+4KT4mOZboaczBZcLuHrG+/sUeP8gI= -github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= -github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= -github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 h1:G7ERwszslrBzRxj//JalHPu/3yz+De2J+4aLtSRlHiY= -github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037/go.mod h1:2bpvgLBZEtENV5scfDFEtB/5+1M4hkQhDQrccEJ/qGw= -github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 h1:bQx3WeLcUWy+RletIKwUIt4x3t8n2SxavmoclizMb8c= -github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90/go.mod h1:y5+oSEHCPT/DGrS++Wc/479ERge0zTFxaF8PbGKcg2o= -github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= -github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= -github.com/open-telemetry/opentelemetry-operator v0.113.0 h1:EoN3SLwF9dP5Ou7gFcfYligdwzedsEQBewQdagk5E3U= -github.com/open-telemetry/opentelemetry-operator v0.113.0/go.mod h1:eQ8W+MxP+q5Tewf5Cx1vNXvRynjP9JNgrBbUO7TqjXQ= -github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= -github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= -github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= -github.com/openzipkin/zipkin-go v0.4.3 h1:9EGwpqkgnwdEIJ+Od7QVSEIH+ocmm5nPat0G7sjsSdg= -github.com/openzipkin/zipkin-go 
v0.4.3/go.mod h1:M9wCJZFWCo2RiY+o1eBCEMe0Dp2S5LDHcMZmk3RmK7c= -github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= -github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= -github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= -github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= -github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= -github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= -github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI= -github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= -github.com/pierrec/lz4/v4 v4.1.16 h1:kQPfno+wyx6C5572ABwV+Uo3pDFzQ7yhyGchSyRda0c= -github.com/pierrec/lz4/v4 v4.1.16/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= -github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= -github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= -github.com/pkg/sftp v1.13.7 h1:uv+I3nNJvlKZIQGSr8JVQLNHFU9YhhNpvC14Y6KgmSM= -github.com/pkg/sftp v1.13.7/go.mod h1:KMKI0t3T6hfA+lTR/ssZdunHo+uwq7ghoN09/FSu3DY= -github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= -github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= -github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= -github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= -github.com/pquerna/cachecontrol v0.1.0 h1:yJMy84ti9h/+OEWa752kBTKv4XC30OtVVHYv/8cTqKc= -github.com/pquerna/cachecontrol v0.1.0/go.mod 
h1:NrUG3Z7Rdu85UNR3vm7SOsl1nFIeSiQnrHV5K9mBcUI= -github.com/prometheus/prometheus v0.55.1 h1:+NM9V/h4A+wRkOyQzGewzgPPgq/iX2LUQoISNvmjZmI= -github.com/prometheus/prometheus v0.55.1/go.mod h1:GGS7QlWKCqCbcEzWsVahYIfQwiGhcExkarHyLJTsv6I= -github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk= -github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= -github.com/rogpeppe/fastuuid v1.2.0 h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s= -github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rqlite/gorqlite v0.0.0-20230708021416-2acd02b70b79 h1:V7x0hCAgL8lNGezuex1RW1sh7VXXCqfw8nXZti66iFg= -github.com/rqlite/gorqlite v0.0.0-20230708021416-2acd02b70b79/go.mod h1:xF/KoXmrRyahPfo5L7Szb5cAAUl53dMWBh9cMruGEZg= -github.com/rs/dnscache v0.0.0-20230804202142-fc85eb664529 h1:18kd+8ZUlt/ARXhljq+14TwAoKa61q6dX8jtwOf6DH8= -github.com/rs/dnscache v0.0.0-20230804202142-fc85eb664529/go.mod h1:qe5TWALJ8/a1Lqznoc5BDHpYX/8HU60Hm2AwRmqzxqA= -github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww= -github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY= -github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= -github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= -github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= -github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo= -github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k= -github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= -github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= -github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 
h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4= -github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY= -github.com/shirou/gopsutil/v4 v4.25.1 h1:QSWkTc+fu9LTAWfkZwZ6j8MSUk4A2LV7rbH0ZqmLjXs= -github.com/shirou/gopsutil/v4 v4.25.1/go.mod h1:RoUCUpndaJFtT+2zsZzzmhvbfGoDCJ7nFXKJf8GqJbI= -github.com/shirou/gopsutil/v4 v4.25.5 h1:rtd9piuSMGeU8g1RMXjZs9y9luK5BwtnG7dZaQUJAsc= -github.com/shirou/gopsutil/v4 v4.25.5/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c= -github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= -github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/snowflakedb/gosnowflake v1.6.19 h1:KSHXrQ5o7uso25hNIzi/RObXtnSGkFgie91X82KcvMY= -github.com/snowflakedb/gosnowflake v1.6.19/go.mod h1:FM1+PWUdwB9udFDsXdfD58NONC0m+MlOSmQRvimobSM= -github.com/soheilhy/cmux v0.1.5 h1:jjzc5WVemNEDTLwv9tlmemhC73tI08BNOIGwBOo10Js= -github.com/soheilhy/cmux v0.1.5/go.mod h1:T7TcVDs9LWfQgPlPsdngu6I6QIoyIFZDDC6sNE1GqG0= -github.com/sosodev/duration v1.3.1 h1:qtHBDMQ6lvMQsL15g4aopM4HEfOaYuhWBw3NPTtlqq4= -github.com/sosodev/duration v1.3.1/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg= -github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= -github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= -github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= -github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= -github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs= -github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4= -github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= 
-github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= -github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y= -github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= -github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= -github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= -github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI= -github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg= -github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4= -github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4= -github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE= -github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g= -github.com/stoewer/go-strcase v1.3.0 h1:g0eASXYtp+yvN9fK8sH94oCIk0fau9uV1/ZdJ0AVEzs= -github.com/stoewer/go-strcase v1.3.0/go.mod h1:fAH5hQ5pehh+j3nZfvwdk2RgEgQjAoM8wodgtPmh1xo= -github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= -github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= -github.com/testcontainers/testcontainers-go v0.36.0 h1:YpffyLuHtdp5EUsI5mT4sRw8GZhO/5ozyDT1xWGXt00= -github.com/testcontainers/testcontainers-go v0.36.0/go.mod h1:yk73GVJ0KUZIHUtFna6MO7QS144qYpoY8lEEtU9Hed0= -github.com/testcontainers/testcontainers-go v0.38.0 h1:d7uEapLcv2P8AvH8ahLqDMMxda2W9gQN1nRbHS28HBw= -github.com/testcontainers/testcontainers-go v0.38.0/go.mod h1:C52c9MoHpWO+C4aqmgSU+hxlR5jlEayWtgYrb8Pzz1w= -github.com/testcontainers/testcontainers-go/modules/mysql v0.38.0 h1:msUPAl0LVBalG3m2KhmbFHeRrxCw36xmQFCEhzqsvqo= -github.com/testcontainers/testcontainers-go/modules/mysql v0.38.0/go.mod h1:PFyaiqBahyh1BMz23ij99z4LJGsDpkpuZKz6rchlUWc= -github.com/testcontainers/testcontainers-go/modules/postgres 
v0.38.0 h1:KFdx9A0yF94K70T6ibSuvgkQQeX1xKlZVF3hEagXEtY= -github.com/testcontainers/testcontainers-go/modules/postgres v0.38.0/go.mod h1:T/QRECND6N6tAKMxF1Za+G2tpwnGEHcODzHRsgIpw9M= -github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= -github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= -github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= -github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= -github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= -github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= -github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= -github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= -github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= -github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= -github.com/tmc/grpc-websocket-proxy v0.0.0-20220101234140-673ab2c3ae75 h1:6fotK7otjonDflCTK0BCfls4SPy3NcCVb5dqqmbRknE= -github.com/tmc/grpc-websocket-proxy v0.0.0-20220101234140-673ab2c3ae75/go.mod h1:KO6IkyS8Y3j8OdNO85qEYBsRPuteD+YciPomcXdrMnk= -github.com/tsenart/go-tsz v0.0.0-20180814235614-0bd30b3df1c3 h1:pcQGQzTwCg//7FgVywqge1sW9Yf8VMsMdG58MI5kd8s= -github.com/tsenart/go-tsz v0.0.0-20180814235614-0bd30b3df1c3/go.mod h1:SWZznP1z5Ki7hDT2ioqiFKEse8K9tU2OUvaRI0NeGQo= -github.com/tsenart/vegeta/v12 v12.12.0 h1:FKMMNomd3auAElO/TtbXzRFXAKGee6N/GKCGweFVm2U= -github.com/tsenart/vegeta/v12 v12.12.0/go.mod h1:gpdfR++WHV9/RZh4oux0f6lNPhsOH8pCjIGUlcPQe1M= -github.com/vbatts/tar-split v0.11.3 h1:hLFqsOLQ1SsppQNTMpkpPXClLDfC2A3Zgy9OUU+RVck= -github.com/vbatts/tar-split v0.11.3/go.mod h1:9QlHN18E+fEH7RdG+QAJJcuya3rqT7eXSTY7wGrAokY= -github.com/vektah/gqlparser/v2 v2.5.19 h1:bhCPCX1D4WWzCDvkPl4+TP1N8/kLrWnp43egplt7iSg= 
-github.com/vektah/gqlparser/v2 v2.5.19/go.mod h1:y7kvl5bBlDeuWIvLtA9849ncyvx6/lj06RsMrEjVy3U= -github.com/xanzy/go-gitlab v0.15.0 h1:rWtwKTgEnXyNUGrOArN7yyc3THRkpYcKXIXia9abywQ= -github.com/xanzy/go-gitlab v0.15.0/go.mod h1:8zdQa/ri1dfn8eS3Ir1SyfvOKlw7WBJ8DVThkpGiXrs= -github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= -github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= -github.com/xdg-go/scram v1.1.1 h1:VOMT+81stJgXW3CpHyqHN3AXDYIMsx56mEFrB37Mb/E= -github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= -github.com/xdg-go/stringprep v1.0.3 h1:kdwGpVNwPFtjs98xCGkHjQtGKh86rDcRZN17QEMCOIs= -github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= -github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8Ydu2Bstc= -github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU= -github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510 h1:S2dVYn90KE98chqDkyE9Z4N61UnQd+KOfgp5Iu53llk= -github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= -github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= -github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= -github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= -github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM= -github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= -github.com/zeebo/xxh3 v1.0.2 
h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= -github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= -gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b h1:7gd+rd8P3bqcn/96gOZa3F5dpJr/vEiDQYlNb/y2uNs= -gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b/go.mod h1:T3BPAOm2cqquPa0MKWeNkmOM5RQsRhkrwMWonFMN7fE= -go.etcd.io/bbolt v1.3.11 h1:yGEzV1wPz2yVCLsD8ZAiGHhHVlczyC9d1rP43/VCRJ0= -go.etcd.io/bbolt v1.3.11/go.mod h1:dksAq7YMXoljX0xu6VF5DMZGbhYYoLUalEiSySYAS4I= -go.etcd.io/etcd/api/v3 v3.5.21 h1:A6O2/JDb3tvHhiIz3xf9nJ7REHvtEFJJ3veW3FbCnS8= -go.etcd.io/etcd/api/v3 v3.5.21/go.mod h1:c3aH5wcvXv/9dqIw2Y810LDXJfhSYdHQ0vxmP3CCHVY= -go.etcd.io/etcd/client/pkg/v3 v3.5.21 h1:lPBu71Y7osQmzlflM9OfeIV2JlmpBjqBNlLtcoBqUTc= -go.etcd.io/etcd/client/pkg/v3 v3.5.21/go.mod h1:BgqT/IXPjK9NkeSDjbzwsHySX3yIle2+ndz28nVsjUs= -go.etcd.io/etcd/client/v2 v2.305.21 h1:eLiFfexc2mE+pTLz9WwnoEsX5JTTpLCYVivKkmVXIRA= -go.etcd.io/etcd/client/v2 v2.305.21/go.mod h1:OKkn4hlYNf43hpjEM3Ke3aRdUkhSl8xjKjSf8eCq2J8= -go.etcd.io/etcd/client/v3 v3.5.21 h1:T6b1Ow6fNjOLOtM0xSoKNQt1ASPCLWrF9XMHcH9pEyY= -go.etcd.io/etcd/client/v3 v3.5.21/go.mod h1:mFYy67IOqmbRf/kRUvsHixzo3iG+1OF2W2+jVIQRAnU= -go.etcd.io/etcd/pkg/v3 v3.5.21 h1:jUItxeKyrDuVuWhdh0HtjUANwyuzcb7/FAeUfABmQsk= -go.etcd.io/etcd/pkg/v3 v3.5.21/go.mod h1:wpZx8Egv1g4y+N7JAsqi2zoUiBIUWznLjqJbylDjWgU= -go.etcd.io/etcd/raft/v3 v3.5.21 h1:dOmE0mT55dIUsX77TKBLq+RgyumsQuYeiRQnW/ylugk= -go.etcd.io/etcd/raft/v3 v3.5.21/go.mod h1:fmcuY5R2SNkklU4+fKVBQi2biVp5vafMrWUEj4TJ4Cs= -go.etcd.io/etcd/server/v3 v3.5.21 h1:9w0/k12majtgarGmlMVuhwXRI2ob3/d1Ik3X5TKo0yU= -go.etcd.io/etcd/server/v3 v3.5.21/go.mod h1:G1mOzdwuzKT1VRL7SqRchli/qcFrtLBTAQ4lV20sXXo= -go.mongodb.org/mongo-driver v1.7.5 h1:ny3p0reEpgsR2cfA5cjgwFZg3Cv/ofFh/8jbhGtz9VI= -go.mongodb.org/mongo-driver v1.7.5/go.mod h1:VXEWRZ6URJIkUq2SCAyapmhH0ZLRBP+FT4xhp5Zvxng= -go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= -go.opencensus.io 
v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= -go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/collector/featuregate v1.24.0 h1:DEqDsuJgxjZ3E5JNC9hXCd4sWGFiF7h9kaziODuqwFY= -go.opentelemetry.io/collector/featuregate v1.24.0/go.mod h1:3GaXqflNDVwWndNGBJ1+XJFy3Fv/XrFgjMN60N3z7yg= -go.opentelemetry.io/contrib/detectors/gcp v1.34.0 h1:JRxssobiPg23otYU5SbWtQC//snGVIM3Tx6QRzlQBao= -go.opentelemetry.io/contrib/detectors/gcp v1.34.0/go.mod h1:cV4BMFcscUR/ckqLkbfQmF0PRsq8w/lMGzdbCSveBHo= -go.opentelemetry.io/contrib/detectors/gcp v1.36.0 h1:F7q2tNlCaHY9nMKHR6XH9/qkp8FktLnIcy6jJNyOCQw= -go.opentelemetry.io/contrib/detectors/gcp v1.36.0/go.mod h1:IbBN8uAIIx734PTonTPxAxnjc2pQTxWNkwfstZ+6H2k= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0 h1:rgMkmiGfix9vFJDcDi1PK8WEQP4FLQwLDfhp5ZLpFeE= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0/go.mod h1:ijPqXp5P6IRRByFVVg9DY8P5HkxkHE5ARIa+86aXPf4= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ= -go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= -go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= -go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= -go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 h1:Vh5HayB/0HHfOQA7Ctx69E/Y/DcQSMPpKANYVMQ7fBA= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0/go.mod h1:cpgtDBaqD/6ok/UG0jT15/uKjAY8mRA53diogHBg3UI= 
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0 h1:5pojmb1U1AogINhN3SurB+zm/nIcusopeBNp42f45QM= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0/go.mod h1:57gTHJSE5S1tqg+EKsLPlTWhpHMsWlVmer+LA926XiA= -go.opentelemetry.io/otel/exporters/prometheus v0.56.0 h1:GnCIi0QyG0yy2MrJLzVrIM7laaJstj//flf1zEJCG+E= -go.opentelemetry.io/otel/exporters/prometheus v0.56.0/go.mod h1:JQcVZtbIIPM+7SWBB+T6FK+xunlyidwLp++fN0sUaOk= -go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= -go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= -go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= -go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= -go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY= -go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg= -go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs= -go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY= -go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o= -go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w= -go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis= -go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4= -go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= -go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= -go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= -go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= -go.opentelemetry.io/proto/otlp v1.4.0 
h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg= -go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY= -go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= -go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= -golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= -golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= -golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= -golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= -golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067 h1:KYGJGHOQy8oSi1fDlSpcZF0+juKwk/hEMv5SiwHogR0= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422 h1:QzoH/1pFpZguR8NrRHLcO6jKqfv2zpuSqZLgdm7ZmjI= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6 h1:Tus/Y4w3V77xDsGwKUC8a/QrV7jScpU557J77lFffNs= -golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= -golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= -golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ= -golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc= -golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= -golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= -golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= -golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 h1:zf5N6UOrA487eEFacMePxjXAJctxKmyjKUsjA11Uzuk= -golang.org/x/telemetry 
v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= -golang.org/x/telemetry v0.0.0-20250807160809-1a19826ec488 h1:3doPGa+Gg4snce233aCWnbZVFsyFMo/dR40KK/6skyE= -golang.org/x/telemetry v0.0.0-20250807160809-1a19826ec488/go.mod h1:fGb/2+tgXXjhjHsTNdVEEMZNWA0quBnfrO+AfoDSAKw= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= -golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU= -golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= -google.golang.org/api v0.226.0 h1:9A29y1XUD+YRXfnHkO66KggxHBZWg9LsTGqm7TkUvtQ= -google.golang.org/api v0.226.0/go.mod h1:WP/0Xm4LVvMOCldfvOISnWquSRWbG2kArDZcg+W2DbY= -google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= -google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= -google.golang.org/genproto v0.0.0-20241118233622-e639e219e697 h1:ToEetK57OidYuqD4Q5w+vfEnPvPpuTwedCNVohYJfNk= -google.golang.org/genproto v0.0.0-20241118233622-e639e219e697/go.mod h1:JJrvXBWRZaFMxBufik1a4RpFw4HhgVtBBWQeQgUj2cc= -google.golang.org/genproto/googleapis/api v0.0.0-20250218202821-56aae31c358a h1:nwKuGPlUAt+aR+pcrkfFRrTU1BVrSmYyYMxYbUIVHr0= -google.golang.org/genproto/googleapis/api v0.0.0-20250218202821-56aae31c358a/go.mod h1:3kWAYMk1I75K4vykHtKt2ycnOgpA6974V7bREqbsenU= -google.golang.org/genproto/googleapis/api v0.0.0-20250528174236-200df99c418a h1:SGktgSolFCo75dnHJF2yMvnns6jCmHFJ0vE4Vn2JKvQ= -google.golang.org/genproto/googleapis/api v0.0.0-20250528174236-200df99c418a/go.mod h1:a77HrdMjoeKbnd2jmgcWdaS++ZLZAEq3orIOAEIKiVw= -google.golang.org/genproto/googleapis/bytestream v0.0.0-20250303144028-a0af3efb3deb h1:kw/Q892zrnljh8PXAIHmsCXgpxtSyWL4oV1eRnFtdeg= -google.golang.org/genproto/googleapis/bytestream v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:35wIojE/F1ptq1nfNDNjtowabHoMSA2qQs7+smpCO5s= 
-google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4 h1:iK2jbkWL86DXjEx0qiHcRE9dE4/Ahua5k6V8OWFb//c= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4/go.mod h1:LuRYeWDFV6WOn90g357N17oMCaxpgCnbi/44qJvDn2I= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250528174236-200df99c418a h1:v2PbRU4K3llS09c7zodFpNePeamkAwG3mPrAery9VeE= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250528174236-200df99c418a/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= -google.golang.org/grpc v1.72.0 h1:S7UkcVa60b5AAQTaO6ZKamFp1zMZSU0fGDK2WZLbBnM= -google.golang.org/grpc v1.72.0/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM= -google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4= -google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= -google.golang.org/grpc/examples v0.0.0-20230224211313-3775f633ce20 h1:MLBCGN1O7GzIx+cBiwfYPwtmZ41U3Mn/cotLJciaArI= -google.golang.org/grpc/examples v0.0.0-20230224211313-3775f633ce20/go.mod h1:Nr5H8+MlGWr5+xX/STzdoEqJrO+YteqFbMyCsrb6mH0= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= -gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= -gopkg.in/go-jose/go-jose.v2 v2.6.3 h1:nt80fvSDlhKWQgSWyHyy5CfmlQr+asih51R8PTWNKKs= -gopkg.in/go-jose/go-jose.v2 v2.6.3/go.mod h1:zzZDPkNNw/c9IE7Z9jr11mBZQhKQTMzoEEIoEdZlFBI= -gopkg.in/go-playground/validator.v9 v9.31.0 h1:bmXmP2RSNtFES+bn4uYuHT7iJFJv7Vj+an+ZQdDaD1M= -gopkg.in/go-playground/validator.v9 v9.31.0/go.mod h1:+c9/zcJMFNgbLvly1L1V+PpxWdVbfP1avr/N00E2vyQ= -gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= -gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc= -gopkg.in/natefinch/lumberjack.v2 
v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gorm.io/driver/mysql v1.6.0 h1:eNbLmNTpPpTOVZi8MMxCi2aaIm0ZpInbORNXDwyLGvg= -gorm.io/driver/mysql v1.6.0/go.mod h1:D/oCC2GWK3M/dqoLxnOlaNKmXz8WNTfcS9y5ovaSqKo= -gorm.io/driver/postgres v1.6.0 h1:2dxzU8xJ+ivvqTRph34QX+WrRaJlmfyPqXmoGVjMBa4= -gorm.io/driver/postgres v1.6.0/go.mod h1:vUw0mrGgrTK+uPHEhAdV4sfFELrByKVGnaVRkXDhtWo= -gorm.io/gorm v1.30.1 h1:lSHg33jJTBxs2mgJRfRZeLDG+WZaHYCk3Wtfl6Ngzo4= -gorm.io/gorm v1.30.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc h1:/hemPrYIhOhy8zYrNj+069zDB68us2sMGsfkFJO0iZs= -istio.io/api v1.24.2 h1:jYjcN6Iq0RPtQj/3KMFsybxmfqmjGN/dxhL7FGJEdIM= -istio.io/api v1.24.2/go.mod h1:MQnRok7RZ20/PE56v0LxmoWH0xVxnCQPNuf9O7PAN1I= -istio.io/client-go v1.24.2 h1:JTTfBV6dv+AAW+AfccyrdX4T1f9CpsXd1Yzo1s/IYAI= -istio.io/client-go v1.24.2/go.mod h1:dgZ9EmJzh1EECzf6nQhwNL4R6RvlyeH/RXeNeNp/MRg= -k8s.io/apiserver v0.33.0 h1:QqcM6c+qEEjkOODHppFXRiw/cE2zP85704YrQ9YaBbc= -k8s.io/apiserver v0.33.0/go.mod h1:EixYOit0YTxt8zrO2kBU7ixAtxFce9gKGq367nFmqI8= -k8s.io/code-generator v0.33.0 h1:B212FVl6EFqNmlgdOZYWNi77yBv+ed3QgQsMR8YQCw4= -k8s.io/code-generator v0.33.0/go.mod h1:KnJRokGxjvbBQkSJkbVuBbu6z4B0rC7ynkpY5Aw6m9o= -k8s.io/component-base v0.33.0 h1:Ot4PyJI+0JAD9covDhwLp9UNkUja209OzsJ4FzScBNk= -k8s.io/component-base v0.33.0/go.mod h1:aXYZLbw3kihdkOPMDhWbjGCO6sg+luw554KP51t8qCU= -k8s.io/component-helpers v0.32.1 h1:TwdsSM1vW9GjnfX18lkrZbwE5G9psCIS2/rhenTDXd8= -k8s.io/component-helpers v0.32.1/go.mod h1:1JT1Ei3FD29yFQ18F3laj1WyvxYdHIhyxx6adKMFQXI= -k8s.io/gengo v0.0.0-20240404160639-a0386bf69313 h1:wBIDZID8ju9pwOiLlV22YYKjFGtiNSWgHf5CnKLRUuM= -k8s.io/gengo v0.0.0-20240404160639-a0386bf69313/go.mod 
h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= -k8s.io/gengo/v2 v2.0.0-20250207200755-1244d31929d7 h1:2OX19X59HxDprNCVrWi6jb7LW1PoqTlYqEq5H2oetog= -k8s.io/gengo/v2 v2.0.0-20250207200755-1244d31929d7/go.mod h1:EJykeLsmFC60UQbYJezXkEsG2FLrt0GPNkU5iK5GWxU= -k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8= -k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= -k8s.io/kms v0.33.0 h1:fhQSW/vyaWDhMp0vDuO/sLg2RlGZf4F77beSXcB4/eE= -k8s.io/kms v0.33.0/go.mod h1:C1I8mjFFBNzfUZXYt9FZVJ8MJl7ynFbGgZFbBzkBJ3E= -knative.dev/caching v0.0.0-20250117155405-a76aa7cd2bb6 h1:a6oO186Ibm9iBmy9GzJPlJCVJnPPCppwo8NEm12Nnus= -knative.dev/caching v0.0.0-20250117155405-a76aa7cd2bb6/go.mod h1:xCMZSPoup5BSZ5GQ/Xa8xTEWNIZLLHx9mhPMeREt/ck= -knative.dev/hack v0.0.0-20250116150306-c142b4835bc5 h1:CwLVoSblWmfaIkgRFX2LcEA0uYZo+nOhdPatsz+QL9w= -knative.dev/hack v0.0.0-20250116150306-c142b4835bc5/go.mod h1:R0ritgYtjLDO9527h5vb5X6gfvt5LCrJ55BNbVDsWiY= -knative.dev/networking v0.0.0-20250117155906-67d1c274ba6a h1:FaDPXtv42+AkYh/mE269pttPSZ3fDVAjJiEsYUaM4SM= -knative.dev/networking v0.0.0-20250117155906-67d1c274ba6a/go.mod h1:AIKYMfZydhwXR/60c/3KXEnqEnH6aNEEqulifdqJVcQ= -knative.dev/pkg v0.0.0-20250117084104-c43477f0052b h1:a+gP7Yzu5NmoX2w1p8nfTgmSKF+aHLKGzqYT82ijJTw= -knative.dev/pkg v0.0.0-20250117084104-c43477f0052b/go.mod h1:bedSpkdLybR6JhL1J7XDLpd+JMKM/x8M5Apr80i5TeE= -knative.dev/serving v0.44.0 h1:c6TXhoSAI6eXt0/1ET3C69jMWYA4ES9FskSan/fBaac= -knative.dev/serving v0.44.0/go.mod h1:9bFONngDZtkdYZkP5ko9LDS9ZelnFY9SaPoHKG0vFxs= -lukechampine.com/uint128 v1.2.0 h1:mBi/5l91vocEN8otkC5bDLhi2KdCticRiwbdB0O+rjI= -lukechampine.com/uint128 v1.2.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= -modernc.org/b v1.0.0 h1:vpvqeyp17ddcQWF29Czawql4lDdABCDRbXRAS4+aF2o= -modernc.org/b v1.0.0/go.mod h1:uZWcZfRj1BpYzfN9JTerzlNUnnPsV9O2ZA8JsRcubNg= -modernc.org/cc/v3 v3.36.3 h1:uISP3F66UlixxWEcKuIWERa4TwrZENHSL8tWxZz8bHg= -modernc.org/cc/v3 v3.36.3/go.mod 
h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= -modernc.org/ccgo/v3 v3.16.9 h1:AXquSwg7GuMk11pIdw7fmO1Y/ybgazVkMhsZWCV0mHM= -modernc.org/ccgo/v3 v3.16.9/go.mod h1:zNMzC9A9xeNUepy6KuZBbugn3c0Mc9TeiJO4lgvkJDo= -modernc.org/db v1.0.0 h1:2c6NdCfaLnshSvY7OU09cyAY0gYXUZj4lmg5ItHyucg= -modernc.org/db v1.0.0/go.mod h1:kYD/cO29L/29RM0hXYl4i3+Q5VojL31kTUVpVJDw0s8= -modernc.org/file v1.0.0 h1:9/PdvjVxd5+LcWUQIfapAWRGOkDLK90rloa8s/au06A= -modernc.org/file v1.0.0/go.mod h1:uqEokAEn1u6e+J45e54dsEA/pw4o7zLrA2GwyntZzjw= -modernc.org/fileutil v1.0.0 h1:Z1AFLZwl6BO8A5NldQg/xTSjGLetp+1Ubvl4alfGx8w= -modernc.org/fileutil v1.0.0/go.mod h1:JHsWpkrk/CnVV1H/eGlFf85BEpfkrp56ro8nojIq9Q8= -modernc.org/golex v1.0.0 h1:wWpDlbK8ejRfSyi0frMyhilD3JBvtcx2AdGDnU+JtsE= -modernc.org/golex v1.0.0/go.mod h1:b/QX9oBD/LhixY6NDh+IdGv17hgB+51fET1i2kPSmvk= -modernc.org/internal v1.0.0 h1:XMDsFDcBDsibbBnHB2xzljZ+B1yrOVLEFkKL2u15Glw= -modernc.org/internal v1.0.0/go.mod h1:VUD/+JAkhCpvkUitlEOnhpVxCgsBI90oTzSCRcqQVSM= -modernc.org/libc v1.17.1 h1:Q8/Cpi36V/QBfuQaFVeisEBs3WqoGAJprZzmf7TfEYI= -modernc.org/libc v1.17.1/go.mod h1:FZ23b+8LjxZs7XtFMbSzL/EhPxNbfZbErxEHc7cbD9s= -modernc.org/lldb v1.0.0 h1:6vjDJxQEfhlOLwl4bhpwIz00uyFK4EmSYcbwqwbynsc= -modernc.org/lldb v1.0.0/go.mod h1:jcRvJGWfCGodDZz8BPwiKMJxGJngQ/5DrRapkQnLob8= -modernc.org/mathutil v1.5.0 h1:rV0Ko/6SfM+8G+yKiyI830l3Wuz1zRutdslNoQ0kfiQ= -modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= -modernc.org/memory v1.2.1 h1:dkRh86wgmq/bJu2cAS2oqBCz/KsMZU7TUM4CibQ7eBs= -modernc.org/memory v1.2.1/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU= -modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4= -modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= -modernc.org/ql v1.0.0 h1:bIQ/trWNVjQPlinI6jdOQsi195SIturGo3mp5hsDqVU= -modernc.org/ql v1.0.0/go.mod h1:xGVyrLIatPcO2C1JvI/Co8c0sr6y91HKFNy4pt9JXEY= -modernc.org/sortutil v1.1.0 h1:oP3U4uM+NT/qBQcbg/K2iqAX0Nx7B1b6YZtq3Gk/PjM= 
-modernc.org/sortutil v1.1.0/go.mod h1:ZyL98OQHJgH9IEfN71VsamvJgrtRX9Dj2gX+vH86L1k= -modernc.org/sqlite v1.18.1 h1:ko32eKt3jf7eqIkCgPAeHMBXw3riNSLhl2f3loEF7o8= -modernc.org/sqlite v1.18.1/go.mod h1:6ho+Gow7oX5V+OiOQ6Tr4xeqbx13UZ6t+Fw9IRUG4d4= -modernc.org/strutil v1.1.3 h1:fNMm+oJklMGYfU9Ylcywl0CO5O6nTfaowNsh2wpPjzY= -modernc.org/strutil v1.1.3/go.mod h1:MEHNA7PdEnEwLvspRMtWTNnp2nnyvMfkimT1NKNAGbw= -modernc.org/token v1.0.0 h1:a0jaWiNMDhDUtqOj09wvjWWAqd3q7WpBulmL9H2egsk= -modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= -modernc.org/zappy v1.0.0 h1:dPVaP+3ueIUv4guk8PuZ2wiUGcJ1WUVvIheeSSTD0yk= -modernc.org/zappy v1.0.0/go.mod h1:hHe+oGahLVII/aTTyWK/b53VDHMAGCBYYeZ9sn83HC4= -rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE= -sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.31.2 h1:jpcvIRr3GLoUoEKRkHKSmGjxb6lWwrBlJsXc+eUYQHM= -sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.31.2/go.mod h1:Ve9uj1L+deCXFrPOk1LpFXqTg7LCFzFso6PA48q/XZw= -sigs.k8s.io/gateway-api v1.2.1 h1:fZZ/+RyRb+Y5tGkwxFKuYuSRQHu9dZtbjenblleOLHM= -sigs.k8s.io/gateway-api v1.2.1/go.mod h1:EpNfEXNjiYfUJypf0eZ0P5iXA9ekSGWaS1WgPaM42X0= diff --git a/clients/ui/bff/internal/api/app.go b/clients/ui/bff/internal/api/app.go index db5e770cfe..555d57fe53 100644 --- a/clients/ui/bff/internal/api/app.go +++ b/clients/ui/bff/internal/api/app.go @@ -64,9 +64,15 @@ const ( CatalogModelName = "model_name" CatalogPathPrefix = ApiPathPrefix + "/model_catalog" CatalogModelListPath = CatalogPathPrefix + "/models" + CatalogFilterOptionListPath = CatalogPathPrefix + "/models/filter_options" CatalogSourceListPath = CatalogPathPrefix + "/sources" CatalogSourceModelCatchAllPath = CatalogPathPrefix + "/sources/:" + CatalogSourceId + "/models/*" + CatalogModelName CatalogSourceModelArtifactsCatchAll = CatalogPathPrefix + "/sources/:" + CatalogSourceId + "/artifacts/*" + CatalogModelName + + ModelCatalogSettingsPathPrefix = SettingsPath + "/model_catalog" 
+ ModelCatalogSettingsSourceConfigListPath = ModelCatalogSettingsPathPrefix + "/source_configs" + ModelCatalogSettingsSourceConfigPath = ModelCatalogSettingsSourceConfigListPath + "/:" + CatalogSourceId + CatalogSourcePreviewPath = ModelCatalogSettingsPathPrefix + "/source_preview" ) type App struct { @@ -172,6 +178,10 @@ func NewApp(cfg config.EnvConfig, logger *slog.Logger) (*App, error) { return nil, fmt.Errorf("failed to create ModelRegistry Catalog client: %w", err) } + if err != nil { + return nil, fmt.Errorf("failed to create ModelCatalogSettings client: %w", err) + } + app := &App{ config: cfg, logger: logger, @@ -223,8 +233,9 @@ func (app *App) Routes() http.Handler { // Model catalog HTTP client routes (requests that we forward to Model Catalog API) apiRouter.GET(CatalogModelListPath, app.AttachNamespace(app.AttachModelCatalogRESTClient(app.GetAllCatalogModelsAcrossSourcesHandler))) apiRouter.GET(CatalogSourceListPath, app.AttachNamespace(app.AttachModelCatalogRESTClient(app.GetAllCatalogSourcesHandler))) + apiRouter.GET(CatalogFilterOptionListPath, app.AttachNamespace(app.AttachModelCatalogRESTClient(app.GetCatalogFilterListHandler))) apiRouter.GET(CatalogSourceModelCatchAllPath, app.AttachNamespace(app.AttachModelCatalogRESTClient(app.GetCatalogSourceModelHandler))) - apiRouter.GET(CatalogSourceModelArtifactsCatchAll, app.AttachNamespace(app.AttachModelCatalogRESTClient(app.GetCatalogSourceModelArtifactHandler))) + apiRouter.GET(CatalogSourceModelArtifactsCatchAll, app.AttachNamespace(app.AttachModelCatalogRESTClient(app.GetCatalogSourceModelArtifactsHandler))) // Kubernetes routes apiRouter.GET(UserPath, app.UserHandler) apiRouter.GET(ModelRegistryListPath, app.AttachNamespace(app.RequireListServiceAccessInNamespace(app.GetAllModelRegistriesHandler))) @@ -262,6 +273,14 @@ func (app *App) Routes() http.Handler { //This namespace endpoint is used to get the namespaces for the current user inside the model registry settings 
apiRouter.GET(SettingsNamespacePath, app.GetNamespacesHandler) + // Model catalog settings page + apiRouter.GET(ModelCatalogSettingsSourceConfigListPath, app.AttachNamespace(app.GetAllCatalogSourceConfigsHandler)) + apiRouter.POST(ModelCatalogSettingsSourceConfigListPath, app.AttachNamespace(app.CreateCatalogSourceConfigHandler)) + apiRouter.GET(ModelCatalogSettingsSourceConfigPath, app.AttachNamespace(app.GetCatalogSourceConfigHandler)) + apiRouter.PATCH(ModelCatalogSettingsSourceConfigPath, app.AttachNamespace(app.UpdateCatalogSourceConfigHandler)) + apiRouter.DELETE(ModelCatalogSettingsSourceConfigPath, app.AttachNamespace(app.DeleteCatalogSourceConfigHandler)) + + apiRouter.POST(CatalogSourcePreviewPath, app.AttachNamespace(app.AttachModelCatalogRESTClient(app.CreateCatalogSourcePreviewHandler))) } // App Router diff --git a/clients/ui/bff/internal/api/catalog_filters_handler.go b/clients/ui/bff/internal/api/catalog_filters_handler.go new file mode 100644 index 0000000000..a56b5eb5e8 --- /dev/null +++ b/clients/ui/bff/internal/api/catalog_filters_handler.go @@ -0,0 +1,39 @@ +package api + +import ( + "errors" + "net/http" + + "github.com/julienschmidt/httprouter" + "github.com/kubeflow/model-registry/ui/bff/internal/constants" + "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" + "github.com/kubeflow/model-registry/ui/bff/internal/models" +) + +type CatalogFilterOptionEnvelope Envelope[*models.FilterOption, None] +type CatalogFilterOptionsListEnvelope Envelope[*models.FilterOptionsList, None] + +func (app *App) GetCatalogFilterListHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + client, ok := r.Context().Value(constants.ModelCatalogHttpClientKey).(httpclient.HTTPClientInterface) + + if !ok { + app.serverErrorResponse(w, r, errors.New("catalog REST client not found")) + return + } + + catalogFilterOptions, err := app.repositories.ModelCatalogClient.GetCatalogFilterOptions(client) + + if err != nil { + 
app.serverErrorResponse(w, r, err) + return + } + + catalogFilterOptionList := CatalogFilterOptionsListEnvelope{ + Data: catalogFilterOptions, + } + + err = app.WriteJSON(w, http.StatusOK, catalogFilterOptionList, nil) + if err != nil { + app.serverErrorResponse(w, r, err) + } +} diff --git a/clients/ui/bff/internal/api/catalog_filters_handler_test.go b/clients/ui/bff/internal/api/catalog_filters_handler_test.go new file mode 100644 index 0000000000..3722aed809 --- /dev/null +++ b/clients/ui/bff/internal/api/catalog_filters_handler_test.go @@ -0,0 +1,31 @@ +package api + +import ( + "net/http" + + "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" + "github.com/kubeflow/model-registry/ui/bff/internal/mocks" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("TestGetCatalogFilterListHandler", func() { + Context("testing catalog filter list handler", Ordered, func() { + + It("should retrive filter option list", func() { + By("fetching all filter option list") + data := mocks.GetFilterOptionsListMock() + requestIdentify := kubernetes.RequestIdentity{ + UserID: "user@example.com", + } + + expected := CatalogFilterOptionsListEnvelope{Data: &data} + actual, rs, err := setupApiTest[CatalogFilterOptionsListEnvelope](http.MethodGet, "/api/v1/model_catalog/models/filter_options?namespace=kubeflow", nil, kubernetesMockedStaticClientFactory, requestIdentify, "kubeflow") + Expect(err).NotTo(HaveOccurred()) + + By("should match the expected catalog filter options") + Expect(rs.StatusCode).To(Equal(http.StatusOK)) + Expect(actual.Data).To(Equal(expected.Data)) + }) + }) +}) diff --git a/clients/ui/bff/internal/api/catalog_models_handler.go b/clients/ui/bff/internal/api/catalog_models_handler.go index 285336f73e..a4ea49703e 100644 --- a/clients/ui/bff/internal/api/catalog_models_handler.go +++ b/clients/ui/bff/internal/api/catalog_models_handler.go @@ -5,21 +5,14 @@ import ( "net/http" "github.com/julienschmidt/httprouter" 
- "github.com/kubeflow/model-registry/catalog/pkg/openapi" "github.com/kubeflow/model-registry/ui/bff/internal/constants" "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" + "github.com/kubeflow/model-registry/ui/bff/internal/models" ) -type CatalogModelListEnvelope Envelope[*openapi.CatalogModelList, None] +type CatalogModelListEnvelope Envelope[*models.CatalogModelList, None] func (app *App) GetAllCatalogModelsAcrossSourcesHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { - - source := r.URL.Query().Get("source") - if source == "" { - app.badRequestResponse(w, r, errors.New("source query parameter is required")) - return - } - client, ok := r.Context().Value(constants.ModelCatalogHttpClientKey).(httpclient.HTTPClientInterface) if !ok { app.serverErrorResponse(w, r, errors.New("catalog REST client not found")) diff --git a/clients/ui/bff/internal/api/catalog_source_preview_handler.go b/clients/ui/bff/internal/api/catalog_source_preview_handler.go new file mode 100644 index 0000000000..3d2e5c6c1b --- /dev/null +++ b/clients/ui/bff/internal/api/catalog_source_preview_handler.go @@ -0,0 +1,44 @@ +package api + +import ( + "encoding/json" + "errors" + "fmt" + "net/http" + + "github.com/julienschmidt/httprouter" + "github.com/kubeflow/model-registry/ui/bff/internal/constants" + "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" + "github.com/kubeflow/model-registry/ui/bff/internal/models" +) + +type CatalogSourcePreviewEnvelope Envelope[*models.CatalogSourcePreviewResult, None] + +func (app *App) CreateCatalogSourcePreviewHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + client, ok := r.Context().Value(constants.ModelCatalogHttpClientKey).(httpclient.HTTPClientInterface) + if !ok { + app.serverErrorResponse(w, r, errors.New("catalog REST client not found")) + return + } + + var sourcePreviewPayload models.CatalogSourcePreviewRequest + if err := 
json.NewDecoder(r.Body).Decode(&sourcePreviewPayload); err != nil { + app.serverErrorResponse(w, r, fmt.Errorf("error decoding JSON:: %v", err.Error())) + return + } + + sourcePreview, err := app.repositories.ModelCatalogClient.CreateCatalogSourcePreview(client, sourcePreviewPayload) + + if err != nil { + app.serverErrorResponse(w, r, err) + } + + catalogSourcePreview := CatalogSourcePreviewEnvelope{ + Data: sourcePreview, + } + + err = app.WriteJSON(w, http.StatusOK, catalogSourcePreview, nil) + if err != nil { + app.serverErrorResponse(w, r, err) + } +} diff --git a/clients/ui/bff/internal/api/catalog_sources_handler.go b/clients/ui/bff/internal/api/catalog_sources_handler.go index 9b01ca5b23..1ae3bf867e 100644 --- a/clients/ui/bff/internal/api/catalog_sources_handler.go +++ b/clients/ui/bff/internal/api/catalog_sources_handler.go @@ -7,14 +7,14 @@ import ( "strings" "github.com/julienschmidt/httprouter" - "github.com/kubeflow/model-registry/catalog/pkg/openapi" "github.com/kubeflow/model-registry/ui/bff/internal/constants" "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" + "github.com/kubeflow/model-registry/ui/bff/internal/models" ) -type CatalogSourceListEnvelope Envelope[*openapi.CatalogSourceList, None] -type CatalogModelEnvelope Envelope[*openapi.CatalogModel, None] -type catalogModelArtifactsListEnvelope Envelope[*openapi.CatalogModelArtifactList, None] +type CatalogSourceListEnvelope Envelope[*models.CatalogSourceList, None] +type CatalogModelEnvelope Envelope[*models.CatalogModel, None] +type catalogModelArtifactsListEnvelope Envelope[*models.CatalogModelArtifactList, None] func (app *App) GetAllCatalogSourcesHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { client, ok := r.Context().Value(constants.ModelCatalogHttpClientKey).(httpclient.HTTPClientInterface) @@ -69,7 +69,7 @@ func (app *App) GetCatalogSourceModelHandler(w http.ResponseWriter, r *http.Requ } } -func (app *App) 
GetCatalogSourceModelArtifactHandler(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (app *App) GetCatalogSourceModelArtifactsHandler(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { client, ok := r.Context().Value(constants.ModelCatalogHttpClientKey).(httpclient.HTTPClientInterface) if !ok { app.serverErrorResponse(w, r, errors.New("catalog REST client not found")) @@ -81,7 +81,7 @@ func (app *App) GetCatalogSourceModelArtifactHandler(w http.ResponseWriter, r *h newModelName := url.PathEscape(modelName) - catalogModelArtifacts, err := app.repositories.ModelCatalogClient.GetCatalogModelArtifacts(client, ps.ByName(CatalogSourceId), newModelName) + catalogModelArtifacts, err := app.repositories.ModelCatalogClient.GetCatalogSourceModelArtifacts(client, ps.ByName(CatalogSourceId), newModelName) if err != nil { app.serverErrorResponse(w, r, err) diff --git a/clients/ui/bff/internal/api/middleware.go b/clients/ui/bff/internal/api/middleware.go index 711b6b4712..aec3c8cd4a 100644 --- a/clients/ui/bff/internal/api/middleware.go +++ b/clients/ui/bff/internal/api/middleware.go @@ -11,6 +11,7 @@ import ( "github.com/kubeflow/model-registry/ui/bff/internal/config" "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" + "github.com/kubeflow/model-registry/ui/bff/internal/models" "github.com/google/uuid" "github.com/julienschmidt/httprouter" @@ -259,17 +260,39 @@ func (app *App) RequireListServiceAccessInNamespace(next func(http.ResponseWrite return } - allowed, err := client.CanListServicesInNamespace(ctx, identity, namespace) + // Validate if user can list services in namespace + allowList, err := client.CanListServicesInNamespace(ctx, identity, namespace) if err != nil { app.forbiddenResponse(w, r, fmt.Sprintf("SAR or SelfSAR failed for namespace %s: %v", namespace, err)) return } - if !allowed { - app.forbiddenResponse(w, r, 
fmt.Sprintf("SAR or SelfSAR denied access to namespace %s", namespace)) - return + + // If user can't list all services, validate if user can access specific services + var allowedServiceNames []string + if !allowList { + // Use SelfSubjectRulesReview to get specific service names the user can access + allowedServiceNames, err = client.GetSelfSubjectRulesReview(ctx, identity, namespace) + if err != nil { + app.forbiddenResponse(w, r, fmt.Sprintf("Failed to get rules review for namespace %s: %v", namespace, err)) + return + } + + // If user has no specific service access and can't list all, deny access + if len(allowedServiceNames) == 0 { + app.forbiddenResponse(w, r, fmt.Sprintf("No service access granted in namespace %s", namespace)) + return + } } - next(w, r, ps) + //Add authorization context for passing to downstream handler GetAllModelRegistriesHandler + authCtx := &models.ServiceAuthorizationContext{ + AllowList: allowList, + AllowedServiceNames: allowedServiceNames, + Namespace: namespace, + } + + ctx = context.WithValue(ctx, constants.ServiceAuthorizationContextKey, authCtx) + next(w, r.WithContext(ctx), ps) } } diff --git a/clients/ui/bff/internal/api/model_artifacts_handler.go b/clients/ui/bff/internal/api/model_artifacts_handler.go index d0e14d1fbe..4528a56d87 100644 --- a/clients/ui/bff/internal/api/model_artifacts_handler.go +++ b/clients/ui/bff/internal/api/model_artifacts_handler.go @@ -4,9 +4,10 @@ import ( "encoding/json" "errors" "fmt" - "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" "net/http" + "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" + "github.com/julienschmidt/httprouter" "github.com/kubeflow/model-registry/pkg/openapi" "github.com/kubeflow/model-registry/ui/bff/internal/constants" @@ -35,7 +36,7 @@ func (app *App) UpdateModelArtifactHandler(w http.ResponseWriter, r *http.Reques return } - patchedModelArtifact, err := 
app.repositories.ModelRegistryClient.UpdateModelArtifact(client, ps.ByName(ArtifactId), jsonData) + patchedModelArtifact, err := app.repositories.ModelRegistryClient.UpdateModelArtifact(client, ps.ByName(ModelArtifactId), jsonData) if err != nil { var httpErr *httpclient.HTTPError if errors.As(err, &httpErr) { diff --git a/clients/ui/bff/internal/api/model_catalog_settings_handler.go b/clients/ui/bff/internal/api/model_catalog_settings_handler.go new file mode 100644 index 0000000000..f4ac218bf9 --- /dev/null +++ b/clients/ui/bff/internal/api/model_catalog_settings_handler.go @@ -0,0 +1,163 @@ +package api + +import ( + "encoding/json" + "errors" + "fmt" + "github.com/kubeflow/model-registry/ui/bff/internal/mocks" + "net/http" + + "github.com/julienschmidt/httprouter" + "github.com/kubeflow/model-registry/ui/bff/internal/constants" + "github.com/kubeflow/model-registry/ui/bff/internal/models" +) + +type ModelCatalogSettingsSourceConfigEnvelope Envelope[*models.CatalogSourceConfig, None] +type ModelCatalogSettingsSourceConfigListEnvelope Envelope[*models.CatalogSourceConfigList, None] +type ModelCatalogSourcePayloadEnvelope Envelope[*models.CatalogSourceConfigPayload, None] + +func (app *App) GetAllCatalogSourceConfigsHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + ctx := r.Context() + + namespace, ok := ctx.Value(constants.NamespaceHeaderParameterKey).(string) + if !ok || namespace == "" { + app.badRequestResponse(w, r, fmt.Errorf("missing namespace in context")) + return + } + + client, err := app.kubernetesClientFactory.GetClient(ctx) + if err != nil { + app.serverErrorResponse(w, r, errors.New("catalog client not found")) + return + } + catalogSourceConfigs, err := app.repositories.ModelCatalogSettingsRepository.GetAllCatalogSourceConfigs(ctx, client, namespace) + if err != nil { + app.serverErrorResponse(w, r, err) + return + } + + modelCatalogSource := ModelCatalogSettingsSourceConfigListEnvelope{ + Data: catalogSourceConfigs, + } + + 
err = app.WriteJSON(w, http.StatusOK, modelCatalogSource, nil) + + if err != nil { + app.serverErrorResponse(w, r, err) + } + +} + +func (app *App) GetCatalogSourceConfigHandler(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { + ctx := r.Context() + + namespace, ok := ctx.Value(constants.NamespaceHeaderParameterKey).(string) + if !ok || namespace == "" { + app.badRequestResponse(w, r, fmt.Errorf("missing namespace in context")) + return + } + + catalogSourceId := ps.ByName(CatalogSourceId) + // TODO ppadti write the real implementation here + catalogSourceConfig := mocks.CreateSampleCatalogSource(catalogSourceId, "catalog-source-1", "yaml") + + modelCatalogSource := ModelCatalogSettingsSourceConfigEnvelope{ + Data: &catalogSourceConfig, + } + + err := app.WriteJSON(w, http.StatusOK, modelCatalogSource, nil) + + if err != nil { + app.serverErrorResponse(w, r, err) + } + +} + +func (app *App) CreateCatalogSourceConfigHandler(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { + ctx := r.Context() + + namespace, ok := ctx.Value(constants.NamespaceHeaderParameterKey).(string) + if !ok || namespace == "" { + app.badRequestResponse(w, r, fmt.Errorf("missing namespace in context")) + return + } + + var envelope ModelCatalogSourcePayloadEnvelope + if err := json.NewDecoder(r.Body).Decode(&envelope); err != nil { + app.serverErrorResponse(w, r, fmt.Errorf("error decoding JSON:: %v", err.Error())) + return + } + + var sourceName = envelope.Data.Name + var sourceId = envelope.Data.Id + var sourceType = envelope.Data.Type + // TODO ppadti write the real implementation here + newCatalogSource := mocks.CreateSampleCatalogSource(sourceId, sourceName, sourceType) + + modelCatalogSource := ModelCatalogSettingsSourceConfigEnvelope{ + Data: &newCatalogSource, + } + + w.Header().Set("Location", r.URL.JoinPath(modelCatalogSource.Data.Id).String()) + writeErr := app.WriteJSON(w, http.StatusCreated, modelCatalogSource, nil) + if writeErr != nil { + 
app.serverErrorResponse(w, r, fmt.Errorf("error writing JSON")) + return + } + +} + +func (app *App) UpdateCatalogSourceConfigHandler(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { + ctx := r.Context() + + namespace, ok := ctx.Value(constants.NamespaceHeaderParameterKey).(string) + if !ok || namespace == "" { + app.badRequestResponse(w, r, fmt.Errorf("missing namespace in context")) + return + } + + var envelope ModelCatalogSourcePayloadEnvelope + if err := json.NewDecoder(r.Body).Decode(&envelope); err != nil { + app.serverErrorResponse(w, r, fmt.Errorf("error decoding JSON:: %v", err.Error())) + return + } + + catalogSourceId := envelope.Data.Id + // TODO ppadti write the real implementation here + newCatalogSource := mocks.CreateSampleCatalogSource(catalogSourceId, "Updated Catalog", "yaml") + + modelCatalogSource := ModelCatalogSettingsSourceConfigEnvelope{ + Data: &newCatalogSource, + } + + err := app.WriteJSON(w, http.StatusOK, modelCatalogSource, nil) + + if err != nil { + app.serverErrorResponse(w, r, err) + } +} + +func (app *App) DeleteCatalogSourceConfigHandler(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { + ctx := r.Context() + + namespace, ok := ctx.Value(constants.NamespaceHeaderParameterKey).(string) + if !ok || namespace == "" { + app.badRequestResponse(w, r, fmt.Errorf("missing namespace in context")) + return + } + + // TODO ppadti write the real implementation here + catalogSourceId := ps.ByName(CatalogSourceId) + + deletedCatalogSource := mocks.CreateSampleCatalogSource(catalogSourceId, "Updated Catalog", "yaml") + + modelCatalogSource := ModelCatalogSettingsSourceConfigEnvelope{ + Data: &deletedCatalogSource, + } + + err := app.WriteJSON(w, http.StatusOK, modelCatalogSource, nil) + + if err != nil { + app.serverErrorResponse(w, r, err) + } +} diff --git a/clients/ui/bff/internal/api/namespaces_handler_test.go b/clients/ui/bff/internal/api/namespaces_handler_test.go index 3c0f8d7d9c..eaacffe0cd 100644 --- 
a/clients/ui/bff/internal/api/namespaces_handler_test.go +++ b/clients/ui/bff/internal/api/namespaces_handler_test.go @@ -3,6 +3,10 @@ package api import ( "context" "encoding/json" + "io" + "net/http" + "net/http/httptest" + "github.com/kubeflow/model-registry/ui/bff/internal/config" "github.com/kubeflow/model-registry/ui/bff/internal/constants" "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" @@ -11,9 +15,6 @@ import ( "github.com/kubeflow/model-registry/ui/bff/internal/repositories" . "github.com/onsi/ginkgo/v2" . "github.com/onsi/gomega" - "io" - "net/http" - "net/http/httptest" ) var _ = Describe("TestNamespacesHandler", func() { diff --git a/clients/ui/bff/internal/api/suite_test.go b/clients/ui/bff/internal/api/suite_test.go index 307e559272..8eb06bab78 100644 --- a/clients/ui/bff/internal/api/suite_test.go +++ b/clients/ui/bff/internal/api/suite_test.go @@ -2,14 +2,15 @@ package api import ( "context" + "log/slog" + "os" + "testing" + k8s "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes/k8mocks" "k8s.io/client-go/kubernetes" "k8s.io/client-go/rest" - "log/slog" - "os" "sigs.k8s.io/controller-runtime/pkg/envtest" - "testing" "github.com/kubeflow/model-registry/ui/bff/internal/mocks" logf "sigs.k8s.io/controller-runtime/pkg/log" @@ -67,6 +68,7 @@ var _ = BeforeSuite(func() { mockModelCatalogClient, err = mocks.NewModelCatalogClientMock(nil) Expect(err).NotTo(HaveOccurred()) + }) var _ = AfterSuite(func() { diff --git a/clients/ui/bff/internal/api/test_utils.go b/clients/ui/bff/internal/api/test_utils.go index 7a92653c42..72a1b0c2d3 100644 --- a/clients/ui/bff/internal/api/test_utils.go +++ b/clients/ui/bff/internal/api/test_utils.go @@ -4,9 +4,6 @@ import ( "bytes" "context" "encoding/json" - "github.com/kubeflow/model-registry/ui/bff/internal/config" - k8s 
"github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" - "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" "io" "log/slog" "net/http" @@ -14,6 +11,10 @@ import ( "os" "path/filepath" + "github.com/kubeflow/model-registry/ui/bff/internal/config" + k8s "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" + "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" + "github.com/kubeflow/model-registry/ui/bff/internal/constants" "github.com/kubeflow/model-registry/ui/bff/internal/mocks" "github.com/kubeflow/model-registry/ui/bff/internal/repositories" diff --git a/clients/ui/bff/internal/api/user_handler_test.go b/clients/ui/bff/internal/api/user_handler_test.go index 7c657c9153..812e9de69c 100644 --- a/clients/ui/bff/internal/api/user_handler_test.go +++ b/clients/ui/bff/internal/api/user_handler_test.go @@ -3,12 +3,13 @@ package api import ( "context" "encoding/json" - "github.com/kubeflow/model-registry/ui/bff/internal/constants" - "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" "io" "net/http" "net/http/httptest" + "github.com/kubeflow/model-registry/ui/bff/internal/constants" + "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" + "github.com/kubeflow/model-registry/ui/bff/internal/repositories" . "github.com/onsi/ginkgo/v2" . 
"github.com/onsi/gomega" diff --git a/clients/ui/bff/internal/constants/keys.go b/clients/ui/bff/internal/constants/keys.go index 43e53137ef..8f845548e1 100644 --- a/clients/ui/bff/internal/constants/keys.go +++ b/clients/ui/bff/internal/constants/keys.go @@ -19,6 +19,7 @@ const ( KubeflowUserIDHeader = "kubeflow-userid" // kubeflow-userid :contains the user's email address KubeflowUserGroupsIdHeader = "kubeflow-groups" // kubeflow-groups : Holds a comma-separated list of user groups - TraceIdKey contextKey = "TraceIdKey" - TraceLoggerKey contextKey = "TraceLoggerKey" + TraceIdKey contextKey = "TraceIdKey" + TraceLoggerKey contextKey = "TraceLoggerKey" + ServiceAuthorizationContextKey contextKey = "ServiceAuthorizationContextKey" ) diff --git a/clients/ui/bff/internal/integrations/kubernetes/client.go b/clients/ui/bff/internal/integrations/kubernetes/client.go index a2cc11dbba..fcb80e4559 100644 --- a/clients/ui/bff/internal/integrations/kubernetes/client.go +++ b/clients/ui/bff/internal/integrations/kubernetes/client.go @@ -2,16 +2,21 @@ package kubernetes import ( "context" - corev1 "k8s.io/api/core/v1" ) const ComponentLabelValue = "model-registry" +const ComponentLabelValueCatalog = "model-catalog" + +// TODO ppadti double check if the config map key is indeed sources.yaml +const CatalogSourceKey = "sources.yaml" +const CatalogSourceDefaultConfigMapName = "model-catalog-source-config" +const CatalogSourceUserConfigMapName = "model-catalog-sources" type KubernetesClientInterface interface { // Service discovery GetServiceNames(ctx context.Context, namespace string) ([]string, error) - GetServiceDetailsByName(ctx context.Context, namespace, serviceName string) (ServiceDetails, error) + GetServiceDetailsByName(ctx context.Context, namespace, serviceName string, serviceType string) (ServiceDetails, error) GetServiceDetails(ctx context.Context, namespace string) ([]ServiceDetails, error) // Namespace access @@ -20,6 +25,7 @@ type KubernetesClientInterface interface 
{ // Permission checks (abstracted SAR/SelfSAR) CanListServicesInNamespace(ctx context.Context, identity *RequestIdentity, namespace string) (bool, error) CanAccessServiceInNamespace(ctx context.Context, identity *RequestIdentity, namespace, serviceName string) (bool, error) + GetSelfSubjectRulesReview(ctx context.Context, identity *RequestIdentity, namespace string) ([]string, error) // Meta IsClusterAdmin(identity *RequestIdentity) (bool, error) @@ -28,4 +34,8 @@ type KubernetesClientInterface interface { // Model Registry Settings GetGroups(ctx context.Context) ([]string, error) + + //Model Catalog Settings + GetAllCatalogSourceConfigs(ctx context.Context, namespace string) (corev1.ConfigMap, corev1.ConfigMap, error) + //TODO ppadti add other methods here } diff --git a/clients/ui/bff/internal/integrations/kubernetes/internal_k8s_client.go b/clients/ui/bff/internal/integrations/kubernetes/internal_k8s_client.go index 38715531fb..f79bf63b9f 100644 --- a/clients/ui/bff/internal/integrations/kubernetes/internal_k8s_client.go +++ b/clients/ui/bff/internal/integrations/kubernetes/internal_k8s_client.go @@ -114,6 +114,14 @@ func (kc *InternalKubernetesClient) CanAccessServiceInNamespace(ctx context.Cont return true, nil } +// GetSelfSubjectRulesReview gets the rules for what a user can access in a namespace +func (kc *InternalKubernetesClient) GetSelfSubjectRulesReview(ctx context.Context, identity *RequestIdentity, namespace string) ([]string, error) { + kc.Logger.Warn("GetSelfSubjectRulesReview not fully implemented for internal client", + "namespace", namespace, + "user", identity.UserID) + return []string{}, nil +} + func (kc *InternalKubernetesClient) GetNamespaces(ctx context.Context, identity *RequestIdentity) ([]corev1.Namespace, error) { ctx, cancel := context.WithTimeout(ctx, 30*time.Second) defer cancel() diff --git a/clients/ui/bff/internal/integrations/kubernetes/k8mocks/base_testenv.go 
b/clients/ui/bff/internal/integrations/kubernetes/k8mocks/base_testenv.go index fca262b814..d7af1e514b 100644 --- a/clients/ui/bff/internal/integrations/kubernetes/k8mocks/base_testenv.go +++ b/clients/ui/bff/internal/integrations/kubernetes/k8mocks/base_testenv.go @@ -7,8 +7,9 @@ import ( "os" "path/filepath" "runtime" + "strings" - kubernetes2 "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" + k8s "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" corev1 "k8s.io/api/core/v1" rbacv1 "k8s.io/api/rbac/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -162,6 +163,131 @@ func setupMock(mockK8sClient kubernetes.Interface, ctx context.Context) error { return fmt.Errorf("failed to set up group access to namespace: %w", err) } + //TODO ppadti: Add more mock setup as needed for other namespaces + err = createModelCatalogDefaultSourcesConfigMap(mockK8sClient, ctx, "kubeflow") + if err != nil { + return err + } + + err = createModelCatalogSourcesConfigMap(mockK8sClient, ctx, "kubeflow") + if err != nil { + return err + } + + return nil +} + +func createModelCatalogDefaultSourcesConfigMap( + k8sClient kubernetes.Interface, + ctx context.Context, + namespace string, +) error { + raw := strings.TrimSpace(` +catalogs: + - name: Dora AI + id: dora_ai_models + type: yaml + enabled: true + properties: + yamlCatalogPath: /shared-data/models-catalog.yaml + labels: + - Dora AI + + - name: Bella AI validated + id: bella_ai_validated_models + type: yaml + enabled: true + properties: + yamlCatalogPath: /shared-data/validated-models-catalog.yaml + labels: + - Bella AI validated +`) + + cm := &corev1.ConfigMap{ + ObjectMeta: metav1.ObjectMeta{ + Name: k8s.CatalogSourceDefaultConfigMapName, + Namespace: namespace, + }, + Data: map[string]string{ + k8s.CatalogSourceKey: raw, + }, + } + + if _, err := k8sClient.CoreV1().ConfigMaps(namespace).Create(ctx, cm, metav1.CreateOptions{}); err != nil { + return fmt.Errorf("failed to create 
model-catalog-default-sources configmap: %w", err) + } + + return nil +} + +func createModelCatalogSourcesConfigMap( + k8sClient kubernetes.Interface, + ctx context.Context, + namespace string, +) error { + raw := strings.TrimSpace(` +catalogs: + - name: Custom yaml + id: custom_yaml_models + type: yaml + enabled: true + properties: + yamlCatalogPath: /shared-data/models-catalog.yaml + includedModels: + - model-* + - model-2-* + excludedModels: + - sample-model-* + labels: + - Dora AI + + - name: Sample source + id: sample_source_models + type: yaml + enabled: false + properties: + yamlCatalogPath: /shared-data/validated-models-catalog.yaml + includedModels: + - model-* + - model-2-* + excludedModels: + - sample-model-* + labels: + - Bella AI validated + - Dora AI + + - name: Hugging face source + id: hugging_face_source + type: huggingface + enabled: true + properties: + apiKey: accessToken + allowedOrganization: org + includedModels: + - model-* + - model-2-* + excludedModels: + - sample-model-* + labels: + - Bella AI validated +`) + + cm := &corev1.ConfigMap{ + ObjectMeta: metav1.ObjectMeta{ + Name: k8s.CatalogSourceUserConfigMapName, + Namespace: namespace, + }, + Data: map[string]string{ + k8s.CatalogSourceKey: raw, + "custom_yaml_models.yaml": "models:\n - name: model1", + "sample_source_models.yaml": "models:\n - name: model2", + }, + } + + if _, err := k8sClient.CoreV1().ConfigMaps(namespace).Create(ctx, cm, metav1.CreateOptions{}); err != nil { + return fmt.Errorf("failed to create model-catalog-sources configmap: %w", err) + } + return nil } @@ -373,7 +499,7 @@ func createService(k8sClient kubernetes.Interface, ctx context.Context, name str }, Spec: corev1.ServiceSpec{ Selector: map[string]string{ - "component": kubernetes2.ComponentLabelValue, + "component": k8s.ComponentLabelValue, }, Type: corev1.ServiceTypeClusterIP, ClusterIP: clusterIP, diff --git a/clients/ui/bff/internal/integrations/kubernetes/k8mocks/internal_k8s_client_mock.go
b/clients/ui/bff/internal/integrations/kubernetes/k8mocks/internal_k8s_client_mock.go index 1b4e1c33cb..47f0cad0bb 100644 --- a/clients/ui/bff/internal/integrations/kubernetes/k8mocks/internal_k8s_client_mock.go +++ b/clients/ui/bff/internal/integrations/kubernetes/k8mocks/internal_k8s_client_mock.go @@ -42,8 +42,8 @@ func (m *InternalKubernetesClientMock) GetServiceDetails(sessionCtx context.Cont } // GetServiceDetailsByName overrides to simulate local service access -func (m *InternalKubernetesClientMock) GetServiceDetailsByName(sessionCtx context.Context, namespace, serviceName string) (k8s.ServiceDetails, error) { - originalService, err := m.InternalKubernetesClient.GetServiceDetailsByName(sessionCtx, namespace, serviceName) +func (m *InternalKubernetesClientMock) GetServiceDetailsByName(sessionCtx context.Context, namespace, serviceName string, serviceType string) (k8s.ServiceDetails, error) { + originalService, err := m.InternalKubernetesClient.GetServiceDetailsByName(sessionCtx, namespace, serviceName, serviceType) if err != nil { return k8s.ServiceDetails{}, fmt.Errorf("failed to get service details: %w", err) } diff --git a/clients/ui/bff/internal/integrations/kubernetes/k8mocks/token_k8s_client_mock.go b/clients/ui/bff/internal/integrations/kubernetes/k8mocks/token_k8s_client_mock.go index b2631e1f08..50b60ac209 100644 --- a/clients/ui/bff/internal/integrations/kubernetes/k8mocks/token_k8s_client_mock.go +++ b/clients/ui/bff/internal/integrations/kubernetes/k8mocks/token_k8s_client_mock.go @@ -47,8 +47,8 @@ func (m *TokenKubernetesClientMock) GetServiceDetails(sessionCtx context.Context } // GetServiceDetailsByName overrides to simulate local service access -func (m *TokenKubernetesClientMock) GetServiceDetailsByName(sessionCtx context.Context, namespace, serviceName string) (k8s.ServiceDetails, error) { - originalService, err := m.TokenKubernetesClient.GetServiceDetailsByName(sessionCtx, namespace, serviceName) +func (m *TokenKubernetesClientMock) 
GetServiceDetailsByName(sessionCtx context.Context, namespace, serviceName string, serviceType string) (k8s.ServiceDetails, error) { + originalService, err := m.TokenKubernetesClient.GetServiceDetailsByName(sessionCtx, namespace, serviceName, serviceType) if err != nil { return k8s.ServiceDetails{}, fmt.Errorf("failed to get service details: %w", err) } diff --git a/clients/ui/bff/internal/integrations/kubernetes/shared_k8s_client.go b/clients/ui/bff/internal/integrations/kubernetes/shared_k8s_client.go index 10570a40b9..e125f1e674 100644 --- a/clients/ui/bff/internal/integrations/kubernetes/shared_k8s_client.go +++ b/clients/ui/bff/internal/integrations/kubernetes/shared_k8s_client.go @@ -124,7 +124,7 @@ func buildServiceDetails(service *corev1.Service, logger *slog.Logger) (*Service }, nil } -func (kc *SharedClientLogic) GetServiceDetailsByName(sessionCtx context.Context, namespace string, serviceName string) (ServiceDetails, error) { +func (kc *SharedClientLogic) GetServiceDetailsByName(sessionCtx context.Context, namespace string, serviceName string, serviceType string) (ServiceDetails, error) { if namespace == "" || serviceName == "" { return ServiceDetails{}, fmt.Errorf("namespace and serviceName cannot be empty") } @@ -138,6 +138,9 @@ func (kc *SharedClientLogic) GetServiceDetailsByName(sessionCtx context.Context, if err != nil { return ServiceDetails{}, fmt.Errorf("failed to get service %q in namespace %q: %w", serviceName, namespace, err) } + if serviceType != "" && service.Labels["component"] != serviceType { + return ServiceDetails{}, fmt.Errorf("service %q in namespace %q is not a %s", serviceName, namespace, serviceType) + } details, err := buildServiceDetails(service, sessionLogger) if err != nil { @@ -155,3 +158,42 @@ func (kc *SharedClientLogic) GetGroups(ctx context.Context) ([]string, error) { kc.Logger.Info("This functionality is not implement yet. 
This is a STUB API to unblock frontend development until we have a definition on how to create model registries") return []string{}, nil } + +func (kc *SharedClientLogic) GetAllCatalogSourceConfigs( + sessionCtx context.Context, + namespace string, +) (corev1.ConfigMap, corev1.ConfigMap, error) { + + if namespace == "" { + return corev1.ConfigMap{}, corev1.ConfigMap{}, fmt.Errorf("namespace cannot be empty") + } + + sessionLogger := sessionCtx.Value(constants.TraceLoggerKey).(*slog.Logger) + + // Fetch default sources + defaultCM, err := kc.Client.CoreV1(). + ConfigMaps(namespace). + Get(sessionCtx, CatalogSourceDefaultConfigMapName, metav1.GetOptions{}) + + if err != nil { + sessionLogger.Error("failed to fetch default catalog source configmap", + "namespace", namespace, + "name", CatalogSourceDefaultConfigMapName, + "error", err, + ) + return corev1.ConfigMap{}, corev1.ConfigMap{}, fmt.Errorf("failed to get %s: %w", CatalogSourceDefaultConfigMapName, err) + } + + userCM, err := kc.Client.CoreV1().ConfigMaps(namespace).Get(sessionCtx, CatalogSourceUserConfigMapName, metav1.GetOptions{}) + + if err != nil { + sessionLogger.Error("failed to fetch catalog source configmap", + "namespace", namespace, + "name", CatalogSourceUserConfigMapName, + "error", err, + ) + return corev1.ConfigMap{}, corev1.ConfigMap{}, fmt.Errorf("failed to get %s: %w", CatalogSourceUserConfigMapName, err) + } + + return *defaultCM, *userCM, nil +} diff --git a/clients/ui/bff/internal/integrations/kubernetes/tests/internal_k8s_client_test.go b/clients/ui/bff/internal/integrations/kubernetes/tests/internal_k8s_client_test.go index 1ed9f80ca4..27138dc437 100644 --- a/clients/ui/bff/internal/integrations/kubernetes/tests/internal_k8s_client_test.go +++ b/clients/ui/bff/internal/integrations/kubernetes/tests/internal_k8s_client_test.go @@ -46,7 +46,7 @@ var _ = Describe("Kubernetes Internal Client Test", func() { serviceAccountMockedK8client, err := 
kubernetesMockedStaticClientFactory.GetClient(mocks.NewMockSessionContextNoParent()) Expect(err).NotTo(HaveOccurred()) - service, err := serviceAccountMockedK8client.GetServiceDetailsByName(mocks.NewMockSessionContextNoParent(), "dora-namespace", "model-registry-dora") + service, err := serviceAccountMockedK8client.GetServiceDetailsByName(mocks.NewMockSessionContextNoParent(), "dora-namespace", "model-registry-dora", kubernetes.ComponentLabelValue) Expect(err).NotTo(HaveOccurred(), "Failed to create k8s request") By("checking that service details are correct") diff --git a/clients/ui/bff/internal/integrations/kubernetes/token_k8s_client.go b/clients/ui/bff/internal/integrations/kubernetes/token_k8s_client.go index 97a8aa3917..c3dce6de93 100644 --- a/clients/ui/bff/internal/integrations/kubernetes/token_k8s_client.go +++ b/clients/ui/bff/internal/integrations/kubernetes/token_k8s_client.go @@ -3,6 +3,10 @@ package kubernetes import ( "context" "fmt" + "log/slog" + "strings" + "time" + helper "github.com/kubeflow/model-registry/ui/bff/internal/helpers" authnv1 "k8s.io/api/authentication/v1" authv1 "k8s.io/api/authorization/v1" @@ -10,9 +14,6 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/client-go/kubernetes" "k8s.io/client-go/rest" - "log/slog" - "strings" - "time" ) type TokenKubernetesClient struct { @@ -122,6 +123,56 @@ func (kc *TokenKubernetesClient) CanListServicesInNamespace(ctx context.Context, return true, nil } +// GetSelfSubjectRulesReview gets the rules for what the user can access in a namespace +func (kc *TokenKubernetesClient) GetSelfSubjectRulesReview(ctx context.Context, _ *RequestIdentity, namespace string) ([]string, error) { + ctx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + ssrr := &authv1.SelfSubjectRulesReview{ + Spec: authv1.SelfSubjectRulesReviewSpec{ + Namespace: namespace, + }, + } + + resp, err := kc.Client.AuthorizationV1().SelfSubjectRulesReviews().Create(ctx, ssrr, metav1.CreateOptions{}) 
+ if err != nil { + kc.Logger.Error("self-subject-rules-review failed", "namespace", namespace, "error", err) + return nil, err + } + + var allowedServiceNames []string + for _, rule := range resp.Status.ResourceRules { + // Check if rule applies to 'services' resource and includes 'get' verb + if containsResource(rule.Resources, "services") && containsVerb(rule.Verbs, "get") { + // Add specific resource names if specified + if len(rule.ResourceNames) > 0 { + allowedServiceNames = append(allowedServiceNames, rule.ResourceNames...) + } + } + } + + return allowedServiceNames, nil +} + +// Helper functions for rule filtering +func containsResource(resources []string, target string) bool { + for _, resource := range resources { + if resource == target { + return true + } + } + return false +} + +func containsVerb(verbs []string, target string) bool { + for _, verb := range verbs { + if verb == target { + return true + } + } + return false +} + // RequestIdentity is unused because the token already represents the user identity. 
func (kc *TokenKubernetesClient) CanAccessServiceInNamespace(ctx context.Context, _ *RequestIdentity, namespace, serviceName string) (bool, error) { ctx, cancel := context.WithTimeout(ctx, 30*time.Second) diff --git a/clients/ui/bff/internal/mocks/model_catalog_client_mock.go b/clients/ui/bff/internal/mocks/model_catalog_client_mock.go index 487ed36595..de93ffb7e4 100644 --- a/clients/ui/bff/internal/mocks/model_catalog_client_mock.go +++ b/clients/ui/bff/internal/mocks/model_catalog_client_mock.go @@ -3,11 +3,13 @@ package mocks import ( "fmt" "log/slog" + "math" "net/url" "strconv" "strings" - "github.com/kubeflow/model-registry/catalog/pkg/openapi" + "github.com/kubeflow/model-registry/ui/bff/internal/models" + "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" "github.com/stretchr/testify/mock" ) @@ -20,9 +22,9 @@ func NewModelCatalogClientMock(logger *slog.Logger) (*ModelCatalogClientMock, er return &ModelCatalogClientMock{}, nil } -func (m *ModelCatalogClientMock) GetAllCatalogModelsAcrossSources(client httpclient.HTTPClientInterface, pageValues url.Values) (*openapi.CatalogModelList, error) { +func (m *ModelCatalogClientMock) GetAllCatalogModelsAcrossSources(client httpclient.HTTPClientInterface, pageValues url.Values) (*models.CatalogModelList, error) { allModels := GetCatalogModelMocks() - var filteredModels []openapi.CatalogModel + var filteredModels []models.CatalogModel sourceId := pageValues.Get("source") query := pageValues.Get("q") @@ -38,7 +40,7 @@ func (m *ModelCatalogClientMock) GetAllCatalogModelsAcrossSources(client httpcli } if query != "" { - var queryFilteredModels []openapi.CatalogModel + var queryFilteredModels []models.CatalogModel queryLower := strings.ToLower(query) for _, model := range filteredModels { @@ -89,11 +91,11 @@ func (m *ModelCatalogClientMock) GetAllCatalogModelsAcrossSources(client httpcli endIndex = totalSize } - var pagedModels []openapi.CatalogModel + var pagedModels []models.CatalogModel if 
startIndex < totalSize { pagedModels = filteredModels[startIndex:endIndex] } else { - pagedModels = []openapi.CatalogModel{} + pagedModels = []models.CatalogModel{} } var nextPageToken string @@ -101,10 +103,19 @@ func (m *ModelCatalogClientMock) GetAllCatalogModelsAcrossSources(client httpcli nextPageToken = strconv.Itoa(endIndex) } - catalogModelList := openapi.CatalogModelList{ + size := len(pagedModels) + if size > math.MaxInt32 { + size = math.MaxInt32 + } + ps := pageSize + if ps > math.MaxInt32 { + ps = math.MaxInt32 + } + + catalogModelList := models.CatalogModelList{ Items: pagedModels, - Size: int32(len(pagedModels)), - PageSize: int32(pageSize), + Size: int32(size), + PageSize: int32(ps), NextPageToken: nextPageToken, } @@ -112,7 +123,7 @@ func (m *ModelCatalogClientMock) GetAllCatalogModelsAcrossSources(client httpcli } -func (m *ModelCatalogClientMock) GetCatalogSourceModel(client httpclient.HTTPClientInterface, sourceId string, modelName string) (*openapi.CatalogModel, error) { +func (m *ModelCatalogClientMock) GetCatalogSourceModel(client httpclient.HTTPClientInterface, sourceId string, modelName string) (*models.CatalogModel, error) { allModels := GetCatalogModelMocks() decodedModelName, err := url.QueryUnescape(modelName) @@ -131,9 +142,9 @@ func (m *ModelCatalogClientMock) GetCatalogSourceModel(client httpclient.HTTPCli return nil, fmt.Errorf("catalog model not found for sourceId: %s, modelName: %s", sourceId, decodedModelName) } -func (m *ModelCatalogClientMock) GetAllCatalogSources(client httpclient.HTTPClientInterface, pageValues url.Values) (*openapi.CatalogSourceList, error) { +func (m *ModelCatalogClientMock) GetAllCatalogSources(client httpclient.HTTPClientInterface, pageValues url.Values) (*models.CatalogSourceList, error) { allMockSources := GetCatalogSourceListMock() - var filteredMockSources []openapi.CatalogSource + var filteredMockSources []models.CatalogSource name := pageValues.Get("name") @@ -147,7 +158,7 @@ func (m 
*ModelCatalogClientMock) GetAllCatalogSources(client httpclient.HTTPClie } else { filteredMockSources = allMockSources.Items } - catalogSourceList := openapi.CatalogSourceList{ + catalogSourceList := models.CatalogSourceList{ Items: filteredMockSources, PageSize: int32(10), NextPageToken: "", @@ -157,7 +168,48 @@ func (m *ModelCatalogClientMock) GetAllCatalogSources(client httpclient.HTTPClie return &catalogSourceList, nil } -func (m *ModelCatalogClientMock) GetCatalogModelArtifacts(client httpclient.HTTPClientInterface, sourceId string, modelName string) (*openapi.CatalogModelArtifactList, error) { - allMockModelArtifacts := GetCatalogModelArtifactListMock() +func (m *ModelCatalogClientMock) GetCatalogSourceModelArtifacts(client httpclient.HTTPClientInterface, sourceId string, modelName string) (*models.CatalogModelArtifactList, error) { + var allMockModelArtifacts models.CatalogModelArtifactList + + if sourceId == "sample-source" && modelName == "repo1%2Fgranite-8b-code-instruct" { + performanceArtifacts := GetCatalogPerformanceMetricsArtifactListMock(4) + accuracyArtifacts := GetCatalogAccuracyMetricsArtifactListMock() + modelArtifacts := GetCatalogModelArtifactListMock() + combinedItems := append(performanceArtifacts.Items, accuracyArtifacts.Items...) + combinedItems = append(combinedItems, modelArtifacts.Items...) + allMockModelArtifacts = models.CatalogModelArtifactList{ + Items: combinedItems, + Size: int32(len(combinedItems)), + PageSize: performanceArtifacts.PageSize, + NextPageToken: "", + } + } else if sourceId == "sample-source" && modelName == "repo1%2Fgranite-7b-instruct" { + accuracyArtifacts := GetCatalogAccuracyMetricsArtifactListMock() + modelArtifacts := GetCatalogModelArtifactListMock() + combinedItems := append(accuracyArtifacts.Items, modelArtifacts.Items...) 
+ allMockModelArtifacts = models.CatalogModelArtifactList{ + Items: combinedItems, + Size: int32(len(combinedItems)), + PageSize: accuracyArtifacts.PageSize, + NextPageToken: "", + } + } else if sourceId == "sample-source" && (modelName == "repo1%2Fgranite-3b-code-base") { + allMockModelArtifacts = GetCatalogModelArtifactListMock() + } else { + allMockModelArtifacts = GetCatalogModelArtifactListMock() + } + return &allMockModelArtifacts, nil } + +func (m *ModelCatalogClientMock) GetCatalogFilterOptions(client httpclient.HTTPClientInterface) (*models.FilterOptionsList, error) { + filterOptions := GetFilterOptionsListMock() + + return &filterOptions, nil +} + +func (m *ModelCatalogClientMock) CreateCatalogSourcePreview(client httpclient.HTTPClientInterface, sourcePreviewPaylod models.CatalogSourcePreviewRequest) (*models.CatalogSourcePreviewResult, error) { + catalogSourcePreview := CreateCatalogSourcePreviewMock() + + return &catalogSourcePreview, nil +} diff --git a/clients/ui/bff/internal/mocks/static_data_mock.go b/clients/ui/bff/internal/mocks/static_data_mock.go index d02ed8b317..8ea722f266 100644 --- a/clients/ui/bff/internal/mocks/static_data_mock.go +++ b/clients/ui/bff/internal/mocks/static_data_mock.go @@ -8,9 +8,9 @@ import ( "github.com/brianvoe/gofakeit/v7" "github.com/google/uuid" - catalogOpenapi "github.com/kubeflow/model-registry/catalog/pkg/openapi" "github.com/kubeflow/model-registry/pkg/openapi" "github.com/kubeflow/model-registry/ui/bff/internal/constants" + "github.com/kubeflow/model-registry/ui/bff/internal/models" ) func GetRegisteredModelMocks() []openapi.RegisteredModel { @@ -223,40 +223,52 @@ func newCustomProperties() *map[string]openapi.MetadataValue { return &result } -func catalogCustomProperties() *map[string]catalogOpenapi.MetadataValue { - result := map[string]catalogOpenapi.MetadataValue{ +func catalogCustomProperties() *map[string]openapi.MetadataValue { + result := map[string]openapi.MetadataValue{ "tensorflow": { - 
MetadataStringValue: &catalogOpenapi.MetadataStringValue{ + MetadataStringValue: &openapi.MetadataStringValue{ StringValue: "", MetadataType: "MetadataStringValue", }, }, "pytorch": { - MetadataStringValue: &catalogOpenapi.MetadataStringValue{ + MetadataStringValue: &openapi.MetadataStringValue{ StringValue: "", MetadataType: "MetadataStringValue", }, }, "mll": { - MetadataStringValue: &catalogOpenapi.MetadataStringValue{ + MetadataStringValue: &openapi.MetadataStringValue{ StringValue: "", MetadataType: "MetadataStringValue", }, }, "rnn": { - MetadataStringValue: &catalogOpenapi.MetadataStringValue{ + MetadataStringValue: &openapi.MetadataStringValue{ StringValue: "", MetadataType: "MetadataStringValue", }, }, + "validated": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "", + MetadataType: "MetadataStringValue", + }, + }, + "validated_on": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "RHOAI 2.20,RHAIIS 3.0,RHELAI 1.5", + MetadataType: "MetadataStringValue", + }, + }, "AWS_KEY": { - MetadataStringValue: &catalogOpenapi.MetadataStringValue{ + MetadataStringValue: &openapi.MetadataStringValue{ StringValue: "asdf89asdf098asdfa", MetadataType: "MetadataStringValue", }, }, "AWS_PASSWORD": { - MetadataStringValue: &catalogOpenapi.MetadataStringValue{ + MetadataStringValue: &openapi.MetadataStringValue{ StringValue: "*AadfeDs34adf", MetadataType: "MetadataStringValue", }, @@ -306,12 +318,12 @@ func GenerateMockArtifact() openapi.Artifact { return mockData } -func GetCatalogModelMocks() []catalogOpenapi.CatalogModel { - sampleModel1 := catalogOpenapi.CatalogModel{ +func GetCatalogModelMocks() []models.CatalogModel { + sampleModel1 := models.CatalogModel{ Name: "repo1/granite-8b-code-instruct", Description: stringToPointer("Granite-8B-Code-Instruct is a 8B parameter model fine tuned from\nGranite-8B-Code-Base on a combination of permissively licensed instruction\ndata to enhance instruction following capabilities including 
logical\nreasoning and problem-solving skills."), Provider: stringToPointer("provider1"), - Tasks: []string{"text-generation", "task2", "task3", "task4"}, + Tasks: []string{"text-generation", "image-to-text"}, License: stringToPointer("apache-2.0"), LicenseLink: stringToPointer("https://www.apache.org/licenses/LICENSE-2.0.txt"), Maturity: stringToPointer("Technology preview"), @@ -661,11 +673,11 @@ Granite 3.1 Instruct Models are primarily finetuned using instruction-response p LastUpdateTimeSinceEpoch: stringToPointer("1704067200000"), } - sampleModel2 := catalogOpenapi.CatalogModel{ + sampleModel2 := models.CatalogModel{ Name: "repo1/granite-7b-instruct", Description: stringToPointer("Granite 7B instruction-tuned model for enterprise applications"), - Provider: stringToPointer("provider1"), - Tasks: []string{"text-generation", "instruction-following"}, + Provider: stringToPointer("Red Hat"), + Tasks: []string{"text-generation", "image-text-to-text"}, License: stringToPointer("apache-2.0"), Maturity: stringToPointer("Generally Available"), Language: []string{"en"}, @@ -674,23 +686,24 @@ Granite 3.1 Instruct Models are primarily finetuned using instruction-response p Logo: 
stringToPointer("data:image/svg+xml;base64,PHN2ZyBpZD0iTGF5ZXJfMSIgZGF0YS1uYW1lPSJMYXllciAxIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxOTIgMTQ1Ij48ZGVmcz48c3R5bGU+LmNscy0xe2ZpbGw6I2UwMDt9PC9zdHlsZT48L2RlZnM+PHRpdGxlPlJlZEhhdC1Mb2dvLUhhdC1Db2xvcjwvdGl0bGU+PHBhdGggZD0iTTE1Ny43Nyw2Mi42MWExNCwxNCwwLDAsMSwuMzEsMy40MmMwLDE0Ljg4LTE4LjEsMTcuNDYtMzAuNjEsMTcuNDZDNzguODMsODMuNDksNDIuNTMsNTMuMjYsNDIuNTMsNDRhNi40Myw2LjQzLDAsMCwxLC4yMi0xLjk0bC0zLjY2LDkuMDZhMTguNDUsMTguNDUsMCwwLDAtMS41MSw3LjMzYzAsMTguMTEsNDEsNDUuNDgsODcuNzQsNDUuNDgsMjAuNjksMCwzNi40My03Ljc2LDM2LjQzLTIxLjc3LDAtMS4wOCwwLTEuOTQtMS43My0xMC4xM1oiLz48cGF0aCBjbGFzcz0iY2xzLTEiIGQ9Ik0xMjcuNDcsODMuNDljMTIuNTEsMCwzMC42MS0yLjU4LDMwLjYxLTE3LjQ2YTE0LDE0LDAsMCwwLS4zMS0zLjQybC03LjQ1LTMyLjM2Yy0xLjcyLTcuMTItMy4yMy0xMC4zNS0xNS43My0xNi42QzEyNC44OSw4LjY5LDEwMy43Ni41LDk3LjUxLjUsOTEuNjkuNSw5MCw4LDgzLjA2LDhjLTYuNjgsMC0xMS42NC01LjYtMTcuODktNS42LTYsMC05LjkxLDQuMDktMTIuOTMsMTIuNSwwLDAtOC40MSwyMy43Mi05LjQ5LDI3LjE2QTYuNDMsNi40MywwLDAsMCw0Mi41Myw0NGMwLDkuMjIsMzYuMywzOS40NSw4NC45NCwzOS40NU0xNjAsNzIuMDdjMS43Myw4LjE5LDEuNzMsOS4wNSwxLjczLDEwLjEzLDAsMTQtMTUuNzQsMjEuNzctMzYuNDMsMjEuNzdDNzguNTQsMTA0LDM3LjU4LDc2LjYsMzcuNTgsNTguNDlhMTguNDUsMTguNDUsMCwwLDEsMS41MS03LjMzQzIyLjI3LDUyLC41LDU1LC41LDc0LjIyYzAsMzEuNDgsNzQuNTksNzAuMjgsMTMzLjY1LDcwLjI4LDQ1LjI4LDAsNTYuNy0yMC40OCw1Ni43LTM2LjY1LDAtMTIuNzItMTEtMjcuMTYtMzAuODMtMzUuNzgiLz48L3N2Zz4="), } - sampleModel3 := catalogOpenapi.CatalogModel{ - Name: "repo1/granite-3b-code-base", - Description: stringToPointer("Granite 3B code generation model for programming tasks"), - Provider: stringToPointer("provider1"), - Tasks: []string{"code-generation"}, - License: stringToPointer("apache-2.0"), - Maturity: stringToPointer("Generally Available"), - Language: []string{"en"}, - SourceId: stringToPointer("sample-source"), - Logo: 
stringToPointer("data:image/svg+xml;base64,PHN2ZyBpZD0iTGF5ZXJfMSIgZGF0YS1uYW1lPSJMYXllciAxIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxOTIgMTQ1Ij48ZGVmcz48c3R5bGU+LmNscy0xe2ZpbGw6I2UwMDt9PC9zdHlsZT48L2RlZnM+PHRpdGxlPlJlZEhhdC1Mb2dvLUhhdC1Db2xvcjwvdGl0bGU+PHBhdGggZD0iTTE1Ny43Nyw2Mi42MWExNCwxNCwwLDAsMSwuMzEsMy40MmMwLDE0Ljg4LTE4LjEsMTcuNDYtMzAuNjEsMTcuNDZDNzguODMsODMuNDksNDIuNTMsNTMuMjYsNDIuNTMsNDRhNi40Myw2LjQzLDAsMCwxLC4yMi0xLjk0bC0zLjY2LDkuMDZhMTguNDUsMTguNDUsMCwwLDAtMS41MSw3LjMzYzAsMTguMTEsNDEsNDUuNDgsODcuNzQsNDUuNDgsMjAuNjksMCwzNi40My03Ljc2LDM2LjQzLTIxLjc3LDAtMS4wOCwwLTEuOTQtMS43My0xMC4xM1oiLz48cGF0aCBjbGFzcz0iY2xzLTEiIGQ9Ik0xMjcuNDcsODMuNDljMTIuNTEsMCwzMC42MS0yLjU4LDMwLjYxLTE3LjQ2YTE0LDE0LDAsMCwwLS4zMS0zLjQybC03LjQ1LTMyLjM2Yy0xLjcyLTcuMTItMy4yMy0xMC4zNS0xNS43My0xNi42QzEyNC44OSw4LjY5LDEwMy43Ni41LDk3LjUxLjUsOTEuNjkuNSw5MCw4LDgzLjA2LDhjLTYuNjgsMC0xMS42NC01LjYtMTcuODktNS42LTYsMC05LjkxLDQuMDktMTIuOTMsMTIuNSwwLDAtOC40MSwyMy43Mi05LjQ5LDI3LjE2QTYuNDMsNi40MywwLDAsMCw0Mi41Myw0NGMwLDkuMjIsMzYuMywzOS40NSw4NC45NCwzOS40NU0xNjAsNzIuMDdjMS43Myw4LjE5LDEuNzMsOS4wNSwxLjczLDEwLjEzLDAsMTQtMTUuNzQsMjEuNzctMzYuNDMsMjEuNzdDNzguNTQsMTA0LDM3LjU4LDc2LjYsMzcuNTgsNTguNDlhMTguNDUsMTguNDUsMCwwLDEsMS41MS03LjMzQzIyLjI3LDUyLC41LDU1LC41LDc0LjIyYzAsMzEuNDgsNzQuNTksNzAuMjgsMTMzLjY1LDcwLjI4LDQ1LjI4LDAsNTYuNy0yMC40OCw1Ni43LTM2LjY1LDAtMTIuNzItMTEtMjcuMTYtMzAuODMtMzUuNzgiLz48L3N2Zz4="), + sampleModel3 := models.CatalogModel{ + Name: "repo1/granite-3b-code-base", + Description: stringToPointer("Granite 3B code generation model for programming tasks"), + Provider: stringToPointer("IBM"), + Tasks: []string{"audio-to-text", "text-to-text", "video-to-text"}, + License: stringToPointer("mit"), + Maturity: stringToPointer("Generally Available"), + Language: []string{"en"}, + SourceId: stringToPointer("sample-source"), + CustomProperties: catalogCustomProperties(), + Logo: 
stringToPointer("data:image/svg+xml;base64,PHN2ZyBpZD0iTGF5ZXJfMSIgZGF0YS1uYW1lPSJMYXllciAxIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxOTIgMTQ1Ij48ZGVmcz48c3R5bGU+LmNscy0xe2ZpbGw6I2UwMDt9PC9zdHlsZT48L2RlZnM+PHRpdGxlPlJlZEhhdC1Mb2dvLUhhdC1Db2xvcjwvdGl0bGU+PHBhdGggZD0iTTE1Ny43Nyw2Mi42MWExNCwxNCwwLDAsMSwuMzEsMy40MmMwLDE0Ljg4LTE4LjEsMTcuNDYtMzAuNjEsMTcuNDZDNzguODMsODMuNDksNDIuNTMsNTMuMjYsNDIuNTMsNDRhNi40Myw2LjQzLDAsMCwxLC4yMi0xLjk0bC0zLjY2LDkuMDZhMTguNDUsMTguNDUsMCwwLDAtMS41MSw3LjMzYzAsMTguMTEsNDEsNDUuNDgsODcuNzQsNDUuNDgsMjAuNjksMCwzNi40My03Ljc2LDM2LjQzLTIxLjc3LDAtMS4wOCwwLTEuOTQtMS43My0xMC4xM1oiLz48cGF0aCBjbGFzcz0iY2xzLTEiIGQ9Ik0xMjcuNDcsODMuNDljMTIuNTEsMCwzMC42MS0yLjU4LDMwLjYxLTE3LjQ2YTE0LDE0LDAsMCwwLS4zMS0zLjQybC03LjQ1LTMyLjM2Yy0xLjcyLTcuMTItMy4yMy0xMC4zNS0xNS43My0xNi42QzEyNC44OSw4LjY5LDEwMy43Ni41LDk3LjUxLjUsOTEuNjkuNSw5MCw4LDgzLjA2LDhjLTYuNjgsMC0xMS42NC01LjYtMTcuODktNS42LTYsMC05LjkxLDQuMDktMTIuOTMsMTIuNSwwLDAtOC40MSwyMy43Mi05LjQ5LDI3LjE2QTYuNDMsNi40MywwLDAsMCw0Mi41Myw0NGMwLDkuMjIsMzYuMywzOS40NSw4NC45NCwzOS40NU0xNjAsNzIuMDdjMS43Myw4LjE5LDEuNzMsOS4wNSwxLjczLDEwLjEzLDAsMTQtMTUuNzQsMjEuNzctMzYuNDMsMjEuNzdDNzguNTQsMTA0LDM3LjU4LDc2LjYsMzcuNTgsNTguNDlhMTguNDUsMTguNDUsMCwwLDEsMS41MS03LjMzQzIyLjI3LDUyLC41LDU1LC41LDc0LjIyYzAsMzEuNDgsNzQuNTksNzAuMjgsMTMzLjY1LDcwLjI4LDQ1LjI4LDAsNTYuNy0yMC40OCw1Ni43LTM2LjY1LDAtMTIuNzItMTEtMjcuMTYtMzAuODMtMzUuNzgiLz48L3N2Zz4="), } - huggingFaceModel1 := catalogOpenapi.CatalogModel{ + huggingFaceModel1 := models.CatalogModel{ Name: "provider2/bert-base-uncased", Description: stringToPointer("BERT base model (uncased) - Pretrained model on English language"), - Provider: stringToPointer("provider2"), - Tasks: []string{"fill-mask", "feature-extraction"}, + Provider: stringToPointer("Google"), + Tasks: []string{"audio-to-text", "text-to-text"}, License: stringToPointer("apache-2.0"), Maturity: stringToPointer("Generally Available"), Language: []string{"en"}, @@ -698,11 +711,11 @@ Granite 3.1 Instruct Models are primarily 
finetuned using instruction-response p LibraryName: stringToPointer("transformers"), } - huggingFaceModel2 := catalogOpenapi.CatalogModel{ + huggingFaceModel2 := models.CatalogModel{ Name: "provider3/gpt2", Description: stringToPointer("GPT-2 is a transformers model pretrained on a very large corpus of English data"), Provider: stringToPointer("provider3"), - Tasks: []string{"text-generation"}, + Tasks: []string{"video-to-text"}, License: stringToPointer("mit"), Maturity: stringToPointer("Generally Available"), Language: []string{"en"}, @@ -710,7 +723,7 @@ Granite 3.1 Instruct Models are primarily finetuned using instruction-response p LibraryName: stringToPointer("transformers"), } - huggingFaceModel3 := catalogOpenapi.CatalogModel{ + huggingFaceModel3 := models.CatalogModel{ Name: "huggingface/distilbert-base-uncased", Description: stringToPointer("DistilBERT base model (uncased) - A smaller, faster version of BERT"), Provider: stringToPointer("Hugging Face"), @@ -722,8 +735,8 @@ Granite 3.1 Instruct Models are primarily finetuned using instruction-response p LibraryName: stringToPointer("transformers"), } - otherModel1 := catalogOpenapi.CatalogModel{ - Name: "adminModel2/admin-model-2", + otherModel1 := models.CatalogModel{ + Name: "adminModel2/admin-model-1", Description: stringToPointer("sample description"), Provider: stringToPointer("Admin model 1"), Tasks: []string{"code-generation", "instruction-following"}, @@ -733,10 +746,10 @@ Granite 3.1 Instruct Models are primarily finetuned using instruction-response p SourceId: stringToPointer("adminModel2"), } - otherModel2 := catalogOpenapi.CatalogModel{ - Name: "adminModel1/admin-model-1", + otherModel2 := models.CatalogModel{ + Name: "adminModel1/admin-model-2", Description: stringToPointer("sample description"), - Provider: stringToPointer("Admin model 1"), + Provider: stringToPointer("Admin model 2"), Tasks: []string{"text-generation", "conversational"}, License: stringToPointer("apache-2.0"), Maturity: 
stringToPointer("Generally Available"), @@ -745,9 +758,9 @@ Granite 3.1 Instruct Models are primarily finetuned using instruction-response p } // added this to test the load more models button - var additionalRepo1Models []catalogOpenapi.CatalogModel + var additionalRepo1Models []models.CatalogModel for i := 1; i <= 20; i++ { - model := catalogOpenapi.CatalogModel{ + model := models.CatalogModel{ Name: fmt.Sprintf("repo1/granite-model-%d", i), Description: stringToPointer("Granite-8B-Code-Instruct is a 8B parameter model fine tuned from\nGranite-8B-Code-Base on a combination of permissively licensed instruction\ndata to enhance instruction following capabilities including logical\nreasoning and problem-solving skills."), Provider: stringToPointer("provider1"), @@ -756,7 +769,7 @@ Granite 3.1 Instruct Models are primarily finetuned using instruction-response p LicenseLink: stringToPointer("https://www.apache.org/licenses/LICENSE-2.0.txt"), Maturity: stringToPointer("Technology preview"), Language: []string{"ar", "cs", "de", "en", "es", "fr", "it", "ja", "ko", "nl", "pt", "zh"}, - Logo: 
stringToPointer("data:image/svg+xml;base64,PHN2ZyBpZD0iTGF5ZXJfMSIgZGF0YS1uYW1lPSJMYXllciAxIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxOTIgMTQ1Ij48ZGVmcz48c3R5bGU+LmNscy0xe2ZpbGw6I2UwMDt9PC9zdHlsZT48L2RlZnM+PHRpdGxlPlJlZEhhdC1Mb2dvLUhhdC1Db2xvcjwvdGl0bGU+PHBhdGggZD0iTTE1Ny43Nyw2Mi42MWExNCwxNCwwLDAsMSwuMzEsMy40MmMwLDE0Ljg4LTE4LjEsMTcuNDYtMzAuNjEsMTcuNDZDNzguODMsODMuNDksNDIuNTMsNTMuMjYsNDIuNTMsNDRhNi40Myw2LjQzLDAsMCwxLC4yMi0xLjk0bC0zLjY2LDkuMDZhMTguNDUsMTguNDUsMCwwLDAtMS41MSw3LjMzYzAsMTguMTEsNDEsNDUuNDgsODcuNzQsNDUuNDgsMjAuNjksMCwzNi40My03Ljc2LDM2LjQzLTIxLjc3LDAtMS4wOCwwLTEuOTQtMS43My0xMC4xM1oiLz48cGF0aCBjbGFzcz0iY2xzLTEiIGQ9Ik0xMjcuNDcsODMuNDljMTIuNTEsMCwzMC42MS0yLjU4LDMwLjYxLTE3LjQ2YTE0LDE0LDAsMCwwLS4zMS0zLjQybC03LjQ1LTMyLjM2Yy0xLjcyLTcuMTItMy4yMy0xMC4zNS0xNS43My0xNi42QzEyNC44OSw4LjY5LDEwMy43Ni41LDk3LjUxLjUsOTEuNjkuNSw5MCw4LDgzLjA2LDhjLTYuNjgsMC0xMS42NC01LjYtMTcuODktNS42LTYsMC05LjkxLDQuMDktMTIuOTMsMTIuNSwwLDAtOC40MSwyMy43Mi05LjQ5LDI3LjE2QTYuNDMsNi40MywwLDAsMCw0Mi41Myw0NGMwLDkuMjIsMzYuMywzOS40NSw4NC45NCwzOS00NU0xNjAsNzIuMDdjMS43Myw4LjE5LDEuNzMsOS4wNSwxLjczLDEwLjEzLDAsMTQtMTUuNzQsMjEuNzctMzYuNDMsMjEuNzdDNzguNTQsMTA0LDM3LjU4LDc2LjYsMzcuNTgsNTguNDlhMTguNDUsMTguNDUsMCwwLDEsMS41MS03LjMzQzIyLjI3LDUyLC41LDU1LC41LDc0LjIyYzAsMzEuNDgsNzQuNTksNzAuMjgsMTMzLjY1LDcwLjI4LDQ1LjI4LDAsNTYuNy0yMC40OCw1Ni43LTM2LjY1LDAtMTIuNzItMTEtMjcuMTYtMzAuODMtMzUuNzgiLz48L3N2Zz4="), + Logo: 
stringToPointer("data:image/svg+xml;base64,PHN2ZyBpZD0iTGF5ZXJfMSIgZGF0YS1uYW1lPSJMYXllciAxIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxOTIgMTQ1Ij48ZGVmcz48c3R5bGU+LmNscy0xe2ZpbGw6I2UwMDt9PC9zdHlsZT48L2RlZnM+PHRpdGxlPlJlZEhhdC1Mb2dvLUhhdC1Db2xvcjwvdGl0bGU+PHBhdGggZD0iTTE1Ny43Nyw2Mi42MWExNCwxNCwwLDAsMSwuMzEsMy40MmMwLDE0Ljg4LTE4LjEsMTcuNDYtMzAuNjEsMTcuNDZDNzguODMsODMuNDksNDIuNTMsNTMuMjYsNDIuNTMsNDRhNi40Myw2LjQzLDAsMCwxLC4yMi0xLjk0bC0zLjY2LDkuMDZhMTguNDUsMTguNDUsMCwwLDAtMS41MSw3LjMzYzAsMTguMTEsNDEsNDUuNDgsODcuNzQsNDUuNDgsMjAuNjksMCwzNi40My03Ljc2LDM2LjQzLTIxLjc3LDAtMS4wOCwwLTEuOTQtMS43My0xMC4xM1oiLz48cGF0aCBjbGFzcz0iY2xzLTEiIGQ9Ik0xMjcuNDcsODMuNDljMTIuNTEsMCwzMC42MS0yLjU4LDMwLjYxLTE3LjQ2YTE0LDE0LDAsMCwwLS4zMS0zLjQybC03LjQ1LTMyLjM2Yy0xLjcyLTcuMTItMy4yMy0xMC4zNS0xNS43My0xNi42QzEyNC44OSw4LjY5LDEwMy43Ni41LDk3LjUxLjUsOTEuNjkuNSw5MCw4LDgzLjA2LDhjLTYuNjgsMC0xMS42NC01LjYtMTcuODktNS42LTYsMC05LjkxLDQuMDktMTIuOTMsMTIuNSwwLDAtOC40MSwyMy43Mi05LjQ5LDI3LjE2QTYuNDMsNi40MywwLDAsMCw0Mi41Myw0NGMwLDkuMjIsMzYuMywzOS40NSw4NC45NCwzOS40NU0xNjAsNzIuMDdjMS43Myw4LjE5LDEuNzMsOS4wNSwxLjczLDEwLjEzLDAsMTQtMTUuNzQsMjEuNzctMzYuNDMsMjEuNzdDNzguNTQsMTA0LDM3LjU4LDc2LjYsMzcuNTgsNTguNDlhMTguNDUsMTguNDUsMCwwLDEsMS41MS03LjMzQzIyLjI3LDUyLC41LDU1LC41LDc0LjIyYzAsMzEuNDgsNzQuNTksNzAuMjgsMTMzLjY1LDcwLjI4LDQ1LjI4LDAsNTYuNy0yMC40OCw1Ni43LTM2LjY1LDAtMTIuNzItMTEtMjcuMTYtMzAuODMtMzUuNzgiLz48L3N2Zz4="), SourceId: stringToPointer("sample-source"), LibraryName: stringToPointer("transformers"), CreateTimeSinceEpoch: stringToPointer("1693526400000"), @@ -766,7 +779,7 @@ Granite 3.1 Instruct Models are primarily finetuned using instruction-response p additionalRepo1Models = append(additionalRepo1Models, model) } - allModels := []catalogOpenapi.CatalogModel{ + allModels := []models.CatalogModel{ sampleModel1, sampleModel2, sampleModel3, huggingFaceModel1, huggingFaceModel2, huggingFaceModel3, otherModel1, otherModel2, @@ -776,10 +789,10 @@ Granite 3.1 Instruct Models are primarily finetuned using 
instruction-response p return allModels } -func GetCatalogModelListMock() catalogOpenapi.CatalogModelList { +func GetCatalogModelListMock() models.CatalogModelList { allModels := GetCatalogModelMocks() - return catalogOpenapi.CatalogModelList{ + return models.CatalogModelList{ Items: allModels, Size: int32(len(allModels)), PageSize: int32(10), @@ -787,40 +800,69 @@ func GetCatalogModelListMock() catalogOpenapi.CatalogModelList { } } -func GetCatalogSourceMocks() []catalogOpenapi.CatalogSource { +func GetCatalogSourceMocks() []models.CatalogSource { enabled := true - disabled := false - return []catalogOpenapi.CatalogSource{ + disabledBool := false + + // Status examples (matching OpenAPI spec) + availableStatus := "available" + errorStatus := "error" + disabledStatus := "disabled" + + invalidCredentialError := "The provided API key is invalid or has expired. Please update your credentials." + invalidOrgError := "The specified organization 'invalid-org' does not exist or you don't have access to it." 
+ + return []models.CatalogSource{ { Id: "sample-source", Name: "Sample mocked source", Enabled: &enabled, + Labels: []string{"Sample category 1", "Sample category 2", "Sample category"}, + Status: &availableStatus, }, { - Id: "huggingface", - Name: "Hugging Face", + Id: "huggingface", + Name: "Hugging Face", + Labels: []string{"Sample category 2", "Sample category"}, + // Status is nil - represents "Starting" state (no status yet) + Status: nil, }, { Id: "adminModel1", Name: "Admin model 1", Enabled: &enabled, + Labels: []string{}, + Status: &errorStatus, + Error: &invalidCredentialError, }, { Id: "adminModel2", Name: "Admin model 2", - Enabled: &disabled, + Enabled: &enabled, + Labels: []string{"Sample category 1"}, + Status: &errorStatus, + Error: &invalidOrgError, + }, + { + Id: "dora", + Name: "Dora source", + Labels: []string{}, + Status: &availableStatus, }, { - Id: "dora", - Name: "Dora source", + Id: "adminModel3", + Name: "Admin model 3", + Enabled: &disabledBool, + Labels: []string{}, + Status: &disabledStatus, }, } } -func GetCatalogSourceListMock() catalogOpenapi.CatalogSourceList { +func GetCatalogSourceListMock() models.CatalogSourceList { allSources := GetCatalogSourceMocks() - return catalogOpenapi.CatalogSourceList{ + return models.CatalogSourceList{ Items: allSources, Size: int32(len(allSources)), PageSize: int32(10), @@ -828,24 +870,569 @@ func GetCatalogSourceListMock() catalogOpenapi.CatalogSourceList { } } -func GetCatalogModelArtifactMock() []catalogOpenapi.CatalogModelArtifact { - return []catalogOpenapi.CatalogModelArtifact{ +func GetCatalogModelArtifactMock() []models.CatalogArtifact { + return []models.CatalogArtifact{ { - Uri: "oci://registry.sample.io/repo1/modelcar-granite-7b-starter:1.4.0", + ArtifactType: "model-artifact", + Uri: stringToPointer("oci://registry.sample.io/repo1/modelcar-granite-7b-starter:1.4.0"), + CreateTimeSinceEpoch: stringToPointer("1693526400000"), + + LastUpdateTimeSinceEpoch: stringToPointer("1704067200000"), 
+ CustomProperties: newCustomProperties(), + }, + } +} + +func performanceMetricsCustomProperties(customProperties map[string]openapi.MetadataValue) *map[string]openapi.MetadataValue { + result := map[string]openapi.MetadataValue{ + "config_id": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "0055d94f6547237dgf324238", + MetadataType: "MetadataStringValue", + }, + }, + "ttft_mean": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 35.48818160947744, + MetadataType: "MetadataDoubleValue", + }, + }, + "ttft_p90": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 51.55777931213379, + MetadataType: "MetadataDoubleValue", + }, + }, + "ttft_p95": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 61.26761436462402, + MetadataType: "MetadataDoubleValue", + }, + }, + "ttft_p99": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 72.95823097229004, + MetadataType: "MetadataDoubleValue", + }, + }, + "e2e_mean": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 1994.480013381083, + MetadataType: "MetadataDoubleValue", + }, + }, + "e2e_p90": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 2644.604682922363, + MetadataType: "MetadataDoubleValue", + }, + }, + "e2e_p95": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 2813.79246711731, + MetadataType: "MetadataDoubleValue", + }, + }, + "e2e_p99": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 3117.565155029297, + MetadataType: "MetadataDoubleValue", + }, + }, + "tps_mean": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 1785.325259154939, + MetadataType: "MetadataDoubleValue", + }, + }, + "tps_p90": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 3318.278481012658, + MetadataType: "MetadataDoubleValue", + }, + }, + "tps_p95": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 
4934.475294117647, + MetadataType: "MetadataDoubleValue", + }, + }, + "tps_p99": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 11781.75280898876, + MetadataType: "MetadataDoubleValue", + }, + }, + "itl_mean": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 7.685877762379151, + MetadataType: "MetadataDoubleValue", + }, + }, + "itl_p90": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 7.778410935521725, + MetadataType: "MetadataDoubleValue", + }, + }, + "itl_p95": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 7.812754891135476, + MetadataType: "MetadataDoubleValue", + }, + }, + "itl_p99": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 7.9100158577958, + MetadataType: "MetadataDoubleValue", + }, + }, + "requests_per_second": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 7, + MetadataType: "MetadataDoubleValue", + }, + }, + "max_input_tokens": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 1024, + MetadataType: "MetadataDoubleValue", + }, + }, + "max_output_tokens": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 1, + MetadataType: "MetadataDoubleValue", + }, + }, + "hardware_type": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "H100", + MetadataType: "MetadataStringValue", + }, + }, + "hardware_count": { + MetadataIntValue: &openapi.MetadataIntValue{ + IntValue: "2", + MetadataType: "MetadataIntValue", + }, + }, + "framework": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "vllm", + MetadataType: "MetadataStringValue", + }, + }, + "framework_version": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "v0.1.1", + MetadataType: "MetadataStringValue", + }, + }, + "docker_image": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "vllm/vllm-openai:v0.1.1", + MetadataType: "MetadataStringValue", + }, + }, + 
"entrypoint": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "\npython3\n", + }, + }, + "inserted_at": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "2025-05-07T00:00:00.000Z", + MetadataType: "MetadataStringValue", + }, + }, + "created_at": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "2025-05-07T00:00:00.000Z", + MetadataType: "MetadataStringValue", + }, + }, + "updated_at": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "2025-05-14T12:08:25.402Z", + MetadataType: "MetadataStringValue", + }, + }, + "mean_input_tokens": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 511.5445458496306, + MetadataType: "MetadataDoubleValue", + }, + }, + "mean_output_tokens": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 255.8678835289005, + MetadataType: "MetadataDoubleValue", + }, + }, + "model_hf_repo_name": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "provider1-granite/granite-3.1-8b-instruct", + }, + }, + } + for key, value := range customProperties { + result[key] = value + } + return &result +} + +func GetCatalogPerformanceMetricsArtifactMock(itemCount int32) []models.CatalogArtifact { + artifacts := []models.CatalogArtifact{ + { + ArtifactType: *stringToPointer("metrics-artifact"), + MetricsType: stringToPointer("performance-metrics"), CreateTimeSinceEpoch: stringToPointer("1693526400000"), LastUpdateTimeSinceEpoch: stringToPointer("1704067200000"), - CustomProperties: catalogCustomProperties(), + CustomProperties: performanceMetricsCustomProperties(map[string]openapi.MetadataValue{ + "use_case": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "chatbot", + MetadataType: "MetadataStringValue", + }, + }, + }), + }, + { + ArtifactType: *stringToPointer("metrics-artifact"), + MetricsType: stringToPointer("performance-metrics"), + CreateTimeSinceEpoch: stringToPointer("1693526400000"), + 
LastUpdateTimeSinceEpoch: stringToPointer("1704067200000"), + CustomProperties: performanceMetricsCustomProperties(map[string]openapi.MetadataValue{ + "hardware_type": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "RTX 4090", + MetadataType: "MetadataStringValue", + }, + }, + "hardware_count": { + MetadataIntValue: &openapi.MetadataIntValue{ + IntValue: "33", + MetadataType: "MetadataIntValue", + }, + }, + "requests_per_second": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 10, + MetadataType: "MetadataDoubleValue", + }, + }, + "ttft_mean": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 67.14892749816, + MetadataType: "MetadataDoubleValue", + }, + }, + "use_case": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "rag", + MetadataType: "MetadataStringValue", + }, + }, + }), + }, + { + ArtifactType: *stringToPointer("metrics-artifact"), + MetricsType: stringToPointer("performance-metrics"), + CreateTimeSinceEpoch: stringToPointer("1693526400000"), + LastUpdateTimeSinceEpoch: stringToPointer("1704067200000"), + CustomProperties: performanceMetricsCustomProperties(map[string]openapi.MetadataValue{ + "hardware_type": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "A100", + MetadataType: "MetadataStringValue", + }, + }, + "hardware_count": { + MetadataIntValue: &openapi.MetadataIntValue{ + IntValue: "40", + MetadataType: "MetadataIntValue", + }, + }, + "requests_per_second": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 15, + MetadataType: "MetadataDoubleValue", + }, + }, + "ttft_mean": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 42.123791232, + MetadataType: "MetadataDoubleValue", + }, + }, + "use_case": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "code_fixing", + MetadataType: "MetadataStringValue", + }, + }, + }), + }, + { + ArtifactType: *stringToPointer("metrics-artifact"), + 
MetricsType: stringToPointer("performance-metrics"), + CreateTimeSinceEpoch: stringToPointer("1693526400000"), + LastUpdateTimeSinceEpoch: stringToPointer("1704067200000"), + CustomProperties: performanceMetricsCustomProperties(map[string]openapi.MetadataValue{ + "hardware_type": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "A100", + MetadataType: "MetadataStringValue", + }, + }, + "hardware_count": { + MetadataIntValue: &openapi.MetadataIntValue{ + IntValue: "8", + MetadataType: "MetadataIntValue", + }, + }, + "requests_per_second": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 25, + MetadataType: "MetadataDoubleValue", + }, + }, + "ttft_mean": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 28.5, + MetadataType: "MetadataDoubleValue", + }, + }, + "use_case": { + MetadataStringValue: &openapi.MetadataStringValue{ + StringValue: "long_rag", + MetadataType: "MetadataStringValue", + }, + }, + }), + }, + } + artifacts = artifacts[:itemCount] + return artifacts +} + +func accuracyMetricsCustomProperties() *map[string]openapi.MetadataValue { + result := map[string]openapi.MetadataValue{ + "overall_average": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 0.584329, + MetadataType: "MetadataDoubleValue", + }, + }, + "arc_v1": { + MetadataDoubleValue: &openapi.MetadataDoubleValue{ + DoubleValue: 0.674673, + MetadataType: "MetadataDoubleValue", + }, }, } + return &result } -func GetCatalogModelArtifactListMock() catalogOpenapi.CatalogModelArtifactList { +func GetCatalogAccuracyMetricsArtifactMock() []models.CatalogArtifact { + return []models.CatalogArtifact{ + { + ArtifactType: *stringToPointer("metrics-artifact"), + MetricsType: stringToPointer("accuracy-metrics"), + CreateTimeSinceEpoch: stringToPointer("1693526400000"), + LastUpdateTimeSinceEpoch: stringToPointer("1704067200000"), + CustomProperties: accuracyMetricsCustomProperties(), + }, + } +} +func 
GetModelArtifactListMockWithItems(items []models.CatalogArtifact, pageSize int32) models.CatalogModelArtifactList { + return models.CatalogModelArtifactList{ + Items: items, + Size: int32(len(items)), + PageSize: pageSize, + NextPageToken: "", + } +} + +func GetCatalogModelArtifactListMock() models.CatalogModelArtifactList { allArtifactMock := GetCatalogModelArtifactMock() + return GetModelArtifactListMockWithItems(allArtifactMock, 10) +} - return catalogOpenapi.CatalogModelArtifactList{ - Items: allArtifactMock, - Size: int32(len(allArtifactMock)), - PageSize: int32(10), +func GetCatalogPerformanceMetricsArtifactListMock(itemCount int32) models.CatalogModelArtifactList { + allArtifactMock := GetCatalogPerformanceMetricsArtifactMock(itemCount) + return GetModelArtifactListMockWithItems(allArtifactMock, 10) +} + +func GetCatalogAccuracyMetricsArtifactListMock() models.CatalogModelArtifactList { + allArtifactMock := GetCatalogAccuracyMetricsArtifactMock() + return GetModelArtifactListMockWithItems(allArtifactMock, 10) +} + +const ( + FilterOptionTypeString = "string" + FilterOptionTypeNumber = "number" +) + +func float32Ptr(i float32) *float32 { + return &i +} + +func GetFilterOptionMocks() map[string]models.FilterOption { + filterOptions := make(map[string]models.FilterOption) + + filterOptions["provider"] = models.FilterOption{ + Type: FilterOptionTypeString, + Values: []interface{}{ + "Red Hat", "IBM", "Google", + }, + } + + filterOptions["license"] = models.FilterOption{ + Type: FilterOptionTypeString, + Values: []interface{}{ + "apache-2.0", + "mit", + }, + } + + filterOptions["tasks"] = models.FilterOption{ + Type: FilterOptionTypeString, + Values: []interface{}{ + "audio-to-text", "image-to-text", "image-text-to-text", "text-generation", "text-to-text", "video-to-text", + }, + } + + // String type filter for programming languages supported + filterOptions["language"] = models.FilterOption{ + Type: FilterOptionTypeString, + Values: []interface{}{ + "ar", "cs", 
"de", "en", "es", "fr", "it", "ja", "ko", "nl", "pt", "zh", + }, + } + + // String type filter for use cases + filterOptions["use_case"] = models.FilterOption{ + Type: FilterOptionTypeString, + Values: []interface{}{ + "chatbot", "code_fixing", "long_rag", "rag", + }, + } + + // String type filter for use cases + filterOptions["use_case"] = models.FilterOption{ + Type: FilterOptionTypeString, + Values: []interface{}{ + "chatbot", "code_fixing", "long_rag", "rag", + }, + } + + filterOptions["ttft_mean"] = models.FilterOption{ + Type: FilterOptionTypeNumber, + Range: &models.FilterRange{ + Min: float32Ptr(1), + Max: float32Ptr(100), + }, + } + + return filterOptions +} + +func GetFilterOptionsListMock() models.FilterOptionsList { + filterOptions := GetFilterOptionMocks() + + return models.FilterOptionsList{ + Filters: &filterOptions, + } +} + +func CreateSampleCatalogSource(id string, name string, catalogType string) models.CatalogSourceConfig { + defaultCatalog := id == "catalog-1" + + sourceConfig := models.CatalogSourceConfig{ + Name: name, + Id: id, + Type: catalogType, + Enabled: BoolPtr(true), + Labels: []string{"source-1"}, + IsDefault: &defaultCatalog, + } + + if !defaultCatalog { + sourceConfig.IncludedModels = []string{"rhelai1/modelcar-granite-7b-starter"} + sourceConfig.ExcludedModels = []string{"model-a:1.0", "model-b:*"} + } + + switch catalogType { + case "yaml": + sourceConfig.Yaml = stringToPointer("models:\n - name: model1") + case "huggingface": + sourceConfig.AllowedOrganization = stringToPointer("org1") + sourceConfig.ApiKey = stringToPointer("apikey") + } + + return sourceConfig +} + +func BoolPtr(b bool) *bool { + return &b +} + +func GetCatalogSourceConfigsMocks() []models.CatalogSourceConfig { + return []models.CatalogSourceConfig{ + CreateSampleCatalogSource("catalog-1", "Default Catalog", "yaml"), + CreateSampleCatalogSource("catalog-2", "HuggingFace Catalog", "huggingface"), + CreateSampleCatalogSource("catalog-3", "Custom Catalog", 
"yaml"), + } +} + +func GetCatalogSourceConfigListMock() models.CatalogSourceConfigList { + allCatalogSourceConfigs := GetCatalogSourceConfigsMocks() + + return models.CatalogSourceConfigList{ + Catalogs: allCatalogSourceConfigs, + } +} + +func GetModelsWithInclusionStatusListMocks() []models.CatalogSourcePreviewModel { + return []models.CatalogSourcePreviewModel{ + { + Name: "sample-source/granite", + Included: true, + }, + { + Name: "sample-source/model-1", + Included: true, + }, + { + Name: "adminModel1/model-2", + Included: true, + }, + { + Name: "adminModel1/model-3", + Included: false, + }, + } +} + +func GetCatalogSourcePreviewSummaryMock() models.CatalogSourcePreviewSummary { + return models.CatalogSourcePreviewSummary{ + TotalModels: 1500, + IncludedModels: 850, + ExcludedModels: 650, + } +} + +func CreateCatalogSourcePreviewMock() models.CatalogSourcePreviewResult { + catalogModelPreview := GetModelsWithInclusionStatusListMocks() + catalogSourcePreviewSummary := GetCatalogSourcePreviewSummaryMock() + + return models.CatalogSourcePreviewResult{ + Items: catalogModelPreview, + Summary: catalogSourcePreviewSummary, NextPageToken: "", + PageSize: int32(10), + Size: int32(len(catalogModelPreview)), } } diff --git a/clients/ui/bff/internal/models/catalog_model_artifact_list.go b/clients/ui/bff/internal/models/catalog_model_artifact_list.go index 54ba14d6ec..5094ba51a6 100644 --- a/clients/ui/bff/internal/models/catalog_model_artifact_list.go +++ b/clients/ui/bff/internal/models/catalog_model_artifact_list.go @@ -9,11 +9,29 @@ type CatalogModelArtifact struct { LastUpdateTimeSinceEpoch *string `json:"lastUpdateTimeSinceEpoch,omitempty"` Uri string `json:"uri"` CustomProperties *map[string]openapi.MetadataValue `json:"customProperties,omitempty"` + ArtifactType string `json:"artifactType"` +} + +type CatalogMetricsArtifact struct { + ArtifactType string `json:"artifactType"` + MetricsType *string `json:"metricsType"` + CreateTimeSinceEpoch *string 
`json:"createTimeSinceEpoch,omitempty"` + CustomProperties *map[string]openapi.MetadataValue `json:"customProperties,omitempty"` + LastUpdateTimeSinceEpoch *string `json:"lastUpdateTimeSinceEpoch,omitempty"` +} + +type CatalogArtifact struct { + ArtifactType string `json:"artifactType"` + MetricsType *string `json:"metricsType,omitempty"` + Uri *string `json:"uri,omitempty"` + CreateTimeSinceEpoch *string `json:"createTimeSinceEpoch,omitempty"` + CustomProperties *map[string]openapi.MetadataValue `json:"customProperties,omitempty"` + LastUpdateTimeSinceEpoch *string `json:"lastUpdateTimeSinceEpoch,omitempty"` } type CatalogModelArtifactList struct { - NextPageToken string `json:"nextPageToken"` - PageSize int32 `json:"pageSize"` - Size int32 `json:"size"` - Items []CatalogModelArtifact `json:"items"` + NextPageToken string `json:"nextPageToken"` + PageSize int32 `json:"pageSize"` + Size int32 `json:"size"` + Items []CatalogArtifact `json:"items"` } diff --git a/clients/ui/bff/internal/models/catalog_model_list.go b/clients/ui/bff/internal/models/catalog_model_list.go index c699b41dc8..211eca868d 100644 --- a/clients/ui/bff/internal/models/catalog_model_list.go +++ b/clients/ui/bff/internal/models/catalog_model_list.go @@ -5,21 +5,21 @@ import ( ) type CatalogModel struct { - CreateTimeSinceEpoch *string `json:"createTimeSinceEpoch,omitempty"` - CustomProperties *map[string]*openapi.MetadataValue `json:"customProperties,omitempty"` - Description *string `json:"description,omitempty"` - Language []string `json:"language,omitempty"` - LastUpdateTimeSinceEpoch *string `json:"lastUpdateTimeSinceEpoch,omitempty"` - LibraryName *string `json:"libraryName,omitempty"` - License *string `json:"license,omitempty"` - LicenseLink *string `json:"licenseLink,omitempty"` - Logo *string `json:"logo,omitempty"` - Maturity *string `json:"maturity,omitempty"` - Name string `json:"name"` - Provider *string `json:"provider,omitempty"` - Readme *string `json:"readme,omitempty"` - 
SourceId *string `json:"sourceId,omitempty"` - Tasks []string `json:"tasks,omitempty"` + CreateTimeSinceEpoch *string `json:"createTimeSinceEpoch,omitempty"` + CustomProperties *map[string]openapi.MetadataValue `json:"customProperties,omitempty"` + Description *string `json:"description,omitempty"` + Language []string `json:"language,omitempty"` + LastUpdateTimeSinceEpoch *string `json:"lastUpdateTimeSinceEpoch,omitempty"` + LibraryName *string `json:"libraryName,omitempty"` + License *string `json:"license,omitempty"` + LicenseLink *string `json:"licenseLink,omitempty"` + Logo *string `json:"logo,omitempty"` + Maturity *string `json:"maturity,omitempty"` + Name string `json:"name"` + Provider *string `json:"provider,omitempty"` + Readme *string `json:"readme,omitempty"` + SourceId *string `json:"source_id,omitempty"` + Tasks []string `json:"tasks,omitempty"` } type CatalogModelList struct { @@ -28,3 +28,20 @@ type CatalogModelList struct { Size int32 `json:"size"` Items []CatalogModel `json:"items"` } + +type FilterRange struct { + Max *float32 `json:"max,omitempty"` + Min *float32 `json:"min,omitempty"` +} + +type FilterOption struct { + Range *FilterRange `json:"range,omitempty"` + Type FilterOptionType `json:"type"` + Values []interface{} `json:"values,omitempty"` +} + +type FilterOptionType string + +type FilterOptionsList struct { + Filters *map[string]FilterOption `json:"filters,omitempty"` +} diff --git a/clients/ui/bff/internal/models/catalog_source_list.go b/clients/ui/bff/internal/models/catalog_source_list.go index 995a01df6d..e746c5baf6 100644 --- a/clients/ui/bff/internal/models/catalog_source_list.go +++ b/clients/ui/bff/internal/models/catalog_source_list.go @@ -1,8 +1,12 @@ package models type CatalogSource struct { - Id string `json:"id"` - Name string `json:"name"` + Id string `json:"id"` + Name string `json:"name"` + Enabled *bool `json:"enabled,omitempty"` + Labels []string `json:"labels"` + Status *string `json:"status,omitempty"` + Error 
*string `json:"error,omitempty"` } type CatalogSourceList struct { diff --git a/clients/ui/bff/internal/models/catalog_source_preview.go b/clients/ui/bff/internal/models/catalog_source_preview.go new file mode 100644 index 0000000000..80f62c3fc1 --- /dev/null +++ b/clients/ui/bff/internal/models/catalog_source_preview.go @@ -0,0 +1,27 @@ +package models + +type CatalogSourcePreviewRequest struct { + Type string `json:"type"` + IncludedModels []string `json:"includedModels,omitempty"` + ExcludedModels []string `json:"excludedModels,omitempty"` + Properties map[string]interface{} `json:"properties,omitempty"` +} + +type CatalogSourcePreviewModel struct { + Name string `json:"name"` + Included bool `json:"included"` +} + +type CatalogSourcePreviewSummary struct { + TotalModels int32 `json:"totalModels"` + IncludedModels int32 `json:"includedModels"` + ExcludedModels int32 `json:"excludedModels"` +} + +type CatalogSourcePreviewResult struct { + Items []CatalogSourcePreviewModel `json:"items"` + Summary CatalogSourcePreviewSummary `json:"summary"` + NextPageToken string `json:"nextPageToken"` + PageSize int32 `json:"pageSize"` + Size int32 `json:"size"` +} diff --git a/clients/ui/bff/internal/models/model_catalog_source_configs.go b/clients/ui/bff/internal/models/model_catalog_source_configs.go new file mode 100644 index 0000000000..db6832eecf --- /dev/null +++ b/clients/ui/bff/internal/models/model_catalog_source_configs.go @@ -0,0 +1,21 @@ +package models + +type CatalogSourceConfig struct { + Id string `json:"id"` + Name string `json:"name"` + Type string `json:"type"` + Enabled *bool `json:"enabled,omitempty"` + Labels []string `json:"labels"` + ApiKey *string `json:"apiKey,omitempty"` + AllowedOrganization *string `json:"allowedOrganization,omitempty"` + IncludedModels []string `json:"includedModels,omitempty"` + ExcludedModels []string `json:"excludedModels,omitempty"` + IsDefault *bool `json:"isDefault,omitempty"` + Yaml *string `json:"yaml,omitempty"` +} + +type 
CatalogSourceConfigPayload = CatalogSourceConfig + +type CatalogSourceConfigList struct { + Catalogs []CatalogSourceConfig `json:"catalogs,omitempty"` +} diff --git a/clients/ui/bff/internal/models/model_registry.go b/clients/ui/bff/internal/models/model_registry.go index dd190ca316..98442bd934 100644 --- a/clients/ui/bff/internal/models/model_registry.go +++ b/clients/ui/bff/internal/models/model_registry.go @@ -7,3 +7,10 @@ type ModelRegistryModel struct { ServerAddress string `json:"serverAddress"` IsHTTPS bool `json:"isHttps"` } + +// ServiceAuthorizationContext holds the authorization decision context +type ServiceAuthorizationContext struct { + AllowList bool + AllowedServiceNames []string + Namespace string +} diff --git a/clients/ui/bff/internal/repositories/catalog_models.go b/clients/ui/bff/internal/repositories/catalog_models.go index 8e273a9b10..0bdde3e6aa 100644 --- a/clients/ui/bff/internal/repositories/catalog_models.go +++ b/clients/ui/bff/internal/repositories/catalog_models.go @@ -3,28 +3,29 @@ package repositories import ( "encoding/json" "fmt" - "github.com/kubeflow/model-registry/catalog/pkg/openapi" - "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" "net/url" + + "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" + "github.com/kubeflow/model-registry/ui/bff/internal/models" ) const catalogModelsPath = "/models" type CatalogModelsInterface interface { - GetAllCatalogModelsAcrossSources(client httpclient.HTTPClientInterface, pageValues url.Values) (*openapi.CatalogModelList, error) + GetAllCatalogModelsAcrossSources(client httpclient.HTTPClientInterface, pageValues url.Values) (*models.CatalogModelList, error) } type CatalogModels struct { CatalogModelsInterface } -func (a CatalogModels) GetAllCatalogModelsAcrossSources(client httpclient.HTTPClientInterface, pageValues url.Values) (*openapi.CatalogModelList, error) { +func (a CatalogModels) GetAllCatalogModelsAcrossSources(client 
httpclient.HTTPClientInterface, pageValues url.Values) (*models.CatalogModelList, error) { responseData, err := client.GET(UrlWithPageParams(catalogModelsPath, pageValues)) if err != nil { return nil, fmt.Errorf("error fetching sourcesPath: %w", err) } - var models openapi.CatalogModelList + var models models.CatalogModelList if err := json.Unmarshal(responseData, &models); err != nil { return nil, fmt.Errorf("error decoding response data: %w", err) diff --git a/clients/ui/bff/internal/repositories/catalog_source_preview.go b/clients/ui/bff/internal/repositories/catalog_source_preview.go new file mode 100644 index 0000000000..0ccb3ff266 --- /dev/null +++ b/clients/ui/bff/internal/repositories/catalog_source_preview.go @@ -0,0 +1,20 @@ +package repositories + +import ( + "fmt" + + "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" + "github.com/kubeflow/model-registry/ui/bff/internal/models" +) + +type CatalogSourcePreviewInterface interface { + CreateCatalogSourcePreview(client httpclient.HTTPClientInterface, sourcePreviewPaylod models.CatalogSourcePreviewRequest) (*models.CatalogSourcePreviewResult, error) +} + +type CatalogSourcePreview struct { + CatalogSourcePreviewInterface +} + +func (a CatalogSourcePreview) CreateCatalogSourcePreview(client httpclient.HTTPClientInterface, sourcePreviewPaylod models.CatalogSourcePreviewRequest) (*models.CatalogSourcePreviewResult, error) { + return nil, fmt.Errorf("not implemented yet") +} diff --git a/clients/ui/bff/internal/repositories/catalog_sources.go b/clients/ui/bff/internal/repositories/catalog_sources.go index ddeb7116a0..6661fab3f9 100644 --- a/clients/ui/bff/internal/repositories/catalog_sources.go +++ b/clients/ui/bff/internal/repositories/catalog_sources.go @@ -5,29 +5,31 @@ import ( "fmt" "net/url" - "github.com/kubeflow/model-registry/catalog/pkg/openapi" "github.com/kubeflow/model-registry/ui/bff/internal/integrations/httpclient" + 
"github.com/kubeflow/model-registry/ui/bff/internal/models" ) const sourcesPath = "/sources" +const filterOptionPath = "/models/filter_options" type CatalogSourcesInterface interface { - GetAllCatalogSources(client httpclient.HTTPClientInterface, pageValues url.Values) (*openapi.CatalogSourceList, error) - GetCatalogSourceModel(client httpclient.HTTPClientInterface, sourceId string, modelName string) (*openapi.CatalogModel, error) - GetCatalogModelArtifacts(client httpclient.HTTPClientInterface, sourceId string, modelName string) (*openapi.CatalogModelArtifactList, error) + GetAllCatalogSources(client httpclient.HTTPClientInterface, pageValues url.Values) (*models.CatalogSourceList, error) + GetCatalogSourceModel(client httpclient.HTTPClientInterface, sourceId string, modelName string) (*models.CatalogModel, error) + GetCatalogSourceModelArtifacts(client httpclient.HTTPClientInterface, sourceId string, modelName string) (*models.CatalogModelArtifactList, error) + GetCatalogFilterOptions(client httpclient.HTTPClientInterface) (*models.FilterOptionsList, error) } type CatalogSources struct { CatalogSourcesInterface } -func (a CatalogSources) GetAllCatalogSources(client httpclient.HTTPClientInterface, pageValues url.Values) (*openapi.CatalogSourceList, error) { +func (a CatalogSources) GetAllCatalogSources(client httpclient.HTTPClientInterface, pageValues url.Values) (*models.CatalogSourceList, error) { responseData, err := client.GET(UrlWithPageParams(sourcesPath, pageValues)) if err != nil { return nil, fmt.Errorf("error fetching sourcesPath: %w", err) } - var sources openapi.CatalogSourceList + var sources models.CatalogSourceList if err := json.Unmarshal(responseData, &sources); err != nil { return nil, fmt.Errorf("error decoding response data: %w", err) @@ -36,7 +38,7 @@ func (a CatalogSources) GetAllCatalogSources(client httpclient.HTTPClientInterfa return &sources, nil } -func (a CatalogSources) GetCatalogSourceModel(client httpclient.HTTPClientInterface, 
sourceId string, modelName string) (*openapi.CatalogModel, error) { +func (a CatalogSources) GetCatalogSourceModel(client httpclient.HTTPClientInterface, sourceId string, modelName string) (*models.CatalogModel, error) { path, err := url.JoinPath(sourcesPath, sourceId, "models", modelName) if err != nil { @@ -47,7 +49,7 @@ func (a CatalogSources) GetCatalogSourceModel(client httpclient.HTTPClientInterf return nil, fmt.Errorf("error fetching sourcesPath: %w", err) } - var catalogModel openapi.CatalogModel + var catalogModel models.CatalogModel if err := json.Unmarshal(responseData, &catalogModel); err != nil { return nil, fmt.Errorf("error decoding response data: %w", err) @@ -56,7 +58,7 @@ func (a CatalogSources) GetCatalogSourceModel(client httpclient.HTTPClientInterf return &catalogModel, nil } -func (a CatalogSources) GetCatalogModelArtifacts(client httpclient.HTTPClientInterface, sourceId string, modelName string) (*openapi.CatalogModelArtifactList, error) { +func (a CatalogSources) GetCatalogSourceModelArtifacts(client httpclient.HTTPClientInterface, sourceId string, modelName string) (*models.CatalogModelArtifactList, error) { path, err := url.JoinPath(sourcesPath, sourceId, "models", modelName, "artifacts") if err != nil { return nil, err @@ -66,10 +68,26 @@ func (a CatalogSources) GetCatalogModelArtifacts(client httpclient.HTTPClientInt return nil, fmt.Errorf("error fetching sourcesPath: %w", err) } - var catalogModelArtifacts openapi.CatalogModelArtifactList + var catalogModelArtifacts models.CatalogModelArtifactList if err := json.Unmarshal(responseData, &catalogModelArtifacts); err != nil { return nil, fmt.Errorf("error decoding response data: %w", err) } return &catalogModelArtifacts, nil } + +func (a CatalogSources) GetCatalogFilterOptions(client httpclient.HTTPClientInterface) (*models.FilterOptionsList, error) { + responseData, err := client.GET(filterOptionPath) + + if err != nil { + return nil, fmt.Errorf("error fetching sourcesPath: %w", err) + } 
+ + var sources models.FilterOptionsList + + if err := json.Unmarshal(responseData, &sources); err != nil { + return nil, fmt.Errorf("error decoding response data: %w", err) + } + + return &sources, nil +} diff --git a/clients/ui/bff/internal/repositories/helpers.go b/clients/ui/bff/internal/repositories/helpers.go index c170e6b15d..727e24e1e1 100644 --- a/clients/ui/bff/internal/repositories/helpers.go +++ b/clients/ui/bff/internal/repositories/helpers.go @@ -29,6 +29,15 @@ func FilterPageValues(values url.Values) url.Values { if v := values.Get("source"); v != "" { result.Set("source", v) } + if v := values.Get("sourceLabel"); v != "" { + result.Set("sourceLabel", v) + } + if v := values.Get("filterQuery"); v != "" { + result.Set("filterQuery", v) + } + if v := values.Get("artifactType"); v != "" { + result.Set("artifactType", v) + } return result } diff --git a/clients/ui/bff/internal/repositories/model_catalog.go b/clients/ui/bff/internal/repositories/model_catalog.go index a705ac712c..87cb2bb37a 100644 --- a/clients/ui/bff/internal/repositories/model_catalog.go +++ b/clients/ui/bff/internal/repositories/model_catalog.go @@ -19,7 +19,7 @@ func NewCatalogRepository() *ModelCatalogRepository { func (m *ModelCatalogRepository) GetModelCatalogWithMode(sessionCtx context.Context, client k8s.KubernetesClientInterface, namespace string, isFederatedMode bool) (models.ModelCatalogModel, error) { - s, err := client.GetServiceDetailsByName(sessionCtx, namespace, ModelCatalogServiceName) + s, err := client.GetServiceDetailsByName(sessionCtx, namespace, ModelCatalogServiceName, k8s.ComponentLabelValueCatalog) if err != nil { return models.ModelCatalogModel{}, fmt.Errorf("error fetching model catalog: %w", err) } diff --git a/clients/ui/bff/internal/repositories/model_catalog_client.go b/clients/ui/bff/internal/repositories/model_catalog_client.go index fea89a55a2..bab728abc9 100644 --- a/clients/ui/bff/internal/repositories/model_catalog_client.go +++ 
b/clients/ui/bff/internal/repositories/model_catalog_client.go @@ -7,12 +7,14 @@ import ( type ModelCatalogClientInterface interface { CatalogSourcesInterface CatalogModelsInterface + CatalogSourcePreviewInterface } type ModelCatalogClient struct { logger *slog.Logger CatalogSources CatalogModels + CatalogSourcePreview } func NewModelCatalogClient(logger *slog.Logger) (ModelCatalogClientInterface, error) { diff --git a/clients/ui/bff/internal/repositories/model_catalog_settings.go b/clients/ui/bff/internal/repositories/model_catalog_settings.go new file mode 100644 index 0000000000..bfce454f29 --- /dev/null +++ b/clients/ui/bff/internal/repositories/model_catalog_settings.go @@ -0,0 +1,154 @@ +package repositories + +import ( + "context" + "fmt" + + k8s "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" + "github.com/kubeflow/model-registry/ui/bff/internal/models" + "gopkg.in/yaml.v3" +) + +type ModelCatalogSettingsRepository struct { +} + +func NewModelCatalogSettingsRepository() *ModelCatalogSettingsRepository { + return &ModelCatalogSettingsRepository{} +} + +func (r *ModelCatalogSettingsRepository) GetAllCatalogSourceConfigs(ctx context.Context, client k8s.KubernetesClientInterface, namespace string) (*models.CatalogSourceConfigList, error) { + defaultCM, userCM, err := client.GetAllCatalogSourceConfigs(ctx, namespace) + if err != nil { + return nil, fmt.Errorf("failed to fetch catalog source configmaps: %w", err) + } + + catalogSources := &models.CatalogSourceConfigList{ + Catalogs: make([]models.CatalogSourceConfig, 0), + } + + if raw, ok := defaultCM.Data[k8s.CatalogSourceKey]; ok { + defaulfCatalogSources, err := parseCatalogYaml(raw, true) + if err != nil { + return nil, fmt.Errorf("failed to parse default catalogs: %w", err) + } + catalogSources.Catalogs = append(catalogSources.Catalogs, defaulfCatalogSources...) 
+ } + + if raw, ok := userCM.Data[k8s.CatalogSourceKey]; ok { + userManagedSources, err := parseCatalogYaml(raw, false) + if err != nil { + return nil, fmt.Errorf("failed to parse default catalogs: %w", err) + } + catalogSources.Catalogs = append(catalogSources.Catalogs, userManagedSources...) + } + + return catalogSources, nil +} + +func (r *ModelCatalogSettingsRepository) GetCatalogSourceConfig(ctx context.Context, + client k8s.KubernetesClientInterface, + namespace string, + catalogSourceId string, +) (*models.CatalogSourceConfig, error) { + // TODO ppadti write the real implementation here calling k8s client + return nil, fmt.Errorf("not implemented yet") +} + +func (r *ModelCatalogSettingsRepository) CreateCatalogSourceConfig( + ctx context.Context, + client k8s.KubernetesClientInterface, + namespace string, + payload models.CatalogSourceConfigPayload, +) (*models.CatalogSourceConfig, error) { + + // TODO ppadti write the real implementation here calling k8s client + return nil, fmt.Errorf("not implemented yet") +} + +func (r *ModelCatalogSettingsRepository) UpdateCatalogSourceConfig( + ctx context.Context, + client k8s.KubernetesClientInterface, + namespace string, + payload models.CatalogSourceConfigPayload, +) (*models.CatalogSourceConfig, error) { + // TODO ppadti write the real implementation here calling k8s client + return nil, fmt.Errorf("not implemented yet") +} + +func (r *ModelCatalogSettingsRepository) DeleteCatalogSourceConfig( + ctx context.Context, + client k8s.KubernetesClientInterface, + namespace string, + catalogSourceId string, +) (*models.CatalogSourceConfig, error) { + // TODO ppadti write the real implementation here calling k8s client + return nil, fmt.Errorf("not implemented yet") +} + +func parseCatalogYaml(raw string, isDefault bool) ([]models.CatalogSourceConfig, error) { + // Internal struct to match YAML structure + var parsed struct { + Catalogs []struct { + Name string `yaml:"name"` + Id string `yaml:"id"` + Type string 
`yaml:"type"` + Enabled *bool `yaml:"enabled"` + Properties map[string]interface{} `yaml:"properties"` + Labels []string `yaml:"labels"` + } `yaml:"catalogs"` + } + + if err := yaml.Unmarshal([]byte(raw), &parsed); err != nil { + return nil, fmt.Errorf("failed to parse catalogs yaml: %w", err) + } + + catalogs := make([]models.CatalogSourceConfig, 0, len(parsed.Catalogs)) + for _, c := range parsed.Catalogs { + entry := models.CatalogSourceConfig{ + Id: c.Id, + Name: c.Name, + Type: c.Type, + Enabled: c.Enabled, + Labels: c.Labels, + IsDefault: &isDefault, + } + + if c.Properties != nil { + if includedModels, ok := c.Properties["includedModels"]; ok { + entry.IncludedModels = extractStringSlice(includedModels) + } + + if excludedModels, ok := c.Properties["excludedModels"]; ok { + entry.ExcludedModels = extractStringSlice(excludedModels) + } + + if apiKey, ok := c.Properties["apiKey"].(string); ok { + entry.ApiKey = &apiKey + } + + if allowedOrganization, ok := c.Properties["allowedOrganization"].(string); ok { + entry.AllowedOrganization = &allowedOrganization + } + } + catalogs = append(catalogs, entry) + } + + return catalogs, nil +} + +func extractStringSlice(value interface{}) []string { + if arr, ok := value.([]interface{}); ok { + result := make([]string, 0, len(arr)) + for _, item := range arr { + if str, ok := item.(string); ok { + result = append(result, str) + } + } + return result + } + if strSlice, ok := value.([]string); ok { + return strSlice + } + return []string{} + +} diff --git a/clients/ui/bff/internal/repositories/model_registry.go b/clients/ui/bff/internal/repositories/model_registry.go index 2639aa57e1..001ec9d8ae 100644 --- a/clients/ui/bff/internal/repositories/model_registry.go +++ b/clients/ui/bff/internal/repositories/model_registry.go @@ -4,6 +4,8 @@ import ( "context" "fmt" + "github.com/kubeflow/model-registry/ui/bff/internal/constants" + helper "github.com/kubeflow/model-registry/ui/bff/internal/helpers" k8s 
"github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" "github.com/kubeflow/model-registry/ui/bff/internal/models" @@ -23,11 +25,30 @@ func (m *ModelRegistryRepository) GetAllModelRegistries(sessionCtx context.Conte // GetAllModelRegistriesWithMode fetches all model registries with support for federated mode func (m *ModelRegistryRepository) GetAllModelRegistriesWithMode(sessionCtx context.Context, client k8s.KubernetesClientInterface, namespace string, isFederatedMode bool) ([]models.ModelRegistryModel, error) { - - // TODO: In default mode fetch Routes for external access. - resources, err := client.GetServiceDetails(sessionCtx, namespace) + logger := helper.GetContextLogger(sessionCtx) + logger.Debug("GetAllModelRegistriesWithMode called", "namespace", namespace, "isFederatedMode", isFederatedMode) + + var resources []k8s.ServiceDetails + var err error + + // Check if we have authorization context from the middleware + if authCtx, ok := sessionCtx.Value(constants.ServiceAuthorizationContextKey).(*models.ServiceAuthorizationContext); ok { + if authCtx.AllowList { + logger.Debug("User can get list services ") + resources, err = client.GetServiceDetails(sessionCtx, namespace) + } else { + logger.Debug("User has limited access - we need use Rule base access", + "serviceCount", len(authCtx.AllowedServiceNames), + "services", authCtx.AllowedServiceNames) + resources, err = m.getSpecificServiceDetails(sessionCtx, client, namespace, authCtx.AllowedServiceNames) + } + } else { + logger.Warn("No authorization context found - using fallback behavior") + resources, err = client.GetServiceDetails(sessionCtx, namespace) + } if err != nil { + logger.Error("Error fetching service details", "error", err, "namespace", namespace) return nil, fmt.Errorf("error fetching model registries: %w", err) } @@ -47,6 +68,31 @@ func (m *ModelRegistryRepository) GetAllModelRegistriesWithMode(sessionCtx conte return registries, nil } +// getSpecificServiceDetails fetches 
details for specific services by name +func (m *ModelRegistryRepository) getSpecificServiceDetails(sessionCtx context.Context, client k8s.KubernetesClientInterface, namespace string, serviceNames []string) ([]k8s.ServiceDetails, error) { + logger := helper.GetContextLogger(sessionCtx) + logger.Debug("getSpecificServiceDetails called", "namespace", namespace, "serviceNames", serviceNames) + + var resources []k8s.ServiceDetails + + for _, serviceName := range serviceNames { + logger.Debug("Fetching service details", "serviceName", serviceName, "namespace", namespace) + // Validate if service is a model registry service by passing the component label value + serviceDetail, err := client.GetServiceDetailsByName(sessionCtx, namespace, serviceName, k8s.ComponentLabelValue) + if err != nil { + logger.Warn("Failed to get service details, skipping", + "serviceName", serviceName, + "namespace", namespace, + "error", err) + // Log the error but continue with other services + continue + } + logger.Debug("Service details retrieved successfully", "serviceName", serviceName) + resources = append(resources, serviceDetail) + } + return resources, nil +} + func (m *ModelRegistryRepository) GetModelRegistry(sessionCtx context.Context, client k8s.KubernetesClientInterface, namespace string, modelRegistryID string) (models.ModelRegistryModel, error) { // Default to non-federated mode for backward compatibility return m.GetModelRegistryWithMode(sessionCtx, client, namespace, modelRegistryID, false) @@ -55,7 +101,7 @@ func (m *ModelRegistryRepository) GetModelRegistry(sessionCtx context.Context, c // GetModelRegistryWithMode fetches a specific model registry with support for federated mode func (m *ModelRegistryRepository) GetModelRegistryWithMode(sessionCtx context.Context, client k8s.KubernetesClientInterface, namespace string, modelRegistryID string, isFederatedMode bool) (models.ModelRegistryModel, error) { - s, err := client.GetServiceDetailsByName(sessionCtx, namespace, 
modelRegistryID) + s, err := client.GetServiceDetailsByName(sessionCtx, namespace, modelRegistryID, k8s.ComponentLabelValue) if err != nil { return models.ModelRegistryModel{}, fmt.Errorf("error fetching model registry: %w", err) } diff --git a/clients/ui/bff/internal/repositories/model_registry_test.go b/clients/ui/bff/internal/repositories/model_registry_test.go index b4c5c2a1f5..09265819b8 100644 --- a/clients/ui/bff/internal/repositories/model_registry_test.go +++ b/clients/ui/bff/internal/repositories/model_registry_test.go @@ -1,6 +1,10 @@ package repositories import ( + "context" + + "github.com/kubeflow/model-registry/ui/bff/internal/constants" + k8s "github.com/kubeflow/model-registry/ui/bff/internal/integrations/kubernetes" "github.com/kubeflow/model-registry/ui/bff/internal/mocks" "github.com/kubeflow/model-registry/ui/bff/internal/models" . "github.com/onsi/ginkgo/v2" @@ -59,4 +63,197 @@ var _ = Describe("TestFetchAllModelRegistry", func() { Expect(registries).To(BeEmpty()) }) }) + + Context("with authorization context", func() { + var modelRegistryRepository *ModelRegistryRepository + var serviceAccountMockedK8client k8s.KubernetesClientInterface + + BeforeEach(func() { + modelRegistryRepository = NewModelRegistryRepository() + var err error + serviceAccountMockedK8client, err = kubernetesMockedStaticClientFactory.GetClient(mocks.NewMockSessionContextNoParent()) + Expect(err).NotTo(HaveOccurred()) + }) + + It("should fetch all services when AllowList is true", func() { + By("creating context with AllowList authorization") + authCtx := &models.ServiceAuthorizationContext{ + AllowList: true, + AllowedServiceNames: []string{}, // Empty since AllowList=true means access to all + Namespace: "kubeflow", + } + ctx := context.WithValue(mocks.NewMockSessionContextNoParent(), constants.ServiceAuthorizationContextKey, authCtx) + + By("fetching all model registries") + registries, err := modelRegistryRepository.GetAllModelRegistriesWithMode(ctx, 
serviceAccountMockedK8client, "kubeflow", false) + Expect(err).NotTo(HaveOccurred()) + + By("should return all available model registries in the namespace") + expectedRegistries := []models.ModelRegistryModel{ + {Name: "model-registry", Description: "Model Registry Description", DisplayName: "Model Registry", ServerAddress: "http://127.0.0.1:8080/api/model_registry/v1alpha3"}, + {Name: "model-registry-one", Description: "Model Registry One description", DisplayName: "Model Registry One", ServerAddress: "http://127.0.0.1:8080/api/model_registry/v1alpha3"}, + } + Expect(registries).To(ConsistOf(expectedRegistries)) + }) + + It("should fetch only specific services when AllowList is false", func() { + By("creating context with specific allowed services") + authCtx := &models.ServiceAuthorizationContext{ + AllowList: false, + AllowedServiceNames: []string{"model-registry"}, // Only allow access to specific service + Namespace: "kubeflow", + } + ctx := context.WithValue(mocks.NewMockSessionContextNoParent(), constants.ServiceAuthorizationContextKey, authCtx) + + By("fetching model registries with restricted access") + registries, err := modelRegistryRepository.GetAllModelRegistriesWithMode(ctx, serviceAccountMockedK8client, "kubeflow", false) + Expect(err).NotTo(HaveOccurred()) + + By("should return only the allowed service") + expectedRegistries := []models.ModelRegistryModel{ + {Name: "model-registry", Description: "Model Registry Description", DisplayName: "Model Registry", ServerAddress: "http://127.0.0.1:8080/api/model_registry/v1alpha3"}, + } + Expect(registries).To(ConsistOf(expectedRegistries)) + }) + + It("should fetch multiple specific services when AllowList is false", func() { + By("creating context with multiple allowed services") + authCtx := &models.ServiceAuthorizationContext{ + AllowList: false, + AllowedServiceNames: []string{"model-registry", "model-registry-one"}, + Namespace: "kubeflow", + } + ctx := 
context.WithValue(mocks.NewMockSessionContextNoParent(), constants.ServiceAuthorizationContextKey, authCtx) + + By("fetching model registries with multiple allowed services") + registries, err := modelRegistryRepository.GetAllModelRegistriesWithMode(ctx, serviceAccountMockedK8client, "kubeflow", false) + Expect(err).NotTo(HaveOccurred()) + + By("should return all allowed services") + expectedRegistries := []models.ModelRegistryModel{ + {Name: "model-registry", Description: "Model Registry Description", DisplayName: "Model Registry", ServerAddress: "http://127.0.0.1:8080/api/model_registry/v1alpha3"}, + {Name: "model-registry-one", Description: "Model Registry One description", DisplayName: "Model Registry One", ServerAddress: "http://127.0.0.1:8080/api/model_registry/v1alpha3"}, + } + Expect(registries).To(ConsistOf(expectedRegistries)) + }) + + It("should return empty list when AllowList is false and no services are allowed", func() { + By("creating context with no allowed services") + authCtx := &models.ServiceAuthorizationContext{ + AllowList: false, + AllowedServiceNames: []string{}, // No services allowed + Namespace: "kubeflow", + } + ctx := context.WithValue(mocks.NewMockSessionContextNoParent(), constants.ServiceAuthorizationContextKey, authCtx) + + By("fetching model registries with no allowed services") + registries, err := modelRegistryRepository.GetAllModelRegistriesWithMode(ctx, serviceAccountMockedK8client, "kubeflow", false) + Expect(err).NotTo(HaveOccurred()) + + By("should return empty list") + Expect(registries).To(BeEmpty()) + }) + + It("should handle non-existent services gracefully when AllowList is false", func() { + By("creating context with non-existent service names") + authCtx := &models.ServiceAuthorizationContext{ + AllowList: false, + AllowedServiceNames: []string{"non-existent-service", "another-missing-service"}, + Namespace: "kubeflow", + } + ctx := context.WithValue(mocks.NewMockSessionContextNoParent(), 
constants.ServiceAuthorizationContextKey, authCtx) + + By("fetching model registries with non-existent services") + registries, err := modelRegistryRepository.GetAllModelRegistriesWithMode(ctx, serviceAccountMockedK8client, "kubeflow", false) + Expect(err).NotTo(HaveOccurred()) + + By("should return empty list without error") + Expect(registries).To(BeEmpty()) + }) + + It("should handle mixed existing and non-existent services when AllowList is false", func() { + By("creating context with mix of existing and non-existent services") + authCtx := &models.ServiceAuthorizationContext{ + AllowList: false, + AllowedServiceNames: []string{"model-registry", "non-existent-service"}, + Namespace: "kubeflow", + } + ctx := context.WithValue(mocks.NewMockSessionContextNoParent(), constants.ServiceAuthorizationContextKey, authCtx) + + By("fetching model registries with mixed service names") + registries, err := modelRegistryRepository.GetAllModelRegistriesWithMode(ctx, serviceAccountMockedK8client, "kubeflow", false) + Expect(err).NotTo(HaveOccurred()) + + By("should return only the existing service") + expectedRegistries := []models.ModelRegistryModel{ + {Name: "model-registry", Description: "Model Registry Description", DisplayName: "Model Registry", ServerAddress: "http://127.0.0.1:8080/api/model_registry/v1alpha3"}, + } + Expect(registries).To(ConsistOf(expectedRegistries)) + }) + + It("should fallback to all services when no authorization context is present", func() { + By("using context without authorization context") + ctx := mocks.NewMockSessionContextNoParent() // No authorization context + + By("fetching model registries without authorization context") + registries, err := modelRegistryRepository.GetAllModelRegistriesWithMode(ctx, serviceAccountMockedK8client, "kubeflow", false) + Expect(err).NotTo(HaveOccurred()) + + By("should return all available services as fallback behavior") + expectedRegistries := []models.ModelRegistryModel{ + {Name: "model-registry", 
Description: "Model Registry Description", DisplayName: "Model Registry", ServerAddress: "http://127.0.0.1:8080/api/model_registry/v1alpha3"}, + {Name: "model-registry-one", Description: "Model Registry One description", DisplayName: "Model Registry One", ServerAddress: "http://127.0.0.1:8080/api/model_registry/v1alpha3"}, + } + Expect(registries).To(ConsistOf(expectedRegistries)) + }) + }) + + Context("with federated mode", func() { + var modelRegistryRepository *ModelRegistryRepository + var serviceAccountMockedK8client k8s.KubernetesClientInterface + + BeforeEach(func() { + modelRegistryRepository = NewModelRegistryRepository() + var err error + serviceAccountMockedK8client, err = kubernetesMockedStaticClientFactory.GetClient(mocks.NewMockSessionContextNoParent()) + Expect(err).NotTo(HaveOccurred()) + }) + + It("should handle federated mode with AllowList authorization", func() { + By("creating context with AllowList authorization") + authCtx := &models.ServiceAuthorizationContext{ + AllowList: true, + AllowedServiceNames: []string{}, + Namespace: "kubeflow", + } + ctx := context.WithValue(mocks.NewMockSessionContextNoParent(), constants.ServiceAuthorizationContextKey, authCtx) + + By("fetching model registries in federated mode") + registries, err := modelRegistryRepository.GetAllModelRegistriesWithMode(ctx, serviceAccountMockedK8client, "kubeflow", true) + Expect(err).NotTo(HaveOccurred()) + + By("should return registries with appropriate server addresses for federated mode") + Expect(registries).To(HaveLen(2)) + // Note: The exact server addresses depend on the mock implementation + // but the key point is that federated mode is properly handled with authorization + }) + + It("should handle federated mode with restricted access", func() { + By("creating context with specific allowed services") + authCtx := &models.ServiceAuthorizationContext{ + AllowList: false, + AllowedServiceNames: []string{"model-registry"}, + Namespace: "kubeflow", + } + ctx := 
context.WithValue(mocks.NewMockSessionContextNoParent(), constants.ServiceAuthorizationContextKey, authCtx) + + By("fetching model registries in federated mode with restrictions") + registries, err := modelRegistryRepository.GetAllModelRegistriesWithMode(ctx, serviceAccountMockedK8client, "kubeflow", true) + Expect(err).NotTo(HaveOccurred()) + + By("should return only allowed services in federated mode") + Expect(registries).To(HaveLen(1)) + Expect(registries[0].Name).To(Equal("model-registry")) + }) + }) }) diff --git a/clients/ui/bff/internal/repositories/repositories.go b/clients/ui/bff/internal/repositories/repositories.go index 67a799d3d1..e325f358f0 100644 --- a/clients/ui/bff/internal/repositories/repositories.go +++ b/clients/ui/bff/internal/repositories/repositories.go @@ -2,25 +2,27 @@ package repositories // Repositories struct is a single convenient container to hold and represent all our repositories. type Repositories struct { - HealthCheck *HealthCheckRepository - ModelRegistry *ModelRegistryRepository - ModelCatalog *ModelCatalogRepository - ModelRegistrySettings *ModelRegistrySettingsRepository - ModelRegistryClient ModelRegistryClientInterface - ModelCatalogClient ModelCatalogClientInterface - User *UserRepository - Namespace *NamespaceRepository + HealthCheck *HealthCheckRepository + ModelRegistry *ModelRegistryRepository + ModelCatalog *ModelCatalogRepository + ModelRegistrySettings *ModelRegistrySettingsRepository + ModelRegistryClient ModelRegistryClientInterface + ModelCatalogClient ModelCatalogClientInterface + ModelCatalogSettingsRepository *ModelCatalogSettingsRepository + User *UserRepository + Namespace *NamespaceRepository } func NewRepositories(modelRegistryClient ModelRegistryClientInterface, modelCatalogClient ModelCatalogClientInterface) *Repositories { return &Repositories{ - HealthCheck: NewHealthCheckRepository(), - ModelRegistry: NewModelRegistryRepository(), - ModelCatalog: NewCatalogRepository(), - ModelCatalogClient: 
modelCatalogClient, - ModelRegistrySettings: NewModelRegistrySettingsRepository(), - ModelRegistryClient: modelRegistryClient, - User: NewUserRepository(), - Namespace: NewNamespaceRepository(), + HealthCheck: NewHealthCheckRepository(), + ModelRegistry: NewModelRegistryRepository(), + ModelCatalog: NewCatalogRepository(), + ModelCatalogClient: modelCatalogClient, + ModelRegistrySettings: NewModelRegistrySettingsRepository(), + ModelRegistryClient: modelRegistryClient, + ModelCatalogSettingsRepository: NewModelCatalogSettingsRepository(), + User: NewUserRepository(), + Namespace: NewNamespaceRepository(), } } diff --git a/clients/ui/frontend/package-lock.json b/clients/ui/frontend/package-lock.json index 12f96633fe..3e5b84cf44 100644 --- a/clients/ui/frontend/package-lock.json +++ b/clients/ui/frontend/package-lock.json @@ -11,18 +11,18 @@ "dependencies": { "@emotion/react": "^11.14.0", "@emotion/styled": "^11.14.0", - "@patternfly/patternfly": "^6.3.1", - "@patternfly/react-core": "^6.3.1", - "@patternfly/react-icons": "^6.3.1", - "@patternfly/react-styles": "^6.3.1", - "@patternfly/react-table": "^6.3.1", - "@patternfly/react-templates": "^6.3.1", + "@patternfly/patternfly": "^6.4.0", + "@patternfly/react-core": "^6.4.0", + "@patternfly/react-icons": "^6.4.0", + "@patternfly/react-styles": "^6.4.0", + "@patternfly/react-table": "^6.4.0", + "@patternfly/react-templates": "^6.4.0", "classnames": "^2.2.6", "dompurify": "^3.2.4", "lodash-es": "^4.17.15", - "mod-arch-core": "~1.0.2", - "mod-arch-kubeflow": "~1.0.2", - "mod-arch-shared": "~1.0.2", + "mod-arch-core": "~1.1.5", + "mod-arch-kubeflow": "~1.1.5", + "mod-arch-shared": "~1.1.5", "react": "^18", "react-dom": "^18", "react-markdown": "^10.1.0", @@ -36,10 +36,10 @@ "showdown": "^2.1.0" }, "devDependencies": { - "@module-federation/enhanced": "^0.18.0", - "@mui/icons-material": "^6.4.8", - "@mui/material": "^6.1.7", - "@mui/types": "^7.2.20", + "@module-federation/enhanced": "^0.21.4", + "@mui/icons-material": 
"^7.3.4", + "@mui/material": "^7.3.4", + "@mui/types": "^7.4.7", "@pmmmwh/react-refresh-webpack-plugin": "^0.5.15", "@swc/core": "^1.9.1", "@types/chai-subset": "^1.3.5", @@ -92,7 +92,7 @@ "webpack-merge": "^6.0.1" }, "engines": { - "node": ">=20.0.0" + "node": ">=22.0.0" }, "optionalDependencies": { "@babel/preset-env": "^7.26.9", @@ -125,7 +125,7 @@ "eslint-plugin-react": "^7.37.5", "eslint-plugin-react-hooks": "^5.2.0", "npm-run-all": "^4.1.5", - "serve": "^14.2.4", + "serve": "^14.2.5", "ts-jest": "^29.4.0" } }, @@ -1984,9 +1984,9 @@ } }, "node_modules/@babel/runtime": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.3.tgz", - "integrity": "sha512-9uIQ10o0WGdpP6GDhXcdOJPJuDgFtIDtN/9+ArJQ2NAfAmiuhTQdzkaTGR33v43GYS2UrSA0eX2pPPHoFVvpxA==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz", + "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==", "license": "MIT", "engines": { "node": ">=6.9.0" @@ -2056,9 +2056,9 @@ } }, "node_modules/@cypress/code-coverage": { - "version": "3.14.6", - "resolved": "https://registry.npmjs.org/@cypress/code-coverage/-/code-coverage-3.14.6.tgz", - "integrity": "sha512-aGDJG89uR5CRFbAIs+uFb3f6yPQTf/cMD6D9BZBo/gJRS1vZXli4lIGYU2DoJK4gMP+x9mAQSRexN91oizCusg==", + "version": "3.14.7", + "resolved": "https://registry.npmjs.org/@cypress/code-coverage/-/code-coverage-3.14.7.tgz", + "integrity": "sha512-0qk2aNKmrB0AwJtYSyK2+MWl/3NqDgZQ1OBSEh6oFnJwl/H2u3NTatV+FTCap22HTm+uxUS6SarU8gP9gFZ6Tw==", "license": "MIT", "optional": true, "dependencies": { @@ -2067,10 +2067,10 @@ "dayjs": "1.11.13", "debug": "4.4.0", "execa": "4.1.0", - "globby": "11.1.0", "istanbul-lib-coverage": "^3.0.0", "js-yaml": "4.1.0", - "nyc": "15.1.0" + "nyc": "15.1.0", + "tinyglobby": "^0.2.14" }, "peerDependencies": { "@babel/core": "^7.0.1", @@ -2852,6 +2852,23 @@ "url": "https://opencollective.com/eslint" } }, + 
"node_modules/@eslint/eslintrc/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { "version": "1.1.12", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", @@ -2873,6 +2890,13 @@ "node": ">= 4" } }, + "node_modules/@eslint/eslintrc/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "devOptional": true, + "license": "MIT" + }, "node_modules/@eslint/eslintrc/node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -3102,9 +3126,9 @@ } }, "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "version": "3.14.2", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz", + "integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==", "devOptional": true, "license": "MIT", "dependencies": { @@ -4370,13 +4394,13 @@ } }, "node_modules/@module-federation/bridge-react-webpack-plugin": { - "version": "0.18.4", - "resolved": 
"https://registry.npmjs.org/@module-federation/bridge-react-webpack-plugin/-/bridge-react-webpack-plugin-0.18.4.tgz", - "integrity": "sha512-tYgso9izSinWzzVlsOUsBjW5lPMsvsVp95Jrw5W4Ajg9Un/yTkjOqEqmsMYpiL7drEN2+gPPVYyQ/hUK4QWz8Q==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/bridge-react-webpack-plugin/-/bridge-react-webpack-plugin-0.21.4.tgz", + "integrity": "sha512-aVxpy5dI5da2Qxw5YUDrXnzB68G3tUM3hogaImBjUvEsXFOxg7Pc5DBio2I/FJ45jXnoP3Gaswa0vLz6xWiyiA==", "dev": true, "license": "MIT", "dependencies": { - "@module-federation/sdk": "0.18.4", + "@module-federation/sdk": "0.21.4", "@types/semver": "7.5.8", "semver": "7.6.3" } @@ -4395,15 +4419,15 @@ } }, "node_modules/@module-federation/cli": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/cli/-/cli-0.18.4.tgz", - "integrity": "sha512-31c+2OjtRdsYq7oV+rCoTO9AXizT3D9CNzofZ9EVRGsaS9+H+nJKTkK+pw+IhK0Y8I0HsP+uxgLrazqF0tLbgg==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/cli/-/cli-0.21.4.tgz", + "integrity": "sha512-WmNVpq9h6xFe5+NviLL8/n174nhS5pOVHs7JAW7e/0qpQ5qXXn4ZN57ewUNfd6+RR6WYuoP1Q3ZWkeK+/dv9gQ==", "dev": true, "license": "MIT", "dependencies": { "@modern-js/node-bundle-require": "2.68.2", - "@module-federation/dts-plugin": "0.18.4", - "@module-federation/sdk": "0.18.4", + "@module-federation/dts-plugin": "0.21.4", + "@module-federation/sdk": "0.21.4", "chalk": "3.0.0", "commander": "11.1.0" }, @@ -4429,14 +4453,14 @@ } }, "node_modules/@module-federation/data-prefetch": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/data-prefetch/-/data-prefetch-0.18.4.tgz", - "integrity": "sha512-XOHFFO1wrVbjjfP2JRMbht+ILim5Is6Mfb5f2H4I9w0CSaZNRltG0fTnebECB1jgosrd8xaYnrwzXsCI/S53qQ==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/data-prefetch/-/data-prefetch-0.21.4.tgz", + "integrity": 
"sha512-RKCacRLuh2kd9gtTkEqYlT0l2w9B0NDDthOVHCi+WlHWc5vXDxupZaEFFOAUimzARN8dPXok2iwlLLD2gs6AQw==", "dev": true, "license": "MIT", "dependencies": { - "@module-federation/runtime": "0.18.4", - "@module-federation/sdk": "0.18.4", + "@module-federation/runtime": "0.21.4", + "@module-federation/sdk": "0.21.4", "fs-extra": "9.1.0" }, "peerDependencies": { @@ -4445,23 +4469,23 @@ } }, "node_modules/@module-federation/dts-plugin": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/dts-plugin/-/dts-plugin-0.18.4.tgz", - "integrity": "sha512-5FlrajLCypQ8+vEsncgEGpDmxUDG+Ub6ogKOE00e2gMxcYlgcCZNUSn5VbEGdCMcHQmIK2xt3WGQT30/7j2KiQ==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/dts-plugin/-/dts-plugin-0.21.4.tgz", + "integrity": "sha512-dStZ+J90JByoa++p3TuB4xx3b+25tHb9EAsvBkv86ptXEM1QFYRljV/7fvrQrvvqgD6Jpmq1LESi5NvI5J9P+w==", "dev": true, "license": "MIT", "dependencies": { - "@module-federation/error-codes": "0.18.4", - "@module-federation/managers": "0.18.4", - "@module-federation/sdk": "0.18.4", - "@module-federation/third-party-dts-extractor": "0.18.4", + "@module-federation/error-codes": "0.21.4", + "@module-federation/managers": "0.21.4", + "@module-federation/sdk": "0.21.4", + "@module-federation/third-party-dts-extractor": "0.21.4", "adm-zip": "^0.5.10", "ansi-colors": "^4.1.3", - "axios": "^1.11.0", + "axios": "^1.12.0", "chalk": "3.0.0", "fs-extra": "9.1.0", "isomorphic-ws": "5.0.0", - "koa": "3.0.1", + "koa": "3.0.3", "lodash.clonedeepwith": "4.5.0", "log4js": "6.9.1", "node-schedule": "2.1.1", @@ -4493,23 +4517,23 @@ } }, "node_modules/@module-federation/enhanced": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/enhanced/-/enhanced-0.18.4.tgz", - "integrity": "sha512-KiBw7e+aIBFoO2cmN5hJlKrYv3nUuXsB8yOSVnV9JBAkYNyRZQ9xoSbRCDt8rDRz/ydgEURUIwnGyL2ZU5jZYw==", - "dev": true, - "license": "MIT", - "dependencies": { - 
"@module-federation/bridge-react-webpack-plugin": "0.18.4", - "@module-federation/cli": "0.18.4", - "@module-federation/data-prefetch": "0.18.4", - "@module-federation/dts-plugin": "0.18.4", - "@module-federation/error-codes": "0.18.4", - "@module-federation/inject-external-runtime-core-plugin": "0.18.4", - "@module-federation/managers": "0.18.4", - "@module-federation/manifest": "0.18.4", - "@module-federation/rspack": "0.18.4", - "@module-federation/runtime-tools": "0.18.4", - "@module-federation/sdk": "0.18.4", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/enhanced/-/enhanced-0.21.4.tgz", + "integrity": "sha512-QX4nfL1E2dboPBCLIU/x1P87wa/fwj+AOLP7TPJ6CHwEahXoXbrnrux6Hjcf/6SfrN9RGZkEauYy2W/VvigJlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@module-federation/bridge-react-webpack-plugin": "0.21.4", + "@module-federation/cli": "0.21.4", + "@module-federation/data-prefetch": "0.21.4", + "@module-federation/dts-plugin": "0.21.4", + "@module-federation/error-codes": "0.21.4", + "@module-federation/inject-external-runtime-core-plugin": "0.21.4", + "@module-federation/managers": "0.21.4", + "@module-federation/manifest": "0.21.4", + "@module-federation/rspack": "0.21.4", + "@module-federation/runtime-tools": "0.21.4", + "@module-federation/sdk": "0.21.4", "btoa": "^1.2.1", "schema-utils": "^4.3.0", "upath": "2.0.1" @@ -4535,44 +4559,44 @@ } }, "node_modules/@module-federation/error-codes": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/error-codes/-/error-codes-0.18.4.tgz", - "integrity": "sha512-cpLsqL8du9CfTTCKvXbRg93ALF+lklqHnuPryhbwVEQg2eYo6CMoMQ6Eb7kJhLigUABIDujbHD01SvBbASGkeQ==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/error-codes/-/error-codes-0.21.4.tgz", + "integrity": "sha512-ClpL5MereWNXh+EgDjz7w4RrC1JlisQTvXDa1gLxpviHafzNDfdViVmuhi9xXVuj+EYo8KU70Y999KHhk9424Q==", "dev": true, "license": "MIT" }, 
"node_modules/@module-federation/inject-external-runtime-core-plugin": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/inject-external-runtime-core-plugin/-/inject-external-runtime-core-plugin-0.18.4.tgz", - "integrity": "sha512-x+IakEXu+ammna2SMKkb1NRDXKxhKckOJIYanNHh1FtG2bvhu8xJplShvStmfO+BUv1n0KODSq89qGVYxFMbGQ==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/inject-external-runtime-core-plugin/-/inject-external-runtime-core-plugin-0.21.4.tgz", + "integrity": "sha512-lOy+qPEA56AdkSIN2hO5zsKvnbplCJHUR5B6BKjo5+q752BrE3C1O0vAXYBRgmdQIBn+JAssdkbJKtfwl8oReQ==", "dev": true, "license": "MIT", "peerDependencies": { - "@module-federation/runtime-tools": "0.18.4" + "@module-federation/runtime-tools": "0.21.4" } }, "node_modules/@module-federation/managers": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/managers/-/managers-0.18.4.tgz", - "integrity": "sha512-wJ8wheGNq4vnaLHx17F8Y0L+T9nzO5ijqMxQ7q9Yohm7MGeC5DoSjjurv/afxL6Dg5rGky+kHsYGM4qRTMFXaA==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/managers/-/managers-0.21.4.tgz", + "integrity": "sha512-z8KZJdT56lv73GKh0g7IO4CLxCtgV44qnTCn7GZ/R1cdR0JhdDvrqlYL8rrVGPw1y2BqudO0OxlRw0LjAGGj7g==", "dev": true, "license": "MIT", "dependencies": { - "@module-federation/sdk": "0.18.4", + "@module-federation/sdk": "0.21.4", "find-pkg": "2.0.0", "fs-extra": "9.1.0" } }, "node_modules/@module-federation/manifest": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/manifest/-/manifest-0.18.4.tgz", - "integrity": "sha512-1+sfldRpYmJX/SDqG3gWeeBbPb0H0eKyQcedf77TQGwFypVAOJwI39qV0yp3FdjutD7GdJ2TGPBHnGt7AbEvKA==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/manifest/-/manifest-0.21.4.tgz", + "integrity": "sha512-sW6eYTpqeNjPszC2FMUyT21IaUkqueDPlmPffyV9XVUSjOZgNa5VbDiD3qyW86v/bHC0nhrQ0/TWKn8EPOszLQ==", "dev": true, 
"license": "MIT", "dependencies": { - "@module-federation/dts-plugin": "0.18.4", - "@module-federation/managers": "0.18.4", - "@module-federation/sdk": "0.18.4", + "@module-federation/dts-plugin": "0.21.4", + "@module-federation/managers": "0.21.4", + "@module-federation/sdk": "0.21.4", "chalk": "3.0.0", "find-pkg": "2.0.0" } @@ -4592,19 +4616,19 @@ } }, "node_modules/@module-federation/rspack": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/rspack/-/rspack-0.18.4.tgz", - "integrity": "sha512-gnvXKtk/w0ML15JHueWej5/8Lkoho7EoYUxvO77nBCnGOlXNqVYqLZ3REy2SS/8SQ4vQK156eSiyUkth2OYQqw==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/rspack/-/rspack-0.21.4.tgz", + "integrity": "sha512-/FG6CjAg8M5EUwTMOMxceC+oLggHwYMGaTp7jCXhCXQJpnJ0C/OvcgKp0lif+ELIRxerAAldrFO06/4n9gLEIg==", "dev": true, "license": "MIT", "dependencies": { - "@module-federation/bridge-react-webpack-plugin": "0.18.4", - "@module-federation/dts-plugin": "0.18.4", - "@module-federation/inject-external-runtime-core-plugin": "0.18.4", - "@module-federation/managers": "0.18.4", - "@module-federation/manifest": "0.18.4", - "@module-federation/runtime-tools": "0.18.4", - "@module-federation/sdk": "0.18.4", + "@module-federation/bridge-react-webpack-plugin": "0.21.4", + "@module-federation/dts-plugin": "0.21.4", + "@module-federation/inject-external-runtime-core-plugin": "0.21.4", + "@module-federation/managers": "0.21.4", + "@module-federation/manifest": "0.21.4", + "@module-federation/runtime-tools": "0.21.4", + "@module-federation/sdk": "0.21.4", "btoa": "1.2.1" }, "peerDependencies": { @@ -4622,50 +4646,50 @@ } }, "node_modules/@module-federation/runtime": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/runtime/-/runtime-0.18.4.tgz", - "integrity": "sha512-2et6p7pjGRHzpmrW425jt/BiAU7QHgkZtbQB7pj01eQ8qx6SloFEBk9ODnV8/ztSm9H2T3d8GxXA6/9xVOslmQ==", + "version": "0.21.4", + "resolved": 
"https://registry.npmjs.org/@module-federation/runtime/-/runtime-0.21.4.tgz", + "integrity": "sha512-wgvGqryurVEvkicufJmTG0ZehynCeNLklv8kIk5BLIsWYSddZAE+xe4xov1kgH5fIJQAoQNkRauFFjVNlHoAkA==", "dev": true, "license": "MIT", "dependencies": { - "@module-federation/error-codes": "0.18.4", - "@module-federation/runtime-core": "0.18.4", - "@module-federation/sdk": "0.18.4" + "@module-federation/error-codes": "0.21.4", + "@module-federation/runtime-core": "0.21.4", + "@module-federation/sdk": "0.21.4" } }, "node_modules/@module-federation/runtime-core": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/runtime-core/-/runtime-core-0.18.4.tgz", - "integrity": "sha512-LGGlFXlNeTbIGBFDiOvg0zz4jBWCGPqQatXdKx7mylXhDij7YmwbuW19oenX+P1fGhmoBUBM5WndmR87U66qWA==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/runtime-core/-/runtime-core-0.21.4.tgz", + "integrity": "sha512-SGpmoOLGNxZofpTOk6Lxb2ewaoz5wMi93AFYuuJB04HTVcngEK+baNeUZ2D/xewrqNIJoMY6f5maUjVfIIBPUA==", "dev": true, "license": "MIT", "dependencies": { - "@module-federation/error-codes": "0.18.4", - "@module-federation/sdk": "0.18.4" + "@module-federation/error-codes": "0.21.4", + "@module-federation/sdk": "0.21.4" } }, "node_modules/@module-federation/runtime-tools": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/runtime-tools/-/runtime-tools-0.18.4.tgz", - "integrity": "sha512-wSGTdx77R8BQX+q6nAcUuHPydYYm0F97gAEP9RTW1UlzXnM/0AFysDHujvtRQf5vyXkhj//HdcH6LIJJCImy2g==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/runtime-tools/-/runtime-tools-0.21.4.tgz", + "integrity": "sha512-RzFKaL0DIjSmkn76KZRfzfB6dD07cvID84950jlNQgdyoQFUGkqD80L6rIpVCJTY/R7LzR3aQjHnoqmq4JPo3w==", "dev": true, "license": "MIT", "dependencies": { - "@module-federation/runtime": "0.18.4", - "@module-federation/webpack-bundler-runtime": "0.18.4" + "@module-federation/runtime": "0.21.4", + 
"@module-federation/webpack-bundler-runtime": "0.21.4" } }, "node_modules/@module-federation/sdk": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/sdk/-/sdk-0.18.4.tgz", - "integrity": "sha512-dErzOlX+E3HS2Sg1m12Hi9nCnfvQPuIvlq9N47KxrbT2TIU3KKYc9q/Ua+QWqxfTyMVFpbNDwFMJ1R/w/gYf4A==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/sdk/-/sdk-0.21.4.tgz", + "integrity": "sha512-tzvhOh/oAfX++6zCDDxuvioHY4Jurf8vcfoCbKFxusjmyKr32GPbwFDazUP+OPhYCc3dvaa9oWU6X/qpUBLfJw==", "dev": true, "license": "MIT" }, "node_modules/@module-federation/third-party-dts-extractor": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/third-party-dts-extractor/-/third-party-dts-extractor-0.18.4.tgz", - "integrity": "sha512-PpiC0jxOegNR/xjhNOkjSYnUqMNJAy1kWsRd10to3Y64ZvGRf7/HF+x3aLIX8MbN7Ioy9F7Gd5oax6rtm+XmNQ==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/third-party-dts-extractor/-/third-party-dts-extractor-0.21.4.tgz", + "integrity": "sha512-zKaKpABSbpZhKbTUGkN6VKqApa+PcawwXAv+L8co3vhErRna82svSIicgLy27n4QzAFJ09coB4WgnPQLjXdU+A==", "dev": true, "license": "MIT", "dependencies": { @@ -4675,20 +4699,20 @@ } }, "node_modules/@module-federation/webpack-bundler-runtime": { - "version": "0.18.4", - "resolved": "https://registry.npmjs.org/@module-federation/webpack-bundler-runtime/-/webpack-bundler-runtime-0.18.4.tgz", - "integrity": "sha512-nPHp2wRS4/yfrGRQchZ0cyvdUZk+XgUmD0qWQl95xmeIeXUb90s3JrWFHSmS6Dt1gwMgJOeNpzzZDcBSy2P1VQ==", + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/@module-federation/webpack-bundler-runtime/-/webpack-bundler-runtime-0.21.4.tgz", + "integrity": "sha512-dusmR3uPnQh9u9ChQo3M+GLOuGFthfvnh7WitF/a1eoeTfRmXqnMFsXtZCUK+f/uXf+64874Zj/bhAgbBcVHZA==", "dev": true, "license": "MIT", "dependencies": { - "@module-federation/runtime": "0.18.4", - "@module-federation/sdk": "0.18.4" + "@module-federation/runtime": "0.21.4", 
+ "@module-federation/sdk": "0.21.4" } }, "node_modules/@mui/core-downloads-tracker": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-6.5.0.tgz", - "integrity": "sha512-LGb8t8i6M2ZtS3Drn3GbTI1DVhDY6FJ9crEey2lZ0aN2EMZo8IZBZj9wRf4vqbZHaWjsYgtbOnJw5V8UWbmK2Q==", + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-7.3.4.tgz", + "integrity": "sha512-BIktMapG3r4iXwIhYNpvk97ZfYWTreBBQTWjQKbNbzI64+ULHfYavQEX2w99aSWHS58DvXESWIgbD9adKcUOBw==", "license": "MIT", "funding": { "type": "opencollective", @@ -4696,13 +4720,13 @@ } }, "node_modules/@mui/icons-material": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@mui/icons-material/-/icons-material-6.5.0.tgz", - "integrity": "sha512-VPuPqXqbBPlcVSA0BmnoE4knW4/xG6Thazo8vCLWkOKusko6DtwFV6B665MMWJ9j0KFohTIf3yx2zYtYacvG1g==", + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@mui/icons-material/-/icons-material-7.3.4.tgz", + "integrity": "sha512-9n6Xcq7molXWYb680N2Qx+FRW8oT6j/LXF5PZFH3ph9X/Rct0B/BlLAsFI7iL9ySI6LVLuQIVtrLiPT82R7OZw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/runtime": "^7.26.0" + "@babel/runtime": "^7.28.4" }, "engines": { "node": ">=14.0.0" @@ -4712,7 +4736,7 @@ "url": "https://opencollective.com/mui-org" }, "peerDependencies": { - "@mui/material": "^6.5.0", + "@mui/material": "^7.3.4", "@types/react": "^17.0.0 || ^18.0.0 || ^19.0.0", "react": "^17.0.0 || ^18.0.0 || ^19.0.0" }, @@ -4723,22 +4747,22 @@ } }, "node_modules/@mui/material": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@mui/material/-/material-6.5.0.tgz", - "integrity": "sha512-yjvtXoFcrPLGtgKRxFaH6OQPtcLPhkloC0BML6rBG5UeldR0nPULR/2E2BfXdo5JNV7j7lOzrrLX2Qf/iSidow==", + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@mui/material/-/material-7.3.4.tgz", + "integrity": 
"sha512-gEQL9pbJZZHT7lYJBKQCS723v1MGys2IFc94COXbUIyCTWa+qC77a7hUax4Yjd5ggEm35dk4AyYABpKKWC4MLw==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.26.0", - "@mui/core-downloads-tracker": "^6.5.0", - "@mui/system": "^6.5.0", - "@mui/types": "~7.2.24", - "@mui/utils": "^6.4.9", + "@babel/runtime": "^7.28.4", + "@mui/core-downloads-tracker": "^7.3.4", + "@mui/system": "^7.3.3", + "@mui/types": "^7.4.7", + "@mui/utils": "^7.3.3", "@popperjs/core": "^2.11.8", "@types/react-transition-group": "^4.4.12", "clsx": "^2.1.1", "csstype": "^3.1.3", "prop-types": "^15.8.1", - "react-is": "^19.0.0", + "react-is": "^19.1.1", "react-transition-group": "^4.4.5" }, "engines": { @@ -4751,7 +4775,7 @@ "peerDependencies": { "@emotion/react": "^11.5.0", "@emotion/styled": "^11.3.0", - "@mui/material-pigment-css": "^6.5.0", + "@mui/material-pigment-css": "^7.3.3", "@types/react": "^17.0.0 || ^18.0.0 || ^19.0.0", "react": "^17.0.0 || ^18.0.0 || ^19.0.0", "react-dom": "^17.0.0 || ^18.0.0 || ^19.0.0" @@ -4771,28 +4795,14 @@ } } }, - "node_modules/@mui/material/node_modules/@mui/types": { - "version": "7.2.24", - "resolved": "https://registry.npmjs.org/@mui/types/-/types-7.2.24.tgz", - "integrity": "sha512-3c8tRt/CbWZ+pEg7QpSwbdxOk36EfmhbKf6AGZsD1EcLDLTSZoxxJ86FVtcjxvjuhdyBiWKSTGZFaXCnidO2kw==", - "license": "MIT", - "peerDependencies": { - "@types/react": "^17.0.0 || ^18.0.0 || ^19.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, "node_modules/@mui/private-theming": { - "version": "6.4.9", - "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-6.4.9.tgz", - "integrity": "sha512-LktcVmI5X17/Q5SkwjCcdOLBzt1hXuc14jYa7NPShog0GBDCDvKtcnP0V7a2s6EiVRlv7BzbWEJzH6+l/zaCxw==", + "version": "7.3.3", + "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-7.3.3.tgz", + "integrity": "sha512-OJM+9nj5JIyPUvsZ5ZjaeC9PfktmK+W5YaVLToLR8L0lB/DGmv1gcKE43ssNLSvpoW71Hct0necfade6+kW3zQ==", "license": "MIT", 
"dependencies": { - "@babel/runtime": "^7.26.0", - "@mui/utils": "^6.4.9", + "@babel/runtime": "^7.28.4", + "@mui/utils": "^7.3.3", "prop-types": "^15.8.1" }, "engines": { @@ -4813,13 +4823,13 @@ } }, "node_modules/@mui/styled-engine": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-6.5.0.tgz", - "integrity": "sha512-8woC2zAqF4qUDSPIBZ8v3sakj+WgweolpyM/FXf8jAx6FMls+IE4Y8VDZc+zS805J7PRz31vz73n2SovKGaYgw==", + "version": "7.3.3", + "resolved": "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-7.3.3.tgz", + "integrity": "sha512-CmFxvRJIBCEaWdilhXMw/5wFJ1+FT9f3xt+m2pPXhHPeVIbBg9MnMvNSJjdALvnQJMPw8jLhrUtXmN7QAZV2fw==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.26.0", - "@emotion/cache": "^11.13.5", + "@babel/runtime": "^7.28.4", + "@emotion/cache": "^11.14.0", "@emotion/serialize": "^1.3.3", "@emotion/sheet": "^1.4.0", "csstype": "^3.1.3", @@ -4847,16 +4857,16 @@ } }, "node_modules/@mui/system": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@mui/system/-/system-6.5.0.tgz", - "integrity": "sha512-XcbBYxDS+h/lgsoGe78ExXFZXtuIlSBpn/KsZq8PtZcIkUNJInkuDqcLd2rVBQrDC1u+rvVovdaWPf2FHKJf3w==", + "version": "7.3.3", + "resolved": "https://registry.npmjs.org/@mui/system/-/system-7.3.3.tgz", + "integrity": "sha512-Lqq3emZr5IzRLKaHPuMaLBDVaGvxoh6z7HMWd1RPKawBM5uMRaQ4ImsmmgXWtwJdfZux5eugfDhXJUo2mliS8Q==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.26.0", - "@mui/private-theming": "^6.4.9", - "@mui/styled-engine": "^6.5.0", - "@mui/types": "~7.2.24", - "@mui/utils": "^6.4.9", + "@babel/runtime": "^7.28.4", + "@mui/private-theming": "^7.3.3", + "@mui/styled-engine": "^7.3.3", + "@mui/types": "^7.4.7", + "@mui/utils": "^7.3.3", "clsx": "^2.1.1", "csstype": "^3.1.3", "prop-types": "^15.8.1" @@ -4886,28 +4896,13 @@ } } }, - "node_modules/@mui/system/node_modules/@mui/types": { - "version": "7.2.24", - "resolved": 
"https://registry.npmjs.org/@mui/types/-/types-7.2.24.tgz", - "integrity": "sha512-3c8tRt/CbWZ+pEg7QpSwbdxOk36EfmhbKf6AGZsD1EcLDLTSZoxxJ86FVtcjxvjuhdyBiWKSTGZFaXCnidO2kw==", - "license": "MIT", - "peerDependencies": { - "@types/react": "^17.0.0 || ^18.0.0 || ^19.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, "node_modules/@mui/types": { - "version": "7.4.6", - "resolved": "https://registry.npmjs.org/@mui/types/-/types-7.4.6.tgz", - "integrity": "sha512-NVBbIw+4CDMMppNamVxyTccNv0WxtDb7motWDlMeSC8Oy95saj1TIZMGynPpFLePt3yOD8TskzumeqORCgRGWw==", - "dev": true, + "version": "7.4.7", + "resolved": "https://registry.npmjs.org/@mui/types/-/types-7.4.7.tgz", + "integrity": "sha512-8vVje9rdEr1rY8oIkYgP+Su5Kwl6ik7O3jQ0wl78JGSmiZhRHV+vkjooGdKD8pbtZbutXFVTWQYshu2b3sG9zw==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.28.3" + "@babel/runtime": "^7.28.4" }, "peerDependencies": { "@types/react": "^17.0.0 || ^18.0.0 || ^19.0.0" @@ -4919,17 +4914,17 @@ } }, "node_modules/@mui/utils": { - "version": "6.4.9", - "resolved": "https://registry.npmjs.org/@mui/utils/-/utils-6.4.9.tgz", - "integrity": "sha512-Y12Q9hbK9g+ZY0T3Rxrx9m2m10gaphDuUMgWxyV5kNJevVxXYCLclYUCC9vXaIk1/NdNDTcW2Yfr2OGvNFNmHg==", + "version": "7.3.3", + "resolved": "https://registry.npmjs.org/@mui/utils/-/utils-7.3.3.tgz", + "integrity": "sha512-kwNAUh7bLZ7mRz9JZ+6qfRnnxbE4Zuc+RzXnhSpRSxjTlSTj7b4JxRLXpG+MVtPVtqks5k/XC8No1Vs3x4Z2gg==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.26.0", - "@mui/types": "~7.2.24", - "@types/prop-types": "^15.7.14", + "@babel/runtime": "^7.28.4", + "@mui/types": "^7.4.7", + "@types/prop-types": "^15.7.15", "clsx": "^2.1.1", "prop-types": "^15.8.1", - "react-is": "^19.0.0" + "react-is": "^19.1.1" }, "engines": { "node": ">=14.0.0" @@ -4948,20 +4943,6 @@ } } }, - "node_modules/@mui/utils/node_modules/@mui/types": { - "version": "7.2.24", - "resolved": "https://registry.npmjs.org/@mui/types/-/types-7.2.24.tgz", - 
"integrity": "sha512-3c8tRt/CbWZ+pEg7QpSwbdxOk36EfmhbKf6AGZsD1EcLDLTSZoxxJ86FVtcjxvjuhdyBiWKSTGZFaXCnidO2kw==", - "license": "MIT", - "peerDependencies": { - "@types/react": "^17.0.0 || ^18.0.0 || ^19.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, "node_modules/@napi-rs/wasm-runtime": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.0.3.tgz", @@ -5387,20 +5368,20 @@ } }, "node_modules/@patternfly/patternfly": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-6.3.1.tgz", - "integrity": "sha512-O/lTo5EHKzer/HNzqMQOQEAMG7izDDkEHpAeJ5+sGaeQ/maB3RK7sQsOPS4DjrnMxt4/cC6LogK2mowlbf1j5Q==", + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-6.4.0.tgz", + "integrity": "sha512-4drFhg74sEc/fftark5wZevODIog17qR4pwLCdB3j5iK3Uu5oMA2SdLhsEeEQggalfnFzve/Km87MdVR0ghhvQ==", "license": "MIT" }, "node_modules/@patternfly/react-core": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-6.3.1.tgz", - "integrity": "sha512-1qV20nU4M6PA28qnikH9fPLQlkteaZZToFlATjBNBw7aUI6zIvj7U0akkHz8raWcfHAI+tAzGV7dfKjiv035/g==", + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-6.4.0.tgz", + "integrity": "sha512-zMgJmcFohp2FqgAoZHg7EXZS7gnaFESquk0qIavemYI0FsqspVlzV2/PUru7w+86+jXfqebRhgubPRsv1eJwEg==", "license": "MIT", "dependencies": { - "@patternfly/react-icons": "^6.3.1", - "@patternfly/react-styles": "^6.3.1", - "@patternfly/react-tokens": "^6.3.1", + "@patternfly/react-icons": "^6.4.0", + "@patternfly/react-styles": "^6.4.0", + "@patternfly/react-tokens": "^6.4.0", "focus-trap": "7.6.4", "react-dropzone": "^14.3.5", "tslib": "^2.8.1" @@ -5411,9 +5392,9 @@ } }, "node_modules/@patternfly/react-icons": { - "version": "6.3.1", - "resolved": 
"https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-6.3.1.tgz", - "integrity": "sha512-uiMounSIww1iZLM4pq+X8c3upzwl9iowXRPjR5CA8entb70lwgAXg3PqvypnuTAcilTq1Y3k5sFTqkhz7rgKcQ==", + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-6.4.0.tgz", + "integrity": "sha512-SPjzatm73NUYv/BL6A/cjRA5sFQ15NkiyPAcT8gmklI7HY+ptd6/eg49uBDFmxTQcSwbb5ISW/R6wwCQBY2M+Q==", "license": "MIT", "peerDependencies": { "react": "^17 || ^18 || ^19", @@ -5421,21 +5402,21 @@ } }, "node_modules/@patternfly/react-styles": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-6.3.1.tgz", - "integrity": "sha512-hyb+PlO8YITjKh2wBvjdeZhX6FyB3hlf4r6yG4rPOHk4SgneXHjNSdGwQ3szAxgGqtbENCYtOqwD/8ai72GrxQ==", + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-6.4.0.tgz", + "integrity": "sha512-EXmHA67s5sy+Wy/0uxWoUQ52jr9lsH2wV3QcgtvVc5zxpyBX89gShpqv4jfVqaowznHGDoL6fVBBrSe9BYOliQ==", "license": "MIT" }, "node_modules/@patternfly/react-table": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-6.3.1.tgz", - "integrity": "sha512-ZndBbPcMr/vInP5eELRe9m7MWzRoejRAhWx+25xOdjVAd31/CmMK1nBgZk4QAXaWjH1P+uZaZYsTgr/FMTte2g==", + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-6.4.0.tgz", + "integrity": "sha512-yv0sFOLGts8a2q9C1xUegjp50ayYyVRe0wKjMf+aMSNIK8sVYu8qu0yfBsCDybsUCldue7+qsYKRLFZosTllWQ==", "license": "MIT", "dependencies": { - "@patternfly/react-core": "^6.3.1", - "@patternfly/react-icons": "^6.3.1", - "@patternfly/react-styles": "^6.3.1", - "@patternfly/react-tokens": "^6.3.1", + "@patternfly/react-core": "^6.4.0", + "@patternfly/react-icons": "^6.4.0", + "@patternfly/react-styles": "^6.4.0", + "@patternfly/react-tokens": "^6.4.0", "lodash": "^4.17.21", "tslib": "^2.8.1" }, @@ -5445,26 +5426,26 @@ } }, 
"node_modules/@patternfly/react-templates": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/@patternfly/react-templates/-/react-templates-6.3.1.tgz", - "integrity": "sha512-Rf23iWVq7cME/OE/T2F7Tbjhsjol6IdSW9CNEV2fDJQxcjfPG8UzJbLXii45ox0aLcjxTIWWXSr8vpaiDPzHrw==", + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/@patternfly/react-templates/-/react-templates-6.4.0.tgz", + "integrity": "sha512-n3/CWJ3jEv7d7ZjDa6g0B+k1N9kdw6WV259O44GqGSUd/cgMNZp+B9iIcOKQhekvCEPqvqzsAJT2b9X3YQNwkg==", "license": "MIT", "dependencies": { - "@patternfly/react-core": "^6.3.1", - "@patternfly/react-icons": "^6.3.1", - "@patternfly/react-styles": "^6.3.1", - "@patternfly/react-tokens": "^6.3.1", + "@patternfly/react-core": "^6.4.0", + "@patternfly/react-icons": "^6.4.0", + "@patternfly/react-styles": "^6.4.0", + "@patternfly/react-tokens": "^6.4.0", "tslib": "^2.8.1" }, "peerDependencies": { - "react": "^17 || ^18", - "react-dom": "^17 || ^18" + "react": "^17 || ^18 || ^19", + "react-dom": "^17 || ^18 || ^19" } }, "node_modules/@patternfly/react-tokens": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-6.3.1.tgz", - "integrity": "sha512-wt/xKU1tGCDXUueFb+8/Cwxlm4vUD/Xl26O8MxbSLm6NZAHOUPwytJ7gugloGSPvc/zcsXxEgKANL8UZNO6DTw==", + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-6.4.0.tgz", + "integrity": "sha512-iZthBoXSGQ/+PfGTdPFJVulaJZI3rwE+7A/whOXPGp3Jyq3k6X52pr1+5nlO6WHasbZ9FyeZGqXf4fazUZNjbw==", "license": "MIT" }, "node_modules/@pkgjs/parseargs": { @@ -7764,7 +7745,7 @@ "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", - "devOptional": true, + "dev": true, "license": "MIT", "dependencies": { "mime-types": "~2.1.34", @@ -7872,16 +7853,16 @@ } }, "node_modules/ajv": { - "version": "6.12.6", - 
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", "devOptional": true, "license": "MIT", "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" }, "funding": { "type": "github", @@ -7906,40 +7887,6 @@ } } }, - "node_modules/ajv-formats/node_modules/ajv": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ajv-formats/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "devOptional": true, - "license": "MIT" - }, - "node_modules/ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "ajv": "^6.9.1" - } - }, 
"node_modules/ansi-align": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", @@ -8167,16 +8114,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "license": "MIT", - "optional": true, - "engines": { - "node": ">=8" - } - }, "node_modules/array.prototype.findlast": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", @@ -8421,9 +8358,9 @@ } }, "node_modules/axios": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz", - "integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.2.tgz", + "integrity": "sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==", "dev": true, "license": "MIT", "dependencies": { @@ -9137,8 +9074,8 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", "integrity": "sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==", - "devOptional": true, "license": "MIT", + "optional": true, "engines": { "node": ">= 0.8" } @@ -9823,24 +9760,34 @@ } }, "node_modules/compression": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", - "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.8.1.tgz", + "integrity": 
"sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==", "devOptional": true, "license": "MIT", "dependencies": { - "accepts": "~1.3.5", - "bytes": "3.0.0", - "compressible": "~2.0.16", + "bytes": "3.1.2", + "compressible": "~2.0.18", "debug": "2.6.9", - "on-headers": "~1.0.2", - "safe-buffer": "5.1.2", + "negotiator": "~0.6.4", + "on-headers": "~1.1.0", + "safe-buffer": "5.2.1", "vary": "~1.1.2" }, "engines": { "node": ">= 0.8.0" } }, + "node_modules/compression/node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/compression/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -9858,12 +9805,15 @@ "devOptional": true, "license": "MIT" }, - "node_modules/compression/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "node_modules/compression/node_modules/negotiator": { + "version": "0.6.4", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", + "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==", "devOptional": true, - "license": "MIT" + "license": "MIT", + "engines": { + "node": ">= 0.6" + } }, "node_modules/concat-map": { "version": "0.0.1", @@ -11088,19 +11038,6 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": 
"sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "license": "MIT", - "optional": true, - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/dns-packet": { "version": "5.6.1", "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.6.1.tgz", @@ -12314,6 +12251,23 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/eslint/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, "node_modules/eslint/node_modules/brace-expansion": { "version": "1.1.12", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", @@ -12335,6 +12289,13 @@ "node": ">= 4" } }, + "node_modules/eslint/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "devOptional": true, + "license": "MIT" + }, "node_modules/eslint/node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -12923,6 +12884,40 @@ "webpack": "^4.0.0 || ^5.0.0" } }, + "node_modules/file-loader/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/file-loader/node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "ajv": "^6.9.1" + } + }, + "node_modules/file-loader/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, "node_modules/file-loader/node_modules/schema-utils": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", @@ -13275,6 +13270,33 @@ "webpack": "^5.11.0" } }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": 
"sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "ajv": "^6.9.1" + } + }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/brace-expansion": { "version": "1.1.12", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", @@ -13328,6 +13350,13 @@ "node": ">=12" } }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -13855,37 +13884,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "license": "MIT", - "optional": true, - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/globby/node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", - "license": "MIT", - "optional": true, - "engines": { - "node": ">= 4" - } - }, "node_modules/gopd": { "version": "1.2.0", "resolved": 
"https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", @@ -17997,9 +17995,9 @@ "optional": true }, "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", "devOptional": true, "license": "MIT" }, @@ -18121,6 +18119,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/keygrip/-/keygrip-1.1.0.tgz", "integrity": "sha512-iYSchDJ+liQ8iwbSI2QqsQOvqv58eJCEanyJPJi+Khyu8smkcKSFUCbPwzFcL7YVtZ6eONjqRX/38caJ7QjRAQ==", + "deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.", "dev": true, "license": "MIT", "dependencies": { @@ -18161,9 +18160,9 @@ } }, "node_modules/koa": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/koa/-/koa-3.0.1.tgz", - "integrity": "sha512-oDxVkRwPOHhGlxKIDiDB2h+/l05QPtefD7nSqRgDfZt8P+QVYFWjfeK8jANf5O2YXjk8egd7KntvXKYx82wOag==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/koa/-/koa-3.0.3.tgz", + "integrity": "sha512-MeuwbCoN1daWS32/Ni5qkzmrOtQO2qrnfdxDHjrm6s4b59yG4nexAJ0pTEFyzjLp0pBVO80CZp0vW8Ze30Ebow==", "dev": true, "license": "MIT", "dependencies": { @@ -18208,16 +18207,20 @@ } }, "node_modules/koa/node_modules/mime-types": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", - "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz", + "integrity": 
"sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==", "dev": true, "license": "MIT", "dependencies": { "mime-db": "^1.54.0" }, "engines": { - "node": ">= 0.6" + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/language-subtag-registry": { @@ -18648,9 +18651,9 @@ } }, "node_modules/luxon": { - "version": "3.7.1", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.7.1.tgz", - "integrity": "sha512-RkRWjA926cTvz5rAb1BqyWkKbbjzCGchDUIKMCUvNi17j6f6j8uHGDV82Aqcqtzd+icoYpELmG3ksgGiFNNcNg==", + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.7.2.tgz", + "integrity": "sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew==", "dev": true, "license": "MIT", "engines": { @@ -19845,9 +19848,9 @@ } }, "node_modules/mocha/node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "version": "10.5.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", + "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", "license": "ISC", "optional": true, "peer": true, @@ -20149,9 +20152,9 @@ } }, "node_modules/mod-arch-core": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/mod-arch-core/-/mod-arch-core-1.0.2.tgz", - "integrity": "sha512-7NG3o41+aGAJZAfmckuzYRKTNspPYI7vSk4UJQslkz972X/lqg+T+zDY9SpAlvOl7GME/tewQ3D/7oxc6u58ig==", + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/mod-arch-core/-/mod-arch-core-1.1.5.tgz", + "integrity": "sha512-63O2yFN7EmEFpGNr9p+ZNNz/l4vNRpt3jmy78ltzicGaexl5n4c0VZNAUNOvCqQZR11VeQg/quLsTq336C1m6Q==", "license": "Apache-2.0", "dependencies": { "lodash-es": "^4.17.15", @@ -20182,9 +20185,9 @@ } }, 
"node_modules/mod-arch-kubeflow": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/mod-arch-kubeflow/-/mod-arch-kubeflow-1.0.2.tgz", - "integrity": "sha512-uo62Jj6Gtcmez7wNsqq3iZ/h9m91A2ICoxaqzEuxrEzF3h3kBaHTX08AcEjVwqrQMAyBhYdaTC+pgIQCo01Iyw==", + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/mod-arch-kubeflow/-/mod-arch-kubeflow-1.1.5.tgz", + "integrity": "sha512-JdTM11Mx9QcqY67YlzyYlF/wmYR4K5jv3qwOvwoCoqAf20hgE2CyHkLyUe0xkR/JzroPy8fvFdVqQnAsaFooMw==", "license": "Apache-2.0", "engines": { "node": ">=20.0.0" @@ -20206,14 +20209,15 @@ "eslint-plugin-react-hooks": "^5.2.0" }, "peerDependencies": { - "@mui/material": "^6.0.0", - "react": ">=16.8.0" + "@mui/material": "^7.0.0", + "react": ">=18.2.0", + "react-dom": ">=18.2.0" } }, "node_modules/mod-arch-shared": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/mod-arch-shared/-/mod-arch-shared-1.0.2.tgz", - "integrity": "sha512-siz5UL+6QAWxqgz+vwu5XXG7p7eNaTx1857X/4z19dvNkIqHvXwN73+LOXLXkemcX7VBvhYkYb8NGySv0Wu6Rg==", + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/mod-arch-shared/-/mod-arch-shared-1.1.5.tgz", + "integrity": "sha512-VP9V6DXa8+VJ3UawbrqE11VCBBTNI8B3MfPB/+GyBa+Ko9bfDbnsBfZ+dPqfY9ydT/R0+Q6ZMGnqvMEXqwwEew==", "license": "Apache-2.0", "dependencies": { "@patternfly/patternfly": "^6.2.0", @@ -20322,7 +20326,7 @@ "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", - "devOptional": true, + "dev": true, "license": "MIT", "engines": { "node": ">= 0.6" @@ -21061,9 +21065,9 @@ } }, "node_modules/on-headers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", - "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz", + "integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==", "devOptional": true, "license": "MIT", "engines": { @@ -22588,6 +22592,40 @@ "webpack": "^4.0.0 || ^5.0.0" } }, + "node_modules/raw-loader/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/raw-loader/node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "ajv": "^6.9.1" + } + }, + "node_modules/raw-loader/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, "node_modules/raw-loader/node_modules/schema-utils": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", @@ -22683,9 +22721,9 @@ } }, "node_modules/react-is": { - "version": "19.1.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-19.1.1.tgz", - "integrity": "sha512-tr41fA15Vn8p4X9ntI+yCyeGSf1TlYaY5vlTZfQmeLBrFo3psOPX6HhTDnFNL9uj3EhP0KAQ80cugCl4b4BERA==", + "version": "19.2.0", + "resolved": 
"https://registry.npmjs.org/react-is/-/react-is-19.2.0.tgz", + "integrity": "sha512-x3Ax3kNSMIIkyVYhWPyO09bu0uttcAIoecO/um/rKGQ4EltYWVYtyiGkS/3xMynrbVQdS69Jhlv8FXUEZehlzA==", "license": "MIT" }, "node_modules/react-markdown": { @@ -23367,9 +23405,9 @@ } }, "node_modules/rslog": { - "version": "1.2.11", - "resolved": "https://registry.npmjs.org/rslog/-/rslog-1.2.11.tgz", - "integrity": "sha512-YgMMzQf6lL9q4rD9WS/lpPWxVNJ1ttY9+dOXJ0+7vJrKCAOT4GH0EiRnBi9mKOitcHiOwjqJPV1n/HRqqgZmOQ==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/rslog/-/rslog-1.3.0.tgz", + "integrity": "sha512-93DpwwaiRrLz7fJ5z6Uwb171hHBws1VVsWjU6IruLFX63BicLA44QNu0sfn3guKHnBHZMFSKO8akfx5QhjuegQ==", "dev": true, "license": "MIT" }, @@ -23614,23 +23652,6 @@ "url": "https://opencollective.com/webpack" } }, - "node_modules/schema-utils/node_modules/ajv": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, "node_modules/schema-utils/node_modules/ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", @@ -23644,13 +23665,6 @@ "ajv": "^8.8.2" } }, - "node_modules/schema-utils/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "devOptional": true, - "license": "MIT" - }, "node_modules/select-hose": { "version": "2.0.0", "resolved": 
"https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", @@ -23765,9 +23779,9 @@ } }, "node_modules/serve": { - "version": "14.2.4", - "resolved": "https://registry.npmjs.org/serve/-/serve-14.2.4.tgz", - "integrity": "sha512-qy1S34PJ/fcY8gjVGszDB3EXiPSk5FKhUa7tQe0UPRddxRidc2V6cNHPNewbE1D7MAkgLuWEt3Vw56vYy73tzQ==", + "version": "14.2.5", + "resolved": "https://registry.npmjs.org/serve/-/serve-14.2.5.tgz", + "integrity": "sha512-Qn/qMkzCcMFVPb60E/hQy+iRLpiU8PamOfOSYoAHmmF+fFFmpPpqa6Oci2iWYpTdOUM3VF+TINud7CfbQnsZbA==", "license": "MIT", "optional": true, "dependencies": { @@ -23778,7 +23792,7 @@ "chalk": "5.0.1", "chalk-template": "0.4.0", "clipboardy": "3.0.0", - "compression": "1.7.4", + "compression": "1.8.1", "is-port-reachable": "4.0.0", "serve-handler": "6.1.6", "update-check": "1.5.4" @@ -23995,13 +24009,6 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/serve/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "license": "MIT", - "optional": true - }, "node_modules/set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", @@ -25944,16 +25951,20 @@ } }, "node_modules/type-is/node_modules/mime-types": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", - "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz", + "integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==", "dev": true, "license": "MIT", "dependencies": { "mime-db": "^1.54.0" }, "engines": { - "node": ">= 0.6" + "node": ">=18" + }, + 
"funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/typed-array-buffer": { @@ -26422,6 +26433,40 @@ } } }, + "node_modules/url-loader/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/url-loader/node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "ajv": "^6.9.1" + } + }, + "node_modules/url-loader/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, "node_modules/url-loader/node_modules/schema-utils": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", @@ -26513,9 +26558,9 @@ } }, "node_modules/validator": { - "version": "13.15.15", - "resolved": "https://registry.npmjs.org/validator/-/validator-13.15.15.tgz", - "integrity": "sha512-BgWVbCI72aIQy937xbawcs+hrVaN/CZ2UwutgaJ36hGqRrLNM+f5LUT/YPRbo8IV/ASeFzXszezV+y2+rq3l8A==", + "version": "13.15.20", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.15.20.tgz", + "integrity": 
"sha512-KxPOq3V2LmfQPP4eqf3Mq/zrT0Dqp2Vmx2Bn285LwVahLc+CsxOM0crBHczm8ijlcjZ0Q5Xd6LW3z3odTPnlrw==", "license": "MIT", "optional": true, "engines": { diff --git a/clients/ui/frontend/package.json b/clients/ui/frontend/package.json index adf3e8201e..9f1927ce26 100644 --- a/clients/ui/frontend/package.json +++ b/clients/ui/frontend/package.json @@ -7,7 +7,7 @@ "license": "Apache-2.0", "private": true, "engines": { - "node": ">=20.0.0" + "node": ">=22.0.0" }, "scripts": { "build": "run-s build:prod", @@ -32,10 +32,10 @@ "cypress:server": "serve ./public-cypress -p 9001 -s -L" }, "devDependencies": { - "@module-federation/enhanced": "^0.18.0", - "@mui/icons-material": "^6.4.8", - "@mui/material": "^6.1.7", - "@mui/types": "^7.2.20", + "@module-federation/enhanced": "^0.21.4", + "@mui/icons-material": "^7.3.4", + "@mui/material": "^7.3.4", + "@mui/types": "^7.4.7", "@pmmmwh/react-refresh-webpack-plugin": "^0.5.15", "@swc/core": "^1.9.1", "@types/chai-subset": "^1.3.5", @@ -90,18 +90,18 @@ "dependencies": { "@emotion/react": "^11.14.0", "@emotion/styled": "^11.14.0", - "@patternfly/patternfly": "^6.3.1", - "@patternfly/react-core": "^6.3.1", - "@patternfly/react-icons": "^6.3.1", - "@patternfly/react-styles": "^6.3.1", - "@patternfly/react-table": "^6.3.1", - "@patternfly/react-templates": "^6.3.1", + "@patternfly/patternfly": "^6.4.0", + "@patternfly/react-core": "^6.4.0", + "@patternfly/react-icons": "^6.4.0", + "@patternfly/react-styles": "^6.4.0", + "@patternfly/react-table": "^6.4.0", + "@patternfly/react-templates": "^6.4.0", "classnames": "^2.2.6", "dompurify": "^3.2.4", "lodash-es": "^4.17.15", - "mod-arch-core": "~1.0.2", - "mod-arch-kubeflow": "~1.0.2", - "mod-arch-shared": "~1.0.2", + "mod-arch-core": "~1.1.5", + "mod-arch-kubeflow": "~1.1.5", + "mod-arch-shared": "~1.1.5", "react": "^18", "react-dom": "^18", "react-router": "^7.5.2", @@ -145,7 +145,7 @@ "eslint-plugin-react": "^7.37.5", "eslint-plugin-react-hooks": "^5.2.0", "npm-run-all": "^4.1.5", - "serve": 
"^14.2.4", + "serve": "^14.2.5", "ts-jest": "^29.4.0" } } diff --git a/clients/ui/frontend/src/__mocks__/index.ts b/clients/ui/frontend/src/__mocks__/index.ts index 8114e1d3fa..b336da531d 100644 --- a/clients/ui/frontend/src/__mocks__/index.ts +++ b/clients/ui/frontend/src/__mocks__/index.ts @@ -3,5 +3,6 @@ export * from './mockModelVersion'; export * from './mockModelVersionList'; export * from './mockModelArtifactList'; export * from './mockCatalogSourceList'; +export * from './mockCatalogSourceConfigList'; export * from './mockCatalogModelList'; export * from './mockCatalogModelArtifactList'; diff --git a/clients/ui/frontend/src/__mocks__/mockCatalogFilterOptionsList.ts b/clients/ui/frontend/src/__mocks__/mockCatalogFilterOptionsList.ts new file mode 100644 index 0000000000..616c41b7a2 --- /dev/null +++ b/clients/ui/frontend/src/__mocks__/mockCatalogFilterOptionsList.ts @@ -0,0 +1,140 @@ +/* eslint-disable camelcase */ +import { CatalogFilterOptionsList } from '~/app/modelCatalogTypes'; +import { + ModelCatalogStringFilterKey, + ModelCatalogNumberFilterKey, + ModelCatalogLicense, + ModelCatalogProvider, + ModelCatalogTask, + AllLanguageCode, + UseCaseOptionValue, +} from '~/concepts/modelCatalog/const'; + +export const mockCatalogFilterOptionsList = ( + partial?: Partial, +): CatalogFilterOptionsList => ({ + filters: { + [ModelCatalogStringFilterKey.PROVIDER]: { + type: 'string', + values: [ModelCatalogProvider.RED_HAT, ModelCatalogProvider.IBM, ModelCatalogProvider.GOOGLE], + }, + [ModelCatalogStringFilterKey.LICENSE]: { + type: 'string', + values: [ModelCatalogLicense.APACHE_2_0, ModelCatalogLicense.MIT], + }, + [ModelCatalogStringFilterKey.TASK]: { + type: 'string', + values: [ + ModelCatalogTask.TEXT_GENERATION, + ModelCatalogTask.TEXT_TO_TEXT, + ModelCatalogTask.IMAGE_TO_TEXT, + ModelCatalogTask.IMAGE_TEXT_TO_TEXT, + ModelCatalogTask.VIDEO_TO_TEXT, + ModelCatalogTask.AUDIO_TO_TEXT, + ], + }, + [ModelCatalogStringFilterKey.LANGUAGE]: { + type: 'string', + 
values: [ + AllLanguageCode.AR, + AllLanguageCode.CS, + AllLanguageCode.DE, + AllLanguageCode.EN, + AllLanguageCode.ES, + AllLanguageCode.FR, + AllLanguageCode.IT, + AllLanguageCode.JA, + AllLanguageCode.KO, + AllLanguageCode.NL, + AllLanguageCode.PT, + AllLanguageCode.ZH, + ], + }, + [ModelCatalogStringFilterKey.HARDWARE_TYPE]: { + type: 'string', + values: ['GPU', 'CPU', 'TPU', 'FPGA'], + }, + [ModelCatalogStringFilterKey.USE_CASE]: { + type: 'string', + values: [ + UseCaseOptionValue.CHATBOT, + UseCaseOptionValue.CODE_FIXING, + UseCaseOptionValue.LONG_RAG, + UseCaseOptionValue.RAG, + ], + }, + [ModelCatalogNumberFilterKey.MIN_RPS]: { + type: 'number', + range: { + min: 1, + max: 300, + }, + }, + // All latency metric combinations for dropdown options + ttft_mean: { + type: 'number' as const, + range: { min: 20, max: 893 }, + }, + ttft_p90: { + type: 'number' as const, + range: { min: 25, max: 600 }, + }, + ttft_p95: { + type: 'number' as const, + range: { min: 30, max: 700 }, + }, + ttft_p99: { + type: 'number' as const, + range: { min: 40, max: 893 }, + }, + e2e_mean: { + type: 'number' as const, + range: { min: 50, max: 800 }, + }, + e2e_p90: { + type: 'number' as const, + range: { min: 60, max: 900 }, + }, + e2e_p95: { + type: 'number' as const, + range: { min: 70, max: 1000 }, + }, + e2e_p99: { + type: 'number' as const, + range: { min: 80, max: 1200 }, + }, + tps_mean: { + type: 'number' as const, + range: { min: 10, max: 300 }, + }, + tps_p90: { + type: 'number' as const, + range: { min: 15, max: 350 }, + }, + tps_p95: { + type: 'number' as const, + range: { min: 20, max: 400 }, + }, + tps_p99: { + type: 'number' as const, + range: { min: 25, max: 500 }, + }, + itl_mean: { + type: 'number' as const, + range: { min: 5, max: 100 }, + }, + itl_p90: { + type: 'number' as const, + range: { min: 8, max: 120 }, + }, + itl_p95: { + type: 'number' as const, + range: { min: 10, max: 150 }, + }, + itl_p99: { + type: 'number' as const, + range: { min: 15, max: 200 }, 
+ }, + }, + ...partial, +}); diff --git a/clients/ui/frontend/src/__mocks__/mockCatalogModelArtifactList.ts b/clients/ui/frontend/src/__mocks__/mockCatalogModelArtifactList.ts index 741e7df815..169c33651a 100644 --- a/clients/ui/frontend/src/__mocks__/mockCatalogModelArtifactList.ts +++ b/clients/ui/frontend/src/__mocks__/mockCatalogModelArtifactList.ts @@ -1,8 +1,20 @@ -import { CatalogModelArtifact, CatalogModelArtifactList } from '~/app/modelCatalogTypes'; +/* eslint-disable camelcase */ +import { + CatalogArtifacts, + CatalogArtifactList, + CatalogModelArtifact, + CatalogArtifactType, + MetricsType, + CatalogPerformanceMetricsArtifact, + CatalogAccuracyMetricsArtifact, +} from '~/app/modelCatalogTypes'; +import { ModelRegistryMetadataType } from '~/app/types'; +import { UseCaseOptionValue } from '~/concepts/modelCatalog/const'; export const mockCatalogModelArtifact = ( partial?: Partial, -): CatalogModelArtifact => ({ +): CatalogArtifacts => ({ + artifactType: CatalogArtifactType.modelArtifact, createTimeSinceEpoch: '1739210683000', lastUpdateTimeSinceEpoch: '1739210683000', uri: '', @@ -10,12 +22,104 @@ export const mockCatalogModelArtifact = ( ...partial, }); +export const mockCatalogAccuracyMetricsArtifact = ( + partial?: Partial, +): CatalogAccuracyMetricsArtifact => ({ + artifactType: CatalogArtifactType.metricsArtifact, + metricsType: MetricsType.accuracyMetrics, + createTimeSinceEpoch: '1739210683000', + lastUpdateTimeSinceEpoch: '1739210683000', + customProperties: { + overall_average: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: 0.582439, + }, + arc_v1: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: 0.659556, + }, + }, + ...partial, +}); + +export const mockCatalogPerformanceMetricsArtifact = ( + partial?: Partial, +): CatalogPerformanceMetricsArtifact => ({ + artifactType: CatalogArtifactType.metricsArtifact, + metricsType: MetricsType.performanceMetrics, + createTimeSinceEpoch: '1739210683000', + 
lastUpdateTimeSinceEpoch: '1739210683000', + customProperties: { + config_id: { + metadataType: ModelRegistryMetadataType.STRING, + string_value: '0055d94f6a542f6932cac5dfa5ffdd38', + }, + hardware_count: { + metadataType: ModelRegistryMetadataType.INT, + int_value: '2', + }, + hardware_type: { + metadataType: ModelRegistryMetadataType.STRING, + string_value: 'H100-80', + }, + requests_per_second: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: 7, + }, + ttft_mean: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: 35.48818160947744, + }, + ttft_p90: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: 51.55777931213379, + }, + ttft_p95: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: 61.26761436462402, + }, + ttft_p99: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: 72.95823097229004, + }, + e2e_mean: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: 1994.480013381083, + }, + use_case: { + metadataType: ModelRegistryMetadataType.STRING, + string_value: UseCaseOptionValue.CODE_FIXING, + }, + }, + ...partial, +}); + export const mockCatalogModelArtifactList = ( - partial?: Partial, -): CatalogModelArtifactList => ({ + partial?: Partial, +): CatalogArtifactList => ({ items: [mockCatalogModelArtifact({})], pageSize: 10, size: 15, nextPageToken: '', ...partial, }); + +export const mockCatalogPerformanceMetricsArtifactList = ( + partial?: Partial, +): CatalogArtifactList => ({ + items: [mockCatalogPerformanceMetricsArtifact({}), mockCatalogModelArtifact({})], + pageSize: 10, + size: 15, + nextPageToken: '', + ...partial, +}); + +export const mockCatalogAccuracyMetricsArtifactList = ( + partial?: Partial, +): CatalogArtifactList => ({ + items: [mockCatalogAccuracyMetricsArtifact({}), mockCatalogModelArtifact({})], + pageSize: 10, + size: 15, + nextPageToken: '', + ...partial, +}); diff --git a/clients/ui/frontend/src/__mocks__/mockCatalogModelList.ts 
b/clients/ui/frontend/src/__mocks__/mockCatalogModelList.ts index 1612623d02..dce3505157 100644 --- a/clients/ui/frontend/src/__mocks__/mockCatalogModelList.ts +++ b/clients/ui/frontend/src/__mocks__/mockCatalogModelList.ts @@ -1,4 +1,5 @@ import { CatalogModel, CatalogModelList } from '~/app/modelCatalogTypes'; +import { ModelRegistryMetadataType } from '~/app/types'; export const mockCatalogModel = (partial?: Partial): CatalogModel => ({ // eslint-disable-next-line camelcase @@ -362,3 +363,21 @@ export const mockCatalogModelList = (partial?: Partial): Catal nextPageToken: '', ...partial, }); + +// Mock models for testing +export const mockValidatedModel = mockCatalogModel({ + name: 'validated-model', + tasks: ['text-generation'], + customProperties: { + validated: { + metadataType: ModelRegistryMetadataType.STRING, + // eslint-disable-next-line camelcase + string_value: '', + }, + }, +}); + +export const mockNonValidatedModel = mockCatalogModel({ + name: 'sample%20category%201-model-1', + tasks: ['text-generation'], +}); diff --git a/clients/ui/frontend/src/__mocks__/mockCatalogSourceConfigList.ts b/clients/ui/frontend/src/__mocks__/mockCatalogSourceConfigList.ts new file mode 100644 index 0000000000..593240a60b --- /dev/null +++ b/clients/ui/frontend/src/__mocks__/mockCatalogSourceConfigList.ts @@ -0,0 +1,74 @@ +import { + CatalogSourceConfig, + CatalogSourceConfigList, + YamlCatalogSourceConfig, + HuggingFaceCatalogSourceConfig, + CatalogSourceType, +} from '~/app/modelCatalogTypes'; + +export const mockYamlCatalogSourceConfig = ( + partial?: Partial, +): YamlCatalogSourceConfig => ({ + id: 'yaml-source-1', + name: 'Red Hat AI', + type: CatalogSourceType.YAML, + enabled: true, + labels: ['Red Hat AI'], + includedModels: [], + excludedModels: [], + isDefault: true, + yaml: 'version: 1.0\nmodels:\n - name: example-model', + ...partial, +}); + +export const mockHuggingFaceCatalogSourceConfig = ( + partial?: Partial, +): HuggingFaceCatalogSourceConfig => ({ + id: 
'huggingface-source-1', + name: 'Huggingface_Admin_1', + type: CatalogSourceType.HUGGING_FACE, + enabled: true, + labels: ['Hugging Face'], + includedModels: [], + excludedModels: [], + isDefault: false, + allowedOrganization: 'Google', + apiKey: undefined, + ...partial, +}); + +export const mockCatalogSourceConfig = ( + partial?: Partial, +): CatalogSourceConfig => { + if (partial?.type === CatalogSourceType.HUGGING_FACE) { + return mockHuggingFaceCatalogSourceConfig(partial as Partial); + } + return mockYamlCatalogSourceConfig(partial as Partial); +}; + +export const mockCatalogSourceConfigList = ( + partial?: Partial, +): CatalogSourceConfigList => ({ + catalogs: [ + mockYamlCatalogSourceConfig({ id: 'red-hat-ai', name: 'Red Hat AI', isDefault: true }), + mockYamlCatalogSourceConfig({ + id: 'red-hat-ai-validated', + name: 'Red Hat AI validated', + isDefault: true, + }), + mockHuggingFaceCatalogSourceConfig({ + id: 'huggingface-admin-1', + name: 'Huggingface_Admin_1', + allowedOrganization: 'Google', + isDefault: false, + }), + mockYamlCatalogSourceConfig({ + id: 'yaml-amdimport-1', + name: 'YAMLAmdImport_1', + isDefault: false, + includedModels: ['model1', 'model2'], + excludedModels: ['model3'], + }), + ], + ...partial, +}); diff --git a/clients/ui/frontend/src/__mocks__/mockCatalogSourceList.ts b/clients/ui/frontend/src/__mocks__/mockCatalogSourceList.ts index 9e717233f4..070c296764 100644 --- a/clients/ui/frontend/src/__mocks__/mockCatalogSourceList.ts +++ b/clients/ui/frontend/src/__mocks__/mockCatalogSourceList.ts @@ -3,11 +3,67 @@ import { CatalogSource, CatalogSourceList } from '~/app/modelCatalogTypes'; export const mockCatalogSource = (partial?: Partial): CatalogSource => ({ id: 'sample-source', name: 'sample source', + enabled: true, + labels: ['Sample category 1', 'Sample category 2', 'Community'], + status: 'available', ...partial, }); +// Mock source with no status (Starting state - no status field) +export const mockCatalogSourceStarting = (): 
CatalogSource => ({ + id: 'starting-source', + name: 'Starting Source', + enabled: true, + labels: ['Community'], + // No status field - represents "Starting" state +}); + +// Mock source with error status and error message (invalid credential) +export const mockCatalogSourceFailedCredential = (): CatalogSource => ({ + id: 'failed-credential-source', + name: 'Failed Credential Source', + enabled: true, + labels: ['Enterprise'], + status: 'error', + error: 'The provided API key is invalid or has expired. Please update your credentials.', +}); + +// Mock source with error status and error message (invalid organization) +export const mockCatalogSourceFailedOrg = (): CatalogSource => ({ + id: 'failed-org-source', + name: 'Failed Organization Source', + enabled: true, + labels: ['Enterprise'], + status: 'error', + error: "The specified organization 'invalid-org' does not exist or you don't have access to it.", +}); + +// Mock source with disabled status +export const mockCatalogSourceDisabled = (): CatalogSource => ({ + id: 'disabled-source', + name: 'Disabled Source', + enabled: false, + labels: ['Community'], + status: 'disabled', +}); + +// Mock source with available status +export const mockCatalogSourceActive = (): CatalogSource => ({ + id: 'active-source', + name: 'Active Source', + enabled: true, + labels: ['Community', 'Enterprise'], + status: 'available', +}); + export const mockCatalogSourceList = (partial?: Partial): CatalogSourceList => ({ - items: [mockCatalogSource({})], + items: [ + mockCatalogSourceActive(), + mockCatalogSourceStarting(), + mockCatalogSourceFailedCredential(), + mockCatalogSourceFailedOrg(), + mockCatalogSourceDisabled(), + ], pageSize: 10, size: 25, nextPageToken: '', diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelCatalog.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelCatalog.ts index 6b90b4bf8a..9730c7d41e 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelCatalog.ts 
+++ b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelCatalog.ts @@ -1,27 +1,100 @@ import { appChrome } from './appChrome'; +class ModelCatalogFilter { + constructor(private title: string) { + this.title = title; + } + + find() { + return cy.findByTestId(`${this.title}-filter`); + } + + findCheckbox(value: string) { + return this.find().findByTestId(`${this.title}-${value}-checkbox`); + } + + findShowMoreButton() { + return this.find().findByTestId(`${this.title}-filter-show-more`); + } + + findShowLessButton() { + return this.find().findByTestId(`${this.title}-filter-show-less`); + } + + findSearch() { + return this.find().findByTestId(`${this.title}-filter-search`); + } + + findEmpty() { + return this.find().findByTestId(`${this.title}-filter-empty`); + } +} + class ModelCatalog { visit() { cy.visit('/model-catalog'); this.wait(); } - navigate() { - appChrome.findNavItem('Model Catalog').click(); - this.wait(); - } - private wait() { cy.findByTestId('app-page-title').should('exist'); cy.findByTestId('app-page-title').contains('Model Catalog'); cy.testA11y(); } + findFilter(title: string) { + return new ModelCatalogFilter(title).find(); + } + + findFilterSearch(title: string) { + return new ModelCatalogFilter(title).findSearch(); + } + + findFilterEmpty(title: string) { + return new ModelCatalogFilter(title).findEmpty(); + } + + findFilterShowMoreButton(title: string) { + return new ModelCatalogFilter(title).findShowMoreButton(); + } + + findFilterShowLessButton(title: string) { + return new ModelCatalogFilter(title).findShowLessButton(); + } + + findFilterCheckbox(title: string, value: string) { + return new ModelCatalogFilter(title).findCheckbox(value); + } + tabEnabled() { appChrome.findNavItem('Model Catalog').should('exist'); return this; } + findAllModelsToggle() { + return cy.findByTestId('all'); + } + + findCategoryToggle(category: string) { + return cy.findByTestId(category); + } + + findCategoryTitle(category: string) { + return 
cy.findByTestId(['title', category]); + } + + findShowMoreModelsLink(category: string) { + return cy.findByTestId(['show-more-button', category]); + } + + findErrorState(category: string) { + return cy.findByTestId(['error-state', category]); + } + + findEmptyState(category: string) { + return cy.findByTestId(['empty-model-catalog-state', category]); + } + findModelCatalogEmptyState() { return cy.findByTestId('empty-model-catalog-state'); } @@ -38,6 +111,18 @@ class ModelCatalog { return cy.findAllByTestId('model-catalog-detail-link'); } + findValidatedModelBenchmarkLink() { + return cy.findAllByTestId('validated-model-benchmark-link'); + } + + findValidatedModelBenchmarkNext() { + return cy.findAllByTestId('validated-model-benchmark-next'); + } + + findValidatedModelBenchmarkPrev() { + return cy.findAllByTestId('validated-model-benchmark-prev'); + } + findModelCatalogDescription() { return cy.findByTestId('model-catalog-card-description'); } @@ -94,6 +179,102 @@ class ModelCatalog { findDetailsDescription() { return cy.findByTestId('model-long-description'); } + + // Tabs functionality + findModelDetailsTabs() { + return cy.findByTestId('model-details-page-tabs'); + } + + findOverviewTab() { + return cy.findByTestId('model-overview-tab'); + } + + findPerformanceInsightsTab() { + return cy.findByTestId('performance-insights-tab'); + } + + findOverviewTabContent() { + return cy.findByTestId('model-overview-tab-content'); + } + + findPerformanceInsightsTabContent() { + return cy.findByTestId('performance-insights-tab-content'); + } + + clickOverviewTab() { + this.findOverviewTab().click(); + return this; + } + + clickPerformanceInsightsTab() { + this.findPerformanceInsightsTab().click(); + return this; + } + + // Hardware Configuration functionality + findHardwareConfigurationTitle() { + return cy.contains('Hardware Configuration'); + } + + findHardwareConfigurationDescription() { + return cy.contains( + 'Compare the performance metrics of hardware configuration to 
determine the most suitable option for deployment.', + ); + } + + findHardwareConfigurationTable() { + return cy.findByTestId('hardware-configuration-table'); + } + + findHardwareConfigurationTableHeaders() { + return cy.get('[data-testid="hardware-configuration-table"] thead th'); + } + + findHardwareConfigurationTableRows() { + return cy.get('[data-testid="hardware-configuration-table"] tbody tr'); + } + + findHardwareConfigurationTableData() { + return cy.get('[data-testid="hardware-configuration-table"] tbody td'); + } + + findHardwareConfigurationColumn(columnName: string) { + return cy.get(`[data-testid="hardware-configuration-table"] [data-label="${columnName}"]`); + } + + findHardwareConfigurationSortButton(columnName: string) { + return cy.get(`[data-testid="hardware-configuration-table"] th`).contains(columnName); + } + + findHardwareConfigurationPagination() { + return cy.get('[data-testid="hardware-configuration-table"] .pf-v6-c-pagination'); + } + + findValidatedModelHardware() { + return cy.findByTestId('validated-model-hardware'); + } + + findValidatedModelRps() { + return cy.findByTestId('validated-model-rps'); + } + + findValidatedModelTtft() { + return cy.findByTestId('validated-model-ttft'); + } + + findWorkloadTypeFilter() { + return cy.findByTestId('workload-type-filter'); + } + + findWorkloadTypeOption(label: string) { + // Workload type uses checkboxes in a panel, not menu items + // Find checkbox by its label within the dropdown panel + return cy.contains('label', label).parent().find('input[type="checkbox"]'); + } + + selectWorkloadType(label: string) { + this.findWorkloadTypeOption(label).click(); + } } export const modelCatalog = new ModelCatalog(); diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelCatalogSettings.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelCatalogSettings.ts new file mode 100644 index 0000000000..9a40b73a12 --- /dev/null +++ 
b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelCatalogSettings.ts @@ -0,0 +1,374 @@ +import { appChrome } from './appChrome'; +import { TableRow } from './components/table'; +import { Modal } from './components/Modal'; + +class DeleteSourceModal extends Modal { + constructor() { + super('Delete a source'); + } + + find() { + return cy.findByTestId('delete-source-modal'); + } + + findDeleteButton() { + return this.findFooter().findByRole('button', { name: 'Delete' }); + } + + findConfirmInput() { + return cy.findByTestId('delete-modal-input'); + } + + typeConfirmation(text: string) { + this.findConfirmInput().clear().type(text); + return this; + } +} + +class CatalogSourceConfigRow extends TableRow { + findName() { + return this.find().find('[data-label="Name"]'); + } + + findOrganization() { + return this.find().find('[data-label="Organization"]'); + } + + findModelVisibility() { + return this.find().find('[data-label="Model visibility"]'); + } + + findSourceType() { + return this.find().find('[data-label="Source type"]'); + } + + findEnableToggle() { + return this.find().find('[data-label="Enable"]').find('input[type="checkbox"]'); + } + + findValidationStatus() { + return this.find().find('[data-label="Validation status"]'); + } + + findManageSourceButton() { + return this.find() + .find('[data-label="Actions"]') + .findByRole('button', { name: 'Manage source' }); + } + + shouldHaveModelVisibility(visibility: 'Filtered' | 'Unfiltered') { + this.findModelVisibility().contains(visibility); + return this; + } + + shouldHaveOrganization(org: string) { + this.findOrganization().contains(org); + return this; + } + + shouldHaveSourceType(type: string) { + this.findSourceType().contains(type); + return this; + } + + toggleEnable() { + this.findEnableToggle().click({ force: true }); + return this; + } + + shouldHaveEnableToggle(shouldExist: boolean) { + if (shouldExist) { + this.findEnableToggle().should('exist'); + } else { + 
this.find().find('[data-label="Enable"]').should('be.empty'); + } + return this; + } + + shouldHaveEnableState(enabled: boolean) { + if (enabled) { + this.findEnableToggle().should('be.checked'); + } else { + this.findEnableToggle().should('not.be.checked'); + } + return this; + } + + shouldHaveKebab(shouldExist: boolean) { + if (shouldExist) { + this.findKebab().should('exist'); + } else { + this.find().within(() => { + cy.get('[data-testid*="source-actions"]').should('not.exist'); + }); + } + return this; + } +} + +class ModelCatalogSettings { + visit(wait = true) { + cy.visit('/model-catalog-settings'); + if (wait) { + this.wait(); + } + } + + navigate() { + cy.get('body').then(($body) => { + if ($body.find('#page-sidebar').length > 0) { + this.findNavItem().click(); + this.wait(); + } + }); + } + + private wait() { + this.findHeading(); + cy.testA11y(); + } + + findHeading() { + cy.findByTestId('app-page-title').should('exist'); + cy.findByTestId('app-page-title').contains('Model catalog settings'); + } + + findNavItem() { + return appChrome.findNavItem('Model catalog settings', 'Settings'); + } + + findDescription() { + return cy.contains('Manage model catalog sources for your organization.'); + } + + findAddSourceButton() { + return cy.findByTestId('add-source-button'); + } + + findTable() { + return cy.findByTestId('catalog-source-configs-table'); + } + + findEmptyState() { + return cy.findByTestId('catalog-settings-empty-state'); + } + + getRow(name: string) { + return new CatalogSourceConfigRow(() => + this.findTable().find('tbody').find('tr').contains(name).parents('tr'), + ); + } + + findRows() { + return this.findTable().find('tbody tr'); + } + + shouldHaveSourceConfigs() { + this.findTable().should('exist'); + this.findRows().should('have.length.at.least', 1); + return this; + } + + shouldBeEmpty() { + this.findEmptyState().should('exist'); + return this; + } +} + +class ManageSourcePage { + visitAddSource(wait = true) { + 
cy.visit('/model-catalog-settings/add-source'); + if (wait) { + this.wait(); + } + } + + visitManageSource(catalogSourceId: string, wait = true) { + cy.visit(`/model-catalog-settings/manage-source/${encodeURIComponent(catalogSourceId)}`); + if (wait) { + this.wait(); + } + } + + private wait() { + this.findHeading(); + cy.testA11y(); + } + + findHeading() { + cy.findByTestId('app-page-title').should('exist'); + } + + findBreadcrumb() { + return cy.get('a[href="/model-catalog-settings"]').contains('Model catalog settings'); + } + + findBreadcrumbAction() { + return cy.findByTestId('breadcrumb-source-action'); + } + + findAddSourceTitle() { + return cy.findByTestId('app-page-title').contains('Add a source'); + } + + findManageSourceTitle() { + return cy.findByTestId('app-page-title').contains('Manage source'); + } + + findAddSourceDescription() { + return cy.contains('Add a new model catalog source to your organization.'); + } + + findManageSourceDescription() { + return cy.contains('Manage the selected model catalog source.'); + } + + // Form field methods + findNameInput() { + return cy.findByTestId('source-name-input'); + } + + findNameError() { + return cy.findByTestId('source-name-error'); + } + + findSourceTypeHuggingFace() { + return cy.findByTestId('source-type-huggingface'); + } + + findSourceTypeYaml() { + return cy.findByTestId('source-type-yaml'); + } + + findSourceTypeHuggingFaceLabel() { + return cy.get('label[for="source-type-huggingface"]'); + } + + findSourceTypeYamlLabel() { + return cy.get('label[for="source-type-yaml"]'); + } + + findCredentialsSection() { + return cy.findByTestId('credentials-section'); + } + + findAccessTokenInput() { + return cy.findByTestId('access-token-input'); + } + + findAccessTokenError() { + return cy.findByTestId('access-token-error'); + } + + findOrganizationInput() { + return cy.findByTestId('organization-input'); + } + + findOrganizationError() { + return cy.findByTestId('organization-error'); + } + + 
findYamlSection() { + return cy.findByTestId('yaml-section'); + } + + findYamlContentInput() { + return cy.findByTestId('yaml-content-input').find('textarea'); + } + + findYamlContentError() { + return cy.findByTestId('yaml-content-error'); + } + + findModelVisibilitySection() { + return cy.findByTestId('model-visibility-section'); + } + + toggleModelVisibility() { + this.findModelVisibilitySection().find('button').first().click(); + } + + findAllowedModelsInput() { + return cy.findByTestId('allowed-models-input'); + } + + findExcludedModelsInput() { + return cy.findByTestId('excluded-models-input'); + } + + findEnableSourceCheckbox() { + return cy.findByTestId('enable-source-checkbox'); + } + + findSubmitButton() { + return cy.findByTestId('submit-button'); + } + + findPreviewButton() { + return cy.findByTestId('preview-button'); + } + + findCancelButton() { + return cy.findByTestId('cancel-button'); + } + + fillSourceName(name: string) { + this.findNameInput().clear().type(name); + } + + selectSourceType(type: 'huggingface' | 'yaml') { + if (type === 'huggingface') { + this.findSourceTypeHuggingFaceLabel().click(); + } else { + this.findSourceTypeYamlLabel().click(); + } + } + + fillAccessToken(token: string) { + this.findAccessTokenInput().clear().type(token); + } + + fillOrganization(org: string) { + this.findOrganizationInput().clear().type(org); + } + + fillYamlContent(yaml: string) { + this.findYamlContentInput().clear().type(yaml); + } + + fillAllowedModels(models: string) { + this.findAllowedModelsInput().clear().type(models); + } + + fillExcludedModels(models: string) { + this.findExcludedModelsInput().clear().type(models); + } + + toggleEnableSource() { + this.findEnableSourceCheckbox().click(); + } + + findPreviewPanel() { + return cy.findByTestId('preview-panel'); + } + + findPreviewPanelTitle() { + return cy.contains('Model catalog preview'); + } + + findPreviewPanelEmptyMessage() { + return cy.contains('To view the models from this source that will 
appear'); + } + + findPreviewButtonHeader() { + return cy.findByTestId('preview-button-header'); + } + + findPreviewButtonPanel() { + return cy.findByTestId('preview-button-panel'); + } +} + +export const modelCatalogSettings = new ModelCatalogSettings(); +export const manageSourcePage = new ManageSourcePage(); +export const deleteSourceModal = new DeleteSourceModal(); diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/modelVersionArchive.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/modelVersionArchive.ts index 1e433ce1c1..2b1194efcf 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/modelVersionArchive.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/modelVersionArchive.ts @@ -57,7 +57,9 @@ class ModelVersionArchive { visit() { const rmId = '1'; const preferredModelRegistry = 'modelregistry-sample'; - cy.visit(`/model-registry/${preferredModelRegistry}/registeredModels/${rmId}/versions/archive`); + cy.visit( + `/model-registry/${preferredModelRegistry}/registered-models/${rmId}/versions/archive`, + ); this.wait(); } @@ -66,14 +68,14 @@ class ModelVersionArchive { const rmId = '1'; const preferredModelRegistry = 'modelregistry-sample'; cy.visit( - `/model-registry/${preferredModelRegistry}/registeredModels/${rmId}/versions/archive/${mvId}`, + `/model-registry/${preferredModelRegistry}/registered-models/${rmId}/versions/archive/${mvId}`, ); } visitModelVersionList() { const rmId = '1'; const preferredModelRegistry = 'modelregistry-sample'; - cy.visit(`/model-registry/${preferredModelRegistry}/registeredModels/${rmId}/versions`); + cy.visit(`/model-registry/${preferredModelRegistry}/registered-models/${rmId}/versions`); this.wait(); } @@ -81,7 +83,9 @@ class ModelVersionArchive { const mvId = '3'; const rmId = '1'; const preferredModelRegistry = 'modelregistry-sample'; - 
cy.visit(`/model-registry/${preferredModelRegistry}/registeredModels/${rmId}/versions/${mvId}`); + cy.visit( + `/model-registry/${preferredModelRegistry}/registered-models/${rmId}/versions/${mvId}`, + ); this.wait(); } diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/modelVersionDetails.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/modelVersionDetails.ts index 2be1822575..485ad64a02 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/modelVersionDetails.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/modelVersionDetails.ts @@ -20,7 +20,9 @@ class ModelVersionDetails { const preferredModelRegistry = 'modelregistry-sample'; const rmId = '1'; const mvId = '1'; - cy.visit(`/model-registry/${preferredModelRegistry}/registeredModels/${rmId}/versions/${mvId}`); + cy.visit( + `/model-registry/${preferredModelRegistry}/registered-models/${rmId}/versions/${mvId}`, + ); this.wait(); } diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/registerModelPage.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/registerModelPage.ts index 4e9f4af7c4..b37a542f40 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/registerModelPage.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/registerModelPage.ts @@ -21,7 +21,7 @@ class RegisterModelPage { visit() { const preferredModelRegistry = 'modelregistry-sample'; - cy.visit(`/model-registry/${preferredModelRegistry}/registerModel`); + cy.visit(`/model-registry/${preferredModelRegistry}/register/model`); this.wait(); } diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/registerVersionPage.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/registerVersionPage.ts index 3b9df6e9a9..190c0406be 100644 --- 
a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/registerVersionPage.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/registerVersionPage.ts @@ -18,8 +18,8 @@ class RegisterVersionPage { const preferredModelRegistry = 'modelregistry-sample'; cy.visit( registeredModelId - ? `/model-registry/${preferredModelRegistry}/registeredModels/${registeredModelId}/registerVersion` - : `/model-registry/${preferredModelRegistry}/registerVersion`, + ? `/model-registry/${preferredModelRegistry}/registered-models/${registeredModelId}/register/version` + : `/model-registry/${preferredModelRegistry}/register/version`, ); this.wait(); } diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/registeredModelArchive.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/registeredModelArchive.ts index 6129d4b809..812576b367 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/registeredModelArchive.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/pages/modelRegistryView/registeredModelArchive.ts @@ -60,20 +60,22 @@ class ModelArchive { visit() { const preferredModelRegistry = 'modelregistry-sample'; - cy.visit(`/model-registry/${preferredModelRegistry}/registeredModels/archive`); + cy.visit(`/model-registry/${preferredModelRegistry}/registered-models/archive`); this.wait(); } visitArchiveModelDetail() { const rmId = '2'; const preferredModelRegistry = 'modelregistry-sample'; - cy.visit(`/model-registry/${preferredModelRegistry}/registeredModels/archive/${rmId}`); + cy.visit(`/model-registry/${preferredModelRegistry}/registered-models/archive/${rmId}`); } visitArchiveModelVersionList() { const rmId = '2'; const preferredModelRegistry = 'modelregistry-sample'; - cy.visit(`/model-registry/${preferredModelRegistry}/registeredModels/archive/${rmId}/versions`); + cy.visit( + 
`/model-registry/${preferredModelRegistry}/registered-models/archive/${rmId}/versions`, + ); } visitModelList() { @@ -84,7 +86,7 @@ class ModelArchive { visitModelDetails() { const rmId = '2'; const preferredModelRegistry = 'modelregistry-sample'; - cy.visit(`/model-registry/${preferredModelRegistry}/registeredModels/${rmId}`); + cy.visit(`/model-registry/${preferredModelRegistry}/registered-models/${rmId}`); this.wait(); } diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/support/commands/api.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/support/commands/api.ts index 7125b3820f..d60e98b8bc 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/support/commands/api.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/support/commands/api.ts @@ -12,8 +12,9 @@ import type { RegisteredModelList, } from '~/app/types'; import type { + CatalogFilterOptionsList, + CatalogArtifactList, CatalogModel, - CatalogModelArtifactList, CatalogModelList, CatalogSourceList, } from '~/app/modelCatalogTypes'; @@ -141,7 +142,15 @@ declare global { ) => Cypress.Chainable) & (( type: 'GET /api/:apiVersion/model_catalog/models', - options: { path: { apiVersion: string }; query: { source: string } }, + options: { + path: { apiVersion: string }; + query: { + source?: string; + filterQuery?: string; + sourceLabel?: string; + searchKeyword?: string; + }; + }, response: ApiResponse, ) => Cypress.Chainable) & (( @@ -157,7 +166,12 @@ declare global { (( type: 'GET /api/:apiVersion/model_catalog/sources/:sourceId/artifacts/:modelName', options: { path: { apiVersion: string; sourceId: string; modelName: string } }, - response: ApiResponse, + response: ApiResponse, + ) => Cypress.Chainable) & + (( + type: 'GET /api/:apiVersion/model_catalog/models/filter_options', + options: { path: { apiVersion: string }; query: { namespace: string } }, + response: ApiResponse, ) => Cypress.Chainable); } } diff --git 
a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalog.cy.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalog.cy.ts index 2f92fe590b..dd44c15014 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalog.cy.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalog.cy.ts @@ -1,18 +1,25 @@ import { modelCatalog } from '~/__tests__/cypress/cypress/pages/modelCatalog'; import { + mockCatalogAccuracyMetricsArtifact, mockCatalogModel, + mockCatalogModelArtifact, mockCatalogModelList, + mockCatalogPerformanceMetricsArtifact, mockCatalogSource, mockCatalogSourceList, } from '~/__mocks__'; import type { CatalogSource } from '~/app/modelCatalogTypes'; import { MODEL_CATALOG_API_VERSION } from '~/__tests__/cypress/cypress/support/commands/api'; +import { mockCatalogFilterOptionsList } from '~/__mocks__/mockCatalogFilterOptionsList'; +import { SourceLabel } from '~/app/modelCatalogTypes'; type HandlersProps = { sources?: CatalogSource[]; + modelsPerCategory?: number; }; const initIntercepts = ({ sources = [mockCatalogSource({}), mockCatalogSource({ id: 'source-2', name: 'source 2' })], + modelsPerCategory = 4, }: HandlersProps) => { cy.interceptApi( `GET /api/:apiVersion/model_catalog/sources`, @@ -24,15 +31,80 @@ const initIntercepts = ({ }), ); + sources.forEach((source) => { + source.labels.forEach((label) => { + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/models`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + query: { + sourceLabel: label, + }, + }, + mockCatalogModelList({ + items: Array.from({ length: modelsPerCategory }, (_, i) => + mockCatalogModel({ + name: `${label.toLowerCase()}-model-${i + 1}`, + // eslint-disable-next-line camelcase + source_id: source.id, + }), + ), + }), + ); + }); + }); + + // Intercept requests for sources without labels if they exist + const 
hasSourcesWithoutLabels = sources.some( + (source) => + source.enabled !== false && + (source.labels.length === 0 || source.labels.every((label) => !label.trim())), + ); + + if (hasSourcesWithoutLabels) { + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/models`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + query: { sourceLabel: SourceLabel.other }, + }, + mockCatalogModelList({ + items: Array.from({ length: modelsPerCategory }, (_, i) => + mockCatalogModel({ + name: `custom-model-${i + 1}`, + // eslint-disable-next-line camelcase + source_id: sources.find((s) => s.labels.length === 0)?.id || 'custom-source', + }), + ), + }), + ); + } + cy.interceptApi( - `GET /api/:apiVersion/model_catalog/models`, + `GET /api/:apiVersion/model_catalog/models/filter_options`, { path: { apiVersion: MODEL_CATALOG_API_VERSION }, - query: { source: 'sample-source' }, + query: { namespace: 'kubeflow' }, + }, + mockCatalogFilterOptionsList(), + ); + + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/sources/:sourceId/artifacts/:modelName`, + { + path: { + apiVersion: MODEL_CATALOG_API_VERSION, + sourceId: 'sample-source', + modelName: 'repo1/model1', + }, + }, + { + items: [ + mockCatalogPerformanceMetricsArtifact({}), + mockCatalogAccuracyMetricsArtifact({}), + mockCatalogModelArtifact({}), + ], }, - mockCatalogModelList({ - items: [mockCatalogModel({})], - }), ); }; @@ -40,14 +112,12 @@ describe('Model Catalog Page', () => { it('model catalog tab should be enabled', () => { initIntercepts({}); modelCatalog.visit(); - modelCatalog.navigate(); modelCatalog.tabEnabled(); }); it('should show empty state when configmap has empty sources', () => { initIntercepts({ sources: [] }); modelCatalog.visit(); - modelCatalog.navigate(); modelCatalog.visit(); modelCatalog.findModelCatalogEmptyState().should('exist'); }); @@ -55,10 +125,97 @@ describe('Model Catalog Page', () => { it('should display model catalog content when data is loaded', () => { initIntercepts({}); 
modelCatalog.visit(); - modelCatalog.navigate(); modelCatalog.findLoadingState().should('not.exist'); modelCatalog.findPageTitle().should('be.visible'); modelCatalog.findPageDescription().should('be.visible'); modelCatalog.findModelCatalogCards().should('have.length.at.least', 1); }); + + it('should display model catalog filters', () => { + initIntercepts({}); + modelCatalog.visit(); + modelCatalog.findFilter('Provider').should('be.visible'); + modelCatalog.findFilter('License').should('be.visible'); + modelCatalog.findFilter('Task').should('be.visible'); + modelCatalog.findFilter('Language').should('be.visible'); + }); + + it('filters show more and show less button should work', () => { + initIntercepts({}); + modelCatalog.visit(); + modelCatalog.findFilterShowMoreButton('Task').click({ scrollBehavior: false }); + modelCatalog.findFilterCheckbox('Task', 'text-generation').should('be.visible'); + modelCatalog.findFilterCheckbox('Task', 'text-to-text').should('be.visible'); + modelCatalog.findFilterCheckbox('Task', 'image-to-text').should('be.visible'); + modelCatalog.findFilterCheckbox('Task', 'image-text-to-text').should('be.visible'); + modelCatalog.findFilterCheckbox('Task', 'audio-to-text').should('be.visible'); + modelCatalog.findFilterCheckbox('Task', 'video-to-text').should('be.visible'); + modelCatalog.findFilterShowLessButton('Task').click({ scrollBehavior: false }); + modelCatalog.findFilterCheckbox('Task', 'audio-to-text').should('not.exist'); + }); + + it('filters should be searchable', () => { + initIntercepts({}); + modelCatalog.visit(); + modelCatalog.findFilterSearch('Task').type('audio-to-text'); + modelCatalog.findFilterCheckbox('Task', 'audio-to-text').should('be.visible'); + modelCatalog.findFilterCheckbox('Task', 'video-to-text').should('not.be.exist'); + modelCatalog.findFilterSearch('Task').type('test'); + modelCatalog.findFilterEmpty('Task').should('be.visible'); + }); + + it('checkbox should work', () => { + // Calculate expected category 
count based on sources + const defaultSources = [ + mockCatalogSource({}), + mockCatalogSource({ id: 'source-2', name: 'source 2' }), + ]; + const uniqueLabels = new Set(); + defaultSources.forEach((source) => { + source.labels.forEach((label) => { + if (label.trim()) { + uniqueLabels.add(label.trim()); + } + }); + }); + + // Check if there are sources without labels + const hasSourcesWithoutLabels = defaultSources.some( + (source) => + source.enabled !== false && + (source.labels.length === 0 || source.labels.every((label) => !label.trim())), + ); + + // Expected count: unique labels + (1 if sources without labels exist) + const expectedCategoryCount = uniqueLabels.size + (hasSourcesWithoutLabels ? 1 : 0); + + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/models`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + query: { sourceLabel: '' }, + }, + mockCatalogModelList({ + items: [mockCatalogModel({})], + }), + ).as('getCatalogModelsBySource'); + + initIntercepts({ sources: defaultSources }); + modelCatalog.visit(); + modelCatalog.findFilterCheckbox('Task', 'text-generation').click(); + modelCatalog.findFilterCheckbox('Task', 'text-to-text').click(); + modelCatalog.findFilterCheckbox('Provider', 'Google').click(); + + // Wait for the expected number of API calls (one per category section when filters are applied) + const waitCalls = Array.from( + { length: expectedCategoryCount }, + () => '@getCatalogModelsBySource', + ); + cy.wait(waitCalls).then((interceptions) => { + const lastInterception = interceptions[interceptions.length - 1]; + expect(lastInterception.request.url).to.include( + 'tasks+IN+%28%27text-generation%27%2C%27text-to-text%27%29+AND+provider%3D%27Google%27', + ); + }); + }); }); diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogAllModelsView.cy.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogAllModelsView.cy.ts new file mode 100644 index 
0000000000..dead8339a2 --- /dev/null +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogAllModelsView.cy.ts @@ -0,0 +1,205 @@ +import { modelCatalog } from '~/__tests__/cypress/cypress/pages/modelCatalog'; +import { + mockCatalogModel, + mockCatalogModelList, + mockCatalogSource, + mockCatalogSourceList, +} from '~/__mocks__'; +import type { CatalogSource } from '~/app/modelCatalogTypes'; +import { MODEL_CATALOG_API_VERSION } from '~/__tests__/cypress/cypress/support/commands/api'; +import { mockCatalogFilterOptionsList } from '~/__mocks__/mockCatalogFilterOptionsList'; +import { SourceLabel } from '~/app/modelCatalogTypes'; + +type HandlersProps = { + sources?: CatalogSource[]; + modelsPerCategory?: number; + isEmpty?: boolean; + includeSourcesWithoutLabels?: boolean; +}; + +const initIntercepts = ({ + sources = [ + mockCatalogSource({ id: 'huggingface', name: 'Hugging Face', labels: ['Hugging Face'] }), + mockCatalogSource({ id: 'openvino', name: 'OpenVINO', labels: ['OpenVINO'] }), + mockCatalogSource({ id: 'community', name: 'Community', labels: ['Community'] }), + mockCatalogSource({ id: 'custom-source', name: 'Custom Source', labels: [] }), + ], + modelsPerCategory = 4, + isEmpty = false, + includeSourcesWithoutLabels = true, +}: HandlersProps) => { + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/sources`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + }, + mockCatalogSourceList({ + items: sources, + }), + ); + + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/models/filter_options`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + query: { namespace: 'kubeflow' }, + }, + mockCatalogFilterOptionsList(), + ); + + sources.forEach((source) => { + source.labels.forEach((label) => { + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/models`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + query: { sourceLabel: label }, + }, + mockCatalogModelList({ + items: isEmpty + ? 
[] + : Array.from({ length: modelsPerCategory }, (_, i) => + mockCatalogModel({ + name: `${label.toLowerCase()}-model-${i + 1}`, + // eslint-disable-next-line camelcase + source_id: source.id, + }), + ), + }), + ); + }); + }); + + // Intercept requests for sources without labels (sourceLabel=null) + if (includeSourcesWithoutLabels) { + const hasSourcesWithoutLabels = sources.some( + (source) => + source.enabled !== false && + (source.labels.length === 0 || source.labels.every((label) => !label.trim())), + ); + + if (hasSourcesWithoutLabels) { + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/models`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + query: { sourceLabel: SourceLabel.other }, + }, + mockCatalogModelList({ + items: isEmpty + ? [] + : Array.from({ length: modelsPerCategory }, (_, i) => + mockCatalogModel({ + name: `custom-model-${i + 1}`, + // eslint-disable-next-line camelcase + source_id: sources.find((s) => s.labels.length === 0)?.id || 'custom-source', + }), + ), + }), + ); + } + } +}; + +describe('Model Catalog All Models View', () => { + beforeEach(() => { + initIntercepts({}); + modelCatalog.visit(); + }); + + describe('Category Sections', () => { + it('should display all category sections when sources without labels exist', () => { + modelCatalog.findAllModelsToggle().should('be.visible'); + modelCatalog.findCategoryToggle('label-Hugging Face').should('be.visible'); + modelCatalog.findCategoryToggle('label-OpenVINO').should('be.visible'); + modelCatalog.findCategoryToggle('label-Community').should('be.visible'); + modelCatalog.findCategoryToggle('no-labels').should('be.visible'); + }); + + it('should hide Community and custom models section when no sources without labels exist', () => { + initIntercepts({ + sources: [ + mockCatalogSource({ id: 'huggingface', name: 'Hugging Face', labels: ['Hugging Face'] }), + mockCatalogSource({ id: 'openvino', name: 'OpenVINO', labels: ['OpenVINO'] }), + mockCatalogSource({ id: 'community', 
name: 'Community', labels: ['Community'] }), + ], + includeSourcesWithoutLabels: false, + }); + modelCatalog.visit(); + + modelCatalog.findAllModelsToggle().should('be.visible'); + modelCatalog.findCategoryToggle('label-Hugging Face').should('be.visible'); + modelCatalog.findCategoryToggle('label-OpenVINO').should('be.visible'); + modelCatalog.findCategoryToggle('label-Community').should('be.visible'); + modelCatalog.findCategoryToggle('no-labels').should('not.exist'); + }); + + it('should show category titles', () => { + modelCatalog.findCategoryTitle('OpenVINO').should('contain.text', 'OpenVINO models'); + cy.findByTestId('title Hugging Face').should('contain.text', 'Hugging Face models'); + modelCatalog.findCategoryTitle('Community').should('contain.text', 'Community models'); + modelCatalog.findCategoryTitle('null').should('contain.text', 'Community and custom models'); + }); + }); + + describe('Show More Functionality', () => { + it('should display show more button when category has 4 or more models', () => { + modelCatalog.findShowMoreModelsLink('hugging-face').scrollIntoView().should('be.visible'); + modelCatalog.findShowMoreModelsLink('hugging-face').click(); + modelCatalog.findAllModelsToggle().click(); + modelCatalog.findShowMoreModelsLink('openvino').scrollIntoView().should('be.visible'); + modelCatalog.findShowMoreModelsLink('openvino').click(); + modelCatalog.findAllModelsToggle().click(); + modelCatalog.findShowMoreModelsLink('community').scrollIntoView().should('be.visible'); + modelCatalog.findAllModelsToggle().click(); + modelCatalog.findShowMoreModelsLink('community').click(); + }); + }); + + describe('Error Handling', () => { + it('should display error message when category fails to load', () => { + // Setup intercepts with sources without labels + initIntercepts({ + sources: [ + mockCatalogSource({ id: 'huggingface', name: 'Hugging Face', labels: ['Hugging Face'] }), + mockCatalogSource({ id: 'openvino', name: 'OpenVINO', labels: ['OpenVINO'] 
}), + mockCatalogSource({ id: 'community', name: 'Community', labels: ['Community'] }), + mockCatalogSource({ id: 'custom-source', name: 'Custom Source', labels: [] }), + ], + includeSourcesWithoutLabels: false, // Don't set up success intercept + }); + + // Manually intercept with error response for sourceLabel=null + cy.intercept( + { + method: 'GET', + pathname: `/model-registry/api/${MODEL_CATALOG_API_VERSION}/model_catalog/models`, + query: { sourceLabel: SourceLabel.other }, + }, + { + statusCode: 500, + body: { error: 'Internal server error' }, + }, + ); + + modelCatalog.visit(); + + modelCatalog.findErrorState('null').scrollIntoView().should('be.visible'); + modelCatalog + .findErrorState('null') + .should('contain.text', 'Failed to load Community and custom models'); + }); + }); + + describe('Empty States', () => { + it('should show empty state when category has no models', () => { + initIntercepts({ isEmpty: true }); + modelCatalog.visit(); + + modelCatalog.findEmptyState('OpenVINO').scrollIntoView().should('be.visible'); + modelCatalog + .findEmptyState('OpenVINO') + .should('contain.text', 'No result foundAdjust your filters and try again.'); + }); + }); +}); diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogCard.cy.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogCard.cy.ts index 25d2f9d7f8..51e3bfb7eb 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogCard.cy.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogCard.cy.ts @@ -1,20 +1,36 @@ +/* eslint-disable camelcase */ import { modelCatalog } from '~/__tests__/cypress/cypress/pages/modelCatalog'; import { + mockCatalogAccuracyMetricsArtifact, mockCatalogModel, + mockCatalogModelArtifact, + mockCatalogModelArtifactList, mockCatalogModelList, + mockCatalogPerformanceMetricsArtifact, mockCatalogSource, 
mockCatalogSourceList, + mockNonValidatedModel, + mockValidatedModel, } from '~/__mocks__'; import type { CatalogSource } from '~/app/modelCatalogTypes'; import { MODEL_CATALOG_API_VERSION } from '~/__tests__/cypress/cypress/support/commands/api'; +import { mockCatalogFilterOptionsList } from '~/__mocks__/mockCatalogFilterOptionsList'; +import type { ModelRegistryCustomProperties } from '~/app/types'; +import { ModelRegistryMetadataType } from '~/app/types'; type HandlersProps = { sources?: CatalogSource[]; + modelsPerCategory?: number; + useValidatedModel?: boolean; }; const initIntercepts = ({ sources = [mockCatalogSource({}), mockCatalogSource({ id: 'source-2', name: 'source 2' })], + modelsPerCategory = 4, + useValidatedModel = false, }: HandlersProps) => { + const testModel = useValidatedModel ? mockValidatedModel : mockNonValidatedModel; + cy.interceptApi( `GET /api/:apiVersion/model_catalog/sources`, { @@ -25,23 +41,139 @@ const initIntercepts = ({ }), ); + sources.forEach((source) => { + source.labels.forEach((label) => { + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/models`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + query: { sourceLabel: label }, + }, + mockCatalogModelList({ + items: Array.from({ length: modelsPerCategory }, (_, i) => { + const customProperties = + i === 0 && useValidatedModel + ? ({ + validated: { + metadataType: ModelRegistryMetadataType.STRING, + // eslint-disable-next-line camelcase + string_value: '', + }, + } as ModelRegistryCustomProperties) + : undefined; + const name = + i === 0 && useValidatedModel + ? 
'validated-model' + : `${label.toLowerCase()}-model-${i + 1}`; + + return mockCatalogModel({ + name, + // eslint-disable-next-line camelcase + source_id: source.id, + customProperties, + }); + }), + }), + ); + }); + }); + + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/sources/:sourceId/models/:modelName`, + { + path: { + apiVersion: MODEL_CATALOG_API_VERSION, + sourceId: 'source-2', + modelName: testModel.name.replace('/', '%2F'), + }, + }, + testModel, + ); + + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/sources/:sourceId/artifacts/:modelName`, + { + path: { + apiVersion: MODEL_CATALOG_API_VERSION, + sourceId: 'source-2', + modelName: testModel.name.replace('/', '%2F'), + }, + }, + mockCatalogModelArtifactList({}), + ); + cy.interceptApi( - `GET /api/:apiVersion/model_catalog/models`, + `GET /api/:apiVersion/model_catalog/models/filter_options`, { path: { apiVersion: MODEL_CATALOG_API_VERSION }, - query: { source: 'sample-source' }, + query: { namespace: 'kubeflow' }, }, - mockCatalogModelList({ - items: [mockCatalogModel({})], - }), + mockCatalogFilterOptionsList(), ); + + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/sources/:sourceId/artifacts/:modelName`, + { + path: { + apiVersion: MODEL_CATALOG_API_VERSION, + sourceId: 'source-2', + modelName: 'validated-model', + }, + }, + { + items: [ + mockCatalogPerformanceMetricsArtifact({}), + mockCatalogPerformanceMetricsArtifact({ + customProperties: { + hardware_type: { + metadataType: ModelRegistryMetadataType.STRING, + string_value: 'RTX 4090', + }, + hardware_count: { + metadataType: ModelRegistryMetadataType.INT, + int_value: '33', + }, + requests_per_second: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: 10, + }, + ttft_mean: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: 67.15, + }, + }, + }), + mockCatalogPerformanceMetricsArtifact({ + customProperties: { + hardware_type: { + metadataType: ModelRegistryMetadataType.STRING, + string_value: 
'A100', + }, + hardware_count: { + metadataType: ModelRegistryMetadataType.INT, + int_value: '40', + }, + requests_per_second: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: 15, + }, + ttft_mean: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: 42.12, + }, + }, + }), + mockCatalogAccuracyMetricsArtifact({}), + mockCatalogModelArtifact({}), + ], + }, + ).as('getCatalogSourceModelArtifacts'); }; describe('ModelCatalogCard Component', () => { beforeEach(() => { initIntercepts({}); modelCatalog.visit(); - modelCatalog.navigate(); }); describe('Card Layout and Content', () => { it('should render all cards from the mock data', () => { @@ -50,7 +182,9 @@ describe('ModelCatalogCard Component', () => { it('should display correct source labels', () => { modelCatalog.findFirstModelCatalogCard().within(() => { - modelCatalog.findSourceLabel().should('contain.text', 'sample source'); + modelCatalog + .findSourceLabel() + .should('contain.text', 'source 2text-generationprovider1apache-2.0'); }); }); @@ -79,13 +213,43 @@ describe('ModelCatalogCard Component', () => { }); describe('Navigation and Interaction', () => { - it('should show all model metadata correctly', () => { + it('should show model metadata correctly', () => { modelCatalog.findFirstModelCatalogCard().within(() => { - modelCatalog.findModelCatalogDetailLink().should('contain.text', 'model1'); + modelCatalog + .findModelCatalogDetailLink() + .should('contain.text', 'sample category 1-model-1'); modelCatalog.findTaskLabel().should('exist'); - modelCatalog.findLicenseLabel().should('exist'); modelCatalog.findProviderLabel().should('exist'); }); }); }); + + describe('Validated Model', () => { + beforeEach(() => { + initIntercepts({ useValidatedModel: true }); + modelCatalog.visit(); + }); + it('should show validated model correctly', () => { + cy.wait('@getCatalogSourceModelArtifacts'); + modelCatalog.findFirstModelCatalogCard().within(() => { + 
modelCatalog.findValidatedModelHardware().should('contain.text', '2xH100-80'); + modelCatalog.findValidatedModelRps().should('contain.text', '7'); + modelCatalog.findValidatedModelTtft().should('contain.text', '35.49'); + modelCatalog.findValidatedModelBenchmarkNext().click(); + modelCatalog.findValidatedModelHardware().should('contain.text', '33xRTX 4090'); + modelCatalog.findValidatedModelRps().should('contain.text', '10'); + modelCatalog.findValidatedModelTtft().should('contain.text', '67.15'); + modelCatalog.findValidatedModelBenchmarkNext().click(); + modelCatalog.findValidatedModelHardware().should('contain.text', '40xA100'); + modelCatalog.findValidatedModelRps().should('contain.text', '15'); + modelCatalog.findValidatedModelTtft().should('contain.text', '42.12'); + modelCatalog.findValidatedModelBenchmarkPrev().click(); + modelCatalog.findValidatedModelHardware().should('contain.text', '33xRTX 4090'); + modelCatalog.findValidatedModelRps().should('contain.text', '10'); + modelCatalog.findValidatedModelTtft().should('contain.text', '67.15'); + modelCatalog.findValidatedModelBenchmarkLink().click(); + cy.url().should('include', 'performance-insights'); + }); + }); + }); }); diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogDetails.cy.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogDetails.cy.ts index 775d706414..b6464279a5 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogDetails.cy.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogDetails.cy.ts @@ -9,13 +9,16 @@ import { modelCatalog } from '~/__tests__/cypress/cypress/pages/modelCatalog'; import { mockModelRegistry } from '~/__mocks__/mockModelRegistry'; import type { CatalogSource } from '~/app/modelCatalogTypes'; import { MODEL_CATALOG_API_VERSION } from '~/__tests__/cypress/cypress/support/commands/api'; +import { 
mockCatalogFilterOptionsList } from '~/__mocks__/mockCatalogFilterOptionsList'; type HandlersProps = { sources?: CatalogSource[]; + modelsPerCategory?: number; }; const initIntercepts = ({ sources = [mockCatalogSource({}), mockCatalogSource({ id: 'source-2', name: 'source 2' })], + modelsPerCategory = 4, }: HandlersProps) => { cy.interceptApi( `GET /api/:apiVersion/model_catalog/sources`, @@ -27,24 +30,34 @@ const initIntercepts = ({ }), ); - cy.interceptApi( - `GET /api/:apiVersion/model_catalog/models`, - { - path: { apiVersion: MODEL_CATALOG_API_VERSION }, - query: { source: 'sample-source' }, - }, - mockCatalogModelList({ - items: [mockCatalogModel({})], - }), - ); + sources.forEach((source) => { + source.labels.forEach((label) => { + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/models`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + query: { sourceLabel: label }, + }, + mockCatalogModelList({ + items: Array.from({ length: modelsPerCategory }, (_, i) => + mockCatalogModel({ + name: `${label.toLowerCase()}-model-${i + 1}`, + // eslint-disable-next-line camelcase + source_id: source.id, + }), + ), + }), + ); + }); + }); cy.interceptApi( `GET /api/:apiVersion/model_catalog/sources/:sourceId/models/:modelName`, { path: { apiVersion: MODEL_CATALOG_API_VERSION, - sourceId: 'sample-source', - modelName: 'repo1%2Fmodel1', + sourceId: 'source-2', + modelName: 'sample%20category%201-model-1', }, }, mockCatalogModel({}), @@ -55,12 +68,21 @@ const initIntercepts = ({ { path: { apiVersion: MODEL_CATALOG_API_VERSION, - sourceId: 'sample-source', - modelName: 'repo1%2Fmodel1', + sourceId: 'source-2', + modelName: 'sample%20category%201-model-1', }, }, mockCatalogModelArtifactList({}), ); + + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/models/filter_options`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + query: { namespace: 'kubeflow' }, + }, + mockCatalogFilterOptionsList(), + ); }; describe('Model Catalog Details Page', () => { @@ 
-72,13 +94,12 @@ describe('Model Catalog Details Page', () => { initIntercepts({}); modelCatalog.visit(); - modelCatalog.navigate(); }); it('navigates to details and shows header, breadcrumb and description', () => { modelCatalog.findLoadingState().should('not.exist'); modelCatalog.findModelCatalogDetailLink().first().click(); - modelCatalog.findBreadcrumb().should('be.visible'); + modelCatalog.findBreadcrumb().should('exist'); modelCatalog.findDetailsProviderText().should('be.visible'); modelCatalog.findDetailsDescription().should('exist'); }); diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogTabs.cy.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogTabs.cy.ts new file mode 100644 index 0000000000..e573686405 --- /dev/null +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalog/modelCatalogTabs.cy.ts @@ -0,0 +1,361 @@ +import { + mockCatalogModel, + mockCatalogModelArtifactList, + mockCatalogModelList, + mockCatalogSource, + mockCatalogSourceList, + mockNonValidatedModel, + mockValidatedModel, +} from '~/__mocks__'; +import { mockCatalogPerformanceMetricsArtifactList } from '~/__mocks__/mockCatalogModelArtifactList'; +import { modelCatalog } from '~/__tests__/cypress/cypress/pages/modelCatalog'; +import { mockModelRegistry } from '~/__mocks__/mockModelRegistry'; +import type { CatalogSource } from '~/app/modelCatalogTypes'; +import { MODEL_CATALOG_API_VERSION } from '~/__tests__/cypress/cypress/support/commands/api'; +import type { ModelRegistryCustomProperties } from '~/app/types'; +import { ModelRegistryMetadataType } from '~/app/types'; +import { mockCatalogFilterOptionsList } from '~/__mocks__/mockCatalogFilterOptionsList'; + +type HandlersProps = { + sources?: CatalogSource[]; + useValidatedModel?: boolean; + modelsPerCategory?: number; + hasPerformanceArtifacts?: boolean; +}; + +const initIntercepts = ({ + sources = [mockCatalogSource({}), 
mockCatalogSource({ id: 'source-2', name: 'source 2' })], + useValidatedModel = true, + modelsPerCategory = 4, + hasPerformanceArtifacts = true, +}: HandlersProps) => { + const testModel = useValidatedModel ? mockValidatedModel : mockNonValidatedModel; + + const testArtifacts = hasPerformanceArtifacts + ? mockCatalogPerformanceMetricsArtifactList({}) + : mockCatalogModelArtifactList({}); + + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/sources`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + }, + mockCatalogSourceList({ + items: sources, + }), + ); + + sources.forEach((source) => { + source.labels.forEach((label) => { + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/models`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + query: { sourceLabel: label }, + }, + mockCatalogModelList({ + items: Array.from({ length: modelsPerCategory }, (_, i) => { + const customProperties = + i === 0 && useValidatedModel + ? ({ + validated: { + metadataType: ModelRegistryMetadataType.STRING, + // eslint-disable-next-line camelcase + string_value: '', + }, + } as ModelRegistryCustomProperties) + : undefined; + const name = + i === 0 && useValidatedModel + ? 
'validated-model' + : `${label.toLowerCase()}-model-${i + 1}`; + + return mockCatalogModel({ + name, + // eslint-disable-next-line camelcase + source_id: source.id, + customProperties, + }); + }), + }), + ); + }); + }); + + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/sources/:sourceId/models/:modelName`, + { + path: { + apiVersion: MODEL_CATALOG_API_VERSION, + sourceId: 'source-2', + modelName: testModel.name.replace('/', '%2F'), + }, + }, + testModel, + ); + + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/sources/:sourceId/artifacts/:modelName`, + { + path: { + apiVersion: MODEL_CATALOG_API_VERSION, + sourceId: 'source-2', + modelName: testModel.name.replace('/', '%2F'), + }, + }, + testArtifacts, + ); + + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/models/filter_options`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + query: { namespace: 'kubeflow' }, + }, + mockCatalogFilterOptionsList(), + ); +}; + +describe('Model Catalog Details Tabs', () => { + describe('Validated Models with performance artifacts (with tabs)', () => { + beforeEach(() => { + // Mock model registries for register button functionality + cy.intercept('GET', '/model-registry/api/v1/model_registry*', [ + mockModelRegistry({ name: 'modelregistry-sample' }), + ]).as('getModelRegistries'); + + initIntercepts({ useValidatedModel: true, hasPerformanceArtifacts: true }); + modelCatalog.visit(); + }); + + describe('Tab Navigation', () => { + it('should display tabs on model details page', () => { + modelCatalog.findLoadingState().should('not.exist'); + modelCatalog.findModelCatalogDetailLink().first().click(); + + // Verify tabs are present + modelCatalog.findModelDetailsTabs().should('be.visible'); + modelCatalog.findOverviewTab().should('be.visible'); + modelCatalog.findPerformanceInsightsTab().should('be.visible'); + }); + + it('should show Overview tab as active by default', () => { + modelCatalog.findModelCatalogDetailLink().first().click(); + + // Overview 
tab should be active and content should be visible + modelCatalog.findOverviewTab().should('have.attr', 'aria-selected', 'true'); + modelCatalog.findOverviewTabContent().should('be.visible'); + modelCatalog.findDetailsDescription().should('be.visible'); + cy.url().should('include', '/model-catalog/source-2/validated-model/overview'); + }); + + it('should switch to Performance Insights tab when clicked', () => { + modelCatalog.findModelCatalogDetailLink().first().click(); + + cy.url().should('include', '/model-catalog/source-2/validated-model/overview'); + + // Click Performance Insights tab + modelCatalog.clickPerformanceInsightsTab(); + + // Verify tab switch + modelCatalog.findPerformanceInsightsTab().should('have.attr', 'aria-selected', 'true'); + modelCatalog.findOverviewTab().should('have.attr', 'aria-selected', 'false'); + modelCatalog.findPerformanceInsightsTabContent().should('be.visible'); + cy.url().should('include', '/model-catalog/source-2/validated-model/performance-insights'); + }); + + it('should switch back to Overview tab when clicked', () => { + modelCatalog.findModelCatalogDetailLink().first().click(); + + cy.url().should('include', '/model-catalog/source-2/validated-model/overview'); + + // First switch to Performance Insights + modelCatalog.clickPerformanceInsightsTab(); + modelCatalog.findPerformanceInsightsTab().should('have.attr', 'aria-selected', 'true'); + + // Then switch back to Overview + modelCatalog.clickOverviewTab(); + modelCatalog.findOverviewTab().should('have.attr', 'aria-selected', 'true'); + modelCatalog.findPerformanceInsightsTab().should('have.attr', 'aria-selected', 'false'); + modelCatalog.findOverviewTabContent().should('be.visible'); + cy.url().should('include', '/model-catalog/source-2/validated-model/overview'); + }); + }); + + describe('Tab Content', () => { + it('should display Hardware Configuration content in Performance Insights tab', () => { + modelCatalog.findModelCatalogDetailLink().first().click(); + + // 
Switch to Performance Insights tab + modelCatalog.clickPerformanceInsightsTab(); + + // Verify Hardware Configuration content is displayed + modelCatalog.findPerformanceInsightsTabContent().should('be.visible'); + modelCatalog.findHardwareConfigurationTitle().should('be.visible'); + modelCatalog.findHardwareConfigurationDescription().should('be.visible'); + modelCatalog.findHardwareConfigurationTable().should('be.visible'); + }); + + it('should display Workload type column as the second column in hardware configuration table', () => { + modelCatalog.findModelCatalogDetailLink().first().click(); + modelCatalog.clickPerformanceInsightsTab(); + + modelCatalog + .findHardwareConfigurationTableHeaders() + .eq(1) + .should('contain.text', 'Workload type'); + modelCatalog + .findHardwareConfigurationColumn('Workload type') + .first() + .should('contain.text', 'Code Fixing') + .should('not.contain.text', 'code_fixing'); + }); + }); + + describe('Workload Type Filter', () => { + it('should display workload type filter in the toolbar', () => { + modelCatalog.findModelCatalogDetailLink().first().click(); + modelCatalog.clickPerformanceInsightsTab(); + + modelCatalog + .findWorkloadTypeFilter() + .should('be.visible') + .should('contain.text', 'Workload type'); + }); + + it('should show workload type options when clicked', () => { + modelCatalog.findModelCatalogDetailLink().first().click(); + modelCatalog.clickPerformanceInsightsTab(); + modelCatalog.findWorkloadTypeFilter().click(); + modelCatalog.findWorkloadTypeOption('Chatbot').should('be.visible'); + modelCatalog.findWorkloadTypeOption('Code Fixing').should('be.visible'); + modelCatalog.findWorkloadTypeOption('Long RAG').should('be.visible'); + modelCatalog.findWorkloadTypeOption('RAG').should('be.visible'); + }); + + it('should update toggle text when workload type is selected', () => { + modelCatalog.findModelCatalogDetailLink().first().click(); + modelCatalog.clickPerformanceInsightsTab(); + 
modelCatalog.findWorkloadTypeFilter().click(); + modelCatalog.selectWorkloadType('Code Fixing'); + modelCatalog + .findWorkloadTypeFilter() + .should('contain.text', 'Workload type') + .should('contain.text', '1 selected'); + }); + + it('should filter hardware configuration table by selected workload type', () => { + modelCatalog.findModelCatalogDetailLink().first().click(); + modelCatalog.clickPerformanceInsightsTab(); + modelCatalog.findHardwareConfigurationTableRows().should('have.length.at.least', 1); + modelCatalog.findWorkloadTypeFilter().click(); + modelCatalog.selectWorkloadType('Code Fixing'); + modelCatalog.findHardwareConfigurationTableRows().should('exist'); + modelCatalog.findHardwareConfigurationColumn('Workload type').each(($el) => { + cy.wrap($el).should('contain.text', 'Code Fixing'); + }); + }); + + it('should clear workload type filter when clicking selected option again', () => { + modelCatalog.findModelCatalogDetailLink().first().click(); + modelCatalog.clickPerformanceInsightsTab(); + modelCatalog.findWorkloadTypeFilter().click(); + modelCatalog.selectWorkloadType('Code Fixing'); + modelCatalog + .findWorkloadTypeFilter() + .should('contain.text', 'Workload type') + .should('contain.text', '1 selected'); + + modelCatalog.selectWorkloadType('Code Fixing'); + modelCatalog.findWorkloadTypeFilter().should('contain.text', 'Workload type'); + modelCatalog.findWorkloadTypeFilter().should('not.contain.text', '1 selected'); + }); + }); + + describe('Accessibility', () => { + it('should have proper ARIA attributes for tabs', () => { + modelCatalog.findModelCatalogDetailLink().first().click(); + + // Check tab container has proper role + modelCatalog.findModelDetailsTabs().should('have.attr', 'role', 'region'); + modelCatalog + .findModelDetailsTabs() + .should('have.attr', 'aria-label', 'Model details page tabs'); + + // Check individual tabs have proper attributes + modelCatalog.findOverviewTab().should('have.attr', 'aria-label', 'Model overview tab'); 
+ modelCatalog + .findPerformanceInsightsTab() + .should('have.attr', 'aria-label', 'Performance insights tab'); + }); + }); + + describe('Tab State Management', () => { + it('should maintain tab state when switching between tabs', () => { + modelCatalog.findModelCatalogDetailLink().first().click(); + + cy.url().should('include', '/model-catalog/source-2/validated-model/overview'); + + // Switch to Performance Insights + modelCatalog.clickPerformanceInsightsTab(); + modelCatalog.findPerformanceInsightsTab().should('have.attr', 'aria-selected', 'true'); + + // Switch back to Overview + modelCatalog.clickOverviewTab(); + modelCatalog.findOverviewTab().should('have.attr', 'aria-selected', 'true'); + + // Switch to Performance Insights again + modelCatalog.clickPerformanceInsightsTab(); + modelCatalog.findPerformanceInsightsTab().should('have.attr', 'aria-selected', 'true'); + }); + }); + }); + + describe('Validated Models without performance artifacts (without tabs)', () => { + beforeEach(() => { + cy.intercept('GET', '/model-registry/api/v1/model_registry*', [ + mockModelRegistry({ name: 'modelregistry-sample' }), + ]).as('getModelRegistries'); + + initIntercepts({ useValidatedModel: true, hasPerformanceArtifacts: false }); + modelCatalog.visit(); + }); + + it('should not display tabs for validated models without performance artifacts', () => { + modelCatalog.findLoadingState().should('not.exist'); + modelCatalog.findModelCatalogDetailLink().first().click(); + modelCatalog.findModelDetailsTabs().should('not.exist'); + modelCatalog.findOverviewTab().should('not.exist'); + modelCatalog.findPerformanceInsightsTab().should('not.exist'); + + modelCatalog.findOverviewTabContent().should('be.visible'); + modelCatalog.findDetailsDescription().should('be.visible'); + }); + }); + + describe('Non-Validated Models (without tabs)', () => { + beforeEach(() => { + // Mock model registries for register button functionality + cy.intercept('GET', 
'/model-registry/api/v1/model_registry*', [ + mockModelRegistry({ name: 'modelregistry-sample' }), + ]).as('getModelRegistries'); + + initIntercepts({ useValidatedModel: false, hasPerformanceArtifacts: false }); + modelCatalog.visit(); + }); + + it('should not display tabs for non-validated models', () => { + modelCatalog.findLoadingState().should('not.exist'); + modelCatalog.findModelCatalogDetailLink().first().click(); + + // Tabs should not be present + modelCatalog.findModelDetailsTabs().should('not.exist'); + modelCatalog.findOverviewTab().should('not.exist'); + modelCatalog.findPerformanceInsightsTab().should('not.exist'); + + // But overview content should still be visible + modelCatalog.findOverviewTabContent().should('be.visible'); + modelCatalog.findDetailsDescription().should('be.visible'); + }); + }); +}); diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalogSettings/modelCatalogSettings.cy.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalogSettings/modelCatalogSettings.cy.ts new file mode 100644 index 0000000000..d262ca4e46 --- /dev/null +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelCatalogSettings/modelCatalogSettings.cy.ts @@ -0,0 +1,683 @@ +import { + modelCatalogSettings, + manageSourcePage, + deleteSourceModal, +} from '~/__tests__/cypress/cypress/pages/modelCatalogSettings'; +import { MODEL_CATALOG_API_VERSION } from '~/__tests__/cypress/cypress/support/commands/api'; +import { + mockCatalogSource, + mockCatalogSourceList, + mockCatalogSourceConfigList, + mockYamlCatalogSourceConfig, + mockHuggingFaceCatalogSourceConfig, +} from '~/__mocks__'; +import type { CatalogSource, CatalogSourceConfig } from '~/app/modelCatalogTypes'; + +const NAMESPACE = 'kubeflow'; +const userMock = { + data: { + userId: 'user@example.com', + clusterAdmin: true, + }, +}; + +const setupMocks = (sources: CatalogSource[] = [], sourceConfigs: CatalogSourceConfig[] = []) => { + 
cy.intercept('GET', '/model-registry/api/v1/namespaces', { + data: [{ metadata: { name: NAMESPACE } }], + }); + cy.intercept('GET', '/model-registry/api/v1/user', userMock); + cy.interceptApi( + `GET /api/:apiVersion/model_catalog/sources`, + { + path: { apiVersion: MODEL_CATALOG_API_VERSION }, + }, + mockCatalogSourceList({ + items: sources, + }), + ); + cy.intercept( + 'GET', + `/model-registry/api/${MODEL_CATALOG_API_VERSION}/settings/model_catalog/source_configs*`, + { + statusCode: 200, + body: { + data: mockCatalogSourceConfigList({ + catalogs: sourceConfigs, + }), + }, + }, + ).as('getCatalogSourceConfigs'); +}; + +function selectNamespaceIfPresent() { + cy.get('body').then(($body) => { + if ($body.find('[data-testid="namespace-select"]').length) { + cy.get('[data-testid="namespace-select"]').click(); + cy.findByText(NAMESPACE).click(); + } + }); +} + +describe('Model Catalog Settings', () => { + const defaultYamlSource = mockYamlCatalogSourceConfig({ + id: 'default-yaml', + name: 'Default Catalog', + isDefault: true, + }); + + beforeEach(() => { + setupMocks([], [defaultYamlSource]); + }); + + it('should display the settings page', () => { + modelCatalogSettings.visit(); + modelCatalogSettings.findHeading(); + modelCatalogSettings.findDescription(); + }); + + it('should navigate to settings page from nav', () => { + selectNamespaceIfPresent(); + cy.get('body').then(($body) => { + if ($body.find('#page-sidebar').length > 0) { + modelCatalogSettings.navigate(); + modelCatalogSettings.findHeading(); + } else { + cy.log('Sidebar not available, skipping nav test'); + } + }); + }); + + it('should display add source button', () => { + modelCatalogSettings.visit(); + modelCatalogSettings.findAddSourceButton().should('be.visible'); + modelCatalogSettings.findAddSourceButton().should('contain', 'Add a source'); + }); + + it('should navigate to add source page when button is clicked', () => { + modelCatalogSettings.visit(); + 
modelCatalogSettings.findAddSourceButton().click(); + manageSourcePage.findAddSourceTitle(); + manageSourcePage.findAddSourceDescription(); + manageSourcePage.findBreadcrumb().should('exist'); + manageSourcePage.findBreadcrumbAction().should('contain', 'Add a source'); + }); +}); + +describe('Catalog Source Configs Table', () => { + const defaultYamlSource = mockYamlCatalogSourceConfig({ + id: 'default-yaml', + name: 'Default Catalog', + isDefault: true, + enabled: true, + includedModels: [], + excludedModels: [], + }); + + const huggingFaceSource = mockHuggingFaceCatalogSourceConfig({ + id: 'hf-google', + name: 'HuggingFace Google', + isDefault: false, + enabled: true, + allowedOrganization: 'Google', + includedModels: ['model1', 'model2'], + }); + + const customYamlSource = mockYamlCatalogSourceConfig({ + id: 'custom-yaml', + name: 'Custom YAML', + isDefault: false, + enabled: false, + excludedModels: ['excluded-model'], + }); + + beforeEach(() => { + setupMocks([], [defaultYamlSource, huggingFaceSource, customYamlSource]); + }); + + it('should display empty state when no source configs exist', () => { + setupMocks([], []); + modelCatalogSettings.visit(); + modelCatalogSettings.shouldBeEmpty(); + modelCatalogSettings.findEmptyState().should('contain', 'No catalog sources'); + }); + + it('should display table with source configs', () => { + modelCatalogSettings.visit(); + modelCatalogSettings.shouldHaveSourceConfigs(); + modelCatalogSettings.findRows().should('have.length', 3); + }); + + it('should render table column headers correctly', () => { + modelCatalogSettings.visit(); + modelCatalogSettings.findTable().should('be.visible'); + modelCatalogSettings.findTable().contains('th', 'Name').should('be.visible'); + modelCatalogSettings.findTable().contains('th', 'Organization').should('be.visible'); + modelCatalogSettings.findTable().contains('th', 'Model visibility').should('be.visible'); + modelCatalogSettings.findTable().contains('th', 'Source 
type').should('be.visible'); + modelCatalogSettings.findTable().contains('th', 'Enable').should('be.visible'); + modelCatalogSettings.findTable().contains('th', 'Validation status').should('be.visible'); + }); + + describe('Table row rendering', () => { + it('should render default YAML source correctly', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('Default Catalog'); + row.findName().should('be.visible').and('contain', 'Default Catalog'); + row.shouldHaveOrganization('-'); + row.shouldHaveModelVisibility('Unfiltered'); + row.shouldHaveSourceType('YAML file'); + row.shouldHaveEnableToggle(false); // Default sources don't have toggle + }); + + it('should render Hugging Face source correctly', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('HuggingFace Google'); + row.findName().should('be.visible').and('contain', 'HuggingFace Google'); + row.shouldHaveOrganization('Google'); + row.shouldHaveModelVisibility('Filtered'); + row.shouldHaveSourceType('Hugging Face'); + row.shouldHaveEnableToggle(true); + row.shouldHaveEnableState(true); + }); + + it('should render custom YAML source correctly', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('Custom YAML'); + row.findName().should('be.visible').and('contain', 'Custom YAML'); + row.shouldHaveOrganization('-'); + row.shouldHaveModelVisibility('Filtered'); + row.shouldHaveSourceType('YAML file'); + row.shouldHaveEnableToggle(true); + row.shouldHaveEnableState(false); + }); + }); + + describe('Enable toggle functionality', () => { + it('should show alert when enable toggle is clicked', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('HuggingFace Google'); + row.findName().should('be.visible'); + row.findEnableToggle().should('exist').and('be.checked'); + + cy.window().then((win) => { + cy.stub(win, 'alert').as('windowAlert'); + }); + + row.toggleEnable(); + + 
cy.get('@windowAlert').should( + 'have.been.calledWith', + 'Toggle clicked! "HuggingFace Google" will be disabled when functionality is implemented.', + ); + }); + + it('should not show toggle for default sources', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('Default Catalog'); + row.findName().should('be.visible'); + row.shouldHaveEnableToggle(false); + }); + }); + + describe('Manage source button', () => { + it('should navigate to manage source page when button is clicked', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('HuggingFace Google'); + row.findName().should('be.visible'); + row.findManageSourceButton().should('be.visible').click(); + cy.url().should('include', '/model-catalog-settings/manage-source/hf-google'); + manageSourcePage.findManageSourceTitle(); + }); + + it('should navigate to correct manage source page for each row', () => { + modelCatalogSettings.visit(); + const customRow = modelCatalogSettings.getRow('Custom YAML'); + customRow.findName().should('be.visible'); + customRow.findManageSourceButton().should('be.visible').click(); + cy.url().should('include', '/model-catalog-settings/manage-source/custom-yaml'); + }); + }); + + describe('Kebab menu actions', () => { + it('should show kebab with delete action for non-default sources', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('HuggingFace Google'); + row.findName().should('be.visible'); + row.shouldHaveKebab(true); + row.findKebab().should('be.visible').click(); + cy.findByRole('menuitem', { name: 'Delete source' }) + .should('be.visible') + .and('not.be.disabled'); + }); + + it('should not show kebab menu for default sources', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('Default Catalog'); + row.findName().should('be.visible'); + row.shouldHaveKebab(false); + }); + }); + + describe('Delete source functionality', () => { + beforeEach(() => { + 
cy.intercept( + 'DELETE', + `/model-registry/api/${MODEL_CATALOG_API_VERSION}/settings/model_catalog/source_configs/*`, + { + statusCode: 200, + }, + ).as('deleteSource'); + }); + + it('should open delete modal when delete action is clicked', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('HuggingFace Google'); + row.findKebab().click(); + cy.findByRole('menuitem', { name: 'Delete source' }).click(); + + deleteSourceModal.shouldBeOpen(); + deleteSourceModal.find().should('contain', 'HuggingFace Google'); + deleteSourceModal.find().should('contain', 'repository will be deleted'); + }); + + it('should require typing source name to enable delete button', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('HuggingFace Google'); + row.findKebab().click(); + cy.findByRole('menuitem', { name: 'Delete source' }).click(); + + deleteSourceModal.shouldBeOpen(); + deleteSourceModal.findDeleteButton().should('be.disabled'); + + deleteSourceModal.typeConfirmation('wrong name'); + deleteSourceModal.findDeleteButton().should('be.disabled'); + + deleteSourceModal.typeConfirmation('HuggingFace Google'); + deleteSourceModal.findDeleteButton().should('not.be.disabled'); + }); + + it('should close modal when cancel is clicked', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('HuggingFace Google'); + row.findKebab().click(); + cy.findByRole('menuitem', { name: 'Delete source' }).click(); + + deleteSourceModal.shouldBeOpen(); + deleteSourceModal.findCancelButton().click(); + deleteSourceModal.shouldBeOpen(false); + }); + + it('should disable delete button while deleting', () => { + cy.intercept( + 'DELETE', + `/model-registry/api/${MODEL_CATALOG_API_VERSION}/settings/model_catalog/source_configs/*`, + (req) => { + req.reply({ + statusCode: 200, + delay: 1000, + }); + }, + ).as('deleteSourceSlow'); + + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('Custom YAML'); + 
row.findKebab().click(); + cy.findByRole('menuitem', { name: 'Delete source' }).click(); + + deleteSourceModal.shouldBeOpen(); + deleteSourceModal.typeConfirmation('Custom YAML'); + deleteSourceModal.findDeleteButton().should('not.be.disabled').click(); + + // Check that the button is disabled (it will show "Loading... Delete") + deleteSourceModal.findFooter().find('button').first().should('be.disabled'); + }); + }); + + describe('Model visibility badges', () => { + it('should show "Filtered" badge when source has included models', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('HuggingFace Google'); + row.findName().should('be.visible'); + row.findModelVisibility().should('be.visible').and('contain', 'Filtered'); + row + .findModelVisibility() + .find('[data-testid*="model-visibility-filtered"]') + .should('be.visible'); + }); + + it('should show "Filtered" badge when source has excluded models', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('Custom YAML'); + row.findName().should('be.visible'); + row.findModelVisibility().should('be.visible').and('contain', 'Filtered'); + }); + + it('should show "Unfiltered" badge when source has no filters', () => { + modelCatalogSettings.visit(); + const row = modelCatalogSettings.getRow('Default Catalog'); + row.findName().should('be.visible'); + row.findModelVisibility().should('be.visible').and('contain', 'Unfiltered'); + row + .findModelVisibility() + .find('[data-testid*="model-visibility-unfiltered"]') + .should('be.visible'); + }); + }); +}); + +describe('Manage Source Page', () => { + beforeEach(() => { + setupMocks(); + }); + + describe('Add Source Mode', () => { + it('should display add source page', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findAddSourceTitle(); + manageSourcePage.findAddSourceDescription(); + }); + + it('should display correct breadcrumb for add source', () => { + manageSourcePage.visitAddSource(); + 
manageSourcePage.findBreadcrumb().should('exist'); + manageSourcePage.findBreadcrumbAction().should('contain', 'Add a source'); + }); + + it('should navigate back to settings from breadcrumb', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findBreadcrumb().click({ force: true }); + modelCatalogSettings.findHeading(); + }); + + it('should navigate back to settings from cancel button', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findCancelButton().click(); + modelCatalogSettings.findHeading(); + }); + + it('should display form fields', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findNameInput().should('exist'); + manageSourcePage.findSourceTypeHuggingFace().should('exist'); + manageSourcePage.findSourceTypeYaml().should('exist'); + manageSourcePage.findEnableSourceCheckbox().should('exist'); + manageSourcePage.findSubmitButton().should('exist'); + manageSourcePage.findPreviewButton().should('exist'); + manageSourcePage.findCancelButton().should('exist'); + }); + + it('should show Hugging Face fields by default', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findSourceTypeHuggingFace().should('be.checked'); + manageSourcePage.findCredentialsSection().should('exist'); + manageSourcePage.findAccessTokenInput().should('exist'); + manageSourcePage.findOrganizationInput().should('exist'); + manageSourcePage.findYamlSection().should('not.exist'); + }); + + it('should show YAML fields when YAML type is selected', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.selectSourceType('yaml'); + manageSourcePage.findSourceTypeYaml().should('be.checked'); + manageSourcePage.findYamlSection().should('exist'); + manageSourcePage.findYamlContentInput().should('exist'); + manageSourcePage.findCredentialsSection().should('not.exist'); + }); + + it('should have Add button disabled by default', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findSubmitButton().should('be.disabled'); + 
}); + + it('should have Preview button disabled by default', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findPreviewButton().should('be.disabled'); + }); + + it('should show validation error when name field is empty and touched', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findNameInput().focus().blur(); + manageSourcePage.findNameError().should('exist'); + manageSourcePage.findNameError().should('contain', 'Name is required'); + }); + + it('should enable Add button when all required HF fields are filled', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.fillSourceName('Test Source'); + manageSourcePage.fillAccessToken('test-token-123'); + manageSourcePage.fillOrganization('Google'); + manageSourcePage.findSubmitButton().should('not.be.disabled'); + }); + + it('should enable Preview button when HF credentials are filled', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.fillAccessToken('test-token-123'); + manageSourcePage.fillOrganization('Google'); + manageSourcePage.findPreviewButton().should('not.be.disabled'); + }); + + it('should enable Add button when all required YAML fields are filled', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.selectSourceType('yaml'); + manageSourcePage.fillSourceName('Test Source'); + manageSourcePage.fillYamlContent('test: yaml\ncontent: here'); + manageSourcePage.findSubmitButton().should('not.be.disabled'); + }); + + it('should enable Preview button when YAML content is filled', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.selectSourceType('yaml'); + manageSourcePage.fillYamlContent('test: yaml\ncontent: here'); + manageSourcePage.findPreviewButton().should('not.be.disabled'); + }); + + it('should show validation errors for HF fields when touched', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findAccessTokenInput().focus().blur(); + manageSourcePage.findAccessTokenError().should('exist'); + 
manageSourcePage.findAccessTokenError().should('contain', 'Access token is required'); + + manageSourcePage.findOrganizationInput().focus().blur(); + manageSourcePage.findOrganizationError().should('exist'); + manageSourcePage.findOrganizationError().should('contain', 'Organization is required'); + }); + + it('should show validation error for YAML content when touched', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.selectSourceType('yaml'); + manageSourcePage.findYamlContentInput().focus().blur(); + manageSourcePage.findYamlContentError().should('exist'); + manageSourcePage.findYamlContentError().should('contain', 'YAML content is required'); + }); + + it('should expand and collapse model visibility section', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findAllowedModelsInput().should('not.exist'); + manageSourcePage.findExcludedModelsInput().should('not.exist'); + + manageSourcePage.toggleModelVisibility(); + manageSourcePage.findAllowedModelsInput().should('exist'); + manageSourcePage.findExcludedModelsInput().should('exist'); + + manageSourcePage.toggleModelVisibility(); + manageSourcePage.findAllowedModelsInput().should('not.exist'); + manageSourcePage.findExcludedModelsInput().should('not.exist'); + }); + + it('should allow entering filter values', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.toggleModelVisibility(); + + const allowedModels = 'model-1\nmodel-2*\nmodel-3'; + const excludedModels = 'test-model*\ndemo-model'; + + manageSourcePage.fillAllowedModels(allowedModels); + manageSourcePage.fillExcludedModels(excludedModels); + + manageSourcePage.findAllowedModelsInput().should('have.value', allowedModels); + manageSourcePage.findExcludedModelsInput().should('have.value', excludedModels); + }); + + it('should have enable source checkbox unchecked by default', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findEnableSourceCheckbox().should('not.be.checked'); + }); + + it('should allow 
toggling enable source checkbox', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findEnableSourceCheckbox().should('not.be.checked'); + manageSourcePage.toggleEnableSource(); + manageSourcePage.findEnableSourceCheckbox().should('be.checked'); + manageSourcePage.toggleEnableSource(); + manageSourcePage.findEnableSourceCheckbox().should('not.be.checked'); + }); + + it('should clear validation errors when fields are filled', () => { + manageSourcePage.visitAddSource(); + + // Trigger validation errors + manageSourcePage.findNameInput().focus().blur(); + manageSourcePage.findAccessTokenInput().focus().blur(); + manageSourcePage.findOrganizationInput().focus().blur(); + + manageSourcePage.findNameError().should('exist'); + manageSourcePage.findAccessTokenError().should('exist'); + manageSourcePage.findOrganizationError().should('exist'); + + // Fill fields + manageSourcePage.fillSourceName('Test Source'); + manageSourcePage.fillAccessToken('test-token'); + manageSourcePage.fillOrganization('Google'); + + // Errors should be cleared + manageSourcePage.findNameError().should('not.exist'); + manageSourcePage.findAccessTokenError().should('not.exist'); + manageSourcePage.findOrganizationError().should('not.exist'); + }); + + it('should maintain form state when switching between source types', () => { + manageSourcePage.visitAddSource(); + + // Fill name and HF fields + manageSourcePage.fillSourceName('Test Source'); + manageSourcePage.fillAccessToken('test-token'); + manageSourcePage.fillOrganization('Google'); + + // Switch to YAML + manageSourcePage.selectSourceType('yaml'); + manageSourcePage.findYamlSection().should('exist'); + + // Name should be maintained + manageSourcePage.findNameInput().should('have.value', 'Test Source'); + + // Fill YAML + manageSourcePage.fillYamlContent('test: yaml'); + + // Switch back to HF + manageSourcePage.selectSourceType('huggingface'); + manageSourcePage.findCredentialsSection().should('exist'); + + // All values 
should be maintained + manageSourcePage.findNameInput().should('have.value', 'Test Source'); + manageSourcePage.findAccessTokenInput().should('have.value', 'test-token'); + manageSourcePage.findOrganizationInput().should('have.value', 'Google'); + }); + + it('should dynamically update filter descriptions with organization name', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.toggleModelVisibility(); + + // Before entering organization, should show generic text + cy.contains( + 'Optionally filter which models from your source appear in the model catalog', + ).should('exist'); + + // Fill organization name + manageSourcePage.fillOrganization('Google'); + + // After entering organization, should show organization-specific text + cy.contains( + 'Optionally filter which Google models from your source appear in the model catalog', + ).should('exist'); + cy.contains('all Google models from the source will be visible').should('exist'); + + // Change organization name + manageSourcePage.fillOrganization('Meta'); + + // Text should update to new organization + cy.contains( + 'Optionally filter which Meta models from your source appear in the model catalog', + ).should('exist'); + cy.contains('all Meta models from the source will be visible').should('exist'); + }); + + it('should display model catalog preview panel', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findPreviewPanel().should('exist'); + manageSourcePage.findPreviewPanelTitle().should('be.visible'); + manageSourcePage.findPreviewPanelEmptyMessage().should('be.visible'); + }); + + it('should have three preview buttons', () => { + manageSourcePage.visitAddSource(); + // One in the action group (bottom left) + manageSourcePage.findPreviewButton().should('exist'); + // One in the preview panel header (top right) + manageSourcePage.findPreviewButtonHeader().should('exist'); + // One in the preview panel body (center) + manageSourcePage.findPreviewButtonPanel().should('exist'); + }); + 
+ it('should have all three preview buttons disabled by default', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.findPreviewButton().should('be.disabled'); + manageSourcePage.findPreviewButtonHeader().should('be.disabled'); + manageSourcePage.findPreviewButtonPanel().should('be.disabled'); + }); + + it('should enable all three preview buttons when credentials are filled', () => { + manageSourcePage.visitAddSource(); + manageSourcePage.fillAccessToken('test-token'); + manageSourcePage.fillOrganization('Google'); + manageSourcePage.findPreviewButton().should('not.be.disabled'); + manageSourcePage.findPreviewButtonHeader().should('not.be.disabled'); + manageSourcePage.findPreviewButtonPanel().should('not.be.disabled'); + }); + }); + + describe('Manage Source Mode', () => { + const catalogSourceId = 'test-source-id'; + const catalogSource = mockCatalogSource({ + id: catalogSourceId, + name: 'Test Source', + }); + + beforeEach(() => { + setupMocks([catalogSource]); + }); + + it('should display manage source page', () => { + manageSourcePage.visitManageSource(catalogSourceId); + manageSourcePage.findManageSourceTitle(); + manageSourcePage.findManageSourceDescription(); + }); + + it('should display correct breadcrumb for manage source', () => { + manageSourcePage.visitManageSource(catalogSourceId); + manageSourcePage.findBreadcrumb().should('exist'); + manageSourcePage.findBreadcrumbAction().should('contain', 'Manage source'); + }); + + it('should navigate back to settings from breadcrumb', () => { + manageSourcePage.visitManageSource(catalogSourceId); + manageSourcePage.findBreadcrumb().click({ force: true }); + modelCatalogSettings.findHeading(); + }); + + it('should show Save button instead of Add button', () => { + manageSourcePage.visitManageSource(catalogSourceId); + manageSourcePage.findSubmitButton().should('exist'); + manageSourcePage.findSubmitButton().should('contain', 'Save'); + }); + }); +}); diff --git 
a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelDetailsCard.cy.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelDetailsCard.cy.ts index 3d2ba3fe9b..8a4d20f68c 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelDetailsCard.cy.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelDetailsCard.cy.ts @@ -104,7 +104,7 @@ describe('Model Details Card', () => { }); it('displays model details correctly', () => { - cy.visit('/model-registry/modelregistry-sample/registeredModels/1/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/1/overview'); cy.contains('Model details').should('be.visible'); @@ -120,7 +120,7 @@ describe('Model Details Card', () => { }); it('displays labels section correctly', () => { - cy.visit('/model-registry/modelregistry-sample/registeredModels/1/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/1/overview'); cy.contains('Labels').should('be.visible'); cy.contains('label1').should('be.visible'); @@ -130,7 +130,7 @@ describe('Model Details Card', () => { }); it('displays properties in expandable section', () => { - cy.visit('/model-registry/modelregistry-sample/registeredModels/1/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/1/overview'); cy.contains('Properties').should('be.visible'); cy.contains('Properties').parent().find('.pf-v6-c-badge').should('contain', '3'); // property1, property2, url-property @@ -149,7 +149,7 @@ describe('Model Details Card', () => { }); it('shows add property button and validates input', () => { - cy.visit('/model-registry/modelregistry-sample/registeredModels/1/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/1/overview'); cy.contains('Properties').click(); @@ -168,7 +168,7 @@ describe('Model Details Card', () => { }); it('validates property key length 
correctly', () => { - cy.visit('/model-registry/modelregistry-sample/registeredModels/1/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/1/overview'); cy.contains('Properties').click(); @@ -208,7 +208,7 @@ describe('Model Details Card', () => { manyPropertiesModel, ); - cy.visit('/model-registry/modelregistry-sample/registeredModels/1/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/1/overview'); cy.contains('Properties').click(); @@ -248,7 +248,7 @@ describe('Model Details Card', () => { archivedModel, ); - cy.visit('/model-registry/modelregistry-sample/registeredModels/1/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/1/overview'); cy.contains('Properties').click(); @@ -256,7 +256,7 @@ describe('Model Details Card', () => { }); it('shows the correct tab structure and navigation', () => { - cy.visit('/model-registry/modelregistry-sample/registeredModels/1/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/1/overview'); cy.findByTestId('model-versions-page-tabs').should('exist'); cy.findByTestId('model-overview-tab').should('exist'); @@ -270,7 +270,7 @@ describe('Model Details Card', () => { // These tests need investigation of exact DOM structure of mod-arch-shared components it('allows editing model description', () => { - cy.visit('/model-registry/modelregistry-sample/registeredModels/1/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/1/overview'); cy.findByText('Test model description').should('be.visible'); diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelRegistry.cy.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelRegistry.cy.ts index cf2c5c2dd0..6470e09ce1 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelRegistry.cy.ts +++ 
b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelRegistry.cy.ts @@ -232,17 +232,17 @@ describe('Model Registry core', () => { registeredModelRow.findLatestVersion().contains('new model version'); registeredModelRow.findLatestVersion().click(); verifyRelativeURL( - `/model-registry/modelregistry-sample/registeredModels/1/versions/1/details`, + `/model-registry/modelregistry-sample/registered-models/1/versions/1/details`, ); }); it('table kebab actions', () => { const registeredModelRow = modelRegistry.getRow('Fraud detection model'); registeredModelRow.findKebabAction('Versions').click(); - verifyRelativeURL(`/model-registry/modelregistry-sample/registeredModels/1/versions`); + verifyRelativeURL(`/model-registry/modelregistry-sample/registered-models/1/versions`); cy.go('back'); registeredModelRow.findKebabAction('Overview').click(); - verifyRelativeURL(`/model-registry/modelregistry-sample/registeredModels/1/overview`); + verifyRelativeURL(`/model-registry/modelregistry-sample/registered-models/1/overview`); }); it('Renders labels in modal', () => { @@ -273,12 +273,12 @@ describe('Model Registry core', () => { }); it('Sort by Last modified', () => { - modelRegistry.findRegisteredModelTableHeaderButton('Last modified').click(); - modelRegistry.findRegisteredModelTableHeaderButton('Last modified').should(be.sortAscending); modelRegistry.findRegisteredModelTableHeaderButton('Last modified').click(); modelRegistry.findRegisteredModelTableHeaderButton('Last modified').should(be.sortDescending); modelRegistry.findRegisteredModelTableHeaderButton('Last modified').click(); modelRegistry.findRegisteredModelTableHeaderButton('Last modified').should(be.sortAscending); + modelRegistry.findRegisteredModelTableHeaderButton('Last modified').click(); + modelRegistry.findRegisteredModelTableHeaderButton('Last modified').should(be.sortDescending); }); it('Filter by keyword then both', () => { @@ -368,8 +368,8 @@ describe('Register Model button', () 
=> { modelRegistry.findRegisterModelButton().click(); cy.findByTestId('app-page-title').should('contain', 'Register model'); const expectedUrlPattern = - '/model-registry/modelregistry-sample/registeredModels/test-model-id/versions/test-version-id'; - cy.url().should('include', '/model-registry/modelregistry-sample/registerModel'); + '/model-registry/modelregistry-sample/registered-models/test-model-id/versions/test-version-id'; + cy.url().should('include', '/model-registry/modelregistry-sample/register/model'); cy.log(`Expected redirect URL: ${expectedUrlPattern}`); }); }); diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersionArchive.cy.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersionArchive.cy.ts index 4175064abb..6a4b4b3655 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersionArchive.cy.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersionArchive.cy.ts @@ -166,7 +166,7 @@ describe('Model version archive list', () => { it('No archive versions in the selected registered model', () => { initIntercepts({ modelVersions: [mockModelVersion({ id: '3', name: 'model version 2' })] }); modelVersionArchive.visitModelVersionList(); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/1/versions'); + verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/1/versions'); modelVersionArchive .findModelVersionsTableKebab() .findDropdownItem('View archived versions') @@ -177,15 +177,15 @@ describe('Model version archive list', () => { it('Archived version details browser back button should lead to archived versions table', () => { initIntercepts({}); modelVersionArchive.visit(); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/1/versions/archive'); + 
verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/1/versions/archive'); modelVersionArchive.findArchiveVersionBreadcrumbItem().contains('Archived version'); const archiveVersionRow = modelVersionArchive.getRow('model version 2'); archiveVersionRow.findName().contains('model version 2').click(); verifyRelativeURL( - '/model-registry/modelregistry-sample/registeredModels/1/versions/archive/2/details', + '/model-registry/modelregistry-sample/registered-models/1/versions/archive/2/details', ); cy.go('back'); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/1/versions/archive'); + verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/1/versions/archive'); modelVersionArchive.findArchiveVersionBreadcrumbItem().contains('Archived version'); archiveVersionRow.findName().contains('model version 2').should('exist'); }); @@ -193,7 +193,7 @@ describe('Model version archive list', () => { it('Archive version list', () => { initIntercepts({}); modelVersionArchive.visit(); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/1/versions/archive'); + verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/1/versions/archive'); //breadcrumb modelVersionArchive.findArchiveVersionBreadcrumbItem().contains('Archived version'); @@ -230,7 +230,7 @@ describe('Model version archive list', () => { it('Archived model versions table', () => { initIntercepts({}); modelVersionArchive.visit(); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/1/versions/archive'); + verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/1/versions/archive'); // filtering by keyword then both modelVersionArchive.findArchiveVersionTableSearch().type('model version 1'); diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersionDetails.cy.ts 
b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersionDetails.cy.ts index 162970f2ad..584b5e449c 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersionDetails.cy.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersionDetails.cy.ts @@ -258,7 +258,7 @@ describe('Model version details', () => { it('Model version details page header', () => { verifyRelativeURL( - '/model-registry/modelregistry-sample/registeredModels/1/versions/1/details', + '/model-registry/modelregistry-sample/registered-models/1/versions/1/details', ); cy.findByTestId('app-page-title').should('contain.text', 'Version 1'); cy.findByTestId('breadcrumb-version-name').should('have.text', 'Version 1'); @@ -554,7 +554,7 @@ describe('Model version details', () => { // Verify we navigated to the versions list page cy.url().should( 'include', - '/model-registry/modelregistry-sample/registeredModels/1/versions', + '/model-registry/modelregistry-sample/registered-models/1/versions', ); cy.findByTestId('model-versions-tab-content').should('exist'); }); diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersions.cy.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersions.cy.ts index 17aa36f911..cbfcfd9861 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersions.cy.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersions.cy.ts @@ -142,7 +142,7 @@ describe('Model Versions', () => { modelRegistry.visit(); const registeredModelRow = modelRegistry.getRow('Fraud detection model'); registeredModelRow.findName().contains('Fraud detection model').click(); - verifyRelativeURL(`/model-registry/modelregistry-sample/registeredModels/1/overview`); + 
verifyRelativeURL(`/model-registry/modelregistry-sample/registered-models/1/overview`); modelRegistry.shouldModelVersionsEmpty(); }); @@ -155,7 +155,7 @@ describe('Model Versions', () => { modelRegistry.visit(); const registeredModelRow = modelRegistry.getRow('Fraud detection model'); registeredModelRow.findName().contains('Fraud detection model').click(); - verifyRelativeURL(`/model-registry/modelregistry-sample/registeredModels/1/overview`); + verifyRelativeURL(`/model-registry/modelregistry-sample/registered-models/1/overview`); cy.go('back'); verifyRelativeURL(`/model-registry/modelregistry-sample`); registeredModelRow.findName().contains('Fraud detection model').should('exist'); @@ -176,7 +176,7 @@ describe('Model Versions', () => { //cy.reload(); const registeredModelRow = modelRegistry.getRow('Fraud detection model'); registeredModelRow.findName().contains('Fraud detection model').click(); - verifyRelativeURL(`/model-registry/modelregistry-sample/registeredModels/1/overview`); + verifyRelativeURL(`/model-registry/modelregistry-sample/registered-models/1/overview`); modelRegistry.findModelBreadcrumbItem().contains('test'); // Navigate to versions tab @@ -264,10 +264,12 @@ describe('Model Versions', () => { cy.findByTestId('model-versions-tab').click(); const modelVersionRow = modelRegistry.getModelVersionRow('model version'); modelVersionRow.findModelVersionName().contains('model version').click(); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/1/versions/1/details'); + verifyRelativeURL( + '/model-registry/modelregistry-sample/registered-models/1/versions/1/details', + ); cy.findByTestId('app-page-title').should('contain.text', 'test'); cy.findByTestId('breadcrumb-version-name').should('have.text', 'model version'); cy.go('back'); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/1/versions'); + verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/1/versions'); }); }); diff --git 
a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersionsCard.cy.ts b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersionsCard.cy.ts index 258d85fa2b..2768963fa0 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersionsCard.cy.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/modelVersionsCard.cy.ts @@ -270,7 +270,7 @@ describe('Model Versions Card', () => { }); it('does not display model versions list if there are no live model versions', () => { - cy.visit('/model-registry/modelregistry-sample/registeredModels/1/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/1/overview'); cy.interceptApi( `GET /api/:apiVersion/model_registry/:modelRegistryName/registered_models/:registeredModelId/versions`, { @@ -289,7 +289,7 @@ describe('Model Versions Card', () => { }); it('does not display model versions list if there are no archived model versions', () => { - cy.visit('/model-registry/modelregistry-sample/registeredModels/archive/2/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/archive/2/overview'); cy.interceptApi( `GET /api/:apiVersion/model_registry/:modelRegistryName/registered_models/:registeredModelId/versions`, { @@ -308,7 +308,7 @@ describe('Model Versions Card', () => { }); it('should display live model versions list correctly', () => { - cy.visit('/model-registry/modelregistry-sample/registeredModels/1/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/1/overview'); modelVersionsCard.findModelVersion('1').should('exist'); @@ -322,11 +322,11 @@ describe('Model Versions Card', () => { modelVersionsCard.findModelVersion('4').should('exist'); modelVersionsCard.findViewAllVersionsLink().click(); - cy.url().should('include', '/model-registry/modelregistry-sample/registeredModels/1/versions'); + cy.url().should('include', 
'/model-registry/modelregistry-sample/registered-models/1/versions'); }); it('should display archived model versions list correctly', () => { - cy.visit('/model-registry/modelregistry-sample/registeredModels/archive/2/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/archive/2/overview'); modelVersionsCard.findModelVersion('5').should('exist'); @@ -342,30 +342,30 @@ describe('Model Versions Card', () => { modelVersionsCard.findViewAllVersionsLink().click(); cy.url().should( 'include', - '/model-registry/modelregistry-sample/registeredModels/archive/2/versions', + '/model-registry/modelregistry-sample/registered-models/archive/2/versions', ); }); it('should have the correct link to the live model version', () => { initInterceptsForVersion('4'); - cy.visit('/model-registry/modelregistry-sample/registeredModels/1/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/1/overview'); modelVersionsCard.findModelVersionLink('4').click(); cy.url().should( 'include', - '/model-registry/modelregistry-sample/registeredModels/1/versions/4/details', + '/model-registry/modelregistry-sample/registered-models/1/versions/4/details', ); cy.contains('Version 4').should('be.visible'); }); it('should have the correct link to the archived model version', () => { initInterceptsForVersion('8'); - cy.visit('/model-registry/modelregistry-sample/registeredModels/archive/2/overview'); + cy.visit('/model-registry/modelregistry-sample/registered-models/archive/2/overview'); modelVersionsCard.findModelVersionLink('8').click(); cy.url().should( 'include', - '/model-registry/modelregistry-sample/registeredModels/archive/2/versions/8/details', + '/model-registry/modelregistry-sample/registered-models/archive/2/versions/8/details', ); cy.contains('Version 8').should('be.visible'); }); diff --git a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/registeredModelArchive.cy.ts 
b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/registeredModelArchive.cy.ts index b0dfa8931d..259967e33d 100644 --- a/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/registeredModelArchive.cy.ts +++ b/clients/ui/frontend/src/__tests__/cypress/cypress/tests/mocked/modelRegistry/registeredModelArchive.cy.ts @@ -170,28 +170,28 @@ describe('Model archive list', () => { registeredModels: [], }); registeredModelArchive.visit(); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/archive'); + verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/archive'); registeredModelArchive.shouldArchiveVersionsEmpty(); }); it('Archived model details browser back button should lead to archived models table', () => { initIntercepts({}); registeredModelArchive.visit(); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/archive'); + verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/archive'); registeredModelArchive.findArchiveModelBreadcrumbItem().contains('Archived models'); const archiveModelRow = registeredModelArchive.getRow('model 2'); archiveModelRow.findName().contains('model 2').click(); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/archive/2/overview'); + verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/archive/2/overview'); cy.findByTestId('app-page-title').should('have.text', 'model 2Archived'); cy.go('back'); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/archive'); + verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/archive'); registeredModelArchive.findArchiveModelTable().should('be.visible'); }); it('Archived model with no versions', () => { initIntercepts({ modelVersions: [] }); registeredModelArchive.visit(); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/archive'); + 
verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/archive'); registeredModelArchive.findArchiveModelBreadcrumbItem().contains('Archived models'); const archiveModelRow = registeredModelArchive.getRow('model 2'); archiveModelRow.findName().contains('model 2').click(); @@ -201,23 +201,23 @@ describe('Model archive list', () => { it('Archived model flow', () => { initIntercepts({}); registeredModelArchive.visitArchiveModelVersionList(); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/archive/2/versions'); + verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/archive/2/versions'); modelRegistry.findModelVersionsTable().should('be.visible'); modelRegistry.findModelVersionsTableRows().should('have.length', 2); const version = modelRegistry.getModelVersionRow('model version'); version.findModelVersionName().contains('model version').click(); verifyRelativeURL( - '/model-registry/modelregistry-sample/registeredModels/archive/2/versions/1/details', + '/model-registry/modelregistry-sample/registered-models/archive/2/versions/1/details', ); cy.go('back'); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/archive/2/versions'); + verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/archive/2/versions'); }); it('Archive models list', () => { initIntercepts({}); registeredModelArchive.visit(); - verifyRelativeURL('/model-registry/modelregistry-sample/registeredModels/archive'); + verifyRelativeURL('/model-registry/modelregistry-sample/registered-models/archive'); //breadcrumb registeredModelArchive.findArchiveModelBreadcrumbItem().contains('Archived models'); @@ -303,7 +303,7 @@ describe('Model archive list', () => { archiveModelRow.findLatestVersion().contains('new model version'); archiveModelRow.findLatestVersion().click(); verifyRelativeURL( - `/model-registry/modelregistry-sample/registeredModels/archive/2/versions/1/details`, + 
`/model-registry/modelregistry-sample/registered-models/archive/2/versions/1/details`, ); }); @@ -314,11 +314,11 @@ describe('Model archive list', () => { archiveModelRow.findKebabAction('Overview').click(); cy.location('pathname').should( 'be.equals', - '/model-registry/modelregistry-sample/registeredModels/archive/2/overview', + '/model-registry/modelregistry-sample/registered-models/archive/2/overview', ); cy.go('back'); archiveModelRow.findKebabAction('Versions').click(); - verifyRelativeURL(`/model-registry/modelregistry-sample/registeredModels/archive/2/versions`); + verifyRelativeURL(`/model-registry/modelregistry-sample/registered-models/archive/2/versions`); }); }); @@ -444,5 +444,7 @@ describe('Archiving model', () => { cy.wait('@modelArchived').then((interception) => { expect(interception.request.body).to.eql(mockModArchResponse({ state: 'ARCHIVED' })); }); + verifyRelativeURL('/model-registry/modelregistry-sample'); + cy.findByTestId('app-page-title').should('contain.text', 'Model Registry'); }); }); diff --git a/clients/ui/frontend/src/app/App.tsx b/clients/ui/frontend/src/app/App.tsx index f244a88eb5..71d77ee732 100644 --- a/clients/ui/frontend/src/app/App.tsx +++ b/clients/ui/frontend/src/app/App.tsx @@ -6,7 +6,6 @@ import { Bullseye, Button, Page, - PageSection, PageSidebar, Spinner, Stack, @@ -60,31 +59,29 @@ const App: React.FC = () => { if (error) { // There was an error fetching critical data return ( - - - - - -

- {configError?.message || - namespacesLoadError?.message || - initializationError?.message || - 'Unknown error occurred during startup'} -

-

Logging out and logging back in may solve the issue

-
-
- - - -
-
-
+
+ + + +

+ {configError?.message || + namespacesLoadError?.message || + initializationError?.message || + 'Unknown error occurred during startup'} +

+

Logging out and logging back in may solve the issue

+
+
+ + + +
+
); } diff --git a/clients/ui/frontend/src/app/AppRoutes.tsx b/clients/ui/frontend/src/app/AppRoutes.tsx index 3ededddb8e..bbd606d434 100644 --- a/clients/ui/frontend/src/app/AppRoutes.tsx +++ b/clients/ui/frontend/src/app/AppRoutes.tsx @@ -6,19 +6,36 @@ import { NavDataItem } from '~/app/standalone/types'; import ModelRegistrySettingsRoutes from './pages/settings/ModelRegistrySettingsRoutes'; import ModelRegistryRoutes from './pages/modelRegistry/ModelRegistryRoutes'; import ModelCatalogRoutes from './pages/modelCatalog/ModelCatalogRoutes'; +import ModelCatalogSettingsRoutes from './pages/modelCatalogSettings/ModelCatalogSettingsRoutes'; +import { modelCatalogUrl } from './routes/modelCatalog/catalogModel'; +import { + catalogSettingsUrl, + CATALOG_SETTINGS_PAGE_TITLE, +} from './routes/modelCatalogSettings/modelCatalogSettings'; +import { modelRegistryUrl } from './pages/modelRegistry/screens/routeUtils'; import useUser from './hooks/useUser'; export const useAdminSettings = (): NavDataItem[] => { const { clusterAdmin } = useUser(); + const { config } = useModularArchContext(); + const { deploymentMode } = config; + const isStandalone = deploymentMode === DeploymentMode.Standalone; + const isFederated = deploymentMode === DeploymentMode.Federated; if (!clusterAdmin) { return []; } + const settingsChildren = [{ label: 'Model Registry', path: '/model-registry-settings' }]; + // Only show Model Catalog Settings in Standalone or Federated mode + if (isStandalone || isFederated) { + settingsChildren.push({ label: CATALOG_SETTINGS_PAGE_TITLE, path: catalogSettingsUrl() }); + } + return [ { label: 'Settings', - children: [{ label: 'Model Registry', path: '/model-registry-settings' }], + children: settingsChildren, }, ]; }; @@ -32,7 +49,7 @@ export const useNavData = (): NavDataItem[] => { const baseNavItems = [ { label: 'Model Registry', - path: '/model-registry', + path: modelRegistryUrl(), }, ]; @@ -40,7 +57,7 @@ export const useNavData = (): NavDataItem[] => { if 
(isStandalone || isFederated) { baseNavItems.push({ label: 'Model Catalog', - path: '/model-catalog', + path: modelCatalogUrl(), }); } @@ -56,10 +73,13 @@ const AppRoutes: React.FC = () => { return ( - } /> - } /> + } /> + } /> {(isStandalone || isFederated) && ( - } /> + <> + } /> + } /> + )} } /> {/* TODO: [Conditional render] Follow up add testing and conditional rendering when in standalone mode */} diff --git a/clients/ui/frontend/src/app/api/modelCatalog/service.ts b/clients/ui/frontend/src/app/api/modelCatalog/service.ts index a320ce300f..98a0aa752e 100644 --- a/clients/ui/frontend/src/app/api/modelCatalog/service.ts +++ b/clients/ui/frontend/src/app/api/modelCatalog/service.ts @@ -1,16 +1,20 @@ import { APIOptions, handleRestFailures, isModArchResponse, restGET } from 'mod-arch-core'; import { + CatalogArtifactList, + CatalogFilterOptionsList, CatalogModel, - CatalogModelArtifactList, CatalogModelList, CatalogSourceList, + ModelCatalogFilterStates, } from '~/app/modelCatalogTypes'; +import { filtersToFilterQuery } from '~/app/pages/modelCatalog/utils/modelCatalogUtils'; export const getCatalogModelsBySource = (hostPath: string, queryParams: Record = {}) => ( opts: APIOptions, - sourceId: string, + sourceId?: string, + sourceLabel?: string, paginationParams?: { pageSize?: string; nextPageToken?: string; @@ -18,12 +22,17 @@ export const getCatalogModelsBySource = sortOrder?: string; }, searchKeyword?: string, + filterData?: ModelCatalogFilterStates, + filterOptions?: CatalogFilterOptionsList | null, ): Promise => { const allParams = { source: sourceId, + sourceLabel, ...paginationParams, ...(searchKeyword && { q: searchKeyword }), ...queryParams, + ...(filterData && + filterOptions && { filterQuery: filtersToFilterQuery(filterData, filterOptions) }), }; return handleRestFailures(restGET(hostPath, '/models', allParams, opts)).then((response) => { if (isModArchResponse(response)) { @@ -33,6 +42,18 @@ export const getCatalogModelsBySource = }); }; +export const 
getCatalogFilterOptionList = + (hostPath: string, queryParams: Record = {}) => + (opts: APIOptions): Promise => + handleRestFailures(restGET(hostPath, '/models/filter_options', queryParams, opts)).then( + (response) => { + if (isModArchResponse(response)) { + return response.data; + } + throw new Error('Invalid response format'); + }, + ); + export const getListSources = (hostPath: string, queryParams: Record = {}) => (opts: APIOptions): Promise => @@ -57,11 +78,11 @@ export const getCatalogModel = export const getListCatalogModelArtifacts = (hostPath: string, queryParams: Record = {}) => - (opts: APIOptions, sourceId: string, modelName: string): Promise => + (opts: APIOptions, sourceId: string, modelName: string): Promise => handleRestFailures( restGET(hostPath, `/sources/${sourceId}/artifacts/${modelName}`, queryParams, opts), ).then((response) => { - if (isModArchResponse(response)) { + if (isModArchResponse(response)) { return response.data; } throw new Error('Invalid response format'); diff --git a/clients/ui/frontend/src/app/api/modelCatalogSettings/service.ts b/clients/ui/frontend/src/app/api/modelCatalogSettings/service.ts new file mode 100644 index 0000000000..4a8ae57b98 --- /dev/null +++ b/clients/ui/frontend/src/app/api/modelCatalogSettings/service.ts @@ -0,0 +1,76 @@ +import { + APIOptions, + assembleModArchBody, + handleRestFailures, + isModArchResponse, + restCREATE, + restDELETE, + restGET, + restPATCH, +} from 'mod-arch-core'; +import { + CatalogSourceConfig, + CatalogSourceConfigList, + CatalogSourceConfigPayload, +} from '~/app/modelCatalogTypes'; + +export const getCatalogSourceConfigs = + (hostPath: string, queryParams: Record = {}) => + (opts: APIOptions): Promise => + handleRestFailures(restGET(hostPath, '/source_configs', queryParams, opts)).then((response) => { + if (isModArchResponse(response)) { + return response.data; + } + throw new Error('Invalid response format'); + }); + +export const createCatalogSourceConfig = + (hostPath: string, 
queryParams: Record = {}) => + (opts: APIOptions, data: CatalogSourceConfigPayload): Promise => + handleRestFailures( + restCREATE(hostPath, '/source_configs', assembleModArchBody(data), queryParams, opts), + ).then((response) => { + if (isModArchResponse(response)) { + return response.data; + } + throw new Error('Invalid response format'); + }); + +export const getCatalogSourceConfig = + (hostPath: string, queryParams: Record = {}) => + (opts: APIOptions, sourceId: string): Promise => + handleRestFailures(restGET(hostPath, `/source_configs/${sourceId}`, queryParams, opts)).then( + (response) => { + if (isModArchResponse(response)) { + return response.data; + } + throw new Error('Invalid response format'); + }, + ); + +export const updateCatalogSourceConfig = + (hostPath: string, queryParams: Record = {}) => + ( + opts: APIOptions, + sourceId: string, + data: CatalogSourceConfigPayload, + ): Promise => + handleRestFailures( + restPATCH( + hostPath, + `/source_configs/${sourceId}`, + assembleModArchBody(data), + queryParams, + opts, + ), + ).then((response) => { + if (isModArchResponse(response)) { + return response.data; + } + throw new Error('Invalid response format'); + }); + +export const deleteCatalogSourceConfig = + (hostPath: string, queryParams: Record = {}) => + (opts: APIOptions, sourceId: string): Promise => + handleRestFailures(restDELETE(hostPath, `/source_configs/${sourceId}`, {}, queryParams, opts)); diff --git a/clients/ui/frontend/src/app/context/modelCatalog/ModelCatalogContext.tsx b/clients/ui/frontend/src/app/context/modelCatalog/ModelCatalogContext.tsx index 81a0379ded..c5460813dc 100644 --- a/clients/ui/frontend/src/app/context/modelCatalog/ModelCatalogContext.tsx +++ b/clients/ui/frontend/src/app/context/modelCatalog/ModelCatalogContext.tsx @@ -1,20 +1,42 @@ import { useQueryParamNamespaces } from 'mod-arch-core'; +import useGenericObjectState from 'mod-arch-core/dist/utilities/useGenericObjectState'; import * as React from 'react'; +import { 
useCatalogFilterOptionList } from '~/app/hooks/modelCatalog/useCatalogFilterOptionList'; import { useCatalogSources } from '~/app/hooks/modelCatalog/useCatalogSources'; import useModelCatalogAPIState, { ModelCatalogAPIState, } from '~/app/hooks/modelCatalog/useModelCatalogAPIState'; -import { CatalogSource, CatalogSourceList } from '~/app/modelCatalogTypes'; +import { + CatalogFilterOptionsList, + CatalogSource, + CatalogSourceList, + CategoryName, + ModelCatalogFilterStates, +} from '~/app/modelCatalogTypes'; import { BFF_API_VERSION, URL_PREFIX } from '~/app/utilities/const'; +import { + ModelCatalogStringFilterKey, + ModelCatalogNumberFilterKey, +} from '~/concepts/modelCatalog/const'; export type ModelCatalogContextType = { catalogSourcesLoaded: boolean; catalogSourcesLoadError?: Error; catalogSources: CatalogSourceList | null; selectedSource: CatalogSource | undefined; - updateSelectedSource: (modelRegistry: CatalogSource | undefined) => void; + updateSelectedSource: (source: CatalogSource | undefined) => void; + selectedSourceLabel: string | undefined; + updateSelectedSourceLabel: (sourceLabel: string | undefined) => void; apiState: ModelCatalogAPIState; refreshAPIState: () => void; + filterData: ModelCatalogFilterStates; + setFilterData: ( + key: K, + value: ModelCatalogFilterStates[K], + ) => void; + filterOptions: CatalogFilterOptionsList | null; + filterOptionsLoaded: boolean; + filterOptionsLoadError?: Error; }; type ModelCatalogContextProviderProps = { @@ -26,10 +48,25 @@ export const ModelCatalogContext = React.createContext( catalogSourcesLoadError: undefined, catalogSources: null, selectedSource: undefined, + filterData: { + [ModelCatalogStringFilterKey.TASK]: [], + [ModelCatalogStringFilterKey.PROVIDER]: [], + [ModelCatalogStringFilterKey.LICENSE]: [], + [ModelCatalogStringFilterKey.LANGUAGE]: [], + [ModelCatalogStringFilterKey.HARDWARE_TYPE]: [], + [ModelCatalogStringFilterKey.USE_CASE]: [], + [ModelCatalogNumberFilterKey.MIN_RPS]: undefined, + }, 
updateSelectedSource: () => undefined, + selectedSourceLabel: undefined, + updateSelectedSourceLabel: () => undefined, // eslint-disable-next-line @typescript-eslint/consistent-type-assertions apiState: { apiAvailable: false, api: null as unknown as ModelCatalogAPIState['api'] }, refreshAPIState: () => undefined, + setFilterData: () => undefined, + filterOptions: null, + filterOptionsLoaded: false, + filterOptionsLoadError: undefined, }); export const ModelCatalogContextProvider: React.FC = ({ @@ -38,21 +75,56 @@ export const ModelCatalogContextProvider: React.FC(undefined); + const [filterData, setFilterData] = useGenericObjectState({ + [ModelCatalogStringFilterKey.TASK]: [], + [ModelCatalogStringFilterKey.PROVIDER]: [], + [ModelCatalogStringFilterKey.LICENSE]: [], + [ModelCatalogStringFilterKey.LANGUAGE]: [], + [ModelCatalogStringFilterKey.HARDWARE_TYPE]: [], + [ModelCatalogStringFilterKey.USE_CASE]: [], + [ModelCatalogNumberFilterKey.MIN_RPS]: undefined, + }); + const [filterOptions, filterOptionsLoaded, filterOptionsLoadError] = + useCatalogFilterOptionList(apiState); + const [selectedSourceLabel, setSelectedSourceLabel] = React.useState< + ModelCatalogContextType['selectedSourceLabel'] + >(CategoryName.allModels); const contextValue = React.useMemo( () => ({ - catalogSourcesLoaded: isLoaded, - catalogSourcesLoadError: error, + catalogSourcesLoaded, + catalogSourcesLoadError, catalogSources, selectedSource: selectedSource ?? undefined, updateSelectedSource: setSelectedSource, + selectedSourceLabel: selectedSourceLabel ?? 
undefined, + updateSelectedSourceLabel: setSelectedSourceLabel, apiState, refreshAPIState, + filterData, + setFilterData, + filterOptions, + filterOptionsLoaded, + filterOptionsLoadError, }), - [isLoaded, error, catalogSources, selectedSource, apiState, refreshAPIState], + [ + catalogSourcesLoaded, + catalogSourcesLoadError, + catalogSources, + selectedSource, + apiState, + refreshAPIState, + filterData, + setFilterData, + filterOptions, + filterOptionsLoaded, + filterOptionsLoadError, + selectedSourceLabel, + ], ); return ( diff --git a/clients/ui/frontend/src/app/context/modelCatalogSettings/ModelCatalogSettingsContext.tsx b/clients/ui/frontend/src/app/context/modelCatalogSettings/ModelCatalogSettingsContext.tsx new file mode 100644 index 0000000000..0f79224b8b --- /dev/null +++ b/clients/ui/frontend/src/app/context/modelCatalogSettings/ModelCatalogSettingsContext.tsx @@ -0,0 +1,70 @@ +import * as React from 'react'; +import { useQueryParamNamespaces } from 'mod-arch-core'; +import useModelCatalogSettingsAPIState, { + ModelCatalogSettingsAPIState, +} from '~/app/hooks/modelCatalogSettings/useModelCatalogSettingsAPIState'; +import { useCatalogSourceConfigs } from '~/app/hooks/modelCatalogSettings/useCatalogSourceConfigs'; +import { CatalogSourceConfigList } from '~/app/modelCatalogTypes'; +import { BFF_API_VERSION, URL_PREFIX } from '~/app/utilities/const'; + +export type ModelCatalogSettingsContextType = { + apiState: ModelCatalogSettingsAPIState; + refreshAPIState: () => void; + catalogSourceConfigs: CatalogSourceConfigList | null; + catalogSourceConfigsLoaded: boolean; + catalogSourceConfigsLoadError?: Error; + refreshCatalogSourceConfigs: () => void; +}; + +type ModelCatalogSettingsContextProviderProps = { + children: React.ReactNode; +}; + +export const ModelCatalogSettingsContext = React.createContext({ + // eslint-disable-next-line @typescript-eslint/consistent-type-assertions + apiState: { apiAvailable: false, api: null as unknown as 
ModelCatalogSettingsAPIState['api'] }, + refreshAPIState: () => undefined, + catalogSourceConfigs: null, + catalogSourceConfigsLoaded: false, + catalogSourceConfigsLoadError: undefined, + refreshCatalogSourceConfigs: () => undefined, +}); + +export const ModelCatalogSettingsContextProvider: React.FC< + ModelCatalogSettingsContextProviderProps +> = ({ children }) => { + const hostPath = `${URL_PREFIX}/api/${BFF_API_VERSION}/settings/model_catalog`; + const queryParams = useQueryParamNamespaces(); + const [apiState, refreshAPIState] = useModelCatalogSettingsAPIState(hostPath, queryParams); + const [ + catalogSourceConfigs, + catalogSourceConfigsLoaded, + catalogSourceConfigsLoadError, + refreshCatalogSourceConfigs, + ] = useCatalogSourceConfigs(apiState); + + const contextValue = React.useMemo( + () => ({ + apiState, + refreshAPIState, + catalogSourceConfigs, + catalogSourceConfigsLoaded, + catalogSourceConfigsLoadError, + refreshCatalogSourceConfigs, + }), + [ + apiState, + refreshAPIState, + catalogSourceConfigs, + catalogSourceConfigsLoaded, + catalogSourceConfigsLoadError, + refreshCatalogSourceConfigs, + ], + ); + + return ( + + {children} + + ); +}; diff --git a/clients/ui/frontend/src/app/hooks/modelCatalog/useCatalogFilterOptionList.ts b/clients/ui/frontend/src/app/hooks/modelCatalog/useCatalogFilterOptionList.ts new file mode 100644 index 0000000000..3260463f7f --- /dev/null +++ b/clients/ui/frontend/src/app/hooks/modelCatalog/useCatalogFilterOptionList.ts @@ -0,0 +1,20 @@ +import { FetchState, FetchStateCallbackPromise, useFetchState } from 'mod-arch-core'; +import React from 'react'; +import { CatalogFilterOptionsList } from '~/app/modelCatalogTypes'; +import { ModelCatalogAPIState } from './useModelCatalogAPIState'; + +type State = CatalogFilterOptionsList | null; + +export const useCatalogFilterOptionList = (apiState: ModelCatalogAPIState): FetchState => { + const call = React.useCallback>( + (opts) => { + if (!apiState.apiAvailable) { + return 
Promise.reject(new Error('API not yet available')); + } + + return apiState.api.getCatalogFilterOptionList(opts); + }, + [apiState], + ); + return useFetchState(call, null, { initialPromisePurity: true }); +}; diff --git a/clients/ui/frontend/src/app/hooks/modelCatalog/useCatalogModelArtifacts.ts b/clients/ui/frontend/src/app/hooks/modelCatalog/useCatalogModelArtifacts.ts index a04965d7b4..cf580a23a2 100644 --- a/clients/ui/frontend/src/app/hooks/modelCatalog/useCatalogModelArtifacts.ts +++ b/clients/ui/frontend/src/app/hooks/modelCatalog/useCatalogModelArtifacts.ts @@ -1,15 +1,17 @@ import { FetchState, FetchStateCallbackPromise, NotReadyError, useFetchState } from 'mod-arch-core'; import React from 'react'; -import { CatalogModelArtifactList } from '~/app/modelCatalogTypes'; +import { CatalogArtifactList } from '~/app/modelCatalogTypes'; import { useModelCatalogAPI } from './useModelCatalogAPI'; export const useCatalogModelArtifacts = ( sourceId: string, modelName: string, -): FetchState => { + isValidated?: boolean, + onlyFetchIfValidated = false, +): FetchState => { const { api, apiAvailable } = useModelCatalogAPI(); - const call = React.useCallback>( + const call = React.useCallback>( (opts) => { if (!apiAvailable) { return Promise.reject(new Error('API not yet available')); @@ -20,9 +22,12 @@ export const useCatalogModelArtifacts = ( if (!modelName) { return Promise.reject(new NotReadyError('No model name')); } + if (onlyFetchIfValidated && !isValidated) { + return Promise.reject(new NotReadyError('Model is not validated')); + } return api.getListCatalogModelArtifacts(opts, sourceId, modelName); }, - [api, apiAvailable, sourceId, modelName], + [apiAvailable, sourceId, modelName, isValidated, api, onlyFetchIfValidated], ); return useFetchState( call, diff --git a/clients/ui/frontend/src/app/hooks/modelCatalog/useCatalogModelsBySource.ts b/clients/ui/frontend/src/app/hooks/modelCatalog/useCatalogModelsBySource.ts index 18f17adc6b..2c6d9e32b0 100644 --- 
a/clients/ui/frontend/src/app/hooks/modelCatalog/useCatalogModelsBySource.ts +++ b/clients/ui/frontend/src/app/hooks/modelCatalog/useCatalogModelsBySource.ts @@ -1,6 +1,11 @@ +import { useFetchState, FetchStateCallbackPromise } from 'mod-arch-core'; import React from 'react'; -import { NotReadyError } from 'mod-arch-core'; -import { CatalogModel } from '~/app/modelCatalogTypes'; +import { + CatalogFilterOptionsList, + CatalogModel, + CatalogModelList, + ModelCatalogFilterStates, +} from '~/app/modelCatalogTypes'; import { useModelCatalogAPI } from './useModelCatalogAPI'; type PaginatedCatalogModelList = { @@ -11,120 +16,138 @@ type PaginatedCatalogModelList = { loadMore: () => void; isLoadingMore: boolean; hasMore: boolean; + refresh: () => void; }; -interface CatalogModelsState { - items: CatalogModel[]; - nextPageToken: string; - totalSize: number; - isLoading: boolean; - isLoadingMore: boolean; - loaded: boolean; - error: Error | undefined; -} - -type CatalogModelList = [ - models: PaginatedCatalogModelList, - catalogModelLoaded: boolean, - catalogModelLoadError: Error | undefined, - refresh: () => void, -]; +type ModelList = { + catalogModels: PaginatedCatalogModelList; + catalogModelsLoaded: boolean; + catalogModelsLoadError: Error | undefined; + refresh: () => void; +}; export const useCatalogModelsBySources = ( - sourceId: string, + sourceId?: string, + sourceLabel?: string, pageSize = 10, searchQuery = '', -): CatalogModelList => { + filterData?: ModelCatalogFilterStates, + filterOptions?: CatalogFilterOptionsList | null, +): ModelList => { const { api, apiAvailable } = useModelCatalogAPI(); - const [state, setState] = React.useState({ - items: [], - nextPageToken: '', - totalSize: 0, - isLoading: false, - isLoadingMore: false, - loaded: false, - error: undefined, - }); - - const fetchModels = React.useCallback( - async (nextPageToken?: string) => { - const isLoadMore = Boolean(nextPageToken); - - setState((prev) => ({ - ...prev, - isLoading: !isLoadMore, - 
isLoadingMore: isLoadMore, - })); - - try { - if (!apiAvailable) { - return await Promise.reject(new Error('API not yet available')); - } - if (!sourceId) { - return await Promise.reject(new NotReadyError('No source id')); - } - const response = await api.getCatalogModelsBySource( - {}, - sourceId, - { - pageSize: pageSize.toString(), - ...(nextPageToken && { nextPageToken }), - }, - searchQuery && searchQuery.trim() ? searchQuery.trim() : undefined, - ); - - setState((prev) => ({ - items: isLoadMore ? [...prev.items, ...response.items] : response.items, - nextPageToken: response.nextPageToken, - totalSize: response.size, - isLoading: false, - isLoadingMore: false, - loaded: true, - error: undefined, - })); - } catch (error) { - setState((prev) => ({ - ...prev, - isLoading: false, - isLoadingMore: false, - loaded: true, - error: new Error( - `Failed to load models ${error instanceof Error ? error.message : String(error)}`, - ), - })); + const [allItems, setAllItems] = React.useState([]); + const [totalSize, setTotalSize] = React.useState(0); + const [nextPageToken, setNextPageToken] = React.useState(''); + const [isLoadingMore, setIsLoadingMore] = React.useState(false); + + const fetchModels = React.useCallback>( + (opts) => { + if (!apiAvailable) { + return Promise.reject(new Error('API not yet available')); } + + return api.getCatalogModelsBySource( + opts, + sourceId, + sourceLabel, + { pageSize: pageSize.toString() }, + searchQuery.trim() || undefined, + filterData, + filterOptions, + ); }, - [api, sourceId, pageSize, apiAvailable, searchQuery], + [api, apiAvailable, sourceId, pageSize, searchQuery, filterData, filterOptions, sourceLabel], + ); + + const [firstPageData, loaded, error, refetch] = useFetchState( + fetchModels, + { items: [], size: 0, pageSize: 10, nextPageToken: '' }, + { initialPromisePurity: true }, ); React.useEffect(() => { - fetchModels(); - }, [fetchModels]); + if (loaded && !error && firstPageData.items.length > 0) { + 
setAllItems(firstPageData.items); + setTotalSize(firstPageData.size); + setNextPageToken(firstPageData.nextPageToken); + } + }, [firstPageData, loaded, error]); - const loadMore = React.useCallback(() => { - if (state.nextPageToken && !state.isLoadingMore) { - fetchModels(state.nextPageToken); + const loadMore = React.useCallback(async () => { + if (!nextPageToken || isLoadingMore || !apiAvailable) { + return; } - }, [fetchModels, state.nextPageToken, state.isLoadingMore]); + + setIsLoadingMore(true); + + try { + const response = await api.getCatalogModelsBySource( + {}, + sourceId, + sourceLabel, + { + pageSize: pageSize.toString(), + nextPageToken, + }, + searchQuery.trim() || undefined, + filterData, + filterOptions, + ); + + setAllItems((prev) => [...prev, ...response.items]); + setTotalSize(response.size); + setNextPageToken(response.nextPageToken); + } catch (err) { + throw new Error( + `Failed to load more models: ${err instanceof Error ? err.message : String(err)}`, + ); + } finally { + setIsLoadingMore(false); + } + }, [ + api, + apiAvailable, + sourceId, + pageSize, + searchQuery, + nextPageToken, + isLoadingMore, + sourceLabel, + filterData, + filterOptions, + ]); + + React.useEffect(() => { + setAllItems([]); + setTotalSize(0); + setNextPageToken(''); + setIsLoadingMore(false); + }, [sourceId, searchQuery, sourceLabel, filterData, filterOptions]); const refresh = React.useCallback(() => { - setState((prev) => ({ ...prev, items: [], nextPageToken: '' })); - fetchModels(); - }, [fetchModels]); - - return [ - { - items: state.items, - size: state.totalSize, - pageSize: 10, - nextPageToken: state.nextPageToken, - loadMore, - isLoadingMore: state.isLoadingMore, - hasMore: Boolean(state.nextPageToken), - }, - state.loaded, - state.error, + setAllItems([]); + setTotalSize(0); + setNextPageToken(''); + setIsLoadingMore(false); + refetch(); + }, [refetch]); + + const paginatedData: PaginatedCatalogModelList = { + items: allItems, + size: totalSize, + pageSize: 
firstPageData.pageSize, + nextPageToken, + loadMore, + isLoadingMore, + hasMore: Boolean(nextPageToken), + refresh, + }; + + return { + catalogModels: paginatedData, + catalogModelsLoaded: loaded, + catalogModelsLoadError: error, refresh, - ]; + }; }; diff --git a/clients/ui/frontend/src/app/hooks/modelCatalog/useModelCatalogAPIState.tsx b/clients/ui/frontend/src/app/hooks/modelCatalog/useModelCatalogAPIState.tsx index 633728699b..5b7905be17 100644 --- a/clients/ui/frontend/src/app/hooks/modelCatalog/useModelCatalogAPIState.tsx +++ b/clients/ui/frontend/src/app/hooks/modelCatalog/useModelCatalogAPIState.tsx @@ -1,6 +1,7 @@ import { APIState, useAPIState } from 'mod-arch-core'; import React from 'react'; import { + getCatalogFilterOptionList, getCatalogModel, getCatalogModelsBySource, getListCatalogModelArtifacts, @@ -20,6 +21,7 @@ const useModelCatalogAPIState = ( getListSources: getListSources(path, queryParameters), getCatalogModel: getCatalogModel(path, queryParameters), getListCatalogModelArtifacts: getListCatalogModelArtifacts(path, queryParameters), + getCatalogFilterOptionList: getCatalogFilterOptionList(path, queryParameters), }), [queryParameters], ); diff --git a/clients/ui/frontend/src/app/hooks/modelCatalogSettings/useCatalogSourceConfigs.ts b/clients/ui/frontend/src/app/hooks/modelCatalogSettings/useCatalogSourceConfigs.ts new file mode 100644 index 0000000000..02c5eb83e7 --- /dev/null +++ b/clients/ui/frontend/src/app/hooks/modelCatalogSettings/useCatalogSourceConfigs.ts @@ -0,0 +1,20 @@ +import { FetchState, FetchStateCallbackPromise, useFetchState } from 'mod-arch-core'; +import React from 'react'; +import { CatalogSourceConfigList } from '~/app/modelCatalogTypes'; +import { ModelCatalogSettingsAPIState } from './useModelCatalogSettingsAPIState'; + +export const useCatalogSourceConfigs = ( + apiState: ModelCatalogSettingsAPIState, +): FetchState => { + const call = React.useCallback>( + (opts) => { + if (!apiState.apiAvailable) { + return 
Promise.reject(new Error('API not yet available')); + } + + return apiState.api.getCatalogSourceConfigs(opts); + }, + [apiState], + ); + return useFetchState(call, { catalogs: [] }, { initialPromisePurity: true }); +}; diff --git a/clients/ui/frontend/src/app/hooks/modelCatalogSettings/useModelCatalogSettingsAPI.ts b/clients/ui/frontend/src/app/hooks/modelCatalogSettings/useModelCatalogSettingsAPI.ts new file mode 100644 index 0000000000..4ba9a092cf --- /dev/null +++ b/clients/ui/frontend/src/app/hooks/modelCatalogSettings/useModelCatalogSettingsAPI.ts @@ -0,0 +1,18 @@ +import React from 'react'; +import { ModelCatalogSettingsContext } from '~/app/context/modelCatalogSettings/ModelCatalogSettingsContext'; +import { ModelCatalogSettingsAPIState } from './useModelCatalogSettingsAPIState'; + +type UseModelCatalogSettingsAPI = ModelCatalogSettingsAPIState & { + refreshAllAPI: () => void; +}; + +export const useModelCatalogSettingsAPI = (): UseModelCatalogSettingsAPI => { + const { apiState, refreshAPIState: refreshAllAPI } = React.useContext( + ModelCatalogSettingsContext, + ); + + return { + refreshAllAPI, + ...apiState, + }; +}; diff --git a/clients/ui/frontend/src/app/hooks/modelCatalogSettings/useModelCatalogSettingsAPIState.tsx b/clients/ui/frontend/src/app/hooks/modelCatalogSettings/useModelCatalogSettingsAPIState.tsx new file mode 100644 index 0000000000..583a2100c3 --- /dev/null +++ b/clients/ui/frontend/src/app/hooks/modelCatalogSettings/useModelCatalogSettingsAPIState.tsx @@ -0,0 +1,32 @@ +import { APIState, useAPIState } from 'mod-arch-core'; +import React from 'react'; +import { + createCatalogSourceConfig, + deleteCatalogSourceConfig, + getCatalogSourceConfig, + getCatalogSourceConfigs, + updateCatalogSourceConfig, +} from '~/app/api/modelCatalogSettings/service'; +import { ModelCatalogSettingsAPIs } from '~/app/modelCatalogTypes'; + +export type ModelCatalogSettingsAPIState = APIState; + +const useModelCatalogSettingsAPIState = ( + hostPath: string | 
null, + queryParameters?: Record, +): [apiState: ModelCatalogSettingsAPIState, refreshAPIState: () => void] => { + const createAPI = React.useCallback( + (path: string) => ({ + getCatalogSourceConfigs: getCatalogSourceConfigs(path, queryParameters), + createCatalogSourceConfig: createCatalogSourceConfig(path, queryParameters), + getCatalogSourceConfig: getCatalogSourceConfig(path, queryParameters), + updateCatalogSourceConfig: updateCatalogSourceConfig(path, queryParameters), + deleteCatalogSourceConfig: deleteCatalogSourceConfig(path, queryParameters), + }), + [queryParameters], + ); + + return useAPIState(hostPath, createAPI); +}; + +export default useModelCatalogSettingsAPIState; diff --git a/clients/ui/frontend/src/app/modelCatalogTypes.ts b/clients/ui/frontend/src/app/modelCatalogTypes.ts index d676005105..8eff766bf5 100644 --- a/clients/ui/frontend/src/app/modelCatalogTypes.ts +++ b/clients/ui/frontend/src/app/modelCatalogTypes.ts @@ -1,10 +1,28 @@ import { APIOptions } from 'mod-arch-core'; -import { ModelRegistryCustomProperties } from './types'; +import { + ModelCatalogTask, + ModelCatalogProvider, + ModelCatalogLicense, + AllLanguageCode, + ModelCatalogStringFilterKey, + ModelCatalogNumberFilterKey, + LatencyMetricFieldName, + UseCaseOptionValue, +} from '~/concepts/modelCatalog/const'; +import { + ModelRegistryCustomProperties, + ModelRegistryCustomPropertyString, + ModelRegistryCustomPropertyInt, + ModelRegistryCustomPropertyDouble, +} from './types'; export type CatalogSource = { id: string; name: string; + labels: string[]; enabled?: boolean; + status?: 'available' | 'error' | 'disabled'; + error?: string; }; export type CatalogSourceList = ModelCatalogListParams & { items: CatalogSource[] }; @@ -35,18 +53,106 @@ export type ModelCatalogListParams = { export type CatalogModelList = ModelCatalogListParams & { items: CatalogModel[] }; -export type CatalogModelArtifact = { +export enum CatalogArtifactType { + modelArtifact = 'model-artifact', + 
metricsArtifact = 'metrics-artifact', +} + +export enum MetricsType { + accuracyMetrics = 'accuracy-metrics', + performanceMetrics = 'performance-metrics', +} + +export enum CategoryName { + allModels = 'All models', + communityAndCustomModels = 'Community and custom', +} + +export enum SourceLabel { + other = 'null', +} + +export enum CatalogSourceType { + YAML = 'yaml', + HUGGING_FACE = 'huggingface', +} + +export type CatalogArtifactBase = { createTimeSinceEpoch: string; lastUpdateTimeSinceEpoch: string; - uri: string; customProperties: ModelRegistryCustomProperties; }; -export type CatalogModelArtifactList = ModelCatalogListParams & { items: CatalogModelArtifact[] }; +export type CatalogModelArtifact = CatalogArtifactBase & { + artifactType: CatalogArtifactType.modelArtifact; + uri: string; +}; + +export type PerformanceMetricsCustomProperties = { + config_id?: ModelRegistryCustomPropertyString; + hardware_type?: ModelRegistryCustomPropertyString; + hardware_count?: ModelRegistryCustomPropertyInt; + requests_per_second?: ModelRegistryCustomPropertyDouble; + // Token metrics + mean_input_tokens?: ModelRegistryCustomPropertyDouble; + mean_output_tokens?: ModelRegistryCustomPropertyDouble; + // Use case information + use_case?: ModelRegistryCustomPropertyString; + // Framework information + framework?: ModelRegistryCustomPropertyString; + framework_version?: ModelRegistryCustomPropertyString; + // Additional fields from ADR (excluded from display per requirements) + docker_image?: ModelRegistryCustomPropertyString; + entrypoint?: ModelRegistryCustomPropertyString; + inserted_at?: ModelRegistryCustomPropertyString; + created_at?: ModelRegistryCustomPropertyString; + updated_at?: ModelRegistryCustomPropertyString; + model_hf_repo_name?: ModelRegistryCustomPropertyString; + scenario_id?: ModelRegistryCustomPropertyString; +} & Partial>; + +export type AccuracyMetricsCustomProperties = { + // overall_average?: ModelRegistryCustomPropertyDouble; // NOTE: 
overall_average is currently omitted from the API and will be restored + arc_v1?: ModelRegistryCustomPropertyDouble; +} & Record; + +export type CatalogPerformanceMetricsArtifact = Omit & { + artifactType: CatalogArtifactType.metricsArtifact; + metricsType: MetricsType.performanceMetrics; + customProperties: PerformanceMetricsCustomProperties; +}; + +export type CatalogAccuracyMetricsArtifact = Omit & { + artifactType: CatalogArtifactType.metricsArtifact; + metricsType: MetricsType.accuracyMetrics; + customProperties: AccuracyMetricsCustomProperties; +}; + +export type CatalogMetricsArtifact = + | CatalogPerformanceMetricsArtifact + | CatalogAccuracyMetricsArtifact; + +export type CatalogArtifacts = CatalogModelArtifact | CatalogMetricsArtifact; + +export type CatalogArtifactList = ModelCatalogListParams & { items: CatalogArtifacts[] }; + +export type CatalogFilterNumberOption = { + type: 'number'; + range: { + max: number; + min: number; + }; +}; + +export type CatalogFilterStringOption = { + type: 'string'; + values: T[]; +}; export type GetCatalogModelsBySource = ( opts: APIOptions, - sourceId: string, + sourceId?: string, + sourceLabel?: string, paginationParams?: { pageSize?: string; nextPageToken?: string; @@ -54,6 +160,8 @@ export type GetCatalogModelsBySource = ( sortOrder?: string; }, searchKeyword?: string, + filterData?: ModelCatalogFilterStates, + filterOptions?: CatalogFilterOptionsList | null, ) => Promise; export type GetListSources = (opts: APIOptions) => Promise; @@ -68,16 +176,118 @@ export type GetListCatalogModelArtifacts = ( opts: APIOptions, sourceId: string, modelName: string, -) => Promise; +) => Promise; + +export type GetCatalogFilterOptionList = (opts: APIOptions) => Promise; export type ModelCatalogAPIs = { getCatalogModelsBySource: GetCatalogModelsBySource; getListSources: GetListSources; getCatalogModel: GetCatalogModel; getListCatalogModelArtifacts: GetListCatalogModelArtifacts; + getCatalogFilterOptionList: 
GetCatalogFilterOptionList; }; export type CatalogModelDetailsParams = { sourceId?: string; modelName?: string; }; + +export type ModelCatalogFilterKey = + | ModelCatalogStringFilterKey + | ModelCatalogNumberFilterKey + | LatencyMetricFieldName; + +// Not used for a run time value, just for mapping other types +export type ModelCatalogStringFilterValueType = { + [ModelCatalogStringFilterKey.TASK]: ModelCatalogTask; + [ModelCatalogStringFilterKey.PROVIDER]: ModelCatalogProvider; + [ModelCatalogStringFilterKey.LICENSE]: ModelCatalogLicense; + [ModelCatalogStringFilterKey.LANGUAGE]: AllLanguageCode; + [ModelCatalogStringFilterKey.HARDWARE_TYPE]: string; + [ModelCatalogStringFilterKey.USE_CASE]: UseCaseOptionValue; +}; + +export type ModelCatalogStringFilterOptions = { + [key in ModelCatalogStringFilterKey]: CatalogFilterStringOption< + ModelCatalogStringFilterValueType[key] + >; +}; + +export type CatalogFilterOptions = ModelCatalogStringFilterOptions & { + [key in ModelCatalogNumberFilterKey]: CatalogFilterNumberOption; +} & { + // Allow additional latency metric field names + [key in LatencyMetricFieldName]?: CatalogFilterNumberOption; +}; + +export type CatalogFilterOptionsList = { + filters: CatalogFilterOptions; +}; + +export type ModelCatalogFilterStates = { + [ModelCatalogStringFilterKey.TASK]: ModelCatalogTask[]; + [ModelCatalogStringFilterKey.PROVIDER]: ModelCatalogProvider[]; + [ModelCatalogStringFilterKey.LICENSE]: ModelCatalogLicense[]; + [ModelCatalogStringFilterKey.LANGUAGE]: AllLanguageCode[]; + [ModelCatalogStringFilterKey.HARDWARE_TYPE]: string[]; + [ModelCatalogStringFilterKey.USE_CASE]: UseCaseOptionValue[]; +} & { + [key in ModelCatalogNumberFilterKey]: number | undefined; +} & { + [key in LatencyMetricFieldName]?: number | undefined; +}; + +// Model Catalog Settings types +export type CatalogSourceConfigCommon = { + id: string; + name: string; + enabled?: boolean; + labels?: string[]; + includedModels?: string[]; + excludedModels?: string[]; + 
isDefault?: boolean; +}; + +export type YamlCatalogSourceConfig = CatalogSourceConfigCommon & { + type: CatalogSourceType.YAML; + yaml?: string; +}; + +export type HuggingFaceCatalogSourceConfig = CatalogSourceConfigCommon & { + type: CatalogSourceType.HUGGING_FACE; + allowedOrganization?: string; + apiKey?: string; +}; + +export type CatalogSourceConfig = YamlCatalogSourceConfig | HuggingFaceCatalogSourceConfig; + +export type CatalogSourceConfigPayload = CatalogSourceConfig; + +export type CatalogSourceConfigList = { + catalogs: CatalogSourceConfig[]; +}; + +export type GetCatalogSourceConfigs = (opts: APIOptions) => Promise; +export type CreateCatalogSourceConfig = ( + opts: APIOptions, + data: CatalogSourceConfigPayload, +) => Promise; +export type GetCatalogSourceConfig = ( + opts: APIOptions, + sourceId: string, +) => Promise; +export type UpdateCatalogSourceConfig = ( + opts: APIOptions, + sourceId: string, + data: CatalogSourceConfigPayload, +) => Promise; +export type DeleteCatalogSourceConfig = (opts: APIOptions, sourceId: string) => Promise; + +export type ModelCatalogSettingsAPIs = { + getCatalogSourceConfigs: GetCatalogSourceConfigs; + createCatalogSourceConfig: CreateCatalogSourceConfig; + getCatalogSourceConfig: GetCatalogSourceConfig; + updateCatalogSourceConfig: UpdateCatalogSourceConfig; + deleteCatalogSourceConfig: DeleteCatalogSourceConfig; +}; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/ModelCatalogCoreLoader.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/ModelCatalogCoreLoader.tsx index 083777e60d..26b26bf700 100644 --- a/clients/ui/frontend/src/app/pages/modelCatalog/ModelCatalogCoreLoader.tsx +++ b/clients/ui/frontend/src/app/pages/modelCatalog/ModelCatalogCoreLoader.tsx @@ -9,54 +9,32 @@ import { WhosMyAdministrator, } from 'mod-arch-shared'; import * as React from 'react'; -import { Navigate, Outlet, useParams } from 'react-router-dom'; +import { Outlet } from 'react-router-dom'; import { ModelCatalogContext } from 
'~/app/context/modelCatalog/ModelCatalogContext'; -import { modelCatalogUrl } from '~/app/routes/modelCatalog/catalogModel'; import EmptyModelCatalogState from './EmptyModelCatalogState'; -import InvalidCatalogSource from './screens/InvalidCatalogSource'; -import ModelCatalogSourceSelectorNavigator from './screens/ModelCatalogSourceSelectorNavigator'; -type ApplicationPageProps = React.ComponentProps; - -type ApplicationPageRenderState = Pick< - ApplicationPageProps, - 'emptyStatePage' | 'empty' | 'headerContent' ->; - -type ModelCatalogCoreLoaderrProps = { - getInvalidRedirectPath: (sourceId: string) => string; -}; - -const ModelCatalogCoreLoader: React.FC = ({ - getInvalidRedirectPath, -}) => { - const { sourceId } = useParams<{ sourceId: string }>(); - - const { - catalogSources, - catalogSourcesLoaded, - catalogSourcesLoadError, - selectedSource, - updateSelectedSource, - } = React.useContext(ModelCatalogContext); +const ModelCatalogCoreLoader: React.FC = () => { + const { catalogSources, catalogSourcesLoaded, catalogSourcesLoadError } = + React.useContext(ModelCatalogContext); const { isMUITheme } = useThemeContext(); - const modelCatalogFromRoute = catalogSources?.items.find((source) => source.id === sourceId); - - React.useEffect(() => { - if (modelCatalogFromRoute && !selectedSource) { - updateSelectedSource(modelCatalogFromRoute); - } - }, [modelCatalogFromRoute, updateSelectedSource, selectedSource]); - if (catalogSourcesLoadError) { return ( - - - {catalogSourcesLoadError.message} - - + } + description="Discover models that are available for your organization to register, deploy, and customize." 
+ headerContent={null} + empty + emptyStatePage={ + + + {catalogSourcesLoadError.message} + + + } + loaded + /> ); } @@ -73,57 +51,35 @@ const ModelCatalogCoreLoader: React.FC = ({ ); } - let renderStateProps: ApplicationPageRenderState & { children?: React.ReactNode }; if (catalogSources?.items.length === 0) { - renderStateProps = { - empty: true, - emptyStatePage: ( - ( - // for now, added the modelRegistrySettings for this - will remove once we update the shared library - - )} - customAction={isMUITheme ? : } - /> - ), - headerContent: null, - }; - } else if (sourceId) { - const foundCatalogSource = catalogSources?.items.find((source) => source.id === sourceId); - if (foundCatalogSource) { - // Render the content - return ; - } - // They ended up on a non-valid project path - renderStateProps = { - empty: true, - emptyStatePage: , - }; - } else { - // Redirect the namespace suffix into the URL - const redirectCatalogSource = selectedSource ?? catalogSources?.items[0]; - return ; + return ( + } + description="Discover models that are available for your organization to register, deploy, and customize." + empty + emptyStatePage={ + ( + + )} + customAction={isMUITheme ? : } + /> + } + headerContent={null} + loaded + provideChildrenPadding + /> + ); } - return ( - } - description="Discover models that are available for your organization to register, deploy, and customize." 
- headerContent={ - modelCatalogUrl(id)} /> - } - {...renderStateProps} - loaded - provideChildrenPadding - /> - ); + return ; }; export default ModelCatalogCoreLoader; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/ModelCatalogRoutes.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/ModelCatalogRoutes.tsx index 65f963a536..2be6482e23 100644 --- a/clients/ui/frontend/src/app/pages/modelCatalog/ModelCatalogRoutes.tsx +++ b/clients/ui/frontend/src/app/pages/modelCatalog/ModelCatalogRoutes.tsx @@ -1,26 +1,27 @@ import * as React from 'react'; import { Navigate, Route, Routes } from 'react-router-dom'; import { ModelCatalogContextProvider } from '~/app/context/modelCatalog/ModelCatalogContext'; -import { modelCatalogUrl } from '~/app/routes/modelCatalog/catalogModel'; import ModelCatalogCoreLoader from './ModelCatalogCoreLoader'; import ModelDetailsPage from './screens/ModelDetailsPage'; import RegisterCatalogModelPage from './screens/RegisterCatalogModelPage'; import ModelCatalog from './screens/ModelCatalog'; +import { ModelDetailsTab } from './screens/ModelDetailsTabs'; const ModelCatalogRoutes: React.FC = () => ( - modelCatalogUrl(sourceId)} - /> - } - > + }> } /> - } /> + } /> + } + /> + } + /> } /> } /> diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/HardwareConfigurationFilterToolbar.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/HardwareConfigurationFilterToolbar.tsx new file mode 100644 index 0000000000..99213e2e5a --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/HardwareConfigurationFilterToolbar.tsx @@ -0,0 +1,53 @@ +import * as React from 'react'; +import { Toolbar, ToolbarContent, ToolbarGroup, ToolbarItem } from '@patternfly/react-core'; +import { ModelCatalogContext } from '~/app/context/modelCatalog/ModelCatalogContext'; +import { CatalogPerformanceMetricsArtifact } from '~/app/modelCatalogTypes'; +import { clearAllFilters } from 
'~/app/pages/modelCatalog/utils/hardwareConfigurationFilterUtils'; +import WorkloadTypeFilter from './globalFilters/WorkloadTypeFilter'; +import HardwareTypeFilter from './globalFilters/HardwareTypeFilter'; +import MinRpsFilter from './globalFilters/MinRpsFilter'; +import MaxLatencyFilter from './globalFilters/MaxLatencyFilter'; + +type HardwareConfigurationFilterToolbarProps = { + performanceArtifacts: CatalogPerformanceMetricsArtifact[]; +}; + +const HardwareConfigurationFilterToolbar: React.FC = ({ + performanceArtifacts, +}) => { + const { filterOptions, filterOptionsLoaded, filterOptionsLoadError, setFilterData } = + React.useContext(ModelCatalogContext); + + if (!filterOptionsLoaded || filterOptionsLoadError || !filterOptions) { + return null; + } + + const handleClearAllFilters = () => { + clearAllFilters(setFilterData); + }; + + return ( + + + + + + + + + + + + + + + + + + + + + ); +}; + +export default HardwareConfigurationFilterToolbar; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/HardwareConfigurationTable.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/HardwareConfigurationTable.tsx new file mode 100644 index 0000000000..a37fd00d84 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/HardwareConfigurationTable.tsx @@ -0,0 +1,67 @@ +import * as React from 'react'; +import { DashboardEmptyTableView, Table } from 'mod-arch-shared'; +import { Spinner } from '@patternfly/react-core'; +import { OuterScrollContainer } from '@patternfly/react-table'; +import { CatalogPerformanceMetricsArtifact } from '~/app/modelCatalogTypes'; +import { ModelCatalogContext } from '~/app/context/modelCatalog/ModelCatalogContext'; +import { + filterHardwareConfigurationArtifacts, + clearAllFilters, +} from '~/app/pages/modelCatalog/utils/hardwareConfigurationFilterUtils'; +import { hardwareConfigColumns } from './HardwareConfigurationTableColumns'; +import HardwareConfigurationTableRow from 
'./HardwareConfigurationTableRow'; +import HardwareConfigurationFilterToolbar from './HardwareConfigurationFilterToolbar'; + +type HardwareConfigurationTableProps = { + performanceArtifacts: CatalogPerformanceMetricsArtifact[]; + isLoading?: boolean; +}; + +const HardwareConfigurationTable: React.FC = ({ + performanceArtifacts, + isLoading = false, +}) => { + const { filterData, setFilterData } = React.useContext(ModelCatalogContext); + + // Apply filters to the artifacts + const filteredArtifacts = React.useMemo( + () => filterHardwareConfigurationArtifacts(performanceArtifacts, filterData), + [performanceArtifacts, filterData], + ); + + if (isLoading) { + return ; + } + + const toolbarContent = ( + + ); + const handleClearFilters = () => { + clearAllFilters(setFilterData); + }; + + return ( + + } + rowRenderer={(artifact) => ( + + )} + /> + + ); +}; + +export default HardwareConfigurationTable; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/HardwareConfigurationTableColumns.ts b/clients/ui/frontend/src/app/pages/modelCatalog/components/HardwareConfigurationTableColumns.ts new file mode 100644 index 0000000000..5e7f3bd6d4 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/HardwareConfigurationTableColumns.ts @@ -0,0 +1,283 @@ +import { SortableData } from 'mod-arch-shared'; +import { + CatalogPerformanceMetricsArtifact, + PerformanceMetricsCustomProperties, +} from '~/app/modelCatalogTypes'; +import { + getHardwareConfiguration, + getWorkloadType, +} from '~/app/pages/modelCatalog/utils/performanceMetricsUtils'; +import { getDoubleValue, getStringValue } from '~/app/utils'; + +export type HardwareConfigColumnField = keyof PerformanceMetricsCustomProperties; + +export type HardwareConfigColumn = Omit< + SortableData, + 'field' +> & { field: HardwareConfigColumnField }; + +/*Non-breaking space constant (U+00A0) used to selectively control word wrap in column labels. 
+This prevents word wrapping into 3 lines (e.g., keeps "TTFT Latency" together instead of "TTFT\nLatency\nMean"). +*/ +const NBSP = '\u00A0'; + +export const hardwareConfigColumns: HardwareConfigColumn[] = [ + { + field: 'hardware_type', + label: 'Hardware Configuration', + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => getHardwareConfiguration(a).localeCompare(getHardwareConfiguration(b)), + isStickyColumn: true, + stickyMinWidth: '162px', + stickyLeftOffset: '0', + modifier: 'wrap', + }, + { + field: 'use_case', + label: 'Workload type', + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => getWorkloadType(a).localeCompare(getWorkloadType(b)), + isStickyColumn: true, + stickyMinWidth: '132px', + stickyLeftOffset: '162px', + modifier: 'wrap', + hasRightBorder: true, + }, + { + field: 'requests_per_second', + label: `RPS${NBSP}per Replica`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'requests_per_second') - + getDoubleValue(b.customProperties, 'requests_per_second'), + width: 20, + modifier: 'wrap', + }, + { + field: 'ttft_mean', + label: `TTFT${NBSP}Latency Mean`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'ttft_mean') - + getDoubleValue(b.customProperties, 'ttft_mean'), + width: 20, + modifier: 'wrap', + }, + { + field: 'ttft_p90', + label: `TTFT${NBSP}Latency P90`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'ttft_p90') - + getDoubleValue(b.customProperties, 'ttft_p90'), + width: 20, + modifier: 'wrap', + }, + { + field: 'ttft_p95', + label: `TTFT${NBSP}Latency P95`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: 
CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'ttft_p95') - + getDoubleValue(b.customProperties, 'ttft_p95'), + width: 20, + modifier: 'wrap', + }, + { + field: 'ttft_p99', + label: `TTFT${NBSP}Latency P99`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'ttft_p99') - + getDoubleValue(b.customProperties, 'ttft_p99'), + width: 20, + modifier: 'wrap', + }, + { + field: 'e2e_mean', + label: `E2E${NBSP}Latency Mean`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'e2e_mean') - + getDoubleValue(b.customProperties, 'e2e_mean'), + width: 20, + modifier: 'wrap', + }, + { + field: 'e2e_p90', + label: `E2E${NBSP}Latency P90`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'e2e_p90') - getDoubleValue(b.customProperties, 'e2e_p90'), + width: 20, + modifier: 'wrap', + }, + { + field: 'e2e_p95', + label: `E2E${NBSP}Latency P95`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'e2e_p95') - getDoubleValue(b.customProperties, 'e2e_p95'), + width: 20, + modifier: 'wrap', + }, + { + field: 'e2e_p99', + label: `E2E${NBSP}Latency P99`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'e2e_p99') - getDoubleValue(b.customProperties, 'e2e_p99'), + width: 20, + modifier: 'wrap', + }, + { + field: 'tps_mean', + label: `TPS${NBSP}Latency Mean`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'tps_mean') - + getDoubleValue(b.customProperties, 'tps_mean'), + width: 20, + 
modifier: 'wrap', + }, + { + field: 'tps_p90', + label: `TPS${NBSP}Latency P90`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'tps_p90') - getDoubleValue(b.customProperties, 'tps_p90'), + width: 20, + modifier: 'wrap', + }, + { + field: 'tps_p95', + label: `TPS${NBSP}Latency P95`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'tps_p95') - getDoubleValue(b.customProperties, 'tps_p95'), + width: 20, + modifier: 'wrap', + }, + { + field: 'tps_p99', + label: `TPS${NBSP}Latency P99`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'tps_p99') - getDoubleValue(b.customProperties, 'tps_p99'), + width: 20, + modifier: 'wrap', + }, + { + field: 'itl_mean', + label: `ITL${NBSP}Latency Mean`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'itl_mean') - + getDoubleValue(b.customProperties, 'itl_mean'), + width: 20, + modifier: 'wrap', + }, + { + field: 'itl_p90', + label: `ITL${NBSP}Latency P90`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'itl_p90') - getDoubleValue(b.customProperties, 'itl_p90'), + width: 20, + modifier: 'wrap', + }, + { + field: 'itl_p95', + label: `ITL${NBSP}Latency P95`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'itl_p95') - getDoubleValue(b.customProperties, 'itl_p95'), + width: 20, + modifier: 'wrap', + }, + { + field: 'itl_p99', + label: `ITL${NBSP}Latency P99`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): 
number => + getDoubleValue(a.customProperties, 'itl_p99') - getDoubleValue(b.customProperties, 'itl_p99'), + width: 20, + modifier: 'wrap', + }, + { + field: 'mean_input_tokens', + label: `Mean${NBSP}Input Tokens`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'mean_input_tokens') - + getDoubleValue(b.customProperties, 'mean_input_tokens'), + width: 20, + modifier: 'wrap', + }, + { + field: 'mean_output_tokens', + label: `Mean${NBSP}Output Tokens`, + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => + getDoubleValue(a.customProperties, 'mean_output_tokens') - + getDoubleValue(b.customProperties, 'mean_output_tokens'), + width: 20, + modifier: 'wrap', + }, + { + field: 'framework_version', + label: 'vLLM Version', + sortable: ( + a: CatalogPerformanceMetricsArtifact, + b: CatalogPerformanceMetricsArtifact, + ): number => { + const versionA = getStringValue(a.customProperties, 'framework_version'); + const versionB = getStringValue(b.customProperties, 'framework_version'); + return versionA.localeCompare(versionB); + }, + width: 20, + modifier: 'wrap', + }, +]; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/HardwareConfigurationTableRow.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/HardwareConfigurationTableRow.tsx new file mode 100644 index 0000000000..22aa51e9c3 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/HardwareConfigurationTableRow.tsx @@ -0,0 +1,83 @@ +import * as React from 'react'; +import { Td, Tr } from '@patternfly/react-table'; +import { CatalogPerformanceMetricsArtifact } from '~/app/modelCatalogTypes'; +import { + formatLatency, + formatTokenValue, + getHardwareConfiguration, + getWorkloadType, +} from '~/app/pages/modelCatalog/utils/performanceMetricsUtils'; +import { getDoubleValue, getIntValue, getStringValue } from 
'~/app/utils'; +import { + HardwareConfigColumnField, + hardwareConfigColumns, +} from './HardwareConfigurationTableColumns'; + +type HardwareConfigurationTableRowProps = { + performanceArtifact: CatalogPerformanceMetricsArtifact; +}; + +const HardwareConfigurationTableRow: React.FC = ({ + performanceArtifact, +}) => { + const getCellValue = (field: HardwareConfigColumnField): string | number => { + const { customProperties } = performanceArtifact; + + switch (field) { + case 'hardware_type': + return getHardwareConfiguration(performanceArtifact); + case 'use_case': + return getWorkloadType(performanceArtifact); + case 'hardware_count': + return getIntValue(customProperties, 'hardware_count'); + case 'requests_per_second': + return getDoubleValue(customProperties, 'requests_per_second'); + case 'ttft_mean': + case 'ttft_p90': + case 'ttft_p95': + case 'ttft_p99': + case 'e2e_mean': + case 'e2e_p90': + case 'e2e_p95': + case 'e2e_p99': + case 'tps_mean': + case 'tps_p90': + case 'tps_p95': + case 'tps_p99': + case 'itl_mean': + case 'itl_p90': + case 'itl_p95': + case 'itl_p99': + return formatLatency(getDoubleValue(customProperties, field)); + case 'mean_input_tokens': + case 'mean_output_tokens': + return formatTokenValue(getDoubleValue(customProperties, field)); + case 'framework_version': + return getStringValue(customProperties, field); + default: + return '-'; + } + }; + + // TODO sticky isn't quite working with both columns and the scroll container is weird. 
double check PF docs + + return ( + + {hardwareConfigColumns.map((column) => ( + + ))} + + ); +}; + +export default HardwareConfigurationTableRow; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/ModelCatalogActiveFilters.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/ModelCatalogActiveFilters.tsx new file mode 100644 index 0000000000..7a5a87c010 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/ModelCatalogActiveFilters.tsx @@ -0,0 +1,120 @@ +import React from 'react'; +import { ToolbarFilter, ToolbarLabelGroup, ToolbarLabel } from '@patternfly/react-core'; +import { isEnumMember } from 'mod-arch-core'; +import { ModelCatalogContext } from '~/app/context/modelCatalog/ModelCatalogContext'; +import { + ModelCatalogStringFilterKey, + MODEL_CATALOG_PROVIDER_NAME_MAPPING, + MODEL_CATALOG_LICENSE_NAME_MAPPING, + MODEL_CATALOG_TASK_NAME_MAPPING, + AllLanguageCodesMap, + MODEL_CATALOG_FILTER_CATEGORY_NAMES, + ModelCatalogProvider, + ModelCatalogLicense, + ModelCatalogTask, + AllLanguageCode, +} from '~/concepts/modelCatalog/const'; +import { ModelCatalogFilterKey } from '~/app/modelCatalogTypes'; + +type ModelCatalogActiveFiltersProps = { + filtersToShow: ModelCatalogFilterKey[]; +}; + +const ModelCatalogActiveFilters: React.FC = ({ filtersToShow }) => { + const { filterData, setFilterData } = React.useContext(ModelCatalogContext); + + const handleRemoveFilter = (categoryKey: string, labelKey: string) => { + if (isEnumMember(categoryKey, ModelCatalogStringFilterKey)) { + const currentValues = filterData[categoryKey]; + if (Array.isArray(currentValues)) { + const newValues = currentValues.filter((v) => String(v) !== String(labelKey)); + setFilterData(categoryKey, newValues); + } + } + }; + + const handleClearCategory = (categoryKey: string) => { + if (isEnumMember(categoryKey, ModelCatalogStringFilterKey)) { + setFilterData(categoryKey, []); + } + }; + + const getFilterLabel = (filterKey: 
ModelCatalogStringFilterKey, value: string): string => { + switch (filterKey) { + case ModelCatalogStringFilterKey.PROVIDER: { + return isEnumMember(value, ModelCatalogProvider) + ? MODEL_CATALOG_PROVIDER_NAME_MAPPING[value] + : value; + } + case ModelCatalogStringFilterKey.LICENSE: { + return isEnumMember(value, ModelCatalogLicense) + ? MODEL_CATALOG_LICENSE_NAME_MAPPING[value] + : value; + } + case ModelCatalogStringFilterKey.TASK: { + return isEnumMember(value, ModelCatalogTask) + ? MODEL_CATALOG_TASK_NAME_MAPPING[value] + : value; + } + case ModelCatalogStringFilterKey.LANGUAGE: { + return isEnumMember(value, AllLanguageCode) ? AllLanguageCodesMap[value] : value; + } + default: + return value; + } + }; + + return ( + <> + {filtersToShow.map((filterKey) => { + // Only process string filter keys that are arrays + if (!isEnumMember(filterKey, ModelCatalogStringFilterKey)) { + return null; + } + + const filterValues = filterData[filterKey]; + if (!Array.isArray(filterValues) || filterValues.length === 0) { + return null; + } + + const categoryName = MODEL_CATALOG_FILTER_CATEGORY_NAMES[filterKey]; + const labels: ToolbarLabel[] = filterValues.map((value) => { + const valueStr = String(value); + const labelText = getFilterLabel(filterKey, valueStr); + return { + key: valueStr, + node: {labelText}, + }; + }); + + const categoryLabelGroup: ToolbarLabelGroup = { + key: filterKey, + name: categoryName, + }; + + return ( + { + const categoryKey = typeof category === 'string' ? category : category.key; + const labelKey = typeof label === 'string' ? label : label.key; + handleRemoveFilter(categoryKey, labelKey); + }} + deleteLabelGroup={(category) => { + const categoryKey = typeof category === 'string' ? 
category : category.key; + handleClearCategory(categoryKey); + }} + data-testid={`${filterKey}-filter-container`} + > + {/* ToolbarFilter requires children but we only render labels, not filter controls */} + {null} + + ); + })} + + ); +}; + +export default ModelCatalogActiveFilters; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/ModelCatalogCard.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/ModelCatalogCard.tsx index f938937ff0..8374e18f0d 100644 --- a/clients/ui/frontend/src/app/pages/modelCatalog/components/ModelCatalogCard.tsx +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/ModelCatalogCard.tsx @@ -10,15 +10,15 @@ import { FlexItem, Label, Skeleton, - Stack, - StackItem, Truncate, } from '@patternfly/react-core'; -import { useNavigate } from 'react-router-dom'; +import { Link } from 'react-router-dom'; import { CatalogModel, CatalogSource } from '~/app/modelCatalogTypes'; -import { getModelName } from '~/app/pages/modelCatalog/utils/modelCatalogUtils'; import { catalogModelDetailsFromModel } from '~/app/routes/modelCatalog/catalogModel'; +import { getLabels } from '~/app/pages/modelRegistry/screens/utils'; +import { isModelValidated, getModelName } from '~/app/pages/modelCatalog/utils/modelCatalogUtils'; import ModelCatalogLabels from './ModelCatalogLabels'; +import ModelCatalogCardBody from './ModelCatalogCardBody'; type ModelCatalogCardProps = { model: CatalogModel; @@ -27,13 +27,16 @@ type ModelCatalogCardProps = { }; const ModelCatalogCard: React.FC = ({ model, source, truncate = false }) => { - const navigate = useNavigate(); + // Extract labels from customProperties and check for validated label + const allLabels = model.customProperties ? getLabels(model.customProperties) : []; + const validatedLabels = allLabels.includes('validated') ? ['validated'] : []; + const isValidated = isModelValidated(model); return ( - + {model.logo ? 
( model logo ) : ( @@ -45,22 +48,19 @@ const ModelCatalogCard: React.FC = ({ model, source, trun /> )} - {source && } + {isValidated ? ( + + ) : ( + source && + )} - - - - - + - - - {truncate ? ( -
- {model.description} -
- ) : ( - model.description - )} -
-
+ + + + +
diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/ModelCatalogCardBody.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/ModelCatalogCardBody.tsx new file mode 100644 index 0000000000..8dbdd7d9c8 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/ModelCatalogCardBody.tsx @@ -0,0 +1,219 @@ +import React, { useState } from 'react'; +import { + Alert, + Button, + Content, + ContentVariants, + Flex, + List, + ListItem, + Popover, + Spinner, + Stack, + StackItem, +} from '@patternfly/react-core'; +import { Link } from 'react-router-dom'; +import { HelpIcon, AngleLeftIcon, AngleRightIcon } from '@patternfly/react-icons'; +import { + CatalogModel, + CatalogSource, + CatalogArtifactType, + MetricsType, + CatalogPerformanceMetricsArtifact, + CatalogAccuracyMetricsArtifact, +} from '~/app/modelCatalogTypes'; +import { extractValidatedModelMetrics } from '~/app/pages/modelCatalog/utils/validatedModelUtils'; +import { catalogModelDetailsTabFromModel } from '~/app/routes/modelCatalog/catalogModel'; +import { ModelDetailsTab } from '~/app/pages/modelCatalog/screens/ModelDetailsTabs'; +import { useCatalogModelArtifacts } from '~/app/hooks/modelCatalog/useCatalogModelArtifacts'; +import { filterArtifactsByType } from '~/app/pages/modelCatalog/utils/modelCatalogUtils'; +import { formatLatency } from '~/app/pages/modelCatalog/utils/performanceMetricsUtils'; + +type ModelCatalogCardBodyProps = { + model: CatalogModel; + isValidated: boolean; + source: CatalogSource | undefined; +}; + +const ModelCatalogCardBody: React.FC = ({ + model, + isValidated, + source, +}) => { + const [currentPerformanceIndex, setCurrentPerformanceIndex] = useState(0); + + const handlePreviousBenchmark = () => { + setCurrentPerformanceIndex((prev) => (prev > 0 ? prev - 1 : performanceMetrics.length - 1)); + }; + + const handleNextBenchmark = () => { + setCurrentPerformanceIndex((prev) => (prev < performanceMetrics.length - 1 ? 
prev + 1 : 0)); + }; + + const [artifacts, artifactsLoaded, artifactsLoadError] = useCatalogModelArtifacts( + source?.id || '', + model.name, + isValidated, + true, + ); + + const performanceMetrics = filterArtifactsByType( + artifacts.items, + CatalogArtifactType.metricsArtifact, + MetricsType.performanceMetrics, + ); + + const accuracyMetrics = filterArtifactsByType( + artifacts.items, + CatalogArtifactType.metricsArtifact, + MetricsType.accuracyMetrics, + ); + + if (!artifactsLoaded && isValidated) { + return ; + } + + if (artifactsLoadError && isValidated) { + return ( + + {artifactsLoadError.message} + + ); + } + + if (isValidated && performanceMetrics.length > 0 && accuracyMetrics.length > 0) { + const metrics = extractValidatedModelMetrics( + performanceMetrics, + accuracyMetrics, + currentPerformanceIndex, + ); + + return ( + + + + + + {metrics.hardwareCount}x{metrics.hardwareType} + + Hardware + + + + {metrics.rpsPerReplica} + + RPS/rep. + + + + {formatLatency(metrics.ttftMean)} + + + + TTFT + + +

+ The delay (in milliseconds) between sending a request and receiving the + first response. +

+ + + TTFT (Time to First Token) - The time between when a + request is sent to a model and when the model begins streaming its first + token in the response. + + + ITL (Inter-Token Latency) - The average time between + successive output tokens after the model has started generating. + + + E2E (End-to-End latency) - The total time from when the + request is sent until the last token is received. + + + + } + > + + + + + + )} + {showMore && valuesMatchingSearch.length > MAX_VISIBLE_FILTERS && ( + + )} + + ); +}; + +export default ModelCatalogStringFilter; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/HardwareTypeFilter.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/HardwareTypeFilter.tsx new file mode 100644 index 0000000000..ee775be28b --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/HardwareTypeFilter.tsx @@ -0,0 +1,103 @@ +import * as React from 'react'; +import { + Badge, + Checkbox, + Dropdown, + Flex, + FlexItem, + MenuToggle, + MenuToggleElement, + Panel, + PanelMain, +} from '@patternfly/react-core'; +import { CatalogPerformanceMetricsArtifact } from '~/app/modelCatalogTypes'; +import { getUniqueHardwareTypes } from '~/app/pages/modelCatalog/utils/hardwareConfigurationFilterUtils'; +import { useHardwareTypeFilterState } from '~/app/pages/modelCatalog/utils/hardwareTypeFilterState'; + +type HardwareTypeFilterProps = { + performanceArtifacts: CatalogPerformanceMetricsArtifact[]; +}; + +type HardwareTypeOption = { + value: string; + label: string; +}; + +const HardwareTypeFilter: React.FC = ({ performanceArtifacts }) => { + const { appliedHardwareTypes, setAppliedHardwareTypes } = useHardwareTypeFilterState(); + const [isOpen, setIsOpen] = React.useState(false); + + // Get unique hardware types from actual performance artifacts + const hardwareOptions: HardwareTypeOption[] = React.useMemo(() => { + const uniqueTypes = 
getUniqueHardwareTypes(performanceArtifacts); + return uniqueTypes.map((type) => ({ + value: type, + label: type, + })); + }, [performanceArtifacts]); + + const selectedCount = appliedHardwareTypes.length; + + const isHardwareSelected = (value: string): boolean => appliedHardwareTypes.includes(value); + + const toggleHardwareSelection = (value: string, selected: boolean) => { + if (selected) { + setAppliedHardwareTypes([...appliedHardwareTypes, value]); + } else { + setAppliedHardwareTypes(appliedHardwareTypes.filter((item) => item !== value)); + } + }; + + const toggle = (toggleRef: React.Ref) => ( + setIsOpen(!isOpen)} + isExpanded={isOpen} + style={{ minWidth: '200px', width: 'fit-content' }} + badge={selectedCount > 0 ? {selectedCount} selected : undefined} + > + Hardware type + + ); + + const filterContent = ( + + + + {/* Hardware type checkboxes */} + + + {hardwareOptions.map((option) => ( + + + + toggleHardwareSelection(option.value, checked)} + /> + + + + ))} + + + + + + ); + + return ( + + {filterContent} + + ); +}; + +export default HardwareTypeFilter; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/LanguageFilter.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/LanguageFilter.tsx new file mode 100644 index 0000000000..660940e328 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/LanguageFilter.tsx @@ -0,0 +1,43 @@ +import * as React from 'react'; +import { StackItem } from '@patternfly/react-core'; +import ModelCatalogStringFilter from '~/app/pages/modelCatalog/components/ModelCatalogStringFilter'; +import { + ModelCatalogStringFilterKey, + MODEL_CATALOG_ASIAN_LANGUAGES_DETAILS, + MODEL_CATALOG_EUROPEAN_LANGUAGES_DETAILS, + MODEL_CATALOG_MIDDLE_EASTERN_AND_OTHER_LANGUAGES_DETAILS, +} from '~/concepts/modelCatalog/const'; +import { CatalogFilterOptions, ModelCatalogStringFilterOptions } from '~/app/modelCatalogTypes'; + +const filterKey = 
ModelCatalogStringFilterKey.LANGUAGE; + +const LANGUAGE_NAME_MAPPING = { + ...MODEL_CATALOG_EUROPEAN_LANGUAGES_DETAILS, + ...MODEL_CATALOG_ASIAN_LANGUAGES_DETAILS, + ...MODEL_CATALOG_MIDDLE_EASTERN_AND_OTHER_LANGUAGES_DETAILS, +}; + +type LanguageFilterProps = { + filters?: Extract>; +}; + +const LanguageFilter: React.FC = ({ filters }) => { + const language = filters?.[filterKey]; + + if (!language) { + return null; + } + + return ( + + + title="Language" + filterKey={filterKey} + filterToNameMapping={LANGUAGE_NAME_MAPPING} + filters={language} + /> + + ); +}; + +export default LanguageFilter; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/LicenseFilter.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/LicenseFilter.tsx new file mode 100644 index 0000000000..224476f90c --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/LicenseFilter.tsx @@ -0,0 +1,38 @@ +import * as React from 'react'; +import { Divider, StackItem } from '@patternfly/react-core'; +import ModelCatalogStringFilter from '~/app/pages/modelCatalog/components/ModelCatalogStringFilter'; +import { + ModelCatalogStringFilterKey, + MODEL_CATALOG_LICENSE_NAME_MAPPING, +} from '~/concepts/modelCatalog/const'; +import { CatalogFilterOptions, ModelCatalogStringFilterOptions } from '~/app/modelCatalogTypes'; + +const filterKey = ModelCatalogStringFilterKey.LICENSE; + +type LicenseFilterProps = { + filters?: Extract>; +}; + +const LicenseFilter: React.FC = ({ filters }) => { + const license = filters?.[filterKey]; + + if (!license) { + return null; + } + + return ( + <> + + + title="License" + filterKey={filterKey} + filterToNameMapping={MODEL_CATALOG_LICENSE_NAME_MAPPING} + filters={license} + /> + + + + ); +}; + +export default LicenseFilter; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/MaxLatencyFilter.tsx 
b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/MaxLatencyFilter.tsx new file mode 100644 index 0000000000..c6f982bcb6 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/MaxLatencyFilter.tsx @@ -0,0 +1,345 @@ +import * as React from 'react'; +import { + Button, + Dropdown, + Flex, + FlexItem, + FormGroup, + MenuToggle, + MenuToggleElement, + Popover, + Select, + SelectList, + SelectOption, +} from '@patternfly/react-core'; +import { HelpIcon } from '@patternfly/react-icons'; +import { LatencyMetric, LatencyPercentile } from '~/concepts/modelCatalog/const'; +import { CatalogPerformanceMetricsArtifact } from '~/app/modelCatalogTypes'; +import { getDoubleValue } from '~/app/utils'; +import { getLatencyFieldName } from '~/app/pages/modelCatalog/utils/hardwareConfigurationFilterUtils'; +import { ModelCatalogContext } from '~/app/context/modelCatalog/ModelCatalogContext'; +import { + getSliderRange, + FALLBACK_LATENCY_RANGE, + SliderRange, +} from '~/app/pages/modelCatalog/utils/performanceMetricsUtils'; +import SliderWithInput from './SliderWithInput'; + +type LatencyFilterState = { + metric: LatencyMetric; + percentile: LatencyPercentile; + value: number; +}; + +type MaxLatencyFilterProps = { + performanceArtifacts: CatalogPerformanceMetricsArtifact[]; +}; + +const METRIC_OPTIONS: { value: LatencyMetric; label: LatencyMetric }[] = Object.values( + LatencyMetric, +).map((metric) => ({ value: metric, label: metric })); + +const PERCENTILE_OPTIONS: { value: LatencyPercentile; label: LatencyPercentile }[] = Object.values( + LatencyPercentile, +).map((percentile) => ({ value: percentile, label: percentile })); + +const MaxLatencyFilter: React.FC = ({ performanceArtifacts }) => { + const { filterData, setFilterData } = React.useContext(ModelCatalogContext); + const [isOpen, setIsOpen] = React.useState(false); + const [isMetricOpen, setIsMetricOpen] = React.useState(false); + const [isPercentileOpen, 
setIsPercentileOpen] = React.useState(false); + + // Show all available metrics - in production this could be filtered based on backend data + const availableMetrics = React.useMemo(() => METRIC_OPTIONS, []); + + // Show all available percentiles - in production this could be filtered based on backend data + const getAvailablePercentiles = React.useCallback(() => PERCENTILE_OPTIONS, []); + + // Find the currently active latency filter (if any) + const currentActiveFilter = React.useMemo(() => { + for (const metric of Object.values(LatencyMetric)) { + for (const percentile of Object.values(LatencyPercentile)) { + const fieldName = getLatencyFieldName(metric, percentile); + const value = filterData[fieldName]; + if (value !== undefined && typeof value === 'number') { + return { fieldName, metric, percentile, value }; + } + } + } + return null; + }, [filterData]); + + const defaultFilterState = React.useMemo( + () => + // Default to TTFT P90 as the most common use case + ({ + metric: LatencyMetric.TTFT, + percentile: LatencyPercentile.P90, + value: 30, // Reasonable default within typical TTFT range + }), + [], + ); + + // Working state while editing the filter + const [localFilter, setLocalFilter] = React.useState(() => { + if (currentActiveFilter) { + return { + metric: currentActiveFilter.metric, + percentile: currentActiveFilter.percentile, + value: currentActiveFilter.value, + }; + } + return defaultFilterState; + }); + + // Update local filter when active filter changes + React.useEffect(() => { + if (currentActiveFilter) { + setLocalFilter({ + metric: currentActiveFilter.metric, + percentile: currentActiveFilter.percentile, + value: currentActiveFilter.value, + }); + } + }, [currentActiveFilter]); + + const { minValue, maxValue, isSliderDisabled } = React.useMemo((): SliderRange => { + const fieldName = getLatencyFieldName(localFilter.metric, localFilter.percentile); + + return getSliderRange({ + performanceArtifacts, + getArtifactFilterValue: (artifact) => 
getDoubleValue(artifact.customProperties, fieldName), + fallbackRange: FALLBACK_LATENCY_RANGE, + shouldRound: true, + }); + }, [performanceArtifacts, localFilter.metric, localFilter.percentile]); + + const clampedValue = React.useMemo( + () => Math.min(Math.max(localFilter.value, minValue), maxValue), + [localFilter.value, minValue, maxValue], + ); + const getDisplayText = (): React.ReactNode => { + if (currentActiveFilter) { + // When there's an active filter, show the full specification with actual selected values + return ( + <> + Max latency: {currentActiveFilter.metric} |{' '} + {currentActiveFilter.percentile} | {currentActiveFilter.value}ms + + ); + } + return 'Max latency'; + }; + + const handleApplyFilter = () => { + // Clear any existing latency filter + if (currentActiveFilter) { + setFilterData(currentActiveFilter.fieldName, undefined); + } + + // Set the new latency filter using the dynamic field name + const newFieldName = getLatencyFieldName(localFilter.metric, localFilter.percentile); + setFilterData(newFieldName, localFilter.value); + setIsOpen(false); + }; + + const handleReset = () => { + // Clear any existing latency filter + if (currentActiveFilter) { + setFilterData(currentActiveFilter.fieldName, undefined); + } + + // Reset local filter to default + setLocalFilter(defaultFilterState); + setIsOpen(false); + }; + + const toggle = (toggleRef: React.Ref) => ( + setIsOpen(!isOpen)} + isExpanded={isOpen} + style={{ minWidth: '200px', width: 'fit-content' }} + > + {getDisplayText()} + + ); + + const filterContent = ( + + + + Max latency + + document.body} + > + + + + + + + + + ); + + return ( + + {filterContent} + + ); +}; + +export default MaxLatencyFilter; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/MinRpsFilter.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/MinRpsFilter.tsx new file mode 100644 index 0000000000..d6f9b6091b --- /dev/null +++ 
b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/MinRpsFilter.tsx @@ -0,0 +1,161 @@ +import * as React from 'react'; +import { + Button, + Dropdown, + Flex, + FlexItem, + MenuToggle, + MenuToggleElement, + Popover, +} from '@patternfly/react-core'; +import { HelpIcon } from '@patternfly/react-icons'; +import { ModelCatalogNumberFilterKey } from '~/concepts/modelCatalog/const'; +import { useCatalogNumberFilterState } from '~/app/pages/modelCatalog/utils/modelCatalogUtils'; +import { CatalogPerformanceMetricsArtifact } from '~/app/modelCatalogTypes'; +import { getDoubleValue } from '~/app/utils'; +import { + getSliderRange, + FALLBACK_RPS_RANGE, + SliderRange, +} from '~/app/pages/modelCatalog/utils/performanceMetricsUtils'; +import SliderWithInput from './SliderWithInput'; + +const filterKey = ModelCatalogNumberFilterKey.MIN_RPS; + +type MinRpsFilterProps = { + performanceArtifacts: CatalogPerformanceMetricsArtifact[]; +}; + +const MinRpsFilter: React.FC = ({ performanceArtifacts }) => { + const { value: rpsFilterValue, setValue: setRpsFilterValue } = + useCatalogNumberFilterState(filterKey); + const [isOpen, setIsOpen] = React.useState(false); + + const { minValue, maxValue, isSliderDisabled } = React.useMemo( + (): SliderRange => + getSliderRange({ + performanceArtifacts, + getArtifactFilterValue: (artifact) => + getDoubleValue(artifact.customProperties, 'requests_per_second'), + fallbackRange: FALLBACK_RPS_RANGE, + }), + [performanceArtifacts], + ); + + const [localValue, setLocalValue] = React.useState( + () => rpsFilterValue ?? FALLBACK_RPS_RANGE.minValue, + ); + + const clampedValue = React.useMemo( + () => Math.min(Math.max(localValue, minValue), maxValue), + [localValue, minValue, maxValue], + ); + + React.useEffect(() => { + if (isOpen) { + setLocalValue(rpsFilterValue ?? 
minValue); + } + }, [isOpen, rpsFilterValue, minValue]); + + const hasActiveFilter = rpsFilterValue !== undefined; + + const getDisplayText = (): React.ReactNode => { + if (hasActiveFilter) { + return ( + <> + Min RPS: {rpsFilterValue} + + ); + } + return 'Min RPS'; + }; + + const handleApplyFilter = () => { + setRpsFilterValue(localValue); + setIsOpen(false); + }; + + const handleReset = () => { + setRpsFilterValue(undefined); + setLocalValue(minValue); + setIsOpen(false); + }; + + const toggle = (toggleRef: React.Ref) => ( + setIsOpen(!isOpen)} + isExpanded={isOpen} + style={{ minWidth: '200px', width: 'fit-content' }} + > + {getDisplayText()} + + ); + + const filterContent = ( + + + + Min requests per second (RPS) + + document.body} + > + + + + + + + + + ); + + return ( + + {filterContent} + + ); +}; + +export default MinRpsFilter; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/ProviderFilter.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/ProviderFilter.tsx new file mode 100644 index 0000000000..b114f44377 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/ProviderFilter.tsx @@ -0,0 +1,38 @@ +import * as React from 'react'; +import { Divider, StackItem } from '@patternfly/react-core'; +import ModelCatalogStringFilter from '~/app/pages/modelCatalog/components/ModelCatalogStringFilter'; +import { + ModelCatalogStringFilterKey, + MODEL_CATALOG_PROVIDER_NAME_MAPPING, +} from '~/concepts/modelCatalog/const'; +import { CatalogFilterOptions, ModelCatalogStringFilterOptions } from '~/app/modelCatalogTypes'; + +const filterKey = ModelCatalogStringFilterKey.PROVIDER; + +type ProviderFilterProps = { + filters?: Extract>; +}; + +const ProviderFilter: React.FC = ({ filters }) => { + const provider = filters?.[filterKey]; + + if (!provider) { + return null; + } + + return ( + <> + + + title="Provider" + filterKey={filterKey} + 
filterToNameMapping={MODEL_CATALOG_PROVIDER_NAME_MAPPING} + filters={provider} + /> + + + + ); +}; + +export default ProviderFilter; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/SliderWithInput.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/SliderWithInput.tsx new file mode 100644 index 0000000000..bfec241623 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/SliderWithInput.tsx @@ -0,0 +1,90 @@ +import * as React from 'react'; +import { Slider, SliderOnChangeEvent } from '@patternfly/react-core'; + +type SliderWithInputProps = { + value: number; + min: number; + max: number; + isDisabled: boolean; + onChange: (value: number) => void; + suffix: string; + ariaLabel: string; + shouldRound?: boolean; + showBoundaries?: boolean; + hasTooltipOverThumb?: boolean; +}; + +const SliderWithInput: React.FC = ({ + value, + min, + max, + isDisabled, + onChange, + suffix, + ariaLabel, + shouldRound = false, + showBoundaries = false, + hasTooltipOverThumb = false, +}) => { + const roundValue = React.useCallback( + (val: number) => (shouldRound ? 
Math.round(val) : val), + [shouldRound], + ); + + // Maintain separate state for value and inputValue, following PatternFly's example pattern + const [localValue, setLocalValue] = React.useState(value); + const [localInputValue, setLocalInputValue] = React.useState(value); + + // Sync local state when prop value changes (from parent) + React.useEffect(() => { + setLocalValue(value); + setLocalInputValue(value); + }, [value]); + + const handleChange = ( + _event: SliderOnChangeEvent, + sliderValue: number, + inputValueArg?: number, + setPFInputValue?: React.Dispatch>, + ) => { + let newValue: number; + + if (inputValueArg === undefined) { + newValue = roundValue(sliderValue); + setLocalValue(newValue); + setLocalInputValue(newValue); + } else { + if (inputValueArg > max) { + newValue = max; + setPFInputValue?.(max); + } else if (inputValueArg < min) { + newValue = min; + setPFInputValue?.(min); + } else { + newValue = roundValue(inputValueArg); + } + setLocalValue(newValue); + setLocalInputValue(newValue); + } + + onChange(newValue); + }; + + return ( + + ); +}; + +export default SliderWithInput; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/TaskFilter.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/TaskFilter.tsx new file mode 100644 index 0000000000..526f61d41c --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/TaskFilter.tsx @@ -0,0 +1,37 @@ +import * as React from 'react'; +import { Divider, StackItem } from '@patternfly/react-core'; +import ModelCatalogStringFilter from '~/app/pages/modelCatalog/components/ModelCatalogStringFilter'; +import { + ModelCatalogStringFilterKey, + MODEL_CATALOG_TASK_NAME_MAPPING, +} from '~/concepts/modelCatalog/const'; +import { CatalogFilterOptions, ModelCatalogStringFilterOptions } from '~/app/modelCatalogTypes'; + +const filterKey = ModelCatalogStringFilterKey.TASK; + +type TaskFilterProps = { + filters?: Extract>; +}; + 
+const TaskFilter: React.FC = ({ filters }) => { + const task = filters?.[filterKey]; + if (!task) { + return null; + } + + return ( + <> + + + title="Task" + filterKey={filterKey} + filterToNameMapping={MODEL_CATALOG_TASK_NAME_MAPPING} + filters={task} + /> + + + + ); +}; + +export default TaskFilter; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/WorkloadTypeFilter.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/WorkloadTypeFilter.tsx new file mode 100644 index 0000000000..22493e910f --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/components/globalFilters/WorkloadTypeFilter.tsx @@ -0,0 +1,92 @@ +import * as React from 'react'; +import { + Badge, + Checkbox, + Dropdown, + Flex, + FlexItem, + MenuToggle, + MenuToggleElement, + Panel, + PanelMain, +} from '@patternfly/react-core'; +import { ModelCatalogStringFilterKey, UseCaseOptionValue } from '~/concepts/modelCatalog/const'; +import { USE_CASE_OPTIONS } from '~/app/pages/modelCatalog/utils/workloadTypeUtils'; +import { ModelCatalogContext } from '~/app/context/modelCatalog/ModelCatalogContext'; + +const WorkloadTypeFilter: React.FC = () => { + const { filterData, setFilterData } = React.useContext(ModelCatalogContext); + const selectedUseCases = filterData[ModelCatalogStringFilterKey.USE_CASE]; + const [isOpen, setIsOpen] = React.useState(false); + + const selectedCount = selectedUseCases.length; + + const isUseCaseSelected = (value: UseCaseOptionValue): boolean => + selectedUseCases.includes(value); + + const toggleUseCaseSelection = (value: UseCaseOptionValue, selected: boolean) => { + if (selected) { + setFilterData(ModelCatalogStringFilterKey.USE_CASE, [...selectedUseCases, value]); + } else { + setFilterData( + ModelCatalogStringFilterKey.USE_CASE, + selectedUseCases.filter((item) => item !== value), + ); + } + }; + + const toggle = (toggleRef: React.Ref) => ( + setIsOpen(!isOpen)} + isExpanded={isOpen} + style={{ minWidth: 
'200px', width: 'fit-content' }} + badge={selectedCount > 0 ? {selectedCount} selected : undefined} + > + Workload type + + ); + + const filterContent = ( + + + + {/* Workload type checkboxes */} + + + {USE_CASE_OPTIONS.map((option) => ( + + + + toggleUseCaseSelection(option.value, checked)} + /> + + + + ))} + + + + + + ); + + return ( + + {filterContent} + + ); +}; + +export default WorkloadTypeFilter; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/mocks/accuracyMetricsMock.ts b/clients/ui/frontend/src/app/pages/modelCatalog/mocks/accuracyMetricsMock.ts new file mode 100644 index 0000000000..0cfdad41ec --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/mocks/accuracyMetricsMock.ts @@ -0,0 +1,39 @@ +/* eslint-disable camelcase */ +import { + CatalogAccuracyMetricsArtifact, + CatalogArtifactType, + MetricsType, +} from '~/app/modelCatalogTypes'; +import { ModelRegistryMetadataType } from '~/app/types'; + +const MOCK_TIMESTAMP = '1739210683000'; + +const createAccuracyMetricsArtifact = ( + id: string, + // overallAverage: number, // NOTE: overall_average is currently omitted from the API and will be restored + arcV1: number, +): CatalogAccuracyMetricsArtifact => ({ + artifactType: CatalogArtifactType.metricsArtifact, + metricsType: MetricsType.accuracyMetrics, + createTimeSinceEpoch: MOCK_TIMESTAMP, + lastUpdateTimeSinceEpoch: MOCK_TIMESTAMP, + customProperties: { + // overall_average: { // NOTE: overall_average is currently omitted from the API and will be restored + // metadataType: ModelRegistryMetadataType.DOUBLE, + // double_value: overallAverage, + // }, + arc_v1: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: arcV1, + }, + }, +}); + +export const mockAccuracyMetricsArtifacts: CatalogAccuracyMetricsArtifact[] = [ + createAccuracyMetricsArtifact('1', /* 53.9, */ 45.2), + createAccuracyMetricsArtifact('2', /* 67.3, */ 58.1), + createAccuracyMetricsArtifact('3', /* 42.1, */ 38.7), + 
createAccuracyMetricsArtifact('4', /* 78.5, */ 72.3), + createAccuracyMetricsArtifact('5', /* 61.2, */ 55.8), + createAccuracyMetricsArtifact('6', /* 49.7, */ 43.9), +]; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/mocks/hardwareConfigurationMock.ts b/clients/ui/frontend/src/app/pages/modelCatalog/mocks/hardwareConfigurationMock.ts new file mode 100644 index 0000000000..0fda0a1433 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/mocks/hardwareConfigurationMock.ts @@ -0,0 +1,300 @@ +/* eslint-disable camelcase */ +import { + CatalogPerformanceMetricsArtifact, + CatalogArtifactType, + MetricsType, +} from '~/app/modelCatalogTypes'; +import { ModelRegistryMetadataType } from '~/app/types'; + +const createPerformanceMetricsArtifact = ( + id: string, + hardwareType: string, + hardwareCount: number, + rpsPerReplica: number, + ttftMean: number, + ttftP90: number, + ttftP95: number, + ttftP99: number, + e2eMean: number, + e2eP90: number, + e2eP95: number, + e2eP99: number, + tpsMean: number, + tpsP90: number, + tpsP95: number, + tpsP99: number, + itlMean: number, + itlP90: number, + itlP95: number, + itlP99: number, + meanInputTokens: number, + meanOutputTokens: number, + frameworkVersion: string, + useCase: string, +): CatalogPerformanceMetricsArtifact => ({ + artifactType: CatalogArtifactType.metricsArtifact, + metricsType: MetricsType.performanceMetrics, + createTimeSinceEpoch: '1739210683000', + lastUpdateTimeSinceEpoch: '1739210683000', + customProperties: { + config_id: { + metadataType: ModelRegistryMetadataType.STRING, + string_value: id, + }, + hardware_type: { + metadataType: ModelRegistryMetadataType.STRING, + string_value: hardwareType, + }, + hardware_count: { + metadataType: ModelRegistryMetadataType.INT, + int_value: hardwareCount.toString(), + }, + requests_per_second: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: rpsPerReplica, + }, + ttft_mean: { + metadataType: ModelRegistryMetadataType.DOUBLE, + 
double_value: ttftMean, + }, + ttft_p90: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: ttftP90, + }, + ttft_p95: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: ttftP95, + }, + ttft_p99: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: ttftP99, + }, + e2e_mean: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: e2eMean, + }, + e2e_p90: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: e2eP90, + }, + e2e_p95: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: e2eP95, + }, + e2e_p99: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: e2eP99, + }, + tps_mean: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: tpsMean, + }, + tps_p90: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: tpsP90, + }, + tps_p95: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: tpsP95, + }, + tps_p99: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: tpsP99, + }, + itl_mean: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: itlMean, + }, + itl_p90: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: itlP90, + }, + itl_p95: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: itlP95, + }, + itl_p99: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: itlP99, + }, + mean_input_tokens: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: meanInputTokens, + }, + mean_output_tokens: { + metadataType: ModelRegistryMetadataType.DOUBLE, + double_value: meanOutputTokens, + }, + framework: { + metadataType: ModelRegistryMetadataType.STRING, + string_value: 'vllm', + }, + framework_version: { + metadataType: ModelRegistryMetadataType.STRING, + string_value: frameworkVersion, + }, + use_case: { + metadataType: ModelRegistryMetadataType.STRING, + string_value: useCase, + }, + }, +}); + +export const 
mockPerformanceMetricsArtifacts: CatalogPerformanceMetricsArtifact[] = [ + createPerformanceMetricsArtifact( + '1', + 'A100-80', + 1, + 2, + 24, + 25, + 27, + 28, + 1240, + 1242, + 1248, + 1249, + 124, + 24, + 25, + 27, + 24, + 24, + 24, + 24, + 512, + 256, + 'v0.8.4', + 'chatbot', + ), + createPerformanceMetricsArtifact( + '2', + 'H100', + 1, + 3, + 18, + 19, + 21, + 22, + 1180, + 1185, + 1190, + 1195, + 98, + 18, + 19, + 21, + 18, + 18, + 18, + 18, + 1024, + 512, + 'v0.9.1', + 'rag', + ), + createPerformanceMetricsArtifact( + '3', + 'A100-40', + 2, + 4, + 32, + 35, + 38, + 42, + 1320, + 1330, + 1340, + 1350, + 156, + 32, + 35, + 38, + 32, + 32, + 32, + 32, + 2048, + 1024, + 'v0.7.2', + 'long_rag', + ), + createPerformanceMetricsArtifact( + '4', + 'H200', + 2, + 5, + 15, + 16, + 18, + 20, + 1100, + 1110, + 1120, + 1130, + 88, + 15, + 16, + 18, + 15, + 15, + 15, + 15, + 4096, + 2048, + 'v0.9.5', + 'code_fixing', + ), + createPerformanceMetricsArtifact( + '5', + 'A100-80', + 2, + 3, + 28, + 30, + 33, + 36, + 1280, + 1290, + 1300, + 1310, + 142, + 28, + 30, + 33, + 28, + 28, + 28, + 28, + 1536, + 768, + 'v0.8.7', + 'rag', + ), + createPerformanceMetricsArtifact( + '6', + 'A100-40', + 4, + 6, + 12, + 13, + 14, + 16, + 1050, + 1060, + 1070, + 1080, + 75, + 12, + 13, + 14, + 12, + 12, + 12, + 12, + 3072, + 1536, + 'v0.6.9', + 'chatbot', + ), +]; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/screens/CatalogCategorySection.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/screens/CatalogCategorySection.tsx new file mode 100644 index 0000000000..2dc7355354 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/screens/CatalogCategorySection.tsx @@ -0,0 +1,116 @@ +import { + Alert, + Button, + Flex, + FlexItem, + Gallery, + Skeleton, + StackItem, + Title, +} from '@patternfly/react-core'; +import React from 'react'; +import { ArrowRightIcon, SearchIcon } from '@patternfly/react-icons'; +import { CatalogSourceList } from 
'~/app/modelCatalogTypes'; +import { useCatalogModelsBySources } from '~/app/hooks/modelCatalog/useCatalogModelsBySource'; +import EmptyModelCatalogState from '~/app/pages/modelCatalog/EmptyModelCatalogState'; +import { getSourceFromSourceId } from '~/app/pages/modelCatalog/utils/modelCatalogUtils'; +import ModelCatalogCard from '~/app/pages/modelCatalog/components/ModelCatalogCard'; + +type CategorySectionProps = { + label: string; + searchTerm: string; + pageSize: number; + catalogSources: CatalogSourceList | null; + onShowMore: (label: string) => void; + displayName?: string; +}; + +const CatalogCategorySection: React.FC = ({ + label, + searchTerm, + pageSize, + catalogSources, + onShowMore, + displayName, +}) => { + const { catalogModels, catalogModelsLoaded, catalogModelsLoadError } = useCatalogModelsBySources( + undefined, + label, + pageSize, + searchTerm, + ); + + return ( + <> + + + + + {`${displayName ?? label} models`} + + + + {catalogModels.items.length >= 4 && ( + + + + )} + + + {catalogModelsLoadError ? ( + + {catalogModelsLoadError.message} + + ) : !catalogModelsLoaded ? ( + + {Array.from({ length: 4 }).map((_, index) => ( + + ))} + + ) : catalogModels.items.length === 0 ? 
( + + ) : ( + + {catalogModels.items.slice(0, pageSize).map((model) => ( + + ))} + + )} + + + ); +}; +export default CatalogCategorySection; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/screens/InvalidCatalogSource.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/screens/InvalidCatalogSource.tsx deleted file mode 100644 index 29040c2574..0000000000 --- a/clients/ui/frontend/src/app/pages/modelCatalog/screens/InvalidCatalogSource.tsx +++ /dev/null @@ -1,23 +0,0 @@ -import { EmptyStateErrorMessage } from 'mod-arch-shared'; -import React from 'react'; -import { modelCatalogUrl } from '~/app/routes/modelCatalog/catalogModel'; -import ModelCatalogSourceSelectorNavigator from './ModelCatalogSourceSelectorNavigator'; - -type InvalidCatalogSourceProps = { - title?: string; - sourceId?: string; -}; - -const InvalidCatalogSource: React.FC = ({ title, sourceId }) => ( - - modelCatalogUrl(id)} - isPrimary - /> - -); - -export default InvalidCatalogSource; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalog.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalog.tsx index a83efac4cd..4dc4037583 100644 --- a/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalog.tsx +++ b/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalog.tsx @@ -1,13 +1,25 @@ import * as React from 'react'; -import { PageSection } from '@patternfly/react-core'; +import { PageSection, Sidebar, SidebarContent, SidebarPanel } from '@patternfly/react-core'; import { ApplicationsPage, ProjectObjectType, TitleWithIcon } from 'mod-arch-shared'; import ScrollViewOnMount from '~/app/shared/components/ScrollViewOnMount'; -import { modelCatalogUrl } from '~/app/routes/modelCatalog/catalogModel'; -import ModelCatalogPage from './ModelCatalogPage'; -import ModelCatalogSourceSelectorNavigator from './ModelCatalogSourceSelectorNavigator'; +import ModelCatalogFilters from '~/app/pages/modelCatalog/components/ModelCatalogFilters'; 
+import { ModelCatalogContext } from '~/app/context/modelCatalog/ModelCatalogContext'; +import { hasFiltersApplied } from '~/app/pages/modelCatalog/utils/modelCatalogUtils'; +import { + ModelCatalogNumberFilterKey, + ModelCatalogStringFilterKey, +} from '~/concepts/modelCatalog/const'; +import { CategoryName } from '~/app/modelCatalogTypes'; +import ModelCatalogSourceLabelSelectorNavigator from './ModelCatalogSourceLabelSelectorNavigator'; +import ModelCatalogAllModelsView from './ModelCatalogAllModelsView'; +import ModelCatalogGalleryView from './ModelCatalogGalleryView'; const ModelCatalog: React.FC = () => { const [searchTerm, setSearchTerm] = React.useState(''); + const { selectedSourceLabel, filterData, setFilterData } = React.useContext(ModelCatalogContext); + const filtersApplied = hasFiltersApplied(filterData); + const isAllModelsView = + selectedSourceLabel === CategoryName.allModels && !searchTerm && !filtersApplied; const handleSearch = React.useCallback((term: string) => { setSearchTerm(term); @@ -17,27 +29,54 @@ const ModelCatalog: React.FC = () => { setSearchTerm(''); }, []); + const resetAllFilters = React.useCallback(() => { + Object.values(ModelCatalogStringFilterKey).forEach((filterKey) => { + setFilterData(filterKey, []); + }); + + Object.values(ModelCatalogNumberFilterKey).forEach((filterKey) => { + setFilterData(filterKey, undefined); + }); + }, [setFilterData]); + + const handleFilterReset = React.useCallback(() => { + setSearchTerm(''); + resetAllFilters(); + }, [resetAllFilters]); + return ( <> - + } description="Discover models that are available for your organization to register, deploy, and customize." empty={false} - headerContent={ - modelCatalogUrl(sourceId)} - searchTerm={searchTerm} - onSearch={handleSearch} - onClearSearch={handleClearSearch} - /> - } loaded provideChildrenPadding > - - - + + + + + + + + {isAllModelsView ? 
( + + ) : ( + + )} + + + ); diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogAllModelsView.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogAllModelsView.tsx new file mode 100644 index 0000000000..67cc651886 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogAllModelsView.tsx @@ -0,0 +1,63 @@ +import React from 'react'; +import { Stack } from '@patternfly/react-core'; +import { ModelCatalogContext } from '~/app/context/modelCatalog/ModelCatalogContext'; +import { + filterEnabledCatalogSources, + getUniqueSourceLabels, + hasSourcesWithoutLabels, +} from '~/app/pages/modelCatalog/utils/modelCatalogUtils'; +import { CategoryName, SourceLabel } from '~/app/modelCatalogTypes'; +import CatalogCategorySection from './CatalogCategorySection'; + +type ModelCatalogAllModelsViewProps = { + searchTerm: string; +}; + +const ModelCatalogAllModelsView: React.FC = ({ searchTerm }) => { + const { catalogSources, updateSelectedSourceLabel } = React.useContext(ModelCatalogContext); + + const sourceLabels = React.useMemo(() => { + const enabledSources = filterEnabledCatalogSources(catalogSources); + return getUniqueSourceLabels(enabledSources); + }, [catalogSources]); + + const hasSourcesWithoutLabelsValue = React.useMemo( + () => hasSourcesWithoutLabels(catalogSources), + [catalogSources], + ); + + const handleShowMoreCategory = React.useCallback( + (categoryLabel: string) => { + updateSelectedSourceLabel(categoryLabel); + }, + [updateSelectedSourceLabel], + ); + + return ( + + {sourceLabels.map((label) => ( + + ))} + {hasSourcesWithoutLabelsValue && ( + + )} + + ); +}; + +export default ModelCatalogAllModelsView; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogGalleryView.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogGalleryView.tsx new file mode 100644 index 0000000000..bb47fb3040 --- /dev/null +++ 
b/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogGalleryView.tsx @@ -0,0 +1,134 @@ +import { + Alert, + Bullseye, + Button, + EmptyState, + Flex, + Gallery, + Spinner, + Title, +} from '@patternfly/react-core'; +import { SearchIcon } from '@patternfly/react-icons'; +import React from 'react'; +import { ModelCatalogContext } from '~/app/context/modelCatalog/ModelCatalogContext'; +import { useCatalogModelsBySources } from '~/app/hooks/modelCatalog/useCatalogModelsBySource'; +import { CatalogModel } from '~/app/modelCatalogTypes'; +import ModelCatalogCard from '~/app/pages/modelCatalog/components/ModelCatalogCard'; +import { + getSourceFromSourceId, + hasFiltersApplied, +} from '~/app/pages/modelCatalog/utils/modelCatalogUtils'; +import EmptyModelCatalogState from '~/app/pages/modelCatalog/EmptyModelCatalogState'; +import ScrollViewOnMount from '~/app/shared/components/ScrollViewOnMount'; + +type ModelCatalogPageProps = { + searchTerm: string; + handleFilterReset: () => void; +}; + +const ModelCatalogGalleryView: React.FC = ({ + searchTerm, + handleFilterReset, +}) => { + const { + selectedSourceLabel, + filterData, + filterOptions, + filterOptionsLoaded, + filterOptionsLoadError, + catalogSources, + } = React.useContext(ModelCatalogContext); + const filtersApplied = hasFiltersApplied(filterData); + + const { catalogModels, catalogModelsLoaded, catalogModelsLoadError } = useCatalogModelsBySources( + '', + selectedSourceLabel === 'All models' ? undefined : selectedSourceLabel, + 10, + searchTerm, + filterData, + filterOptions, + ); + + const loaded = catalogModelsLoaded && filterOptionsLoaded; + const loadError = catalogModelsLoadError || filterOptionsLoadError; + + if (loadError) { + return ( + + {loadError.message} + + ); + } + + if (!loaded) { + return ( + + + + Loading model catalog... 
+ + + ); + } + + if (catalogModels.items.length === 0 && !searchTerm && !filtersApplied) { + return ( + + ); + } + + if (catalogModels.items.length === 0 && (searchTerm || filtersApplied)) { + return ( + Reset filters} + /> + ); + } + + return ( + <> + + + {catalogModels.items.map((model: CatalogModel) => ( + + ))} + + {catalogModels.hasMore && ( + + {catalogModels.isLoadingMore ? ( + + + + Loading more catalog models... + + + ) : ( + + )} + + )} + + ); +}; + +export default ModelCatalogGalleryView; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogPage.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogPage.tsx deleted file mode 100644 index 750b4d7795..0000000000 --- a/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogPage.tsx +++ /dev/null @@ -1,98 +0,0 @@ -import { - Alert, - Bullseye, - Button, - EmptyState, - Gallery, - Spinner, - Title, -} from '@patternfly/react-core'; -import { SearchIcon } from '@patternfly/react-icons'; -import React from 'react'; -import { ModelCatalogContext } from '~/app/context/modelCatalog/ModelCatalogContext'; -import { useCatalogModelsBySources } from '~/app/hooks/modelCatalog/useCatalogModelsBySource'; -import { CatalogModel } from '~/app/modelCatalogTypes'; -import ModelCatalogCard from '~/app/pages/modelCatalog/components/ModelCatalogCard'; -import EmptyModelCatalogState from '~/app/pages/modelCatalog/EmptyModelCatalogState'; - -type ModelCatalogPageProps = { - searchTerm: string; -}; - -const ModelCatalogPage: React.FC = ({ searchTerm }) => { - const { selectedSource } = React.useContext(ModelCatalogContext); - const [catalogModels, catalogModelsLoaded, catalogModelsLoadError, refresh] = - useCatalogModelsBySources(selectedSource?.id || '', 10, searchTerm); - - if (!catalogModelsLoaded) { - return ( - - - - Loading model catalog... 
- - - ); - } - - if (catalogModelsLoadError) { - return ( - - {catalogModelsLoadError.message} - - - ); - } - - if (catalogModels.items.length === 0) { - return ( - - No models from the {selectedSource?.name} source match the search criteria. - Adjust your seach, or select a differenct source - - } - /> - ); - } - - return ( - <> - - {catalogModels.items.map((model: CatalogModel) => ( - - ))} - - {catalogModels.hasMore && ( -
- - {catalogModels.isLoadingMore ? ( - <> - - - Loading more catalog models... - - - ) : ( - - )} - -
- )} - - ); -}; - -export default ModelCatalogPage; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogSourceLabelBlocks.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogSourceLabelBlocks.tsx new file mode 100644 index 0000000000..e173b58480 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogSourceLabelBlocks.tsx @@ -0,0 +1,82 @@ +import { ToggleGroup, ToggleGroupItem } from '@patternfly/react-core'; +import React from 'react'; +import { ModelCatalogContext } from '~/app/context/modelCatalog/ModelCatalogContext'; +import { CategoryName, SourceLabel } from '~/app/modelCatalogTypes'; +import { + getUniqueSourceLabels, + filterEnabledCatalogSources, + hasSourcesWithoutLabels, +} from '~/app/pages/modelCatalog/utils/modelCatalogUtils'; + +type SourceLabelBlock = { + id: string; + label: string; + displayName: string; +}; + +const ModelCatalogSourceLabelBlocks: React.FC = () => { + const { catalogSources, updateSelectedSourceLabel, selectedSourceLabel } = + React.useContext(ModelCatalogContext); + + const blocks: SourceLabelBlock[] = React.useMemo(() => { + if (!catalogSources) { + return []; + } + + const enabledSources = filterEnabledCatalogSources(catalogSources); + const uniqueLabels = getUniqueSourceLabels(enabledSources); + const hasNoLabels = hasSourcesWithoutLabels(catalogSources); + + const allBlock: SourceLabelBlock = { + id: 'all', + label: CategoryName.allModels, + displayName: CategoryName.allModels, + }; + + const labelBlocks: SourceLabelBlock[] = uniqueLabels.map((label) => ({ + id: `label-${label}`, + label, + displayName: `${label} models`, + })); + + const blocksToReturn: SourceLabelBlock[] = [allBlock, ...labelBlocks]; + + if (hasNoLabels) { + const noLabelsBlock: SourceLabelBlock = { + id: 'no-labels', + label: SourceLabel.other, + displayName: `${CategoryName.communityAndCustomModels} models`, + }; + blocksToReturn.push(noLabelsBlock); + } + + return 
blocksToReturn; + }, [catalogSources]); + + if (!catalogSources) { + return null; + } + + const handleToggleClick = (label: string) => { + updateSelectedSourceLabel(label); + }; + + return ( + + {blocks.map((block) => ( + { + handleToggleClick(block.label); + }} + /> + ))} + + ); +}; + +export default ModelCatalogSourceLabelBlocks; diff --git a/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogSourceLabelSelector.tsx b/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogSourceLabelSelector.tsx new file mode 100644 index 0000000000..71a7bcbf6b --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalog/screens/ModelCatalogSourceLabelSelector.tsx @@ -0,0 +1,147 @@ +import { + Button, + Flex, + Stack, + StackItem, + Toolbar, + ToolbarContent, + ToolbarGroup, + ToolbarItem, + ToolbarToggleGroup, +} from '@patternfly/react-core'; +import { ArrowRightIcon, FilterIcon } from '@patternfly/react-icons'; +import React from 'react'; +import { useThemeContext } from 'mod-arch-kubeflow'; +import { ModelCatalogStringFilterKey } from '~/concepts/modelCatalog/const'; +import { ModelCatalogFilterKey } from '~/app/modelCatalogTypes'; +import ModelCatalogActiveFilters from '~/app/pages/modelCatalog/components/ModelCatalogActiveFilters'; +import ThemeAwareSearchInput from '~/app/pages/modelRegistry/screens/components/ThemeAwareSearchInput'; +import { ModelCatalogContext } from '~/app/context/modelCatalog/ModelCatalogContext'; +import { hasFiltersApplied } from '~/app/pages/modelCatalog/utils/modelCatalogUtils'; +import ModelCatalogSourceLabelBlocks from './ModelCatalogSourceLabelBlocks'; + +type ModelCatalogSourceLabelSelectorProps = { + searchTerm?: string; + onSearch?: (term: string) => void; + onClearSearch?: () => void; + onResetAllFilters?: () => void; +}; + +const ModelCatalogSourceLabelSelector: React.FC = ({ + searchTerm, + onSearch, + onClearSearch, + onResetAllFilters, +}) => { + const [inputValue, setInputValue] = 
React.useState(searchTerm || ''); + const { isMUITheme } = useThemeContext(); + const { filterData } = React.useContext(ModelCatalogContext); + const filtersApplied = React.useMemo(() => hasFiltersApplied(filterData), [filterData]); + const hasActiveFilters = React.useMemo( + () => filtersApplied || (searchTerm && searchTerm.trim().length > 0), + [filtersApplied, searchTerm], + ); + + const handleClearAllFilters = React.useCallback(() => { + if (hasActiveFilters && onResetAllFilters) { + onResetAllFilters(); + } + }, [hasActiveFilters, onResetAllFilters]); + + React.useEffect(() => { + setInputValue(searchTerm || ''); + }, [searchTerm]); + + const handleModelSearch = () => { + if (onSearch && inputValue.trim() !== searchTerm) { + onSearch(inputValue.trim()); + } + }; + + const handleClear = () => { + if (onClearSearch) { + onClearSearch(); + } + }; + + const handleSearchInputChange = (value: string) => { + setInputValue(value); + }; + + const handleSearchInputSearch = (_: React.SyntheticEvent, value: string) => { + if (onSearch) { + onSearch(value.trim()); + } + }; + + // Define which filters to show on the landing page + const filtersToShow: ModelCatalogFilterKey[] = [ + ModelCatalogStringFilterKey.PROVIDER, + ModelCatalogStringFilterKey.LICENSE, + ModelCatalogStringFilterKey.TASK, + ModelCatalogStringFilterKey.LANGUAGE, + ]; + + return ( + + + + + + }> + + + + + + {isMUITheme && ( + + ); + } + return modelRegistries.length === 0 ? ( registerButtonPopover( 'Request access to a model registry', 'To request a new model registry, or to request permission to access an existing model registry, contact your administrator.', ) - ) : artifacts.items.length === 0 ? ( + ) : artifacts.items.length === 0 || !hasModelArtifacts(artifacts.items) ? 
( registerButtonPopover('', 'Model location is unavailable') ) : ( + + + + + + + + + + + + +); + +export default ManageSourceFormFooter; diff --git a/clients/ui/frontend/src/app/pages/modelCatalogSettings/components/ModelVisibilitySection.tsx b/clients/ui/frontend/src/app/pages/modelCatalogSettings/components/ModelVisibilitySection.tsx new file mode 100644 index 0000000000..73b69637d4 --- /dev/null +++ b/clients/ui/frontend/src/app/pages/modelCatalogSettings/components/ModelVisibilitySection.tsx @@ -0,0 +1,123 @@ +import * as React from 'react'; +import { + FormFieldGroupExpandable, + FormFieldGroupHeader, + FormGroup, + TextArea, + FormHelperText, + HelperText, + HelperTextItem, +} from '@patternfly/react-core'; +import { UpdateObjectAtPropAndValue } from 'mod-arch-shared'; +import FormFieldset from '~/app/pages/modelRegistry/screens/components/FormFieldset'; +import FormSection from '~/app/pages/modelRegistry/components/pf-overrides/FormSection'; +import { + ManageSourceFormData, + SourceType, +} from '~/app/pages/modelCatalogSettings/useManageSourceData'; +import { + FORM_LABELS, + PLACEHOLDERS, + DESCRIPTIONS, + FIELD_HELPER_TEXT, + getFilterInfoWithOrg, + getAllowedModelsHelp, + getExcludedModelsHelp, +} from '~/app/pages/modelCatalogSettings/constants'; + +type ModelVisibilitySectionProps = { + formData: ManageSourceFormData; + setData: UpdateObjectAtPropAndValue; +}; + +const ModelVisibilitySection: React.FC = ({ formData, setData }) => { + const isHuggingFaceMode = formData.sourceType === SourceType.HuggingFace; + const organization = isHuggingFaceMode ? formData.organization : undefined; + + const sectionDescription = + isHuggingFaceMode && organization + ? getFilterInfoWithOrg(organization) + : DESCRIPTIONS.FILTER_INFO_GENERIC; + + const allowedModelsHelp = getAllowedModelsHelp(organization); + const excludedModelsHelp = getExcludedModelsHelp(organization); + + const allowedModelsPlaceholder = isHuggingFaceMode + ? 
PLACEHOLDERS.ALLOWED_MODELS_HF + : PLACEHOLDERS.ALLOWED_MODELS_GENERIC; + + const excludedModelsPlaceholder = isHuggingFaceMode + ? PLACEHOLDERS.EXCLUDED_MODELS_HF + : PLACEHOLDERS.EXCLUDED_MODELS_GENERIC; + + const allowedModelsInput = ( +
+ {getCellValue(column.field)} +