From e4f5ee7b557b16f6aee445633e18373c866ea8ea Mon Sep 17 00:00:00 2001 From: Priyanka Abel Date: Thu, 6 Nov 2025 10:37:41 +0530 Subject: [PATCH 01/11] Fixed merge conflicts --- artifacts/attributes.adoc | 8 + artifacts/snip-developer-preview-rhoai.adoc | 6 + ...and-how-ai-assets-map-to-rhdh-catalog.adoc | 8 + ...roc-populating-the-api-definition-tab.adoc | 27 ++++ ...hift-ai-connector-for-rhdh-with-rhoai.adoc | 143 ++++++++++++++++++ ...oubleshooting-connector-functionality.adoc | 67 ++++++++ .../ref-enrich-ai-model-metadata.adoc | 42 +++++ .../ref-model-to-entity-mapping.adoc | 34 +++++ ...el-registry-and-model-catalog-queries.adoc | 48 ++++++ ...ref-out-of-the-box-details-from-rhoai.adoc | 16 ++ .../openshift-ai-connector-for-rhdh/artifacts | 1 + .../assemblies | 1 + .../docinfo.xml | 11 ++ titles/openshift-ai-connector-for-rhdh/images | 1 + .../master.adoc | 22 +++ .../openshift-ai-connector-for-rhdh/modules | 1 + 16 files changed, 436 insertions(+) create mode 100644 artifacts/snip-developer-preview-rhoai.adoc create mode 100644 modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc create mode 100644 modules/openshift-ai-connector-for-rhdh/proc-populating-the-api-definition-tab.adoc create mode 100644 modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc create mode 100644 modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc create mode 100644 modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc create mode 100644 modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc create mode 100644 modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc create mode 100644 modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc create mode 120000 titles/openshift-ai-connector-for-rhdh/artifacts create mode 120000 
titles/openshift-ai-connector-for-rhdh/assemblies create mode 100644 titles/openshift-ai-connector-for-rhdh/docinfo.xml create mode 120000 titles/openshift-ai-connector-for-rhdh/images create mode 100644 titles/openshift-ai-connector-for-rhdh/master.adoc create mode 120000 titles/openshift-ai-connector-for-rhdh/modules diff --git a/artifacts/attributes.adoc b/artifacts/attributes.adoc index 350ff6215b..46c8716ad3 100644 --- a/artifacts/attributes.adoc +++ b/artifacts/attributes.adoc @@ -65,6 +65,7 @@ :rhdeveloper-name: Red Hat Developer :rhel: Red Hat Enterprise Linux :rhoai-brand-name: Red Hat OpenShift AI +:rhoai-short: RHOAI :rhoserverless-brand-name: Red Hat OpenShift Serverless :rhsso-brand-name: Red Hat Single-Sign On :rhsso: RHSSO @@ -171,8 +172,15 @@ :plugin-type-name: custom :plugin-type-name-uppercase: Custom + :scorecard-plugin-book-link: {product-docs-link}/html-single/understand_and_visualize_red_hat_developer_hub_project_health_using_scorecards/index :scorecard-plugin-book-title: Understand and visualize {product} project health using Scorecards :model-context-protocol-link: {product-docs-link}/html-single/interacting_with_model_context_protocol_tools_for_red_hat_developer_hub/index :model-context-protocol-title: Interacting with Model Context Protocol tools for {product} + +:openshift-ai-connector-for-rhdh-link: {product-docs-link}/html-single/integrating_rhdh_with_openshift_ai_connector_for_rhdh/index +:openshift-ai-connector-for-rhdh-title: Integrate {product} with {openshift-ai-connector-name} to leverage AI models + +:openshift-ai-connector-name: OpenShift AI Connector for {product} +:openshift-ai-connector-name-short: OpenShift AI Connector for {product-very-short} diff --git a/artifacts/snip-developer-preview-rhoai.adoc b/artifacts/snip-developer-preview-rhoai.adoc new file mode 100644 index 0000000000..fc8cff4911 --- /dev/null +++ b/artifacts/snip-developer-preview-rhoai.adoc @@ -0,0 +1,6 @@ +[IMPORTANT] +==== +This section describes Developer 
Preview features in the {openshift-ai-connector-name} plugin. Developer Preview features are not supported by Red Hat in any way and are not functionally complete or production-ready. Do not use Developer Preview features for production or business-critical workloads. Developer Preview features provide early access to functionality in advance of possible inclusion in a Red Hat product offering. Customers can use these features to test functionality and provide feedback during the development process. Developer Preview features might not have any documentation, are subject to change or removal at any time, and have received limited testing. Red Hat might provide ways to submit feedback on Developer Preview features without an associated SLA. + +For more information about the support scope of Red Hat Developer Preview features, see https://access.redhat.com/support/offerings/devpreview/[Developer Preview Support Scope]. +==== diff --git a/modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc b/modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc new file mode 100644 index 0000000000..1e067d90b4 --- /dev/null +++ b/modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc @@ -0,0 +1,8 @@ +:_mod-docs-content-type: CONCEPT + +[id="con-understand-how-ai-assets-map-to-rhdh-catalog_{context}"] += Understand how AI assets map to the {product} Catalog + +include::{docdir}/artifacts/snip-developer-preview-rhoai.adoc[] + +The {openshift-ai-connector-name} ({openshift-ai-connector-name-short}) serves as a crucial link, enabling the discovery and accessibility of AI assets managed within the {rhoai-brand-name} offering directly within your {product-very-short} instance. 
\ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/proc-populating-the-api-definition-tab.adoc b/modules/openshift-ai-connector-for-rhdh/proc-populating-the-api-definition-tab.adoc new file mode 100644 index 0000000000..bd72fbc373 --- /dev/null +++ b/modules/openshift-ai-connector-for-rhdh/proc-populating-the-api-definition-tab.adoc @@ -0,0 +1,27 @@ +:_mod-docs-content-type: PROCEDURE + +[id="proc-populating-the-api-definition-tab_{context}"] += Populating the API Definition tab + +The AI platform engineer must follow these steps to provide this valuable information because {rhoai-short} does not expose the OpenAPI specification by default. + +.Procedure + +. Retrieve OpenAPI JSON: Use a tool like `curl` to fetch the specification directly from the running endpoint of the AI model server. The following command provides the precise endpoint (`/openapi.json`) and shows how to include a `Bearer` token if the model requires authentication for access. ++ +[source,bash] +---- +curl -k -H "Authorization: Bearer $MODEL_API_KEY" https://$MODEL_ROOT_URL_INCLUDING_PORT/openapi.json | jq > open-api.json +---- + +. Set Property in {rhoai-short}. +.. In the *{rhoai-short}* dashboard, go to *Model Registry* and select the appropriate *Model Version*. ++ +[NOTE] +==== +We recommend using *Model Version* instead of *Registered Model* to maintain stability if the API changes between versions. +==== + +.. In the **Properties** section, set a key/value pair where the key is `API Spec` and the value is the entire JSON content from the `open-api.json` file. + +. Propagation: The {openshift-ai-connector-name} periodically polls the {rhoai-short} Model Registry, propagates this JSON, and renders the interactive API documentation in the {product-very-short} API Entity *Definition* tab. 
\ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc new file mode 100644 index 0000000000..2c311a3125 --- /dev/null +++ b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc @@ -0,0 +1,143 @@ +:_mod-docs-content-type: PROCEDURE + +[id="proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai_{context}"] += Setting up {openshift-ai-connector-name} with {rhoai-brand-name} + +The installation of the {openshift-ai-connector-name} requires manual updates to {product-very-short}-related Kubernetes resources. + +.{rhoai-short} Prerequisites + +* To have Model Cards from the Model Catalog imported as Tech Docs, you must use {rhoai-short} version 2.25 or later, and the Model Catalog dashboard and a Model Registry need to be enabled (they are both off by default). +* If you employed Model Catalog at earlier versions of {rhoai-short}, Tech Doc propagation does not work for any models you registered into the Model Registry while at those earlier versions; only models registered into Model Registry from a 2.25 (or later) Model Catalog have their Model Cards transferred to {product-very-short} as TechDocs. +* For the rest of the features, version 2.20 or later suffices. Enabling Model Registry and its associated dashboard allows for a user experience that more directly allows for customizing AI Model metadata. + +//connect with Lindsey to update the above prereqs + +.Procedure + +. Configure {rhoai-short}-related RBAC and credentials. +A Kubernetes `ServiceAccount` and a `service-account-token` Secret are required for the connector to retrieve data from {rhoai-short}. 
The following resources must be created, replacing namespace names (`ai-rhdh` for {product-very-short}, `rhoai-model-registries` for {rhoai-short}) as needed: +** `ServiceAccount` (`rhdh-rhoai-bridge`) +** `ClusterRole` and `ClusterRoleBinding` (`rhdh-rhoai-bridge`) to allow access to OCP resources like `routes`, `services`, and `inferenceservices`. +** `Role` and `RoleBinding` to allow ConfigMap updates within the {product-very-short} namespace. +** `RoleBinding` in the {rhoai-short} namespace to grant the {product-very-short} `ServiceAccount` read permissions to the Model Registry data (binding to `registry-user-modelregistry-public`). +** Secret (`rhdh-rhoai-bridge-token`) of type `kubernetes.io/service-account-token` that goes along with the `rhdh-rhoai-bridge` `ServiceAccount`. + +. Update your {product-very-short} dynamic plugin configuration. +The {product-very-short} Pod requires two dynamic plugins. +.. In your {product-very-short} dynamic plugins ConfigMap, add the following code: ++ +[source,yaml] +---- +plugins: + - disabled: false + package: oci://ghcr.io/redhat-developer/rhdh-plugin-export-overlays/red-hat-developer-hub-backstage-plugin-catalog-backend-module-model-catalog:bs_1.42.5__0.7.0!red-hat-developer-hub-backstage-plugin-catalog-backend-module-model-catalog + - disabled: false + package: oci://ghcr.io/redhat-developer/rhdh-plugin-export-overlays/red-hat-developer-hub-backstage-plugin-catalog-techdoc-url-reader-backend:bs_1.42.5__0.3.0!red-hat-developer-hub-backstage-plugin-catalog-techdoc-url-reader-backend +---- + +. Add Connector sidecar containers to the {product-very-short} Pod. +The system relies on three sidecar containers (Model Catalog Bridge) running alongside the `backstage-backend` container. These sidecar containers must be added to your {product-very-short} deployment specification, referencing the `rhdh-rhoai-bridge-token` Secret: +** `location`: Provides the REST API for RHDH plugins to fetch model metadata. 
+** `storage-rest`: Maintains a cache of AI Model metadata in a ConfigMap called `bac-import-model`. +** `rhoai-normalizer`: Acts as a Kubernetes controller and RHOAI client, normalizing RHOAI metadata for the connector. The following code block is an example: ++ +[source,yaml] +---- +spec: + template: + spec: + containers: + - name: backstage-backend + - env: + - name: NORMALIZER_FORMAT + value: JsonArrayFormat + - name: POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: POD_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + envFrom: + - secretRef: + name: rhdh-rhoai-bridge-token + image: quay.io/redhat-ai-dev/model-catalog-location-service@sha256:4f6ab6624a29f627f9f861cfcd5d18177d46aa2c67a81a75a1502c49bc2ff012 + + imagePullPolicy: Always + name: location + ports: + - containerPort: 9090 + name: location + protocol: TCP + volumeMounts: + - mountPath: /opt/app-root/src/dynamic-plugins-root + name: dynamic-plugins-root + workingDir: /opt/app-root/src + - env: + - name: NORMALIZER_FORMAT + value: JsonArrayFormat + - name: STORAGE_TYPE + value: ConfigMap + - name: BRIDGE_URL + value: http://localhost:9090 + - name: POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: POD_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + envFrom: + - secretRef: + name: rhdh-rhoai-bridge-token + image: quay.io/redhat-ai-dev/model-catalog-storage-rest@sha256:398095e7469e86d84b1196371286363f4b7668aa3e26370b4d78cb8d4ace1dc9 + + imagePullPolicy: Always + name: storage-rest + volumeMounts: + - mountPath: /opt/app-root/src/dynamic-plugins-root + name: dynamic-plugins-root + workingDir: /opt/app-root/src + - env: + - name: NORMALIZER_FORMAT + value: JsonArrayFormat + - name: POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: POD_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + envFrom: + - secretRef: + name: rhdh-rhoai-bridge-token + image: 
quay.io/redhat-ai-dev/model-catalog-rhoai-normalizer@sha256:fe6c05d57495d6217c4d584940ec552c3727847ff60f39f5d04f94be024576d8 + + imagePullPolicy: Always + name: rhoai-normalizer + volumeMounts: + - mountPath: /opt/app-root/src/dynamic-plugins-root + name: dynamic-plugins-root + workingDir: /opt/app-root/src +---- + +. Enable `Connector` in your {product-very-short} `{my-app-config-file}` file. +In your {backstage} `app-config.extra.yaml` file, configure `Entity Provider` under the `catalog.providers` section: ++ +[source,yaml] +---- +providers: + modelCatalog: + development: + baseUrl: http://localhost:9090 +---- + +where: + +`modelCatalog`:: Specifies the name of the provider. +`development`:: Defines future connector capability beyond a single `baseUrl`. +`baseUrl`:: For Developer Preview, this value is the only one supported. Future releases might support external routes. \ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc b/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc new file mode 100644 index 0000000000..b9ff1428d1 --- /dev/null +++ b/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc @@ -0,0 +1,67 @@ +:_mod-docs-content-type: PROCEDURE + +[id="proc-troubleshooting-connector-functionality_{context}"] += Troubleshooting Connector functionality + +The connector system consists of the two dynamic plugins and the three Model Catalog Bridge sidecar containers. Generally speaking, the logs collected should be provided to {company-name} Support for analysis. 
+ == Checking Dynamic Plugins status + +Validate that the dynamic plugins have been successfully installed into your {product-very-short} project Pod by using the following command: ++ +[source,bash] +---- +oc logs -c install-dynamic-plugins deployment/ +---- + +The `install-dynamic-plugins` logs allow you to verify that the following plugins were installed successfully: + +* `red-hat-developer-hub-backstage-plugin-catalog-backend-module-model-catalog` (Entity Provider) +* `red-hat-developer-hub-backstage-plugin-catalog-techdoc-url-reader-backend` (TechDoc URL Reader) + +== Inspecting plugin logs + +View the {openshift-ai-connector-name} plugins in the `backstage-backend` container. Items to look for: + +[cols="3,4,4"] +|=== +|Plugin Component |Logger Service Target |Common Log Text + +|Model Catalog Entity Provider +|`ModelCatalogResourceEntityProvider` +|`Discovering ResourceEntities from Model Server...` + +|Model Catalog TechDoc URL Reader +|`ModelCatalogBridgeTechdocUrlReader` +|`ModelCatalogBridgeTechdocUrlReader.readUrl` +|=== + +To enable debug logging, set the `LOG_LEVEL` environment variable to `debug` on the `backstage-backend` container. For more information, see {monitoring-and-logging-book-link}[{monitoring-and-logging-book-title}]. + +== Inspecting the Model Catalog Bridge + +The Model Catalog Bridge sidecars manage the data fetching and storage: + +. Check Cached Data (ConfigMap): The processed AI Model metadata is stored in a `ConfigMap`. ++ +[source,bash] +---- +oc get configmap bac-import-model -o json | jq -r '.binaryData | to_entries[] | "=== \(.key) ===\n" + (.value | @base64d | fromjson | .body | @base64d | fromjson | tostring)' | jq -R 'if startswith("=== ") then . else (. | fromjson) end' +---- + +. Check Location Service API: Confirm the location service is providing data to the {product-very-short} Entity Provider. ++ +[source,bash] +---- +oc rsh -c backstage-backend deployment/ +curl http://localhost:9090/list +---- + +. 
Check Sidecar Container Logs: ++ +[source,bash] +---- +oc logs -c rhoai-normalizer deployment/ +oc logs -c storage-rest deployment/ +oc logs -c location deployment/ +---- \ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc b/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc new file mode 100644 index 0000000000..22bb8ada80 --- /dev/null +++ b/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc @@ -0,0 +1,42 @@ +:_mod-docs-content-type: REFERENCE + +[id="ref-enrich-ai-model-metadata_{context}"] += Enrich AI model metadata for enhanced {product} experience + +While {rhoai-short} provides essential data, an AI platform engineer can enrich the {backstage} experience by adding custom properties to the `ModelVersion` or `RegisteredModel` (or annotations to the `KServe InferenceService` if the Model Registry is not used) so that the {openshift-ai-connector-name} can add the information to the {product-very-short} entities it creates. + +|=== +|Property Key |Entity Field Populated |Description + +|`API Spec` +|API Definition Tab +|The OpenAPI / Swagger JSON specification for the model REST API. + +|`API Type` +|API Type +|Correlates to supported {product-very-short}/{backstage} API types (defaults to `openapi`). + +|`TechDocs` +|TechDocs +|URL pointing to a Git repository that follows {product-very-short} TechDocs conventions for the Model Card. + +|`Homepage URL` +|Links +|A URL considered the home page for the model. + +|`Owner` +|Owner +|Overrides the default OpenShift user as the entity owner. + +|`Lifecycle` +|Lifecycle +|Serves as a means to express the lifecycle notion of {product-very-short}/{backstage}. + +|`How to use` +|Links +|A URL that points to usage documentation. + +|`License` +|Links +|A URL to the license file of the model. 
+|=== \ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc b/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc new file mode 100644 index 0000000000..b96f0ee510 --- /dev/null +++ b/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc @@ -0,0 +1,34 @@ +:_mod-docs-content-type: REFERENCE + +[id="ref-model-to-entity-mapping_{context}"] += Model-to-Entity mapping + +This offering interfaces with the {openshift-ai-connector-name-short}, Model Catalog, and KServe-based Model Deployments (InferenceServices) to create familiar {backstage} entities. + +|=== +|{rhoai-short} Artifact |{product-very-short}/{backstage} Entity Kind |{product-very-short}/{backstage} Entity Type |Purpose + +|Model Server (InferenceService) +|Component +|`model-server` +|Represents a running, accessible AI model endpoint. //Requires RHOAI 2.20 or later to obtain this mapping. + +|AI Model (Model Registry Version) +|Resource +|`ai-model` +|Represents the specific AI model artifact (e.g., Llama-3-8B). Requires RHOAI 2.20 or later to obtain this mapping. + +|Model Server API Details +|API +|`openapi` (Default) +|Provides the OpenAPI/Swagger specification for the model's REST endpoint. Requires RHOAI 2.20 or later to obtain this mapping. + +|Model Cards +|TechDocs +|N/A +|Model Cards from the {rhoai-short} Model Catalog are associated with the Component and Resource entities. Requires RHOAI 2.25 to obtain this mapping. +|=== + +Once the {openshift-ai-connector-name-short} is installed and connected with {rhoai-short}, the transfer of information commences automatically. 
+ +//To connect with Lindsey to update links here \ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc b/modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc new file mode 100644 index 0000000000..1a9f3fd4f8 --- /dev/null +++ b/modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc @@ -0,0 +1,48 @@ +:_mod-docs-content-type: REFERENCE + +[id="ref-openshift-ai-model-registry-and-model-catalog-queries_{context}"] += OpenShift AI Model Registry and Model Catalog queries + +To access the same {rhoai-short} data as the connector, use `curl` to query the {rhoai-short} Model Registry and Model Catalog APIs, ensuring the ServiceAccount token has correct access control: + +* Example: Fetch Registered Models ++ +[source,bash] +---- +curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/registered_models | jq +---- + +* Example: Fetch Model Versions ++ +[source,bash] +---- +curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/model_versions | jq +---- + +* Example: Fetch Model Artifacts ++ +[source,bash] +---- +curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/model_artifacts | jq +---- + +* Example: Fetch Inference Services ++ +[source,bash] +---- +curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/inference_services | jq +---- + +* Example: Fetch Serving Environments ++ +[source,bash] +---- +curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/serving_environments | jq +---- + +* Example: Fetch Catalog Sources ++ +[source,bash] +---- +curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_CATALOG_URL/api/model_catalog/v1alpha1/sources | jq +---- \ No newline at end of file 
diff --git a/modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc b/modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc new file mode 100644 index 0000000000..75bd96a584 --- /dev/null +++ b/modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc @@ -0,0 +1,16 @@ +:_mod-docs-content-type: REFERENCE + +[id="ref-out-of-the-box-details-from-rhoai_{context}"] += Out-of-the-Box details from {rhoai-short} + +The connector propagates the following key data: + +* InferenceServices (Component type model-server): +** URL of the OpenShift Route (if exposed). +** URL of the Kubernetes Service. +** Authentication requirement status. +* Model Registry (Resource type `ai-model`): +** Model description, artifact URIs, and author/owner information. +* Model Catalog: +** Links to the Model Card (as {product-very-short} TechDocs). +** Model license URL. \ No newline at end of file diff --git a/titles/openshift-ai-connector-for-rhdh/artifacts b/titles/openshift-ai-connector-for-rhdh/artifacts new file mode 120000 index 0000000000..f30b6dea60 --- /dev/null +++ b/titles/openshift-ai-connector-for-rhdh/artifacts @@ -0,0 +1 @@ +../../artifacts \ No newline at end of file diff --git a/titles/openshift-ai-connector-for-rhdh/assemblies b/titles/openshift-ai-connector-for-rhdh/assemblies new file mode 120000 index 0000000000..91646274db --- /dev/null +++ b/titles/openshift-ai-connector-for-rhdh/assemblies @@ -0,0 +1 @@ +../../assemblies \ No newline at end of file diff --git a/titles/openshift-ai-connector-for-rhdh/docinfo.xml b/titles/openshift-ai-connector-for-rhdh/docinfo.xml new file mode 100644 index 0000000000..5f7fe2ebac --- /dev/null +++ b/titles/openshift-ai-connector-for-rhdh/docinfo.xml @@ -0,0 +1,11 @@ +{title} +{product} +{product-version} +{subtitle} + + {abstract} + + + {company-name} Customer Content Services + + diff --git a/titles/openshift-ai-connector-for-rhdh/images 
b/titles/openshift-ai-connector-for-rhdh/images new file mode 120000 index 0000000000..5fa6987088 --- /dev/null +++ b/titles/openshift-ai-connector-for-rhdh/images @@ -0,0 +1 @@ +../../images \ No newline at end of file diff --git a/titles/openshift-ai-connector-for-rhdh/master.adoc b/titles/openshift-ai-connector-for-rhdh/master.adoc new file mode 100644 index 0000000000..427e37e914 --- /dev/null +++ b/titles/openshift-ai-connector-for-rhdh/master.adoc @@ -0,0 +1,22 @@ +include::artifacts/attributes.adoc[] +:context: +:imagesdir: images +:title: {openshift-ai-connector-for-rhdh-title} +:subtitle: As a developer, when you require access to centralized AI/ML services, you can integrate AI models and model servers from {rhoai-brand-name}* directly into the {product} ({product-very-short}) Catalog, so that you can provide a single, consistent hub for discovering, managing, and consuming all components, accelerating time-to-market. += {title} + +include::modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc[leveloffset=+1] + +include::modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc[leveloffset=+2] + +include::modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc[leveloffset=+2] + +include::modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc[leveloffset=+1] + +include::modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc[leveloffset=+1] + +include::modules/openshift-ai-connector-for-rhdh/proc-populating-the-api-definition-tab.adoc[leveloffset=+2] + +include::modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc[leveloffset=+1] + +include::modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc[leveloffset=+2] \ No newline at end of file diff --git a/titles/openshift-ai-connector-for-rhdh/modules 
b/titles/openshift-ai-connector-for-rhdh/modules new file mode 120000 index 0000000000..36719b9de7 --- /dev/null +++ b/titles/openshift-ai-connector-for-rhdh/modules @@ -0,0 +1 @@ +../../modules/ \ No newline at end of file From 82aac29f7b17a3b60a29299807759aaf263e9289 Mon Sep 17 00:00:00 2001 From: Priyanka Abel Date: Tue, 4 Nov 2025 00:06:17 +0530 Subject: [PATCH 02/11] title-related changes --- titles/openshift-ai-connector-for-rhdh/master.adoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/titles/openshift-ai-connector-for-rhdh/master.adoc b/titles/openshift-ai-connector-for-rhdh/master.adoc index 427e37e914..aeed943201 100644 --- a/titles/openshift-ai-connector-for-rhdh/master.adoc +++ b/titles/openshift-ai-connector-for-rhdh/master.adoc @@ -2,7 +2,8 @@ include::artifacts/attributes.adoc[] :context: :imagesdir: images :title: {openshift-ai-connector-for-rhdh-title} -:subtitle: As a developer, when you require access to centralized AI/ML services, you can integrate AI models and model servers from {rhoai-brand-name}* directly into the {product} ({product-very-short}) Catalog, so that you can provide a single, consistent hub for discovering, managing, and consuming all components, accelerating time-to-market. +:subtitle: Installing, configuring, and troubleshooting {openshift-ai-connector-name} +:abstract: As a developer, when you require access to centralized AI/ML services, you can integrate AI models and model servers from {rhoai-brand-name} directly into the {product} ({product-very-short}) Catalog, so that you can provide a single, consistent hub for discovering, managing, and consuming all components, accelerating time-to-market. 
= {title} include::modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc[leveloffset=+1] From 1e1f60824d6ce5e92fc6781b59240796e23db287 Mon Sep 17 00:00:00 2001 From: Priyanka Abel Date: Tue, 4 Nov 2025 15:37:57 +0530 Subject: [PATCH 03/11] Incorporated Gabe's comments --- ...-openshift-ai-connector-for-rhdh-with-rhoai.adoc | 13 ++++++++----- ...roc-troubleshooting-connector-functionality.adoc | 5 +++++ .../ref-enrich-ai-model-metadata.adoc | 2 +- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc index 2c311a3125..7e2390bcbd 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc @@ -7,7 +7,7 @@ The installation of the {openshift-ai-connector-name} requires manual updates to .{rhoai-short} Prerequisites -* To have Model Cards from the Model Catalog imported as Tech Docs, you must use {rhoai-short} version 2.25 or later, and the Model Catalog dashboard and a Model Registry need to be enabled (they are both off by default). +* To have Model Cards from the Model Catalog imported as TechDocs, you must use {rhoai-short} version 2.25 or later, and the *Model Catalog* dashboard and a Model Registry need to be enabled (they are both disabled by default). * If you employed Model Catalog at earlier versions of {rhoai-short}, Tech Doc propagation does not work for any models you registered into the Model Registry while at those earlier versions; only models registered into Model Registry from a 2.25 (or later) Model Catalog have their Model Cards transferred to {product-very-short} as TechDocs. 
* For the rest of the features, version 2.20 or later suffices. Enabling Model Registry and its associated dashboard allows for a user experience that more directly allows for customizing AI Model metadata. @@ -36,11 +36,14 @@ plugins: package: oci://ghcr.io/redhat-developer/rhdh-plugin-export-overlays/red-hat-developer-hub-backstage-plugin-catalog-techdoc-url-reader-backend:bs_1.42.5__0.3.0!red-hat-developer-hub-backstage-plugin-catalog-techdoc-url-reader-backend ---- -. Add Connector sidecar containers to the {product-very-short} Pod. -The system relies on three sidecar containers (Model Catalog Bridge) running alongside the `backstage-backend` container. These sidecar containers must be added to your {product-very-short} deployment specification, referencing the `rhdh-rhoai-bridge-token` Secret: -** `location`: Provides the REST API for RHDH plugins to fetch model metadata. +. Add `Connector` sidecar containers to the {product-very-short} Pod. +** If {product-very-short} was installed using the Operator, modify your {product-very-short} custom resource (CR) instance. +** If {product-very-short} was installed using the Helm charts, modify the *Deployment* specification. + +. The system relies on three sidecar containers (Model Catalog Bridge) running alongside the `backstage-backend` container. Add these sidecar containers to your configuration referencing the `rhdh-rhoai-bridge-token` Secret: +** `location`: Provides the REST API for {product-very-short} plugins to fetch model metadata. ** `storage-rest`: Maintains a cache of AI Model metadata in a ConfigMap called `bac-import-model`. -** `rhoai-normalizer`: Acts as a Kubernetes controller and RHOAI client, normalizing RHOAI metadata for the connector. The following code block is an example: +** `rhoai-normalizer`: Acts as a Kubernetes controller and {rhoai-short} client, normalizing {rhoai-short} metadata for the connector. 
The following code block is an example: + [source,yaml] ---- diff --git a/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc b/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc index b9ff1428d1..2347452a22 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc @@ -42,6 +42,11 @@ To enable debug logging, set the `LOG_LEVEL` environment variable to `debug` on The Model Catalog Bridge sidecars manage the data fetching and storage: +[IMPORTANT] +==== +{openshift-ai-connector-name} collects feedback from users who engage with the feedback feature. If a user submits feedback, the feedback score (thumbs up or down), text feedback (if entered), the user query, and the LLM provider response are stored locally in the file system of the Pod. {company-name} does not have access to the collected feedback data. +==== + . Check Cached Data (ConfigMap): The processed AI Model metadata is stored in a `ConfigMap`. + [source,bash] diff --git a/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc b/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc index 22bb8ada80..029c17a88d 100644 --- a/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc +++ b/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc @@ -18,7 +18,7 @@ While {rhoai-short} provides essential data, an AI platform engineer can enrich |`TechDocs` |TechDocs -|URL pointing to a Git repository that follows {product-very-short} TechDocs conventions for the Model Card. +|URL pointing to a Git repository that follows {product-very-short} TechDocs conventions for the Model Card. Use this setting only if the *Model Card to TechDocs* mapping is not active. 
|`Homepage URL` |Links From 3b7b5d56d33439a05148eb8a99fbbfcdf00340bc Mon Sep 17 00:00:00 2001 From: Priyanka Abel Date: Wed, 5 Nov 2025 21:02:24 +0530 Subject: [PATCH 04/11] Added links to RHOAI --- artifacts/attributes.adoc | 2 +- ...rstand-how-ai-assets-map-to-rhdh-catalog.adoc | 4 +++- ...enshift-ai-connector-for-rhdh-with-rhoai.adoc | 12 +++++++----- ...-troubleshooting-connector-functionality.adoc | 2 ++ .../ref-model-to-entity-mapping.adoc | 14 ++++++-------- ...model-registry-and-model-catalog-queries.adoc | 16 ++++++++-------- .../ref-out-of-the-box-details-from-rhoai.adoc | 4 ++-- 7 files changed, 29 insertions(+), 25 deletions(-) diff --git a/artifacts/attributes.adoc b/artifacts/attributes.adoc index 46c8716ad3..dcc76c1cde 100644 --- a/artifacts/attributes.adoc +++ b/artifacts/attributes.adoc @@ -183,4 +183,4 @@ :openshift-ai-connector-for-rhdh-title: Integrate {product} with {openshift-ai-connector-name} to leverage AI models :openshift-ai-connector-name: OpenShift AI Connector for {product} -:openshift-ai-connector-name-short: OpenShift AI Connector for {product-very-short} +:openshift-ai-connector-name-short: OpenShift AI Connector for {product-very-short} \ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc b/modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc index 1e067d90b4..db2c85842c 100644 --- a/modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc +++ b/modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc @@ -5,4 +5,6 @@ include::{docdir}/artifacts/snip-developer-preview-rhoai.adoc[] -The {openshift-ai-connector-name} ({openshift-ai-connector-name-short}) serves as a crucial link, enabling the discovery and accessibility of AI assets managed within the {rhoai-brand-name} offering directly within your {product-very-short} instance. 
\ No newline at end of file +The {openshift-ai-connector-name} ({openshift-ai-connector-name-short}) serves as a crucial link, enabling the discovery and accessibility of AI assets managed within the {rhoai-brand-name} offering directly within your {product-very-short} instance. + +For more information on model registry components, see https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2.25/html/enabling_the_model_registry_component/overview-of-model-registries_model-registry-config[Overview of model registries and model catalog]. \ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc index 7e2390bcbd..5c1ae9ece7 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc @@ -7,16 +7,18 @@ The installation of the {openshift-ai-connector-name} requires manual updates to .{rhoai-short} Prerequisites -* To have Model Cards from the Model Catalog imported as TechDocs, you must use {rhoai-short} version 2.25 or later, and the *Model Catalog* dashboard and a Model Registry need to be enabled (they are both disabled by default). -* If you employed Model Catalog at earlier versions of {rhoai-short}, Tech Doc propagation does not work for any models you registered into the Model Registry while at those earlier versions; only models registered into Model Registry from a 2.25 (or later) Model Catalog have their Model Cards transferred to {product-very-short} as TechDocs. -* For the rest of the features, version 2.20 or later suffices. Enabling Model Registry and its associated dashboard allows for a user experience that more directly allows for customizing AI Model metadata. 
+* To import model cards from the model catalog as TechDocs, you must use {rhoai-short} version 2.25 or later, and you must ensure that the model registry component is enabled. -//connect with Lindsey to update the above prereqs +* If you employed model catalog at earlier versions of {rhoai-short}, TechDocs propagation does not work for any models you registered into the model registry while at those earlier versions; only models registered into model registry from a 2.25 (or later) model catalog have their model cards transferred to {product-very-short} as TechDocs. + +* For the rest of the features, version 2.20 or later suffices. Enabling model registry and its associated dashboard allows for a user experience that more directly allows for customizing AI Model metadata. + +For more details, see link:https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2.25/html-single/enabling_the_model_registry_component/index[Enabling the model registry component]. .Procedure . Configure {rhoai-short}-related RBAC and credentials. -A Kubernetes `ServiceAccount` and a `service-account-token` Secret are required for the connector to retrieve data from {rhoai-short}. The following resources must be created, replacing namespace names (`ai-rhdh` for {product-very-short}, `rhoai-model-registries` for {rhoa``}) as needed: +A Kubernetes `ServiceAccount` and a `service-account-token` Secret are required for the connector to retrieve data from {rhoai-short}. The following resources must be created, replacing namespace names (`ai-rhdh` for {product-very-short}, `rhoai-model-registries` for {rhoai-short}) as needed: ** `ServiceAccount` (`rhdh-rhoai-bridge`) ** `ClusterRole` and `ClusterRoleBinding` (`rhdh-rhoai-bridge`) to allow access to OCP resources like `routes`, `services`, and `inferenceservices`. ** `Role` and `RoleBinding` to allow ConfigMap updates within the {product-very-short} namespace. 
diff --git a/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc b/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc index 2347452a22..f3dc4bcdb5 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc @@ -5,6 +5,8 @@ The connector system consists of the two dynamic plugins and the three Model Catalog Bridge sidecar containers. Generally speaking, the logs collected should be provided to {company-name} Support for analysis. +The actual contents of the diagnostic data are not part of any product guaranteed specification, and can change at any time. + == Checking Dynamic Plugins status Validate that the dynamic plugins have been successfully installed into your {product-very-short} project Pod by using the following command: diff --git a/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc b/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc index b96f0ee510..04efae323c 100644 --- a/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc +++ b/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc @@ -3,7 +3,7 @@ [id="ref-model-to-entity-mapping_{context}"] = Model-to-Entity mapping -This offering interfaces with the {openshift-ai-connector-name-short}, Model Catalog, and KServe-based Model Deployments (InferenceServices) to create familiar {backstage} entities. +This offering interfaces with the {openshift-ai-connector-name-short}, model catalog, and KServe-based Model Deployments (InferenceServices) to create familiar {backstage} entities. 
|=== |{rhoai-short} Artifact |{product-very-short}/{backstage} Entity Kind |{product-very-short}/{backstage} Entity Type |Purpose @@ -11,24 +11,22 @@ This offering interfaces with the {openshift-ai-connector-name-short}, Model Cat |Model Server (InferenceService) |Component |`model-server` -|Represents a running, accessible AI model endpoint. //Requires RHOAI 2.20 or later to obtain this mapping. +|Represents a running, accessible AI model endpoint. See https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2.25/html/configuring_your_model-serving_platform/index[Configuring your model-serving platform]. |AI Model (Model Registry Version) |Resource |`ai-model` -|Represents the specific AI model artifact (e.g., Llama-3-8B). Requires RHOAI 2.20 or later to obtain this mapping. +|Represents the specific AI model artifact, for example, `Llama-3-8B`. |Model Server API Details |API |`openapi` (Default) -|Provides the OpenAPI/Swagger specification for the model's REST endpoint. Requires RHOAI 2.20 or later to obtain this mapping. +|Provides the OpenAPI/Swagger specification for the REST endpoint of the model. See https://access.redhat.com/articles/7047935[Red Hat OpenShift AI: API Tiers]. |Model Cards |TechDocs |N/A -|Model Cards from the {rhoai-short} Model Catalog are associated with the Component and Resource entities. Requires RHOAI 2.25 to obtain this mapping. +|Model cards from the {rhoai-short} model catalog are associated with the Component and Resource entities. See https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2.25/html/working_with_the_model_catalog/registering-a-model-from-the-model-catalog_working-model-catalog[Registering a model from the model catalog]. |=== -Once the {openshift-ai-connector-name-short} is installed and connected with {rhoai-short}, the transfer of information commences automatically.
- -//To connect with Lindsey to update links here \ No newline at end of file +Once the {openshift-ai-connector-name-short} is installed and connected with {rhoai-short}, the transfer of information commences automatically. \ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc b/modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc index 1a9f3fd4f8..906087c910 100644 --- a/modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc +++ b/modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc @@ -1,46 +1,46 @@ :_mod-docs-content-type: REFERENCE [id="ref-openshift-ai-model-registry-and-model-catalog-queries_{context}"] -= OpenShift AI Model Registry and Model Catalog queries += OpenShift AI model registry and model catalog queries -To access the same {rhoai-short} data as the connector, use `curl` to query the {rhoai-short} Model Registry and Model Catalog APIs, ensuring the ServiceAccount token has correct access control: +To access the same {rhoai-short} data as the connector, use `curl` to query the {rhoai-short} model registry and model catalog APIs, ensuring the `ServiceAccount` token has correct access control: -* Example: Fetch Registered Models +* Example: Fetch registered models + [source,bash] ---- curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/registered_models | jq ---- -* Example: Fetch Model Versions +* Example: Fetch model versions + [source,bash] ---- curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/model_versions | jq ---- -* Example: Fetch Model Artifacts +* Example: Fetch model artifacts + [source,bash] ---- curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/model_artifacts | jq ---- 
-* Example: Fetch Inference Services +* Example: Fetch inference services + [source,bash] ---- curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/inference_services | jq ---- -* Example: Fetch Serving Environments +* Example: Fetch serving environments + [source,bash] ---- curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/serving_environments | jq ---- -* Example: Fetch Catalog Sources +* Example: Fetch catalog sources + [source,bash] ---- diff --git a/modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc b/modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc index 75bd96a584..39e122c274 100644 --- a/modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc +++ b/modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc @@ -9,8 +9,8 @@ The connector propagates the following key data: ** URL of the OpenShift Route (if exposed). ** URL of the Kubernetes Service. ** Authentication requirement status. -* Model Registry (Resource type `ai-model`): +* Model registry (Resource type `ai-model`): ** Model description, artifact URIs, and author/owner information. -* Model Catalog: +* Model catalog: ** Links to the Model Card (as {product-very-short} TechDocs). ** Model license URL. 
\ No newline at end of file From 8437722ba6e944861eb35d9d3f9734064ccda004 Mon Sep 17 00:00:00 2001 From: Priyanka Abel Date: Wed, 5 Nov 2025 21:46:35 +0530 Subject: [PATCH 05/11] Incorporated Stephen's comments --- ...up-openshift-ai-connector-for-rhdh-with-rhoai.adoc | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc index 5c1ae9ece7..83b827e83b 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc @@ -7,9 +7,14 @@ The installation of the {openshift-ai-connector-name} requires manual updates to .{rhoai-short} Prerequisites -* To import model cards from the model catalog as TechDocs, you must use {rhoai-short} version 2.25 or later, and you must ensure that the model registry component is enabled. +* To import model cards from the model catalog into TechDocs, you need to use {rhoai-short} 2.25. ++ +[NOTE] +==== +If you upgraded to {rhoai-short} 2.25 from an earlier version, you must manually enable the model catalog dashboard and model registry before you can import model cards. +==== -* If you employed model catalog at earlier versions of {rhoai-short}, TechDocs propagation does not work for any models you registered into the model registry while at those earlier versions; only models registered into model registry from a 2.25 (or later) model catalog have their model cards transferred to {product-very-short} as TechDocs. 
+* If you employed model catalog at earlier versions of {rhoai-short}, TechDocs propagation does not work for any models you registered into the model registry while at those earlier versions; only models registered into model registry from a {rhoai-short} 2.25 model catalog have their model cards transferred to {product-very-short} as TechDocs. * For the rest of the features, version 2.20 or later suffices. Enabling model registry and its associated dashboard allows for a user experience that more directly allows for customizing AI Model metadata. @@ -42,7 +47,7 @@ plugins: ** If {product-very-short} was installed using the Operator, modify your {product-very-short} custom resource (CR) instance. ** If {product-very-short} was installed using the Helm charts, modify the *Deployment* specification. -. The system relies on three sidecar containers (Model Catalog Bridge) running alongside the `backstage-backend` container. Add these sidecar containers to your configuration referencing the `rhdh-rhoai-bridge-token` Secret: +. The system relies on three sidecar containers (model catalog bridge) running alongside the `backstage-backend` container. Add these sidecar containers to your configuration referencing the `rhdh-rhoai-bridge-token` Secret: ** `location`: Provides the REST API for {product-very-short} plugins to fetch model metadata. ** `storage-rest`: Maintains a cache of AI Model metadata in a ConfigMap called `bac-import-model`. ** `rhoai-normalizer`: Acts as a Kubernetes controller and {rhoai-short} client, normalizing {rhoai-short} metadata for the connector. 
The following code block is an example: From 0d2e6ab89f5dc64cbd66426bfd03beb1aafe8315 Mon Sep 17 00:00:00 2001 From: Priyanka Abel Date: Wed, 5 Nov 2025 22:34:03 +0530 Subject: [PATCH 06/11] Incorporated Ben's comments --- ...etting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc | 5 +++-- .../ref-out-of-the-box-details-from-rhoai.adoc | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc index 83b827e83b..e0cfcee9ec 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc @@ -14,9 +14,10 @@ The installation of the {openshift-ai-connector-name} requires manual updates to If you upgraded to {rhoai-short} 2.25 from an earlier version, you must manually enable the model catalog dashboard and model registry before you can import model cards. ==== -* If you employed model catalog at earlier versions of {rhoai-short}, TechDocs propagation does not work for any models you registered into the model registry while at those earlier versions; only models registered into model registry from a {rhoai-short} 2.25 model catalog have their model cards transferred to {product-very-short} as TechDocs. +* If you used the model catalog in earlier versions of {rhoai-short}, TechDocs propagation does not work for any models you registered into the model registry while at those earlier versions; only models registered into model registry from a {rhoai-short} 2.25 model catalog have their model cards transferred to {product-very-short} as TechDocs. * For the rest of the features, version 2.20 or later suffices. 
Enabling model registry and its associated dashboard allows for a user experience that more directly allows for customizing AI Model metadata. +For best overall experience, {rhoai-short} 2.25 is recommended. For more details, see link:https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2.25/html-single/enabling_the_model_registry_component/index[Enabling the model registry component]. @@ -43,7 +44,7 @@ plugins: package: oci://ghcr.io/redhat-developer/rhdh-plugin-export-overlays/red-hat-developer-hub-backstage-plugin-catalog-techdoc-url-reader-backend:bs_1.42.5__0.3.0!red-hat-developer-hub-backstage-plugin-catalog-techdoc-url-reader-backend ---- -. Add `Connector` sidecar containers to the {product-very-short} Pod. +. Add the `Connector` sidecar containers to the {product-very-short} Pod. ** If {product-very-short} was installed using the Operator, modify your {product-very-short} custom resource (CR) instance. ** If {product-very-short} was installed using the Helm charts, modify the *Deployment* specification. 
diff --git a/modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc b/modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc index 39e122c274..451ef64726 100644 --- a/modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc +++ b/modules/openshift-ai-connector-for-rhdh/ref-out-of-the-box-details-from-rhoai.adoc @@ -1,7 +1,7 @@ :_mod-docs-content-type: REFERENCE [id="ref-out-of-the-box-details-from-rhoai_{context}"] -= Out-of-the-Box details from {rhoai-short} += Out-of-the-box asset details synced from {rhoai-short} The connector propagates the following key data: From 15829cb753095fc6352dcb6b003618ecaba8a1bc Mon Sep 17 00:00:00 2001 From: Priyanka Abel Date: Thu, 6 Nov 2025 11:31:59 +0530 Subject: [PATCH 07/11] Incorporated Gabe's comments --- ...hift-ai-connector-for-rhdh-with-rhoai.adoc | 133 +++++++++++++++++- .../ref-enrich-ai-model-metadata.adoc | 2 +- 2 files changed, 130 insertions(+), 5 deletions(-) diff --git a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc index e0cfcee9ec..68d86bf1f2 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc @@ -7,7 +7,7 @@ The installation of the {openshift-ai-connector-name} requires manual updates to .{rhoai-short} Prerequisites -* To import model cards from the model catalog into TechDocs, you need to use {rhoai-short} 2.25. +* To import model cards from the model catalog into TechDocs, you must use {rhoai-short} 2.25. + [NOTE] ==== @@ -25,11 +25,136 @@ For more details, see link:https://docs.redhat.com/en/documentation/red_hat_open . Configure {rhoai-short}-related RBAC and credentials.
A Kubernetes `ServiceAccount` and a `service-account-token` Secret are required for the connector to retrieve data from {rhoai-short}. The following resources must be created, replacing namespace names (`ai-rhdh` for {product-very-short}, `rhoai-model-registries` for {rhoai-short}) as needed: -** `ServiceAccount` (`rhdh-rhoai-bridge`) -** `ClusterRole` and `ClusterRoleBinding` (`rhdh-rhoai-bridge`) to allow access to OCP resources like `routes`, `services`, and `inferenceservices`. -** `Role` and `RoleBinding` to allow ConfigMap updates within the {product-very-short} namespace. +** `ServiceAccount` (`rhdh-rhoai-bridge`). For example: ++ +[source,yaml] +---- +apiVersion: v1 +kind: ServiceAccount +metadata: + name: rhdh-rhoai-bridge + namespace: ai-rhdh +---- +** `ClusterRole` and `ClusterRoleBinding` (`rhdh-rhoai-bridge`) to allow access to OCP resources like `routes`, `services`, and `inferenceservices`. For example: ++ +[source,yaml] +---- +# Example for `ClusterRole` +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + name: rhdh-rhoai-bridge + annotations: + argocd.argoproj.io/sync-wave: "0" +rules: + - apiGroups: + - apiextensions.k8s.io + resources: + - customresourcedefinitions + verbs: + - get + - apiGroups: + - route.openshift.io + resources: + - routes + verbs: + - get + - list + - watch + - apiGroups: [""] + resources: + - serviceaccounts + - services + verbs: + - get + - list + - watch + + - apiGroups: ["serving.kserve.io"] + resources: ["inferenceservices"] + verbs: ["get", "list", "watch"] +---- ++ +[source,yaml] +---- +# Example for `ClusterRoleBinding` +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: rhdh-rhoai-bridge +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: rhdh-rhoai-bridge +subjects: + - kind: ServiceAccount + name: rhdh-rhoai-bridge + namespace: ai-rhdh +---- +** `Role` and `RoleBinding` to allow ConfigMap updates within the {product-very-short}
namespace. For example: ++ +[source,yaml] +---- +# Example for `Role` +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: rhdh-rhoai-bridge + namespace: ai-rhdh +rules: + - apiGroups: [""] + resources: ["configmaps"] + verbs: ["get", "list", "watch", "create", "update", "patch"] +---- ++ +[source,yaml] +---- +# Example for `RoleBinding` +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: rhdh-rhoai-dashboard-permissions + namespace: rhoai-model-registries +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: registry-user-modelregistry-public +subjects: + - apiGroup: rbac.authorization.k8s.io + kind: Group + name: system:serviceaccounts:ai-rhdh +---- ** `RoleBinding` in the {rhoai-short} namespace to grant the {product-very-short} `ServiceAccount` read permissions to the Model Registry data (binding to `registry-user-modelregistry-public`). ++ +[source,yaml] +---- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: rhdh-rhoai-bridge + namespace: ai-rhdh +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: rhdh-rhoai-bridge +subjects: + - kind: ServiceAccount + name: rhdh-rhoai-bridge + namespace: ai-rhdh +---- ** Secret (`rhdh-rhoai-bridge-token`) of type `kubernetes.io/service-account-token` that goes along with the `rhdh-rhoai-bridge` `ServiceAccount`. ++ +[source,yaml] +---- +apiVersion: v1 +kind: Secret +metadata: + name: rhdh-rhoai-bridge-token + namespace: ai-rhdh + annotations: + kubernetes.io/service-account.name: rhdh-rhoai-bridge +type: kubernetes.io/service-account-token +---- . Update your {product-very-short} dynamic plugin configuration. The {product-very-short} Pod requires two dynamic plugins. 
diff --git a/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc b/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc index 029c17a88d..36ae0093d2 100644 --- a/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc +++ b/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc @@ -3,7 +3,7 @@ [id="ref-enrich-ai-model-metadata_{context}"] = Enrich AI model metadata for enhanced {product} experience -While {rhoai-short} provides essential data, an AI platform engineer can enrich the {backstage} experience by adding custom properties to the `ModelVersion` or `RegisteredModel` (or annotations to the `KServe InferenceService` if the Model Registry is not used) so that the {openshift-ai-connector-name} can add the information to the {product-very-short} entities it creates. +While {rhoai-short} provides essential data, an AI platform engineer using {rhoai-short} can enrich the {backstage}/{product-very-short} experience by adding custom properties to the `ModelVersion` or `RegisteredModel` (or annotations to the `KServe InferenceService` if the model registry is not used) so that the {openshift-ai-connector-name} can add the information to the {product-very-short} entities it creates. For more details, see https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2.25/html/working_with_model_registries/working-with-model-registries_model-registry#editing-model-version-metadata-in-a-model-registry_model-registry[Editing model version metadata in a model registry]. 
|=== |Property Key |Entity Field Populated |Description From 2047c56ce85785005015529658afdce745680bc5 Mon Sep 17 00:00:00 2001 From: Priyanka Abel Date: Thu, 6 Nov 2025 11:59:29 +0530 Subject: [PATCH 08/11] Updating links --- artifacts/attributes.adoc | 4 +++- ...rstand-how-ai-assets-map-to-rhdh-catalog.adoc | 2 +- ...enshift-ai-connector-for-rhdh-with-rhoai.adoc | 4 ++-- ...-troubleshooting-connector-functionality.adoc | 16 ++++++++-------- .../ref-enrich-ai-model-metadata.adoc | 2 +- .../ref-model-to-entity-mapping.adoc | 4 ++-- 6 files changed, 17 insertions(+), 15 deletions(-) diff --git a/artifacts/attributes.adoc b/artifacts/attributes.adoc index dcc76c1cde..1b26be0ede 100644 --- a/artifacts/attributes.adoc +++ b/artifacts/attributes.adoc @@ -183,4 +183,6 @@ :openshift-ai-connector-for-rhdh-title: Integrate {product} with {openshift-ai-connector-name} to leverage AI models :openshift-ai-connector-name: OpenShift AI Connector for {product} -:openshift-ai-connector-name-short: OpenShift AI Connector for {product-very-short} \ No newline at end of file +:openshift-ai-connector-name-short: OpenShift AI Connector for {product-very-short} + +:rhoai-docs-link: link:https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2.25/html-single \ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc b/modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc index db2c85842c..ae3033dc9d 100644 --- a/modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc +++ b/modules/openshift-ai-connector-for-rhdh/con-understand-how-ai-assets-map-to-rhdh-catalog.adoc @@ -7,4 +7,4 @@ include::{docdir}/artifacts/snip-developer-preview-rhoai.adoc[] The {openshift-ai-connector-name} ({openshift-ai-connector-name-short}) serves as a crucial link, enabling the discovery and accessibility of AI assets managed within the 
{rhoai-brand-name} offering directly within your {product-very-short} instance. -For more information on model registry components, see https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2.25/html/enabling_the_model_registry_component/overview-of-model-registries_model-registry-config[Overview of model registries and model catalog]. \ No newline at end of file +For more information on model registry components, see {rhoai-docs-link}/enabling_the_model_registry_component/index#overview-of-model-registries_model-registry-config[Overview of model registries and model catalog]. \ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc index 68d86bf1f2..d8863d2f56 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc @@ -19,7 +19,7 @@ If you upgraded to {rhoai-short} 2.25 from an earlier version, you must manually * For the rest of the features, version 2.20 or later suffices. Enabling model registry and its associated dashboard allows for a user experience that more directly allows for customizing AI Model metadata. For best overall experience, {rhoai-short} 2.25 is recommended. -For more details, see link:https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2.25/html-single/enabling_the_model_registry_component/index[Enabling the model registry component]. +For more details, see {rhoai-docs-link}/enabling_the_model_registry_component/index[Enabling the model registry component]. 
.Procedure @@ -124,7 +124,7 @@ subjects: kind: Group name: system:serviceaccounts:ai-rhdh ---- -** `RoleBinding` in the {rhoai-short} namespace to grant the {product-very-short} `ServiceAccount` read permissions to the Model Registry data (binding to `registry-user-modelregistry-public`). +** `RoleBinding` in the {rhoai-short} namespace to grant the {product-very-short} `ServiceAccount` read permissions to the model registry data (binding to `registry-user-modelregistry-public`). + [source,yaml] ---- diff --git a/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc b/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc index f3dc4bcdb5..2d923e8914 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc @@ -3,7 +3,7 @@ [id="proc-troubleshooting-connector-functionality_{context}"] = Troubleshooting Connector functionality -The connector system consists of the two dynamic plugins and the three Model Catalog Bridge sidecar containers. Generally speaking, the logs collected should be provided to {company-name} Support for analysis. +The connector system consists of the two dynamic plugins and the three model catalog bridge sidecar containers. Generally speaking, the logs collected must be provided to {company-name} Support for analysis. The actual contents of the diagnostic data are not part of any product guaranteed specification, and can change at any time. 
@@ -11,9 +11,9 @@ The actual contents of the diagnostic data are not part of any product guarantee Validate that the dynamic plugins have been successfully installed into your {product-very-short} project Pod by using the following command: + -[source,bash] +[source,bash,subs=+attributes] ---- -oc logs -c install-dynamic-plugins deployment/ +oc logs -c install-dynamic-plugins deployment/ ---- The `install-dynamic-plugin` logs allow you to check the following installation logs for successful logs: @@ -58,9 +58,9 @@ oc get configmap bac-import-model -o json | jq -r '.binaryData | to_entries[] | . Check Location Service API: Confirm the location service is providing data to the {product-very-short} Entity Provider. + -[source,bash] +[source,bash,subs=+attributes] ---- -oc rsh -c backstage-backend deployment/ +oc rsh -c backstage-backend deployment/ curl http://localhost:9090/list ---- @@ -68,7 +68,7 @@ curl http://localhost:9090/list + [source,bash] ---- -oc logs -c rhoai-normalizer deployment/ -oc logs -c storage-rest deployment/ -oc logs -c location deployment/ +oc logs -c rhoai-normalizer deployment/ +oc logs -c storage-rest deployment/ +oc logs -c location deployment/ ---- \ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc b/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc index 36ae0093d2..c35b08e69b 100644 --- a/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc +++ b/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc @@ -3,7 +3,7 @@ [id="ref-enrich-ai-model-metadata_{context}"] = Enrich AI model metadata for enhanced {product} experience -While {rhoai-short} provides essential data, an AI platform engineer using {rhoai-short} can enrich the {backstage}/{product-very-short} experience by adding custom properties to the `ModelVersion` or `RegisteredModel` (or annotations to the `KServe InferenceService` if the model registry is 
not used) so that the {openshift-ai-connector-name} can add the information to the {product-very-short} entities it creates. For more details, see https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2.25/html/working_with_model_registries/working-with-model-registries_model-registry#editing-model-version-metadata-in-a-model-registry_model-registry[Editing model version metadata in a model registry]. +While {rhoai-short} provides essential data, an AI platform engineer using {rhoai-short} can enrich the {backstage}/{product-very-short} experience by adding custom properties to the `ModelVersion` or `RegisteredModel` (or annotations to the `KServe InferenceService` if the model registry is not used) so that the {openshift-ai-connector-name} can add the information to the {product-very-short} entities it creates. For more details, see {rhoai-docs-link}/working_with_model_registries/index#editing-model-version-metadata-in-a-model-registry_model-registry[Editing model version metadata in a model registry]. |=== |Property Key |Entity Field Populated |Description diff --git a/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc b/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc index 04efae323c..d4b15b9ccb 100644 --- a/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc +++ b/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc @@ -11,7 +11,7 @@ This offering interfaces with the {openshift-ai-connector-name-short}, model cat |Model Server (InferenceService) |Component |`model-server` -|Represents a running, accessible AI model endpoint. See https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2.25/html/configuring_your_model-serving_platform/index[Configuring your model-serving platform]. +|Represents a running, accessible AI model endpoint. 
See {rhoai-docs-link}/configuring_your_model-serving_platform/index[Configuring your model-serving platform]. |AI Model (Model Registry Version) |Resource @@ -26,7 +26,7 @@ This offering interfaces with the {openshift-ai-connector-name-short}, model catalog, and KServe-based Model Deployments (InferenceServices) to create familiar {backstage} entities. |Model Cards |TechDocs |N/A -|Model cards from the {rhoai-short} model catalog are associated with the Component and Resource entities. See https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2.25/html/working_with_the_model_catalog/registering-a-model-from-the-model-catalog_working-model-catalog[Registering a model from the model catalog]. +|Model cards from the {rhoai-short} model catalog are associated with the Component and Resource entities. See {rhoai-docs-link}/working_with_the_model_catalog/index#registering-a-model-from-the-model-catalog_working-model-catalog[Registering a model from the model catalog]. |=== Once the {openshift-ai-connector-name-short} is installed and connected with {rhoai-short}, the transfer of information commences automatically. 
\ No newline at end of file From b1f56143e77e1256672133e496c31d8278e1604c Mon Sep 17 00:00:00 2001 From: Priyanka Abel Date: Thu, 6 Nov 2025 18:33:22 +0530 Subject: [PATCH 09/11] Incorporated Judy's comments and Ben's --- .../proc-populating-the-api-definition-tab.adoc | 6 +++--- ...proc-troubleshooting-connector-functionality.adoc | 11 +++-------- .../ref-enrich-ai-model-metadata.adoc | 2 +- .../ref-model-to-entity-mapping.adoc | 2 ++ ...-ai-model-registry-and-model-catalog-queries.adoc | 12 ++++++------ 5 files changed, 15 insertions(+), 18 deletions(-) diff --git a/modules/openshift-ai-connector-for-rhdh/proc-populating-the-api-definition-tab.adoc b/modules/openshift-ai-connector-for-rhdh/proc-populating-the-api-definition-tab.adoc index bd72fbc373..b71596cde7 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-populating-the-api-definition-tab.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-populating-the-api-definition-tab.adoc @@ -1,9 +1,9 @@ :_mod-docs-content-type: PROCEDURE [id="proc-populating-the-api-definition-tab_{context}"] -= Populating the API Definition tab += Populating the API Definition tab in {product-very-short} API entities -The AI platform engineer must follow these steps to provide this valuable information because {rhoai-short} does not expose the OpenAPI specification by default. +Since {rhoai-short} does not expose the OpenAPI specification by default, the AI platform engineer can take the following steps to provide this valuable information: .Procedure @@ -24,4 +24,4 @@ We recommend using *Model Version* instead of *Registered Model* to maintain sta .. In the **Properties** section, set a key/value pair where the key is `API Spec` and the value is the entire JSON content from the `open-api.json` file. -. 
Propagation: The {openshift-ai-connector-name} periodically polls the {rhoai-short} Model Registry, propagates this JSON, and renders the interactive API documentation in the {product-very-short} API Entity *Definition* tab. \ No newline at end of file +. Propagation: The {openshift-ai-connector-name} periodically polls the {rhoai-short} Model Registry, propagates this JSON, and renders the interactive API documentation in the *Definition* tab of the {product-very-short} API entity. \ No newline at end of file diff --git a/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc b/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc index 2d923e8914..77a7c1c835 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc @@ -3,7 +3,7 @@ [id="proc-troubleshooting-connector-functionality_{context}"] = Troubleshooting Connector functionality -The connector system consists of the two dynamic plugins and the three model catalog bridge sidecar containers. Generally speaking, the logs collected must be provided to {company-name} Support for analysis. +The connector system consists of the two dynamic plugins and the three {openshift-ai-connector-name-short} sidecar containers. Generally speaking, the logs collected must be provided to {company-name} Support for analysis. The actual contents of the diagnostic data are not part of any product guaranteed specification, and can change at any time. @@ -40,14 +40,9 @@ View the {openshift-ai-connector-name}plugins in the `backstage-backend` contain To enable debug logging, set the `LOG_LEVEL` environment variable to `debug` on the `backstage-backend` container. For more information, see {monitoring-and-logging-book-link}[{monitoring-and-logging-book-title}]. 
-== Inspecting the Model Catalog Bridge +== Inspecting the {openshift-ai-connector-name-short} -The Model Catalog Bridge sidecars manage the data fetching and storage: - -[IMPORTANT] -==== -{openshift-ai-connector-name} collects feedback from users who engage with the feedback feature. If a user submits feedback, the feedback score (thumbs up or down), text feedback (if entered), the user query, and the LLM provider response are stored locally in the file system of the Pod. {company-name} does not have access to the collected feedback data. -==== +The {openshift-ai-connector-name-short} sidecars manage the data fetching and storage: . Check Cached Data (ConfigMap): The processed AI Model metadata is stored in a `ConfigMap`. + diff --git a/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc b/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc index c35b08e69b..d4ced2e4b2 100644 --- a/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc +++ b/modules/openshift-ai-connector-for-rhdh/ref-enrich-ai-model-metadata.adoc @@ -3,7 +3,7 @@ [id="ref-enrich-ai-model-metadata_{context}"] = Enrich AI model metadata for enhanced {product} experience -While {rhoai-short} provides essential data, an AI platform engineer using {rhoai-short} can enrich the {backstage}/{product-very-short} experience by adding custom properties to the `ModelVersion` or `RegisteredModel` (or annotations to the `KServe InferenceService` if the model registry is not used) so that the {openshift-ai-connector-name} can add the information to the {product-very-short} entities it creates. For more details, see {rhoai-docs-link}/working_with_model_registries/index#editing-model-version-metadata-in-a-model-registry_model-registry[Editing model version metadata in a model registry]. 
+While {rhoai-short} provides essential data, an AI platform engineer using {rhoai-short} can enrich the {backstage}/{product-very-short} experience by adding `custom properties` to the `ModelVersion` or `RegisteredModel` (or annotations to the `KServe InferenceService` if the model registry is not used) so that the {openshift-ai-connector-name} can add the information to the {product-very-short} entities it creates. For more details, see {rhoai-docs-link}/working_with_model_registries/index#editing-model-version-metadata-in-a-model-registry_model-registry[Editing model version metadata in a model registry]. |=== |Property Key |Entity Field Populated |Description diff --git a/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc b/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc index d4b15b9ccb..137395ea00 100644 --- a/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc +++ b/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc @@ -3,6 +3,8 @@ [id="ref-model-to-entity-mapping_{context}"] = Model-to-Entity mapping +{openshift-ai-connector-name-short} integrates with {openshift-ai-connector-name-short}, the model catalog, and KServe-based Model Deployments (InferenceServices). This integration automatically converts your AI/ML artifacts into familiar {backstage} entities, simplifying management and providing a unified view of your models. + This offering interfaces with the {openshift-ai-connector-name-short}, model catalog, and KServe-based Model Deployments (InferenceServices) to create familiar {backstage} entities. 
|=== diff --git a/modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc b/modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc index 906087c910..b87b896b2b 100644 --- a/modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc +++ b/modules/openshift-ai-connector-for-rhdh/ref-openshift-ai-model-registry-and-model-catalog-queries.adoc @@ -5,42 +5,42 @@ To access the same {rhoai-short} data as the connector, use `curl` to query the {rhoai-short} model registry and model catalog APIs, ensuring the `ServiceAccount` token has correct access control: -* Example: Fetch registered models +* Example showing how to fetch registered models + [source,bash] ---- curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/registered_models | jq ---- -* Example: Fetch model versions +* Example showing how to fetch model versions + [source,bash] ---- curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/model_versions | jq ---- -* Example: Fetch model artifacts +* Example showing how to fetch model artifacts + [source,bash] ---- curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/model_artifacts | jq ---- -* Example: Fetch inference services +* Example showing how to fetch inference services + [source,bash] ---- curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/inference_services | jq ---- -* Example: Fetch serving environments +* Example showing how to fetch serving environments + [source,bash] ---- curl -k -H "Authorization: Bearer $TOKEN" $RHOAI_MODEL_REGISTRY_URL/api/model_registry/v1alpha3/serving_environments | jq ---- -* Example: Fetch catalog sources +* Example showing how to fetch catalog sources + [source,bash] ---- From d70542f2a77a3a177c05702dfc23f18984cbddaf Mon 
Sep 17 00:00:00 2001 From: Priyanka Abel Date: Thu, 6 Nov 2025 19:11:08 +0530 Subject: [PATCH 10/11] Updated model registry bridge mentions to the new product name --- ...c-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc | 2 +- .../proc-troubleshooting-connector-functionality.adoc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc index d8863d2f56..fe09693448 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc @@ -173,7 +173,7 @@ plugins: ** If {product-very-short} was installed using the Operator, modify your {product-very-short} custom resource (CR) instance. ** If {product-very-short} was installed using the Helm charts, modify the *Deployment* specification. -. The system relies on three sidecar containers (model catalog bridge) running alongside the `backstage-backend` container. Add these sidecar containers to your configuration referencing the `rhdh-rhoai-bridge-token` Secret: +. The system relies on three sidecar containers ({openshift-ai-connector-name}) running alongside the `backstage-backend` container. Add these sidecar containers to your configuration referencing the `rhdh-rhoai-bridge-token` Secret: ** `location`: Provides the REST API for {product-very-short} plugins to fetch model metadata. ** `storage-rest`: Maintains a cache of AI Model metadata in a ConfigMap called `bac-import-model`. ** `rhoai-normalizer`: Acts as a Kubernetes controller and {rhoai-short} client, normalizing {rhoai-short} metadata for the connector. 
The following code block is an example: diff --git a/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc b/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc index 77a7c1c835..ead4a0074e 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-troubleshooting-connector-functionality.adoc @@ -23,7 +23,7 @@ The `install-dynamic-plugin` logs allow you to check the following installation == Inspecting plugin logs -View the {openshift-ai-connector-name}plugins in the `backstage-backend` container. Items to look for: +View the {openshift-ai-connector-name} plugins in the `backstage-backend` container. Items to look for: [cols="3,4,4"] |=== From 7fe206b8f9d1493ba4ce93204069d82412f7a3b2 Mon Sep 17 00:00:00 2001 From: Priyanka Abel Date: Thu, 6 Nov 2025 23:02:03 +0530 Subject: [PATCH 11/11] Incorporated Ben's comments and Gabe's comments --- ...hift-ai-connector-for-rhdh-with-rhoai.adoc | 36 +++++++++---------- .../ref-model-to-entity-mapping.adoc | 2 +- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc index fe09693448..20ac6a1fdc 100644 --- a/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc +++ b/modules/openshift-ai-connector-for-rhdh/proc-setting-up-openshift-ai-connector-for-rhdh-with-rhoai.adoc @@ -25,17 +25,17 @@ For more details, see {rhoai-docs-link}/enabling_the_model_registry_component/in . Configure {rhoai-short}-related RBAC and credentials. A Kubernetes `ServiceAccount` and a `service-account-token` Secret are required for the connector to retrieve data from {rhoai-short}. 
The following resources must be created, replacing namespace names (`ai-rhdh` for {product-very-short}, `rhoai-model-registries` for {rhoai-short}) as needed: -** `ServiceAccount` (`rhdh-rhoai-bridge`). For example: +** `ServiceAccount` (`rhdh-rhoai-connector`). For example: + [source,yaml] ---- apiVersion: v1 kind: ServiceAccount metadata: - name: rhdh-rhoai-bridge + name: rhdh-rhoai-connector namespace: ai-rhdh ---- -** `ClusterRole` and `ClusterRoleBinding` (`rhdh-rhoai-bridge`) to allow access to OCP resources like `routes`, `services`, and `inferenceservices`. For example: +** `ClusterRole` and `ClusterRoleBinding` (`rhdh-rhoai-connector`) to allow access to OCP resources like `routes`, `services`, and `inferenceservices`. For example: + [source,yaml] ---- @@ -43,7 +43,7 @@ metadata: apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: - name: rhdh-rhoai-bridge + name: rhdh-rhoai-connector annotations: argocd.argoproj.io/sync-wave: "0" rules: @@ -81,14 +81,14 @@ rules: apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: - name: rhdh-rhoai-bridge + name: rhdh-rhoai-connector roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole - name: rhdh-rhoai-bridge + name: rhdh-rhoai-connector subjects: - kind: ServiceAccount - name: rhdh-rhoai-bridge + name: rhdh-rhoai-connector namespace: ai-rhdh ---- ** `Role` and `RoleBinding` to allow ConfigMap updates within the {product-very-short} namespace. 
For example: @@ -99,7 +99,7 @@ subjects: apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: - name: rhdh-rhoai-bridge + name: rhdh-rhoai-connector namespace: ai-rhdh rules: - apiGroups: [""] @@ -131,28 +131,28 @@ subjects: apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: - name: rhdh-rhoai-bridge + name: rhdh-rhoai-connector namespace: ai-rhdh roleRef: apiGroup: rbac.authorization.k8s.io kind: Role - name: rhdh-rhoai-bridge + name: rhdh-rhoai-connector subjects: - kind: ServiceAccount - name: rhdh-rhoai-bridge + name: rhdh-rhoai-connector namespace: ai-rhdh ---- -** Secret (`rhdh-rhoai-bridge-token`) of type `kubernetes.io/service-account-token` that goes along with the `rhdh-rhoai-bridge` `ServiceAccount`. +** Secret (`rhdh-rhoai-connector-token`) of type `kubernetes.io/service-account-token` that goes along with the `rhdh-rhoai-connector` `ServiceAccount`. + [source,yaml] ---- apiVersion: v1 kind: Secret metadata: - name: rhdh-rhoai-bridge-token + name: rhdh-rhoai-connector-token namespace: ai-rhdh annotations: - kubernetes.io/service-account.name: rhdh-rhoai-bridge + kubernetes.io/service-account.name: rhdh-rhoai-connector type: kubernetes.io/service-account-token ---- @@ -173,7 +173,7 @@ plugins: ** If {product-very-short} was installed using the Operator, modify your {product-very-short} custom resource (CR) instance. ** If {product-very-short} was installed using the Helm charts, modify the *Deployment* specification. -. The system relies on three sidecar containers ({openshift-ai-connector-name}) running alongside the `backstage-backend` container. Add these sidecar containers to your configuration referencing the `rhdh-rhoai-bridge-token` Secret: +. The system relies on three sidecar containers ({openshift-ai-connector-name}) running alongside the `backstage-backend` container. 
Add these sidecar containers to your configuration referencing the `rhdh-rhoai-connector-token` Secret: ** `location`: Provides the REST API for {product-very-short} plugins to fetch model metadata. ** `storage-rest`: Maintains a cache of AI Model metadata in a ConfigMap called `bac-import-model`. ** `rhoai-normalizer`: Acts as a Kubernetes controller and {rhoai-short} client, normalizing {rhoai-short} metadata for the connector. The following code block is an example: @@ -198,7 +198,7 @@ spec: fieldPath: metadata.namespace envFrom: - secretRef: - name: rhdh-rhoai-bridge-token + name: rhdh-rhoai-connector-token image: quay.io/redhat-ai-dev/model-catalog-location-service@sha256:4f6ab6624a29f627f9f861cfcd5d18177d46aa2c67a81a75a1502c49bc2ff012 imagePullPolicy: Always @@ -228,7 +228,7 @@ spec: fieldPath: metadata.namespace envFrom: - secretRef: - name: rhdh-rhoai-bridge-token + name: rhdh-rhoai-connector-token image: quay.io/redhat-ai-dev/model-catalog-storage-rest@sha256:398095e7469e86d84b1196371286363f4b7668aa3e26370b4d78cb8d4ace1dc9 imagePullPolicy: Always @@ -250,7 +250,7 @@ spec: fieldPath: metadata.namespace envFrom: - secretRef: - name: rhdh-rhoai-bridge-token + name: rhdh-rhoai-connector-token image: quay.io/redhat-ai-dev/model-catalog-rhoai-normalizer@sha256:fe6c05d57495d6217c4d584940ec552c3727847ff60f39f5d04f94be024576d8 imagePullPolicy: Always diff --git a/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc b/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc index 137395ea00..aa919e71eb 100644 --- a/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc +++ b/modules/openshift-ai-connector-for-rhdh/ref-model-to-entity-mapping.adoc @@ -3,7 +3,7 @@ [id="ref-model-to-entity-mapping_{context}"] = Model-to-Entity mapping -{openshift-ai-connector-name-short} integrates with {openshift-ai-connector-name-short}, the model catalog, and KServe-based Model Deployments (InferenceServices). 
This integration automatically converts your AI/ML artifacts into familiar {backstage} entities, simplifying management and providing a unified view of your models. +Model-to-Entity mapping integrates with {openshift-ai-connector-name-short}, the model catalog, and KServe-based Model Deployments (InferenceServices). This integration automatically converts your AI/ML artifacts into familiar {backstage} entities, simplifying management and providing a unified view of your available AI models to your developer teams. This offering interfaces with the {openshift-ai-connector-name-short}, model catalog, and KServe-based Model Deployments (InferenceServices) to create familiar {backstage} entities.