diff --git a/README.md b/README.md
index 8ab7bf60e0..b0a5c9bdfa 100644
--- a/README.md
+++ b/README.md
@@ -177,6 +177,43 @@ migrated by following these steps:
    docker compose up -d
    ```
 
+### Local Kubernetes with Tilt
+
+#### Setup
+
+1. Install Tilt
+2. Install OpenTofu
+3. Install Minikube
+4. Install ctlptl
+
+Create the Minikube cluster with ctlptl:
+
+```console
+ctlptl create cluster minikube --registry=ctlptl-registry --minikube-start-flags "--memory=6g" --minikube-start-flags "--cpus=4"
+```
+
+Start ORT Server:
+
+```console
+tilt up
+```
+
+When you are done, destroy the cluster:
+
+```console
+ctlptl delete cluster minikube
+```
+
+To get the UI working, create the file `ui/.env.local` with the following content:
+
+```env
+VITE_CLIENT_ID=ort-server
+VITE_AUTHORITY=http://localhost:8081/realms/ort-server
+```
+
+and run the UI with `pnpm -C ui dev`.
+
+
 ### Accessing the services
 
 | Service | URL | Credentials |
diff --git a/Tiltfile b/Tiltfile
new file mode 100644
index 0000000000..4bdfb94e20
--- /dev/null
+++ b/Tiltfile
@@ -0,0 +1,304 @@
+load('ext://helm_remote', 'helm_remote')
+load('ext://helm_resource', 'helm_resource', 'helm_repo')
+load('ext://configmap', 'configmap_create', 'configmap_from_dict')
+load('ext://secret', 'secret_create_generic')
+
+update_settings(k8s_upsert_timeout_secs=120)
+
+# Pick the Gradle wrapper name for the host OS.
+gradlew = "./gradlew"
+if os.name == "nt":
+    gradlew = "gradlew.bat"
+
+k8s_yaml('./scripts/kubernetes/namespace.yaml')
+
+helm_resource(
+    'postgresql',
+    'bitnami/postgresql',
+    resource_deps=['bitnami'],
+    namespace='ort-server',
+    flags=[
+        '--set=global.postgresql.auth.postgresPassword=postgres',
+        '--set=global.postgresql.auth.database=ort',
+    ],
+    labels=['ort-server']
+)
+
+helm_repo('bitnami', 'https://charts.bitnami.com/bitnami', labels=['helm_repos'])
+
+configmap_create('ort-core-secrets',
+    namespace='ort-server',
+    from_file=['secrets.properties=./scripts/compose/secrets.properties'])
+
+secret_create_generic('ort-secrets',
+    namespace='ort-server',
+    from_file=['secrets.properties=./scripts/compose/secrets.properties'])
+
+secret_create_generic('ort-core-secrets',
+    namespace='ort-server',
+    from_file=['secrets.properties=./scripts/compose/secrets.properties']
+)
+
+secret_create_generic('ort-config-secret',
+    namespace='ort-server',
+    from_file=[
+        'evaluator.rules.kts=./scripts/compose/config/evaluator.rules.kts',
+        'ort-server.params.kts=./scripts/compose/config/ort-server.params.kts',
+    ]
+)
+
+helm_resource(
+    'keycloak',
+    'bitnami/keycloak',
+    resource_deps=['bitnami'],
+    namespace='ort-server',
+    flags=[
+        '--version=21.4.5',
+        '--set=auth.adminUser=keycloak-admin',
+        '--set=auth.adminPassword=keycloak-admin',
+        '--set=extraStartupArgs=--hostname-strict-backchannel=false',
+    ],
+    labels=['keycloak']
+)
+
+local_resource('keycloak-terraform',
+    resource_deps=['keycloak'],
+    cmd='cd ./scripts/kubernetes/keycloak && tofu init && tofu apply -auto-approve',
+    deps=['./scripts/kubernetes/keycloak/keycloak.tf'],
+    labels=['keycloak']
+)
+
+# The Keycloak port forward has to be set up manually because the chart deploys multiple containers,
+# and a port forward configured via helm_resource may hit the database instead of Keycloak.
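+# For reference, the selector-based forward configured below is roughly equivalent to running
+# `kubectl port-forward -n ort-server pod/keycloak-0 8081:8080` by hand; the pod name comes from
+# the statefulset selector passed to extra_pod_selectors.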
+k8s_resource(
+    workload='keycloak',
+    port_forwards=["8081:8080"],
+    extra_pod_selectors={'statefulset.kubernetes.io/pod-name': 'keycloak-0'},
+    discovery_strategy='selectors-only')
+
+helm_resource(
+    'rabbitmq',
+    'bitnami/rabbitmq',
+    resource_deps=['bitnami'],
+    namespace='ort-server',
+    flags=[
+        '--version=14.4.6',
+        "--set=auth.username=admin",
+        "--set=auth.password=admin",
+    ],
+    labels=['rabbitmq']
+)
+
+k8s_resource(
+    workload='rabbitmq',
+    port_forwards=["15672"],
+)
+
+local_resource('rabbitmq-terraform',
+    resource_deps=['rabbitmq'],
+    cmd='cd ./scripts/kubernetes/rabbitmq && tofu init && tofu apply -auto-approve',
+    deps=['./scripts/kubernetes/rabbitmq/rabbitmq.tf'],
+    labels=['rabbitmq']
+)
+
+helm_repo('kiwigrid', 'https://kiwigrid.github.io', labels=['helm_repos'])
+
+helm_resource(
+    'graphite',
+    'kiwigrid/graphite',
+    resource_deps=['kiwigrid'],
+    namespace='ort-server',
+    labels=['monitoring'],
+)
+
+custom_build(
+    'core',
+    './gradlew :core:jibDockerBuild --image $EXPECTED_REF',
+    live_update=[
+        sync('./core/build/classes/kotlin/main', '/app/classes')
+    ],
+    deps=['./core/build/classes', './core/build.gradle.kts'],
+)
+
+k8s_resource(
+    workload='ort-server-core',
+    port_forwards=[
+        port_forward(8080, 8080, "API Endpoint")],
+    links=[
+        link('http://localhost:8080/swagger-ui', "Swagger UI"),
+    ],
+    resource_deps=['keycloak', 'rabbitmq', 'rabbitmq-terraform', 'graphite', 'postgresql', 'keycloak-terraform'],
+    labels=['ort-server'],
+)
+
+configmap_create('ort-orchestrator-config',
+    namespace='ort-server',
+    from_file=['application.conf=./scripts/kubernetes/orchestrator.application.conf'])
+
+secret_create_generic('ort-config-worker-config',
+    secret_type='generic',
+    namespace='ort-server',
+    from_file=['application.conf=./scripts/kubernetes/config.application.conf'],
+)
+
+k8s_resource(
+    workload='ort-server-orchestrator',
+    resource_deps=['keycloak', 'rabbitmq', 'rabbitmq-terraform', 'graphite', 'postgresql', 'ort-server-core', 'worker-base-images'],
+    labels=['ort-server'],
+)
+
+custom_build(
+    'ort-server-orchestrator',
+    './gradlew :orchestrator:jibDockerBuild --image $EXPECTED_REF',
+    live_update=[
+        sync('./orchestrator/build/classes/kotlin/main', '/app/classes')
+    ],
+    deps=['./orchestrator/build/classes', './orchestrator/build.gradle.kts', './scripts/kubernetes/orchestrator.application.conf'],
+)
+
+k8s_yaml('./scripts/kubernetes/core.yaml')
+k8s_yaml('./scripts/kubernetes/orchestrator.yaml')
+
+# Worker images
+
+local_resource(
+    'worker-base-images',
+    cmd='./gradlew buildAllWorkerImages',
+    deps=[
+        './workers/analyzer/docker/Analyzer.Dockerfile',
+        './workers/config/docker/Config.Dockerfile',
+        './workers/evaluator/docker/Evaluator.Dockerfile',
+        './workers/notifier/docker/Notifier.Dockerfile',
+        './workers/reporter/docker/Reporter.Dockerfile',
+        './workers/scanner/docker/Scanner.Dockerfile'
+    ],
+    labels=["ort-server"]
+)
+
+custom_build(
+    'advisor-worker-image',
+    './gradlew :workers:advisor:jibDockerBuild --image $EXPECTED_REF',
+    live_update=[
+        sync('./workers/advisor/build/classes/kotlin/main', '/app/classes')
+    ],
+    deps=['./workers/advisor/build/classes', './workers/advisor/build.gradle.kts'],
+    match_in_env_vars=True,
+)
+
+custom_build(
+    'analyzer-worker-image',
+    './gradlew :workers:analyzer:jibDockerBuild --image $EXPECTED_REF',
+    live_update=[
+        sync('./workers/analyzer/build/classes/kotlin/main', '/app/classes')
+    ],
+    deps=['./workers/analyzer/build/classes', './workers/analyzer/build.gradle.kts'],
+    match_in_env_vars=True,
+)
+
+custom_build(
+    'config-worker-image',
+    './gradlew :workers:config:jibDockerBuild --image $EXPECTED_REF',
+    live_update=[
+        sync('./workers/config/build/classes/kotlin/main', '/app/classes')
+    ],
+    deps=['./workers/config/build/classes', './workers/config/build.gradle.kts'],
+    match_in_env_vars=True,
+)
+
+custom_build(
+    'evaluator-worker-image',
+    './gradlew :workers:evaluator:jibDockerBuild --image $EXPECTED_REF',
+    live_update=[
+        sync('./workers/evaluator/build/classes/kotlin/main', '/app/classes')
+    ],
+    deps=['./workers/evaluator/build/classes', './workers/evaluator/build.gradle.kts'],
+    match_in_env_vars=True,
+)
+
+custom_build(
+    'notifier-worker-image',
+    './gradlew :workers:notifier:jibDockerBuild --image $EXPECTED_REF',
+    live_update=[
+        sync('./workers/notifier/build/classes/kotlin/main', '/app/classes')
+    ],
+    deps=['./workers/notifier/build/classes', './workers/notifier/build.gradle.kts'],
+    match_in_env_vars=True,
+)
+
+custom_build(
+    'reporter-worker-image',
+    './gradlew :workers:reporter:jibDockerBuild --image $EXPECTED_REF',
+    live_update=[
+        sync('./workers/reporter/build/classes/kotlin/main', '/app/classes')
+    ],
+    deps=['./workers/reporter/build/classes', './workers/reporter/build.gradle.kts'],
+    match_in_env_vars=True,
+)
+
+custom_build(
+    'scanner-worker-image',
+    './gradlew :workers:scanner:jibDockerBuild --image $EXPECTED_REF',
+    live_update=[
+        sync('./workers/scanner/build/classes/kotlin/main', '/app/classes')
+    ],
+    deps=['./workers/scanner/build/classes', './workers/scanner/build.gradle.kts'],
+    match_in_env_vars=True,
+)
+
+helm_repo('grafana-repo', 'https://grafana.github.io/helm-charts', labels=['helm_repos'])
+
+helm_resource(
+    'loki-stack',
+    'grafana/loki-stack',
+    resource_deps=['grafana-repo'],
+    namespace='ort-server',
+    flags=[
+        '--values=./scripts/kubernetes/loki-values.yaml',
+    ],
+    deps=['./scripts/kubernetes/loki-values.yaml'],
+    labels=['monitoring'],
+)
+
+k8s_yaml('./scripts/kubernetes/alloy-config.yaml')
+
+helm_resource(
+    'alloy',
+    'grafana/alloy',
+    resource_deps=['grafana-repo'],
+    namespace='ort-server',
+    flags=[
+        '--values=./scripts/kubernetes/grafana-alloy-values.yaml',
+    ],
+    deps=['./scripts/kubernetes/grafana-alloy-values.yaml', './scripts/kubernetes/alloy-config.yaml'],
+    labels=['monitoring'],
+)
+
+k8s_resource(
+    workload='loki-stack',
+    port_forwards=["19000:3000"],
+    extra_pod_selectors={'app.kubernetes.io/name': 'grafana'},
+    discovery_strategy='selectors-only')
+
+custom_build(
+    'kubernetes-jobmonitor-image',
+    './gradlew :transport:kubernetes-jobmonitor:jibDockerBuild --image $EXPECTED_REF',
+    live_update=[
+        sync('./transport/kubernetes-jobmonitor/build/classes/kotlin/main', '/app/classes')
+    ],
+    deps=[
+        './transport/kubernetes-jobmonitor/build/classes',
+        './transport/kubernetes-jobmonitor/build.gradle.kts',
+        './scripts/kubernetes/jobmonitor.application.conf'],
+    match_in_env_vars=True,
+)
+
+configmap_create('ort-jobmonitor-config',
+    namespace='ort-server',
+    from_file=['application.conf=./scripts/kubernetes/jobmonitor.application.conf'])
+
+k8s_yaml('./scripts/kubernetes/kubernetes-jobmonitor.yaml')
+
+k8s_resource(
+    workload='kubernetes-jobmonitor',
+    resource_deps=['rabbitmq', 'rabbitmq-terraform', 'postgresql'],
+    labels=['ort-server'],
+)
diff --git a/scripts/kubernetes/README.md b/scripts/kubernetes/README.md
new file mode 100644
index 0000000000..75362ecc7e
--- /dev/null
+++ b/scripts/kubernetes/README.md
@@ -0,0 +1,24 @@
+# Setup
+
+## Keycloak
+
+Exported the following clients from the Docker Compose setup through the UI and imported them into the
+Kubernetes setup through the UI:
+
+1. ort-server
+2. ort-server-ui
+3. ort-server-ui-dev
+
+Turned client authentication on for the ort-server client and added "Service accounts roles" as an
+authentication flow. Created a client secret and added it to the manifest as "KEYCLOAK_API_SECRET".
+Added the admin role to the service account roles.
+
+Created the client scope "ort-server-client" with "Display on consent screen" turned off.
+
+Configured a mapper:
+
+- Mapper type: Audience
+- Name: ORT-server-audience-mapping
+- Included Client Audience: ort-server
+
+Added the client scope to ort-server-ui-dev.
diff --git a/scripts/kubernetes/alloy-config.yaml b/scripts/kubernetes/alloy-config.yaml
new file mode 100644
index 0000000000..7eb4a94e73
--- /dev/null
+++ b/scripts/kubernetes/alloy-config.yaml
@@ -0,0 +1,145 @@
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: alloy-config
+  namespace: ort-server
+data:
+  config.alloy: |
+    logging {
+      level = "info"
+      format = "logfmt"
+    }
+
+    discovery.kubernetes "ort_server_deployment_pods" {
+      role = "pod"
+
+      selectors {
+        role = "pod"
+        label = "app=ort-server"
+      }
+
+      namespaces {
+        own_namespace = true
+      }
+    }
+
+    discovery.relabel "ort_server_deployment_containers" {
+      targets = discovery.kubernetes.ort_server_deployment_pods.targets
+
+      rule {
+        action = "labelmap"
+        regex = "__meta_kubernetes_(namespace|(pod_(node_name|name|container_image)))"
+      }
+
+      rule {
+        source_labels = ["__meta_kubernetes_pod_label_app"]
+        target_label = "app"
+      }
+
+      rule {
+        source_labels = ["__meta_kubernetes_pod_label_component"]
+        target_label = "component"
+      }
+
+      rule {
+        action = "keep"
+        source_labels = ["__meta_kubernetes_pod_container_name"]
+        regex = "^(ort-server|kubernetes-jobmonitor)$"
+      }
+    }
+
+    discovery.kubernetes "ort_server_worker_pods" {
+      role = "pod"
+
+      selectors {
+        role = "pod"
+        label = "ort-worker in (advisor, analyzer, config, evaluator, reporter, scanner)"
+      }
+
+      namespaces {
+        own_namespace = true
+      }
+    }
+
+    discovery.relabel "ort_server_worker_relabelled" {
+      targets = discovery.kubernetes.ort_server_worker_pods.targets
+
+      rule {
+        action = "labelmap"
+        regex = "__meta_kubernetes_(namespace|(pod_(node_name|name|container_image)))"
+      }
+
+      rule {
+        source_labels = ["__meta_kubernetes_pod_label_ort_worker"]
+        target_label = "component"
+      }
+
+      rule {
+        source_labels = ["__meta_kubernetes_pod_label_run_id"]
+        target_label = "run_id"
+      }
+    }
+
+    loki.source.kubernetes "ort_server_deployment_logs" {
+      targets = discovery.relabel.ort_server_deployment_containers.output
+      forward_to = [loki.process.parse_logs.receiver]
+    }
+
+    loki.source.kubernetes "ort_server_worker_logs" {
+      targets = discovery.relabel.ort_server_worker_relabelled.output
+      forward_to = [loki.process.parse_logs.receiver]
+    }
+
+    loki.process "parse_logs" {
+      forward_to = [loki.write.cockpit_logs_endpoint.receiver]
+
+      stage.regex {
+        expression = "^(?P