|
| 1 | + |
| 2 | + |
| 3 | +# radar-s3-connector-strimzi |
| 4 | +[](https://artifacthub.io/packages/helm/radar-base/radar-s3-connector-strimzi) |
| 5 | + |
| 6 | +   |
| 7 | + |
| 8 | +A Helm chart for the RADAR-base S3 connector. This connector uses the Confluent S3 sink connector with custom data transformers. These configurations enable a sink connector. See the full list of properties here: https://docs.confluent.io/kafka-connect-s3-sink/current/configuration_options.html#s3-configuration-options
| 9 | + |
| 10 | +**Homepage:** <https://radar-base.org> |
| 11 | + |
| 12 | +## Maintainers |
| 13 | + |
| 14 | +| Name | Email | Url | |
| 15 | +| ---- | ------ | --- | |
| 16 | +| Pim van Nierop | <pim@thehyve.nl> | <https://www.thehyve.nl/experts/pim-van-nierop> | |
| 17 | + |
| 18 | +## Source Code |
| 19 | + |
| 20 | +* <https://github.com/RADAR-base/radar-helm-charts/tree/main/charts/radar-s3-connector> |
| 21 | +* <https://github.com/RADAR-base/kafka-connect-transform-keyvalue> |
| 22 | +* <https://docs.confluent.io/kafka-connect-s3-sink/current/configuration_options.html#s3-configuration-options> |
| 23 | + |
| 24 | +## Prerequisites |
| 25 | +* Kubernetes 1.28+ |
| 26 | +* Kubectl 1.28+ |
| 27 | +* Helm 3.1.0+ |
| 28 | + |
| 29 | +## Requirements |
| 30 | + |
| 31 | +| Repository | Name | Version | |
| 32 | +|------------|------|---------| |
| 33 | +| https://radar-base.github.io/radar-helm-charts | common | 2.x.x | |
| 34 | + |
| 35 | +## Values |
| 36 | + |
| 37 | +| Key | Type | Default | Description | |
| 38 | +|-----|------|---------|-------------| |
| 39 | +| replicaCount | int | `1` | Number of radar-s3-connector replicas to deploy | |
| 40 | +| image.registry | string | `"docker.io"` | Image registry | |
| 41 | +| image.repository | string | `"radarbase/kafka-connect-transform-s3"` | Image repository | |
| 42 | +| image.tag | string | `nil` | Image tag (immutable tags are recommended) Overrides the image tag whose default is the chart appVersion. | |
| 43 | +| image.digest | string | `""` | Image digest in the way sha256:aa.... Please note this parameter, if set, will override the tag | |
| 44 | +| image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | |
| 45 | +| image.pullSecrets | list | `[]` | Optionally specify an array of imagePullSecrets. Secrets must be manually created in the namespace. e.g: pullSecrets: - myRegistryKeySecretName | |
| 46 | +| nameOverride | string | `""` | String to partially override common.names.fullname template with a string (will prepend the release name) | |
| 47 | +| fullnameOverride | string | `""` | String to fully override common.names.fullname template with a string | |
| 48 | +| podSecurityContext | object | `{}` | Configure radar-s3-connector pods' Security Context | |
| 49 | +| securityContext | object | `{}` | Configure radar-s3-connector containers' Security Context | |
| 50 | +| service.type | string | `"ClusterIP"` | Kubernetes Service type | |
| 51 | +| service.port | int | `8083` | radar-s3-connector port | |
| 52 | +| resources.requests | object | `{"cpu":"100m","memory":"3Gi"}` | CPU/Memory resource requests | |
| 53 | +| jvmOptions.xmx | string | `"4g"` | | |
| 54 | +| jvmOptions.xms | string | `"3g"` | | |
| 55 | +| nodeSelector | object | `{}` | Node labels for pod assignment | |
| 56 | +| tolerations | list | `[]` | Toleration labels for pod assignment | |
| 57 | +| affinity | object | `{}` | Affinity labels for pod assignment | |
| 58 | +| secret.jaas | object | `{"key":"sasl.jaas.config","name":"shared-service-user"}` | Secret for the Kafka SASL JAAS configuration | |
| 59 | +| extraEnvVars | list | `[]` | Extra environment variables | |
| 60 | +| customLivenessProbe | object | `{}` | Custom livenessProbe that overrides the default one | |
| 61 | +| livenessProbe.enabled | bool | `true` | Enable livenessProbe | |
| 62 | +| livenessProbe.initialDelaySeconds | int | `5` | Initial delay seconds for livenessProbe | |
| 63 | +| livenessProbe.periodSeconds | int | `30` | Period seconds for livenessProbe | |
| 64 | +| livenessProbe.timeoutSeconds | int | `5` | Timeout seconds for livenessProbe | |
| 65 | +| livenessProbe.successThreshold | int | `1` | Success threshold for livenessProbe | |
| 66 | +| livenessProbe.failureThreshold | int | `3` | Failure threshold for livenessProbe | |
| 67 | +| customReadinessProbe | object | `{}` | Custom readinessProbe that overrides the default one | |
| 68 | +| readinessProbe.enabled | bool | `true` | Enable readinessProbe | |
| 69 | +| readinessProbe.initialDelaySeconds | int | `5` | Initial delay seconds for readinessProbe | |
| 70 | +| readinessProbe.periodSeconds | int | `30` | Period seconds for readinessProbe | |
| 71 | +| readinessProbe.timeoutSeconds | int | `5` | Timeout seconds for readinessProbe | |
| 72 | +| readinessProbe.successThreshold | int | `1` | Success threshold for readinessProbe | |
| 73 | +| readinessProbe.failureThreshold | int | `3` | Failure threshold for readinessProbe | |
| 74 | +| networkpolicy | object | check `values.yaml` | Network policy defines who can access this application and who this applications has access to | |
| 75 | +| schema_registry | string | `"http://radar-kafka-schema-registry:8081"` | URL of the Kafka schema registry | |
| 76 | +| radar_rest_sources_backend_url | string | `"http://radar-rest-sources-backend:8080/rest-sources/backend/"` | Base URL of the rest-sources-authorizer-backend service | |
| 77 | +| connector_num_tasks | string | `"5"` | Number of connector tasks to be used in kafka-connector spec properties | |
| 78 | +| catalogServer.url | string | `"http://catalog-server:9010"` | Catalog server URL | |
| 79 | +| topics | string | `""` | List of topics to be consumed by the sink connector separated by comma. Topics defined in the catalog server will automatically be loaded if `initTopics.enabled` is true. | |
| 80 | +| s3Endpoint | string | `"http://minio:9000/"` | Target S3 endpoint url | |
| 81 | +| s3Tagging | bool | `false` | Set to true if S3 objects should be tagged with start and end offsets, as well as record count. | |
| 82 | +| s3PartSize | int | `5242880` | The Part Size in S3 Multi-part Uploads. | |
| 83 | +| s3Region | string | `nil` | The AWS region to be used by the connector. Some compatibility layers require this. | |
| 84 | +| flushSize | int | `10000` | Number of records written to store before invoking file commits. | |
| 85 | +| rotateInterval | int | `900000` | The time interval in milliseconds to invoke file commits. | |
| 86 | +| maxTasks | int | `4` | Number of tasks in the connector | |
| 87 | +| bucketAccessKey | string | `"access_key"` | Access key of the target S3 bucket | |
| 88 | +| bucketSecretKey | string | `"secret"` | Secret key of the target S3 bucket | |
| 89 | +| bucketName | string | `"radar_intermediate_storage"` | Bucket name of the target S3 bucket | |
| 90 | +| cc.enabled | bool | `false` | Set to true if Confluent Cloud is used | |
| 91 | +| cc.bootstrapServerurl | string | `""` | Confluent cloud based Kafka broker URL (if Confluent Cloud based Kafka cluster is used) | |
| 92 | +| cc.schemaRegistryUrl | string | `""` | Confluent cloud based Schema registry URL (if Confluent Cloud based Schema registry is used) | |
| 93 | +| cc.apiKey | string | `"ccApikey"` | API Key of the Confluent Cloud cluster | |
| 94 | +| cc.apiSecret | string | `"ccApiSecret"` | API secret of the Confluent Cloud cluster | |
| 95 | +| cc.schemaRegistryApiKey | string | `"srApiKey"` | API Key of the Confluent Cloud Schema registry | |
| 96 | +| cc.schemaRegistryApiSecret | string | `"srApiSecret"` | API secret of the Confluent Cloud Schema registry | |
| 97 | +| initTopics.enabled | bool | `true` | If true, fetch list of topics from catalog server | |
| 98 | +| initTopics.image.repository | string | `"linuxserver/yq"` | Image repository to fetch topics with | |
| 99 | +| initTopics.image.tag | string | `"3.2.2"` | Image tag to fetch topics with | |
| 100 | +| initTopics.image.pullPolicy | string | `"IfNotPresent"` | Image pull policy to fetch topics with | |
| 101 | +| log4j | object | `{"rootLogLevel":"INFO"}` | Log4j configuration | |
| 102 | +| log4j.rootLogLevel | string | `"INFO"` | Root log level for the Kafka Connect instance | |
| 103 | +| sentry.dsn | string | `nil` | DSN (Data Source Name) of the sentry server | |
| 104 | +| sentry.level | string | `"ERROR"` | Log level for sentry (TRACE, DEBUG, INFO, WARN, or ERROR) | |
0 commit comments