// Copyright 2025 Redpanda Data, Inc.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.md
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0

//go:build !gotohelm

package console

import (
	"strings"

	"github.com/cockroachdb/errors"
	"github.com/itchyny/gojq"
)

// ConfigFromV2 transforms a Console v2 configuration into a v3 configuration.
// It additionally returns a string slice containing human-readable
// descriptions of fields that could not be migrated.
//
// Unknown or invalid data is left in place for Console itself to handle (by
// crashing / logging).
//
// V2 Config:
// - https://github.com/redpanda-data/console/blob/v2.8.10/backend/pkg/config/config.go
// - https://github.com/redpanda-data/console-enterprise/blob/v2.8.10/backend/pkg/config/config.go
//
// V3 Config:
// - https://github.com/redpanda-data/console/blob/v3.2.2/backend/pkg/config/config.go
// - https://github.com/redpanda-data/console-enterprise/blob/v3.2.2/backend/pkg/config/config.go
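//
// As a rough illustration (a hypothetical, incomplete v2 input), a snippet
// such as
//
//	v2 := map[string]any{
//		"login":   map[string]any{"jwtSecret": "s3cr3t"},
//		"connect": map[string]any{"enabled": true},
//	}
//	v3, warnings, err := ConfigFromV2(v2)
//
// would, among the defaults added by the mappings below, set
// authentication.jwtSigningKey to "s3cr3t" and rename the connect block to
// kafkaConnect.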
func ConfigFromV2(v2 map[string]any) (map[string]any, []string, error) {
	v3 := make(map[string]any)

	for _, m := range mappings {
		val, ok, err := execQueryScalar[any](v2, m.code)
		if err != nil {
			return nil, nil, errors.Wrapf(err, "failed to execute query for path %v", m.dst)
		}
		if !ok {
			continue
		}
		if err := setPath(v3, m.dst, val); err != nil {
			return nil, nil, errors.Wrapf(err, "failed to set path %v", m.dst)
		}
	}

	// Generate warnings
	var warnings []string
	for _, code := range warningQueries {
		results, err := execQuery[string](v2, code)
		if err != nil {
			return nil, nil, errors.Wrap(err, "failed to generate warnings")
		}
		warnings = append(warnings, results...)
	}

	return v3, warnings, nil
}

type mappingSpec struct {
	dst   string
	query string
}

type mapping struct {
	dst  []string
	code *gojq.Code
}

// mappings is a slice of destination-path / JQ-query pairs that together
// migrate a Console v2 config to a Console v3 config. Its behavior is exactly
// the same as the converter in our public docs (Thanks Jake!):
// https://github.com/redpanda-data/docs-ui/blob/d55545f392e0a9aaa0dbd193606a2b629d779699/console-config-migrator/main.go#L25
// Due to module dependency conflicts and the quasi-closed-source nature of
// Console, typing the configurations was deemed a non-option. JQ was chosen
// because it's relatively well known and substantially easier to comprehend
// than map[string]any manipulations.
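//
// For example, the entry {"authentication.jwtSigningKey", ".login.jwtSecret"}
// reads .login.jwtSecret from the v2 config and, if present, writes it to
// authentication.jwtSigningKey in the v3 config. An empty destination merges
// the query result directly into the v3 root.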
var mappings = compileMappings([]mappingSpec{
	// Authentication
	{"authentication.jwtSigningKey", ".login.jwtSecret"},
	{"authentication.useSecureCookies", ".login.useSecureCookies"},
	{"authentication.basic", ".login.plain"},
	{"authentication.oidc", `.login // {} | to_entries | sort_by(.key) | map(select(.value.enabled?) | .value) | first | del(.realm, .directory)`},

	// Schema Registry
	{"schemaRegistry.authentication.basic.username", ".kafka.schemaRegistry.username"},
	{"schemaRegistry.authentication.basic.password", ".kafka.schemaRegistry.password"},
	{"schemaRegistry.authentication.bearerToken", ".kafka.schemaRegistry.bearerToken"},
	{"schemaRegistry.authentication.impersonateUser", `.kafka.schemaRegistry | select(.) | .username == null`},
	{"schemaRegistry", `.kafka.schemaRegistry | del(.username, .password, .bearerToken)`},

	// Kafka
	{"kafka.sasl.enabled", "true"},
	{"kafka.sasl.impersonateUser", "true"},
	{"kafka", `.kafka | del(.schemaRegistry, .protobuf, .cbor, .messagePack)`},

	// Serde
	{"serde.protobuf", ".kafka.protobuf"},
	{"serde.cbor", ".kafka.cbor"},
	{"serde.messagePack", ".kafka.messagePack"},
	{"serde.maxDeserializationPayloadSize", ".console.maxDeserializationPayloadSize"},

	// Connect rename
	{"kafkaConnect", ".connect"},

	// Redpanda adminApi
	{"redpanda", `.redpanda | del(.adminApi.username, .adminApi.password)`},
	{"redpanda.adminApi.authentication.basic.username", ".redpanda.adminApi.username"},
	{"redpanda.adminApi.authentication.basic.password", ".redpanda.adminApi.password"},
	{"redpanda.adminApi.authentication.impersonateUser", `.redpanda.adminApi | select(.) | .username == null`},

	// RoleBindings
	{"authorization.roleBindings", `.roleBindings | map({
		roleName: .roleName,
		users: [.subjects[] | select(.kind == "user" or .kind == null) | {
			name: .name,
			loginType: (if .provider == "Plain" then "basic" else "oidc" end)
		}]
	})?`},

	// Copy remaining top-level fields (empty dst means merge into root)
	{"", "del(.connect, .console, .enterprise, .kafka, .login, .roleBindings, .redpanda)"},
})

// warningQueries is a slice of JQ queries that produce warnings about
// configurations that cannot be migrated to Console v3.
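//
// For example (hypothetical input), a v2 config with both "google" and "okta"
// enabled under login would produce a warning roughly like:
// "Elected 'google' as OIDC provider out of ["google","okta"]. Only one
// provider is supported in v3."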
var warningQueries = compileWarnings([]string{
	`.login // {} | to_entries | sort_by(.key) | map(select(.value.enabled?)) | select(length > 1) | "Elected '\(.[0].key)' as OIDC provider out of \(map(.key)). Only one provider is supported in v3."`,
	`.login // {} | to_entries | sort_by(.key) | .[] | select(.value.enabled? and .value.realm? != null) | "Removed 'realm' from '\(.key)'. OIDC groups are not supported in v3. Create Roles in Redpanda instead."`,
	`.login // {} | to_entries | sort_by(.key) | .[] | select(.value.enabled? and .value.directory? != null) | select(length > 1) | "Removed 'directory' from '\(.key)'. OIDC groups are not supported in v3. Create Roles in Redpanda instead."`,
	`.roleBindings.[]? | . as $binding | .subjects.[]? | select(.kind != "user") | "Removed group subject from role binding '\($binding.roleName)'. Groups are not supported in v3."`,
})

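// compileMappings splits each spec's destination path on "." and compiles its
// JQ query, panicking on invalid queries (they are programmer error).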
func compileMappings(specs []mappingSpec) []mapping {
	mappings := make([]mapping, len(specs))
	for i, spec := range specs {
		var dst []string
		if spec.dst != "" {
			dst = strings.Split(spec.dst, ".")
		}
		mappings[i] = mapping{
			dst:  dst,
			code: mustCompile(spec.query),
		}
	}
	return mappings
}

func compileWarnings(queries []string) []*gojq.Code {
	compiled := make([]*gojq.Code, len(queries))
	for i, query := range queries {
		compiled[i] = mustCompile(query)
	}
	return compiled
}

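// execQuery runs a compiled JQ program against data and collects every result
// of type T, skipping nil results and surfacing any errors gojq yields.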
func execQuery[T any](data map[string]any, code *gojq.Code) ([]T, error) {
	iter := code.Run(data)

	var results []T
	for {
		result, ok := iter.Next()
		if !ok {
			break
		}

		// gojq can produce some strange results; each case below handles one
		// of them.
		switch result := result.(type) {
		// Errors are returned through .Next and need to be checked.
		case error:
			return nil, errors.WithStack(result)
		// nil (untyped) can be returned directly. We don't want to emit nils, so skip.
		case nil:
			continue
		// nil maps in an any box (any(map[string]any(nil))) can also be
		// returned. They need to be unboxed and explicitly checked.
		case map[string]any:
			if result == nil {
				continue
			}
		}

		results = append(results, result.(T))
	}

	return results, nil
}

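// execQueryScalar runs a compiled JQ program against data and returns its
// single result, if any. The boolean reports whether exactly one non-zero
// value was produced.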
func execQueryScalar[T comparable](data map[string]any, code *gojq.Code) (T, bool, error) {
	var zero T
	out, err := execQuery[T](data, code)
	if err != nil {
		return zero, false, err
	}
	if len(out) == 1 {
		return out[0], out[0] != zero, nil
	}
	return zero, false, nil
}

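// mustCompile parses and compiles a JQ expression, panicking if it is
// invalid. It is only called with the package-level queries above, so
// failures surface at package initialization.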
func mustCompile(expr string) *gojq.Code {
	query, err := gojq.Parse(expr)
	if err != nil {
		panic(err)
	}

	code, err := gojq.Compile(query)
	if err != nil {
		panic(err)
	}
	return code
}

// setPath sets a value in a nested map structure using a slice of keys.
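// Intermediate maps are created as needed; if both the existing and new
// values at the final key are maps they are merged, otherwise the new value
// overwrites. For example:
//
//	m := map[string]any{}
//	_ = setPath(m, []string{"kafka", "sasl", "enabled"}, true)
//	// m is now map[kafka:map[sasl:map[enabled:true]]]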
func setPath(m map[string]any, path []string, value any) error {
	// Special case: if path is empty, merge value into m.
	if len(path) == 0 {
		valueMap, ok := value.(map[string]any)
		if !ok {
			return errors.Newf("cannot merge non-map value into root: %T", value)
		}
		mergeMaps(m, valueMap)
		return nil
	}

	curr := m
	for i := 0; i < len(path)-1; i++ {
		key := path[i]
		if _, exists := curr[key]; !exists {
			curr[key] = make(map[string]any)
		}

		next, ok := curr[key].(map[string]any)
		if !ok {
			return errors.Newf("cannot traverse through non-map at key %q: %T", key, curr[key])
		}
		curr = next
	}

	lastKey := path[len(path)-1]
	existing, exists := curr[lastKey]
	if !exists {
		curr[lastKey] = value
		return nil
	}

	// If both existing and new values are maps, merge them.
	existingMap, existingIsMap := existing.(map[string]any)
	valueMap, valueIsMap := value.(map[string]any)
	if existingIsMap && valueIsMap {
		mergeMaps(existingMap, valueMap)
	} else {
		// Otherwise overwrite.
		curr[lastKey] = value
	}

	return nil
}

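// mergeMaps recursively merges src into dst. Nested maps are merged;
// everything else in src overwrites the corresponding key in dst.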
func mergeMaps(dst, src map[string]any) {
	for k, v := range src {
		existing, exists := dst[k]
		if !exists {
			dst[k] = v
			continue
		}

		// If both are maps, merge recursively.
		dstMap, dstIsMap := existing.(map[string]any)
		srcMap, srcIsMap := v.(map[string]any)
		if dstIsMap && srcIsMap {
			mergeMaps(dstMap, srcMap)
		} else {
			// Otherwise overwrite.
			dst[k] = v
		}
	}
}