/*
 * Copyright 2013-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.kubernetes.commons.config;

import java.io.IOException;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;

import org.apache.commons.logging.LogFactory;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.reader.UnicodeReader;

import org.springframework.core.CollectionFactory;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.Resource;
import org.springframework.core.log.LogAccessor;
import org.springframework.lang.Nullable;
import org.springframework.util.StringUtils;

import static org.springframework.beans.factory.config.YamlProcessor.DocumentMatcher;
import static org.springframework.beans.factory.config.YamlProcessor.MatchStatus;
import static org.springframework.cloud.kubernetes.commons.config.Constants.SPRING_CONFIG_ACTIVATE_ON_PROFILE;
import static org.springframework.cloud.kubernetes.commons.config.Constants.SPRING_PROFILES;

/**
 * A class based on
 * {@link org.springframework.beans.factory.config.YamlPropertiesFactoryBean} that takes
 * care to override profile-based collections and maps.
 *
 * We can't reuse the yaml loading that spring-boot provides via
 * {@link org.springframework.boot.env.YamlPropertySourceLoader} (and thus OriginTrackedYamlLoader),
 * because spring-boot loads every single yaml document of a file into a separate PropertySource.
 * We, on the other hand, have to load all yaml documents into a single Properties instance,
 * which ends up as a single PropertySource. This is because we first have to read configmaps / secrets,
 * and only at that point do we know whether a yaml contains more than one document.
 *
 * As such, we mimic what spring-boot achieves by creating our own yaml reader, which is heavily based
 * on YamlPropertiesFactoryBean.
 *
 * This is how it works (an illustration follows the list):
 *
 * <ul>
 * <li>read all the documents contained in a yaml file</li>
 * <li>flatten all properties except collections and maps
 * (YamlPropertiesFactoryBean, in contrast, flattens everything right away)</li>
 * <li>keep only the documents that match the document matchers</li>
 * <li>split them in two: those that have profile activation and those that don't</li>
 * <li>override properties from the non-profile based yamls with the ones from the profile based ones.
 * This achieves the same result as a plain spring-boot app, where profile based properties have higher
 * precedence.</li>
 * <li>once the overriding has happened, do another flattening, this time including collections and maps</li>
 * </ul>
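 *
 * For illustration only (the values are made up), assume a ConfigMap entry holds the two yaml
 * documents below and the document matchers accept both of them (e.g. because the 'dev' profile
 * is active): <pre>
 * ldap:
 *   enabled: false
 * ---
 * spring:
 *   config:
 *     activate:
 *       on-profile: dev
 * ldap:
 *   enabled: true
 * </pre> Both documents end up in one properties map and, since profile-activated documents are
 * applied last, 'ldap.enabled' resolves to 'true'.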
 *
 * @author wind57
 */
final class ProfileActivationAwareYamlPropertiesFactoryBean {

    private static final LogAccessor LOG = new LogAccessor(LogFactory.getLog(ProfileActivationAwareYamlPropertiesFactoryBean.class));

    private List<DocumentMatcher> documentMatchers = Collections.emptyList();

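    /**
     * Reads the given yaml source (which may contain multiple documents) and turns it into a single,
     * flattened properties map. A minimal usage sketch, with a made-up {@code yamlFromConfigMap}
     * variable and a matcher that abstains for every document: <pre>
     * ProfileActivationAwareYamlPropertiesFactoryBean factoryBean = new ProfileActivationAwareYamlPropertiesFactoryBean();
     * factoryBean.setDocumentMatchers(properties -> MatchStatus.ABSTAIN);
     * factoryBean.createProperties(yamlFromConfigMap);
     * </pre>
     */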
    Map<String, Object> createProperties(String source) {
        LinkedHashMap<String, Object> finalMap = new LinkedHashMap<>();
        Yaml yaml = new Yaml();

        Resource resource = new ByteArrayResource(source.getBytes(StandardCharsets.UTF_8));
        try (Reader reader = new UnicodeReader(resource.getInputStream())) {
            Iterable<Object> iterable = yaml.loadAll(reader);

            List<LinkedHashMap<String, Object>> allYamlDocuments = new ArrayList<>();

            // 1. read all the documents contained in the yaml (there might be more than one).
            // We flatten all properties except collections and maps; this is needed so that
            // they can be properly overridden later on.
            for (Object singleYamlDocument : iterable) {
                if (singleYamlDocument != null) {
                    LinkedHashMap<String, Object> flattenedMap = new LinkedHashMap<>();
                    Map<String, Object> originalSource = asMap(singleYamlDocument);
                    buildFlattenedMapWithoutComplexObjects(flattenedMap, originalSource, null);
                    allYamlDocuments.add(flattenedMap);
                }
            }

            // 2. keep only the documents that match the document matchers
            List<LinkedHashMap<String, Object>> yamlDocumentsMatchedAgainstDocumentMatchers = filterBasedOnDocumentMatchers(
                    allYamlDocuments);

            // 3. split them in two: ones that do not have any profile activation
            // and ones that do have profile activation.
            Map<Boolean, List<LinkedHashMap<String, Object>>> partitioned = yamlDocumentsMatchedAgainstDocumentMatchers
                .stream()
                .collect(Collectors.partitioningBy(
                        x -> !x.containsKey(SPRING_CONFIG_ACTIVATE_ON_PROFILE) && !x.containsKey(SPRING_PROFILES)));

            LOG.debug(() -> "non-profile source : " + partitioned.get(Boolean.TRUE));
            LOG.debug(() -> "profile source : " + partitioned.get(Boolean.FALSE));

            // 4. once they are split, iterate and compute a single properties map
            // (with collections and maps not yet flattened), but correctly overridden:
            // non-profile-based sources come first, profile-based ones last.

            LinkedHashMap<String, Object> flattenedWithoutComplexObjects = new LinkedHashMap<>();
            partitioned.get(Boolean.TRUE).forEach(flattenedWithoutComplexObjects::putAll);
            partitioned.get(Boolean.FALSE).forEach(flattenedWithoutComplexObjects::putAll);

            // 5. now that the correct order is known, do the final flattening
            buildFlattenedMap(finalMap, flattenedWithoutComplexObjects, null);

            LOG.debug(() -> "final source : " + finalMap);

        }
        catch (IOException e) {
            throw new RuntimeException(e);
        }

        return finalMap;
    }

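    /**
     * Sets the matchers that decide which yaml documents are taken into account, in the same spirit
     * as {@link org.springframework.beans.factory.config.YamlProcessor#setDocumentMatchers}.
     */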
    void setDocumentMatchers(DocumentMatcher... matchers) {
        this.documentMatchers = List.of(matchers);
    }

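    /**
     * Keeps only the documents accepted by the configured matchers: a document is kept as soon as one
     * matcher returns {@code MatchStatus.FOUND}, or while all matchers seen so far have abstained.
     * With no matchers configured, no document is kept.
     */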
    private List<LinkedHashMap<String, Object>> filterBasedOnDocumentMatchers(
            List<LinkedHashMap<String, Object>> allDocuments) {
        return allDocuments.stream().filter(x -> {
            Properties properties = CollectionFactory.createStringAdaptingProperties();
            properties.putAll(x);
            MatchStatus matchStatus = MatchStatus.ABSTAIN;
            for (DocumentMatcher matcher : this.documentMatchers) {
                MatchStatus match = matcher.matches(properties);
                matchStatus = MatchStatus.getMostSpecific(match, matchStatus);
                if (match == MatchStatus.FOUND) {
                    LOG.debug(() -> "Matched document with document matcher: " + properties);
                    return true;
                }

                if (matchStatus == MatchStatus.ABSTAIN) {
                    LOG.debug(() -> "Matched document with default matcher: " + properties);
                    return true;
                }
            }
            return false;
        }).toList();
    }

    /**
     * Builds the fully flattened properties, this time including collections and maps.
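     *
     * For illustration, using the same sample values as in the javadoc of
     * buildFlattenedMapWithoutComplexObjects, the fully flattened result looks like: <pre>
     * bean.test[0].name=Alice
     * bean.test[0].role=admin
     * bean.test[1].name=ER
     * bean.test[1].role=user
     * bean.items[0]=Item 10
     * </pre>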
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    private static void buildFlattenedMap(Map<String, Object> result, Map<String, Object> source,
            @Nullable String path) {
        source.forEach((key, value) -> {
            if (StringUtils.hasText(path)) {
                if (key.startsWith("[")) {
                    key = path + key;
                }
                else {
                    key = path + '.' + key;
                }
            }
            if (value instanceof String) {
                result.put(key, value);
            }
            else if (value instanceof Map map) {
                // Need a compound key
                buildFlattenedMap(result, map, key);
            }
            else if (value instanceof Collection collection) {
                // Need a compound key
                if (collection.isEmpty()) {
                    result.put(key, "");
                }
                else {
                    int count = 0;
                    for (Object object : collection) {
                        buildFlattenedMap(result, Collections.singletonMap("[" + (count++) + "]", object), key);
                    }
                }
            }
            else {
                result.put(key, (value != null ? value : ""));
            }
        });
    }

    /**
     * Flattens properties, but leaves collections and maps intact. The result looks like this, for
     * example: <pre>
     * bean.test=[{name=Alice, role=admin}, {name=ER, role=user}]
     * bean.items=[Item 10]
     * </pre>
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    private static void buildFlattenedMapWithoutComplexObjects(Map<String, Object> result, Map<String, Object> source,
            @Nullable String path) {
        source.forEach((key, value) -> {
            if (StringUtils.hasText(path)) {
                if (key.startsWith("[")) {
                    key = path + key;
                }
                else {
                    key = path + '.' + key;
                }
            }
            if (value instanceof String) {
                result.put(key, value);
            }
            else if (value instanceof Map map) {
                // Need a compound key
                buildFlattenedMapWithoutComplexObjects(result, map, key);
            }
            else if (value instanceof Collection collection) {
                if (collection.isEmpty()) {
                    result.put(key, "");
                }
                else {
                    result.put(key, collection);
                }
            }
            else {
                result.put(key, (value != null ? value : ""));
            }
        });
    }

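    /**
     * Recursively converts the object produced by snakeyaml for a single document into a map with
     * String keys, wrapping non-CharSequence keys in the {@code [key]} form.
     */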
    @SuppressWarnings({ "rawtypes", "unchecked" })
    private static Map<String, Object> asMap(Object object) {
        Map<String, Object> result = new LinkedHashMap<>();
        if (object instanceof Map map) {
            map.forEach((key, value) -> {
                if (value instanceof Map) {
                    value = asMap(value);
                }
                if (key instanceof CharSequence) {
                    result.put(key.toString(), value);
                }
                else {
                    // It has to be a map key in this case
                    result.put("[" + key.toString() + "]", value);
                }
            });
        }

        return result;
    }

}