|
| 1 | +// |
| 2 | +// DISCLAIMER |
| 3 | +// |
| 4 | +// Copyright 2023 ArangoDB GmbH, Cologne, Germany |
| 5 | +// |
| 6 | +// Licensed under the Apache License, Version 2.0 (the "License"); |
| 7 | +// you may not use this file except in compliance with the License. |
| 8 | +// You may obtain a copy of the License at |
| 9 | +// |
| 10 | +// http://www.apache.org/licenses/LICENSE-2.0 |
| 11 | +// |
| 12 | +// Unless required by applicable law or agreed to in writing, software |
| 13 | +// distributed under the License is distributed on an "AS IS" BASIS, |
| 14 | +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 15 | +// See the License for the specific language governing permissions and |
| 16 | +// limitations under the License. |
| 17 | +// |
| 18 | +// Copyright holder is ArangoDB GmbH, Cologne, Germany |
| 19 | +// |
| 20 | + |
| 21 | +package internal |
| 22 | + |
| 23 | +import ( |
| 24 | + "fmt" |
| 25 | + "go/ast" |
| 26 | + "go/parser" |
| 27 | + "go/token" |
| 28 | + "io/fs" |
| 29 | + "os" |
| 30 | + "path/filepath" |
| 31 | + "reflect" |
| 32 | + "sort" |
| 33 | + "strings" |
| 34 | + "testing" |
| 35 | + |
| 36 | + "github.com/stretchr/testify/require" |
| 37 | + |
| 38 | + "github.com/arangodb/kube-arangodb/pkg/util" |
| 39 | +) |
| 40 | + |
| 41 | +func parseDocDefinitions(t *testing.T, res map[string]*ast.Field, fs *token.FileSet) DocDefinitions { |
| 42 | + root := os.Getenv("ROOT") |
| 43 | + require.NotEmpty(t, root) |
| 44 | + |
| 45 | + var elements []string |
| 46 | + for k := range res { |
| 47 | + elements = append(elements, k) |
| 48 | + } |
| 49 | + |
| 50 | + sort.Slice(elements, func(i, j int) bool { |
| 51 | + if a, b := strings.ToLower(elements[i]), strings.ToLower(elements[j]); a == b { |
| 52 | + return elements[i] < elements[j] |
| 53 | + } else { |
| 54 | + return a < b |
| 55 | + } |
| 56 | + }) |
| 57 | + |
| 58 | + defs := make(DocDefinitions, len(elements)) |
| 59 | + |
| 60 | + for id, k := range elements { |
| 61 | + field := res[k] |
| 62 | + |
| 63 | + var def DocDefinition |
| 64 | + |
| 65 | + def.Path = strings.Split(k, ":")[0] |
| 66 | + def.Type = strings.Split(k, ":")[1] |
| 67 | + |
| 68 | + require.NotNil(t, field) |
| 69 | + |
| 70 | + if links, ok := extract(field, "link"); ok { |
| 71 | + def.Links = links |
| 72 | + } |
| 73 | + |
| 74 | + if d, ok := extract(field, "default"); ok { |
| 75 | + def.Default = util.NewType[string](d[0]) |
| 76 | + } |
| 77 | + |
| 78 | + if example, ok := extract(field, "example"); ok { |
| 79 | + def.Example = example |
| 80 | + } |
| 81 | + |
| 82 | + if enum, ok := extract(field, "enum"); ok { |
| 83 | + def.Enum = enum |
| 84 | + } |
| 85 | + |
| 86 | + if immutable, ok := extract(field, "immutable"); ok { |
| 87 | + def.Immutable = util.NewType[string](immutable[0]) |
| 88 | + } |
| 89 | + |
| 90 | + if important, ok := extract(field, "important"); ok { |
| 91 | + def.Important = util.NewType[string](important[0]) |
| 92 | + } |
| 93 | + |
| 94 | + if docs, ok := extractNotTags(field); !ok { |
| 95 | + println(def.Path, " is missing documentation!") |
| 96 | + } else { |
| 97 | + def.Docs = docs |
| 98 | + } |
| 99 | + |
| 100 | + file := fs.File(field.Pos()) |
| 101 | + |
| 102 | + filePath, err := filepath.Rel(root, file.Name()) |
| 103 | + require.NoError(t, err) |
| 104 | + |
| 105 | + def.File = filePath |
| 106 | + def.Line = file.Line(field.Pos()) |
| 107 | + |
| 108 | + defs[id] = def |
| 109 | + } |
| 110 | + return defs |
| 111 | +} |
| 112 | + |
| 113 | +func iterateOverObject(t *testing.T, fields map[string]*ast.Field, name string, object reflect.Type, path string) map[string]*ast.Field { |
| 114 | + r := map[string]*ast.Field{} |
| 115 | + t.Run(name, func(t *testing.T) { |
| 116 | + for k, v := range iterateOverObjectDirect(t, fields, name, object, path) { |
| 117 | + r[k] = v |
| 118 | + } |
| 119 | + }) |
| 120 | + |
| 121 | + return r |
| 122 | +} |
| 123 | + |
// iterateOverObjectDirect recursively walks object via reflection and returns
// a map from "<path>.<name>:<type>" keys to the AST field (from `fields`,
// keyed by "<pkg>.<Type>.<Field>") that documents each leaf. Simple scalar
// leaves map to a nil field here; callers propagate the parent's doc into
// those entries. Fails the test on undocumented fields or unsupported kinds.
func iterateOverObjectDirect(t *testing.T, fields map[string]*ast.Field, name string, object reflect.Type, path string) map[string]*ast.Field {
	// Scalar leaf: emit a single entry; the doc (if any) is attached by the caller.
	if n, simple := isSimpleType(object); simple {
		return map[string]*ast.Field{
			fmt.Sprintf("%s.%s:%s", path, name, n): nil,
		}
	}

	r := map[string]*ast.Field{}

	switch object.Kind() {
	case reflect.Array, reflect.Slice:
		// A slice of scalars is itself a leaf ("[]<kind>"); otherwise recurse
		// into the element type under an escaped "[int]" index marker.
		if n, simple := isSimpleType(object.Elem()); simple {
			return map[string]*ast.Field{
				fmt.Sprintf("%s.%s:[]%s", path, name, n): nil,
			}
		}

		for k, v := range iterateOverObjectDirect(t, fields, fmt.Sprintf("%s\\[int\\]", name), object.Elem(), path) {
			r[k] = v
		}
	case reflect.Map:
		// Same idea for maps: scalar values make a "map[K]V" leaf; otherwise
		// recurse into the value type under an escaped "<key-kind>" marker.
		if n, simple := isSimpleType(object.Elem()); simple {
			return map[string]*ast.Field{
				fmt.Sprintf("%s.%s:map[%s]%s", path, name, object.Key().String(), n): nil,
			}
		}

		for k, v := range iterateOverObjectDirect(t, fields, fmt.Sprintf("%s.\\<%s\\>", name, object.Key().Kind().String()), object.Elem(), path) {
			r[k] = v
		}
	case reflect.Struct:
		for field := 0; field < object.NumField(); field++ {
			f := object.Field(field)

			if !f.IsExported() {
				continue
			}

			// Anonymous (embedded) fields without a json tag are treated as inlined.
			tag, ok := f.Tag.Lookup("json")
			if !ok {
				if f.Anonymous {
					tag = ",inline"
				}
			}

			n, inline := extractTag(tag)

			if n == "-" {
				continue
			}

			// Lookup key into the parsed-source map, e.g. "v1.DeploymentSpec.Mode".
			fullFieldName := fmt.Sprintf("%s.%s", object.String(), f.Name)

			doc, ok := fields[fullFieldName]
			if !ok && !f.Anonymous {
				// Non-anonymous fields must have been seen in the parsed sources.
				require.True(t, ok, "field %s was not parsed from source", fullFieldName)
			}

			// A "+doc/type" tag overrides recursion: the field is reported as
			// a leaf with the declared type.
			if !f.Anonymous {
				if t, ok := extractType(doc); ok {
					r[fmt.Sprintf("%s.%s.%s:%s", path, name, n, t[0])] = doc
					continue
				}
			}

			if inline {
				// Inlined fields keep the current name/path; nil leaf docs
				// inherit this field's doc.
				for k, v := range iterateOverObjectDirect(t, fields, name, f.Type, path) {
					if v == nil {
						v = doc
					}
					r[k] = v
				}
			} else {

				for k, v := range iterateOverObject(t, fields, n, f.Type, fmt.Sprintf("%s.%s", path, name)) {
					if v == nil {
						v = doc
					}
					r[k] = v
				}
			}
		}
	case reflect.Pointer:
		// Pointers are transparent: document the pointee.
		for k, v := range iterateOverObjectDirect(t, fields, name, object.Elem(), path) {
			r[k] = v
		}
	default:
		require.Failf(t, "unsupported type", "%s for %s at %s", object.String(), name, path)
	}

	return r
}
| 216 | + |
| 217 | +func extractType(n *ast.Field) ([]string, bool) { |
| 218 | + return extract(n, "type") |
| 219 | +} |
| 220 | + |
| 221 | +func extract(n *ast.Field, tag string) ([]string, bool) { |
| 222 | + if n.Doc == nil { |
| 223 | + return nil, false |
| 224 | + } |
| 225 | + |
| 226 | + var ret []string |
| 227 | + |
| 228 | + for _, c := range n.Doc.List { |
| 229 | + if strings.HasPrefix(c.Text, fmt.Sprintf("// +doc/%s: ", tag)) { |
| 230 | + ret = append(ret, strings.TrimPrefix(c.Text, fmt.Sprintf("// +doc/%s: ", tag))) |
| 231 | + } |
| 232 | + } |
| 233 | + |
| 234 | + return ret, len(ret) > 0 |
| 235 | +} |
| 236 | + |
| 237 | +func extractNotTags(n *ast.Field) ([]string, bool) { |
| 238 | + if n.Doc == nil { |
| 239 | + return nil, false |
| 240 | + } |
| 241 | + |
| 242 | + var ret []string |
| 243 | + |
| 244 | + for _, c := range n.Doc.List { |
| 245 | + if strings.HasPrefix(c.Text, "// ") { |
| 246 | + if !strings.HasPrefix(c.Text, "// +doc/") { |
| 247 | + ret = append(ret, strings.TrimPrefix(c.Text, "// ")) |
| 248 | + } |
| 249 | + } |
| 250 | + } |
| 251 | + |
| 252 | + return ret, len(ret) > 0 |
| 253 | +} |
| 254 | + |
| 255 | +func isSimpleType(obj reflect.Type) (string, bool) { |
| 256 | + switch obj.Kind() { |
| 257 | + case reflect.String, |
| 258 | + reflect.Bool, |
| 259 | + reflect.Int, reflect.Int32, reflect.Int64, |
| 260 | + reflect.Uint, reflect.Uint16, reflect.Uint64, |
| 261 | + reflect.Float32: |
| 262 | + return obj.Kind().String(), true |
| 263 | + } |
| 264 | + |
| 265 | + return "", false |
| 266 | +} |
| 267 | + |
// extractTag splits a json struct tag into its name and whether it carries
// the "inline" option.
//
// Generalized: the original only compared the first option against "inline",
// so tags like "name,omitempty,inline" were not detected. All options are now
// scanned; behavior for previously-recognized tags is unchanged.
func extractTag(tag string) (string, bool) {
	name, opts, found := strings.Cut(tag, ",")
	if !found {
		return name, false
	}

	for _, opt := range strings.Split(opts, ",") {
		if opt == "inline" {
			return name, true
		}
	}

	return name, false
}
| 281 | + |
| 282 | +// parseSourceFiles returns map of <path to field in structure> -> AST for structure Field and the token inspector for all files in package |
| 283 | +func parseSourceFiles(t *testing.T, paths ...string) (map[string]*ast.Field, *token.FileSet) { |
| 284 | + d, fs := parseMultipleDirs(t, parser.ParseComments, paths...) |
| 285 | + |
| 286 | + r := map[string]*ast.Field{} |
| 287 | + |
| 288 | + for k, f := range d { |
| 289 | + var ct *ast.TypeSpec |
| 290 | + var nt *ast.TypeSpec |
| 291 | + |
| 292 | + ast.Inspect(f, func(n ast.Node) bool { |
| 293 | + switch x := n.(type) { |
| 294 | + case *ast.TypeSpec, *ast.FuncDecl, *ast.Field, *ast.Package, *ast.File, *ast.Ident, *ast.StructType: |
| 295 | + default: |
| 296 | + if x == nil { |
| 297 | + return true |
| 298 | + } |
| 299 | + return true |
| 300 | + } |
| 301 | + |
| 302 | + switch x := n.(type) { |
| 303 | + case *ast.TypeSpec: |
| 304 | + ct = x |
| 305 | + case *ast.StructType: |
| 306 | + nt = ct |
| 307 | + case *ast.FuncDecl: |
| 308 | + nt = nil |
| 309 | + case *ast.Field: |
| 310 | + if nt != nil { |
| 311 | + require.NotEmpty(t, nt.Name) |
| 312 | + |
| 313 | + for _, name := range x.Names { |
| 314 | + r[fmt.Sprintf("%s.%s.%s", k, nt.Name, name)] = x |
| 315 | + } |
| 316 | + } |
| 317 | + } |
| 318 | + |
| 319 | + return true |
| 320 | + }) |
| 321 | + } |
| 322 | + |
| 323 | + return r, fs |
| 324 | +} |
| 325 | + |
| 326 | +func parseMultipleDirs(t *testing.T, mode parser.Mode, dirs ...string) (map[string]*ast.Package, *token.FileSet) { |
| 327 | + fset := token.NewFileSet() // positions are relative to fset |
| 328 | + |
| 329 | + r := map[string]*ast.Package{} |
| 330 | + |
| 331 | + for _, dir := range dirs { |
| 332 | + d, err := parser.ParseDir(fset, dir, func(info fs.FileInfo) bool { |
| 333 | + return !strings.HasSuffix(info.Name(), "_test.go") |
| 334 | + }, mode) |
| 335 | + require.NoError(t, err) |
| 336 | + |
| 337 | + for k, v := range d { |
| 338 | + require.NotContains(t, r, k) |
| 339 | + r[k] = v |
| 340 | + } |
| 341 | + } |
| 342 | + |
| 343 | + return r, fset |
| 344 | +} |