diff --git a/.github/workflows/build-and-publish-image.yml b/.github/workflows/build-and-publish-image.yml new file mode 100644 index 0000000..cfd7c35 --- /dev/null +++ b/.github/workflows/build-and-publish-image.yml @@ -0,0 +1,67 @@ +--- +name: build +env: + image: pdok/atom-operator +on: + push: + tags: + - '*' +jobs: + docker: + runs-on: ubuntu-latest + steps: + - name: Docker meta + id: docker_meta + uses: docker/metadata-action@v3 + with: + images: ${{ env.image }} + tags: | + type=semver,pattern={{major}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{version}} + - name: Login to PDOK Docker Hub + if: startsWith(env.image, 'pdok/') + uses: docker/login-action@v1 + with: + username: koalapdok + password: ${{ secrets.DOCKERHUB_PUSH }} + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Cache Docker layers + uses: actions/cache@v3 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx- + - name: Build and push + id: docker_build + uses: docker/build-push-action@v2 + with: + push: true + tags: ${{ steps.docker_meta.outputs.tags }} + labels: ${{ steps.docker_meta.outputs.labels }} + cache-from: type=local,src=/tmp/.buildx-cache + cache-to: type=local,dest=/tmp/.buildx-cache-new + - # Temp fix to cleanup cache + # https://github.com/docker/build-push-action/issues/252 + # https://github.com/moby/buildkit/issues/1896 + name: Move cache + run: | + rm -rf /tmp/.buildx-cache + mv /tmp/.buildx-cache-new /tmp/.buildx-cache + - name: Build result notification + if: success() || failure() + uses: 8398a7/action-slack@v3 + with: + fields: all + status: custom + custom_payload: | + { + attachments: [{ + color: '${{ job.status }}' === 'success' ? 'good' : '${{ job.status }}' === 'failure' ? 'danger' : 'warning', + text: `${process.env.AS_WORKFLOW} ${{ job.status }} for ${process.env.AS_REPO}!\n${process.env.AS_JOB} job on ${process.env.AS_REF} (commit: ${process.env.AS_COMMIT}, version: ${{ steps.docker_meta.outputs.version }}) by ${process.env.AS_AUTHOR} took ${process.env.AS_TOOK}`, + }] + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} \ No newline at end of file diff --git a/Makefile b/Makefile index 17bf912..1eb484f 100644 --- a/Makefile +++ b/Makefile @@ -43,7 +43,7 @@ help: ## Display this help. .PHONY: manifests manifests: controller-gen ## Generate WebhookConfiguration, ClusterRole and CustomResourceDefinition objects. - $(CONTROLLER_GEN) rbac:roleName=manager-role crd webhook paths="./..." output:crd:artifacts:config=config/crd/bases + $(CONTROLLER_GEN) rbac:roleName=manager-role crd:allowDangerousTypes=true webhook paths="./..." output:crd:artifacts:config=config/crd/bases .PHONY: generate generate: controller-gen ## Generate code containing DeepCopy, DeepCopyInto, and DeepCopyObject method implementations. 
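Note on the Makefile change above: the added crd:allowDangerousTypes=true flag is most likely needed because the v2beta1 API introduced in this change uses float32 fields, and controller-gen refuses to generate a CRD schema for floating-point fields unless that flag is set. A minimal excerpt of the kind of field that triggers it (the Bbox struct from api/v2beta1/atom_types.go, added further down in this diff):

    // Bbox holds the bounding-box extent; its float32 members are what make
    // crd:allowDangerousTypes=true necessary when generating the CRD.
    type Bbox struct {
        Minx float32 `json:"minx"`
        Maxx float32 `json:"maxx"`
        Miny float32 `json:"miny"`
        Maxy float32 `json:"maxy"`
    }

With the semver patterns configured in the workflow above, pushing a git tag such as v1.2.3 should publish the image as pdok/atom-operator:1, pdok/atom-operator:1.2 and pdok/atom-operator:1.2.3.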
diff --git a/PROJECT b/PROJECT index 7766621..1c487ab 100644 --- a/PROJECT +++ b/PROJECT @@ -16,4 +16,16 @@ resources: kind: Atom path: github.com/pdok/atom-operator/api/v3 version: v3 + webhooks: + conversion: true + spoke: + - v2beta1 + webhookVersion: v1 +- api: + crdVersion: v1 + namespaced: true + domain: pdok.nl + kind: Atom + path: github.com/pdok/atom-operator/api/v2beta1 + version: v2beta1 version: "3" diff --git a/api/v2beta1/atom_conversion.go b/api/v2beta1/atom_conversion.go new file mode 100644 index 0000000..2e96d09 --- /dev/null +++ b/api/v2beta1/atom_conversion.go @@ -0,0 +1,352 @@ +/* +MIT License + +Copyright (c) 2024 Publieke Dienstverlening op de Kaart + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + +package v2beta1 + +import ( + "fmt" + "log" + "strconv" + "time" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + + pdoknlv3 "github.com/pdok/atom-operator/api/v3" + "sigs.k8s.io/controller-runtime/pkg/conversion" +) + +// ConvertTo converts this Atom (v2beta1) to the Hub version (v3). 
+func (src *Atom) ConvertTo(dstRaw conversion.Hub) error { + dst := dstRaw.(*pdoknlv3.Atom) + log.Printf("ConvertTo: Converting Atom from Spoke version v2beta1 to Hub version v3;"+ + "source: %s/%s, target: %s/%s", src.Namespace, src.Name, dst.Namespace, dst.Name) + + host := "https://service.pdok.nl/" // Todo read from flag + + // ObjectMeta + dst.ObjectMeta = src.ObjectMeta + + // Lifecycle + log.Printf("Start mapping the Lifecycle specs...") + if src.Spec.Kubernetes != nil && src.Spec.Kubernetes.Lifecycle != nil && src.Spec.Kubernetes.Lifecycle.TTLInDays != nil { + dst.Spec.Lifecycle.TTLInDays = GetInt32Pointer(int32(*src.Spec.Kubernetes.Lifecycle.TTLInDays)) + } + log.Printf("Done mapping the Lifecycle specs...") + + // Service + log.Printf("Start mapping the Service...") + dst.Spec.Service = pdoknlv3.Service{ + BaseURL: createBaseURL(host, src.Spec.General), + Lang: "nl", + Stylesheet: "https://service.pdok.nl/atom/style/style.xsl", + Title: src.Spec.Service.Title, + Subtitle: src.Spec.Service.Subtitle, + OwnerInfoRef: "pdok", + Links: []pdoknlv3.Link{}, + ServiceMetadataLinks: []pdoknlv3.MetadataLink{}, // Todo + Rights: src.Spec.Service.Rights, + } + log.Printf("Done mapping the Service...") + + dst.Spec.DatasetFeeds = []pdoknlv3.DatasetFeed{} + log.Printf("Start mapping the Datasets...") + for _, srcDataset := range src.Spec.Service.Datasets { + dstDatasetFeed := pdoknlv3.DatasetFeed{ + TechnicalName: srcDataset.Name, + Title: srcDataset.Title, + Subtitle: srcDataset.Subtitle, + DatasetMetadataLinks: []pdoknlv3.MetadataLink{}, // Todo + SpatialDatasetIdentifierCode: srcDataset.SourceIdentifier, + SpatialDatasetIdentifierNamespace: "http://www.pdok.nl", + } + + // Map the links + log.Printf("Start mapping the Links...") + for _, srcLink := range srcDataset.Links { + dstLink := pdoknlv3.Link{ + Title: srcLink.Type, + Href: srcLink.URI, + } + if srcLink.ContentType != nil { + dstLink.Type = *srcLink.ContentType + } + if srcLink.Language != nil { + dstLink.Hreflang = *srcLink.Language + } + + dstDatasetFeed.Links = append(dstDatasetFeed.Links, dstLink) + } + log.Printf("Done mapping the Links...") + + // Map the entries + log.Printf("Start mapping the Entries...") + for _, srcDownload := range srcDataset.Downloads { + dstEntry := pdoknlv3.Entry{ + TechnicalName: srcDownload.Name, + SRS: &pdoknlv3.SRS{ + URI: srcDownload.Srs.URI, + Name: srcDownload.Srs.Code, + }, + Polygon: &pdoknlv3.Polygon{ + BBox: pdoknlv3.BBox{ + MinX: GetFloat32AsString(srcDataset.Bbox.Minx), + MinY: GetFloat32AsString(srcDataset.Bbox.Miny), + MaxX: GetFloat32AsString(srcDataset.Bbox.Maxx), + MaxY: GetFloat32AsString(srcDataset.Bbox.Maxy), + }, + }, + } + + if srcDownload.Title != nil { + dstEntry.Title = *srcDownload.Title + } + if srcDownload.Content != nil { + dstEntry.Content = *srcDownload.Content + } + if srcDownload.Updated != nil { + parsedUpdatedTime, err := time.Parse(time.RFC3339, *srcDownload.Updated) + if err != nil { + log.Printf("Error parsing updated time: %v", err) + dstEntry.Updated = nil + } else { + updatedTime := metav1.NewTime(parsedUpdatedTime) + dstEntry.Updated = &updatedTime + } + } + + // Map the links + log.Printf("Start mapping the DownloadLinks...") + for _, srcLink := range srcDownload.Links { + dstDownloadLink := pdoknlv3.DownloadLink{} + + if srcLink.BlobKey != nil { + dstDownloadLink.Data = *srcLink.BlobKey + } + if srcLink.Updated != nil { + dstDownloadLink.Time = srcLink.Updated + } + if srcLink.Version != nil { + dstDownloadLink.Version = srcLink.Version + } + if srcLink.Bbox != nil { +
dstDownloadLink.BBox = &pdoknlv3.BBox{ + MinX: GetFloat32AsString(srcLink.Bbox.Minx), + MinY: GetFloat32AsString(srcLink.Bbox.Miny), + MaxX: GetFloat32AsString(srcLink.Bbox.Maxx), + MaxY: GetFloat32AsString(srcLink.Bbox.Maxy), + } + } + if srcLink.Rel != nil { + dstDownloadLink.Rel = *srcLink.Rel + } + + dstEntry.DownloadLinks = append(dstEntry.DownloadLinks, dstDownloadLink) + } + log.Printf("Done mapping the DownloadLinks...") + + dstDatasetFeed.Entries = append(dstDatasetFeed.Entries, dstEntry) + } + log.Printf("Done mapping the Entries...") + + dst.Spec.DatasetFeeds = append(dst.Spec.DatasetFeeds, dstDatasetFeed) + } + log.Printf("Done mapping the Datasets...") + + return nil +} + +// ConvertFrom converts the Hub version (v3) to this Atom (v2beta1). +func (dst *Atom) ConvertFrom(srcRaw conversion.Hub) error { + src := srcRaw.(*pdoknlv3.Atom) + log.Printf("ConvertFrom: Converting Atom from Hub version v3 to Spoke version v2beta1;"+ + "source: %s/%s, target: %s/%s", src.Namespace, src.Name, dst.Namespace, dst.Name) + + // ObjectMeta + dst.ObjectMeta = src.ObjectMeta + + // General + log.Printf("Start mapping the General specs...") + dst.Spec.General = General{ + Dataset: src.ObjectMeta.Labels["dataset"], + DatasetOwner: src.ObjectMeta.Labels["dataset-owner"], + DataVersion: nil, + } + + serviceVersion, ok := src.ObjectMeta.Labels["service-version"] + if ok { + dst.Spec.General.ServiceVersion = &serviceVersion + } + + theme, ok := src.ObjectMeta.Labels["theme"] + if ok { + dst.Spec.General.Theme = &theme + } + + log.Printf("Done mapping the General specs...") + + // Service + log.Printf("Start mapping the Service...") + dst.Spec.Service = AtomService{ + Title: src.Spec.Service.Title, + Subtitle: src.Spec.Service.Subtitle, + Rights: src.Spec.Service.Rights, + Author: Author{ + Name: "PDOK Beheer", + Email: "beheerPDOK@kadaster.nl", + }, + } + log.Printf("Done mapping the Service...") + + // Datasets + log.Printf("Start mapping the Datasets...") + dst.Spec.Service.Datasets = []Dataset{} + for _, srcDatasetFeed := range src.Spec.DatasetFeeds { + dstDataset := Dataset{ + Name: srcDatasetFeed.TechnicalName, + Title: srcDatasetFeed.Title, + Subtitle: srcDatasetFeed.Subtitle, + SourceIdentifier: srcDatasetFeed.SpatialDatasetIdentifierCode, + } + + // Map the links + log.Printf("Start mapping the Links...") + for _, srcLink := range srcDatasetFeed.Links { + dstDataset.Links = append(dstDataset.Links, OtherLink{ + Type: srcLink.Title, + URI: srcLink.Href, + ContentType: &srcLink.Type, + Language: &srcLink.Hreflang, + }) + } + log.Printf("Done mapping the Links...") + + if len(srcDatasetFeed.Entries) > 0 && srcDatasetFeed.Entries[0].Polygon != nil { + // We can assume all entries have the same bbox, so we take the first one + firstBbox := srcDatasetFeed.Entries[0].Polygon.BBox + dstDataset.Bbox = Bbox{ + Minx: GetStringAsFloat32(firstBbox.MinX), + Miny: GetStringAsFloat32(firstBbox.MinY), + Maxx: GetStringAsFloat32(firstBbox.MaxX), + Maxy: GetStringAsFloat32(firstBbox.MaxY), + } + } + + // Map the downloads + log.Printf("Start mapping the Entries...") + for _, srcEntry := range srcDatasetFeed.Entries { + dstDownload := Download{ + Name: srcEntry.TechnicalName, + Content: &srcEntry.Content, + Title: &srcEntry.Title, + } + + if srcEntry.Updated != nil { + updatedString := srcEntry.Updated.Format(time.RFC3339) + dstDownload.Updated = &updatedString + } + + if srcEntry.SRS != nil { + dstDownload.Srs = Srs{ + URI: srcEntry.SRS.URI, + Code: srcEntry.SRS.Name, + } + } + + // Map the links + 
log.Printf("Start mapping the DownloadLinks...") + for _, srcDownloadLink := range srcEntry.DownloadLinks { + + dstLink := Link{ + BlobKey: &srcDownloadLink.Data, + Rel: &srcDownloadLink.Rel, + } + + if srcDownloadLink.Time != nil { + dstLink.Updated = srcDownloadLink.Time + } + if srcDownloadLink.Version != nil { + dstLink.Version = srcDownloadLink.Version + } + if srcDownloadLink.BBox != nil { + dstLink.Bbox = &Bbox{ + Minx: GetStringAsFloat32(srcDownloadLink.BBox.MinX), + Miny: GetStringAsFloat32(srcDownloadLink.BBox.MinY), + Maxx: GetStringAsFloat32(srcDownloadLink.BBox.MaxX), + Maxy: GetStringAsFloat32(srcDownloadLink.BBox.MaxY), + } + } + } + + log.Printf("Done mapping the DownloadLinks...") + dstDataset.Downloads = append(dstDataset.Downloads, dstDownload) + } + log.Printf("Done mapping the Entries...") + dst.Spec.Service.Datasets = append(dst.Spec.Service.Datasets, dstDataset) + } + log.Printf("Start mapping the Datasets...") + + // Kubernetes + log.Printf("Start mapping the Kubernetes Specs...") + dst.Spec.Kubernetes = &Kubernetes{ + Lifecycle: &Lifecycle{}, + } + if src.Spec.Lifecycle.TTLInDays != nil { + dst.Spec.Kubernetes.Lifecycle.TTLInDays = GetIntPointer(int(*src.Spec.Lifecycle.TTLInDays)) + } + log.Printf("Done mapping the Kubernetes Specs...") + + return nil +} + +func createBaseURL(host string, general General) (baseURL string) { + + atomURI := fmt.Sprintf("%s/%s", general.DatasetOwner, general.Dataset) + if general.Theme != nil { + atomURI += fmt.Sprintf("/%s", *general.Theme) + } + atomURI += "/atom" + + if general.ServiceVersion != nil { + atomURI += fmt.Sprintf("/%s", *general.ServiceVersion) + } + + baseURL = fmt.Sprintf("%s/%s/index.xml", host, atomURI) + return +} + +func GetInt32Pointer(value int32) *int32 { + return &value +} + +func GetIntPointer(value int) *int { + return &value +} + +func GetFloat32AsString(value float32) string { + return strconv.FormatFloat(float64(value), 'f', 0, 32) +} + +func GetStringAsFloat32(value string) float32 { + float, _ := strconv.ParseFloat(value, 32) + return float32(float) +} diff --git a/api/v2beta1/atom_types.go b/api/v2beta1/atom_types.go new file mode 100644 index 0000000..d394253 --- /dev/null +++ b/api/v2beta1/atom_types.go @@ -0,0 +1,144 @@ +/* +MIT License + +Copyright (c) 2024 Publieke Dienstverlening op de Kaart + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + +package v2beta1 + +import ( + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +// EDIT THIS FILE! THIS IS SCAFFOLDING FOR YOU TO OWN! +// NOTE: json tags are required. 
Any new fields you add must have json tags for the fields to be serialized. + +// AtomSpec defines the desired state of Atom. +type AtomSpec struct { + // INSERT ADDITIONAL SPEC FIELDS - desired state of cluster + // Important: Run "make" to regenerate code after modifying this file + + General General `json:"general"` + Service AtomService `json:"service"` + Kubernetes *Kubernetes `json:"kubernetes,omitempty"` +} + +// AtomService is the struct for all service level fields +type AtomService struct { + Title string `json:"title"` + Subtitle string `json:"subtitle"` + MetadataIdentifier string `json:"metadataIdentifier"` + Rights string `json:"rights"` + Updated *string `json:"updated,omitempty"` // deprecated + Author Author `json:"author"` + Datasets []Dataset `json:"datasets"` +} + +// AtomStatus defines the observed state of Atom. +type AtomStatus struct { + // INSERT ADDITIONAL STATUS FIELD - define observed state of cluster + // Important: Run "make" to regenerate code after modifying this file +} + +// +kubebuilder:object:root=true +// +kubebuilder:subresource:status +// versionName=v2beta1 + +// Atom is the Schema for the atoms API. +type Atom struct { + metav1.TypeMeta `json:",inline"` + metav1.ObjectMeta `json:"metadata,omitempty"` + + Spec AtomSpec `json:"spec,omitempty"` + Status AtomStatus `json:"status,omitempty"` +} + +// Author is the struct with the input for the author field of an atom +type Author struct { + Name string `json:"name"` + Email string `json:"email"` +} + +// Dataset is the struct for all dataset level fields +type Dataset struct { + Name string `json:"name"` + Title string `json:"title"` + Subtitle string `json:"subtitle"` + MetadataIdentifier string `json:"metadataIdentifier"` + SourceIdentifier string `json:"sourceIdentifier"` + Links []OtherLink `json:"links,omitempty"` + Downloads []Download `json:"downloads"` + Bbox Bbox `json:"bbox"` +} + +// Bbox is the struct for the bounding box extent of an atom +type Bbox struct { + Minx float32 `json:"minx"` + Maxx float32 `json:"maxx"` + Miny float32 `json:"miny"` + Maxy float32 `json:"maxy"` +} + +// Download is the struct for the download level fields +type Download struct { + Name string `json:"name"` + Title *string `json:"title,omitempty"` + Updated *string `json:"updated,omitempty"` + Content *string `json:"content,omitempty"` + Links []Link `json:"links,omitempty"` + Srs Srs `json:"srs"` +} + +// Link represents a link in a download entry +type Link struct { + BlobKey *string `json:"BlobKey"` + Updated *string `json:"updated,omitempty"` + Version *string `json:"version,omitempty"` + Bbox *Bbox `json:"bbox,omitempty"` + Rel *string `json:"rel,omitempty"` +} + +// OtherLink represents any type of link that is not a download link related to the data (see Link) +type OtherLink struct { + Type string `json:"type"` + URI string `json:"uri"` + ContentType *string `json:"contentType,omitempty"` + Language *string `json:"language,omitempty"` +} + +// Srs is the struct with the information for the srs field of an atom +type Srs struct { + URI string `json:"uri"` + Code string `json:"code"` +} + +// +kubebuilder:object:root=true + +// AtomList contains a list of Atom. 
+type AtomList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata,omitempty"` + Items []Atom `json:"items"` +} + +func init() { + SchemeBuilder.Register(&Atom{}, &AtomList{}) +} diff --git a/api/v2beta1/groupversion_info.go b/api/v2beta1/groupversion_info.go new file mode 100644 index 0000000..7033d98 --- /dev/null +++ b/api/v2beta1/groupversion_info.go @@ -0,0 +1,44 @@ +/* +MIT License + +Copyright (c) 2024 Publieke Dienstverlening op de Kaart + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + +// Package v2beta1 contains API Schema definitions for the v2beta1 API group. +// +kubebuilder:object:generate=true +// +groupName=pdok.nl +package v2beta1 + +import ( + "k8s.io/apimachinery/pkg/runtime/schema" + "sigs.k8s.io/controller-runtime/pkg/scheme" +) + +var ( + // GroupVersion is group version used to register these objects. + GroupVersion = schema.GroupVersion{Group: "pdok.nl", Version: "v2beta1"} + + // SchemeBuilder is used to add go types to the GroupVersionKind scheme. + SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion} + + // AddToScheme adds the types in this group-version to the given scheme. + AddToScheme = SchemeBuilder.AddToScheme +) diff --git a/api/v2beta1/types.go b/api/v2beta1/types.go new file mode 100644 index 0000000..a1c2472 --- /dev/null +++ b/api/v2beta1/types.go @@ -0,0 +1,90 @@ +package v2beta1 + +import ( + corev1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +// Status - The status for custom resources managed by the operator-sdk. 
+type Status struct { + Conditions []Condition `json:"conditions,omitempty"` + Deployment *string `json:"deployment,omitempty"` + Resources []Resources `json:"resources,omitempty"` +} + +// Resources is the struct for the resources field within status +type Resources struct { + APIVersion *string `json:"apiversion,omitempty"` + Kind *string `json:"kind,omitempty"` + Name *string `json:"name,omitempty"` +} + +// General is the struct with all generic fields for the crds +type General struct { + Dataset string `json:"dataset"` + Theme *string `json:"theme,omitempty"` + DatasetOwner string `json:"datasetOwner"` + ServiceVersion *string `json:"serviceVersion,omitempty"` + DataVersion *string `json:"dataVersion,omitempty"` +} + +// Kubernetes is the struct with all fields that can be defined in kubernetes fields in the crds +type Kubernetes struct { + Autoscaling *Autoscaling `json:"autoscaling,omitempty"` + HealthCheck *HealthCheck `json:"healthCheck,omitempty"` + Resources *corev1.ResourceRequirements `json:"resources,omitempty"` + Lifecycle *Lifecycle `json:"lifecycle,omitempty"` +} + +// Autoscaling is the struct with all fields to configure autoscalers for the crs +type Autoscaling struct { + AverageCPUUtilization *int `json:"averageCpuUtilization,omitempty"` + MinReplicas *int `json:"minReplicas,omitempty"` + MaxReplicas *int `json:"maxReplicas,omitempty"` +} + +// HealthCheck is the struct with all fields to configure healthchecks for the crs +type HealthCheck struct { + Querystring *string `json:"querystring,omitempty"` + Mimetype *string `json:"mimetype,omitempty"` + Boundingbox *string `json:"boundingbox,omitempty"` +} + +// Lifecycle is the struct with the fields to configure lifecycle settings for the resources +type Lifecycle struct { + TTLInDays *int `json:"ttlInDays,omitempty"` +} + +// TODO Should we move this to an ansible package? + +// Condition - the condition for the ansible operator +type Condition struct { + Type ConditionType `json:"type"` + Status ConditionStatus `json:"status"` + LastTransitionTime metav1.Time `json:"lastTransitionTime"` + AnsibleResult *ResultAnsible `json:"ansibleResult,omitempty"` + Reason string `json:"reason"` + Message string `json:"message"` +} + +// ConditionType specifies a string for field ConditionType +type ConditionType string + +// ConditionStatus specifies a string for field ConditionType +type ConditionStatus string + +// // This const specifies allowed fields for Status +// const ( +// ConditionTrue ConditionStatus = "True" +// ConditionFalse ConditionStatus = "False" +// ConditionUnknown ConditionStatus = "Unknown" +// ) + +// ResultAnsible - encapsulation of the ansible result. 
'AnsibleResult' is turned around in struct to comply with linting +type ResultAnsible struct { + Ok int `json:"ok"` + Changed int `json:"changed"` + Skipped int `json:"skipped"` + Failures int `json:"failures"` + TimeOfCompletion string `json:"completion"` +} diff --git a/api/v2beta1/zz_generated.deepcopy.go b/api/v2beta1/zz_generated.deepcopy.go new file mode 100644 index 0000000..c4e9062 --- /dev/null +++ b/api/v2beta1/zz_generated.deepcopy.go @@ -0,0 +1,581 @@ +//go:build !ignore_autogenerated + +/* +MIT License + +Copyright (c) 2024 Publieke Dienstverlening op de Kaart + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + +// Code generated by controller-gen. DO NOT EDIT. + +package v2beta1 + +import ( + "k8s.io/api/core/v1" + runtime "k8s.io/apimachinery/pkg/runtime" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Atom) DeepCopyInto(out *Atom) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + out.Status = in.Status +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Atom. +func (in *Atom) DeepCopy() *Atom { + if in == nil { + return nil + } + out := new(Atom) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *Atom) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AtomList) DeepCopyInto(out *AtomList) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]Atom, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AtomList. +func (in *AtomList) DeepCopy() *AtomList { + if in == nil { + return nil + } + out := new(AtomList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *AtomList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *AtomService) DeepCopyInto(out *AtomService) { + *out = *in + if in.Updated != nil { + in, out := &in.Updated, &out.Updated + *out = new(string) + **out = **in + } + out.Author = in.Author + if in.Datasets != nil { + in, out := &in.Datasets, &out.Datasets + *out = make([]Dataset, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AtomService. +func (in *AtomService) DeepCopy() *AtomService { + if in == nil { + return nil + } + out := new(AtomService) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AtomSpec) DeepCopyInto(out *AtomSpec) { + *out = *in + in.General.DeepCopyInto(&out.General) + in.Service.DeepCopyInto(&out.Service) + if in.Kubernetes != nil { + in, out := &in.Kubernetes, &out.Kubernetes + *out = new(Kubernetes) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AtomSpec. +func (in *AtomSpec) DeepCopy() *AtomSpec { + if in == nil { + return nil + } + out := new(AtomSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AtomStatus) DeepCopyInto(out *AtomStatus) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AtomStatus. +func (in *AtomStatus) DeepCopy() *AtomStatus { + if in == nil { + return nil + } + out := new(AtomStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Author) DeepCopyInto(out *Author) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Author. +func (in *Author) DeepCopy() *Author { + if in == nil { + return nil + } + out := new(Author) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Autoscaling) DeepCopyInto(out *Autoscaling) { + *out = *in + if in.AverageCPUUtilization != nil { + in, out := &in.AverageCPUUtilization, &out.AverageCPUUtilization + *out = new(int) + **out = **in + } + if in.MinReplicas != nil { + in, out := &in.MinReplicas, &out.MinReplicas + *out = new(int) + **out = **in + } + if in.MaxReplicas != nil { + in, out := &in.MaxReplicas, &out.MaxReplicas + *out = new(int) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Autoscaling. +func (in *Autoscaling) DeepCopy() *Autoscaling { + if in == nil { + return nil + } + out := new(Autoscaling) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Bbox) DeepCopyInto(out *Bbox) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Bbox. +func (in *Bbox) DeepCopy() *Bbox { + if in == nil { + return nil + } + out := new(Bbox) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *Condition) DeepCopyInto(out *Condition) { + *out = *in + in.LastTransitionTime.DeepCopyInto(&out.LastTransitionTime) + if in.AnsibleResult != nil { + in, out := &in.AnsibleResult, &out.AnsibleResult + *out = new(ResultAnsible) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Condition. +func (in *Condition) DeepCopy() *Condition { + if in == nil { + return nil + } + out := new(Condition) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Dataset) DeepCopyInto(out *Dataset) { + *out = *in + if in.Links != nil { + in, out := &in.Links, &out.Links + *out = make([]OtherLink, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Downloads != nil { + in, out := &in.Downloads, &out.Downloads + *out = make([]Download, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + out.Bbox = in.Bbox +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Dataset. +func (in *Dataset) DeepCopy() *Dataset { + if in == nil { + return nil + } + out := new(Dataset) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Download) DeepCopyInto(out *Download) { + *out = *in + if in.Title != nil { + in, out := &in.Title, &out.Title + *out = new(string) + **out = **in + } + if in.Updated != nil { + in, out := &in.Updated, &out.Updated + *out = new(string) + **out = **in + } + if in.Content != nil { + in, out := &in.Content, &out.Content + *out = new(string) + **out = **in + } + if in.Links != nil { + in, out := &in.Links, &out.Links + *out = make([]Link, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + out.Srs = in.Srs +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Download. +func (in *Download) DeepCopy() *Download { + if in == nil { + return nil + } + out := new(Download) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *General) DeepCopyInto(out *General) { + *out = *in + if in.Theme != nil { + in, out := &in.Theme, &out.Theme + *out = new(string) + **out = **in + } + if in.ServiceVersion != nil { + in, out := &in.ServiceVersion, &out.ServiceVersion + *out = new(string) + **out = **in + } + if in.DataVersion != nil { + in, out := &in.DataVersion, &out.DataVersion + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new General. +func (in *General) DeepCopy() *General { + if in == nil { + return nil + } + out := new(General) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *HealthCheck) DeepCopyInto(out *HealthCheck) { + *out = *in + if in.Querystring != nil { + in, out := &in.Querystring, &out.Querystring + *out = new(string) + **out = **in + } + if in.Mimetype != nil { + in, out := &in.Mimetype, &out.Mimetype + *out = new(string) + **out = **in + } + if in.Boundingbox != nil { + in, out := &in.Boundingbox, &out.Boundingbox + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HealthCheck. +func (in *HealthCheck) DeepCopy() *HealthCheck { + if in == nil { + return nil + } + out := new(HealthCheck) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Kubernetes) DeepCopyInto(out *Kubernetes) { + *out = *in + if in.Autoscaling != nil { + in, out := &in.Autoscaling, &out.Autoscaling + *out = new(Autoscaling) + (*in).DeepCopyInto(*out) + } + if in.HealthCheck != nil { + in, out := &in.HealthCheck, &out.HealthCheck + *out = new(HealthCheck) + (*in).DeepCopyInto(*out) + } + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(v1.ResourceRequirements) + (*in).DeepCopyInto(*out) + } + if in.Lifecycle != nil { + in, out := &in.Lifecycle, &out.Lifecycle + *out = new(Lifecycle) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Kubernetes. +func (in *Kubernetes) DeepCopy() *Kubernetes { + if in == nil { + return nil + } + out := new(Kubernetes) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Lifecycle) DeepCopyInto(out *Lifecycle) { + *out = *in + if in.TTLInDays != nil { + in, out := &in.TTLInDays, &out.TTLInDays + *out = new(int) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Lifecycle. +func (in *Lifecycle) DeepCopy() *Lifecycle { + if in == nil { + return nil + } + out := new(Lifecycle) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Link) DeepCopyInto(out *Link) { + *out = *in + if in.BlobKey != nil { + in, out := &in.BlobKey, &out.BlobKey + *out = new(string) + **out = **in + } + if in.Updated != nil { + in, out := &in.Updated, &out.Updated + *out = new(string) + **out = **in + } + if in.Version != nil { + in, out := &in.Version, &out.Version + *out = new(string) + **out = **in + } + if in.Bbox != nil { + in, out := &in.Bbox, &out.Bbox + *out = new(Bbox) + **out = **in + } + if in.Rel != nil { + in, out := &in.Rel, &out.Rel + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Link. +func (in *Link) DeepCopy() *Link { + if in == nil { + return nil + } + out := new(Link) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *OtherLink) DeepCopyInto(out *OtherLink) { + *out = *in + if in.ContentType != nil { + in, out := &in.ContentType, &out.ContentType + *out = new(string) + **out = **in + } + if in.Language != nil { + in, out := &in.Language, &out.Language + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OtherLink. +func (in *OtherLink) DeepCopy() *OtherLink { + if in == nil { + return nil + } + out := new(OtherLink) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Resources) DeepCopyInto(out *Resources) { + *out = *in + if in.APIVersion != nil { + in, out := &in.APIVersion, &out.APIVersion + *out = new(string) + **out = **in + } + if in.Kind != nil { + in, out := &in.Kind, &out.Kind + *out = new(string) + **out = **in + } + if in.Name != nil { + in, out := &in.Name, &out.Name + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Resources. +func (in *Resources) DeepCopy() *Resources { + if in == nil { + return nil + } + out := new(Resources) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ResultAnsible) DeepCopyInto(out *ResultAnsible) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResultAnsible. +func (in *ResultAnsible) DeepCopy() *ResultAnsible { + if in == nil { + return nil + } + out := new(ResultAnsible) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Srs) DeepCopyInto(out *Srs) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Srs. +func (in *Srs) DeepCopy() *Srs { + if in == nil { + return nil + } + out := new(Srs) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Status) DeepCopyInto(out *Status) { + *out = *in + if in.Conditions != nil { + in, out := &in.Conditions, &out.Conditions + *out = make([]Condition, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Deployment != nil { + in, out := &in.Deployment, &out.Deployment + *out = new(string) + **out = **in + } + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = make([]Resources, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Status. 
+func (in *Status) DeepCopy() *Status { + if in == nil { + return nil + } + out := new(Status) + in.DeepCopyInto(out) + return out +} diff --git a/api/v3/atom_conversion.go b/api/v3/atom_conversion.go new file mode 100644 index 0000000..f9b6b94 --- /dev/null +++ b/api/v3/atom_conversion.go @@ -0,0 +1,30 @@ +/* +MIT License + +Copyright (c) 2024 Publieke Dienstverlening op de Kaart + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + +package v3 + +// EDIT THIS FILE! THIS IS SCAFFOLDING FOR YOU TO OWN! + +// Hub marks this type as a conversion hub. +func (*Atom) Hub() {} diff --git a/api/v3/atom_types.go b/api/v3/atom_types.go index c5d7d16..1ed8bdb 100644 --- a/api/v3/atom_types.go +++ b/api/v3/atom_types.go @@ -42,14 +42,15 @@ type Lifecycle struct { // Service defines the service configuration for the Atom feed type Service struct { - BaseURL string `json:"baseUrl"` - Lang string `json:"lang,omitempty"` - Stylesheet string `json:"stylesheet,omitempty"` - Title string `json:"title"` - Subtitle string `json:"subtitle,omitempty"` - Links []Link `json:"links,omitempty"` - Rights string `json:"rights,omitempty"` - Author Author `json:"author"` + BaseURL string `json:"baseUrl"` + Lang string `json:"lang,omitempty"` + Stylesheet string `json:"stylesheet,omitempty"` + Title string `json:"title"` + Subtitle string `json:"subtitle,omitempty"` + OwnerInfoRef string `json:"ownerInfoRef"` + ServiceMetadataLinks []MetadataLink `json:"serviceMetadataLinks,omitempty"` + Links []Link `json:"links,omitempty"` // Todo kan weg? + Rights string `json:"rights,omitempty"` } // Link represents a link in the service or dataset feed @@ -71,14 +72,21 @@ type Author struct { // DatasetFeed represents individual dataset feeds within the Atom service type DatasetFeed struct { - TechnicalName string `json:"technicalName"` - Title string `json:"title"` - Subtitle string `json:"subtitle,omitempty"` - Links []Link `json:"links,omitempty"` - Author Author `json:"author,omitempty"` - SpatialDatasetIdentifierCode string `json:"spatial_dataset_identifier_code,omitempty"` - SpatialDatasetIdentifierNamespace string `json:"spatial_dataset_identifier_namespace,omitempty"` - Entries []Entry `json:"entries,omitempty"` + TechnicalName string `json:"technicalName"` + Title string `json:"title"` + Subtitle string `json:"subtitle,omitempty"` + Links []Link `json:"links,omitempty"` // Todo kan weg? 
+ DatasetMetadataLinks []MetadataLink `json:"datasetMetadataLinks,omitempty"` + Author Author `json:"author,omitempty"` + SpatialDatasetIdentifierCode string `json:"spatial_dataset_identifier_code,omitempty"` + SpatialDatasetIdentifierNamespace string `json:"spatial_dataset_identifier_namespace,omitempty"` + Entries []Entry `json:"entries,omitempty"` +} + +// Metadatalink represents a link in the service or dataset feed +type MetadataLink struct { + MetadataIdentifier string `json:"metadataIdentifier"` + Templates []string `json:"templates,omitempty"` } // Entry represents an entry within a dataset feed, typically for downloads @@ -128,7 +136,10 @@ type AtomStatus struct { } // +kubebuilder:object:root=true +// +kubebuilder:conversion:hub // +kubebuilder:subresource:status +// versionName=v3 +// +kubebuilder:storageversion // Atom is the Schema for the atoms API. type Atom struct { diff --git a/api/v3/zz_generated.deepcopy.go b/api/v3/zz_generated.deepcopy.go index e5809e7..e3c7719 100644 --- a/api/v3/zz_generated.deepcopy.go +++ b/api/v3/zz_generated.deepcopy.go @@ -37,7 +37,7 @@ func (in *Atom) DeepCopyInto(out *Atom) { *out = *in out.TypeMeta = in.TypeMeta in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) - out.Spec = in.Spec + in.Spec.DeepCopyInto(&out.Spec) out.Status = in.Status } @@ -94,6 +94,15 @@ func (in *AtomList) DeepCopyObject() runtime.Object { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *AtomSpec) DeepCopyInto(out *AtomSpec) { *out = *in + in.Lifecycle.DeepCopyInto(&out.Lifecycle) + in.Service.DeepCopyInto(&out.Service) + if in.DatasetFeeds != nil { + in, out := &in.DatasetFeeds, &out.DatasetFeeds + *out = make([]DatasetFeed, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AtomSpec. @@ -120,3 +129,247 @@ func (in *AtomStatus) DeepCopy() *AtomStatus { in.DeepCopyInto(out) return out } + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Author) DeepCopyInto(out *Author) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Author. +func (in *Author) DeepCopy() *Author { + if in == nil { + return nil + } + out := new(Author) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BBox) DeepCopyInto(out *BBox) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BBox. +func (in *BBox) DeepCopy() *BBox { + if in == nil { + return nil + } + out := new(BBox) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *DatasetFeed) DeepCopyInto(out *DatasetFeed) { + *out = *in + if in.Links != nil { + in, out := &in.Links, &out.Links + *out = make([]Link, len(*in)) + copy(*out, *in) + } + if in.DatasetMetadataLinks != nil { + in, out := &in.DatasetMetadataLinks, &out.DatasetMetadataLinks + *out = make([]MetadataLink, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + out.Author = in.Author + if in.Entries != nil { + in, out := &in.Entries, &out.Entries + *out = make([]Entry, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetFeed. +func (in *DatasetFeed) DeepCopy() *DatasetFeed { + if in == nil { + return nil + } + out := new(DatasetFeed) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DownloadLink) DeepCopyInto(out *DownloadLink) { + *out = *in + if in.Version != nil { + in, out := &in.Version, &out.Version + *out = new(string) + **out = **in + } + if in.Time != nil { + in, out := &in.Time, &out.Time + *out = new(string) + **out = **in + } + if in.BBox != nil { + in, out := &in.BBox, &out.BBox + *out = new(BBox) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DownloadLink. +func (in *DownloadLink) DeepCopy() *DownloadLink { + if in == nil { + return nil + } + out := new(DownloadLink) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Entry) DeepCopyInto(out *Entry) { + *out = *in + if in.DownloadLinks != nil { + in, out := &in.DownloadLinks, &out.DownloadLinks + *out = make([]DownloadLink, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Updated != nil { + in, out := &in.Updated, &out.Updated + *out = (*in).DeepCopy() + } + if in.Polygon != nil { + in, out := &in.Polygon, &out.Polygon + *out = new(Polygon) + **out = **in + } + if in.SRS != nil { + in, out := &in.SRS, &out.SRS + *out = new(SRS) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Entry. +func (in *Entry) DeepCopy() *Entry { + if in == nil { + return nil + } + out := new(Entry) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Lifecycle) DeepCopyInto(out *Lifecycle) { + *out = *in + if in.TTLInDays != nil { + in, out := &in.TTLInDays, &out.TTLInDays + *out = new(int32) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Lifecycle. +func (in *Lifecycle) DeepCopy() *Lifecycle { + if in == nil { + return nil + } + out := new(Lifecycle) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Link) DeepCopyInto(out *Link) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Link. +func (in *Link) DeepCopy() *Link { + if in == nil { + return nil + } + out := new(Link) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *MetadataLink) DeepCopyInto(out *MetadataLink) { + *out = *in + if in.Templates != nil { + in, out := &in.Templates, &out.Templates + *out = make([]string, len(*in)) + copy(*out, *in) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetadataLink. +func (in *MetadataLink) DeepCopy() *MetadataLink { + if in == nil { + return nil + } + out := new(MetadataLink) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Polygon) DeepCopyInto(out *Polygon) { + *out = *in + out.BBox = in.BBox +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Polygon. +func (in *Polygon) DeepCopy() *Polygon { + if in == nil { + return nil + } + out := new(Polygon) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SRS) DeepCopyInto(out *SRS) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SRS. +func (in *SRS) DeepCopy() *SRS { + if in == nil { + return nil + } + out := new(SRS) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Service) DeepCopyInto(out *Service) { + *out = *in + if in.ServiceMetadataLinks != nil { + in, out := &in.ServiceMetadataLinks, &out.ServiceMetadataLinks + *out = make([]MetadataLink, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Links != nil { + in, out := &in.Links, &out.Links + *out = make([]Link, len(*in)) + copy(*out, *in) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Service. 
+func (in *Service) DeepCopy() *Service { + if in == nil { + return nil + } + out := new(Service) + in.DeepCopyInto(out) + return out +} diff --git a/cmd/main.go b/cmd/main.go index a802646..23cc54b 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -37,8 +37,11 @@ import ( metricsserver "sigs.k8s.io/controller-runtime/pkg/metrics/server" "sigs.k8s.io/controller-runtime/pkg/webhook" + pdoknlv2beta1 "github.com/pdok/atom-operator/api/v2beta1" pdoknlv3 "github.com/pdok/atom-operator/api/v3" "github.com/pdok/atom-operator/internal/controller" + webhookpdoknlv2beta1 "github.com/pdok/atom-operator/internal/webhook/v2beta1" + webhookpdoknlv3 "github.com/pdok/atom-operator/internal/webhook/v3" // +kubebuilder:scaffold:imports ) @@ -51,6 +54,7 @@ func init() { utilruntime.Must(clientgoscheme.AddToScheme(scheme)) utilruntime.Must(pdoknlv3.AddToScheme(scheme)) + utilruntime.Must(pdoknlv2beta1.AddToScheme(scheme)) // +kubebuilder:scaffold:scheme } @@ -209,6 +213,22 @@ func main() { setupLog.Error(err, "unable to create controller", "controller", "Atom") os.Exit(1) } + + // nolint:goconst + if os.Getenv("ENABLE_WEBHOOKS") != "false" { + if err = webhookpdoknlv2beta1.SetupAtomWebhookWithManager(mgr); err != nil { + setupLog.Error(err, "unable to create webhook", "webhook", "Atom") + os.Exit(1) + } + } + + // nolint:goconst + if os.Getenv("ENABLE_WEBHOOKS") != "false" { + if err = webhookpdoknlv3.SetupAtomWebhookWithManager(mgr); err != nil { + setupLog.Error(err, "unable to create webhook", "webhook", "Atom") + os.Exit(1) + } + } // +kubebuilder:scaffold:builder if metricsCertWatcher != nil { diff --git a/config/certmanager/certificate-metrics.yaml b/config/certmanager/certificate-metrics.yaml new file mode 100644 index 0000000..1730402 --- /dev/null +++ b/config/certmanager/certificate-metrics.yaml @@ -0,0 +1,20 @@ +# The following manifests contain a self-signed issuer CR and a metrics certificate CR. +# More document can be found at https://docs.cert-manager.io +apiVersion: cert-manager.io/v1 +kind: Certificate +metadata: + labels: + app.kubernetes.io/name: atom-operator + app.kubernetes.io/managed-by: kustomize + name: metrics-certs # this name should match the one appeared in kustomizeconfig.yaml + namespace: system +spec: + dnsNames: + # SERVICE_NAME and SERVICE_NAMESPACE will be substituted by kustomize + # replacements in the config/default/kustomization.yaml file. + - SERVICE_NAME.SERVICE_NAMESPACE.svc + - SERVICE_NAME.SERVICE_NAMESPACE.svc.cluster.local + issuerRef: + kind: Issuer + name: selfsigned-issuer + secretName: metrics-server-cert diff --git a/config/certmanager/certificate-webhook.yaml b/config/certmanager/certificate-webhook.yaml new file mode 100644 index 0000000..1549557 --- /dev/null +++ b/config/certmanager/certificate-webhook.yaml @@ -0,0 +1,20 @@ +# The following manifests contain a self-signed issuer CR and a certificate CR. +# More document can be found at https://docs.cert-manager.io +apiVersion: cert-manager.io/v1 +kind: Certificate +metadata: + labels: + app.kubernetes.io/name: atom-operator + app.kubernetes.io/managed-by: kustomize + name: serving-cert # this name should match the one appeared in kustomizeconfig.yaml + namespace: system +spec: + # SERVICE_NAME and SERVICE_NAMESPACE will be substituted by kustomize + # replacements in the config/default/kustomization.yaml file. 
+ dnsNames: + - SERVICE_NAME.SERVICE_NAMESPACE.svc + - SERVICE_NAME.SERVICE_NAMESPACE.svc.cluster.local + issuerRef: + kind: Issuer + name: selfsigned-issuer + secretName: webhook-server-cert diff --git a/config/certmanager/issuer.yaml b/config/certmanager/issuer.yaml new file mode 100644 index 0000000..273862c --- /dev/null +++ b/config/certmanager/issuer.yaml @@ -0,0 +1,13 @@ +# The following manifest contains a self-signed issuer CR. +# More information can be found at https://docs.cert-manager.io +# WARNING: Targets CertManager v1.0. Check https://cert-manager.io/docs/installation/upgrading/ for breaking changes. +apiVersion: cert-manager.io/v1 +kind: Issuer +metadata: + labels: + app.kubernetes.io/name: atom-operator + app.kubernetes.io/managed-by: kustomize + name: selfsigned-issuer + namespace: system +spec: + selfSigned: {} diff --git a/config/certmanager/kustomization.yaml b/config/certmanager/kustomization.yaml new file mode 100644 index 0000000..fcb7498 --- /dev/null +++ b/config/certmanager/kustomization.yaml @@ -0,0 +1,7 @@ +resources: +- issuer.yaml +- certificate-webhook.yaml +- certificate-metrics.yaml + +configurations: +- kustomizeconfig.yaml diff --git a/config/certmanager/kustomizeconfig.yaml b/config/certmanager/kustomizeconfig.yaml new file mode 100644 index 0000000..cf6f89e --- /dev/null +++ b/config/certmanager/kustomizeconfig.yaml @@ -0,0 +1,8 @@ +# This configuration is for teaching kustomize how to update name ref substitution +nameReference: +- kind: Issuer + group: cert-manager.io + fieldSpecs: + - kind: Certificate + group: cert-manager.io + path: spec/issuerRef/name diff --git a/config/crd/bases/pdok.nl_atoms.yaml b/config/crd/bases/pdok.nl_atoms.yaml index 9f2cc60..feefd54 100644 --- a/config/crd/bases/pdok.nl_atoms.yaml +++ b/config/crd/bases/pdok.nl_atoms.yaml @@ -14,6 +14,313 @@ spec: singular: atom scope: Namespaced versions: + - name: v2beta1 + schema: + openAPIV3Schema: + description: Atom is the Schema for the atoms API. + properties: + apiVersion: + description: |- + APIVersion defines the versioned schema of this representation of an object. + Servers should convert recognized schemas to the latest internal value, and + may reject unrecognized values. + More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources + type: string + kind: + description: |- + Kind is a string value representing the REST resource this object represents. + Servers may infer this from the endpoint the client submits requests to. + Cannot be updated. + In CamelCase. + More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds + type: string + metadata: + type: object + spec: + description: AtomSpec defines the desired state of Atom. 
+ properties: + general: + description: General is the struct with all generic fields for the + crds + properties: + dataVersion: + type: string + dataset: + type: string + datasetOwner: + type: string + serviceVersion: + type: string + theme: + type: string + required: + - dataset + - datasetOwner + type: object + kubernetes: + description: Kubernetes is the struct with all fields that can be + defined in kubernetes fields in the crds + properties: + autoscaling: + description: Autoscaling is the struct with all fields to configure + autoscalers for the crs + properties: + averageCpuUtilization: + type: integer + maxReplicas: + type: integer + minReplicas: + type: integer + type: object + healthCheck: + description: HealthCheck is the struct with all fields to configure + healthchecks for the crs + properties: + boundingbox: + type: string + mimetype: + type: string + querystring: + type: string + type: object + lifecycle: + description: Lifecycle is the struct with the fields to configure + lifecycle settings for the resources + properties: + ttlInDays: + type: integer + type: object + resources: + description: ResourceRequirements describes the compute resource + requirements. + properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + + This is an alpha field and requires enabling the + DynamicResourceAllocation feature gate. + + This field is immutable. It can only be set for containers. + items: + description: ResourceClaim references one entry in PodSpec.ResourceClaims. + properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. It makes that resource available + inside a container. + type: string + request: + description: |- + Request is the name chosen for a request in the referenced claim. + If empty, everything from the claim is made available, otherwise + only the result of this request. + type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Requests describes the minimum amount of compute resources required. + If Requests is omitted for a container, it defaults to Limits if that is explicitly specified, + otherwise to an implementation-defined value. Requests cannot exceed Limits. 
+ More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ + type: object + type: object + type: object + service: + description: AtomService is the struct for all service level fields + properties: + author: + description: Author is the struct with the input for the author + field of an atom + properties: + email: + type: string + name: + type: string + required: + - email + - name + type: object + datasets: + items: + description: Dataset is the struct for all dataset level fields + properties: + bbox: + description: Bbox is the struct for the bounding box extent + of an atom + properties: + maxx: + type: number + maxy: + type: number + minx: + type: number + miny: + type: number + required: + - maxx + - maxy + - minx + - miny + type: object + downloads: + items: + description: Download is the struct for the download level + fields + properties: + content: + type: string + links: + items: + description: Link represents a link in a download + entry + properties: + BlobKey: + type: string + bbox: + description: Bbox is the struct for the bounding + box extent of an atom + properties: + maxx: + type: number + maxy: + type: number + minx: + type: number + miny: + type: number + required: + - maxx + - maxy + - minx + - miny + type: object + rel: + type: string + updated: + type: string + version: + type: string + required: + - BlobKey + type: object + type: array + name: + type: string + srs: + description: Srs is the struct with the information + for the srs field of an atom + properties: + code: + type: string + uri: + type: string + required: + - code + - uri + type: object + title: + type: string + updated: + type: string + required: + - name + - srs + type: object + type: array + links: + items: + description: OtherLink represents any type of link that + is not a download link related to the data (see Link) + properties: + contentType: + type: string + language: + type: string + type: + type: string + uri: + type: string + required: + - type + - uri + type: object + type: array + metadataIdentifier: + type: string + name: + type: string + sourceIdentifier: + type: string + subtitle: + type: string + title: + type: string + required: + - bbox + - downloads + - metadataIdentifier + - name + - sourceIdentifier + - subtitle + - title + type: object + type: array + metadataIdentifier: + type: string + rights: + type: string + subtitle: + type: string + title: + type: string + updated: + type: string + required: + - author + - datasets + - metadataIdentifier + - rights + - subtitle + - title + type: object + required: + - general + - service + type: object + status: + description: AtomStatus defines the observed state of Atom. 
+ type: object + type: object + served: true + storage: false + subresources: + status: {} - name: v3 schema: openAPIV3Schema: @@ -57,6 +364,21 @@ spec: - email - name type: object + datasetMetadataLinks: + items: + description: Metadatalink represents a link in the service + or dataset feed + properties: + metadataIdentifier: + type: string + templates: + items: + type: string + type: array + required: + - metadataIdentifier + type: object + type: array entries: items: description: Entry represents an entry within a dataset feed, @@ -192,19 +514,6 @@ spec: description: Service defines the service configuration for the Atom feed properties: - author: - description: |- - Author todo: move to higher level - Author specifies the author or owner information - properties: - email: - type: string - name: - type: string - required: - - email - - name - type: object baseUrl: type: string lang: @@ -230,8 +539,25 @@ spec: - href type: object type: array + ownerInfoRef: + type: string rights: type: string + serviceMetadataLinks: + items: + description: Metadatalink represents a link in the service or + dataset feed + properties: + metadataIdentifier: + type: string + templates: + items: + type: string + type: array + required: + - metadataIdentifier + type: object + type: array stylesheet: type: string subtitle: @@ -239,8 +565,8 @@ spec: title: type: string required: - - author - baseUrl + - ownerInfoRef - title type: object required: diff --git a/config/crd/kustomization.yaml b/config/crd/kustomization.yaml index ac6687e..6db2335 100644 --- a/config/crd/kustomization.yaml +++ b/config/crd/kustomization.yaml @@ -8,9 +8,10 @@ resources: patches: # [WEBHOOK] To enable webhook, uncomment all the sections with [WEBHOOK] prefix. # patches here are for enabling the conversion webhook for each CRD +- path: patches/webhook_in_atoms.yaml # +kubebuilder:scaffold:crdkustomizewebhookpatch # [WEBHOOK] To enable webhook, uncomment the following section # the following config is for teaching kustomize how to do kustomization for CRDs. -#configurations: -#- kustomizeconfig.yaml +configurations: +- kustomizeconfig.yaml diff --git a/config/crd/patches/webhook_in_atoms.yaml b/config/crd/patches/webhook_in_atoms.yaml new file mode 100644 index 0000000..ebe005a --- /dev/null +++ b/config/crd/patches/webhook_in_atoms.yaml @@ -0,0 +1,16 @@ +# The following patch enables a conversion webhook for the CRD +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition +metadata: + name: atoms.pdok.nl +spec: + conversion: + strategy: Webhook + webhook: + clientConfig: + service: + namespace: system + name: webhook-service + path: /convert + conversionReviewVersions: + - v1 diff --git a/config/default/kustomization.yaml b/config/default/kustomization.yaml index e088d86..8504bf2 100644 --- a/config/default/kustomization.yaml +++ b/config/default/kustomization.yaml @@ -1,5 +1,5 @@ # Adds namespace to all resources. -namespace: atom-operator-system +namespace: services # Value of this field is prepended to the # names of all resources, e.g. a deployment named @@ -20,9 +20,9 @@ resources: - ../manager # [WEBHOOK] To enable webhook, uncomment all the sections with [WEBHOOK] prefix including the one in # crd/kustomization.yaml -#- ../webhook +- ../webhook # [CERTMANAGER] To enable cert-manager, uncomment all sections with 'CERTMANAGER'. 'WEBHOOK' components are required. -#- ../certmanager +- ../certmanager # [PROMETHEUS] To enable prometheus monitor, uncomment all sections with 'PROMETHEUS'. 
#- ../prometheus # [METRICS] Expose the controller manager metrics service. @@ -44,169 +44,187 @@ patches: # Uncomment the patches line if you enable Metrics and CertManager # [METRICS-WITH-CERTS] To enable metrics protected with certManager, uncomment the following line. # This patch will protect the metrics with certManager self-signed certs. -#- path: cert_metrics_manager_patch.yaml -# target: -# kind: Deployment +- path: cert_metrics_manager_patch.yaml + target: + kind: Deployment # [WEBHOOK] To enable webhook, uncomment all the sections with [WEBHOOK] prefix including the one in # crd/kustomization.yaml -#- path: manager_webhook_patch.yaml -# target: -# kind: Deployment +- path: manager_webhook_patch.yaml + target: + kind: Deployment # [CERTMANAGER] To enable cert-manager, uncomment all sections with 'CERTMANAGER' prefix. # Uncomment the following replacements to add the cert-manager CA injection annotations -#replacements: -# - source: # Uncomment the following block to enable certificates for metrics -# kind: Service -# version: v1 -# name: controller-manager-metrics-service -# fieldPath: metadata.name -# targets: -# - select: -# kind: Certificate -# group: cert-manager.io -# version: v1 -# name: metrics-certs -# fieldPaths: -# - spec.dnsNames.0 -# - spec.dnsNames.1 -# options: -# delimiter: '.' -# index: 0 -# create: true -# -# - source: -# kind: Service -# version: v1 -# name: controller-manager-metrics-service -# fieldPath: metadata.namespace -# targets: -# - select: -# kind: Certificate -# group: cert-manager.io -# version: v1 -# name: metrics-certs -# fieldPaths: -# - spec.dnsNames.0 -# - spec.dnsNames.1 -# options: -# delimiter: '.' -# index: 1 -# create: true -# -# - source: # Uncomment the following block if you have any webhook -# kind: Service -# version: v1 -# name: webhook-service -# fieldPath: .metadata.name # Name of the service -# targets: -# - select: -# kind: Certificate -# group: cert-manager.io -# version: v1 -# name: serving-cert -# fieldPaths: -# - .spec.dnsNames.0 -# - .spec.dnsNames.1 -# options: -# delimiter: '.' -# index: 0 -# create: true -# - source: -# kind: Service -# version: v1 -# name: webhook-service -# fieldPath: .metadata.namespace # Namespace of the service -# targets: -# - select: -# kind: Certificate -# group: cert-manager.io -# version: v1 -# name: serving-cert -# fieldPaths: -# - .spec.dnsNames.0 -# - .spec.dnsNames.1 -# options: -# delimiter: '.' 
-# index: 1 -# create: true -# -# - source: # Uncomment the following block if you have a ValidatingWebhook (--programmatic-validation) -# kind: Certificate -# group: cert-manager.io -# version: v1 -# name: serving-cert # This name should match the one in certificate.yaml -# fieldPath: .metadata.namespace # Namespace of the certificate CR -# targets: -# - select: -# kind: ValidatingWebhookConfiguration -# fieldPaths: -# - .metadata.annotations.[cert-manager.io/inject-ca-from] -# options: -# delimiter: '/' -# index: 0 -# create: true -# - source: -# kind: Certificate -# group: cert-manager.io -# version: v1 -# name: serving-cert -# fieldPath: .metadata.name -# targets: -# - select: -# kind: ValidatingWebhookConfiguration -# fieldPaths: -# - .metadata.annotations.[cert-manager.io/inject-ca-from] -# options: -# delimiter: '/' -# index: 1 -# create: true -# -# - source: # Uncomment the following block if you have a DefaultingWebhook (--defaulting ) -# kind: Certificate -# group: cert-manager.io -# version: v1 -# name: serving-cert -# fieldPath: .metadata.namespace # Namespace of the certificate CR -# targets: -# - select: -# kind: MutatingWebhookConfiguration -# fieldPaths: -# - .metadata.annotations.[cert-manager.io/inject-ca-from] -# options: -# delimiter: '/' -# index: 0 -# create: true -# - source: -# kind: Certificate -# group: cert-manager.io -# version: v1 -# name: serving-cert -# fieldPath: .metadata.name -# targets: -# - select: -# kind: MutatingWebhookConfiguration -# fieldPaths: -# - .metadata.annotations.[cert-manager.io/inject-ca-from] -# options: -# delimiter: '/' -# index: 1 -# create: true -# -# - source: # Uncomment the following block if you have a ConversionWebhook (--conversion) -# kind: Certificate -# group: cert-manager.io -# version: v1 -# name: serving-cert -# fieldPath: .metadata.namespace # Namespace of the certificate CR -# targets: # Do not remove or uncomment the following scaffold marker; required to generate code for target CRD. -# +kubebuilder:scaffold:crdkustomizecainjectionns -# - source: -# kind: Certificate -# group: cert-manager.io -# version: v1 -# name: serving-cert -# fieldPath: .metadata.name -# targets: # Do not remove or uncomment the following scaffold marker; required to generate code for target CRD. -# +kubebuilder:scaffold:crdkustomizecainjectionname +replacements: + - source: # Uncomment the following block to enable certificates for metrics + kind: Service + version: v1 + name: controller-manager-metrics-service + fieldPath: metadata.name + targets: + - select: + kind: Certificate + group: cert-manager.io + version: v1 + name: metrics-certs + fieldPaths: + - spec.dnsNames.0 + - spec.dnsNames.1 + options: + delimiter: '.' + index: 0 + create: true + + - source: + kind: Service + version: v1 + name: controller-manager-metrics-service + fieldPath: metadata.namespace + targets: + - select: + kind: Certificate + group: cert-manager.io + version: v1 + name: metrics-certs + fieldPaths: + - spec.dnsNames.0 + - spec.dnsNames.1 + options: + delimiter: '.' + index: 1 + create: true + + - source: # Uncomment the following block if you have any webhook + kind: Service + version: v1 + name: webhook-service + fieldPath: .metadata.name # Name of the service + targets: + - select: + kind: Certificate + group: cert-manager.io + version: v1 + name: serving-cert + fieldPaths: + - .spec.dnsNames.0 + - .spec.dnsNames.1 + options: + delimiter: '.' 
+ index: 0 + create: true + - source: + kind: Service + version: v1 + name: webhook-service + fieldPath: .metadata.namespace # Namespace of the service + targets: + - select: + kind: Certificate + group: cert-manager.io + version: v1 + name: serving-cert + fieldPaths: + - .spec.dnsNames.0 + - .spec.dnsNames.1 + options: + delimiter: '.' + index: 1 + create: true + + - source: # Uncomment the following block if you have a ValidatingWebhook (--programmatic-validation) + kind: Certificate + group: cert-manager.io + version: v1 + name: serving-cert # This name should match the one in certificate.yaml + fieldPath: .metadata.namespace # Namespace of the certificate CR + targets: + - select: + kind: ValidatingWebhookConfiguration + fieldPaths: + - .metadata.annotations.[cert-manager.io/inject-ca-from] + options: + delimiter: '/' + index: 0 + create: true + - source: + kind: Certificate + group: cert-manager.io + version: v1 + name: serving-cert + fieldPath: .metadata.name + targets: + - select: + kind: ValidatingWebhookConfiguration + fieldPaths: + - .metadata.annotations.[cert-manager.io/inject-ca-from] + options: + delimiter: '/' + index: 1 + create: true + + - source: # Uncomment the following block if you have a DefaultingWebhook (--defaulting ) + kind: Certificate + group: cert-manager.io + version: v1 + name: serving-cert + fieldPath: .metadata.namespace # Namespace of the certificate CR + targets: + - select: + kind: MutatingWebhookConfiguration + fieldPaths: + - .metadata.annotations.[cert-manager.io/inject-ca-from] + options: + delimiter: '/' + index: 0 + create: true + - source: + kind: Certificate + group: cert-manager.io + version: v1 + name: serving-cert + fieldPath: .metadata.name + targets: + - select: + kind: MutatingWebhookConfiguration + fieldPaths: + - .metadata.annotations.[cert-manager.io/inject-ca-from] + options: + delimiter: '/' + index: 1 + create: true + + - source: # Uncomment the following block if you have a ConversionWebhook (--conversion) + kind: Certificate + group: cert-manager.io + version: v1 + name: serving-cert + fieldPath: .metadata.namespace # Namespace of the certificate CR + targets: # Do not remove or uncomment the following scaffold marker; required to generate code for target CRD. + - select: + kind: CustomResourceDefinition + name: atoms.pdok.nl + fieldPaths: + - .metadata.annotations.[cert-manager.io/inject-ca-from] + options: + delimiter: '/' + index: 0 + create: true + # +kubebuilder:scaffold:crdkustomizecainjectionns + - source: + kind: Certificate + group: cert-manager.io + version: v1 + name: serving-cert + fieldPath: .metadata.name + targets: # Do not remove or uncomment the following scaffold marker; required to generate code for target CRD. + - select: + kind: CustomResourceDefinition + name: atoms.pdok.nl + fieldPaths: + - .metadata.annotations.[cert-manager.io/inject-ca-from] + options: + delimiter: '/' + index: 1 + create: true + # +kubebuilder:scaffold:crdkustomizecainjectionname diff --git a/config/default/manager_webhook_patch.yaml b/config/default/manager_webhook_patch.yaml new file mode 100644 index 0000000..963c8a4 --- /dev/null +++ b/config/default/manager_webhook_patch.yaml @@ -0,0 +1,31 @@ +# This patch ensures the webhook certificates are properly mounted in the manager container. +# It configures the necessary arguments, volumes, volume mounts, and container ports. 
+ +# Add the --webhook-cert-path argument for configuring the webhook certificate path +- op: add + path: /spec/template/spec/containers/0/args/- + value: --webhook-cert-path=/tmp/k8s-webhook-server/serving-certs + +# Add the volumeMount for the webhook certificates +- op: add + path: /spec/template/spec/containers/0/volumeMounts/- + value: + mountPath: /tmp/k8s-webhook-server/serving-certs + name: webhook-certs + readOnly: true + +# Add the port configuration for the webhook server +- op: add + path: /spec/template/spec/containers/0/ports/- + value: + containerPort: 9443 + name: webhook-server + protocol: TCP + +# Add the volume configuration for the webhook certificates +- op: add + path: /spec/template/spec/volumes/- + value: + name: webhook-certs + secret: + secretName: webhook-server-cert diff --git a/config/manager/kustomization.yaml b/config/manager/kustomization.yaml index 5c5f0b8..c8f7ecf 100644 --- a/config/manager/kustomization.yaml +++ b/config/manager/kustomization.yaml @@ -1,2 +1,8 @@ resources: - manager.yaml +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization +images: +- name: controller + newName: pdok/atom-operator + newTag: 2.0.7 diff --git a/config/network-policy/allow-webhook-traffic.yaml b/config/network-policy/allow-webhook-traffic.yaml new file mode 100644 index 0000000..d86d9f6 --- /dev/null +++ b/config/network-policy/allow-webhook-traffic.yaml @@ -0,0 +1,27 @@ +# This NetworkPolicy allows ingress traffic to your webhook server running +# as part of the controller-manager from specific namespaces and pods. CR(s) which uses webhooks +# will only work when applied in namespaces labeled with 'webhook: enabled' +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + labels: + app.kubernetes.io/name: atom-operator + app.kubernetes.io/managed-by: kustomize + name: allow-webhook-traffic + namespace: system +spec: + podSelector: + matchLabels: + control-plane: controller-manager + app.kubernetes.io/name: atom-operator + policyTypes: + - Ingress + ingress: + # This allows ingress traffic from any namespace with the label webhook: enabled + - from: + - namespaceSelector: + matchLabels: + webhook: enabled # Only from namespaces with this label + ports: + - port: 443 + protocol: TCP diff --git a/config/network-policy/kustomization.yaml b/config/network-policy/kustomization.yaml index ec0fb5e..0872bee 100644 --- a/config/network-policy/kustomization.yaml +++ b/config/network-policy/kustomization.yaml @@ -1,2 +1,3 @@ resources: +- allow-webhook-traffic.yaml - allow-metrics-traffic.yaml diff --git a/config/samples/kustomization.yaml b/config/samples/kustomization.yaml index 025e1b5..b2665f1 100644 --- a/config/samples/kustomization.yaml +++ b/config/samples/kustomization.yaml @@ -1,4 +1,5 @@ ## Append samples of your project ## resources: - v3_atom.yaml +- v2beta1_atom.yaml # +kubebuilder:scaffold:manifestskustomizesamples diff --git a/config/samples/v2beta1_atom.yaml b/config/samples/v2beta1_atom.yaml new file mode 100644 index 0000000..2bb3f38 --- /dev/null +++ b/config/samples/v2beta1_atom.yaml @@ -0,0 +1,61 @@ +apiVersion: pdok.nl/v2beta1 +kind: Atom +metadata: + labels: + app.kubernetes.io/name: atom-operator + app.kubernetes.io/managed-by: kustomize + dataset: dataset + dataset-owner: owner + service-type: atom + name: v2-sample +spec: + general: + dataset: dataset + datasetOwner: owner + dataVersion: + serviceVersion: v1_0 + theme: + service: + title: Test Dataset ATOM + subtitle: Test Dataset ATOM + metadataIdentifier: 1234-456 + rights: 
https://creativecommons.org/publicdomain/zero/1.0/deed.nl + author: + name: "PDOK Beheer" + email: "beheerPDOK@kadaster.nl" + datasets: + - name: dataset-1-name + title: "dataset-1-title \"1\"" + subtitle: "dataset-1-subtitle \"1\"" + metadataIdentifier: 00000000-0000-0000-0000-000000000000 + sourceIdentifier: 00000000-0000-0000-0000-000000000000 + links: + - type: encodingRule + uri: https://www.eionet.europa.eu/reportnet/docs/noise/guidelines/geopackage-encoding-rule-end.pdf + contentType: application/pdf + language: en + bbox: + minx: 3.31 + maxx: 7.241 + miny: 50.731 + maxy: 53.61 + downloads: + - name: "dataset_1_id_1" + updated: "2012-03-31T13:45:03Z" + content: "Per Featuretype is er een downloadbestand beschikbaar" + title: "Dataset download GML" + links: + - BlobKey: "http://localazurite.blob.azurite/bucket/key1/dataset_1.gpkg" + updated: "2022-12-02T14:02:14Z" + bbox: + minx: 3.31 + maxx: 7.241 + miny: 50.731 + maxy: 53.61 + version: "v1" + srs: + uri: "https://www.opengis.net/def/crs/EPSG/0/28992" + code: "Amersfoort / RD New" + kubernetes: + lifecycle: + ttlInDays: 1 diff --git a/config/samples/v3_atom.yaml b/config/samples/v3_atom.yaml index eb0fd03..eefeb77 100644 --- a/config/samples/v3_atom.yaml +++ b/config/samples/v3_atom.yaml @@ -4,6 +4,161 @@ metadata: labels: app.kubernetes.io/name: atom-operator app.kubernetes.io/managed-by: kustomize - name: atom-sample + dataset: dataset + dataset-owner: owner + service-type: atom + name: v3-sample spec: - # TODO(user): Add fields here + lifecycle: + ttlInDays: + service: + baseUrl: "https://service.pdok.nl/owner/dataset/atom/index.xml" + lang: nl + stylesheet: "https://service.pdok.nl/atom/style/style.xsl" + title: Test Dataset ATOM + subtitle: Test Dataset ATOM + ownerInfoRef: pdok + serviceMetadataLinks: + metadataIdentifier: 9615cd15-631b-45f8-a22a-d9d9c48cf211 + templates: + - csv + - opensearch + - html + rights: https://creativecommons.org/publicdomain/zero/1.0/deed.nl + datasetFeeds: + - technicalName: dataset-1-name + title: "dataset-1-title \"1\"" + subtitle: "dataset-1-subtitle \"1\"" + datasetMetadataLinks: + metadataIdentifier: 9615cd15-631b-45f8-a22a-d9d9c48cf211 + templates: + - csv + - html + author: + name: owner + email: info@test.nl + spatial_dataset_identifier_code: "ce2c46dd-50c6-4beb-ba96-f769e948b296" + spatial_dataset_identifier_namespace: "http://www.pdok.nl" # default value + entries: + - technicalName: "dataset-1-name" + downloadlinks: + - data: "http://localazurite.blob.azurite/bucket/key1/dataset-1-file" + updated: "2012-03-31T13:45:03Z" + polygon: + bbox: + minx: "3.31" + maxx: "7.241" + miny: "50.731" + maxy: "53.61" + srs: + name: "Amersfoort / RD New" + uri: "https://www.opengis.net/def/crs/EPSG/0/28992" + - technicalName: dataset-2-name + title: "dataset-2-title \"2\"" + subtitle: "dataset-2-subtitle \"2\"" + datasetMetadataLinks: + metadataIdentifier: 9615cd15-631b-45f8-a22a-d9d9c48cf211 + templates: + - csv + - html + links: + - href: https://www.eionet.europa.eu/reportnet/docs/noise/guidelines/geopackage-encoding-rule-end.pdf + title: encodingRule + type: application/pdf + hreflang: en + author: + name: owner + email: info@test.nl + spatial_dataset_identifier_code: "ce2c46dd-50c6-4beb-ba96-f769e948b296" + spatial_dataset_identifier_namespace: "http://www.pdok.nl" # default value + entries: + - technicalName: "dataset_2_id_1" + title: "Dataset download GPKG" + downloadlinks: + - data: "http://localazurite.blob.azurite/bucket/key2/dataset.gpkg" + time: "2022-12-02T14:02:14Z" + - data: 
"http://localazurite.blob.azurite/bucket/key2/dataset.gpkg" + version: "v1" # mogelijk deprecated + time: "2022-12-02T14:02:14Z" + updated: "2012-03-31T13:45:03Z" + polygon: + bbox: + minx: "3.32" + maxx: "7.242" + miny: "50.732" + maxy: "53.62" + srs: + name: "Amersfoort / RD New" + uri: "https://www.opengis.net/def/crs/EPSG/0/28992" + - technicalName: "dataset_2_id_2" + title: "Dataset download GML" + content: "Per Featuretype is er een downloadbestand beschikbaar" + downloadlinks: + - data: "http://localazurite.blob.azurite/bucket/key2/dataset_2_1.gml" + time: "2022-12-03T15:03:15Z" + bbox: + minx: "1" + miny: "2" + maxx: "3" + maxy: "4" + - data: "http://localazurite.blob.azurite/bucket/key2/dataset_2_2.gml" + time: "2022-12-04T16:04:16Z" + bbox: + minx: "5" + miny: "6" + maxx: "7" + maxy: "8" + updated: "2012-03-31T13:45:03Z" + polygon: + bbox: + minx: "3.32" + maxx: "7.242" + miny: "50.732" + maxy: "53.62" + srs: + name: "Amersfoort / RD New" + uri: "https://www.opengis.net/def/crs/EPSG/0/28992" + - technicalName: "dataset_2_id_3" + title: "Dataset GPKG in alternate projection" + downloadlinks: + - data: "http://localazurite.blob.azurite/bucket/key2/dataset-alt.gpkg" + updated: "2012-03-31T13:45:03Z" + polygon: + bbox: + minx: "3.32" + maxx: "7.242" + miny: "50.732" + maxy: "53.62" + srs: + name: "Amersfoort / RD New" + uri: "https://www.opengis.net/def/crs/EPSG/0/28992" + - technicalName: "dataset_2_id_4" + title: "Dataset download GEOTiff" + content: "geotiff downloads" + downloadlinks: + - data: "http://localazurite.blob.azurite/bucket/key2/kaartbladindex.json" + rel: index + - data: "http://localazurite.blob.azurite/bucket/key2/00.tif" + time: "2022-12-03T15:03:15Z" + bbox: + minx: "1" + miny: "2" + maxx: "3" + maxy: "4" + - data: "http://localazurite.blob.azurite/bucket/key2/01.tif" + time: "2022-12-04T16:04:16Z" + bbox: + minx: "5" + miny: "6" + maxx: "7" + maxy: "8" + updated: "2012-03-31T13:45:03Z" + polygon: + bbox: + minx: "3.32" + maxx: "7.242" + miny: "50.732" + maxy: "53.62" + srs: + name: "Amersfoort / RD New" + uri: "https://www.opengis.net/def/crs/EPSG/0/28992" diff --git a/config/webhook/kustomization.yaml b/config/webhook/kustomization.yaml new file mode 100644 index 0000000..e040ac0 --- /dev/null +++ b/config/webhook/kustomization.yaml @@ -0,0 +1,6 @@ +resources: +# - manifests.yaml see https://github.com/kubernetes-sigs/kubebuilder/issues/2231 +- service.yaml + +configurations: +- kustomizeconfig.yaml diff --git a/config/webhook/kustomizeconfig.yaml b/config/webhook/kustomizeconfig.yaml new file mode 100644 index 0000000..206316e --- /dev/null +++ b/config/webhook/kustomizeconfig.yaml @@ -0,0 +1,22 @@ +# the following config is for teaching kustomize where to look at when substituting nameReference. +# It requires kustomize v2.1.0 or newer to work properly. 
+nameReference:
+- kind: Service
+  version: v1
+  fieldSpecs:
+  - kind: MutatingWebhookConfiguration
+    group: admissionregistration.k8s.io
+    path: webhooks/clientConfig/service/name
+  - kind: ValidatingWebhookConfiguration
+    group: admissionregistration.k8s.io
+    path: webhooks/clientConfig/service/name
+
+namespace:
+- kind: MutatingWebhookConfiguration
+  group: admissionregistration.k8s.io
+  path: webhooks/clientConfig/service/namespace
+  create: true
+- kind: ValidatingWebhookConfiguration
+  group: admissionregistration.k8s.io
+  path: webhooks/clientConfig/service/namespace
+  create: true
diff --git a/config/webhook/service.yaml b/config/webhook/service.yaml
new file mode 100644
index 0000000..02a2261
--- /dev/null
+++ b/config/webhook/service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:
+  labels:
+    app.kubernetes.io/name: atom-operator
+    app.kubernetes.io/managed-by: kustomize
+  name: webhook-service
+  namespace: system
+spec:
+  ports:
+    - port: 443
+      protocol: TCP
+      targetPort: 9443
+  selector:
+    control-plane: controller-manager
+    app.kubernetes.io/name: atom-operator
diff --git a/go.mod b/go.mod
index 3d56576..9e866b3 100644
--- a/go.mod
+++ b/go.mod
@@ -7,6 +7,7 @@ godebug default=go1.23
 require (
 	github.com/onsi/ginkgo/v2 v2.21.0
 	github.com/onsi/gomega v1.35.1
+	k8s.io/api v0.32.0
 	k8s.io/apimachinery v0.32.0
 	k8s.io/client-go v0.32.0
 	sigs.k8s.io/controller-runtime v0.20.0
@@ -86,7 +87,6 @@ require (
 	gopkg.in/evanphx/json-patch.v4 v4.12.0 // indirect
 	gopkg.in/inf.v0 v0.9.1 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
-	k8s.io/api v0.32.0 // indirect
 	k8s.io/apiextensions-apiserver v0.32.0 // indirect
 	k8s.io/apiserver v0.32.0 // indirect
 	k8s.io/component-base v0.32.0 // indirect
diff --git a/internal/webhook/v2beta1/atom_webhook.go b/internal/webhook/v2beta1/atom_webhook.go
new file mode 100644
index 0000000..6313d4e
--- /dev/null
+++ b/internal/webhook/v2beta1/atom_webhook.go
@@ -0,0 +1,46 @@
+/*
+MIT License
+
+Copyright (c) 2024 Publieke Dienstverlening op de Kaart
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+*/
+
+package v2beta1
+
+import (
+	ctrl "sigs.k8s.io/controller-runtime"
+	logf "sigs.k8s.io/controller-runtime/pkg/log"
+
+	pdoknlv2beta1 "github.com/pdok/atom-operator/api/v2beta1"
+)
+
+// nolint:unused
+// log is for logging in this package.
+var atomlog = logf.Log.WithName("atom-resource")
+
+// SetupAtomWebhookWithManager registers the webhook for Atom in the manager.
+func SetupAtomWebhookWithManager(mgr ctrl.Manager) error { + return ctrl.NewWebhookManagedBy(mgr).For(&pdoknlv2beta1.Atom{}). + // Todo add WithValidator / AtomCustomValidator ? + // Todo add WithDefaulter / AtomCustomDefaulter ? + Complete() +} + +// TODO(user): EDIT THIS FILE! THIS IS SCAFFOLDING FOR YOU TO OWN! diff --git a/internal/webhook/v3/atom_webhook.go b/internal/webhook/v3/atom_webhook.go new file mode 100644 index 0000000..a240c69 --- /dev/null +++ b/internal/webhook/v3/atom_webhook.go @@ -0,0 +1,46 @@ +/* +MIT License + +Copyright (c) 2024 Publieke Dienstverlening op de Kaart + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + +package v3 + +import ( + ctrl "sigs.k8s.io/controller-runtime" + logf "sigs.k8s.io/controller-runtime/pkg/log" + + pdoknlv3 "github.com/pdok/atom-operator/api/v3" +) + +// nolint:unused +// log is for logging in this package. +var atomlog = logf.Log.WithName("atom-resource") + +// SetupAtomWebhookWithManager registers the webhook for Atom in the manager. +func SetupAtomWebhookWithManager(mgr ctrl.Manager) error { + return ctrl.NewWebhookManagedBy(mgr).For(&pdoknlv3.Atom{}). + // Todo add WithValidator / AtomCustomValidator ? + // Todo add WithDefaulter / AtomCustomDefaulter ? + Complete() +} + +// TODO(user): EDIT THIS FILE! THIS IS SCAFFOLDING FOR YOU TO OWN! diff --git a/internal/webhook/v3/atom_webhook_test.go b/internal/webhook/v3/atom_webhook_test.go new file mode 100644 index 0000000..e46b800 --- /dev/null +++ b/internal/webhook/v3/atom_webhook_test.go @@ -0,0 +1,63 @@ +/* +MIT License + +Copyright (c) 2024 Publieke Dienstverlening op de Kaart + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+*/
+
+package v3
+
+import (
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
+
+	pdoknlv3 "github.com/pdok/atom-operator/api/v3"
+	// TODO (user): Add any additional imports if needed
+)
+
+var _ = Describe("Atom Webhook", func() {
+	var (
+		obj    *pdoknlv3.Atom
+		oldObj *pdoknlv3.Atom
+	)
+
+	BeforeEach(func() {
+		obj = &pdoknlv3.Atom{}
+		oldObj = &pdoknlv3.Atom{}
+		Expect(oldObj).NotTo(BeNil(), "Expected oldObj to be initialized")
+		Expect(obj).NotTo(BeNil(), "Expected obj to be initialized")
+		// TODO (user): Add any setup logic common to all tests
+	})
+
+	AfterEach(func() {
+		// TODO (user): Add any teardown logic common to all tests
+	})
+
+	Context("When creating Atom under Conversion Webhook", func() {
+		// TODO (user): Add logic to convert the object to the desired version and verify the conversion
+		// Example:
+		// It("Should convert the object correctly", func() {
+		//     convertedObj := &pdoknlv3.Atom{}
+		//     Expect(obj.ConvertTo(convertedObj)).To(Succeed())
+		//     Expect(convertedObj).ToNot(BeNil())
+		// })
+	})
+
+})
diff --git a/test/e2e/e2e_test.go b/test/e2e/e2e_test.go
index a3efe77..064687b 100644
--- a/test/e2e/e2e_test.go
+++ b/test/e2e/e2e_test.go
@@ -31,7 +31,7 @@ import (
 )
 
 // namespace where the project is deployed in
-const namespace = "atom-operator-system"
+const namespace = "services"
 
 // serviceAccountName created for the project
 const serviceAccountName = "atom-operator-controller-manager"
@@ -261,6 +261,30 @@ var _ = Describe("Manager", Ordered, func() {
 			))
 		})
 
+		It("should have provisioned cert-manager", func() {
+			By("validating that cert-manager has the certificate Secret")
+			verifyCertManager := func(g Gomega) {
+				cmd := exec.Command("kubectl", "get", "secrets", "webhook-server-cert", "-n", namespace)
+				_, err := utils.Run(cmd)
+				g.Expect(err).NotTo(HaveOccurred())
+			}
+			Eventually(verifyCertManager).Should(Succeed())
+		})
+
+		It("should have CA injection for Atom conversion webhook", func() {
+			By("checking CA injection for Atom conversion webhook")
+			verifyCAInjection := func(g Gomega) {
+				cmd := exec.Command("kubectl", "get",
+					"customresourcedefinitions.apiextensions.k8s.io",
+					"atoms.pdok.nl",
+					"-o", "go-template={{ .spec.conversion.webhook.clientConfig.caBundle }}")
+				vwhOutput, err := utils.Run(cmd)
+				g.Expect(err).NotTo(HaveOccurred())
+				g.Expect(len(vwhOutput)).To(BeNumerically(">", 10))
+			}
+			Eventually(verifyCAInjection).Should(Succeed())
+		})
+
 		// +kubebuilder:scaffold:e2e-webhooks-checks
 
 		// TODO: Customize the e2e test suite with scenarios specific to your project.
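
The conversion webhook test above is still scaffolded with TODOs. The following is a minimal, hypothetical sketch of a spoke-to-hub conversion check, not part of the diff: it assumes the v2beta1 Atom implements conversion.Convertible through the ConvertTo method added in api/v2beta1/atom_conversion.go, and the Go field path Spec.Service.Title is an assumption derived from the CRD schema; adjust the field names to the real API types.

// Hypothetical sketch only; field paths are assumptions based on the v2beta1/v3 CRD schemas.
package v2beta1_test

import (
	"testing"

	pdoknlv2beta1 "github.com/pdok/atom-operator/api/v2beta1"
	pdoknlv3 "github.com/pdok/atom-operator/api/v3"
)

func TestConvertToHub(t *testing.T) {
	// Build a minimal v2beta1 (spoke) Atom and convert it to the v3 hub version.
	src := &pdoknlv2beta1.Atom{}
	src.Spec.Service.Title = "Test Dataset ATOM" // assumed Go field path for spec.service.title
	dst := &pdoknlv3.Atom{}

	// ConvertTo is the spoke-to-hub conversion implemented in api/v2beta1/atom_conversion.go.
	if err := src.ConvertTo(dst); err != nil {
		t.Fatalf("conversion to v3 failed: %v", err)
	}
	// Assumes the service title is carried over unchanged by the conversion.
	if dst.Spec.Service.Title != "Test Dataset ATOM" {
		t.Errorf("expected title to be preserved, got %q", dst.Spec.Service.Title)
	}
}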