Skip to content

Commit 130e10a

Browse files
committed
WIP
1 parent 9fbf71a commit 130e10a

File tree

7 files changed

+342
-0
lines changed

7 files changed

+342
-0
lines changed
Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
# Kyverno policy that mirrors a kubeflow spark-operator SparkApplication
# (sparkoperator.k8s.io/v1beta2) into an Apache Spark Kubernetes operator
# SparkApplication (spark.apache.org/v1alpha1) at creation time.
apiVersion: kyverno.io/v1
kind: ClusterPolicy
metadata:
  name: apache-spark-app-generate
spec:
  rules:
    - name: generate-apache-spark-app
      match:
        all:
          - resources:
              kinds:
                - sparkoperator.k8s.io/v1beta2/SparkApplication
              operations:
                - CREATE
      context:
        # Driver pod labels of the source app; defaults to an empty map so
        # the derivations below never operate on null.
        - name: driverLabels
          variable:
            value: "{{ driverLabels := request.object.spec.driver.labels || `{}` }}"
        # Label values, in key order; paired element-wise with
        # driverLabelsConfKeys by object_from_lists below.
        - name: driverLabelsConfValues
          variable:
            value: "{{ driverLabels | values(@) }}"
        # Each label key prefixed into its spark conf form, e.g.
        # "version" -> "spark.kubernetes.driver.labels.version".
        # Context variables are referenced directly by name inside the
        # expression; no nested {{...}} substitution is needed here (the
        # original's keys({{driverLabels}}) inlined raw JSON that is not a
        # valid JMESPath literal).
        - name: driverLabelsConfKeys
          variable:
            value: "{{ map(&join('', ['spark.kubernetes.driver.labels.', @]), keys(driverLabels)) }}"
        # Zip the derived keys and values back into a single conf map.
        # (Fixes the stray trailing ')' after the closing braces in the
        # original expression.)
        - name: driverLabelsConf
          variable:
            value: "{{ object_from_lists(driverLabelsConfKeys, driverLabelsConfValues) }}"
        - name: jobName
          variable:
            jmesPath: request.object.metadata.name
        - name: sparkVersion
          variable:
            jmesPath: request.object.spec.sparkVersion
        # One-entry conf map carrying the dynamic-allocation flag as the
        # string spark expects; defaults to "false" when unset.
        - name: dynamicAllocationEnabledConf
          variable:
            jmesPath: '{"spark.dynamicAllocation.enabled": to_string(request.object.spec.dynamicAllocation.enabled || `false`)}'
        # true when the source app pins a non-zero executor count.
        - name: executorInstancesContains
          variable:
            jmesPath: "(request.object.spec.executor.instances || `0`) != `0`"
        # Emulated if/else: select the populated conf map when an executor
        # count is set, an empty map otherwise. {{ executorInstancesContains }}
        # must remain a substitution here because JMESPath filter
        # expressions cannot reference outer-scope variables directly.
        # (Fallback fixed from the "foo" placeholder to `0`, matching the
        # default used by executorInstancesContains.)
        - name: executorInstancesConf
          variable:
            jmesPath: '[{"key": `true`, "value": {"spark.executor.instances": to_string(request.object.spec.executor.instances || `0`)}}, {"key": `false`, "value": `{}`}][?key == `{{ executorInstancesContains }}`] | [0].value'
      reportProperties:
        # Plain substitution; the original's surrounding backticks would
        # have appeared verbatim in the report value.
        sparkAppName: "{{ jobName }}"
      generate:
        kind: SparkApplication
        apiVersion: spark.apache.org/v1alpha1
        name: "{{ request.object.metadata.name }}"
        namespace: "{{ request.object.metadata.namespace }}"
        data:
          kind: SparkApplication
          apiVersion: spark.apache.org/v1alpha1
          metadata:
            # Owner reference ties the generated app's lifecycle to the
            # source app so it is garbage-collected along with it.
            ownerReferences:
              - apiVersion: sparkoperator.k8s.io/v1beta2
                kind: SparkApplication
                name: "{{ request.object.metadata.name }}"
                uid: "{{ request.object.metadata.uid }}"
            name: "{{ request.object.metadata.name }}"
            namespace: "{{ request.object.metadata.namespace }}"
            labels: "{{ request.object.metadata.labels || `{}` }}"
          spec:
            mainClass: "{{ request.object.spec.mainClass }}"
            # Field rename across operators: mainApplicationFile -> jars.
            jars: "{{ request.object.spec.mainApplicationFile }}"
            driverArgs: "{{ request.object.spec.arguments }}"
            # spec.sparkConf may be absent on the source app; default it so
            # merge() never receives null.
            sparkConf: "{{ merge(request.object.spec.sparkConf || `{}`, executorInstancesConf, dynamicAllocationEnabledConf, driverLabelsConf) }}"
            runtimeVersions:
              sparkVersion: "{{ sparkVersion }}"

migration/foo.yaml

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
# Minimal scratch policy exercising context-variable plumbing: joins the
# source app's driver label values into a value of a generated ConfigMap.
apiVersion: kyverno.io/v1
kind: ClusterPolicy
metadata:
  name: test
spec:
  rules:
    - name: test
      match:
        all:
          - resources:
              kinds:
                - sparkoperator.k8s.io/v1beta2/SparkApplication
              operations:
                - CREATE
      context:
        # Driver pod labels of the source app; empty map when unset.
        - name: driverLabels
          variable:
            value: "{{ request.object.spec.driver.labels || `{}` }}"
        # Label values, in key order.
        - name: driverLabelsConfValues3
          variable:
            value: "{{ driverLabels | values(@) }}"
      generate:
        apiVersion: v1
        kind: ConfigMap
        namespace: "default"
        name: test
        data:
          apiVersion: v1
          kind: ConfigMap
          metadata:
            name: test
          data:
            # The whole join(...) call must sit inside {{ }} to be
            # evaluated (the original left it outside, producing a literal
            # string), and the empty separator is the JMESPath raw string
            # '' — empty backticks are not a valid JSON literal.
            qi: "{{ join('', driverLabelsConfValues3) }}"

spark-pi.json

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
{
  "apiVersion": "sparkoperator.k8s.io/v1beta2",
  "kind": "SparkApplication",
  "metadata": {
    "name": "spark-pi",
    "namespace": "default"
  },
  "spec": {
    "type": "Scala",
    "mode": "cluster",
    "image": "spark:3.5.2",
    "mainClass": "org.apache.spark.examples.SparkPi",
    "mainApplicationFile": "local:///opt/spark/examples/jars/spark-examples_2.12-3.5.2.jar",
    "sparkConf": {
      "spark.eventLog.enabled": "true",
      "spark.eventLog.dir": "hdfs://hdfs-namenode-1:8020/spark/spark-events"
    },
    "arguments": [
      "5000"
    ],
    "sparkVersion": "3.5.2",
    "driver": {
      "labels": {
        "version": "3.5.2"
      },
      "cores": 1,
      "memory": "512m",
      "serviceAccount": "spark"
    },
    "executor": {
      "labels": {
        "version": "3.5.2"
      },
      "cores": 2,
      "memory": "512m"
    }
  }
}
Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Expected shape of the SparkApplication that the Kyverno policy generates
# from the kubeflow source app (used as a chainsaw assertion).
---
apiVersion: spark.apache.org/v1alpha1
kind: SparkApplication
metadata:
  name: spark-pi
  namespace: default
  ownerReferences:
    - apiVersion: sparkoperator.k8s.io/v1beta2
      kind: SparkApplication
      name: spark-pi
      # uid: "*"
spec:
  mainClass: org.apache.spark.examples.SparkPi
  jars: local:///opt/spark/examples/jars/spark-examples_2.12-3.5.2.jar
  driverArgs: ["5000"]
  sparkConf:
    spark.eventLog.enabled: "true"
    spark.eventLog.dir: "hdfs://hdfs-namenode-1:8020/spark/spark-events"
    spark.kubernetes.container.image: "spark:3.5.2"
    spark.kubernetes.driver.label.version: "3.5.2"
    spark.kubernetes.driver.label.nodeType: "p1"
    spark.driver.cores: "1"
    spark.driver.memory: "512m"
    spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
    spark.executor.memory: "512m"
    spark.executor.instances: "10"
    spark.executor.cores: "2"
runtimeVersions:
  sparkVersion: "3.5.2"
Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Chainsaw end-to-end test: installs the kubeflow spark-operator CRD plus
# the migration policy, creates a source SparkApplication, and asserts the
# mirrored spark.apache.org SparkApplication appears.
---
apiVersion: chainsaw.kyverno.io/v1alpha1
kind: Test
metadata:
  name: add-spark-k8s-operator-cr
spec:
  steps:
    - try:
        # CRD for the source (kubeflow) SparkApplication kind.
        - apply:
            file: https://raw.githubusercontent.com/kubeflow/spark-operator/refs/heads/master/config/crd/bases/sparkoperator.k8s.io_sparkapplications.yaml
        # Permissions for Kyverno's background/reports controllers.
        - apply:
            file: kyverno-rbac.yaml
        - apply:
            file: ../../../migration/apache-spark-app-generate-policy.yaml
        - create:
            file: spark-kubeflow-pi.yaml
        # The generated spark.apache.org app must appear within 2 minutes.
        - assert:
            timeout: 2m
            file: ../assertions/spark-application/spark-apache-pi.yaml
      finally:
        - script:
            timeout: 20s
            content: |
              kubectl delete sparkapplication.sparkoperator.k8s.io spark-pi -n default
              kubectl delete sparkapplication.spark.apache.org spark-pi -n default
              kubectl delete crd sparkapplications.sparkoperator.k8s.io
              kubectl delete clusterpolicy apache-spark-app-generate
Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: spark-application-watcher
rules:
  # Full management of the generated spark.apache.org apps (Kyverno
  # creates and updates them on the source app's behalf).
  - apiGroups: ["spark.apache.org"]
    resources: ["sparkapplications", "sparkapplications/status"]
    verbs: ["get", "list", "watch", "create", "update", "delete"]
  # Observe and maintain the source kubeflow operator apps.
  - apiGroups: ["sparkoperator.k8s.io"]
    resources: ["sparkapplications"]
    verbs: ["get", "list", "watch", "update", "delete"]
---
# Grant the role to Kyverno's background controller, which executes
# generate rules.
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: kyverno-background-rolebinding
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: spark-application-watcher
subjects:
  - kind: ServiceAccount
    name: kyverno-background-controller
    namespace: kyverno
---
# Grant the same role to the reports controller so reportProperties can be
# populated.
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: kyverno-reporter-rolebinding
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: spark-application-watcher
subjects:
  - kind: ServiceAccount
    name: kyverno-reports-controller
    namespace: kyverno
Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Source (kubeflow spark-operator) SparkApplication used as test input.
# Version-like scalars are quoted defensively; parsed values are unchanged.
---
apiVersion: sparkoperator.k8s.io/v1beta2
kind: SparkApplication
metadata:
  name: spark-pi
  namespace: default
spec:
  type: Scala
  mode: cluster
  image: "spark:3.5.2"
  mainClass: org.apache.spark.examples.SparkPi
  mainApplicationFile: "local:///opt/spark/examples/jars/spark-examples_2.12-3.5.2.jar"
  sparkConf:
    spark.eventLog.enabled: "true"
    spark.eventLog.dir: "hdfs://hdfs-namenode-1:8020/spark/spark-events"
  arguments:
    - "5000"
  sparkVersion: "3.5.2"
  driver:
    labels:
      version: "3.5.2"
      nodeType: p1
    cores: 1
    memory: "512m"
    serviceAccount: spark
  executor:
    instances: 10
    cores: 2
    memory: "512m"

0 commit comments

Comments
 (0)