Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions tests/release.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -12,5 +12,11 @@ releases:
operatorVersion: 0.0.0-dev
listener:
operatorVersion: 0.0.0-dev
zookeeper:
operatorVersion: 0.0.0-dev
hdfs:
operatorVersion: 0.0.0-dev
hbase:
operatorVersion: 0.0.0-dev
spark-k8s:
operatorVersion: 0.0.0-dev
9 changes: 9 additions & 0 deletions tests/templates/kuttl/hbase-connector/00-assert.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
---
# kuttl: wait up to 15 minutes for every assertion in this step to hold.
apiVersion: kuttl.dev/v1beta1
kind: TestAssert
timeout: 900
---
# The ServiceAccount created in step 00 must exist before the test proceeds.
apiVersion: v1
kind: ServiceAccount
metadata:
  name: integration-tests-sa
9 changes: 9 additions & 0 deletions tests/templates/kuttl/hbase-connector/00-patch-ns.yaml.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{% if test_scenario['values']['openshift'] == 'true' %}
# see https://github.com/stackabletech/issues/issues/566
---
# On OpenShift, relax pod-security enforcement on the test namespace so the
# operators' pods are admitted.
apiVersion: kuttl.dev/v1beta1
kind: TestStep
commands:
  - script: kubectl patch namespace $NAMESPACE -p '{"metadata":{"labels":{"pod-security.kubernetes.io/enforce":"privileged"}}}'
timeout: 120
{% endif %}
29 changes: 29 additions & 0 deletions tests/templates/kuttl/hbase-connector/00-serviceaccount.yaml.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
---
# Role that (on OpenShift only) grants "use" on the privileged SCC; on other
# platforms the rules list renders empty.
kind: Role
apiVersion: rbac.authorization.k8s.io/v1
metadata:
  name: use-integration-tests-scc
rules:
{% if test_scenario['values']['openshift'] == "true" %}
  - apiGroups: ["security.openshift.io"]
    resources: ["securitycontextconstraints"]
    resourceNames: ["privileged"]
    verbs: ["use"]
{% endif %}
---
# ServiceAccount the test workloads run under.
apiVersion: v1
kind: ServiceAccount
metadata:
  name: integration-tests-sa
---
# Bind the (possibly empty) Role to the test ServiceAccount.
kind: RoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
  name: use-integration-tests-scc
subjects:
  - kind: ServiceAccount
    name: integration-tests-sa
roleRef:
  kind: Role
  name: use-integration-tests-scc
  apiGroup: rbac.authorization.k8s.io
10 changes: 10 additions & 0 deletions tests/templates/kuttl/hbase-connector/01-assert.yaml.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
---
apiVersion: kuttl.dev/v1beta1
kind: TestAssert
{% if lookup('env', 'VECTOR_AGGREGATOR') %}
# Only asserted when log aggregation is enabled for this test run.
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: vector-aggregator-discovery
{% endif %}
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{% if lookup('env', 'VECTOR_AGGREGATOR') %}
---
# Discovery ConfigMap pointing the Vector log agents at the aggregator.
# Rendered only when the VECTOR_AGGREGATOR environment variable is set.
apiVersion: v1
kind: ConfigMap
metadata:
  name: vector-aggregator-discovery
data:
  # Quoted: a templated scalar must not be left plain, or an address containing
  # YAML-special characters (e.g. a leading indicator or " #") could change the
  # parse. ConfigMap data values must be strings in any case.
  ADDRESS: "{{ lookup('env', 'VECTOR_AGGREGATOR') }}"
{% endif %}
14 changes: 14 additions & 0 deletions tests/templates/kuttl/hbase-connector/02-assert.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
---
# Wait up to 10 minutes for the ZooKeeper StatefulSet to become ready.
apiVersion: kuttl.dev/v1beta1
kind: TestAssert
metadata:
  name: install-zk
timeout: 600
---
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: test-zk-server-default
status:
  readyReplicas: 1
  replicas: 1
28 changes: 28 additions & 0 deletions tests/templates/kuttl/hbase-connector/02-install-zookeeper.yaml.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
---
# Single-node ZooKeeper cluster backing HDFS and HBase in this test.
apiVersion: zookeeper.stackable.tech/v1alpha1
kind: ZookeeperCluster
metadata:
  name: test-zk
spec:
  image:
    productVersion: "{{ test_scenario['values']['zookeeper-latest'] }}"
    pullPolicy: IfNotPresent
  clusterConfig:
{% if lookup('env', 'VECTOR_AGGREGATOR') %}
    vectorAggregatorConfigMapName: vector-aggregator-discovery
{% endif %}
  servers:
    config:
      logging:
        # Renders True/False depending on whether VECTOR_AGGREGATOR is set.
        enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }}
    roleGroups:
      default:
        replicas: 1
---
# Znode used by HDFS/HBase for coordination; discovery CM is named after it.
apiVersion: zookeeper.stackable.tech/v1alpha1
kind: ZookeeperZnode
metadata:
  name: test-znode
spec:
  clusterRef:
    name: test-zk
30 changes: 30 additions & 0 deletions tests/templates/kuttl/hbase-connector/03-assert.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
---
# Wait up to 10 minutes for all three HDFS roles to reach their replica count.
apiVersion: kuttl.dev/v1beta1
kind: TestAssert
metadata:
  name: install-hdfs
timeout: 600
---
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: test-hdfs-namenode-default
status:
  readyReplicas: 2
  replicas: 2
---
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: test-hdfs-journalnode-default
status:
  readyReplicas: 1
  replicas: 1
---
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: test-hdfs-datanode-default
status:
  readyReplicas: 1
  replicas: 1
35 changes: 35 additions & 0 deletions tests/templates/kuttl/hbase-connector/03-install-hdfs.yaml.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
---
# HDFS cluster (2 namenodes for HA, 1 journalnode, 1 datanode) used as the
# storage layer for HBase.
apiVersion: hdfs.stackable.tech/v1alpha1
kind: HdfsCluster
metadata:
  name: test-hdfs
spec:
  image:
    productVersion: "{{ test_scenario['values']['hdfs-latest'] }}"
    pullPolicy: IfNotPresent
  clusterConfig:
    # Discovery ConfigMap created for the ZookeeperZnode in step 02.
    zookeeperConfigMapName: test-znode
{% if lookup('env', 'VECTOR_AGGREGATOR') %}
    vectorAggregatorConfigMapName: vector-aggregator-discovery
{% endif %}
  nameNodes:
    config:
      logging:
        enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }}
    roleGroups:
      default:
        replicas: 2
  dataNodes:
    config:
      logging:
        enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }}
    roleGroups:
      default:
        replicas: 1
  journalNodes:
    config:
      logging:
        enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }}
    roleGroups:
      default:
        replicas: 1
22 changes: 22 additions & 0 deletions tests/templates/kuttl/hbase-connector/04-assert.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
---
# Wait up to 10 minutes for both HBase roles to reach their replica count.
apiVersion: kuttl.dev/v1beta1
kind: TestAssert
metadata:
  name: install-hbase
timeout: 600
---
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: test-hbase-master-default
status:
  readyReplicas: 2
  replicas: 2
---
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: test-hbase-regionserver-default
status:
  readyReplicas: 1
  replicas: 1
29 changes: 29 additions & 0 deletions tests/templates/kuttl/hbase-connector/04-install-hbase.yaml.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
---
# HBase cluster (2 masters, 1 regionserver) the Spark job will write into.
apiVersion: hbase.stackable.tech/v1alpha1
kind: HbaseCluster
metadata:
  name: test-hbase
spec:
  image:
    productVersion: "{{ test_scenario['values']['hbase'] }}"
    pullPolicy: IfNotPresent
  clusterConfig:
    # Discovery ConfigMaps from the HDFS (step 03) and znode (step 02) installs.
    hdfsConfigMapName: test-hdfs
    zookeeperConfigMapName: test-znode
{% if lookup('env', 'VECTOR_AGGREGATOR') %}
    vectorAggregatorConfigMapName: vector-aggregator-discovery
{% endif %}
  masters:
    config:
      logging:
        enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }}
    roleGroups:
      default:
        replicas: 2
  regionServers:
    config:
      logging:
        enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }}
    roleGroups:
      default:
        replicas: 1
11 changes: 11 additions & 0 deletions tests/templates/kuttl/hbase-connector/10-assert.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
---
# Wait up to 15 minutes for the Spark job to run to completion.
apiVersion: kuttl.dev/v1beta1
kind: TestAssert
timeout: 900
---
apiVersion: spark.stackable.tech/v1alpha1
kind: SparkApplication
metadata:
  name: test-spark-hbase-connector
status:
  phase: Succeeded
104 changes: 104 additions & 0 deletions tests/templates/kuttl/hbase-connector/10-deploy-spark-app.yaml.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
---
# Spark job that writes one row into HBase through the hbase-spark connector.
# The HBase/HDFS site files are mounted into a directory that is put on the
# driver and executor classpaths via sparkConf below.
apiVersion: spark.stackable.tech/v1alpha1
kind: SparkApplication
metadata:
  name: test-spark-hbase-connector
spec:
{% if lookup('env', 'VECTOR_AGGREGATOR') %}
  vectorAggregatorConfigMapName: vector-aggregator-discovery
{% endif %}
  sparkImage:
{% if test_scenario['values']['spark'].find(",") > 0 %}
    custom: "{{ test_scenario['values']['spark'].split(',')[1] }}"
    productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}"
{% else %}
    productVersion: "{{ test_scenario['values']['spark'] }}"
{% endif %}
    # NOTE(review): Always differs from the IfNotPresent used by the other
    # installs in this test (a commented-out IfNotPresent was left next to it)
    # -- looks like a deliberate choice for freshly pushed test images; confirm.
    pullPolicy: Always
  mode: cluster
  mainApplicationFile: local:///stackable/spark/jobs/test-hbase.py
  sparkConf:
    spark.driver.extraClassPath: /stackable/spark/config
    spark.executor.extraClassPath: /stackable/spark/config
  driver:
    config:
      logging:
        enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }}
      volumeMounts:
        - name: script
          mountPath: /stackable/spark/jobs
        - name: hbase-config
          mountPath: /stackable/spark/config/hbase-site.xml
          subPath: hbase-site.xml
        - name: hdfs-config
          mountPath: /stackable/spark/config/hdfs-site.xml
          subPath: hdfs-site.xml
        - name: hdfs-config
          mountPath: /stackable/spark/config/core-site.xml
          subPath: core-site.xml
  executor:
    replicas: 1
    config:
      logging:
        enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }}
      volumeMounts:
        - name: script
          mountPath: /stackable/spark/jobs
        - name: hbase-config
          mountPath: /stackable/spark/config/hbase-site.xml
          subPath: hbase-site.xml
        - name: hdfs-config
          mountPath: /stackable/spark/config/hdfs-site.xml
          subPath: hdfs-site.xml
        - name: hdfs-config
          mountPath: /stackable/spark/config/core-site.xml
          subPath: core-site.xml
  volumes:
    # NOTE(review): both "script" and "hbase-config" reference a ConfigMap named
    # test-hbase, but the ConfigMap defined below only carries test-hbase.py.
    # Presumably hbase-site.xml comes from the HbaseCluster's discovery
    # ConfigMap of the same name -- verify the two do not collide.
    - name: script
      configMap:
        name: test-hbase
    - name: hbase-config
      configMap:
        name: test-hbase
    - name: hdfs-config
      configMap:
        name: test-hdfs
---
# PySpark script executed by the SparkApplication above.
apiVersion: v1
kind: ConfigMap
metadata:
  name: test-hbase
data:
  test-hbase.py: |
    import os
    from pyspark.sql import SparkSession
    from pyspark.sql.types import *

    spark = SparkSession.builder.appName("test-hbase").getOrCreate()

    df = spark.createDataFrame(
        [("row1", "Hello, Stackable!")],
        "key: string, value: string"
    )

    spark._jvm.org.apache.hadoop.hbase.spark.HBaseContext(
        spark._jsc.sc(),
        spark._jvm.org.apache.hadoop.hbase.HBaseConfiguration.create(),
        None,
    )

    catalog = '{\
        "table":{"namespace":"default","name":"test-hbase"},\
        "rowkey":"key",\
        "columns":{\
        "key":{"cf":"rowkey","col":"key","type":"string"},\
        "value":{"cf":"cf1","col":"value","type":"string"}\
        }}'

    df\
        .write\
        .format("org.apache.hadoop.hbase.spark")\
        .option('catalog', catalog)\
        .option('newtable', '5')\
        .save()
Loading
Loading