@@ -125,7 +125,7 @@ services:
         condition: service_completed_successfully
       rabbitmq:
         condition: service_healthy
-    profiles: [worker, scheduler, s3, oracle, hdfs, hive, clickhouse, mysql, mssql, sftp, ftp, ftps, all]
+    profiles: [worker, scheduler, s3, oracle, hdfs, hive, clickhouse, mysql, mssql, sftp, ftp, ftps, samba, all]
 
   test-postgres:
     image: postgres
@@ -139,7 +139,7 @@ services:
       interval: 30s
       timeout: 5s
       retries: 3
-    profiles: [s3, oracle, clickhouse, mysql, mssql, hdfs, hive, sftp, ftp, ftps, all]
+    profiles: [s3, oracle, clickhouse, mysql, mssql, hdfs, hive, sftp, ftp, ftps, samba, all]
 
   test-s3:
     image: bitnami/minio:latest
@@ -225,7 +225,7 @@ services:
       interval: 30s
       timeout: 5s
       retries: 3
-    profiles: [hive, hdfs, s3, sftp, ftp, ftps, all]
+    profiles: [hive, hdfs, s3, sftp, ftp, ftps, samba, all]
 
   keycloak:
     image: quay.io/keycloak/keycloak:latest
@@ -264,7 +264,7 @@ services:
       HIVE_METASTORE_DB_USER: test_hive
       HIVE_METASTORE_DB_PASSWORD: test_hive
     # writing spark dataframe to s3, sftp, ftp, ftps xml file fails without running hive metastore server
-    profiles: [hive, hdfs, s3, sftp, ftp, ftps, all]
+    profiles: [hive, hdfs, s3, sftp, ftp, ftps, samba, all]
 
   test-sftp:
     image: ${SFTP_IMAGE:-linuxserver/openssh-server}
@@ -316,6 +316,17 @@ services:
       - ./docker/ftp/on_post_init.sh:/sources/ftps/eventscripts/on_post_init.sh
     profiles: [ftps, all]
 
+  test-samba:
+    image: ${SAMBA_IMAGE:-elswork/samba}
+    restart: unless-stopped
+    ports:
+      - 139:139
+      - 1445:445
+    volumes:
+      - ./docker/samba/custom_entrypoint.sh:/custom_entrypoint.sh
+    entrypoint: [/custom_entrypoint.sh]
+    profiles: [samba, all]
+
 volumes:
   postgres_test_data:
   rabbitmq_test_data:
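
The new test-samba service mounts ./docker/samba/custom_entrypoint.sh and runs it in place of the image's default entrypoint. That script is not part of this diff; the lines below are only a hypothetical sketch of the usual wrapper pattern (prepare the exported directory, then exec the image's stock entrypoint). The /entrypoint.sh and /share/data paths are assumptions about the elswork/samba image, not taken from this change.

#!/bin/sh
# Hypothetical sketch of docker/samba/custom_entrypoint.sh -- not the script
# added by this change. The exported directory and the location of the image's
# stock entrypoint are assumptions about the elswork/samba image.
set -e

# Prepare the directory that the test share is expected to expose.
mkdir -p /share/data
chmod 0777 /share/data

# Hand control back to the image's own entrypoint so its normal user/share
# setup still runs; any arguments passed to the container are forwarded as-is.
exec /entrypoint.sh "$@"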