Skip to content

Commit 49273f2

Browse files
[DOP-22336] Add logic for handling WebDAV transfers (#194)
1 parent f3abd77 commit 49273f2

File tree

19 files changed

+576
-8
lines changed

19 files changed

+576
-8
lines changed

.env.docker

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -142,6 +142,13 @@ TEST_SAMBA_USER=syncmaster
142142
TEST_SAMBA_PASSWORD=test_only
143143
TEST_SAMBA_AUTH_TYPE=NTLMv2
144144

145+
TEST_WEBDAV_HOST_FOR_CONFTEST=webdav
146+
TEST_WEBDAV_PORT_FOR_CONFTEST=80
147+
TEST_WEBDAV_HOST_FOR_WORKER=webdav
148+
TEST_WEBDAV_PORT_FOR_WORKER=80
149+
TEST_WEBDAV_USER=syncmaster
150+
TEST_WEBDAV_PASSWORD=test_only
151+
145152
SPARK_CONF_DIR=/app/tests/spark/hive/conf/
146153
HADOOP_CONF_DIR=/app/tests/spark/hadoop/
147154
HIVE_CONF_DIR=/app/tests/spark/hive/conf/

.env.local

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -129,6 +129,13 @@ export TEST_SAMBA_USER=syncmaster
129129
export TEST_SAMBA_PASSWORD=test_only
130130
export TEST_SAMBA_AUTH_TYPE=NTLMv2
131131

132+
export TEST_WEBDAV_HOST_FOR_CONFTEST=localhost
133+
export TEST_WEBDAV_PORT_FOR_CONFTEST=8010
134+
export TEST_WEBDAV_HOST_FOR_WORKER=webdav
135+
export TEST_WEBDAV_PORT_FOR_WORKER=80
136+
export TEST_WEBDAV_USER=syncmaster
137+
export TEST_WEBDAV_PASSWORD=test_only
138+
132139
export SPARK_CONF_DIR=./tests/spark/hive/conf/
133140
export HADOOP_CONF_DIR=./tests/spark/hadoop/
134141
export HIVE_CONF_DIR=./tests/spark/hive/conf/

.github/workflows/tests.yml

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,10 @@ jobs:
6060
name: Samba tests
6161
uses: ./.github/workflows/samba-tests.yml
6262

63+
webdav_tests:
64+
name: WebDAV tests
65+
uses: ./.github/workflows/webdav-tests.yml
66+
6367
scheduler_tests:
6468
name: Scheduler tests
6569
uses: ./.github/workflows/scheduler-tests.yml
@@ -72,7 +76,7 @@ jobs:
7276
name: Tests done
7377
runs-on: ubuntu-latest
7478

75-
needs: [unit_tests, scheduler_tests, oracle_tests, clickhouse_tests, mssql_tests, mysql_tests, hive_tests, hdfs_tests, s3_tests, sftp_tests, ftp_tests, ftps_tests, samba_tests]
79+
needs: [unit_tests, scheduler_tests, oracle_tests, clickhouse_tests, mssql_tests, mysql_tests, hive_tests, hdfs_tests, s3_tests, sftp_tests, ftp_tests, ftps_tests, samba_tests, webdav_tests]
7680
steps:
7781
- name: Checkout code
7882
uses: actions/checkout@v4

.github/workflows/webdav-tests.yml

Lines changed: 79 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,79 @@
1+
name: WebDAV tests
2+
on:
3+
workflow_call:
4+
5+
env:
6+
DEFAULT_PYTHON: '3.12'
7+
8+
jobs:
9+
test:
10+
name: Run WebDAV tests
11+
runs-on: ubuntu-latest
12+
13+
steps:
14+
- name: Checkout code
15+
uses: actions/checkout@v4
16+
17+
- name: Set up QEMU
18+
uses: docker/setup-qemu-action@v3
19+
20+
- name: Set up Docker Buildx
21+
uses: docker/setup-buildx-action@v3
22+
23+
- name: Cache jars
24+
uses: actions/cache@v4
25+
with:
26+
path: ./cached_jars
27+
key: ${{ runner.os }}-python-${{ env.DEFAULT_PYTHON }}-test-webdav
28+
restore-keys: |
29+
${{ runner.os }}-python-${{ env.DEFAULT_PYTHON }}-test-webdav
30+
${{ runner.os }}-python-
31+
32+
- name: Build Worker Image
33+
uses: docker/build-push-action@v6
34+
with:
35+
context: .
36+
tags: mtsrus/syncmaster-worker:${{ github.sha }}
37+
target: test
38+
file: docker/Dockerfile.worker
39+
load: true
40+
cache-from: mtsrus/syncmaster-worker:develop
41+
42+
- name: Docker compose up
43+
run: |
44+
docker compose -f docker-compose.test.yml --profile all down -v --remove-orphans
45+
docker compose -f docker-compose.test.yml --profile webdav up -d --wait --wait-timeout 200
46+
env:
47+
WORKER_IMAGE_TAG: ${{ github.sha }}
48+
49+
- name: Run WebDAV Tests
50+
run: |
51+
docker compose -f ./docker-compose.test.yml --profile webdav exec -T worker coverage run -m pytest -vvv -s -m "worker and webdav"
52+
53+
- name: Dump worker logs on failure
54+
if: failure()
55+
uses: jwalton/gh-docker-logs@v2
56+
with:
57+
images: mtsrus/syncmaster-worker
58+
dest: ./logs
59+
60+
# This is important, as coverage is exported after receiving SIGTERM
61+
- name: Shutdown
62+
if: always()
63+
run: |
64+
docker compose -f docker-compose.test.yml --profile all down -v --remove-orphans
65+
66+
- name: Upload worker logs
67+
uses: actions/upload-artifact@v4
68+
if: failure()
69+
with:
70+
name: worker-logs-webdav
71+
path: logs/*
72+
73+
- name: Upload coverage results
74+
uses: actions/upload-artifact@v4
75+
with:
76+
name: coverage-webdav
77+
path: reports/*
78+
# https://github.com/actions/upload-artifact/issues/602
79+
include-hidden-files: true

Makefile

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -125,6 +125,10 @@ test-integration-samba: test-db ##@Test Run integration tests for Samb
125125
docker compose -f docker-compose.test.yml --profile samba up -d --wait $(DOCKER_COMPOSE_ARGS)
126126
${POETRY} run pytest ./tests/test_integration -m samba $(PYTEST_ARGS)
127127

128+
test-integration-webdav: test-db ##@Test Run integration tests for WebDAV
129+
docker compose -f docker-compose.test.yml --profile webdav up -d --wait $(DOCKER_COMPOSE_ARGS)
130+
${POETRY} run pytest ./tests/test_integration -m webdav $(PYTEST_ARGS)
131+
128132
test-integration: test-db ##@Test Run all integration tests
129133
docker compose -f docker-compose.test.yml --profile all up -d --wait $(DOCKER_COMPOSE_ARGS)
130134
${POETRY} run pytest ./tests/test_integration $(PYTEST_ARGS)

README.rst

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ List of currently supported connections:
4444
* FTPS
4545
* SFTP
4646
* Samba
47+
* WebDAV
4748

4849
Current Data.SyncMaster implementation provides following components:
4950

docker-compose.test.yml

Lines changed: 19 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -125,7 +125,7 @@ services:
125125
condition: service_completed_successfully
126126
rabbitmq:
127127
condition: service_healthy
128-
profiles: [worker, scheduler, s3, oracle, hdfs, hive, clickhouse, mysql, mssql, sftp, ftp, ftps, samba, all]
128+
profiles: [worker, scheduler, s3, oracle, hdfs, hive, clickhouse, mysql, mssql, sftp, ftp, ftps, samba, webdav, all]
129129

130130
test-postgres:
131131
image: postgres
@@ -139,7 +139,7 @@ services:
139139
interval: 30s
140140
timeout: 5s
141141
retries: 3
142-
profiles: [s3, oracle, clickhouse, mysql, mssql, hdfs, hive, sftp, ftp, ftps, samba, all]
142+
profiles: [s3, oracle, clickhouse, mysql, mssql, hdfs, hive, sftp, ftp, ftps, samba, webdav, all]
143143

144144
test-s3:
145145
image: bitnami/minio:latest
@@ -209,7 +209,6 @@ services:
209209
platform: linux/amd64
210210
profiles: [mysql, all]
211211

212-
213212
metastore-hive:
214213
image: postgres
215214
restart: unless-stopped
@@ -225,7 +224,7 @@ services:
225224
interval: 30s
226225
timeout: 5s
227226
retries: 3
228-
profiles: [hive, hdfs, s3, sftp, ftp, ftps, samba, all]
227+
profiles: [hive, hdfs, s3, sftp, ftp, ftps, samba, webdav, all]
229228

230229
keycloak:
231230
image: quay.io/keycloak/keycloak:latest
@@ -264,7 +263,7 @@ services:
264263
HIVE_METASTORE_DB_USER: test_hive
265264
HIVE_METASTORE_DB_PASSWORD: test_hive
266265
# writing spark dataframe to s3, sftp, ftp, ftps xml file fails without running hive metastore server
267-
profiles: [hive, hdfs, s3, sftp, ftp, ftps, samba, all]
266+
profiles: [hive, hdfs, s3, sftp, ftp, ftps, samba, webdav, all]
268267

269268
test-sftp:
270269
image: ${SFTP_IMAGE:-linuxserver/openssh-server}
@@ -327,6 +326,21 @@ services:
327326
entrypoint: [/custom_entrypoint.sh]
328327
profiles: [samba, all]
329328

329+
webdav:
330+
image: ${WEBDAV_IMAGE:-chonjay21/webdav:latest}
331+
restart: unless-stopped
332+
environment:
333+
- APP_USER_NAME=syncmaster
334+
- APP_USER_PASSWD=test_only
335+
- APP_UID=1000
336+
- APP_GID=1000
337+
ports:
338+
- 8010:80
339+
volumes:
340+
# Remove after https://github.com/chonjay21/docker-webdav/pull/3
341+
- ./docker/webdav/on_post_init.sh:/sources/webdav/eventscripts/on_post_init.sh
342+
profiles: [webdav, all]
343+
330344
volumes:
331345
postgres_test_data:
332346
rabbitmq_test_data:

docker/webdav/on_post_init.sh

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
#!/usr/bin/env bash
2+
set -e
3+
4+
# allow create files and directories
5+
chown -R www-data:www-data /var/webdav
(filename missing from capture — likely a changelog/news fragment; verify against the commit)

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Add logic for handling WebDAV transfers

syncmaster/dto/connections.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -132,3 +132,13 @@ class SambaConnectionDTO(ConnectionDTO):
132132
domain: str = ""
133133
port: int | None = None
134134
type: ClassVar[str] = "samba"
135+
136+
137+
@dataclass
138+
class WebDAVConnectionDTO(ConnectionDTO):
139+
host: str
140+
port: int
141+
user: str
142+
password: str
143+
protocol: Literal["http", "https"] = "https"
144+
type: ClassVar[str] = "webdav"

0 commit comments

Comments (0)