
Commit ba80948

Author: Ilyas Gasanov (committed)
[DOP-19896] Add Clickhouse API schema
1 parent 2260466 commit ba80948

7 files changed (+239, -4 lines)

README.rst

Lines changed: 1 addition & 0 deletions
@@ -33,6 +33,7 @@ Data.SyncMaster is a low-code ETL tool for transferring data between databases a
 List of currently supported connections:

 * Apache Hive
+* Clickhouse
 * Postgres
 * Oracle
 * HDFS

syncmaster/schemas/v1/connection_types.py

Lines changed: 2 additions & 0 deletions
@@ -8,6 +8,7 @@
 POSTGRES_TYPE = Literal["postgres"]
 S3_TYPE = Literal["s3"]
 HDFS_TYPE = Literal["hdfs"]
+CLICKHOUSE_TYPE = Literal["clickhouse"]


 class ConnectionType(str, Enum):
@@ -16,3 +17,4 @@ class ConnectionType(str, Enum):
     ORACLE = "oracle"
     S3 = "s3"
     HDFS = "hdfs"
+    CLICKHOUSE = "clickhouse"
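
The new CLICKHOUSE_TYPE literal and the CLICKHOUSE enum member carry the same string value: the Literal is what types the `type` field on the Pydantic schemas below, while ConnectionType serves code that wants an enum. A minimal standalone sketch of how the two line up (names re-declared locally so the snippet runs on its own; illustrative only):

# Minimal standalone sketch (illustrative only): the Literal alias and the enum
# member share one string value, so a schema field typed with the Literal accepts
# exactly ConnectionType.CLICKHOUSE.value.
from enum import Enum
from typing import Literal

from pydantic import BaseModel

CLICKHOUSE_TYPE = Literal["clickhouse"]


class ConnectionType(str, Enum):
    CLICKHOUSE = "clickhouse"


class ClickhouseBaseSchema(BaseModel):
    type: CLICKHOUSE_TYPE


schema = ClickhouseBaseSchema(type=ConnectionType.CLICKHOUSE.value)
print(schema.type)  # clickhouse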
syncmaster/schemas/v1/connections/clickhouse.py

Lines changed: 47 additions & 0 deletions
@@ -0,0 +1,47 @@
+# SPDX-FileCopyrightText: 2023-2024 MTS PJSC
+# SPDX-License-Identifier: Apache-2.0
+from pydantic import BaseModel, Field, SecretStr
+
+from syncmaster.schemas.v1.connection_types import CLICKHOUSE_TYPE
+
+
+class ClickhouseBaseSchema(BaseModel):
+    type: CLICKHOUSE_TYPE
+
+    class Config:
+        from_attributes = True
+
+
+class ReadClickhouseConnectionSchema(ClickhouseBaseSchema):
+    host: str
+    port: int
+    database: str | None = None
+    additional_params: dict = Field(default_factory=dict)
+
+
+class ReadClickhouseAuthSchema(ClickhouseBaseSchema):
+    user: str
+
+
+class UpdateClickhouseConnectionSchema(ClickhouseBaseSchema):
+    host: str | None = None
+    port: int | None = None
+    database: str | None = None
+    additional_params: dict | None = Field(default_factory=dict)
+
+
+class UpdateClickhouseAuthSchema(ClickhouseBaseSchema):
+    user: str | None = None  # noqa: F722
+    password: SecretStr | None = None
+
+
+class CreateClickhouseConnectionSchema(ClickhouseBaseSchema):
+    host: str
+    port: int
+    database: str | None = None
+    additional_params: dict = Field(default_factory=dict)
+
+
+class CreateClickhouseAuthSchema(ClickhouseBaseSchema):
+    user: str
+    password: SecretStr
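
The schemas separate connection settings (host, port, database, additional_params) from credentials, and the auth schemas use SecretStr so the password stays masked unless it is unwrapped explicitly. A short usage sketch with made-up values, assuming Pydantic v2 (the import path is the one introduced by this commit):

# Usage sketch with made-up values (assumes Pydantic v2); the import path is the
# module added by this commit.
from pydantic import SecretStr

from syncmaster.schemas.v1.connections.clickhouse import (
    CreateClickhouseAuthSchema,
    CreateClickhouseConnectionSchema,
)

conn = CreateClickhouseConnectionSchema(
    type="clickhouse",
    host="127.0.0.1",
    port=8123,  # arbitrary example value
)
auth = CreateClickhouseAuthSchema(
    type="clickhouse",
    user="user",
    password=SecretStr("secret"),
)

print(conn.database)                     # None (optional field)
print(conn.additional_params)            # {} (filled by the default_factory)
print(auth.password)                     # ********** (SecretStr masks the value)
print(auth.password.get_secret_value())  # secret (explicit unmasking only)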

syncmaster/schemas/v1/connections/connection.py

Lines changed: 29 additions & 3 deletions
@@ -3,6 +3,14 @@

 from pydantic import BaseModel, Field, model_validator

+from syncmaster.schemas.v1.connections.clickhouse import (
+    CreateClickhouseAuthSchema,
+    CreateClickhouseConnectionSchema,
+    ReadClickhouseAuthSchema,
+    ReadClickhouseConnectionSchema,
+    UpdateClickhouseAuthSchema,
+    UpdateClickhouseConnectionSchema,
+)
 from syncmaster.schemas.v1.connections.hdfs import (
     HDFSCreateAuthSchema,
     HDFSCreateConnectionSchema,
@@ -51,12 +59,14 @@
     | HDFSReadConnectionSchema
     | ReadOracleConnectionSchema
     | ReadPostgresConnectionSchema
+    | ReadClickhouseConnectionSchema
     | S3ReadConnectionSchema
 )
 CreateConnectionDataSchema = (
     CreateHiveConnectionSchema
     | CreateOracleConnectionSchema
     | CreatePostgresConnectionSchema
+    | CreateClickhouseConnectionSchema
     | HDFSCreateConnectionSchema
     | S3CreateConnectionSchema
 )
@@ -66,15 +76,31 @@
     | S3UpdateConnectionSchema
     | UpdateOracleConnectionSchema
     | UpdatePostgresConnectionSchema
+    | UpdateClickhouseConnectionSchema
 )
 ReadConnectionAuthDataSchema = (
-    ReadHiveAuthSchema | ReadOracleAuthSchema | ReadPostgresAuthSchema | S3ReadAuthSchema | HDFSReadAuthSchema
+    ReadHiveAuthSchema
+    | ReadOracleAuthSchema
+    | ReadPostgresAuthSchema
+    | ReadClickhouseAuthSchema
+    | S3ReadAuthSchema
+    | HDFSReadAuthSchema
 )
 CreateConnectionAuthDataSchema = (
-    CreateHiveAuthSchema | CreateOracleAuthSchema | CreatePostgresAuthSchema | S3CreateAuthSchema | HDFSCreateAuthSchema
+    CreateHiveAuthSchema
+    | CreateOracleAuthSchema
+    | CreatePostgresAuthSchema
+    | CreateClickhouseAuthSchema
+    | S3CreateAuthSchema
+    | HDFSCreateAuthSchema
 )
 UpdateConnectionAuthDataSchema = (
-    UpdateHiveAuthSchema | UpdateOracleAuthSchema | UpdatePostgresAuthSchema | S3UpdateAuthSchema | HDFSUpdateAuthSchema
+    UpdateHiveAuthSchema
+    | UpdateOracleAuthSchema
+    | UpdatePostgresAuthSchema
+    | UpdateClickhouseAuthSchema
+    | S3UpdateAuthSchema
+    | HDFSUpdateAuthSchema
 )
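
These unions are what the connection endpoints validate bodies against: every member schema pins its own `type` literal, so only the matching variant can parse a given payload. A simplified, self-contained sketch of that dispatch (toy schemas re-declared locally; assumes Pydantic v2). The repository's actual CreateConnectionSchema additionally cross-checks connection_data against auth_data in a model_validator, which is what the type-mismatch test at the end of this commit exercises:

# Simplified sketch of union dispatch on the "type" literal (toy schemas,
# re-declared here so the snippet is self-contained; assumes Pydantic v2).
from typing import Literal, Union

from pydantic import BaseModel


class CreatePostgresConnectionSchema(BaseModel):
    type: Literal["postgres"]
    host: str
    port: int


class CreateClickhouseConnectionSchema(BaseModel):
    type: Literal["clickhouse"]
    host: str
    port: int


CreateConnectionDataSchema = Union[
    CreatePostgresConnectionSchema,
    CreateClickhouseConnectionSchema,
]


class CreateConnectionSchema(BaseModel):
    name: str
    connection_data: CreateConnectionDataSchema


payload = {
    "name": "New connection",
    "connection_data": {"type": "clickhouse", "host": "127.0.0.1", "port": 8123},
}
parsed = CreateConnectionSchema.model_validate(payload)
print(type(parsed.connection_data).__name__)  # CreateClickhouseConnectionSchema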

syncmaster/schemas/v1/transfers/__init__.py

Lines changed: 7 additions & 0 deletions
@@ -7,6 +7,7 @@
 from syncmaster.schemas.v1.connections.connection import ReadConnectionSchema
 from syncmaster.schemas.v1.page import PageSchema
 from syncmaster.schemas.v1.transfers.db import (
+    ClickhouseReadTransferSourceAndTarget,
     HiveReadTransferSourceAndTarget,
     OracleReadTransferSourceAndTarget,
     PostgresReadTransferSourceAndTarget,
@@ -31,6 +32,7 @@
     | HDFSReadTransferSource
     | HiveReadTransferSourceAndTarget
     | OracleReadTransferSourceAndTarget
+    | ClickhouseReadTransferSourceAndTarget
     | S3ReadTransferSource
 )

@@ -39,6 +41,7 @@
     | HDFSReadTransferTarget
     | HiveReadTransferSourceAndTarget
     | OracleReadTransferSourceAndTarget
+    | ClickhouseReadTransferSourceAndTarget
     | S3ReadTransferTarget
 )

@@ -47,6 +50,7 @@
     | HDFSCreateTransferSource
     | HiveReadTransferSourceAndTarget
     | OracleReadTransferSourceAndTarget
+    | ClickhouseReadTransferSourceAndTarget
     | S3CreateTransferSource
 )

@@ -55,6 +59,7 @@
     | HDFSCreateTransferTarget
     | HiveReadTransferSourceAndTarget
     | OracleReadTransferSourceAndTarget
+    | ClickhouseReadTransferSourceAndTarget
     | S3CreateTransferTarget
 )

@@ -63,6 +68,7 @@
     | HDFSReadTransferSource
     | HiveReadTransferSourceAndTarget
     | OracleReadTransferSourceAndTarget
+    | ClickhouseReadTransferSourceAndTarget
     | S3CreateTransferSource
     | None
 )
@@ -72,6 +78,7 @@
     | HDFSReadTransferSource
     | HiveReadTransferSourceAndTarget
     | OracleReadTransferSourceAndTarget
+    | ClickhouseReadTransferSourceAndTarget
     | S3CreateTransferTarget
     | None
 )

syncmaster/schemas/v1/transfers/db.py

Lines changed: 10 additions & 1 deletion
@@ -4,7 +4,12 @@

 from pydantic import BaseModel

-from syncmaster.schemas.v1.connection_types import HIVE_TYPE, ORACLE_TYPE, POSTGRES_TYPE
+from syncmaster.schemas.v1.connection_types import (
+    CLICKHOUSE_TYPE,
+    HIVE_TYPE,
+    ORACLE_TYPE,
+    POSTGRES_TYPE,
+)


 class ReadDBTransfer(BaseModel):
@@ -21,3 +26,7 @@ class OracleReadTransferSourceAndTarget(ReadDBTransfer):

 class PostgresReadTransferSourceAndTarget(ReadDBTransfer):
     type: POSTGRES_TYPE
+
+
+class ClickhouseReadTransferSourceAndTarget(ReadDBTransfer):
+    type: CLICKHOUSE_TYPE
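
ReadDBTransfer holds the shared transfer fields (its body is not shown in this hunk); each subclass only pins the `type` discriminator, and the Clickhouse subclass follows the same pattern as the Postgres and Oracle ones. A tiny sketch of that behaviour, using a hypothetical placeholder field in place of the base-class fields the diff does not show:

# Sketch only: ReadDBTransfer's real fields are not visible in this hunk, so a
# hypothetical table_name field stands in for them here.
from typing import Literal

from pydantic import BaseModel, ValidationError

CLICKHOUSE_TYPE = Literal["clickhouse"]


class ReadDBTransfer(BaseModel):
    table_name: str  # placeholder; not the actual base-class definition


class ClickhouseReadTransferSourceAndTarget(ReadDBTransfer):
    type: CLICKHOUSE_TYPE


print(ClickhouseReadTransferSourceAndTarget(type="clickhouse", table_name="schema.table"))

try:
    ClickhouseReadTransferSourceAndTarget(type="hive", table_name="schema.table")
except ValidationError as exc:
    print(exc)  # the type literal rejects any other connector name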
Lines changed: 143 additions & 0 deletions
@@ -0,0 +1,143 @@
+import pytest
+from httpx import AsyncClient
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from syncmaster.db.models import AuthData, Connection
+from syncmaster.db.repositories.utils import decrypt_auth_data
+from syncmaster.settings import Settings
+from tests.mocks import MockGroup, UserTestRoles
+
+pytestmark = [pytest.mark.asyncio, pytest.mark.backend, pytest.mark.clickhouse]
+
+
+async def test_developer_plus_can_create_clickhouse_connection(
+    client: AsyncClient,
+    group: MockGroup,
+    session: AsyncSession,
+    settings: Settings,
+    role_developer_plus: UserTestRoles,
+):
+    # Arrange
+    user = group.get_member_of_role(role_developer_plus)
+
+    # Act
+    result = await client.post(
+        "v1/connections",
+        headers={"Authorization": f"Bearer {user.token}"},
+        json={
+            "group_id": group.id,
+            "name": "New connection",
+            "description": "",
+            "connection_data": {
+                "type": "clickhouse",
+                "host": "127.0.0.1",
+                "port": 1521,
+                "database": "database_name",
+            },
+            "auth_data": {
+                "type": "clickhouse",
+                "user": "user",
+                "password": "secret",
+            },
+        },
+    )
+    connection = (
+        await session.scalars(
+            select(Connection).filter_by(
+                name="New connection",
+            ),
+        )
+    ).first()
+
+    creds = (
+        await session.scalars(
+            select(AuthData).filter_by(
+                connection_id=connection.id,
+            ),
+        )
+    ).one()
+
+    # Assert
+    decrypted = decrypt_auth_data(creds.value, settings=settings)
+    assert result.status_code == 200
+    assert result.json() == {
+        "id": connection.id,
+        "name": connection.name,
+        "description": connection.description,
+        "group_id": connection.group_id,
+        "connection_data": {
+            "type": connection.data["type"],
+            "host": connection.data["host"],
+            "port": connection.data["port"],
+            "database": connection.data["database"],
+            "additional_params": connection.data["additional_params"],
+        },
+        "auth_data": {
+            "type": decrypted["type"],
+            "user": decrypted["user"],
+        },
+    }
+
+
+async def test_developer_plus_cannot_create_connection_with_type_mismatch(
+    client: AsyncClient,
+    group: MockGroup,
+    session: AsyncSession,
+    settings: Settings,
+    role_developer_plus: UserTestRoles,
+    event_loop,
+    request,
+):
+    # Arrange
+    user = group.get_member_of_role(role_developer_plus)
+
+    # Act
+    result = await client.post(
+        "v1/connections",
+        headers={"Authorization": f"Bearer {user.token}"},
+        json={
+            "group_id": group.id,
+            "name": "New connection",
+            "description": "",
+            "connection_data": {
+                "type": "postgres",
+                "host": "127.0.0.1",
+                "port": 5432,
+                "database_name": "postgres",
+            },
+            "auth_data": {
+                "type": "clickhouse",
+                "user": "user",
+                "password": "secret",
+            },
+        },
+    )
+
+    # Assert
+    assert result.json() == {
+        "error": {
+            "code": "invalid_request",
+            "message": "Invalid request",
+            "details": [
+                {
+                    "context": {},
+                    "input": {
+                        "group_id": group.id,
+                        "name": "New connection",
+                        "description": "",
+                        "connection_data": {
+                            "type": "postgres",
+                            "host": "127.0.0.1",
+                            "port": 5432,
+                            "database_name": "postgres",
+                        },
+                        "auth_data": {"type": "clickhouse", "user": "user", "password": "secret"},
+                    },
+                    "location": ["body"],
+                    "message": "Value error, Connection data and auth data must have same types",
+                    "code": "value_error",
+                },
+            ],
+        },
+    }
