
Commit 9a03779

[DOP-19900] Add MSSQL API schema (#125)
1 parent 2470590 commit 9a03779

15 files changed (+270, -11 lines)

README.rst

Lines changed: 1 addition & 0 deletions

@@ -36,6 +36,7 @@ List of currently supported connections:
 * Clickhouse
 * Postgres
 * Oracle
+* MSSQL
 * HDFS
 * S3


Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+Add MSSQL API schema

syncmaster/backend/api/v1/connections.py

Lines changed: 2 additions & 1 deletion

@@ -21,6 +21,7 @@
     CLICKHOUSE_TYPE,
     HDFS_TYPE,
     HIVE_TYPE,
+    MSSQL_TYPE,
     ORACLE_TYPE,
     POSTGRES_TYPE,
     S3_TYPE,
@@ -38,7 +39,7 @@

 router = APIRouter(tags=["Connections"], responses=get_error_responses())

-CONNECTION_TYPES = ORACLE_TYPE, POSTGRES_TYPE, CLICKHOUSE_TYPE, HIVE_TYPE, S3_TYPE, HDFS_TYPE
+CONNECTION_TYPES = ORACLE_TYPE, POSTGRES_TYPE, CLICKHOUSE_TYPE, HIVE_TYPE, MSSQL_TYPE, S3_TYPE, HDFS_TYPE


 @router.get("/connections")

syncmaster/schemas/v1/connection_types.py

Lines changed: 2 additions & 0 deletions

@@ -7,6 +7,7 @@
 ORACLE_TYPE = Literal["oracle"]
 POSTGRES_TYPE = Literal["postgres"]
 CLICKHOUSE_TYPE = Literal["clickhouse"]
+MSSQL_TYPE = Literal["mssql"]
 S3_TYPE = Literal["s3"]
 HDFS_TYPE = Literal["hdfs"]

@@ -16,5 +17,6 @@ class ConnectionType(str, Enum):
     HIVE = "hive"
     ORACLE = "oracle"
     CLICKHOUSE = "clickhouse"
+    MSSQL = "mssql"
     S3 = "s3"
     HDFS = "hdfs"
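
The new database shows up in two places: MSSQL_TYPE is a typing.Literal alias used to annotate the "type" field of the pydantic schemas, while ConnectionType.MSSQL is the enum member the rest of the code (and the test fixture further down) iterates over. A minimal standalone sketch of how the two relate, not taken from the repo:

from enum import Enum
from typing import Literal

MSSQL_TYPE = Literal["mssql"]        # annotates the schema's "type" field

class ConnectionType(str, Enum):     # iterated over by fixtures and API code
    MSSQL = "mssql"

# The Literal's allowed value and the enum value have to stay in sync,
# otherwise a schema would accept a tag the rest of the code never produces.
assert MSSQL_TYPE.__args__ == (ConnectionType.MSSQL.value,)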

syncmaster/schemas/v1/connections/connection.py

Lines changed: 14 additions & 0 deletions

@@ -27,6 +27,14 @@
     UpdateHiveAuthSchema,
     UpdateHiveConnectionSchema,
 )
+from syncmaster.schemas.v1.connections.mssql import (
+    CreateMSSQLAuthSchema,
+    CreateMSSQLConnectionSchema,
+    ReadMSSQLAuthSchema,
+    ReadMSSQLConnectionSchema,
+    UpdateMSSQLAuthSchema,
+    UpdateMSSQLConnectionSchema,
+)
 from syncmaster.schemas.v1.connections.oracle import (
     CreateOracleAuthSchema,
     CreateOracleConnectionSchema,
@@ -60,13 +68,15 @@
     | ReadOracleConnectionSchema
     | ReadPostgresConnectionSchema
     | ReadClickhouseConnectionSchema
+    | ReadMSSQLConnectionSchema
     | S3ReadConnectionSchema
 )
 CreateConnectionDataSchema = (
     CreateHiveConnectionSchema
     | CreateOracleConnectionSchema
     | CreatePostgresConnectionSchema
     | CreateClickhouseConnectionSchema
+    | CreateMSSQLConnectionSchema
     | HDFSCreateConnectionSchema
     | S3CreateConnectionSchema
 )
@@ -77,12 +87,14 @@
     | UpdateOracleConnectionSchema
     | UpdatePostgresConnectionSchema
     | UpdateClickhouseConnectionSchema
+    | UpdateMSSQLConnectionSchema
 )
 ReadConnectionAuthDataSchema = (
     ReadHiveAuthSchema
     | ReadOracleAuthSchema
     | ReadPostgresAuthSchema
     | ReadClickhouseAuthSchema
+    | ReadMSSQLAuthSchema
     | S3ReadAuthSchema
     | HDFSReadAuthSchema
 )
@@ -91,6 +103,7 @@
     | CreateOracleAuthSchema
     | CreatePostgresAuthSchema
     | CreateClickhouseAuthSchema
+    | CreateMSSQLAuthSchema
     | S3CreateAuthSchema
     | HDFSCreateAuthSchema
 )
@@ -99,6 +112,7 @@
     | UpdateOracleAuthSchema
     | UpdatePostgresAuthSchema
     | UpdateClickhouseAuthSchema
+    | UpdateMSSQLAuthSchema
     | S3UpdateAuthSchema
     | HDFSUpdateAuthSchema
 )
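
Each per-database schema carries a Literal "type" field, and the unions above are what request bodies are validated against. A minimal, self-contained sketch of that mechanism, assuming pydantic v2 and a discriminated union keyed on "type" (consistent with the union_tag_invalid error asserted in the test below, though the service's actual wiring may differ; field values are made up):

from typing import Annotated, Literal, Union

from pydantic import BaseModel, Field, TypeAdapter

class CreatePostgresConnectionSchema(BaseModel):
    type: Literal["postgres"]
    host: str
    port: int
    database_name: str

class CreateMSSQLConnectionSchema(BaseModel):
    type: Literal["mssql"]
    host: str
    port: int
    database: str

# Hypothetical two-member union, analogous to CreateConnectionDataSchema above.
CreateConnectionData = Annotated[
    Union[CreatePostgresConnectionSchema, CreateMSSQLConnectionSchema],
    Field(discriminator="type"),
]

adapter = TypeAdapter(CreateConnectionData)
conn = adapter.validate_python(
    {"type": "mssql", "host": "mssql.example.org", "port": 1433, "database": "sales"},
)
print(type(conn).__name__)  # -> CreateMSSQLConnectionSchema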

syncmaster/schemas/v1/connections/mssql.py

Lines changed: 47 additions & 0 deletions

@@ -0,0 +1,47 @@
+# SPDX-FileCopyrightText: 2023-2024 MTS PJSC
+# SPDX-License-Identifier: Apache-2.0
+from pydantic import BaseModel, Field, SecretStr
+
+from syncmaster.schemas.v1.connection_types import MSSQL_TYPE
+
+
+class MSSQLBaseSchema(BaseModel):
+    type: MSSQL_TYPE
+
+    class Config:
+        from_attributes = True
+
+
+class ReadMSSQLConnectionSchema(MSSQLBaseSchema):
+    host: str
+    port: int
+    database: str
+    additional_params: dict = Field(default_factory=dict)
+
+
+class ReadMSSQLAuthSchema(MSSQLBaseSchema):
+    user: str
+
+
+class UpdateMSSQLConnectionSchema(MSSQLBaseSchema):
+    host: str | None = None
+    port: int | None = None
+    database: str | None = None
+    additional_params: dict | None = Field(default_factory=dict)
+
+
+class UpdateMSSQLAuthSchema(MSSQLBaseSchema):
+    user: str | None = None  # noqa: F722
+    password: SecretStr | None = None
+
+
+class CreateMSSQLConnectionSchema(MSSQLBaseSchema):
+    host: str
+    port: int
+    database: str
+    additional_params: dict = Field(default_factory=dict)
+
+
+class CreateMSSQLAuthSchema(MSSQLBaseSchema):
+    user: str
+    password: SecretStr
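
A brief usage sketch for the new schemas (all values are made up); the import path matches the one added to connection.py above, and password is a SecretStr, so it stays masked unless its value is requested explicitly:

from syncmaster.schemas.v1.connections.mssql import (
    CreateMSSQLAuthSchema,
    CreateMSSQLConnectionSchema,
)

conn = CreateMSSQLConnectionSchema(
    type="mssql",
    host="mssql.example.org",
    port=1433,
    database="sales",
    additional_params={"encrypt": "true"},   # made-up driver option
)
auth = CreateMSSQLAuthSchema(type="mssql", user="app_user", password="s3cret")

print(auth.password)                     # ********** (masked by SecretStr)
print(auth.password.get_secret_value())  # s3cret (only on explicit request)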

syncmaster/schemas/v1/transfers/__init__.py

Lines changed: 7 additions & 0 deletions

@@ -9,6 +9,7 @@
 from syncmaster.schemas.v1.transfers.db import (
     ClickhouseReadTransferSourceAndTarget,
     HiveReadTransferSourceAndTarget,
+    MSSQLReadTransferSourceAndTarget,
     OracleReadTransferSourceAndTarget,
     PostgresReadTransferSourceAndTarget,
 )
@@ -33,6 +34,7 @@
     | HiveReadTransferSourceAndTarget
     | OracleReadTransferSourceAndTarget
     | ClickhouseReadTransferSourceAndTarget
+    | MSSQLReadTransferSourceAndTarget
     | S3ReadTransferSource
 )

@@ -42,6 +44,7 @@
     | HiveReadTransferSourceAndTarget
     | OracleReadTransferSourceAndTarget
     | ClickhouseReadTransferSourceAndTarget
+    | MSSQLReadTransferSourceAndTarget
     | S3ReadTransferTarget
 )

@@ -51,6 +54,7 @@
     | HiveReadTransferSourceAndTarget
     | OracleReadTransferSourceAndTarget
     | ClickhouseReadTransferSourceAndTarget
+    | MSSQLReadTransferSourceAndTarget
     | S3CreateTransferSource
 )

@@ -60,6 +64,7 @@
     | HiveReadTransferSourceAndTarget
     | OracleReadTransferSourceAndTarget
     | ClickhouseReadTransferSourceAndTarget
+    | MSSQLReadTransferSourceAndTarget
     | S3CreateTransferTarget
 )

@@ -69,6 +74,7 @@
     | HiveReadTransferSourceAndTarget
     | OracleReadTransferSourceAndTarget
     | ClickhouseReadTransferSourceAndTarget
+    | MSSQLReadTransferSourceAndTarget
     | S3CreateTransferSource
     | None
 )
@@ -79,6 +85,7 @@
     | HiveReadTransferSourceAndTarget
     | OracleReadTransferSourceAndTarget
     | ClickhouseReadTransferSourceAndTarget
+    | MSSQLReadTransferSourceAndTarget
     | S3CreateTransferTarget
     | None
 )

syncmaster/schemas/v1/transfers/db.py

Lines changed: 5 additions & 0 deletions

@@ -7,6 +7,7 @@
 from syncmaster.schemas.v1.connection_types import (
     CLICKHOUSE_TYPE,
     HIVE_TYPE,
+    MSSQL_TYPE,
     ORACLE_TYPE,
     POSTGRES_TYPE,
 )
@@ -30,3 +31,7 @@ class PostgresReadTransferSourceAndTarget(ReadDBTransfer):


 class ClickhouseReadTransferSourceAndTarget(ReadDBTransfer):
     type: CLICKHOUSE_TYPE
+
+
+class MSSQLReadTransferSourceAndTarget(ReadDBTransfer):
+    type: MSSQL_TYPE

tests/test_unit/test_connections/connection_fixtures/group_connections_fixture.py

Lines changed: 32 additions & 4 deletions

@@ -1,3 +1,5 @@
+from collections.abc import AsyncGenerator
+
 import pytest_asyncio
 from sqlalchemy.ext.asyncio import AsyncSession

@@ -7,7 +9,10 @@


 @pytest_asyncio.fixture
-async def group_connections(group_connection: MockConnection, session: AsyncSession) -> list[MockConnection]:
+async def group_connections(
+    group_connection: MockConnection,
+    session: AsyncSession,
+) -> AsyncGenerator[list[MockConnection], None]:
     connection = group_connection.connection

     # start with the connection from group_connection fixture
@@ -17,13 +22,36 @@ async def group_connections(group_connection: MockConnection, session: AsyncSess
     # since group_connection already created a connection, we start from index 1
     for conn_type in connection_types[1:]:

-        new_data = {  # TODO: create different dicts
+        new_data = {
             **connection.data,
             "type": conn_type.value,
-            "cluster": "cluster",
-            "bucket": "bucket",
         }

+        if conn_type in [ConnectionType.HDFS, ConnectionType.HIVE]:
+            new_data.update(
+                {
+                    "cluster": "cluster",
+                },
+            )
+        elif conn_type == ConnectionType.S3:
+            new_data.update(
+                {
+                    "bucket": "bucket",
+                },
+            )
+        elif conn_type == ConnectionType.POSTGRES:
+            new_data.update(
+                {
+                    "database_name": "database",
+                },
+            )
+        elif conn_type in [ConnectionType.ORACLE, ConnectionType.CLICKHOUSE, ConnectionType.MSSQL]:
+            new_data.update(
+                {
+                    "database": "database",
+                },
+            )
+
         new_connection = Connection(
             group_id=connection.group_id,
             name=f"{connection.name}_{conn_type.value}",
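
The return annotation changes because the fixture now presumably yields its list (so any cleanup can run after the test) instead of returning it, and an async yielding fixture is typed as AsyncGenerator. A simplified sketch of that pattern, not the repo's fixture:

from collections.abc import AsyncGenerator

import pytest_asyncio

@pytest_asyncio.fixture
async def sample_items() -> AsyncGenerator[list[int], None]:
    items = [1, 2, 3]   # setup: build the objects the test needs
    yield items         # value injected into the test
    items.clear()       # teardown runs here after the test finishes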

tests/test_unit/test_connections/test_create_connection.py

Lines changed: 2 additions & 2 deletions

@@ -282,7 +282,7 @@ async def test_check_fields_validation_on_create_connection(
             "context": {
                 "discriminator": "'type'",
                 "tag": "POSTGRESQL",
-                "expected_tags": "'hive', 'oracle', 'postgres', 'clickhouse', 'hdfs', 's3'",
+                "expected_tags": "'hive', 'oracle', 'postgres', 'clickhouse', 'mssql', 'hdfs', 's3'",
             },
             "input": {
                 "type": "POSTGRESQL",
@@ -292,7 +292,7 @@ async def test_check_fields_validation_on_create_connection(
                 "database_name": "postgres",
             },
             "location": ["body", "connection_data"],
-            "message": "Input tag 'POSTGRESQL' found using 'type' does not match any of the expected tags: 'hive', 'oracle', 'postgres', 'clickhouse', 'hdfs', 's3'",
+            "message": "Input tag 'POSTGRESQL' found using 'type' does not match any of the expected tags: 'hive', 'oracle', 'postgres', 'clickhouse', 'mssql', 'hdfs', 's3'",
             "code": "union_tag_invalid",
         },
     ],
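
The expected_tags and message strings asserted here come straight from pydantic's discriminated-union validation: an unknown "type" tag produces a union_tag_invalid error that lists every registered tag, which is why adding MSSQL changes these expectations. A self-contained sketch with hypothetical two-member models (the service presumably maps pydantic's ctx/msg into its own context/message keys):

from typing import Annotated, Literal, Union

from pydantic import BaseModel, Field, TypeAdapter, ValidationError

class Postgres(BaseModel):
    type: Literal["postgres"]

class MSSQL(BaseModel):
    type: Literal["mssql"]

adapter = TypeAdapter(Annotated[Union[Postgres, MSSQL], Field(discriminator="type")])

try:
    adapter.validate_python({"type": "POSTGRESQL"})
except ValidationError as exc:
    err = exc.errors()[0]
    print(err["type"])                  # union_tag_invalid
    print(err["ctx"]["discriminator"])  # 'type'
    print(err["ctx"]["tag"])            # POSTGRESQL
    print(err["ctx"]["expected_tags"])  # 'postgres', 'mssql'
    print(err["msg"])                   # Input tag 'POSTGRESQL' found using 'type' does not match ...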
