
Commit 2154658

Author: Ilyas Gasanov (committed)
[DOP-22146] Set the names of saved files
1 parent 98f006c commit 2154658

27 files changed: +272 -44 lines changed

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Use the `file_name_template` field to specify the names of saved files

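For illustration, with the default template this commit introduces below, a rendered file name could look like the following (the timestamp and index values are invented for the example):

# Hypothetical rendering of the default file_name_template; values are invented.
template = "{run_created_at}-{index}.{extension}"
print(template.format(run_created_at="2024_05_17_10_30_00", index=0, extension="csv"))
# -> 2024_05_17_10_30_00-0.csv
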
syncmaster/dto/connections.py

Lines changed: 2 additions & 0 deletions
@@ -75,6 +75,8 @@ class HiveConnectionDTO(ConnectionDTO):

 @dataclass
 class HDFSConnectionDTO(ConnectionDTO):
+    host: str
+    port: int
     user: str
     password: str
     cluster: str

syncmaster/dto/runs.py

Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
+# SPDX-FileCopyrightText: 2023-2024 MTS PJSC
+# SPDX-License-Identifier: Apache-2.0
+from dataclasses import dataclass
+from datetime import datetime
+
+
+@dataclass
+class RunDTO:
+    id: str
+    created_at: datetime

syncmaster/dto/transfers.py

Lines changed: 1 addition & 0 deletions
@@ -23,6 +23,7 @@ class FileTransferDTO(TransferDTO):
     directory_path: str
     file_format: CSV | JSONLine | JSON | Excel | XML | ORC | Parquet
     options: dict
+    file_name_template: str | None = None
     df_schema: dict | None = None
     transformations: list[dict] | None = None

syncmaster/schemas/v1/connections/hdfs.py

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ class ReadHDFSConnectionDataSchema(BaseModel):


 class UpdateHDFSConnectionDataSchema(BaseModel):
-    cluster: str
+    cluster: str | None = None


 class CreateHDFSConnectionSchema(CreateConnectionBaseSchema):

syncmaster/schemas/v1/transfers/file/base.py

Lines changed: 6 additions & 2 deletions
@@ -2,6 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0
 from __future__ import annotations

+import re
 from pathlib import PurePosixPath
 from typing import Any

@@ -63,7 +64,7 @@ class CreateFileTransferTarget(BaseModel):
         discriminator="type",
     )
     file_name_template: str = Field(
-        default="{run_created_at}_{index}.{extension}",
+        default="{run_created_at}-{index}.{extension}",
         description="Template for file naming with required placeholders 'index' and 'extension'",
     )
     options: dict[str, Any] = Field(default_factory=dict)
@@ -80,7 +81,10 @@ def _directory_path_is_valid_path(cls, value):

     @field_validator("file_name_template")
     @classmethod
-    def validate_file_name_template(cls, value):
+    def validate_file_name_template(cls, value: str) -> str:
+        if not re.match(r"^[a-zA-Z0-9_.{}-]+$", value):
+            raise ValueError("Template contains invalid characters. Allowed: letters, numbers, '.', '_', '-', '{', '}'")
+
         required_keys = {"index", "extension"}
         placeholders = {key for key in required_keys if f"{{{key}}}" in value}

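As a hedged, standalone sketch of what the validator above accepts and rejects (re-implemented here for illustration; the exact error raised for missing placeholders is an assumption, since the rest of the method is not shown in this hunk):

# Standalone sketch of the checks in validate_file_name_template; not the actual class method.
import re

def check_template(value: str) -> str:
    if not re.match(r"^[a-zA-Z0-9_.{}-]+$", value):
        raise ValueError("Template contains invalid characters")
    # Assumed continuation: both required placeholders must be present.
    if not all(f"{{{key}}}" in value for key in ("index", "extension")):
        raise ValueError("Template must contain the 'index' and 'extension' placeholders")
    return value

check_template("{run_created_at}-{index}.{extension}")  # passes
check_template("report {index}.{extension}")            # raises ValueError: space is not allowed
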
syncmaster/worker/controller.py

Lines changed: 19 additions & 1 deletion
@@ -20,6 +20,7 @@
     SFTPConnectionDTO,
     WebDAVConnectionDTO,
 )
+from syncmaster.dto.runs import RunDTO
 from syncmaster.dto.transfers import (
     ClickhouseTransferDTO,
     FTPSTransferDTO,
@@ -60,66 +61,79 @@
         HiveHandler,
         HiveConnectionDTO,
         HiveTransferDTO,
+        RunDTO,
     ),
     "oracle": (
         OracleHandler,
         OracleConnectionDTO,
         OracleTransferDTO,
+        RunDTO,
     ),
     "clickhouse": (
         ClickhouseHandler,
         ClickhouseConnectionDTO,
         ClickhouseTransferDTO,
+        RunDTO,
     ),
     "mssql": (
         MSSQLHandler,
         MSSQLConnectionDTO,
         MSSQLTransferDTO,
+        RunDTO,
     ),
     "mysql": (
         MySQLHandler,
         MySQLConnectionDTO,
         MySQLTransferDTO,
+        RunDTO,
     ),
     "postgres": (
         PostgresHandler,
         PostgresConnectionDTO,
         PostgresTransferDTO,
+        RunDTO,
     ),
     "s3": (
         S3Handler,
         S3ConnectionDTO,
         S3TransferDTO,
+        RunDTO,
     ),
     "hdfs": (
         HDFSHandler,
         HDFSConnectionDTO,
         HDFSTransferDTO,
+        RunDTO,
     ),
     "sftp": (
         SFTPHandler,
         SFTPConnectionDTO,
         SFTPTransferDTO,
+        RunDTO,
     ),
     "ftp": (
         FTPHandler,
         FTPConnectionDTO,
         FTPTransferDTO,
+        RunDTO,
     ),
     "ftps": (
         FTPSHandler,
         FTPSConnectionDTO,
         FTPSTransferDTO,
+        RunDTO,
     ),
     "samba": (
         SambaHandler,
         SambaConnectionDTO,
         SambaTransferDTO,
+        RunDTO,
     ),
     "webdav": (
         WebDAVHandler,
         WebDAVConnectionDTO,
         WebDAVTransferDTO,
+        RunDTO,
     ),
 }

@@ -141,13 +155,15 @@ def __init__(
         self.run = run
         self.source_handler = self.get_handler(
             connection_data=source_connection.data,
+            run_data={"id": run.id, "created_at": run.created_at},
             transfer_params=run.transfer.source_params,
             transformations=run.transfer.transformations,
             connection_auth_data=source_auth_data,
             temp_dir=TemporaryDirectory(dir=self.temp_dir.name, prefix="downloaded_"),
         )
         self.target_handler = self.get_handler(
             connection_data=target_connection.data,
+            run_data={"id": run.id, "created_at": run.created_at},
             transfer_params=run.transfer.target_params,
             transformations=run.transfer.transformations,
             connection_auth_data=target_auth_data,
@@ -175,6 +191,7 @@ def get_handler(
         self,
         connection_data: dict[str, Any],
         connection_auth_data: dict,
+        run_data: dict[str, Any],
         transfer_params: dict[str, Any],
         transformations: list[dict],
         temp_dir: TemporaryDirectory,
@@ -186,10 +203,11 @@ def get_handler(
         if connection_handler_proxy.get(handler_type, None) is None:
             raise ConnectionTypeNotRecognizedError

-        handler, connection_dto, transfer_dto = connection_handler_proxy[handler_type]
+        handler, connection_dto, transfer_dto, run_dto = connection_handler_proxy[handler_type]

         return handler(
             connection_dto=connection_dto(**connection_data),
             transfer_dto=transfer_dto(**transfer_params, transformations=transformations),
+            run_dto=run_dto(**run_data),
             temp_dir=temp_dir,
         )

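The registry above now maps each connection type to a four-item tuple that get_handler unpacks and instantiates. A toy, self-contained sketch of that lookup (the handler and DTO names below are stand-ins, not the real SyncMaster classes):

# Toy illustration of the four-tuple registry lookup; only RunDTO mirrors the real dataclass.
from dataclasses import dataclass
from datetime import datetime

@dataclass
class RunDTO:
    id: str
    created_at: datetime

registry = {
    "hdfs": ("HDFSHandler", "HDFSConnectionDTO", "HDFSTransferDTO", RunDTO),
}

handler_cls, connection_dto, transfer_dto, run_dto = registry["hdfs"]
run = run_dto(id="42", created_at=datetime(2024, 5, 17, 10, 30))
print(handler_cls, run)  # HDFSHandler RunDTO(id='42', created_at=datetime.datetime(2024, 5, 17, 10, 30))
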
syncmaster/worker/handlers/base.py

Lines changed: 3 additions & 0 deletions
@@ -8,6 +8,7 @@
 from typing import TYPE_CHECKING

 from syncmaster.dto.connections import ConnectionDTO
+from syncmaster.dto.runs import RunDTO
 from syncmaster.dto.transfers import TransferDTO

 if TYPE_CHECKING:
@@ -20,10 +21,12 @@ def __init__(
         self,
         connection_dto: ConnectionDTO,
         transfer_dto: TransferDTO,
+        run_dto: RunDTO,
         temp_dir: TemporaryDirectory,
     ):
         self.connection_dto = connection_dto
         self.transfer_dto = transfer_dto
+        self.run_dto = run_dto
         self.temp_dir = temp_dir

     @abstractmethod

syncmaster/worker/handlers/file/base.py

Lines changed: 61 additions & 9 deletions
@@ -3,10 +3,11 @@

 from __future__ import annotations

+import os
 from typing import TYPE_CHECKING

 from onetl.base.base_file_df_connection import BaseFileDFConnection
-from onetl.file import FileDFReader, FileDFWriter
+from onetl.file import FileDFReader, FileDFWriter, FileMover

 from syncmaster.dto.connections import ConnectionDTO
 from syncmaster.dto.transfers import FileTransferDTO
@@ -17,7 +18,7 @@


 class FileHandler(Handler):
-    connection: BaseFileDFConnection
+    df_connection: BaseFileDFConnection
     connection_dto: ConnectionDTO
     transfer_dto: FileTransferDTO
     _operators = {
@@ -40,7 +41,7 @@ def read(self) -> DataFrame:
         from pyspark.sql.types import StructType

         reader = FileDFReader(
-            connection=self.connection,
+            connection=self.df_connection,
             format=self.transfer_dto.file_format,
             source_path=self.transfer_dto.directory_path,
             df_schema=StructType.fromJson(self.transfer_dto.df_schema) if self.transfer_dto.df_schema else None,
@@ -59,14 +60,65 @@ def read(self) -> DataFrame:
         return df

     def write(self, df: DataFrame) -> None:
-        writer = FileDFWriter(
-            connection=self.connection,
-            format=self.transfer_dto.file_format,
-            target_path=self.transfer_dto.directory_path,
-            options=self.transfer_dto.options,
+        tmp_path = os.path.join(self.transfer_dto.directory_path, ".tmp", str(self.run_dto.id))
+        try:
+            writer = FileDFWriter(
+                connection=self.df_connection,
+                format=self.transfer_dto.file_format,
+                target_path=tmp_path,
+                options=self.transfer_dto.options,
+            )
+            writer.run(df=df)
+
+            self._rename_files(tmp_path)
+
+            mover = FileMover(
+                connection=self.file_connection,
+                source_path=tmp_path,
+                target_path=self.transfer_dto.directory_path,
+            )
+            mover.run()
+        finally:
+            self.file_connection.remove_dir(tmp_path, recursive=True)
+
+    def _rename_files(self, tmp_path: str) -> None:
+        files = self.file_connection.list_dir(tmp_path)
+
+        for index, file_name in enumerate(files):
+            extension = self._get_file_extension(str(file_name))
+            new_name = self._get_file_name(str(index), extension)
+            old_path = os.path.join(tmp_path, file_name)
+            new_path = os.path.join(tmp_path, new_name)
+            self.file_connection.rename_file(old_path, new_path)
+
+    def _get_file_name(self, index: str, extension: str) -> str:
+        return self.transfer_dto.file_name_template.format(
+            index=index,
+            extension=extension,
+            run_id=self.run_dto.id,
+            run_created_at=self.run_dto.created_at.strftime("%Y_%m_%d_%H_%M_%S"),
         )

-        return writer.run(df=df)
+    def _get_file_extension(self, file_name: str) -> str:
+        extension = self.transfer_dto.file_format.name
+        parts = file_name.split(".")
+
+        if extension == "xml":  # spark-xml does not write any extension to files
+            if len(parts) <= 1:
+                return extension
+
+            compression = parts[-1]
+
+        else:
+            if len(parts) <= 2:
+                return extension
+
+            compression = parts[-1] if parts[-1] != extension else parts[-2]
+
+        if extension in ("parquet", "orc"):
+            return f"{compression}.{extension}"
+
+        return f"{extension}.{compression}"

     def _make_rows_filter_expression(self, filters: list[dict]) -> str | None:
         expressions = []

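To make the renaming logic above concrete, here is a hedged, standalone sketch of what _get_file_name and _get_file_extension would produce for a typical Spark part file (the part-file name and run timestamp are invented for illustration):

# Standalone re-creation of the naming logic; the real code runs as FileHandler methods.
from datetime import datetime

template = "{run_created_at}-{index}.{extension}"
run_created_at = datetime(2024, 5, 17, 10, 30, 0).strftime("%Y_%m_%d_%H_%M_%S")

spark_file = "part-00000-abc123.csv.gz"  # hypothetical CSV part file with gzip compression
parts = spark_file.split(".")
extension = "csv"
compression = parts[-1] if parts[-1] != extension else parts[-2]  # -> "gz"

new_name = template.format(index=0, extension=f"{extension}.{compression}", run_created_at=run_created_at)
print(new_name)  # -> 2024_05_17_10_30_00-0.csv.gz
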
syncmaster/worker/handlers/file/ftp.py

Lines changed: 2 additions & 2 deletions
@@ -18,12 +18,12 @@ class FTPHandler(FileProtocolHandler):
     connection_dto: FTPConnectionDTO

     def connect(self, spark: SparkSession) -> None:
-        self.connection = FTP(
+        self.file_connection = FTP(
             host=self.connection_dto.host,
             port=self.connection_dto.port,
             user=self.connection_dto.user,
             password=self.connection_dto.password,
         ).check()
-        self.local_connection = SparkLocalFS(
+        self.local_df_connection = SparkLocalFS(
             spark=spark,
         ).check()
