
Commit 7b53c29

Ilyas Gasanov committed
[DOP-22146] Set the names of saved files
1 parent 98f006c commit 7b53c29

File tree

18 files changed: +226 -25 lines changed

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Use the `file_name_template` field to specify the names of saved files
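For illustration, a hypothetical target section of a file transfer that sets the new field. Only `file_name_template` and its placeholders (`{index}`, `{extension}`, plus `{run_id}` and `{run_created_at}`, substituted by the worker) come from this commit; the surrounding field values are assumptions:

# Sketch only: a made-up target_params payload, not the exact API shape.
target_params = {
    "directory_path": "/data/exports",                           # assumed path
    "file_format": {"type": "csv"},                              # assumed format payload
    "file_name_template": "{run_created_at}-{index}.{extension}",
    "options": {},
}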

syncmaster/dto/runs.py

Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
+# SPDX-FileCopyrightText: 2023-2024 MTS PJSC
+# SPDX-License-Identifier: Apache-2.0
+from dataclasses import dataclass
+from datetime import datetime
+
+
+@dataclass
+class RunDTO:
+    id: str
+    created_at: datetime

syncmaster/dto/transfers.py

Lines changed: 1 addition & 0 deletions
@@ -23,6 +23,7 @@ class FileTransferDTO(TransferDTO):
     directory_path: str
     file_format: CSV | JSONLine | JSON | Excel | XML | ORC | Parquet
     options: dict
+    file_name_template: str | None = None
     df_schema: dict | None = None
     transformations: list[dict] | None = None

syncmaster/schemas/v1/transfers/file/base.py

Lines changed: 1 addition & 1 deletion
@@ -63,7 +63,7 @@ class CreateFileTransferTarget(BaseModel):
         discriminator="type",
     )
     file_name_template: str = Field(
-        default="{run_created_at}_{index}.{extension}",
+        default="{run_created_at}-{index}.{extension}",
         description="Template for file naming with required placeholders 'index' and 'extension'",
     )
     options: dict[str, Any] = Field(default_factory=dict)
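A minimal sketch of how the new default renders, assuming a run created at 2024-05-01 12:30:45 and the "%Y_%m_%d_%H_%M_%S" formatting that FileHandler._get_file_name applies further down in this commit (the switch from "_" to "-" presumably keeps the timestamp's underscores from blending into the index):

template = "{run_created_at}-{index}.{extension}"
file_name = (
    template.replace("{run_created_at}", "2024_05_01_12_30_45")  # assumed run timestamp
    .replace("{index}", "0")
    .replace("{extension}", "csv")
)
print(file_name)  # 2024_05_01_12_30_45-0.csv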

syncmaster/worker/controller.py

Lines changed: 19 additions & 1 deletion
@@ -20,6 +20,7 @@
     SFTPConnectionDTO,
     WebDAVConnectionDTO,
 )
+from syncmaster.dto.runs import RunDTO
 from syncmaster.dto.transfers import (
     ClickhouseTransferDTO,
     FTPSTransferDTO,
@@ -60,66 +61,79 @@
         HiveHandler,
         HiveConnectionDTO,
         HiveTransferDTO,
+        RunDTO,
     ),
     "oracle": (
         OracleHandler,
         OracleConnectionDTO,
         OracleTransferDTO,
+        RunDTO,
     ),
     "clickhouse": (
         ClickhouseHandler,
         ClickhouseConnectionDTO,
         ClickhouseTransferDTO,
+        RunDTO,
     ),
     "mssql": (
         MSSQLHandler,
         MSSQLConnectionDTO,
         MSSQLTransferDTO,
+        RunDTO,
     ),
     "mysql": (
         MySQLHandler,
         MySQLConnectionDTO,
         MySQLTransferDTO,
+        RunDTO,
     ),
     "postgres": (
         PostgresHandler,
         PostgresConnectionDTO,
         PostgresTransferDTO,
+        RunDTO,
     ),
     "s3": (
         S3Handler,
         S3ConnectionDTO,
         S3TransferDTO,
+        RunDTO,
     ),
     "hdfs": (
         HDFSHandler,
         HDFSConnectionDTO,
         HDFSTransferDTO,
+        RunDTO,
     ),
     "sftp": (
         SFTPHandler,
         SFTPConnectionDTO,
         SFTPTransferDTO,
+        RunDTO,
     ),
     "ftp": (
         FTPHandler,
         FTPConnectionDTO,
         FTPTransferDTO,
+        RunDTO,
     ),
     "ftps": (
         FTPSHandler,
         FTPSConnectionDTO,
         FTPSTransferDTO,
+        RunDTO,
     ),
     "samba": (
         SambaHandler,
         SambaConnectionDTO,
         SambaTransferDTO,
+        RunDTO,
     ),
     "webdav": (
         WebDAVHandler,
         WebDAVConnectionDTO,
         WebDAVTransferDTO,
+        RunDTO,
     ),
 }
@@ -141,13 +155,15 @@ def __init__(
         self.run = run
         self.source_handler = self.get_handler(
             connection_data=source_connection.data,
+            run_data={"id": run.id, "created_at": run.created_at},
             transfer_params=run.transfer.source_params,
             transformations=run.transfer.transformations,
             connection_auth_data=source_auth_data,
             temp_dir=TemporaryDirectory(dir=self.temp_dir.name, prefix="downloaded_"),
         )
         self.target_handler = self.get_handler(
             connection_data=target_connection.data,
+            run_data={"id": run.id, "created_at": run.created_at},
             transfer_params=run.transfer.target_params,
             transformations=run.transfer.transformations,
             connection_auth_data=target_auth_data,
@@ -175,6 +191,7 @@ def get_handler(
         self,
         connection_data: dict[str, Any],
         connection_auth_data: dict,
+        run_data: dict[str, Any],
         transfer_params: dict[str, Any],
         transformations: list[dict],
         temp_dir: TemporaryDirectory,
@@ -186,10 +203,11 @@ def get_handler(
         if connection_handler_proxy.get(handler_type, None) is None:
             raise ConnectionTypeNotRecognizedError

-        handler, connection_dto, transfer_dto = connection_handler_proxy[handler_type]
+        handler, connection_dto, transfer_dto, run_dto = connection_handler_proxy[handler_type]

         return handler(
             connection_dto=connection_dto(**connection_data),
             transfer_dto=transfer_dto(**transfer_params, transformations=transformations),
+            run_dto=run_dto(**run_data),
             temp_dir=temp_dir,
         )
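As a sketch of the wiring added here: the controller packs run identity into a plain dict and get_handler() rebuilds it as a RunDTO for the handler. The id and timestamp below are made up for illustration:

from datetime import datetime

from syncmaster.dto.runs import RunDTO

run_data = {"id": "9a1b2c3d", "created_at": datetime(2024, 5, 1, 12, 30, 45)}  # assumed values
run_dto = RunDTO(**run_data)
print(run_dto.id, run_dto.created_at)  # 9a1b2c3d 2024-05-01 12:30:45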

syncmaster/worker/handlers/base.py

Lines changed: 3 additions & 0 deletions
@@ -8,6 +8,7 @@
 from typing import TYPE_CHECKING

 from syncmaster.dto.connections import ConnectionDTO
+from syncmaster.dto.runs import RunDTO
 from syncmaster.dto.transfers import TransferDTO

 if TYPE_CHECKING:
@@ -20,10 +21,12 @@ def __init__(
         self,
         connection_dto: ConnectionDTO,
         transfer_dto: TransferDTO,
+        run_dto: RunDTO,
         temp_dir: TemporaryDirectory,
     ):
         self.connection_dto = connection_dto
         self.transfer_dto = transfer_dto
+        self.run_dto = run_dto
         self.temp_dir = temp_dir

     @abstractmethod

syncmaster/worker/handlers/file/base.py

Lines changed: 67 additions & 6 deletions
@@ -3,10 +3,11 @@

 from __future__ import annotations

+import os
 from typing import TYPE_CHECKING

 from onetl.base.base_file_df_connection import BaseFileDFConnection
-from onetl.file import FileDFReader, FileDFWriter
+from onetl.file import FileDFReader, FileDFWriter, FileMover

 from syncmaster.dto.connections import ConnectionDTO
 from syncmaster.dto.transfers import FileTransferDTO
@@ -17,7 +18,7 @@


 class FileHandler(Handler):
-    connection: BaseFileDFConnection
+    df_connection: BaseFileDFConnection
     connection_dto: ConnectionDTO
     transfer_dto: FileTransferDTO
     _operators = {
@@ -40,7 +41,7 @@ def read(self) -> DataFrame:
         from pyspark.sql.types import StructType

         reader = FileDFReader(
-            connection=self.connection,
+            connection=self.df_connection,
             format=self.transfer_dto.file_format,
             source_path=self.transfer_dto.directory_path,
             df_schema=StructType.fromJson(self.transfer_dto.df_schema) if self.transfer_dto.df_schema else None,
@@ -59,14 +60,74 @@ def read(self) -> DataFrame:
         return df

     def write(self, df: DataFrame) -> None:
+        tmp_path = os.path.join(self.transfer_dto.directory_path, ".tmp", str(self.run_dto.id))
         writer = FileDFWriter(
-            connection=self.connection,
+            connection=self.df_connection,
             format=self.transfer_dto.file_format,
-            target_path=self.transfer_dto.directory_path,
+            target_path=tmp_path,
             options=self.transfer_dto.options,
         )
+        writer.run(df=df)
+
+        self._rename_files(tmp_path)
+
+        mover = FileMover(
+            connection=self.connection,
+            source_path=tmp_path,
+            target_path=self.transfer_dto.directory_path,
+        )
+        mover.run()
+
+    def _rename_files(self, tmp_path: str) -> None:
+        files = self.connection.list_dir(tmp_path)
+
+        for index, file_name in enumerate(files):
+            extension = self._get_file_extension(str(file_name))
+            new_name = self._get_file_name(str(index), extension)
+            old_path = os.path.join(tmp_path, file_name)
+            new_path = os.path.join(tmp_path, new_name)
+            self.connection.rename_file(old_path, new_path)
+
+    def _get_file_name(self, index: str, extension: str) -> str:
+        return (
+            self.transfer_dto.file_name_template.replace(
+                "{index}",
+                index,
+            )
+            .replace(
+                "{extension}",
+                extension,
+            )
+            .replace(
+                "{run_id}",
+                str(self.run_dto.id),
+            )
+            .replace(
+                "{run_created_at}",
+                self.run_dto.created_at.strftime("%Y_%m_%d_%H_%M_%S"),
+            )
+        )
+
+    def _get_file_extension(self, file_name: str) -> str:
+        extension = self.transfer_dto.file_format.name
+        parts = file_name.split(".")
+
+        if extension == "xml":  # spark-xml does not write any extension to files
+            if len(parts) <= 1:
+                return extension
+
+            compression = parts[-1]
+
+        else:
+            if len(parts) <= 2:
+                return extension
+
+            compression = parts[-1] if parts[-1] != extension else parts[-2]
+
+        if extension in ("parquet", "orc"):
+            return f"{compression}.{extension}"

-        return writer.run(df=df)
+        return f"{extension}.{compression}"

     def _make_rows_filter_expression(self, filters: list[dict]) -> str | None:
         expressions = []
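A self-contained sketch of the extension logic above; the sample Spark part-file names are assumptions, not taken from this commit, and the expected results follow directly from the code:

def get_file_extension(file_name: str, extension: str) -> str:
    # Standalone copy of FileHandler._get_file_extension, for illustration only.
    parts = file_name.split(".")

    if extension == "xml":  # spark-xml does not write any extension to files
        if len(parts) <= 1:
            return extension
        compression = parts[-1]
    else:
        if len(parts) <= 2:
            return extension
        compression = parts[-1] if parts[-1] != extension else parts[-2]

    if extension in ("parquet", "orc"):
        return f"{compression}.{extension}"
    return f"{extension}.{compression}"

assert get_file_extension("part-0000-abc.csv", "csv") == "csv"
assert get_file_extension("part-0000-abc.csv.gz", "csv") == "csv.gz"
assert get_file_extension("part-0000-abc.snappy.parquet", "parquet") == "snappy.parquet"
assert get_file_extension("part-0000-abc", "xml") == "xml"

Combined with the default template, a gzip-compressed CSV part written during a run created at 2024-05-01 12:30:45 would end up as 2024_05_01_12_30_45-0.csv.gz in the target directory.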

syncmaster/worker/handlers/file/hdfs.py

Lines changed: 5 additions & 2 deletions
@@ -5,7 +5,7 @@

 from typing import TYPE_CHECKING

-from onetl.connection import SparkHDFS
+from onetl.connection import HDFS, SparkHDFS

 from syncmaster.dto.connections import HDFSConnectionDTO
 from syncmaster.worker.handlers.file.base import FileHandler
@@ -18,7 +18,10 @@ class HDFSHandler(FileHandler):
     connection_dto: HDFSConnectionDTO

     def connect(self, spark: SparkSession):
-        self.connection = SparkHDFS(
+        self.df_connection = SparkHDFS(
             cluster=self.connection_dto.cluster,
             spark=spark,
         ).check()
+        self.connection = HDFS(
+            cluster=self.connection_dto.cluster,
+        ).check()
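Note the pattern introduced here and mirrored in s3.py below: the Spark-facing connection (df_connection) drives FileDFReader/FileDFWriter, while the plain file connection drives list_dir, rename_file and FileMover in FileHandler. A rough sketch, assuming a cluster named "my-cluster" and an existing SparkSession bound to `spark`:

from onetl.connection import HDFS, SparkHDFS

df_connection = SparkHDFS(cluster="my-cluster", spark=spark).check()  # DataFrame reads/writes
connection = HDFS(cluster="my-cluster").check()                       # file listing/renaming/moving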

syncmaster/worker/handlers/file/protocol.py

Lines changed: 12 additions & 0 deletions
@@ -60,6 +60,8 @@ def write(self, df: DataFrame) -> None:
         for file in crc_files:
             os.remove(os.path.join(self.temp_dir.name, file))

+        self._rename_files()
+
         uploader = FileUploader(
             connection=self.connection,
             local_path=self.temp_dir.name,
@@ -68,6 +70,16 @@ def write(self, df: DataFrame) -> None:
         )
         uploader.run()

+    def _rename_files(self):
+        files = os.listdir(self.temp_dir.name)
+
+        for index, file_name in enumerate(files):
+            extension = self._get_file_extension(file_name)
+            new_name = self._get_file_name(str(index), extension)
+            old_path = os.path.join(self.temp_dir.name, file_name)
+            new_path = os.path.join(self.temp_dir.name, new_name)
+            os.rename(old_path, new_path)
+
     def _make_file_metadata_filters(self, filters: list[dict]) -> list[Glob | Regexp | FileSizeRange]:
         processed_filters = []
         for filter in filters:
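For protocol-style connections the renaming happens in the local temporary directory before FileUploader pushes the files. A runnable sketch of that step in isolation, with fabricated part-file names and an assumed run timestamp (the handler iterates os.listdir directly; the sketch sorts only to make the output deterministic):

import os
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    for part in ("part-0000-abc.csv", "part-0001-def.csv"):  # fabricated Spark output names
        open(os.path.join(tmp, part), "w").close()

    template = "{run_created_at}-{index}.{extension}"
    for index, file_name in enumerate(sorted(os.listdir(tmp))):
        new_name = (
            template.replace("{run_created_at}", "2024_05_01_12_30_45")  # assumed run timestamp
            .replace("{index}", str(index))
            .replace("{extension}", "csv")
        )
        os.rename(os.path.join(tmp, file_name), os.path.join(tmp, new_name))

    print(sorted(os.listdir(tmp)))  # ['2024_05_01_12_30_45-0.csv', '2024_05_01_12_30_45-1.csv']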

syncmaster/worker/handlers/file/s3.py

Lines changed: 12 additions & 3 deletions
@@ -5,7 +5,7 @@

 from typing import TYPE_CHECKING

-from onetl.connection import SparkS3
+from onetl.connection import S3, SparkS3
 from onetl.file import FileDFReader

 from syncmaster.dto.connections import S3ConnectionDTO
@@ -19,7 +19,7 @@ class S3Handler(FileHandler):
     connection_dto: S3ConnectionDTO

     def connect(self, spark: SparkSession):
-        self.connection = SparkS3(
+        self.df_connection = SparkS3(
            host=self.connection_dto.host,
            port=self.connection_dto.port,
            access_key=self.connection_dto.access_key,
@@ -30,6 +30,15 @@ def connect(self, spark: SparkSession):
            extra=self.connection_dto.additional_params,
            spark=spark,
        ).check()
+        self.connection = S3(
+            host=self.connection_dto.host,
+            port=self.connection_dto.port,
+            access_key=self.connection_dto.access_key,
+            secret_key=self.connection_dto.secret_key,
+            bucket=self.connection_dto.bucket,
+            protocol=self.connection_dto.protocol,
+            region=self.connection_dto.region,
+        ).check()

     def read(self) -> DataFrame:
         from pyspark.sql.types import StructType
@@ -39,7 +48,7 @@ def read(self) -> DataFrame:
         options = {"inferSchema": True}

         reader = FileDFReader(
-            connection=self.connection,
+            connection=self.df_connection,
             format=self.transfer_dto.file_format,
             source_path=self.transfer_dto.directory_path,
             df_schema=StructType.fromJson(self.transfer_dto.df_schema) if self.transfer_dto.df_schema else None,
