# SPDX-FileCopyrightText: 2023-2024 MTS PJSC
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

from typing import TYPE_CHECKING

from onetl.connection import Iceberg
from onetl.hooks import slot, support_hooks

from syncmaster.dto.connections import IcebergRESTCatalogS3ConnectionDTO
from syncmaster.dto.transfers import IcebergRESTCatalogS3TransferDTO
from syncmaster.worker.handlers.db.base import DBHandler

if TYPE_CHECKING:
    from pyspark.sql import SparkSession
    from pyspark.sql.dataframe import DataFrame


@support_hooks
class IcebergHandler(DBHandler):
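    """Transfer handler for Apache Iceberg tables served by a REST catalog
    with an S3-compatible warehouse.

    Reading and writing are delegated to DBHandler; this class only wires up
    the onetl Iceberg connection and the Spark SQL specifics (RLIKE, backtick
    quoting, lower-cased column names).
    """
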
    connection: Iceberg
    connection_dto: IcebergRESTCatalogS3ConnectionDTO
    transfer_dto: IcebergRESTCatalogS3TransferDTO
    _operators = {
        # Spark SQL uses RLIKE for regular expression matching
        "regexp": "RLIKE",
        **DBHandler._operators,
    }

    def connect(self, spark: SparkSession):
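        """Build the onetl Iceberg connection and verify it is reachable."""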
        self.connection = Iceberg(
            spark=spark,
            catalog_name=self.transfer_dto.catalog_name,
            # table metadata is served by a REST catalog protected with basic auth
            catalog=Iceberg.RESTCatalog(
                uri=self.connection_dto.metastore_url,
                auth=Iceberg.RESTCatalog.BasicAuth(
                    user=self.connection_dto.metastore_username,
                    password=self.connection_dto.metastore_password,
                ),
            ),
            # table data files live in an S3-compatible warehouse
            warehouse=Iceberg.S3Warehouse(
                path=self.connection_dto.s3_warehouse_path,
                host=self.connection_dto.s3_host,
                port=self.connection_dto.s3_port,
                protocol=self.connection_dto.s3_protocol,
                bucket=self.connection_dto.s3_bucket,
                path_style_access=self.connection_dto.s3_path_style_access,
                region=self.connection_dto.s3_region,
                access_key=self.connection_dto.s3_access_key,
                secret_key=self.connection_dto.s3_secret_key,
            ),
        ).check()

    @slot
    def read(self) -> DataFrame:
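        """Read rows from the source table; re-declared with @slot so onetl hooks can intercept the call."""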
        return super().read()

    @slot
    def write(self, df: DataFrame) -> None:
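        """Write the DataFrame to the target table; re-declared with @slot so onetl hooks can intercept the call."""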
        return super().write(df)

    def _normalize_column_names(self, df: DataFrame) -> DataFrame:
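        """Rename every column to its lower-case form."""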
        for column_name in df.columns:
            df = df.withColumnRenamed(column_name, column_name.lower())
        return df

    def _make_rows_filter_expression(self, filters: list[dict]) -> str | None:
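        """Combine row-level filters into a Spark SQL expression, or None if there are none."""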
        expressions = []
        for row_filter in filters:
            op = self._operators[row_filter["type"]]
            field = self._quote_field(row_filter["field"])
            value = row_filter.get("value")

            # filters without a value map to unary SQL operators
            if value is None:
                expressions.append(f"{field} {op}")
                continue

            # emulate case-insensitive LIKE with LOWER() on both sides
            if op == "ILIKE":
                expressions.append(f"LOWER({field}) LIKE LOWER('{value}')")
            elif op == "NOT ILIKE":
                expressions.append(f"NOT LOWER({field}) LIKE LOWER('{value}')")
            else:
                expressions.append(f"{field} {op} '{value}'")

        return " AND ".join(expressions) or None

    def _get_reading_options(self) -> dict:
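        """No extra Spark read options are passed for Iceberg sources."""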
        return {}

    def _get_hwm_name(self) -> str:
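        """Build the HWM (high water mark) name used to track incremental reads."""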
        table = f"{self.transfer_dto.catalog_name}.{self.transfer_dto.table_name}"
        return f"{self.transfer_dto.id}_{self.connection_dto.type}_{table}"  # noqa: WPS237

    def _quote_field(self, field: str) -> str:
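        """Quote an identifier with backticks, as Spark SQL expects."""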
        return f"`{field}`"