Skip to content

Commit f0dc51d

Browse files
committed
Fix unit tests
1 parent 5f71c78 commit f0dc51d

File tree

3 files changed

+32
-18
lines changed

3 files changed

+32
-18
lines changed

.github/workflows/unit-test.yml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -67,7 +67,7 @@ jobs:
6767
- name: Run Unit Tests
6868
run: |
6969
source .env.local.test
70-
uv run coverage run -m pytest -vvv -s -k "test_unit or test_database"
70+
uv run coverage run -m pytest -vvv -s tests/test_unit tests/test_database
7171
7272
- name: Dump worker logs on failure
7373
if: failure()

tests/test_unit/utils.py

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,6 @@
44
from contextlib import asynccontextmanager
55
from typing import TYPE_CHECKING, Any
66

7-
from onetl.impl import LocalPath, RemotePath
87
from sqlalchemy.orm import joinedload
98

109
from syncmaster.db.models import (
@@ -26,6 +25,7 @@
2625

2726
from httpx import AsyncClient
2827
from onetl.connection import FileConnection
28+
from onetl.impl import RemotePath
2929
from sqlalchemy.ext.asyncio import AsyncSession
3030

3131
from syncmaster.server.settings import ServerAppSettings as Settings
@@ -249,6 +249,8 @@ def upload_files(
249249
remote_path: os.PathLike | str,
250250
file_connection: FileConnection,
251251
) -> list[RemotePath]:
252+
from onetl.impl import LocalPath, RemotePath
253+
252254
remote_files = []
253255

254256
local_path = LocalPath(source_path)

tests/utils.py

Lines changed: 28 additions & 16 deletions
Original file line number | Diff line number | Diff line change
@@ -1,25 +1,14 @@
1+
from __future__ import annotations
2+
13
import asyncio
24
import logging
35
from datetime import UTC, datetime
4-
from pathlib import Path
5-
from typing import Any
6+
from typing import TYPE_CHECKING, Any
67

78
from alembic.autogenerate import compare_metadata
8-
from alembic.config import Config
99
from alembic.runtime.environment import EnvironmentContext
1010
from alembic.runtime.migration import MigrationContext
1111
from alembic.script import ScriptDirectory
12-
from httpx import AsyncClient
13-
from onetl.connection import FileConnection
14-
from onetl.file import FileDownloader, FileUploader
15-
from pyspark.sql import DataFrame
16-
from pyspark.sql.functions import (
17-
col,
18-
date_format,
19-
date_trunc,
20-
from_unixtime,
21-
to_timestamp,
22-
)
2312
from sqlalchemy import Connection as AlchConnection
2413
from sqlalchemy import MetaData, pool, text
2514
from sqlalchemy.ext.asyncio import (
@@ -30,8 +19,17 @@
3019

3120
from syncmaster.db.models import Status
3221
from syncmaster.exceptions.base import EntityNotFoundError
33-
from syncmaster.server.settings import ServerAppSettings as Settings
34-
from tests.mocks import MockUser
22+
23+
if TYPE_CHECKING:
24+
from pathlib import Path
25+
26+
from alembic.config import Config
27+
from httpx import AsyncClient
28+
from onetl.connection import FileConnection
29+
from pyspark.sql import DataFrame
30+
31+
from syncmaster.server.settings import ServerAppSettings as Settings
32+
from tests.mocks import MockUser
3533

3634
logger = logging.getLogger(__name__)
3735

@@ -198,6 +196,13 @@ def truncate_datetime_to_seconds(
198196
init_df: DataFrame,
199197
transfer_direction: str | None = None,
200198
) -> tuple[DataFrame, DataFrame]:
199+
from pyspark.sql.functions import (
200+
col,
201+
date_format,
202+
date_trunc,
203+
to_timestamp,
204+
)
205+
201206
# Excel does not support datetime values with precision greater than milliseconds
202207
# Spark rounds datetime to nearest 3.33 milliseconds when writing to MSSQL: https://onetl.readthedocs.io/en/latest/connection/db_connection/mssql/types.html#id5
203208
if transfer_direction == "file_to_db" or transfer_direction is None:
@@ -212,6 +217,11 @@ def truncate_datetime_to_seconds(
212217

213218

214219
def round_datetime_to_seconds(df: DataFrame, init_df: DataFrame) -> tuple[DataFrame, DataFrame]:
220+
from pyspark.sql.functions import (
221+
col,
222+
from_unixtime,
223+
)
224+
215225
# Spark rounds milliseconds to seconds while writing to MySQL: https://onetl.readthedocs.io/en/latest/connection/db_connection/mysql/types.html#id5
216226
df = df.withColumn(
217227
"REGISTERED_AT",
@@ -225,6 +235,8 @@ def round_datetime_to_seconds(df: DataFrame, init_df: DataFrame) -> tuple[DataFr
225235

226236

227237
def add_increment_to_files_and_upload(file_connection: FileConnection, remote_path: str, tmp_path: Path) -> None:
238+
from onetl.file import FileDownloader, FileUploader
239+
228240
downloader = FileDownloader(
229241
connection=file_connection,
230242
source_path=remote_path,

0 commit comments

Comments (0)