12 | 12 | from tests.test_server.fixtures.factories.job import create_job |
13 | 13 | from tests.test_server.fixtures.factories.job_type import create_job_type |
14 | 14 | from tests.test_server.fixtures.factories.location import create_location |
| 15 | +from tests.test_server.fixtures.factories.user import create_user |
15 | 16 | from tests.test_server.utils.delete import clean_db |
16 | 17 |
17 | 18 | if TYPE_CHECKING: |
@@ -188,50 +189,95 @@ async def runs_with_same_parent( |
188 | 189 | @pytest_asyncio.fixture() |
189 | 190 | async def runs_search( |
190 | 191 | async_session_maker: Callable[[], AbstractAsyncContextManager[AsyncSession]], |
191 | | - user: User, |
192 | 192 | ) -> AsyncGenerator[dict[str | None, Run], None]: |
193 | | - job_kwargs = [ |
194 | | - {"name": "spark_application_name", "type": "SPARK_APPLICATION"}, |
195 | | - {"name": "airflow_dag_name", "type": "AIRFLOW_DAG"}, |
196 | | - ] |
197 | | - runs_kwargs = [ |
198 | | - {"external_id": "application_1638922609021_0001", "status": RunStatus.KILLED}, |
199 | | - { |
200 | | - "external_id": "application_1638922609021_0002", |
201 | | - "status": RunStatus.SUCCEEDED, |
202 | | - }, |
203 | | - {"external_id": "extract_task_0001", "status": RunStatus.STARTED}, |
204 | | - {"external_id": "extract_task_0002", "status": RunStatus.FAILED}, |
205 | | - ] |
206 | | - started_at = datetime.now(tz=UTC) |
| 193 | + created_at = datetime.now(tz=UTC) |
207 | 194 | async with async_session_maker() as async_session: |
208 | | - jobs = [] |
209 | | - for kwargs in job_kwargs: |
210 | | - location = await create_location(async_session) |
211 | | - job_type = await create_job_type(async_session, job_type_kwargs={"type": kwargs["type"]}) |
212 | | - jobs.append( |
213 | | - await create_job( |
214 | | - async_session, |
215 | | - location_id=location.id, |
216 | | - job_type_id=job_type.id, |
217 | | - job_kwargs=kwargs, |
218 | | - ), |
219 | | - ) |
220 | | - runs = [ |
221 | | - await create_run( |
222 | | - async_session, |
223 | | - run_kwargs={ |
224 | | - "created_at": started_at + timedelta(seconds=0.1 * i), |
225 | | - "job_id": job.id, |
226 | | - "started_by_user_id": user.id, |
227 | | - **kwargs, |
228 | | - }, |
229 | | - ) |
230 | | - for i, (job, kwargs) in enumerate(zip([job for job in jobs for _ in range(2)], runs_kwargs, strict=False)) |
231 | | - ] |
232 | | - |
233 | | - async_session.expunge_all() |
234 | | - |
| 195 | + spark_location = await create_location(async_session) |
| 196 | + airflow_location = await create_location(async_session) |
| 197 | + |
| 198 | + spark_user = await create_user(async_session) |
| 199 | + airflow_user = await create_user(async_session) |
| 200 | + |
| 201 | + spark_application_job_type = await create_job_type(async_session, job_type_kwargs={"type": "SPARK_APPLICATION"}) |
| 202 | + airflow_dag_job_type = await create_job_type(async_session, job_type_kwargs={"type": "AIRFLOW_DAG"}) |
| 203 | + airflow_task_job_type = await create_job_type(async_session, job_type_kwargs={"type": "AIRFLOW_TASK"}) |
| 204 | + |
| 205 | + spark_application = await create_job( |
| 206 | + async_session, |
| 207 | + location_id=spark_location.id, |
| 208 | + job_type_id=spark_application_job_type.id, |
| 209 | + job_kwargs={"name": "spark_application_name"}, |
| 210 | + ) |
| 211 | + airflow_dag = await create_job( |
| 212 | + async_session, |
| 213 | + location_id=airflow_location.id, |
| 214 | + job_type_id=airflow_dag_job_type.id, |
| 215 | + job_kwargs={"name": "airflow_dag_name"}, |
| 216 | + ) |
| 217 | + airflow_task = await create_job( |
| 218 | + async_session, |
| 219 | + location_id=airflow_location.id, |
| 220 | + job_type_id=airflow_task_job_type.id, |
| 221 | + job_kwargs={"name": "airflow_task_name"}, |
| 222 | + ) |
| 223 | + |
| 224 | + spark_app_run1 = await create_run( |
| 225 | + async_session, |
| 226 | + run_kwargs={ |
| 227 | + "job_id": spark_application.id, |
| 228 | + "started_by_user_id": spark_user.id, |
| 229 | + "external_id": "application_1638922609021_0001", |
| 230 | + "status": RunStatus.KILLED, |
| 231 | + "created_at": created_at + timedelta(seconds=0.1), |
| 232 | + "started_at": created_at + timedelta(seconds=1), |
| 233 | + "ended_at": created_at + timedelta(seconds=60), |
| 234 | + }, |
| 235 | + ) |
| 236 | + spark_app_run2 = await create_run( |
| 237 | + async_session, |
| 238 | + run_kwargs={ |
| 239 | + "job_id": spark_application.id, |
| 240 | + "started_by_user_id": spark_user.id, |
| 241 | + "external_id": "application_1638922609021_0002", |
| 242 | + "status": RunStatus.SUCCEEDED, |
| 243 | + "created_at": created_at + timedelta(seconds=0.2), |
| 244 | + "started_at": created_at + timedelta(seconds=2), |
| 245 | + "ended_at": created_at + timedelta(seconds=120), |
| 246 | + }, |
| 247 | + ) |
| 248 | + |
| 249 | + airflow_dag_run1 = await create_run( |
| 250 | + async_session, |
| 251 | + run_kwargs={ |
| 252 | + "job_id": airflow_dag.id, |
| 253 | + "started_by_user_id": airflow_user.id, |
| 254 | + "external_id": "dag_0001", |
| 255 | + "status": RunStatus.STARTED, |
| 256 | + "created_at": created_at + timedelta(seconds=0.3), |
| 257 | + "started_at": created_at + timedelta(seconds=3), |
| 258 | + "ended_at": None, |
| 259 | + }, |
| 260 | + ) |
| 261 | + airflow_task_run1 = await create_run( |
| 262 | + async_session, |
| 263 | + run_kwargs={ |
| 264 | + "job_id": airflow_task.id, |
| 265 | + "parent_run_id": airflow_dag_run1.id, |
| 266 | + "started_by_user_id": airflow_user.id, |
| 267 | + "external_id": "task_0001", |
| 268 | + "status": RunStatus.FAILED, |
| 269 | + "created_at": created_at + timedelta(seconds=0.4), |
| 270 | + "started_at": created_at + timedelta(seconds=4), |
| 271 | + "ended_at": created_at + timedelta(seconds=240), |
| 272 | + }, |
| 273 | + ) |
| 274 | + |
| 275 | + runs = [ |
| 276 | + spark_app_run1, |
| 277 | + spark_app_run2, |
| 278 | + airflow_dag_run1, |
| 279 | + airflow_task_run1, |
| 280 | + ] |
235 | 281 | yield {run.external_id: run for run in runs} |
236 | 282 |
237 | 283 | async with async_session_maker() as async_session: |
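For reference, a minimal sketch (not part of the diff) of how a test could assert on the reworked fixture: it yields a dict mapping external_id to Run, and the Airflow task run is linked to its DAG run via parent_run_id. This assumes pytest-asyncio is configured for the test module (e.g. via the asyncio marker), as in the surrounding suite.

import pytest


@pytest.mark.asyncio
async def test_runs_search_fixture_shape(runs_search):
    # the fixture maps external_id -> Run for four runs across three jobs
    assert set(runs_search) == {
        "application_1638922609021_0001",
        "application_1638922609021_0002",
        "dag_0001",
        "task_0001",
    }
    # the Airflow task run is attached to the DAG run via parent_run_id
    assert runs_search["task_0001"].parent_run_id == runs_search["dag_0001"].id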