|
1 | 1 | import asyncio |
2 | 2 | import os |
3 | 3 | import time |
| 4 | +from dataclasses import dataclass, field |
4 | 5 | from datetime import timedelta |
5 | | -from typing import cast |
| 6 | +from typing import Self, cast |
6 | 7 |
|
7 | 8 | from fastapi import FastAPI |
8 | 9 | from pydantic import BaseModel, Field |
9 | 10 |
|
10 | 11 | from fluid.scheduler import TaskRun, TaskScheduler, every, task |
11 | 12 | from fluid.scheduler.broker import RedisTaskBroker |
12 | 13 | from fluid.scheduler.endpoints import setup_fastapi |
| 14 | +from fluid.utils.http_client import HttpxClient |
| 15 | + |
| 16 | + |
@dataclass
class Deps:
    """Application-level dependencies injected into the task scheduler.

    Tasks retrieve the shared instance via :meth:`Deps.get` instead of
    constructing their own clients, so connections can be pooled/reused.
    """

    # Shared HTTP client; a fresh instance per Deps (mutable default via factory).
    http_client: HttpxClient = field(default_factory=HttpxClient)

    @classmethod
    def get(cls, context: TaskRun) -> Self:
        """Return the dependencies attached to the running task's manager.

        ``context.deps`` is untyped on TaskRun, so narrow it explicitly for
        static checkers; ``cast`` is a no-op at runtime.
        """
        return cast(Self, context.deps)
13 | 24 |
|
14 | 25 |
|
def task_scheduler() -> TaskScheduler:
    """Build a TaskScheduler wired with the app dependencies.

    Every task defined at module level is auto-registered, so adding a new
    ``@task`` function in this file is enough to schedule it.
    """
    scheduler = TaskScheduler(deps=Deps())
    scheduler.register_from_dict(globals())
    return scheduler
19 | 30 |
|
@@ -63,3 +74,16 @@ async def cpu_bound(context: TaskRun) -> None: |
63 | 74 | broker = cast(RedisTaskBroker, context.task_manager.broker) |
64 | 75 | redis = broker.redis_cli |
65 | 76 | await redis.setex(context.id, os.getpid(), 10) |
| 77 | + |
| 78 | + |
class Scrape(BaseModel):
    """Parameters accepted by the ``scrape`` task."""

    # Target URL; defaults to a public echo endpoint, handy for demos.
    url: str = Field(default="https://httpbin.org/get", description="URL to scrape")
| 81 | + |
| 82 | + |
@task
async def scrape(context: TaskRun[Scrape]) -> None:
    """Scrape a website"""
    # Fetch the URL from the task parameters using the shared HTTP client,
    # then log the response body.
    client = Deps.get(context).http_client
    response = await client.get(context.params.url, callback=True)
    body = await response.text()
    context.logger.info(body)
0 commit comments