|
6 | 6 | from datetime import timedelta
|
7 | 7 | from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast
|
8 | 8 |
|
| 9 | +from crawlee.storages import RequestList |
9 | 10 | from lazy_object_proxy import Proxy
|
10 | 11 | from pydantic import AliasChoices
|
11 | 12 | from typing_extensions import Self
|
12 | 13 |
|
13 | 14 | from apify_client import ApifyClientAsync
|
14 | 15 | from apify_shared.consts import ActorEnvVars, ActorExitCodes, ApifyEnvVars
|
15 | 16 | from apify_shared.utils import ignore_docs, maybe_extract_enum_member_value
|
16 |
| -from crawlee import service_container |
| 17 | +from crawlee import service_container, Request |
17 | 18 | from crawlee.events._types import Event, EventPersistStateData
|
18 | 19 |
|
19 | 20 | from apify._configuration import Configuration
|
@@ -974,6 +975,20 @@ async def create_proxy_configuration(
|
974 | 975 |
|
975 | 976 | return proxy_configuration
|
976 | 977 |
|
| 978 | + @staticmethod |
| 979 | + def create_request_list( |
| 980 | + *, |
| 981 | + actor_start_urls_input: dict |
| 982 | + ) ->RequestList: |
| 983 | + return RequestList(requests=[ |
| 984 | + Request.from_url( |
| 985 | + method=request_input.get("method"), |
| 986 | + url=request_input.get("url"), |
| 987 | + payload=request_input.get("payload", "").encode("utf-8"), |
| 988 | + headers=request_input.get("headers", {}), |
| 989 | + user_data=request_input.get("userData", {}), |
| 990 | + ) for request_input in actor_start_urls_input]) |
| 991 | + |
977 | 992 |
|
# Module-level singleton: `lazy_object_proxy.Proxy` defers constructing `_ActorType`
# until the first attribute access, and `cast` presents the proxy to type checkers
# as a plain `_ActorType` so `Actor.<method>` calls type-check normally.
Actor = cast(_ActorType, Proxy(_ActorType))
"""The entry point of the SDK, through which all the Actor operations should be done."""
|
0 commit comments