@@ -51,7 +51,6 @@
 
 from crawlee._types import ConcurrencySettings, HttpMethod, JsonSerializable
 from crawlee.base_storage_client._models import DatasetItemsListPage
-from crawlee.events._event_manager import EventManager
 from crawlee.http_clients import BaseHttpClient, HttpResponse
 from crawlee.proxy_configuration import ProxyConfiguration, ProxyInfo
 from crawlee.sessions import Session
@@ -113,9 +112,6 @@ class BasicCrawlerOptions(TypedDict, Generic[TCrawlingContext]):
     statistics: NotRequired[Statistics[StatisticsState]]
     """A custom `Statistics` instance, allowing the use of non-default configuration."""
 
-    event_manager: NotRequired[EventManager]
-    """A custom `EventManager` instance, allowing the use of non-default configuration."""
-
     configure_logging: NotRequired[bool]
     """If True, the crawler will set up logging infrastructure automatically."""
 
@@ -178,7 +174,6 @@ def __init__(
         retry_on_blocked: bool = True,
         proxy_configuration: ProxyConfiguration | None = None,
         statistics: Statistics | None = None,
-        event_manager: EventManager | None = None,
         configure_logging: bool = True,
         max_crawl_depth: int | None = None,
         abort_on_error: bool = False,
@@ -206,7 +201,6 @@ def __init__(
             retry_on_blocked: If True, the crawler attempts to bypass bot protections automatically.
             proxy_configuration: HTTP proxy configuration used when making requests.
             statistics: A custom `Statistics` instance, allowing the use of non-default configuration.
-            event_manager: A custom `EventManager` instance, allowing the use of non-default configuration.
             configure_logging: If True, the crawler will set up logging infrastructure automatically.
             max_crawl_depth: Maximum crawl depth. If set, the crawler will stop crawling after reaching this depth.
             abort_on_error: If True, the crawler stops immediately when any request handler error occurs.
@@ -247,9 +241,8 @@ def __init__(
 
         self._tld_extractor = TLDExtract(cache_dir=tempfile.TemporaryDirectory().name)
 
-        self._event_manager = event_manager or service_container.get_event_manager()
+        self._event_manager = service_container.get_event_manager()
         self._snapshotter = Snapshotter(
-            self._event_manager,
             max_memory_size=ByteSize.from_mb(config.memory_mbytes) if config.memory_mbytes else None,
             available_memory_ratio=config.available_memory_ratio,
         )
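Note for downstream users: with this change, a custom `EventManager` can no longer be passed to `BasicCrawler` directly; the crawler (and, given the dropped positional argument, presumably the `Snapshotter` as well) now resolves it from the service container. A minimal migration sketch follows, assuming `crawlee.service_container` exposes a `set_event_manager` registration helper and that `LocalEventManager` is importable from `crawlee.events`; neither appears in this diff, so verify both against your installed version.

```python
import asyncio

from crawlee import service_container
from crawlee.basic_crawler import BasicCrawler
from crawlee.events import LocalEventManager  # assumed export path


async def main() -> None:
    # Previously: BasicCrawler(event_manager=LocalEventManager(), ...)
    # Now the crawler calls service_container.get_event_manager() internally,
    # so register the custom manager with the container before construction.
    service_container.set_event_manager(LocalEventManager())  # assumed helper

    crawler = BasicCrawler()
    # ... register request handlers and run the crawler as usual.


if __name__ == '__main__':
    asyncio.run(main())
```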