6 files changed: +10 −10 lines
@@ -151,4 +151,4 @@ jobs:
151
151
version_number : ${{ needs.should_release.outputs.version_number }}
152
152
153
153
# TODO: add job for publish package to Conda
154
- # https://github.com/apify/crawlee-py/issues/104
154
+ # https://github.com/apify/crawlee-python/issues/104
Original file line number Diff line number Diff line change @@ -11,7 +11,7 @@ license = "Apache-2.0"
11
11
readme = " README.md"
12
12
packages = [{ include = " crawlee" , from = " src" }]
13
13
classifiers = [
14
- "Development Status :: 3 - Alpha",
14
+ "Development Status :: 4 - Beta",
15
15
" Intended Audience :: Developers" ,
16
16
" License :: OSI Approved :: Apache Software License" ,
17
17
" Operating System :: OS Independent" ,
@@ -35,10 +35,10 @@ keywords = [
35
35
[tool .poetry .urls ]
36
36
"Homepage" = " https://crawlee.dev/python"
37
37
"Apify Homepage" = " https://apify.com"
38
- "Changelog" = "https://github.com/apify/crawlee-py/blob/master/CHANGELOG.md"
38
+ "Changelog" = "https://crawlee.dev/python/docs/changelog"
39
39
"Documentation" = " https://crawlee.dev/python/docs/quick-start"
40
- "Issue Tracker" = "https://github.com/apify/crawlee-py/issues"
41
- "Repository" = "https://github.com/apify/crawlee-py"
40
+ "Issue Tracker" = "https://github.com/apify/crawlee-python/issues"
41
+ "Repository" = "https://github.com/apify/crawlee-python"
42
42
43
43
# We use inclusive ordered comparison clauses for external packages intentionally in order to enhance Crawlee's
44
44
# compatibility with external packages. This decision was discussed in detail in the following PR:
Original file line number Diff line number Diff line change @@ -267,7 +267,7 @@ def _snapshot_client(self) -> None:
267
267
"""
268
268
# TODO: This is just a dummy placeholder. It can be implemented once `StorageClient` is ready.
269
269
# Attribute `self._client_rate_limit_error_retry_count` will be used here.
270
- # https://github.com/apify/crawlee-py/issues/60
270
+ # https://github.com/apify/crawlee-python/issues/60
271
271
272
272
error_count = 0
273
273
snapshot = ClientSnapshot (error_count = error_count , max_error_count = self ._max_client_errors )
Original file line number Diff line number Diff line change @@ -102,10 +102,10 @@ class Request(BaseRequestData):
102
102
id : str
103
103
104
104
json_ : str | None = None # TODO: get rid of this
105
- # https://github.com/apify/crawlee-py/issues/94
105
+ # https://github.com/apify/crawlee-python/issues/94
106
106
107
107
order_no : Decimal | None = None # TODO: get rid of this
108
- # https://github.com/apify/crawlee-py/issues/94
108
+ # https://github.com/apify/crawlee-python/issues/94
109
109
110
110
@classmethod
111
111
def from_url (
Original file line number Diff line number Diff line change 1
1
# TODO: type ignores and crawlee_storage_dir
2
- # https://github.com/apify/crawlee-py/issues/146
2
+ # https://github.com/apify/crawlee-python/issues/146
3
3
4
4
from __future__ import annotations
5
5
Original file line number Diff line number Diff line change 1
1
# TODO: type ignores and crawlee_storage_dir
2
- # https://github.com/apify/crawlee-py/issues/146
2
+ # https://github.com/apify/crawlee-python/issues/146
3
3
4
4
from __future__ import annotations
5
5
You can’t perform that action at this time.
0 commit comments