diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 565092c9..ee9681eb 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -130,7 +130,7 @@ make run-doc
 Publishing new versions to [PyPI](https://pypi.org/project/apify) is automated through GitHub Actions.
 
 - **Beta releases**: On each commit to the master branch, a new beta release is automatically published. The version number is determined based on the latest release and conventional commits. The beta version suffix is incremented by 1 from the last beta release on PyPI.
-- **Stable releases**: A stable version release may be created by triggering the `release` GitHub Actions workflow. The version number is determined based on the latest release and conventional commits (`auto` release type), or it may be overriden using the `custom` release type.
+- **Stable releases**: A stable version release may be created by triggering the `release` GitHub Actions workflow. The version number is determined based on the latest release and conventional commits (`auto` release type), or it may be overridden using the `custom` release type.
 
 ### Publishing to PyPI manually
 
diff --git a/docs/02_concepts/code/actor_charge.py b/docs/02_concepts/code/actor_charge.py
index d8d771a3..3478f60f 100644
--- a/docs/02_concepts/code/actor_charge.py
+++ b/docs/02_concepts/code/actor_charge.py
@@ -4,7 +4,7 @@
 async def main() -> None:
     async with Actor:
         # highlight-start
-        # Charge for a single occurence of an event
+        # Charge for a single occurrence of an event
         await Actor.charge(event_name='init')
         # highlight-end
 
diff --git a/docs/03_guides/06_scrapy.mdx b/docs/03_guides/06_scrapy.mdx
index 523a2423..95f34fae 100644
--- a/docs/03_guides/06_scrapy.mdx
+++ b/docs/03_guides/06_scrapy.mdx
@@ -109,6 +109,6 @@ In this guide you learned how to use Scrapy in Apify Actors. You can now start b
 
 - [Apify CLI: Integrating Scrapy projects](https://docs.apify.com/cli/docs/integrating-scrapy)
 - [Apify: Run Scrapy spiders on Apify](https://apify.com/run-scrapy-in-cloud)
-- [Apify templates: Pyhon Actor Scrapy template](https://apify.com/templates/python-scrapy)
+- [Apify templates: Python Actor Scrapy template](https://apify.com/templates/python-scrapy)
 - [Apify store: Scrapy Books Example Actor](https://apify.com/vdusek/scrapy-books-example)
 - [Scrapy: Official documentation](https://docs.scrapy.org/)
diff --git a/docs/03_guides/code/01_beautifulsoup_httpx.py b/docs/03_guides/code/01_beautifulsoup_httpx.py
index 157948d0..5dbfab2a 100644
--- a/docs/03_guides/code/01_beautifulsoup_httpx.py
+++ b/docs/03_guides/code/01_beautifulsoup_httpx.py
@@ -37,7 +37,7 @@ async def main() -> None:
             url = request.url
 
             if not isinstance(request.user_data['depth'], (str, int)):
-                raise TypeError('Request.depth is an enexpected type.')
+                raise TypeError('Request.depth is an unexpected type.')
 
             depth = int(request.user_data['depth'])
             Actor.log.info(f'Scraping {url} (depth={depth}) ...')
diff --git a/docs/03_guides/code/03_playwright.py b/docs/03_guides/code/03_playwright.py
index 0ecc7d45..3eecb4ac 100644
--- a/docs/03_guides/code/03_playwright.py
+++ b/docs/03_guides/code/03_playwright.py
@@ -50,7 +50,7 @@ async def main() -> None:
             url = request.url
 
             if not isinstance(request.user_data['depth'], (str, int)):
-                raise TypeError('Request.depth is an enexpected type.')
+                raise TypeError('Request.depth is an unexpected type.')
 
             depth = int(request.user_data['depth'])
             Actor.log.info(f'Scraping {url} (depth={depth}) ...')
diff --git a/docs/03_guides/code/04_selenium.py b/docs/03_guides/code/04_selenium.py
index 0f919c71..4b427a7a 100644
--- a/docs/03_guides/code/04_selenium.py
+++ b/docs/03_guides/code/04_selenium.py
@@ -58,7 +58,7 @@ async def main() -> None:
             url = request.url
 
             if not isinstance(request.user_data['depth'], (str, int)):
-                raise TypeError('Request.depth is an enexpected type.')
+                raise TypeError('Request.depth is an unexpected type.')
 
             depth = int(request.user_data['depth'])
             Actor.log.info(f'Scraping {url} (depth={depth}) ...')
diff --git a/docs/03_guides/code/scrapy_project/src/spiders/title.py b/docs/03_guides/code/scrapy_project/src/spiders/title.py
index 408d3106..7223a53d 100644
--- a/docs/03_guides/code/scrapy_project/src/spiders/title.py
+++ b/docs/03_guides/code/scrapy_project/src/spiders/title.py
@@ -32,7 +32,7 @@ def __init__(
         *args: Any,
         **kwargs: Any,
     ) -> None:
-        """A default costructor.
+        """A default constructor.
 
         Args:
             start_urls: URLs to start the scraping from.
diff --git a/src/apify/_configuration.py b/src/apify/_configuration.py
index 53d4afe8..10109585 100644
--- a/src/apify/_configuration.py
+++ b/src/apify/_configuration.py
@@ -464,7 +464,7 @@ def input_key_candidates(self) -> set[str]:
     def get_global_configuration(cls) -> Configuration:
         """Retrieve the global instance of the configuration.
 
-        This method ensures that ApifyConfigration is returned, even if CrawleeConfiguration was set in the
+        This method ensures that ApifyConfiguration is returned, even if CrawleeConfiguration was set in the
         service locator.
         """
         global_configuration = service_locator.get_configuration()
@@ -474,7 +474,7 @@ def get_global_configuration(cls) -> Configuration:
             return global_configuration
 
         logger.warning(
-            'Non Apify Configration is set in the `service_locator` in the SDK context. '
+            'Non Apify Configuration is set in the `service_locator` in the SDK context. '
             'It is recommended to set `apify.Configuration` explicitly as early as possible by using '
             'service_locator.set_configuration'
         )
diff --git a/tests/unit/actor/test_actor_env_helpers.py b/tests/unit/actor/test_actor_env_helpers.py
index 80632f83..3d78f94e 100644
--- a/tests/unit/actor/test_actor_env_helpers.py
+++ b/tests/unit/actor/test_actor_env_helpers.py
@@ -108,7 +108,7 @@ async def test_get_env_with_randomized_env_vars(monkeypatch: pytest.MonkeyPatch)
         expected_get_env[list_get_env_var] = random.sample(available_values, expected_value_count)
         monkeypatch.setenv(list_env_var, ','.join(expected_get_env[list_get_env_var]))
 
-        # Test behavior with mising env var in case of empty list
+        # Test behavior with missing env var in case of empty list
        if expected_value_count == 0 and random.random() < 0.5:
             monkeypatch.delenv(list_env_var)
             expected_get_env[list_get_env_var] = None
diff --git a/tests/unit/actor/test_configuration.py b/tests/unit/actor/test_configuration.py
index 97500eab..7f01c48e 100644
--- a/tests/unit/actor/test_configuration.py
+++ b/tests/unit/actor/test_configuration.py
@@ -237,7 +237,7 @@ def test_apify_configuration_is_always_used(caplog: pytest.LogCaptureFixture) ->
     assert Actor.configuration.max_used_cpu_ratio == max_used_cpu_ratio
     assert isinstance(Actor.configuration, ApifyConfiguration)
     assert (
-        'Non Apify Configration is set in the `service_locator` in the SDK context. '
+        'Non Apify Configuration is set in the `service_locator` in the SDK context. '
         'It is recommended to set `apify.Configuration` explicitly as early as possible by using '
         'service_locator.set_configuration'
     ) in caplog.messages
diff --git a/tests/unit/actor/test_request_list.py b/tests/unit/actor/test_request_list.py
index 3ed751c0..c5ec3f32 100644
--- a/tests/unit/actor/test_request_list.py
+++ b/tests/unit/actor/test_request_list.py
@@ -198,7 +198,7 @@ async def test_request_list_open_name() -> None:
         pytest.param('http://www.something.com', id='standard_http_with_www'),
         pytest.param('https://www.something.net', id='standard_https_with_www'),
         pytest.param('http://nowww.cz', id='http_no_www'),
-        pytest.param('https://with-hypen.com', id='https_with_hyphen'),
+        pytest.param('https://with-hyphen.com', id='https_with_hyphen'),
         pytest.param('http://number1.com', id='http_with_number_in_domain'),
         pytest.param('http://www.number.123.abc', id='http_with_subdomains_and_numbers'),
         pytest.param('http://many.dots.com', id='http_with_multiple_subdomains'),
diff --git a/tests/unit/test_proxy_configuration.py b/tests/unit/test_proxy_configuration.py
index 72020ad0..44707320 100644
--- a/tests/unit/test_proxy_configuration.py
+++ b/tests/unit/test_proxy_configuration.py
@@ -172,7 +172,7 @@ async def test_rotating_custom_urls() -> None:
 
 
 async def test_rotating_custom_urls_with_sessions() -> None:
-    sessions = ['sesssion_01', 'sesssion_02', 'sesssion_03', 'sesssion_04', 'sesssion_05', 'sesssion_06']
+    sessions = ['session_01', 'session_02', 'session_03', 'session_04', 'session_05', 'session_06']
     proxy_urls: list[str | None] = ['http://proxy.com:1111', 'http://proxy.com:2222', 'http://proxy.com:3333']
 
     proxy_configuration = ProxyConfiguration(proxy_urls=proxy_urls)
@@ -328,7 +328,7 @@ async def test_new_proxy_info_rotating_urls() -> None:
 
 
 async def test_new_proxy_info_rotating_urls_with_sessions() -> None:
-    sessions = ['sesssion_01', 'sesssion_02', 'sesssion_03', 'sesssion_04', 'sesssion_05', 'sesssion_06']
+    sessions = ['session_01', 'session_02', 'session_03', 'session_04', 'session_05', 'session_06']
     proxy_urls: list[str | None] = ['http://proxy.com:1111', 'http://proxy.com:2222', 'http://proxy.com:3333']
 
     proxy_configuration = ProxyConfiguration(proxy_urls=proxy_urls)
@@ -443,7 +443,7 @@ async def test_initialize_with_manual_password(monkeypatch: pytest.MonkeyPatch,
 
 
 @pytest.mark.usefixtures('patched_impit_client')
-async def test_initialize_prefering_password_from_env_over_calling_api(
+async def test_initialize_preferring_password_from_env_over_calling_api(
     monkeypatch: pytest.MonkeyPatch,
     httpserver: HTTPServer,
     patched_apify_client: ApifyClientAsync,