Skip to content

Commit ea46689

Browse files
authored
Merge pull request #15 from cds-1993/main
Update 002_源码实现_同步版本.py
2 parents c636398 + c238978 commit ea46689

File tree

2 files changed

+3
-3
lines changed

2 files changed

+3
-3
lines changed

源代码/爬虫入门/09_爬虫入门实战2_动态数据提取/002_源码实现_同步版本.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -118,7 +118,7 @@ def run_crawler(save_file_name: str) -> None:
118118
"""
119119
# step1 获取最大数据总量
120120
max_total: int = get_max_total_count()
121-
# step2 遍历每一夜数据并解析存储到数据容器中
121+
# step2 遍历每一页数据并解析存储到数据容器中
122122
data_list: List[SymbolContent] = fetch_currency_data_list(max_total)
123123
# step3 将数据容器中的数据保存csv
124124
save_data_to_csv(save_file_name, data_list)

源代码/爬虫入门/09_爬虫入门实战2_动态数据提取/003_源码实现_异步版本.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ async def send_request(page_start: int, page_size: int) -> Dict[str, Any]:
7373
response = await client.post(url=req_url, params=common_params, json=common_payload_data, headers=headers,
7474
timeout=30)
7575
if response.status_code != 200:
76-
raise Exception("发起请求是发生异常,请求发生错误,原因:", response.text)
76+
raise Exception("发起请求时发生异常,请求发生错误,原因:", response.text)
7777
try:
7878
response_dict: Dict = response.json()
7979
return response_dict
@@ -123,7 +123,7 @@ async def run_crawler(save_file_name: str) -> None:
123123
"""
124124
# step1 获取最大数据总量
125125
max_total: int = await get_max_total_count()
126-
# step2 遍历每一夜数据并解析存储到数据容器中
126+
# step2 遍历每一页数据并解析存储到数据容器中
127127
data_list: List[SymbolContent] = await fetch_currency_data_list(max_total)
128128
# step3 将数据容器中的数据保存csv
129129
await save_data_to_csv(save_file_name, data_list)

0 commit comments

Comments (0)