|
17 | 17 |
|
18 | 18 | async def parse_cmd(): |
19 | 19 | # 读取command arg |
20 | | - parser = argparse.ArgumentParser(description='Media crawler program.') |
21 | | - parser.add_argument('--platform', type=str, help='Media platform select (xhs | dy | ks | bili | wb | tieba | zhihu)', |
| 20 | + parser = argparse.ArgumentParser(description='Media crawler program. / 媒体爬虫程序') |
| 21 | + parser.add_argument('--platform', type=str, |
| 22 | + help='Media platform select / 选择媒体平台 (xhs=小红书 | dy=抖音 | ks=快手 | bili=哔哩哔哩 | wb=微博 | tieba=百度贴吧 | zhihu=知乎)', |
22 | 23 | choices=["xhs", "dy", "ks", "bili", "wb", "tieba", "zhihu"], default=config.PLATFORM) |
23 | | - parser.add_argument('--lt', type=str, help='Login type (qrcode | phone | cookie)', |
| 24 | + parser.add_argument('--lt', type=str, |
| 25 | + help='Login type / 登录方式 (qrcode=二维码 | phone=手机号 | cookie=Cookie)', |
24 | 26 | choices=["qrcode", "phone", "cookie"], default=config.LOGIN_TYPE) |
25 | | - parser.add_argument('--type', type=str, help='crawler type (search | detail | creator)', |
| 27 | + parser.add_argument('--type', type=str, |
| 28 | + help='Crawler type / 爬取类型 (search=搜索 | detail=详情 | creator=创作者)', |
26 | 29 | choices=["search", "detail", "creator"], default=config.CRAWLER_TYPE) |
27 | 30 | parser.add_argument('--start', type=int, |
28 | | - help='number of start page', default=config.START_PAGE) |
| 31 | + help='Number of start page / 起始页码', default=config.START_PAGE) |
29 | 32 | parser.add_argument('--keywords', type=str, |
30 | | - help='please input keywords', default=config.KEYWORDS) |
| 33 | + help='Please input keywords / 请输入关键词', default=config.KEYWORDS) |
31 | 34 | parser.add_argument('--get_comment', type=str2bool, |
32 | | - help='''whether to crawl level one comment, supported values case insensitive ('yes', 'true', 't', 'y', '1', 'no', 'false', 'f', 'n', '0')''', default=config.ENABLE_GET_COMMENTS) |
| 35 | + help='''Whether to crawl level one comment / 是否爬取一级评论, supported values case insensitive / 支持的值(不区分大小写) ('yes', 'true', 't', 'y', '1', 'no', 'false', 'f', 'n', '0')''', default=config.ENABLE_GET_COMMENTS) |
33 | 36 | parser.add_argument('--get_sub_comment', type=str2bool, |
34 | | - help=''''whether to crawl level two comment, supported values case insensitive ('yes', 'true', 't', 'y', '1', 'no', 'false', 'f', 'n', '0')''', default=config.ENABLE_GET_SUB_COMMENTS) |
| 37 | + help='''Whether to crawl level two comment / 是否爬取二级评论, supported values case insensitive / 支持的值(不区分大小写) ('yes', 'true', 't', 'y', '1', 'no', 'false', 'f', 'n', '0')''', default=config.ENABLE_GET_SUB_COMMENTS) |
35 | 38 | parser.add_argument('--save_data_option', type=str, |
36 | | - help='where to save the data (csv or db or json or sqlite)', choices=['csv', 'db', 'json', 'sqlite'], default=config.SAVE_DATA_OPTION) |
| 39 | + help='Where to save the data / 数据保存方式 (csv=CSV文件 | db=MySQL数据库 | json=JSON文件 | sqlite=SQLite数据库)', |
| 40 | + choices=['csv', 'db', 'json', 'sqlite'], default=config.SAVE_DATA_OPTION) |
37 | 41 | parser.add_argument('--cookies', type=str, |
38 | | - help='cookies used for cookie login type', default=config.COOKIES) |
| 42 | + help='Cookies used for cookie login type / Cookie登录方式使用的Cookie值', default=config.COOKIES) |
39 | 43 |
|
40 | 44 | args = parser.parse_args() |
41 | 45 |
|
|
0 commit comments