@@ -200,6 +200,9 @@ def list_tags():
 @main.command("fetch-unread")
 @click.option("-f", "--folder", required=True, help="Folder which articles belong to")
 @click.option("-t", "--tags", help="Tag(s) for filtering, separate with comma")
+@click.option(
+    "--batch-size", type=int, default=50, help="Maximum number of articles per API request"
+)
 @click.option("-o", "--outfile", required=True, help="Filename to save articles")
 @click.option(
     "--out-format",
@@ -208,14 +211,14 @@ def list_tags():
     help="Format of output file, default: json",
 )
 @catch_error
-def fetch_unread(folder, tags, outfile, out_format):
+def fetch_unread(folder, tags, batch_size, outfile, out_format):
     """Fetch unread articles"""
     client = get_client()

     tag_list = [] if not tags else tags.split(",")
     fout = codecs.open(outfile, mode="w", encoding="utf-8")
     writer = csv.writer(fout, delimiter=",") if out_format == "csv" else None
-    for idx, article in enumerate(client.fetch_unread(folder=folder, tags=tag_list)):
+    for idx, article in enumerate(client.fetch_unread(folder=folder, tags=tag_list, n=batch_size)):
         if idx > 0 and (idx % 10) == 0:
             LOGGER.info("fetched %d articles", idx)
         title = article.title
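
The new n=batch_size argument passed to the client above is assumed to control the page size of a Google Reader-style paginated stream request. The sketch below is hypothetical (session, base_url, and the parameter names are assumptions, not this project's code); it only illustrates what a per-request cap of 50 articles would mean.

    # Hypothetical pagination sketch: request at most `page_size` items per call
    # and follow the API's continuation token until the stream is exhausted.
    def iter_stream(session, base_url, stream_id, page_size=50, exclude_read=False):
        """Yield stream items, fetching at most `page_size` per API request."""
        params = {"n": page_size}
        if exclude_read:
            params["xt"] = "user/-/state/com.google/read"  # skip already-read items
        continuation = None
        while True:
            if continuation:
                params["c"] = continuation
            resp = session.get(f"{base_url}/stream/contents/{stream_id}", params=params)
            data = resp.json()
            yield from data.get("items", [])
            continuation = data.get("continuation")
            if not continuation:
                return
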
@@ -391,6 +394,10 @@ def get_subscriptions(outfile, folder, out_format):

 @main.command("fetch-articles")
 @click.option("-i", "--stream-id", required=True, help="Stream ID which you want to fetch")
+@click.option(
+    "--batch-size", type=int, default=50, help="Maximum number of articles per API request"
+)
+@click.option("--only-unread", is_flag=True, help="Fetch unread articles only")
 @click.option("-o", "--outfile", required=True, help="Filename to save results")
 @click.option(
     "--out-format",
@@ -399,7 +406,7 @@ def get_subscriptions(outfile, folder, out_format):
     help="Format of output, default: json",
 )
 @catch_error
-def fetch_articles(outfile, stream_id, out_format):
+def fetch_articles(outfile, stream_id, batch_size, only_unread, out_format):
     """Fetch articles by stream id"""
     client = get_client()

@@ -409,7 +416,9 @@ def fetch_articles(outfile, stream_id, out_format):
     writer = csv.DictWriter(fout, ["title", "content"], delimiter=",", quoting=csv.QUOTE_ALL)
     writer.writeheader()

-    for idx, article in enumerate(client.get_stream_contents(stream_id)):
+    for idx, article in enumerate(
+        client.fetch_articles(stream_id=stream_id, n=batch_size, unread=only_unread)
+    ):
         if idx > 0 and (idx % 10) == 0:
             LOGGER.info("fetched %d articles", idx)

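When --out-format csv is chosen, the rows written above use the "title" and "content" columns. A minimal sketch for reading that file back (the file name is illustrative, not part of the change):

    # Read back the CSV produced by `fetch-articles --out-format csv`.
    import codecs
    import csv

    def load_articles(path):
        with codecs.open(path, mode="r", encoding="utf-8") as fin:
            return list(csv.DictReader(fin))  # dicts keyed by "title" and "content"

    # e.g. articles = load_articles("articles.csv")
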
@@ -469,6 +478,9 @@ def dedupe(folder, thresh):
 @main.command("fetch-starred")
 @click.option("-f", "--folder", help="Folder which articles belong to")
 @click.option("-t", "--tags", help="Tag(s) for filtering, separate with comma")
+@click.option(
+    "--batch-size", type=int, default=50, help="Maximum number of articles per API request"
+)
 @click.option(
     "-o", "--outfile", help="Filename to save articles, required when output format is `csv`"
 )
@@ -484,7 +496,7 @@ def dedupe(folder, thresh):
     help="Format of output file, default: json",
 )
 @catch_error
-def fetch_starred(folder, tags, outfile, outdir, limit, save_image, out_format):
+def fetch_starred(folder, tags, batch_size, outfile, outdir, limit, save_image, out_format):
     """Fetch starred articles"""
     client = get_client()

@@ -506,7 +518,7 @@ def fetch_starred(folder, tags, outfile, outdir, limit, save_image, out_format):
     tag_list = [] if not tags else tags.split(",")
     url_to_image = {}
     fetched_count = 0
-    for article in client.fetch_starred(folder=folder, tags=tag_list, limit=limit):
+    for article in client.fetch_starred(folder=folder, tags=tag_list, limit=limit, n=batch_size):
         if limit and fetched_count >= limit:
             break

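--limit caps the total number of starred articles kept, while --batch-size only caps how many the API returns per request, so unless the client clamps it internally, a page size larger than the limit fetches rows that the loop above immediately discards. A hypothetical helper (not part of this change) that picks a sensible page size:

    # Hypothetical helper: keep the per-request page size no larger than the
    # overall limit so the final request does not over-fetch.
    def effective_batch_size(limit, batch_size=50):
        if limit and limit < batch_size:
            return limit
        return batch_size

    # effective_batch_size(10) -> 10; effective_batch_size(200, 50) -> 50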