22from __future__ import print_function , unicode_literals
33
44import logging
5- from uuid import uuid4
65from datetime import datetime
76from operator import itemgetter
8- try : # python2
9- from urlparse import urljoin
7+ from uuid import uuid4
8+
9+ try : # python2
1010 from urllib import quote_plus
11- except ImportError : # python3
11+
12+ from urlparse import urljoin
13+ except ImportError : # python3
1214 from urllib .parse import urljoin , quote_plus
1315
1416import requests
1517
16- from .consts import BASE_URL
17- from .exception import NotLoginError , APIError
1818from .article import Article
19+ from .consts import BASE_URL
20+ from .exception import APIError , NotLoginError
1921from .subscription import Subscription
2022
21-
2223LOGGER = logging .getLogger (__name__ )
2324
2425
@@ -39,19 +40,22 @@ class InoreaderClient(object):
3940 LIKED_TAG = 'user/-/state/com.google/like'
4041 BROADCAST_TAG = 'user/-/state/com.google/broadcast'
4142
42- def __init__ (self , app_id , app_key , access_token , refresh_token ,
43- expires_at , config_manager = None ):
43+ def __init__ (
44+ self , app_id , app_key , access_token , refresh_token , expires_at , config_manager = None
45+ ):
4446 self .app_id = app_id
4547 self .app_key = app_key
4648 self .access_token = access_token
4749 self .refresh_token = refresh_token
4850 self .expires_at = float (expires_at )
4951 self .session = requests .Session ()
50- self .session .headers .update ({
51- 'AppId' : self .app_id ,
52- 'AppKey' : self .app_key ,
53- 'Authorization' : 'Bearer {}' .format (self .access_token )
54- })
52+ self .session .headers .update (
53+ {
54+ 'AppId' : self .app_id ,
55+ 'AppKey' : self .app_key ,
56+ 'Authorization' : 'Bearer {}' .format (self .access_token ),
57+ }
58+ )
5559 self .config_manager = config_manager
5660 self .proxies = self .config_manager .proxies if config_manager else None
5761
@@ -151,12 +155,7 @@ def __get_stream_contents(self, stream_id, continuation=''):
151155 self .check_token ()
152156
153157 url = urljoin (BASE_URL , self .STREAM_CONTENTS_PATH + quote_plus (stream_id ))
154- params = {
155- 'n' : 50 , # default 20, max 1000
156- 'r' : '' ,
157- 'c' : continuation ,
158- 'output' : 'json'
159- }
158+ params = {'n' : 50 , 'r' : '' , 'c' : continuation , 'output' : 'json' } # default 20, max 1000
160159 response = self .parse_response (self .session .post (url , params = params , proxies = self .proxies ))
161160 if 'continuation' in response :
162161 return response ['items' ], response ['continuation' ]
@@ -168,10 +167,7 @@ def fetch_articles(self, folder=None, tags=None, unread=True, starred=False, lim
168167
169168 url = urljoin (BASE_URL , self .STREAM_CONTENTS_PATH )
170169 if folder :
171- url = urljoin (
172- url ,
173- quote_plus (self .GENERAL_TAG_TEMPLATE .format (folder ))
174- )
170+ url = urljoin (url , quote_plus (self .GENERAL_TAG_TEMPLATE .format (folder )))
175171
176172 params = {'c' : str (uuid4 ())}
177173 if unread :
@@ -183,10 +179,13 @@ def fetch_articles(self, folder=None, tags=None, unread=True, starred=False, lim
183179 fetched_count = 0
184180 response = self .parse_response (self .session .post (url , params = params , proxies = self .proxies ))
185181 for data in response ['items' ]:
186- categories = set ([
187- category .split ('/' )[- 1 ] for category in data .get ('categories' , [])
188- if category .find ('label' ) > 0
189- ])
182+ categories = set (
183+ [
184+ category .split ('/' )[- 1 ]
185+ for category in data .get ('categories' , [])
186+ if category .find ('label' ) > 0
187+ ]
188+ )
190189 if tags and not categories .issuperset (set (tags )):
191190 continue
192191
@@ -202,10 +201,13 @@ def fetch_articles(self, folder=None, tags=None, unread=True, starred=False, lim
202201 self .session .post (url , params = params , proxies = self .proxies )
203202 )
204203 for data in response ['items' ]:
205- categories = set ([
206- category .split ('/' )[- 1 ] for category in data .get ('categories' , [])
207- if category .find ('label' ) > 0
208- ])
204+ categories = set (
205+ [
206+ category .split ('/' )[- 1 ]
207+ for category in data .get ('categories' , [])
208+ if category .find ('label' ) > 0
209+ ]
210+ )
209211 if tags and not categories .issuperset (set (tags )):
210212 continue
211213 yield Article .from_json (data )
@@ -229,30 +231,22 @@ def add_general_label(self, articles, label):
229231 url = urljoin (BASE_URL , self .EDIT_TAG_PATH )
230232 for start in range (0 , len (articles ), 10 ):
231233 end = min (start + 10 , len (articles ))
232- params = {
233- 'a' : label ,
234- 'i' : [articles [idx ].id for idx in range (start , end )]
235- }
234+ params = {'a' : label , 'i' : [articles [idx ].id for idx in range (start , end )]}
236235 self .parse_response (
237- self .session .post (url , params = params , proxies = self .proxies ),
238- json_data = False
236+ self .session .post (url , params = params , proxies = self .proxies ), json_data = False
239237 )
240-
238+
241239 def remove_general_label (self , articles , label ):
242240 self .check_token ()
243241
244242 url = urljoin (BASE_URL , self .EDIT_TAG_PATH )
245243 for start in range (0 , len (articles ), 10 ):
246244 end = min (start + 10 , len (articles ))
247- params = {
248- 'r' : label ,
249- 'i' : [articles [idx ].id for idx in range (start , end )]
250- }
245+ params = {'r' : label , 'i' : [articles [idx ].id for idx in range (start , end )]}
251246 self .parse_response (
252- self .session .post (url , params = params , proxies = self .proxies ),
253- json_data = False
247+ self .session .post (url , params = params , proxies = self .proxies ), json_data = False
254248 )
255-
249+
256250 def add_tag (self , articles , tag ):
257251 self .add_general_label (articles , self .GENERAL_TAG_TEMPLATE .format (tag ))
258252
@@ -276,6 +270,6 @@ def remove_starred(self, articles):
276270
277271 def remove_liked (self , articles ):
278272 self .remove_general_label (articles , self .LIKED_TAG )
279-
273+
280274 def broadcast (self , articles ):
281275 self .add_general_label (articles , self .BROADCAST_TAG )