Skip to content

Commit 872eb09

Browse files
authored
Merge pull request #16 from pimanac/streams
add ListingStream
2 parents e5722dd + 07ee964 commit 872eb09

File tree

3 files changed

+247
-8
lines changed

3 files changed

+247
-8
lines changed

README.md

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,34 @@ foreach (var post in all.Take(25))
4848
}
4949
```
5050

51+
52+
**Using ListingStreams**
53+
54+
Use ListingStreams to infinitely yield new Things posted to reddit.
55+
56+
Example:
57+
58+
```csharp
59+
// get all new comments as they are posted.
60+
foreach (var comment in subreddit.CommentStream)
61+
{
62+
Console.WriteLine(DateTime.Now + " New Comment posted to /r/example: " + comment.ShortLink);
63+
}
64+
```
65+
66+
You can call `.GetListingStream()` on any `Listing<Thing>`:
67+
68+
```csharp
69+
// get new modmail
70+
var newModmail = user.ModMail.GetListingStream();
71+
foreach (var message in newModmail)
72+
{
73+
if (message.FirstMessageName == "")
74+
message.Reply("Thanks for the message - we will get back to you soon.");
75+
}
76+
77+
```
78+
5179
## Development
5280

5381
RedditSharp is developed with the following workflow:

RedditSharp/Listing.cs

Lines changed: 178 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -55,9 +55,9 @@ internal Listing(Reddit reddit, string url, IWebAgent webAgent)
5555
/// <param name="limitPerRequest">The number of listings to be returned per request</param>
5656
/// <param name="maximumLimit">The maximum number of listings to return</param>
5757
/// <returns></returns>
58-
public IEnumerator<T> GetEnumerator(int limitPerRequest, int maximumLimit = -1, bool stream = false)
{
    // Build the paging enumerator; when stream is true it pages forward in
    // time (newest-first polling) instead of the standard backward paging.
    var pager = new ListingEnumerator<T>(this, limitPerRequest, maximumLimit, stream);
    return pager;
}
6262

6363
/// <summary>
@@ -69,6 +69,7 @@ public IEnumerator<T> GetEnumerator()
6969
return GetEnumerator(DefaultListingPerRequest);
7070
}
7171

72+
7273
/// <summary>
7374
/// Returns an enumerator that iterates through a collection
7475
/// </summary>
@@ -103,6 +104,24 @@ public IEnumerable<T> GetListing(int maximumLimit, int limitPerRequest)
103104
return GetEnumerator(enumerator);
104105
}
105106

107+
/// <summary>
/// Returns an IEnumerable instance which will infinitely yield new <see cref="Thing"/>
/// as they appear, skipping items that have already been seen.
/// </summary>
/// <param name="limitPerRequest">
/// Number of records to request from the reddit API per call. -1 uses the
/// reddit standard of 25 records per request.
/// Tune this up or down based on the size of your subreddit and the rate at
/// which new content is created.
/// </param>
/// <param name="maximumLimit">Maximum number of records to return; -1 for no limit.</param>
/// <returns>An enumerable that polls the reddit API for new items as it is iterated.</returns>
public IEnumerable<T> GetListingStream(int limitPerRequest = -1, int maximumLimit = -1)
{
    // Build the enumerator in streaming mode, then expose it as an IEnumerable.
    var streamingEnumerator = GetEnumerator(limitPerRequest, maximumLimit, true);
    return GetEnumerator(streamingEnumerator);
}
124+
106125
/// <summary>
107126
/// Converts an IEnumerator instance to an IEnumerable
108127
/// </summary>
@@ -119,6 +138,7 @@ private static IEnumerable<T> GetEnumerator(IEnumerator<T> enumerator)
119138
#pragma warning disable 0693
120139
private class ListingEnumerator<T> : IEnumerator<T> where T : Thing
121140
{
141+
private bool stream = false;
122142
private Listing<T> Listing { get; set; }
123143
private int CurrentPageIndex { get; set; }
124144
private string After { get; set; }
@@ -128,17 +148,22 @@ private class ListingEnumerator<T> : IEnumerator<T> where T : Thing
128148
private int LimitPerRequest { get; set; }
129149
private int MaximumLimit { get; set; }
130150

151+
private List<string> done;
152+
131153
/// <summary>
132154
/// Creates a new ListingEnumerator instance
133155
/// </summary>
134156
/// <param name="listing"></param>
135157
/// <param name="limitPerRequest">The number of listings to be returned per request. -1 will exclude this parameter and use the Reddit default (25)</param>
136158
/// <param name="maximumLimit">The maximum number of listings to return, -1 will not add a limit</param>
137-
public ListingEnumerator(Listing<T> listing, int limitPerRequest, int maximumLimit)
159+
/// <param name="stream">yield new <see cref="Thing"/> as they are created</param>
160+
public ListingEnumerator(Listing<T> listing, int limitPerRequest, int maximumLimit, bool stream = false)
138161
{
139162
Listing = listing;
140163
CurrentPageIndex = -1;
141164
CurrentPage = new Thing[0];
165+
done = new List<string>();
166+
this.stream = stream;
142167

143168
// Set the listings per page (if not specified, use the Reddit default of 25) and the maximum listings
144169
LimitPerRequest = (limitPerRequest <= 0 ? DefaultListingPerRequest : limitPerRequest);
@@ -147,13 +172,24 @@ public ListingEnumerator(Listing<T> listing, int limitPerRequest, int maximumLim
147172

148173
/// <summary>
/// The element at the enumerator's current position within the current page.
/// </summary>
public T Current
{
    get { return (T)CurrentPage[CurrentPageIndex]; }
}
155180

156181
/// <summary>
/// Loads the next page of results: forward in time when streaming,
/// backward (the standard listing order) otherwise.
/// </summary>
private void FetchNextPage()
{
    if (stream)
    {
        PageForward();
    }
    else
    {
        PageBack();
    }
}
188+
189+
/// <summary>
190+
/// Standard behavior. Page from newest to oldest - "backward" in time.
191+
/// </summary>
192+
private void PageBack()
157193
{
158194
var url = Listing.Url;
159195

@@ -200,14 +236,83 @@ private void FetchNextPage()
200236
Parse(json);
201237
}
202238

239+
240+
/// <summary>
/// Page from oldest to newest - "forward" in time.
/// Requests only items newer than the most recently seen item (the "before" anchor).
/// </summary>
private void PageForward()
{
    var url = Listing.Url;

    if (Before != null)
    {
        // Anchor the request so reddit only returns things newer than the last page.
        url += (url.Contains("?") ? "&" : "?") + "before=" + Before;
    }

    if (LimitPerRequest != -1)
    {
        int limit = LimitPerRequest;

        // Only clamp against MaximumLimit when one was actually specified.
        // MaximumLimit is -1 for "no maximum" (the streaming default), and the
        // original unconditional comparison set limit to -1 in that case,
        // silently dropping the per-request limit from the request.
        if (MaximumLimit != -1)
        {
            if (limit > MaximumLimit)
            {
                // If the limit is more than the maximum number of listings, adjust
                limit = MaximumLimit;
            }
            else if (Count + limit > MaximumLimit)
            {
                // If a smaller subset of listings are needed, adjust the limit
                limit = MaximumLimit - Count;
            }
        }

        if (limit > 0)
        {
            // Add the limit, the maximum number of items to be returned per page
            url += (url.Contains("?") ? "&" : "?") + "limit=" + limit;
        }
    }

    if (Count > 0)
    {
        // Add the count, the number of items already seen in this listingStream
        // The Reddit API uses this to determine when to give values for before and after fields
        url += (url.Contains("?") ? "&" : "?") + "count=" + Count;
    }

    var request = Listing.WebAgent.CreateGet(url);
    var response = request.GetResponse();
    var data = Listing.WebAgent.GetResponseString(response.GetResponseStream());
    var json = JToken.Parse(data);
    if (json["kind"].ValueOrDefault<string>() != "Listing")
        throw new FormatException("Reddit responded with an object that is not a listingStream.");
    Parse(json);
}
289+
290+
203291
private void Parse(JToken json)
204292
{
205293
var children = json["data"]["children"] as JArray;
206-
CurrentPage = new Thing[children.Count];
207-
208-
for (int i = 0; i < CurrentPage.Length; i++)
209-
CurrentPage[i] = Thing.Parse<T>(Listing.Reddit, children[i], Listing.WebAgent);
294+
var things = new List<Thing>();
210295

296+
for (int i = 0; i < children.Count; i++)
297+
{
298+
if (!stream)
299+
things.Add(Thing.Parse<T>(Listing.Reddit, children[i], Listing.WebAgent));
300+
else
301+
{
302+
// we only want to see new items.
303+
var id = children[i]["data"]["id"].ValueOrDefault<string>();
304+
if (String.IsNullOrEmpty(id) || done.Contains(id))
305+
continue;
306+
307+
things.Add(Thing.Parse<T>(Listing.Reddit, children[i], Listing.WebAgent));
308+
done.Add(id);
309+
}
310+
}
311+
312+
if (stream)
313+
things.Reverse();
314+
315+
CurrentPage = things.ToArray();
211316
// Increase the total count of items returned
212317
Count += CurrentPage.Length;
213318

@@ -226,6 +331,14 @@ object System.Collections.IEnumerator.Current
226331
}
227332

228333
/// <summary>
/// Advances the enumerator, dispatching to the streaming (forward-in-time)
/// or standard (backward-in-time) paging strategy.
/// </summary>
public bool MoveNext()
{
    return stream ? MoveNextForward() : MoveNextBack();
}
340+
341+
private bool MoveNextBack()
229342
{
230343
CurrentPageIndex++;
231344
if (CurrentPageIndex == CurrentPage.Length)
@@ -255,6 +368,63 @@ public bool MoveNext()
255368
return true;
256369
}
257370

371+
/// <summary>
/// Advances the enumerator in streaming mode, polling for new items and
/// sleeping (with backoff via <see cref="Sleep"/>) when the page is empty
/// or a fetch fails.
/// </summary>
/// <returns>false once <c>MaximumLimit</c> items have been yielded; otherwise true.</returns>
private bool MoveNextForward()
{
    CurrentPageIndex++;
    if (CurrentPageIndex == CurrentPage.Length)
    {
        int tries = 0;
        while (true)
        {
            if (MaximumLimit != -1 && Count >= MaximumLimit)
                return false;

            tries++;
            // Get the next page
            try
            {
                FetchNextPage();
            }
            catch (Exception ex)
            {
                // sleep for a while to see if we can recover
                // Sleep() will rethrow after waiting a bit
                // todo: make this smarter
                Sleep(tries, ex);
                // The fetch failed, so CurrentPage still holds the previous,
                // already-yielded items. Retry instead of falling through,
                // which would treat the stale page as fresh and re-yield
                // duplicates.
                continue;
            }

            CurrentPageIndex = 0;

            if (CurrentPage.Length == 0)
            {
                // No listings were returned in the page
                // sleep for a while
                Sleep(tries);
            }
            else
            {
                tries = 0;
                break;
            }
        }
    }
    return true;
}
413+
414+
/// <summary>
/// Sleeps between retry attempts, backing off 5 seconds per attempt up to a
/// cap of 3 minutes. After 36 attempts, rethrows the stored exception (if any)
/// instead of retrying forever.
/// </summary>
/// <param name="tries">Number of consecutive failed or empty attempts so far.</param>
/// <param name="ex">Exception captured from the last failed attempt, if any.</param>
private void Sleep(int tries, Exception ex = null)
{
    // wait up to 3 minutes between tries
    int seconds = 180;

    if (tries > 36)
    {
        // Persistent failure: give up and surface the original error.
        // ExceptionDispatchInfo preserves the original stack trace, which
        // a plain "throw ex;" would overwrite.
        if (ex != null)
            System.Runtime.ExceptionServices.ExceptionDispatchInfo.Capture(ex).Throw();
    }
    else
    {
        // Back off gradually: 5 seconds per attempt (36 * 5 = 180 at the cap).
        // The original code's dangling "else" bound this assignment to the
        // inner "if (ex != null)", so the backoff never applied and every
        // wait lasted the full 180 seconds.
        seconds = tries * 5;
    }

    System.Threading.Thread.Sleep(seconds * 1000);
}
427+
258428
public void Reset()
259429
{
260430
After = Before = null;

RedditSharp/Things/Subreddit.cs

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -980,6 +980,47 @@ public Listing<ModAction> GetModerationLog(ModActionType action, string[] mods)
980980
{
981981
return new Listing<ModAction>(Reddit, string.Format(ModLogUrl + "?type={1}&mod={2}", Name, ModActionTypeConverter.GetRedditParamName(action), string.Join(",", mods)), WebAgent);
982982
}
983+
984+
985+
/// <summary>
/// Infinitely yields new <see cref="Comment"/> posted to the subreddit.
/// </summary>
public IEnumerable<Comment> CommentStream
{
    get
    {
        // "/" denotes the front page / all of reddit rather than a named subreddit.
        var url = Name == "/" ? "/comments.json" : string.Format(CommentsUrl, Name);
        return new Listing<Comment>(Reddit, url, WebAgent).GetListingStream();
    }
}
997+
998+
/// <summary>
/// Infinitely yields new <see cref="Post"/> made to the subreddit.
/// </summary>
public IEnumerable<Post> SubmissionStream
{
    get
    {
        // "/" denotes the front page / all of reddit rather than a named subreddit.
        var url = Name == "/" ? "/new.json" : string.Format(SubredditNewUrl, Name);
        return new Listing<Post>(Reddit, url, WebAgent).GetListingStream();
    }
}
1010+
1011+
/// <summary>
/// Infinitely yields new <see cref="ModAction"/> made on the subreddit.
/// </summary>
public IEnumerable<ModAction> ModerationLogStream
{
    get
    {
        // NOTE(review): the original code branched on Name == "/" (as the
        // sibling stream properties do), but both branches built the
        // identical mod-log URL, so the check was dead copy-paste code.
        // Presumably there is no site-wide moderation log to special-case;
        // confirm if a "/" URL ever becomes available.
        return new Listing<ModAction>(Reddit, string.Format(ModLogUrl, Name), WebAgent).GetListingStream();
    }
}
1023+
9831024
#region Obsolete Getter Methods
9841025

9851026
[Obsolete("Use Posts property instead")]

0 commit comments

Comments
 (0)