This repository was archived by the owner on Aug 8, 2025. It is now read-only.

Commit 2b6a121

[builder]: ran builder;
1 parent 388d4fc commit 2b6a121


3 files changed: +271 / -46 lines


src/App.tsx

Lines changed: 1 addition & 0 deletions

@@ -203,6 +203,7 @@ import { LazyPage1lmjg3p } from './page/1lmjg3p/LazyPage';
 import { LazyPage1lmjimi } from './page/1lmjimi/LazyPage';
 import { LazyPage1lmjs43 } from './page/1lmjs43/LazyPage';
 
+
 const App = () => (
   <ConfigProvider locale={enUS}>
     <BrowserRouter basename="/LocalLLaMA">
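For context, each of the LazyPage imports in src/App.tsx points at a generated per-post module under src/page/<id>/. The sketch below shows one plausible shape for such a module, assuming the builder wraps a generated page component in React.lazy behind a Suspense fallback; the file path, the fallback markup, and the assumption that './index' default-exports a component are illustrative, not details confirmed by this commit.

// Hypothetical sketch of src/page/1lmjg3p/LazyPage.tsx (structure assumed, not confirmed by this commit).
import React, { Suspense, lazy } from 'react';

// Defer loading the archived post's page component until its route is visited,
// so the archive's bundle does not grow linearly with the number of snapshots.
// Assumes ./index default-exports a React component.
const Page = lazy(() => import('./index'));

export const LazyPage1lmjg3p: React.FC = () => (
  <Suspense fallback={<div>Loading…</div>}>
    <Page />
  </Suspense>
);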

src/page/1lmjg3p/index.json

Lines changed: 11 additions & 18 deletions

@@ -4,15 +4,15 @@
 "data": {
 "after": null,
 "dist": 1,
-"modhash": "dy0634ws1m6eb4c30dd5fb8bd868b0a672db766f5fbe81224b",
+"modhash": "",
 "geo_filter": "",
 "children": [
 {
 "kind": "t3",
 "data": {
 "approved_at_utc": null,
 "subreddit": "LocalLLaMA",
-"selftext": "# Hey everyone! 👋\n\nI’ve built an **open snapshot** of r/LocalLLaMA to help preserve its discussions, discoveries, and resources for all of us - *especially given how shaky things can get with subs these days*.\n\n👉 **Dive into the live archive:** [https://maifeeulasad.github.io/LocalLLaMA/](https://maifeeulasad.github.io/LocalLLaMA/)\n\nThis little bot quietly **fetches and saves new posts every hour**, so all the local LLM experiments, model links, feedback threads, and tips stay safe and easy to explore - *now and in the future*.\n\nI stitched this together with **React, Ant Design, Node.js**, and a pinch of automation magic. It runs on its own, taking snapshots and refreshing the archive around the clock.\n\n💡 *Don’t trust me?* Fork it. Run it yourself. The goal is simple: **keep the knowledge free**.\n\n**⚡ NB:** Right now, this only auto-archives **new posts**. I’d love help figuring out how to **scrape and backfill older gems** too - so we can lock in the full history. *This needs everyone’s ideas and support!*\n\nIf this helps you, please **star the repo** ❤️ [https://github.com/maifeeulasad/LocalLLaMA](https://github.com/maifeeulasad/LocalLLaMA) \\- issues, ideas, and pull requests are all welcome!\n\nI’ve learned so much from this sub - this is just my small way of giving back. Let’s keep **open models** and **shared knowledge** alive and accessible, no matter what. 🌍✨",
+"selftext": "[removed]",
 "user_reports": [],
 "saved": false,
 "mod_reason_title": null,
@@ -41,9 +41,7 @@
 "subreddit_type": "public",
 "ups": 1,
 "total_awards_received": 0,
-"media_embed": {
-
-},
+"media_embed": {},
 "thumbnail_width": null,
 "author_flair_template_id": "50c36eba-fdca-11ee-9735-92a88d7e3b87",
 "is_original_content": false,
@@ -52,16 +50,14 @@
 "is_reddit_media_domain": false,
 "is_meta": false,
 "category": null,
-"secure_media_embed": {
-
-},
+"secure_media_embed": {},
 "link_flair_text": "Discussion",
 "can_mod_post": false,
 "score": 1,
 "approved_by": null,
 "is_created_from_ads_ui": false,
 "author_premium": false,
-"thumbnail": "self",
+"thumbnail": "default",
 "edited": false,
 "author_flair_css_class": null,
 "author_flair_richtext": [
@@ -70,9 +66,7 @@
 "t": "Ollama"
 }
 ],
-"gildings": {
-
-},
+"gildings": {},
 "content_categories": null,
 "is_self": true,
 "mod_note": null,
@@ -84,13 +78,13 @@
 "author_flair_type": "richtext",
 "domain": "self.LocalLLaMA",
 "allow_live_comments": false,
-"selftext_html": "&lt;!-- SC_OFF --&gt;&lt;div class=\"md\"&gt;&lt;h1&gt;Hey everyone! 👋&lt;/h1&gt;\n\n&lt;p&gt;I’ve built an &lt;strong&gt;open snapshot&lt;/strong&gt; of &lt;a href=\"/r/LocalLLaMA\"&gt;r/LocalLLaMA&lt;/a&gt; to help preserve its discussions, discoveries, and resources for all of us - &lt;em&gt;especially given how shaky things can get with subs these days&lt;/em&gt;.&lt;/p&gt;\n\n&lt;p&gt;👉 &lt;strong&gt;Dive into the live archive:&lt;/strong&gt; &lt;a href=\"https://maifeeulasad.github.io/LocalLLaMA/\"&gt;https://maifeeulasad.github.io/LocalLLaMA/&lt;/a&gt;&lt;/p&gt;\n\n&lt;p&gt;This little bot quietly &lt;strong&gt;fetches and saves new posts every hour&lt;/strong&gt;, so all the local LLM experiments, model links, feedback threads, and tips stay safe and easy to explore - &lt;em&gt;now and in the future&lt;/em&gt;.&lt;/p&gt;\n\n&lt;p&gt;I stitched this together with &lt;strong&gt;React, Ant Design, Node.js&lt;/strong&gt;, and a pinch of automation magic. It runs on its own, taking snapshots and refreshing the archive around the clock.&lt;/p&gt;\n\n&lt;p&gt;💡 &lt;em&gt;Don’t trust me?&lt;/em&gt; Fork it. Run it yourself. The goal is simple: &lt;strong&gt;keep the knowledge free&lt;/strong&gt;.&lt;/p&gt;\n\n&lt;p&gt;&lt;strong&gt;⚡ NB:&lt;/strong&gt; Right now, this only auto-archives &lt;strong&gt;new posts&lt;/strong&gt;. I’d love help figuring out how to &lt;strong&gt;scrape and backfill older gems&lt;/strong&gt; too - so we can lock in the full history. &lt;em&gt;This needs everyone’s ideas and support!&lt;/em&gt;&lt;/p&gt;\n\n&lt;p&gt;If this helps you, please &lt;strong&gt;star the repo&lt;/strong&gt; ❤️ &lt;a href=\"https://github.com/maifeeulasad/LocalLLaMA\"&gt;https://github.com/maifeeulasad/LocalLLaMA&lt;/a&gt; - issues, ideas, and pull requests are all welcome!&lt;/p&gt;\n\n&lt;p&gt;I’ve learned so much from this sub - this is just my small way of giving back. Let’s keep &lt;strong&gt;open models&lt;/strong&gt; and &lt;strong&gt;shared knowledge&lt;/strong&gt; alive and accessible, no matter what. 🌍✨&lt;/p&gt;\n&lt;/div&gt;&lt;!-- SC_ON --&gt;",
-"likes": true,
+"selftext_html": "&lt;!-- SC_OFF --&gt;&lt;div class=\"md\"&gt;&lt;p&gt;[removed]&lt;/p&gt;\n&lt;/div&gt;&lt;!-- SC_ON --&gt;",
+"likes": null,
 "suggested_sort": null,
 "banned_at_utc": null,
 "view_count": null,
 "archived": false,
-"no_follow": false,
+"no_follow": true,
 "is_crosspostable": false,
 "pinned": false,
 "over_18": false,
@@ -103,7 +97,6 @@
 "locked": false,
 "author_flair_text": "Ollama",
 "treatment_tags": [],
-"rte_mode": "richtext",
 "visited": false,
 "removed_by": null,
 "num_reports": null,
@@ -128,7 +121,7 @@
 "permalink": "/r/LocalLLaMA/comments/1lmjg3p/helping_archive_rlocalllama_for_everyone_for_open/",
 "stickied": false,
 "url": "https://www.reddit.com/r/LocalLLaMA/comments/1lmjg3p/helping_archive_rlocalllama_for_everyone_for_open/",
-"subreddit_subscribers": 492256,
+"subreddit_subscribers": 492572,
 "created_utc": 1751105890,
 "num_crossposts": 0,
 "mod_reports": [],
@@ -144,7 +137,7 @@
 "data": {
 "after": null,
 "dist": null,
-"modhash": "dy0634ws1m6eb4c30dd5fb8bd868b0a672db766f5fbe81224b",
+"modhash": "",
 "geo_filter": "",
 "children": [],
 "before": null

0 commit comments
