This repository was archived by the owner on Aug 8, 2025. It is now read-only.

Commit 9b58ab4

[builder]: ran builder;

1 parent 9cef4b3

208 files changed: +91231 additions, -3083 deletions

src/App.tsx

Lines changed: 114 additions & 0 deletions
@@ -165,8 +165,10 @@ import { LazyPage1lm23z8 } from './page/1lm23z8/LazyPage';
 import { LazyPage1lm24xd } from './page/1lm24xd/LazyPage';
 import { LazyPage1lm32zh } from './page/1lm32zh/LazyPage';
 import { LazyPage1lm3jvm } from './page/1lm3jvm/LazyPage';
+import { LazyPage1lm4s6i } from './page/1lm4s6i/LazyPage';
 import { LazyPage1lm4tno } from './page/1lm4tno/LazyPage';
 import { LazyPage1lm58q1 } from './page/1lm58q1/LazyPage';
+import { LazyPage1lm5a05 } from './page/1lm5a05/LazyPage';
 import { LazyPage1lm5muh } from './page/1lm5muh/LazyPage';
 import { LazyPage1lm66fy } from './page/1lm66fy/LazyPage';
 import { LazyPage1lm76gk } from './page/1lm76gk/LazyPage';
@@ -202,6 +204,61 @@ import { LazyPage1lmizi2 } from './page/1lmizi2/LazyPage';
 import { LazyPage1lmjg3p } from './page/1lmjg3p/LazyPage';
 import { LazyPage1lmjimi } from './page/1lmjimi/LazyPage';
 import { LazyPage1lmjs43 } from './page/1lmjs43/LazyPage';
+import { LazyPage1lmjwtu } from './page/1lmjwtu/LazyPage';
+import { LazyPage1lmk2dj } from './page/1lmk2dj/LazyPage';
+import { LazyPage1lml6eo } from './page/1lml6eo/LazyPage';
+import { LazyPage1lml8lx } from './page/1lml8lx/LazyPage';
+import { LazyPage1lmmh3l } from './page/1lmmh3l/LazyPage';
+import { LazyPage1lmmvmj } from './page/1lmmvmj/LazyPage';
+import { LazyPage1lmmxh1 } from './page/1lmmxh1/LazyPage';
+import { LazyPage1lmn5k2 } from './page/1lmn5k2/LazyPage';
+import { LazyPage1lmni3q } from './page/1lmni3q/LazyPage';
+import { LazyPage1lmo9b2 } from './page/1lmo9b2/LazyPage';
+import { LazyPage1lmoqsl } from './page/1lmoqsl/LazyPage';
+import { LazyPage1lmp3en } from './page/1lmp3en/LazyPage';
+import { LazyPage1lmpd8j } from './page/1lmpd8j/LazyPage';
+import { LazyPage1lmqsru } from './page/1lmqsru/LazyPage';
+import { LazyPage1lmqtby } from './page/1lmqtby/LazyPage';
+import { LazyPage1lmr1qh } from './page/1lmr1qh/LazyPage';
+import { LazyPage1lmranc } from './page/1lmranc/LazyPage';
+import { LazyPage1lmrd6x } from './page/1lmrd6x/LazyPage';
+import { LazyPage1lmrmnz } from './page/1lmrmnz/LazyPage';
+import { LazyPage1lmsme1 } from './page/1lmsme1/LazyPage';
+import { LazyPage1lmt3kt } from './page/1lmt3kt/LazyPage';
+import { LazyPage1lmtlgp } from './page/1lmtlgp/LazyPage';
+import { LazyPage1lmvosa } from './page/1lmvosa/LazyPage';
+import { LazyPage1lmvv5e } from './page/1lmvv5e/LazyPage';
+import { LazyPage1lmwjf2 } from './page/1lmwjf2/LazyPage';
+import { LazyPage1lmx8ic } from './page/1lmx8ic/LazyPage';
+import { LazyPage1lmxduv } from './page/1lmxduv/LazyPage';
+import { LazyPage1lmxg89 } from './page/1lmxg89/LazyPage';
+import { LazyPage1lmxhd7 } from './page/1lmxhd7/LazyPage';
+import { LazyPage1lmxpis } from './page/1lmxpis/LazyPage';
+import { LazyPage1lmy53s } from './page/1lmy53s/LazyPage';
+import { LazyPage1lmz4kf } from './page/1lmz4kf/LazyPage';
+import { LazyPage1lmzqb9 } from './page/1lmzqb9/LazyPage';
+import { LazyPage1ln10a8 } from './page/1ln10a8/LazyPage';
+import { LazyPage1ln1a6u } from './page/1ln1a6u/LazyPage';
+import { LazyPage1ln1gdr } from './page/1ln1gdr/LazyPage';
+import { LazyPage1ln1ij8 } from './page/1ln1ij8/LazyPage';
+import { LazyPage1ln1m7d } from './page/1ln1m7d/LazyPage';
+import { LazyPage1ln3pur } from './page/1ln3pur/LazyPage';
+import { LazyPage1ln4iyg } from './page/1ln4iyg/LazyPage';
+import { LazyPage1ln56xd } from './page/1ln56xd/LazyPage';
+import { LazyPage1ln5jli } from './page/1ln5jli/LazyPage';
+import { LazyPage1ln5jqr } from './page/1ln5jqr/LazyPage';
+import { LazyPage1ln5l6b } from './page/1ln5l6b/LazyPage';
+import { LazyPage1ln7poe } from './page/1ln7poe/LazyPage';
+import { LazyPage1ln7rll } from './page/1ln7rll/LazyPage';
+import { LazyPage1ln8uqb } from './page/1ln8uqb/LazyPage';
+import { LazyPage1ln93o3 } from './page/1ln93o3/LazyPage';
+import { LazyPage1lnacbb } from './page/1lnacbb/LazyPage';
+import { LazyPage1lnahfy } from './page/1lnahfy/LazyPage';
+import { LazyPage1lnbru7 } from './page/1lnbru7/LazyPage';
+import { LazyPage1lncfmw } from './page/1lncfmw/LazyPage';
+import { LazyPage1lncymd } from './page/1lncymd/LazyPage';
+import { LazyPage1lnd1su } from './page/1lnd1su/LazyPage';
+import { LazyPage1lndmzj } from './page/1lndmzj/LazyPage';


 const App = () => (
@@ -360,8 +417,10 @@ const App = () => (
 <Route path="/1lm24xd" element={<LazyPage1lm24xd />} />
 <Route path="/1lm32zh" element={<LazyPage1lm32zh />} />
 <Route path="/1lm3jvm" element={<LazyPage1lm3jvm />} />
+<Route path="/1lm4s6i" element={<LazyPage1lm4s6i />} />
 <Route path="/1lm4tno" element={<LazyPage1lm4tno />} />
 <Route path="/1lm58q1" element={<LazyPage1lm58q1 />} />
+<Route path="/1lm5a05" element={<LazyPage1lm5a05 />} />
 <Route path="/1lm5muh" element={<LazyPage1lm5muh />} />
 <Route path="/1lm66fy" element={<LazyPage1lm66fy />} />
 <Route path="/1lm76gk" element={<LazyPage1lm76gk />} />
@@ -397,6 +456,61 @@ const App = () => (
 <Route path="/1lmjg3p" element={<LazyPage1lmjg3p />} />
 <Route path="/1lmjimi" element={<LazyPage1lmjimi />} />
 <Route path="/1lmjs43" element={<LazyPage1lmjs43 />} />
+<Route path="/1lmjwtu" element={<LazyPage1lmjwtu />} />
+<Route path="/1lmk2dj" element={<LazyPage1lmk2dj />} />
+<Route path="/1lml6eo" element={<LazyPage1lml6eo />} />
+<Route path="/1lml8lx" element={<LazyPage1lml8lx />} />
+<Route path="/1lmmh3l" element={<LazyPage1lmmh3l />} />
+<Route path="/1lmmvmj" element={<LazyPage1lmmvmj />} />
+<Route path="/1lmmxh1" element={<LazyPage1lmmxh1 />} />
+<Route path="/1lmn5k2" element={<LazyPage1lmn5k2 />} />
+<Route path="/1lmni3q" element={<LazyPage1lmni3q />} />
+<Route path="/1lmo9b2" element={<LazyPage1lmo9b2 />} />
+<Route path="/1lmoqsl" element={<LazyPage1lmoqsl />} />
+<Route path="/1lmp3en" element={<LazyPage1lmp3en />} />
+<Route path="/1lmpd8j" element={<LazyPage1lmpd8j />} />
+<Route path="/1lmqsru" element={<LazyPage1lmqsru />} />
+<Route path="/1lmqtby" element={<LazyPage1lmqtby />} />
+<Route path="/1lmr1qh" element={<LazyPage1lmr1qh />} />
+<Route path="/1lmranc" element={<LazyPage1lmranc />} />
+<Route path="/1lmrd6x" element={<LazyPage1lmrd6x />} />
+<Route path="/1lmrmnz" element={<LazyPage1lmrmnz />} />
+<Route path="/1lmsme1" element={<LazyPage1lmsme1 />} />
+<Route path="/1lmt3kt" element={<LazyPage1lmt3kt />} />
+<Route path="/1lmtlgp" element={<LazyPage1lmtlgp />} />
+<Route path="/1lmvosa" element={<LazyPage1lmvosa />} />
+<Route path="/1lmvv5e" element={<LazyPage1lmvv5e />} />
+<Route path="/1lmwjf2" element={<LazyPage1lmwjf2 />} />
+<Route path="/1lmx8ic" element={<LazyPage1lmx8ic />} />
+<Route path="/1lmxduv" element={<LazyPage1lmxduv />} />
+<Route path="/1lmxg89" element={<LazyPage1lmxg89 />} />
+<Route path="/1lmxhd7" element={<LazyPage1lmxhd7 />} />
+<Route path="/1lmxpis" element={<LazyPage1lmxpis />} />
+<Route path="/1lmy53s" element={<LazyPage1lmy53s />} />
+<Route path="/1lmz4kf" element={<LazyPage1lmz4kf />} />
+<Route path="/1lmzqb9" element={<LazyPage1lmzqb9 />} />
+<Route path="/1ln10a8" element={<LazyPage1ln10a8 />} />
+<Route path="/1ln1a6u" element={<LazyPage1ln1a6u />} />
+<Route path="/1ln1gdr" element={<LazyPage1ln1gdr />} />
+<Route path="/1ln1ij8" element={<LazyPage1ln1ij8 />} />
+<Route path="/1ln1m7d" element={<LazyPage1ln1m7d />} />
+<Route path="/1ln3pur" element={<LazyPage1ln3pur />} />
+<Route path="/1ln4iyg" element={<LazyPage1ln4iyg />} />
+<Route path="/1ln56xd" element={<LazyPage1ln56xd />} />
+<Route path="/1ln5jli" element={<LazyPage1ln5jli />} />
+<Route path="/1ln5jqr" element={<LazyPage1ln5jqr />} />
+<Route path="/1ln5l6b" element={<LazyPage1ln5l6b />} />
+<Route path="/1ln7poe" element={<LazyPage1ln7poe />} />
+<Route path="/1ln7rll" element={<LazyPage1ln7rll />} />
+<Route path="/1ln8uqb" element={<LazyPage1ln8uqb />} />
+<Route path="/1ln93o3" element={<LazyPage1ln93o3 />} />
+<Route path="/1lnacbb" element={<LazyPage1lnacbb />} />
+<Route path="/1lnahfy" element={<LazyPage1lnahfy />} />
+<Route path="/1lnbru7" element={<LazyPage1lnbru7 />} />
+<Route path="/1lncfmw" element={<LazyPage1lncfmw />} />
+<Route path="/1lncymd" element={<LazyPage1lncymd />} />
+<Route path="/1lnd1su" element={<LazyPage1lnd1su />} />
+<Route path="/1lndmzj" element={<LazyPage1lndmzj />} />
 <Route path="*" element={<Navigate to="/landing" replace />} />
 </Routes>
 </CustomLayout>

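The App.tsx hunks above only register the new pages: each id gets a named LazyPage import and a matching <Route>. The per-page LazyPage modules themselves are not part of this excerpt, so the following is a minimal sketch of what src/page/1lm4s6i/LazyPage.tsx could look like under the named-export pattern implied by the imports; the ./Page module path and the fallback markup are assumptions.

// Hypothetical sketch of src/page/1lm4s6i/LazyPage.tsx (not shown in this diff).
// Assumes each page folder exports a named LazyPage<id> component that
// code-splits the real page behind React.lazy.
import React, { Suspense, lazy } from 'react';

// Assumed: the actual page component lives next to this file as ./Page.tsx.
const Page = lazy(() => import('./Page'));

// The named export matches the import pattern added to src/App.tsx above.
export const LazyPage1lm4s6i: React.FC = () => (
  <Suspense fallback={<div>Loading…</div>}>
    <Page />
  </Suspense>
);

If the pages follow this shape, each route added below only pulls its chunk on first visit, which keeps the initial bundle small even as the builder registers hundreds of routes.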
src/layout/CustomLayout.tsx

Lines changed: 228 additions & 0 deletions
@@ -616,6 +616,10 @@ const defaultMenus: MenuDataItem[] = [
 path: '/1lm3jvm',
 name: 'Arch-Router: The first (and fastest) LLM router that can align to your usage preferences.',
 },
+{
+path: '/1lm4s6i',
+name: 'Inconsistent responses between OpenRouter API and native OpenAI API',
+},
 {
 path: '/1lm4tno',
 name: 'Build advice question for repurposing spare GPUs'
@@ -624,6 +628,10 @@ const defaultMenus: MenuDataItem[] = [
 path: '/1lm58q1',
 name: 'Problems on RVC WebUI creating new vocal model',
 },
+{
+path: '/1lm5a05',
+name: 'What if we remove reasoning models\' <think> process but make them believe they already reasoned?',
+},
 {
 path: '/1lm5muh',
 name: 'What is your favorite opensource image embedding model'
@@ -764,6 +772,226 @@ const defaultMenus: MenuDataItem[] = [
 path: '/1lmjs43',
 name: 'Hi everyone, I have a problem with fine tuning LLM on law',
 },
+{
+path: '/1lmjwtu',
+name: 'Good Courses to Learn and Use Local LLaMA Models?',
+},
+{
+path: '/1lmk2dj',
+name: 'Progress stalled in non-reasoning open-source models?',
+},
+{
+path: '/1lml6eo',
+name: 'Using local models with Void',
+},
+{
+path: '/1lml8lx',
+name: '120 AI Chat - Native macOS Chat App with Ollama Support',
+},
+{
+path: '/1lmmh3l',
+name: 'Consumer hardware landscape for local LLMs June 2025',
+},
+{
+path: '/1lmmvmj',
+name: 'Many small evals are better than one big eval [techniques]',
+},
+{
+path: '/1lmmxh1',
+name: 'Which are the best realistic video generation tools',
+},
+{
+path: '/1lmn5k2',
+name: 'What are Coqui-TTS alternatives?',
+},
+{
+path: '/1lmni3q',
+name: 'What framework are you using to build AI Agents?',
+},
+{
+path: '/1lmo9b2',
+name: 'Play Infinite Tic Tac Toe against LLM Models',
+},
+{
+path: '/1lmoqsl',
+name: 'Link between LM Studio and tools/functions?',
+},
+{
+path: '/1lmp3en',
+name: 'support for the upcoming ERNIE 4.5 0.3B model has been merged into llama.cpp',
+},
+{
+path: '/1lmpd8j',
+name: 'Best model tuned specifically for Programming?',
+},
+{
+path: '/1lmqsru',
+name: 'deepseek-r1-0528 ranked #2 on lmarena, matching best from chatgpt',
+},
+{
+path: '/1lmqtby',
+name: 'Como mejorar un sistema RAG?',
+},
+{
+path: '/1lmr1qh',
+name: 'EPYC cpu build. Which cpu? (9354, 9534, 9654)',
+},
+{
+path: '/1lmranc',
+name: 'Gemma3n:2B and Gemma3n:4B models are ~40% slower than equivalent models in size running on Llama.cpp',
+},
+{
+path: '/1lmrd6x',
+name: 'Looking for Android chat ui',
+},
+{
+path: '/1lmrmnz',
+name: 'Multimodal Multistage Reasoning',
+},
+{
+path: '/1lmsme1',
+name: 'Can Copilot be trusted with private source code more than competition?',
+},
+{
+path: '/1lmt3kt',
+name: 'i5-8500 (6 cores), 24GB DDR4 2666 dual channel, realistic expectations for 3b/4b models?',
+},
+{
+path: '/1lmtlgp',
+name: 'The ollama models are excellent models that can be installed locally as a starting point but.....',
+},
+{
+path: '/1lmvosa',
+name: 'Best GGUF Base Models Under 3B for Unfiltered NSFW Roleplay?',
+},
+{
+path: '/1lmvv5e',
+name: 'Assistance for beginner in local LLM',
+},
+{
+path: '/1lmwjf2',
+name: 'Recent best models <=14b for agentic search?',
+},
+{
+path: '/1lmx8ic',
+name: 'NVIDIA acquires CentML. what does this mean for inference infra?',
+},
+{
+path: '/1lmxduv',
+name: 'Looking for a local LLM translator for large documents and especialized tools',
+},
+{
+path: '/1lmxg89',
+name: 'The AutoInference library now supports major and popular backends for LLM inference, including Transformers, vLLM, Unsloth, and llama.cpp. ⭐',
+},
+{
+path: '/1lmxhd7',
+name: 'Anyone used RAM across multiple networked devices?',
+},
+{
+path: '/1lmxpis',
+name: 'Local AI conversational model for English language learning',
+},
+{
+path: '/1lmy53s',
+name: 'The Orakle Manifesto: Or Why Your AI Apps (Should) Belong To You',
+},
+{
+path: '/1lmz4kf',
+name: 'Transformer ASIC 500k tokens/s',
+},
+{
+path: '/1lmzqb9',
+name: 'Sydney4 beats ChatGPT 4o in existential crisis',
+},
+{
+path: '/1ln10a8',
+name: 'A bunch of LLM FPHAM Python scripts I\'ve added to my GitHub in recent days',
+},
+{
+path: '/1ln1a6u',
+name: 'What\'s it currently like for people here running AMD GPUs with AI?',
+},
+{
+path: '/1ln1gdr',
+name: 'Problems creating an executable with llama cpp',
+},
+{
+path: '/1ln1ij8',
+name: 'RLHF from scratch, step-by-step, in 3 Jupyter notebooks',
+},
+{
+path: '/1ln1m7d',
+name: 'Audio Input LLM',
+},
+{
+path: '/1ln3pur',
+name: 'Do you use AI (like ChatGPT, Gmini, etc) to develop your LangGraph agents? Or is it just my impostor syndrome talking?',
+},
+{
+path: '/1ln4iyg',
+name: 'Need your opinion please, appreciated.',
+},
+{
+path: '/1ln56xd',
+name: 'Building a Coding Mentor Agent with LangChain + LangGraph + GPT-4o-mini',
+},
+{
+path: '/1ln5jli',
+name: 'How do you evaluate and compare multiple LLMs (e.g., via OpenRouter) to test which one performs best?',
+},
+{
+path: '/1ln5jqr',
+name: 'Is ReAct still the best prompt template?',
+},
+{
+path: '/1ln5l6b',
+name: 'Training Open models on my data for replacing RAG',
+},
+{
+path: '/1ln7poe',
+name: 'Suggest me an Uncensored LLM and another LLM for Coding stuffs',
+},
+{
+path: '/1ln7rll',
+name: 'I made a writing assistant Chrome extension. Completely free with Gemini Nano.',
+},
+{
+path: '/1ln8uqb',
+name: 'LM Studio vision models???',
+},
+{
+path: '/1ln93o3',
+name: 'Is anyone here using Llama to code websites and apps? From my experience, it sucks',
+},
+{
+path: '/1lnacbb',
+name: 'Why the local Llama-3.2-1B-Instruct is not as smart as the one provided on Hugging Face?',
+},
+{
+path: '/1lnahfy',
+name: 'Intelligent decisioning for small language model training and serving platform',
+},
+{
+path: '/1lnbru7',
+name: 'How to teach AI to read a complete guide/manual/help website to ask questions about it?',
+},
+{
+path: '/1lncfmw',
+name: 'Seems I was informed (incorrectly) that Ollama had very little censorship--at least it finally stopped apologizing.',
+},
+{
+path: '/1lncymd',
+name: '12B Q5_K_M or 22B Q4_K_S',
+},
+{
+path: '/1lnd1su',
+name: 'Windows vs Linux (Ubuntu) for LLM-GenAI work/research.',
+},
+{
+path: '/1lndmzj',
+name: 'Mistral Small 3.2 can\'t generate tables, and stops generation altogether',
+},
 ],
 },
 {

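Taken together, the commit applies one repeated pattern per page id: a LazyPage import and a <Route> in src/App.tsx, plus a { path, name } entry in CustomLayout's defaultMenus. The "[builder]" script named in the commit message is not included in this excerpt, so the TypeScript sketch below of how such a generator could emit the three coordinated lines is an assumption; PageEntry and renderRegistration are hypothetical names used only for illustration.

// Hypothetical sketch of the registration step a builder like this might run;
// the real script is not part of this commit, and the exact output format
// (quoting, indentation) is assumed from the diff above.
interface PageEntry {
  id: string;    // page folder under src/page/, e.g. '1lm4s6i'
  title: string; // menu label used in CustomLayout's defaultMenus
}

// Derives the three lines this diff adds for each page from a single entry:
// 1. the import line for src/App.tsx,
// 2. the <Route> element bound to /<id>,
// 3. the { path, name } item for src/layout/CustomLayout.tsx.
function renderRegistration(page: PageEntry): { importLine: string; routeLine: string; menuItem: string } {
  // Escape backslashes and single quotes so the title is a valid single-quoted literal.
  const escapedTitle = page.title.replace(/\\/g, '\\\\').replace(/'/g, "\\'");
  return {
    importLine: `import { LazyPage${page.id} } from './page/${page.id}/LazyPage';`,
    routeLine: `<Route path="/${page.id}" element={<LazyPage${page.id} />} />`,
    menuItem: `{ path: '/${page.id}', name: '${escapedTitle}' },`,
  };
}

// Example: renderRegistration({ id: '1lm4s6i', title: 'Inconsistent responses between OpenRouter API and native OpenAI API' })
// reproduces the three lines added for /1lm4s6i in this commit.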
0 commit comments