 import { json } from '@remix-run/cloudflare';
 import JSZip from 'jszip';

-export async function loader({ request }: { request: Request }) {
-  const url = new URL(request.url);
-  const repo = url.searchParams.get('repo');
+// Detect whether we're running on Cloudflare Pages
+function isCloudflareEnvironment(context: any): boolean {
+  // Check if we're in production AND have Cloudflare Pages specific env vars
+  const isProduction = process.env.NODE_ENV === 'production';
+  const hasCfPagesVars = !!(
+    context?.cloudflare?.env?.CF_PAGES ||
+    context?.cloudflare?.env?.CF_PAGES_URL ||
+    context?.cloudflare?.env?.CF_PAGES_COMMIT_SHA
+  );
+
+  return isProduction && hasCfPagesVars;
+}

-  if (!repo) {
-    return json({ error: 'Repository name is required' }, { status: 400 });
+// Cloudflare-compatible method using GitHub Contents API
+async function fetchRepoContentsCloudflare(repo: string, githubToken?: string) {
+  const baseUrl = 'https://api.github.com';
+
+  // Get repository info to find default branch
+  const repoResponse = await fetch(`${baseUrl}/repos/${repo}`, {
+    headers: {
+      Accept: 'application/vnd.github.v3+json',
+      'User-Agent': 'bolt.diy-app',
+      ...(githubToken ? { Authorization: `Bearer ${githubToken}` } : {}),
+    },
+  });
+
+  if (!repoResponse.ok) {
+    throw new Error(`Repository not found: ${repo}`);
   }

-  try {
-    const baseUrl = 'https://api.github.com';
+  const repoData = (await repoResponse.json()) as any;
+  const defaultBranch = repoData.default_branch;

-    // Get the latest release
-    const releaseResponse = await fetch(`${baseUrl}/repos/${repo}/releases/latest`, {
-      headers: {
-        Accept: 'application/vnd.github.v3+json',
+  // Get the tree recursively
+  const treeResponse = await fetch(`${baseUrl}/repos/${repo}/git/trees/${defaultBranch}?recursive=1`, {
+    headers: {
+      Accept: 'application/vnd.github.v3+json',
+      'User-Agent': 'bolt.diy-app',
+      ...(githubToken ? { Authorization: `Bearer ${githubToken}` } : {}),
+    },
+  });

-        // Add GitHub token if available in environment variables
-        ...(process.env.GITHUB_TOKEN ? { Authorization: `Bearer ${process.env.GITHUB_TOKEN}` } : {}),
-      },
-    });
+  if (!treeResponse.ok) {
+    throw new Error(`Failed to fetch repository tree: ${treeResponse.status}`);
+  }

-    if (!releaseResponse.ok) {
-      throw new Error(`GitHub API error: ${releaseResponse.status}`);
+  const treeData = (await treeResponse.json()) as any;
+
+  // Filter for files only (not directories) and limit size
+  const files = treeData.tree.filter((item: any) => {
+    if (item.type !== 'blob') {
+      return false;
     }

-    const releaseData = (await releaseResponse.json()) as any;
-    const zipballUrl = releaseData.zipball_url;
+    if (item.path.startsWith('.git/')) {
+      return false;
+    }

-    // Fetch the zipball
-    const zipResponse = await fetch(zipballUrl, {
-      headers: {
-        ...(process.env.GITHUB_TOKEN ? { Authorization: `Bearer ${process.env.GITHUB_TOKEN}` } : {}),
-      },
+    // Allow lock files even if they're large
+    const isLockFile =
+      item.path.endsWith('package-lock.json') ||
+      item.path.endsWith('yarn.lock') ||
+      item.path.endsWith('pnpm-lock.yaml');
+
+    // For non-lock files, limit size to 100KB
+    if (!isLockFile && item.size >= 100000) {
+      return false;
+    }
+
+    return true;
+  });
+
+  // Fetch file contents in batches to avoid overwhelming the API
+  const batchSize = 10;
+  const fileContents = [];
+
+  for (let i = 0; i < files.length; i += batchSize) {
+    const batch = files.slice(i, i + batchSize);
+    const batchPromises = batch.map(async (file: any) => {
+      try {
+        const contentResponse = await fetch(`${baseUrl}/repos/${repo}/contents/${file.path}`, {
+          headers: {
+            Accept: 'application/vnd.github.v3+json',
+            'User-Agent': 'bolt.diy-app',
+            ...(githubToken ? { Authorization: `Bearer ${githubToken}` } : {}),
+          },
+        });
+
+        if (!contentResponse.ok) {
+          console.warn(`Failed to fetch ${file.path}: ${contentResponse.status}`);
+          return null;
+        }
+
+        const contentData = (await contentResponse.json()) as any;
+        const content = atob(contentData.content.replace(/\s/g, ''));
+
+        return {
+          name: file.path.split('/').pop() || '',
+          path: file.path,
+          content,
+        };
+      } catch (error) {
+        console.warn(`Error fetching ${file.path}:`, error);
+        return null;
+      }
     });

-    if (!zipResponse.ok) {
-      throw new Error(`Failed to fetch release zipball: ${zipResponse.status}`);
+    const batchResults = await Promise.all(batchPromises);
+    fileContents.push(...batchResults.filter(Boolean));
+
+    // Add a small delay between batches to be respectful to the API
+    if (i + batchSize < files.length) {
+      await new Promise((resolve) => setTimeout(resolve, 100));
     }
+  }

-    // Get the zip content as ArrayBuffer
-    const zipArrayBuffer = await zipResponse.arrayBuffer();
+  return fileContents;
+}

-    // Use JSZip to extract the contents
-    const zip = await JSZip.loadAsync(zipArrayBuffer);
+// Zipball-based method for non-Cloudflare environments
+async function fetchRepoContentsZip(repo: string, githubToken?: string) {
+  const baseUrl = 'https://api.github.com';
+
+  // Get the latest release
+  const releaseResponse = await fetch(`${baseUrl}/repos/${repo}/releases/latest`, {
+    headers: {
+      Accept: 'application/vnd.github.v3+json',
+      'User-Agent': 'bolt.diy-app',
+      ...(githubToken ? { Authorization: `Bearer ${githubToken}` } : {}),
+    },
+  });
+
+  if (!releaseResponse.ok) {
+    throw new Error(`GitHub API error: ${releaseResponse.status} - ${releaseResponse.statusText}`);
+  }

-    // Find the root folder name
-    let rootFolderName = '';
-    zip.forEach((relativePath) => {
-      if (!rootFolderName && relativePath.includes('/')) {
-        rootFolderName = relativePath.split('/')[0];
-      }
-    });
+  const releaseData = (await releaseResponse.json()) as any;
+  const zipballUrl = releaseData.zipball_url;

-    // Extract all files
-    const promises = Object.keys(zip.files).map(async (filename) => {
-      const zipEntry = zip.files[filename];
+  // Fetch the zipball
+  const zipResponse = await fetch(zipballUrl, {
+    headers: {
+      ...(githubToken ? { Authorization: `Bearer ${githubToken}` } : {}),
+    },
+  });

-      // Skip directories
-      if (zipEntry.dir) {
-        return null;
-      }
+  if (!zipResponse.ok) {
+    throw new Error(`Failed to fetch release zipball: ${zipResponse.status}`);
+  }

-      // Skip the root folder itself
-      if (filename === rootFolderName) {
-        return null;
-      }
+  // Get the zip content as ArrayBuffer
+  const zipArrayBuffer = await zipResponse.arrayBuffer();

-      // Remove the root folder from the path
-      let normalizedPath = filename;
+  // Use JSZip to extract the contents
+  const zip = await JSZip.loadAsync(zipArrayBuffer);

-      if (rootFolderName && filename.startsWith(rootFolderName + '/')) {
-        normalizedPath = filename.substring(rootFolderName.length + 1);
-      }
+  // Find the root folder name
+  let rootFolderName = '';
+  zip.forEach((relativePath) => {
+    if (!rootFolderName && relativePath.includes('/')) {
+      rootFolderName = relativePath.split('/')[0];
+    }
+  });

-      // Get the file content
-      const content = await zipEntry.async('string');
+  // Extract all files
+  const promises = Object.keys(zip.files).map(async (filename) => {
+    const zipEntry = zip.files[filename];

-      return {
-        name: normalizedPath.split('/').pop() || '',
-        path: normalizedPath,
-        content,
-      };
-    });
+    // Skip directories
+    if (zipEntry.dir) {
+      return null;
+    }
+
+    // Skip the root folder itself
+    if (filename === rootFolderName) {
+      return null;
+    }
+
+    // Remove the root folder from the path
+    let normalizedPath = filename;
+
+    if (rootFolderName && filename.startsWith(rootFolderName + '/')) {
+      normalizedPath = filename.substring(rootFolderName.length + 1);
+    }
+
+    // Get the file content
+    const content = await zipEntry.async('string');
+
+    return {
+      name: normalizedPath.split('/').pop() || '',
+      path: normalizedPath,
+      content,
+    };
+  });
+
+  const results = await Promise.all(promises);
+
+  return results.filter(Boolean);
+}

-    const results = await Promise.all(promises);
-    const fileList = results.filter(Boolean) as { name: string; path: string; content: string }[];
+export async function loader({ request, context }: { request: Request; context: any }) {
+  const url = new URL(request.url);
+  const repo = url.searchParams.get('repo');
+
+  if (!repo) {
+    return json({ error: 'Repository name is required' }, { status: 400 });
+  }
+
+  try {
+    // Access environment variables from Cloudflare context or process.env
+    const githubToken = context?.cloudflare?.env?.GITHUB_TOKEN || process.env.GITHUB_TOKEN;
+
+    let fileList;
+
+    if (isCloudflareEnvironment(context)) {
+      fileList = await fetchRepoContentsCloudflare(repo, githubToken);
+    } else {
+      fileList = await fetchRepoContentsZip(repo, githubToken);
+    }

-    return json(fileList);
+    // Filter out .git files for both methods
+    const filteredFiles = fileList.filter((file: any) => !file.path.startsWith('.git'));
+
+    return json(filteredFiles);
   } catch (error) {
     console.error('Error processing GitHub template:', error);
-    return json({ error: 'Failed to fetch template files' }, { status: 500 });
+    console.error('Repository:', repo);
+    console.error('Error details:', error instanceof Error ? error.message : String(error));
+
+    return json(
+      {
+        error: 'Failed to fetch template files',
+        details: error instanceof Error ? error.message : String(error),
+      },
+      { status: 500 },
+    );
   }
 }
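
For reference, a successful response from this loader is a flat array of { name, path, content } objects, while failures return { error, details } with a 400 or 500 status. A minimal client-side consumption sketch follows (not part of this commit); the /api/github-template route path and the loadTemplateFiles helper are illustrative assumptions, since the route's mount path is not shown in this diff.

// Hypothetical consumer of the loader above; the route path is an assumption.
interface TemplateFile {
  name: string;
  path: string;
  content: string;
}

async function loadTemplateFiles(repo: string): Promise<TemplateFile[]> {
  const response = await fetch(`/api/github-template?repo=${encodeURIComponent(repo)}`);

  if (!response.ok) {
    // The loader responds with { error, details } on failure (400 for a missing repo param, 500 otherwise)
    const { error, details } = (await response.json()) as { error: string; details?: string };
    throw new Error(details ? `${error}: ${details}` : error);
  }

  // On success the loader responds with the flattened file list
  return (await response.json()) as TemplateFile[];
}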