/* eslint-disable no-console */
import type { CacheMetadata, SupportedFormat } from './types'
import fs from 'node:fs'
import path from 'node:path'
import process from 'node:process'
import { config } from './config'

// Cache configuration for packages
const CACHE_DIR = path.join(process.env.HOME || '.', '.cache', 'launchpad')
const BINARY_CACHE_DIR = path.join(CACHE_DIR, 'binaries', 'packages')
const CACHE_METADATA_FILE = path.join(CACHE_DIR, 'cache-metadata.json')
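
// On-disk layout (derived from the paths above):
//   ~/.cache/launchpad/cache-metadata.json: metadata for all cached packages
//   ~/.cache/launchpad/binaries/packages/<domain>-<version>/package.<format>: cached archives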

/**
 * Load cache metadata
 */
export function loadCacheMetadata(): CacheMetadata {
  try {
    if (fs.existsSync(CACHE_METADATA_FILE)) {
      const content = fs.readFileSync(CACHE_METADATA_FILE, 'utf-8')
      return JSON.parse(content)
    }
  }
  catch (error) {
    if (config.verbose) {
      console.warn('Failed to load cache metadata:', error)
    }
  }

  return { version: '1.0', packages: {} }
}

/**
 * Save cache metadata
 */
export function saveCacheMetadata(metadata: CacheMetadata): void {
  try {
    fs.mkdirSync(path.dirname(CACHE_METADATA_FILE), { recursive: true })
    fs.writeFileSync(CACHE_METADATA_FILE, JSON.stringify(metadata, null, 2))
  }
  catch (error) {
    if (config.verbose) {
      console.warn('Failed to save cache metadata:', error)
    }
  }
}
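
// Illustrative shape of cache-metadata.json as written by this module
// (the domain, version and format values below are made up for the example):
//
//   {
//     "version": "1.0",
//     "packages": {
//       "bun.sh-1.2.0": {
//         "domain": "bun.sh",
//         "version": "1.2.0",
//         "format": "tar.xz",
//         "downloadedAt": "2024-01-01T00:00:00.000Z",
//         "lastAccessed": "2024-01-01T00:00:00.000Z",
//         "size": 12345678
//       }
//     }
//   }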

/**
 * Get cached package archive path for a specific domain and version, with validation
 */
export function getCachedPackagePath(domain: string, version: string, format: SupportedFormat): string | null {
  const cacheKey = `${domain}-${version}`
  const cachedArchivePath = path.join(BINARY_CACHE_DIR, cacheKey, `package.${format}`)

  if (fs.existsSync(cachedArchivePath)) {
    // Validate cache integrity
    const metadata = loadCacheMetadata()
    const packageMeta = metadata.packages[cacheKey]

    if (packageMeta) {
      // Update last accessed time
      packageMeta.lastAccessed = new Date().toISOString()
      saveCacheMetadata(metadata)

      // Validate that the file size matches the recorded metadata
      const stats = fs.statSync(cachedArchivePath)
      if (stats.size === packageMeta.size) {
        if (config.verbose) {
          console.warn(`Found cached package: ${cachedArchivePath}`)
        }
        return cachedArchivePath
      }
      else {
        if (config.verbose) {
          console.warn(`Cache file corrupted (size mismatch): ${cachedArchivePath}`)
        }
        // Remove the corrupted cache entry
        fs.unlinkSync(cachedArchivePath)
        delete metadata.packages[cacheKey]
        saveCacheMetadata(metadata)
      }
    }
    else {
      // Cache file exists but has no metadata entry - fall back to a basic size check
      const stats = fs.statSync(cachedArchivePath)
      if (stats.size > 100) {
        if (config.verbose) {
          console.warn(`Found cached package (no metadata): ${cachedArchivePath}`)
        }
        return cachedArchivePath
      }
    }
  }

  return null
}

/**
 * Save package archive to cache with metadata
 */
export function savePackageToCache(domain: string, version: string, format: SupportedFormat, sourcePath: string): string {
  const cacheKey = `${domain}-${version}`
  const cachePackageDir = path.join(BINARY_CACHE_DIR, cacheKey)
  const cachedArchivePath = path.join(cachePackageDir, `package.${format}`)

  try {
    // Create the cache directory
    fs.mkdirSync(cachePackageDir, { recursive: true })

    // Copy the downloaded file into the cache
    fs.copyFileSync(sourcePath, cachedArchivePath)

    // Update metadata
    const metadata = loadCacheMetadata()
    const stats = fs.statSync(cachedArchivePath)
    const now = new Date().toISOString()

    metadata.packages[cacheKey] = {
      domain,
      version,
      format,
      downloadedAt: now,
      lastAccessed: now,
      size: stats.size,
    }

    saveCacheMetadata(metadata)

    if (config.verbose) {
      console.warn(`Cached package to: ${cachedArchivePath}`)
    }

    return cachedArchivePath
  }
  catch (error) {
    if (config.verbose) {
      console.warn(`Failed to cache package: ${error instanceof Error ? error.message : String(error)}`)
    }
    // Fall back to the original path if caching fails
    return sourcePath
  }
}
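
// Illustrative cache-first download flow (not part of this module); `downloadPackage`
// and the domain/version/format values are hypothetical placeholders:
//
//   let archivePath = getCachedPackagePath('bun.sh', '1.2.0', 'tar.xz')
//   if (!archivePath) {
//     const tempPath = await downloadPackage('bun.sh', '1.2.0') // hypothetical downloader
//     archivePath = savePackageToCache('bun.sh', '1.2.0', 'tar.xz', tempPath)
//   }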

/**
 * Clean up old cache entries to free disk space
 */
export function cleanupCache(maxAgeDays: number = 30, maxSizeGB: number = 5): void {
  try {
    const metadata = loadCacheMetadata()
    const now = new Date()
    const maxAgeMs = maxAgeDays * 24 * 60 * 60 * 1000
    const maxSizeBytes = maxSizeGB * 1024 * 1024 * 1024

    const packages = Object.entries(metadata.packages)
      .map(([key, pkg]) => ({
        key,
        ...pkg,
        lastAccessedDate: new Date(pkg.lastAccessed),
      }))
      .sort((a, b) => a.lastAccessedDate.getTime() - b.lastAccessedDate.getTime()) // Oldest first

    // Calculate total cache size
    const totalSize = packages.reduce((sum, pkg) => sum + pkg.size, 0)

    const toDelete: string[] = []
    let currentSize = totalSize

    // Remove packages older than maxAge
    packages.forEach((pkg) => {
      const age = now.getTime() - pkg.lastAccessedDate.getTime()
      if (age > maxAgeMs) {
        toDelete.push(pkg.key)
        currentSize -= pkg.size
      }
    })

    // If still over the size limit, remove the oldest remaining packages
    packages.forEach((pkg) => {
      if (currentSize > maxSizeBytes && !toDelete.includes(pkg.key)) {
        toDelete.push(pkg.key)
        currentSize -= pkg.size
      }
    })

    // Delete marked packages
    toDelete.forEach((key) => {
      const cacheDir = path.join(BINARY_CACHE_DIR, key)
      if (fs.existsSync(cacheDir)) {
        fs.rmSync(cacheDir, { recursive: true, force: true })
        delete metadata.packages[key]
        if (config.verbose) {
          console.warn(`Cleaned up cached package: ${key}`)
        }
      }
    })

    if (toDelete.length > 0) {
      saveCacheMetadata(metadata)
      console.log(`🧹 Cleaned up ${toDelete.length} cached packages`)
    }
  }
  catch (error) {
    if (config.verbose) {
      console.warn('Cache cleanup failed:', error)
    }
  }
}
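
// Illustrative usage: keep only packages touched in the last 14 days and cap the
// cache at roughly 2 GB (argument values are made up; the defaults are 30 days / 5 GB):
//
//   cleanupCache(14, 2)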

/**
 * Get cache statistics
 */
export function getCacheStats(): { packages: number, size: string, oldestAccess: string, newestAccess: string } {
  try {
    const metadata = loadCacheMetadata()
    const packages = Object.values(metadata.packages)

    const formatSize = (bytes: number): string => {
      if (bytes < 1024)
        return `${bytes} B`
      if (bytes < 1024 * 1024)
        return `${(bytes / 1024).toFixed(1)} KB`
      if (bytes < 1024 * 1024 * 1024)
        return `${(bytes / (1024 * 1024)).toFixed(1)} MB`
      return `${(bytes / (1024 * 1024 * 1024)).toFixed(1)} GB`
    }

    // If we have metadata, use it
    if (packages.length > 0) {
      const totalSize = packages.reduce((sum, pkg) => sum + pkg.size, 0)
      const accessTimes = packages
        .map(pkg => new Date(pkg.lastAccessed).getTime())
        .sort((a, b) => a - b) // Numeric sort; the default sort would compare timestamps as strings

      return {
        packages: packages.length,
        size: formatSize(totalSize),
        oldestAccess: new Date(accessTimes[0]).toLocaleDateString(),
        newestAccess: new Date(accessTimes[accessTimes.length - 1]).toLocaleDateString(),
      }
    }

    // Fallback: scan the actual cache directories for files
    const packageCacheDir = BINARY_CACHE_DIR
    const bunCacheDir = path.join(CACHE_DIR, 'binaries', 'bun')

    let totalFiles = 0
    let totalSize = 0
    let oldestTime = Date.now()
    let newestTime = 0

    // Scan the package cache
    if (fs.existsSync(packageCacheDir)) {
      const packageDirs = fs.readdirSync(packageCacheDir)
      for (const dir of packageDirs) {
        const dirPath = path.join(packageCacheDir, dir)
        if (fs.statSync(dirPath).isDirectory()) {
          const files = fs.readdirSync(dirPath)
          totalFiles += files.length

          for (const file of files) {
            const filePath = path.join(dirPath, file)
            const stats = fs.statSync(filePath)
            totalSize += stats.size

            const mtime = stats.mtime.getTime()
            if (mtime < oldestTime)
              oldestTime = mtime
            if (mtime > newestTime)
              newestTime = mtime
          }
        }
      }
    }

    // Scan the bun cache
    if (fs.existsSync(bunCacheDir)) {
      const bunFiles = fs.readdirSync(bunCacheDir)
      for (const file of bunFiles) {
        const filePath = path.join(bunCacheDir, file)
        const stats = fs.statSync(filePath)
        if (stats.isFile()) {
          totalFiles++
          totalSize += stats.size

          const mtime = stats.mtime.getTime()
          if (mtime < oldestTime)
            oldestTime = mtime
          if (mtime > newestTime)
            newestTime = mtime
        }
      }
    }

    return {
      packages: totalFiles,
      size: formatSize(totalSize),
      oldestAccess: totalFiles > 0 ? new Date(oldestTime).toLocaleDateString() : 'N/A',
      newestAccess: totalFiles > 0 ? new Date(newestTime).toLocaleDateString() : 'N/A',
    }
  }
  catch {
    return { packages: 0, size: 'Error', oldestAccess: 'Error', newestAccess: 'Error' }
  }
}
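
// Illustrative usage: print a one-line summary of the cache (output shape only;
// the values produced depend on what is actually cached on the machine):
//
//   const stats = getCacheStats()
//   console.log(`${stats.packages} package(s), ${stats.size} (oldest: ${stats.oldestAccess}, newest: ${stats.newestAccess})`)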