Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ coverage
benchmarks
vercel_token

cache/

# IDE
.vscode/*
!.vscode/extensions.json
Expand Down
35 changes: 35 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# ---- Build stage: install production dependencies and copy app sources ----
FROM node:lts-alpine AS builder

WORKDIR /app

COPY package*.json ./

# Install production deps plus express (required by express.js but not a
# declared dependency). Combined into one RUN to reduce image layers.
RUN npm i --omit=dev --ignore-scripts --no-audit \
    && npm i express

COPY api ./api
COPY src ./src
COPY themes ./themes
COPY vercel.json ./
COPY express.js ./

# ---- Runtime stage: minimal image containing only the built app ----
FROM node:lts-alpine

WORKDIR /app

COPY --from=builder /app /app

# Writable cache directory used by src/common/fileCache.js.
# NOTE(review): 777 is world-writable; 755 plus a non-root USER would be
# tighter — confirm whether the container is expected to run as non-root.
RUN mkdir -p /app/cache && chmod 777 /app/cache

# dotenv-cli loads .env at startup; curl is used by the HEALTHCHECK below.
# Combined into one RUN to reduce image layers.
RUN npm install -g dotenv-cli \
    && apk --no-cache add curl

ARG PORT=9000
ENV PORT=${PORT}

# Probe the API endpoint; ${PORT:-9000} is expanded by the shell at runtime.
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
  CMD curl -f "http://localhost:${PORT:-9000}/api" || exit 1

EXPOSE ${PORT}

CMD ["dotenv", "--", "node", "express.js"]
191 changes: 191 additions & 0 deletions src/common/fileCache.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,191 @@
// @ts-check

import fs from "node:fs";
import path from "node:path";
import crypto from "node:crypto";
import { logger } from "./log.js";

// Cache files live under <cwd>/cache; matches the directory created in the
// Dockerfile and ignored via .gitignore.
const CACHE_DIR = path.resolve(process.cwd(), "cache");
const DEFAULT_TTL = 24 * 60 * 60 * 1000; // 24 hours in milliseconds

/**
 * Ensure the cache directory exists, creating it (and any missing parents)
 * if necessary.
 *
 * `mkdirSync` with `recursive: true` is already a no-op when the directory
 * exists, so the prior `existsSync` guard was redundant and introduced a
 * check-then-act (TOCTOU) race between the existence check and creation.
 */
const ensureCacheDir = () => {
  fs.mkdirSync(CACHE_DIR, { recursive: true });
};

/**
 * Generate a unique cache key based on type and parameters.
 *
 * Parameters are sorted by key so the same inputs always hash identically
 * regardless of property insertion order; null/undefined values are dropped
 * before hashing. MD5 is fine here — the hash is a filename, not a
 * security boundary.
 *
 * @param {string} type The type of data being cached (e.g., 'stats', 'top-langs', 'repo').
 * @param {Record<string, any>} params The parameters that make this cache entry unique.
 * @returns {string} The cache key (filename without extension).
 */
const getCacheKey = (type, params) => {
  const entries = Object.entries(params)
    .filter(([, value]) => value !== undefined && value !== null)
    .sort(([a], [b]) => (a < b ? -1 : a > b ? 1 : 0));
  const normalized = /** @type {Record<string, any>} */ (
    Object.fromEntries(entries)
  );

  const digest = crypto
    .createHash("md5")
    .update(JSON.stringify(normalized))
    .digest("hex");

  return `${type}_${digest}`;
};

/**
 * Get the cache file path for a given key.
 *
 * @param {string} key The cache key.
 * @returns {string} The full path to the cache file.
 */
const getCacheFilePath = (key) => path.join(CACHE_DIR, `${key}.json`);

/**
 * Get cached data if it exists and is not expired.
 *
 * Best-effort: any read/parse error is logged and treated as a cache miss
 * so callers always fall back to fetching fresh data.
 *
 * @param {string} type The type of data being cached.
 * @param {Record<string, any>} params The parameters that make this cache entry unique.
 * @param {number} [ttl] The TTL in milliseconds (default: 24 hours).
 * @returns {any|null} The cached data or null if not found/expired.
 */
const getCachedData = (type, params, ttl = DEFAULT_TTL) => {
  try {
    const key = getCacheKey(type, params);
    const filePath = getCacheFilePath(key);

    if (!fs.existsSync(filePath)) {
      return null;
    }

    const fileContent = fs.readFileSync(filePath, "utf-8");
    const cached = JSON.parse(fileContent);

    // Treat entries without a numeric timestamp as expired/corrupt. The
    // previous check (`now - cached.timestamp > ttl`) evaluated
    // NaN > ttl === false for a missing timestamp, which kept a malformed
    // entry alive forever.
    const expired =
      typeof cached.timestamp !== "number" ||
      Date.now() - cached.timestamp > ttl;

    if (expired) {
      logger.log(`Cache expired for ${type}: ${key}`);
      // Best-effort cleanup of the stale file.
      try {
        fs.unlinkSync(filePath);
      } catch {
        // Ignore deletion errors
      }
      return null;
    }

    logger.log(`Cache hit for ${type}: ${key}`);
    return cached.data;
  } catch (error) {
    logger.log(`Cache read error: ${error}`);
    return null;
  }
};

/**
 * Save data to cache.
 *
 * Best-effort: write failures are logged and swallowed so a broken cache
 * never takes down the request that produced the data.
 *
 * @param {string} type The type of data being cached.
 * @param {Record<string, any>} params The parameters that make this cache entry unique.
 * @param {any} data The data to cache.
 */
const setCachedData = (type, params, data) => {
  try {
    ensureCacheDir();

    const key = getCacheKey(type, params);
    const entry = { timestamp: Date.now(), type, params, data };
    const serialized = JSON.stringify(entry, null, 2);

    fs.writeFileSync(getCacheFilePath(key), serialized, "utf-8");
    logger.log(`Cache saved for ${type}: ${key}`);
  } catch (error) {
    logger.log(`Cache write error: ${error}`);
    // Don't throw - caching is best-effort
  }
};

/**
 * Clear all cached data or data of a specific type.
 *
 * @param {string} [type] Optional type to clear only specific cache entries.
 */
const clearCache = (type) => {
  try {
    if (!fs.existsSync(CACHE_DIR)) {
      return;
    }

    // An empty prefix matches every file, covering the "clear all" case.
    const prefix = type ? `${type}_` : "";
    fs.readdirSync(CACHE_DIR)
      .filter((file) => file.startsWith(prefix))
      .forEach((file) => fs.unlinkSync(path.join(CACHE_DIR, file)));

    logger.log(`Cache cleared${type ? ` for type: ${type}` : ""}`);
  } catch (error) {
    logger.log(`Cache clear error: ${error}`);
  }
};

/**
 * Get cache statistics.
 *
 * @returns {{ totalFiles: number, totalSize: number, types: Record<string, number> }} Cache statistics.
 */
const getCacheStats = () => {
  const stats = {
    totalFiles: 0,
    totalSize: 0,
    types: /** @type {Record<string, number>} */ ({}),
  };

  try {
    if (!fs.existsSync(CACHE_DIR)) {
      return stats;
    }

    for (const file of fs.readdirSync(CACHE_DIR)) {
      const { size } = fs.statSync(path.join(CACHE_DIR, file));
      stats.totalFiles += 1;
      stats.totalSize += size;

      // The type is the filename prefix before the first underscore
      // (see getCacheKey's `${type}_${hash}` format).
      const [entryType] = file.split("_");
      stats.types[entryType] = (stats.types[entryType] ?? 0) + 1;
    }
  } catch (error) {
    logger.log(`Cache stats error: ${error}`);
  }

  return stats;
};

// Public API: key generation, cache read/write, maintenance helpers, and
// the constants callers may need (cache directory location, default TTL).
export {
  getCacheKey,
  getCachedData,
  setCachedData,
  clearCache,
  getCacheStats,
  CACHE_DIR,
  DEFAULT_TTL,
};
18 changes: 17 additions & 1 deletion src/fetchers/gist.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
import { retryer } from "../common/retryer.js";
import { MissingParamError } from "../common/error.js";
import { request } from "../common/http.js";
import { getCachedData, setCachedData } from "../common/fileCache.js";
import { logger } from "../common/log.js";

const QUERY = `
query gistInfo($gistName: String!) {
Expand Down Expand Up @@ -90,6 +92,15 @@ const fetchGist = async (id) => {
if (!id) {
throw new MissingParamError(["id"], "/api/gist?id=GIST_ID");
}

// Check cache first
const cacheParams = { id };
const cachedGist = getCachedData("gist", cacheParams);
if (cachedGist) {
logger.log(`Returning cached gist data for ${id}`);
return cachedGist;
}

const res = await retryer(fetcher, { gistName: id });
if (res.data.errors) {
throw new Error(res.data.errors[0].message);
Expand All @@ -98,7 +109,7 @@ const fetchGist = async (id) => {
throw new Error("Gist not found");
}
const data = res.data.data.viewer.gist;
return {
const gistData = {
name: data.files[Object.keys(data.files)[0]].name,
nameWithOwner: `${data.owner.login}/${
data.files[Object.keys(data.files)[0]].name
Expand All @@ -108,6 +119,11 @@ const fetchGist = async (id) => {
starsCount: data.stargazerCount,
forksCount: data.forks.totalCount,
};

// Save to cache
setCachedData("gist", cacheParams, gistData);

return gistData;
};

export { fetchGist };
Expand Down
20 changes: 18 additions & 2 deletions src/fetchers/repo.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
import { MissingParamError } from "../common/error.js";
import { request } from "../common/http.js";
import { retryer } from "../common/retryer.js";
import { getCachedData, setCachedData } from "../common/fileCache.js";
import { logger } from "../common/log.js";

/**
* Repo data fetcher.
Expand Down Expand Up @@ -77,6 +79,14 @@ const fetchRepo = async (username, reponame) => {
throw new MissingParamError(["repo"], urlExample);
}

// Check cache first
const cacheParams = { username, reponame };
const cachedRepo = getCachedData("repo", cacheParams);
if (cachedRepo) {
logger.log(`Returning cached repo data for ${username}/${reponame}`);
return cachedRepo;
}

let res = await retryer(fetcher, { login: username, repo: reponame });

const data = res.data.data;
Expand All @@ -92,10 +102,13 @@ const fetchRepo = async (username, reponame) => {
if (!data.user.repository || data.user.repository.isPrivate) {
throw new Error("User Repository Not found");
}
return {
const repoData = {
...data.user.repository,
starCount: data.user.repository.stargazers.totalCount,
};
// Save to cache
setCachedData("repo", cacheParams, repoData);
return repoData;
}

if (isOrg) {
Expand All @@ -105,10 +118,13 @@ const fetchRepo = async (username, reponame) => {
) {
throw new Error("Organization Repository Not found");
}
return {
const repoData = {
...data.organization.repository,
starCount: data.organization.repository.stargazers.totalCount,
};
// Save to cache
setCachedData("repo", cacheParams, repoData);
return repoData;
}

throw new Error("Unexpected behavior");
Expand Down
20 changes: 20 additions & 0 deletions src/fetchers/stats.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import { excludeRepositories } from "../common/envs.js";
import { CustomError, MissingParamError } from "../common/error.js";
import { wrapTextMultiline } from "../common/fmt.js";
import { request } from "../common/http.js";
import { getCachedData, setCachedData } from "../common/fileCache.js";

dotenv.config();

Expand Down Expand Up @@ -237,6 +238,22 @@ const fetchStats = async (
throw new MissingParamError(["username"]);
}

// Check cache first
const cacheParams = {
username,
include_all_commits,
exclude_repo: exclude_repo.sort().join(","),
include_merged_pull_requests,
include_discussions,
include_discussions_answers,
commits_year,
};
const cachedStats = getCachedData("stats", cacheParams);
if (cachedStats) {
logger.log(`Returning cached stats for ${username}`);
return cachedStats;
}

const stats = {
name: "",
totalPRs: 0,
Expand Down Expand Up @@ -333,6 +350,9 @@ const fetchStats = async (
followers: user.followers.totalCount,
});

// Save to cache
setCachedData("stats", cacheParams, stats);

return stats;
};

Expand Down
Loading