128 changes: 95 additions & 33 deletions src/lib/statsProvider.tsx
@@ -1,4 +1,3 @@
// src/lib/statsProvider.tsx

/** @jsxImportSource react */
import React, {
@@ -66,7 +65,9 @@ interface CommunityStatsProviderProps {

const GITHUB_ORG = "recodehive";
const POINTS_PER_PR = 10;
const MAX_CONCURRENT_REQUESTS = 5; // Limit concurrent requests to avoid rate limiting
const MAX_CONCURRENT_REQUESTS = 8; // Increased for better performance
const CACHE_DURATION = 5 * 60 * 1000; // 5 minutes cache
const MAX_PAGES_PER_REPO = 20; // Limit pages to prevent infinite loops on huge repos

export function CommunityStatsProvider({ children }: CommunityStatsProviderProps) {
const {
@@ -86,6 +87,12 @@ export function CommunityStatsProvider({ children }: CommunityStatsProviderProps
// New state for leaderboard data
const [contributors, setContributors] = useState<Contributor[]>([]);
const [stats, setStats] = useState<Stats | null>(null);

// Cache state
const [cache, setCache] = useState<{
data: { contributors: Contributor[]; stats: Stats } | null;
timestamp: number;
}>({ data: null, timestamp: 0 });

const fetchAllOrgRepos = useCallback(async (headers: Record<string, string>) => {
const repos: any[] = [];
@@ -108,28 +115,60 @@ export function CommunityStatsProvider({ children }: CommunityStatsProviderProps
const fetchMergedPRsForRepo = useCallback(async (repoName: string, headers: Record<string, string>) => {
const mergedPRs: PullRequestItem[] = [];
let page = 1;
while (true) {
const resp = await fetch(
`https://api.github.com/repos/${GITHUB_ORG}/${repoName}/pulls?state=closed&per_page=100&page=${page}`,
{ headers }

// Create promises for parallel pagination
const pagePromises: Promise<PullRequestItem[]>[] = [];

// First, get the first page to estimate total pages
const firstResp = await fetch(
`https://api.github.com/repos/${GITHUB_ORG}/${repoName}/pulls?state=closed&per_page=100&page=1`,
{ headers }
);

if (!firstResp.ok) {
console.warn(`Failed to fetch PRs for ${repoName}: ${firstResp.status} ${firstResp.statusText}`);
return [];
}

const firstPRs: PullRequestItem[] = await firstResp.json();
if (!Array.isArray(firstPRs) || firstPRs.length === 0) return [];

const firstPageMerged = firstPRs.filter((pr) => Boolean(pr.merged_at));
mergedPRs.push(...firstPageMerged);

// If we got less than 100, that's all there is
if (firstPRs.length < 100) return mergedPRs;

// Estimate remaining pages (with a reasonable limit)
const maxPages = Math.min(MAX_PAGES_PER_REPO, 10); // Conservative estimate

// Create parallel requests for remaining pages
for (let i = 2; i <= maxPages; i++) {
pagePromises.push(
fetch(
`https://api.github.com/repos/${GITHUB_ORG}/${repoName}/pulls?state=closed&per_page=100&page=${i}`,
{ headers }
)
.then(async (resp) => {
if (!resp.ok) return [];
const prs: PullRequestItem[] = await resp.json();
if (!Array.isArray(prs)) return [];
return prs.filter((pr) => Boolean(pr.merged_at));
})
.catch(() => [])
);
if (!resp.ok) {
console.warn(`Failed to fetch PRs for ${repoName}: ${resp.status} ${resp.statusText}`);
break;
}
const prs: PullRequestItem[] = await resp.json();
if (!Array.isArray(prs) || prs.length === 0) break;

const merged = prs.filter((pr) => Boolean(pr.merged_at));
mergedPRs.push(...merged);

if (prs.length < 100) break;
page++;
}

// Wait for all pages in parallel
const remainingPages = await Promise.all(pagePromises);
remainingPages.forEach(pagePRs => {
if (pagePRs.length > 0) mergedPRs.push(...pagePRs);
});

return mergedPRs;
}, []);

// NEW: Concurrent processing function with controlled concurrency
// Concurrent processing function with controlled concurrency
const processBatch = useCallback(async (
repos: any[],
headers: Record<string, string>
@@ -184,6 +223,17 @@ export function CommunityStatsProvider({ children }: CommunityStatsProviderProps
const fetchAllStats = useCallback(async (signal: AbortSignal) => {
setLoading(true);
setError(null);

// Check cache first
const now = Date.now();
if (cache.data && (now - cache.timestamp) < CACHE_DURATION) {
// console.log('Using cached leaderboard data');
setContributors(cache.data.contributors);
setStats(cache.data.stats);
setLoading(false);
return;
}

if (!token) {
setError("GitHub token not found. Please set customFields.gitToken in docusaurus.config.js.");
setLoading(false);
@@ -196,29 +246,40 @@ export function CommunityStatsProvider({ children }: CommunityStatsProviderProps
Accept: "application/vnd.github.v3+json",
};

// Fetch general organization stats (unchanged)
const orgStats: GitHubOrgStats = await githubService.fetchOrganizationStats(signal);
// Fetch both org stats and repos in parallel
const [orgStats, repos] = await Promise.all([
githubService.fetchOrganizationStats(signal),
fetchAllOrgRepos(headers)
]);

// Set org stats immediately
setGithubStarCount(orgStats.totalStars);
setGithubContributorsCount(orgStats.totalContributors);
setGithubForksCount(orgStats.totalForks);
setGithubReposCount(orgStats.publicRepositories);
setGithubDiscussionsCount(orgStats.discussionsCount);
setLastUpdated(new Date(orgStats.lastUpdated));

// Fetch leaderboard data with concurrent processing
const repos = await fetchAllOrgRepos(headers);

// NEW: Use concurrent processing instead of sequential
// Process leaderboard data with concurrent processing
const { contributorMap, totalMergedPRs } = await processBatch(repos, headers);

const sortedContributors = Array.from(contributorMap.values()).sort(
(a, b) => b.points - a.points || b.prs - a.prs
);
setContributors(sortedContributors);
setStats({

const statsData = {
flooredTotalPRs: totalMergedPRs,
totalContributors: sortedContributors.length,
flooredTotalPoints: sortedContributors.reduce((sum, c) => sum + c.points, 0),
};

setContributors(sortedContributors);
setStats(statsData);

// Cache the results
setCache({
data: { contributors: sortedContributors, stats: statsData },
timestamp: now
});

} catch (err: any) {
@@ -236,12 +297,13 @@ export function CommunityStatsProvider({ children }: CommunityStatsProviderProps
} finally {
setLoading(false);
}
}, [token, fetchAllOrgRepos, processBatch]);
}, [token, fetchAllOrgRepos, processBatch, cache]);

const clearCache = useCallback(() => {
githubService.clearCache();
setCache({ data: null, timestamp: 0 }); // Clear local cache too
const abortController = new AbortController();
fetchAllStats(abortController.signal);
fetchAllStats(abortController.signal);// Refetch data after clearing cache
}, [fetchAllStats]);

useEffect(() => {
@@ -296,16 +358,16 @@ export const useCommunityStatsContext = (): ICommunityStatsContext => {

export const convertStatToText = (num: number): string => {
const hasIntlSupport =
typeof Intl === "object" && Intl && typeof Intl.NumberFormat === "function";
typeof Intl === "object" && Intl && typeof Intl.NumberFormat === "function";

if (!hasIntlSupport) {
return `${(num / 1000).toFixed(1)}k`;
return `${(num / 1000).toFixed(1)}k`; // Fallback for environments without Intl support
}

const formatter = new Intl.NumberFormat("en-US", {
notation: "compact",
notation: "compact",
compactDisplay: "short",
maximumSignificantDigits: 3,
maximumSignificantDigits: 3, // More precise formatting
});
return formatter.format(num);
};
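
Note: the body of processBatch is collapsed in this diff view, so only its signature, the MAX_CONCURRENT_REQUESTS constant, and the call site `await processBatch(repos, headers)` are visible. The following is a minimal, standalone sketch of the controlled-concurrency pattern those pieces imply; the SketchPullRequestItem and SketchContributor shapes and the injected fetchMergedPRsForRepo parameter are assumptions for illustration, not the PR's actual implementation (which closes over the provider's own helpers and constants).

// Hedged sketch of controlled-concurrency batching; types and the injected
// fetcher are illustrative assumptions, not the PR's real code.
interface SketchPullRequestItem {
  merged_at: string | null;
  user: { login: string; avatar_url: string } | null;
}

interface SketchContributor {
  username: string;
  avatarUrl: string;
  prs: number;
  points: number;
}

const BATCH_SIZE = 8;   // mirrors MAX_CONCURRENT_REQUESTS in the diff
const PR_POINTS = 10;   // mirrors POINTS_PER_PR in the diff

async function processBatchSketch(
  repos: { name: string }[],
  fetchMergedPRsForRepo: (repoName: string) => Promise<SketchPullRequestItem[]>
): Promise<{ contributorMap: Map<string, SketchContributor>; totalMergedPRs: number }> {
  const contributorMap = new Map<string, SketchContributor>();
  let totalMergedPRs = 0;

  // Walk the repo list in fixed-size slices so at most BATCH_SIZE
  // repo fetches are in flight at the same time.
  for (let i = 0; i < repos.length; i += BATCH_SIZE) {
    const batch = repos.slice(i, i + BATCH_SIZE);
    const results = await Promise.all(
      batch.map((repo) => fetchMergedPRsForRepo(repo.name).catch(() => []))
    );

    // Aggregate merged PRs and points per author.
    for (const mergedPRs of results) {
      totalMergedPRs += mergedPRs.length;
      for (const pr of mergedPRs) {
        if (!pr.user) continue;
        const existing = contributorMap.get(pr.user.login);
        if (existing) {
          existing.prs += 1;
          existing.points += PR_POINTS;
        } else {
          contributorMap.set(pr.user.login, {
            username: pr.user.login,
            avatarUrl: pr.user.avatar_url,
            prs: 1,
            points: PR_POINTS,
          });
        }
      }
    }
  }

  return { contributorMap, totalMergedPRs };
}

Batching with Promise.all keeps the request fan-out bounded without pulling in a queue library, at the cost of waiting for the slowest repo in each slice before starting the next one.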