cleanup
Some checks failed
Build and Deploy / build (push) Failing after 19s

This commit is contained in:
Joakim Repomaa
2026-02-19 21:51:10 +02:00
parent 782f46f69f
commit 96171576c7
32 changed files with 487 additions and 524 deletions

View File

@@ -1,17 +1,21 @@
import { filter, map, pipe, sortBy, take, uniqueBy } from 'remeda';
import type { Project } from './content/types.js';
const GITHUB_API_BASE = 'https://api.github.com';
const MAX_RETRIES = 3;
const INITIAL_RETRY_DELAY = 1000;
const MAX_RETRY_DELAY = 10000;
const RATE_LIMIT_DELAY = 60000;
interface SearchIssue {
const EXCLUDED_OWNERS = new Set(['everii-Group', 'hundertzehn', 'meso-unimpressed']);
interface SearchItem {
repository_url: string;
pull_request?: {
merged_at: string | null;
url: string;
};
pull_request?: { merged_at: string | null };
}
interface SearchResponse {
items: SearchIssue[];
items: SearchItem[];
total_count: number;
}
@@ -25,211 +29,167 @@ interface RepoInfo {
language: string | null;
}
// Retry configuration
const MAX_RETRIES = 3;
const INITIAL_RETRY_DELAY = 1000; // 1 second
const MAX_RETRY_DELAY = 10000; // 10 seconds
const RATE_LIMIT_DELAY = 60000; // 1 minute for rate limit (403/429)
// Owners to exclude from contributed repos
const EXCLUDED_REPO_OWNERS = new Set(['everii-Group', 'hundertzehn', 'meso-unimpressed']);
/**
 * Builds the HTTP headers for GitHub API requests.
 * Includes an Authorization header when a GITHUB_TOKEN is present in the
 * environment (grants higher rate limits during builds).
 */
function getHeaders(): Record<string, string> {
  const token = process.env.GITHUB_TOKEN;
  if (token) {
    return { Accept: 'application/vnd.github.v3+json', Authorization: `token ${token}` };
  }
  return { Accept: 'application/vnd.github.v3+json' };
}
// Exponential backoff retry for fetch
async function fetchWithRetry(
url: string,
options: RequestInit = {},
retryCount = 0
): Promise<Response> {
async function fetchWithRetry(url: string, retryCount = 0): Promise<Response> {
try {
const response = await fetch(url, { ...options, headers: getHeaders() });
const response = await fetch(url, { headers: getHeaders() });
if (response.status === 429) {
if (retryCount < MAX_RETRIES) {
const retryAfter = response.headers.get('retry-after');
const delay = retryAfter
? parseInt(retryAfter, 10) * 1000
: Math.min(RATE_LIMIT_DELAY, INITIAL_RETRY_DELAY * Math.pow(2, retryCount));
if (response.status === 429 && retryCount < MAX_RETRIES) {
const retryAfter = response.headers.get('retry-after');
const delay = retryAfter
? parseInt(retryAfter, 10) * 1000
: Math.min(RATE_LIMIT_DELAY, INITIAL_RETRY_DELAY * Math.pow(2, retryCount));
console.warn(
`Rate limited for ${url}, waiting ${delay}ms before retry ${retryCount + 1}/${MAX_RETRIES}`
);
return new Promise((resolve) =>
setTimeout(() => resolve(fetchWithRetry(url, options, retryCount + 1)), delay)
);
}
console.warn(
`Rate limited for ${url}, waiting ${delay}ms before retry ${retryCount + 1}/${MAX_RETRIES}`
);
await new Promise((resolve) => setTimeout(resolve, delay));
return fetchWithRetry(url, retryCount + 1);
}
return response;
} catch (error) {
// Network errors (timeout, connection refused, etc.)
if (retryCount < MAX_RETRIES) {
const delay = Math.min(MAX_RETRY_DELAY, INITIAL_RETRY_DELAY * Math.pow(2, retryCount));
console.warn(
`Network error for ${url}, retrying in ${delay}ms (${retryCount + 1}/${MAX_RETRIES}):`,
error
);
return new Promise((resolve) =>
setTimeout(() => resolve(fetchWithRetry(url, options, retryCount + 1)), delay)
);
await new Promise((resolve) => setTimeout(resolve, delay));
return fetchWithRetry(url, retryCount + 1);
}
throw error;
}
}
/**
 * Logs a diagnostic for a failed GitHub API response and returns null
 * so callers can use it as a fall-through value.
 */
function handleApiError(response: Response, context: string): null {
  const { status, statusText } = response;
  const isRateLimited = status === 403 || status === 429;
  if (isRateLimited) {
    console.warn(
      `GitHub API rate limit exceeded for ${context}. Set GITHUB_TOKEN env var for higher limits.`
    );
    return null;
  }
  console.error(`GitHub API error: ${status} ${statusText}`);
  return null;
}
/**
 * Converts a raw GitHub repository payload into the site's Project shape.
 * Prefers full_name ("owner/name") when present; normalizes nullable
 * description/language fields.
 */
function mapRepoToProject(repo: {
  name: string;
  full_name?: string;
  description: string | null;
  html_url: string;
  stargazers_count: number;
  forks_count: number;
  language: string | null;
}): Project {
  const { name, full_name, description, html_url, stargazers_count, forks_count, language } = repo;
  return {
    name: full_name ?? name,
    description: description ?? '',
    url: html_url,
    stars: stargazers_count,
    forks: forks_count,
    language: language ?? undefined,
    isFork: false,
  };
}
/**
 * Fetches the user's own (non-fork) repositories via the GitHub search API,
 * sorted by stars descending, mapped to Project objects.
 * Returns [] on any API error (logged via handleApiError) or thrown failure.
 */
export async function fetchGitHubProjects(username: string): Promise<Project[]> {
  try {
    // Search API filters out forks and sorts by stars server-side.
    const query = encodeURIComponent(`user:${username} fork:false`);
    const response = await fetchWithRetry(
      `${GITHUB_API_BASE}/search/repositories?q=${query}&sort=stars&order=desc&per_page=6`
    );
    if (!response.ok) {
      handleApiError(response, 'user repos');
      return [];
    }
    const data = await response.json();
    return data.items.map(mapRepoToProject);
  } catch (error) {
    console.error('Error fetching GitHub projects:', error);
    return [];
  }
}
/** Extracts the owner segment from an API repo URL (".../repos/<owner>/<name>"). */
function getRepoOwner(repoUrl: string): string | null {
  const parts = /\/repos\/([^\/]+)\/([^\/]+)$/.exec(repoUrl);
  if (parts === null) return null;
  return parts[1] ?? null;
}
/** True when the item's repository owner can be parsed and is not on the exclude list. */
function isNotExcluded(item: SearchItem): boolean {
  const owner = getRepoOwner(item.repository_url);
  if (owner === null) return false;
  return !EXCLUDED_OWNERS.has(owner);
}
/**
 * Fetches repository details and builds a Project whose URL points at the
 * user's merged PRs in that repo. Returns null on any fetch/HTTP failure
 * (logged as a warning) so the caller can filter it out.
 */
async function fetchRepoAsProject(repoUrl: string, username: string): Promise<Project | null> {
  try {
    const res = await fetchWithRetry(repoUrl);
    if (!res.ok) {
      console.warn(`Could not fetch repo ${repoUrl}: ${res.status}`);
      return null;
    }
    const info: RepoInfo = await res.json();
    const [owner, name] = info.full_name.split('/');
    // Link to the user's merged PRs rather than the repo itself.
    const mergedPrsUrl = `https://github.com/${owner}/${name}/pulls?q=is:pr+author:${encodeURIComponent(username)}+is:merged`;
    const project = mapRepoToProject(info);
    return { ...project, url: mergedPrsUrl };
  } catch (error) {
    console.warn(`Error fetching repo details for ${repoUrl}:`, error);
    return null;
  }
}
export async function fetchContributedRepos(username: string): Promise<Project[]> {
try {
// Search for merged PRs by this user
const searchResponse = await fetchWithRetry(
`${GITHUB_API_BASE}/search/issues?q=${encodeURIComponent(`type:pr author:${username} is:merged`)}&per_page=100`,
{}
const query = encodeURIComponent(`type:pr author:${username} is:merged`);
const response = await fetchWithRetry(
`${GITHUB_API_BASE}/search/issues?q=${query}&per_page=100`
);
if (!searchResponse.ok) {
if (searchResponse.status === 403 || searchResponse.status === 429) {
console.warn(
`GitHub Search API rate limit exceeded. Set GITHUB_TOKEN env var for higher limits.`
);
} else {
console.error(
`GitHub Search API error: ${searchResponse.status} ${searchResponse.statusText}`
);
}
if (!response.ok) {
handleApiError(response, 'search');
return [];
}
const searchData: SearchResponse = await searchResponse.json();
const { total_count, items }: SearchResponse = await response.json();
if (!total_count || !items?.length) return [];
if (searchData.total_count === 0 || !searchData.items || searchData.items.length === 0) {
return [];
}
// Extract unique repositories from closed PRs
const repoData = new Map<string, { owner: string; name: string; stars: number }>();
for (const item of searchData.items) {
if (item.pull_request?.merged_at && item.repository_url) {
const repoUrl = item.repository_url;
if (!repoData.has(repoUrl)) {
// Parse owner and repo name from API URL: https://api.github.com/repos/owner/name
const match = repoUrl.match(/\/repos\/([^\/]+)\/([^\/]+)$/);
if (match) {
const [, owner, name] = match;
// Skip repos from excluded owners
if (!EXCLUDED_REPO_OWNERS.has(owner)) {
repoData.set(repoUrl, { owner, name, stars: 0 });
}
}
}
}
}
if (repoData.size === 0) {
return [];
}
const repos = await Promise.all(
[...repoData.entries()].map(async ([repoUrl, data]) => {
try {
const repoResponse = await fetchWithRetry(repoUrl, {});
if (!repoResponse.ok) {
console.warn(`Could not fetch repo ${repoUrl}: ${repoResponse.status}`);
return null;
}
const repo: RepoInfo = await repoResponse.json();
// Create URL to user's closed PRs in this repo
const prsUrl = `https://github.com/${data.owner}/${data.name}/pulls?q=is:pr+author:${encodeURIComponent(username)}+is:merged`;
return {
name: repo.full_name,
description: repo.description ?? '',
url: prsUrl,
stars: repo.stargazers_count,
forks: repo.forks_count,
language: repo.language ?? undefined,
isFork: false,
};
} catch (error) {
console.warn(`Error fetching repo details for ${repoUrl}:`, error);
return null;
}
})
const repoUrls = pipe(
items,
filter(isNotExcluded),
uniqueBy((item) => item.repository_url),
map((item) => item.repository_url)
);
// Sort by stars descending and take top 5
return repos
.filter((repo) => repo !== null)
.sort((a, b) => b.stars - a.stars)
.slice(0, 5);
if (repoUrls.length === 0) return [];
const projects = await Promise.all(repoUrls.map((url) => fetchRepoAsProject(url, username)));
return pipe(
projects,
filter((p): p is Project => p !== null),
sortBy([(p) => p.stars, 'desc']),
take(6)
);
} catch (error) {
console.error('Error fetching contributed repos:', error);
return [];
}
}
/**
 * Returns up to `limit` projects from the front of the array, without
 * mutating the input (plain Array.prototype.slice semantics).
 */
export function getTopProjects(projects: Project[], limit: number): Project[] {
  return projects.slice(0, limit);
}