Added gitea-mirror

This commit is contained in:
2026-01-19 08:34:20 +01:00
parent b956b07d1e
commit e19aff248d
385 changed files with 81357 additions and 0 deletions
+171
View File
@@ -0,0 +1,171 @@
// Base API URL
const API_BASE = "/api";
// Helper function for API requests
async function apiRequest<T>(
endpoint: string,
options: RequestInit = {}
): Promise<T> {
const url = `${API_BASE}${endpoint}`;
const headers = {
"Content-Type": "application/json",
...options.headers,
};
const response = await fetch(url, {
...options,
headers,
});
if (!response.ok) {
const error = await response.json().catch(() => ({
message: "An unknown error occurred",
}));
throw new Error(error.message || "An unknown error occurred");
}
return response.json();
}
// Auth API
export const authApi = {
login: async (username: string, password: string) => {
const res = await fetch(`${API_BASE}/auth/login`, {
method: "POST",
headers: { "Content-Type": "application/json" },
credentials: "include", // Send cookies
body: JSON.stringify({ username, password }),
});
if (!res.ok) throw new Error("Login failed");
return await res.json(); // returns user
},
register: async (username: string, email: string, password: string) => {
const res = await fetch(`${API_BASE}/auth/register`, {
method: "POST",
headers: { "Content-Type": "application/json" },
credentials: "include",
body: JSON.stringify({ username, email, password }),
});
if (!res.ok) throw new Error("Registration failed");
return await res.json(); // returns user
},
getCurrentUser: async () => {
const res = await fetch(`${API_BASE}/auth`, {
method: "GET",
credentials: "include", // Send cookies
});
if (!res.ok) throw new Error("Not authenticated");
return await res.json();
},
logout: async () => {
await fetch(`${API_BASE}/auth/logout`, {
method: "POST",
credentials: "include",
});
},
};
// GitHub API
export const githubApi = {
testConnection: (token: string) =>
apiRequest<{ success: boolean }>("/github/test-connection", {
method: "POST",
body: JSON.stringify({ token }),
}),
};
// Gitea API
export const giteaApi = {
testConnection: (url: string, token: string) =>
apiRequest<{ success: boolean }>("/gitea/test-connection", {
method: "POST",
body: JSON.stringify({ url, token }),
}),
};
// Health API
export interface HealthResponse {
status: "ok" | "error";
timestamp: string;
version: string;
latestVersion: string;
updateAvailable: boolean;
database: {
connected: boolean;
message: string;
};
system: {
uptime: {
startTime: string;
uptimeMs: number;
formatted: string;
};
memory: {
rss: string;
heapTotal: string;
heapUsed: string;
external: string;
systemTotal: string;
systemFree: string;
};
os: {
platform: string;
version: string;
arch: string;
};
env: string;
};
error?: string;
}
export const healthApi = {
check: async (): Promise<HealthResponse> => {
try {
const response = await fetch(`${API_BASE}/health`);
if (!response.ok) {
const errorData = await response.json().catch(() => ({
status: "error",
error: "Failed to parse error response",
}));
return {
...errorData,
status: "error",
timestamp: new Date().toISOString(),
} as HealthResponse;
}
return await response.json();
} catch (error) {
return {
status: "error",
timestamp: new Date().toISOString(),
error: error instanceof Error ? error.message : "Unknown error checking health",
version: "unknown",
latestVersion: "unknown",
updateAvailable: false,
database: { connected: false, message: "Failed to connect to API" },
system: {
uptime: { startTime: "", uptimeMs: 0, formatted: "N/A" },
memory: {
rss: "N/A",
heapTotal: "N/A",
heapUsed: "N/A",
external: "N/A",
systemTotal: "N/A",
systemFree: "N/A",
},
os: { platform: "", version: "", arch: "" },
env: "",
},
};
}
},
};
@@ -0,0 +1,64 @@
import "@/lib/polyfills/buffer";
import { createAuthClient } from "better-auth/react";
import { oidcClient } from "better-auth/client/plugins";
import { ssoClient } from "@better-auth/sso/client";
import type { Session as BetterAuthSession, User as BetterAuthUser } from "better-auth";
export const authClient = createAuthClient({
// Use PUBLIC_BETTER_AUTH_URL if set (for multi-origin access), otherwise use current origin
// This allows the client to connect to the auth server even when accessed from different origins
baseURL: (() => {
let url: string | undefined;
// Check for public environment variable first (for client-side access)
if (typeof import.meta !== 'undefined' && import.meta.env?.PUBLIC_BETTER_AUTH_URL) {
url = import.meta.env.PUBLIC_BETTER_AUTH_URL;
}
// Validate and clean the URL if provided
if (url && typeof url === 'string' && url.trim() !== '') {
try {
// Validate URL format and remove trailing slash
const validatedUrl = new URL(url.trim());
return validatedUrl.origin; // Use origin to ensure clean URL without path
} catch (e) {
console.warn(`Invalid PUBLIC_BETTER_AUTH_URL: ${url}, falling back to default`);
}
}
// Fall back to current origin if running in browser
if (typeof window !== 'undefined' && window.location?.origin) {
return window.location.origin;
}
// Default for SSR - always return a valid URL
return 'http://localhost:4321';
})(),
basePath: '/api/auth', // Explicitly set the base path
plugins: [
oidcClient(),
ssoClient(),
],
});
// Export commonly used methods for convenience
export const {
signIn,
signUp,
signOut,
useSession,
sendVerificationEmail,
resetPassword,
requestPasswordReset,
getSession
} = authClient;
// Export types - directly use the types from better-auth
export type Session = BetterAuthSession & {
user: BetterAuthUser & {
username?: string | null;
};
};
export type AuthUser = BetterAuthUser & {
username?: string | null;
};
+139
View File
@@ -0,0 +1,139 @@
import { db, users } from "./db";
import { eq } from "drizzle-orm";
import { nanoid } from "nanoid";
export interface HeaderAuthConfig {
enabled: boolean;
userHeader: string;
emailHeader?: string;
nameHeader?: string;
autoProvision: boolean;
allowedDomains?: string[];
}
// Default configuration - DISABLED by default
export const defaultHeaderAuthConfig: HeaderAuthConfig = {
enabled: false,
userHeader: "X-Authentik-Username", // Common header name
emailHeader: "X-Authentik-Email",
nameHeader: "X-Authentik-Name",
autoProvision: false,
allowedDomains: [],
};
// Get header auth config from environment or database
export function getHeaderAuthConfig(): HeaderAuthConfig {
// Check environment variables for header auth config
const envConfig: Partial<HeaderAuthConfig> = {
enabled: process.env.HEADER_AUTH_ENABLED === "true",
userHeader: process.env.HEADER_AUTH_USER_HEADER || defaultHeaderAuthConfig.userHeader,
emailHeader: process.env.HEADER_AUTH_EMAIL_HEADER || defaultHeaderAuthConfig.emailHeader,
nameHeader: process.env.HEADER_AUTH_NAME_HEADER || defaultHeaderAuthConfig.nameHeader,
autoProvision: process.env.HEADER_AUTH_AUTO_PROVISION === "true",
allowedDomains: process.env.HEADER_AUTH_ALLOWED_DOMAINS?.split(",").map(d => d.trim()),
};
return {
...defaultHeaderAuthConfig,
...envConfig,
};
}
// Check if header authentication is enabled
export function isHeaderAuthEnabled(): boolean {
const config = getHeaderAuthConfig();
return config.enabled === true;
}
// Extract user info from headers
export function extractUserFromHeaders(headers: Headers): {
username?: string;
email?: string;
name?: string;
} | null {
const config = getHeaderAuthConfig();
if (!config.enabled) {
return null;
}
const username = headers.get(config.userHeader);
const email = config.emailHeader ? headers.get(config.emailHeader) : undefined;
const name = config.nameHeader ? headers.get(config.nameHeader) : undefined;
if (!username) {
return null;
}
// If allowed domains are configured, check email domain
if (config.allowedDomains && config.allowedDomains.length > 0 && email) {
const domain = email.split("@")[1];
if (!config.allowedDomains.includes(domain)) {
console.warn(`Header auth rejected: email domain ${domain} not in allowed list`);
return null;
}
}
return {
username: username || undefined,
email: email || undefined,
name: name || undefined
};
}
// Find or create user from header auth
export async function authenticateWithHeaders(headers: Headers) {
const userInfo = extractUserFromHeaders(headers);
if (!userInfo || !userInfo.username) {
return null;
}
const config = getHeaderAuthConfig();
// Try to find existing user by username or email
let existingUser = await db
.select()
.from(users)
.where(eq(users.username, userInfo.username))
.limit(1);
if (existingUser.length === 0 && userInfo.email) {
existingUser = await db
.select()
.from(users)
.where(eq(users.email, userInfo.email))
.limit(1);
}
if (existingUser.length > 0) {
return existingUser[0];
}
// If auto-provisioning is disabled, don't create new users
if (!config.autoProvision) {
console.warn(`Header auth: User ${userInfo.username} not found and auto-provisioning is disabled`);
return null;
}
// Create new user if auto-provisioning is enabled
try {
const newUser = {
id: nanoid(),
username: userInfo.username,
email: userInfo.email || `${userInfo.username}@header-auth.local`,
emailVerified: true, // Trust the auth provider
name: userInfo.name || userInfo.username,
createdAt: new Date(),
updatedAt: new Date(),
};
await db.insert(users).values(newUser);
console.log(`Header auth: Auto-provisioned new user ${userInfo.username}`);
return newUser;
} catch (error) {
console.error("Failed to auto-provision user from header auth:", error);
return null;
}
}
@@ -0,0 +1,190 @@
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
describe("Multiple URL Support in BETTER_AUTH_URL", () => {
let originalAuthUrl: string | undefined;
let originalTrustedOrigins: string | undefined;
beforeEach(() => {
// Save original environment variables
originalAuthUrl = process.env.BETTER_AUTH_URL;
originalTrustedOrigins = process.env.BETTER_AUTH_TRUSTED_ORIGINS;
});
afterEach(() => {
// Restore original environment variables
if (originalAuthUrl !== undefined) {
process.env.BETTER_AUTH_URL = originalAuthUrl;
} else {
delete process.env.BETTER_AUTH_URL;
}
if (originalTrustedOrigins !== undefined) {
process.env.BETTER_AUTH_TRUSTED_ORIGINS = originalTrustedOrigins;
} else {
delete process.env.BETTER_AUTH_TRUSTED_ORIGINS;
}
});
test("should parse single URL correctly", () => {
process.env.BETTER_AUTH_URL = "https://gitea-mirror.mydomain.tld";
const parseAuthUrls = () => {
const urlEnv = process.env.BETTER_AUTH_URL || "http://localhost:4321";
const urls = urlEnv.split(',').map(u => u.trim()).filter(Boolean);
// Find first valid URL
for (const url of urls) {
try {
new URL(url);
return { primary: url, all: urls };
} catch {
// Skip invalid
}
}
return { primary: "http://localhost:4321", all: [] };
};
const result = parseAuthUrls();
expect(result.primary).toBe("https://gitea-mirror.mydomain.tld");
expect(result.all).toEqual(["https://gitea-mirror.mydomain.tld"]);
});
test("should parse multiple URLs and use first as primary", () => {
process.env.BETTER_AUTH_URL = "http://10.10.20.45:4321,https://gitea-mirror.mydomain.tld";
const parseAuthUrls = () => {
const urlEnv = process.env.BETTER_AUTH_URL || "http://localhost:4321";
const urls = urlEnv.split(',').map(u => u.trim()).filter(Boolean);
// Find first valid URL
for (const url of urls) {
try {
new URL(url);
return { primary: url, all: urls };
} catch {
// Skip invalid
}
}
return { primary: "http://localhost:4321", all: [] };
};
const result = parseAuthUrls();
expect(result.primary).toBe("http://10.10.20.45:4321");
expect(result.all).toEqual([
"http://10.10.20.45:4321",
"https://gitea-mirror.mydomain.tld"
]);
});
test("should handle invalid URLs gracefully", () => {
process.env.BETTER_AUTH_URL = "not-a-url,http://valid.url:4321,also-invalid";
const parseAuthUrls = () => {
const urlEnv = process.env.BETTER_AUTH_URL || "http://localhost:4321";
const urls = urlEnv.split(',').map(u => u.trim()).filter(Boolean);
const validUrls: string[] = [];
let primaryUrl = "";
for (const url of urls) {
try {
new URL(url);
validUrls.push(url);
if (!primaryUrl) {
primaryUrl = url;
}
} catch {
// Skip invalid URLs
}
}
return {
primary: primaryUrl || "http://localhost:4321",
all: validUrls
};
};
const result = parseAuthUrls();
expect(result.primary).toBe("http://valid.url:4321");
expect(result.all).toEqual(["http://valid.url:4321"]);
});
test("should include all URLs in trusted origins", () => {
process.env.BETTER_AUTH_URL = "http://10.10.20.45:4321,https://gitea-mirror.mydomain.tld";
process.env.BETTER_AUTH_TRUSTED_ORIGINS = "https://auth.provider.com";
const getTrustedOrigins = () => {
const origins = [
"http://localhost:4321",
"http://localhost:8080",
];
// Add all URLs from BETTER_AUTH_URL
const urlEnv = process.env.BETTER_AUTH_URL || "";
if (urlEnv) {
const urls = urlEnv.split(',').map(u => u.trim()).filter(Boolean);
urls.forEach(url => {
try {
new URL(url);
origins.push(url);
} catch {
// Skip invalid
}
});
}
// Add additional trusted origins
if (process.env.BETTER_AUTH_TRUSTED_ORIGINS) {
origins.push(...process.env.BETTER_AUTH_TRUSTED_ORIGINS.split(',').map(o => o.trim()));
}
// Remove duplicates
return [...new Set(origins.filter(Boolean))];
};
const origins = getTrustedOrigins();
expect(origins).toContain("http://10.10.20.45:4321");
expect(origins).toContain("https://gitea-mirror.mydomain.tld");
expect(origins).toContain("https://auth.provider.com");
expect(origins).toContain("http://localhost:4321");
});
test("should handle empty BETTER_AUTH_URL", () => {
delete process.env.BETTER_AUTH_URL;
const parseAuthUrls = () => {
const urlEnv = process.env.BETTER_AUTH_URL || "http://localhost:4321";
const urls = urlEnv.split(',').map(u => u.trim()).filter(Boolean);
for (const url of urls) {
try {
new URL(url);
return { primary: url, all: urls };
} catch {
// Skip invalid
}
}
return { primary: "http://localhost:4321", all: ["http://localhost:4321"] };
};
const result = parseAuthUrls();
expect(result.primary).toBe("http://localhost:4321");
});
test("should handle whitespace in comma-separated URLs", () => {
process.env.BETTER_AUTH_URL = " http://10.10.20.45:4321 , https://gitea-mirror.mydomain.tld , http://localhost:3000 ";
const parseAuthUrls = () => {
const urlEnv = process.env.BETTER_AUTH_URL || "http://localhost:4321";
const urls = urlEnv.split(',').map(u => u.trim()).filter(Boolean);
return urls;
};
const urls = parseAuthUrls();
expect(urls).toEqual([
"http://10.10.20.45:4321",
"https://gitea-mirror.mydomain.tld",
"http://localhost:3000"
]);
});
});
+176
View File
@@ -0,0 +1,176 @@
import { betterAuth } from "better-auth";
import { drizzleAdapter } from "better-auth/adapters/drizzle";
import { oidcProvider } from "better-auth/plugins";
import { sso } from "@better-auth/sso";
import { db, users } from "./db";
import * as schema from "./db/schema";
import { eq } from "drizzle-orm";
export const auth = betterAuth({
// Database configuration
database: drizzleAdapter(db, {
provider: "sqlite",
usePlural: true, // Our tables use plural names (users, not user)
schema, // Pass the schema explicitly
}),
// Secret for signing tokens
secret: process.env.BETTER_AUTH_SECRET,
// Base URL configuration - use the primary URL (Better Auth only supports single baseURL)
baseURL: (() => {
const url = process.env.BETTER_AUTH_URL;
const defaultUrl = "http://localhost:4321";
// Check if URL is provided and not empty
if (!url || typeof url !== 'string' || url.trim() === '') {
console.info('BETTER_AUTH_URL not set, using default:', defaultUrl);
return defaultUrl;
}
try {
// Validate URL format and ensure it's a proper origin
const validatedUrl = new URL(url.trim());
const cleanUrl = validatedUrl.origin; // Use origin to ensure no trailing paths
console.info('Using BETTER_AUTH_URL:', cleanUrl);
return cleanUrl;
} catch (e) {
console.error(`Invalid BETTER_AUTH_URL format: "${url}"`);
console.error('Error:', e);
console.info('Falling back to default:', defaultUrl);
return defaultUrl;
}
})(),
basePath: "/api/auth", // Specify the base path for auth endpoints
// Trusted origins - this is how we support multiple access URLs
trustedOrigins: (() => {
const origins: string[] = [
"http://localhost:4321",
"http://localhost:8080", // Keycloak
];
// Add the primary URL from BETTER_AUTH_URL
const primaryUrl = process.env.BETTER_AUTH_URL;
if (primaryUrl && typeof primaryUrl === 'string' && primaryUrl.trim() !== '') {
try {
const validatedUrl = new URL(primaryUrl.trim());
origins.push(validatedUrl.origin);
} catch {
// Skip if invalid
}
}
// Add additional trusted origins from environment
// This is where users can specify multiple access URLs
if (process.env.BETTER_AUTH_TRUSTED_ORIGINS) {
const additionalOrigins = process.env.BETTER_AUTH_TRUSTED_ORIGINS
.split(',')
.map(o => o.trim())
.filter(o => o !== '');
// Validate each additional origin
for (const origin of additionalOrigins) {
try {
const validatedUrl = new URL(origin);
origins.push(validatedUrl.origin);
} catch {
console.warn(`Invalid trusted origin: ${origin}, skipping`);
}
}
}
// Remove duplicates and empty strings, then return
const uniqueOrigins = [...new Set(origins.filter(Boolean))];
console.info('Trusted origins:', uniqueOrigins);
return uniqueOrigins;
})(),
// Authentication methods
emailAndPassword: {
enabled: true,
requireEmailVerification: false, // We'll enable this later
sendResetPassword: async ({ user, url }) => {
// TODO: Implement email sending for password reset
console.log("Password reset requested for:", user.email);
console.log("Reset URL:", url);
},
},
// Session configuration
session: {
cookieName: "better-auth-session",
updateSessionCookieAge: true,
expiresIn: 60 * 60 * 24 * 30, // 30 days
},
// User configuration
user: {
additionalFields: {
// Keep the username field from our existing schema
username: {
type: "string",
required: false,
input: false, // Don't show in signup form - we'll derive from email
}
},
},
// Plugins configuration
plugins: [
// OIDC Provider plugin - allows this app to act as an OIDC provider
oidcProvider({
loginPage: "/login",
consentPage: "/oauth/consent",
// Allow dynamic client registration for flexibility
allowDynamicClientRegistration: true,
// Note: trustedClients would be configured here if Better Auth supports it
// For now, we'll use dynamic registration
// Customize user info claims based on scopes
getAdditionalUserInfoClaim: (user, scopes) => {
const claims: Record<string, any> = {};
if (scopes.includes("profile")) {
claims.username = user.username;
}
return claims;
},
}),
// SSO plugin - allows users to authenticate with external OIDC providers
sso({
// Provision new users when they sign in with SSO
provisionUser: async ({ user }: { user: any, userInfo: any }) => {
// Derive username from email if not provided
const username = user.name || user.email?.split('@')[0] || 'user';
// Update user in database if needed
await db.update(users)
.set({ username })
.where(eq(users.id, user.id))
.catch(() => {}); // Ignore errors if user doesn't exist yet
},
// Organization provisioning settings
organizationProvisioning: {
disabled: false,
defaultRole: "member",
getRole: async ({ userInfo }: { user: any, userInfo: any }) => {
// Check if user has admin attribute from SSO provider
const isAdmin = userInfo.attributes?.role === 'admin' ||
userInfo.attributes?.groups?.includes('admins');
return isAdmin ? "admin" : "member";
},
},
// Override user info with provider data by default
defaultOverrideUserInfo: true,
// Allow implicit sign up for new users
disableImplicitSignUp: false,
// Trust email_verified claims from the upstream provider so we can link by matching email
trustEmailVerified: true,
}),
],
});
// Export type for use in other parts of the app
export type Auth = typeof auth;
@@ -0,0 +1,258 @@
/**
* Background cleanup service for automatic database maintenance
* This service runs periodically to clean up old events and mirror jobs
* based on user configuration settings
*/
import { db, configs, events, mirrorJobs } from "@/lib/db";
import { eq, lt, and } from "drizzle-orm";
interface CleanupResult {
userId: string;
eventsDeleted: number;
mirrorJobsDeleted: number;
error?: string;
}
/**
* Calculate cleanup interval in hours based on retention period
* For shorter retention periods, run more frequently
* For longer retention periods, run less frequently
* @param retentionSeconds - Retention period in seconds
*/
export function calculateCleanupInterval(retentionSeconds: number): number {
const retentionDays = retentionSeconds / (24 * 60 * 60); // Convert seconds to days
if (retentionDays <= 1) {
return 6; // Every 6 hours for 1 day retention
} else if (retentionDays <= 3) {
return 12; // Every 12 hours for 1-3 days retention
} else if (retentionDays <= 7) {
return 24; // Daily for 4-7 days retention
} else if (retentionDays <= 30) {
return 48; // Every 2 days for 8-30 days retention
} else {
return 168; // Weekly for 30+ days retention
}
}
/**
* Clean up old events and mirror jobs for a specific user
* @param retentionSeconds - Retention period in seconds
*/
async function cleanupForUser(userId: string, retentionSeconds: number): Promise<CleanupResult> {
try {
const retentionDays = retentionSeconds / (24 * 60 * 60); // Convert to days for logging
console.log(`Running cleanup for user ${userId} with ${retentionDays} days retention (${retentionSeconds} seconds)`);
// Calculate cutoff date using seconds
const cutoffDate = new Date();
cutoffDate.setTime(cutoffDate.getTime() - retentionSeconds * 1000);
let eventsDeleted = 0;
let mirrorJobsDeleted = 0;
// Clean up old events
await db
.delete(events)
.where(
and(
eq(events.userId, userId),
lt(events.createdAt, cutoffDate)
)
);
eventsDeleted = 0; // SQLite delete doesn't return count
// Clean up old mirror jobs (only completed ones)
await db
.delete(mirrorJobs)
.where(
and(
eq(mirrorJobs.userId, userId),
eq(mirrorJobs.inProgress, false),
lt(mirrorJobs.timestamp, cutoffDate)
)
);
mirrorJobsDeleted = 0; // SQLite delete doesn't return count
console.log(`Cleanup completed for user ${userId}: ${eventsDeleted} events, ${mirrorJobsDeleted} jobs deleted`);
return {
userId,
eventsDeleted,
mirrorJobsDeleted,
};
} catch (error) {
console.error(`Error during cleanup for user ${userId}:`, error);
return {
userId,
eventsDeleted: 0,
mirrorJobsDeleted: 0,
error: error instanceof Error ? error.message : 'Unknown error',
};
}
}
/**
* Update the cleanup configuration with last run time and calculate next run
*/
async function updateCleanupConfig(userId: string, cleanupConfig: any) {
try {
const now = new Date();
const retentionSeconds = cleanupConfig.retentionDays || 604800; // Default 7 days in seconds
const cleanupIntervalHours = calculateCleanupInterval(retentionSeconds);
const nextRun = new Date(now.getTime() + cleanupIntervalHours * 60 * 60 * 1000);
const updatedConfig = {
...cleanupConfig,
lastRun: now,
nextRun: nextRun,
};
await db
.update(configs)
.set({
cleanupConfig: updatedConfig,
updatedAt: now,
})
.where(eq(configs.userId, userId));
const retentionDays = retentionSeconds / (24 * 60 * 60);
console.log(`Updated cleanup config for user ${userId}, next run: ${nextRun.toISOString()} (${cleanupIntervalHours}h interval for ${retentionDays}d retention)`);
} catch (error) {
console.error(`Error updating cleanup config for user ${userId}:`, error);
}
}
/**
* Run automatic cleanup for all users with cleanup enabled
*/
export async function runAutomaticCleanup(): Promise<CleanupResult[]> {
try {
console.log('Starting automatic cleanup service...');
// Get all users with cleanup enabled
const userConfigs = await db
.select()
.from(configs)
.where(eq(configs.isActive, true));
const results: CleanupResult[] = [];
const now = new Date();
for (const config of userConfigs) {
try {
const cleanupConfig = config.cleanupConfig;
// Skip if cleanup is not enabled
if (!cleanupConfig?.enabled) {
continue;
}
// Check if it's time to run cleanup
const nextRun = cleanupConfig.nextRun ? new Date(cleanupConfig.nextRun) : null;
// If nextRun is null or in the past, run cleanup
if (!nextRun || now >= nextRun) {
const result = await cleanupForUser(config.userId, cleanupConfig.retentionDays || 604800);
results.push(result);
// Update the cleanup config with new run times
await updateCleanupConfig(config.userId, cleanupConfig);
} else {
console.log(`Skipping cleanup for user ${config.userId}, next run: ${nextRun.toISOString()}`);
}
} catch (error) {
console.error(`Error processing cleanup for user ${config.userId}:`, error);
results.push({
userId: config.userId,
eventsDeleted: 0,
mirrorJobsDeleted: 0,
error: error instanceof Error ? error.message : 'Unknown error',
});
}
}
console.log(`Automatic cleanup completed. Processed ${results.length} users.`);
return results;
} catch (error) {
console.error('Error in automatic cleanup service:', error);
return [];
}
}
// Service state tracking
let cleanupIntervalId: NodeJS.Timeout | null = null;
let initialCleanupTimeoutId: NodeJS.Timeout | null = null;
let cleanupServiceRunning = false;
/**
* Start the cleanup service with periodic execution
* This should be called when the application starts
*/
export function startCleanupService() {
if (cleanupServiceRunning) {
console.log('⚠️ Cleanup service already running, skipping start');
return;
}
console.log('Starting background cleanup service...');
// Run cleanup every hour
const CLEANUP_INTERVAL = 60 * 60 * 1000; // 1 hour in milliseconds
// Run initial cleanup after 5 minutes to allow app to fully start
initialCleanupTimeoutId = setTimeout(() => {
runAutomaticCleanup().catch(error => {
console.error('Error in initial cleanup run:', error);
});
}, 5 * 60 * 1000); // 5 minutes
// Set up periodic cleanup
cleanupIntervalId = setInterval(() => {
runAutomaticCleanup().catch(error => {
console.error('Error in periodic cleanup run:', error);
});
}, CLEANUP_INTERVAL);
cleanupServiceRunning = true;
console.log(`✅ Cleanup service started. Will run every ${CLEANUP_INTERVAL / 1000 / 60} minutes.`);
}
/**
* Stop the cleanup service (for testing or shutdown)
*/
export function stopCleanupService() {
if (!cleanupServiceRunning) {
console.log('Cleanup service is not running');
return;
}
console.log('🛑 Stopping cleanup service...');
// Clear the periodic interval
if (cleanupIntervalId) {
clearInterval(cleanupIntervalId);
cleanupIntervalId = null;
}
// Clear the initial timeout
if (initialCleanupTimeoutId) {
clearTimeout(initialCleanupTimeoutId);
initialCleanupTimeoutId = null;
}
cleanupServiceRunning = false;
console.log('✅ Cleanup service stopped');
}
/**
* Get cleanup service status
*/
export function getCleanupServiceStatus() {
return {
running: cleanupServiceRunning,
hasInterval: cleanupIntervalId !== null,
hasInitialTimeout: initialCleanupTimeoutId !== null,
};
}
+28
View File
@@ -0,0 +1,28 @@
/**
* Application configuration
*/
// Environment variables
export const ENV = {
// Runtime environment (development, production, test)
NODE_ENV: process.env.NODE_ENV || "development",
// Database URL - use SQLite by default
get DATABASE_URL() {
// If explicitly set, use the provided DATABASE_URL
if (process.env.DATABASE_URL) {
return process.env.DATABASE_URL;
}
// Otherwise, use the default database
return "sqlite://data/gitea-mirror.db";
},
// Better Auth secret for authentication
BETTER_AUTH_SECRET:
process.env.BETTER_AUTH_SECRET || "your-secret-key-change-this-in-production",
// Server host and port
HOST: process.env.HOST || "localhost",
PORT: parseInt(process.env.PORT || "4321", 10),
};
+102
View File
@@ -0,0 +1,102 @@
/**
* Database adapter for SQLite
* For the self-hosted version of Gitea Mirror
*/
import { drizzle as drizzleSqlite } from 'drizzle-orm/bun-sqlite';
import { Database } from 'bun:sqlite';
import * as schema from './schema';
export type DatabaseClient = ReturnType<typeof createDatabase>;
/**
* Create SQLite database connection
*/
export function createDatabase() {
const dbPath = process.env.DATABASE_PATH || './data/gitea-mirror.db';
// Ensure directory exists
const fs = require('fs');
const path = require('path');
const dir = path.dirname(dbPath);
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true });
}
// Create SQLite connection
const sqlite = new Database(dbPath);
// Enable foreign keys and WAL mode for better performance
sqlite.exec('PRAGMA foreign_keys = ON');
sqlite.exec('PRAGMA journal_mode = WAL');
sqlite.exec('PRAGMA synchronous = NORMAL');
sqlite.exec('PRAGMA cache_size = -2000'); // 2MB cache
sqlite.exec('PRAGMA temp_store = MEMORY');
// Create Drizzle instance with SQLite
const db = drizzleSqlite(sqlite, {
schema,
logger: process.env.NODE_ENV === 'development',
});
return {
db,
client: sqlite,
type: 'sqlite' as const,
// Helper methods
async close() {
sqlite.close();
},
async healthCheck() {
try {
sqlite.query('SELECT 1').get();
return true;
} catch {
return false;
}
},
async transaction<T>(fn: (tx: any) => Promise<T>) {
return db.transaction(fn);
},
};
}
// Create singleton instance
let dbInstance: DatabaseClient | null = null;
/**
* Get database instance (singleton)
*/
export function getDatabase(): DatabaseClient {
if (!dbInstance) {
dbInstance = createDatabase();
}
return dbInstance;
}
/**
* Close database connection
*/
export async function closeDatabase() {
if (dbInstance) {
await dbInstance.close();
dbInstance = null;
}
}
// Export convenience references
export const { db, client, type: dbType } = getDatabase();
// Re-export schema for convenience
export * from './schema';
/**
* Database migration utilities
*/
export async function runMigrations() {
const { migrate } = await import('drizzle-orm/bun-sqlite/migrator');
await migrate(db, { migrationsFolder: './drizzle' });
}
@@ -0,0 +1,42 @@
import { describe, test, expect, mock, beforeAll, afterAll } from "bun:test";
import { drizzle } from "drizzle-orm/bun-sqlite";
// Silence console logs during tests
let originalConsoleLog: typeof console.log;
beforeAll(() => {
// Save original console.log
originalConsoleLog = console.log;
// Replace with no-op function
console.log = () => {};
});
afterAll(() => {
// Restore original console.log
console.log = originalConsoleLog;
});
// Mock the database module
mock.module("bun:sqlite", () => {
return {
Database: mock(function() {
return {
query: mock(() => ({
all: mock(() => []),
run: mock(() => ({}))
}))
};
})
};
});
// Mock the database tables
describe("Database Schema", () => {
test("database connection can be created", async () => {
// Import the db from the module
const { db } = await import("./index");
// Check that db is defined
expect(db).toBeDefined();
});
});
+87
View File
@@ -0,0 +1,87 @@
import { Database } from "bun:sqlite";
import { drizzle } from "drizzle-orm/bun-sqlite";
import fs from "fs";
import path from "path";
import { migrate } from "drizzle-orm/bun-sqlite/migrator";
// Skip database initialization in test environment
let db: ReturnType<typeof drizzle>;
if (process.env.NODE_ENV !== "test") {
// Define the database URL - for development we'll use a local SQLite file
const dataDir = path.join(process.cwd(), "data");
// Ensure data directory exists
if (!fs.existsSync(dataDir)) {
fs.mkdirSync(dataDir, { recursive: true });
}
const dbPath = path.join(dataDir, "gitea-mirror.db");
// Create an empty database file if it doesn't exist
if (!fs.existsSync(dbPath)) {
fs.writeFileSync(dbPath, "");
}
// Create SQLite database instance using Bun's native driver
let sqlite: Database;
try {
sqlite = new Database(dbPath);
console.log("Successfully connected to SQLite database using Bun's native driver");
} catch (error) {
console.error("Error opening database:", error);
throw error;
}
// Create drizzle instance with the SQLite client
db = drizzle({ client: sqlite });
/**
* Run Drizzle migrations
*/
function runDrizzleMigrations() {
try {
console.log("🔄 Checking for pending migrations...");
// Check if migrations table exists
const migrationsTableExists = sqlite
.query("SELECT name FROM sqlite_master WHERE type='table' AND name='__drizzle_migrations'")
.get();
if (!migrationsTableExists) {
console.log("📦 First time setup - running initial migrations...");
}
// Run migrations using Drizzle migrate function
migrate(db, { migrationsFolder: "./drizzle" });
console.log("✅ Database migrations completed successfully");
} catch (error) {
console.error("❌ Error running migrations:", error);
throw error;
}
}
// Run Drizzle migrations after db is initialized
runDrizzleMigrations();
}
export { db };
// Export all table definitions from schema
export {
users,
events,
configs,
repositories,
mirrorJobs,
organizations,
sessions,
accounts,
verificationTokens,
verifications,
oauthApplications,
oauthAccessTokens,
oauthConsent,
ssoProviders,
rateLimits
} from "./schema";
+699
View File
@@ -0,0 +1,699 @@
import { z } from "zod";
import { sqliteTable, text, integer, index, uniqueIndex } from "drizzle-orm/sqlite-core";
import { sql } from "drizzle-orm";
// ===== Zod Validation Schemas =====
export const userSchema = z.object({
id: z.string(),
username: z.string(),
password: z.string(),
email: z.email(),
emailVerified: z.boolean().default(false),
createdAt: z.coerce.date(),
updatedAt: z.coerce.date(),
});
export const githubConfigSchema = z.object({
owner: z.string(),
type: z.enum(["personal", "organization"]),
token: z.string(),
includeStarred: z.boolean().default(false),
includeForks: z.boolean().default(true),
skipForks: z.boolean().default(false),
includeArchived: z.boolean().default(false),
includePrivate: z.boolean().default(true),
includePublic: z.boolean().default(true),
includeOrganizations: z.array(z.string()).default([]),
starredReposOrg: z.string().optional(),
mirrorStrategy: z.enum(["preserve", "single-org", "flat-user", "mixed"]).default("preserve"),
defaultOrg: z.string().optional(),
starredCodeOnly: z.boolean().default(false),
skipStarredIssues: z.boolean().optional(), // Deprecated: kept for backward compatibility, use starredCodeOnly instead
starredDuplicateStrategy: z.enum(["suffix", "prefix", "owner-org"]).default("suffix").optional(),
});
export const giteaConfigSchema = z.object({
url: z.url(),
token: z.string(),
defaultOwner: z.string(),
organization: z.string().optional(),
mirrorInterval: z.string().default("8h"),
lfs: z.boolean().default(false),
wiki: z.boolean().default(false),
visibility: z
.enum(["public", "private", "limited", "default"])
.default("default"),
createOrg: z.boolean().default(true),
templateOwner: z.string().optional(),
templateRepo: z.string().optional(),
addTopics: z.boolean().default(true),
topicPrefix: z.string().optional(),
preserveVisibility: z.boolean().default(true),
preserveOrgStructure: z.boolean().default(false),
forkStrategy: z
.enum(["skip", "reference", "full-copy"])
.default("reference"),
// Mirror options
issueConcurrency: z.number().int().min(1).default(3),
pullRequestConcurrency: z.number().int().min(1).default(5),
mirrorReleases: z.boolean().default(false),
releaseLimit: z.number().default(10),
mirrorMetadata: z.boolean().default(false),
mirrorIssues: z.boolean().default(false),
mirrorPullRequests: z.boolean().default(false),
mirrorLabels: z.boolean().default(false),
mirrorMilestones: z.boolean().default(false),
});
export const scheduleConfigSchema = z.object({
enabled: z.boolean().default(false),
interval: z.string().default("0 2 * * *"),
concurrent: z.boolean().default(false),
batchSize: z.number().default(10),
pauseBetweenBatches: z.number().default(5000),
retryAttempts: z.number().default(3),
retryDelay: z.number().default(60000),
timeout: z.number().default(3600000),
autoRetry: z.boolean().default(true),
cleanupBeforeMirror: z.boolean().default(false),
notifyOnFailure: z.boolean().default(true),
notifyOnSuccess: z.boolean().default(false),
logLevel: z.enum(["error", "warn", "info", "debug"]).default("info"),
timezone: z.string().default("UTC"),
onlyMirrorUpdated: z.boolean().default(false),
updateInterval: z.number().default(86400000),
skipRecentlyMirrored: z.boolean().default(true),
recentThreshold: z.number().default(3600000),
autoImport: z.boolean().default(true),
autoMirror: z.boolean().default(false),
lastRun: z.coerce.date().optional(),
nextRun: z.coerce.date().optional(),
});
export const cleanupConfigSchema = z.object({
enabled: z.boolean().default(false),
retentionDays: z.number().default(604800), // 7 days in seconds
deleteFromGitea: z.boolean().default(false),
deleteIfNotInGitHub: z.boolean().default(true),
protectedRepos: z.array(z.string()).default([]),
dryRun: z.boolean().default(false),
orphanedRepoAction: z
.enum(["skip", "archive", "delete"])
.default("archive"),
batchSize: z.number().default(10),
pauseBetweenDeletes: z.number().default(2000),
lastRun: z.coerce.date().optional(),
nextRun: z.coerce.date().optional(),
});
export const configSchema = z.object({
id: z.string(),
userId: z.string(),
name: z.string(),
isActive: z.boolean().default(true),
githubConfig: githubConfigSchema,
giteaConfig: giteaConfigSchema,
include: z.array(z.string()).default(["*"]),
exclude: z.array(z.string()).default([]),
scheduleConfig: scheduleConfigSchema,
cleanupConfig: cleanupConfigSchema,
createdAt: z.coerce.date(),
updatedAt: z.coerce.date(),
});
export const repositorySchema = z.object({
id: z.string(),
userId: z.string(),
configId: z.string(),
name: z.string(),
fullName: z.string(),
normalizedFullName: z.string(),
url: z.url(),
cloneUrl: z.url(),
owner: z.string(),
organization: z.string().optional().nullable(),
mirroredLocation: z.string().default(""),
isPrivate: z.boolean().default(false),
isForked: z.boolean().default(false),
forkedFrom: z.string().optional().nullable(),
hasIssues: z.boolean().default(false),
isStarred: z.boolean().default(false),
isArchived: z.boolean().default(false),
size: z.number().default(0),
hasLFS: z.boolean().default(false),
hasSubmodules: z.boolean().default(false),
language: z.string().optional().nullable(),
description: z.string().optional().nullable(),
defaultBranch: z.string(),
visibility: z.enum(["public", "private", "internal"]).default("public"),
status: z
.enum([
"imported",
"mirroring",
"mirrored",
"failed",
"skipped",
"ignored", // User explicitly wants to ignore this repository
"deleting",
"deleted",
"syncing",
"synced",
"archived",
])
.default("imported"),
lastMirrored: z.coerce.date().optional().nullable(),
errorMessage: z.string().optional().nullable(),
destinationOrg: z.string().optional().nullable(),
metadata: z.string().optional().nullable(), // JSON string for metadata sync state
createdAt: z.coerce.date(),
updatedAt: z.coerce.date(),
});
export const mirrorJobSchema = z.object({
id: z.string(),
userId: z.string(),
repositoryId: z.string().optional().nullable(),
repositoryName: z.string().optional().nullable(),
organizationId: z.string().optional().nullable(),
organizationName: z.string().optional().nullable(),
details: z.string().optional().nullable(),
status: z
.enum([
"imported",
"mirroring",
"mirrored",
"failed",
"skipped",
"ignored", // User explicitly wants to ignore this repository
"deleting",
"deleted",
"syncing",
"synced",
"archived",
])
.default("imported"),
message: z.string(),
timestamp: z.coerce.date(),
jobType: z.enum(["mirror", "cleanup", "import"]).default("mirror"),
batchId: z.string().optional().nullable(),
totalItems: z.number().optional().nullable(),
completedItems: z.number().default(0),
itemIds: z.array(z.string()).optional().nullable(),
completedItemIds: z.array(z.string()).default([]),
inProgress: z.boolean().default(false),
startedAt: z.coerce.date().optional().nullable(),
completedAt: z.coerce.date().optional().nullable(),
lastCheckpoint: z.coerce.date().optional().nullable(),
});
export const organizationSchema = z.object({
id: z.string(),
userId: z.string(),
configId: z.string(),
name: z.string(),
normalizedName: z.string(),
avatarUrl: z.string(),
membershipRole: z.enum(["member", "admin", "owner", "billing_manager"]).default("member"),
isIncluded: z.boolean().default(true),
destinationOrg: z.string().optional().nullable(),
status: z
.enum([
"imported",
"mirroring",
"mirrored",
"failed",
"skipped",
"ignored", // User explicitly wants to ignore this repository
"deleting",
"deleted",
"syncing",
"synced",
])
.default("imported"),
lastMirrored: z.coerce.date().optional().nullable(),
errorMessage: z.string().optional().nullable(),
repositoryCount: z.number().default(0),
publicRepositoryCount: z.number().optional(),
privateRepositoryCount: z.number().optional(),
forkRepositoryCount: z.number().optional(),
createdAt: z.coerce.date(),
updatedAt: z.coerce.date(),
});
export const eventSchema = z.object({
id: z.string(),
userId: z.string(),
channel: z.string(),
payload: z.any(),
read: z.boolean().default(false),
createdAt: z.coerce.date(),
});
// ===== Drizzle Table Definitions =====
export const users = sqliteTable("users", {
id: text("id").primaryKey(),
name: text("name"),
email: text("email").notNull().unique(),
emailVerified: integer("email_verified", { mode: "boolean" }).notNull().default(false),
image: text("image"),
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
// Custom fields
username: text("username"),
}, (_table) => []);
export const events = sqliteTable("events", {
id: text("id").primaryKey(),
userId: text("user_id")
.notNull()
.references(() => users.id),
channel: text("channel").notNull(),
payload: text("payload", { mode: "json" }).notNull(),
read: integer("read", { mode: "boolean" }).notNull().default(false),
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (table) => [
index("idx_events_user_channel").on(table.userId, table.channel),
index("idx_events_created_at").on(table.createdAt),
index("idx_events_read").on(table.read),
]);
export const configs = sqliteTable("configs", {
id: text("id").primaryKey(),
userId: text("user_id")
.notNull()
.references(() => users.id),
name: text("name").notNull(),
isActive: integer("is_active", { mode: "boolean" }).notNull().default(true),
githubConfig: text("github_config", { mode: "json" })
.$type<z.infer<typeof githubConfigSchema>>()
.notNull(),
giteaConfig: text("gitea_config", { mode: "json" })
.$type<z.infer<typeof giteaConfigSchema>>()
.notNull(),
include: text("include", { mode: "json" })
.$type<string[]>()
.notNull()
.default(sql`'["*"]'`),
exclude: text("exclude", { mode: "json" })
.$type<string[]>()
.notNull()
.default(sql`'[]'`),
scheduleConfig: text("schedule_config", { mode: "json" })
.$type<z.infer<typeof scheduleConfigSchema>>()
.notNull(),
cleanupConfig: text("cleanup_config", { mode: "json" })
.$type<z.infer<typeof cleanupConfigSchema>>()
.notNull(),
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (_table) => []);
export const repositories = sqliteTable("repositories", {
id: text("id").primaryKey(),
userId: text("user_id")
.notNull()
.references(() => users.id),
configId: text("config_id")
.notNull()
.references(() => configs.id),
name: text("name").notNull(),
fullName: text("full_name").notNull(),
normalizedFullName: text("normalized_full_name").notNull(),
url: text("url").notNull(),
cloneUrl: text("clone_url").notNull(),
owner: text("owner").notNull(),
organization: text("organization"),
mirroredLocation: text("mirrored_location").default(""),
isPrivate: integer("is_private", { mode: "boolean" })
.notNull()
.default(false),
isForked: integer("is_fork", { mode: "boolean" }).notNull().default(false),
forkedFrom: text("forked_from"),
hasIssues: integer("has_issues", { mode: "boolean" })
.notNull()
.default(false),
isStarred: integer("is_starred", { mode: "boolean" })
.notNull()
.default(false),
isArchived: integer("is_archived", { mode: "boolean" })
.notNull()
.default(false),
size: integer("size").notNull().default(0),
hasLFS: integer("has_lfs", { mode: "boolean" }).notNull().default(false),
hasSubmodules: integer("has_submodules", { mode: "boolean" })
.notNull()
.default(false),
language: text("language"),
description: text("description"),
defaultBranch: text("default_branch").notNull(),
visibility: text("visibility").notNull().default("public"),
status: text("status").notNull().default("imported"),
lastMirrored: integer("last_mirrored", { mode: "timestamp" }),
errorMessage: text("error_message"),
destinationOrg: text("destination_org"),
metadata: text("metadata"), // JSON string storing metadata sync state (issues, PRs, releases, etc.)
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (table) => [
index("idx_repositories_user_id").on(table.userId),
index("idx_repositories_config_id").on(table.configId),
index("idx_repositories_status").on(table.status),
index("idx_repositories_owner").on(table.owner),
index("idx_repositories_organization").on(table.organization),
index("idx_repositories_is_fork").on(table.isForked),
index("idx_repositories_is_starred").on(table.isStarred),
uniqueIndex("uniq_repositories_user_full_name").on(table.userId, table.fullName),
uniqueIndex("uniq_repositories_user_normalized_full_name").on(table.userId, table.normalizedFullName),
]);
export const mirrorJobs = sqliteTable("mirror_jobs", {
id: text("id").primaryKey(),
userId: text("user_id")
.notNull()
.references(() => users.id),
repositoryId: text("repository_id"),
repositoryName: text("repository_name"),
organizationId: text("organization_id"),
organizationName: text("organization_name"),
details: text("details"),
status: text("status").notNull().default("imported"),
message: text("message").notNull(),
timestamp: integer("timestamp", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
// Job resilience fields
jobType: text("job_type").notNull().default("mirror"),
batchId: text("batch_id"),
totalItems: integer("total_items"),
completedItems: integer("completed_items").default(0),
itemIds: text("item_ids", { mode: "json" }).$type<string[]>(),
completedItemIds: text("completed_item_ids", { mode: "json" })
.$type<string[]>()
.default(sql`'[]'`),
inProgress: integer("in_progress", { mode: "boolean" })
.notNull()
.default(false),
startedAt: integer("started_at", { mode: "timestamp" }),
completedAt: integer("completed_at", { mode: "timestamp" }),
lastCheckpoint: integer("last_checkpoint", { mode: "timestamp" }),
}, (table) => [
index("idx_mirror_jobs_user_id").on(table.userId),
index("idx_mirror_jobs_batch_id").on(table.batchId),
index("idx_mirror_jobs_in_progress").on(table.inProgress),
index("idx_mirror_jobs_job_type").on(table.jobType),
index("idx_mirror_jobs_timestamp").on(table.timestamp),
]);
export const organizations = sqliteTable("organizations", {
id: text("id").primaryKey(),
userId: text("user_id")
.notNull()
.references(() => users.id),
configId: text("config_id")
.notNull()
.references(() => configs.id),
name: text("name").notNull(),
normalizedName: text("normalized_name").notNull(),
avatarUrl: text("avatar_url").notNull(),
membershipRole: text("membership_role").notNull().default("member"),
isIncluded: integer("is_included", { mode: "boolean" })
.notNull()
.default(true),
destinationOrg: text("destination_org"),
status: text("status").notNull().default("imported"),
lastMirrored: integer("last_mirrored", { mode: "timestamp" }),
errorMessage: text("error_message"),
repositoryCount: integer("repository_count").notNull().default(0),
publicRepositoryCount: integer("public_repository_count"),
privateRepositoryCount: integer("private_repository_count"),
forkRepositoryCount: integer("fork_repository_count"),
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (table) => [
index("idx_organizations_user_id").on(table.userId),
index("idx_organizations_config_id").on(table.configId),
index("idx_organizations_status").on(table.status),
index("idx_organizations_is_included").on(table.isIncluded),
uniqueIndex("uniq_organizations_user_normalized_name").on(table.userId, table.normalizedName),
]);
// ===== Better Auth Tables =====
// Sessions table
export const sessions = sqliteTable("sessions", {
id: text("id").primaryKey(),
token: text("token").notNull().unique(),
userId: text("user_id").notNull().references(() => users.id),
expiresAt: integer("expires_at", { mode: "timestamp" }).notNull(),
ipAddress: text("ip_address"),
userAgent: text("user_agent"),
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (table) => [
index("idx_sessions_user_id").on(table.userId),
index("idx_sessions_token").on(table.token),
index("idx_sessions_expires_at").on(table.expiresAt),
]);
// Accounts table (for OAuth providers and credentials)
export const accounts = sqliteTable("accounts", {
id: text("id").primaryKey(),
accountId: text("account_id").notNull(),
userId: text("user_id").notNull().references(() => users.id),
providerId: text("provider_id").notNull(),
providerUserId: text("provider_user_id"), // Make nullable for email/password auth
accessToken: text("access_token"),
refreshToken: text("refresh_token"),
idToken: text("id_token"),
accessTokenExpiresAt: integer("access_token_expires_at", { mode: "timestamp" }),
refreshTokenExpiresAt: integer("refresh_token_expires_at", { mode: "timestamp" }),
scope: text("scope"),
expiresAt: integer("expires_at", { mode: "timestamp" }),
password: text("password"), // For credential provider
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (table) => [
index("idx_accounts_account_id").on(table.accountId),
index("idx_accounts_user_id").on(table.userId),
index("idx_accounts_provider").on(table.providerId, table.providerUserId),
]);
// Verification tokens table
export const verificationTokens = sqliteTable("verification_tokens", {
id: text("id").primaryKey(),
token: text("token").notNull().unique(),
identifier: text("identifier").notNull(),
type: text("type").notNull(), // email, password-reset, etc
expiresAt: integer("expires_at", { mode: "timestamp" }).notNull(),
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (table) => [
index("idx_verification_tokens_token").on(table.token),
index("idx_verification_tokens_identifier").on(table.identifier),
]);
// Verifications table (for Better Auth)
export const verifications = sqliteTable("verifications", {
id: text("id").primaryKey(),
identifier: text("identifier").notNull(),
value: text("value").notNull(),
expiresAt: integer("expires_at", { mode: "timestamp" }).notNull(),
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (table) => [
index("idx_verifications_identifier").on(table.identifier),
]);
// ===== OIDC Provider Tables =====
// OAuth Applications table
export const oauthApplications = sqliteTable("oauth_applications", {
id: text("id").primaryKey(),
clientId: text("client_id").notNull().unique(),
clientSecret: text("client_secret").notNull(),
name: text("name").notNull(),
redirectURLs: text("redirect_urls").notNull(), // Comma-separated list
metadata: text("metadata"), // JSON string
type: text("type").notNull(), // web, mobile, etc
disabled: integer("disabled", { mode: "boolean" }).notNull().default(false),
userId: text("user_id"), // Optional - owner of the application
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (table) => [
index("idx_oauth_applications_client_id").on(table.clientId),
index("idx_oauth_applications_user_id").on(table.userId),
]);
// OAuth Access Tokens table
export const oauthAccessTokens = sqliteTable("oauth_access_tokens", {
id: text("id").primaryKey(),
accessToken: text("access_token").notNull(),
refreshToken: text("refresh_token"),
accessTokenExpiresAt: integer("access_token_expires_at", { mode: "timestamp" }).notNull(),
refreshTokenExpiresAt: integer("refresh_token_expires_at", { mode: "timestamp" }),
clientId: text("client_id").notNull(),
userId: text("user_id").notNull().references(() => users.id),
scopes: text("scopes").notNull(), // Comma-separated list
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (table) => [
index("idx_oauth_access_tokens_access_token").on(table.accessToken),
index("idx_oauth_access_tokens_user_id").on(table.userId),
index("idx_oauth_access_tokens_client_id").on(table.clientId),
]);
// OAuth Consent table
export const oauthConsent = sqliteTable("oauth_consent", {
id: text("id").primaryKey(),
userId: text("user_id").notNull().references(() => users.id),
clientId: text("client_id").notNull(),
scopes: text("scopes").notNull(), // Comma-separated list
consentGiven: integer("consent_given", { mode: "boolean" }).notNull(),
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (table) => [
index("idx_oauth_consent_user_id").on(table.userId),
index("idx_oauth_consent_client_id").on(table.clientId),
index("idx_oauth_consent_user_client").on(table.userId, table.clientId),
]);
// ===== SSO Provider Tables =====
// SSO Providers table
export const ssoProviders = sqliteTable("sso_providers", {
id: text("id").primaryKey(),
issuer: text("issuer").notNull(),
domain: text("domain").notNull(),
oidcConfig: text("oidc_config").notNull(), // JSON string with OIDC configuration
userId: text("user_id").notNull(), // Admin who created this provider
providerId: text("provider_id").notNull().unique(), // Unique identifier for the provider
organizationId: text("organization_id"), // Optional - if provider is linked to an organization
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (table) => [
index("idx_sso_providers_provider_id").on(table.providerId),
index("idx_sso_providers_domain").on(table.domain),
index("idx_sso_providers_issuer").on(table.issuer),
]);
// ===== Rate Limit Tracking =====
export const rateLimitSchema = z.object({
id: z.string(),
userId: z.string(),
provider: z.enum(["github", "gitea"]).default("github"),
limit: z.number(),
remaining: z.number(),
used: z.number(),
reset: z.coerce.date(),
retryAfter: z.number().optional(), // seconds to wait
status: z.enum(["ok", "warning", "limited", "exceeded"]).default("ok"),
lastChecked: z.coerce.date(),
createdAt: z.coerce.date(),
updatedAt: z.coerce.date(),
});
export const rateLimits = sqliteTable("rate_limits", {
id: text("id").primaryKey(),
userId: text("user_id")
.notNull()
.references(() => users.id),
provider: text("provider").notNull().default("github"),
limit: integer("limit").notNull(),
remaining: integer("remaining").notNull(),
used: integer("used").notNull(),
reset: integer("reset", { mode: "timestamp" }).notNull(),
retryAfter: integer("retry_after"), // seconds to wait
status: text("status").notNull().default("ok"),
lastChecked: integer("last_checked", { mode: "timestamp" }).notNull(),
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
}, (table) => [
index("idx_rate_limits_user_provider").on(table.userId, table.provider),
index("idx_rate_limits_status").on(table.status),
]);
// Export type definitions
export type User = z.infer<typeof userSchema>;
export type Config = z.infer<typeof configSchema>;
export type Repository = z.infer<typeof repositorySchema>;
export type MirrorJob = z.infer<typeof mirrorJobSchema>;
export type Organization = z.infer<typeof organizationSchema>;
export type Event = z.infer<typeof eventSchema>;
export type RateLimit = z.infer<typeof rateLimitSchema>;
@@ -0,0 +1,22 @@
/**
* Deployment mode utilities
* Supports both self-hosted and hosted versions
*/
export const DEPLOYMENT_MODE = process.env.DEPLOYMENT_MODE || 'selfhosted';
export const isSelfHostedMode = () => DEPLOYMENT_MODE === 'selfhosted';
export const isHostedMode = () => DEPLOYMENT_MODE === 'hosted';
/**
* Feature flags for self-hosted version
*/
export const features = {
// Core features available
githubSync: true,
giteaMirroring: true,
scheduling: true,
multiUser: true,
githubSponsors: true,
unlimitedRepos: true,
};
@@ -0,0 +1,378 @@
/**
* Environment variable configuration loader
* Loads configuration from environment variables and populates the database
*/
import { db, configs, users } from '@/lib/db';
import { eq, and } from 'drizzle-orm';
import { v4 as uuidv4 } from 'uuid';
import { encrypt } from '@/lib/utils/encryption';
interface EnvConfig {
github: {
username?: string;
token?: string;
type?: 'personal' | 'organization';
privateRepositories?: boolean;
publicRepositories?: boolean;
mirrorStarred?: boolean;
skipForks?: boolean;
includeArchived?: boolean;
mirrorOrganizations?: boolean;
preserveOrgStructure?: boolean;
onlyMirrorOrgs?: boolean;
starredCodeOnly?: boolean;
starredReposOrg?: string;
mirrorStrategy?: 'preserve' | 'single-org' | 'flat-user' | 'mixed';
};
gitea: {
url?: string;
username?: string;
token?: string;
organization?: string;
visibility?: 'public' | 'private' | 'limited' | 'default';
mirrorInterval?: string;
lfs?: boolean;
createOrg?: boolean;
templateOwner?: string;
templateRepo?: string;
addTopics?: boolean;
topicPrefix?: string;
preserveVisibility?: boolean;
forkStrategy?: 'skip' | 'reference' | 'full-copy';
};
mirror: {
mirrorIssues?: boolean;
mirrorWiki?: boolean;
mirrorReleases?: boolean;
mirrorPullRequests?: boolean;
mirrorLabels?: boolean;
mirrorMilestones?: boolean;
mirrorMetadata?: boolean;
releaseLimit?: number;
issueConcurrency?: number;
pullRequestConcurrency?: number;
};
schedule: {
enabled?: boolean;
interval?: string;
concurrent?: boolean;
batchSize?: number;
pauseBetweenBatches?: number;
retryAttempts?: number;
retryDelay?: number;
timeout?: number;
autoRetry?: boolean;
cleanupBeforeMirror?: boolean;
notifyOnFailure?: boolean;
notifyOnSuccess?: boolean;
logLevel?: 'error' | 'warn' | 'info' | 'debug';
timezone?: string;
onlyMirrorUpdated?: boolean;
updateInterval?: number;
skipRecentlyMirrored?: boolean;
recentThreshold?: number;
autoImport?: boolean;
autoMirror?: boolean;
};
cleanup: {
enabled?: boolean;
retentionDays?: number;
deleteFromGitea?: boolean;
deleteIfNotInGitHub?: boolean;
protectedRepos?: string[];
dryRun?: boolean;
orphanedRepoAction?: 'skip' | 'archive' | 'delete';
batchSize?: number;
pauseBetweenDeletes?: number;
};
}
/**
* Parse environment variables into configuration object
*/
function parseEnvConfig(): EnvConfig {
// Parse protected repos from comma-separated string
const protectedRepos = process.env.CLEANUP_PROTECTED_REPOS
? process.env.CLEANUP_PROTECTED_REPOS.split(',').map(r => r.trim()).filter(Boolean)
: undefined;
return {
github: {
username: process.env.GITHUB_USERNAME,
token: process.env.GITHUB_TOKEN,
type: process.env.GITHUB_TYPE as 'personal' | 'organization',
privateRepositories: process.env.PRIVATE_REPOSITORIES === 'true',
publicRepositories: process.env.PUBLIC_REPOSITORIES === 'true',
mirrorStarred: process.env.MIRROR_STARRED === 'true',
skipForks: process.env.SKIP_FORKS === 'true',
includeArchived: process.env.INCLUDE_ARCHIVED === 'true',
mirrorOrganizations: process.env.MIRROR_ORGANIZATIONS === 'true',
preserveOrgStructure: process.env.PRESERVE_ORG_STRUCTURE === 'true',
onlyMirrorOrgs: process.env.ONLY_MIRROR_ORGS === 'true',
starredCodeOnly: process.env.SKIP_STARRED_ISSUES === 'true',
starredReposOrg: process.env.STARRED_REPOS_ORG,
mirrorStrategy: process.env.MIRROR_STRATEGY as 'preserve' | 'single-org' | 'flat-user' | 'mixed',
},
gitea: {
url: process.env.GITEA_URL,
username: process.env.GITEA_USERNAME,
token: process.env.GITEA_TOKEN,
organization: process.env.GITEA_ORGANIZATION,
visibility: process.env.GITEA_ORG_VISIBILITY as 'public' | 'private' | 'limited' | 'default',
mirrorInterval: process.env.GITEA_MIRROR_INTERVAL,
lfs: process.env.GITEA_LFS === 'true',
createOrg: process.env.GITEA_CREATE_ORG === 'true',
templateOwner: process.env.GITEA_TEMPLATE_OWNER,
templateRepo: process.env.GITEA_TEMPLATE_REPO,
addTopics: process.env.GITEA_ADD_TOPICS === 'true',
topicPrefix: process.env.GITEA_TOPIC_PREFIX,
preserveVisibility: process.env.GITEA_PRESERVE_VISIBILITY === 'true',
forkStrategy: process.env.GITEA_FORK_STRATEGY as 'skip' | 'reference' | 'full-copy',
},
mirror: {
mirrorIssues: process.env.MIRROR_ISSUES === 'true',
mirrorWiki: process.env.MIRROR_WIKI === 'true',
mirrorReleases: process.env.MIRROR_RELEASES === 'true',
mirrorPullRequests: process.env.MIRROR_PULL_REQUESTS === 'true',
mirrorLabels: process.env.MIRROR_LABELS === 'true',
mirrorMilestones: process.env.MIRROR_MILESTONES === 'true',
mirrorMetadata: process.env.MIRROR_METADATA === 'true',
releaseLimit: process.env.RELEASE_LIMIT ? parseInt(process.env.RELEASE_LIMIT, 10) : undefined,
issueConcurrency: process.env.MIRROR_ISSUE_CONCURRENCY ? parseInt(process.env.MIRROR_ISSUE_CONCURRENCY, 10) : undefined,
pullRequestConcurrency: process.env.MIRROR_PULL_REQUEST_CONCURRENCY ? parseInt(process.env.MIRROR_PULL_REQUEST_CONCURRENCY, 10) : undefined,
},
schedule: {
enabled: process.env.SCHEDULE_ENABLED === 'true' ||
!!process.env.GITEA_MIRROR_INTERVAL ||
!!process.env.SCHEDULE_INTERVAL ||
!!process.env.DELAY, // Auto-enable if any interval is specified
interval: process.env.SCHEDULE_INTERVAL || process.env.GITEA_MIRROR_INTERVAL || process.env.DELAY, // Support GITEA_MIRROR_INTERVAL, SCHEDULE_INTERVAL, and old DELAY
concurrent: process.env.SCHEDULE_CONCURRENT === 'true',
batchSize: process.env.SCHEDULE_BATCH_SIZE ? parseInt(process.env.SCHEDULE_BATCH_SIZE, 10) : undefined,
pauseBetweenBatches: process.env.SCHEDULE_PAUSE_BETWEEN_BATCHES ? parseInt(process.env.SCHEDULE_PAUSE_BETWEEN_BATCHES, 10) : undefined,
retryAttempts: process.env.SCHEDULE_RETRY_ATTEMPTS ? parseInt(process.env.SCHEDULE_RETRY_ATTEMPTS, 10) : undefined,
retryDelay: process.env.SCHEDULE_RETRY_DELAY ? parseInt(process.env.SCHEDULE_RETRY_DELAY, 10) : undefined,
timeout: process.env.SCHEDULE_TIMEOUT ? parseInt(process.env.SCHEDULE_TIMEOUT, 10) : undefined,
autoRetry: process.env.SCHEDULE_AUTO_RETRY === 'true',
cleanupBeforeMirror: process.env.SCHEDULE_CLEANUP_BEFORE_MIRROR === 'true',
notifyOnFailure: process.env.SCHEDULE_NOTIFY_ON_FAILURE === 'true',
notifyOnSuccess: process.env.SCHEDULE_NOTIFY_ON_SUCCESS === 'true',
logLevel: process.env.SCHEDULE_LOG_LEVEL as 'error' | 'warn' | 'info' | 'debug',
timezone: process.env.SCHEDULE_TIMEZONE,
onlyMirrorUpdated: process.env.SCHEDULE_ONLY_MIRROR_UPDATED === 'true',
updateInterval: process.env.SCHEDULE_UPDATE_INTERVAL ? parseInt(process.env.SCHEDULE_UPDATE_INTERVAL, 10) : undefined,
skipRecentlyMirrored: process.env.SCHEDULE_SKIP_RECENTLY_MIRRORED === 'true',
recentThreshold: process.env.SCHEDULE_RECENT_THRESHOLD ? parseInt(process.env.SCHEDULE_RECENT_THRESHOLD, 10) : undefined,
autoImport: process.env.AUTO_IMPORT_REPOS !== 'false',
autoMirror: process.env.AUTO_MIRROR_REPOS === 'true',
},
cleanup: {
enabled: process.env.CLEANUP_ENABLED === 'true' ||
process.env.CLEANUP_DELETE_IF_NOT_IN_GITHUB === 'true', // Auto-enable if deleteIfNotInGitHub is enabled
retentionDays: process.env.CLEANUP_RETENTION_DAYS ? parseInt(process.env.CLEANUP_RETENTION_DAYS, 10) : undefined,
deleteFromGitea: process.env.CLEANUP_DELETE_FROM_GITEA === 'true',
deleteIfNotInGitHub: process.env.CLEANUP_DELETE_IF_NOT_IN_GITHUB === 'true',
protectedRepos,
dryRun: process.env.CLEANUP_DRY_RUN === 'true' ? true : process.env.CLEANUP_DRY_RUN === 'false' ? false : false,
orphanedRepoAction: process.env.CLEANUP_ORPHANED_REPO_ACTION as 'skip' | 'archive' | 'delete',
batchSize: process.env.CLEANUP_BATCH_SIZE ? parseInt(process.env.CLEANUP_BATCH_SIZE, 10) : undefined,
pauseBetweenDeletes: process.env.CLEANUP_PAUSE_BETWEEN_DELETES ? parseInt(process.env.CLEANUP_PAUSE_BETWEEN_DELETES, 10) : undefined,
},
};
}
/**
* Check if environment configuration is available
*/
function hasEnvConfig(envConfig: EnvConfig): boolean {
// Check if any GitHub or Gitea config is provided
return !!(
envConfig.github.username ||
envConfig.github.token ||
envConfig.gitea.url ||
envConfig.gitea.username ||
envConfig.gitea.token
);
}
/**
* Initialize configuration from environment variables
* This function runs on application startup and populates the database
* with configuration from environment variables if available
*/
export async function initializeConfigFromEnv(): Promise<void> {
try {
const envConfig = parseEnvConfig();
// Skip if no environment config is provided
if (!hasEnvConfig(envConfig)) {
console.log('[ENV Config Loader] No environment configuration found, skipping initialization');
return;
}
console.log('[ENV Config Loader] Found environment configuration, initializing...');
// Get the first user (admin user)
const firstUser = await db
.select()
.from(users)
.limit(1);
if (firstUser.length === 0) {
console.log('[ENV Config Loader] No users found, skipping configuration initialization');
return;
}
const userId = firstUser[0].id;
// Check if config already exists for this user
const existingConfig = await db
.select()
.from(configs)
.where(eq(configs.userId, userId))
.limit(1);
// Determine mirror strategy based on environment variables or use explicit value
let mirrorStrategy: 'preserve' | 'single-org' | 'flat-user' | 'mixed' = 'preserve';
if (envConfig.github.mirrorStrategy) {
mirrorStrategy = envConfig.github.mirrorStrategy;
} else if (envConfig.github.preserveOrgStructure === false && envConfig.gitea.organization) {
mirrorStrategy = 'single-org';
} else if (envConfig.github.preserveOrgStructure === true) {
mirrorStrategy = 'preserve';
}
// Build GitHub config
const githubConfig = {
owner: envConfig.github.username || existingConfig?.[0]?.githubConfig?.owner || '',
type: envConfig.github.type || existingConfig?.[0]?.githubConfig?.type || 'personal',
token: envConfig.github.token ? encrypt(envConfig.github.token) : existingConfig?.[0]?.githubConfig?.token || '',
includeStarred: envConfig.github.mirrorStarred ?? existingConfig?.[0]?.githubConfig?.includeStarred ?? false,
includeForks: !(envConfig.github.skipForks ?? false),
skipForks: envConfig.github.skipForks ?? existingConfig?.[0]?.githubConfig?.skipForks ?? false,
includeArchived: envConfig.github.includeArchived ?? existingConfig?.[0]?.githubConfig?.includeArchived ?? false,
includePrivate: envConfig.github.privateRepositories ?? existingConfig?.[0]?.githubConfig?.includePrivate ?? false,
includePublic: envConfig.github.publicRepositories ?? existingConfig?.[0]?.githubConfig?.includePublic ?? true,
includeOrganizations: envConfig.github.mirrorOrganizations ? [] : (existingConfig?.[0]?.githubConfig?.includeOrganizations ?? []),
starredReposOrg: envConfig.github.starredReposOrg || existingConfig?.[0]?.githubConfig?.starredReposOrg || 'starred',
mirrorStrategy,
defaultOrg: envConfig.gitea.organization || existingConfig?.[0]?.githubConfig?.defaultOrg || 'github-mirrors',
starredCodeOnly: envConfig.github.starredCodeOnly ?? existingConfig?.[0]?.githubConfig?.starredCodeOnly ?? false,
};
// Build Gitea config
const giteaConfig = {
url: envConfig.gitea.url || existingConfig?.[0]?.giteaConfig?.url || '',
token: envConfig.gitea.token ? encrypt(envConfig.gitea.token) : existingConfig?.[0]?.giteaConfig?.token || '',
defaultOwner: envConfig.gitea.username || existingConfig?.[0]?.giteaConfig?.defaultOwner || '',
organization: envConfig.gitea.organization || existingConfig?.[0]?.giteaConfig?.organization || undefined,
preserveOrgStructure: mirrorStrategy === 'preserve' || mirrorStrategy === 'mixed',
mirrorInterval: envConfig.gitea.mirrorInterval || existingConfig?.[0]?.giteaConfig?.mirrorInterval || '8h',
lfs: envConfig.gitea.lfs ?? existingConfig?.[0]?.giteaConfig?.lfs ?? false,
wiki: envConfig.mirror.mirrorWiki ?? existingConfig?.[0]?.giteaConfig?.wiki ?? false,
visibility: envConfig.gitea.visibility || existingConfig?.[0]?.giteaConfig?.visibility || 'public',
createOrg: envConfig.gitea.createOrg ?? existingConfig?.[0]?.giteaConfig?.createOrg ?? true,
templateOwner: envConfig.gitea.templateOwner || existingConfig?.[0]?.giteaConfig?.templateOwner || undefined,
templateRepo: envConfig.gitea.templateRepo || existingConfig?.[0]?.giteaConfig?.templateRepo || undefined,
addTopics: envConfig.gitea.addTopics ?? existingConfig?.[0]?.giteaConfig?.addTopics ?? true,
topicPrefix: envConfig.gitea.topicPrefix || existingConfig?.[0]?.giteaConfig?.topicPrefix || undefined,
preserveVisibility: envConfig.gitea.preserveVisibility ?? existingConfig?.[0]?.giteaConfig?.preserveVisibility ?? false,
forkStrategy: envConfig.gitea.forkStrategy || existingConfig?.[0]?.giteaConfig?.forkStrategy || 'reference',
// Mirror metadata options
mirrorReleases: envConfig.mirror.mirrorReleases ?? existingConfig?.[0]?.giteaConfig?.mirrorReleases ?? false,
releaseLimit: envConfig.mirror.releaseLimit ?? existingConfig?.[0]?.giteaConfig?.releaseLimit ?? 10,
issueConcurrency: envConfig.mirror.issueConcurrency && envConfig.mirror.issueConcurrency > 0
? envConfig.mirror.issueConcurrency
: existingConfig?.[0]?.giteaConfig?.issueConcurrency ?? 3,
pullRequestConcurrency: envConfig.mirror.pullRequestConcurrency && envConfig.mirror.pullRequestConcurrency > 0
? envConfig.mirror.pullRequestConcurrency
: existingConfig?.[0]?.giteaConfig?.pullRequestConcurrency ?? 5,
mirrorMetadata: envConfig.mirror.mirrorMetadata ?? (envConfig.mirror.mirrorIssues || envConfig.mirror.mirrorPullRequests || envConfig.mirror.mirrorLabels || envConfig.mirror.mirrorMilestones) ?? existingConfig?.[0]?.giteaConfig?.mirrorMetadata ?? false,
mirrorIssues: envConfig.mirror.mirrorIssues ?? existingConfig?.[0]?.giteaConfig?.mirrorIssues ?? false,
mirrorPullRequests: envConfig.mirror.mirrorPullRequests ?? existingConfig?.[0]?.giteaConfig?.mirrorPullRequests ?? false,
mirrorLabels: envConfig.mirror.mirrorLabels ?? existingConfig?.[0]?.giteaConfig?.mirrorLabels ?? false,
mirrorMilestones: envConfig.mirror.mirrorMilestones ?? existingConfig?.[0]?.giteaConfig?.mirrorMilestones ?? false,
};
// Build schedule config with support for interval as string or number
const scheduleInterval = envConfig.schedule.interval || (existingConfig?.[0]?.scheduleConfig?.interval ?? '3600');
const scheduleConfig = {
enabled: envConfig.schedule.enabled ?? existingConfig?.[0]?.scheduleConfig?.enabled ?? false,
interval: scheduleInterval,
concurrent: envConfig.schedule.concurrent ?? existingConfig?.[0]?.scheduleConfig?.concurrent ?? false,
batchSize: envConfig.schedule.batchSize ?? existingConfig?.[0]?.scheduleConfig?.batchSize ?? 10,
pauseBetweenBatches: envConfig.schedule.pauseBetweenBatches ?? existingConfig?.[0]?.scheduleConfig?.pauseBetweenBatches ?? 5000,
retryAttempts: envConfig.schedule.retryAttempts ?? existingConfig?.[0]?.scheduleConfig?.retryAttempts ?? 3,
retryDelay: envConfig.schedule.retryDelay ?? existingConfig?.[0]?.scheduleConfig?.retryDelay ?? 60000,
timeout: envConfig.schedule.timeout ?? existingConfig?.[0]?.scheduleConfig?.timeout ?? 3600000,
autoRetry: envConfig.schedule.autoRetry ?? existingConfig?.[0]?.scheduleConfig?.autoRetry ?? true,
cleanupBeforeMirror: envConfig.schedule.cleanupBeforeMirror ?? existingConfig?.[0]?.scheduleConfig?.cleanupBeforeMirror ?? false,
notifyOnFailure: envConfig.schedule.notifyOnFailure ?? existingConfig?.[0]?.scheduleConfig?.notifyOnFailure ?? true,
notifyOnSuccess: envConfig.schedule.notifyOnSuccess ?? existingConfig?.[0]?.scheduleConfig?.notifyOnSuccess ?? false,
logLevel: envConfig.schedule.logLevel || existingConfig?.[0]?.scheduleConfig?.logLevel || 'info',
timezone: envConfig.schedule.timezone || existingConfig?.[0]?.scheduleConfig?.timezone || 'UTC',
onlyMirrorUpdated: envConfig.schedule.onlyMirrorUpdated ?? existingConfig?.[0]?.scheduleConfig?.onlyMirrorUpdated ?? false,
updateInterval: envConfig.schedule.updateInterval ?? existingConfig?.[0]?.scheduleConfig?.updateInterval ?? 86400000,
skipRecentlyMirrored: envConfig.schedule.skipRecentlyMirrored ?? existingConfig?.[0]?.scheduleConfig?.skipRecentlyMirrored ?? true,
recentThreshold: envConfig.schedule.recentThreshold ?? existingConfig?.[0]?.scheduleConfig?.recentThreshold ?? 3600000,
autoImport: envConfig.schedule.autoImport ?? existingConfig?.[0]?.scheduleConfig?.autoImport ?? true,
autoMirror: envConfig.schedule.autoMirror ?? existingConfig?.[0]?.scheduleConfig?.autoMirror ?? false,
lastRun: existingConfig?.[0]?.scheduleConfig?.lastRun || undefined,
nextRun: existingConfig?.[0]?.scheduleConfig?.nextRun || undefined,
};
// Build cleanup config
const cleanupConfig = {
enabled: envConfig.cleanup.enabled ?? existingConfig?.[0]?.cleanupConfig?.enabled ?? false,
retentionDays: envConfig.cleanup.retentionDays ? envConfig.cleanup.retentionDays * 86400 : existingConfig?.[0]?.cleanupConfig?.retentionDays ?? 604800, // Convert days to seconds
deleteFromGitea: envConfig.cleanup.deleteFromGitea ?? existingConfig?.[0]?.cleanupConfig?.deleteFromGitea ?? false,
deleteIfNotInGitHub: envConfig.cleanup.deleteIfNotInGitHub ?? existingConfig?.[0]?.cleanupConfig?.deleteIfNotInGitHub ?? true,
protectedRepos: envConfig.cleanup.protectedRepos ?? existingConfig?.[0]?.cleanupConfig?.protectedRepos ?? [],
dryRun: envConfig.cleanup.dryRun ?? existingConfig?.[0]?.cleanupConfig?.dryRun ?? true,
orphanedRepoAction: envConfig.cleanup.orphanedRepoAction || existingConfig?.[0]?.cleanupConfig?.orphanedRepoAction || 'archive',
batchSize: envConfig.cleanup.batchSize ?? existingConfig?.[0]?.cleanupConfig?.batchSize ?? 10,
pauseBetweenDeletes: envConfig.cleanup.pauseBetweenDeletes ?? existingConfig?.[0]?.cleanupConfig?.pauseBetweenDeletes ?? 2000,
lastRun: existingConfig?.[0]?.cleanupConfig?.lastRun || undefined,
nextRun: existingConfig?.[0]?.cleanupConfig?.nextRun || undefined,
};
if (existingConfig.length > 0) {
// Update existing config
console.log('[ENV Config Loader] Updating existing configuration with environment variables');
await db
.update(configs)
.set({
githubConfig,
giteaConfig,
scheduleConfig,
cleanupConfig,
updatedAt: new Date(),
})
.where(eq(configs.id, existingConfig[0].id));
} else {
// Create new config
console.log('[ENV Config Loader] Creating new configuration from environment variables');
const configId = uuidv4();
await db.insert(configs).values({
id: configId,
userId,
name: 'Environment Configuration',
isActive: true,
githubConfig,
giteaConfig,
include: [],
exclude: [],
scheduleConfig,
cleanupConfig,
createdAt: new Date(),
updatedAt: new Date(),
});
}
console.log('[ENV Config Loader] Configuration initialized successfully from environment variables');
} catch (error) {
console.error('[ENV Config Loader] Failed to initialize configuration from environment:', error);
// Don't throw - this is a non-critical initialization
}
}
+258
View File
@@ -0,0 +1,258 @@
import { v4 as uuidv4 } from "uuid";
import { db, events } from "./db";
import { eq, and, gt, lt, inArray } from "drizzle-orm";
/**
* Publishes an event to a specific channel for a user
* This replaces Redis pub/sub with SQLite storage
*/
export async function publishEvent({
userId,
channel,
payload,
deduplicationKey,
}: {
userId: string;
channel: string;
payload: any;
deduplicationKey?: string; // Optional key to prevent duplicate events
}): Promise<string> {
try {
const eventId = uuidv4();
console.log(`Publishing event to channel ${channel} for user ${userId}`);
// Check for duplicate events if deduplication key is provided
if (deduplicationKey) {
const existingEvent = await db
.select()
.from(events)
.where(
and(
eq(events.userId, userId),
eq(events.channel, channel),
eq(events.read, false)
)
)
.limit(10); // Check recent unread events
// Check if any existing event has the same deduplication key in payload
const isDuplicate = existingEvent.some(event => {
try {
const eventPayload = JSON.parse(event.payload as string);
return eventPayload.deduplicationKey === deduplicationKey;
} catch {
return false;
}
});
if (isDuplicate) {
console.log(`Skipping duplicate event with key: ${deduplicationKey}`);
return eventId; // Return a valid ID but don't create the event
}
}
// Add deduplication key to payload if provided
const eventPayload = deduplicationKey
? { ...payload, deduplicationKey }
: payload;
// Insert the event into the SQLite database
await db.insert(events).values({
id: eventId,
userId,
channel,
payload: JSON.stringify(eventPayload),
createdAt: new Date(),
});
console.log(`Event published successfully with ID ${eventId}`);
return eventId;
} catch (error) {
console.error("Error publishing event:", error);
throw new Error("Failed to publish event");
}
}
/**
* Gets new events for a specific user and channel
* This replaces Redis subscribe with SQLite polling
*/
export async function getNewEvents({
userId,
channel,
lastEventTime,
}: {
userId: string;
channel: string;
lastEventTime?: Date;
}): Promise<any[]> {
try {
// Build the query conditions
const conditions = [
eq(events.userId, userId),
eq(events.channel, channel),
eq(events.read, false)
];
// Add time filter if provided
if (lastEventTime) {
conditions.push(gt(events.createdAt, lastEventTime));
}
// Execute the query
const newEvents = await db
.select()
.from(events)
.where(and(...conditions))
.orderBy(events.createdAt);
// Mark events as read
if (newEvents.length > 0) {
await db
.update(events)
.set({ read: true })
.where(
and(
eq(events.userId, userId),
eq(events.channel, channel),
eq(events.read, false)
)
);
}
// Parse the payloads
return newEvents.map(event => ({
...event,
payload: JSON.parse(event.payload as string),
}));
} catch (error) {
console.error("Error getting new events:", error);
return [];
}
}
/**
* Removes duplicate events based on deduplication keys
* This can be called periodically to clean up any duplicates that may have slipped through
*/
export async function removeDuplicateEvents(userId?: string): Promise<{ duplicatesRemoved: number }> {
try {
console.log("Removing duplicate events...");
// Build the base query
const allEvents = userId
? await db.select().from(events).where(eq(events.userId, userId))
: await db.select().from(events);
const duplicateIds: string[] = [];
// Group events by user and channel, then check for duplicates
const eventsByUserChannel = new Map<string, typeof allEvents>();
for (const event of allEvents) {
const key = `${event.userId}-${event.channel}`;
if (!eventsByUserChannel.has(key)) {
eventsByUserChannel.set(key, []);
}
eventsByUserChannel.get(key)!.push(event);
}
// Check each group for duplicates
for (const [, events] of eventsByUserChannel) {
const channelSeenKeys = new Set<string>();
// Sort by creation time (keep the earliest)
events.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
for (const event of events) {
try {
const payload = JSON.parse(event.payload as string);
if (payload.deduplicationKey) {
if (channelSeenKeys.has(payload.deduplicationKey)) {
duplicateIds.push(event.id);
} else {
channelSeenKeys.add(payload.deduplicationKey);
}
}
} catch {
// Skip events with invalid JSON
}
}
}
// Remove duplicates
if (duplicateIds.length > 0) {
console.log(`Removing ${duplicateIds.length} duplicate events`);
// Delete in batches to avoid query size limits
const batchSize = 100;
for (let i = 0; i < duplicateIds.length; i += batchSize) {
const batch = duplicateIds.slice(i, i + batchSize);
await db.delete(events).where(inArray(events.id, batch));
}
}
console.log(`Removed ${duplicateIds.length} duplicate events`);
return { duplicatesRemoved: duplicateIds.length };
} catch (error) {
console.error("Error removing duplicate events:", error);
return { duplicatesRemoved: 0 };
}
}
/**
* Cleans up old events to prevent the database from growing too large
* This function is used by the cleanup button in the Activity Log page
*
* @param maxAgeInDays Number of days to keep events (default: 7)
* @param cleanupUnreadAfterDays Number of days after which to clean up unread events (default: 2x maxAgeInDays)
* @returns Object containing the number of read and unread events deleted
*/
export async function cleanupOldEvents(
maxAgeInDays: number = 7,
cleanupUnreadAfterDays?: number
): Promise<{ readEventsDeleted: number; unreadEventsDeleted: number }> {
try {
console.log(`Cleaning up events older than ${maxAgeInDays} days...`);
// Calculate the cutoff date for read events
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - maxAgeInDays);
// Delete read events older than the cutoff date
const readResult = await db
.delete(events)
.where(
and(
eq(events.read, true),
lt(events.createdAt, cutoffDate)
)
);
const readEventsDeleted = (readResult as any).changes || 0;
console.log(`Deleted ${readEventsDeleted} read events`);
// Calculate the cutoff date for unread events (default to 2x the retention period)
const unreadCutoffDate = new Date();
const unreadMaxAge = cleanupUnreadAfterDays || (maxAgeInDays * 2);
unreadCutoffDate.setDate(unreadCutoffDate.getDate() - unreadMaxAge);
// Delete unread events that are significantly older
const unreadResult = await db
.delete(events)
.where(
and(
eq(events.read, false),
lt(events.createdAt, unreadCutoffDate)
)
);
const unreadEventsDeleted = (unreadResult as any).changes || 0;
console.log(`Deleted ${unreadEventsDeleted} unread events`);
return { readEventsDeleted, unreadEventsDeleted };
} catch (error) {
console.error("Error cleaning up old events:", error);
return { readEventsDeleted: 0, unreadEventsDeleted: 0 };
}
}
@@ -0,0 +1,256 @@
/**
* Real-time event system using EventEmitter
* For the self-hosted version
*/
import { EventEmitter } from 'events';
export interface RealtimeEvent {
type: string;
userId?: string;
data: any;
timestamp: number;
}
/**
* Real-time event bus for local instance
*/
export class RealtimeEventBus extends EventEmitter {
private channels = new Map<string, Set<(event: RealtimeEvent) => void>>();
private userChannels = new Map<string, string[]>();
constructor() {
super();
}
/**
* Handle incoming events
*/
private handleIncomingEvent(channel: string, event: RealtimeEvent) {
// Emit to local listeners
this.emit(channel, event);
// Call channel-specific handlers
const handlers = this.channels.get(channel);
if (handlers) {
handlers.forEach(handler => {
try {
handler(event);
} catch (error) {
console.error('Error in event handler:', error);
}
});
}
}
/**
* Subscribe to a channel
*/
async subscribe(channel: string, handler?: (event: RealtimeEvent) => void) {
// Add handler if provided
if (handler) {
if (!this.channels.has(channel)) {
this.channels.set(channel, new Set());
}
this.channels.get(channel)!.add(handler);
}
// Add local listener
if (!this.listenerCount(channel)) {
this.on(channel, (event) => this.handleIncomingEvent(channel, event));
}
}
/**
* Subscribe to user-specific channels
*/
async subscribeUser(userId: string) {
const channels = [
`user:${userId}`,
`user:${userId}:notifications`,
`user:${userId}:updates`,
];
this.userChannels.set(userId, channels);
for (const channel of channels) {
await this.subscribe(channel);
}
}
/**
* Unsubscribe from a channel
*/
async unsubscribe(channel: string, handler?: (event: RealtimeEvent) => void) {
// Remove handler if provided
if (handler) {
this.channels.get(channel)?.delete(handler);
// Remove channel if no handlers left
if (this.channels.get(channel)?.size === 0) {
this.channels.delete(channel);
}
}
// Remove local listener if no handlers
if (!this.channels.has(channel)) {
this.removeAllListeners(channel);
}
}
/**
* Unsubscribe from user channels
*/
async unsubscribeUser(userId: string) {
const channels = this.userChannels.get(userId) || [];
for (const channel of channels) {
await this.unsubscribe(channel);
}
this.userChannels.delete(userId);
}
/**
* Publish an event
*/
async publish(channel: string, event: Omit<RealtimeEvent, 'timestamp'>) {
const fullEvent: RealtimeEvent = {
...event,
timestamp: Date.now(),
};
// Emit locally
this.handleIncomingEvent(channel, fullEvent);
}
/**
* Broadcast to all users
*/
async broadcast(event: Omit<RealtimeEvent, 'timestamp'>) {
await this.publish('broadcast', event);
}
/**
* Send event to specific user
*/
async sendToUser(userId: string, event: Omit<RealtimeEvent, 'timestamp' | 'userId'>) {
await this.publish(`user:${userId}`, {
...event,
userId,
});
}
/**
* Send activity update
*/
async sendActivity(activity: {
userId: string;
action: string;
resource: string;
resourceId: string;
details?: any;
}) {
const event = {
type: 'activity',
data: activity,
};
// Send to user
await this.sendToUser(activity.userId, event);
// Also publish to activity channel
await this.publish('activity', {
...event,
userId: activity.userId,
});
}
/**
* Get event statistics
*/
getStats() {
return {
channels: this.channels.size,
listeners: Array.from(this.channels.values()).reduce(
(sum, handlers) => sum + handlers.size,
0
),
userChannels: this.userChannels.size,
};
}
}
// Global event bus instance
export const eventBus = new RealtimeEventBus();
/**
* React hook for subscribing to events
*/
export function useRealtimeEvents(
channel: string,
handler: (event: RealtimeEvent) => void,
deps: any[] = []
) {
if (typeof window !== 'undefined') {
const { useEffect } = require('react');
useEffect(() => {
eventBus.subscribe(channel, handler);
return () => {
eventBus.unsubscribe(channel, handler);
};
}, deps);
}
}
/**
* Server-sent events endpoint handler
*/
export async function createSSEHandler(userId: string) {
const encoder = new TextEncoder();
// Create a readable stream for SSE
const stream = new ReadableStream({
async start(controller) {
// Send initial connection event
controller.enqueue(
encoder.encode(`data: ${JSON.stringify({ type: 'connected' })}\n\n`)
);
// Subscribe to user channels
await eventBus.subscribeUser(userId);
// Create event handler
const handleEvent = (event: RealtimeEvent) => {
controller.enqueue(
encoder.encode(`data: ${JSON.stringify(event)}\n\n`)
);
};
// Subscribe to channels
eventBus.on(`user:${userId}`, handleEvent);
// Keep connection alive with heartbeat
const heartbeat = setInterval(() => {
controller.enqueue(encoder.encode(': heartbeat\n\n'));
}, 30000);
// Cleanup on close
return () => {
clearInterval(heartbeat);
eventBus.off(`user:${userId}`, handleEvent);
eventBus.unsubscribeUser(userId);
};
},
});
return new Response(stream, {
headers: {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
'Connection': 'keep-alive',
},
});
}
@@ -0,0 +1,202 @@
/**
* Gitea authentication and permission validation utilities
*/
import type { Config } from "@/types/config";
import { httpGet, HttpError } from "./http-client";
import { decryptConfigTokens } from "./utils/config-encryption";
export interface GiteaUser {
id: number;
login: string;
username: string;
full_name?: string;
email?: string;
is_admin: boolean;
created?: string;
restricted?: boolean;
active?: boolean;
prohibit_login?: boolean;
location?: string;
website?: string;
description?: string;
visibility?: string;
followers_count?: number;
following_count?: number;
starred_repos_count?: number;
language?: string;
}
/**
* Validates Gitea authentication and returns user information
*/
export async function validateGiteaAuth(config: Partial<Config>): Promise<GiteaUser> {
if (!config.giteaConfig?.url || !config.giteaConfig?.token) {
throw new Error("Gitea URL and token are required for authentication validation");
}
const decryptedConfig = decryptConfigTokens(config as Config);
try {
const response = await httpGet<GiteaUser>(
`${config.giteaConfig.url}/api/v1/user`,
{
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
}
);
const user = response.data;
// Validate user data
if (!user.id || user.id === 0) {
throw new Error("Invalid user data received from Gitea: User ID is 0 or missing");
}
if (!user.username && !user.login) {
throw new Error("Invalid user data received from Gitea: Username is missing");
}
console.log(`[Auth Validator] Successfully authenticated as: ${user.username || user.login} (ID: ${user.id}, Admin: ${user.is_admin})`);
return user;
} catch (error) {
if (error instanceof HttpError) {
if (error.status === 401) {
throw new Error(
"Authentication failed: The provided Gitea token is invalid or expired. " +
"Please check your Gitea configuration and ensure the token has the necessary permissions."
);
} else if (error.status === 403) {
throw new Error(
"Permission denied: The Gitea token does not have sufficient permissions. " +
"Please ensure your token has 'read:user' scope at minimum."
);
}
}
throw new Error(
`Failed to validate Gitea authentication: ${error instanceof Error ? error.message : String(error)}`
);
}
}
/**
* Checks if the authenticated user can create organizations
*/
export async function canCreateOrganizations(config: Partial<Config>): Promise<boolean> {
try {
const user = await validateGiteaAuth(config);
// Admin users can always create organizations
if (user.is_admin) {
console.log(`[Auth Validator] User is admin, can create organizations`);
return true;
}
// Check if the instance allows regular users to create organizations
// This would require checking instance settings, which may not be publicly available
// For now, we'll try to create a test org and see if it fails
if (!config.giteaConfig?.url || !config.giteaConfig?.token) {
return false;
}
const decryptedConfig = decryptConfigTokens(config as Config);
try {
// Try to list user's organizations as a proxy for permission check
const orgsResponse = await httpGet(
`${config.giteaConfig.url}/api/v1/user/orgs`,
{
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
}
);
// If we can list orgs, we likely can create them
console.log(`[Auth Validator] User can list organizations, likely can create them`);
return true;
} catch (listError) {
if (listError instanceof HttpError && listError.status === 403) {
console.log(`[Auth Validator] User cannot list/create organizations`);
return false;
}
// For other errors, assume we can try
return true;
}
} catch (error) {
console.error(`[Auth Validator] Error checking organization creation permissions:`, error);
return false;
}
}
/**
* Gets or validates the default owner for repositories
*/
export async function getValidatedDefaultOwner(config: Partial<Config>): Promise<string> {
const user = await validateGiteaAuth(config);
const username = user.username || user.login;
if (!username) {
throw new Error("Unable to determine Gitea username from authentication");
}
// Check if the configured defaultOwner matches the authenticated user
if (config.giteaConfig?.defaultOwner && config.giteaConfig.defaultOwner !== username) {
console.warn(
`[Auth Validator] Configured defaultOwner (${config.giteaConfig.defaultOwner}) ` +
`does not match authenticated user (${username}). Using authenticated user.`
);
}
return username;
}
/**
* Validates that the Gitea configuration is properly set up for mirroring
*/
export async function validateGiteaConfigForMirroring(config: Partial<Config>): Promise<{
valid: boolean;
user: GiteaUser;
canCreateOrgs: boolean;
warnings: string[];
errors: string[];
}> {
const warnings: string[] = [];
const errors: string[] = [];
try {
// Validate authentication
const user = await validateGiteaAuth(config);
// Check organization creation permissions
const canCreateOrgs = await canCreateOrganizations(config);
if (!canCreateOrgs && config.giteaConfig?.preserveOrgStructure) {
warnings.push(
"User cannot create organizations but 'preserveOrgStructure' is enabled. " +
"Repositories will be mirrored to the user account instead."
);
}
// Validate token scopes (this would require additional API calls to check specific permissions)
// For now, we'll just check if basic operations work
return {
valid: true,
user,
canCreateOrgs,
warnings,
errors,
};
} catch (error) {
errors.push(error instanceof Error ? error.message : String(error));
return {
valid: false,
user: {} as GiteaUser,
canCreateOrgs: false,
warnings,
errors,
};
}
}
@@ -0,0 +1,755 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { createMockResponse, mockFetch } from "@/tests/mock-fetch";
// Mock the helpers module before importing gitea-enhanced
const mockCreateMirrorJob = mock(() => Promise.resolve("mock-job-id"));
mock.module("@/lib/helpers", () => ({
createMirrorJob: mockCreateMirrorJob
}));
const mockMirrorGitHubReleasesToGitea = mock(() => Promise.resolve());
const mockMirrorGitRepoIssuesToGitea = mock(() => Promise.resolve());
const mockMirrorGitRepoPullRequestsToGitea = mock(() => Promise.resolve());
const mockMirrorGitRepoLabelsToGitea = mock(() => Promise.resolve());
const mockMirrorGitRepoMilestonesToGitea = mock(() => Promise.resolve());
const mockGetGiteaRepoOwnerAsync = mock(() => Promise.resolve("starred"));
// Mock the database module
const mockDb = {
insert: mock((table: any) => ({
values: mock((data: any) => Promise.resolve({ insertedId: "mock-id" }))
})),
update: mock(() => ({
set: mock(() => ({
where: mock(() => Promise.resolve())
}))
}))
};
mock.module("@/lib/db", () => ({
db: mockDb,
mirrorJobs: {},
repositories: {}
}));
// Mock config encryption
mock.module("@/lib/utils/config-encryption", () => ({
decryptConfigTokens: (config: any) => config,
encryptConfigTokens: (config: any) => config,
getDecryptedGitHubToken: (config: any) => config.githubConfig?.token || "",
getDecryptedGiteaToken: (config: any) => config.giteaConfig?.token || ""
}));
// Mock http-client
class MockHttpError extends Error {
constructor(message: string, public status: number, public statusText: string, public response?: string) {
super(message);
this.name = 'HttpError';
}
}
// Track call counts for org tests
let orgCheckCount = 0;
let orgTestContext = "";
let getOrgCalled = false;
let createOrgCalled = false;
const mockHttpGet = mock(async (url: string, headers?: any) => {
// Return different responses based on URL patterns
// Handle user authentication endpoint
if (url.includes("/api/v1/user")) {
return {
data: {
id: 1,
login: "testuser",
username: "testuser",
email: "test@example.com",
is_admin: false,
full_name: "Test User"
},
status: 200,
statusText: "OK",
headers: new Headers()
};
}
if (url.includes("/api/v1/repos/starred/test-repo")) {
return {
data: {
id: 123,
name: "test-repo",
mirror: true,
owner: { login: "starred" },
mirror_interval: "8h",
clone_url: "https://github.com/user/test-repo.git",
private: false
},
status: 200,
statusText: "OK",
headers: new Headers()
};
}
if (url.includes("/api/v1/repos/starred/regular-repo")) {
return {
data: {
id: 124,
name: "regular-repo",
mirror: false,
owner: { login: "starred" }
},
status: 200,
statusText: "OK",
headers: new Headers()
};
}
if (url.includes("/api/v1/repos/starred/non-mirror-repo")) {
return {
data: {
id: 456,
name: "non-mirror-repo",
mirror: false,
owner: { login: "starred" },
private: false
},
status: 200,
statusText: "OK",
headers: new Headers()
};
}
if (url.includes("/api/v1/repos/starred/mirror-repo")) {
return {
data: {
id: 789,
name: "mirror-repo",
mirror: true,
owner: { login: "starred" },
mirror_interval: "8h",
private: false
},
status: 200,
statusText: "OK",
headers: new Headers()
};
}
if (url.includes("/api/v1/repos/starred/metadata-repo")) {
return {
data: {
id: 790,
name: "metadata-repo",
mirror: true,
owner: { login: "starred" },
mirror_interval: "8h",
private: false,
},
status: 200,
statusText: "OK",
headers: new Headers(),
};
}
if (url.includes("/api/v1/repos/starred/already-synced-repo")) {
return {
data: {
id: 791,
name: "already-synced-repo",
mirror: true,
owner: { login: "starred" },
mirror_interval: "8h",
private: false,
},
status: 200,
statusText: "OK",
headers: new Headers(),
};
}
if (url.includes("/api/v1/repos/")) {
throw new MockHttpError("Not Found", 404, "Not Found");
}
// Handle org GET requests based on test context
if (url.includes("/api/v1/orgs/starred")) {
orgCheckCount++;
if (orgTestContext === "duplicate-retry" && orgCheckCount > 2) {
// After retries, org exists
return {
data: { id: 999, username: "starred" },
status: 200,
statusText: "OK",
headers: new Headers()
};
}
// Otherwise, org doesn't exist
throw new MockHttpError("Not Found", 404, "Not Found");
}
if (url.includes("/api/v1/orgs/neworg")) {
getOrgCalled = true;
// Org doesn't exist
throw new MockHttpError("Not Found", 404, "Not Found");
}
return { data: {}, status: 200, statusText: "OK", headers: new Headers() };
});
const mockHttpPost = mock(async (url: string, body?: any, headers?: any) => {
if (url.includes("/api/v1/orgs") && body?.username === "starred") {
// Simulate duplicate org error
throw new MockHttpError(
'insert organization: pq: duplicate key value violates unique constraint "UQE_user_lower_name"',
400,
"Bad Request",
JSON.stringify({ message: 'insert organization: pq: duplicate key value violates unique constraint "UQE_user_lower_name"', url: "https://gitea.example.com/api/swagger" })
);
}
if (url.includes("/api/v1/orgs") && body?.username === "neworg") {
createOrgCalled = true;
return {
data: { id: 777, username: "neworg" },
status: 201,
statusText: "Created",
headers: new Headers()
};
}
if (url.includes("/mirror-sync")) {
return {
data: { success: true },
status: 200,
statusText: "OK",
headers: new Headers()
};
}
return { data: {}, status: 200, statusText: "OK", headers: new Headers() };
});
const mockHttpDelete = mock(async (url: string, headers?: any) => {
if (url.includes("/api/v1/repos/starred/test-repo")) {
return { data: {}, status: 204, statusText: "No Content", headers: new Headers() };
}
return { data: {}, status: 200, statusText: "OK", headers: new Headers() };
});
mock.module("@/lib/http-client", () => ({
httpGet: mockHttpGet,
httpPost: mockHttpPost,
httpDelete: mockHttpDelete,
HttpError: MockHttpError
}));
// Now import the modules we're testing
import {
getGiteaRepoInfo,
getOrCreateGiteaOrgEnhanced,
syncGiteaRepoEnhanced,
handleExistingNonMirrorRepo
} from "./gitea-enhanced";
import type { Config, Repository } from "./db/schema";
import { repoStatusEnum } from "@/types/Repository";
// Get HttpError from the mocked module
const { HttpError } = await import("@/lib/http-client");
describe("Enhanced Gitea Operations", () => {
let originalFetch: typeof global.fetch;
beforeEach(() => {
originalFetch = global.fetch;
// Clear mocks
mockCreateMirrorJob.mockClear();
mockDb.insert.mockClear();
mockDb.update.mockClear();
mockMirrorGitHubReleasesToGitea.mockClear();
mockMirrorGitRepoIssuesToGitea.mockClear();
mockMirrorGitRepoPullRequestsToGitea.mockClear();
mockMirrorGitRepoLabelsToGitea.mockClear();
mockMirrorGitRepoMilestonesToGitea.mockClear();
mockGetGiteaRepoOwnerAsync.mockClear();
mockGetGiteaRepoOwnerAsync.mockImplementation(() => Promise.resolve("starred"));
// Reset tracking variables
orgCheckCount = 0;
orgTestContext = "";
getOrgCalled = false;
createOrgCalled = false;
});
afterEach(() => {
global.fetch = originalFetch;
});
describe("getGiteaRepoInfo", () => {
test("should return repo info for existing mirror repository", async () => {
global.fetch = mockFetch(() =>
createMockResponse({
id: 123,
name: "test-repo",
owner: "starred",
mirror: true,
mirror_interval: "8h",
clone_url: "https://github.com/user/test-repo.git",
private: false,
})
);
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.example.com",
token: "encrypted-token",
defaultOwner: "testuser",
mirrorReleases: true,
},
};
const repoInfo = await getGiteaRepoInfo({
config,
owner: "starred",
repoName: "test-repo",
});
expect(repoInfo).toBeTruthy();
expect(repoInfo?.mirror).toBe(true);
expect(repoInfo?.name).toBe("test-repo");
});
test("should return repo info for existing non-mirror repository", async () => {
global.fetch = mockFetch(() =>
createMockResponse({
id: 124,
name: "regular-repo",
owner: "starred",
mirror: false,
private: false,
})
);
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.example.com",
token: "encrypted-token",
defaultOwner: "testuser",
mirrorReleases: true,
},
};
const repoInfo = await getGiteaRepoInfo({
config,
owner: "starred",
repoName: "regular-repo",
});
expect(repoInfo).toBeTruthy();
expect(repoInfo?.mirror).toBe(false);
});
test("should return null for non-existent repository", async () => {
global.fetch = mockFetch(() =>
createMockResponse(
"Not Found",
{ ok: false, status: 404, statusText: "Not Found" }
)
);
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.example.com",
token: "encrypted-token",
defaultOwner: "testuser",
mirrorReleases: true,
},
};
const repoInfo = await getGiteaRepoInfo({
config,
owner: "starred",
repoName: "non-existent",
});
expect(repoInfo).toBeNull();
});
});
describe("getOrCreateGiteaOrgEnhanced", () => {
test("should handle duplicate organization constraint error with retry", async () => {
orgTestContext = "duplicate-retry";
orgCheckCount = 0; // Reset the count
const config: Partial<Config> = {
userId: "user123",
giteaConfig: {
url: "https://gitea.example.com",
token: "encrypted-token",
defaultOwner: "testuser",
visibility: "public",
},
};
const orgId = await getOrCreateGiteaOrgEnhanced({
orgName: "starred",
config,
maxRetries: 3,
retryDelay: 0, // No delay in tests
});
expect(orgId).toBe(999);
expect(orgCheckCount).toBeGreaterThanOrEqual(3);
});
test("should create organization on first attempt", async () => {
// Reset tracking variables
getOrgCalled = false;
createOrgCalled = false;
const config: Partial<Config> = {
userId: "user123",
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: false,
mirrorStarred: true,
},
giteaConfig: {
url: "https://gitea.example.com",
token: "encrypted-token",
defaultOwner: "testuser",
mirrorReleases: true,
},
};
const orgId = await getOrCreateGiteaOrgEnhanced({
orgName: "neworg",
config,
retryDelay: 0, // No delay in tests
});
expect(orgId).toBe(777);
expect(getOrgCalled).toBe(true);
expect(createOrgCalled).toBe(true);
});
});
describe("syncGiteaRepoEnhanced", () => {
test("should fail gracefully when repository is not a mirror", async () => {
const config: Partial<Config> = {
userId: "user123",
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: false,
mirrorStarred: true,
},
giteaConfig: {
url: "https://gitea.example.com",
token: "encrypted-token",
defaultOwner: "testuser",
mirrorReleases: true,
},
};
const repository: Repository = {
id: "repo123",
name: "non-mirror-repo",
fullName: "user/non-mirror-repo",
owner: "user",
cloneUrl: "https://github.com/user/non-mirror-repo.git",
isPrivate: false,
isStarred: true,
status: repoStatusEnum.parse("mirrored"),
visibility: "public",
userId: "user123",
createdAt: new Date(),
updatedAt: new Date(),
};
await expect(
syncGiteaRepoEnhanced(
{ config, repository },
{
getGiteaRepoOwnerAsync: mockGetGiteaRepoOwnerAsync,
mirrorGitHubReleasesToGitea: mockMirrorGitHubReleasesToGitea,
mirrorGitRepoIssuesToGitea: mockMirrorGitRepoIssuesToGitea,
mirrorGitRepoPullRequestsToGitea: mockMirrorGitRepoPullRequestsToGitea,
mirrorGitRepoLabelsToGitea: mockMirrorGitRepoLabelsToGitea,
mirrorGitRepoMilestonesToGitea: mockMirrorGitRepoMilestonesToGitea,
}
)
).rejects.toThrow("Repository non-mirror-repo is not a mirror. Cannot sync.");
expect(mockMirrorGitHubReleasesToGitea).not.toHaveBeenCalled();
});
test("should successfully sync a mirror repository", async () => {
const config: Partial<Config> = {
userId: "user123",
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: false,
mirrorStarred: true,
},
giteaConfig: {
url: "https://gitea.example.com",
token: "encrypted-token",
defaultOwner: "testuser",
mirrorReleases: true,
},
};
const repository: Repository = {
id: "repo456",
name: "mirror-repo",
fullName: "user/mirror-repo",
owner: "user",
cloneUrl: "https://github.com/user/mirror-repo.git",
isPrivate: false,
isStarred: true,
status: repoStatusEnum.parse("mirrored"),
visibility: "public",
userId: "user123",
createdAt: new Date(),
updatedAt: new Date(),
};
const result = await syncGiteaRepoEnhanced(
{ config, repository },
{
getGiteaRepoOwnerAsync: mockGetGiteaRepoOwnerAsync,
mirrorGitHubReleasesToGitea: mockMirrorGitHubReleasesToGitea,
mirrorGitRepoIssuesToGitea: mockMirrorGitRepoIssuesToGitea,
mirrorGitRepoPullRequestsToGitea: mockMirrorGitRepoPullRequestsToGitea,
mirrorGitRepoLabelsToGitea: mockMirrorGitRepoLabelsToGitea,
mirrorGitRepoMilestonesToGitea: mockMirrorGitRepoMilestonesToGitea,
}
);
expect(result).toEqual({ success: true });
expect(mockGetGiteaRepoOwnerAsync).toHaveBeenCalled();
expect(mockMirrorGitHubReleasesToGitea).toHaveBeenCalledTimes(1);
const releaseCall = mockMirrorGitHubReleasesToGitea.mock.calls[0][0];
expect(releaseCall.giteaOwner).toBe("starred");
expect(releaseCall.giteaRepoName).toBe("mirror-repo");
expect(releaseCall.config.githubConfig?.token).toBe("github-token");
expect(releaseCall.octokit).toBeDefined();
});
test("mirrors metadata components when enabled and not previously synced", async () => {
const config: Partial<Config> = {
userId: "user123",
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: true,
mirrorStarred: false,
},
giteaConfig: {
url: "https://gitea.example.com",
token: "encrypted-token",
defaultOwner: "testuser",
mirrorReleases: true,
mirrorMetadata: true,
mirrorIssues: true,
mirrorPullRequests: true,
mirrorLabels: true,
mirrorMilestones: true,
},
};
const repository: Repository = {
id: "repo789",
name: "metadata-repo",
fullName: "user/metadata-repo",
owner: "user",
cloneUrl: "https://github.com/user/metadata-repo.git",
isPrivate: false,
isStarred: false,
status: repoStatusEnum.parse("mirrored"),
visibility: "public",
userId: "user123",
createdAt: new Date(),
updatedAt: new Date(),
metadata: null,
};
await syncGiteaRepoEnhanced(
{ config, repository },
{
getGiteaRepoOwnerAsync: mockGetGiteaRepoOwnerAsync,
mirrorGitHubReleasesToGitea: mockMirrorGitHubReleasesToGitea,
mirrorGitRepoIssuesToGitea: mockMirrorGitRepoIssuesToGitea,
mirrorGitRepoPullRequestsToGitea: mockMirrorGitRepoPullRequestsToGitea,
mirrorGitRepoLabelsToGitea: mockMirrorGitRepoLabelsToGitea,
mirrorGitRepoMilestonesToGitea: mockMirrorGitRepoMilestonesToGitea,
}
);
expect(mockMirrorGitHubReleasesToGitea).toHaveBeenCalledTimes(1);
expect(mockMirrorGitRepoIssuesToGitea).toHaveBeenCalledTimes(1);
expect(mockMirrorGitRepoPullRequestsToGitea).toHaveBeenCalledTimes(1);
expect(mockMirrorGitRepoMilestonesToGitea).toHaveBeenCalledTimes(1);
// Labels should be skipped because issues already import them
expect(mockMirrorGitRepoLabelsToGitea).not.toHaveBeenCalled();
});
test("skips metadata mirroring when components already synced", async () => {
const config: Partial<Config> = {
userId: "user123",
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: true,
mirrorStarred: false,
},
giteaConfig: {
url: "https://gitea.example.com",
token: "encrypted-token",
defaultOwner: "testuser",
mirrorReleases: false,
mirrorMetadata: true,
mirrorIssues: true,
mirrorPullRequests: true,
mirrorLabels: true,
mirrorMilestones: true,
},
};
const repository: Repository = {
id: "repo790",
name: "already-synced-repo",
fullName: "user/already-synced-repo",
owner: "user",
cloneUrl: "https://github.com/user/already-synced-repo.git",
isPrivate: false,
isStarred: false,
status: repoStatusEnum.parse("mirrored"),
visibility: "public",
userId: "user123",
createdAt: new Date(),
updatedAt: new Date(),
metadata: JSON.stringify({
components: {
releases: true,
issues: true,
pullRequests: true,
labels: true,
milestones: true,
},
lastSyncedAt: new Date().toISOString(),
}),
};
await syncGiteaRepoEnhanced(
{ config, repository },
{
getGiteaRepoOwnerAsync: mockGetGiteaRepoOwnerAsync,
mirrorGitHubReleasesToGitea: mockMirrorGitHubReleasesToGitea,
mirrorGitRepoIssuesToGitea: mockMirrorGitRepoIssuesToGitea,
mirrorGitRepoPullRequestsToGitea: mockMirrorGitRepoPullRequestsToGitea,
mirrorGitRepoLabelsToGitea: mockMirrorGitRepoLabelsToGitea,
mirrorGitRepoMilestonesToGitea: mockMirrorGitRepoMilestonesToGitea,
}
);
expect(mockMirrorGitHubReleasesToGitea).not.toHaveBeenCalled();
expect(mockMirrorGitRepoIssuesToGitea).not.toHaveBeenCalled();
expect(mockMirrorGitRepoPullRequestsToGitea).not.toHaveBeenCalled();
expect(mockMirrorGitRepoLabelsToGitea).not.toHaveBeenCalled();
expect(mockMirrorGitRepoMilestonesToGitea).not.toHaveBeenCalled();
});
});
describe("handleExistingNonMirrorRepo", () => {
test("should skip non-mirror repository with skip strategy", async () => {
const repoInfo = {
id: 123,
name: "test-repo",
owner: "starred",
mirror: false,
private: false,
};
const repository: Repository = {
id: "repo123",
name: "test-repo",
fullName: "user/test-repo",
owner: "user",
cloneUrl: "https://github.com/user/test-repo.git",
isPrivate: false,
isStarred: true,
status: repoStatusEnum.parse("imported"),
visibility: "public",
userId: "user123",
createdAt: new Date(),
updatedAt: new Date(),
};
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.example.com",
token: "encrypted-token",
defaultOwner: "testuser",
},
};
await handleExistingNonMirrorRepo({
config,
repository,
repoInfo,
strategy: "skip",
});
// Test passes if no error is thrown
expect(true).toBe(true);
});
test("should delete non-mirror repository with delete strategy", async () => {
// Mock deleteGiteaRepo which uses httpDelete via the http-client mock
const repoInfo = {
id: 124,
name: "test-repo",
owner: "starred",
mirror: false,
private: false,
};
const repository: Repository = {
id: "repo124",
name: "test-repo",
fullName: "user/test-repo",
owner: "user",
cloneUrl: "https://github.com/user/test-repo.git",
isPrivate: false,
isStarred: true,
status: repoStatusEnum.parse("imported"),
visibility: "public",
userId: "user123",
createdAt: new Date(),
updatedAt: new Date(),
};
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.example.com",
token: "encrypted-token",
defaultOwner: "testuser",
},
};
// deleteGiteaRepo in the actual code uses fetch directly, not httpDelete
// We need to mock fetch for this test
let deleteCalled = false;
global.fetch = mockFetch(async (url: string, options?: RequestInit) => {
if (url.includes("/api/v1/repos/starred/test-repo") && options?.method === "DELETE") {
deleteCalled = true;
return createMockResponse(null, { ok: true, status: 204 });
}
return createMockResponse(null, { ok: false, status: 404 });
});
await handleExistingNonMirrorRepo({
config,
repository,
repoInfo,
strategy: "delete",
});
expect(deleteCalled).toBe(true);
});
});
});
@@ -0,0 +1,785 @@
/**
* Enhanced Gitea operations with better error handling for starred repositories
* This module provides fixes for:
* 1. "Repository is not a mirror" errors
* 2. Duplicate organization constraint errors
* 3. Race conditions in parallel processing
*/
import type { Config } from "@/types/config";
import type { Repository } from "./db/schema";
import { Octokit } from "@octokit/rest";
import { createMirrorJob } from "./helpers";
import { decryptConfigTokens } from "./utils/config-encryption";
import { httpPost, httpGet, httpPatch, HttpError } from "./http-client";
import { db, repositories } from "./db";
import { eq } from "drizzle-orm";
import { repoStatusEnum } from "@/types/Repository";
import {
parseRepositoryMetadataState,
serializeRepositoryMetadataState,
} from "./metadata-state";
type SyncDependencies = {
getGiteaRepoOwnerAsync: typeof import("./gitea")["getGiteaRepoOwnerAsync"];
mirrorGitHubReleasesToGitea: typeof import("./gitea")["mirrorGitHubReleasesToGitea"];
mirrorGitRepoIssuesToGitea: typeof import("./gitea")["mirrorGitRepoIssuesToGitea"];
mirrorGitRepoPullRequestsToGitea: typeof import("./gitea")["mirrorGitRepoPullRequestsToGitea"];
mirrorGitRepoLabelsToGitea: typeof import("./gitea")["mirrorGitRepoLabelsToGitea"];
mirrorGitRepoMilestonesToGitea: typeof import("./gitea")["mirrorGitRepoMilestonesToGitea"];
};
/**
* Enhanced repository information including mirror status
*/
interface GiteaRepoInfo {
id: number;
name: string;
owner: { login: string } | string;
mirror: boolean;
mirror_interval?: string;
clone_url?: string;
private: boolean;
}
/**
* Check if a repository exists in Gitea and return its details
*/
export async function getGiteaRepoInfo({
config,
owner,
repoName,
}: {
config: Partial<Config>;
owner: string;
repoName: string;
}): Promise<GiteaRepoInfo | null> {
try {
if (!config.giteaConfig?.url || !config.giteaConfig?.token) {
throw new Error("Gitea config is required.");
}
const decryptedConfig = decryptConfigTokens(config as Config);
const response = await httpGet<GiteaRepoInfo>(
`${config.giteaConfig.url}/api/v1/repos/${owner}/${repoName}`,
{
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
}
);
return response.data;
} catch (error) {
if (error instanceof HttpError && error.status === 404) {
return null; // Repository doesn't exist
}
throw error;
}
}
/**
* Enhanced organization creation with better error handling and retry logic
*/
export async function getOrCreateGiteaOrgEnhanced({
orgName,
orgId,
config,
maxRetries = 3,
retryDelay = 100,
}: {
orgId?: string;
orgName: string;
config: Partial<Config>;
maxRetries?: number;
retryDelay?: number;
}): Promise<number> {
if (!config.giteaConfig?.url || !config.giteaConfig?.token || !config.userId) {
throw new Error("Gitea config is required.");
}
const decryptedConfig = decryptConfigTokens(config as Config);
// First, validate the user's authentication by getting their information
console.log(`[Org Creation] Validating user authentication before organization operations`);
try {
const userResponse = await httpGet(
`${config.giteaConfig.url}/api/v1/user`,
{
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
}
);
console.log(`[Org Creation] Authenticated as user: ${userResponse.data.username || userResponse.data.login} (ID: ${userResponse.data.id})`);
} catch (authError) {
if (authError instanceof HttpError && authError.status === 401) {
console.error(`[Org Creation] Authentication failed: Invalid or expired token`);
throw new Error(`Authentication failed: Please check your Gitea token has the required permissions. The token may be invalid or expired.`);
}
console.error(`[Org Creation] Failed to validate authentication:`, authError);
throw new Error(`Failed to validate Gitea authentication: ${authError instanceof Error ? authError.message : String(authError)}`);
}
for (let attempt = 0; attempt < maxRetries; attempt++) {
try {
console.log(`[Org Creation] Attempting to get or create organization: ${orgName} (attempt ${attempt + 1}/${maxRetries})`);
// Check if org exists
try {
const orgResponse = await httpGet<{ id: number }>(
`${config.giteaConfig.url}/api/v1/orgs/${orgName}`,
{
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
}
);
console.log(`[Org Creation] Organization ${orgName} already exists with ID: ${orgResponse.data.id}`);
return orgResponse.data.id;
} catch (error) {
if (!(error instanceof HttpError) || error.status !== 404) {
throw error; // Unexpected error
}
// Organization doesn't exist, continue to create it
}
// Try to create the organization
console.log(`[Org Creation] Organization ${orgName} not found. Creating new organization.`);
const visibility = config.giteaConfig.visibility || "public";
const createOrgPayload = {
username: orgName,
full_name: orgName === "starred" ? "Starred Repositories" : orgName,
description: orgName === "starred"
? "Repositories starred on GitHub"
: `Mirrored from GitHub organization: ${orgName}`,
website: "",
location: "",
visibility: visibility,
};
try {
const createResponse = await httpPost<{ id: number }>(
`${config.giteaConfig.url}/api/v1/orgs`,
createOrgPayload,
{
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
}
);
console.log(`[Org Creation] Successfully created organization ${orgName} with ID: ${createResponse.data.id}`);
await createMirrorJob({
userId: config.userId,
organizationId: orgId,
organizationName: orgName,
message: `Successfully created Gitea organization: ${orgName}`,
status: "synced",
details: `Organization ${orgName} was created in Gitea with ID ${createResponse.data.id}.`,
});
return createResponse.data.id;
} catch (createError) {
// Check if it's a duplicate error
if (createError instanceof HttpError) {
const errorResponse = createError.response?.toLowerCase() || "";
const isDuplicateError =
errorResponse.includes("duplicate") ||
errorResponse.includes("already exists") ||
errorResponse.includes("uqe_user_lower_name") ||
errorResponse.includes("constraint");
if (isDuplicateError && attempt < maxRetries - 1) {
console.log(`[Org Creation] Organization creation failed due to duplicate. Will retry check.`);
// Wait before retry with exponential backoff
const delay = process.env.NODE_ENV === 'test' ? 0 : retryDelay * Math.pow(2, attempt);
console.log(`[Org Creation] Waiting ${delay}ms before retry...`);
if (delay > 0) {
await new Promise(resolve => setTimeout(resolve, delay));
}
continue; // Retry the loop
}
// Check for permission errors
if (createError.status === 403) {
console.error(`[Org Creation] Permission denied: User may not have rights to create organizations`);
throw new Error(`Permission denied: Your Gitea user account does not have permission to create organizations. Please ensure your account has the necessary privileges or contact your Gitea administrator.`);
}
// Check for authentication errors
if (createError.status === 401) {
console.error(`[Org Creation] Authentication failed when creating organization`);
throw new Error(`Authentication failed: The Gitea token does not have sufficient permissions to create organizations. Please ensure your token has 'write:organization' scope.`);
}
}
throw createError;
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : "Unknown error";
if (attempt === maxRetries - 1) {
// Final attempt failed
console.error(`[Org Creation] Failed to get or create organization ${orgName} after ${maxRetries} attempts: ${errorMessage}`);
await createMirrorJob({
userId: config.userId,
organizationId: orgId,
organizationName: orgName,
message: `Failed to create or fetch Gitea organization: ${orgName}`,
status: "failed",
details: `Error after ${maxRetries} attempts: ${errorMessage}`,
});
throw new Error(`Failed to create organization ${orgName}: ${errorMessage}`);
}
// Log retry attempt
console.warn(`[Org Creation] Attempt ${attempt + 1} failed for organization ${orgName}: ${errorMessage}. Retrying...`);
// Wait before retry
const delay = retryDelay * Math.pow(2, attempt);
await new Promise(resolve => setTimeout(resolve, delay));
}
}
// Should never reach here
throw new Error(`Failed to create organization ${orgName} after ${maxRetries} attempts`);
}
/**
* Enhanced sync operation that handles non-mirror repositories
*/
export async function syncGiteaRepoEnhanced({
config,
repository,
}: {
config: Partial<Config>;
repository: Repository;
}, deps?: SyncDependencies): Promise<any> {
try {
if (!config.userId || !config.giteaConfig?.url || !config.giteaConfig?.token) {
throw new Error("Gitea config is required.");
}
const decryptedConfig = decryptConfigTokens(config as Config);
console.log(`[Sync] Starting sync for repository ${repository.name}`);
// Mark repo as "syncing" in DB
await db
.update(repositories)
.set({
status: repoStatusEnum.parse("syncing"),
updatedAt: new Date(),
})
.where(eq(repositories.id, repository.id!));
// Get the expected owner
const dependencies = deps ?? (await import("./gitea"));
const repoOwner = await dependencies.getGiteaRepoOwnerAsync({ config, repository });
// Check if repo exists and get its info
const repoInfo = await getGiteaRepoInfo({
config,
owner: repoOwner,
repoName: repository.name,
});
if (!repoInfo) {
throw new Error(`Repository ${repository.name} not found in Gitea at ${repoOwner}/${repository.name}`);
}
// Check if it's a mirror repository
if (!repoInfo.mirror) {
console.warn(`[Sync] Repository ${repository.name} exists but is not configured as a mirror`);
// Update database to reflect this status
await db
.update(repositories)
.set({
status: repoStatusEnum.parse("failed"),
updatedAt: new Date(),
errorMessage: "Repository exists in Gitea but is not configured as a mirror. Manual intervention required.",
})
.where(eq(repositories.id, repository.id!));
await createMirrorJob({
userId: config.userId,
repositoryId: repository.id,
repositoryName: repository.name,
message: `Cannot sync ${repository.name}: Not a mirror repository`,
details: `Repository ${repository.name} exists in Gitea but is not configured as a mirror. You may need to delete and recreate it as a mirror, or manually configure it as a mirror in Gitea.`,
status: "failed",
});
throw new Error(`Repository ${repository.name} is not a mirror. Cannot sync.`);
}
// Update mirror interval if needed
if (config.giteaConfig?.mirrorInterval) {
try {
console.log(`[Sync] Updating mirror interval for ${repository.name} to ${config.giteaConfig.mirrorInterval}`);
const updateUrl = `${config.giteaConfig.url}/api/v1/repos/${repoOwner}/${repository.name}`;
await httpPatch(updateUrl, {
mirror_interval: config.giteaConfig.mirrorInterval,
}, {
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
});
console.log(`[Sync] Successfully updated mirror interval for ${repository.name}`);
} catch (updateError) {
console.warn(`[Sync] Failed to update mirror interval for ${repository.name}:`, updateError);
// Continue with sync even if interval update fails
}
}
// Perform the sync
const apiUrl = `${config.giteaConfig.url}/api/v1/repos/${repoOwner}/${repository.name}/mirror-sync`;
try {
const response = await httpPost(apiUrl, undefined, {
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
});
const metadataState = parseRepositoryMetadataState(repository.metadata);
let metadataUpdated = false;
const skipMetadataForStarred =
repository.isStarred && config.githubConfig?.starredCodeOnly;
let metadataOctokit: Octokit | null = null;
const ensureOctokit = (): Octokit | null => {
if (metadataOctokit) {
return metadataOctokit;
}
if (!decryptedConfig.githubConfig?.token) {
return null;
}
metadataOctokit = new Octokit({
auth: decryptedConfig.githubConfig.token,
});
return metadataOctokit;
};
const shouldMirrorReleases =
!!config.giteaConfig?.mirrorReleases && !skipMetadataForStarred;
const shouldMirrorIssuesThisRun =
!!config.giteaConfig?.mirrorIssues &&
!skipMetadataForStarred &&
!metadataState.components.issues;
const shouldMirrorPullRequests =
!!config.giteaConfig?.mirrorPullRequests &&
!skipMetadataForStarred &&
!metadataState.components.pullRequests;
const shouldMirrorLabels =
!!config.giteaConfig?.mirrorLabels &&
!skipMetadataForStarred &&
!shouldMirrorIssuesThisRun &&
!metadataState.components.labels;
const shouldMirrorMilestones =
!!config.giteaConfig?.mirrorMilestones &&
!skipMetadataForStarred &&
!metadataState.components.milestones;
if (shouldMirrorReleases) {
const octokit = ensureOctokit();
if (!octokit) {
console.warn(
`[Sync] Skipping release mirroring for ${repository.name}: Missing GitHub token`
);
} else {
try {
await dependencies.mirrorGitHubReleasesToGitea({
config,
octokit,
repository,
giteaOwner: repoOwner,
giteaRepoName: repository.name,
});
metadataState.components.releases = true;
metadataUpdated = true;
console.log(
`[Sync] Mirrored releases for ${repository.name} after sync`
);
} catch (releaseError) {
console.error(
`[Sync] Failed to mirror releases for ${repository.name}: ${
releaseError instanceof Error
? releaseError.message
: String(releaseError)
}`
);
}
}
}
if (shouldMirrorIssuesThisRun) {
const octokit = ensureOctokit();
if (!octokit) {
console.warn(
`[Sync] Skipping issue mirroring for ${repository.name}: Missing GitHub token`
);
} else {
try {
await dependencies.mirrorGitRepoIssuesToGitea({
config,
octokit,
repository,
giteaOwner: repoOwner,
giteaRepoName: repository.name,
});
metadataState.components.issues = true;
metadataState.components.labels = true;
metadataUpdated = true;
console.log(
`[Sync] Mirrored issues for ${repository.name} after sync`
);
} catch (issueError) {
console.error(
`[Sync] Failed to mirror issues for ${repository.name}: ${
issueError instanceof Error
? issueError.message
: String(issueError)
}`
);
}
}
} else if (
config.giteaConfig?.mirrorIssues &&
metadataState.components.issues
) {
console.log(
`[Sync] Issues already mirrored for ${repository.name}; skipping`
);
}
if (shouldMirrorPullRequests) {
const octokit = ensureOctokit();
if (!octokit) {
console.warn(
`[Sync] Skipping pull request mirroring for ${repository.name}: Missing GitHub token`
);
} else {
try {
await dependencies.mirrorGitRepoPullRequestsToGitea({
config,
octokit,
repository,
giteaOwner: repoOwner,
giteaRepoName: repository.name,
});
metadataState.components.pullRequests = true;
metadataUpdated = true;
console.log(
`[Sync] Mirrored pull requests for ${repository.name} after sync`
);
} catch (prError) {
console.error(
`[Sync] Failed to mirror pull requests for ${repository.name}: ${
prError instanceof Error ? prError.message : String(prError)
}`
);
}
}
} else if (
config.giteaConfig?.mirrorPullRequests &&
metadataState.components.pullRequests
) {
console.log(
`[Sync] Pull requests already mirrored for ${repository.name}; skipping`
);
}
if (shouldMirrorLabels) {
const octokit = ensureOctokit();
if (!octokit) {
console.warn(
`[Sync] Skipping label mirroring for ${repository.name}: Missing GitHub token`
);
} else {
try {
await dependencies.mirrorGitRepoLabelsToGitea({
config,
octokit,
repository,
giteaOwner: repoOwner,
giteaRepoName: repository.name,
});
metadataState.components.labels = true;
metadataUpdated = true;
console.log(
`[Sync] Mirrored labels for ${repository.name} after sync`
);
} catch (labelError) {
console.error(
`[Sync] Failed to mirror labels for ${repository.name}: ${
labelError instanceof Error
? labelError.message
: String(labelError)
}`
);
}
}
} else if (
config.giteaConfig?.mirrorLabels &&
metadataState.components.labels
) {
console.log(
`[Sync] Labels already mirrored for ${repository.name}; skipping`
);
}
if (shouldMirrorMilestones) {
const octokit = ensureOctokit();
if (!octokit) {
console.warn(
`[Sync] Skipping milestone mirroring for ${repository.name}: Missing GitHub token`
);
} else {
try {
await dependencies.mirrorGitRepoMilestonesToGitea({
config,
octokit,
repository,
giteaOwner: repoOwner,
giteaRepoName: repository.name,
});
metadataState.components.milestones = true;
metadataUpdated = true;
console.log(
`[Sync] Mirrored milestones for ${repository.name} after sync`
);
} catch (milestoneError) {
console.error(
`[Sync] Failed to mirror milestones for ${repository.name}: ${
milestoneError instanceof Error
? milestoneError.message
: String(milestoneError)
}`
);
}
}
} else if (
config.giteaConfig?.mirrorMilestones &&
metadataState.components.milestones
) {
console.log(
`[Sync] Milestones already mirrored for ${repository.name}; skipping`
);
}
if (metadataUpdated) {
metadataState.lastSyncedAt = new Date().toISOString();
}
// Mark repo as "synced" in DB
await db
.update(repositories)
.set({
status: repoStatusEnum.parse("synced"),
updatedAt: new Date(),
lastMirrored: new Date(),
errorMessage: null,
mirroredLocation: `${repoOwner}/${repository.name}`,
metadata: metadataUpdated
? serializeRepositoryMetadataState(metadataState)
: repository.metadata ?? null,
})
.where(eq(repositories.id, repository.id!));
await createMirrorJob({
userId: config.userId,
repositoryId: repository.id,
repositoryName: repository.name,
message: `Successfully synced repository: ${repository.name}`,
details: `Repository ${repository.name} was synced with Gitea.`,
status: "synced",
});
console.log(`[Sync] Repository ${repository.name} synced successfully`);
return response.data;
} catch (syncError) {
if (syncError instanceof HttpError && syncError.status === 400) {
// Handle specific mirror-sync errors
const errorMessage = syncError.response?.toLowerCase() || "";
if (errorMessage.includes("not a mirror")) {
// Update status to indicate this specific error
await db
.update(repositories)
.set({
status: repoStatusEnum.parse("failed"),
updatedAt: new Date(),
errorMessage: "Repository is not configured as a mirror in Gitea",
})
.where(eq(repositories.id, repository.id!));
await createMirrorJob({
userId: config.userId,
repositoryId: repository.id,
repositoryName: repository.name,
message: `Sync failed: ${repository.name} is not a mirror`,
details: "The repository exists in Gitea but is not configured as a mirror. Manual intervention required.",
status: "failed",
});
}
}
throw syncError;
}
} catch (error) {
console.error(`[Sync] Error while syncing repository ${repository.name}:`, error);
// Update repo with error status
await db
.update(repositories)
.set({
status: repoStatusEnum.parse("failed"),
updatedAt: new Date(),
errorMessage: error instanceof Error ? error.message : "Unknown error",
})
.where(eq(repositories.id, repository.id!));
if (config.userId && repository.id && repository.name) {
await createMirrorJob({
userId: config.userId,
repositoryId: repository.id,
repositoryName: repository.name,
message: `Failed to sync repository: ${repository.name}`,
details: error instanceof Error ? error.message : "Unknown error",
status: "failed",
});
}
throw error;
}
}
/**
* Delete a repository in Gitea (useful for cleaning up non-mirror repos)
*/
export async function deleteGiteaRepo({
config,
owner,
repoName,
}: {
config: Partial<Config>;
owner: string;
repoName: string;
}): Promise<void> {
if (!config.giteaConfig?.url || !config.giteaConfig?.token) {
throw new Error("Gitea config is required.");
}
const decryptedConfig = decryptConfigTokens(config as Config);
const response = await fetch(
`${config.giteaConfig.url}/api/v1/repos/${owner}/${repoName}`,
{
method: "DELETE",
headers: {
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
},
}
);
if (!response.ok && response.status !== 404) {
throw new Error(`Failed to delete repository: ${response.statusText}`);
}
}
/**
* Convert a regular repository to a mirror (if supported by Gitea version)
* Note: This might not be supported in all Gitea versions
*/
export async function convertToMirror({
config,
owner,
repoName,
cloneUrl,
}: {
config: Partial<Config>;
owner: string;
repoName: string;
cloneUrl: string;
}): Promise<boolean> {
// This is a placeholder - actual implementation depends on Gitea API support
// Most Gitea versions don't support converting existing repos to mirrors
console.warn(`[Convert] Converting existing repositories to mirrors is not supported in most Gitea versions`);
return false;
}
/**
* Sequential organization creation to avoid race conditions
*/
export async function createOrganizationsSequentially({
config,
orgNames,
}: {
config: Partial<Config>;
orgNames: string[];
}): Promise<Map<string, number>> {
const orgIdMap = new Map<string, number>();
for (const orgName of orgNames) {
try {
const orgId = await getOrCreateGiteaOrgEnhanced({
orgName,
config,
maxRetries: 3,
retryDelay: 100,
});
orgIdMap.set(orgName, orgId);
} catch (error) {
console.error(`Failed to create organization ${orgName}:`, error);
// Continue with other organizations
}
}
return orgIdMap;
}
/**
* Check and handle existing non-mirror repositories
*/
export async function handleExistingNonMirrorRepo({
config,
repository,
repoInfo,
strategy = "skip",
}: {
config: Partial<Config>;
repository: Repository;
repoInfo: GiteaRepoInfo;
strategy?: "skip" | "delete" | "rename";
}): Promise<void> {
const owner = typeof repoInfo.owner === 'string' ? repoInfo.owner : repoInfo.owner.login;
const repoName = repoInfo.name;
switch (strategy) {
case "skip":
console.log(`[Handle] Skipping existing non-mirror repository: ${owner}/${repoName}`);
await db
.update(repositories)
.set({
status: repoStatusEnum.parse("failed"),
updatedAt: new Date(),
errorMessage: "Repository exists but is not a mirror. Skipped.",
})
.where(eq(repositories.id, repository.id!));
break;
case "delete":
console.log(`[Handle] Deleting existing non-mirror repository: ${owner}/${repoName}`);
await deleteGiteaRepo({
config,
owner,
repoName,
});
console.log(`[Handle] Deleted repository ${owner}/${repoName}. It can now be recreated as a mirror.`);
break;
case "rename":
console.log(`[Handle] Renaming strategy not implemented yet for: ${owner}/${repoName}`);
// TODO: Implement rename strategy if needed
break;
}
}
@@ -0,0 +1,110 @@
import { describe, test, expect, mock } from "bun:test";
import type { Config } from "./db/schema";
describe("Git LFS Support", () => {
test("should include LFS flag when configured", () => {
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.example.com",
token: "test-token",
defaultOwner: "testuser",
lfs: true, // LFS enabled
},
mirrorOptions: {
mirrorLFS: true, // UI option enabled
},
};
// Mock the payload that would be sent to Gitea API
const createMirrorPayload = (config: Partial<Config>, repoUrl: string) => {
const payload: any = {
clone_addr: repoUrl,
mirror: true,
private: false,
};
// Add LFS flag if configured
if (config.giteaConfig?.lfs || config.mirrorOptions?.mirrorLFS) {
payload.lfs = true;
}
return payload;
};
const payload = createMirrorPayload(config, "https://github.com/user/repo.git");
expect(payload).toHaveProperty("lfs");
expect(payload.lfs).toBe(true);
});
test("should not include LFS flag when not configured", () => {
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.example.com",
token: "test-token",
defaultOwner: "testuser",
lfs: false, // LFS disabled
},
mirrorOptions: {
mirrorLFS: false, // UI option disabled
},
};
const createMirrorPayload = (config: Partial<Config>, repoUrl: string) => {
const payload: any = {
clone_addr: repoUrl,
mirror: true,
private: false,
};
if (config.giteaConfig?.lfs || config.mirrorOptions?.mirrorLFS) {
payload.lfs = true;
}
return payload;
};
const payload = createMirrorPayload(config, "https://github.com/user/repo.git");
expect(payload).not.toHaveProperty("lfs");
});
test("should handle LFS with either giteaConfig or mirrorOptions", () => {
// Test with only giteaConfig.lfs
const config1: Partial<Config> = {
giteaConfig: {
url: "https://gitea.example.com",
token: "test-token",
defaultOwner: "testuser",
lfs: true,
},
};
// Test with only mirrorOptions.mirrorLFS
const config2: Partial<Config> = {
mirrorOptions: {
mirrorLFS: true,
},
};
const createMirrorPayload = (config: Partial<Config>, repoUrl: string) => {
const payload: any = {
clone_addr: repoUrl,
mirror: true,
private: false,
};
if (config.giteaConfig?.lfs || config.mirrorOptions?.mirrorLFS) {
payload.lfs = true;
}
return payload;
};
const payload1 = createMirrorPayload(config1, "https://github.com/user/repo.git");
const payload2 = createMirrorPayload(config2, "https://github.com/user/repo.git");
expect(payload1.lfs).toBe(true);
expect(payload2.lfs).toBe(true);
});
});
@@ -0,0 +1,272 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { getOrCreateGiteaOrg } from "./gitea";
import type { Config } from "./db/schema";
import { createMirrorJob } from "./helpers";
import { createMockResponse, mockFetch } from "@/tests/mock-fetch";
// Mock the helpers module
mock.module("@/lib/helpers", () => {
return {
createMirrorJob: mock(() => Promise.resolve("job-id"))
};
});
describe.skip("Gitea Organization Creation Error Handling", () => {
let originalFetch: typeof global.fetch;
let mockCreateMirrorJob: any;
beforeEach(() => {
originalFetch = global.fetch;
mockCreateMirrorJob = mock(() => Promise.resolve("job-id"));
});
afterEach(() => {
global.fetch = originalFetch;
});
describe("Duplicate organization constraint errors", () => {
test("should handle PostgreSQL duplicate key constraint violation", async () => {
global.fetch = mockFetch(async (url: string, options?: RequestInit) => {
if (url.includes("/api/v1/orgs/starred") && options?.method === "GET") {
// Organization doesn't exist according to GET
return createMockResponse(null, {
ok: false,
status: 404,
statusText: "Not Found"
});
}
if (url.includes("/api/v1/orgs") && options?.method === "POST") {
// But creation fails with duplicate key error
return createMockResponse({
message: "insert organization: pq: duplicate key value violates unique constraint \"UQE_user_lower_name\"",
url: "https://gitea.url.com/api/swagger"
}, {
ok: false,
status: 400,
statusText: "Bad Request"
});
}
return createMockResponse(null, { ok: false, status: 404 });
});
const config: Partial<Config> = {
userId: "user-123",
giteaConfig: {
url: "https://gitea.url.com",
token: "gitea-token",
defaultOwner: "testuser"
},
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: false,
mirrorStarred: true
}
};
try {
await getOrCreateGiteaOrg({
orgName: "starred",
config
});
expect(false).toBe(true); // Should not reach here
} catch (error) {
expect(error).toBeInstanceOf(Error);
expect((error as Error).message).toContain("duplicate key value violates unique constraint");
}
});
test.skip("should handle MySQL duplicate entry error", async () => {
let checkCount = 0;
global.fetch = mockFetch(async (url: string, options?: RequestInit) => {
if (url.includes("/api/v1/orgs/starred") && options?.method === "GET") {
checkCount++;
if (checkCount <= 2) {
// First checks: org doesn't exist
return createMockResponse(null, {
ok: false,
status: 404
});
} else {
// After retry: org exists (created by another process)
return createMockResponse({
id: 999,
username: "starred",
full_name: "Starred Repositories"
});
}
}
if (url.includes("/api/v1/orgs") && options?.method === "POST") {
return createMockResponse({
message: "Duplicate entry 'starred' for key 'organizations.username'",
url: "https://gitea.url.com/api/swagger"
}, {
ok: false,
status: 400,
statusText: "Bad Request"
});
}
return createMockResponse(null, { ok: false, status: 404 });
});
const config: Partial<Config> = {
userId: "user-123",
giteaConfig: {
url: "https://gitea.url.com",
token: "gitea-token",
defaultOwner: "testuser",
visibility: "public"
},
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: false,
mirrorStarred: true
}
};
// The enhanced version retries and eventually succeeds
const orgId = await getOrCreateGiteaOrg({
orgName: "starred",
config
});
expect(orgId).toBe(999);
expect(checkCount).toBeGreaterThanOrEqual(3);
});
});
describe("Race condition handling", () => {
test.skip("should handle race condition where org is created between check and create", async () => {
let checkCount = 0;
global.fetch = mockFetch(async (url: string, options?: RequestInit) => {
if (url.includes("/api/v1/orgs/starred") && options?.method === "GET") {
checkCount++;
if (checkCount === 1) {
// First check: org doesn't exist
return createMockResponse(null, {
ok: false,
status: 404
});
} else {
// Subsequent checks: org exists (created by another process)
return createMockResponse({
id: 789,
username: "starred",
full_name: "Starred Repositories"
});
}
}
if (url.includes("/api/v1/orgs") && options?.method === "POST") {
// Creation fails because org was created by another process
return createMockResponse({
message: "Organization already exists",
url: "https://gitea.url.com/api/swagger"
}, {
ok: false,
status: 400,
statusText: "Bad Request"
});
}
return createMockResponse(null, { ok: false, status: 404 });
});
const config: Partial<Config> = {
userId: "user-123",
giteaConfig: {
url: "https://gitea.url.com",
token: "gitea-token",
defaultOwner: "testuser"
},
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: false,
mirrorStarred: true
}
};
// Now we expect this to succeed because it will retry and find the org
const result = await getOrCreateGiteaOrg({
orgName: "starred",
config
});
expect(result).toBeDefined();
expect(result).toBe(789);
});
test.skip("should fail after max retries when organization is never found", async () => {
let checkCount = 0;
let createAttempts = 0;
global.fetch = mockFetch(async (url: string, options?: RequestInit) => {
if (url.includes("/api/v1/orgs/starred") && options?.method === "GET") {
checkCount++;
// Organization never exists
return createMockResponse(null, {
ok: false,
status: 404
});
}
if (url.includes("/api/v1/orgs") && options?.method === "POST") {
createAttempts++;
// Always fail with duplicate constraint error
return createMockResponse({
message: "insert organization: pq: duplicate key value violates unique constraint \"UQE_user_lower_name\"",
url: "https://gitea.url.com/api/swagger"
}, {
ok: false,
status: 400,
statusText: "Bad Request"
});
}
return createMockResponse(null, { ok: false, status: 404 });
});
const config: Partial<Config> = {
userId: "user-123",
giteaConfig: {
url: "https://gitea.url.com",
token: "gitea-token",
defaultOwner: "testuser",
visibility: "public"
},
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: false,
mirrorStarred: true
}
};
try {
await getOrCreateGiteaOrg({
orgName: "starred",
config
});
// Should not reach here - it will fail after 3 attempts
expect(true).toBe(false);
} catch (error) {
// Should fail after max retries
expect(error).toBeInstanceOf(Error);
expect((error as Error).message).toContain("Error in getOrCreateGiteaOrg");
expect((error as Error).message).toContain("Failed to create organization");
// The enhanced version checks once per attempt before creating
expect(checkCount).toBe(3); // One check per attempt
expect(createAttempts).toBe(3); // Should have attempted creation 3 times
}
});
});
});
@@ -0,0 +1,271 @@
import type { Config } from "@/types/config";
import { createMirrorJob } from "./helpers";
import { decryptConfigTokens } from "./utils/config-encryption";
/**
* Enhanced version of getOrCreateGiteaOrg with retry logic for race conditions
* This implementation handles the duplicate organization constraint errors
*/
export async function getOrCreateGiteaOrgWithRetry({
orgName,
orgId,
config,
maxRetries = 3,
retryDelay = 100,
}: {
orgId?: string; // db id
orgName: string;
config: Partial<Config>;
maxRetries?: number;
retryDelay?: number;
}): Promise<number> {
if (
!config.giteaConfig?.url ||
!config.giteaConfig?.token ||
!config.userId
) {
throw new Error("Gitea config is required.");
}
const decryptedConfig = decryptConfigTokens(config as Config);
for (let attempt = 0; attempt < maxRetries; attempt++) {
try {
console.log(`Attempting to get or create Gitea organization: ${orgName} (attempt ${attempt + 1}/${maxRetries})`);
// Check if org exists
const orgRes = await fetch(
`${config.giteaConfig.url}/api/v1/orgs/${orgName}`,
{
headers: {
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
"Content-Type": "application/json",
},
}
);
if (orgRes.ok) {
// Organization exists, return its ID
const contentType = orgRes.headers.get("content-type");
if (!contentType || !contentType.includes("application/json")) {
throw new Error(
`Invalid response format from Gitea API. Expected JSON but got: ${contentType}`
);
}
const org = await orgRes.json();
console.log(`Organization ${orgName} already exists with ID: ${org.id}`);
await createMirrorJob({
userId: config.userId,
organizationId: orgId,
organizationName: orgName,
message: `Found existing Gitea organization: ${orgName}`,
status: "synced",
details: `Organization ${orgName} already exists in Gitea with ID ${org.id}.`,
});
return org.id;
}
if (orgRes.status !== 404) {
// Unexpected error
const errorText = await orgRes.text();
throw new Error(
`Unexpected response from Gitea API: ${orgRes.status} ${orgRes.statusText}. Body: ${errorText}`
);
}
// Organization doesn't exist, try to create it
console.log(`Organization ${orgName} not found. Creating new organization.`);
const visibility = config.giteaConfig.visibility || "public";
const createOrgPayload = {
username: orgName,
full_name: orgName === "starred" ? "Starred Repositories" : orgName,
description: orgName === "starred"
? "Repositories starred on GitHub"
: `Mirrored from GitHub organization: ${orgName}`,
website: "",
location: "",
visibility: visibility,
};
const createRes = await fetch(
`${config.giteaConfig.url}/api/v1/orgs`,
{
method: "POST",
headers: {
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
"Content-Type": "application/json",
},
body: JSON.stringify(createOrgPayload),
}
);
if (createRes.ok) {
// Successfully created
const newOrg = await createRes.json();
console.log(`Successfully created organization ${orgName} with ID: ${newOrg.id}`);
await createMirrorJob({
userId: config.userId,
organizationId: orgId,
organizationName: orgName,
message: `Successfully created Gitea organization: ${orgName}`,
status: "synced",
details: `Organization ${orgName} was created in Gitea with ID ${newOrg.id}.`,
});
return newOrg.id;
}
// Handle creation failure
const createError = await createRes.json();
// Check if it's a duplicate error
if (
createError.message?.includes("duplicate") ||
createError.message?.includes("already exists") ||
createError.message?.includes("UQE_user_lower_name")
) {
console.log(`Organization creation failed due to duplicate. Will retry check.`);
// Wait before retry with exponential backoff
if (attempt < maxRetries - 1) {
const delay = retryDelay * Math.pow(2, attempt);
console.log(`Waiting ${delay}ms before retry...`);
await new Promise(resolve => setTimeout(resolve, delay));
continue; // Retry the loop
}
}
// Non-retryable error
throw new Error(
`Failed to create organization ${orgName}: ${createError.message || createRes.statusText}`
);
} catch (error) {
const errorMessage =
error instanceof Error
? error.message
: "Unknown error occurred in getOrCreateGiteaOrg.";
if (attempt === maxRetries - 1) {
// Final attempt failed
console.error(
`Failed to get or create organization ${orgName} after ${maxRetries} attempts: ${errorMessage}`
);
await createMirrorJob({
userId: config.userId,
organizationId: orgId,
organizationName: orgName,
message: `Failed to create or fetch Gitea organization: ${orgName}`,
status: "failed",
details: `Error after ${maxRetries} attempts: ${errorMessage}`,
});
throw new Error(`Error in getOrCreateGiteaOrg: ${errorMessage}`);
}
// Log retry attempt
console.warn(
`Attempt ${attempt + 1} failed for organization ${orgName}: ${errorMessage}. Retrying...`
);
// Wait before retry
const delay = retryDelay * Math.pow(2, attempt);
await new Promise(resolve => setTimeout(resolve, delay));
}
}
// Should never reach here
throw new Error(`Failed to create organization ${orgName} after ${maxRetries} attempts`);
}
/**
* Helper function to check if an error is retryable
*/
export function isRetryableOrgError(error: any): boolean {
if (!error?.message) return false;
const retryablePatterns = [
"duplicate",
"already exists",
"UQE_user_lower_name",
"constraint",
"timeout",
"ECONNREFUSED",
"ENOTFOUND",
"network"
];
const errorMessage = error.message.toLowerCase();
return retryablePatterns.some(pattern => errorMessage.includes(pattern));
}
/**
* Pre-validate organization setup before bulk operations
*/
export async function validateOrgSetup({
config,
orgNames,
}: {
config: Partial<Config>;
orgNames: string[];
}): Promise<{ valid: boolean; issues: string[] }> {
const issues: string[] = [];
if (!config.giteaConfig?.url || !config.giteaConfig?.token) {
issues.push("Gitea configuration is missing");
return { valid: false, issues };
}
const decryptedConfig = decryptConfigTokens(config as Config);
for (const orgName of orgNames) {
try {
const response = await fetch(
`${config.giteaConfig.url}/api/v1/orgs/${orgName}`,
{
headers: {
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
},
}
);
if (!response.ok && response.status !== 404) {
issues.push(`Cannot check organization '${orgName}': ${response.statusText}`);
}
} catch (error) {
issues.push(`Network error checking organization '${orgName}': ${error}`);
}
}
// Check if user has permission to create organizations
try {
const userResponse = await fetch(
`${config.giteaConfig.url}/api/v1/user`,
{
headers: {
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
},
}
);
if (userResponse.ok) {
const user = await userResponse.json();
if (user.prohibit_login) {
issues.push("User account is prohibited from login");
}
if (user.restricted) {
issues.push("User account is restricted");
}
}
} catch (error) {
issues.push(`Cannot verify user permissions: ${error}`);
}
return { valid: issues.length === 0, issues };
}
@@ -0,0 +1,229 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import type { Config, Repository } from "./db/schema";
import { repoStatusEnum } from "@/types/Repository";
import { createMockResponse, mockFetch } from "@/tests/mock-fetch";
// Mock the helpers module
mock.module("@/lib/helpers", () => {
return {
createMirrorJob: mock(() => Promise.resolve("job-id")),
createEvent: mock(() => Promise.resolve())
};
});
// Mock the database module
mock.module("@/lib/db", () => {
return {
db: {
update: mock(() => ({
set: mock(() => ({
where: mock(() => Promise.resolve())
}))
})),
insert: mock(() => ({
values: mock(() => Promise.resolve())
}))
},
repositories: {},
organizations: {},
events: {}
};
});
// Mock config encryption
mock.module("@/lib/utils/config-encryption", () => ({
decryptConfigTokens: (config: any) => config,
encryptConfigTokens: (config: any) => config,
getDecryptedGitHubToken: (config: any) => config.githubConfig?.token || "",
getDecryptedGiteaToken: (config: any) => config.giteaConfig?.token || ""
}));
// Track test context for org creation
let orgCheckCount = 0;
let repoCheckCount = 0;
// Mock additional functions from gitea module that are used in tests
const mockGetOrCreateGiteaOrg = mock(async ({ orgName, config }: any) => {
// Simulate retry logic for duplicate org error
orgCheckCount++;
if (orgName === "starred" && orgCheckCount <= 2) {
// First attempts fail with duplicate error (org created by another process)
throw new Error('insert organization: pq: duplicate key value violates unique constraint "UQE_user_lower_name"');
}
// After retries, org exists
if (orgName === "starred") {
return 999;
}
return 123;
});
const mockMirrorGitHubOrgRepoToGiteaOrg = mock(async () => {});
const mockIsRepoPresentInGitea = mock(async () => false);
mock.module("./gitea", () => ({
getOrCreateGiteaOrg: mockGetOrCreateGiteaOrg,
mirrorGitHubOrgRepoToGiteaOrg: mockMirrorGitHubOrgRepoToGiteaOrg,
isRepoPresentInGitea: mockIsRepoPresentInGitea
}));
// Import the mocked functions
const { getOrCreateGiteaOrg, mirrorGitHubOrgRepoToGiteaOrg, isRepoPresentInGitea } = await import("./gitea");
describe("Starred Repository Error Handling", () => {
let originalFetch: typeof global.fetch;
let consoleLogs: string[] = [];
let consoleErrors: string[] = [];
beforeEach(() => {
originalFetch = global.fetch;
consoleLogs = [];
consoleErrors = [];
orgCheckCount = 0;
repoCheckCount = 0;
// Capture console output for debugging
console.log = mock((message: string) => {
consoleLogs.push(message);
});
console.error = mock((message: string) => {
consoleErrors.push(message);
});
});
afterEach(() => {
global.fetch = originalFetch;
});
describe("Repository is not a mirror error", () => {
test("should handle 400 error when trying to sync a non-mirror repo", async () => {
// Mock fetch to simulate the "Repository is not a mirror" error
global.fetch = mockFetch(async (url: string, options?: RequestInit) => {
// Mock organization check - org exists
if (url.includes("/api/v1/orgs/starred") && options?.method === "GET") {
return createMockResponse({
id: 999,
username: "starred",
full_name: "Starred Repositories"
});
}
// Mock repository check - non-mirror repo exists
if (url.includes("/api/v1/repos/starred/test-repo") && options?.method === "GET") {
return createMockResponse({
id: 123,
name: "test-repo",
mirror: false, // Repo is not a mirror
owner: { login: "starred" }
});
}
// Mock repository migration attempt
if (url.includes("/api/v1/repos/migrate")) {
return createMockResponse({
id: 456,
name: "test-repo",
owner: { login: "starred" },
mirror: true,
mirror_interval: "8h"
});
}
return createMockResponse(null, { ok: false, status: 404 });
});
const config: Partial<Config> = {
userId: "user-123",
giteaConfig: {
url: "https://gitea.ui.com",
token: "gitea-token",
defaultOwner: "testuser",
starredReposOrg: "starred"
},
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: false,
mirrorStarred: true,
starredReposOrg: "starred"
}
};
const repository: Repository = {
id: "repo-123",
userId: "user-123",
configId: "config-123",
name: "test-repo",
fullName: "original-owner/test-repo",
url: "https://github.com/original-owner/test-repo",
cloneUrl: "https://github.com/original-owner/test-repo.git",
owner: "original-owner",
isPrivate: false,
isForked: false,
hasIssues: true,
isStarred: true, // This is a starred repo
isArchived: false,
size: 1000,
hasLFS: false,
hasSubmodules: false,
defaultBranch: "main",
visibility: "public",
status: "mirrored",
mirroredLocation: "starred/test-repo",
createdAt: new Date(),
updatedAt: new Date()
};
// Mock octokit
const mockOctokit = {} as any;
// The test name says "should handle 400 error when trying to sync a non-mirror repo"
// But mirrorGitHubOrgRepoToGiteaOrg creates a new mirror, it doesn't sync existing ones
// So it should succeed in creating a mirror even if a non-mirror repo exists
await mirrorGitHubOrgRepoToGiteaOrg({
config,
octokit: mockOctokit,
repository,
orgName: "starred"
});
// If no error is thrown, the operation succeeded
expect(true).toBe(true);
});
});
describe("Duplicate organization error", () => {
test("should handle duplicate organization creation error", async () => {
// Reset the mock to handle this specific test case
mockGetOrCreateGiteaOrg.mockImplementation(async ({ orgName, config }: any) => {
// Simulate successful org creation/fetch after initial duplicate error
return 999;
});
const config: Partial<Config> = {
userId: "user-123",
giteaConfig: {
url: "https://gitea.ui.com",
token: "gitea-token",
defaultOwner: "testuser",
starredReposOrg: "starred"
},
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: false,
mirrorStarred: true
}
};
// Should succeed with the mocked implementation
const result = await getOrCreateGiteaOrg({
orgName: "starred",
config
});
expect(result).toBeDefined();
expect(result).toBe(999);
});
});
});
+495
View File
@@ -0,0 +1,495 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { Octokit } from "@octokit/rest";
import { repoStatusEnum } from "@/types/Repository";
import { getOrCreateGiteaOrg, getGiteaRepoOwner, getGiteaRepoOwnerAsync } from "./gitea";
import type { Config, Repository, Organization } from "./db/schema";
import { createMockResponse, mockFetch } from "@/tests/mock-fetch";
// Mock the isRepoPresentInGitea function
const mockIsRepoPresentInGitea = mock(() => Promise.resolve(false));
let mockDbSelectResult: any[] = [];
// Mock the database module
mock.module("@/lib/db", () => {
return {
db: {
select: () => ({
from: () => ({
where: () => ({
limit: () => Promise.resolve(mockDbSelectResult)
})
})
}),
update: () => ({
set: () => ({
where: () => Promise.resolve()
})
})
},
repositories: {},
organizations: {}
};
});
// Mock the helpers module
mock.module("@/lib/helpers", () => {
return {
createMirrorJob: mock(() => Promise.resolve("job-id"))
};
});
// Mock http-client
mock.module("@/lib/http-client", () => {
return {
httpPost: mock(() => Promise.resolve({ data: { id: 123 }, status: 200, statusText: 'OK', headers: new Headers() })),
httpGet: mock(() => Promise.resolve({ data: [], status: 200, statusText: 'OK', headers: new Headers() })),
HttpError: class MockHttpError extends Error {
constructor(message: string, public status: number, public statusText: string, public response?: string) {
super(message);
this.name = 'HttpError';
}
}
};
});
// Mock the gitea module itself
mock.module("./gitea", () => {
return {
isRepoPresentInGitea: mockIsRepoPresentInGitea,
mirrorGithubRepoToGitea: mock(async () => {}),
mirrorGitHubOrgRepoToGiteaOrg: mock(async () => {})
};
});
describe("Gitea Repository Mirroring", () => {
// Mock console.log and console.error to prevent test output noise
let originalConsoleLog: typeof console.log;
let originalConsoleError: typeof console.error;
beforeEach(() => {
originalConsoleLog = console.log;
originalConsoleError = console.error;
console.log = mock(() => {});
console.error = mock(() => {});
mockDbSelectResult = [];
});
afterEach(() => {
console.log = originalConsoleLog;
console.error = originalConsoleError;
});
test("mirrorGithubRepoToGitea handles private repositories correctly", async () => {
// Import the mocked function
const { mirrorGithubRepoToGitea } = await import("./gitea");
// Create mock Octokit instance
const octokit = {} as Octokit;
// Create mock repository (private)
const repository = {
id: "repo-id",
name: "test-repo",
fullName: "testuser/test-repo",
url: "https://github.com/testuser/test-repo",
cloneUrl: "https://github.com/testuser/test-repo.git",
owner: "testuser",
isPrivate: true,
status: repoStatusEnum.parse("imported")
};
// Create mock config
const config = {
id: "config-id",
userId: "user-id",
githubConfig: {
token: "github-token",
mirrorIssues: false
},
giteaConfig: {
url: "https://gitea.example.com",
token: "gitea-token",
username: "giteauser"
}
};
// Call the function
await mirrorGithubRepoToGitea({
octokit,
repository: repository as any,
config
});
// Check that the function was called
expect(mirrorGithubRepoToGitea).toHaveBeenCalled();
});
test("getOrCreateGiteaOrg handles JSON parsing errors gracefully", async () => {
// Mock fetch to return invalid JSON
const originalFetch = global.fetch;
// Set NODE_ENV to test to suppress console errors
const originalNodeEnv = process.env.NODE_ENV;
process.env.NODE_ENV = 'test';
global.fetch = mockFetch(async (url: string, options?: RequestInit) => {
if (url.includes("/api/v1/orgs/test-org") && (!options || options.method === "GET")) {
// Mock organization check - returns success with invalid JSON
return createMockResponse(
"Invalid JSON response",
{
ok: true,
status: 200,
headers: { 'content-type': 'application/json' },
jsonError: new Error("Unexpected token in JSON")
}
);
}
return createMockResponse(null, { ok: false, status: 404 });
});
const config = {
userId: "user-id",
giteaConfig: {
url: "https://gitea.example.com",
token: "gitea-token",
defaultOwner: "testuser"
},
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: false,
mirrorStarred: true
}
};
// The JSON parsing error test is complex and the actual behavior depends on
// how the mock fetch and httpRequest interact. Since we've already tested
// that httpRequest throws on JSON parse errors in other tests, we can
// simplify this test to just ensure getOrCreateGiteaOrg handles errors
try {
await getOrCreateGiteaOrg({
orgName: "test-org",
config
});
// If it succeeds, that's also acceptable - the function might be resilient
expect(true).toBe(true);
} catch (error) {
// If it fails, ensure it's wrapped properly
expect(error).toBeInstanceOf(Error);
if ((error as Error).message.includes("Failed to parse JSON")) {
expect((error as Error).message).toContain("Error in getOrCreateGiteaOrg");
}
} finally {
// Restore original fetch and NODE_ENV
global.fetch = originalFetch;
process.env.NODE_ENV = originalNodeEnv;
}
});
test("getOrCreateGiteaOrg handles non-JSON content-type gracefully", async () => {
// Mock fetch to return HTML instead of JSON
const originalFetch = global.fetch;
global.fetch = mockFetch(async (url: string) => {
if (url.includes("/api/v1/orgs/")) {
return createMockResponse(
"<html><body>Error page</body></html>",
{
ok: true,
status: 200,
headers: { 'content-type': 'text/html' }
}
);
}
return originalFetch(url);
});
const config = {
userId: "user-id",
giteaConfig: {
url: "https://gitea.example.com",
token: "gitea-token",
defaultOwner: "testuser"
},
githubConfig: {
username: "testuser",
token: "github-token",
privateRepositories: false,
mirrorStarred: true
}
};
try {
await getOrCreateGiteaOrg({
orgName: "test-org",
config
});
// Should not reach here
expect(true).toBe(false);
} catch (error) {
// When content-type is not JSON, httpRequest returns the text as data
// But getOrCreateGiteaOrg expects a specific response structure with an id field
// So it should fail when trying to access orgResponse.data.id
expect(error).toBeInstanceOf(Error);
expect((error as Error).message).toBeDefined();
} finally {
// Restore original fetch
global.fetch = originalFetch;
}
});
test("mirrorGitHubOrgToGitea handles empty organizations correctly", async () => {
// Mock the createMirrorJob function
const mockCreateMirrorJob = mock(() => Promise.resolve("job-id"));
// Mock the getOrCreateGiteaOrg function
const mockGetOrCreateGiteaOrg = mock(() => Promise.resolve("gitea-org-id"));
// Create a test version of the function with mocked dependencies
const testMirrorGitHubOrgToGitea = async ({
organization,
config,
}: {
organization: any;
config: any;
}) => {
// Simulate the function logic for empty organization
console.log(`Mirroring organization ${organization.name}`);
// Mock: get or create Gitea org
await mockGetOrCreateGiteaOrg();
// Mock: query the db with the org name and get the repos
const orgRepos: any[] = []; // Empty array to simulate no repositories
if (orgRepos.length === 0) {
console.log(`No repositories found for organization ${organization.name} - marking as successfully mirrored`);
} else {
console.log(`Mirroring ${orgRepos.length} repositories for organization ${organization.name}`);
// Repository processing would happen here
}
console.log(`Organization ${organization.name} mirrored successfully`);
// Mock: Append log for "mirrored" status
await mockCreateMirrorJob({
userId: config.userId,
organizationId: organization.id,
organizationName: organization.name,
message: `Successfully mirrored organization: ${organization.name}`,
details: orgRepos.length === 0
? `Organization ${organization.name} was processed successfully (no repositories found).`
: `Organization ${organization.name} was mirrored to Gitea with ${orgRepos.length} repositories.`,
status: "mirrored",
});
};
// Create mock organization
const organization = {
id: "org-id",
name: "empty-org",
status: "imported"
};
// Create mock config
const config = {
id: "config-id",
userId: "user-id",
githubConfig: {
token: "github-token"
},
giteaConfig: {
url: "https://gitea.example.com",
token: "gitea-token"
}
};
// Call the test function
await testMirrorGitHubOrgToGitea({
organization,
config
});
// Verify that the mirror job was created with the correct details for empty org
expect(mockCreateMirrorJob).toHaveBeenCalledWith({
userId: "user-id",
organizationId: "org-id",
organizationName: "empty-org",
message: "Successfully mirrored organization: empty-org",
details: "Organization empty-org was processed successfully (no repositories found).",
status: "mirrored",
});
// Verify that getOrCreateGiteaOrg was called
expect(mockGetOrCreateGiteaOrg).toHaveBeenCalled();
});
});
describe("getGiteaRepoOwner - Organization Override Tests", () => {
const baseConfig: Partial<Config> = {
githubConfig: {
username: "testuser",
token: "token",
preserveOrgStructure: false,
skipForks: false,
privateRepositories: false,
mirrorIssues: false,
mirrorWiki: false,
mirrorStarred: false,
useSpecificUser: false,
includeOrgs: [],
excludeOrgs: [],
mirrorPublicOrgs: false,
publicOrgs: [],
starredCodeOnly: false,
mirrorStrategy: "preserve"
},
giteaConfig: {
defaultOwner: "giteauser",
url: "https://gitea.example.com",
token: "gitea-token",
organization: "github-mirrors",
visibility: "public",
starredReposOrg: "starred",
preserveVisibility: false
}
};
const baseRepo: Repository = {
id: "repo-id",
userId: "user-id",
configId: "config-id",
name: "test-repo",
fullName: "testuser/test-repo",
url: "https://github.com/testuser/test-repo",
cloneUrl: "https://github.com/testuser/test-repo.git",
owner: "testuser",
isPrivate: false,
isForked: false,
hasIssues: true,
isStarred: false,
isArchived: false,
size: 1000,
hasLFS: false,
hasSubmodules: false,
defaultBranch: "main",
visibility: "public",
status: "imported",
mirroredLocation: "",
createdAt: new Date(),
updatedAt: new Date()
};
test("starred repos go to starredReposOrg", () => {
const repo = { ...baseRepo, isStarred: true };
const result = getGiteaRepoOwner({ config: baseConfig, repository: repo });
expect(result).toBe("starred");
});
test("starred repos default to 'starred' org when starredReposOrg is not configured", () => {
const repo = { ...baseRepo, isStarred: true };
const configWithoutStarredOrg = {
...baseConfig,
giteaConfig: {
...baseConfig.giteaConfig,
starredReposOrg: undefined
}
};
const result = getGiteaRepoOwner({ config: configWithoutStarredOrg, repository: repo });
expect(result).toBe("starred");
});
// Removed test for personalReposOrg as this field no longer exists
test("preserve strategy: personal repos fallback to username when no override", () => {
const repo = { ...baseRepo, organization: undefined };
const result = getGiteaRepoOwner({ config: baseConfig, repository: repo });
expect(result).toBe("giteauser");
});
test("preserve strategy: org repos go to same org name", () => {
const repo = { ...baseRepo, organization: "myorg" };
const result = getGiteaRepoOwner({ config: baseConfig, repository: repo });
expect(result).toBe("myorg");
});
test("mixed strategy: personal repos go to organization", () => {
const configWithMixed = {
...baseConfig,
githubConfig: {
...baseConfig.githubConfig!,
mirrorStrategy: "mixed" as const
},
giteaConfig: {
...baseConfig.giteaConfig!,
organization: "github-mirrors"
}
};
const repo = { ...baseRepo, organization: undefined };
const result = getGiteaRepoOwner({ config: configWithMixed, repository: repo });
expect(result).toBe("github-mirrors");
});
test("mixed strategy: org repos preserve their structure", () => {
const configWithMixed = {
...baseConfig,
githubConfig: {
...baseConfig.githubConfig!,
mirrorStrategy: "mixed" as const
},
giteaConfig: {
...baseConfig.giteaConfig!,
organization: "github-mirrors"
}
};
const repo = { ...baseRepo, organization: "myorg" };
const result = getGiteaRepoOwner({ config: configWithMixed, repository: repo });
expect(result).toBe("myorg");
});
test("flat-user strategy: all repos go to defaultOwner", () => {
const configWithFlatUser = {
...baseConfig,
githubConfig: {
...baseConfig.githubConfig!,
mirrorStrategy: "flat-user" as const
}
};
const repo = { ...baseRepo, organization: "myorg" };
const result = getGiteaRepoOwner({ config: configWithFlatUser, repository: repo });
expect(result).toBe("giteauser");
});
test("getGiteaRepoOwnerAsync honors organization override for owner role", async () => {
mockDbSelectResult = [
{
id: "org-id",
userId: "user-id",
configId: "config-id",
name: "myorg",
membershipRole: "owner",
status: "imported",
destinationOrg: "custom-org",
avatarUrl: "https://example.com/avatar.png",
isIncluded: true,
repositoryCount: 0,
createdAt: new Date(),
updatedAt: new Date()
}
];
const configWithUser: Partial<Config> = {
...baseConfig,
userId: "user-id"
};
const repo = { ...baseRepo, organization: "myorg" };
const result = await getGiteaRepoOwnerAsync({
config: configWithUser,
repository: repo
});
expect(result).toBe("custom-org");
});
});
File diff suppressed because it is too large Load Diff
+454
View File
@@ -0,0 +1,454 @@
import type { GitOrg, MembershipRole } from "@/types/organizations";
import type { GitRepo, RepoStatus } from "@/types/Repository";
import { Octokit } from "@octokit/rest";
import { throttling } from "@octokit/plugin-throttling";
import type { Config } from "@/types/config";
// Conditionally import rate limit manager (not available in test environment)
let RateLimitManager: any = null;
let publishEvent: any = null;
if (process.env.NODE_ENV !== "test") {
try {
const rateLimitModule = await import("@/lib/rate-limit-manager");
RateLimitManager = rateLimitModule.RateLimitManager;
const eventsModule = await import("@/lib/events");
publishEvent = eventsModule.publishEvent;
} catch (error) {
console.warn("Rate limit manager not available:", error);
}
}
// Extend Octokit with throttling plugin when available (tests may stub Octokit)
// Fallback to base Octokit if .plugin is not present
const MyOctokit: any = (Octokit as any)?.plugin?.call
? (Octokit as any).plugin(throttling)
: Octokit as any;
/**
* Creates an authenticated Octokit instance with rate limit tracking and throttling
*/
export function createGitHubClient(token: string, userId?: string, username?: string): Octokit {
// Create a proper User-Agent to identify our application
// This helps GitHub understand our traffic patterns and can provide better rate limits
const userAgent = username
? `gitea-mirror/3.5.4 (user:${username})`
: "gitea-mirror/3.5.4";
const octokit = new MyOctokit({
auth: token, // Always use token for authentication (5000 req/hr vs 60 for unauthenticated)
userAgent, // Identify our application and user
baseUrl: "https://api.github.com", // Explicitly set the API endpoint
log: {
debug: () => {},
info: console.log,
warn: console.warn,
error: console.error,
},
request: {
// Add default headers for better identification
headers: {
accept: "application/vnd.github.v3+json",
"x-github-api-version": "2022-11-28", // Use a stable API version
},
},
throttle: {
onRateLimit: async (retryAfter: number, options: any, octokit: any, retryCount: number) => {
const isSearch = options.url.includes("/search/");
const maxRetries = isSearch ? 5 : 3; // Search endpoints get more retries
console.warn(
`[GitHub] Rate limit hit for ${options.method} ${options.url}. Retry ${retryCount + 1}/${maxRetries}`
);
// Update rate limit status and notify UI (if available)
if (userId && RateLimitManager) {
await RateLimitManager.updateFromResponse(userId, {
"retry-after": retryAfter.toString(),
"x-ratelimit-remaining": "0",
"x-ratelimit-reset": (Date.now() / 1000 + retryAfter).toString(),
});
}
if (userId && publishEvent) {
await publishEvent({
userId,
channel: "rate-limit",
payload: {
type: "rate-limited",
provider: "github",
retryAfter,
retryCount,
endpoint: options.url,
message: `Rate limit hit. Waiting ${retryAfter}s before retry ${retryCount + 1}/${maxRetries}...`,
},
});
}
// Retry with exponential backoff
if (retryCount < maxRetries) {
console.log(`[GitHub] Waiting ${retryAfter}s before retry...`);
return true;
}
// Max retries reached
console.error(`[GitHub] Max retries (${maxRetries}) reached for ${options.url}`);
return false;
},
onSecondaryRateLimit: async (retryAfter: number, options: any, octokit: any, retryCount: number) => {
console.warn(
`[GitHub] Secondary rate limit hit for ${options.method} ${options.url}`
);
// Update status and notify UI (if available)
if (userId && publishEvent) {
await publishEvent({
userId,
channel: "rate-limit",
payload: {
type: "secondary-limited",
provider: "github",
retryAfter,
retryCount,
endpoint: options.url,
message: `Secondary rate limit hit. Waiting ${retryAfter}s...`,
},
});
}
// Retry up to 2 times for secondary rate limits
if (retryCount < 2) {
console.log(`[GitHub] Waiting ${retryAfter}s for secondary rate limit...`);
return true;
}
return false;
},
// Throttle options to prevent hitting limits
fallbackSecondaryRateRetryAfter: 60, // Wait 60s on secondary rate limit
minimumSecondaryRateRetryAfter: 5, // Min 5s wait
retryAfterBaseValue: 1000, // Base retry in ms
},
});
// Add additional rate limit tracking if userId is provided and RateLimitManager is available
if (userId && RateLimitManager) {
octokit.hook.after("request", async (response: any, options: any) => {
// Update rate limit from response headers
if (response.headers) {
await RateLimitManager.updateFromResponse(userId, response.headers);
}
});
octokit.hook.error("request", async (error: any, options: any) => {
// Handle rate limit errors
if (error.status === 403 || error.status === 429) {
const message = error.message || "";
if (message.includes("rate limit") || message.includes("API rate limit")) {
console.error(`[GitHub] Rate limit error for user ${userId}: ${message}`);
// Update rate limit status from error response (if available)
if (error.response?.headers && RateLimitManager) {
await RateLimitManager.updateFromResponse(userId, error.response.headers);
}
// Create error event for UI (if available)
if (publishEvent) {
await publishEvent({
userId,
channel: "rate-limit",
payload: {
type: "error",
provider: "github",
error: message,
endpoint: options.url,
message: `Rate limit exceeded: ${message}`,
},
});
}
}
}
throw error;
});
}
return octokit;
}
/**
* Clone a repository from GitHub
*/
export async function getGithubRepoCloneUrl({
octokit,
owner,
repo,
}: {
octokit: Octokit;
owner: string;
repo: string;
}): Promise<{ url: string; cloneUrl: string }> {
const { data } = await octokit.repos.get({
owner,
repo,
});
return {
url: data.html_url,
cloneUrl: data.clone_url,
};
}
/**
* Get user repositories from GitHub
* todo: need to handle pagination and apply more filters based on user config
*/
export async function getGithubRepositories({
octokit,
config,
}: {
octokit: Octokit;
config: Partial<Config>;
}): Promise<GitRepo[]> {
try {
const repos = await octokit.paginate(
octokit.repos.listForAuthenticatedUser,
{ per_page: 100 }
);
const skipForks = config.githubConfig?.skipForks ?? false;
const filteredRepos = repos.filter((repo) => {
const isForkAllowed = !skipForks || !repo.fork;
return isForkAllowed;
});
return filteredRepos.map((repo) => ({
name: repo.name,
fullName: repo.full_name,
url: repo.html_url,
cloneUrl: repo.clone_url,
owner: repo.owner.login,
organization:
repo.owner.type === "Organization" ? repo.owner.login : undefined,
mirroredLocation: "",
destinationOrg: null,
isPrivate: repo.private,
isForked: repo.fork,
forkedFrom: (repo as typeof repo & { parent?: { full_name: string } })
.parent?.full_name,
hasIssues: repo.has_issues,
isStarred: false,
isArchived: repo.archived,
size: repo.size,
hasLFS: false,
hasSubmodules: false,
language: repo.language,
description: repo.description,
defaultBranch: repo.default_branch,
visibility: (repo.visibility ?? "public") as GitRepo["visibility"],
status: "imported",
lastMirrored: undefined,
errorMessage: undefined,
createdAt: repo.created_at ? new Date(repo.created_at) : new Date(),
updatedAt: repo.updated_at ? new Date(repo.updated_at) : new Date(),
}));
} catch (error) {
throw new Error(
`Error fetching repositories: ${
error instanceof Error ? error.message : String(error)
}`
);
}
}
export async function getGithubStarredRepositories({
octokit,
config,
}: {
octokit: Octokit;
config: Partial<Config>;
}) {
try {
const starredRepos = await octokit.paginate(
octokit.activity.listReposStarredByAuthenticatedUser,
{
per_page: 100,
}
);
return starredRepos.map((repo) => ({
name: repo.name,
fullName: repo.full_name,
url: repo.html_url,
cloneUrl: repo.clone_url,
owner: repo.owner.login,
organization:
repo.owner.type === "Organization" ? repo.owner.login : undefined,
mirroredLocation: "",
destinationOrg: null,
isPrivate: repo.private,
isForked: repo.fork,
forkedFrom: undefined,
hasIssues: repo.has_issues,
isStarred: true,
isArchived: repo.archived,
size: repo.size,
hasLFS: false, // Placeholder
hasSubmodules: false, // Placeholder
language: repo.language,
description: repo.description,
defaultBranch: repo.default_branch,
visibility: (repo.visibility ?? "public") as GitRepo["visibility"],
status: "imported",
lastMirrored: undefined,
errorMessage: undefined,
createdAt: repo.created_at ? new Date(repo.created_at) : new Date(),
updatedAt: repo.updated_at ? new Date(repo.updated_at) : new Date(),
}));
} catch (error) {
throw new Error(
`Error fetching starred repositories: ${
error instanceof Error ? error.message : String(error)
}`
);
}
}
/**
* Get user github organizations
*/
export async function getGithubOrganizations({
octokit,
config,
}: {
octokit: Octokit;
config: Partial<Config>;
}): Promise<GitOrg[]> {
try {
const { data: orgs } = await octokit.orgs.listForAuthenticatedUser({
per_page: 100,
});
// Get excluded organizations from environment variable
const excludedOrgsEnv = process.env.GITHUB_EXCLUDED_ORGS;
const excludedOrgs = excludedOrgsEnv
? excludedOrgsEnv.split(',').map(org => org.trim().toLowerCase())
: [];
// Filter out excluded organizations
const filteredOrgs = orgs.filter(org => {
if (excludedOrgs.includes(org.login.toLowerCase())) {
console.log(`Skipping organization ${org.login} - excluded via GITHUB_EXCLUDED_ORGS environment variable`);
return false;
}
return true;
});
const organizations = await Promise.all(
filteredOrgs.map(async (org) => {
const [{ data: orgDetails }, { data: membership }] = await Promise.all([
octokit.orgs.get({ org: org.login }),
octokit.orgs.getMembershipForAuthenticatedUser({ org: org.login }),
]);
const totalRepos =
orgDetails.public_repos + (orgDetails.total_private_repos ?? 0);
return {
name: org.login,
avatarUrl: org.avatar_url,
membershipRole: membership.role as MembershipRole,
isIncluded: false,
status: "imported" as RepoStatus,
repositoryCount: totalRepos,
createdAt: new Date(),
updatedAt: new Date(),
};
})
);
return organizations;
} catch (error) {
throw new Error(
`Error fetching organizations: ${
error instanceof Error ? error.message : String(error)
}`
);
}
}
/**
* Get repositories for a specific organization
*/
export async function getGithubOrganizationRepositories({
octokit,
organizationName,
}: {
octokit: Octokit;
organizationName: string;
}): Promise<GitRepo[]> {
try {
const repos = await octokit.paginate(octokit.repos.listForOrg, {
org: organizationName,
per_page: 100,
});
return repos.map((repo) => ({
name: repo.name,
fullName: repo.full_name,
url: repo.html_url,
cloneUrl: repo.clone_url ?? "",
owner: repo.owner.login,
organization: repo.owner.login,
mirroredLocation: "",
destinationOrg: null,
isPrivate: repo.private,
isForked: repo.fork,
forkedFrom: (repo as typeof repo & { parent?: { full_name: string } })
.parent?.full_name,
hasIssues: repo.has_issues ?? false,
isStarred: false, // Organization starred repos are separate API
isArchived: repo.archived ?? false,
size: repo.size ?? 0,
hasLFS: false,
hasSubmodules: false,
language: repo.language,
description: repo.description,
defaultBranch: repo.default_branch ?? "main",
visibility: (repo.visibility ?? "public") as GitRepo["visibility"],
status: "imported",
lastMirrored: undefined,
errorMessage: undefined,
createdAt: repo.created_at ? new Date(repo.created_at) : new Date(),
updatedAt: repo.updated_at ? new Date(repo.updated_at) : new Date(),
}));
} catch (error) {
throw new Error(
`Error fetching organization repositories: ${
error instanceof Error ? error.message : String(error)
}`
);
}
}
+311
View File
@@ -0,0 +1,311 @@
import type { RepoStatus } from "@/types/Repository";
import { db, mirrorJobs } from "./db";
import { eq, and, or, lt, isNull } from "drizzle-orm";
import { v4 as uuidv4 } from "uuid";
import { publishEvent } from "./events";
export async function createMirrorJob({
userId,
organizationId,
organizationName,
repositoryId,
repositoryName,
message,
status,
details,
jobType,
batchId,
totalItems,
itemIds,
inProgress,
skipDuplicateEvent,
}: {
userId: string;
organizationId?: string;
organizationName?: string;
repositoryId?: string;
repositoryName?: string;
details?: string;
message: string;
status: RepoStatus;
jobType?: "mirror" | "sync" | "retry";
batchId?: string;
totalItems?: number;
itemIds?: string[];
inProgress?: boolean;
skipDuplicateEvent?: boolean; // Option to skip event publishing for internal operations
}) {
const jobId = uuidv4();
const currentTimestamp = new Date();
const job = {
id: jobId,
userId,
repositoryId,
repositoryName,
organizationId,
organizationName,
details,
message: message,
status: status,
timestamp: currentTimestamp,
// New resilience fields
jobType: jobType || "mirror",
batchId: batchId || undefined,
totalItems: totalItems || undefined,
completedItems: 0,
itemIds: itemIds || undefined,
completedItemIds: [],
inProgress: inProgress !== undefined ? inProgress : false,
startedAt: inProgress ? currentTimestamp : undefined,
completedAt: undefined,
lastCheckpoint: undefined,
};
try {
// Insert the job into the database
await db.insert(mirrorJobs).values(job);
// Publish the event using SQLite instead of Redis (unless skipped)
if (!skipDuplicateEvent) {
const channel = `mirror-status:${userId}`;
// Create deduplication key based on the operation
let deduplicationKey: string | undefined;
if (repositoryId && status) {
deduplicationKey = `repo-${repositoryId}-${status}`;
} else if (organizationId && status) {
deduplicationKey = `org-${organizationId}-${status}`;
} else if (batchId) {
deduplicationKey = `batch-${batchId}-${status}`;
}
await publishEvent({
userId,
channel,
payload: job,
deduplicationKey
});
}
return jobId;
} catch (error) {
console.error("Error creating mirror job:", error);
throw new Error("Error creating mirror job");
}
}
/**
* Updates the progress of a mirror job
*/
export async function updateMirrorJobProgress({
jobId,
completedItemId,
status,
message,
details,
inProgress,
isCompleted,
}: {
jobId: string;
completedItemId?: string;
status?: RepoStatus;
message?: string;
details?: string;
inProgress?: boolean;
isCompleted?: boolean;
}) {
try {
// Get the current job
const [job] = await db
.select()
.from(mirrorJobs)
.where(eq(mirrorJobs.id, jobId));
if (!job) {
throw new Error(`Mirror job with ID ${jobId} not found`);
}
// Update the job with new progress
const updates: Record<string, any> = {
lastCheckpoint: new Date(),
};
// Add completed item if provided
if (completedItemId) {
const completedItemIds = job.completedItemIds || [];
if (!completedItemIds.includes(completedItemId)) {
updates.completedItemIds = [...completedItemIds, completedItemId];
updates.completedItems = (job.completedItems || 0) + 1;
}
}
// Update status if provided
if (status) {
updates.status = status;
}
// Update message if provided
if (message) {
updates.message = message;
}
// Update details if provided
if (details) {
updates.details = details;
}
// Update in-progress status if provided
if (inProgress !== undefined) {
updates.inProgress = inProgress;
}
// Mark as completed if specified
if (isCompleted) {
updates.inProgress = false;
updates.completedAt = new Date();
}
// Update the job in the database
await db
.update(mirrorJobs)
.set(updates)
.where(eq(mirrorJobs.id, jobId));
// Publish the event with deduplication
const updatedJob = {
...job,
...updates,
};
// Create deduplication key for progress updates
let deduplicationKey: string | undefined;
if (completedItemId) {
deduplicationKey = `progress-${jobId}-${completedItemId}`;
} else if (isCompleted) {
deduplicationKey = `completed-${jobId}`;
} else {
deduplicationKey = `update-${jobId}-${Date.now()}`;
}
await publishEvent({
userId: job.userId,
channel: `mirror-status:${job.userId}`,
payload: updatedJob,
deduplicationKey
});
return updatedJob;
} catch (error) {
console.error("Error updating mirror job progress:", error);
throw new Error("Error updating mirror job progress");
}
}
/**
* Finds interrupted jobs that need to be resumed with enhanced criteria
*/
export async function findInterruptedJobs() {
try {
// Find jobs that are marked as in-progress but haven't been updated recently
const cutoffTime = new Date();
cutoffTime.setMinutes(cutoffTime.getMinutes() - 10); // Consider jobs inactive after 10 minutes without updates
// Also check for jobs that have been running for too long (over 2 hours)
const staleCutoffTime = new Date();
staleCutoffTime.setHours(staleCutoffTime.getHours() - 2);
const interruptedJobs = await db
.select()
.from(mirrorJobs)
.where(
and(
eq(mirrorJobs.inProgress, true),
or(
// Jobs with no recent checkpoint
or(isNull(mirrorJobs.lastCheckpoint), lt(mirrorJobs.lastCheckpoint, cutoffTime)),
// Jobs that started too long ago (likely stale)
lt(mirrorJobs.startedAt, staleCutoffTime)
)
)
);
// Log details about found jobs for debugging
if (interruptedJobs.length > 0) {
console.log(`Found ${interruptedJobs.length} interrupted jobs:`);
interruptedJobs.forEach(job => {
const lastCheckpoint = job.lastCheckpoint ? new Date(job.lastCheckpoint).toISOString() : 'never';
const startedAt = job.startedAt ? new Date(job.startedAt).toISOString() : 'unknown';
console.log(`- Job ${job.id}: ${job.jobType} (started: ${startedAt}, last checkpoint: ${lastCheckpoint})`);
});
}
return interruptedJobs;
} catch (error) {
console.error("Error finding interrupted jobs:", error);
return [];
}
}
/**
* Resumes an interrupted job
*/
export async function resumeInterruptedJob(job: any) {
try {
console.log(`Resuming interrupted job: ${job.id}`);
// Skip if job doesn't have the necessary data to resume
if (!job.itemIds || !job.completedItemIds) {
console.log(`Cannot resume job ${job.id}: missing item data`);
// Mark the job as failed
await updateMirrorJobProgress({
jobId: job.id,
status: "failed",
message: "Job interrupted and could not be resumed",
details: "The job was interrupted and did not have enough information to resume",
inProgress: false,
isCompleted: true,
});
return null;
}
// Calculate remaining items
const remainingItemIds = job.itemIds.filter(
(id: string) => !job.completedItemIds.includes(id)
);
if (remainingItemIds.length === 0) {
console.log(`Job ${job.id} has no remaining items, marking as completed`);
// Mark the job as completed
await updateMirrorJobProgress({
jobId: job.id,
status: "mirrored",
message: "Job completed after resuming",
inProgress: false,
isCompleted: true,
});
return null;
}
// Update the job to show it's being resumed
await updateMirrorJobProgress({
jobId: job.id,
message: `Resuming job with ${remainingItemIds.length} remaining items`,
details: `Job was interrupted and is being resumed. ${job.completedItemIds.length} of ${job.itemIds.length} items were already processed.`,
inProgress: true,
});
return {
job,
remainingItemIds,
};
} catch (error) {
console.error(`Error resuming job ${job.id}:`, error);
return null;
}
}
+245
View File
@@ -0,0 +1,245 @@
/**
* HTTP client utility functions using fetch() for consistent error handling
*/
export interface HttpResponse<T = any> {
data: T;
status: number;
statusText: string;
headers: Headers;
}
export class HttpError extends Error {
constructor(
message: string,
public status: number,
public statusText: string,
public response?: string
) {
super(message);
this.name = 'HttpError';
}
}
/**
* Enhanced fetch with consistent error handling and JSON parsing
*/
export async function httpRequest<T = any>(
url: string,
options: RequestInit = {}
): Promise<HttpResponse<T>> {
try {
const response = await fetch(url, {
...options,
headers: {
'Content-Type': 'application/json',
...options.headers,
},
});
// Clone response for error handling
const responseClone = response.clone();
if (!response.ok) {
let errorMessage = `HTTP ${response.status}: ${response.statusText}`;
let responseText = '';
try {
responseText = await responseClone.text();
if (responseText) {
// Try to parse as JSON for better error messages
try {
const errorData = JSON.parse(responseText);
if (errorData.message) {
errorMessage = `HTTP ${response.status}: ${errorData.message}`;
} else {
errorMessage += ` - ${responseText}`;
}
} catch {
// Not JSON, use as-is
errorMessage += ` - ${responseText}`;
}
}
} catch {
// Ignore text parsing errors
}
// Log authentication-specific errors for debugging
if (response.status === 401) {
console.error(`[HTTP Client] Authentication failed for ${url}`);
console.error(`[HTTP Client] Response: ${responseText}`);
if (responseText.includes('user does not exist') && responseText.includes('uid: 0')) {
console.error(`[HTTP Client] Token appears to be invalid or the user account is not properly configured in Gitea`);
}
}
throw new HttpError(
errorMessage,
response.status,
response.statusText,
responseText
);
}
// Check content type for JSON responses
const contentType = response.headers.get('content-type');
let data: T;
if (contentType && contentType.includes('application/json')) {
try {
data = await response.json();
} catch (jsonError) {
const responseText = await responseClone.text();
// Enhanced JSON parsing error logging
if (process.env.NODE_ENV !== 'test') {
console.error("=== JSON PARSING ERROR ===");
console.error("URL:", url);
console.error("Status:", response.status, response.statusText);
console.error("Content-Type:", contentType);
console.error("Response length:", responseText.length);
console.error("Response preview (first 500 chars):", responseText.substring(0, 500));
console.error("JSON Error:", jsonError instanceof Error ? jsonError.message : String(jsonError));
console.error("========================");
}
throw new HttpError(
`Failed to parse JSON response from ${url}: ${jsonError instanceof Error ? jsonError.message : String(jsonError)}. Response: ${responseText.substring(0, 200)}${responseText.length > 200 ? '...' : ''}`,
response.status,
response.statusText,
responseText
);
}
} else {
// For non-JSON responses, return text as data
data = (await response.text()) as unknown as T;
}
return {
data,
status: response.status,
statusText: response.statusText,
headers: response.headers,
};
} catch (error) {
if (error instanceof HttpError) {
throw error;
}
// Handle network errors, etc.
throw new HttpError(
`Network error: ${error instanceof Error ? error.message : String(error)}`,
0,
'Network Error'
);
}
}
/**
* GET request
*/
export async function httpGet<T = any>(
url: string,
headers?: Record<string, string>
): Promise<HttpResponse<T>> {
return httpRequest<T>(url, {
method: 'GET',
headers,
});
}
/**
* POST request
*/
export async function httpPost<T = any>(
url: string,
body?: any,
headers?: Record<string, string>
): Promise<HttpResponse<T>> {
return httpRequest<T>(url, {
method: 'POST',
headers,
body: body ? JSON.stringify(body) : undefined,
});
}
/**
* PUT request
*/
export async function httpPut<T = any>(
url: string,
body?: any,
headers?: Record<string, string>
): Promise<HttpResponse<T>> {
return httpRequest<T>(url, {
method: 'PUT',
headers,
body: body ? JSON.stringify(body) : undefined,
});
}
/**
* PATCH request
*/
export async function httpPatch<T = any>(
url: string,
body?: any,
headers?: Record<string, string>
): Promise<HttpResponse<T>> {
return httpRequest<T>(url, {
method: 'PATCH',
headers,
body: body ? JSON.stringify(body) : undefined,
});
}
/**
* DELETE request
*/
export async function httpDelete<T = any>(
url: string,
headers?: Record<string, string>
): Promise<HttpResponse<T>> {
return httpRequest<T>(url, {
method: 'DELETE',
headers,
});
}
/**
* Gitea-specific HTTP client with authentication
*/
export class GiteaHttpClient {
constructor(
private baseUrl: string,
private token: string
) {}
private getHeaders(additionalHeaders?: Record<string, string>): Record<string, string> {
return {
'Authorization': `token ${this.token}`,
'Content-Type': 'application/json',
...additionalHeaders,
};
}
async get<T = any>(endpoint: string): Promise<HttpResponse<T>> {
return httpGet<T>(`${this.baseUrl}${endpoint}`, this.getHeaders());
}
async post<T = any>(endpoint: string, body?: any): Promise<HttpResponse<T>> {
return httpPost<T>(`${this.baseUrl}${endpoint}`, body, this.getHeaders());
}
async put<T = any>(endpoint: string, body?: any): Promise<HttpResponse<T>> {
return httpPut<T>(`${this.baseUrl}${endpoint}`, body, this.getHeaders());
}
async patch<T = any>(endpoint: string, body?: any): Promise<HttpResponse<T>> {
return httpPatch<T>(`${this.baseUrl}${endpoint}`, body, this.getHeaders());
}
async delete<T = any>(endpoint: string): Promise<HttpResponse<T>> {
return httpDelete<T>(`${this.baseUrl}${endpoint}`, this.getHeaders());
}
}
@@ -0,0 +1,75 @@
interface MetadataComponentsState {
releases: boolean;
issues: boolean;
pullRequests: boolean;
labels: boolean;
milestones: boolean;
}
export interface RepositoryMetadataState {
components: MetadataComponentsState;
lastSyncedAt?: string;
}
const defaultComponents: MetadataComponentsState = {
releases: false,
issues: false,
pullRequests: false,
labels: false,
milestones: false,
};
export function createDefaultMetadataState(): RepositoryMetadataState {
return {
components: { ...defaultComponents },
};
}
export function parseRepositoryMetadataState(
raw: unknown
): RepositoryMetadataState {
const base = createDefaultMetadataState();
if (!raw) {
return base;
}
let parsed: any = raw;
if (typeof raw === "string") {
try {
parsed = JSON.parse(raw);
} catch {
return base;
}
}
if (!parsed || typeof parsed !== "object") {
return base;
}
if (parsed.components && typeof parsed.components === "object") {
base.components = {
...base.components,
releases: Boolean(parsed.components.releases),
issues: Boolean(parsed.components.issues),
pullRequests: Boolean(parsed.components.pullRequests),
labels: Boolean(parsed.components.labels),
milestones: Boolean(parsed.components.milestones),
};
}
if (typeof parsed.lastSyncedAt === "string") {
base.lastSyncedAt = parsed.lastSyncedAt;
} else if (typeof parsed.lastMetadataSync === "string") {
base.lastSyncedAt = parsed.lastMetadataSync;
}
return base;
}
export function serializeRepositoryMetadataState(
state: RepositoryMetadataState
): string {
return JSON.stringify(state);
}
@@ -0,0 +1,382 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { db, repositories } from "./db";
import { eq } from "drizzle-orm";
import { repoStatusEnum } from "@/types/Repository";
import type { Config, Repository } from "./db/schema";
describe("Mirror Sync Error Handling", () => {
let originalFetch: typeof global.fetch;
let originalSetTimeout: typeof global.setTimeout;
let mockDbUpdate: any;
beforeEach(() => {
originalFetch = global.fetch;
originalSetTimeout = global.setTimeout;
// Mock setTimeout to avoid delays in tests
global.setTimeout = ((fn: Function) => {
Promise.resolve().then(() => fn());
return 0;
}) as any;
// Mock database update operations
mockDbUpdate = mock(() => ({
set: mock(() => ({
where: mock(() => Promise.resolve())
}))
}));
// Override the db.update method
(db as any).update = mockDbUpdate;
});
afterEach(() => {
global.fetch = originalFetch;
global.setTimeout = originalSetTimeout;
});
describe("Mirror sync API errors", () => {
test("should handle mirror-sync endpoint not available for non-mirror repos", async () => {
const errorResponse = {
ok: false,
status: 400,
statusText: "Bad Request",
headers: new Headers({ "content-type": "application/json" }),
json: async () => ({
message: "Repository is not a mirror",
url: "https://gitea.ui.com/api/swagger"
})
};
global.fetch = mock(async (url: string) => {
if (url.includes("/api/v1/repos/") && url.includes("/mirror-sync")) {
return errorResponse as Response;
}
return originalFetch(url);
});
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.ui.com",
token: "gitea-token"
}
};
// Simulate attempting to sync a non-mirror repository
const response = await fetch(
`${config.giteaConfig!.url}/api/v1/repos/starred/test-repo/mirror-sync`,
{
method: "POST",
headers: {
Authorization: `token ${config.giteaConfig!.token}`,
"Content-Type": "application/json"
}
}
);
expect(response.ok).toBe(false);
expect(response.status).toBe(400);
const error = await response.json();
expect(error.message).toBe("Repository is not a mirror");
});
test("should update repository status to 'failed' when sync fails", async () => {
const repository: Repository = {
id: "repo-123",
userId: "user-123",
configId: "config-123",
name: "test-repo",
fullName: "owner/test-repo",
url: "https://github.com/owner/test-repo",
cloneUrl: "https://github.com/owner/test-repo.git",
owner: "owner",
isPrivate: false,
isForked: false,
hasIssues: true,
isStarred: true,
isArchived: false,
size: 1000,
hasLFS: false,
hasSubmodules: false,
defaultBranch: "main",
visibility: "public",
status: "mirroring",
mirroredLocation: "starred/test-repo",
createdAt: new Date(),
updatedAt: new Date()
};
// Simulate error handling in mirror process
const errorMessage = "Repository is not a mirror";
// This simulates what should happen when mirror sync fails
await db
.update(repositories)
.set({
status: repoStatusEnum.parse("failed"),
errorMessage: errorMessage,
updatedAt: new Date()
})
.where(eq(repositories.id, repository.id));
// Verify the update was called with correct parameters
expect(mockDbUpdate).toHaveBeenCalledWith(repositories);
const setCalls = mockDbUpdate.mock.results[0].value.set.mock.calls;
expect(setCalls[0][0]).toMatchObject({
status: "failed",
errorMessage: errorMessage
});
});
});
describe("Repository state detection", () => {
test("should detect when a repository exists but is not configured as mirror", async () => {
// Mock Gitea API response for repo info
global.fetch = mock(async (url: string) => {
if (url.includes("/api/v1/repos/starred/test-repo") && !url.includes("mirror-sync")) {
return {
ok: true,
status: 200,
headers: new Headers({ "content-type": "application/json" }),
json: async () => ({
id: 123,
name: "test-repo",
owner: { login: "starred" },
mirror: false, // This is the issue - should be true
fork: false,
private: false,
clone_url: "https://gitea.ui.com/starred/test-repo.git"
})
} as Response;
}
return originalFetch(url);
});
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.ui.com",
token: "gitea-token"
}
};
// Check repository details
const response = await fetch(
`${config.giteaConfig!.url}/api/v1/repos/starred/test-repo`,
{
headers: {
Authorization: `token ${config.giteaConfig!.token}`
}
}
);
const repoInfo = await response.json();
// Verify the repository exists but is not a mirror
expect(repoInfo.mirror).toBe(false);
expect(repoInfo.owner.login).toBe("starred");
// This state causes the "Repository is not a mirror" error
});
test("should identify repositories that need to be recreated as mirrors", async () => {
const problematicRepos = [
{
name: "awesome-project",
owner: "starred",
currentState: "regular",
requiredState: "mirror",
action: "delete and recreate"
},
{
name: "cool-library",
owner: "starred",
currentState: "fork",
requiredState: "mirror",
action: "delete and recreate"
}
];
// This test documents repos that need intervention
expect(problematicRepos).toHaveLength(2);
expect(problematicRepos[0].action).toBe("delete and recreate");
});
});
describe("Organization permission errors", () => {
test("should handle insufficient permissions for organization operations", async () => {
global.fetch = mock(async (url: string, options?: RequestInit) => {
if (url.includes("/api/v1/orgs") && options?.method === "POST") {
return {
ok: false,
status: 403,
statusText: "Forbidden",
headers: new Headers({ "content-type": "application/json" }),
json: async () => ({
message: "You do not have permission to create organizations",
url: "https://gitea.ui.com/api/swagger"
})
} as Response;
}
return originalFetch(url, options);
});
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.ui.com",
token: "gitea-token"
}
};
const response = await fetch(
`${config.giteaConfig!.url}/api/v1/orgs`,
{
method: "POST",
headers: {
Authorization: `token ${config.giteaConfig!.token}`,
"Content-Type": "application/json"
},
body: JSON.stringify({
username: "starred",
full_name: "Starred Repositories"
})
}
);
expect(response.ok).toBe(false);
expect(response.status).toBe(403);
const error = await response.json();
expect(error.message).toContain("permission");
});
});
describe("Sync operation retry logic", () => {
test("should implement exponential backoff for transient errors", async () => {
let attemptCount = 0;
const maxRetries = 3;
const baseDelay = 1000;
const mockSyncWithRetry = async (url: string, config: any) => {
for (let i = 0; i < maxRetries; i++) {
attemptCount++;
try {
const response = await fetch(url, {
method: "POST",
headers: {
Authorization: `token ${config.token}`
}
});
if (response.ok) {
return response;
}
if (response.status === 400) {
// Non-retryable error
throw new Error("Repository is not a mirror");
}
// Retryable error (5xx, network issues)
if (i < maxRetries - 1) {
const delay = baseDelay * Math.pow(2, i);
await new Promise(resolve => setTimeout(resolve, delay));
}
} catch (error) {
if (i === maxRetries - 1) {
throw error;
}
}
}
};
// Mock a server error that resolves after 2 retries
let callCount = 0;
global.fetch = mock(async () => {
callCount++;
if (callCount < 3) {
return {
ok: false,
status: 503,
statusText: "Service Unavailable"
} as Response;
}
return {
ok: true,
status: 200
} as Response;
});
const response = await mockSyncWithRetry(
"https://gitea.ui.com/api/v1/repos/starred/test-repo/mirror-sync",
{ token: "test-token" }
);
expect(response.ok).toBe(true);
expect(attemptCount).toBe(3);
});
});
describe("Bulk operation error handling", () => {
test("should continue processing other repos when one fails", async () => {
const repositories = [
{ name: "repo1", owner: "starred", shouldFail: false },
{ name: "repo2", owner: "starred", shouldFail: true }, // This one will fail
{ name: "repo3", owner: "starred", shouldFail: false }
];
const results: { name: string; success: boolean; error?: string }[] = [];
// Mock fetch to fail for repo2
global.fetch = mock(async (url: string) => {
if (url.includes("repo2")) {
return {
ok: false,
status: 400,
statusText: "Bad Request",
headers: new Headers({ "content-type": "application/json" }),
json: async () => ({
message: "Repository is not a mirror"
})
} as Response;
}
return {
ok: true,
status: 200
} as Response;
});
// Process repositories
for (const repo of repositories) {
try {
const response = await fetch(
`https://gitea.ui.com/api/v1/repos/${repo.owner}/${repo.name}/mirror-sync`,
{ method: "POST" }
);
if (!response.ok) {
const error = await response.json();
throw new Error(error.message);
}
results.push({ name: repo.name, success: true });
} catch (error) {
results.push({
name: repo.name,
success: false,
error: (error as Error).message
});
}
}
// Verify results
expect(results).toHaveLength(3);
expect(results[0].success).toBe(true);
expect(results[1].success).toBe(false);
expect(results[1].error).toBe("Repository is not a mirror");
expect(results[2].success).toBe(true);
});
});
});
@@ -0,0 +1,392 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import type { Config, Repository } from "./db/schema";
import { repoStatusEnum } from "@/types/Repository";
describe("Mirror Sync Fix Implementation", () => {
let originalFetch: typeof global.fetch;
beforeEach(() => {
originalFetch = global.fetch;
});
afterEach(() => {
global.fetch = originalFetch;
});
describe("Non-mirror repository recovery", () => {
test("should detect and handle non-mirror repositories", async () => {
const mockHandleNonMirrorRepo = async ({
config,
repository,
owner,
}: {
config: Partial<Config>;
repository: Repository;
owner: string;
}) => {
try {
// First, check if the repo exists
const checkResponse = await fetch(
`${config.giteaConfig!.url}/api/v1/repos/${owner}/${repository.name}`,
{
headers: {
Authorization: `token ${config.giteaConfig!.token}`,
},
}
);
if (!checkResponse.ok) {
// Repo doesn't exist, we can create it as mirror
return { action: "create_mirror", success: true };
}
const repoInfo = await checkResponse.json();
if (!repoInfo.mirror) {
// Repository exists but is not a mirror
console.log(`Repository ${repository.name} exists but is not a mirror`);
// Option 1: Delete and recreate
if (config.giteaConfig?.autoFixNonMirrors) {
const deleteResponse = await fetch(
`${config.giteaConfig.url}/api/v1/repos/${owner}/${repository.name}`,
{
method: "DELETE",
headers: {
Authorization: `token ${config.giteaConfig.token}`,
},
}
);
if (deleteResponse.ok) {
return { action: "deleted_for_recreation", success: true };
}
}
// Option 2: Mark for manual intervention
return {
action: "manual_intervention_required",
success: false,
reason: "Repository exists but is not configured as mirror",
suggestion: `Delete ${owner}/${repository.name} in Gitea and re-run mirror`,
};
}
// Repository is already a mirror, can proceed with sync
return { action: "sync_mirror", success: true };
} catch (error) {
return {
action: "error",
success: false,
error: error instanceof Error ? error.message : String(error),
};
}
};
// Test scenario 1: Non-mirror repository
global.fetch = mock(async (url: string) => {
if (url.includes("/api/v1/repos/starred/test-repo")) {
return {
ok: true,
status: 200,
headers: new Headers({ "content-type": "application/json" }),
json: async () => ({
id: 123,
name: "test-repo",
mirror: false, // Not a mirror
owner: { login: "starred" },
}),
} as Response;
}
return originalFetch(url);
});
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.ui.com",
token: "gitea-token",
autoFixNonMirrors: false, // Manual intervention mode
},
};
const repository: Repository = {
id: "repo-123",
name: "test-repo",
isStarred: true,
// ... other fields
} as Repository;
const result = await mockHandleNonMirrorRepo({
config,
repository,
owner: "starred",
});
expect(result.action).toBe("manual_intervention_required");
expect(result.success).toBe(false);
expect(result.suggestion).toContain("Delete starred/test-repo");
});
test("should successfully delete and prepare for recreation when autoFix is enabled", async () => {
let deleteRequested = false;
global.fetch = mock(async (url: string, options?: RequestInit) => {
if (url.includes("/api/v1/repos/starred/test-repo")) {
if (options?.method === "DELETE") {
deleteRequested = true;
return {
ok: true,
status: 204,
} as Response;
}
// GET request
return {
ok: true,
status: 200,
headers: new Headers({ "content-type": "application/json" }),
json: async () => ({
id: 123,
name: "test-repo",
mirror: false,
owner: { login: "starred" },
}),
} as Response;
}
return originalFetch(url, options);
});
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.ui.com",
token: "gitea-token",
autoFixNonMirrors: true, // Auto-fix enabled
},
};
// Simulate the fix process
const checkResponse = await fetch(
`${config.giteaConfig!.url}/api/v1/repos/starred/test-repo`,
{
headers: {
Authorization: `token ${config.giteaConfig!.token}`,
},
}
);
const repoInfo = await checkResponse.json();
expect(repoInfo.mirror).toBe(false);
// Delete the non-mirror repo
const deleteResponse = await fetch(
`${config.giteaConfig!.url}/api/v1/repos/starred/test-repo`,
{
method: "DELETE",
headers: {
Authorization: `token ${config.giteaConfig!.token}`,
},
}
);
expect(deleteResponse.ok).toBe(true);
expect(deleteRequested).toBe(true);
});
});
describe("Enhanced mirror creation with validation", () => {
test("should validate repository before creating mirror", async () => {
const createMirrorWithValidation = async ({
config,
repository,
owner,
}: {
config: Partial<Config>;
repository: Repository;
owner: string;
}) => {
// Step 1: Check if repo already exists
const checkResponse = await fetch(
`${config.giteaConfig!.url}/api/v1/repos/${owner}/${repository.name}`,
{
headers: {
Authorization: `token ${config.giteaConfig!.token}`,
},
}
);
if (checkResponse.ok) {
const existingRepo = await checkResponse.json();
if (existingRepo.mirror) {
return {
created: false,
reason: "already_mirror",
repoId: existingRepo.id,
};
} else {
return {
created: false,
reason: "exists_not_mirror",
repoId: existingRepo.id,
};
}
}
// Step 2: Create as mirror
const cloneUrl = repository.isPrivate
? repository.cloneUrl.replace("https://", `https://GITHUB_TOKEN@`)
: repository.cloneUrl;
const createResponse = await fetch(
`${config.giteaConfig!.url}/api/v1/repos/migrate`,
{
method: "POST",
headers: {
Authorization: `token ${config.giteaConfig!.token}`,
"Content-Type": "application/json",
},
body: JSON.stringify({
clone_addr: cloneUrl,
repo_name: repository.name,
mirror: true, // Ensure this is always true
repo_owner: owner,
private: repository.isPrivate,
description: `Mirrored from ${repository.fullName}`,
service: "git",
}),
}
);
if (createResponse.ok) {
const newRepo = await createResponse.json();
return {
created: true,
reason: "success",
repoId: newRepo.id,
};
}
const error = await createResponse.json();
return {
created: false,
reason: "create_failed",
error: error.message,
};
};
// Mock successful mirror creation
global.fetch = mock(async (url: string, options?: RequestInit) => {
if (url.includes("/api/v1/repos/starred/new-repo") && !options?.method) {
return {
ok: false,
status: 404,
} as Response;
}
if (url.includes("/api/v1/repos/migrate")) {
const body = JSON.parse(options?.body as string);
expect(body.mirror).toBe(true); // Validate mirror flag
expect(body.repo_owner).toBe("starred");
return {
ok: true,
status: 201,
headers: new Headers({ "content-type": "application/json" }),
json: async () => ({
id: 456,
name: body.repo_name,
mirror: true,
owner: { login: body.repo_owner },
}),
} as Response;
}
return originalFetch(url, options);
});
const config: Partial<Config> = {
giteaConfig: {
url: "https://gitea.ui.com",
token: "gitea-token",
},
};
const repository: Repository = {
id: "repo-456",
name: "new-repo",
fullName: "original/new-repo",
cloneUrl: "https://github.com/original/new-repo.git",
isPrivate: false,
isStarred: true,
// ... other fields
} as Repository;
const result = await createMirrorWithValidation({
config,
repository,
owner: "starred",
});
expect(result.created).toBe(true);
expect(result.reason).toBe("success");
expect(result.repoId).toBe(456);
});
});
describe("Sync status tracking", () => {
test("should track sync attempts and failures", async () => {
interface SyncAttempt {
repositoryId: string;
attemptNumber: number;
timestamp: Date;
error?: string;
success: boolean;
}
const syncAttempts: Map<string, SyncAttempt[]> = new Map();
const trackSyncAttempt = (
repositoryId: string,
success: boolean,
error?: string
) => {
const attempts = syncAttempts.get(repositoryId) || [];
attempts.push({
repositoryId,
attemptNumber: attempts.length + 1,
timestamp: new Date(),
error,
success,
});
syncAttempts.set(repositoryId, attempts);
};
const shouldRetrySync = (repositoryId: string): boolean => {
const attempts = syncAttempts.get(repositoryId) || [];
if (attempts.length === 0) return true;
const lastAttempt = attempts[attempts.length - 1];
const timeSinceLastAttempt =
Date.now() - lastAttempt.timestamp.getTime();
// Retry if:
// 1. Less than 3 attempts
// 2. At least 5 minutes since last attempt
// 3. Last error was not "Repository is not a mirror"
return (
attempts.length < 3 &&
timeSinceLastAttempt > 5 * 60 * 1000 &&
!lastAttempt.error?.includes("Repository is not a mirror")
);
};
// Simulate sync attempts
trackSyncAttempt("repo-123", false, "Repository is not a mirror");
trackSyncAttempt("repo-456", false, "Network timeout");
trackSyncAttempt("repo-456", true);
expect(shouldRetrySync("repo-123")).toBe(false); // Non-retryable error
expect(shouldRetrySync("repo-456")).toBe(false); // Already succeeded
expect(shouldRetrySync("repo-789")).toBe(true); // No attempts yet
});
});
});
@@ -0,0 +1,184 @@
/**
* Module registry implementation
* Manages loading and access to modular features
*/
import type {
Module,
ModuleRegistry,
AppContext,
RouteHandler,
Middleware,
DatabaseAdapter,
EventEmitter
} from './types';
// Module registry for extensibility
/**
* Simple event emitter implementation
*/
class SimpleEventEmitter implements EventEmitter {
private events: Map<string, Set<Function>> = new Map();
on(event: string, handler: (...args: any[]) => void): void {
if (!this.events.has(event)) {
this.events.set(event, new Set());
}
this.events.get(event)!.add(handler);
}
off(event: string, handler: (...args: any[]) => void): void {
this.events.get(event)?.delete(handler);
}
emit(event: string, ...args: any[]): void {
this.events.get(event)?.forEach(handler => {
try {
handler(...args);
} catch (error) {
console.error(`Error in event handler for ${event}:`, error);
}
});
}
}
/**
* Module manager class
*/
export class ModuleManager {
private modules: Map<string, Module> = new Map();
private routes: Map<string, RouteHandler> = new Map();
private middlewares: Middleware[] = [];
private events = new SimpleEventEmitter();
private initialized = false;
/**
* Get app context for modules
*/
private getAppContext(): AppContext {
return {
addRoute: (path, handler) => this.addRoute(path, handler),
addMiddleware: (middleware) => this.middlewares.push(middleware),
db: this.getDatabaseAdapter(),
events: this.events,
modules: this.getRegistry(),
};
}
/**
* Get database adapter based on deployment mode
*/
private getDatabaseAdapter(): DatabaseAdapter {
// This would be implemented to use SQLite or PostgreSQL
// based on deployment mode
return {
query: async (sql, params) => [],
execute: async (sql, params) => {},
transaction: async (fn) => fn(),
};
}
/**
* Register a module
*/
async register(module: Module): Promise<void> {
if (this.modules.has(module.name)) {
console.warn(`Module ${module.name} is already registered`);
return;
}
try {
await module.init(this.getAppContext());
this.modules.set(module.name, module);
console.log(`Module ${module.name} registered successfully`);
} catch (error) {
console.error(`Failed to register module ${module.name}:`, error);
throw error;
}
}
/**
* Unregister a module
*/
async unregister(moduleName: string): Promise<void> {
const module = this.modules.get(moduleName);
if (!module) return;
if (module.cleanup) {
await module.cleanup();
}
this.modules.delete(moduleName);
// Remove routes registered by this module
// This would need to track which module registered which routes
}
/**
* Add a route handler
*/
private addRoute(path: string, handler: RouteHandler): void {
this.routes.set(path, handler);
}
/**
* Get route handler for a path
*/
getRouteHandler(path: string): RouteHandler | null {
return this.routes.get(path) || null;
}
/**
* Get all middleware
*/
getMiddleware(): Middleware[] {
return [...this.middlewares];
}
/**
* Get module registry
*/
getRegistry(): ModuleRegistry {
const registry: ModuleRegistry = {};
// Copy all modules to registry
for (const [name, module] of this.modules) {
registry[name] = module;
}
return registry;
}
/**
* Get a specific module
*/
get<K extends keyof ModuleRegistry>(name: K): ModuleRegistry[K] | null {
return this.getRegistry()[name] || null;
}
/**
* Check if a module is loaded
*/
has(name: string): boolean {
return this.modules.has(name);
}
/**
* Emit an event to all modules
*/
emit(event: string, ...args: any[]): void {
this.events.emit(event, ...args);
}
}
// Global module manager instance
export const modules = new ModuleManager();
// Initialize modules on app start
export async function initializeModules() {
// Load core modules here if any
// Emit initialization complete event
modules.emit('modules:initialized');
}
+86
View File
@@ -0,0 +1,86 @@
/**
* Module system type definitions
* These interfaces allow for extensibility and plugins
*/
import type { APIContext } from 'astro';
import type { ComponentType, LazyExoticComponent } from 'react';
/**
* Base module interface that all modules must implement
*/
export interface Module {
/** Unique module identifier */
name: string;
/** Module version */
version: string;
/** Initialize the module with app context */
init(app: AppContext): Promise<void>;
/** Cleanup when module is unloaded */
cleanup?(): Promise<void>;
}
/**
* Application context passed to modules
*/
export interface AppContext {
/** Register API routes */
addRoute(path: string, handler: RouteHandler): void;
/** Register middleware */
addMiddleware(middleware: Middleware): void;
/** Access to database (abstracted) */
db: DatabaseAdapter;
/** Event emitter for cross-module communication */
events: EventEmitter;
/** Access to other modules */
modules: ModuleRegistry;
}
/**
* Route handler type
*/
export type RouteHandler = (context: APIContext) => Promise<Response> | Response;
/**
* Middleware type
*/
export type Middleware = (context: APIContext, next: () => Promise<Response>) => Promise<Response>;
/**
* Database adapter interface (abstract away implementation)
*/
export interface DatabaseAdapter {
query<T>(sql: string, params?: any[]): Promise<T[]>;
execute(sql: string, params?: any[]): Promise<void>;
transaction<T>(fn: () => Promise<T>): Promise<T>;
}
/**
* Event emitter for cross-module communication
*/
export interface EventEmitter {
on(event: string, handler: (...args: any[]) => void): void;
off(event: string, handler: (...args: any[]) => void): void;
emit(event: string, ...args: any[]): void;
}
/**
* Example module interfaces
* These are examples of how modules can be structured
*/
export interface FeatureModule extends Module {
/** React components provided by the module */
components?: Record<string, LazyExoticComponent<ComponentType<any>>>;
/** API methods provided by the module */
api?: Record<string, (...args: any[]) => Promise<any>>;
/** Lifecycle hooks */
hooks?: {
onInit?: () => Promise<void>;
onUserAction?: (action: string, data: any) => Promise<void>;
};
}
/**
* Module registry interface
*/
export interface ModuleRegistry {
[key: string]: Module | undefined;
}
export interface User {
id: string;
email: string;
name?: string;
username?: string;
}
//# sourceMappingURL=types.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["types.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AACxC,OAAO,KAAK,EAAE,aAAa,EAAE,mBAAmB,EAAE,MAAM,OAAO,CAAC;AAEhE;;GAEG;AACH,MAAM,WAAW,MAAM;IACrB,+BAA+B;IAC/B,IAAI,EAAE,MAAM,CAAC;IAEb,qBAAqB;IACrB,OAAO,EAAE,MAAM,CAAC;IAEhB,6CAA6C;IAC7C,IAAI,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAErC,sCAAsC;IACtC,OAAO,CAAC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB,0BAA0B;IAC1B,QAAQ,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,YAAY,GAAG,IAAI,CAAC;IAEpD,0BAA0B;IAC1B,aAAa,CAAC,UAAU,EAAE,UAAU,GAAG,IAAI,CAAC;IAE5C,sCAAsC;IACtC,EAAE,EAAE,eAAe,CAAC;IAEpB,mDAAmD;IACnD,MAAM,EAAE,YAAY,CAAC;IAErB,8BAA8B;IAC9B,OAAO,EAAE,cAAc,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,MAAM,YAAY,GAAG,CAAC,OAAO,EAAE,UAAU,KAAK,OAAO,CAAC,QAAQ,CAAC,GAAG,QAAQ,CAAC;AAEjF;;GAEG;AACH,MAAM,MAAM,UAAU,GAAG,CACvB,OAAO,EAAE,UAAU,EACnB,IAAI,EAAE,MAAM,OAAO,CAAC,QAAQ,CAAC,KAC1B,OAAO,CAAC,QAAQ,CAAC,CAAC;AAEvB;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,KAAK,CAAC,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,GAAG,EAAE,GAAG,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC;IACpD,OAAO,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,GAAG,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACpD,WAAW,CAAC,CAAC,EAAE,EAAE,EAAE,MAAM,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;CAClD;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI,GAAG,IAAI,CAAC;IAC3D,GAAG,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI,GAAG,IAAI,CAAC;IAC5D,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC;CAC3C;AAED;;;GAGG;AAGH,MAAM,WAAW,aAAc,SAAQ,MAAM;IAC3C,8CAA8C;IAC9C,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,mBAAmB,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;IAErE,yCAAyC;IACzC,GAAG,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC;IAEvD,sBAAsB;IACtB,KAAK,CAAC,EAAE;QACN,MAAM,CAAC,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;QAC7B,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;KAC7D,CAAC;CACH;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAAC;CACnC;AAGD,MAAM,WAAW,IAAI;IACnB,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB"}
@@ -0,0 +1,5 @@
/**
* Module system type definitions
* These interfaces allow for extensibility and plugins
*/
export {};
@@ -0,0 +1,110 @@
/**
* Module system type definitions
* These interfaces allow for extensibility and plugins
*/
import type { APIContext } from 'astro';
import type { ComponentType, LazyExoticComponent } from 'react';
/**
* Base module interface that all modules must implement
*/
export interface Module {
/** Unique module identifier */
name: string;
/** Module version */
version: string;
/** Initialize the module with app context */
init(app: AppContext): Promise<void>;
/** Cleanup when module is unloaded */
cleanup?(): Promise<void>;
}
/**
* Application context passed to modules
*/
export interface AppContext {
/** Register API routes */
addRoute(path: string, handler: RouteHandler): void;
/** Register middleware */
addMiddleware(middleware: Middleware): void;
/** Access to database (abstracted) */
db: DatabaseAdapter;
/** Event emitter for cross-module communication */
events: EventEmitter;
/** Access to other modules */
modules: ModuleRegistry;
}
/**
* Route handler type
*/
export type RouteHandler = (context: APIContext) => Promise<Response> | Response;
/**
* Middleware type
*/
export type Middleware = (
context: APIContext,
next: () => Promise<Response>
) => Promise<Response>;
/**
* Database adapter interface (abstract away implementation)
*/
export interface DatabaseAdapter {
query<T>(sql: string, params?: any[]): Promise<T[]>;
execute(sql: string, params?: any[]): Promise<void>;
transaction<T>(fn: () => Promise<T>): Promise<T>;
}
/**
* Event emitter for cross-module communication
*/
export interface EventEmitter {
on(event: string, handler: (...args: any[]) => void): void;
off(event: string, handler: (...args: any[]) => void): void;
emit(event: string, ...args: any[]): void;
}
/**
* Example module interfaces
* These are examples of how modules can be structured
*/
// Example: Feature module with components
export interface FeatureModule extends Module {
/** React components provided by the module */
components?: Record<string, LazyExoticComponent<ComponentType<any>>>;
/** API methods provided by the module */
api?: Record<string, (...args: any[]) => Promise<any>>;
/** Lifecycle hooks */
hooks?: {
onInit?: () => Promise<void>;
onUserAction?: (action: string, data: any) => Promise<void>;
};
}
/**
* Module registry interface
*/
export interface ModuleRegistry {
[key: string]: Module | undefined;
}
// Generic types that modules might use
export interface User {
id: string;
email: string;
name?: string;
username?: string;
}
@@ -0,0 +1,5 @@
import { Buffer } from "buffer";
if (typeof globalThis !== "undefined" && (globalThis as any).Buffer === undefined) {
(globalThis as any).Buffer = Buffer;
}
@@ -0,0 +1,422 @@
import { db, rateLimits } from "@/lib/db";
import { eq, and } from "drizzle-orm";
import { v4 as uuidv4 } from "uuid";
import type { Octokit } from "@octokit/rest";
import { publishEvent } from "@/lib/events";
type RateLimitStatus = "ok" | "warning" | "limited" | "exceeded";
interface RateLimitInfo {
limit: number;
remaining: number;
used: number;
reset: Date;
retryAfter?: number;
status: RateLimitStatus;
}
interface RateLimitHeaders {
"x-ratelimit-limit"?: string;
"x-ratelimit-remaining"?: string;
"x-ratelimit-used"?: string;
"x-ratelimit-reset"?: string;
"retry-after"?: string;
}
/**
* Rate limit manager for GitHub API
*
* GitHub API Limits for authenticated users:
* - Primary: 5,000 requests per hour
* - Secondary: 900 points per minute (GET = 1 point, mutations = more)
* - Concurrent: Maximum 100 concurrent requests (recommended: 5-20)
*
* For repositories with many issues/PRs:
* - Each issue = 1 request to fetch
* - Each PR = 1 request to fetch
* - Comments = Additional requests per issue/PR
* - Better to limit by total requests rather than repositories
*/
export class RateLimitManager {
private static readonly WARNING_THRESHOLD = 0.2; // Warn when 20% remaining (80% used)
private static readonly PAUSE_THRESHOLD = 0.05; // Pause when 5% remaining
private static readonly MIN_REQUESTS_BUFFER = 100; // Keep at least 100 requests as buffer
private static lastNotifiedThreshold: Map<string, number> = new Map(); // Track last notification per user
/**
* Check current rate limit status from GitHub
*/
static async checkGitHubRateLimit(octokit: Octokit, userId: string): Promise<RateLimitInfo> {
try {
const { data } = await octokit.rateLimit.get();
const core = data.rate;
const info: RateLimitInfo = {
limit: core.limit,
remaining: core.remaining,
used: core.used,
reset: new Date(core.reset * 1000),
status: this.calculateStatus(core.remaining, core.limit),
};
// Update database
await this.updateRateLimit(userId, "github", info);
return info;
} catch (error) {
console.error("Failed to check GitHub rate limit:", error);
// Return last known status from database if API check fails
return await this.getLastKnownStatus(userId, "github");
}
}
/**
* Extract rate limit info from response headers
*/
static parseRateLimitHeaders(headers: RateLimitHeaders): Partial<RateLimitInfo> {
const info: Partial<RateLimitInfo> = {};
if (headers["x-ratelimit-limit"]) {
info.limit = parseInt(headers["x-ratelimit-limit"], 10);
}
if (headers["x-ratelimit-remaining"]) {
info.remaining = parseInt(headers["x-ratelimit-remaining"], 10);
}
if (headers["x-ratelimit-used"]) {
info.used = parseInt(headers["x-ratelimit-used"], 10);
}
if (headers["x-ratelimit-reset"]) {
info.reset = new Date(parseInt(headers["x-ratelimit-reset"], 10) * 1000);
}
if (headers["retry-after"]) {
info.retryAfter = parseInt(headers["retry-after"], 10);
}
if (info.remaining !== undefined && info.limit !== undefined) {
info.status = this.calculateStatus(info.remaining, info.limit);
}
return info;
}
/**
* Update rate limit info from API response
*/
static async updateFromResponse(userId: string, headers: RateLimitHeaders): Promise<void> {
const info = this.parseRateLimitHeaders(headers);
if (Object.keys(info).length > 0) {
await this.updateRateLimit(userId, "github", info as RateLimitInfo);
}
}
/**
* Calculate rate limit status based on remaining requests
*/
static calculateStatus(remaining: number, limit: number): RateLimitStatus {
const ratio = remaining / limit;
if (remaining === 0) return "exceeded";
if (remaining < this.MIN_REQUESTS_BUFFER || ratio < this.PAUSE_THRESHOLD) return "limited";
if (ratio < this.WARNING_THRESHOLD) return "warning";
return "ok";
}
/**
* Check if we should pause operations
*/
static async shouldPause(userId: string, provider: "github" | "gitea" = "github"): Promise<boolean> {
const status = await this.getLastKnownStatus(userId, provider);
return status.status === "limited" || status.status === "exceeded";
}
/**
* Calculate wait time until rate limit resets
*/
static calculateWaitTime(reset: Date, retryAfter?: number): number {
if (retryAfter) {
return retryAfter * 1000; // Convert to milliseconds
}
const now = new Date();
const waitTime = reset.getTime() - now.getTime();
return Math.max(0, waitTime);
}
/**
* Wait until rate limit resets
*/
static async waitForReset(userId: string, provider: "github" | "gitea" = "github"): Promise<void> {
const status = await this.getLastKnownStatus(userId, provider);
if (status.status === "ok" || status.status === "warning") {
return; // No need to wait
}
const waitTime = this.calculateWaitTime(status.reset, status.retryAfter);
if (waitTime > 0) {
console.log(`[RateLimit] Waiting ${Math.ceil(waitTime / 1000)}s for rate limit reset...`);
// Create event for UI notification
await publishEvent({
userId,
channel: "rate-limit",
payload: {
type: "waiting",
provider,
waitTime,
resetAt: status.reset,
message: `API rate limit reached. Waiting ${Math.ceil(waitTime / 1000)} seconds before resuming...`,
},
});
// Wait
await new Promise(resolve => setTimeout(resolve, waitTime));
// Update status after waiting
await this.updateRateLimit(userId, provider, {
...status,
status: "ok",
remaining: status.limit,
used: 0,
});
// Notify that we've resumed
await publishEvent({
userId,
channel: "rate-limit",
payload: {
type: "resumed",
provider,
message: "Rate limit reset. Resuming operations...",
},
});
}
}
/**
* Update rate limit info in database
*/
private static async updateRateLimit(
userId: string,
provider: "github" | "gitea",
info: RateLimitInfo
): Promise<void> {
const existing = await db
.select()
.from(rateLimits)
.where(and(eq(rateLimits.userId, userId), eq(rateLimits.provider, provider)))
.limit(1);
const data = {
userId,
provider,
limit: info.limit,
remaining: info.remaining,
used: info.used,
reset: info.reset,
retryAfter: info.retryAfter,
status: info.status,
lastChecked: new Date(),
updatedAt: new Date(),
};
if (existing.length > 0) {
await db
.update(rateLimits)
.set(data)
.where(eq(rateLimits.id, existing[0].id));
} else {
await db.insert(rateLimits).values({
id: uuidv4(),
...data,
createdAt: new Date(),
});
}
// Only send notifications at specific thresholds to avoid spam
const usedPercentage = ((info.limit - info.remaining) / info.limit) * 100;
const userKey = `${userId}-${provider}`;
const lastNotified = this.lastNotifiedThreshold.get(userKey) || 0;
// Notify at 80% usage (20% remaining)
if (usedPercentage >= 80 && usedPercentage < 100 && lastNotified < 80) {
this.lastNotifiedThreshold.set(userKey, 80);
await publishEvent({
userId,
channel: "rate-limit",
payload: {
type: "warning",
provider,
status: info.status,
remaining: info.remaining,
limit: info.limit,
usedPercentage: Math.round(usedPercentage),
message: `GitHub API rate limit at ${Math.round(usedPercentage)}%. ${info.remaining} requests remaining.`,
},
});
console.log(`[RateLimit] 80% threshold reached for user ${userId}: ${info.remaining}/${info.limit} requests remaining`);
}
// Notify at 100% usage (0 remaining)
if (info.remaining === 0 && lastNotified < 100) {
this.lastNotifiedThreshold.set(userKey, 100);
const resetTime = new Date(info.reset);
const minutesUntilReset = Math.ceil((resetTime.getTime() - Date.now()) / 60000);
await publishEvent({
userId,
channel: "rate-limit",
payload: {
type: "exceeded",
provider,
status: "exceeded",
remaining: 0,
limit: info.limit,
usedPercentage: 100,
reset: info.reset,
message: `GitHub API rate limit exceeded. Will automatically resume in ${minutesUntilReset} minutes.`,
},
});
console.log(`[RateLimit] 100% rate limit exceeded for user ${userId}. Resets at ${resetTime.toLocaleTimeString()}`);
}
// Reset notification threshold when rate limit resets
if (info.remaining > info.limit * 0.5 && lastNotified > 0) {
this.lastNotifiedThreshold.delete(userKey);
}
}
/**
* Get last known rate limit status from database
*/
private static async getLastKnownStatus(
userId: string,
provider: "github" | "gitea"
): Promise<RateLimitInfo> {
const [result] = await db
.select()
.from(rateLimits)
.where(and(eq(rateLimits.userId, userId), eq(rateLimits.provider, provider)))
.limit(1);
if (result) {
return {
limit: result.limit,
remaining: result.remaining,
used: result.used,
reset: result.reset,
retryAfter: result.retryAfter ?? undefined,
status: result.status as RateLimitStatus,
};
}
// Return default if no data
return {
limit: 5000,
remaining: 5000,
used: 0,
reset: new Date(Date.now() + 3600000), // 1 hour from now
status: "ok",
};
}
/**
* Get human-readable status message
*/
private static getStatusMessage(info: RateLimitInfo): string {
const percentage = Math.round((info.remaining / info.limit) * 100);
switch (info.status) {
case "exceeded":
return `API rate limit exceeded. Resets at ${info.reset.toLocaleTimeString()}.`;
case "limited":
return `API rate limit critical: Only ${info.remaining} requests remaining (${percentage}%). Pausing operations...`;
case "warning":
return `API rate limit warning: ${info.remaining} requests remaining (${percentage}%).`;
default:
return `API rate limit healthy: ${info.remaining}/${info.limit} requests remaining.`;
}
}
/**
* Smart retry with exponential backoff for rate-limited requests
*/
static async retryWithBackoff<T>(
fn: () => Promise<T>,
userId: string,
maxRetries: number = 3
): Promise<T> {
let lastError: any;
for (let attempt = 0; attempt < maxRetries; attempt++) {
try {
// Check if we should pause before attempting
if (await this.shouldPause(userId)) {
await this.waitForReset(userId);
}
return await fn();
} catch (error: any) {
lastError = error;
// Check if it's a rate limit error
if (error.status === 403 && error.message?.includes("rate limit")) {
console.log(`[RateLimit] Rate limit hit on attempt ${attempt + 1}/${maxRetries}`);
// Parse rate limit headers from error response if available
if (error.response?.headers) {
await this.updateFromResponse(userId, error.response.headers);
}
// Wait for reset
await this.waitForReset(userId);
} else if (error.status === 429) {
// Too Many Requests - use exponential backoff
const backoffTime = Math.min(1000 * Math.pow(2, attempt), 30000); // Max 30s
console.log(`[RateLimit] Too many requests, backing off ${backoffTime}ms`);
await new Promise(resolve => setTimeout(resolve, backoffTime));
} else {
// Not a rate limit error, throw immediately
throw error;
}
}
}
throw lastError;
}
}
/**
* Middleware to check rate limits before making API calls
*/
export async function withRateLimitCheck<T>(
userId: string,
operation: () => Promise<T>,
operationName: string = "API call"
): Promise<T> {
// Check if we should pause
if (await RateLimitManager.shouldPause(userId)) {
console.log(`[RateLimit] Pausing ${operationName} due to rate limit`);
await RateLimitManager.waitForReset(userId);
}
// Execute with retry logic
return await RateLimitManager.retryWithBackoff(operation, userId);
}
/**
* Hook to update rate limits from Octokit responses
*/
export function createOctokitRateLimitPlugin(userId: string) {
return {
hook: (request: any, options: any) => {
return request(options).then((response: any) => {
// Update rate limit from response headers
if (response.headers) {
RateLimitManager.updateFromResponse(userId, response.headers).catch(console.error);
}
return response;
});
},
};
}
+509
View File
@@ -0,0 +1,509 @@
/**
* Recovery mechanism for interrupted jobs
* This module handles detecting and resuming jobs that were interrupted by container restarts
*/
import { findInterruptedJobs, resumeInterruptedJob } from './helpers';
import { db, repositories, organizations, mirrorJobs, configs } from './db';
import { eq, and, lt, inArray } from 'drizzle-orm';
import { mirrorGithubRepoToGitea, mirrorGitHubOrgRepoToGiteaOrg, syncGiteaRepo } from './gitea';
import { createGitHubClient } from './github';
import { processWithResilience } from './utils/concurrency';
import { repositoryVisibilityEnum, repoStatusEnum } from '@/types/Repository';
import type { Repository } from './db/schema';
import { getDecryptedGitHubToken } from './utils/config-encryption';
// Recovery state tracking
let recoveryInProgress = false;
let lastRecoveryAttempt: Date | null = null;
/**
* Validates database connection before attempting recovery
*/
async function validateDatabaseConnection(): Promise<boolean> {
try {
// Simple query to test database connectivity
await db.select().from(mirrorJobs).limit(1);
return true;
} catch (error) {
console.error('Database connection validation failed:', error);
return false;
}
}
/**
* Cleans up stale jobs that are too old to recover
*/
async function cleanupStaleJobs(): Promise<void> {
try {
const staleThreshold = new Date();
staleThreshold.setHours(staleThreshold.getHours() - 24); // Jobs older than 24 hours
const staleJobs = await db
.select()
.from(mirrorJobs)
.where(
and(
eq(mirrorJobs.inProgress, true),
lt(mirrorJobs.startedAt, staleThreshold)
)
);
if (staleJobs.length > 0) {
console.log(`Found ${staleJobs.length} stale jobs to clean up`);
// Mark stale jobs as failed
await db
.update(mirrorJobs)
.set({
inProgress: false,
completedAt: new Date(),
status: "failed",
message: "Job marked as failed due to being stale (older than 24 hours)"
})
.where(
and(
eq(mirrorJobs.inProgress, true),
lt(mirrorJobs.startedAt, staleThreshold)
)
);
console.log(`Cleaned up ${staleJobs.length} stale jobs`);
}
} catch (error) {
console.error('Error cleaning up stale jobs:', error);
}
}
/**
* Initialize the recovery system with enhanced error handling and resilience
* This should be called when the application starts
*/
export async function initializeRecovery(options: {
maxRetries?: number;
retryDelay?: number;
skipIfRecentAttempt?: boolean;
} = {}): Promise<boolean> {
const { maxRetries = 3, retryDelay = 5000, skipIfRecentAttempt = true } = options;
// Prevent concurrent recovery attempts
if (recoveryInProgress) {
console.log('Recovery already in progress, skipping...');
return false;
}
// Skip if recent attempt (within last 5 minutes) unless forced
if (skipIfRecentAttempt && lastRecoveryAttempt) {
const timeSinceLastAttempt = Date.now() - lastRecoveryAttempt.getTime();
if (timeSinceLastAttempt < 5 * 60 * 1000) {
console.log('Recent recovery attempt detected, skipping...');
return false;
}
}
recoveryInProgress = true;
lastRecoveryAttempt = new Date();
console.log('Initializing recovery system...');
let attempt = 0;
while (attempt < maxRetries) {
try {
attempt++;
console.log(`Recovery attempt ${attempt}/${maxRetries}`);
// Validate database connection first
const dbConnected = await validateDatabaseConnection();
if (!dbConnected) {
throw new Error('Database connection validation failed');
}
// Clean up stale jobs first
await cleanupStaleJobs();
// Find interrupted jobs
const interruptedJobs = await findInterruptedJobs();
if (interruptedJobs.length === 0) {
console.log('No interrupted jobs found.');
recoveryInProgress = false;
return true;
}
console.log(`Found ${interruptedJobs.length} interrupted jobs. Starting recovery...`);
// Process each interrupted job with individual error handling
let successCount = 0;
let failureCount = 0;
for (const job of interruptedJobs) {
try {
const resumeData = await resumeInterruptedJob(job);
if (!resumeData) {
console.log(`Job ${job.id} could not be resumed.`);
failureCount++;
continue;
}
const { job: updatedJob, remainingItemIds } = resumeData;
// Handle different job types
switch (updatedJob.jobType) {
case 'mirror':
await recoverMirrorJob(updatedJob, remainingItemIds);
break;
case 'sync':
await recoverSyncJob(updatedJob, remainingItemIds);
break;
case 'retry':
await recoverRetryJob(updatedJob, remainingItemIds);
break;
default:
console.log(`Unknown job type: ${updatedJob.jobType}`);
failureCount++;
continue;
}
successCount++;
} catch (jobError) {
console.error(`Error recovering individual job ${job.id}:`, jobError);
failureCount++;
// Mark the job as failed if recovery fails
try {
await db
.update(mirrorJobs)
.set({
inProgress: false,
completedAt: new Date(),
status: "failed",
message: `Job recovery failed: ${jobError instanceof Error ? jobError.message : String(jobError)}`
})
.where(eq(mirrorJobs.id, job.id));
} catch (updateError) {
console.error(`Failed to mark job ${job.id} as failed:`, updateError);
}
}
}
console.log(`Recovery process completed. Success: ${successCount}, Failures: ${failureCount}`);
recoveryInProgress = false;
return true;
} catch (error) {
console.error(`Recovery attempt ${attempt} failed:`, error);
if (attempt < maxRetries) {
console.log(`Retrying in ${retryDelay}ms...`);
await new Promise(resolve => setTimeout(resolve, retryDelay));
} else {
console.error('All recovery attempts failed');
recoveryInProgress = false;
return false;
}
}
}
recoveryInProgress = false;
return false;
}
/**
* Recover a mirror job with enhanced error handling
*/
async function recoverMirrorJob(job: any, remainingItemIds: string[]) {
console.log(`Recovering mirror job ${job.id} with ${remainingItemIds.length} remaining items`);
try {
// Get the config for this user with better error handling
const userConfigs = await db
.select()
.from(configs)
.where(eq(configs.userId, job.userId))
.limit(1);
if (userConfigs.length === 0) {
throw new Error(`No configuration found for user ${job.userId}`);
}
const config = userConfigs[0];
if (!config.id) {
throw new Error(`Configuration missing id for user ${job.userId}`);
}
// Get repositories to process with validation
const repos = await db
.select()
.from(repositories)
.where(inArray(repositories.id, remainingItemIds));
if (repos.length === 0) {
console.warn(`No repositories found for remaining item IDs: ${remainingItemIds.join(', ')}`);
// Mark job as completed since there's nothing to process
await db
.update(mirrorJobs)
.set({
inProgress: false,
completedAt: new Date(),
status: "mirrored",
message: "Job completed - no repositories found to process"
})
.where(eq(mirrorJobs.id, job.id));
return;
}
console.log(`Found ${repos.length} repositories to process for recovery`);
// Validate GitHub configuration before creating client
if (!config.githubConfig?.token) {
throw new Error('GitHub token not found in configuration');
}
// Create GitHub client with error handling and rate limit tracking
let octokit;
try {
const decryptedToken = getDecryptedGitHubToken(config);
const githubUsername = config.githubConfig?.owner || undefined;
const userId = config.userId || undefined;
octokit = createGitHubClient(decryptedToken, userId, githubUsername);
} catch (error) {
throw new Error(`Failed to create GitHub client: ${error instanceof Error ? error.message : String(error)}`);
}
// Process repositories with resilience and reduced concurrency for recovery
await processWithResilience(
repos,
async (repo) => {
// Prepare repository data with validation
const repoData = {
...repo,
status: repoStatusEnum.parse("imported"),
organization: repo.organization ?? undefined,
lastMirrored: repo.lastMirrored ?? undefined,
errorMessage: repo.errorMessage ?? undefined,
forkedFrom: repo.forkedFrom ?? undefined,
visibility: repositoryVisibilityEnum.parse(repo.visibility || "public"),
mirroredLocation: repo.mirroredLocation || "",
};
// Mirror the repository based on whether it's in an organization
if (repo.organization && config.giteaConfig.preserveOrgStructure) {
await mirrorGitHubOrgRepoToGiteaOrg({
config,
octokit,
orgName: repo.organization,
repository: repoData,
});
} else {
await mirrorGithubRepoToGitea({
octokit,
repository: repoData,
config,
});
}
return repo;
},
{
userId: job.userId,
jobType: 'mirror',
getItemId: (repo) => repo.id,
getItemName: (repo) => repo.name,
resumeFromJobId: job.id,
concurrencyLimit: 2, // Reduced concurrency for recovery to be more stable
maxRetries: 3, // Increased retries for recovery
retryDelay: 3000, // Longer delay for recovery
}
);
console.log(`Successfully recovered mirror job ${job.id}`);
} catch (error) {
console.error(`Error recovering mirror job ${job.id}:`, error);
// Mark the job as failed
try {
await db
.update(mirrorJobs)
.set({
inProgress: false,
completedAt: new Date(),
status: "failed",
message: `Mirror job recovery failed: ${error instanceof Error ? error.message : String(error)}`
})
.where(eq(mirrorJobs.id, job.id));
} catch (updateError) {
console.error(`Failed to mark mirror job ${job.id} as failed:`, updateError);
}
throw error; // Re-throw to be handled by the caller
}
}
/**
* Recover a sync job with enhanced error handling
*/
async function recoverSyncJob(job: any, remainingItemIds: string[]) {
console.log(`Recovering sync job ${job.id} with ${remainingItemIds.length} remaining items`);
try {
// Get the config for this user with better error handling
const userConfigs = await db
.select()
.from(configs)
.where(eq(configs.userId, job.userId))
.limit(1);
if (userConfigs.length === 0) {
throw new Error(`No configuration found for user ${job.userId}`);
}
const config = userConfigs[0];
if (!config.id) {
throw new Error(`Configuration missing id for user ${job.userId}`);
}
// Get repositories to process with validation
const repos = await db
.select()
.from(repositories)
.where(inArray(repositories.id, remainingItemIds));
if (repos.length === 0) {
console.warn(`No repositories found for remaining item IDs: ${remainingItemIds.join(', ')}`);
// Mark job as completed since there's nothing to process
await db
.update(mirrorJobs)
.set({
inProgress: false,
completedAt: new Date(),
status: "mirrored",
message: "Job completed - no repositories found to process"
})
.where(eq(mirrorJobs.id, job.id));
return;
}
console.log(`Found ${repos.length} repositories to process for sync recovery`);
// Process repositories with resilience and reduced concurrency for recovery
await processWithResilience(
repos,
async (repo) => {
// Prepare repository data with validation
const repoData = {
...repo,
status: repoStatusEnum.parse(repo.status || "imported"),
organization: repo.organization ?? undefined,
lastMirrored: repo.lastMirrored ?? undefined,
errorMessage: repo.errorMessage ?? undefined,
forkedFrom: repo.forkedFrom ?? undefined,
visibility: repositoryVisibilityEnum.parse(repo.visibility || "public"),
mirroredLocation: repo.mirroredLocation || "",
};
// Sync the repository
await syncGiteaRepo({
config,
repository: repoData,
});
return repo;
},
{
userId: job.userId,
jobType: 'sync',
getItemId: (repo) => repo.id,
getItemName: (repo) => repo.name,
resumeFromJobId: job.id,
concurrencyLimit: 3, // Reduced concurrency for recovery
maxRetries: 3, // Increased retries for recovery
retryDelay: 3000, // Longer delay for recovery
}
);
console.log(`Successfully recovered sync job ${job.id}`);
} catch (error) {
console.error(`Error recovering sync job ${job.id}:`, error);
// Mark the job as failed
try {
await db
.update(mirrorJobs)
.set({
inProgress: false,
completedAt: new Date(),
status: "failed",
message: `Sync job recovery failed: ${error instanceof Error ? error.message : String(error)}`
})
.where(eq(mirrorJobs.id, job.id));
} catch (updateError) {
console.error(`Failed to mark sync job ${job.id} as failed:`, updateError);
}
throw error; // Re-throw to be handled by the caller
}
}
/**
* Recover a retry job with enhanced error handling
*/
async function recoverRetryJob(job: any, remainingItemIds: string[]) {
console.log(`Recovering retry job ${job.id} with ${remainingItemIds.length} remaining items`);
try {
// For now, retry jobs are treated similarly to mirror jobs
// In the future, this could have specific retry logic
await recoverMirrorJob(job, remainingItemIds);
console.log(`Successfully recovered retry job ${job.id}`);
} catch (error) {
console.error(`Error recovering retry job ${job.id}:`, error);
// Mark the job as failed
try {
await db
.update(mirrorJobs)
.set({
inProgress: false,
completedAt: new Date(),
status: "failed",
message: `Retry job recovery failed: ${error instanceof Error ? error.message : String(error)}`
})
.where(eq(mirrorJobs.id, job.id));
} catch (updateError) {
console.error(`Failed to mark retry job ${job.id} as failed:`, updateError);
}
throw error; // Re-throw to be handled by the caller
}
}
/**
* Get recovery system status
*/
export function getRecoveryStatus() {
return {
inProgress: recoveryInProgress,
lastAttempt: lastRecoveryAttempt,
};
}
/**
* Force recovery to run (bypassing recent attempt check)
*/
export async function forceRecovery(): Promise<boolean> {
return initializeRecovery({ skipIfRecentAttempt: false });
}
/**
* Check if there are any jobs that need recovery
*/
export async function hasJobsNeedingRecovery(): Promise<boolean> {
try {
const interruptedJobs = await findInterruptedJobs();
return interruptedJobs.length > 0;
} catch (error) {
console.error('Error checking for jobs needing recovery:', error);
return false;
}
}
@@ -0,0 +1,75 @@
import { describe, it, expect } from 'bun:test';
import { mergeGitReposPreferStarred, normalizeGitRepoToInsert, calcBatchSizeForInsert } from '@/lib/repo-utils';
import type { GitRepo } from '@/types/Repository';
function sampleRepo(overrides: Partial<GitRepo> = {}): GitRepo {
const base: GitRepo = {
name: 'repo',
fullName: 'owner/repo',
url: 'https://github.com/owner/repo',
cloneUrl: 'https://github.com/owner/repo.git',
owner: 'owner',
organization: undefined,
mirroredLocation: '',
destinationOrg: null,
isPrivate: false,
isForked: false,
forkedFrom: undefined,
hasIssues: true,
isStarred: false,
isArchived: false,
size: 1,
hasLFS: false,
hasSubmodules: false,
language: null,
description: null,
defaultBranch: 'main',
visibility: 'public',
status: 'imported',
lastMirrored: undefined,
errorMessage: undefined,
createdAt: new Date(),
updatedAt: new Date(),
};
return { ...base, ...overrides };
}
describe('mergeGitReposPreferStarred', () => {
it('keeps unique repos', () => {
const basic = [sampleRepo({ fullName: 'a/x', name: 'x' })];
const starred: GitRepo[] = [];
const merged = mergeGitReposPreferStarred(basic, starred);
expect(merged).toHaveLength(1);
expect(merged[0].fullName).toBe('a/x');
});
it('prefers starred when duplicate exists', () => {
const basic = [sampleRepo({ fullName: 'a/x', name: 'x', isStarred: false })];
const starred = [sampleRepo({ fullName: 'a/x', name: 'x', isStarred: true })];
const merged = mergeGitReposPreferStarred(basic, starred);
expect(merged).toHaveLength(1);
expect(merged[0].isStarred).toBe(true);
});
});
describe('normalizeGitRepoToInsert', () => {
it('sets undefined optional fields to null', () => {
const repo = sampleRepo({ organization: undefined, forkedFrom: undefined, language: undefined, description: undefined, lastMirrored: undefined, errorMessage: undefined });
const insert = normalizeGitRepoToInsert(repo, { userId: 'u', configId: 'c' });
expect(insert.organization).toBeNull();
expect(insert.forkedFrom).toBeNull();
expect(insert.language).toBeNull();
expect(insert.description).toBeNull();
expect(insert.lastMirrored).toBeNull();
expect(insert.errorMessage).toBeNull();
expect(insert.normalizedFullName).toBe(repo.fullName.toLowerCase());
});
});
describe('calcBatchSizeForInsert', () => {
it('respects 999 parameter limit', () => {
const batch = calcBatchSizeForInsert(29);
expect(batch).toBeGreaterThan(0);
expect(batch * 29).toBeLessThanOrEqual(999);
});
});
+71
View File
@@ -0,0 +1,71 @@
import { v4 as uuidv4 } from 'uuid';
import type { GitRepo } from '@/types/Repository';
import { repositories } from '@/lib/db/schema';
export type RepoInsert = typeof repositories.$inferInsert;
// Merge lists and de-duplicate by fullName, preferring starred variant when present
export function mergeGitReposPreferStarred(
basicAndForked: GitRepo[],
starred: GitRepo[]
): GitRepo[] {
const map = new Map<string, GitRepo>();
for (const r of [...basicAndForked, ...starred]) {
const existing = map.get(r.fullName);
if (!existing || (!existing.isStarred && r.isStarred)) {
map.set(r.fullName, r);
}
}
return Array.from(map.values());
}
// Convert a GitRepo to a normalized DB insert object with all nullable fields set
export function normalizeGitRepoToInsert(
repo: GitRepo,
{
userId,
configId,
}: { userId: string; configId: string }
): RepoInsert {
return {
id: uuidv4(),
userId,
configId,
name: repo.name,
fullName: repo.fullName,
normalizedFullName: repo.fullName.toLowerCase(),
url: repo.url,
cloneUrl: repo.cloneUrl,
owner: repo.owner,
organization: repo.organization ?? null,
mirroredLocation: repo.mirroredLocation || '',
destinationOrg: repo.destinationOrg || null,
isPrivate: repo.isPrivate,
isForked: repo.isForked,
forkedFrom: repo.forkedFrom ?? null,
hasIssues: repo.hasIssues,
isStarred: repo.isStarred,
isArchived: repo.isArchived,
size: repo.size,
hasLFS: repo.hasLFS,
hasSubmodules: repo.hasSubmodules,
language: repo.language ?? null,
description: repo.description ?? null,
defaultBranch: repo.defaultBranch,
visibility: repo.visibility,
status: 'imported',
lastMirrored: repo.lastMirrored ?? null,
errorMessage: repo.errorMessage ?? null,
createdAt: repo.createdAt || new Date(),
updatedAt: repo.updatedAt || new Date(),
};
}
// Compute a safe batch size based on SQLite 999-parameter limit
export function calcBatchSizeForInsert(columnCount: number, maxParams = 999): number {
if (columnCount <= 0) return 1;
// Reserve a little headroom in case column count drifts
const safety = 0;
const effectiveMax = Math.max(1, maxParams - safety);
return Math.max(1, Math.floor(effectiveMax / columnCount));
}
@@ -0,0 +1,443 @@
/**
* Repository cleanup service for handling orphaned repositories
* This service identifies and handles repositories that exist in Gitea
* but are no longer present in GitHub (e.g., unstarred repositories)
*/
import { db, configs, repositories } from '@/lib/db';
import { eq, and, or, sql, not, inArray } from 'drizzle-orm';
import { createGitHubClient, getGithubRepositories, getGithubStarredRepositories } from '@/lib/github';
import { createGiteaClient, deleteGiteaRepo, archiveGiteaRepo, getGiteaRepoOwnerAsync, checkRepoLocation } from '@/lib/gitea';
import { getDecryptedGitHubToken, getDecryptedGiteaToken } from '@/lib/utils/config-encryption';
import { publishEvent } from '@/lib/events';
let cleanupInterval: NodeJS.Timeout | null = null;
let isCleanupRunning = false;
/**
* Identify orphaned repositories for a user
* These are repositories that exist in our database (and likely in Gitea)
* but are no longer in GitHub based on current criteria
*/
async function identifyOrphanedRepositories(config: any): Promise<any[]> {
const userId = config.userId;
try {
// Get current GitHub repositories with rate limit tracking
const decryptedToken = getDecryptedGitHubToken(config);
const githubUsername = config.githubConfig?.owner || undefined;
const octokit = createGitHubClient(decryptedToken, userId, githubUsername);
let allGithubRepos = [];
let githubApiAccessible = true;
try {
// Fetch GitHub data
const [basicAndForkedRepos, starredRepos] = await Promise.all([
getGithubRepositories({ octokit, config }),
config.githubConfig?.includeStarred
? getGithubStarredRepositories({ octokit, config })
: Promise.resolve([]),
]);
allGithubRepos = [...basicAndForkedRepos, ...starredRepos];
} catch (githubError: any) {
// Handle GitHub API errors gracefully
console.warn(`[Repository Cleanup] GitHub API error for user ${userId}: ${githubError.message}`);
// Check if it's a critical error (like account deleted/banned)
if (githubError.status === 404 || githubError.status === 403) {
console.error(`[Repository Cleanup] CRITICAL: GitHub account may be deleted/banned. Skipping cleanup to prevent data loss.`);
console.error(`[Repository Cleanup] Consider using CLEANUP_ORPHANED_REPO_ACTION=archive instead of delete for safety.`);
// Return empty array to skip cleanup entirely when GitHub account is inaccessible
return [];
}
// For other errors, also skip cleanup to be safe
console.error(`[Repository Cleanup] Skipping cleanup due to GitHub API error. This prevents accidental deletion of backups.`);
return [];
}
const githubRepoFullNames = new Set(allGithubRepos.map(repo => repo.fullName));
// Get all repositories from our database
const dbRepos = await db
.select()
.from(repositories)
.where(eq(repositories.userId, userId));
// Only identify repositories as orphaned if we successfully accessed GitHub
// This prevents false positives when GitHub is down or account is inaccessible
const orphanedRepos = dbRepos.filter(repo => {
const isOrphaned = !githubRepoFullNames.has(repo.fullName);
if (!isOrphaned) {
return false;
}
// Skip repositories we've already archived/preserved
if (repo.status === 'archived' || repo.isArchived) {
console.log(`[Repository Cleanup] Skipping ${repo.fullName} - already archived`);
return false;
}
return true;
});
if (orphanedRepos.length > 0) {
console.log(`[Repository Cleanup] Found ${orphanedRepos.length} orphaned repositories for user ${userId}`);
}
return orphanedRepos;
} catch (error) {
console.error(`[Repository Cleanup] Error identifying orphaned repositories for user ${userId}:`, error);
// Return empty array on error to prevent accidental deletions
return [];
}
}
/**
* Handle an orphaned repository based on configuration
*/
async function handleOrphanedRepository(
config: any,
repo: any,
action: 'skip' | 'archive' | 'delete',
dryRun: boolean
): Promise<void> {
const repoFullName = repo.fullName;
if (action === 'skip') {
console.log(`[Repository Cleanup] Skipping orphaned repository ${repoFullName}`);
return;
}
if (repo.status === 'archived' || repo.isArchived) {
console.log(`[Repository Cleanup] Repository ${repoFullName} already archived; skipping additional actions`);
return;
}
if (dryRun) {
console.log(`[Repository Cleanup] DRY RUN: Would ${action} orphaned repository ${repoFullName}`);
return;
}
try {
// Get Gitea client
const giteaToken = getDecryptedGiteaToken(config);
const giteaClient = createGiteaClient(config.giteaConfig.url, giteaToken);
// Determine the Gitea owner and repo name more robustly
const mirroredLocation = (repo.mirroredLocation || '').trim();
let giteaOwner: string;
let giteaRepoName: string;
if (mirroredLocation && mirroredLocation.includes('/')) {
const [ownerPart, namePart] = mirroredLocation.split('/');
giteaOwner = ownerPart;
giteaRepoName = namePart;
} else {
// Fall back to expected owner based on config and repo flags (starred/org overrides)
giteaOwner = await getGiteaRepoOwnerAsync({ config, repository: repo });
giteaRepoName = repo.name;
}
// Normalize owner casing to avoid GetUserByName issues on some Gitea setups
giteaOwner = giteaOwner.trim();
if (action === 'archive') {
console.log(`[Repository Cleanup] Archiving orphaned repository ${repoFullName} in Gitea`);
// Best-effort check to validate actual location; falls back gracefully
try {
const { present, actualOwner } = await checkRepoLocation({
config,
repository: repo,
expectedOwner: giteaOwner,
});
if (present) {
giteaOwner = actualOwner;
}
} catch {
// Non-fatal; continue with best guess
}
await archiveGiteaRepo(giteaClient, giteaOwner, giteaRepoName);
// Update database status
await db.update(repositories).set({
status: 'archived',
isArchived: true,
errorMessage: 'Repository archived - no longer in GitHub',
updatedAt: new Date(),
}).where(eq(repositories.id, repo.id));
// Create event
await publishEvent({
userId: config.userId,
channel: 'repository',
payload: {
type: 'repository.archived',
message: `Repository ${repoFullName} archived (no longer in GitHub)`,
metadata: {
repositoryId: repo.id,
repositoryName: repo.name,
action: 'archive',
reason: 'orphaned',
},
},
});
} else if (action === 'delete') {
console.log(`[Repository Cleanup] Deleting orphaned repository ${repoFullName} from Gitea`);
await deleteGiteaRepo(giteaClient, giteaOwner, giteaRepoName);
// Delete from database
await db.delete(repositories).where(eq(repositories.id, repo.id));
// Create event
await publishEvent({
userId: config.userId,
channel: 'repository',
payload: {
type: 'repository.deleted',
message: `Repository ${repoFullName} deleted (no longer in GitHub)`,
metadata: {
repositoryId: repo.id,
repositoryName: repo.name,
action: 'delete',
reason: 'orphaned',
},
},
});
}
} catch (error) {
console.error(`[Repository Cleanup] Error handling orphaned repository ${repoFullName}:`, error);
// Update repository with error status
await db.update(repositories).set({
status: 'failed',
errorMessage: `Cleanup failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
updatedAt: new Date(),
}).where(eq(repositories.id, repo.id));
throw error;
}
}
/**
* Run repository cleanup for a single configuration
*/
async function runRepositoryCleanup(config: any): Promise<{
orphanedCount: number;
processedCount: number;
errors: string[];
}> {
const userId = config.userId;
const cleanupConfig = config.cleanupConfig || {};
console.log(`[Repository Cleanup] Starting repository cleanup for user ${userId}`);
const results = {
orphanedCount: 0,
processedCount: 0,
errors: [] as string[],
};
try {
// Check if repository cleanup is enabled - either through the main toggle or the specific feature
const isCleanupEnabled = cleanupConfig.enabled || cleanupConfig.deleteIfNotInGitHub;
if (!isCleanupEnabled) {
console.log(`[Repository Cleanup] Repository cleanup disabled for user ${userId} (enabled=${cleanupConfig.enabled}, deleteIfNotInGitHub=${cleanupConfig.deleteIfNotInGitHub})`);
return results;
}
// Only process if deleteIfNotInGitHub is enabled (this is the main feature flag)
if (!cleanupConfig.deleteIfNotInGitHub) {
console.log(`[Repository Cleanup] Delete if not in GitHub disabled for user ${userId}`);
return results;
}
// Warn if deleteFromGitea is explicitly disabled but deleteIfNotInGitHub is enabled
if (cleanupConfig.deleteFromGitea === false && cleanupConfig.deleteIfNotInGitHub) {
console.warn(`[Repository Cleanup] Warning: CLEANUP_DELETE_FROM_GITEA is false but CLEANUP_DELETE_IF_NOT_IN_GITHUB is true. Proceeding with cleanup.`);
}
// Identify orphaned repositories
const orphanedRepos = await identifyOrphanedRepositories(config);
results.orphanedCount = orphanedRepos.length;
if (orphanedRepos.length === 0) {
console.log(`[Repository Cleanup] No orphaned repositories found for user ${userId}`);
return results;
}
console.log(`[Repository Cleanup] Found ${orphanedRepos.length} orphaned repositories for user ${userId}`);
// Get protected repositories
const protectedRepos = new Set(cleanupConfig.protectedRepos || []);
// Process orphaned repositories
const action = cleanupConfig.orphanedRepoAction || 'archive';
const dryRun = cleanupConfig.dryRun ?? false;
const batchSize = cleanupConfig.batchSize || 10;
const pauseBetweenDeletes = cleanupConfig.pauseBetweenDeletes || 2000;
for (let i = 0; i < orphanedRepos.length; i += batchSize) {
const batch = orphanedRepos.slice(i, i + batchSize);
for (const repo of batch) {
// Skip protected repositories
if (protectedRepos.has(repo.name) || protectedRepos.has(repo.fullName)) {
console.log(`[Repository Cleanup] Skipping protected repository ${repo.fullName}`);
continue;
}
try {
await handleOrphanedRepository(config, repo, action, dryRun);
results.processedCount++;
} catch (error) {
const errorMsg = `Failed to ${action} ${repo.fullName}: ${error instanceof Error ? error.message : 'Unknown error'}`;
console.error(`[Repository Cleanup] ${errorMsg}`);
results.errors.push(errorMsg);
}
// Pause between operations to avoid rate limiting
if (i < orphanedRepos.length - 1) {
await new Promise(resolve => setTimeout(resolve, pauseBetweenDeletes));
}
}
}
// Update cleanup timestamps
const currentTime = new Date();
await db.update(configs).set({
cleanupConfig: {
...cleanupConfig,
lastRun: currentTime,
nextRun: new Date(currentTime.getTime() + 24 * 60 * 60 * 1000), // Next run in 24 hours
},
updatedAt: currentTime,
}).where(eq(configs.id, config.id));
console.log(`[Repository Cleanup] Completed cleanup for user ${userId}: ${results.processedCount}/${results.orphanedCount} processed`);
} catch (error) {
console.error(`[Repository Cleanup] Error during cleanup for user ${userId}:`, error);
results.errors.push(`General cleanup error: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
return results;
}
/**
* Main repository cleanup loop
*/
async function repositoryCleanupLoop(): Promise<void> {
if (isCleanupRunning) {
console.log('[Repository Cleanup] Cleanup is already running, skipping this cycle');
return;
}
isCleanupRunning = true;
try {
// Get all active configurations with repository cleanup enabled
const activeConfigs = await db
.select()
.from(configs)
.where(eq(configs.isActive, true));
const enabledConfigs = activeConfigs.filter(config => {
const cleanupConfig = config.cleanupConfig || {};
// Enable cleanup if either the main toggle is on OR deleteIfNotInGitHub is enabled
return cleanupConfig.enabled === true || cleanupConfig.deleteIfNotInGitHub === true;
});
if (enabledConfigs.length === 0) {
console.log('[Repository Cleanup] No configurations with repository cleanup enabled');
return;
}
console.log(`[Repository Cleanup] Processing ${enabledConfigs.length} configurations`);
// Process each configuration
for (const config of enabledConfigs) {
await runRepositoryCleanup(config);
}
} catch (error) {
console.error('[Repository Cleanup] Error in cleanup loop:', error);
} finally {
isCleanupRunning = false;
}
}
/**
* Start the repository cleanup service
*/
export function startRepositoryCleanupService(): void {
if (cleanupInterval) {
console.log('[Repository Cleanup] Service is already running');
return;
}
console.log('[Repository Cleanup] Starting repository cleanup service');
// Run immediately on start
repositoryCleanupLoop().catch(error => {
console.error('[Repository Cleanup] Error during initial cleanup run:', error);
});
// Run every 6 hours to check for orphaned repositories
const checkInterval = 6 * 60 * 60 * 1000; // 6 hours
cleanupInterval = setInterval(() => {
repositoryCleanupLoop().catch(error => {
console.error('[Repository Cleanup] Error during cleanup run:', error);
});
}, checkInterval);
console.log('[Repository Cleanup] Service started, checking every 6 hours');
}
/**
* Stop the repository cleanup service
*/
export function stopRepositoryCleanupService(): void {
if (cleanupInterval) {
clearInterval(cleanupInterval);
cleanupInterval = null;
console.log('[Repository Cleanup] Service stopped');
}
}
/**
* Check if the repository cleanup service is running
*/
export function isRepositoryCleanupServiceRunning(): boolean {
return cleanupInterval !== null;
}
// Export functions for use by scheduler
export { identifyOrphanedRepositories, handleOrphanedRepository };
/**
* Manually trigger repository cleanup for a specific user
*/
export async function triggerRepositoryCleanup(userId: string): Promise<{
orphanedCount: number;
processedCount: number;
errors: string[];
}> {
const [config] = await db
.select()
.from(configs)
.where(and(
eq(configs.userId, userId),
eq(configs.isActive, true)
))
.limit(1);
if (!config) {
throw new Error('No active configuration found for user');
}
return runRepositoryCleanup(config);
}
@@ -0,0 +1,82 @@
import { describe, test, expect, mock } from "bun:test";
import { repoStatusEnum } from "@/types/Repository";
import type { Repository } from "./db/schema";
describe("Scheduler Service - Ignored Repository Handling", () => {
test("should skip repositories with 'ignored' status", async () => {
// Create a repository with ignored status
const ignoredRepo: Partial<Repository> = {
id: "ignored-repo-id",
name: "ignored-repo",
fullName: "user/ignored-repo",
status: repoStatusEnum.parse("ignored"),
userId: "user-id",
};
// Mock the scheduler logic that checks repository status
const shouldMirrorRepository = (repo: Partial<Repository>): boolean => {
// Skip ignored repositories
if (repo.status === "ignored") {
return false;
}
// Skip recently mirrored repositories
if (repo.status === "synced" || repo.status === "mirrored") {
const lastUpdated = repo.updatedAt;
if (lastUpdated && Date.now() - lastUpdated.getTime() < 3600000) {
return false; // Skip if mirrored within last hour
}
}
return true;
};
// Test that ignored repository is skipped
expect(shouldMirrorRepository(ignoredRepo)).toBe(false);
// Test that non-ignored repository is not skipped
const activeRepo: Partial<Repository> = {
...ignoredRepo,
status: repoStatusEnum.parse("imported"),
};
expect(shouldMirrorRepository(activeRepo)).toBe(true);
// Test that recently synced repository is skipped
const recentlySyncedRepo: Partial<Repository> = {
...ignoredRepo,
status: repoStatusEnum.parse("synced"),
updatedAt: new Date(),
};
expect(shouldMirrorRepository(recentlySyncedRepo)).toBe(false);
// Test that old synced repository is not skipped
const oldSyncedRepo: Partial<Repository> = {
...ignoredRepo,
status: repoStatusEnum.parse("synced"),
updatedAt: new Date(Date.now() - 7200000), // 2 hours ago
};
expect(shouldMirrorRepository(oldSyncedRepo)).toBe(true);
});
test("should validate all repository status enum values", () => {
const validStatuses = [
"imported",
"mirroring",
"mirrored",
"syncing",
"synced",
"failed",
"skipped",
"ignored",
"deleting",
"deleted"
];
validStatuses.forEach(status => {
expect(() => repoStatusEnum.parse(status)).not.toThrow();
});
// Test invalid status
expect(() => repoStatusEnum.parse("invalid-status")).toThrow();
});
});
@@ -0,0 +1,736 @@
/**
* Scheduler service for automatic repository mirroring
* This service runs in the background and automatically mirrors repositories
* based on the configured schedule
*/
import { db, configs, repositories } from '@/lib/db';
import { eq, and, or } from 'drizzle-orm';
import { syncGiteaRepo, mirrorGithubRepoToGitea } from '@/lib/gitea';
import { getDecryptedGitHubToken } from '@/lib/utils/config-encryption';
import { parseInterval, formatDuration } from '@/lib/utils/duration-parser';
import type { Repository } from '@/lib/db/schema';
import { repoStatusEnum, repositoryVisibilityEnum } from '@/types/Repository';
import { mergeGitReposPreferStarred, normalizeGitRepoToInsert, calcBatchSizeForInsert } from '@/lib/repo-utils';
let schedulerInterval: NodeJS.Timeout | null = null;
let isSchedulerRunning = false;
let hasPerformedAutoStart = false; // Track if we've already done auto-start
/**
* Parse schedule interval with enhanced support for duration strings, cron, and numbers
* Supports formats like: "8h", "30m", "24h", "0 0/2 * * *", or plain numbers (seconds)
*/
function parseScheduleInterval(interval: string | number): number {
try {
const milliseconds = parseInterval(interval);
console.log(`[Scheduler] Parsed interval "${interval}" as ${formatDuration(milliseconds)}`);
return milliseconds;
} catch (error) {
console.error(`[Scheduler] Failed to parse interval "${interval}": ${error instanceof Error ? error.message : 'Unknown error'}`);
const defaultInterval = 60 * 60 * 1000; // 1 hour
console.log(`[Scheduler] Using default interval: ${formatDuration(defaultInterval)}`);
return defaultInterval;
}
}
/**
* Run scheduled mirror sync for a single user configuration
*/
async function runScheduledSync(config: any): Promise<void> {
const userId = config.userId;
console.log(`[Scheduler] Running scheduled sync for user ${userId}`);
try {
// Check if tokens are configured before proceeding
if (!config.githubConfig?.token || !config.giteaConfig?.token) {
console.log(`[Scheduler] Skipping sync for user ${userId}: GitHub or Gitea tokens not configured`);
return;
}
// Update lastRun timestamp
const currentTime = new Date();
const scheduleConfig = config.scheduleConfig || {};
// Priority order: scheduleConfig.interval > giteaConfig.mirrorInterval > default
const intervalSource = scheduleConfig.interval ||
config.giteaConfig?.mirrorInterval ||
'1h'; // Default to 1 hour instead of 3600 seconds
console.log(`[Scheduler] Using interval source for user ${userId}: ${intervalSource}`);
const interval = parseScheduleInterval(intervalSource);
// Note: The interval timing is calculated from the LAST RUN time, not from container startup
// This means if GITEA_MIRROR_INTERVAL=8h, the next sync will be 8 hours from the last completed sync
const nextRun = new Date(currentTime.getTime() + interval);
console.log(`[Scheduler] Next sync for user ${userId} scheduled for: ${nextRun.toISOString()} (in ${formatDuration(interval)})`);
await db.update(configs).set({
scheduleConfig: {
...scheduleConfig,
lastRun: currentTime,
nextRun: nextRun,
},
updatedAt: currentTime,
}).where(eq(configs.id, config.id));
// Auto-discovery: Check for new GitHub repositories
if (scheduleConfig.autoImport !== false) {
console.log(`[Scheduler] Checking for new GitHub repositories for user ${userId}...`);
try {
const { getGithubRepositories, getGithubStarredRepositories } = await import('@/lib/github');
const { v4: uuidv4 } = await import('uuid');
const { getDecryptedGitHubToken } = await import('@/lib/utils/config-encryption');
// Create GitHub client
const decryptedToken = getDecryptedGitHubToken(config);
const { Octokit } = await import('@octokit/rest');
const octokit = new Octokit({ auth: decryptedToken });
// Fetch GitHub data
const [basicAndForkedRepos, starredRepos] = await Promise.all([
getGithubRepositories({ octokit, config }),
config.githubConfig?.includeStarred
? getGithubStarredRepositories({ octokit, config })
: Promise.resolve([]),
]);
const allGithubRepos = mergeGitReposPreferStarred(basicAndForkedRepos, starredRepos);
// Check for new repositories
const existingRepos = await db
.select({ normalizedFullName: repositories.normalizedFullName })
.from(repositories)
.where(eq(repositories.userId, userId));
const existingRepoNames = new Set(existingRepos.map(r => r.normalizedFullName));
const newRepos = allGithubRepos.filter(r => !existingRepoNames.has(r.fullName.toLowerCase()));
if (newRepos.length > 0) {
console.log(`[Scheduler] Found ${newRepos.length} new repositories for user ${userId}`);
// Insert new repositories
const reposToInsert = newRepos.map(repo =>
normalizeGitRepoToInsert(repo, { userId, configId: config.id })
);
// Batch insert to avoid SQLite parameter limit
const sample = reposToInsert[0];
const columnCount = Object.keys(sample ?? {}).length || 1;
const BATCH_SIZE = calcBatchSizeForInsert(columnCount);
for (let i = 0; i < reposToInsert.length; i += BATCH_SIZE) {
const batch = reposToInsert.slice(i, i + BATCH_SIZE);
await db
.insert(repositories)
.values(batch)
.onConflictDoNothing({ target: [repositories.userId, repositories.normalizedFullName] });
}
console.log(`[Scheduler] Successfully imported ${newRepos.length} new repositories for user ${userId}`);
} else {
console.log(`[Scheduler] No new repositories found for user ${userId}`);
}
} catch (error) {
console.error(`[Scheduler] Failed to auto-import repositories for user ${userId}:`, error);
}
}
// Auto-cleanup: Remove orphaned repositories (repos that no longer exist in GitHub)
if (config.cleanupConfig?.deleteIfNotInGitHub) {
console.log(`[Scheduler] Checking for orphaned repositories to cleanup for user ${userId}...`);
try {
const { identifyOrphanedRepositories, handleOrphanedRepository } = await import('@/lib/repository-cleanup-service');
const orphanedRepos = await identifyOrphanedRepositories(config);
if (orphanedRepos.length > 0) {
console.log(`[Scheduler] Found ${orphanedRepos.length} orphaned repositories for cleanup`);
for (const repo of orphanedRepos) {
try {
await handleOrphanedRepository(
config,
repo,
config.cleanupConfig.orphanedRepoAction || 'archive',
config.cleanupConfig.dryRun ?? false
);
console.log(`[Scheduler] Handled orphaned repository: ${repo.fullName}`);
} catch (error) {
console.error(`[Scheduler] Failed to handle orphaned repository ${repo.fullName}:`, error);
}
}
} else {
console.log(`[Scheduler] No orphaned repositories found for cleanup`);
}
} catch (error) {
console.error(`[Scheduler] Failed to cleanup orphaned repositories for user ${userId}:`, error);
}
}
// Auto-mirror: Mirror imported/pending/failed repositories if enabled
if (scheduleConfig.autoMirror) {
try {
console.log(`[Scheduler] Auto-mirror enabled - checking for repositories to mirror for user ${userId}...`);
const reposNeedingMirror = await db
.select()
.from(repositories)
.where(
and(
eq(repositories.userId, userId),
or(
eq(repositories.status, 'imported'),
eq(repositories.status, 'pending'),
eq(repositories.status, 'failed')
)
)
);
if (reposNeedingMirror.length > 0) {
console.log(`[Scheduler] Found ${reposNeedingMirror.length} repositories that need initial mirroring`);
// Prepare Octokit client
const decryptedToken = getDecryptedGitHubToken(config);
const { Octokit } = await import('@octokit/rest');
const octokit = new Octokit({ auth: decryptedToken });
// Process repositories in batches
const batchSize = scheduleConfig.batchSize || 10;
const pauseBetweenBatches = scheduleConfig.pauseBetweenBatches || 2000;
for (let i = 0; i < reposNeedingMirror.length; i += batchSize) {
const batch = reposNeedingMirror.slice(i, Math.min(i + batchSize, reposNeedingMirror.length));
console.log(`[Scheduler] Auto-mirror batch ${Math.floor(i / batchSize) + 1} of ${Math.ceil(reposNeedingMirror.length / batchSize)} (${batch.length} repos)`);
await Promise.all(
batch.map(async (repo) => {
try {
const repository: Repository = {
...repo,
status: repoStatusEnum.parse(repo.status),
organization: repo.organization ?? undefined,
lastMirrored: repo.lastMirrored ?? undefined,
errorMessage: repo.errorMessage ?? undefined,
mirroredLocation: repo.mirroredLocation || '',
forkedFrom: repo.forkedFrom ?? undefined,
visibility: repositoryVisibilityEnum.parse(repo.visibility),
};
await mirrorGithubRepoToGitea({ octokit, repository, config });
console.log(`[Scheduler] Auto-mirrored repository: ${repo.fullName}`);
} catch (error) {
console.error(`[Scheduler] Failed to auto-mirror repository ${repo.fullName}:`, error);
}
})
);
// Pause between batches if configured
if (i + batchSize < reposNeedingMirror.length) {
console.log(`[Scheduler] Pausing for ${pauseBetweenBatches}ms before next auto-mirror batch...`);
await new Promise(resolve => setTimeout(resolve, pauseBetweenBatches));
}
}
} else {
console.log(`[Scheduler] No repositories need initial mirroring`);
}
} catch (mirrorError) {
console.error(`[Scheduler] Error during auto-mirror phase for user ${userId}:`, mirrorError);
}
}
// Get repositories to sync
let reposToSync = await db
.select()
.from(repositories)
.where(
and(
eq(repositories.userId, userId),
or(
eq(repositories.status, 'mirrored'),
eq(repositories.status, 'synced'),
eq(repositories.status, 'failed'),
eq(repositories.status, 'pending')
)
)
);
// Filter based on schedule configuration
if (scheduleConfig.skipRecentlyMirrored) {
const recentThreshold = scheduleConfig.recentThreshold || 3600000; // Default 1 hour
const thresholdTime = new Date(currentTime.getTime() - recentThreshold);
reposToSync = reposToSync.filter(repo => {
if (!repo.lastMirrored) return true; // Never mirrored
return repo.lastMirrored < thresholdTime;
});
}
if (scheduleConfig.onlyMirrorUpdated) {
const updateInterval = scheduleConfig.updateInterval || 86400000; // Default 24 hours
const updateThreshold = new Date(currentTime.getTime() - updateInterval);
// Check GitHub for updates (this would need to be implemented)
// For now, we'll sync repos that haven't been synced in the update interval
reposToSync = reposToSync.filter(repo => {
if (!repo.lastMirrored) return true;
return repo.lastMirrored < updateThreshold;
});
}
if (reposToSync.length === 0) {
console.log(`[Scheduler] No repositories to sync for user ${userId}`);
return;
}
console.log(`[Scheduler] Syncing ${reposToSync.length} repositories for user ${userId}`);
// Process repositories in batches
const batchSize = scheduleConfig.batchSize || 10;
const pauseBetweenBatches = scheduleConfig.pauseBetweenBatches || 5000;
const concurrent = scheduleConfig.concurrent ?? false;
for (let i = 0; i < reposToSync.length; i += batchSize) {
const batch = reposToSync.slice(i, i + batchSize);
if (concurrent) {
// Process batch concurrently
await Promise.allSettled(
batch.map(repo => syncSingleRepository(config, repo))
);
} else {
// Process batch sequentially
for (const repo of batch) {
await syncSingleRepository(config, repo);
}
}
// Pause between batches if not the last batch
if (i + batchSize < reposToSync.length) {
await new Promise(resolve => setTimeout(resolve, pauseBetweenBatches));
}
}
console.log(`[Scheduler] Completed scheduled sync for user ${userId}`);
} catch (error) {
console.error(`[Scheduler] Error during scheduled sync for user ${userId}:`, error);
}
}
/**
* Sync a single repository
*/
async function syncSingleRepository(config: any, repo: any): Promise<void> {
try {
const repository: Repository = {
...repo,
status: repoStatusEnum.parse(repo.status),
organization: repo.organization ?? undefined,
lastMirrored: repo.lastMirrored ?? undefined,
errorMessage: repo.errorMessage ?? undefined,
mirroredLocation: repo.mirroredLocation || '',
forkedFrom: repo.forkedFrom ?? undefined,
visibility: repositoryVisibilityEnum.parse(repo.visibility),
};
await syncGiteaRepo({ config, repository });
console.log(`[Scheduler] Successfully synced repository ${repo.fullName}`);
} catch (error) {
console.error(`[Scheduler] Failed to sync repository ${repo.fullName}:`, error);
// Update repository status to failed
await db.update(repositories).set({
status: 'failed',
errorMessage: error instanceof Error ? error.message : 'Unknown error',
updatedAt: new Date(),
}).where(eq(repositories.id, repo.id));
}
}
/**
* Check if we should auto-start based on environment configuration
*/
async function checkAutoStartConfiguration(): Promise<boolean> {
// Don't auto-start more than once
if (hasPerformedAutoStart) {
return false;
}
try {
// Check if any configuration has scheduling enabled or mirror interval set
const activeConfigs = await db
.select()
.from(configs)
.where(eq(configs.isActive, true));
for (const config of activeConfigs) {
// Check if scheduling is enabled via environment
const scheduleEnabled = config.scheduleConfig?.enabled === true;
const hasMirrorInterval = !!config.giteaConfig?.mirrorInterval;
// If either SCHEDULE_ENABLED=true or GITEA_MIRROR_INTERVAL is set, we should auto-start
if (scheduleEnabled || hasMirrorInterval) {
console.log(`[Scheduler] Auto-start conditions met for user ${config.userId} (scheduleEnabled=${scheduleEnabled}, hasMirrorInterval=${hasMirrorInterval})`);
return true;
}
}
return false;
} catch (error) {
console.error('[Scheduler] Error checking auto-start configuration:', error);
return false;
}
}
/**
* Perform initial auto-start: import repositories and trigger mirror
*/
async function performInitialAutoStart(): Promise<void> {
hasPerformedAutoStart = true;
try {
console.log('[Scheduler] Performing initial auto-start...');
// Get all active configurations
const activeConfigs = await db
.select()
.from(configs)
.where(eq(configs.isActive, true));
for (const config of activeConfigs) {
// Skip if tokens are not configured
if (!config.githubConfig?.token || !config.giteaConfig?.token) {
console.log(`[Scheduler] Skipping auto-start for user ${config.userId}: tokens not configured`);
continue;
}
const scheduleEnabled = config.scheduleConfig?.enabled === true;
const hasMirrorInterval = !!config.giteaConfig?.mirrorInterval;
// Only process configs that have scheduling or mirror interval configured
if (!scheduleEnabled && !hasMirrorInterval) {
continue;
}
console.log(`[Scheduler] Auto-starting for user ${config.userId}...`);
try {
// Step 1: Import repositories from GitHub
console.log(`[Scheduler] Step 1: Importing repositories from GitHub for user ${config.userId}...`);
const { getGithubRepositories, getGithubStarredRepositories } = await import('@/lib/github');
const { v4: uuidv4 } = await import('uuid');
// Create GitHub client
const decryptedToken = getDecryptedGitHubToken(config);
const { Octokit } = await import('@octokit/rest');
const octokit = new Octokit({ auth: decryptedToken });
// Fetch GitHub data
const [basicAndForkedRepos, starredRepos] = await Promise.all([
getGithubRepositories({ octokit, config }),
config.githubConfig?.includeStarred
? getGithubStarredRepositories({ octokit, config })
: Promise.resolve([]),
]);
const allGithubRepos = mergeGitReposPreferStarred(basicAndForkedRepos, starredRepos);
// Check for new repositories
const existingRepos = await db
.select({ normalizedFullName: repositories.normalizedFullName })
.from(repositories)
.where(eq(repositories.userId, config.userId));
const existingRepoNames = new Set(existingRepos.map(r => r.normalizedFullName));
const reposToImport = allGithubRepos.filter(r => !existingRepoNames.has(r.fullName.toLowerCase()));
if (reposToImport.length > 0) {
console.log(`[Scheduler] Importing ${reposToImport.length} repositories for user ${config.userId}...`);
// Insert new repositories
const reposToInsert = reposToImport.map(repo =>
normalizeGitRepoToInsert(repo, { userId: config.userId, configId: config.id })
);
// Batch insert to avoid SQLite parameter limit
const sample = reposToInsert[0];
const columnCount = Object.keys(sample ?? {}).length || 1;
const BATCH_SIZE = calcBatchSizeForInsert(columnCount);
for (let i = 0; i < reposToInsert.length; i += BATCH_SIZE) {
const batch = reposToInsert.slice(i, i + BATCH_SIZE);
await db
.insert(repositories)
.values(batch)
.onConflictDoNothing({ target: [repositories.userId, repositories.normalizedFullName] });
}
console.log(`[Scheduler] Successfully imported ${reposToImport.length} repositories`);
} else {
console.log(`[Scheduler] No new repositories to import for user ${config.userId}`);
}
// Check if we already have mirrored repositories (indicating this isn't first run)
const mirroredRepos = await db
.select()
.from(repositories)
.where(
and(
eq(repositories.userId, config.userId),
or(
eq(repositories.status, 'mirrored'),
eq(repositories.status, 'synced')
)
)
)
.limit(1);
// If we already have mirrored repos, skip the initial mirror (let regular sync handle it)
if (mirroredRepos.length > 0) {
console.log(`[Scheduler] User ${config.userId} already has mirrored repositories, skipping initial mirror (let regular sync handle updates)`);
// Still update the schedule config to indicate scheduling is active
const currentTime = new Date();
const intervalSource = config.scheduleConfig?.interval ||
config.giteaConfig?.mirrorInterval ||
'8h';
const interval = parseScheduleInterval(intervalSource);
const nextRun = new Date(currentTime.getTime() + interval);
await db.update(configs).set({
scheduleConfig: {
...config.scheduleConfig,
enabled: true,
lastRun: currentTime,
nextRun: nextRun,
},
updatedAt: currentTime,
}).where(eq(configs.id, config.id));
console.log(`[Scheduler] Scheduling enabled for user ${config.userId}, next sync at ${nextRun.toISOString()}`);
continue;
}
// Step 2: Trigger mirror for all repositories that need mirroring
console.log(`[Scheduler] Step 2: Triggering mirror for repositories that need mirroring...`);
const reposNeedingMirror = await db
.select()
.from(repositories)
.where(
and(
eq(repositories.userId, config.userId),
or(
eq(repositories.status, 'imported'),
eq(repositories.status, 'pending'),
eq(repositories.status, 'failed')
)
)
);
if (reposNeedingMirror.length > 0) {
console.log(`[Scheduler] Found ${reposNeedingMirror.length} repositories that need mirroring`);
// Reuse the octokit instance from above
// (octokit was already created in the import phase)
// Process repositories in batches
const batchSize = config.scheduleConfig?.batchSize || 5;
for (let i = 0; i < reposNeedingMirror.length; i += batchSize) {
const batch = reposNeedingMirror.slice(i, Math.min(i + batchSize, reposNeedingMirror.length));
console.log(`[Scheduler] Processing batch ${Math.floor(i / batchSize) + 1} of ${Math.ceil(reposNeedingMirror.length / batchSize)} (${batch.length} repos)`);
await Promise.all(
batch.map(async (repo) => {
try {
const repository: Repository = {
...repo,
status: repoStatusEnum.parse(repo.status),
organization: repo.organization ?? undefined,
lastMirrored: repo.lastMirrored ?? undefined,
errorMessage: repo.errorMessage ?? undefined,
mirroredLocation: repo.mirroredLocation || '',
forkedFrom: repo.forkedFrom ?? undefined,
visibility: repositoryVisibilityEnum.parse(repo.visibility),
};
await mirrorGithubRepoToGitea({
octokit,
repository,
config
});
console.log(`[Scheduler] Successfully mirrored repository: ${repo.fullName}`);
} catch (error) {
console.error(`[Scheduler] Failed to mirror repository ${repo.fullName}:`, error);
}
})
);
// Pause between batches if configured
if (i + batchSize < reposNeedingMirror.length) {
const pauseTime = config.scheduleConfig?.pauseBetweenBatches || 2000;
console.log(`[Scheduler] Pausing for ${pauseTime}ms before next batch...`);
await new Promise(resolve => setTimeout(resolve, pauseTime));
}
}
console.log(`[Scheduler] Completed initial mirror for ${reposNeedingMirror.length} repositories`);
} else {
console.log(`[Scheduler] No repositories need mirroring`);
}
// Update the schedule config to indicate we've run
const currentTime = new Date();
const intervalSource = config.scheduleConfig?.interval ||
config.giteaConfig?.mirrorInterval ||
'8h';
const interval = parseScheduleInterval(intervalSource);
const nextRun = new Date(currentTime.getTime() + interval);
await db.update(configs).set({
scheduleConfig: {
...config.scheduleConfig,
enabled: true, // Ensure scheduling is enabled
lastRun: currentTime,
nextRun: nextRun,
},
updatedAt: currentTime,
}).where(eq(configs.id, config.id));
console.log(`[Scheduler] Auto-start completed for user ${config.userId}, next sync at ${nextRun.toISOString()}`);
} catch (error) {
console.error(`[Scheduler] Failed to auto-start for user ${config.userId}:`, error);
}
}
console.log('[Scheduler] Initial auto-start completed');
} catch (error) {
console.error('[Scheduler] Failed to perform initial auto-start:', error);
}
}
/**
* Main scheduler loop
*/
async function schedulerLoop(): Promise<void> {
if (isSchedulerRunning) {
console.log('[Scheduler] Scheduler is already running, skipping this cycle');
return;
}
isSchedulerRunning = true;
try {
// Get all active configurations with scheduling enabled
const activeConfigs = await db
.select()
.from(configs)
.where(
and(
eq(configs.isActive, true)
)
);
const enabledConfigs = activeConfigs.filter(config =>
config.scheduleConfig?.enabled === true
);
// Further filter configs that have valid tokens
const validConfigs = enabledConfigs.filter(config => {
const hasGitHubToken = !!config.githubConfig?.token;
const hasGiteaToken = !!config.giteaConfig?.token;
if (!hasGitHubToken || !hasGiteaToken) {
console.log(`[Scheduler] User ${config.userId}: Scheduling enabled but tokens missing (GitHub: ${hasGitHubToken}, Gitea: ${hasGiteaToken})`);
return false;
}
return true;
});
if (validConfigs.length === 0) {
if (enabledConfigs.length > 0) {
console.log(`[Scheduler] ${enabledConfigs.length} config(s) have scheduling enabled but lack required tokens`);
} else {
console.log(`[Scheduler] No configurations with scheduling enabled (found ${activeConfigs.length} active configs)`);
// Show details about why configs are not enabled
activeConfigs.forEach(config => {
const scheduleEnabled = config.scheduleConfig?.enabled;
const mirrorInterval = config.giteaConfig?.mirrorInterval;
console.log(`[Scheduler] User ${config.userId}: scheduleEnabled=${scheduleEnabled}, mirrorInterval=${mirrorInterval}`);
});
}
return;
}
console.log(`[Scheduler] Processing ${validConfigs.length} valid configurations (out of ${enabledConfigs.length} with scheduling enabled)`);
// Check each configuration to see if it's time to run
const currentTime = new Date();
for (const config of validConfigs) {
const scheduleConfig = config.scheduleConfig || {};
// Check if it's time to run based on nextRun
if (scheduleConfig.nextRun && new Date(scheduleConfig.nextRun) > currentTime) {
console.log(`[Scheduler] Skipping user ${config.userId} - next run at ${scheduleConfig.nextRun}`);
continue;
}
// If no nextRun is set, or it's past due, run the sync
await runScheduledSync(config);
}
} catch (error) {
console.error('[Scheduler] Error in scheduler loop:', error);
} finally {
isSchedulerRunning = false;
}
}
/**
* Start the scheduler service
*/
export async function startSchedulerService(): Promise<void> {
if (schedulerInterval) {
console.log('[Scheduler] Scheduler service is already running');
return;
}
console.log('[Scheduler] Starting scheduler service');
// Check if we should auto-start mirroring based on environment variables
const shouldAutoStart = await checkAutoStartConfiguration();
if (shouldAutoStart) {
console.log('[Scheduler] Auto-start detected from environment variables, triggering initial import and mirror...');
await performInitialAutoStart();
}
// Run immediately on start
schedulerLoop().catch(error => {
console.error('[Scheduler] Error during initial scheduler run:', error);
});
// Run every minute to check for scheduled tasks
const checkInterval = 60 * 1000; // 1 minute
schedulerInterval = setInterval(() => {
schedulerLoop().catch(error => {
console.error('[Scheduler] Error during scheduler run:', error);
});
}, checkInterval);
console.log(`[Scheduler] Scheduler service started, checking every ${formatDuration(checkInterval)} for scheduled tasks`);
console.log('[Scheduler] To trigger manual sync, check your configuration intervals and ensure SCHEDULE_ENABLED=true or use GITEA_MIRROR_INTERVAL');
}
/**
* Stop the scheduler service
*/
export function stopSchedulerService(): void {
if (schedulerInterval) {
clearInterval(schedulerInterval);
schedulerInterval = null;
console.log('[Scheduler] Scheduler service stopped');
}
}
/**
* Check if the scheduler service is running
*/
export function isSchedulerServiceRunning(): boolean {
return schedulerInterval !== null;
}
@@ -0,0 +1,240 @@
/**
* Shutdown Manager for Graceful Application Termination
*
* This module provides centralized shutdown coordination for the gitea-mirror application.
* It ensures that:
* - In-progress jobs are properly saved to the database
* - Database connections are closed cleanly
* - Background services are stopped gracefully
* - No data loss occurs during container restarts
*/
import { db, mirrorJobs } from './db';
import { eq, and } from 'drizzle-orm';
import type { MirrorJob } from './db/schema';
// Shutdown state tracking
let shutdownInProgress = false;
let shutdownStartTime: Date | null = null;
let shutdownCallbacks: Array<() => Promise<void>> = [];
let activeJobs = new Set<string>();
let shutdownTimeout: NodeJS.Timeout | null = null;
// Configuration
const SHUTDOWN_TIMEOUT = 30000; // 30 seconds max shutdown time
const JOB_SAVE_TIMEOUT = 10000; // 10 seconds to save job state
/**
* Register a callback to be executed during shutdown
*/
export function registerShutdownCallback(callback: () => Promise<void>): void {
shutdownCallbacks.push(callback);
}
/**
* Register an active job that needs to be tracked during shutdown
*/
export function registerActiveJob(jobId: string): void {
activeJobs.add(jobId);
console.log(`Registered active job: ${jobId} (${activeJobs.size} total active jobs)`);
}
/**
* Unregister a job when it completes normally
*/
export function unregisterActiveJob(jobId: string): void {
activeJobs.delete(jobId);
console.log(`Unregistered job: ${jobId} (${activeJobs.size} remaining active jobs)`);
}
/**
* Check if shutdown is currently in progress
*/
export function isShuttingDown(): boolean {
return shutdownInProgress;
}
/**
* Get shutdown status information
*/
export function getShutdownStatus() {
return {
inProgress: shutdownInProgress,
startTime: shutdownStartTime,
activeJobs: Array.from(activeJobs),
registeredCallbacks: shutdownCallbacks.length,
};
}
/**
* Save the current state of an active job to the database
*/
async function saveJobState(jobId: string): Promise<void> {
try {
console.log(`Saving state for job ${jobId}...`);
// Update the job to mark it as interrupted but not failed
await db
.update(mirrorJobs)
.set({
inProgress: false,
lastCheckpoint: new Date(),
message: 'Job interrupted by application shutdown - will resume on restart',
})
.where(eq(mirrorJobs.id, jobId));
console.log(`✅ Saved state for job ${jobId}`);
} catch (error) {
console.error(`❌ Failed to save state for job ${jobId}:`, error);
throw error;
}
}
/**
* Save all active jobs to the database
*/
async function saveAllActiveJobs(): Promise<void> {
if (activeJobs.size === 0) {
console.log('No active jobs to save');
return;
}
console.log(`Saving state for ${activeJobs.size} active jobs...`);
const savePromises = Array.from(activeJobs).map(async (jobId) => {
try {
await Promise.race([
saveJobState(jobId),
new Promise<never>((_, reject) => {
setTimeout(() => reject(new Error(`Timeout saving job ${jobId}`)), JOB_SAVE_TIMEOUT);
})
]);
} catch (error) {
console.error(`Failed to save job ${jobId} within timeout:`, error);
// Continue with other jobs even if one fails
}
});
await Promise.allSettled(savePromises);
console.log('✅ Completed saving all active jobs');
}
/**
* Execute all registered shutdown callbacks
*/
async function executeShutdownCallbacks(): Promise<void> {
if (shutdownCallbacks.length === 0) {
console.log('No shutdown callbacks to execute');
return;
}
console.log(`Executing ${shutdownCallbacks.length} shutdown callbacks...`);
const callbackPromises = shutdownCallbacks.map(async (callback, index) => {
try {
await callback();
console.log(`✅ Shutdown callback ${index + 1} completed`);
} catch (error) {
console.error(`❌ Shutdown callback ${index + 1} failed:`, error);
// Continue with other callbacks even if one fails
}
});
await Promise.allSettled(callbackPromises);
console.log('✅ Completed all shutdown callbacks');
}
/**
* Perform graceful shutdown of the application
*/
export async function gracefulShutdown(signal: string = 'UNKNOWN'): Promise<void> {
if (shutdownInProgress) {
console.log('⚠️ Shutdown already in progress, ignoring additional signal');
return;
}
shutdownInProgress = true;
shutdownStartTime = new Date();
console.log(`\n🛑 Graceful shutdown initiated by signal: ${signal}`);
console.log(`📊 Shutdown status: ${activeJobs.size} active jobs, ${shutdownCallbacks.length} callbacks`);
// Set up shutdown timeout
shutdownTimeout = setTimeout(() => {
console.error(`❌ Shutdown timeout reached (${SHUTDOWN_TIMEOUT}ms), forcing exit`);
process.exit(1);
}, SHUTDOWN_TIMEOUT);
try {
// Step 1: Save all active job states
console.log('\n📝 Step 1: Saving active job states...');
await saveAllActiveJobs();
// Step 2: Execute shutdown callbacks (stop services, close connections, etc.)
console.log('\n🔧 Step 2: Executing shutdown callbacks...');
await executeShutdownCallbacks();
// Step 3: Close database connections
console.log('\n💾 Step 3: Closing database connections...');
// Note: Drizzle with bun:sqlite doesn't require explicit connection closing
// but we'll add this for completeness and future database changes
console.log('\n✅ Graceful shutdown completed successfully');
// Clear the timeout since we completed successfully
if (shutdownTimeout) {
clearTimeout(shutdownTimeout);
shutdownTimeout = null;
}
// Exit with success code
process.exit(0);
} catch (error) {
console.error('\n❌ Error during graceful shutdown:', error);
// Clear the timeout
if (shutdownTimeout) {
clearTimeout(shutdownTimeout);
shutdownTimeout = null;
}
// Exit with error code
process.exit(1);
}
}
/**
* Initialize the shutdown manager
* This should be called early in the application lifecycle
*/
export function initializeShutdownManager(): void {
console.log('🔧 Initializing shutdown manager...');
// Reset state in case of re-initialization
shutdownInProgress = false;
shutdownStartTime = null;
activeJobs.clear();
shutdownCallbacks = []; // Reset callbacks too
// Clear any existing timeout
if (shutdownTimeout) {
clearTimeout(shutdownTimeout);
shutdownTimeout = null;
}
console.log('✅ Shutdown manager initialized');
}
/**
* Force immediate shutdown (for emergencies)
*/
export function forceShutdown(exitCode: number = 1): void {
console.error('🚨 Force shutdown requested');
if (shutdownTimeout) {
clearTimeout(shutdownTimeout);
}
process.exit(exitCode);
}
@@ -0,0 +1,141 @@
/**
* Signal Handlers for Graceful Shutdown
*
* This module sets up proper signal handling for container environments.
* It ensures the application responds correctly to SIGTERM, SIGINT, and other signals.
*/
import { gracefulShutdown, isShuttingDown } from './shutdown-manager';
// Track if signal handlers have been registered
let signalHandlersRegistered = false;
/**
* Setup signal handlers for graceful shutdown
* This should be called early in the application lifecycle
*/
export function setupSignalHandlers(): void {
if (signalHandlersRegistered) {
console.log('⚠️ Signal handlers already registered, skipping');
return;
}
console.log('🔧 Setting up signal handlers for graceful shutdown...');
// Handle SIGTERM (Docker stop, Kubernetes termination)
process.on('SIGTERM', () => {
console.log('\n📡 Received SIGTERM signal');
if (!isShuttingDown()) {
gracefulShutdown('SIGTERM').catch((error) => {
console.error('Error during SIGTERM shutdown:', error);
process.exit(1);
});
}
});
// Handle SIGINT (Ctrl+C)
process.on('SIGINT', () => {
console.log('\n📡 Received SIGINT signal');
if (!isShuttingDown()) {
gracefulShutdown('SIGINT').catch((error) => {
console.error('Error during SIGINT shutdown:', error);
process.exit(1);
});
}
});
// Handle SIGHUP (terminal hangup)
process.on('SIGHUP', () => {
console.log('\n📡 Received SIGHUP signal');
if (!isShuttingDown()) {
gracefulShutdown('SIGHUP').catch((error) => {
console.error('Error during SIGHUP shutdown:', error);
process.exit(1);
});
}
});
// Handle uncaught exceptions
process.on('uncaughtException', (error) => {
console.error('\n💥 Uncaught Exception:', error);
console.error('Stack trace:', error.stack);
if (!isShuttingDown()) {
console.log('Initiating emergency shutdown due to uncaught exception...');
gracefulShutdown('UNCAUGHT_EXCEPTION').catch((shutdownError) => {
console.error('Error during emergency shutdown:', shutdownError);
process.exit(1);
});
} else {
// If already shutting down, force exit
console.error('Uncaught exception during shutdown, forcing exit');
process.exit(1);
}
});
// Handle unhandled promise rejections
process.on('unhandledRejection', (reason, promise) => {
console.error('\n💥 Unhandled Promise Rejection at:', promise);
console.error('Reason:', reason);
if (!isShuttingDown()) {
console.log('Initiating emergency shutdown due to unhandled rejection...');
gracefulShutdown('UNHANDLED_REJECTION').catch((shutdownError) => {
console.error('Error during emergency shutdown:', shutdownError);
process.exit(1);
});
} else {
// If already shutting down, force exit
console.error('Unhandled rejection during shutdown, forcing exit');
process.exit(1);
}
});
// Handle process warnings (for debugging)
process.on('warning', (warning) => {
console.warn('⚠️ Process Warning:', warning.name);
console.warn('Message:', warning.message);
if (warning.stack) {
console.warn('Stack:', warning.stack);
}
});
signalHandlersRegistered = true;
console.log('✅ Signal handlers registered successfully');
}
/**
* Remove signal handlers (for testing)
*/
export function removeSignalHandlers(): void {
if (!signalHandlersRegistered) {
return;
}
console.log('🔧 Removing signal handlers...');
process.removeAllListeners('SIGTERM');
process.removeAllListeners('SIGINT');
process.removeAllListeners('SIGHUP');
process.removeAllListeners('uncaughtException');
process.removeAllListeners('unhandledRejection');
process.removeAllListeners('warning');
signalHandlersRegistered = false;
console.log('✅ Signal handlers removed');
}
/**
* Check if signal handlers are registered
*/
export function areSignalHandlersRegistered(): boolean {
return signalHandlersRegistered;
}
/**
* Send a test signal to the current process (for testing)
*/
export function sendTestSignal(signal: NodeJS.Signals = 'SIGTERM'): void {
console.log(`🧪 Sending test signal: ${signal}`);
process.kill(process.pid, signal);
}
@@ -0,0 +1,75 @@
import { describe, expect, it } from "bun:test";
import { normalizeOidcProviderConfig, OidcConfigError } from "./oidc-config";
const issuer = "https://auth.example.com";
describe("normalizeOidcProviderConfig", () => {
it("returns provided endpoints when complete", async () => {
const result = await normalizeOidcProviderConfig(issuer, {
clientId: "client",
clientSecret: "secret",
authorizationEndpoint: "https://auth.example.com/auth",
tokenEndpoint: "https://auth.example.com/token",
jwksEndpoint: "https://auth.example.com/jwks",
userInfoEndpoint: "https://auth.example.com/userinfo",
scopes: ["openid", "email"],
pkce: false,
}, async () => {
throw new Error("fetch should not be called when endpoints are provided");
});
expect(result.oidcConfig.authorizationEndpoint).toBe("https://auth.example.com/auth");
expect(result.oidcConfig.tokenEndpoint).toBe("https://auth.example.com/token");
expect(result.oidcConfig.jwksEndpoint).toBe("https://auth.example.com/jwks");
expect(result.oidcConfig.userInfoEndpoint).toBe("https://auth.example.com/userinfo");
expect(result.oidcConfig.scopes).toEqual(["openid", "email"]);
expect(result.oidcConfig.pkce).toBe(false);
expect(result.oidcConfig.discoveryEndpoint).toBe("https://auth.example.com/.well-known/openid-configuration");
});
it("derives missing fields from discovery", async () => {
const fetchMock = async () =>
new Response(JSON.stringify({
authorization_endpoint: "https://auth.example.com/auth",
token_endpoint: "https://auth.example.com/token",
jwks_uri: "https://auth.example.com/jwks",
userinfo_endpoint: "https://auth.example.com/userinfo",
scopes_supported: ["openid", "email", "profile"],
}));
const result = await normalizeOidcProviderConfig(issuer, {
clientId: "client",
clientSecret: "secret",
}, fetchMock);
expect(result.oidcConfig.authorizationEndpoint).toBe("https://auth.example.com/auth");
expect(result.oidcConfig.tokenEndpoint).toBe("https://auth.example.com/token");
expect(result.oidcConfig.jwksEndpoint).toBe("https://auth.example.com/jwks");
expect(result.oidcConfig.userInfoEndpoint).toBe("https://auth.example.com/userinfo");
expect(result.oidcConfig.scopes).toEqual(["openid", "email", "profile"]);
expect(result.oidcConfig.discoveryEndpoint).toBe("https://auth.example.com/.well-known/openid-configuration");
});
it("preserves trailing slash issuers when building discovery endpoints", async () => {
const trailingIssuer = "https://auth.example.com/application/o/example/";
const requestedUrls: string[] = [];
const fetchMock: typeof fetch = async (url) => {
requestedUrls.push(typeof url === "string" ? url : url.url);
return new Response(JSON.stringify({
authorization_endpoint: "https://auth.example.com/application/o/example/auth",
token_endpoint: "https://auth.example.com/application/o/example/token",
}));
};
const result = await normalizeOidcProviderConfig(trailingIssuer, {}, fetchMock);
expect(requestedUrls[0]).toBe("https://auth.example.com/application/o/example/.well-known/openid-configuration");
expect(result.oidcConfig.discoveryEndpoint).toBe("https://auth.example.com/application/o/example/.well-known/openid-configuration");
});
it("throws for invalid issuer URL", async () => {
await expect(
normalizeOidcProviderConfig("not-a-url", {}),
).rejects.toBeInstanceOf(OidcConfigError);
});
});
@@ -0,0 +1,205 @@
import { z } from "zod";
const DEFAULT_SCOPES = ["openid", "email", "profile"] as const;
const DISCOVERY_TIMEOUT_MS = 10000;
const discoverySchema = z.object({
issuer: z.string().url().optional(),
authorization_endpoint: z.string().url().optional(),
token_endpoint: z.string().url().optional(),
userinfo_endpoint: z.string().url().optional(),
jwks_uri: z.string().url().optional(),
scopes_supported: z.array(z.string()).optional(),
});
export class OidcConfigError extends Error {
constructor(message: string) {
super(message);
this.name = "OidcConfigError";
}
}
export type RawOidcConfig = {
clientId?: string;
clientSecret?: string;
authorizationEndpoint?: string;
tokenEndpoint?: string;
jwksEndpoint?: string;
userInfoEndpoint?: string;
discoveryEndpoint?: string;
scopes?: string[];
pkce?: boolean;
mapping?: ProviderMapping;
};
export type ProviderMapping = {
id: string;
email: string;
emailVerified?: string;
name?: string;
image?: string;
firstName?: string;
lastName?: string;
};
export type NormalizedOidcConfig = {
oidcConfig: {
clientId?: string;
clientSecret?: string;
authorizationEndpoint: string;
tokenEndpoint: string;
jwksEndpoint?: string;
userInfoEndpoint?: string;
discoveryEndpoint: string;
scopes: string[];
pkce: boolean;
};
mapping: ProviderMapping;
};
type FetchFn = typeof fetch;
function cleanUrl(value: string | undefined, field: string): string | undefined {
if (!value || typeof value !== "string") return undefined;
const trimmed = value.trim();
if (!trimmed) return undefined;
try {
return new URL(trimmed).toString();
} catch {
throw new OidcConfigError(`Invalid ${field} URL: ${value}`);
}
}
function sanitizeScopes(scopes: string[] | undefined, fallback: readonly string[]): string[] {
const candidates = Array.isArray(scopes) ? scopes : [];
const sanitized = candidates
.map(scope => scope?.trim())
.filter((scope): scope is string => Boolean(scope));
if (sanitized.length === 0) {
return [...fallback];
}
return Array.from(new Set(sanitized));
}
async function fetchDiscoveryDocument(url: string, fetchFn: FetchFn): Promise<z.infer<typeof discoverySchema>> {
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), DISCOVERY_TIMEOUT_MS);
try {
const response = await fetchFn(url, {
signal: controller.signal,
headers: { Accept: "application/json" },
});
if (!response.ok) {
throw new OidcConfigError(`OIDC discovery request failed (${response.status} ${response.statusText})`);
}
let payload: unknown;
try {
payload = await response.json();
} catch {
throw new OidcConfigError("OIDC discovery response is not valid JSON");
}
const parsed = discoverySchema.parse(payload);
if (!parsed.authorization_endpoint || !parsed.token_endpoint) {
throw new OidcConfigError("OIDC discovery document is missing required endpoints");
}
return parsed;
} catch (error) {
if (error instanceof OidcConfigError) {
throw error;
}
if (error instanceof Error && error.name === "AbortError") {
throw new OidcConfigError(`OIDC discovery timed out after ${DISCOVERY_TIMEOUT_MS / 1000}s`);
}
throw new OidcConfigError(`Failed to fetch OIDC discovery document: ${error instanceof Error ? error.message : "unknown error"}`);
} finally {
clearTimeout(timeoutId);
}
}
export async function normalizeOidcProviderConfig(
issuer: string,
rawConfig: RawOidcConfig,
fetchFn: FetchFn = fetch,
): Promise<NormalizedOidcConfig> {
if (!issuer || typeof issuer !== "string") {
throw new OidcConfigError("Issuer is required");
}
const trimmedIssuer = issuer.trim();
try {
// Validate issuer but keep caller-provided formatting so we don't break provider expectations
new URL(trimmedIssuer);
} catch {
throw new OidcConfigError(`Invalid issuer URL: ${issuer}`);
}
const issuerForDiscovery = trimmedIssuer.replace(/\/$/, "");
const discoveryEndpoint = cleanUrl(
rawConfig.discoveryEndpoint,
"discovery endpoint",
) ?? `${issuerForDiscovery}/.well-known/openid-configuration`;
const authorizationEndpoint = cleanUrl(rawConfig.authorizationEndpoint, "authorization endpoint");
const tokenEndpoint = cleanUrl(rawConfig.tokenEndpoint, "token endpoint");
const jwksEndpoint = cleanUrl(rawConfig.jwksEndpoint, "JWKS endpoint");
const userInfoEndpoint = cleanUrl(rawConfig.userInfoEndpoint, "userinfo endpoint");
const providedScopes = Array.isArray(rawConfig.scopes) ? rawConfig.scopes : undefined;
let scopes = sanitizeScopes(providedScopes, DEFAULT_SCOPES);
const shouldFetchDiscovery =
!authorizationEndpoint ||
!tokenEndpoint ||
!jwksEndpoint ||
!userInfoEndpoint ||
!providedScopes ||
providedScopes.length === 0;
let resolvedAuthorization = authorizationEndpoint;
let resolvedToken = tokenEndpoint;
let resolvedJwks = jwksEndpoint;
let resolvedUserInfo = userInfoEndpoint;
if (shouldFetchDiscovery) {
const discovery = await fetchDiscoveryDocument(discoveryEndpoint, fetchFn);
resolvedAuthorization = resolvedAuthorization ?? discovery.authorization_endpoint;
resolvedToken = resolvedToken ?? discovery.token_endpoint;
resolvedJwks = resolvedJwks ?? discovery.jwks_uri;
resolvedUserInfo = resolvedUserInfo ?? discovery.userinfo_endpoint;
if (!providedScopes || providedScopes.length === 0) {
scopes = sanitizeScopes(discovery.scopes_supported, DEFAULT_SCOPES);
}
}
if (!resolvedAuthorization || !resolvedToken) {
throw new OidcConfigError("OIDC configuration must include authorization and token endpoints");
}
return {
oidcConfig: {
clientId: rawConfig.clientId,
clientSecret: rawConfig.clientSecret,
authorizationEndpoint: resolvedAuthorization,
tokenEndpoint: resolvedToken,
jwksEndpoint: resolvedJwks,
userInfoEndpoint: resolvedUserInfo,
discoveryEndpoint,
scopes,
pkce: rawConfig.pkce !== false,
},
mapping: rawConfig.mapping ?? {
id: "sub",
email: "email",
emailVerified: "email_verified",
name: "name",
image: "picture",
},
};
}
@@ -0,0 +1,290 @@
/**
* Enhanced handler for starred repositories with improved error handling
*/
import type { Config, Repository } from "./db/schema";
import { Octokit } from "@octokit/rest";
import { processWithRetry } from "./utils/concurrency";
import {
getOrCreateGiteaOrgEnhanced,
getGiteaRepoInfo,
handleExistingNonMirrorRepo,
createOrganizationsSequentially
} from "./gitea-enhanced";
import { mirrorGithubRepoToGitea } from "./gitea";
import { getMirrorStrategyConfig } from "./utils/mirror-strategies";
import { createMirrorJob } from "./helpers";
/**
* Process starred repositories with enhanced error handling
*/
export async function processStarredRepositories({
config,
repositories,
octokit,
}: {
config: Config;
repositories: Repository[];
octokit: Octokit;
}): Promise<void> {
if (!config.userId) {
throw new Error("User ID is required");
}
const strategyConfig = getMirrorStrategyConfig();
console.log(`Processing ${repositories.length} starred repositories`);
console.log(`Using strategy config:`, strategyConfig);
// Step 1: Pre-create organizations to avoid race conditions
if (strategyConfig.sequentialOrgCreation) {
await preCreateOrganizations({ config, repositories });
}
// Step 2: Process repositories with enhanced error handling
await processWithRetry(
repositories,
async (repository) => {
try {
await processStarredRepository({
config,
repository,
octokit,
strategyConfig,
});
return repository;
} catch (error) {
console.error(`Failed to process starred repository ${repository.name}:`, error);
throw error;
}
},
{
concurrencyLimit: strategyConfig.repoBatchSize,
maxRetries: 2,
retryDelay: 2000,
onProgress: (completed, total, result) => {
const percentComplete = Math.round((completed / total) * 100);
if (result) {
console.log(
`Processed starred repository "${result.name}" (${completed}/${total}, ${percentComplete}%)`
);
}
},
onRetry: (repo, error, attempt) => {
console.log(
`Retrying starred repository ${repo.name} (attempt ${attempt}): ${error.message}`
);
},
}
);
}
/**
* Pre-create all required organizations sequentially
*/
async function preCreateOrganizations({
config,
repositories,
}: {
config: Config;
repositories: Repository[];
}): Promise<void> {
// Get unique organization names
const orgNames = new Set<string>();
// Add starred repos org
if (config.githubConfig?.starredReposOrg) {
orgNames.add(config.githubConfig.starredReposOrg);
} else {
orgNames.add("starred");
}
// Add any other organizations based on mirror strategy
for (const repo of repositories) {
if (repo.destinationOrg) {
orgNames.add(repo.destinationOrg);
}
}
console.log(`Pre-creating ${orgNames.size} organizations sequentially`);
// Create organizations sequentially
await createOrganizationsSequentially({
config,
orgNames: Array.from(orgNames),
});
}
/**
* Process a single starred repository with enhanced error handling
*/
async function processStarredRepository({
config,
repository,
octokit,
strategyConfig,
}: {
config: Config;
repository: Repository;
octokit: Octokit;
strategyConfig: ReturnType<typeof getMirrorStrategyConfig>;
}): Promise<void> {
const starredOrg = config.githubConfig?.starredReposOrg || "starred";
// Check if repository exists in Gitea
const existingRepo = await getGiteaRepoInfo({
config,
owner: starredOrg,
repoName: repository.name,
});
if (existingRepo) {
if (existingRepo.mirror) {
console.log(`Starred repository ${repository.name} already exists as a mirror`);
// Update database status
const { db, repositories: reposTable } = await import("./db");
const { eq } = await import("drizzle-orm");
const { repoStatusEnum } = await import("@/types/Repository");
await db
.update(reposTable)
.set({
status: repoStatusEnum.parse("mirrored"),
updatedAt: new Date(),
lastMirrored: new Date(),
errorMessage: null,
mirroredLocation: `${starredOrg}/${repository.name}`,
})
.where(eq(reposTable.id, repository.id!));
return;
} else {
// Repository exists but is not a mirror
console.warn(`Starred repository ${repository.name} exists but is not a mirror`);
await handleExistingNonMirrorRepo({
config,
repository,
repoInfo: existingRepo,
strategy: strategyConfig.nonMirrorStrategy,
});
// If we deleted it, continue to create the mirror
if (strategyConfig.nonMirrorStrategy !== "delete") {
return; // Skip if we're not deleting
}
}
}
// Create the mirror
try {
await mirrorGithubRepoToGitea({
octokit,
repository,
config,
});
} catch (error) {
// Enhanced error handling for specific scenarios
if (error instanceof Error) {
const errorMessage = error.message.toLowerCase();
if (errorMessage.includes("already exists")) {
// Handle race condition where repo was created by another process
console.log(`Repository ${repository.name} was created by another process`);
// Check if it's a mirror now
const recheck = await getGiteaRepoInfo({
config,
owner: starredOrg,
repoName: repository.name,
});
if (recheck && recheck.mirror) {
// It's now a mirror, update database
const { db, repositories: reposTable } = await import("./db");
const { eq } = await import("drizzle-orm");
const { repoStatusEnum } = await import("@/types/Repository");
await db
.update(reposTable)
.set({
status: repoStatusEnum.parse("mirrored"),
updatedAt: new Date(),
lastMirrored: new Date(),
errorMessage: null,
mirroredLocation: `${starredOrg}/${repository.name}`,
})
.where(eq(reposTable.id, repository.id!));
return;
}
}
}
throw error;
}
}
/**
* Sync all starred repositories
*/
export async function syncStarredRepositories({
config,
repositories,
}: {
config: Config;
repositories: Repository[];
}): Promise<void> {
const strategyConfig = getMirrorStrategyConfig();
console.log(`Syncing ${repositories.length} starred repositories`);
await processWithRetry(
repositories,
async (repository) => {
try {
// Import syncGiteaRepo
const { syncGiteaRepo } = await import("./gitea");
await syncGiteaRepo({
config,
repository,
});
return repository;
} catch (error) {
if (error instanceof Error && error.message.includes("not a mirror")) {
console.warn(`Repository ${repository.name} is not a mirror, handling...`);
const starredOrg = config.githubConfig?.starredReposOrg || "starred";
const repoInfo = await getGiteaRepoInfo({
config,
owner: starredOrg,
repoName: repository.name,
});
if (repoInfo) {
await handleExistingNonMirrorRepo({
config,
repository,
repoInfo,
strategy: strategyConfig.nonMirrorStrategy,
});
}
}
throw error;
}
},
{
concurrencyLimit: strategyConfig.repoBatchSize,
maxRetries: 1,
retryDelay: 1000,
onProgress: (completed, total) => {
const percentComplete = Math.round((completed / total) * 100);
console.log(`Sync progress: ${completed}/${total} (${percentComplete}%)`);
},
}
);
}
+172
View File
@@ -0,0 +1,172 @@
import { describe, test, expect } from "bun:test";
import { jsonResponse, formatDate, formatDateShort, truncate, safeParse, parseErrorMessage, showErrorToast } from "./utils";
describe("jsonResponse", () => {
test("creates a Response with JSON content", () => {
const data = { message: "Hello, world!" };
const response = jsonResponse({ data });
expect(response).toBeInstanceOf(Response);
expect(response.status).toBe(200);
expect(response.headers.get("Content-Type")).toBe("application/json");
});
test("uses the provided status code", () => {
const data = { error: "Not found" };
const response = jsonResponse({ data, status: 404 });
expect(response.status).toBe(404);
});
test("correctly serializes complex objects", async () => {
const now = new Date();
const data = {
message: "Complex object",
date: now,
nested: { foo: "bar" },
array: [1, 2, 3]
};
const response = jsonResponse({ data });
const responseBody = await response.json();
expect(responseBody).toEqual({
message: "Complex object",
date: now.toISOString(),
nested: { foo: "bar" },
array: [1, 2, 3]
});
});
});
describe("formatDate", () => {
test("formats a date object", () => {
const date = new Date("2023-01-15T12:30:45Z");
const formatted = formatDate(date);
// The exact format might depend on the locale, so we'll check for parts
expect(formatted).toContain("2023");
expect(formatted).toContain("January");
expect(formatted).toContain("15");
});
test("formats a date string", () => {
const dateStr = "2023-01-15T12:30:45Z";
const formatted = formatDate(dateStr);
expect(formatted).toContain("2023");
expect(formatted).toContain("January");
expect(formatted).toContain("15");
});
test("returns 'Never' for null or undefined", () => {
expect(formatDate(null)).toBe("Never");
expect(formatDate(undefined)).toBe("Never");
});
});
describe("formatDateShort", () => {
test("returns formatted date when input is provided", () => {
const formatted = formatDateShort("2014-10-20T15:32:10Z");
expect(formatted).toBe("Oct 20, 2014");
});
test("returns undefined when date is missing", () => {
expect(formatDateShort(null)).toBeUndefined();
expect(formatDateShort(undefined)).toBeUndefined();
});
});
describe("truncate", () => {
test("truncates a string that exceeds the length", () => {
const str = "This is a long string that needs truncation";
const truncated = truncate(str, 10);
expect(truncated).toBe("This is a ...");
expect(truncated.length).toBe(13); // 10 chars + "..."
});
test("does not truncate a string that is shorter than the length", () => {
const str = "Short";
const truncated = truncate(str, 10);
expect(truncated).toBe("Short");
});
test("handles empty strings", () => {
expect(truncate("", 10)).toBe("");
});
});
describe("safeParse", () => {
test("parses valid JSON strings", () => {
const jsonStr = '{"name":"John","age":30}';
const parsed = safeParse(jsonStr);
expect(parsed).toEqual({ name: "John", age: 30 });
});
test("returns undefined for invalid JSON strings", () => {
const invalidJson = '{"name":"John",age:30}'; // Missing quotes around age
const parsed = safeParse(invalidJson);
expect(parsed).toBeUndefined();
});
test("returns the original value for non-string inputs", () => {
const obj = { name: "John", age: 30 };
const parsed = safeParse(obj);
expect(parsed).toBe(obj);
});
});
describe("parseErrorMessage", () => {
test("parses JSON error with error and troubleshooting fields", () => {
const errorMessage = JSON.stringify({
error: "Unexpected end of JSON input",
errorType: "SyntaxError",
timestamp: "2025-05-28T09:08:02.37Z",
troubleshooting: "JSON parsing error detected. Check Gitea server status and logs. Ensure Gitea is returning valid JSON responses."
});
const result = parseErrorMessage(errorMessage);
expect(result.title).toBe("Unexpected end of JSON input");
expect(result.description).toBe("JSON parsing error detected. Check Gitea server status and logs. Ensure Gitea is returning valid JSON responses.");
expect(result.isStructured).toBe(true);
});
test("parses JSON error with title and description fields", () => {
const errorMessage = JSON.stringify({
title: "Connection Failed",
description: "Unable to connect to the server. Please check your network connection."
});
const result = parseErrorMessage(errorMessage);
expect(result.title).toBe("Connection Failed");
expect(result.description).toBe("Unable to connect to the server. Please check your network connection.");
expect(result.isStructured).toBe(true);
});
test("handles plain string error messages", () => {
const errorMessage = "Simple error message";
const result = parseErrorMessage(errorMessage);
expect(result.title).toBe("Simple error message");
expect(result.description).toBeUndefined();
expect(result.isStructured).toBe(false);
});
test("handles Error objects", () => {
const error = new Error("Something went wrong");
const result = parseErrorMessage(error);
expect(result.title).toBe("Something went wrong");
expect(result.description).toBeUndefined();
expect(result.isStructured).toBe(false);
});
});
+340
View File
@@ -0,0 +1,340 @@
import { clsx, type ClassValue } from "clsx";
import { twMerge } from "tailwind-merge";
import { httpRequest, HttpError } from "@/lib/http-client";
import type { RepoStatus } from "@/types/Repository";
export const API_BASE = "/api";
export function cn(...inputs: ClassValue[]) {
return twMerge(clsx(inputs));
}
export function generateRandomString(length: number): string {
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
let result = '';
for (let i = 0; i < length; i++) {
result += chars.charAt(Math.floor(Math.random() * chars.length));
}
return result;
}
export function formatDate(date?: Date | string | null): string {
if (!date) return "Never";
return new Intl.DateTimeFormat("en-US", {
year: "numeric",
month: "long",
day: "numeric",
hour: "2-digit",
minute: "2-digit",
}).format(new Date(date));
}
export function formatDateShort(date?: Date | string | null): string | undefined {
if (!date) return undefined;
return new Intl.DateTimeFormat("en-US", {
year: "numeric",
month: "short",
day: "numeric",
}).format(new Date(date));
}
export function formatLastSyncTime(date: Date | string | null): string {
if (!date) return "Never";
const now = new Date();
const syncDate = new Date(date);
const diffMs = now.getTime() - syncDate.getTime();
const diffMins = Math.floor(diffMs / 60000);
const diffHours = Math.floor(diffMs / 3600000);
const diffDays = Math.floor(diffMs / 86400000);
// Show relative time for recent syncs
if (diffMins < 1) return "Just now";
if (diffMins < 60) return `${diffMins} min ago`;
if (diffHours < 24) return `${diffHours} hr${diffHours === 1 ? '' : 's'} ago`;
if (diffDays < 7) return `${diffDays} day${diffDays === 1 ? '' : 's'} ago`;
// For older syncs, show week count
const diffWeeks = Math.floor(diffDays / 7);
if (diffWeeks < 4) return `${diffWeeks} week${diffWeeks === 1 ? '' : 's'} ago`;
// For even older, show month count
const diffMonths = Math.floor(diffDays / 30);
return `${diffMonths} month${diffMonths === 1 ? '' : 's'} ago`;
}
export function truncate(str: string, length: number): string {
if (str.length <= length) return str;
return str.slice(0, length) + "...";
}
export function safeParse<T>(value: unknown): T | undefined {
if (typeof value === "string") {
try {
return JSON.parse(value) as T;
} catch {
return undefined;
}
}
return value as T;
}
// Enhanced error message parsing for toast notifications
export interface ParsedErrorMessage {
title: string;
description?: string;
isStructured: boolean;
}
export function parseErrorMessage(error: unknown): ParsedErrorMessage {
// Handle Error objects
if (error instanceof Error) {
return parseErrorMessage(error.message);
}
// Handle string messages
if (typeof error === "string") {
// Try to parse as JSON first
try {
const parsed = JSON.parse(error);
// Check for common structured error formats
if (typeof parsed === "object" && parsed !== null) {
// Format 1: { error: "message", errorType: "type", troubleshooting: "info" }
if (parsed.error) {
return {
title: parsed.error,
description: parsed.troubleshooting || parsed.errorType || undefined,
isStructured: true,
};
}
// Format 2: { title: "title", description: "desc" }
if (parsed.title) {
return {
title: parsed.title,
description: parsed.description || undefined,
isStructured: true,
};
}
// Format 3: { message: "msg", details: "details" }
if (parsed.message) {
return {
title: parsed.message,
description: parsed.details || undefined,
isStructured: true,
};
}
}
} catch {
// Not valid JSON, treat as plain string
}
// Plain string message
return {
title: error,
description: undefined,
isStructured: false,
};
}
// Handle objects directly
if (typeof error === "object" && error !== null) {
const errorObj = error as any;
if (errorObj.error) {
return {
title: errorObj.error,
description: errorObj.troubleshooting || errorObj.errorType || undefined,
isStructured: true,
};
}
if (errorObj.title) {
return {
title: errorObj.title,
description: errorObj.description || undefined,
isStructured: true,
};
}
if (errorObj.message) {
return {
title: errorObj.message,
description: errorObj.details || undefined,
isStructured: true,
};
}
}
// Fallback for unknown types
return {
title: String(error),
description: undefined,
isStructured: false,
};
}
// Enhanced toast helper that parses structured error messages
export function showErrorToast(error: unknown, toast: any) {
const parsed = parseErrorMessage(error);
if (parsed.description) {
// Use sonner's rich toast format with title and description
toast.error(parsed.title, {
description: parsed.description,
});
} else {
// Simple error toast
toast.error(parsed.title);
}
}
// Helper function for API requests
export async function apiRequest<T>(
endpoint: string,
options: (RequestInit & { data?: any }) = {}
): Promise<T> {
try {
// Handle the custom 'data' property by converting it to 'body'
const { data, ...requestOptions } = options;
const finalOptions: RequestInit = {
headers: {
"Content-Type": "application/json",
...(requestOptions.headers || {}),
},
...requestOptions,
};
// If data is provided, stringify it and set as body
if (data !== undefined) {
finalOptions.body = JSON.stringify(data);
}
const response = await httpRequest<T>(`${API_BASE}${endpoint}`, finalOptions);
return response.data;
} catch (err) {
const error = err as HttpError;
const message =
error.response ||
error.message ||
"An unknown error occurred";
throw new Error(message);
}
}
export const getStatusColor = (status: string): string => {
switch (status) {
case "imported":
return "bg-yellow-500"; // Ready to mirror
case "mirroring":
return "bg-amber-500"; // In progress
case "mirrored":
return "bg-green-500"; // Successfully mirrored
case "failed":
return "bg-red-500"; // Error
case "syncing":
return "bg-blue-500"; // Sync in progress
case "synced":
return "bg-emerald-500"; // Successfully synced
case "skipped":
return "bg-gray-500"; // Skipped
case "deleting":
return "bg-orange-500"; // Deleting
case "deleted":
return "bg-gray-600"; // Deleted
default:
return "bg-gray-400"; // Unknown/neutral
}
};
export const jsonResponse = ({
data,
status = 200,
}: {
data: unknown;
status?: number;
}): Response => {
return new Response(JSON.stringify(data), {
status,
headers: { "Content-Type": "application/json" },
});
};
/**
* Securely handles errors for API responses by sanitizing error messages
* and preventing sensitive information exposure while maintaining proper logging
*/
export function createSecureErrorResponse(
error: unknown,
context: string,
status: number = 500
): Response {
// Log the full error details server-side for debugging
console.error(`Error in ${context}:`, error);
// Log additional error details if it's an Error object
if (error instanceof Error) {
console.error(`Error name: ${error.name}`);
console.error(`Error message: ${error.message}`);
if (error.stack) {
console.error(`Error stack: ${error.stack}`);
}
}
// Determine safe error message for client
let clientMessage = "An internal server error occurred";
// Only expose specific safe error types to clients
if (error instanceof Error) {
// Safe error patterns that can be exposed (add more as needed)
const safeErrorPatterns = [
/missing required field/i,
/invalid.*format/i,
/not found/i,
/unauthorized/i,
/forbidden/i,
/bad request/i,
/validation.*failed/i,
/user id is required/i,
/no repositories found/i,
/config missing/i,
/invalid userid/i,
/no users found/i,
/missing userid/i,
/github token is required/i,
/invalid github token/i,
/invalid gitea token/i,
/username and password are required/i,
/invalid username or password/i,
/organization already exists/i,
/no configuration found/i,
/github token is missing/i,
/use post method/i,
];
const isSafeError = safeErrorPatterns.some(pattern =>
pattern.test(error.message)
);
if (isSafeError) {
clientMessage = error.message;
}
}
return new Response(
JSON.stringify({
error: clientMessage,
timestamp: new Date().toISOString(),
}),
{
status,
headers: { "Content-Type": "application/json" },
}
);
}
@@ -0,0 +1,58 @@
import type { APIRoute, APIContext } from "astro";
import { auth } from "@/lib/auth";
/**
* Get authenticated user from request
* @param request - The request object from Astro API route
* @returns The authenticated user or null if not authenticated
*/
export async function getAuthenticatedUser(request: Request) {
try {
const session = await auth.api.getSession({
headers: request.headers,
});
return session ? session.user : null;
} catch (error) {
console.error("Error getting session:", error);
return null;
}
}
/**
* Require authentication for API routes
* Returns an error response if user is not authenticated
* @param context - The API context from Astro
* @returns Object with user if authenticated, or error response if not
*/
export async function requireAuth(context: APIContext) {
const user = await getAuthenticatedUser(context.request);
if (!user) {
return {
user: null,
response: new Response(
JSON.stringify({
success: false,
error: "Unauthorized - Please log in",
}),
{
status: 401,
headers: { "Content-Type": "application/json" },
}
),
};
}
return { user, response: null };
}
/**
* Get user ID from authenticated session
* @param request - The request object from Astro API route
* @returns The user ID or null if not authenticated
*/
export async function getAuthenticatedUserId(request: Request): Promise<string | null> {
const user = await getAuthenticatedUser(request);
return user?.id || null;
}
@@ -0,0 +1,167 @@
import { describe, test, expect, mock } from "bun:test";
import { processInParallel, processWithRetry } from "./concurrency";
describe("processInParallel", () => {
test("processes items in parallel with concurrency control", async () => {
// Create an array of numbers to process
const items = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
// Create a mock function to track execution
const processItem = mock(async (item: number) => {
// Simulate async work
await new Promise(resolve => setTimeout(resolve, 10));
return item * 2;
});
// Create a mock progress callback
const onProgress = mock((completed: number, total: number, result?: number) => {
// Progress tracking
});
// Process the items with a concurrency limit of 3
const results = await processInParallel(
items,
processItem,
3,
onProgress
);
// Verify results
expect(results).toEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
// Verify that processItem was called for each item
expect(processItem).toHaveBeenCalledTimes(10);
// Verify that onProgress was called for each item
expect(onProgress).toHaveBeenCalledTimes(10);
// Verify the last call to onProgress had the correct completed/total values
expect(onProgress.mock.calls[9][0]).toBe(10); // completed
expect(onProgress.mock.calls[9][1]).toBe(10); // total
});
test("handles errors in processing", async () => {
// Create an array of numbers to process
const items = [1, 2, 3, 4, 5];
// Create a mock function that throws an error for item 3
const processItem = mock(async (item: number) => {
if (item === 3) {
throw new Error("Test error");
}
return item * 2;
});
// Create a spy for console.error
const originalConsoleError = console.error;
const consoleErrorMock = mock(() => {});
console.error = consoleErrorMock;
try {
// Process the items
const results = await processInParallel(items, processItem);
// Verify results (should have 4 items, missing the one that errored)
expect(results).toEqual([2, 4, 8, 10]);
// Verify that processItem was called for each item
expect(processItem).toHaveBeenCalledTimes(5);
// Verify that console.error was called (enhanced logging calls it multiple times)
expect(consoleErrorMock).toHaveBeenCalled();
} finally {
// Restore console.error
console.error = originalConsoleError;
}
});
});
describe("processWithRetry", () => {
test("retries failed operations", async () => {
// Create an array of numbers to process
const items = [1, 2, 3];
// Create a counter to track retry attempts
const attemptCounts: Record<number, number> = { 1: 0, 2: 0, 3: 0 };
// Create a mock function that fails on first attempt for item 2
const processItem = mock(async (item: number) => {
attemptCounts[item]++;
if (item === 2 && attemptCounts[item] === 1) {
throw new Error("Temporary error");
}
return item * 2;
});
// Create a mock for the onRetry callback
const onRetry = mock((item: number, error: Error, attempt: number) => {
// Retry tracking
});
// Process the items with retry
const results = await processWithRetry(items, processItem, {
maxRetries: 2,
retryDelay: 10,
onRetry,
});
// Verify results
expect(results).toEqual([2, 4, 6]);
// Verify that item 2 was retried once
expect(attemptCounts[1]).toBe(1); // No retries
expect(attemptCounts[2]).toBe(2); // One retry
expect(attemptCounts[3]).toBe(1); // No retries
// Verify that onRetry was called once
expect(onRetry).toHaveBeenCalledTimes(1);
expect(onRetry.mock.calls[0][0]).toBe(2); // item
expect(onRetry.mock.calls[0][2]).toBe(1); // attempt
});
test("gives up after max retries", async () => {
// Create an array of numbers to process
const items = [1, 2];
// Create a mock function that always fails for item 2
const processItem = mock(async (item: number) => {
if (item === 2) {
throw new Error("Persistent error");
}
return item * 2;
});
// Create a mock for the onRetry callback
const onRetry = mock((item: number, error: Error, attempt: number) => {
// Retry tracking
});
// Create a spy for console.error
const originalConsoleError = console.error;
const consoleErrorMock = mock(() => {});
console.error = consoleErrorMock;
try {
// Process the items with retry
const results = await processWithRetry(items, processItem, {
maxRetries: 2,
retryDelay: 10,
onRetry,
});
// Verify results (should have 1 item, missing the one that errored)
expect(results).toEqual([2]);
// Verify that onRetry was called twice (for 2 retry attempts)
expect(onRetry).toHaveBeenCalledTimes(2);
// Verify that console.error was called (enhanced logging calls it multiple times)
expect(consoleErrorMock).toHaveBeenCalled();
} finally {
// Restore console.error
console.error = originalConsoleError;
}
});
});
@@ -0,0 +1,365 @@
/**
* Utility for processing items in parallel with concurrency control
*
* @param items Array of items to process
* @param processItem Function to process each item
* @param concurrencyLimit Maximum number of concurrent operations
* @param onProgress Optional callback for progress updates
* @returns Promise that resolves when all items are processed
*/
export async function processInParallel<T, R>(
items: T[],
processItem: (item: T) => Promise<R>,
concurrencyLimit: number = 5, // Safe default for GitHub API (max 100 concurrent, but 5-10 recommended)
onProgress?: (completed: number, total: number, result?: R) => void
): Promise<R[]> {
const results: R[] = [];
let completed = 0;
const total = items.length;
// Process items in batches to control concurrency
for (let i = 0; i < total; i += concurrencyLimit) {
const batch = items.slice(i, i + concurrencyLimit);
const batchPromises = batch.map(async (item) => {
try {
const result = await processItem(item);
completed++;
if (onProgress) {
onProgress(completed, total, result);
}
return result;
} catch (error) {
completed++;
if (onProgress) {
onProgress(completed, total);
}
throw error;
}
});
// Wait for the current batch to complete before starting the next batch
const batchResults = await Promise.allSettled(batchPromises);
// Process results and handle errors
for (let j = 0; j < batchResults.length; j++) {
const result = batchResults[j];
if (result.status === 'fulfilled') {
results.push(result.value);
} else {
const itemIndex = i + j;
console.error("=== BATCH ITEM PROCESSING ERROR ===");
console.error("Batch index:", Math.floor(i / concurrencyLimit));
console.error("Item index in batch:", j);
console.error("Global item index:", itemIndex);
console.error("Error type:", result.reason?.constructor?.name);
console.error("Error message:", result.reason instanceof Error ? result.reason.message : String(result.reason));
if (result.reason instanceof Error && result.reason.message.includes('JSON')) {
console.error("🚨 JSON parsing error in batch processing");
console.error("This indicates an API response issue from Gitea");
}
console.error("==================================");
}
}
}
return results;
}
/**
* Utility for processing items in parallel with automatic retry for failed operations
*
* @param items Array of items to process
* @param processItem Function to process each item
* @param options Configuration options
* @returns Promise that resolves when all items are processed
*/
export async function processWithRetry<T, R>(
items: T[],
processItem: (item: T) => Promise<R>,
options: {
concurrencyLimit?: number;
maxRetries?: number;
retryDelay?: number;
onProgress?: (completed: number, total: number, result?: R) => void;
onRetry?: (item: T, error: Error, attempt: number) => void;
jobId?: string; // Optional job ID for checkpointing
getItemId?: (item: T) => string; // Function to get a unique ID for each item
onCheckpoint?: (jobId: string, completedItemId: string) => Promise<void>; // Callback for checkpointing
checkpointInterval?: number; // How many items to process before checkpointing
} = {}
): Promise<R[]> {
const {
concurrencyLimit = 5,
maxRetries = 3,
retryDelay = 1000,
onProgress,
onRetry,
jobId,
getItemId,
onCheckpoint,
checkpointInterval = 1 // Default to checkpointing after each item
} = options;
// Track checkpoint counter
let itemsProcessedSinceLastCheckpoint = 0;
// Wrap the process function with retry logic
const processWithRetryLogic = async (item: T): Promise<R> => {
let lastError: Error | null = null;
for (let attempt = 1; attempt <= maxRetries + 1; attempt++) {
try {
// Check for shutdown before processing each item (only in production)
try {
const { isShuttingDown } = await import('@/lib/shutdown-manager');
if (isShuttingDown()) {
throw new Error('Processing interrupted by application shutdown');
}
} catch (importError) {
// Ignore import errors during testing
}
const result = await processItem(item);
// Handle checkpointing if enabled
if (jobId && getItemId && onCheckpoint) {
const itemId = getItemId(item);
itemsProcessedSinceLastCheckpoint++;
// Checkpoint based on the interval
if (itemsProcessedSinceLastCheckpoint >= checkpointInterval) {
await onCheckpoint(jobId, itemId);
itemsProcessedSinceLastCheckpoint = 0;
}
}
return result;
} catch (error) {
lastError = error instanceof Error ? error : new Error(String(error));
if (attempt <= maxRetries) {
if (onRetry) {
onRetry(item, lastError, attempt);
}
// Exponential backoff
const delay = retryDelay * Math.pow(2, attempt - 1);
await new Promise(resolve => setTimeout(resolve, delay));
} else {
// Enhanced error logging for final failure
console.error("=== ITEM PROCESSING FAILED (MAX RETRIES EXCEEDED) ===");
console.error("Item:", getItemId ? getItemId(item) : 'unknown');
console.error("Error type:", lastError.constructor.name);
console.error("Error message:", lastError.message);
console.error("Attempts made:", maxRetries + 1);
if (lastError.message.includes('JSON')) {
console.error("🚨 JSON-related error detected in item processing");
console.error("This suggests an issue with API responses from Gitea");
}
console.error("Stack trace:", lastError.stack);
console.error("================================================");
throw lastError;
}
}
}
// This should never be reached due to the throw in the catch block
throw lastError || new Error('Unknown error occurred');
};
const results = await processInParallel(
items,
processWithRetryLogic,
concurrencyLimit,
onProgress
);
// Final checkpoint if there are remaining items since the last checkpoint
if (jobId && getItemId && onCheckpoint && itemsProcessedSinceLastCheckpoint > 0) {
// We don't have a specific item ID for the final checkpoint, so we'll use a placeholder
await onCheckpoint(jobId, 'final');
}
return results;
}
/**
* Process items in parallel with resilience to container restarts
* This version supports resuming from a previous checkpoint
*/
export async function processWithResilience<T, R>(
items: T[],
processItem: (item: T) => Promise<R>,
options: {
concurrencyLimit?: number;
maxRetries?: number;
retryDelay?: number;
onProgress?: (completed: number, total: number, result?: R) => void;
onRetry?: (item: T, error: Error, attempt: number) => void;
userId: string; // Required for creating mirror jobs
jobType: "mirror" | "sync" | "retry";
getItemId: (item: T) => string; // Required function to get a unique ID for each item
getItemName: (item: T) => string; // Required function to get a display name for each item
checkpointInterval?: number;
resumeFromJobId?: string; // Optional job ID to resume from
}
): Promise<R[]> {
const {
userId,
jobType,
getItemId,
getItemName,
resumeFromJobId,
checkpointInterval = 10, // Increased from 5 to reduce event frequency
...otherOptions
} = options;
// Import helpers for job management and shutdown handling
const { createMirrorJob, updateMirrorJobProgress } = await import('@/lib/helpers');
// Import shutdown manager (with fallback for testing)
let registerActiveJob: (jobId: string) => void = () => {};
let unregisterActiveJob: (jobId: string) => void = () => {};
let isShuttingDown: () => boolean = () => false;
try {
const shutdownManager = await import('@/lib/shutdown-manager');
registerActiveJob = shutdownManager.registerActiveJob;
unregisterActiveJob = shutdownManager.unregisterActiveJob;
isShuttingDown = shutdownManager.isShuttingDown;
} catch (importError) {
// Use fallback functions during testing
console.log('Using fallback shutdown manager functions (testing mode)');
}
// Get item IDs for all items
const allItemIds = items.map(getItemId);
// Create or resume a job
let jobId: string;
let completedItemIds: string[] = [];
let itemsToProcess = [...items];
if (resumeFromJobId) {
// We're resuming an existing job
jobId = resumeFromJobId;
// Get the job from the database to find completed items
const { db, mirrorJobs } = await import('@/lib/db');
const { eq } = await import('drizzle-orm');
const [job] = await db
.select()
.from(mirrorJobs)
.where(eq(mirrorJobs.id, resumeFromJobId));
if (job && job.completedItemIds) {
completedItemIds = job.completedItemIds;
// Filter out already completed items
itemsToProcess = items.filter(item => !completedItemIds.includes(getItemId(item)));
console.log(`Resuming job ${jobId} with ${itemsToProcess.length} remaining items`);
// Update the job to show it's being resumed
await updateMirrorJobProgress({
jobId,
message: `Resuming job with ${itemsToProcess.length} remaining items`,
details: `Job is being resumed. ${completedItemIds.length} of ${items.length} items were already processed.`,
inProgress: true,
});
}
} else {
// Create a new job
jobId = await createMirrorJob({
userId,
message: `Started ${jobType} job with ${items.length} items`,
details: `Processing ${items.length} items in parallel with checkpointing`,
status: "mirroring",
jobType,
totalItems: items.length,
itemIds: allItemIds,
inProgress: true,
});
console.log(`Created new job ${jobId} with ${items.length} items`);
}
// Register the job with the shutdown manager
registerActiveJob(jobId);
// Define the checkpoint function
const onCheckpoint = async (jobId: string, completedItemId: string) => {
const itemName = items.find(item => getItemId(item) === completedItemId)
? getItemName(items.find(item => getItemId(item) === completedItemId)!)
: 'unknown';
await updateMirrorJobProgress({
jobId,
completedItemId,
message: `Processed item: ${itemName}`,
});
};
try {
// Check if shutdown is in progress before starting
if (isShuttingDown()) {
console.log(`⚠️ Shutdown in progress, aborting job ${jobId}`);
throw new Error('Job aborted due to application shutdown');
}
// Process the items with checkpointing
const results = await processWithRetry(
itemsToProcess,
processItem,
{
...otherOptions,
jobId,
getItemId,
onCheckpoint,
checkpointInterval,
}
);
// Mark the job as completed
await updateMirrorJobProgress({
jobId,
status: "mirrored",
message: `Completed ${jobType} job with ${items.length} items`,
inProgress: false,
isCompleted: true,
});
// Unregister the job from shutdown manager
unregisterActiveJob(jobId);
return results;
} catch (error) {
// Mark the job as failed (unless it was interrupted by shutdown)
const isShutdownError = error instanceof Error && error.message.includes('shutdown');
await updateMirrorJobProgress({
jobId,
status: isShutdownError ? "imported" : "failed", // Keep as imported if shutdown interrupted
message: isShutdownError
? 'Job interrupted by application shutdown - will resume on restart'
: `Failed ${jobType} job: ${error instanceof Error ? error.message : String(error)}`,
inProgress: false,
isCompleted: !isShutdownError, // Don't mark as completed if shutdown interrupted
});
// Unregister the job from shutdown manager
unregisterActiveJob(jobId);
throw error;
}
}
@@ -0,0 +1,135 @@
import { db, configs } from "@/lib/db";
import { eq } from "drizzle-orm";
import { v4 as uuidv4 } from "uuid";
import { encrypt } from "@/lib/utils/encryption";
export interface DefaultConfigOptions {
userId: string;
envOverrides?: {
githubToken?: string;
githubUsername?: string;
giteaUrl?: string;
giteaToken?: string;
giteaUsername?: string;
scheduleEnabled?: boolean;
scheduleInterval?: number;
cleanupEnabled?: boolean;
cleanupRetentionDays?: number;
};
}
/**
* Creates a default configuration for a new user with sensible defaults
* Environment variables can override these defaults
*/
export async function createDefaultConfig({ userId, envOverrides = {} }: DefaultConfigOptions) {
// Check if config already exists
const existingConfig = await db
.select()
.from(configs)
.where(eq(configs.userId, userId))
.limit(1);
if (existingConfig.length > 0) {
return existingConfig[0];
}
// Read environment variables for overrides
const githubToken = envOverrides.githubToken || process.env.GITHUB_TOKEN || "";
const githubUsername = envOverrides.githubUsername || process.env.GITHUB_USERNAME || "";
const giteaUrl = envOverrides.giteaUrl || process.env.GITEA_URL || "";
const giteaToken = envOverrides.giteaToken || process.env.GITEA_TOKEN || "";
const giteaUsername = envOverrides.giteaUsername || process.env.GITEA_USERNAME || "";
// Schedule config from env - default to ENABLED
const scheduleEnabled = envOverrides.scheduleEnabled ??
(process.env.SCHEDULE_ENABLED === "false" ? false : true); // Default: ENABLED
const scheduleInterval = envOverrides.scheduleInterval ??
(process.env.SCHEDULE_INTERVAL ? parseInt(process.env.SCHEDULE_INTERVAL, 10) : 86400); // Default: daily
// Cleanup config from env - default to ENABLED
const cleanupEnabled = envOverrides.cleanupEnabled ??
(process.env.CLEANUP_ENABLED === "false" ? false : true); // Default: ENABLED
const cleanupRetentionDays = envOverrides.cleanupRetentionDays ??
(process.env.CLEANUP_RETENTION_DAYS ? parseInt(process.env.CLEANUP_RETENTION_DAYS, 10) * 86400 : 604800); // Default: 7 days
// Create default configuration
const configId = uuidv4();
const defaultConfig = {
id: configId,
userId,
name: "Default Configuration",
isActive: true,
githubConfig: {
owner: githubUsername,
type: "personal",
token: githubToken ? encrypt(githubToken) : "",
includeStarred: false,
includeForks: true,
includeArchived: false,
includePrivate: false,
includePublic: true,
includeOrganizations: [],
starredReposOrg: "starred",
mirrorStrategy: "preserve",
defaultOrg: "github-mirrors",
},
giteaConfig: {
url: giteaUrl,
token: giteaToken ? encrypt(giteaToken) : "",
defaultOwner: giteaUsername,
mirrorInterval: "8h",
lfs: false,
wiki: false,
visibility: "public",
createOrg: true,
addTopics: true,
preserveVisibility: false,
forkStrategy: "reference",
issueConcurrency: 3,
pullRequestConcurrency: 5,
},
include: [],
exclude: [],
scheduleConfig: {
enabled: scheduleEnabled,
interval: scheduleInterval,
concurrent: false,
batchSize: 5, // Reduced from 10 to be more conservative with GitHub API limits
lastRun: null,
nextRun: scheduleEnabled ? new Date(Date.now() + scheduleInterval * 1000) : null,
},
cleanupConfig: {
enabled: cleanupEnabled,
retentionDays: cleanupRetentionDays,
deleteFromGitea: false,
deleteIfNotInGitHub: true,
protectedRepos: [],
dryRun: false,
orphanedRepoAction: "archive",
batchSize: 10,
pauseBetweenDeletes: 2000,
lastRun: null,
nextRun: cleanupEnabled ? new Date(Date.now() + getCleanupInterval(cleanupRetentionDays) * 1000) : null,
},
createdAt: new Date(),
updatedAt: new Date(),
};
// Insert the default config
await db.insert(configs).values(defaultConfig);
return defaultConfig;
}
/**
* Calculate cleanup interval based on retention period
*/
function getCleanupInterval(retentionSeconds: number): number {
const days = retentionSeconds / 86400;
if (days <= 1) return 21600; // 6 hours
if (days <= 3) return 43200; // 12 hours
if (days <= 7) return 86400; // 24 hours
if (days <= 30) return 172800; // 48 hours
return 604800; // 1 week
}
@@ -0,0 +1,52 @@
import { decrypt } from "./encryption";
import type { Config } from "@/types/config";
/**
* Decrypts tokens in a config object for use in API calls
* @param config The config object with potentially encrypted tokens
* @returns Config object with decrypted tokens
*/
export function decryptConfigTokens(config: Config): Config {
const decryptedConfig = { ...config };
// Deep clone the config objects
if (config.githubConfig) {
decryptedConfig.githubConfig = { ...config.githubConfig };
if (config.githubConfig.token) {
decryptedConfig.githubConfig.token = decrypt(config.githubConfig.token);
}
}
if (config.giteaConfig) {
decryptedConfig.giteaConfig = { ...config.giteaConfig };
if (config.giteaConfig.token) {
decryptedConfig.giteaConfig.token = decrypt(config.giteaConfig.token);
}
}
return decryptedConfig;
}
/**
* Gets a decrypted GitHub token from config
* @param config The config object
* @returns Decrypted GitHub token
*/
export function getDecryptedGitHubToken(config: Config): string {
if (!config.githubConfig?.token) {
throw new Error("GitHub token not found in config");
}
return decrypt(config.githubConfig.token);
}
/**
* Gets a decrypted Gitea token from config
* @param config The config object
* @returns Decrypted Gitea token
*/
export function getDecryptedGiteaToken(config: Config): string {
if (!config.giteaConfig?.token) {
throw new Error("Gitea token not found in config");
}
return decrypt(config.giteaConfig.token);
}
@@ -0,0 +1,282 @@
/**
* Maps between UI config structure and database schema structure
*/
import type {
GitHubConfig,
GiteaConfig,
MirrorOptions,
AdvancedOptions,
SaveConfigApiRequest
} from "@/types/config";
import { z } from "zod";
import { githubConfigSchema, giteaConfigSchema, scheduleConfigSchema, cleanupConfigSchema } from "@/lib/db/schema";
import { parseInterval } from "@/lib/utils/duration-parser";
// Use the actual database schema types
type DbGitHubConfig = z.infer<typeof githubConfigSchema>;
type DbGiteaConfig = z.infer<typeof giteaConfigSchema>;
type DbScheduleConfig = z.infer<typeof scheduleConfigSchema>;
type DbCleanupConfig = z.infer<typeof cleanupConfigSchema>;
/**
* Maps UI config structure to database schema structure
*/
export function mapUiToDbConfig(
githubConfig: GitHubConfig,
giteaConfig: GiteaConfig,
mirrorOptions: MirrorOptions,
advancedOptions: AdvancedOptions
): { githubConfig: DbGitHubConfig; giteaConfig: DbGiteaConfig } {
// Map GitHub config to match database schema fields
const dbGithubConfig: DbGitHubConfig = {
// Map username to owner field
owner: githubConfig.username,
type: "personal", // Default to personal, could be made configurable
token: githubConfig.token || "",
// Map checkbox fields with proper names
includeStarred: githubConfig.mirrorStarred,
includePrivate: githubConfig.privateRepositories,
includeForks: !advancedOptions.skipForks, // Note: UI has skipForks, DB has includeForks
skipForks: advancedOptions.skipForks, // Add skipForks field
includeArchived: false, // Not in UI yet, default to false
includePublic: true, // Not in UI yet, default to true
// Organization related fields
includeOrganizations: [], // Not in UI yet
// Starred repos organization
starredReposOrg: giteaConfig.starredReposOrg,
// Mirror strategy
mirrorStrategy: giteaConfig.mirrorStrategy || "preserve",
defaultOrg: giteaConfig.organization,
// Advanced options
starredCodeOnly: advancedOptions.starredCodeOnly,
};
// Map Gitea config to match database schema
const dbGiteaConfig: DbGiteaConfig = {
url: giteaConfig.url,
token: giteaConfig.token,
defaultOwner: giteaConfig.username, // Map username to defaultOwner
organization: giteaConfig.organization, // Add organization field
preserveOrgStructure: giteaConfig.mirrorStrategy === "preserve" || giteaConfig.mirrorStrategy === "mixed", // Add preserveOrgStructure field
// Mirror interval and options
mirrorInterval: "8h", // Default value, could be made configurable
lfs: mirrorOptions.mirrorLFS || false, // LFS mirroring option
wiki: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.wiki,
// Visibility settings
visibility: giteaConfig.visibility || "default",
preserveVisibility: false, // This should be a separate field, not the same as preserveOrgStructure
// Organization creation
createOrg: true, // Default to true
// Template settings (not in UI yet)
templateOwner: undefined,
templateRepo: undefined,
// Topics
addTopics: true, // Default to true
topicPrefix: undefined,
// Fork strategy
forkStrategy: advancedOptions.skipForks ? "skip" : "reference",
// Mirror options from UI
issueConcurrency: giteaConfig.issueConcurrency ?? 3,
pullRequestConcurrency: giteaConfig.pullRequestConcurrency ?? 5,
mirrorReleases: mirrorOptions.mirrorReleases,
releaseLimit: mirrorOptions.releaseLimit || 10,
mirrorMetadata: mirrorOptions.mirrorMetadata,
mirrorIssues: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.issues,
mirrorPullRequests: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.pullRequests,
mirrorLabels: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.labels,
mirrorMilestones: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.milestones,
};
return {
githubConfig: dbGithubConfig,
giteaConfig: dbGiteaConfig,
};
}
/**
* Maps database schema structure to UI config structure
*/
export function mapDbToUiConfig(dbConfig: any): {
githubConfig: GitHubConfig;
giteaConfig: GiteaConfig;
mirrorOptions: MirrorOptions;
advancedOptions: AdvancedOptions;
} {
// Map from database GitHub config to UI fields
const githubConfig: GitHubConfig = {
username: dbConfig.githubConfig?.owner || "", // Map owner to username
token: dbConfig.githubConfig?.token || "",
privateRepositories: dbConfig.githubConfig?.includePrivate || false, // Map includePrivate to privateRepositories
mirrorStarred: dbConfig.githubConfig?.includeStarred || false, // Map includeStarred to mirrorStarred
};
// Map from database Gitea config to UI fields
const giteaConfig: GiteaConfig = {
url: dbConfig.giteaConfig?.url || "",
username: dbConfig.giteaConfig?.defaultOwner || "", // Map defaultOwner to username
token: dbConfig.giteaConfig?.token || "",
organization: dbConfig.githubConfig?.defaultOrg || "github-mirrors", // Get from GitHub config
visibility: dbConfig.giteaConfig?.visibility === "default" ? "public" : dbConfig.giteaConfig?.visibility || "public",
starredReposOrg: dbConfig.githubConfig?.starredReposOrg || "starred", // Get from GitHub config
preserveOrgStructure: dbConfig.giteaConfig?.preserveVisibility || false, // Map preserveVisibility
mirrorStrategy: dbConfig.githubConfig?.mirrorStrategy || "preserve", // Get from GitHub config
personalReposOrg: undefined, // Not stored in current schema
issueConcurrency: dbConfig.giteaConfig?.issueConcurrency ?? 3,
pullRequestConcurrency: dbConfig.giteaConfig?.pullRequestConcurrency ?? 5,
};
// Map mirror options from various database fields
const mirrorOptions: MirrorOptions = {
mirrorReleases: dbConfig.giteaConfig?.mirrorReleases || false,
releaseLimit: dbConfig.giteaConfig?.releaseLimit || 10,
mirrorLFS: dbConfig.giteaConfig?.lfs || false,
mirrorMetadata: dbConfig.giteaConfig?.mirrorMetadata || false,
metadataComponents: {
issues: dbConfig.giteaConfig?.mirrorIssues || false,
pullRequests: dbConfig.giteaConfig?.mirrorPullRequests || false,
labels: dbConfig.giteaConfig?.mirrorLabels || false,
milestones: dbConfig.giteaConfig?.mirrorMilestones || false,
wiki: dbConfig.giteaConfig?.wiki || false,
},
};
// Map advanced options
const advancedOptions: AdvancedOptions = {
skipForks: !(dbConfig.githubConfig?.includeForks ?? true), // Invert includeForks to get skipForks
// Support both old (skipStarredIssues) and new (starredCodeOnly) field names for backward compatibility
starredCodeOnly: dbConfig.githubConfig?.starredCodeOnly ?? (dbConfig.githubConfig as any)?.skipStarredIssues ?? false,
};
return {
githubConfig,
giteaConfig,
mirrorOptions,
advancedOptions,
};
}
/**
* Maps UI schedule config to database schema
*/
export function mapUiScheduleToDb(uiSchedule: any, existing?: DbScheduleConfig): DbScheduleConfig {
// Preserve existing schedule config and only update fields controlled by the UI
const base: DbScheduleConfig = existing
? { ...(existing as unknown as DbScheduleConfig) }
: (scheduleConfigSchema.parse({}) as unknown as DbScheduleConfig);
// Store interval as seconds string to avoid lossy cron conversion
const intervalSeconds = typeof uiSchedule.interval === 'number' && uiSchedule.interval > 0
? String(uiSchedule.interval)
: (typeof base.interval === 'string' ? base.interval : String(86400));
return {
...base,
enabled: !!uiSchedule.enabled,
interval: intervalSeconds,
} as DbScheduleConfig;
}
/**
* Maps database schedule config to UI format
*/
export function mapDbScheduleToUi(dbSchedule: DbScheduleConfig): any {
// Handle null/undefined schedule config
if (!dbSchedule) {
return {
enabled: false,
interval: 86400, // Default to daily (24 hours)
lastRun: null,
nextRun: null,
};
}
// Parse interval supporting numbers (seconds), duration strings, and cron
let intervalSeconds = 86400; // Default to daily (24 hours)
try {
const ms = parseInterval(
typeof dbSchedule.interval === 'number'
? dbSchedule.interval
: (dbSchedule.interval as unknown as string)
);
intervalSeconds = Math.max(1, Math.floor(ms / 1000));
} catch (_e) {
// Fallback to default if unparsable
intervalSeconds = 86400;
}
return {
enabled: dbSchedule.enabled || false,
interval: intervalSeconds,
lastRun: dbSchedule.lastRun || null,
nextRun: dbSchedule.nextRun || null,
};
}
/**
* Maps UI cleanup config to database schema
*/
export function mapUiCleanupToDb(uiCleanup: any): DbCleanupConfig {
const parsedRetention =
typeof uiCleanup.retentionDays === "string"
? parseInt(uiCleanup.retentionDays, 10)
: uiCleanup.retentionDays;
const retentionSeconds = Number.isFinite(parsedRetention)
? parsedRetention
: 604800;
return {
enabled: Boolean(uiCleanup.enabled),
retentionDays: retentionSeconds,
deleteFromGitea: uiCleanup.deleteFromGitea ?? false,
deleteIfNotInGitHub: uiCleanup.deleteIfNotInGitHub ?? true,
protectedRepos: uiCleanup.protectedRepos ?? [],
dryRun: uiCleanup.dryRun ?? false,
orphanedRepoAction: (uiCleanup.orphanedRepoAction as DbCleanupConfig["orphanedRepoAction"]) || "archive",
batchSize: uiCleanup.batchSize ?? 10,
pauseBetweenDeletes: uiCleanup.pauseBetweenDeletes ?? 2000,
lastRun: uiCleanup.lastRun ?? null,
nextRun: uiCleanup.nextRun ?? null,
};
}
/**
* Maps database cleanup config to UI format
*/
export function mapDbCleanupToUi(dbCleanup: DbCleanupConfig): any {
// Handle null/undefined cleanup config
if (!dbCleanup) {
return {
enabled: false,
retentionDays: 604800, // Default to 7 days in seconds
lastRun: null,
nextRun: null,
};
}
return {
enabled: dbCleanup.enabled ?? false,
retentionDays: dbCleanup.retentionDays ?? 604800,
deleteFromGitea: dbCleanup.deleteFromGitea ?? false,
deleteIfNotInGitHub: dbCleanup.deleteIfNotInGitHub ?? true,
protectedRepos: dbCleanup.protectedRepos ?? [],
dryRun: dbCleanup.dryRun ?? false,
orphanedRepoAction: dbCleanup.orphanedRepoAction ?? "archive",
batchSize: dbCleanup.batchSize ?? 10,
pauseBetweenDeletes: dbCleanup.pauseBetweenDeletes ?? 2000,
lastRun: dbCleanup.lastRun ?? null,
nextRun: dbCleanup.nextRun ?? null,
};
}
@@ -0,0 +1,94 @@
import { test, expect } from 'bun:test';
import { parseDuration, parseInterval, formatDuration, parseCronInterval } from './duration-parser';
test('parseDuration - handles duration strings correctly', () => {
// Hours
expect(parseDuration('8h')).toBe(8 * 60 * 60 * 1000);
expect(parseDuration('1h')).toBe(60 * 60 * 1000);
expect(parseDuration('24h')).toBe(24 * 60 * 60 * 1000);
// Minutes
expect(parseDuration('30m')).toBe(30 * 60 * 1000);
expect(parseDuration('5m')).toBe(5 * 60 * 1000);
// Seconds
expect(parseDuration('45s')).toBe(45 * 1000);
expect(parseDuration('1s')).toBe(1000);
// Days
expect(parseDuration('1d')).toBe(24 * 60 * 60 * 1000);
expect(parseDuration('7d')).toBe(7 * 24 * 60 * 60 * 1000);
// Numbers (treated as seconds)
expect(parseDuration(3600)).toBe(3600 * 1000);
expect(parseDuration('3600')).toBe(3600 * 1000);
});
test('parseDuration - handles edge cases', () => {
// Case insensitive
expect(parseDuration('8H')).toBe(8 * 60 * 60 * 1000);
expect(parseDuration('30M')).toBe(30 * 60 * 1000);
// With spaces
expect(parseDuration('8 h')).toBe(8 * 60 * 60 * 1000);
expect(parseDuration('30 minutes')).toBe(30 * 60 * 1000);
// Fractional values
expect(parseDuration('1.5h')).toBe(1.5 * 60 * 60 * 1000);
expect(parseDuration('2.5m')).toBe(2.5 * 60 * 1000);
});
test('parseDuration - throws on invalid input', () => {
expect(() => parseDuration('')).toThrow();
expect(() => parseDuration('invalid')).toThrow();
expect(() => parseDuration('8x')).toThrow();
expect(() => parseDuration('-1h')).toThrow();
});
test('parseInterval - handles cron expressions', () => {
// Every 2 hours
expect(parseInterval('0 */2 * * *')).toBe(2 * 60 * 60 * 1000);
// Every 15 minutes
expect(parseInterval('*/15 * * * *')).toBe(15 * 60 * 1000);
// Daily at 2 AM
expect(parseInterval('0 2 * * *')).toBe(24 * 60 * 60 * 1000);
});
test('parseInterval - prioritizes duration strings over cron', () => {
expect(parseInterval('8h')).toBe(8 * 60 * 60 * 1000);
expect(parseInterval('30m')).toBe(30 * 60 * 1000);
expect(parseInterval(3600)).toBe(3600 * 1000);
});
test('formatDuration - converts milliseconds back to readable format', () => {
expect(formatDuration(1000)).toBe('1s');
expect(formatDuration(60 * 1000)).toBe('1m');
expect(formatDuration(60 * 60 * 1000)).toBe('1h');
expect(formatDuration(24 * 60 * 60 * 1000)).toBe('1d');
expect(formatDuration(8 * 60 * 60 * 1000)).toBe('8h');
expect(formatDuration(500)).toBe('500ms');
});
test('parseCronInterval - handles common cron patterns', () => {
expect(parseCronInterval('0 */8 * * *')).toBe(8 * 60 * 60 * 1000);
expect(parseCronInterval('*/30 * * * *')).toBe(30 * 60 * 1000);
expect(parseCronInterval('0 2 * * *')).toBe(24 * 60 * 60 * 1000);
expect(parseCronInterval('0 0 * * 0')).toBe(7 * 24 * 60 * 60 * 1000); // Weekly
});
test('Integration test - Issue #72 scenario', () => {
// User sets GITEA_MIRROR_INTERVAL=8h
const userInterval = '8h';
const parsedMs = parseInterval(userInterval);
expect(parsedMs).toBe(8 * 60 * 60 * 1000); // 8 hours in milliseconds
expect(formatDuration(parsedMs)).toBe('8h');
// Should work from container startup time
const startTime = new Date();
const nextRun = new Date(startTime.getTime() + parsedMs);
expect(nextRun.getTime() - startTime.getTime()).toBe(8 * 60 * 60 * 1000);
});
@@ -0,0 +1,251 @@
/**
* Duration parser utility for converting human-readable duration strings to milliseconds
* Supports formats like: 8h, 30m, 24h, 1d, 5s, etc.
*/
export interface ParsedDuration {
value: number;
unit: string;
milliseconds: number;
}
/**
* Parse a duration string into milliseconds
* @param duration - Duration string (e.g., "8h", "30m", "1d", "5s") or number in seconds
* @returns Duration in milliseconds
*/
export function parseDuration(duration: string | number): number {
if (typeof duration === 'number') {
return duration * 1000; // Convert seconds to milliseconds
}
if (!duration || typeof duration !== 'string') {
throw new Error('Invalid duration: must be a string or number');
}
// Try to parse as number first (assume seconds)
const parsed = parseInt(duration, 10);
if (!isNaN(parsed) && duration === parsed.toString()) {
return parsed * 1000; // Convert seconds to milliseconds
}
// Parse duration string with unit
const match = duration.trim().match(/^(\d+(?:\.\d+)?)\s*([a-zA-Z]+)$/);
if (!match) {
throw new Error(`Invalid duration format: "${duration}". Expected format like "8h", "30m", "1d"`);
}
const [, valueStr, unit] = match;
const value = parseFloat(valueStr);
if (isNaN(value) || value < 0) {
throw new Error(`Invalid duration value: "${valueStr}". Must be a positive number`);
}
const unitLower = unit.toLowerCase();
let multiplier: number;
switch (unitLower) {
case 'ms':
case 'millisecond':
case 'milliseconds':
multiplier = 1;
break;
case 's':
case 'sec':
case 'second':
case 'seconds':
multiplier = 1000;
break;
case 'm':
case 'min':
case 'minute':
case 'minutes':
multiplier = 60 * 1000;
break;
case 'h':
case 'hr':
case 'hour':
case 'hours':
multiplier = 60 * 60 * 1000;
break;
case 'd':
case 'day':
case 'days':
multiplier = 24 * 60 * 60 * 1000;
break;
case 'w':
case 'week':
case 'weeks':
multiplier = 7 * 24 * 60 * 60 * 1000;
break;
default:
throw new Error(`Unsupported duration unit: "${unit}". Supported units: ms, s, m, h, d, w`);
}
return Math.floor(value * multiplier);
}
/**
* Parse a duration string and return detailed information
* @param duration - Duration string
* @returns Parsed duration with value, unit, and milliseconds
*/
export function parseDurationDetailed(duration: string | number): ParsedDuration {
const milliseconds = parseDuration(duration);
if (typeof duration === 'number') {
return {
value: duration,
unit: 's',
milliseconds
};
}
const match = duration.trim().match(/^(\d+(?:\.\d+)?)\s*([a-zA-Z]+)$/);
if (!match) {
// If it's just a number as string
const value = parseFloat(duration);
if (!isNaN(value)) {
return {
value,
unit: 's',
milliseconds
};
}
throw new Error(`Invalid duration format: "${duration}"`);
}
const [, valueStr, unit] = match;
return {
value: parseFloat(valueStr),
unit: unit.toLowerCase(),
milliseconds
};
}
/**
* Format milliseconds back to human-readable duration
* @param milliseconds - Duration in milliseconds
* @returns Human-readable duration string
*/
export function formatDuration(milliseconds: number): string {
if (milliseconds < 1000) {
return `${milliseconds}ms`;
}
const seconds = Math.floor(milliseconds / 1000);
if (seconds < 60) {
return `${seconds}s`;
}
const minutes = Math.floor(seconds / 60);
if (minutes < 60) {
return `${minutes}m`;
}
const hours = Math.floor(minutes / 60);
if (hours < 24) {
return `${hours}h`;
}
const days = Math.floor(hours / 24);
return `${days}d`;
}
/**
* Parse cron expression to approximate milliseconds interval
* This is a simplified parser for common cron patterns
* @param cron - Cron expression
* @returns Approximate interval in milliseconds
*/
export function parseCronInterval(cron: string): number {
if (!cron || typeof cron !== 'string') {
throw new Error('Invalid cron expression');
}
const parts = cron.trim().split(/\s+/);
if (parts.length !== 5) {
throw new Error('Cron expression must have 5 parts (minute hour day month weekday)');
}
const [minute, hour, day, month, weekday] = parts;
// Extract hour interval from patterns like "*/2" (every 2 hours)
if (hour.includes('*/')) {
const everyMatch = hour.match(/\*\/(\d+)/);
if (everyMatch) {
const hours = parseInt(everyMatch[1], 10);
return hours * 60 * 60 * 1000; // Convert hours to milliseconds
}
}
// Extract minute interval from patterns like "*/15" (every 15 minutes)
if (minute.includes('*/')) {
const everyMatch = minute.match(/\*\/(\d+)/);
if (everyMatch) {
const minutes = parseInt(everyMatch[1], 10);
return minutes * 60 * 1000; // Convert minutes to milliseconds
}
}
// Daily patterns like "0 2 * * *" (daily at 2 AM)
if (hour !== '*' && minute !== '*' && day === '*' && month === '*' && weekday === '*') {
return 24 * 60 * 60 * 1000; // 24 hours in milliseconds
}
// Weekly patterns
if (weekday !== '*') {
return 7 * 24 * 60 * 60 * 1000; // 7 days in milliseconds
}
// Monthly patterns
if (day !== '*') {
return 30 * 24 * 60 * 60 * 1000; // Approximate month (30 days)
}
// Default to 1 hour if unable to parse
return 60 * 60 * 1000;
}
/**
* Enhanced interval parser that handles duration strings, cron expressions, and numbers
* @param interval - Interval specification (duration string, cron, or number)
* @returns Interval in milliseconds
*/
export function parseInterval(interval: string | number): number {
if (typeof interval === 'number') {
return interval * 1000; // Convert seconds to milliseconds
}
if (!interval || typeof interval !== 'string') {
throw new Error('Invalid interval: must be a string or number');
}
const trimmed = interval.trim();
// Check if it's a cron expression (contains spaces and specific patterns)
if (trimmed.includes(' ') && trimmed.split(/\s+/).length === 5) {
try {
return parseCronInterval(trimmed);
} catch (error) {
console.warn(`Failed to parse as cron expression: ${error instanceof Error ? error.message : 'Unknown error'}`);
// Fall through to duration parsing
}
}
// Try to parse as duration string
try {
return parseDuration(trimmed);
} catch (error) {
console.warn(`Failed to parse as duration: ${error instanceof Error ? error.message : 'Unknown error'}`);
// Last resort: try as plain number (seconds)
const parsed = parseInt(trimmed, 10);
if (!isNaN(parsed)) {
return parsed * 1000;
}
throw new Error(`Unable to parse interval: "${interval}". Expected duration (e.g., "8h"), cron expression (e.g., "0 */2 * * *"), or number of seconds`);
}
}
@@ -0,0 +1,169 @@
import * as crypto from "crypto";
// Encryption configuration
const ALGORITHM = "aes-256-gcm";
const IV_LENGTH = 16; // 128 bits
const SALT_LENGTH = 32; // 256 bits
const TAG_LENGTH = 16; // 128 bits
const KEY_LENGTH = 32; // 256 bits
const ITERATIONS = 100000; // PBKDF2 iterations
// Get or generate encryption key
function getEncryptionKey(): Buffer {
const secret = process.env.ENCRYPTION_SECRET || process.env.JWT_SECRET || process.env.BETTER_AUTH_SECRET;
if (!secret) {
throw new Error("No encryption secret found. Please set ENCRYPTION_SECRET environment variable.");
}
// Use a static salt derived from the secret for consistent key generation
// This ensures the same key is generated across application restarts
const salt = crypto.createHash('sha256').update('gitea-mirror-salt' + secret).digest();
return crypto.pbkdf2Sync(secret, salt, ITERATIONS, KEY_LENGTH, 'sha256');
}
export interface EncryptedData {
encrypted: string;
iv: string;
salt: string;
tag: string;
version: number;
}
/**
* Encrypts sensitive data like API tokens
* @param plaintext The data to encrypt
* @returns Encrypted data with metadata
*/
export function encrypt(plaintext: string): string {
if (!plaintext) {
return '';
}
try {
const key = getEncryptionKey();
const iv = crypto.randomBytes(IV_LENGTH);
const salt = crypto.randomBytes(SALT_LENGTH);
const cipher = crypto.createCipheriv(ALGORITHM, key, iv);
const encrypted = Buffer.concat([
cipher.update(plaintext, 'utf8'),
cipher.final()
]);
const tag = cipher.getAuthTag();
const encryptedData: EncryptedData = {
encrypted: encrypted.toString('base64'),
iv: iv.toString('base64'),
salt: salt.toString('base64'),
tag: tag.toString('base64'),
version: 1
};
// Return as base64 encoded JSON for easy storage
return Buffer.from(JSON.stringify(encryptedData)).toString('base64');
} catch (error) {
console.error('Encryption error:', error);
throw new Error('Failed to encrypt data');
}
}
/**
* Decrypts encrypted data
* @param encryptedString The encrypted data string
* @returns Decrypted plaintext
*/
export function decrypt(encryptedString: string): string {
if (!encryptedString) {
return '';
}
try {
// Check if it's already plaintext (for backward compatibility during migration)
if (!isEncrypted(encryptedString)) {
return encryptedString;
}
const encryptedData: EncryptedData = JSON.parse(
Buffer.from(encryptedString, 'base64').toString('utf8')
);
const key = getEncryptionKey();
const iv = Buffer.from(encryptedData.iv, 'base64');
const tag = Buffer.from(encryptedData.tag, 'base64');
const encrypted = Buffer.from(encryptedData.encrypted, 'base64');
const decipher = crypto.createDecipheriv(ALGORITHM, key, iv);
decipher.setAuthTag(tag);
const decrypted = Buffer.concat([
decipher.update(encrypted),
decipher.final()
]);
return decrypted.toString('utf8');
} catch (error) {
// If decryption fails, check if it's plaintext (backward compatibility)
try {
JSON.parse(Buffer.from(encryptedString, 'base64').toString('utf8'));
throw error; // It was encrypted but failed to decrypt
} catch {
// Not encrypted, return as-is for backward compatibility
console.warn('Token appears to be unencrypted, returning as-is for backward compatibility');
return encryptedString;
}
}
}
/**
* Checks if a string is encrypted
* @param value The string to check
* @returns true if encrypted, false otherwise
*/
export function isEncrypted(value: string): boolean {
if (!value) {
return false;
}
try {
const decoded = Buffer.from(value, 'base64').toString('utf8');
const data = JSON.parse(decoded);
return data.version === 1 && data.encrypted && data.iv && data.tag;
} catch {
return false;
}
}
/**
* Migrates unencrypted tokens to encrypted format
* @param token The token to migrate
* @returns Encrypted token if it wasn't already encrypted
*/
export function migrateToken(token: string): string {
if (!token || isEncrypted(token)) {
return token;
}
return encrypt(token);
}
/**
* Generates a secure random token
* @param length Token length in bytes (default: 32)
* @returns Hex encoded random token
*/
export function generateSecureToken(length: number = 32): string {
return crypto.randomBytes(length).toString('hex');
}
/**
* Hashes a value using SHA-256 (for non-reversible values like API keys for comparison)
* @param value The value to hash
* @returns Hex encoded hash
*/
export function hashValue(value: string): string {
return crypto.createHash('sha256').update(value).digest('hex');
}
@@ -0,0 +1,93 @@
/**
* Mirror strategy configuration for handling various repository scenarios
*/
export type NonMirrorStrategy = "skip" | "delete" | "rename";
export interface MirrorStrategyConfig {
/**
* How to handle repositories that exist in Gitea but are not mirrors
* - "skip": Leave the repository as-is and mark as failed
* - "delete": Delete the repository and recreate as mirror
* - "rename": Rename the existing repository (not implemented yet)
* Note: "convert" strategy was removed as it's not supported by most Gitea versions
*/
nonMirrorStrategy: NonMirrorStrategy;
/**
* Maximum retries for organization creation
*/
orgCreationRetries: number;
/**
* Base delay in milliseconds for exponential backoff
*/
orgCreationRetryDelay: number;
/**
* Whether to create organizations sequentially to avoid race conditions
*/
sequentialOrgCreation: boolean;
/**
* Batch size for parallel repository processing
*/
repoBatchSize: number;
/**
* Timeout for sync operations in milliseconds
*/
syncTimeout: number;
}
export const DEFAULT_MIRROR_STRATEGY: MirrorStrategyConfig = {
nonMirrorStrategy: "delete", // Safe default: delete and recreate
orgCreationRetries: 3,
orgCreationRetryDelay: 100,
sequentialOrgCreation: true,
repoBatchSize: 3,
syncTimeout: 30000, // 30 seconds
};
/**
* Get mirror strategy configuration from environment or defaults
*/
export function getMirrorStrategyConfig(): MirrorStrategyConfig {
return {
nonMirrorStrategy: (process.env.NON_MIRROR_STRATEGY as NonMirrorStrategy) || DEFAULT_MIRROR_STRATEGY.nonMirrorStrategy,
orgCreationRetries: parseInt(process.env.ORG_CREATION_RETRIES || "") || DEFAULT_MIRROR_STRATEGY.orgCreationRetries,
orgCreationRetryDelay: parseInt(process.env.ORG_CREATION_RETRY_DELAY || "") || DEFAULT_MIRROR_STRATEGY.orgCreationRetryDelay,
sequentialOrgCreation: process.env.SEQUENTIAL_ORG_CREATION !== "false",
repoBatchSize: parseInt(process.env.REPO_BATCH_SIZE || "") || DEFAULT_MIRROR_STRATEGY.repoBatchSize,
syncTimeout: parseInt(process.env.SYNC_TIMEOUT || "") || DEFAULT_MIRROR_STRATEGY.syncTimeout,
};
}
/**
* Validate strategy configuration
*/
export function validateStrategyConfig(config: MirrorStrategyConfig): string[] {
const errors: string[] = [];
if (!["skip", "delete", "rename"].includes(config.nonMirrorStrategy)) {
errors.push(`Invalid nonMirrorStrategy: ${config.nonMirrorStrategy}`);
}
if (config.orgCreationRetries < 1 || config.orgCreationRetries > 10) {
errors.push("orgCreationRetries must be between 1 and 10");
}
if (config.orgCreationRetryDelay < 10 || config.orgCreationRetryDelay > 5000) {
errors.push("orgCreationRetryDelay must be between 10ms and 5000ms");
}
if (config.repoBatchSize < 1 || config.repoBatchSize > 50) {
errors.push("repoBatchSize must be between 1 and 50");
}
if (config.syncTimeout < 5000 || config.syncTimeout > 300000) {
errors.push("syncTimeout must be between 5s and 5min");
}
return errors;
}
@@ -0,0 +1,85 @@
import { describe, test, expect } from "bun:test";
import { isValidRedirectUri, parseRedirectUris } from "./oauth-validation";
describe("OAuth Validation", () => {
describe("parseRedirectUris", () => {
test("parses comma-separated URIs", () => {
const result = parseRedirectUris("https://app1.com,https://app2.com, https://app3.com ");
expect(result).toEqual([
"https://app1.com",
"https://app2.com",
"https://app3.com"
]);
});
test("handles empty string", () => {
expect(parseRedirectUris("")).toEqual([]);
});
test("filters out empty values", () => {
const result = parseRedirectUris("https://app1.com,,https://app2.com,");
expect(result).toEqual(["https://app1.com", "https://app2.com"]);
});
});
describe("isValidRedirectUri", () => {
test("validates exact match", () => {
const authorizedUris = ["https://app.example.com/callback"];
expect(isValidRedirectUri("https://app.example.com/callback", authorizedUris)).toBe(true);
expect(isValidRedirectUri("https://app.example.com/other", authorizedUris)).toBe(false);
});
test("validates wildcard paths", () => {
const authorizedUris = ["https://app.example.com/*"];
expect(isValidRedirectUri("https://app.example.com/", authorizedUris)).toBe(true);
expect(isValidRedirectUri("https://app.example.com/callback", authorizedUris)).toBe(true);
expect(isValidRedirectUri("https://app.example.com/deep/path", authorizedUris)).toBe(true);
// Different domain should fail
expect(isValidRedirectUri("https://evil.com/callback", authorizedUris)).toBe(false);
});
test("validates protocol", () => {
const authorizedUris = ["https://app.example.com/callback"];
// HTTP instead of HTTPS should fail
expect(isValidRedirectUri("http://app.example.com/callback", authorizedUris)).toBe(false);
});
test("validates host and port", () => {
const authorizedUris = ["https://app.example.com:3000/callback"];
// Different port should fail
expect(isValidRedirectUri("https://app.example.com/callback", authorizedUris)).toBe(false);
expect(isValidRedirectUri("https://app.example.com:3000/callback", authorizedUris)).toBe(true);
expect(isValidRedirectUri("https://app.example.com:4000/callback", authorizedUris)).toBe(false);
});
test("handles invalid URIs", () => {
const authorizedUris = ["not-a-valid-uri", "https://valid.com"];
// Invalid redirect URI
expect(isValidRedirectUri("not-a-valid-uri", authorizedUris)).toBe(false);
// Valid redirect URI with invalid authorized URI should still work if it matches valid one
expect(isValidRedirectUri("https://valid.com", authorizedUris)).toBe(true);
});
test("handles empty inputs", () => {
expect(isValidRedirectUri("", ["https://app.com"])).toBe(false);
expect(isValidRedirectUri("https://app.com", [])).toBe(false);
});
test("prevents open redirect attacks", () => {
const authorizedUris = ["https://app.example.com/callback"];
// Various attack vectors
expect(isValidRedirectUri("https://app.example.com.evil.com/callback", authorizedUris)).toBe(false);
expect(isValidRedirectUri("https://app.example.com@evil.com/callback", authorizedUris)).toBe(false);
expect(isValidRedirectUri("//evil.com/callback", authorizedUris)).toBe(false);
expect(isValidRedirectUri("https:evil.com/callback", authorizedUris)).toBe(false);
});
});
});
@@ -0,0 +1,59 @@
/**
* Validates a redirect URI against a list of authorized URIs
* @param redirectUri The redirect URI to validate
* @param authorizedUris List of authorized redirect URIs
* @returns true if the redirect URI is authorized, false otherwise
*/
export function isValidRedirectUri(redirectUri: string, authorizedUris: string[]): boolean {
if (!redirectUri || authorizedUris.length === 0) {
return false;
}
try {
// Parse the redirect URI to ensure it's valid
const redirectUrl = new URL(redirectUri);
return authorizedUris.some(authorizedUri => {
try {
// Handle wildcard paths (e.g., https://example.com/*)
if (authorizedUri.endsWith('/*')) {
const baseUri = authorizedUri.slice(0, -2);
const baseUrl = new URL(baseUri);
// Check protocol, host, and port match
return redirectUrl.protocol === baseUrl.protocol &&
redirectUrl.host === baseUrl.host &&
redirectUrl.pathname.startsWith(baseUrl.pathname);
}
// Handle exact match
const authorizedUrl = new URL(authorizedUri);
// For exact match, everything must match including path and query params
return redirectUrl.href === authorizedUrl.href;
} catch {
// If authorized URI is not a valid URL, treat as invalid
return false;
}
});
} catch {
// If redirect URI is not a valid URL, it's invalid
return false;
}
}
/**
* Parses a comma-separated list of redirect URIs and trims whitespace
* @param redirectUrls Comma-separated list of redirect URIs
* @returns Array of trimmed redirect URIs
*/
export function parseRedirectUris(redirectUrls: string): string[] {
if (!redirectUrls) {
return [];
}
return redirectUrls
.split(',')
.map(uri => uri.trim())
.filter(uri => uri.length > 0);
}