Added gitea-mirror
@@ -0,0 +1,58 @@
import type { APIRoute, APIContext } from "astro";
import { auth } from "@/lib/auth";

/**
 * Get authenticated user from request
 * @param request - The request object from Astro API route
 * @returns The authenticated user or null if not authenticated
 */
export async function getAuthenticatedUser(request: Request) {
  try {
    const session = await auth.api.getSession({
      headers: request.headers,
    });

    return session ? session.user : null;
  } catch (error) {
    console.error("Error getting session:", error);
    return null;
  }
}

/**
 * Require authentication for API routes
 * Returns an error response if the user is not authenticated
 * @param context - The API context from Astro
 * @returns Object with user if authenticated, or error response if not
 */
export async function requireAuth(context: APIContext) {
  const user = await getAuthenticatedUser(context.request);

  if (!user) {
    return {
      user: null,
      response: new Response(
        JSON.stringify({
          success: false,
          error: "Unauthorized - Please log in",
        }),
        {
          status: 401,
          headers: { "Content-Type": "application/json" },
        }
      ),
    };
  }

  return { user, response: null };
}

/**
 * Get user ID from authenticated session
 * @param request - The request object from Astro API route
 * @returns The user ID or null if not authenticated
 */
export async function getAuthenticatedUserId(request: Request): Promise<string | null> {
  const user = await getAuthenticatedUser(request);
  return user?.id || null;
}
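
For reference, a minimal sketch of how these helpers might be consumed from an Astro API route. The module path ("@/lib/utils/auth-helpers") and the response shape are assumptions for illustration, not part of this commit:

// Hypothetical consumer; the import path below is assumed.
import type { APIRoute } from "astro";
import { requireAuth } from "@/lib/utils/auth-helpers";

export const GET: APIRoute = async (context) => {
  const { user, response } = await requireAuth(context);
  if (response) return response; // not authenticated: pass the 401 through

  return new Response(JSON.stringify({ success: true, userId: user?.id }), {
    status: 200,
    headers: { "Content-Type": "application/json" },
  });
};
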
@@ -0,0 +1,167 @@
import { describe, test, expect, mock } from "bun:test";
import { processInParallel, processWithRetry } from "./concurrency";

describe("processInParallel", () => {
  test("processes items in parallel with concurrency control", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

    // Create a mock function to track execution
    const processItem = mock(async (item: number) => {
      // Simulate async work
      await new Promise(resolve => setTimeout(resolve, 10));
      return item * 2;
    });

    // Create a mock progress callback
    const onProgress = mock((completed: number, total: number, result?: number) => {
      // Progress tracking
    });

    // Process the items with a concurrency limit of 3
    const results = await processInParallel(
      items,
      processItem,
      3,
      onProgress
    );

    // Verify results
    expect(results).toEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);

    // Verify that processItem was called for each item
    expect(processItem).toHaveBeenCalledTimes(10);

    // Verify that onProgress was called for each item
    expect(onProgress).toHaveBeenCalledTimes(10);

    // Verify the last call to onProgress had the correct completed/total values
    expect(onProgress.mock.calls[9][0]).toBe(10); // completed
    expect(onProgress.mock.calls[9][1]).toBe(10); // total
  });

  test("handles errors in processing", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3, 4, 5];

    // Create a mock function that throws an error for item 3
    const processItem = mock(async (item: number) => {
      if (item === 3) {
        throw new Error("Test error");
      }
      return item * 2;
    });

    // Create a spy for console.error
    const originalConsoleError = console.error;
    const consoleErrorMock = mock(() => {});
    console.error = consoleErrorMock;

    try {
      // Process the items
      const results = await processInParallel(items, processItem);

      // Verify results (should have 4 items, missing the one that errored)
      expect(results).toEqual([2, 4, 8, 10]);

      // Verify that processItem was called for each item
      expect(processItem).toHaveBeenCalledTimes(5);

      // Verify that console.error was called (enhanced logging calls it multiple times)
      expect(consoleErrorMock).toHaveBeenCalled();
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});

describe("processWithRetry", () => {
  test("retries failed operations", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3];

    // Create a counter to track retry attempts
    const attemptCounts: Record<number, number> = { 1: 0, 2: 0, 3: 0 };

    // Create a mock function that fails on first attempt for item 2
    const processItem = mock(async (item: number) => {
      attemptCounts[item]++;

      if (item === 2 && attemptCounts[item] === 1) {
        throw new Error("Temporary error");
      }

      return item * 2;
    });

    // Create a mock for the onRetry callback
    const onRetry = mock((item: number, error: Error, attempt: number) => {
      // Retry tracking
    });

    // Process the items with retry
    const results = await processWithRetry(items, processItem, {
      maxRetries: 2,
      retryDelay: 10,
      onRetry,
    });

    // Verify results
    expect(results).toEqual([2, 4, 6]);

    // Verify that item 2 was retried once
    expect(attemptCounts[1]).toBe(1); // No retries
    expect(attemptCounts[2]).toBe(2); // One retry
    expect(attemptCounts[3]).toBe(1); // No retries

    // Verify that onRetry was called once
    expect(onRetry).toHaveBeenCalledTimes(1);
    expect(onRetry.mock.calls[0][0]).toBe(2); // item
    expect(onRetry.mock.calls[0][2]).toBe(1); // attempt
  });

  test("gives up after max retries", async () => {
    // Create an array of numbers to process
    const items = [1, 2];

    // Create a mock function that always fails for item 2
    const processItem = mock(async (item: number) => {
      if (item === 2) {
        throw new Error("Persistent error");
      }
      return item * 2;
    });

    // Create a mock for the onRetry callback
    const onRetry = mock((item: number, error: Error, attempt: number) => {
      // Retry tracking
    });

    // Create a spy for console.error
    const originalConsoleError = console.error;
    const consoleErrorMock = mock(() => {});
    console.error = consoleErrorMock;

    try {
      // Process the items with retry
      const results = await processWithRetry(items, processItem, {
        maxRetries: 2,
        retryDelay: 10,
        onRetry,
      });

      // Verify results (should have 1 item, missing the one that errored)
      expect(results).toEqual([2]);

      // Verify that onRetry was called twice (for 2 retry attempts)
      expect(onRetry).toHaveBeenCalledTimes(2);

      // Verify that console.error was called (enhanced logging calls it multiple times)
      expect(consoleErrorMock).toHaveBeenCalled();
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});
@@ -0,0 +1,365 @@
/**
 * Utility for processing items in parallel with concurrency control
 *
 * @param items Array of items to process
 * @param processItem Function to process each item
 * @param concurrencyLimit Maximum number of concurrent operations
 * @param onProgress Optional callback for progress updates
 * @returns Promise that resolves with the results of the items that processed successfully (failed items are logged and omitted)
 */
export async function processInParallel<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  concurrencyLimit: number = 5, // Safe default for GitHub API (max 100 concurrent, but 5-10 recommended)
  onProgress?: (completed: number, total: number, result?: R) => void
): Promise<R[]> {
  const results: R[] = [];
  let completed = 0;
  const total = items.length;

  // Process items in batches to control concurrency
  for (let i = 0; i < total; i += concurrencyLimit) {
    const batch = items.slice(i, i + concurrencyLimit);

    const batchPromises = batch.map(async (item) => {
      try {
        const result = await processItem(item);
        completed++;

        if (onProgress) {
          onProgress(completed, total, result);
        }

        return result;
      } catch (error) {
        completed++;

        if (onProgress) {
          onProgress(completed, total);
        }

        throw error;
      }
    });

    // Wait for the current batch to complete before starting the next batch
    const batchResults = await Promise.allSettled(batchPromises);

    // Process results and handle errors
    for (let j = 0; j < batchResults.length; j++) {
      const result = batchResults[j];
      if (result.status === 'fulfilled') {
        results.push(result.value);
      } else {
        const itemIndex = i + j;
        console.error("=== BATCH ITEM PROCESSING ERROR ===");
        console.error("Batch index:", Math.floor(i / concurrencyLimit));
        console.error("Item index in batch:", j);
        console.error("Global item index:", itemIndex);
        console.error("Error type:", result.reason?.constructor?.name);
        console.error("Error message:", result.reason instanceof Error ? result.reason.message : String(result.reason));

        if (result.reason instanceof Error && result.reason.message.includes('JSON')) {
          console.error("🚨 JSON parsing error in batch processing");
          console.error("This indicates an API response issue from Gitea");
        }

        console.error("==================================");
      }
    }
  }

  return results;
}

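Behavior in practice, as exercised by the tests above: results come back in input order, and an item whose processor throws is logged and omitted rather than failing the whole run. A minimal, self-contained sketch (run inside an async context):

// Double numbers with at most 3 tasks in flight per batch.
const doubled = await processInParallel(
  [1, 2, 3, 4, 5, 6, 7],
  async (n) => {
    await new Promise((resolve) => setTimeout(resolve, 50)); // simulate I/O
    return n * 2;
  },
  3,
  (done, total) => console.log(`progress: ${done}/${total}`)
);
// doubled -> [2, 4, 6, 8, 10, 12, 14]
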
/**
 * Utility for processing items in parallel with automatic retry for failed operations
 *
 * @param items Array of items to process
 * @param processItem Function to process each item
 * @param options Configuration options
 * @returns Promise that resolves with the results of items that succeeded within the retry budget
 */
export async function processWithRetry<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  options: {
    concurrencyLimit?: number;
    maxRetries?: number;
    retryDelay?: number;
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
    jobId?: string; // Optional job ID for checkpointing
    getItemId?: (item: T) => string; // Function to get a unique ID for each item
    onCheckpoint?: (jobId: string, completedItemId: string) => Promise<void>; // Callback for checkpointing
    checkpointInterval?: number; // How many items to process before checkpointing
  } = {}
): Promise<R[]> {
  const {
    concurrencyLimit = 5,
    maxRetries = 3,
    retryDelay = 1000,
    onProgress,
    onRetry,
    jobId,
    getItemId,
    onCheckpoint,
    checkpointInterval = 1 // Default to checkpointing after each item
  } = options;

  // Track checkpoint counter
  let itemsProcessedSinceLastCheckpoint = 0;

  // Wrap the process function with retry logic
  const processWithRetryLogic = async (item: T): Promise<R> => {
    let lastError: Error | null = null;

    for (let attempt = 1; attempt <= maxRetries + 1; attempt++) {
      try {
        // Check for shutdown before processing each item (only in production)
        let shuttingDown = false;
        try {
          const { isShuttingDown } = await import('@/lib/shutdown-manager');
          shuttingDown = isShuttingDown();
        } catch (importError) {
          // Ignore import errors during testing
        }
        // Throw outside the import try/catch so the interruption isn't swallowed
        if (shuttingDown) {
          throw new Error('Processing interrupted by application shutdown');
        }

        const result = await processItem(item);

        // Handle checkpointing if enabled
        if (jobId && getItemId && onCheckpoint) {
          const itemId = getItemId(item);
          itemsProcessedSinceLastCheckpoint++;

          // Checkpoint based on the interval
          if (itemsProcessedSinceLastCheckpoint >= checkpointInterval) {
            await onCheckpoint(jobId, itemId);
            itemsProcessedSinceLastCheckpoint = 0;
          }
        }

        return result;
      } catch (error) {
        lastError = error instanceof Error ? error : new Error(String(error));

        if (attempt <= maxRetries) {
          if (onRetry) {
            onRetry(item, lastError, attempt);
          }

          // Exponential backoff
          const delay = retryDelay * Math.pow(2, attempt - 1);
          await new Promise(resolve => setTimeout(resolve, delay));
        } else {
          // Enhanced error logging for final failure
          console.error("=== ITEM PROCESSING FAILED (MAX RETRIES EXCEEDED) ===");
          console.error("Item:", getItemId ? getItemId(item) : 'unknown');
          console.error("Error type:", lastError.constructor.name);
          console.error("Error message:", lastError.message);
          console.error("Attempts made:", maxRetries + 1);

          if (lastError.message.includes('JSON')) {
            console.error("🚨 JSON-related error detected in item processing");
            console.error("This suggests an issue with API responses from Gitea");
          }

          console.error("Stack trace:", lastError.stack);
          console.error("================================================");

          throw lastError;
        }
      }
    }

    // This should never be reached due to the throw in the catch block
    throw lastError || new Error('Unknown error occurred');
  };

  const results = await processInParallel(
    items,
    processWithRetryLogic,
    concurrencyLimit,
    onProgress
  );

  // Final checkpoint if there are remaining items since the last checkpoint
  if (jobId && getItemId && onCheckpoint && itemsProcessedSinceLastCheckpoint > 0) {
    // We don't have a specific item ID for the final checkpoint, so we'll use a placeholder
    await onCheckpoint(jobId, 'final');
  }

  return results;
}

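A sketch of the retry and checkpoint hooks working together; the job ID and the Map-backed checkpoint store are illustrative stand-ins (run inside an async context):

const checkpoints = new Map<string, string[]>();

const mirrored = await processWithRetry(
  ["repoA", "repoB", "repoC"],
  async (name) => `${name}: mirrored`,
  {
    maxRetries: 2,
    retryDelay: 100, // backoff grows: 100ms, then 200ms
    onRetry: (item, err, attempt) =>
      console.warn(`retry #${attempt} for ${item}: ${err.message}`),
    jobId: "job-123", // hypothetical job ID
    getItemId: (name) => name,
    onCheckpoint: async (jobId, itemId) => {
      checkpoints.set(jobId, [...(checkpoints.get(jobId) ?? []), itemId]);
    },
    checkpointInterval: 1, // persist after every item
  }
);
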
/**
 * Process items in parallel with resilience to container restarts
 * This version supports resuming from a previous checkpoint
 */
export async function processWithResilience<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  options: {
    concurrencyLimit?: number;
    maxRetries?: number;
    retryDelay?: number;
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
    userId: string; // Required for creating mirror jobs
    jobType: "mirror" | "sync" | "retry";
    getItemId: (item: T) => string; // Required function to get a unique ID for each item
    getItemName: (item: T) => string; // Required function to get a display name for each item
    checkpointInterval?: number;
    resumeFromJobId?: string; // Optional job ID to resume from
  }
): Promise<R[]> {
  const {
    userId,
    jobType,
    getItemId,
    getItemName,
    resumeFromJobId,
    checkpointInterval = 10, // Increased from 5 to reduce event frequency
    ...otherOptions
  } = options;

  // Import helpers for job management and shutdown handling
  const { createMirrorJob, updateMirrorJobProgress } = await import('@/lib/helpers');

  // Import shutdown manager (with fallback for testing)
  let registerActiveJob: (jobId: string) => void = () => {};
  let unregisterActiveJob: (jobId: string) => void = () => {};
  let isShuttingDown: () => boolean = () => false;

  try {
    const shutdownManager = await import('@/lib/shutdown-manager');
    registerActiveJob = shutdownManager.registerActiveJob;
    unregisterActiveJob = shutdownManager.unregisterActiveJob;
    isShuttingDown = shutdownManager.isShuttingDown;
  } catch (importError) {
    // Use fallback functions during testing
    console.log('Using fallback shutdown manager functions (testing mode)');
  }

  // Get item IDs for all items
  const allItemIds = items.map(getItemId);

  // Create or resume a job
  let jobId: string;
  let completedItemIds: string[] = [];
  let itemsToProcess = [...items];

  if (resumeFromJobId) {
    // We're resuming an existing job
    jobId = resumeFromJobId;

    // Get the job from the database to find completed items
    const { db, mirrorJobs } = await import('@/lib/db');
    const { eq } = await import('drizzle-orm');
    const [job] = await db
      .select()
      .from(mirrorJobs)
      .where(eq(mirrorJobs.id, resumeFromJobId));

    if (job && job.completedItemIds) {
      completedItemIds = job.completedItemIds;

      // Filter out already completed items
      itemsToProcess = items.filter(item => !completedItemIds.includes(getItemId(item)));

      console.log(`Resuming job ${jobId} with ${itemsToProcess.length} remaining items`);

      // Update the job to show it's being resumed
      await updateMirrorJobProgress({
        jobId,
        message: `Resuming job with ${itemsToProcess.length} remaining items`,
        details: `Job is being resumed. ${completedItemIds.length} of ${items.length} items were already processed.`,
        inProgress: true,
      });
    }
  } else {
    // Create a new job
    jobId = await createMirrorJob({
      userId,
      message: `Started ${jobType} job with ${items.length} items`,
      details: `Processing ${items.length} items in parallel with checkpointing`,
      status: "mirroring",
      jobType,
      totalItems: items.length,
      itemIds: allItemIds,
      inProgress: true,
    });

    console.log(`Created new job ${jobId} with ${items.length} items`);
  }

  // Register the job with the shutdown manager
  registerActiveJob(jobId);

  // Define the checkpoint function
  const onCheckpoint = async (jobId: string, completedItemId: string) => {
    const completedItem = items.find(item => getItemId(item) === completedItemId);
    const itemName = completedItem ? getItemName(completedItem) : 'unknown';

    await updateMirrorJobProgress({
      jobId,
      completedItemId,
      message: `Processed item: ${itemName}`,
    });
  };

  try {
    // Check if shutdown is in progress before starting
    if (isShuttingDown()) {
      console.log(`⚠️ Shutdown in progress, aborting job ${jobId}`);
      throw new Error('Job aborted due to application shutdown');
    }

    // Process the items with checkpointing
    const results = await processWithRetry(
      itemsToProcess,
      processItem,
      {
        ...otherOptions,
        jobId,
        getItemId,
        onCheckpoint,
        checkpointInterval,
      }
    );

    // Mark the job as completed
    await updateMirrorJobProgress({
      jobId,
      status: "mirrored",
      message: `Completed ${jobType} job with ${items.length} items`,
      inProgress: false,
      isCompleted: true,
    });

    // Unregister the job from shutdown manager
    unregisterActiveJob(jobId);

    return results;
  } catch (error) {
    // Mark the job as failed (unless it was interrupted by shutdown)
    const isShutdownError = error instanceof Error && error.message.includes('shutdown');

    await updateMirrorJobProgress({
      jobId,
      status: isShutdownError ? "imported" : "failed", // Keep as imported if shutdown interrupted
      message: isShutdownError
        ? 'Job interrupted by application shutdown - will resume on restart'
        : `Failed ${jobType} job: ${error instanceof Error ? error.message : String(error)}`,
      inProgress: false,
      isCompleted: !isShutdownError, // Don't mark as completed if shutdown interrupted
    });

    // Unregister the job from shutdown manager
    unregisterActiveJob(jobId);

    throw error;
  }
}
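
Callers are expected to look roughly like the following; the repo shape and the mirroring step are stand-ins for this sketch, and a real call would also write job records through '@/lib/helpers':

// Hypothetical caller; Repo and the processor are examples only.
type Repo = { id: string; fullName: string };
const repos: Repo[] = [{ id: "1", fullName: "octocat/hello-world" }];

const results = await processWithResilience(
  repos,
  async (repo) => `${repo.fullName}: mirrored`,
  {
    userId: "user-1",
    jobType: "mirror",
    getItemId: (repo) => repo.id,
    getItemName: (repo) => repo.fullName,
    checkpointInterval: 10,
    // resumeFromJobId: "job-abc", // supply to skip items already checkpointed
  }
);
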
@@ -0,0 +1,135 @@
import { db, configs } from "@/lib/db";
import { eq } from "drizzle-orm";
import { v4 as uuidv4 } from "uuid";
import { encrypt } from "@/lib/utils/encryption";

export interface DefaultConfigOptions {
  userId: string;
  envOverrides?: {
    githubToken?: string;
    githubUsername?: string;
    giteaUrl?: string;
    giteaToken?: string;
    giteaUsername?: string;
    scheduleEnabled?: boolean;
    scheduleInterval?: number;
    cleanupEnabled?: boolean;
    cleanupRetentionDays?: number;
  };
}

/**
 * Creates a default configuration for a new user with sensible defaults
 * Environment variables can override these defaults
 */
export async function createDefaultConfig({ userId, envOverrides = {} }: DefaultConfigOptions) {
  // Check if config already exists
  const existingConfig = await db
    .select()
    .from(configs)
    .where(eq(configs.userId, userId))
    .limit(1);

  if (existingConfig.length > 0) {
    return existingConfig[0];
  }

  // Read environment variables for overrides
  const githubToken = envOverrides.githubToken || process.env.GITHUB_TOKEN || "";
  const githubUsername = envOverrides.githubUsername || process.env.GITHUB_USERNAME || "";
  const giteaUrl = envOverrides.giteaUrl || process.env.GITEA_URL || "";
  const giteaToken = envOverrides.giteaToken || process.env.GITEA_TOKEN || "";
  const giteaUsername = envOverrides.giteaUsername || process.env.GITEA_USERNAME || "";

  // Schedule config from env - default to ENABLED
  const scheduleEnabled = envOverrides.scheduleEnabled ??
    (process.env.SCHEDULE_ENABLED === "false" ? false : true); // Default: ENABLED
  const scheduleInterval = envOverrides.scheduleInterval ??
    (process.env.SCHEDULE_INTERVAL ? parseInt(process.env.SCHEDULE_INTERVAL, 10) : 86400); // Default: daily

  // Cleanup config from env - default to ENABLED
  const cleanupEnabled = envOverrides.cleanupEnabled ??
    (process.env.CLEANUP_ENABLED === "false" ? false : true); // Default: ENABLED
  const cleanupRetentionDays = envOverrides.cleanupRetentionDays ??
    (process.env.CLEANUP_RETENTION_DAYS ? parseInt(process.env.CLEANUP_RETENTION_DAYS, 10) * 86400 : 604800); // Default: 7 days (stored in seconds despite the field name)

  // Create default configuration
  const configId = uuidv4();
  const defaultConfig = {
    id: configId,
    userId,
    name: "Default Configuration",
    isActive: true,
    githubConfig: {
      owner: githubUsername,
      type: "personal",
      token: githubToken ? encrypt(githubToken) : "",
      includeStarred: false,
      includeForks: true,
      includeArchived: false,
      includePrivate: false,
      includePublic: true,
      includeOrganizations: [],
      starredReposOrg: "starred",
      mirrorStrategy: "preserve",
      defaultOrg: "github-mirrors",
    },
    giteaConfig: {
      url: giteaUrl,
      token: giteaToken ? encrypt(giteaToken) : "",
      defaultOwner: giteaUsername,
      mirrorInterval: "8h",
      lfs: false,
      wiki: false,
      visibility: "public",
      createOrg: true,
      addTopics: true,
      preserveVisibility: false,
      forkStrategy: "reference",
      issueConcurrency: 3,
      pullRequestConcurrency: 5,
    },
    include: [],
    exclude: [],
    scheduleConfig: {
      enabled: scheduleEnabled,
      interval: scheduleInterval,
      concurrent: false,
      batchSize: 5, // Reduced from 10 to be more conservative with GitHub API limits
      lastRun: null,
      nextRun: scheduleEnabled ? new Date(Date.now() + scheduleInterval * 1000) : null,
    },
    cleanupConfig: {
      enabled: cleanupEnabled,
      retentionDays: cleanupRetentionDays,
      deleteFromGitea: false,
      deleteIfNotInGitHub: true,
      protectedRepos: [],
      dryRun: false,
      orphanedRepoAction: "archive",
      batchSize: 10,
      pauseBetweenDeletes: 2000,
      lastRun: null,
      nextRun: cleanupEnabled ? new Date(Date.now() + getCleanupInterval(cleanupRetentionDays) * 1000) : null,
    },
    createdAt: new Date(),
    updatedAt: new Date(),
  };

  // Insert the default config
  await db.insert(configs).values(defaultConfig);

  return defaultConfig;
}

/**
 * Calculate cleanup interval based on retention period
 */
function getCleanupInterval(retentionSeconds: number): number {
  const days = retentionSeconds / 86400;
  if (days <= 1) return 21600; // 6 hours
  if (days <= 3) return 43200; // 12 hours
  if (days <= 7) return 86400; // 24 hours
  if (days <= 30) return 172800; // 48 hours
  return 604800; // 1 week
}
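
To make the unit handling concrete, here is how the defaults work out under a couple of example environment values (derived from the code above; the values themselves are illustrative):

// With SCHEDULE_INTERVAL=3600 and CLEANUP_RETENTION_DAYS=3 in the environment:
const config = await createDefaultConfig({ userId: "user-1" });
// config.scheduleConfig.interval === 3600        (seconds, taken as-is)
// config.cleanupConfig.retentionDays === 259200  (3 days converted to seconds)
// internally, getCleanupInterval(259200) === 43200, so cleanup is re-checked every 12 hours
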
@@ -0,0 +1,52 @@
import { decrypt } from "./encryption";
import type { Config } from "@/types/config";

/**
 * Decrypts tokens in a config object for use in API calls
 * @param config The config object with potentially encrypted tokens
 * @returns Config object with decrypted tokens
 */
export function decryptConfigTokens(config: Config): Config {
  const decryptedConfig = { ...config };

  // Shallow-clone the nested config objects so the originals aren't mutated
  if (config.githubConfig) {
    decryptedConfig.githubConfig = { ...config.githubConfig };
    if (config.githubConfig.token) {
      decryptedConfig.githubConfig.token = decrypt(config.githubConfig.token);
    }
  }

  if (config.giteaConfig) {
    decryptedConfig.giteaConfig = { ...config.giteaConfig };
    if (config.giteaConfig.token) {
      decryptedConfig.giteaConfig.token = decrypt(config.giteaConfig.token);
    }
  }

  return decryptedConfig;
}

/**
 * Gets a decrypted GitHub token from config
 * @param config The config object
 * @returns Decrypted GitHub token
 */
export function getDecryptedGitHubToken(config: Config): string {
  if (!config.githubConfig?.token) {
    throw new Error("GitHub token not found in config");
  }
  return decrypt(config.githubConfig.token);
}

/**
 * Gets a decrypted Gitea token from config
 * @param config The config object
 * @returns Decrypted Gitea token
 */
export function getDecryptedGiteaToken(config: Config): string {
  if (!config.giteaConfig?.token) {
    throw new Error("Gitea token not found in config");
  }
  return decrypt(config.giteaConfig.token);
}
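
A sketch of the intended call site: decrypt just before talking to the GitHub or Gitea APIs, not when returning the config to the browser. loadConfigForUser below is a hypothetical stand-in for the actual database read:

const config = await loadConfigForUser(userId); // hypothetical DB read
const { githubConfig } = decryptConfigTokens(config);
// githubConfig.token is now plaintext, suitable for an Authorization header
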
@@ -0,0 +1,282 @@
/**
 * Maps between UI config structure and database schema structure
 */

import type {
  GitHubConfig,
  GiteaConfig,
  MirrorOptions,
  AdvancedOptions,
  SaveConfigApiRequest
} from "@/types/config";
import { z } from "zod";
import { githubConfigSchema, giteaConfigSchema, scheduleConfigSchema, cleanupConfigSchema } from "@/lib/db/schema";
import { parseInterval } from "@/lib/utils/duration-parser";

// Use the actual database schema types
type DbGitHubConfig = z.infer<typeof githubConfigSchema>;
type DbGiteaConfig = z.infer<typeof giteaConfigSchema>;
type DbScheduleConfig = z.infer<typeof scheduleConfigSchema>;
type DbCleanupConfig = z.infer<typeof cleanupConfigSchema>;

/**
 * Maps UI config structure to database schema structure
 */
export function mapUiToDbConfig(
  githubConfig: GitHubConfig,
  giteaConfig: GiteaConfig,
  mirrorOptions: MirrorOptions,
  advancedOptions: AdvancedOptions
): { githubConfig: DbGitHubConfig; giteaConfig: DbGiteaConfig } {
  // Map GitHub config to match database schema fields
  const dbGithubConfig: DbGitHubConfig = {
    // Map username to owner field
    owner: githubConfig.username,
    type: "personal", // Default to personal, could be made configurable
    token: githubConfig.token || "",

    // Map checkbox fields with proper names
    includeStarred: githubConfig.mirrorStarred,
    includePrivate: githubConfig.privateRepositories,
    includeForks: !advancedOptions.skipForks, // Note: UI has skipForks, DB has includeForks
    skipForks: advancedOptions.skipForks, // Add skipForks field
    includeArchived: false, // Not in UI yet, default to false
    includePublic: true, // Not in UI yet, default to true

    // Organization related fields
    includeOrganizations: [], // Not in UI yet

    // Starred repos organization
    starredReposOrg: giteaConfig.starredReposOrg,

    // Mirror strategy
    mirrorStrategy: giteaConfig.mirrorStrategy || "preserve",
    defaultOrg: giteaConfig.organization,

    // Advanced options
    starredCodeOnly: advancedOptions.starredCodeOnly,
  };

  // Map Gitea config to match database schema
  const dbGiteaConfig: DbGiteaConfig = {
    url: giteaConfig.url,
    token: giteaConfig.token,
    defaultOwner: giteaConfig.username, // Map username to defaultOwner
    organization: giteaConfig.organization, // Add organization field
    preserveOrgStructure: giteaConfig.mirrorStrategy === "preserve" || giteaConfig.mirrorStrategy === "mixed", // Add preserveOrgStructure field

    // Mirror interval and options
    mirrorInterval: "8h", // Default value, could be made configurable
    lfs: mirrorOptions.mirrorLFS || false, // LFS mirroring option
    wiki: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.wiki,

    // Visibility settings
    visibility: giteaConfig.visibility || "default",
    preserveVisibility: false, // This should be a separate field, not the same as preserveOrgStructure

    // Organization creation
    createOrg: true, // Default to true

    // Template settings (not in UI yet)
    templateOwner: undefined,
    templateRepo: undefined,

    // Topics
    addTopics: true, // Default to true
    topicPrefix: undefined,

    // Fork strategy
    forkStrategy: advancedOptions.skipForks ? "skip" : "reference",

    // Mirror options from UI
    issueConcurrency: giteaConfig.issueConcurrency ?? 3,
    pullRequestConcurrency: giteaConfig.pullRequestConcurrency ?? 5,
    mirrorReleases: mirrorOptions.mirrorReleases,
    releaseLimit: mirrorOptions.releaseLimit || 10,
    mirrorMetadata: mirrorOptions.mirrorMetadata,
    mirrorIssues: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.issues,
    mirrorPullRequests: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.pullRequests,
    mirrorLabels: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.labels,
    mirrorMilestones: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.milestones,
  };

  return {
    githubConfig: dbGithubConfig,
    giteaConfig: dbGiteaConfig,
  };
}

/**
 * Maps database schema structure to UI config structure
 */
export function mapDbToUiConfig(dbConfig: any): {
  githubConfig: GitHubConfig;
  giteaConfig: GiteaConfig;
  mirrorOptions: MirrorOptions;
  advancedOptions: AdvancedOptions;
} {
  // Map from database GitHub config to UI fields
  const githubConfig: GitHubConfig = {
    username: dbConfig.githubConfig?.owner || "", // Map owner to username
    token: dbConfig.githubConfig?.token || "",
    privateRepositories: dbConfig.githubConfig?.includePrivate || false, // Map includePrivate to privateRepositories
    mirrorStarred: dbConfig.githubConfig?.includeStarred || false, // Map includeStarred to mirrorStarred
  };

  // Map from database Gitea config to UI fields
  const giteaConfig: GiteaConfig = {
    url: dbConfig.giteaConfig?.url || "",
    username: dbConfig.giteaConfig?.defaultOwner || "", // Map defaultOwner to username
    token: dbConfig.giteaConfig?.token || "",
    organization: dbConfig.githubConfig?.defaultOrg || "github-mirrors", // Get from GitHub config
    visibility: dbConfig.giteaConfig?.visibility === "default" ? "public" : dbConfig.giteaConfig?.visibility || "public",
    starredReposOrg: dbConfig.githubConfig?.starredReposOrg || "starred", // Get from GitHub config
    preserveOrgStructure: dbConfig.giteaConfig?.preserveVisibility || false, // Map preserveVisibility
    mirrorStrategy: dbConfig.githubConfig?.mirrorStrategy || "preserve", // Get from GitHub config
    personalReposOrg: undefined, // Not stored in current schema
    issueConcurrency: dbConfig.giteaConfig?.issueConcurrency ?? 3,
    pullRequestConcurrency: dbConfig.giteaConfig?.pullRequestConcurrency ?? 5,
  };

  // Map mirror options from various database fields
  const mirrorOptions: MirrorOptions = {
    mirrorReleases: dbConfig.giteaConfig?.mirrorReleases || false,
    releaseLimit: dbConfig.giteaConfig?.releaseLimit || 10,
    mirrorLFS: dbConfig.giteaConfig?.lfs || false,
    mirrorMetadata: dbConfig.giteaConfig?.mirrorMetadata || false,
    metadataComponents: {
      issues: dbConfig.giteaConfig?.mirrorIssues || false,
      pullRequests: dbConfig.giteaConfig?.mirrorPullRequests || false,
      labels: dbConfig.giteaConfig?.mirrorLabels || false,
      milestones: dbConfig.giteaConfig?.mirrorMilestones || false,
      wiki: dbConfig.giteaConfig?.wiki || false,
    },
  };

  // Map advanced options
  const advancedOptions: AdvancedOptions = {
    skipForks: !(dbConfig.githubConfig?.includeForks ?? true), // Invert includeForks to get skipForks
    // Support both old (skipStarredIssues) and new (starredCodeOnly) field names for backward compatibility
    starredCodeOnly: dbConfig.githubConfig?.starredCodeOnly ?? (dbConfig.githubConfig as any)?.skipStarredIssues ?? false,
  };

  return {
    githubConfig,
    giteaConfig,
    mirrorOptions,
    advancedOptions,
  };
}

/**
 * Maps UI schedule config to database schema
 */
export function mapUiScheduleToDb(uiSchedule: any, existing?: DbScheduleConfig): DbScheduleConfig {
  // Preserve existing schedule config and only update fields controlled by the UI
  const base: DbScheduleConfig = existing
    ? { ...(existing as unknown as DbScheduleConfig) }
    : (scheduleConfigSchema.parse({}) as unknown as DbScheduleConfig);

  // Store interval as seconds string to avoid lossy cron conversion
  const intervalSeconds = typeof uiSchedule.interval === 'number' && uiSchedule.interval > 0
    ? String(uiSchedule.interval)
    : (typeof base.interval === 'string' ? base.interval : String(86400));

  return {
    ...base,
    enabled: !!uiSchedule.enabled,
    interval: intervalSeconds,
  } as DbScheduleConfig;
}

/**
 * Maps database schedule config to UI format
 */
export function mapDbScheduleToUi(dbSchedule: DbScheduleConfig): any {
  // Handle null/undefined schedule config
  if (!dbSchedule) {
    return {
      enabled: false,
      interval: 86400, // Default to daily (24 hours)
      lastRun: null,
      nextRun: null,
    };
  }

  // Parse interval supporting numbers (seconds), duration strings, and cron
  let intervalSeconds = 86400; // Default to daily (24 hours)
  try {
    const ms = parseInterval(
      typeof dbSchedule.interval === 'number'
        ? dbSchedule.interval
        : (dbSchedule.interval as unknown as string)
    );
    intervalSeconds = Math.max(1, Math.floor(ms / 1000));
  } catch (_e) {
    // Fallback to default if unparsable
    intervalSeconds = 86400;
  }

  return {
    enabled: dbSchedule.enabled || false,
    interval: intervalSeconds,
    lastRun: dbSchedule.lastRun || null,
    nextRun: dbSchedule.nextRun || null,
  };
}

/**
 * Maps UI cleanup config to database schema
 */
export function mapUiCleanupToDb(uiCleanup: any): DbCleanupConfig {
  const parsedRetention =
    typeof uiCleanup.retentionDays === "string"
      ? parseInt(uiCleanup.retentionDays, 10)
      : uiCleanup.retentionDays;
  const retentionSeconds = Number.isFinite(parsedRetention)
    ? parsedRetention
    : 604800;

  return {
    enabled: Boolean(uiCleanup.enabled),
    retentionDays: retentionSeconds,
    deleteFromGitea: uiCleanup.deleteFromGitea ?? false,
    deleteIfNotInGitHub: uiCleanup.deleteIfNotInGitHub ?? true,
    protectedRepos: uiCleanup.protectedRepos ?? [],
    dryRun: uiCleanup.dryRun ?? false,
    orphanedRepoAction: (uiCleanup.orphanedRepoAction as DbCleanupConfig["orphanedRepoAction"]) || "archive",
    batchSize: uiCleanup.batchSize ?? 10,
    pauseBetweenDeletes: uiCleanup.pauseBetweenDeletes ?? 2000,
    lastRun: uiCleanup.lastRun ?? null,
    nextRun: uiCleanup.nextRun ?? null,
  };
}

/**
 * Maps database cleanup config to UI format
 */
export function mapDbCleanupToUi(dbCleanup: DbCleanupConfig): any {
  // Handle null/undefined cleanup config
  if (!dbCleanup) {
    return {
      enabled: false,
      retentionDays: 604800, // Default to 7 days in seconds
      lastRun: null,
      nextRun: null,
    };
  }

  return {
    enabled: dbCleanup.enabled ?? false,
    retentionDays: dbCleanup.retentionDays ?? 604800,
    deleteFromGitea: dbCleanup.deleteFromGitea ?? false,
    deleteIfNotInGitHub: dbCleanup.deleteIfNotInGitHub ?? true,
    protectedRepos: dbCleanup.protectedRepos ?? [],
    dryRun: dbCleanup.dryRun ?? false,
    orphanedRepoAction: dbCleanup.orphanedRepoAction ?? "archive",
    batchSize: dbCleanup.batchSize ?? 10,
    pauseBetweenDeletes: dbCleanup.pauseBetweenDeletes ?? 2000,
    lastRun: dbCleanup.lastRun ?? null,
    nextRun: dbCleanup.nextRun ?? null,
  };
}
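
The schedule mapping deliberately stores the interval as a seconds string rather than converting to cron; a quick round-trip illustration (values are arbitrary):

const dbSchedule = mapUiScheduleToDb({ enabled: true, interval: 28800 });
// dbSchedule.interval === "28800" (seconds kept as a string, no lossy cron conversion)

const uiSchedule = mapDbScheduleToUi(dbSchedule);
// uiSchedule.interval === 28800 (parseInterval accepts numbers, "8h", or cron)
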
@@ -0,0 +1,94 @@
import { test, expect } from 'bun:test';
import { parseDuration, parseInterval, formatDuration, parseCronInterval } from './duration-parser';

test('parseDuration - handles duration strings correctly', () => {
  // Hours
  expect(parseDuration('8h')).toBe(8 * 60 * 60 * 1000);
  expect(parseDuration('1h')).toBe(60 * 60 * 1000);
  expect(parseDuration('24h')).toBe(24 * 60 * 60 * 1000);

  // Minutes
  expect(parseDuration('30m')).toBe(30 * 60 * 1000);
  expect(parseDuration('5m')).toBe(5 * 60 * 1000);

  // Seconds
  expect(parseDuration('45s')).toBe(45 * 1000);
  expect(parseDuration('1s')).toBe(1000);

  // Days
  expect(parseDuration('1d')).toBe(24 * 60 * 60 * 1000);
  expect(parseDuration('7d')).toBe(7 * 24 * 60 * 60 * 1000);

  // Numbers (treated as seconds)
  expect(parseDuration(3600)).toBe(3600 * 1000);
  expect(parseDuration('3600')).toBe(3600 * 1000);
});

test('parseDuration - handles edge cases', () => {
  // Case insensitive
  expect(parseDuration('8H')).toBe(8 * 60 * 60 * 1000);
  expect(parseDuration('30M')).toBe(30 * 60 * 1000);

  // With spaces
  expect(parseDuration('8 h')).toBe(8 * 60 * 60 * 1000);
  expect(parseDuration('30 minutes')).toBe(30 * 60 * 1000);

  // Fractional values
  expect(parseDuration('1.5h')).toBe(1.5 * 60 * 60 * 1000);
  expect(parseDuration('2.5m')).toBe(2.5 * 60 * 1000);
});

test('parseDuration - throws on invalid input', () => {
  expect(() => parseDuration('')).toThrow();
  expect(() => parseDuration('invalid')).toThrow();
  expect(() => parseDuration('8x')).toThrow();
  expect(() => parseDuration('-1h')).toThrow();
});

test('parseInterval - handles cron expressions', () => {
  // Every 2 hours
  expect(parseInterval('0 */2 * * *')).toBe(2 * 60 * 60 * 1000);

  // Every 15 minutes
  expect(parseInterval('*/15 * * * *')).toBe(15 * 60 * 1000);

  // Daily at 2 AM
  expect(parseInterval('0 2 * * *')).toBe(24 * 60 * 60 * 1000);
});

test('parseInterval - prioritizes duration strings over cron', () => {
  expect(parseInterval('8h')).toBe(8 * 60 * 60 * 1000);
  expect(parseInterval('30m')).toBe(30 * 60 * 1000);
  expect(parseInterval(3600)).toBe(3600 * 1000);
});

test('formatDuration - converts milliseconds back to readable format', () => {
  expect(formatDuration(1000)).toBe('1s');
  expect(formatDuration(60 * 1000)).toBe('1m');
  expect(formatDuration(60 * 60 * 1000)).toBe('1h');
  expect(formatDuration(24 * 60 * 60 * 1000)).toBe('1d');
  expect(formatDuration(8 * 60 * 60 * 1000)).toBe('8h');
  expect(formatDuration(500)).toBe('500ms');
});

test('parseCronInterval - handles common cron patterns', () => {
  expect(parseCronInterval('0 */8 * * *')).toBe(8 * 60 * 60 * 1000);
  expect(parseCronInterval('*/30 * * * *')).toBe(30 * 60 * 1000);
  expect(parseCronInterval('0 2 * * *')).toBe(24 * 60 * 60 * 1000);
  expect(parseCronInterval('0 0 * * 0')).toBe(7 * 24 * 60 * 60 * 1000); // Weekly
});

test('Integration test - Issue #72 scenario', () => {
  // User sets GITEA_MIRROR_INTERVAL=8h
  const userInterval = '8h';
  const parsedMs = parseInterval(userInterval);

  expect(parsedMs).toBe(8 * 60 * 60 * 1000); // 8 hours in milliseconds
  expect(formatDuration(parsedMs)).toBe('8h');

  // Should work from container startup time
  const startTime = new Date();
  const nextRun = new Date(startTime.getTime() + parsedMs);

  expect(nextRun.getTime() - startTime.getTime()).toBe(8 * 60 * 60 * 1000);
});
@@ -0,0 +1,251 @@
/**
 * Duration parser utility for converting human-readable duration strings to milliseconds
 * Supports formats like: 8h, 30m, 24h, 1d, 5s, etc.
 */

export interface ParsedDuration {
  value: number;
  unit: string;
  milliseconds: number;
}

/**
 * Parse a duration string into milliseconds
 * @param duration - Duration string (e.g., "8h", "30m", "1d", "5s") or number in seconds
 * @returns Duration in milliseconds
 */
export function parseDuration(duration: string | number): number {
  if (typeof duration === 'number') {
    return duration * 1000; // Convert seconds to milliseconds
  }

  if (!duration || typeof duration !== 'string') {
    throw new Error('Invalid duration: must be a string or number');
  }

  // Try to parse as number first (assume seconds)
  const parsed = parseInt(duration, 10);
  if (!isNaN(parsed) && duration === parsed.toString()) {
    return parsed * 1000; // Convert seconds to milliseconds
  }

  // Parse duration string with unit
  const match = duration.trim().match(/^(\d+(?:\.\d+)?)\s*([a-zA-Z]+)$/);
  if (!match) {
    throw new Error(`Invalid duration format: "${duration}". Expected format like "8h", "30m", "1d"`);
  }

  const [, valueStr, unit] = match;
  const value = parseFloat(valueStr);

  if (isNaN(value) || value < 0) {
    throw new Error(`Invalid duration value: "${valueStr}". Must be a positive number`);
  }

  const unitLower = unit.toLowerCase();
  let multiplier: number;

  switch (unitLower) {
    case 'ms':
    case 'millisecond':
    case 'milliseconds':
      multiplier = 1;
      break;
    case 's':
    case 'sec':
    case 'second':
    case 'seconds':
      multiplier = 1000;
      break;
    case 'm':
    case 'min':
    case 'minute':
    case 'minutes':
      multiplier = 60 * 1000;
      break;
    case 'h':
    case 'hr':
    case 'hour':
    case 'hours':
      multiplier = 60 * 60 * 1000;
      break;
    case 'd':
    case 'day':
    case 'days':
      multiplier = 24 * 60 * 60 * 1000;
      break;
    case 'w':
    case 'week':
    case 'weeks':
      multiplier = 7 * 24 * 60 * 60 * 1000;
      break;
    default:
      throw new Error(`Unsupported duration unit: "${unit}". Supported units: ms, s, m, h, d, w`);
  }

  return Math.floor(value * multiplier);
}

/**
 * Parse a duration string and return detailed information
 * @param duration - Duration string
 * @returns Parsed duration with value, unit, and milliseconds
 */
export function parseDurationDetailed(duration: string | number): ParsedDuration {
  const milliseconds = parseDuration(duration);

  if (typeof duration === 'number') {
    return {
      value: duration,
      unit: 's',
      milliseconds
    };
  }

  const match = duration.trim().match(/^(\d+(?:\.\d+)?)\s*([a-zA-Z]+)$/);
  if (!match) {
    // If it's just a number as string
    const value = parseFloat(duration);
    if (!isNaN(value)) {
      return {
        value,
        unit: 's',
        milliseconds
      };
    }
    throw new Error(`Invalid duration format: "${duration}"`);
  }

  const [, valueStr, unit] = match;
  return {
    value: parseFloat(valueStr),
    unit: unit.toLowerCase(),
    milliseconds
  };
}

/**
 * Format milliseconds back to human-readable duration
 * @param milliseconds - Duration in milliseconds
 * @returns Human-readable duration string
 */
export function formatDuration(milliseconds: number): string {
  if (milliseconds < 1000) {
    return `${milliseconds}ms`;
  }

  const seconds = Math.floor(milliseconds / 1000);
  if (seconds < 60) {
    return `${seconds}s`;
  }

  const minutes = Math.floor(seconds / 60);
  if (minutes < 60) {
    return `${minutes}m`;
  }

  const hours = Math.floor(minutes / 60);
  if (hours < 24) {
    return `${hours}h`;
  }

  const days = Math.floor(hours / 24);
  return `${days}d`;
}

/**
 * Parse cron expression to approximate milliseconds interval
 * This is a simplified parser for common cron patterns
 * @param cron - Cron expression
 * @returns Approximate interval in milliseconds
 */
export function parseCronInterval(cron: string): number {
  if (!cron || typeof cron !== 'string') {
    throw new Error('Invalid cron expression');
  }

  const parts = cron.trim().split(/\s+/);
  if (parts.length !== 5) {
    throw new Error('Cron expression must have 5 parts (minute hour day month weekday)');
  }

  const [minute, hour, day, month, weekday] = parts;

  // Extract hour interval from patterns like "*/2" (every 2 hours)
  if (hour.includes('*/')) {
    const everyMatch = hour.match(/\*\/(\d+)/);
    if (everyMatch) {
      const hours = parseInt(everyMatch[1], 10);
      return hours * 60 * 60 * 1000; // Convert hours to milliseconds
    }
  }

  // Extract minute interval from patterns like "*/15" (every 15 minutes)
  if (minute.includes('*/')) {
    const everyMatch = minute.match(/\*\/(\d+)/);
    if (everyMatch) {
      const minutes = parseInt(everyMatch[1], 10);
      return minutes * 60 * 1000; // Convert minutes to milliseconds
    }
  }

  // Daily patterns like "0 2 * * *" (daily at 2 AM)
  if (hour !== '*' && minute !== '*' && day === '*' && month === '*' && weekday === '*') {
    return 24 * 60 * 60 * 1000; // 24 hours in milliseconds
  }

  // Weekly patterns
  if (weekday !== '*') {
    return 7 * 24 * 60 * 60 * 1000; // 7 days in milliseconds
  }

  // Monthly patterns
  if (day !== '*') {
    return 30 * 24 * 60 * 60 * 1000; // Approximate month (30 days)
  }

  // Default to 1 hour if unable to parse
  return 60 * 60 * 1000;
}

/**
 * Enhanced interval parser that handles duration strings, cron expressions, and numbers
 * @param interval - Interval specification (duration string, cron, or number)
 * @returns Interval in milliseconds
 */
export function parseInterval(interval: string | number): number {
  if (typeof interval === 'number') {
    return interval * 1000; // Convert seconds to milliseconds
  }

  if (!interval || typeof interval !== 'string') {
    throw new Error('Invalid interval: must be a string or number');
  }

  const trimmed = interval.trim();

  // Check if it's a cron expression (contains spaces and specific patterns)
  if (trimmed.includes(' ') && trimmed.split(/\s+/).length === 5) {
    try {
      return parseCronInterval(trimmed);
    } catch (error) {
      console.warn(`Failed to parse as cron expression: ${error instanceof Error ? error.message : 'Unknown error'}`);
      // Fall through to duration parsing
    }
  }

  // Try to parse as duration string
  try {
    return parseDuration(trimmed);
  } catch (error) {
    console.warn(`Failed to parse as duration: ${error instanceof Error ? error.message : 'Unknown error'}`);

    // Last resort: try as plain number (seconds)
    const parsed = parseInt(trimmed, 10);
    if (!isNaN(parsed)) {
      return parsed * 1000;
    }

    throw new Error(`Unable to parse interval: "${interval}". Expected duration (e.g., "8h"), cron expression (e.g., "0 */2 * * *"), or number of seconds`);
  }
}
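
The precedence between the three accepted formats, as pinned down by the tests above:

parseInterval("8h");          // 28800000 ms: duration string
parseInterval("0 */2 * * *"); // 7200000 ms: recognized as a 5-field cron pattern
parseInterval(3600);          // 3600000 ms: bare numbers are seconds
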
@@ -0,0 +1,169 @@
|
||||
import * as crypto from "crypto";
|
||||
|
||||
// Encryption configuration
|
||||
const ALGORITHM = "aes-256-gcm";
|
||||
const IV_LENGTH = 16; // 128 bits
|
||||
const SALT_LENGTH = 32; // 256 bits
|
||||
const TAG_LENGTH = 16; // 128 bits
|
||||
const KEY_LENGTH = 32; // 256 bits
|
||||
const ITERATIONS = 100000; // PBKDF2 iterations
|
||||
|
||||
// Get or generate encryption key
|
||||
function getEncryptionKey(): Buffer {
|
||||
const secret = process.env.ENCRYPTION_SECRET || process.env.JWT_SECRET || process.env.BETTER_AUTH_SECRET;
|
||||
|
||||
if (!secret) {
|
||||
throw new Error("No encryption secret found. Please set ENCRYPTION_SECRET environment variable.");
|
||||
}
|
||||
|
||||
// Use a static salt derived from the secret for consistent key generation
|
||||
// This ensures the same key is generated across application restarts
|
||||
const salt = crypto.createHash('sha256').update('gitea-mirror-salt' + secret).digest();
|
||||
|
||||
return crypto.pbkdf2Sync(secret, salt, ITERATIONS, KEY_LENGTH, 'sha256');
|
||||
}
|
||||
|
||||
export interface EncryptedData {
|
||||
encrypted: string;
|
||||
iv: string;
|
||||
salt: string;
|
||||
tag: string;
|
||||
version: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypts sensitive data like API tokens
|
||||
* @param plaintext The data to encrypt
|
||||
* @returns Encrypted data with metadata
|
||||
*/
|
||||
export function encrypt(plaintext: string): string {
|
||||
if (!plaintext) {
|
||||
return '';
|
||||
}
|
||||
|
||||
try {
|
||||
const key = getEncryptionKey();
|
||||
const iv = crypto.randomBytes(IV_LENGTH);
|
||||
const salt = crypto.randomBytes(SALT_LENGTH);
|
||||
|
||||
const cipher = crypto.createCipheriv(ALGORITHM, key, iv);
|
||||
|
||||
const encrypted = Buffer.concat([
|
||||
cipher.update(plaintext, 'utf8'),
|
||||
cipher.final()
|
||||
]);
|
||||
|
||||
const tag = cipher.getAuthTag();
|
||||
|
||||
const encryptedData: EncryptedData = {
|
||||
encrypted: encrypted.toString('base64'),
|
||||
iv: iv.toString('base64'),
|
||||
salt: salt.toString('base64'),
|
||||
tag: tag.toString('base64'),
|
||||
version: 1
|
||||
};
|
||||
|
||||
// Return as base64 encoded JSON for easy storage
|
||||
return Buffer.from(JSON.stringify(encryptedData)).toString('base64');
|
||||
} catch (error) {
|
||||
console.error('Encryption error:', error);
|
||||
throw new Error('Failed to encrypt data');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrypts encrypted data
|
||||
* @param encryptedString The encrypted data string
|
||||
* @returns Decrypted plaintext
|
||||
*/
|
||||
export function decrypt(encryptedString: string): string {
|
||||
if (!encryptedString) {
|
||||
return '';
|
||||
}
|
||||
|
||||
try {
|
||||
// Check if it's already plaintext (for backward compatibility during migration)
|
||||
if (!isEncrypted(encryptedString)) {
|
||||
return encryptedString;
|
||||
}
|
||||
|
||||
const encryptedData: EncryptedData = JSON.parse(
|
||||
Buffer.from(encryptedString, 'base64').toString('utf8')
|
||||
);
|
||||
|
||||
const key = getEncryptionKey();
|
||||
const iv = Buffer.from(encryptedData.iv, 'base64');
|
||||
const tag = Buffer.from(encryptedData.tag, 'base64');
|
||||
const encrypted = Buffer.from(encryptedData.encrypted, 'base64');
|
||||
|
||||
const decipher = crypto.createDecipheriv(ALGORITHM, key, iv);
|
||||
decipher.setAuthTag(tag);
|
||||
|
||||
const decrypted = Buffer.concat([
|
||||
decipher.update(encrypted),
|
||||
decipher.final()
|
||||
]);
|
||||
|
||||
return decrypted.toString('utf8');
|
||||
} catch (error) {
|
||||
// If decryption fails, check if it's plaintext (backward compatibility)
|
||||
try {
|
||||
JSON.parse(Buffer.from(encryptedString, 'base64').toString('utf8'));
|
||||
throw error; // It was encrypted but failed to decrypt
|
||||
} catch {
|
||||
// Not encrypted, return as-is for backward compatibility
|
||||
console.warn('Token appears to be unencrypted, returning as-is for backward compatibility');
|
||||
return encryptedString;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a string is encrypted
|
||||
* @param value The string to check
|
||||
* @returns true if encrypted, false otherwise
|
||||
*/
|
||||
export function isEncrypted(value: string): boolean {
|
||||
if (!value) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
const decoded = Buffer.from(value, 'base64').toString('utf8');
|
||||
const data = JSON.parse(decoded);
|
||||
return data.version === 1 && data.encrypted && data.iv && data.tag;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||

/**
 * Migrates unencrypted tokens to encrypted format
 * @param token The token to migrate
 * @returns Encrypted token if it wasn't already encrypted
 */
export function migrateToken(token: string): string {
  if (!token || isEncrypted(token)) {
    return token;
  }

  return encrypt(token);
}

/**
 * Generates a secure random token
 * @param length Token length in bytes (default: 32)
 * @returns Hex encoded random token
 */
export function generateSecureToken(length: number = 32): string {
  return crypto.randomBytes(length).toString('hex');
}

/**
 * Hashes a value using SHA-256 (for values such as API keys that only need
 * non-reversible equality comparison)
 * @param value The value to hash
 * @returns Hex encoded hash
 */
export function hashValue(value: string): string {
  return crypto.createHash('sha256').update(value).digest('hex');
}
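
A minimal usage sketch of the helpers above; the token value is hypothetical, and the key is assumed to be available to getEncryptionKey() as defined earlier in this file:

const ciphertext = encrypt('ghp_exampleToken');  // hypothetical token, not a real credential
isEncrypted(ciphertext);                         // true
decrypt(ciphertext);                             // 'ghp_exampleToken'
migrateToken('ghp_exampleToken');                // plaintext input: returns it encrypted
migrateToken(ciphertext);                        // already encrypted: returned unchanged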
@@ -0,0 +1,93 @@
/**
 * Mirror strategy configuration for handling various repository scenarios
 */

export type NonMirrorStrategy = "skip" | "delete" | "rename";

export interface MirrorStrategyConfig {
  /**
   * How to handle repositories that exist in Gitea but are not mirrors
   * - "skip": Leave the repository as-is and mark as failed
   * - "delete": Delete the repository and recreate as mirror
   * - "rename": Rename the existing repository (not implemented yet)
   * Note: "convert" strategy was removed as it's not supported by most Gitea versions
   */
  nonMirrorStrategy: NonMirrorStrategy;

  /**
   * Maximum retries for organization creation
   */
  orgCreationRetries: number;

  /**
   * Base delay in milliseconds for exponential backoff
   */
  orgCreationRetryDelay: number;

  /**
   * Whether to create organizations sequentially to avoid race conditions
   */
  sequentialOrgCreation: boolean;

  /**
   * Batch size for parallel repository processing
   */
  repoBatchSize: number;

  /**
   * Timeout for sync operations in milliseconds
   */
  syncTimeout: number;
}

export const DEFAULT_MIRROR_STRATEGY: MirrorStrategyConfig = {
  nonMirrorStrategy: "delete", // Safe default: delete and recreate
  orgCreationRetries: 3,
  orgCreationRetryDelay: 100,
  sequentialOrgCreation: true,
  repoBatchSize: 3,
  syncTimeout: 30000, // 30 seconds
};

/**
 * Get mirror strategy configuration from environment or defaults
 */
export function getMirrorStrategyConfig(): MirrorStrategyConfig {
  return {
    nonMirrorStrategy: (process.env.NON_MIRROR_STRATEGY as NonMirrorStrategy) || DEFAULT_MIRROR_STRATEGY.nonMirrorStrategy,
    orgCreationRetries: parseInt(process.env.ORG_CREATION_RETRIES || "", 10) || DEFAULT_MIRROR_STRATEGY.orgCreationRetries,
    orgCreationRetryDelay: parseInt(process.env.ORG_CREATION_RETRY_DELAY || "", 10) || DEFAULT_MIRROR_STRATEGY.orgCreationRetryDelay,
    sequentialOrgCreation: process.env.SEQUENTIAL_ORG_CREATION !== "false",
    repoBatchSize: parseInt(process.env.REPO_BATCH_SIZE || "", 10) || DEFAULT_MIRROR_STRATEGY.repoBatchSize,
    syncTimeout: parseInt(process.env.SYNC_TIMEOUT || "", 10) || DEFAULT_MIRROR_STRATEGY.syncTimeout,
  };
}

/**
 * Validate strategy configuration
 */
export function validateStrategyConfig(config: MirrorStrategyConfig): string[] {
  const errors: string[] = [];

  if (!["skip", "delete", "rename"].includes(config.nonMirrorStrategy)) {
    errors.push(`Invalid nonMirrorStrategy: ${config.nonMirrorStrategy}`);
  }

  if (config.orgCreationRetries < 1 || config.orgCreationRetries > 10) {
    errors.push("orgCreationRetries must be between 1 and 10");
  }

  if (config.orgCreationRetryDelay < 10 || config.orgCreationRetryDelay > 5000) {
    errors.push("orgCreationRetryDelay must be between 10ms and 5000ms");
  }

  if (config.repoBatchSize < 1 || config.repoBatchSize > 50) {
    errors.push("repoBatchSize must be between 1 and 50");
  }

  if (config.syncTimeout < 5000 || config.syncTimeout > 300000) {
    errors.push("syncTimeout must be between 5s and 5min");
  }

  return errors;
}
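
A minimal sketch of how these two functions might be combined at startup; the fail-fast error handling is an illustrative assumption, not something this module prescribes:

const config = getMirrorStrategyConfig();
const errors = validateStrategyConfig(config);
if (errors.length > 0) {
  throw new Error(`Invalid mirror strategy configuration: ${errors.join('; ')}`);
}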
@@ -0,0 +1,85 @@
import { describe, test, expect } from "bun:test";
import { isValidRedirectUri, parseRedirectUris } from "./oauth-validation";

describe("OAuth Validation", () => {
  describe("parseRedirectUris", () => {
    test("parses comma-separated URIs", () => {
      const result = parseRedirectUris("https://app1.com,https://app2.com, https://app3.com ");
      expect(result).toEqual([
        "https://app1.com",
        "https://app2.com",
        "https://app3.com"
      ]);
    });

    test("handles empty string", () => {
      expect(parseRedirectUris("")).toEqual([]);
    });

    test("filters out empty values", () => {
      const result = parseRedirectUris("https://app1.com,,https://app2.com,");
      expect(result).toEqual(["https://app1.com", "https://app2.com"]);
    });
  });

  describe("isValidRedirectUri", () => {
    test("validates exact match", () => {
      const authorizedUris = ["https://app.example.com/callback"];

      expect(isValidRedirectUri("https://app.example.com/callback", authorizedUris)).toBe(true);
      expect(isValidRedirectUri("https://app.example.com/other", authorizedUris)).toBe(false);
    });

    test("validates wildcard paths", () => {
      const authorizedUris = ["https://app.example.com/*"];

      expect(isValidRedirectUri("https://app.example.com/", authorizedUris)).toBe(true);
      expect(isValidRedirectUri("https://app.example.com/callback", authorizedUris)).toBe(true);
      expect(isValidRedirectUri("https://app.example.com/deep/path", authorizedUris)).toBe(true);

      // Different domain should fail
      expect(isValidRedirectUri("https://evil.com/callback", authorizedUris)).toBe(false);
    });

    test("validates protocol", () => {
      const authorizedUris = ["https://app.example.com/callback"];

      // HTTP instead of HTTPS should fail
      expect(isValidRedirectUri("http://app.example.com/callback", authorizedUris)).toBe(false);
    });

    test("validates host and port", () => {
      const authorizedUris = ["https://app.example.com:3000/callback"];

      // Different port should fail
      expect(isValidRedirectUri("https://app.example.com/callback", authorizedUris)).toBe(false);
      expect(isValidRedirectUri("https://app.example.com:3000/callback", authorizedUris)).toBe(true);
      expect(isValidRedirectUri("https://app.example.com:4000/callback", authorizedUris)).toBe(false);
    });

test("handles invalid URIs", () => {
|
||||
const authorizedUris = ["not-a-valid-uri", "https://valid.com"];
|
||||
|
||||
// Invalid redirect URI
|
||||
expect(isValidRedirectUri("not-a-valid-uri", authorizedUris)).toBe(false);
|
||||
|
||||
// Valid redirect URI with invalid authorized URI should still work if it matches valid one
|
||||
expect(isValidRedirectUri("https://valid.com", authorizedUris)).toBe(true);
|
||||
});
|
||||
|
||||
test("handles empty inputs", () => {
|
||||
expect(isValidRedirectUri("", ["https://app.com"])).toBe(false);
|
||||
expect(isValidRedirectUri("https://app.com", [])).toBe(false);
|
||||
});
|
||||
|
||||
test("prevents open redirect attacks", () => {
|
||||
const authorizedUris = ["https://app.example.com/callback"];
|
||||
|
||||
// Various attack vectors
|
||||
expect(isValidRedirectUri("https://app.example.com.evil.com/callback", authorizedUris)).toBe(false);
|
||||
expect(isValidRedirectUri("https://app.example.com@evil.com/callback", authorizedUris)).toBe(false);
|
||||
expect(isValidRedirectUri("//evil.com/callback", authorizedUris)).toBe(false);
|
||||
expect(isValidRedirectUri("https:evil.com/callback", authorizedUris)).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,59 @@
/**
 * Validates a redirect URI against a list of authorized URIs
 * @param redirectUri The redirect URI to validate
 * @param authorizedUris List of authorized redirect URIs
 * @returns true if the redirect URI is authorized, false otherwise
 */
export function isValidRedirectUri(redirectUri: string, authorizedUris: string[]): boolean {
  if (!redirectUri || authorizedUris.length === 0) {
    return false;
  }

  try {
    // Parse the redirect URI to ensure it's valid
    const redirectUrl = new URL(redirectUri);

    return authorizedUris.some(authorizedUri => {
      try {
        // Handle wildcard paths (e.g., https://example.com/*)
        if (authorizedUri.endsWith('/*')) {
          const baseUri = authorizedUri.slice(0, -2);
          const baseUrl = new URL(baseUri);

          // Match on a path-segment boundary so that an authorized
          // "https://example.com/app/*" does not also allow
          // "https://example.com/application"
          const basePath = baseUrl.pathname.endsWith('/')
            ? baseUrl.pathname
            : baseUrl.pathname + '/';

          // Check protocol, host, and port match
          return redirectUrl.protocol === baseUrl.protocol &&
                 redirectUrl.host === baseUrl.host &&
                 (redirectUrl.pathname === baseUrl.pathname ||
                  redirectUrl.pathname.startsWith(basePath));
        }

        // Handle exact match
        const authorizedUrl = new URL(authorizedUri);

        // For exact match, everything must match including path and query params
        return redirectUrl.href === authorizedUrl.href;
      } catch {
        // If authorized URI is not a valid URL, treat as invalid
        return false;
      }
    });
  } catch {
    // If redirect URI is not a valid URL, it's invalid
    return false;
  }
}

/**
 * Parses a comma-separated list of redirect URIs and trims whitespace
 * @param redirectUrls Comma-separated list of redirect URIs
 * @returns Array of trimmed redirect URIs
 */
export function parseRedirectUris(redirectUrls: string): string[] {
  if (!redirectUrls) {
    return [];
  }

  return redirectUrls
    .split(',')
    .map(uri => uri.trim())
    .filter(uri => uri.length > 0);
}
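
A minimal call-site sketch; the OAUTH_REDIRECT_URIS variable name and the requestedRedirectUri value are assumptions for illustration:

const authorizedUris = parseRedirectUris(process.env.OAUTH_REDIRECT_URIS || '');
const requestedRedirectUri = 'https://app.example.com/callback'; // would come from the OAuth request
if (!isValidRedirectUri(requestedRedirectUri, authorizedUris)) {
  // Reject the authorization request rather than redirecting anywhere
}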