Remove proactive PyPI dependency caching feature
The background task queue that proactively cached package dependencies was causing server instability and unnecessary cache growth. The PyPI proxy now caches packages only on demand, when users request them.

Removed:
- PyPI cache worker (background task queue and worker pool)
- PyPICacheTask model and related database schema
- Cache management API endpoints (/pypi/cache/*)
- Background Jobs admin dashboard
- Dependency extraction and queueing logic

Kept (the on-demand path is sketched below):
- On-demand package caching (still works when users request packages)
- Async httpx for non-blocking downloads (prevents health check failures)
- URL-based cache lookups for deduplication
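The surviving on-demand path is not part of this diff; the following is a minimal sketch of how such a path can look, assuming an async httpx download keyed by the upstream URL. The names fetch_package_file and CACHE_DIR are hypothetical illustrations, not the proxy's actual code.

    # Sketch only: assumed names, not the proxy's real implementation.
    # Download with async httpx when (and only when) a client requests a file,
    # keyed by the upstream URL so repeated requests deduplicate.
    import hashlib
    from pathlib import Path

    import httpx

    CACHE_DIR = Path("/var/cache/pypi-proxy")  # hypothetical cache location


    async def fetch_package_file(upstream_url: str) -> bytes:
        """Return a package file, downloading it only on first request."""
        # URL-based cache key: the same upstream URL always maps to one entry.
        key = hashlib.sha256(upstream_url.encode()).hexdigest()
        cached = CACHE_DIR / key
        if cached.exists():
            return cached.read_bytes()

        # Async httpx keeps the event loop free while a large file downloads,
        # so health-check endpoints stay responsive.
        async with httpx.AsyncClient(follow_redirects=True) as client:
            response = await client.get(upstream_url)
            response.raise_for_status()

        CACHE_DIR.mkdir(parents=True, exist_ok=True)
        cached.write_bytes(response.content)
        return response.content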
@@ -747,58 +747,3 @@ export async function testUpstreamSource(id: string): Promise<UpstreamSourceTestResult
   return handleResponse<UpstreamSourceTestResult>(response);
 }
 
-// =============================================================================
-// PyPI Cache Jobs API
-// =============================================================================
-
-import {
-  PyPICacheStatus,
-  PyPICacheTask,
-  PyPICacheActiveTask,
-  PyPICacheRetryResponse,
-} from './types';
-
-export async function getPyPICacheStatus(): Promise<PyPICacheStatus> {
-  const response = await fetch('/pypi/cache/status', {
-    credentials: 'include',
-  });
-  return handleResponse<PyPICacheStatus>(response);
-}
-
-export async function getPyPICacheFailedTasks(limit: number = 50): Promise<PyPICacheTask[]> {
-  const response = await fetch(`/pypi/cache/failed?limit=${limit}`, {
-    credentials: 'include',
-  });
-  return handleResponse<PyPICacheTask[]>(response);
-}
-
-export async function getPyPICacheActiveTasks(limit: number = 50): Promise<PyPICacheActiveTask[]> {
-  const response = await fetch(`/pypi/cache/active?limit=${limit}`, {
-    credentials: 'include',
-  });
-  return handleResponse<PyPICacheActiveTask[]>(response);
-}
-
-export async function retryPyPICacheTask(packageName: string): Promise<PyPICacheRetryResponse> {
-  const response = await fetch(`/pypi/cache/retry/${encodeURIComponent(packageName)}`, {
-    method: 'POST',
-    credentials: 'include',
-  });
-  return handleResponse<PyPICacheRetryResponse>(response);
-}
-
-export async function retryAllPyPICacheTasks(): Promise<PyPICacheRetryResponse> {
-  const response = await fetch('/pypi/cache/retry-all', {
-    method: 'POST',
-    credentials: 'include',
-  });
-  return handleResponse<PyPICacheRetryResponse>(response);
-}
-
-export async function cancelPyPICacheTask(packageName: string): Promise<PyPICacheRetryResponse> {
-  const response = await fetch(`/pypi/cache/cancel/${encodeURIComponent(packageName)}`, {
-    method: 'POST',
-    credentials: 'include',
-  });
-  return handleResponse<PyPICacheRetryResponse>(response);
-}