Compare commits: main...fix/pypi-p (120 commits)
Commits in this comparison (SHA1):

c31a147e1f, 6cf487b224, 7140c9f4f2, 6957bb26e9, 561b82da92, 0fb69a6aaa,
f1ac43c1cb, 23ffbada00, 2ea3a39416, 7bfec020c8, 6b9863f9c3, 5cff4092e3,
b82bd1c85a, 632bf54087, 170561b32a, 6e05697ae2, 08b6589712, 7ad5a15ef4,
8fdb73901e, 79dd7b833e, 71089aee0e, ffe0529ea8, 146ca2ad74, a045509fe4,
14806b05f0, c67004af52, 8c6ba01a73, 196f3f957c, 9cadfa3b1b, 19e034ef56,
45a48cc1ee, 7068f36cb5, e471202f2e, d12e4cdfc5, 1ffe17bf62, c21af708af,
1ae989249b, c0c8603d05, 2501ba21d4, c94fe0389b, 9a95421064, 87f30ea898,
106e30b533, c4c9c20763, 62c709e368, b6fb9e7546, 9db94d035d, 6d9cd9d45d,
f5b60468ce, f7643a5c13, 281474d72f, bb7c30b15c, 9587ed8f17, e86d974339,
bf2737b3a2, 17d3004058, 549c85900e, c60ed9ab21, 34ff9caa08, ac3477ff22,
f87e5b4a51, 01915bcb45, 72952d84a1, e6d42d91cd, b3ae3b03eb, ba0a658611,
081cc6df83, cf7bdccb3a, 1329d380a4, 361210a2bc, 415ad9a29a, 1667c5a416,
1021e2b942, d0e91658d7, 7b89f41704, ba43110123, 92edef92e6, 47b137f4eb,
1138309aaa, 3bdeade7ca, 8edb45879f, 97b39d000b, ba708332a5, d274f3f375,
490b05438d, 3c2ab70ef0, 109a593f83, 1d727b3f8c, 47aa0afe91, f992fc540e,
044a6c1d27, 62c77dc16d, 7c05360eed, 76878279e9, e1b01abf9b, d07936b666,
47b3eb439d, c5f75e4fd6, ff31379649, 424b1e5770, 7b5b0c78d8, 924826f07a,
fe6c6c52d2, 701e11ce83, ff9e02606e, f3afdd3bbf, 4b73196664, 7ef66745f1,
2dc7fe5a7b, 534e4b964f, 757e43fc34, d78092de55, 0fa991f536, 00fb2729e4,
8ae4d7a685, 4b887d1aad, 4dc54ace8a, 64bfd3902f, bdfed77cb1, 140f6c926a
@@ -213,6 +213,74 @@ integration_test_feature:
    - if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
      when: on_success

# Reset feature environment after integration tests
# Calls factory-reset to clean up test data created during integration tests
reset_feature:
  stage: deploy
  needs: [integration_test_feature]
  image: deps.global.bsf.tools/docker/python:3.12-slim
  timeout: 5m
  before_script:
    - pip install --index-url "$PIP_INDEX_URL" httpx
  script:
    # Debug: Check if variable is set at shell level
    - echo "RESET_ADMIN_PASSWORD length at shell level:${#RESET_ADMIN_PASSWORD}"
    - |
      python - <<'RESET_SCRIPT'
      import httpx
      import os
      import sys

      BASE_URL = f"https://orchard-{os.environ['CI_COMMIT_REF_SLUG']}.common.global.bsf.tools"
      PASSWORD_RAW = os.environ.get("RESET_ADMIN_PASSWORD")

      if not PASSWORD_RAW:
          print("ERROR: RESET_ADMIN_PASSWORD not set")
          sys.exit(1)

      # Debug: check for hidden characters
      print(f"Raw password repr (first 3 chars): {repr(PASSWORD_RAW[:3])}")
      print(f"Raw password repr (last 3 chars): {repr(PASSWORD_RAW[-3:])}")
      print(f"Raw length: {len(PASSWORD_RAW)}")

      # Strip any whitespace
      PASSWORD = PASSWORD_RAW.strip()
      print(f"Stripped length: {len(PASSWORD)}")

      print(f"Resetting environment at {BASE_URL}")
      client = httpx.Client(base_url=BASE_URL, timeout=60.0)

      # Login as admin
      login_resp = client.post("/api/v1/auth/login", json={
          "username": "admin",
          "password": PASSWORD
      })
      if login_resp.status_code != 200:
          print(f"ERROR: Login failed: {login_resp.status_code}")
          print(f"Response: {login_resp.text}")
          sys.exit(1)

      # Call factory reset
      reset_resp = client.post(
          "/api/v1/admin/factory-reset",
          headers={"X-Confirm-Reset": "yes-delete-all-data"}
      )
      if reset_resp.status_code == 200:
          print("SUCCESS: Factory reset completed")
          print(reset_resp.json())
      else:
          print(f"ERROR: Factory reset failed: {reset_resp.status_code}")
          print(reset_resp.text)
          sys.exit(1)
      RESET_SCRIPT
  variables:
    # Use same pattern as integration_test_feature - create new variable from CI variable
    RESET_ADMIN_PASSWORD: $DEV_ADMIN_PASSWORD
  rules:
    - if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
      when: on_success
      allow_failure: true  # Don't fail the pipeline if reset fails

# Run Python backend unit tests
python_unit_tests:
  stage: test

35  CHANGELOG.md
@@ -7,6 +7,34 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]
### Added
- Added auto-fetch capability to dependency resolution endpoint
  - `GET /api/v1/project/{project}/{package}/+/{ref}/resolve?auto_fetch=true` fetches missing dependencies from upstream registries
  - PyPI registry client queries PyPI JSON API to resolve version constraints
  - Fetched artifacts are cached and included in response `fetched` field
  - Missing dependencies show `fetch_attempted` and `fetch_error` status
  - Configurable max fetch depth via `ORCHARD_AUTO_FETCH_MAX_DEPTH` (default: 3)
- Added `backend/app/registry_client.py` with extensible registry client abstraction
  - `RegistryClient` ABC for implementing upstream registry clients
  - `PyPIRegistryClient` implementation using PyPI JSON API
  - `get_registry_client()` factory function for future npm/maven support
- Added `fetch_and_cache_pypi_package()` reusable function for PyPI package fetching
- Added HTTP connection pooling infrastructure for improved PyPI proxy performance
  - `HttpClientManager` with configurable pool size, timeouts, and thread pool executor
  - Eliminates per-request connection overhead (~100-500ms → ~5ms)
- Added Redis caching layer with category-aware TTL for hermetic builds
  - `CacheService` with graceful fallback when Redis unavailable
  - Immutable data (artifact metadata, dependencies) cached forever
  - Mutable data (package index, versions) uses configurable TTL
- Added `ArtifactRepository` for batch database operations
  - `batch_upsert_dependencies()` reduces N+1 queries to single INSERT
  - `get_or_create_artifact()` uses atomic ON CONFLICT upsert
- Added infrastructure status to health endpoint (`/health`)
  - Reports HTTP pool size and worker threads
  - Reports Redis cache connection status
- Added new configuration settings for HTTP client, Redis, and cache TTL
  - `ORCHARD_HTTP_MAX_CONNECTIONS`, `ORCHARD_HTTP_CONNECT_TIMEOUT`, etc.
  - `ORCHARD_REDIS_HOST`, `ORCHARD_REDIS_PORT`, `ORCHARD_REDIS_ENABLED`
  - `ORCHARD_CACHE_TTL_INDEX`, `ORCHARD_CACHE_TTL_VERSIONS`, etc.
- Added transparent PyPI proxy implementing PEP 503 Simple API (#108)
  - `GET /pypi/simple/` - package index (proxied from upstream)
  - `GET /pypi/simple/{package}/` - version list with rewritten download links
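For illustration, a minimal sketch (not part of this diff) of how the auto-fetch resolve endpoint described above might be called; the host, project, package, and ref values are placeholders, and any authentication is omitted:

    import httpx

    BASE_URL = "https://orchard.example.internal"  # placeholder host

    with httpx.Client(base_url=BASE_URL, timeout=60.0) as client:
        resp = client.get(
            "/api/v1/project/acme/requests/+/2.31.0/resolve",  # placeholder coordinates
            params={"auto_fetch": "true"},
        )
        resp.raise_for_status()
        data = resp.json()
        # Per the changelog, newly fetched artifacts appear in "fetched" and
        # unresolvable dependencies carry fetch_attempted / fetch_error flags.
        print(data.get("fetched"))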
@@ -16,6 +44,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Added `POST /api/v1/cache/resolve` endpoint to cache packages by coordinates instead of URL (#108)

### Changed
- Removed Usage section from Package page (curl command examples)
- PyPI proxy now uses shared HTTP connection pool instead of per-request clients
- PyPI proxy now caches upstream source configuration in Redis
- Dependency storage now uses batch INSERT instead of individual queries
- Increased default database pool size from 5 to 20 connections
- Increased default database max overflow from 10 to 30 connections
- Enabled Redis in Helm chart values for dev, stage, and prod environments
- Upstream sources table text is now centered under column headers (#108)
- ENV badge now appears inline with source name instead of separate column (#108)
- Test and Edit buttons now have more prominent button styling (#108)

262  backend/app/cache_service.py  (new file)
@@ -0,0 +1,262 @@
"""
Redis-backed caching service with category-aware TTL and invalidation.

Provides:
- Immutable caching for artifact data (hermetic builds)
- TTL-based caching for discovery data
- Event-driven invalidation for config changes
- Graceful fallback when Redis unavailable
"""

import logging
from enum import Enum
from typing import Optional

from .config import Settings

logger = logging.getLogger(__name__)


class CacheCategory(Enum):
    """
    Cache categories with different TTL and invalidation rules.

    Immutable (cache forever):
    - ARTIFACT_METADATA: Artifact info by SHA256
    - ARTIFACT_DEPENDENCIES: Extracted deps by SHA256
    - DEPENDENCY_RESOLUTION: Resolution results by input hash

    Mutable (TTL + event invalidation):
    - UPSTREAM_SOURCES: Upstream config, invalidate on DB change
    - PACKAGE_INDEX: PyPI/npm index pages, TTL only
    - PACKAGE_VERSIONS: Version listings, TTL only
    """

    # Immutable - cache forever (hermetic builds)
    ARTIFACT_METADATA = "artifact"
    ARTIFACT_DEPENDENCIES = "deps"
    DEPENDENCY_RESOLUTION = "resolve"

    # Mutable - TTL + event invalidation
    UPSTREAM_SOURCES = "upstream"
    PACKAGE_INDEX = "index"
    PACKAGE_VERSIONS = "versions"


def get_category_ttl(category: CacheCategory, settings: Settings) -> Optional[int]:
    """
    Get TTL for a cache category.

    Returns:
        TTL in seconds, or None for no expiry (immutable).
    """
    ttl_map = {
        # Immutable - no TTL
        CacheCategory.ARTIFACT_METADATA: None,
        CacheCategory.ARTIFACT_DEPENDENCIES: None,
        CacheCategory.DEPENDENCY_RESOLUTION: None,
        # Mutable - configurable TTL
        CacheCategory.UPSTREAM_SOURCES: settings.cache_ttl_upstream,
        CacheCategory.PACKAGE_INDEX: settings.cache_ttl_index,
        CacheCategory.PACKAGE_VERSIONS: settings.cache_ttl_versions,
    }
    return ttl_map.get(category)


class CacheService:
    """
    Redis-backed caching with category-aware TTL.

    Key format: orchard:{category}:{protocol}:{identifier}
    Example: orchard:deps:pypi:abc123def456

    When Redis is disabled or unavailable, operations gracefully
    return None/no-op to allow the application to function without caching.
    """

    def __init__(self, settings: Settings):
        self._settings = settings
        self._enabled = settings.redis_enabled
        self._redis: Optional["redis.asyncio.Redis"] = None
        self._started = False

    async def startup(self) -> None:
        """Initialize Redis connection. Called by FastAPI lifespan."""
        if self._started:
            return

        if not self._enabled:
            logger.info("CacheService disabled (redis_enabled=False)")
            self._started = True
            return

        try:
            import redis.asyncio as redis

            logger.info(
                f"Connecting to Redis at {self._settings.redis_host}:"
                f"{self._settings.redis_port}/{self._settings.redis_db}"
            )

            self._redis = redis.Redis(
                host=self._settings.redis_host,
                port=self._settings.redis_port,
                db=self._settings.redis_db,
                password=self._settings.redis_password,
                decode_responses=False,  # We handle bytes
            )

            # Test connection
            await self._redis.ping()
            logger.info("CacheService connected to Redis")

        except ImportError:
            logger.warning("redis package not installed, caching disabled")
            self._enabled = False
        except Exception as e:
            logger.warning(f"Redis connection failed, caching disabled: {e}")
            self._enabled = False
            self._redis = None

        self._started = True

    async def shutdown(self) -> None:
        """Close Redis connection. Called by FastAPI lifespan."""
        if not self._started:
            return

        if self._redis:
            await self._redis.aclose()
            self._redis = None

        self._started = False
        logger.info("CacheService shutdown complete")

    @staticmethod
    def _make_key(category: CacheCategory, protocol: str, identifier: str) -> str:
        """Build namespaced cache key."""
        return f"orchard:{category.value}:{protocol}:{identifier}"

    async def get(
        self,
        category: CacheCategory,
        key: str,
        protocol: str = "default",
    ) -> Optional[bytes]:
        """
        Get cached value.

        Args:
            category: Cache category for TTL rules
            key: Unique identifier within category
            protocol: Protocol namespace (pypi, npm, etc.)

        Returns:
            Cached bytes or None if not found/disabled.
        """
        if not self._enabled or not self._redis:
            return None

        try:
            full_key = self._make_key(category, protocol, key)
            return await self._redis.get(full_key)
        except Exception as e:
            logger.warning(f"Cache get failed for {key}: {e}")
            return None

    async def set(
        self,
        category: CacheCategory,
        key: str,
        value: bytes,
        protocol: str = "default",
    ) -> None:
        """
        Set cached value with category-appropriate TTL.

        Args:
            category: Cache category for TTL rules
            key: Unique identifier within category
            value: Bytes to cache
            protocol: Protocol namespace (pypi, npm, etc.)
        """
        if not self._enabled or not self._redis:
            return

        try:
            full_key = self._make_key(category, protocol, key)
            ttl = get_category_ttl(category, self._settings)

            if ttl is None:
                await self._redis.set(full_key, value)
            else:
                await self._redis.setex(full_key, ttl, value)

        except Exception as e:
            logger.warning(f"Cache set failed for {key}: {e}")

    async def delete(
        self,
        category: CacheCategory,
        key: str,
        protocol: str = "default",
    ) -> None:
        """Delete a specific cache entry."""
        if not self._enabled or not self._redis:
            return

        try:
            full_key = self._make_key(category, protocol, key)
            await self._redis.delete(full_key)
        except Exception as e:
            logger.warning(f"Cache delete failed for {key}: {e}")

    async def invalidate_pattern(
        self,
        category: CacheCategory,
        pattern: str = "*",
        protocol: str = "default",
    ) -> int:
        """
        Invalidate all entries matching pattern.

        Args:
            category: Cache category
            pattern: Glob pattern for keys (default "*" = all in category)
            protocol: Protocol namespace

        Returns:
            Number of keys deleted.
        """
        if not self._enabled or not self._redis:
            return 0

        try:
            full_pattern = self._make_key(category, protocol, pattern)
            keys = []
            async for key in self._redis.scan_iter(match=full_pattern):
                keys.append(key)

            if keys:
                return await self._redis.delete(*keys)
            return 0

        except Exception as e:
            logger.warning(f"Cache invalidate failed for pattern {pattern}: {e}")
            return 0

    async def ping(self) -> bool:
        """Check if Redis is connected and responding."""
        if not self._enabled or not self._redis:
            return False

        try:
            await self._redis.ping()
            return True
        except Exception:
            return False

    @property
    def enabled(self) -> bool:
        """Check if caching is enabled."""
        return self._enabled
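For illustration, a minimal sketch (not part of this diff) of how a route handler might use the CacheService above, assuming it is attached to app.state as shown in the lifespan changes later in this comparison; the upstream response body is a placeholder:

    from fastapi import Request

    async def get_cached_index_page(request: Request, package: str) -> bytes | None:
        cache: CacheService = request.app.state.cache
        cached = await cache.get(CacheCategory.PACKAGE_INDEX, package, protocol="pypi")
        if cached is not None:
            return cached
        html = b"<html>...</html>"  # placeholder for the real upstream fetch
        await cache.set(CacheCategory.PACKAGE_INDEX, package, html, protocol="pypi")
        return html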
@@ -22,8 +22,8 @@ class Settings(BaseSettings):
    database_sslmode: str = "disable"

    # Database connection pool settings
    database_pool_size: int = 5  # Number of connections to keep open
    database_max_overflow: int = 10  # Max additional connections beyond pool_size
    database_pool_size: int = 20  # Number of connections to keep open
    database_max_overflow: int = 30  # Max additional connections beyond pool_size
    database_pool_timeout: int = 30  # Seconds to wait for a connection from pool
    database_pool_recycle: int = (
        1800  # Recycle connections after this many seconds (30 min)
@@ -51,6 +51,26 @@ class Settings(BaseSettings):
    presigned_url_expiry: int = (
        3600  # Presigned URL expiry in seconds (default: 1 hour)
    )
    pypi_download_mode: str = "redirect"  # "redirect" (to S3) or "proxy" (stream through Orchard)

    # HTTP Client pool settings
    http_max_connections: int = 100  # Max connections per pool
    http_max_keepalive: int = 20  # Keep-alive connections
    http_connect_timeout: float = 30.0  # Connection timeout seconds
    http_read_timeout: float = 60.0  # Read timeout seconds
    http_worker_threads: int = 32  # Thread pool for blocking ops

    # Redis cache settings
    redis_host: str = "localhost"
    redis_port: int = 6379
    redis_db: int = 0
    redis_password: Optional[str] = None
    redis_enabled: bool = True  # Set False to disable caching

    # Cache TTL settings (seconds, 0 = no expiry)
    cache_ttl_index: int = 300  # Package index pages: 5 min
    cache_ttl_versions: int = 300  # Version listings: 5 min
    cache_ttl_upstream: int = 3600  # Upstream source config: 1 hour

    # Logging settings
    log_level: str = "INFO"  # DEBUG, INFO, WARNING, ERROR, CRITICAL
@@ -64,6 +84,15 @@ class Settings(BaseSettings):
    # Global cache settings override (None = use DB value, True/False = override DB)
    cache_auto_create_system_projects: Optional[bool] = None  # Override auto_create_system_projects

    # PyPI Cache Worker settings
    pypi_cache_workers: int = 5  # Number of concurrent cache workers
    pypi_cache_max_depth: int = 10  # Maximum recursion depth for dependency caching
    pypi_cache_max_attempts: int = 3  # Maximum retry attempts for failed cache tasks

    # Auto-fetch configuration for dependency resolution
    auto_fetch_dependencies: bool = False  # Server default for auto_fetch parameter
    auto_fetch_timeout: int = 300  # Total timeout for auto-fetch resolution in seconds

    # JWT Authentication settings (optional, for external identity providers)
    jwt_enabled: bool = False  # Enable JWT token validation
    jwt_secret: str = ""  # Secret key for HS256, or leave empty for RS256 with JWKS
@@ -88,6 +117,24 @@ class Settings(BaseSettings):
    def is_production(self) -> bool:
        return self.env.lower() == "production"

    @property
    def PORT(self) -> int:
        """Alias for server_port for compatibility."""
        return self.server_port

    # Uppercase aliases for PyPI cache settings (for backward compatibility)
    @property
    def PYPI_CACHE_WORKERS(self) -> int:
        return self.pypi_cache_workers

    @property
    def PYPI_CACHE_MAX_DEPTH(self) -> int:
        return self.pypi_cache_max_depth

    @property
    def PYPI_CACHE_MAX_ATTEMPTS(self) -> int:
        return self.pypi_cache_max_attempts

    class Config:
        env_prefix = "ORCHARD_"
        case_sensitive = False

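A brief sketch, not part of this diff: because Settings declares env_prefix = "ORCHARD_" with case_sensitive = False, the new fields above can be overridden through environment variables. The values below are placeholders.

    import os

    os.environ["ORCHARD_REDIS_HOST"] = "redis.internal"        # -> settings.redis_host
    os.environ["ORCHARD_REDIS_ENABLED"] = "false"              # -> settings.redis_enabled
    os.environ["ORCHARD_HTTP_MAX_CONNECTIONS"] = "200"         # -> settings.http_max_connections
    os.environ["ORCHARD_CACHE_TTL_INDEX"] = "600"              # -> settings.cache_ttl_index

    # A freshly constructed Settings() picks these up; if the app caches its
    # settings object (e.g., via get_settings()), that cache would need clearing.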
@@ -220,17 +220,7 @@ def _run_migrations():
                CREATE UNIQUE INDEX idx_packages_project_name ON packages(project_id, name);
            END IF;

            IF NOT EXISTS (
                SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tags_package_name'
            ) THEN
                CREATE UNIQUE INDEX idx_tags_package_name ON tags(package_id, name);
            END IF;

            IF NOT EXISTS (
                SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tags_package_created_at'
            ) THEN
                CREATE INDEX idx_tags_package_created_at ON tags(package_id, created_at);
            END IF;
            -- Tag indexes removed: tags table no longer exists (removed in tag system removal)
            END $$;
            """,
        ),
@@ -287,27 +277,8 @@ def _run_migrations():
        Migration(
            name="008_create_tags_ref_count_triggers",
            sql="""
            DO $$
            BEGIN
                DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
                CREATE TRIGGER tags_ref_count_insert_trigger
                    AFTER INSERT ON tags
                    FOR EACH ROW
                    EXECUTE FUNCTION increment_artifact_ref_count();

                DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
                CREATE TRIGGER tags_ref_count_delete_trigger
                    AFTER DELETE ON tags
                    FOR EACH ROW
                    EXECUTE FUNCTION decrement_artifact_ref_count();

                DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
                CREATE TRIGGER tags_ref_count_update_trigger
                    AFTER UPDATE ON tags
                    FOR EACH ROW
                    WHEN (OLD.artifact_id IS DISTINCT FROM NEW.artifact_id)
                    EXECUTE FUNCTION update_artifact_ref_count();
            END $$;
            -- Tags table removed: triggers no longer needed (tag system removed)
            DO $$ BEGIN NULL; END $$;
            """,
        ),
        Migration(
@@ -354,9 +325,11 @@ def _run_migrations():
        Migration(
            name="011_migrate_semver_tags_to_versions",
            sql=r"""
            -- Migrate semver tags to versions (only if both tables exist - for existing databases)
            DO $$
            BEGIN
                IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
                IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions')
                   AND EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'tags') THEN
                    INSERT INTO package_versions (id, package_id, artifact_id, version, version_source, created_by, created_at)
                    SELECT
                        gen_random_uuid(),
@@ -565,6 +538,62 @@ def _run_migrations():
                WHERE name IN ('npm-public', 'pypi-public', 'maven-central', 'docker-hub');
            """,
        ),
        Migration(
            name="024_remove_tags",
            sql="""
            -- Remove tag system, keeping only versions for artifact references
            DO $$
            BEGIN
                -- Drop triggers on tags table (if they exist)
                DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
                DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
                DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
                DROP TRIGGER IF EXISTS tags_updated_at_trigger ON tags;
                DROP TRIGGER IF EXISTS tag_changes_trigger ON tags;

                -- Drop the tag change tracking function
                DROP FUNCTION IF EXISTS track_tag_changes();

                -- Remove tag_constraint from artifact_dependencies
                IF EXISTS (
                    SELECT 1 FROM information_schema.table_constraints
                    WHERE constraint_name = 'check_constraint_type'
                    AND table_name = 'artifact_dependencies'
                ) THEN
                    ALTER TABLE artifact_dependencies DROP CONSTRAINT check_constraint_type;
                END IF;

                -- Remove the tag_constraint column if it exists
                IF EXISTS (
                    SELECT 1 FROM information_schema.columns
                    WHERE table_name = 'artifact_dependencies' AND column_name = 'tag_constraint'
                ) THEN
                    ALTER TABLE artifact_dependencies DROP COLUMN tag_constraint;
                END IF;

                -- Make version_constraint NOT NULL
                UPDATE artifact_dependencies SET version_constraint = '*' WHERE version_constraint IS NULL;
                ALTER TABLE artifact_dependencies ALTER COLUMN version_constraint SET NOT NULL;

                -- Drop tag_history table first (depends on tags)
                DROP TABLE IF EXISTS tag_history;

                -- Drop tags table
                DROP TABLE IF EXISTS tags;

                -- Rename uploads.tag_name to version if it exists and version doesn't
                IF EXISTS (
                    SELECT 1 FROM information_schema.columns
                    WHERE table_name = 'uploads' AND column_name = 'tag_name'
                ) AND NOT EXISTS (
                    SELECT 1 FROM information_schema.columns
                    WHERE table_name = 'uploads' AND column_name = 'version'
                ) THEN
                    ALTER TABLE uploads RENAME COLUMN tag_name TO version;
                END IF;
            END $$;
            """,
        ),
    ]

    with engine.connect() as conn:

175  backend/app/db_utils.py  (new file)
@@ -0,0 +1,175 @@
"""
Database utilities for optimized artifact operations.

Provides batch operations to eliminate N+1 queries.
"""

import logging
from typing import Optional

from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.orm import Session

from .models import Artifact, ArtifactDependency, CachedUrl

logger = logging.getLogger(__name__)


class ArtifactRepository:
    """
    Optimized database operations for artifact storage.

    Key optimizations:
    - Atomic upserts using ON CONFLICT
    - Batch inserts for dependencies
    - Joined queries to avoid N+1
    """

    def __init__(self, db: Session):
        self.db = db

    @staticmethod
    def _format_dependency_values(
        artifact_id: str,
        dependencies: list[tuple[str, str, str]],
    ) -> list[dict]:
        """
        Format dependencies for batch insert.

        Args:
            artifact_id: SHA256 of the artifact
            dependencies: List of (project, package, version_constraint)

        Returns:
            List of dicts ready for bulk insert.
        """
        return [
            {
                "artifact_id": artifact_id,
                "dependency_project": proj,
                "dependency_package": pkg,
                "version_constraint": ver,
            }
            for proj, pkg, ver in dependencies
        ]

    def get_or_create_artifact(
        self,
        sha256: str,
        size: int,
        filename: str,
        content_type: Optional[str] = None,
        created_by: str = "system",
        s3_key: Optional[str] = None,
    ) -> tuple[Artifact, bool]:
        """
        Get existing artifact or create new one atomically.

        Uses INSERT ... ON CONFLICT DO UPDATE to handle races.
        If artifact exists, increments ref_count.

        Args:
            sha256: Content hash (primary key)
            size: File size in bytes
            filename: Original filename
            content_type: MIME type
            created_by: User who created the artifact
            s3_key: S3 storage key (defaults to standard path)

        Returns:
            (artifact, created) tuple where created is True for new artifacts.
        """
        if s3_key is None:
            s3_key = f"fruits/{sha256[:2]}/{sha256[2:4]}/{sha256}"

        stmt = pg_insert(Artifact).values(
            id=sha256,
            size=size,
            original_name=filename,
            content_type=content_type,
            ref_count=1,
            created_by=created_by,
            s3_key=s3_key,
        ).on_conflict_do_update(
            index_elements=['id'],
            set_={'ref_count': Artifact.ref_count + 1}
        ).returning(Artifact)

        result = self.db.execute(stmt)
        artifact = result.scalar_one()

        # Check if this was an insert or update by comparing ref_count
        # ref_count=1 means new, >1 means existing
        created = artifact.ref_count == 1

        return artifact, created

    def batch_upsert_dependencies(
        self,
        artifact_id: str,
        dependencies: list[tuple[str, str, str]],
    ) -> int:
        """
        Insert dependencies in a single batch operation.

        Uses ON CONFLICT DO NOTHING to skip duplicates.

        Args:
            artifact_id: SHA256 of the artifact
            dependencies: List of (project, package, version_constraint)

        Returns:
            Number of dependencies inserted.
        """
        if not dependencies:
            return 0

        values = self._format_dependency_values(artifact_id, dependencies)

        stmt = pg_insert(ArtifactDependency).values(values)
        stmt = stmt.on_conflict_do_nothing(
            index_elements=['artifact_id', 'dependency_project', 'dependency_package']
        )

        result = self.db.execute(stmt)
        return result.rowcount

    def get_cached_url_with_artifact(
        self,
        url_hash: str,
    ) -> Optional[tuple[CachedUrl, Artifact]]:
        """
        Get cached URL and its artifact in a single query.

        Args:
            url_hash: SHA256 of the URL

        Returns:
            (CachedUrl, Artifact) tuple or None if not found.
        """
        result = (
            self.db.query(CachedUrl, Artifact)
            .join(Artifact, CachedUrl.artifact_id == Artifact.id)
            .filter(CachedUrl.url_hash == url_hash)
            .first()
        )
        return result

    def get_artifact_dependencies(
        self,
        artifact_id: str,
    ) -> list[ArtifactDependency]:
        """
        Get all dependencies for an artifact in a single query.

        Args:
            artifact_id: SHA256 of the artifact

        Returns:
            List of ArtifactDependency objects.
        """
        return (
            self.db.query(ArtifactDependency)
            .filter(ArtifactDependency.artifact_id == artifact_id)
            .all()
        )
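For illustration, a minimal sketch (not part of this diff) of how ArtifactRepository might be used on an upload path to avoid N+1 dependency inserts; the dependency tuples and function name are placeholders:

    from sqlalchemy.orm import Session

    def store_artifact_with_deps(db: Session, sha256: str, size: int, filename: str) -> None:
        repo = ArtifactRepository(db)
        artifact, created = repo.get_or_create_artifact(sha256, size, filename)

        # Placeholder dependency tuples: (project, package, version_constraint)
        deps = [("shared", "requests", ">=2.31"), ("shared", "urllib3", "*")]
        inserted = repo.batch_upsert_dependencies(artifact.id, deps)

        db.commit()
        logger.info(f"artifact created={created}, dependencies inserted={inserted}")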
(File diff suppressed because it is too large.)

179  backend/app/http_client.py  (new file)
@@ -0,0 +1,179 @@
"""
HTTP client manager with connection pooling and lifecycle management.

Provides:
- Shared connection pools for upstream requests
- Per-upstream client isolation when needed
- Thread pool for blocking I/O operations
- FastAPI lifespan integration
"""

import asyncio
import logging
from concurrent.futures import ThreadPoolExecutor
from typing import Any, Callable, Optional

import httpx

from .config import Settings

logger = logging.getLogger(__name__)


class HttpClientManager:
    """
    Manages httpx.AsyncClient pools with FastAPI lifespan integration.

    Features:
    - Default shared pool for general requests
    - Per-upstream pools for sources needing specific config/auth
    - Dedicated thread pool for blocking operations
    - Graceful shutdown
    """

    def __init__(self, settings: Settings):
        self.max_connections = settings.http_max_connections
        self.max_keepalive = settings.http_max_keepalive
        self.connect_timeout = settings.http_connect_timeout
        self.read_timeout = settings.http_read_timeout
        self.worker_threads = settings.http_worker_threads

        self._default_client: Optional[httpx.AsyncClient] = None
        self._upstream_clients: dict[str, httpx.AsyncClient] = {}
        self._executor: Optional[ThreadPoolExecutor] = None
        self._started = False

    async def startup(self) -> None:
        """Initialize clients and thread pool. Called by FastAPI lifespan."""
        if self._started:
            return

        logger.info(
            f"Starting HttpClientManager: max_connections={self.max_connections}, "
            f"worker_threads={self.worker_threads}"
        )

        # Create connection limits
        limits = httpx.Limits(
            max_connections=self.max_connections,
            max_keepalive_connections=self.max_keepalive,
        )

        # Create timeout config
        timeout = httpx.Timeout(
            connect=self.connect_timeout,
            read=self.read_timeout,
            write=self.read_timeout,
            pool=self.connect_timeout,
        )

        # Create default client
        self._default_client = httpx.AsyncClient(
            limits=limits,
            timeout=timeout,
            follow_redirects=False,  # Handle redirects manually for auth
        )

        # Create thread pool for blocking operations
        self._executor = ThreadPoolExecutor(
            max_workers=self.worker_threads,
            thread_name_prefix="orchard-blocking-",
        )

        self._started = True
        logger.info("HttpClientManager started")

    async def shutdown(self) -> None:
        """Close all clients and thread pool. Called by FastAPI lifespan."""
        if not self._started:
            return

        logger.info("Shutting down HttpClientManager")

        # Close default client
        if self._default_client:
            await self._default_client.aclose()
            self._default_client = None

        # Close upstream-specific clients
        for name, client in self._upstream_clients.items():
            logger.debug(f"Closing upstream client: {name}")
            await client.aclose()
        self._upstream_clients.clear()

        # Shutdown thread pool
        if self._executor:
            self._executor.shutdown(wait=True)
            self._executor = None

        self._started = False
        logger.info("HttpClientManager shutdown complete")

    def get_client(self, upstream_name: Optional[str] = None) -> httpx.AsyncClient:
        """
        Get HTTP client for making requests.

        Args:
            upstream_name: Optional upstream source name for dedicated pool.
                If None, returns the default shared client.

        Returns:
            httpx.AsyncClient configured for the request.

        Raises:
            RuntimeError: If manager not started.
        """
        if not self._started or not self._default_client:
            raise RuntimeError("HttpClientManager not started. Call startup() first.")

        if upstream_name and upstream_name in self._upstream_clients:
            return self._upstream_clients[upstream_name]

        return self._default_client

    async def run_blocking(self, func: Callable[..., Any], *args: Any) -> Any:
        """
        Run a blocking function in the thread pool.

        Use this for:
        - File I/O operations
        - Archive extraction (zipfile, tarfile)
        - Hash computation on large data

        Args:
            func: Synchronous function to execute
            *args: Arguments to pass to the function

        Returns:
            The function's return value.
        """
        if not self._executor:
            raise RuntimeError("HttpClientManager not started. Call startup() first.")

        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(self._executor, func, *args)

    @property
    def active_connections(self) -> int:
        """Get approximate number of active connections (for health checks)."""
        if not self._default_client:
            return 0
        # httpx doesn't expose this directly, return pool size as approximation
        return self.max_connections

    @property
    def pool_size(self) -> int:
        """Get configured pool size."""
        return self.max_connections

    @property
    def executor_active(self) -> int:
        """Get number of active thread pool workers."""
        if not self._executor:
            return 0
        return len(self._executor._threads)

    @property
    def executor_max(self) -> int:
        """Get max thread pool workers."""
        return self.worker_threads
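For illustration, a minimal sketch (not part of this diff) of using the shared pool and the blocking-work helper from request-handling code; the checksum helper and URL are placeholders:

    import hashlib

    def _sha256_of(data: bytes) -> str:
        return hashlib.sha256(data).hexdigest()

    async def fetch_and_hash(manager: HttpClientManager, url: str) -> str:
        client = manager.get_client()  # shared pooled AsyncClient
        resp = await client.get(url)
        resp.raise_for_status()
        # Offload CPU-bound hashing to the manager's thread pool
        return await manager.run_blocking(_sha256_of, resp.content)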
@@ -15,6 +15,8 @@ from .pypi_proxy import router as pypi_router
from .seed import seed_database
from .auth import create_default_admin
from .rate_limit import limiter
from .http_client import HttpClientManager
from .cache_service import CacheService

settings = get_settings()
logging.basicConfig(level=logging.INFO)
@@ -38,6 +40,17 @@ async def lifespan(app: FastAPI):
    finally:
        db.close()

    # Initialize infrastructure services
    logger.info("Initializing infrastructure services...")

    app.state.http_client = HttpClientManager(settings)
    await app.state.http_client.startup()

    app.state.cache = CacheService(settings)
    await app.state.cache.startup()

    logger.info("Infrastructure services ready")

    # Seed test data in development mode
    if settings.is_development:
        logger.info(f"Running in {settings.env} mode - checking for seed data")
@@ -50,7 +63,12 @@ async def lifespan(app: FastAPI):
        logger.info(f"Running in {settings.env} mode - skipping seed data")

    yield
    # Shutdown: cleanup if needed

    # Shutdown infrastructure services
    logger.info("Shutting down infrastructure services...")
    await app.state.http_client.shutdown()
    await app.state.cache.shutdown()
    logger.info("Shutdown complete")


app = FastAPI(

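The changelog notes that /health now reports HTTP pool size, worker threads, and Redis status. The handler below is only an illustrative sketch of how that could be assembled from app.state; the actual route lives in a part of the diff not shown here.

    from fastapi import Request

    async def health(request: Request) -> dict:
        http_client: HttpClientManager = request.app.state.http_client
        cache: CacheService = request.app.state.cache
        return {
            "status": "ok",
            "http_pool_size": http_client.pool_size,
            "worker_threads": http_client.executor_max,
            "redis_connected": await cache.ping(),
        }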
@@ -71,7 +71,6 @@ class Package(Base):
    )

    project = relationship("Project", back_populates="packages")
    tags = relationship("Tag", back_populates="package", cascade="all, delete-orphan")
    uploads = relationship(
        "Upload", back_populates="package", cascade="all, delete-orphan"
    )
@@ -120,7 +119,6 @@ class Artifact(Base):
    ref_count = Column(Integer, default=1)
    s3_key = Column(String(1024), nullable=False)

    tags = relationship("Tag", back_populates="artifact")
    uploads = relationship("Upload", back_populates="artifact")
    versions = relationship("PackageVersion", back_populates="artifact")
    dependencies = relationship(
@@ -151,65 +149,6 @@ class Artifact(Base):
    )


class Tag(Base):
    __tablename__ = "tags"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    package_id = Column(
        UUID(as_uuid=True),
        ForeignKey("packages.id", ondelete="CASCADE"),
        nullable=False,
    )
    name = Column(String(255), nullable=False)
    artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    updated_at = Column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
    )
    created_by = Column(String(255), nullable=False)

    package = relationship("Package", back_populates="tags")
    artifact = relationship("Artifact", back_populates="tags")
    history = relationship(
        "TagHistory", back_populates="tag", cascade="all, delete-orphan"
    )

    __table_args__ = (
        Index("idx_tags_package_id", "package_id"),
        Index("idx_tags_artifact_id", "artifact_id"),
        Index(
            "idx_tags_package_name", "package_id", "name", unique=True
        ),  # Composite unique index
        Index(
            "idx_tags_package_created_at", "package_id", "created_at"
        ),  # For recent tags queries
    )


class TagHistory(Base):
    __tablename__ = "tag_history"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tag_id = Column(
        UUID(as_uuid=True), ForeignKey("tags.id", ondelete="CASCADE"), nullable=False
    )
    old_artifact_id = Column(String(64), ForeignKey("artifacts.id"))
    new_artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
    change_type = Column(String(20), nullable=False, default="update")
    changed_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    changed_by = Column(String(255), nullable=False)

    tag = relationship("Tag", back_populates="history")

    __table_args__ = (
        Index("idx_tag_history_tag_id", "tag_id"),
        Index("idx_tag_history_changed_at", "changed_at"),
        CheckConstraint(
            "change_type IN ('create', 'update', 'delete')", name="check_change_type"
        ),
    )


class PackageVersion(Base):
    """Immutable version record for a package-artifact relationship.

@@ -249,7 +188,7 @@ class Upload(Base):
    artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
    package_id = Column(UUID(as_uuid=True), ForeignKey("packages.id"), nullable=False)
    original_name = Column(String(1024))
    tag_name = Column(String(255))  # Tag assigned during upload
    version = Column(String(255))  # Version assigned during upload
    user_agent = Column(String(512))  # Client identification
    duration_ms = Column(Integer)  # Upload timing in milliseconds
    deduplicated = Column(Boolean, default=False)  # Whether artifact was deduplicated
@@ -524,8 +463,8 @@ class PackageHistory(Base):
class ArtifactDependency(Base):
    """Dependency declared by an artifact on another package.

    Each artifact can declare dependencies on other packages, specifying either
    an exact version or a tag. This enables recursive dependency resolution.
    Each artifact can declare dependencies on other packages, specifying a version.
    This enables recursive dependency resolution.
    """

    __tablename__ = "artifact_dependencies"
@@ -538,20 +477,13 @@ class ArtifactDependency(Base):
    )
    dependency_project = Column(String(255), nullable=False)
    dependency_package = Column(String(255), nullable=False)
    version_constraint = Column(String(255), nullable=True)
    tag_constraint = Column(String(255), nullable=True)
    version_constraint = Column(String(255), nullable=False)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)

    # Relationship to the artifact that declares this dependency
    artifact = relationship("Artifact", back_populates="dependencies")

    __table_args__ = (
        # Exactly one of version_constraint or tag_constraint must be set
        CheckConstraint(
            "(version_constraint IS NOT NULL AND tag_constraint IS NULL) OR "
            "(version_constraint IS NULL AND tag_constraint IS NOT NULL)",
            name="check_constraint_type",
        ),
        # Each artifact can only depend on a specific project/package once
        Index(
            "idx_artifact_dependencies_artifact_id",

@@ -12,7 +12,6 @@ from .models import (
    Project,
    Package,
    Artifact,
    Tag,
    Upload,
    PackageVersion,
    ArtifactDependency,
@@ -60,7 +59,6 @@ def purge_seed_data(db: Session) -> dict:

    results = {
        "dependencies_deleted": 0,
        "tags_deleted": 0,
        "versions_deleted": 0,
        "uploads_deleted": 0,
        "artifacts_deleted": 0,
@@ -103,15 +101,7 @@ def purge_seed_data(db: Session) -> dict:
        results["dependencies_deleted"] = count
        logger.info(f"Deleted {count} artifact dependencies")

    # 2. Delete tags
    if seed_package_ids:
        count = db.query(Tag).filter(Tag.package_id.in_(seed_package_ids)).delete(
            synchronize_session=False
        )
        results["tags_deleted"] = count
        logger.info(f"Deleted {count} tags")

    # 3. Delete package versions
    # 2. Delete package versions
    if seed_package_ids:
        count = db.query(PackageVersion).filter(
            PackageVersion.package_id.in_(seed_package_ids)
@@ -119,7 +109,7 @@ def purge_seed_data(db: Session) -> dict:
        results["versions_deleted"] = count
        logger.info(f"Deleted {count} package versions")

    # 4. Delete uploads
    # 3. Delete uploads
    if seed_package_ids:
        count = db.query(Upload).filter(Upload.package_id.in_(seed_package_ids)).delete(
            synchronize_session=False
@@ -127,7 +117,7 @@ def purge_seed_data(db: Session) -> dict:
        results["uploads_deleted"] = count
        logger.info(f"Deleted {count} uploads")

    # 5. Delete S3 objects for seed artifacts
    # 4. Delete S3 objects for seed artifacts
    if seed_artifact_ids:
        seed_artifacts = db.query(Artifact).filter(Artifact.id.in_(seed_artifact_ids)).all()
        for artifact in seed_artifacts:
@@ -139,8 +129,8 @@ def purge_seed_data(db: Session) -> dict:
                logger.warning(f"Failed to delete S3 object {artifact.s3_key}: {e}")
        logger.info(f"Deleted {results['s3_objects_deleted']} S3 objects")

    # 6. Delete artifacts (only those with ref_count that would be 0 after our deletions)
    # Since we deleted all tags/versions pointing to these artifacts, we can delete them
    # 5. Delete artifacts (only those with ref_count that would be 0 after our deletions)
    # Since we deleted all versions pointing to these artifacts, we can delete them
    if seed_artifact_ids:
        count = db.query(Artifact).filter(Artifact.id.in_(seed_artifact_ids)).delete(
            synchronize_session=False
@@ -148,7 +138,7 @@ def purge_seed_data(db: Session) -> dict:
        results["artifacts_deleted"] = count
        logger.info(f"Deleted {count} artifacts")

    # 7. Delete packages
    # 6. Delete packages
    if seed_package_ids:
        count = db.query(Package).filter(Package.id.in_(seed_package_ids)).delete(
            synchronize_session=False
@@ -156,7 +146,7 @@ def purge_seed_data(db: Session) -> dict:
        results["packages_deleted"] = count
        logger.info(f"Deleted {count} packages")

    # 8. Delete access permissions for seed projects
    # 7. Delete access permissions for seed projects
    if seed_project_ids:
        count = db.query(AccessPermission).filter(
            AccessPermission.project_id.in_(seed_project_ids)
@@ -164,14 +154,14 @@ def purge_seed_data(db: Session) -> dict:
        results["permissions_deleted"] = count
        logger.info(f"Deleted {count} access permissions")

    # 9. Delete seed projects
    # 8. Delete seed projects
    count = db.query(Project).filter(Project.name.in_(SEED_PROJECT_NAMES)).delete(
        synchronize_session=False
    )
    results["projects_deleted"] = count
    logger.info(f"Deleted {count} projects")

    # 10. Find and delete seed team
    # 9. Find and delete seed team
    seed_team = db.query(Team).filter(Team.slug == SEED_TEAM_SLUG).first()
    if seed_team:
        # Delete team memberships first
@@ -186,7 +176,7 @@ def purge_seed_data(db: Session) -> dict:
        results["teams_deleted"] = 1
        logger.info(f"Deleted team: {SEED_TEAM_SLUG}")

    # 11. Delete seed users (but NOT admin)
    # 10. Delete seed users (but NOT admin)
    seed_users = db.query(User).filter(User.username.in_(SEED_USERNAMES)).all()
    for user in seed_users:
        # Delete any remaining team memberships for this user

(File diff suppressed because it is too large.)

426  backend/app/registry_client.py  (new file)
@@ -0,0 +1,426 @@
|
||||
"""
|
||||
Registry client abstraction for upstream package registries.
|
||||
|
||||
Provides a pluggable interface for fetching packages from upstream registries
|
||||
(PyPI, npm, Maven, etc.) during dependency resolution with auto-fetch enabled.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Optional, TYPE_CHECKING
|
||||
from urllib.parse import urljoin, urlparse
|
||||
|
||||
import httpx
|
||||
from packaging.specifiers import SpecifierSet, InvalidSpecifier
|
||||
from packaging.version import Version, InvalidVersion
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .storage import S3Storage
|
||||
from .http_client import HttpClientManager
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class VersionInfo:
|
||||
"""Information about a package version from an upstream registry."""
|
||||
|
||||
version: str
|
||||
download_url: str
|
||||
filename: str
|
||||
sha256: Optional[str] = None
|
||||
size: Optional[int] = None
|
||||
content_type: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class FetchResult:
|
||||
"""Result of fetching a package from upstream."""
|
||||
|
||||
artifact_id: str # SHA256 hash
|
||||
size: int
|
||||
version: str
|
||||
filename: str
|
||||
already_cached: bool = False
|
||||
|
||||
|
||||
class RegistryClient(ABC):
|
||||
"""Abstract base class for upstream registry clients."""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def source_type(self) -> str:
|
||||
"""Return the source type this client handles (e.g., 'pypi', 'npm')."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def get_available_versions(self, package_name: str) -> List[str]:
|
||||
"""
|
||||
Get all available versions of a package from upstream.
|
||||
|
||||
Args:
|
||||
package_name: The normalized package name
|
||||
|
||||
Returns:
|
||||
List of version strings, sorted from oldest to newest
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def resolve_constraint(
|
||||
self, package_name: str, constraint: str
|
||||
) -> Optional[VersionInfo]:
|
||||
"""
|
||||
Find the best version matching a constraint.
|
||||
|
||||
Args:
|
||||
package_name: The normalized package name
|
||||
constraint: Version constraint (e.g., '>=1.9', '<2.0,>=1.5', '*')
|
||||
|
||||
Returns:
|
||||
VersionInfo with download URL, or None if no matching version found
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def fetch_package(
|
||||
self,
|
||||
package_name: str,
|
||||
version_info: VersionInfo,
|
||||
db: Session,
|
||||
storage: "S3Storage",
|
||||
) -> Optional[FetchResult]:
|
||||
"""
|
||||
Fetch and cache a package from upstream.
|
||||
|
||||
Args:
|
||||
package_name: The normalized package name
|
||||
version_info: Version details including download URL
|
||||
db: Database session for creating records
|
||||
storage: S3 storage for caching the artifact
|
||||
|
||||
Returns:
|
||||
FetchResult with artifact_id, or None if fetch failed
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class PyPIRegistryClient(RegistryClient):
|
||||
"""PyPI registry client using the JSON API."""
|
||||
|
||||
# Timeout configuration for PyPI requests
|
||||
CONNECT_TIMEOUT = 30.0
|
||||
READ_TIMEOUT = 60.0
|
||||
DOWNLOAD_TIMEOUT = 300.0 # Longer timeout for file downloads
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
http_client: httpx.AsyncClient,
|
||||
upstream_sources: List,
|
||||
pypi_api_url: str = "https://pypi.org/pypi",
|
||||
):
|
||||
"""
|
||||
Initialize PyPI registry client.
|
||||
|
||||
Args:
|
||||
http_client: Shared async HTTP client
|
||||
upstream_sources: List of configured upstream sources for auth
|
||||
pypi_api_url: Base URL for PyPI JSON API
|
||||
"""
|
||||
self.client = http_client
|
||||
self.sources = upstream_sources
|
||||
self.api_url = pypi_api_url
|
||||
|
||||
@property
|
||||
def source_type(self) -> str:
|
||||
return "pypi"
|
||||
|
||||
def _normalize_package_name(self, name: str) -> str:
|
||||
"""Normalize a PyPI package name per PEP 503."""
|
||||
return re.sub(r"[-_.]+", "-", name).lower()
|
||||
|
||||
def _get_auth_headers(self) -> dict:
|
||||
"""Get authentication headers from configured sources."""
|
||||
headers = {"User-Agent": "Orchard-Registry-Client/1.0"}
|
||||
if self.sources:
|
||||
source = self.sources[0]
|
||||
if hasattr(source, "auth_type"):
|
||||
if source.auth_type == "bearer":
|
||||
password = (
|
||||
source.get_password()
|
||||
if hasattr(source, "get_password")
|
||||
else getattr(source, "password", None)
|
||||
)
|
||||
if password:
|
||||
headers["Authorization"] = f"Bearer {password}"
|
||||
elif source.auth_type == "api_key":
|
||||
custom_headers = (
|
||||
source.get_headers()
|
||||
if hasattr(source, "get_headers")
|
||||
else {}
|
||||
)
|
||||
if custom_headers:
|
||||
headers.update(custom_headers)
|
||||
return headers
|
||||
|
||||
def _get_basic_auth(self) -> Optional[tuple]:
|
||||
"""Get basic auth credentials if configured."""
|
||||
if self.sources:
|
||||
source = self.sources[0]
|
||||
if hasattr(source, "auth_type") and source.auth_type == "basic":
|
||||
username = getattr(source, "username", None)
|
||||
if username:
|
||||
password = (
|
||||
source.get_password()
|
||||
if hasattr(source, "get_password")
|
||||
else getattr(source, "password", "")
|
||||
)
|
||||
return (username, password or "")
|
||||
return None
|
||||
|
||||
async def get_available_versions(self, package_name: str) -> List[str]:
|
||||
"""Get all available versions from PyPI JSON API."""
|
||||
normalized = self._normalize_package_name(package_name)
|
||||
url = f"{self.api_url}/{normalized}/json"
|
||||
|
||||
headers = self._get_auth_headers()
|
||||
auth = self._get_basic_auth()
|
||||
timeout = httpx.Timeout(self.READ_TIMEOUT, connect=self.CONNECT_TIMEOUT)
|
||||
|
||||
try:
|
||||
response = await self.client.get(
|
||||
url, headers=headers, auth=auth, timeout=timeout
|
||||
)
|
||||
|
||||
if response.status_code == 404:
|
||||
logger.debug(f"Package {normalized} not found on PyPI")
|
||||
return []
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.warning(
|
||||
f"PyPI API returned {response.status_code} for {normalized}"
|
||||
)
|
||||
return []
|
||||
|
||||
data = response.json()
|
||||
releases = data.get("releases", {})
|
||||
|
||||
# Filter to valid versions and sort
|
||||
versions = []
|
||||
for v in releases.keys():
|
||||
try:
|
||||
Version(v)
|
||||
versions.append(v)
|
||||
except InvalidVersion:
|
||||
continue
|
||||
|
||||
versions.sort(key=lambda x: Version(x))
|
||||
return versions
|
||||
|
||||
except httpx.RequestError as e:
|
||||
logger.warning(f"Failed to query PyPI for {normalized}: {e}")
|
||||
return []
|
||||
except Exception as e:
|
||||
logger.warning(f"Error parsing PyPI response for {normalized}: {e}")
|
||||
return []
|
||||
|
||||
    async def resolve_constraint(
        self, package_name: str, constraint: str
    ) -> Optional[VersionInfo]:
        """Find best version matching constraint from PyPI."""
        normalized = self._normalize_package_name(package_name)
        url = f"{self.api_url}/{normalized}/json"

        headers = self._get_auth_headers()
        auth = self._get_basic_auth()
        timeout = httpx.Timeout(self.READ_TIMEOUT, connect=self.CONNECT_TIMEOUT)

        try:
            response = await self.client.get(
                url, headers=headers, auth=auth, timeout=timeout
            )

            if response.status_code == 404:
                logger.debug(f"Package {normalized} not found on PyPI")
                return None

            if response.status_code != 200:
                logger.warning(
                    f"PyPI API returned {response.status_code} for {normalized}"
                )
                return None

            data = response.json()
            releases = data.get("releases", {})

            # Handle wildcard - return latest version
            if constraint == "*":
                latest_version = data.get("info", {}).get("version")
                if latest_version and latest_version in releases:
                    return self._get_version_info(
                        normalized, latest_version, releases[latest_version]
                    )
                return None

            # Parse constraint
            # If constraint looks like a bare version (no operator), treat as exact match
            # e.g., "2025.10.5" -> "==2025.10.5"
            effective_constraint = constraint
            if constraint and constraint[0].isdigit():
                effective_constraint = f"=={constraint}"
                logger.debug(
                    f"Bare version '{constraint}' for {normalized}, "
                    f"treating as exact match '{effective_constraint}'"
                )

            try:
                specifier = SpecifierSet(effective_constraint)
            except InvalidSpecifier:
                # Invalid constraint - treat as wildcard
                logger.warning(
                    f"Invalid version constraint '{constraint}' for {normalized}, "
                    "treating as wildcard"
                )
                latest_version = data.get("info", {}).get("version")
                if latest_version and latest_version in releases:
                    return self._get_version_info(
                        normalized, latest_version, releases[latest_version]
                    )
                return None

            # Find matching versions
            matching = []
            for v_str, files in releases.items():
                if not files:  # Skip versions with no files
                    continue
                try:
                    v = Version(v_str)
                    if v in specifier:
                        matching.append((v_str, v, files))
                except InvalidVersion:
                    continue

            if not matching:
                logger.debug(
                    f"No versions of {normalized} match constraint '{constraint}'"
                )
                return None

            # Sort by version and return highest match
            matching.sort(key=lambda x: x[1], reverse=True)
            best_version, _, best_files = matching[0]

            return self._get_version_info(normalized, best_version, best_files)

        except httpx.RequestError as e:
            logger.warning(f"Failed to query PyPI for {normalized}: {e}")
            return None
        except Exception as e:
            logger.warning(f"Error resolving {normalized}@{constraint}: {e}")
            return None

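    # A standalone reduction of the matching logic above (illustrative only;
    # `best_match` is not a name defined in this module). It shows how a bare
    # version is promoted to an exact pin and how SpecifierSet filters versions:
    #
    #   from packaging.specifiers import InvalidSpecifier, SpecifierSet
    #   from packaging.version import InvalidVersion, Version
    #
    #   def best_match(constraint, available):
    #       if constraint and constraint[0].isdigit():
    #           constraint = f"=={constraint}"      # "2025.10.5" -> "==2025.10.5"
    #       spec = SpecifierSet(constraint)
    #       candidates = []
    #       for v_str in available:
    #           try:
    #               v = Version(v_str)
    #           except InvalidVersion:
    #               continue
    #           if v in spec:
    #               candidates.append((v, v_str))
    #       return max(candidates)[1] if candidates else None
    #
    #   best_match(">=2.0,<3", ["1.9", "2.4.1", "3.0"])  # -> "2.4.1"
    #   best_match("2.4.1", ["1.9", "2.4.1", "3.0"])     # -> "2.4.1"
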
    def _get_version_info(
        self, package_name: str, version: str, files: List[dict]
    ) -> Optional[VersionInfo]:
        """Extract download info from PyPI release files."""
        if not files:
            return None

        # Prefer wheel over sdist
        wheel_file = None
        sdist_file = None

        for f in files:
            filename = f.get("filename", "")
            if filename.endswith(".whl"):
                # Prefer platform-agnostic wheels
                if "py3-none-any" in filename or wheel_file is None:
                    wheel_file = f
            elif filename.endswith(".tar.gz") and sdist_file is None:
                sdist_file = f

        selected = wheel_file or sdist_file
        if not selected:
            # Fall back to first available file
            selected = files[0]

        return VersionInfo(
            version=version,
            download_url=selected.get("url", ""),
            filename=selected.get("filename", ""),
            sha256=selected.get("digests", {}).get("sha256"),
            size=selected.get("size"),
            content_type="application/zip"
            if selected.get("filename", "").endswith(".whl")
            else "application/gzip",
        )

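    # Illustration (not part of the module): given both an sdist and a
    # platform-agnostic wheel for the same release, the selection above picks
    # the wheel, so the hypothetical call below would return the .whl entry's
    # url/sha256/size with content_type "application/zip".
    #
    #   files = [
    #       {"filename": "example-1.0.tar.gz", "url": "...", "digests": {"sha256": "..."}, "size": 1000},
    #       {"filename": "example-1.0-py3-none-any.whl", "url": "...", "digests": {"sha256": "..."}, "size": 900},
    #   ]
    #   info = client._get_version_info("example", "1.0", files)
    #   assert info.filename.endswith(".whl")
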
    async def fetch_package(
        self,
        package_name: str,
        version_info: VersionInfo,
        db: Session,
        storage: "S3Storage",
    ) -> Optional[FetchResult]:
        """Fetch and cache a PyPI package."""
        # Import here to avoid circular imports
        from .pypi_proxy import fetch_and_cache_pypi_package

        normalized = self._normalize_package_name(package_name)

        logger.info(
            f"Fetching {normalized}=={version_info.version} from upstream PyPI"
        )

        result = await fetch_and_cache_pypi_package(
            db=db,
            storage=storage,
            http_client=self.client,
            package_name=normalized,
            filename=version_info.filename,
            download_url=version_info.download_url,
            expected_sha256=version_info.sha256,
        )

        if result is None:
            return None

        return FetchResult(
            artifact_id=result["artifact_id"],
            size=result["size"],
            version=version_info.version,
            filename=version_info.filename,
            already_cached=result.get("already_cached", False),
        )

def get_registry_client(
    source_type: str,
    http_client: httpx.AsyncClient,
    upstream_sources: List,
) -> Optional[RegistryClient]:
    """
    Factory function to get a registry client for a source type.

    Args:
        source_type: The registry type ('pypi', 'npm', etc.)
        http_client: Shared async HTTP client
        upstream_sources: List of configured upstream sources

    Returns:
        RegistryClient for the source type, or None if not supported
    """
    if source_type == "pypi":
        # Filter to PyPI sources
        pypi_sources = [
            s for s in upstream_sources if getattr(s, "source_type", "") == "pypi"
        ]
        return PyPIRegistryClient(http_client, pypi_sources)

    # Future: Add npm, maven, etc.
    logger.debug(f"No registry client available for source type: {source_type}")
    return None

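# Minimal usage sketch for the factory above. The surrounding objects
# (`sources`, any web-framework wiring, error handling) are placeholders and
# not names defined in this module.
async def _example_resolve(sources: List) -> Optional[VersionInfo]:
    async with httpx.AsyncClient() as client:
        registry = get_registry_client("pypi", client, sources)
        if registry is None:
            return None  # unsupported source type (npm, maven, ...)
        # Resolve a constraint; a caller would typically follow up with
        # registry.fetch_package(...) using a db session and storage handle.
        return await registry.resolve_constraint("requests", ">=2.31")
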
@@ -9,7 +9,6 @@ from .base import BaseRepository
 from .project import ProjectRepository
 from .package import PackageRepository
 from .artifact import ArtifactRepository
-from .tag import TagRepository
 from .upload import UploadRepository

 __all__ = [
@@ -17,6 +16,5 @@ __all__ = [
     "ProjectRepository",
     "PackageRepository",
     "ArtifactRepository",
-    "TagRepository",
     "UploadRepository",
 ]

@@ -8,7 +8,7 @@ from sqlalchemy import func, or_
 from uuid import UUID

 from .base import BaseRepository
-from ..models import Artifact, Tag, Upload, Package, Project
+from ..models import Artifact, PackageVersion, Upload, Package, Project


 class ArtifactRepository(BaseRepository[Artifact]):
@@ -77,14 +77,14 @@ class ArtifactRepository(BaseRepository[Artifact]):
|
||||
.all()
|
||||
)
|
||||
|
||||
def get_artifacts_without_tags(self, limit: int = 100) -> List[Artifact]:
|
||||
"""Get artifacts that have no tags pointing to them."""
|
||||
# Subquery to find artifact IDs that have tags
|
||||
tagged_artifacts = self.db.query(Tag.artifact_id).distinct().subquery()
|
||||
def get_artifacts_without_versions(self, limit: int = 100) -> List[Artifact]:
|
||||
"""Get artifacts that have no versions pointing to them."""
|
||||
# Subquery to find artifact IDs that have versions
|
||||
versioned_artifacts = self.db.query(PackageVersion.artifact_id).distinct().subquery()
|
||||
|
||||
return (
|
||||
self.db.query(Artifact)
|
||||
.filter(~Artifact.id.in_(tagged_artifacts))
|
||||
.filter(~Artifact.id.in_(versioned_artifacts))
|
||||
.limit(limit)
|
||||
.all()
|
||||
)
|
||||
@@ -115,34 +115,34 @@ class ArtifactRepository(BaseRepository[Artifact]):
|
||||
|
||||
return artifacts, total
|
||||
|
||||
def get_referencing_tags(self, artifact_id: str) -> List[Tuple[Tag, Package, Project]]:
|
||||
"""Get all tags referencing this artifact with package and project info."""
|
||||
def get_referencing_versions(self, artifact_id: str) -> List[Tuple[PackageVersion, Package, Project]]:
|
||||
"""Get all versions referencing this artifact with package and project info."""
|
||||
return (
|
||||
self.db.query(Tag, Package, Project)
|
||||
.join(Package, Tag.package_id == Package.id)
|
||||
self.db.query(PackageVersion, Package, Project)
|
||||
.join(Package, PackageVersion.package_id == Package.id)
|
||||
.join(Project, Package.project_id == Project.id)
|
||||
.filter(Tag.artifact_id == artifact_id)
|
||||
.filter(PackageVersion.artifact_id == artifact_id)
|
||||
.all()
|
||||
)
|
||||
|
||||
def search(self, query_str: str, limit: int = 10) -> List[Tuple[Tag, Artifact, str, str]]:
|
||||
def search(self, query_str: str, limit: int = 10) -> List[Tuple[PackageVersion, Artifact, str, str]]:
|
||||
"""
|
||||
Search artifacts by tag name or original filename.
|
||||
Returns (tag, artifact, package_name, project_name) tuples.
|
||||
Search artifacts by version or original filename.
|
||||
Returns (version, artifact, package_name, project_name) tuples.
|
||||
"""
|
||||
search_lower = query_str.lower()
|
||||
return (
|
||||
self.db.query(Tag, Artifact, Package.name, Project.name)
|
||||
.join(Artifact, Tag.artifact_id == Artifact.id)
|
||||
.join(Package, Tag.package_id == Package.id)
|
||||
self.db.query(PackageVersion, Artifact, Package.name, Project.name)
|
||||
.join(Artifact, PackageVersion.artifact_id == Artifact.id)
|
||||
.join(Package, PackageVersion.package_id == Package.id)
|
||||
.join(Project, Package.project_id == Project.id)
|
||||
.filter(
|
||||
or_(
|
||||
func.lower(Tag.name).contains(search_lower),
|
||||
func.lower(PackageVersion.version).contains(search_lower),
|
||||
func.lower(Artifact.original_name).contains(search_lower)
|
||||
)
|
||||
)
|
||||
.order_by(Tag.name)
|
||||
.order_by(PackageVersion.version)
|
||||
.limit(limit)
|
||||
.all()
|
||||
)
|
||||
|
||||
@@ -8,7 +8,7 @@ from sqlalchemy import func, or_, asc, desc
|
||||
from uuid import UUID
|
||||
|
||||
from .base import BaseRepository
|
||||
from ..models import Package, Project, Tag, Upload, Artifact
|
||||
from ..models import Package, Project, PackageVersion, Upload, Artifact
|
||||
|
||||
|
||||
class PackageRepository(BaseRepository[Package]):
|
||||
@@ -136,10 +136,10 @@ class PackageRepository(BaseRepository[Package]):
|
||||
return self.update(package, **updates)
|
||||
|
||||
def get_stats(self, package_id: UUID) -> dict:
|
||||
"""Get package statistics (tag count, artifact count, total size)."""
|
||||
tag_count = (
|
||||
self.db.query(func.count(Tag.id))
|
||||
.filter(Tag.package_id == package_id)
|
||||
"""Get package statistics (version count, artifact count, total size)."""
|
||||
version_count = (
|
||||
self.db.query(func.count(PackageVersion.id))
|
||||
.filter(PackageVersion.package_id == package_id)
|
||||
.scalar() or 0
|
||||
)
|
||||
|
||||
@@ -154,7 +154,7 @@ class PackageRepository(BaseRepository[Package]):
|
||||
)
|
||||
|
||||
return {
|
||||
"tag_count": tag_count,
|
||||
"version_count": version_count,
|
||||
"artifact_count": artifact_stats[0] if artifact_stats else 0,
|
||||
"total_size": artifact_stats[1] if artifact_stats else 0,
|
||||
}
|
||||
|
||||
@@ -1,168 +0,0 @@
|
||||
"""
|
||||
Tag repository for data access operations.
|
||||
"""
|
||||
|
||||
from typing import Optional, List, Tuple
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import func, or_, asc, desc
|
||||
from uuid import UUID
|
||||
|
||||
from .base import BaseRepository
|
||||
from ..models import Tag, TagHistory, Artifact, Package, Project
|
||||
|
||||
|
||||
class TagRepository(BaseRepository[Tag]):
|
||||
"""Repository for Tag entity operations."""
|
||||
|
||||
model = Tag
|
||||
|
||||
def get_by_name(self, package_id: UUID, name: str) -> Optional[Tag]:
|
||||
"""Get tag by name within a package."""
|
||||
return (
|
||||
self.db.query(Tag)
|
||||
.filter(Tag.package_id == package_id, Tag.name == name)
|
||||
.first()
|
||||
)
|
||||
|
||||
def get_with_artifact(self, package_id: UUID, name: str) -> Optional[Tuple[Tag, Artifact]]:
|
||||
"""Get tag with its artifact."""
|
||||
return (
|
||||
self.db.query(Tag, Artifact)
|
||||
.join(Artifact, Tag.artifact_id == Artifact.id)
|
||||
.filter(Tag.package_id == package_id, Tag.name == name)
|
||||
.first()
|
||||
)
|
||||
|
||||
def exists_by_name(self, package_id: UUID, name: str) -> bool:
|
||||
"""Check if tag with name exists in package."""
|
||||
return self.db.query(
|
||||
self.db.query(Tag)
|
||||
.filter(Tag.package_id == package_id, Tag.name == name)
|
||||
.exists()
|
||||
).scalar()
|
||||
|
||||
def list_by_package(
|
||||
self,
|
||||
package_id: UUID,
|
||||
page: int = 1,
|
||||
limit: int = 20,
|
||||
search: Optional[str] = None,
|
||||
sort: str = "name",
|
||||
order: str = "asc",
|
||||
) -> Tuple[List[Tuple[Tag, Artifact]], int]:
|
||||
"""
|
||||
List tags in a package with artifact metadata.
|
||||
|
||||
Returns tuple of ((tag, artifact) tuples, total_count).
|
||||
"""
|
||||
query = (
|
||||
self.db.query(Tag, Artifact)
|
||||
.join(Artifact, Tag.artifact_id == Artifact.id)
|
||||
.filter(Tag.package_id == package_id)
|
||||
)
|
||||
|
||||
# Apply search filter (tag name or artifact original filename)
|
||||
if search:
|
||||
search_lower = search.lower()
|
||||
query = query.filter(
|
||||
or_(
|
||||
func.lower(Tag.name).contains(search_lower),
|
||||
func.lower(Artifact.original_name).contains(search_lower)
|
||||
)
|
||||
)
|
||||
|
||||
# Get total count
|
||||
total = query.count()
|
||||
|
||||
# Apply sorting
|
||||
sort_columns = {
|
||||
"name": Tag.name,
|
||||
"created_at": Tag.created_at,
|
||||
}
|
||||
sort_column = sort_columns.get(sort, Tag.name)
|
||||
if order == "desc":
|
||||
query = query.order_by(desc(sort_column))
|
||||
else:
|
||||
query = query.order_by(asc(sort_column))
|
||||
|
||||
# Apply pagination
|
||||
offset = (page - 1) * limit
|
||||
results = query.offset(offset).limit(limit).all()
|
||||
|
||||
return results, total
|
||||
|
||||
def create_tag(
|
||||
self,
|
||||
package_id: UUID,
|
||||
name: str,
|
||||
artifact_id: str,
|
||||
created_by: str,
|
||||
) -> Tag:
|
||||
"""Create a new tag."""
|
||||
return self.create(
|
||||
package_id=package_id,
|
||||
name=name,
|
||||
artifact_id=artifact_id,
|
||||
created_by=created_by,
|
||||
)
|
||||
|
||||
def update_artifact(
|
||||
self,
|
||||
tag: Tag,
|
||||
new_artifact_id: str,
|
||||
changed_by: str,
|
||||
record_history: bool = True,
|
||||
) -> Tag:
|
||||
"""
|
||||
Update tag to point to a different artifact.
|
||||
Optionally records change in tag history.
|
||||
"""
|
||||
old_artifact_id = tag.artifact_id
|
||||
|
||||
if record_history and old_artifact_id != new_artifact_id:
|
||||
history = TagHistory(
|
||||
tag_id=tag.id,
|
||||
old_artifact_id=old_artifact_id,
|
||||
new_artifact_id=new_artifact_id,
|
||||
changed_by=changed_by,
|
||||
)
|
||||
self.db.add(history)
|
||||
|
||||
tag.artifact_id = new_artifact_id
|
||||
tag.created_by = changed_by
|
||||
self.db.flush()
|
||||
return tag
|
||||
|
||||
def get_history(self, tag_id: UUID) -> List[TagHistory]:
|
||||
"""Get tag change history."""
|
||||
return (
|
||||
self.db.query(TagHistory)
|
||||
.filter(TagHistory.tag_id == tag_id)
|
||||
.order_by(TagHistory.changed_at.desc())
|
||||
.all()
|
||||
)
|
||||
|
||||
def get_latest_in_package(self, package_id: UUID) -> Optional[Tag]:
|
||||
"""Get the most recently created/updated tag in a package."""
|
||||
return (
|
||||
self.db.query(Tag)
|
||||
.filter(Tag.package_id == package_id)
|
||||
.order_by(Tag.created_at.desc())
|
||||
.first()
|
||||
)
|
||||
|
||||
def get_by_artifact(self, artifact_id: str) -> List[Tag]:
|
||||
"""Get all tags pointing to an artifact."""
|
||||
return (
|
||||
self.db.query(Tag)
|
||||
.filter(Tag.artifact_id == artifact_id)
|
||||
.all()
|
||||
)
|
||||
|
||||
def count_by_artifact(self, artifact_id: str) -> int:
|
||||
"""Count tags pointing to an artifact."""
|
||||
return (
|
||||
self.db.query(func.count(Tag.id))
|
||||
.filter(Tag.artifact_id == artifact_id)
|
||||
.scalar() or 0
|
||||
)
|
||||
File diff suppressed because it is too large
@@ -33,6 +33,7 @@ class ProjectResponse(BaseModel):
|
||||
name: str
|
||||
description: Optional[str]
|
||||
is_public: bool
|
||||
is_system: bool = False
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
created_by: str
|
||||
@@ -113,14 +114,6 @@ class PackageUpdate(BaseModel):
|
||||
platform: Optional[str] = None
|
||||
|
||||
|
||||
class TagSummary(BaseModel):
|
||||
"""Lightweight tag info for embedding in package responses"""
|
||||
|
||||
name: str
|
||||
artifact_id: str
|
||||
created_at: datetime
|
||||
|
||||
|
||||
class PackageDetailResponse(BaseModel):
|
||||
"""Package with aggregated metadata"""
|
||||
|
||||
@@ -133,13 +126,9 @@ class PackageDetailResponse(BaseModel):
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
# Aggregated fields
|
||||
tag_count: int = 0
|
||||
artifact_count: int = 0
|
||||
total_size: int = 0
|
||||
latest_tag: Optional[str] = None
|
||||
latest_upload_at: Optional[datetime] = None
|
||||
# Recent tags (limit 5)
|
||||
recent_tags: List[TagSummary] = []
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
@@ -164,79 +153,6 @@ class ArtifactResponse(BaseModel):
|
||||
from_attributes = True
|
||||
|
||||
|
||||
# Tag schemas
|
||||
class TagCreate(BaseModel):
|
||||
name: str
|
||||
artifact_id: str
|
||||
|
||||
|
||||
class TagResponse(BaseModel):
|
||||
id: UUID
|
||||
package_id: UUID
|
||||
name: str
|
||||
artifact_id: str
|
||||
created_at: datetime
|
||||
created_by: str
|
||||
version: Optional[str] = None # Version of the artifact this tag points to
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
|
||||
class TagDetailResponse(BaseModel):
|
||||
"""Tag with embedded artifact metadata"""
|
||||
|
||||
id: UUID
|
||||
package_id: UUID
|
||||
name: str
|
||||
artifact_id: str
|
||||
created_at: datetime
|
||||
created_by: str
|
||||
version: Optional[str] = None # Version of the artifact this tag points to
|
||||
# Artifact metadata
|
||||
artifact_size: int
|
||||
artifact_content_type: Optional[str]
|
||||
artifact_original_name: Optional[str]
|
||||
artifact_created_at: datetime
|
||||
artifact_format_metadata: Optional[Dict[str, Any]] = None
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
|
||||
class TagHistoryResponse(BaseModel):
|
||||
"""History entry for tag changes"""
|
||||
|
||||
id: UUID
|
||||
tag_id: UUID
|
||||
old_artifact_id: Optional[str]
|
||||
new_artifact_id: str
|
||||
changed_at: datetime
|
||||
changed_by: str
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
|
||||
class TagHistoryDetailResponse(BaseModel):
|
||||
"""Tag history with artifact metadata for each version"""
|
||||
|
||||
id: UUID
|
||||
tag_id: UUID
|
||||
tag_name: str
|
||||
old_artifact_id: Optional[str]
|
||||
new_artifact_id: str
|
||||
changed_at: datetime
|
||||
changed_by: str
|
||||
# Artifact metadata for new artifact
|
||||
artifact_size: int
|
||||
artifact_original_name: Optional[str]
|
||||
artifact_content_type: Optional[str]
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
|
||||
# Audit log schemas
|
||||
class AuditLogResponse(BaseModel):
|
||||
"""Audit log entry response"""
|
||||
@@ -263,7 +179,7 @@ class UploadHistoryResponse(BaseModel):
|
||||
package_name: str
|
||||
project_name: str
|
||||
original_name: Optional[str]
|
||||
tag_name: Optional[str]
|
||||
version: Optional[str]
|
||||
uploaded_at: datetime
|
||||
uploaded_by: str
|
||||
source_ip: Optional[str]
|
||||
@@ -294,10 +210,10 @@ class ArtifactProvenanceResponse(BaseModel):
|
||||
# Usage statistics
|
||||
upload_count: int
|
||||
# References
|
||||
packages: List[Dict[str, Any]] # List of {project_name, package_name, tag_names}
|
||||
tags: List[
|
||||
packages: List[Dict[str, Any]] # List of {project_name, package_name, versions}
|
||||
versions: List[
|
||||
Dict[str, Any]
|
||||
] # List of {project_name, package_name, tag_name, created_at}
|
||||
] # List of {project_name, package_name, version, created_at}
|
||||
# Upload history
|
||||
uploads: List[Dict[str, Any]] # List of upload events
|
||||
|
||||
@@ -305,18 +221,8 @@ class ArtifactProvenanceResponse(BaseModel):
|
||||
from_attributes = True
|
||||
|
||||
|
||||
class ArtifactTagInfo(BaseModel):
|
||||
"""Tag info for embedding in artifact responses"""
|
||||
|
||||
id: UUID
|
||||
name: str
|
||||
package_id: UUID
|
||||
package_name: str
|
||||
project_name: str
|
||||
|
||||
|
||||
class ArtifactDetailResponse(BaseModel):
|
||||
"""Artifact with list of tags/packages referencing it"""
|
||||
"""Artifact with metadata"""
|
||||
|
||||
id: str
|
||||
sha256: str # Explicit SHA256 field (same as id)
|
||||
@@ -330,14 +236,14 @@ class ArtifactDetailResponse(BaseModel):
|
||||
created_by: str
|
||||
ref_count: int
|
||||
format_metadata: Optional[Dict[str, Any]] = None
|
||||
tags: List[ArtifactTagInfo] = []
|
||||
versions: List[Dict[str, Any]] = [] # List of {version, package_name, project_name}
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
|
||||
class PackageArtifactResponse(BaseModel):
|
||||
"""Artifact with tags for package artifact listing"""
|
||||
"""Artifact for package artifact listing"""
|
||||
|
||||
id: str
|
||||
sha256: str # Explicit SHA256 field (same as id)
|
||||
@@ -350,7 +256,7 @@ class PackageArtifactResponse(BaseModel):
|
||||
created_at: datetime
|
||||
created_by: str
|
||||
format_metadata: Optional[Dict[str, Any]] = None
|
||||
tags: List[str] = [] # Tag names pointing to this artifact
|
||||
version: Optional[str] = None # Version from PackageVersion if exists
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
@@ -368,28 +274,9 @@ class GlobalArtifactResponse(BaseModel):
|
||||
created_by: str
|
||||
format_metadata: Optional[Dict[str, Any]] = None
|
||||
ref_count: int = 0
|
||||
# Context from tags/packages
|
||||
# Context from versions/packages
|
||||
projects: List[str] = [] # List of project names containing this artifact
|
||||
packages: List[str] = [] # List of "project/package" paths
|
||||
tags: List[str] = [] # List of "project/package:tag" references
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
|
||||
class GlobalTagResponse(BaseModel):
|
||||
"""Tag with project/package context for global listing"""
|
||||
|
||||
id: UUID
|
||||
name: str
|
||||
artifact_id: str
|
||||
created_at: datetime
|
||||
created_by: str
|
||||
project_name: str
|
||||
package_name: str
|
||||
artifact_size: Optional[int] = None
|
||||
artifact_content_type: Optional[str] = None
|
||||
version: Optional[str] = None # Version of the artifact this tag points to
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
@@ -402,7 +289,6 @@ class UploadResponse(BaseModel):
|
||||
size: int
|
||||
project: str
|
||||
package: str
|
||||
tag: Optional[str]
|
||||
version: Optional[str] = None # Version assigned to this artifact
|
||||
version_source: Optional[str] = None # How version was determined: 'explicit', 'filename', 'metadata'
|
||||
checksum_md5: Optional[str] = None
|
||||
@@ -429,7 +315,6 @@ class ResumableUploadInitRequest(BaseModel):
|
||||
filename: str
|
||||
content_type: Optional[str] = None
|
||||
size: int
|
||||
tag: Optional[str] = None
|
||||
version: Optional[str] = None # Explicit version (auto-detected if not provided)
|
||||
|
||||
@field_validator("expected_hash")
|
||||
@@ -464,7 +349,7 @@ class ResumableUploadPartResponse(BaseModel):
|
||||
class ResumableUploadCompleteRequest(BaseModel):
|
||||
"""Request to complete a resumable upload"""
|
||||
|
||||
tag: Optional[str] = None
|
||||
pass
|
||||
|
||||
|
||||
class ResumableUploadCompleteResponse(BaseModel):
|
||||
@@ -474,7 +359,6 @@ class ResumableUploadCompleteResponse(BaseModel):
|
||||
size: int
|
||||
project: str
|
||||
package: str
|
||||
tag: Optional[str]
|
||||
|
||||
|
||||
class ResumableUploadStatusResponse(BaseModel):
|
||||
@@ -527,7 +411,6 @@ class PackageVersionResponse(BaseModel):
|
||||
size: Optional[int] = None
|
||||
content_type: Optional[str] = None
|
||||
original_name: Optional[str] = None
|
||||
tags: List[str] = [] # Tag names pointing to this artifact
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
@@ -569,11 +452,10 @@ class SearchResultPackage(BaseModel):
|
||||
|
||||
|
||||
class SearchResultArtifact(BaseModel):
|
||||
"""Artifact/tag result for global search"""
|
||||
"""Artifact result for global search"""
|
||||
|
||||
tag_id: UUID
|
||||
tag_name: str
|
||||
artifact_id: str
|
||||
version: Optional[str]
|
||||
package_id: UUID
|
||||
package_name: str
|
||||
project_name: str
|
||||
@@ -611,6 +493,8 @@ class HealthResponse(BaseModel):
|
||||
version: str = "1.0.0"
|
||||
storage_healthy: Optional[bool] = None
|
||||
database_healthy: Optional[bool] = None
|
||||
http_pool: Optional[Dict[str, Any]] = None
|
||||
cache: Optional[Dict[str, Any]] = None
|
||||
|
||||
|
||||
# Garbage collection schemas
|
||||
@@ -686,7 +570,7 @@ class ProjectStatsResponse(BaseModel):
|
||||
project_id: str
|
||||
project_name: str
|
||||
package_count: int
|
||||
tag_count: int
|
||||
version_count: int
|
||||
artifact_count: int
|
||||
total_size_bytes: int
|
||||
upload_count: int
|
||||
@@ -701,7 +585,7 @@ class PackageStatsResponse(BaseModel):
|
||||
package_id: str
|
||||
package_name: str
|
||||
project_name: str
|
||||
tag_count: int
|
||||
version_count: int
|
||||
artifact_count: int
|
||||
total_size_bytes: int
|
||||
upload_count: int
|
||||
@@ -718,9 +602,9 @@ class ArtifactStatsResponse(BaseModel):
|
||||
size: int
|
||||
ref_count: int
|
||||
storage_savings: int # (ref_count - 1) * size
|
||||
tags: List[Dict[str, Any]] # Tags referencing this artifact
|
||||
projects: List[str] # Projects using this artifact
|
||||
packages: List[str] # Packages using this artifact
|
||||
versions: List[Dict[str, Any]] = [] # List of {version, package_name, project_name}
|
||||
first_uploaded: Optional[datetime] = None
|
||||
last_referenced: Optional[datetime] = None
|
||||
|
||||
@@ -929,20 +813,7 @@ class DependencyCreate(BaseModel):
|
||||
"""Schema for creating a dependency"""
|
||||
project: str
|
||||
package: str
|
||||
version: Optional[str] = None
|
||||
tag: Optional[str] = None
|
||||
|
||||
@field_validator('version', 'tag')
|
||||
@classmethod
|
||||
def validate_constraint(cls, v, info):
|
||||
return v
|
||||
|
||||
def model_post_init(self, __context):
|
||||
"""Validate that exactly one of version or tag is set"""
|
||||
if self.version is None and self.tag is None:
|
||||
raise ValueError("Either 'version' or 'tag' must be specified")
|
||||
if self.version is not None and self.tag is not None:
|
||||
raise ValueError("Cannot specify both 'version' and 'tag'")
|
||||
version: str
|
||||
|
||||
|
||||
class DependencyResponse(BaseModel):
|
||||
@@ -951,8 +822,7 @@ class DependencyResponse(BaseModel):
|
||||
artifact_id: str
|
||||
project: str
|
||||
package: str
|
||||
version: Optional[str] = None
|
||||
tag: Optional[str] = None
|
||||
version: str
|
||||
created_at: datetime
|
||||
|
||||
class Config:
|
||||
@@ -967,7 +837,6 @@ class DependencyResponse(BaseModel):
|
||||
project=dep.dependency_project,
|
||||
package=dep.dependency_package,
|
||||
version=dep.version_constraint,
|
||||
tag=dep.tag_constraint,
|
||||
created_at=dep.created_at,
|
||||
)
|
||||
|
||||
@@ -984,7 +853,6 @@ class DependentInfo(BaseModel):
|
||||
project: str
|
||||
package: str
|
||||
version: Optional[str] = None
|
||||
constraint_type: str # 'version' or 'tag'
|
||||
constraint_value: str
|
||||
|
||||
|
||||
@@ -1000,20 +868,7 @@ class EnsureFileDependency(BaseModel):
|
||||
"""Dependency entry from orchard.ensure file"""
|
||||
project: str
|
||||
package: str
|
||||
version: Optional[str] = None
|
||||
tag: Optional[str] = None
|
||||
|
||||
@field_validator('version', 'tag')
|
||||
@classmethod
|
||||
def validate_constraint(cls, v, info):
|
||||
return v
|
||||
|
||||
def model_post_init(self, __context):
|
||||
"""Validate that exactly one of version or tag is set"""
|
||||
if self.version is None and self.tag is None:
|
||||
raise ValueError("Either 'version' or 'tag' must be specified")
|
||||
if self.version is not None and self.tag is not None:
|
||||
raise ValueError("Cannot specify both 'version' and 'tag'")
|
||||
version: str
|
||||
|
||||
|
||||
class EnsureFileContent(BaseModel):
|
||||
@@ -1027,15 +882,26 @@ class ResolvedArtifact(BaseModel):
|
||||
project: str
|
||||
package: str
|
||||
version: Optional[str] = None
|
||||
tag: Optional[str] = None
|
||||
size: int
|
||||
download_url: str
|
||||
|
||||
|
||||
class MissingDependency(BaseModel):
|
||||
"""A dependency that could not be resolved (not cached on server)"""
|
||||
project: str
|
||||
package: str
|
||||
constraint: Optional[str] = None
|
||||
required_by: Optional[str] = None
|
||||
fetch_attempted: bool = False # True if auto-fetch was attempted
|
||||
fetch_error: Optional[str] = None # Error message if fetch failed
|
||||
|
||||
|
||||
class DependencyResolutionResponse(BaseModel):
|
||||
"""Response from dependency resolution endpoint"""
|
||||
requested: Dict[str, str] # project, package, ref
|
||||
resolved: List[ResolvedArtifact]
|
||||
missing: List[MissingDependency] = []
|
||||
fetched: List[ResolvedArtifact] = [] # Artifacts fetched from upstream during resolution
|
||||
total_size: int
|
||||
artifact_count: int
|
||||
|
||||
@@ -1044,7 +910,7 @@ class DependencyConflict(BaseModel):
|
||||
"""Details about a dependency conflict"""
|
||||
project: str
|
||||
package: str
|
||||
requirements: List[Dict[str, Any]] # version/tag and required_by info
|
||||
requirements: List[Dict[str, Any]] # version and required_by info
|
||||
|
||||
|
||||
class DependencyConflictError(BaseModel):
|
||||
@@ -1378,10 +1244,10 @@ class CacheRequest(BaseModel):
|
||||
url: str
|
||||
source_type: str
|
||||
package_name: Optional[str] = None # Auto-derived from URL if not provided
|
||||
tag: Optional[str] = None # Auto-derived from URL if not provided
|
||||
version: Optional[str] = None # Auto-derived from URL if not provided
|
||||
user_project: Optional[str] = None # Cross-reference to user project
|
||||
user_package: Optional[str] = None
|
||||
user_tag: Optional[str] = None
|
||||
user_version: Optional[str] = None
|
||||
expected_hash: Optional[str] = None # Verify downloaded content
|
||||
|
||||
@field_validator('url')
|
||||
@@ -1428,8 +1294,8 @@ class CacheResponse(BaseModel):
|
||||
source_name: Optional[str]
|
||||
system_project: str
|
||||
system_package: str
|
||||
system_tag: Optional[str]
|
||||
user_reference: Optional[str] = None # e.g., "my-app/npm-deps:lodash-4.17.21"
|
||||
system_version: Optional[str]
|
||||
user_reference: Optional[str] = None # e.g., "my-app/npm-deps/+/4.17.21"
|
||||
|
||||
|
||||
class CacheResolveRequest(BaseModel):
|
||||
@@ -1443,7 +1309,7 @@ class CacheResolveRequest(BaseModel):
|
||||
version: str
|
||||
user_project: Optional[str] = None
|
||||
user_package: Optional[str] = None
|
||||
user_tag: Optional[str] = None
|
||||
user_version: Optional[str] = None
|
||||
|
||||
@field_validator('source_type')
|
||||
@classmethod
|
||||
|
||||
@@ -5,7 +5,7 @@ import hashlib
|
||||
import logging
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from .models import Project, Package, Artifact, Tag, Upload, PackageVersion, ArtifactDependency, Team, TeamMembership, User
|
||||
from .models import Project, Package, Artifact, Upload, PackageVersion, ArtifactDependency, Team, TeamMembership, User
|
||||
from .storage import get_storage
|
||||
from .auth import hash_password
|
||||
|
||||
@@ -125,14 +125,14 @@ TEST_ARTIFACTS = [
|
||||
]
|
||||
|
||||
# Dependencies to create (source artifact -> dependency)
|
||||
# Format: (source_project, source_package, source_version, dep_project, dep_package, version_constraint, tag_constraint)
|
||||
# Format: (source_project, source_package, source_version, dep_project, dep_package, version_constraint)
|
||||
TEST_DEPENDENCIES = [
|
||||
# ui-components v1.1.0 depends on design-tokens v1.0.0
|
||||
("frontend-libs", "ui-components", "1.1.0", "frontend-libs", "design-tokens", "1.0.0", None),
|
||||
("frontend-libs", "ui-components", "1.1.0", "frontend-libs", "design-tokens", "1.0.0"),
|
||||
# auth-lib v1.0.0 depends on common-utils v2.0.0
|
||||
("backend-services", "auth-lib", "1.0.0", "backend-services", "common-utils", "2.0.0", None),
|
||||
# auth-lib v1.0.0 also depends on design-tokens (stable tag)
|
||||
("backend-services", "auth-lib", "1.0.0", "frontend-libs", "design-tokens", None, "latest"),
|
||||
("backend-services", "auth-lib", "1.0.0", "backend-services", "common-utils", "2.0.0"),
|
||||
# auth-lib v1.0.0 also depends on design-tokens v1.0.0
|
||||
("backend-services", "auth-lib", "1.0.0", "frontend-libs", "design-tokens", "1.0.0"),
|
||||
]
|
||||
|
||||
|
||||
@@ -252,9 +252,8 @@ def seed_database(db: Session) -> None:
|
||||
|
||||
logger.info(f"Created {len(project_map)} projects and {len(package_map)} packages (assigned to {demo_team.slug})")
|
||||
|
||||
# Create artifacts, tags, and versions
|
||||
# Create artifacts and versions
|
||||
artifact_count = 0
|
||||
tag_count = 0
|
||||
version_count = 0
|
||||
|
||||
for artifact_data in TEST_ARTIFACTS:
|
||||
@@ -316,23 +315,12 @@ def seed_database(db: Session) -> None:
|
||||
db.add(version)
|
||||
version_count += 1
|
||||
|
||||
# Create tags
|
||||
for tag_name in artifact_data["tags"]:
|
||||
tag = Tag(
|
||||
package_id=package.id,
|
||||
name=tag_name,
|
||||
artifact_id=sha256_hash,
|
||||
created_by=team_owner_username,
|
||||
)
|
||||
db.add(tag)
|
||||
tag_count += 1
|
||||
|
||||
db.flush()
|
||||
|
||||
# Create dependencies
|
||||
dependency_count = 0
|
||||
for dep_data in TEST_DEPENDENCIES:
|
||||
src_project, src_package, src_version, dep_project, dep_package, version_constraint, tag_constraint = dep_data
|
||||
src_project, src_package, src_version, dep_project, dep_package, version_constraint = dep_data
|
||||
|
||||
# Find the source artifact by looking up its version
|
||||
src_pkg = package_map.get((src_project, src_package))
|
||||
@@ -356,11 +344,10 @@ def seed_database(db: Session) -> None:
|
||||
dependency_project=dep_project,
|
||||
dependency_package=dep_package,
|
||||
version_constraint=version_constraint,
|
||||
tag_constraint=tag_constraint,
|
||||
)
|
||||
db.add(dependency)
|
||||
dependency_count += 1
|
||||
|
||||
db.commit()
|
||||
logger.info(f"Created {artifact_count} artifacts, {tag_count} tags, {version_count} versions, and {dependency_count} dependencies")
|
||||
logger.info(f"Created {artifact_count} artifacts, {version_count} versions, and {dependency_count} dependencies")
|
||||
logger.info("Database seeding complete")
|
||||
|
||||
@@ -6,9 +6,8 @@ from typing import List, Optional, Tuple
|
||||
from sqlalchemy.orm import Session
|
||||
import logging
|
||||
|
||||
from ..models import Artifact, Tag
|
||||
from ..models import Artifact, PackageVersion
|
||||
from ..repositories.artifact import ArtifactRepository
|
||||
from ..repositories.tag import TagRepository
|
||||
from ..storage import S3Storage
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -21,8 +20,8 @@ class ArtifactCleanupService:
|
||||
Reference counting rules:
|
||||
- ref_count starts at 1 when artifact is first uploaded
|
||||
- ref_count increments when the same artifact is uploaded again (deduplication)
|
||||
- ref_count decrements when a tag is deleted or updated to point elsewhere
|
||||
- ref_count decrements when a package is deleted (for each tag pointing to artifact)
|
||||
- ref_count decrements when a version is deleted or updated to point elsewhere
|
||||
- ref_count decrements when a package is deleted (for each version pointing to artifact)
|
||||
- When ref_count reaches 0, artifact is a candidate for deletion from S3
|
||||
"""
|
||||
|
||||
@@ -30,12 +29,11 @@ class ArtifactCleanupService:
|
||||
self.db = db
|
||||
self.storage = storage
|
||||
self.artifact_repo = ArtifactRepository(db)
|
||||
self.tag_repo = TagRepository(db)
|
||||
|
||||
def on_tag_deleted(self, artifact_id: str) -> Artifact:
|
||||
def on_version_deleted(self, artifact_id: str) -> Artifact:
|
||||
"""
|
||||
Called when a tag is deleted.
|
||||
Decrements ref_count for the artifact the tag was pointing to.
|
||||
Called when a version is deleted.
|
||||
Decrements ref_count for the artifact the version was pointing to.
|
||||
"""
|
||||
artifact = self.artifact_repo.get_by_sha256(artifact_id)
|
||||
if artifact:
|
||||
@@ -45,11 +43,11 @@ class ArtifactCleanupService:
|
||||
)
|
||||
return artifact
|
||||
|
||||
def on_tag_updated(
|
||||
def on_version_updated(
|
||||
self, old_artifact_id: str, new_artifact_id: str
|
||||
) -> Tuple[Optional[Artifact], Optional[Artifact]]:
|
||||
"""
|
||||
Called when a tag is updated to point to a different artifact.
|
||||
Called when a version is updated to point to a different artifact.
|
||||
Decrements ref_count for old artifact, increments for new (if different).
|
||||
|
||||
Returns (old_artifact, new_artifact) tuple.
|
||||
@@ -79,21 +77,21 @@ class ArtifactCleanupService:
|
||||
def on_package_deleted(self, package_id) -> List[str]:
|
||||
"""
|
||||
Called when a package is deleted.
|
||||
Decrements ref_count for all artifacts that had tags in the package.
|
||||
Decrements ref_count for all artifacts that had versions in the package.
|
||||
|
||||
Returns list of artifact IDs that were affected.
|
||||
"""
|
||||
# Get all tags in the package before deletion
|
||||
tags = self.db.query(Tag).filter(Tag.package_id == package_id).all()
|
||||
# Get all versions in the package before deletion
|
||||
versions = self.db.query(PackageVersion).filter(PackageVersion.package_id == package_id).all()
|
||||
|
||||
affected_artifacts = []
|
||||
for tag in tags:
|
||||
artifact = self.artifact_repo.get_by_sha256(tag.artifact_id)
|
||||
for version in versions:
|
||||
artifact = self.artifact_repo.get_by_sha256(version.artifact_id)
|
||||
if artifact:
|
||||
self.artifact_repo.decrement_ref_count(artifact)
|
||||
affected_artifacts.append(tag.artifact_id)
|
||||
affected_artifacts.append(version.artifact_id)
|
||||
logger.info(
|
||||
f"Decremented ref_count for artifact {tag.artifact_id} (package delete)"
|
||||
f"Decremented ref_count for artifact {version.artifact_id} (package delete)"
|
||||
)
|
||||
|
||||
return affected_artifacts
|
||||
@@ -152,7 +150,7 @@ class ArtifactCleanupService:
|
||||
|
||||
def verify_ref_counts(self, fix: bool = False) -> List[dict]:
|
||||
"""
|
||||
Verify that ref_counts match actual tag references.
|
||||
Verify that ref_counts match actual version references.
|
||||
|
||||
Args:
|
||||
fix: If True, fix any mismatched ref_counts
|
||||
@@ -162,28 +160,28 @@ class ArtifactCleanupService:
|
||||
"""
|
||||
from sqlalchemy import func
|
||||
|
||||
# Get actual tag counts per artifact
|
||||
tag_counts = (
|
||||
self.db.query(Tag.artifact_id, func.count(Tag.id).label("tag_count"))
|
||||
.group_by(Tag.artifact_id)
|
||||
# Get actual version counts per artifact
|
||||
version_counts = (
|
||||
self.db.query(PackageVersion.artifact_id, func.count(PackageVersion.id).label("version_count"))
|
||||
.group_by(PackageVersion.artifact_id)
|
||||
.all()
|
||||
)
|
||||
tag_count_map = {artifact_id: count for artifact_id, count in tag_counts}
|
||||
version_count_map = {artifact_id: count for artifact_id, count in version_counts}
|
||||
|
||||
# Check all artifacts
|
||||
artifacts = self.db.query(Artifact).all()
|
||||
mismatches = []
|
||||
|
||||
for artifact in artifacts:
|
||||
actual_count = tag_count_map.get(artifact.id, 0)
|
||||
actual_count = version_count_map.get(artifact.id, 0)
|
||||
# ref_count should be at least 1 (initial upload) + additional uploads
|
||||
# But tags are the primary reference, so we check against tag count
|
||||
# But versions are the primary reference, so we check against version count
|
||||
|
||||
if artifact.ref_count < actual_count:
|
||||
mismatch = {
|
||||
"artifact_id": artifact.id,
|
||||
"stored_ref_count": artifact.ref_count,
|
||||
"actual_tag_count": actual_count,
|
||||
"actual_version_count": actual_count,
|
||||
}
|
||||
mismatches.append(mismatch)
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ passlib[bcrypt]==1.7.4
 bcrypt==4.0.1
 slowapi==0.1.9
 httpx>=0.25.0
+redis>=5.0.0

 # Test dependencies
 pytest>=7.4.0
1
backend/scripts/__init__.py
Normal file
1
backend/scripts/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Scripts package
|
||||
262
backend/scripts/backfill_pypi_dependencies.py
Normal file
262
backend/scripts/backfill_pypi_dependencies.py
Normal file
@@ -0,0 +1,262 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Backfill script to extract dependencies from cached PyPI packages.
|
||||
|
||||
This script scans all artifacts in the _pypi project and extracts
|
||||
Requires-Dist metadata from wheel and sdist files that don't already
|
||||
have dependencies recorded.
|
||||
|
||||
Usage:
|
||||
# From within the container:
|
||||
python -m scripts.backfill_pypi_dependencies
|
||||
|
||||
# Or with docker exec:
|
||||
docker exec orchard_orchard-server_1 python -m scripts.backfill_pypi_dependencies
|
||||
|
||||
# Dry run (preview only):
|
||||
docker exec orchard_orchard-server_1 python -m scripts.backfill_pypi_dependencies --dry-run
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import re
|
||||
import sys
|
||||
import tarfile
|
||||
import zipfile
|
||||
from io import BytesIO
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
# Add parent directory to path for imports
|
||||
sys.path.insert(0, "/app")
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from backend.app.config import get_settings
|
||||
from backend.app.models import (
|
||||
Artifact,
|
||||
ArtifactDependency,
|
||||
Package,
|
||||
Project,
|
||||
Tag,
|
||||
)
|
||||
from backend.app.storage import get_storage
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format="%(asctime)s - %(levelname)s - %(message)s",
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def parse_requires_dist(requires_dist: str) -> Tuple[Optional[str], Optional[str]]:
    """Parse a Requires-Dist line into (package_name, version_constraint)."""
    # Remove any environment markers (after semicolon)
    if ";" in requires_dist:
        requires_dist = requires_dist.split(";")[0].strip()

    # Match patterns like "package (>=1.0)" or "package>=1.0" or "package"
    match = re.match(
        r"^([a-zA-Z0-9][-a-zA-Z0-9._]*)\s*(?:\(([^)]+)\)|([<>=!~][^\s;]+))?",
        requires_dist.strip(),
    )

    if not match:
        return None, None

    package_name = match.group(1)
    version_constraint = match.group(2) or match.group(3)

    # Normalize package name (PEP 503)
    normalized_name = re.sub(r"[-_.]+", "-", package_name).lower()

    if version_constraint:
        version_constraint = version_constraint.strip()

    return normalized_name, version_constraint
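# Illustration of what the parser above returns for typical Requires-Dist
# values (outputs follow from the regex and the PEP 503 normalization):
#
#   parse_requires_dist("charset_normalizer (>=2,<4)")
#       -> ("charset-normalizer", ">=2,<4")
#   parse_requires_dist("idna>=2.5 ; python_version >= '3.7'")
#       -> ("idna", ">=2.5")
#   parse_requires_dist("certifi")
#       -> ("certifi", None)
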
|
||||
|
||||
def extract_requires_from_metadata(metadata_content: str) -> List[Tuple[str, Optional[str]]]:
|
||||
"""Extract all Requires-Dist entries from METADATA/PKG-INFO content."""
|
||||
dependencies = []
|
||||
|
||||
for line in metadata_content.split("\n"):
|
||||
if line.startswith("Requires-Dist:"):
|
||||
value = line[len("Requires-Dist:"):].strip()
|
||||
pkg_name, version = parse_requires_dist(value)
|
||||
if pkg_name:
|
||||
dependencies.append((pkg_name, version))
|
||||
|
||||
return dependencies
|
||||
|
||||
|
||||
def extract_metadata_from_wheel(content: bytes) -> Optional[str]:
|
||||
"""Extract METADATA file content from a wheel (zip) file."""
|
||||
try:
|
||||
with zipfile.ZipFile(BytesIO(content)) as zf:
|
||||
for name in zf.namelist():
|
||||
if name.endswith(".dist-info/METADATA"):
|
||||
return zf.read(name).decode("utf-8", errors="replace")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to extract metadata from wheel: {e}")
|
||||
return None
|
||||
|
||||
|
||||
def extract_metadata_from_sdist(content: bytes) -> Optional[str]:
|
||||
"""Extract PKG-INFO file content from a source distribution (.tar.gz)."""
|
||||
try:
|
||||
with tarfile.open(fileobj=BytesIO(content), mode="r:gz") as tf:
|
||||
for member in tf.getmembers():
|
||||
if member.name.endswith("/PKG-INFO") and member.name.count("/") == 1:
|
||||
f = tf.extractfile(member)
|
||||
if f:
|
||||
return f.read().decode("utf-8", errors="replace")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to extract metadata from sdist: {e}")
|
||||
return None
|
||||
|
||||
|
||||
def extract_dependencies(content: bytes, filename: str) -> List[Tuple[str, Optional[str]]]:
|
||||
"""Extract dependencies from a PyPI package file."""
|
||||
metadata = None
|
||||
|
||||
if filename.endswith(".whl"):
|
||||
metadata = extract_metadata_from_wheel(content)
|
||||
elif filename.endswith(".tar.gz"):
|
||||
metadata = extract_metadata_from_sdist(content)
|
||||
|
||||
if metadata:
|
||||
return extract_requires_from_metadata(metadata)
|
||||
|
||||
return []
|
||||
|
||||
|
||||
def backfill_dependencies(dry_run: bool = False):
|
||||
"""Main backfill function."""
|
||||
settings = get_settings()
|
||||
|
||||
# Create database connection
|
||||
engine = create_engine(settings.database_url)
|
||||
Session = sessionmaker(bind=engine)
|
||||
db = Session()
|
||||
|
||||
# Create storage client
|
||||
storage = get_storage()
|
||||
|
||||
try:
|
||||
# Find the _pypi project
|
||||
pypi_project = db.query(Project).filter(Project.name == "_pypi").first()
|
||||
if not pypi_project:
|
||||
logger.info("No _pypi project found. Nothing to backfill.")
|
||||
return
|
||||
|
||||
# Get all packages in _pypi
|
||||
packages = db.query(Package).filter(Package.project_id == pypi_project.id).all()
|
||||
logger.info(f"Found {len(packages)} packages in _pypi project")
|
||||
|
||||
total_artifacts = 0
|
||||
artifacts_with_deps = 0
|
||||
artifacts_processed = 0
|
||||
dependencies_added = 0
|
||||
|
||||
for package in packages:
|
||||
# Get all tags (each tag points to an artifact)
|
||||
tags = db.query(Tag).filter(Tag.package_id == package.id).all()
|
||||
|
||||
for tag in tags:
|
||||
total_artifacts += 1
|
||||
filename = tag.name
|
||||
|
||||
# Skip non-package files (like .metadata files)
|
||||
if not (filename.endswith(".whl") or filename.endswith(".tar.gz")):
|
||||
continue
|
||||
|
||||
# Check if this artifact already has dependencies
|
||||
existing_deps = db.query(ArtifactDependency).filter(
|
||||
ArtifactDependency.artifact_id == tag.artifact_id
|
||||
).count()
|
||||
|
||||
if existing_deps > 0:
|
||||
artifacts_with_deps += 1
|
||||
continue
|
||||
|
||||
# Get the artifact
|
||||
artifact = db.query(Artifact).filter(Artifact.id == tag.artifact_id).first()
|
||||
if not artifact:
|
||||
logger.warning(f"Artifact {tag.artifact_id} not found for tag {filename}")
|
||||
continue
|
||||
|
||||
logger.info(f"Processing {package.name}/{filename}...")
|
||||
|
||||
if dry_run:
|
||||
logger.info(f" [DRY RUN] Would extract dependencies from {filename}")
|
||||
artifacts_processed += 1
|
||||
continue
|
||||
|
||||
# Download the artifact from S3
|
||||
try:
|
||||
content = storage.get(artifact.s3_key)
|
||||
except Exception as e:
|
||||
logger.error(f" Failed to download {filename}: {e}")
|
||||
continue
|
||||
|
||||
# Extract dependencies
|
||||
deps = extract_dependencies(content, filename)
|
||||
|
||||
if deps:
|
||||
logger.info(f" Found {len(deps)} dependencies")
|
||||
for dep_name, dep_version in deps:
|
||||
# Check if already exists (race condition protection)
|
||||
existing = db.query(ArtifactDependency).filter(
|
||||
ArtifactDependency.artifact_id == tag.artifact_id,
|
||||
ArtifactDependency.dependency_project == "_pypi",
|
||||
ArtifactDependency.dependency_package == dep_name,
|
||||
).first()
|
||||
|
||||
if not existing:
|
||||
dep = ArtifactDependency(
|
||||
artifact_id=tag.artifact_id,
|
||||
dependency_project="_pypi",
|
||||
dependency_package=dep_name,
|
||||
version_constraint=dep_version if dep_version else "*",
|
||||
)
|
||||
db.add(dep)
|
||||
dependencies_added += 1
|
||||
logger.info(f" + {dep_name} {dep_version or '*'}")
|
||||
|
||||
db.commit()
|
||||
else:
|
||||
logger.info(f" No dependencies found")
|
||||
|
||||
artifacts_processed += 1
|
||||
|
||||
logger.info("")
|
||||
logger.info("=" * 50)
|
||||
logger.info("Backfill complete!")
|
||||
logger.info(f" Total artifacts: {total_artifacts}")
|
||||
logger.info(f" Already had deps: {artifacts_with_deps}")
|
||||
logger.info(f" Processed: {artifacts_processed}")
|
||||
logger.info(f" Dependencies added: {dependencies_added}")
|
||||
if dry_run:
|
||||
logger.info(" (DRY RUN - no changes made)")
|
||||
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Backfill dependencies for cached PyPI packages"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="Preview what would be done without making changes",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
backfill_dependencies(dry_run=args.dry_run)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -96,7 +96,6 @@ def upload_test_file(
|
||||
package: str,
|
||||
content: bytes,
|
||||
filename: str = "test.bin",
|
||||
tag: Optional[str] = None,
|
||||
version: Optional[str] = None,
|
||||
) -> dict:
|
||||
"""
|
||||
@@ -108,7 +107,6 @@ def upload_test_file(
|
||||
package: Package name
|
||||
content: File content as bytes
|
||||
filename: Original filename
|
||||
tag: Optional tag to assign
|
||||
version: Optional version to assign
|
||||
|
||||
Returns:
|
||||
@@ -116,8 +114,6 @@ def upload_test_file(
|
||||
"""
|
||||
files = {"file": (filename, io.BytesIO(content), "application/octet-stream")}
|
||||
data = {}
|
||||
if tag:
|
||||
data["tag"] = tag
|
||||
if version:
|
||||
data["version"] = version
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ class TestArtifactRetrieval:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project_name, package_name, content, tag="v1"
|
||||
integration_client, project_name, package_name, content, version="v1"
|
||||
)
|
||||
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||
@@ -46,27 +46,27 @@ class TestArtifactRetrieval:
|
||||
assert response.status_code == 404
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_artifact_includes_tags(self, integration_client, test_package):
|
||||
"""Test artifact response includes tags pointing to it."""
|
||||
def test_artifact_includes_versions(self, integration_client, test_package):
|
||||
"""Test artifact response includes versions pointing to it."""
|
||||
project_name, package_name = test_package
|
||||
content = b"artifact with tags test"
|
||||
content = b"artifact with versions test"
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project_name, package_name, content, tag="tagged-v1"
|
||||
integration_client, project_name, package_name, content, version="1.0.0"
|
||||
)
|
||||
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert "tags" in data
|
||||
assert len(data["tags"]) >= 1
|
||||
assert "versions" in data
|
||||
assert len(data["versions"]) >= 1
|
||||
|
||||
tag = data["tags"][0]
|
||||
assert "name" in tag
|
||||
assert "package_name" in tag
|
||||
assert "project_name" in tag
|
||||
version = data["versions"][0]
|
||||
assert "version" in version
|
||||
assert "package_name" in version
|
||||
assert "project_name" in version
|
||||
|
||||
|
||||
class TestArtifactStats:
|
||||
@@ -82,7 +82,7 @@ class TestArtifactStats:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag=f"art-{unique_test_id}"
|
||||
integration_client, project, package, content, version=f"art-{unique_test_id}"
|
||||
)
|
||||
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
|
||||
@@ -94,7 +94,7 @@ class TestArtifactStats:
|
||||
assert "size" in data
|
||||
assert "ref_count" in data
|
||||
assert "storage_savings" in data
|
||||
assert "tags" in data
|
||||
assert "versions" in data
|
||||
assert "projects" in data
|
||||
assert "packages" in data
|
||||
|
||||
@@ -136,8 +136,8 @@ class TestArtifactStats:
|
||||
)
|
||||
|
||||
# Upload same content to both projects
|
||||
upload_test_file(integration_client, proj1, "pkg", content, tag="v1")
|
||||
upload_test_file(integration_client, proj2, "pkg", content, tag="v1")
|
||||
upload_test_file(integration_client, proj1, "pkg", content, version="v1")
|
||||
upload_test_file(integration_client, proj2, "pkg", content, version="v1")
|
||||
|
||||
# Check artifact stats
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
|
||||
@@ -203,7 +203,7 @@ class TestArtifactProvenance:
|
||||
assert "first_uploaded_by" in data
|
||||
assert "upload_count" in data
|
||||
assert "packages" in data
|
||||
assert "tags" in data
|
||||
assert "versions" in data
|
||||
assert "uploads" in data
|
||||
|
||||
@pytest.mark.integration
|
||||
@@ -214,17 +214,17 @@ class TestArtifactProvenance:
|
||||
assert response.status_code == 404
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_artifact_history_with_tag(self, integration_client, test_package):
|
||||
"""Test artifact history includes tag information when tagged."""
|
||||
def test_artifact_history_with_version(self, integration_client, test_package):
|
||||
"""Test artifact history includes version information when versioned."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
upload_result = upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"tagged provenance test",
|
||||
"tagged.txt",
|
||||
tag="v1.0.0",
|
||||
b"versioned provenance test",
|
||||
"versioned.txt",
|
||||
version="v1.0.0",
|
||||
)
|
||||
artifact_id = upload_result["artifact_id"]
|
||||
|
||||
@@ -232,12 +232,12 @@ class TestArtifactProvenance:
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert len(data["tags"]) >= 1
|
||||
assert len(data["versions"]) >= 1
|
||||
|
||||
tag = data["tags"][0]
|
||||
assert "project_name" in tag
|
||||
assert "package_name" in tag
|
||||
assert "tag_name" in tag
|
||||
version = data["versions"][0]
|
||||
assert "project_name" in version
|
||||
assert "package_name" in version
|
||||
assert "version" in version
|
||||
|
||||
|
||||
class TestArtifactUploads:
|
||||
@@ -306,24 +306,24 @@ class TestOrphanedArtifacts:
|
||||
assert len(response.json()) <= 5
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_artifact_becomes_orphaned_when_tag_deleted(
|
||||
def test_artifact_becomes_orphaned_when_version_deleted(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test artifact appears in orphaned list after tag is deleted."""
|
||||
"""Test artifact appears in orphaned list after version is deleted."""
|
||||
project, package = test_package
|
||||
content = f"orphan test {unique_test_id}".encode()
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
# Upload with tag
|
||||
upload_test_file(integration_client, project, package, content, tag="temp-tag")
|
||||
# Upload with version
|
||||
upload_test_file(integration_client, project, package, content, version="1.0.0-temp")
|
||||
|
||||
# Verify not in orphaned list
|
||||
response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
|
||||
orphaned_ids = [a["id"] for a in response.json()]
|
||||
assert expected_hash not in orphaned_ids
|
||||
|
||||
# Delete the tag
|
||||
integration_client.delete(f"/api/v1/project/{project}/{package}/tags/temp-tag")
|
||||
# Delete the version
|
||||
integration_client.delete(f"/api/v1/project/{project}/{package}/versions/1.0.0-temp")
|
||||
|
||||
# Verify now in orphaned list
|
||||
response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
|
@@ -356,9 +356,9 @@ class TestGarbageCollection:
        content = f"dry run test {unique_test_id}".encode()
        expected_hash = compute_sha256(content)

-       # Upload and delete tag to create orphan
-       upload_test_file(integration_client, project, package, content, tag="dry-run")
-       integration_client.delete(f"/api/v1/project/{project}/{package}/tags/dry-run")
+       # Upload and delete version to create orphan
+       upload_test_file(integration_client, project, package, content, version="1.0.0-dryrun")
+       integration_client.delete(f"/api/v1/project/{project}/{package}/versions/1.0.0-dryrun")

        # Verify artifact exists
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -385,7 +385,7 @@ class TestGarbageCollection:
        expected_hash = compute_sha256(content)

        # Upload with tag (ref_count=1)
-       upload_test_file(integration_client, project, package, content, tag="keep-this")
+       upload_test_file(integration_client, project, package, content, version="keep-this")

        # Verify artifact exists with ref_count=1
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
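Note on the two hunks above: the artifact id the API hands back is the SHA-256 of the uploaded bytes, which is why the tests can precompute `expected_hash` and then watch that same id appear under `/api/v1/admin/orphaned-artifacts` once the last version reference is gone. A minimal standalone sketch of that lifecycle, assuming an instance reachable at `ORCHARD_TEST_URL`, a placeholder bearer token, and a multipart field named `file` (none of which are spelled out in this diff):

```python
import hashlib
import os

import httpx

BASE_URL = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
API_KEY = os.environ.get("ORCHARD_API_KEY", "changeme")  # placeholder, not taken from the diff


def orphan_lifecycle(project: str, package: str) -> None:
    content = b"orphan lifecycle sketch"
    artifact_id = hashlib.sha256(content).hexdigest()  # content-addressable id

    with httpx.Client(base_url=BASE_URL, timeout=30.0) as client:
        # Upload the bytes under a version (mirrors the data={"version": ...} form field)
        client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files={"file": ("sketch.bin", content)},  # field name "file" is an assumption
            data={"version": "0.0.1-sketch"},
            headers={"Authorization": f"Bearer {API_KEY}"},
        )

        # While a version still references it, the artifact is not orphaned
        orphaned = client.get("/api/v1/admin/orphaned-artifacts?limit=1000").json()
        assert artifact_id not in [a["id"] for a in orphaned]

        # Dropping the last version reference makes it eligible for garbage collection
        client.delete(f"/api/v1/project/{project}/{package}/versions/0.0.1-sketch")
        orphaned = client.get("/api/v1/admin/orphaned-artifacts?limit=1000").json()
        assert artifact_id in [a["id"] for a in orphaned]
```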
||||
@@ -534,50 +534,6 @@ class TestGlobalArtifacts:
|
||||
assert response.status_code == 400
|
||||
|
||||
|
||||
class TestGlobalTags:
|
||||
"""Tests for global tags endpoint."""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_global_tags_returns_200(self, integration_client):
|
||||
"""Test global tags endpoint returns 200."""
|
||||
response = integration_client.get("/api/v1/tags")
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert "items" in data
|
||||
assert "pagination" in data
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_global_tags_pagination(self, integration_client):
|
||||
"""Test global tags endpoint respects pagination."""
|
||||
response = integration_client.get("/api/v1/tags?limit=5&page=1")
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert len(data["items"]) <= 5
|
||||
assert data["pagination"]["limit"] == 5
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_global_tags_has_project_context(self, integration_client):
|
||||
"""Test global tags response includes project/package context."""
|
||||
response = integration_client.get("/api/v1/tags?limit=1")
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
if len(data["items"]) > 0:
|
||||
item = data["items"][0]
|
||||
assert "project_name" in item
|
||||
assert "package_name" in item
|
||||
assert "artifact_id" in item
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_global_tags_search_with_wildcard(self, integration_client):
|
||||
"""Test global tags search supports wildcards."""
|
||||
response = integration_client.get("/api/v1/tags?search=v*")
|
||||
assert response.status_code == 200
|
||||
# Just verify it doesn't error; results may vary
|
||||
|
||||
|
||||
class TestAuditLogs:
|
||||
"""Tests for global audit logs endpoint."""
|
||||
|
||||
|
||||
@@ -63,7 +63,7 @@ class TestConcurrentUploads:
|
||||
response = client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": f"concurrent-{idx}"},
|
||||
data={"version": f"concurrent-{idx}"},
|
||||
headers={"Authorization": f"Bearer {api_key}"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
@@ -117,7 +117,7 @@ class TestConcurrentUploads:
|
||||
response = client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": f"concurrent5-{idx}"},
|
||||
data={"version": f"concurrent5-{idx}"},
|
||||
headers={"Authorization": f"Bearer {api_key}"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
@@ -171,7 +171,7 @@ class TestConcurrentUploads:
|
||||
response = client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": f"concurrent10-{idx}"},
|
||||
data={"version": f"concurrent10-{idx}"},
|
||||
headers={"Authorization": f"Bearer {api_key}"},
|
||||
)
|
||||
if response.status_code == 200:
|
@@ -195,19 +195,38 @@ class TestConcurrentUploads:

    @pytest.mark.integration
    @pytest.mark.concurrent
-   def test_concurrent_uploads_same_file_deduplication(self, integration_client, test_package):
-       """Test concurrent uploads of same file handle deduplication correctly."""
-       project, package = test_package
+   def test_concurrent_uploads_same_file_deduplication(
+       self, integration_client, test_project, unique_test_id
+   ):
+       """Test concurrent uploads of same file handle deduplication correctly.
+
+       Same content uploaded to different packages should result in:
+       - Same artifact_id (content-addressable)
+       - ref_count = number of packages (one version per package)
+       """
+       project = test_project
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

-       content, expected_hash = generate_content_with_hash(4096, seed=999)
        num_concurrent = 5
+       package_names = []
+
+       # Create multiple packages for concurrent uploads
+       for i in range(num_concurrent):
+           pkg_name = f"dedup-pkg-{unique_test_id}-{i}"
+           response = integration_client.post(
+               f"/api/v1/project/{project}/packages",
+               json={"name": pkg_name, "description": f"Dedup test package {i}"},
+           )
+           assert response.status_code == 200
+           package_names.append(pkg_name)
+
+       content, expected_hash = generate_content_with_hash(4096, seed=999)

        results = []
        errors = []

-       def upload_worker(idx):
+       def upload_worker(idx, package):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
@@ -219,7 +238,7 @@ class TestConcurrentUploads:
                response = client.post(
                    f"/api/v1/project/{project}/{package}/upload",
                    files=files,
-                   data={"tag": f"dedup-{idx}"},
+                   data={"version": "1.0.0"},
                    headers={"Authorization": f"Bearer {api_key}"},
                )
                if response.status_code == 200:
@@ -230,7 +249,10 @@ class TestConcurrentUploads:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_concurrent) as executor:
-           futures = [executor.submit(upload_worker, i) for i in range(num_concurrent)]
+           futures = [
+               executor.submit(upload_worker, i, package_names[i])
+               for i in range(num_concurrent)
+           ]
            for future in as_completed(futures):
                pass

@@ -242,7 +264,7 @@ class TestConcurrentUploads:
        assert len(artifact_ids) == 1
        assert expected_hash in artifact_ids

-       # Verify final ref_count equals number of uploads
+       # Verify final ref_count equals number of packages
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.status_code == 200
        assert response.json()["ref_count"] == num_concurrent
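The rewritten test relies on content addressing: five packages upload the same 4 KB buffer, so every worker should get back the same artifact id (the SHA-256 of the bytes) and `ref_count` should end up equal to the number of referencing versions, one per package. A toy model of that bookkeeping, purely illustrative and not the server's implementation:

```python
import hashlib
from collections import defaultdict

store = {}              # artifact_id -> bytes
refs = defaultdict(set)  # artifact_id -> set of (package, version) references


def upload(package: str, version: str, data: bytes):
    """Return (artifact_id, deduplicated). Identical bytes always map to the same id."""
    artifact_id = hashlib.sha256(data).hexdigest()
    deduplicated = artifact_id in store
    store[artifact_id] = data               # idempotent: content-addressed
    refs[artifact_id].add((package, version))
    return artifact_id, deduplicated


content = b"\x00" * 4096  # stand-in for generate_content_with_hash(4096, seed=999)
ids = {upload(f"dedup-pkg-{i}", "1.0.0", content)[0] for i in range(5)}

assert len(ids) == 1                 # one artifact_id for identical content
assert len(refs[ids.pop()]) == 5     # ref_count == number of packages
```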
||||
@@ -287,7 +309,7 @@ class TestConcurrentUploads:
|
||||
response = client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": "latest"},
|
||||
data={"version": "latest"},
|
||||
headers={"Authorization": f"Bearer {api_key}"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
@@ -321,7 +343,7 @@ class TestConcurrentDownloads:
|
||||
content, expected_hash = generate_content_with_hash(2048, seed=400)
|
||||
|
||||
# Upload first
|
||||
upload_test_file(integration_client, project, package, content, tag="download-test")
|
||||
upload_test_file(integration_client, project, package, content, version="download-test")
|
||||
|
||||
results = []
|
||||
errors = []
|
||||
@@ -362,7 +384,7 @@ class TestConcurrentDownloads:
|
||||
project, package = test_package
|
||||
content, expected_hash = generate_content_with_hash(4096, seed=500)
|
||||
|
||||
upload_test_file(integration_client, project, package, content, tag="download5-test")
|
||||
upload_test_file(integration_client, project, package, content, version="download5-test")
|
||||
|
||||
num_downloads = 5
|
||||
results = []
|
||||
@@ -403,7 +425,7 @@ class TestConcurrentDownloads:
|
||||
project, package = test_package
|
||||
content, expected_hash = generate_content_with_hash(8192, seed=600)
|
||||
|
||||
upload_test_file(integration_client, project, package, content, tag="download10-test")
|
||||
upload_test_file(integration_client, project, package, content, version="download10-test")
|
||||
|
||||
num_downloads = 10
|
||||
results = []
|
||||
@@ -450,7 +472,7 @@ class TestConcurrentDownloads:
|
||||
content, expected_hash = generate_content_with_hash(1024, seed=700 + i)
|
||||
upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
tag=f"multi-download-{i}"
|
||||
version=f"multi-download-{i}"
|
||||
)
|
||||
uploads.append((f"multi-download-{i}", content))
|
||||
|
||||
@@ -502,7 +524,7 @@ class TestMixedConcurrentOperations:
|
||||
|
||||
# Upload initial content
|
||||
content1, hash1 = generate_content_with_hash(10240, seed=800) # 10KB
|
||||
upload_test_file(integration_client, project, package, content1, tag="initial")
|
||||
upload_test_file(integration_client, project, package, content1, version="initial")
|
||||
|
||||
# New content for upload during download
|
||||
content2, hash2 = generate_content_with_hash(10240, seed=801)
|
||||
@@ -539,7 +561,7 @@ class TestMixedConcurrentOperations:
|
||||
response = client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": "during-download"},
|
||||
data={"version": "during-download"},
|
||||
headers={"Authorization": f"Bearer {api_key}"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
@@ -579,7 +601,7 @@ class TestMixedConcurrentOperations:
|
||||
existing_files = []
|
||||
for i in range(3):
|
||||
content, hash = generate_content_with_hash(2048, seed=900 + i)
|
||||
upload_test_file(integration_client, project, package, content, tag=f"existing-{i}")
|
||||
upload_test_file(integration_client, project, package, content, version=f"existing-{i}")
|
||||
existing_files.append((f"existing-{i}", content))
|
||||
|
||||
# New files for uploading
|
||||
@@ -619,7 +641,7 @@ class TestMixedConcurrentOperations:
|
||||
response = client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": f"new-{idx}"},
|
||||
data={"version": f"new-{idx}"},
|
||||
headers={"Authorization": f"Bearer {api_key}"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
@@ -689,7 +711,7 @@ class TestMixedConcurrentOperations:
|
||||
upload_resp = client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": f"pattern-{idx}"},
|
||||
data={"version": f"pattern-{idx}"},
|
||||
headers={"Authorization": f"Bearer {api_key}"},
|
||||
)
|
||||
if upload_resp.status_code != 200:
|
||||
|
||||
@@ -68,7 +68,7 @@ class TestUploadErrorHandling:
|
||||
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
data={"tag": "no-file-provided"},
|
||||
data={"version": "no-file-provided"},
|
||||
)
|
||||
assert response.status_code == 422
|
||||
|
||||
@@ -200,7 +200,7 @@ class TestTimeoutBehavior:
|
||||
|
||||
start_time = time.time()
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="timeout-test"
|
||||
integration_client, project, package, content, version="timeout-test"
|
||||
)
|
||||
elapsed = time.time() - start_time
|
||||
|
||||
@@ -219,7 +219,7 @@ class TestTimeoutBehavior:
|
||||
|
||||
# First upload
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag="download-timeout-test"
|
||||
integration_client, project, package, content, version="download-timeout-test"
|
||||
)
|
||||
|
||||
# Then download and time it
|
||||
|
||||
@@ -41,7 +41,7 @@ class TestRoundTripVerification:
|
||||
|
||||
# Upload and capture returned hash
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="roundtrip"
|
||||
integration_client, project, package, content, version="roundtrip"
|
||||
)
|
||||
uploaded_hash = result["artifact_id"]
|
||||
|
||||
@@ -84,7 +84,7 @@ class TestRoundTripVerification:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag="header-check"
|
||||
integration_client, project, package, content, version="header-check"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -102,7 +102,7 @@ class TestRoundTripVerification:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag="etag-check"
|
||||
integration_client, project, package, content, version="etag-check"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -186,7 +186,7 @@ class TestClientSideVerificationWorkflow:
|
||||
content = b"Client post-download verification"
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag="verify-after"
|
||||
integration_client, project, package, content, version="verify-after"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -215,7 +215,7 @@ class TestIntegritySizeVariants:
|
||||
content, expected_hash = sized_content(SIZE_1KB, seed=100)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="int-1kb"
|
||||
integration_client, project, package, content, version="int-1kb"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
@@ -234,7 +234,7 @@ class TestIntegritySizeVariants:
|
||||
content, expected_hash = sized_content(SIZE_100KB, seed=101)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="int-100kb"
|
||||
integration_client, project, package, content, version="int-100kb"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
@@ -253,7 +253,7 @@ class TestIntegritySizeVariants:
|
||||
content, expected_hash = sized_content(SIZE_1MB, seed=102)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="int-1mb"
|
||||
integration_client, project, package, content, version="int-1mb"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
@@ -273,7 +273,7 @@ class TestIntegritySizeVariants:
|
||||
content, expected_hash = sized_content(SIZE_10MB, seed=103)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="int-10mb"
|
||||
integration_client, project, package, content, version="int-10mb"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
@@ -323,7 +323,13 @@ class TestConsistencyCheck:

    @pytest.mark.integration
    def test_consistency_check_after_upload(self, integration_client, test_package):
-       """Test consistency check passes after valid upload."""
+       """Test consistency check runs successfully after a valid upload.
+
+       Note: We don't assert healthy=True because other tests (especially
+       corruption detection tests) may leave orphaned S3 objects behind.
+       This test validates the consistency check endpoint works and the
+       uploaded artifact is included in the check count.
+       """
        project, package = test_package
        content = b"Consistency check test content"

@@ -335,9 +341,10 @@ class TestConsistencyCheck:
        assert response.status_code == 200
        data = response.json()

-       # Verify check ran and no issues
+       # Verify check ran - at least 1 artifact was checked
        assert data["total_artifacts_checked"] >= 1
-       assert data["healthy"] is True
+       # Verify no missing S3 objects (uploaded artifact should exist)
+       assert data["missing_s3_objects"] == 0

    @pytest.mark.integration
    def test_consistency_check_limit_parameter(self, integration_client):
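For readers of the relaxed assertions above, a small helper makes the acceptance rule explicit: only `total_artifacts_checked` and `missing_s3_objects` matter for this test, while `healthy` is allowed to be False because of leftovers from the corruption tests. Field names come from the diff; the response dict below is a hypothetical example:

```python
def uploaded_artifact_accounted_for(report: dict) -> bool:
    """Loose acceptance check mirroring the relaxed assertions in the hunk above.

    `healthy` may legitimately be False when earlier corruption tests left
    orphaned S3 objects behind, so only the two fields the test relies on
    are checked here.
    """
    return report["total_artifacts_checked"] >= 1 and report["missing_s3_objects"] == 0


# Hypothetical response shape, with field names taken from the assertions in the diff
report = {"total_artifacts_checked": 12, "healthy": False, "missing_s3_objects": 0}
assert uploaded_artifact_accounted_for(report)
```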
||||
@@ -366,7 +373,7 @@ class TestDigestHeader:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag="digest-test"
|
||||
integration_client, project, package, content, version="digest-test"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -390,7 +397,7 @@ class TestDigestHeader:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag="digest-b64"
|
||||
integration_client, project, package, content, version="digest-b64"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -420,7 +427,7 @@ class TestVerificationModes:
|
||||
content = b"Pre-verification mode test"
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag="pre-verify"
|
||||
integration_client, project, package, content, version="pre-verify"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -440,7 +447,7 @@ class TestVerificationModes:
|
||||
content = b"Stream verification mode test"
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag="stream-verify"
|
||||
integration_client, project, package, content, version="stream-verify"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -477,7 +484,7 @@ class TestArtifactIntegrityEndpoint:
|
||||
expected_size = len(content)
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag="content-len"
|
||||
integration_client, project, package, content, version="content-len"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -513,7 +520,7 @@ class TestCorruptionDetection:
|
||||
|
||||
# Upload original content
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="corrupt-test"
|
||||
integration_client, project, package, content, version="corrupt-test"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
@@ -555,7 +562,7 @@ class TestCorruptionDetection:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="bitflip-test"
|
||||
integration_client, project, package, content, version="bitflip-test"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
@@ -592,7 +599,7 @@ class TestCorruptionDetection:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="truncate-test"
|
||||
integration_client, project, package, content, version="truncate-test"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
@@ -627,7 +634,7 @@ class TestCorruptionDetection:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="append-test"
|
||||
integration_client, project, package, content, version="append-test"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
@@ -670,7 +677,7 @@ class TestCorruptionDetection:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="client-detect"
|
||||
integration_client, project, package, content, version="client-detect"
|
||||
)
|
||||
|
||||
# Corrupt the S3 object
|
||||
@@ -713,7 +720,7 @@ class TestCorruptionDetection:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="size-mismatch"
|
||||
integration_client, project, package, content, version="size-mismatch"
|
||||
)
|
||||
|
||||
# Modify S3 object to have different size
|
||||
@@ -747,7 +754,7 @@ class TestCorruptionDetection:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="missing-s3"
|
||||
integration_client, project, package, content, version="missing-s3"
|
||||
)
|
||||
|
||||
# Delete the S3 object
|
||||
|
||||
@@ -41,7 +41,7 @@ class TestUploadMetrics:
|
||||
content = b"duration test content"
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="duration-test"
|
||||
integration_client, project, package, content, version="duration-test"
|
||||
)
|
||||
|
||||
assert "duration_ms" in result
|
||||
@@ -55,7 +55,7 @@ class TestUploadMetrics:
|
||||
content = b"throughput test content"
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="throughput-test"
|
||||
integration_client, project, package, content, version="throughput-test"
|
||||
)
|
||||
|
||||
assert "throughput_mbps" in result
|
||||
@@ -72,7 +72,7 @@ class TestUploadMetrics:
|
||||
|
||||
start = time.time()
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="duration-check"
|
||||
integration_client, project, package, content, version="duration-check"
|
||||
)
|
||||
actual_duration = (time.time() - start) * 1000 # ms
|
||||
|
||||
@@ -92,7 +92,7 @@ class TestLargeFileUploads:
|
||||
content, expected_hash = sized_content(SIZE_10MB, seed=200)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="large-10mb"
|
||||
integration_client, project, package, content, version="large-10mb"
|
||||
)
|
||||
|
||||
assert result["artifact_id"] == expected_hash
|
||||
@@ -109,7 +109,7 @@ class TestLargeFileUploads:
|
||||
content, expected_hash = sized_content(SIZE_100MB, seed=300)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="large-100mb"
|
||||
integration_client, project, package, content, version="large-100mb"
|
||||
)
|
||||
|
||||
assert result["artifact_id"] == expected_hash
|
||||
@@ -126,7 +126,7 @@ class TestLargeFileUploads:
|
||||
content, expected_hash = sized_content(SIZE_1GB, seed=400)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="large-1gb"
|
||||
integration_client, project, package, content, version="large-1gb"
|
||||
)
|
||||
|
||||
assert result["artifact_id"] == expected_hash
|
||||
@@ -147,14 +147,14 @@ class TestLargeFileUploads:
|
||||
|
||||
# First upload
|
||||
result1 = upload_test_file(
|
||||
integration_client, project, package, content, tag=f"dedup-{unique_test_id}-1"
|
||||
integration_client, project, package, content, version=f"dedup-{unique_test_id}-1"
|
||||
)
|
||||
# Note: may be True if previous test uploaded same content
|
||||
first_dedupe = result1["deduplicated"]
|
||||
|
||||
# Second upload of same content
|
||||
result2 = upload_test_file(
|
||||
integration_client, project, package, content, tag=f"dedup-{unique_test_id}-2"
|
||||
integration_client, project, package, content, version=f"dedup-{unique_test_id}-2"
|
||||
)
|
||||
assert result2["artifact_id"] == expected_hash
|
||||
# Second upload MUST be deduplicated
|
||||
@@ -277,7 +277,7 @@ class TestUploadSizeLimits:
|
||||
content = b"X"
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="min-size"
|
||||
integration_client, project, package, content, version="min-size"
|
||||
)
|
||||
|
||||
assert result["size"] == 1
|
||||
@@ -289,7 +289,7 @@ class TestUploadSizeLimits:
|
||||
content = b"content length verification test"
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="content-length-test"
|
||||
integration_client, project, package, content, version="content-length-test"
|
||||
)
|
||||
|
||||
# Size in response should match actual content length
|
||||
@@ -336,7 +336,7 @@ class TestUploadErrorHandling:
|
||||
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
data={"tag": "no-file"},
|
||||
data={"version": "no-file"},
|
||||
)
|
||||
|
||||
assert response.status_code == 422
|
||||
@@ -459,7 +459,7 @@ class TestUploadTimeout:
|
||||
|
||||
# httpx client should handle this quickly
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="timeout-small"
|
||||
integration_client, project, package, content, version="timeout-small"
|
||||
)
|
||||
|
||||
assert result["artifact_id"] is not None
|
||||
@@ -474,7 +474,7 @@ class TestUploadTimeout:
|
||||
|
||||
start = time.time()
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="timeout-check"
|
||||
integration_client, project, package, content, version="timeout-check"
|
||||
)
|
||||
duration = time.time() - start
|
||||
|
||||
@@ -525,7 +525,7 @@ class TestConcurrentUploads:
|
||||
response = client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": f"concurrent-diff-{idx}"},
|
||||
data={"version": f"concurrent-diff-{idx}"},
|
||||
headers={"Authorization": f"Bearer {api_key}"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
|
@@ -175,7 +175,7 @@ class TestPackageStats:
        assert "package_id" in data
        assert "package_name" in data
        assert "project_name" in data
-       assert "tag_count" in data
+       assert "version_count" in data
        assert "artifact_count" in data
        assert "total_size_bytes" in data
        assert "upload_count" in data
@@ -234,7 +234,11 @@ class TestPackageCascadeDelete:
    def test_ref_count_decrements_on_package_delete(
        self, integration_client, unique_test_id
    ):
-       """Test ref_count decrements for all tags when package is deleted."""
+       """Test ref_count decrements when package is deleted.
+
+       Each package can only have one version per artifact (same content = same version).
+       This test verifies that deleting a package decrements the artifact's ref_count.
+       """
        project_name = f"cascade-pkg-{unique_test_id}"
        package_name = f"test-pkg-{unique_test_id}"

@@ -256,23 +260,17 @@ class TestPackageCascadeDelete:
        )
        assert response.status_code == 200

-       # Upload content with multiple tags
+       # Upload content with version
        content = f"cascade delete test {unique_test_id}".encode()
        expected_hash = compute_sha256(content)

        upload_test_file(
-           integration_client, project_name, package_name, content, tag="v1"
-       )
-       upload_test_file(
-           integration_client, project_name, package_name, content, tag="v2"
-       )
-       upload_test_file(
-           integration_client, project_name, package_name, content, tag="v3"
+           integration_client, project_name, package_name, content, version="1.0.0"
        )

-       # Verify ref_count is 3
+       # Verify ref_count is 1
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
-       assert response.json()["ref_count"] == 3
+       assert response.json()["ref_count"] == 1

        # Delete the package
        delete_response = integration_client.delete(

@@ -128,7 +128,9 @@ class TestProjectListingFilters:
        assert response.status_code == 200

        data = response.json()
-       names = [p["name"] for p in data["items"]]
+       # Filter out system projects (names starting with "_") as they may have
+       # collation-specific sort behavior and aren't part of the test data
+       names = [p["name"] for p in data["items"] if not p["name"].startswith("_")]
        assert names == sorted(names)


@@ -147,7 +149,7 @@ class TestProjectStats:
        assert "project_id" in data
        assert "project_name" in data
        assert "package_count" in data
-       assert "tag_count" in data
+       assert "version_count" in data
        assert "artifact_count" in data
        assert "total_size_bytes" in data
        assert "upload_count" in data
@@ -227,7 +229,11 @@ class TestProjectCascadeDelete:
    def test_ref_count_decrements_on_project_delete(
        self, integration_client, unique_test_id
    ):
-       """Test ref_count decrements for all tags when project is deleted."""
+       """Test ref_count decrements for all versions when project is deleted.
+
+       Each package can only have one version per artifact (same content = same version).
+       With 2 packages, ref_count should be 2, and go to 0 when project is deleted.
+       """
        project_name = f"cascade-proj-{unique_test_id}"
        package1_name = f"pkg1-{unique_test_id}"
        package2_name = f"pkg2-{unique_test_id}"
@@ -251,26 +257,20 @@ class TestProjectCascadeDelete:
        )
        assert response.status_code == 200

-       # Upload same content with tags in both packages
+       # Upload same content to both packages
        content = f"project cascade test {unique_test_id}".encode()
        expected_hash = compute_sha256(content)

        upload_test_file(
-           integration_client, project_name, package1_name, content, tag="v1"
+           integration_client, project_name, package1_name, content, version="1.0.0"
        )
        upload_test_file(
-           integration_client, project_name, package1_name, content, tag="v2"
-       )
-       upload_test_file(
-           integration_client, project_name, package2_name, content, tag="latest"
-       )
-       upload_test_file(
-           integration_client, project_name, package2_name, content, tag="stable"
+           integration_client, project_name, package2_name, content, version="1.0.0"
        )

-       # Verify ref_count is 4 (2 tags in each of 2 packages)
+       # Verify ref_count is 2 (1 version in each of 2 packages)
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
-       assert response.json()["ref_count"] == 4
+       assert response.json()["ref_count"] == 2

        # Delete the project
        delete_response = integration_client.delete(f"/api/v1/projects/{project_name}")

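Both cascade-delete docstrings above state the same invariant: `ref_count` is the number of (package, version) rows still pointing at the artifact, so two packages holding identical content give 2, and deleting the project drops it to 0. A toy in-memory model of that decrement path (an assumption-level illustration, not the real transactional logic):

```python
import hashlib
from collections import defaultdict

refs = defaultdict(set)  # artifact_id -> set of (project, package, version) references


def upload(project: str, package: str, version: str, data: bytes) -> str:
    artifact_id = hashlib.sha256(data).hexdigest()
    refs[artifact_id].add((project, package, version))
    return artifact_id


def delete_package(project: str, package: str) -> None:
    # Cascade: drop every version reference owned by the package
    for ref_set in refs.values():
        ref_set -= {r for r in ref_set if r[0] == project and r[1] == package}


def delete_project(project: str) -> None:
    # Cascade: drop every version reference owned by any package in the project
    for ref_set in refs.values():
        ref_set -= {r for r in ref_set if r[0] == project}


content = b"project cascade sketch"
aid = upload("cascade-proj", "pkg1", "1.0.0", content)
upload("cascade-proj", "pkg2", "1.0.0", content)
assert len(refs[aid]) == 2   # one version per package

delete_project("cascade-proj")
assert len(refs[aid]) == 0   # artifact is now orphaned and GC-eligible
```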
@@ -17,21 +17,31 @@ class TestPyPIProxyEndpoints:
    """

    @pytest.mark.integration
-   def test_pypi_simple_index_no_sources(self):
-       """Test that /pypi/simple/ returns 503 when no sources configured."""
+   def test_pypi_simple_index(self):
+       """Test that /pypi/simple/ returns HTML response."""
        with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
            response = client.get("/pypi/simple/")
-           # Should return 503 when no PyPI upstream sources are configured
-           assert response.status_code == 503
-           assert "No PyPI upstream sources configured" in response.json()["detail"]
+           # Returns 200 if sources configured, 503 if not
+           assert response.status_code in (200, 503)
+           if response.status_code == 200:
+               assert "text/html" in response.headers.get("content-type", "")
+           else:
+               assert "No PyPI upstream sources configured" in response.json()["detail"]

    @pytest.mark.integration
-   def test_pypi_package_no_sources(self):
-       """Test that /pypi/simple/{package}/ returns 503 when no sources configured."""
+   def test_pypi_package_endpoint(self):
+       """Test that /pypi/simple/{package}/ returns appropriate response."""
        with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
            response = client.get("/pypi/simple/requests/")
-           assert response.status_code == 503
-           assert "No PyPI upstream sources configured" in response.json()["detail"]
+           # Returns 200 if sources configured and package found,
+           # 404 if package not found, 503 if no sources
+           assert response.status_code in (200, 404, 503)
+           if response.status_code == 200:
+               assert "text/html" in response.headers.get("content-type", "")
+           elif response.status_code == 404:
+               assert "not found" in response.json()["detail"].lower()
+           else:  # 503
+               assert "No PyPI upstream sources configured" in response.json()["detail"]

    @pytest.mark.integration
    def test_pypi_download_missing_upstream_param(self):
@@ -58,7 +68,13 @@ class TestPyPILinkRewriting:
        </html>
        '''

-       result = _rewrite_package_links(html, "http://localhost:8080", "requests")
+       # upstream_base_url is used to resolve relative URLs (not needed here since URLs are absolute)
+       result = _rewrite_package_links(
+           html,
+           "http://localhost:8080",
+           "requests",
+           "https://pypi.org/simple/requests/"
+       )

        # Links should be rewritten to go through our proxy
        assert "/pypi/simple/requests/requests-2.31.0.tar.gz?upstream=" in result
@@ -69,25 +85,69 @@ class TestPyPILinkRewriting:
        assert "#sha256=abc123" in result
        assert "#sha256=def456" in result

+   def test_rewrite_relative_links(self):
+       """Test that relative URLs are resolved to absolute URLs."""
+       from app.pypi_proxy import _rewrite_package_links
+
+       # Artifactory-style relative URLs
+       html = '''
+       <html>
+       <body>
+       <a href="../../packages/ab/cd/requests-2.31.0.tar.gz#sha256=abc123">requests-2.31.0.tar.gz</a>
+       </body>
+       </html>
+       '''
+
+       result = _rewrite_package_links(
+           html,
+           "https://orchard.example.com",
+           "requests",
+           "https://artifactory.example.com/api/pypi/pypi-remote/simple/requests/"
+       )
+
+       # The relative URL should be resolved to absolute
+       # ../../packages/ab/cd/... from /api/pypi/pypi-remote/simple/requests/ resolves to /api/pypi/pypi-remote/packages/ab/cd/...
+       assert "upstream=https%3A%2F%2Fartifactory.example.com%2Fapi%2Fpypi%2Fpypi-remote%2Fpackages" in result
+       # Hash fragment should be preserved
+       assert "#sha256=abc123" in result


class TestPyPIPackageNormalization:
    """Tests for PyPI package name normalization."""

    @pytest.mark.integration
    def test_package_name_normalized(self):
-       """Test that package names are normalized per PEP 503."""
-       # These should all be treated the same:
-       # requests, Requests, requests_, requests-
-       # The endpoint normalizes to lowercase with hyphens
+       """Test that package names are normalized per PEP 503.
+
+       Different capitalizations/separators should all be valid paths.
+       The endpoint normalizes to lowercase with hyphens before lookup.
+       """
        with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
-           # Without upstream sources, we get 503, but the normalization
-           # happens before the source lookup
-           response = client.get("/pypi/simple/Requests/")
-           assert response.status_code == 503  # No sources, but path was valid
+           # Test various name formats - all should be valid endpoint paths
+           for package_name in ["Requests", "some_package", "some-package"]:
+               response = client.get(f"/pypi/simple/{package_name}/")
+               # 200 = found, 404 = not found, 503 = no sources configured
+               assert response.status_code in (200, 404, 503), \
+                   f"Unexpected status {response.status_code} for {package_name}"

-           response = client.get("/pypi/simple/some_package/")
-           assert response.status_code == 503
+               # Verify response is appropriate for the status code
+               if response.status_code == 200:
+                   assert "text/html" in response.headers.get("content-type", "")
+               elif response.status_code == 503:
+                   assert "No PyPI upstream sources configured" in response.json()["detail"]

-           response = client.get("/pypi/simple/some-package/")
-           assert response.status_code == 503

+class TestPyPIProxyInfrastructure:
+   """Tests for PyPI proxy infrastructure integration."""
+
+   @pytest.mark.integration
+   def test_health_endpoint_includes_infrastructure(self, integration_client):
+       """Health endpoint should report infrastructure status."""
+       response = integration_client.get("/health")
+       assert response.status_code == 200
+
+       data = response.json()
+       assert data["status"] == "ok"
+       # Infrastructure status should be present
+       assert "http_pool" in data
+       assert "cache" in data

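The new `test_rewrite_relative_links` case pins down what the proxy has to do with Artifactory-style links: resolve the relative href against the upstream simple-page URL, route the download back through `/pypi/simple/{package}/{filename}?upstream=<url-encoded absolute URL>`, and keep the `#sha256=` fragment. The sketch below reproduces that resolution plus the PEP 503 normalization the docstring describes; it illustrates what the assertions imply and is not the body of `_rewrite_package_links`:

```python
import posixpath
import re
from urllib.parse import quote, urldefrag, urljoin


def normalize_pep503(name: str) -> str:
    """PEP 503: runs of '-', '_' and '.' collapse to a single '-', lowercased."""
    return re.sub(r"[-_.]+", "-", name).lower()


def rewrite_href(href: str, proxy_base: str, package: str, upstream_page: str) -> str:
    """Resolve a (possibly relative) file link and route it through the proxy."""
    absolute, fragment = urldefrag(urljoin(upstream_page, href))
    filename = posixpath.basename(absolute)
    rewritten = (
        f"{proxy_base}/pypi/simple/{normalize_pep503(package)}/{filename}"
        f"?upstream={quote(absolute, safe='')}"
    )
    return f"{rewritten}#{fragment}" if fragment else rewritten


link = rewrite_href(
    "../../packages/ab/cd/requests-2.31.0.tar.gz#sha256=abc123",
    "https://orchard.example.com",
    "Requests",
    "https://artifactory.example.com/api/pypi/pypi-remote/simple/requests/",
)
# ../.. climbs from .../simple/requests/ back to .../pypi-remote/, exactly as the test asserts
assert "upstream=https%3A%2F%2Fartifactory.example.com%2Fapi%2Fpypi%2Fpypi-remote%2Fpackages" in link
assert link.endswith("#sha256=abc123")
```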
||||
@@ -48,7 +48,7 @@ class TestSmallFileSizes:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="1byte.bin", tag="1byte"
|
||||
filename="1byte.bin", version="1byte"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
assert result["size"] == SIZE_1B
|
||||
@@ -70,7 +70,7 @@ class TestSmallFileSizes:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="1kb.bin", tag="1kb"
|
||||
filename="1kb.bin", version="1kb"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
assert result["size"] == SIZE_1KB
|
||||
@@ -90,7 +90,7 @@ class TestSmallFileSizes:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="10kb.bin", tag="10kb"
|
||||
filename="10kb.bin", version="10kb"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
assert result["size"] == SIZE_10KB
|
||||
@@ -110,7 +110,7 @@ class TestSmallFileSizes:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="100kb.bin", tag="100kb"
|
||||
filename="100kb.bin", version="100kb"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
assert result["size"] == SIZE_100KB
|
||||
@@ -134,7 +134,7 @@ class TestMediumFileSizes:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="1mb.bin", tag="1mb"
|
||||
filename="1mb.bin", version="1mb"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
assert result["size"] == SIZE_1MB
|
||||
@@ -155,7 +155,7 @@ class TestMediumFileSizes:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="5mb.bin", tag="5mb"
|
||||
filename="5mb.bin", version="5mb"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
assert result["size"] == SIZE_5MB
|
||||
@@ -177,7 +177,7 @@ class TestMediumFileSizes:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="10mb.bin", tag="10mb"
|
||||
filename="10mb.bin", version="10mb"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
assert result["size"] == SIZE_10MB
|
||||
@@ -200,7 +200,7 @@ class TestMediumFileSizes:
|
||||
start_time = time.time()
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="50mb.bin", tag="50mb"
|
||||
filename="50mb.bin", version="50mb"
|
||||
)
|
||||
upload_time = time.time() - start_time
|
||||
|
||||
@@ -240,7 +240,7 @@ class TestLargeFileSizes:
|
||||
start_time = time.time()
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="100mb.bin", tag="100mb"
|
||||
filename="100mb.bin", version="100mb"
|
||||
)
|
||||
upload_time = time.time() - start_time
|
||||
|
||||
@@ -271,7 +271,7 @@ class TestLargeFileSizes:
|
||||
start_time = time.time()
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="250mb.bin", tag="250mb"
|
||||
filename="250mb.bin", version="250mb"
|
||||
)
|
||||
upload_time = time.time() - start_time
|
||||
|
||||
@@ -302,7 +302,7 @@ class TestLargeFileSizes:
|
||||
start_time = time.time()
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="500mb.bin", tag="500mb"
|
||||
filename="500mb.bin", version="500mb"
|
||||
)
|
||||
upload_time = time.time() - start_time
|
||||
|
||||
@@ -336,7 +336,7 @@ class TestLargeFileSizes:
|
||||
start_time = time.time()
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="1gb.bin", tag="1gb"
|
||||
filename="1gb.bin", version="1gb"
|
||||
)
|
||||
upload_time = time.time() - start_time
|
||||
|
||||
@@ -368,7 +368,7 @@ class TestChunkBoundaries:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="chunk.bin", tag="chunk-exact"
|
||||
filename="chunk.bin", version="chunk-exact"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
assert result["size"] == CHUNK_SIZE
|
||||
@@ -389,7 +389,7 @@ class TestChunkBoundaries:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="chunk_plus.bin", tag="chunk-plus"
|
||||
filename="chunk_plus.bin", version="chunk-plus"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
assert result["size"] == size
|
||||
@@ -410,7 +410,7 @@ class TestChunkBoundaries:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="chunk_minus.bin", tag="chunk-minus"
|
||||
filename="chunk_minus.bin", version="chunk-minus"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
assert result["size"] == size
|
||||
@@ -431,7 +431,7 @@ class TestChunkBoundaries:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="multi_chunk.bin", tag="multi-chunk"
|
||||
filename="multi_chunk.bin", version="multi-chunk"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
assert result["size"] == size
|
||||
@@ -457,7 +457,7 @@ class TestDataIntegrity:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="binary.bin", tag="binary"
|
||||
filename="binary.bin", version="binary"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
@@ -477,7 +477,7 @@ class TestDataIntegrity:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="text.txt", tag="text"
|
||||
filename="text.txt", version="text"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
@@ -498,7 +498,7 @@ class TestDataIntegrity:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="nulls.bin", tag="nulls"
|
||||
filename="nulls.bin", version="nulls"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
@@ -519,7 +519,7 @@ class TestDataIntegrity:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="文件名.txt", tag="unicode-name"
|
||||
filename="文件名.txt", version="unicode-name"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
assert result["original_name"] == "文件名.txt"
|
||||
@@ -543,7 +543,7 @@ class TestDataIntegrity:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="data.gz", tag="compressed"
|
||||
filename="data.gz", version="compressed"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
@@ -568,7 +568,7 @@ class TestDataIntegrity:
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename=f"hash_test_{size}.bin", tag=f"hash-{size}"
|
||||
filename=f"hash_test_{size}.bin", version=f"hash-{size}"
|
||||
)
|
||||
|
||||
# Verify artifact_id matches expected hash
|
||||
|
||||
@@ -32,7 +32,7 @@ class TestRangeRequests:
|
||||
"""Test range request for first N bytes."""
|
||||
project, package = test_package
|
||||
content = b"0123456789" * 100 # 1000 bytes
|
||||
upload_test_file(integration_client, project, package, content, tag="range-test")
|
||||
upload_test_file(integration_client, project, package, content, version="range-test")
|
||||
|
||||
# Request first 10 bytes
|
||||
response = integration_client.get(
|
||||
@@ -50,7 +50,7 @@ class TestRangeRequests:
|
||||
"""Test range request for bytes in the middle."""
|
||||
project, package = test_package
|
||||
content = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
|
||||
upload_test_file(integration_client, project, package, content, tag="range-mid")
|
||||
upload_test_file(integration_client, project, package, content, version="range-mid")
|
||||
|
||||
# Request bytes 10-19 (KLMNOPQRST)
|
||||
response = integration_client.get(
|
||||
@@ -66,7 +66,7 @@ class TestRangeRequests:
|
||||
"""Test range request for last N bytes (suffix range)."""
|
||||
project, package = test_package
|
||||
content = b"0123456789ABCDEF" # 16 bytes
|
||||
upload_test_file(integration_client, project, package, content, tag="range-suffix")
|
||||
upload_test_file(integration_client, project, package, content, version="range-suffix")
|
||||
|
||||
# Request last 4 bytes
|
||||
response = integration_client.get(
|
||||
@@ -82,7 +82,7 @@ class TestRangeRequests:
|
||||
"""Test range request from offset to end."""
|
||||
project, package = test_package
|
||||
content = b"0123456789"
|
||||
upload_test_file(integration_client, project, package, content, tag="range-open")
|
||||
upload_test_file(integration_client, project, package, content, version="range-open")
|
||||
|
||||
# Request from byte 5 to end
|
||||
response = integration_client.get(
|
||||
@@ -100,7 +100,7 @@ class TestRangeRequests:
|
||||
"""Test that range requests include Accept-Ranges header."""
|
||||
project, package = test_package
|
||||
content = b"test content"
|
||||
upload_test_file(integration_client, project, package, content, tag="accept-ranges")
|
||||
upload_test_file(integration_client, project, package, content, version="accept-ranges")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/accept-ranges",
|
||||
@@ -117,7 +117,7 @@ class TestRangeRequests:
|
||||
"""Test that full downloads advertise range support."""
|
||||
project, package = test_package
|
||||
content = b"test content"
|
||||
upload_test_file(integration_client, project, package, content, tag="full-accept")
|
||||
upload_test_file(integration_client, project, package, content, version="full-accept")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/full-accept",
|
||||
@@ -136,7 +136,7 @@ class TestConditionalRequests:
|
||||
project, package = test_package
|
||||
content = b"conditional request test content"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="cond-etag")
|
||||
upload_test_file(integration_client, project, package, content, version="cond-etag")
|
||||
|
||||
# Request with matching ETag
|
||||
response = integration_client.get(
|
||||
@@ -153,7 +153,7 @@ class TestConditionalRequests:
|
||||
project, package = test_package
|
||||
content = b"etag no quotes test"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="cond-noquote")
|
||||
upload_test_file(integration_client, project, package, content, version="cond-noquote")
|
||||
|
||||
# Request with ETag without quotes
|
||||
response = integration_client.get(
|
||||
@@ -168,7 +168,7 @@ class TestConditionalRequests:
|
||||
"""Test If-None-Match with non-matching ETag returns 200."""
|
||||
project, package = test_package
|
||||
content = b"etag mismatch test"
|
||||
upload_test_file(integration_client, project, package, content, tag="cond-mismatch")
|
||||
upload_test_file(integration_client, project, package, content, version="cond-mismatch")
|
||||
|
||||
# Request with different ETag
|
||||
response = integration_client.get(
|
||||
@@ -184,7 +184,7 @@ class TestConditionalRequests:
|
||||
"""Test If-Modified-Since with future date returns 304."""
|
||||
project, package = test_package
|
||||
content = b"modified since test"
|
||||
upload_test_file(integration_client, project, package, content, tag="cond-modified")
|
||||
upload_test_file(integration_client, project, package, content, version="cond-modified")
|
||||
|
||||
# Request with future date (artifact was definitely created before this)
|
||||
future_date = formatdate(time.time() + 86400, usegmt=True) # Tomorrow
|
||||
@@ -202,7 +202,7 @@ class TestConditionalRequests:
|
||||
"""Test If-Modified-Since with old date returns 200."""
|
||||
project, package = test_package
|
||||
content = b"old date test"
|
||||
upload_test_file(integration_client, project, package, content, tag="cond-old")
|
||||
upload_test_file(integration_client, project, package, content, version="cond-old")
|
||||
|
||||
# Request with old date (2020-01-01)
|
||||
old_date = "Wed, 01 Jan 2020 00:00:00 GMT"
|
||||
@@ -220,7 +220,7 @@ class TestConditionalRequests:
|
||||
project, package = test_package
|
||||
content = b"304 etag test"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="304-etag")
|
||||
upload_test_file(integration_client, project, package, content, version="304-etag")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/304-etag",
|
||||
@@ -236,7 +236,7 @@ class TestConditionalRequests:
|
||||
project, package = test_package
|
||||
content = b"304 cache test"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="304-cache")
|
||||
upload_test_file(integration_client, project, package, content, version="304-cache")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/304-cache",
|
||||
@@ -255,7 +255,7 @@ class TestCachingHeaders:
|
||||
"""Test download response includes Cache-Control header."""
|
||||
project, package = test_package
|
||||
content = b"cache control test"
|
||||
upload_test_file(integration_client, project, package, content, tag="cache-ctl")
|
||||
upload_test_file(integration_client, project, package, content, version="cache-ctl")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/cache-ctl",
|
||||
@@ -272,7 +272,7 @@ class TestCachingHeaders:
|
||||
"""Test download response includes Last-Modified header."""
|
||||
project, package = test_package
|
||||
content = b"last modified test"
|
||||
upload_test_file(integration_client, project, package, content, tag="last-mod")
|
||||
upload_test_file(integration_client, project, package, content, version="last-mod")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/last-mod",
|
||||
@@ -290,7 +290,7 @@ class TestCachingHeaders:
|
||||
project, package = test_package
|
||||
content = b"etag header test"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="etag-hdr")
|
||||
upload_test_file(integration_client, project, package, content, version="etag-hdr")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/etag-hdr",
|
||||
@@ -308,7 +308,7 @@ class TestDownloadResume:
|
||||
"""Test resuming download from where it left off."""
|
||||
project, package = test_package
|
||||
content = b"ABCDEFGHIJ" * 100 # 1000 bytes
|
||||
upload_test_file(integration_client, project, package, content, tag="resume-test")
|
||||
upload_test_file(integration_client, project, package, content, version="resume-test")
|
||||
|
||||
# Simulate partial download (first 500 bytes)
|
||||
response1 = integration_client.get(
|
||||
@@ -340,7 +340,7 @@ class TestDownloadResume:
|
||||
project, package = test_package
|
||||
content = b"resume etag verification test content"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="resume-etag")
|
||||
upload_test_file(integration_client, project, package, content, version="resume-etag")
|
||||
|
||||
# Get ETag from first request
|
||||
response1 = integration_client.get(
|
||||
@@ -373,7 +373,7 @@ class TestLargeFileStreaming:
|
||||
project, package = test_package
|
||||
content, expected_hash = sized_content(SIZE_1MB, seed=500)
|
||||
|
||||
upload_test_file(integration_client, project, package, content, tag="stream-1mb")
|
||||
upload_test_file(integration_client, project, package, content, version="stream-1mb")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/stream-1mb",
|
||||
@@ -391,7 +391,7 @@ class TestLargeFileStreaming:
|
||||
project, package = test_package
|
||||
content, expected_hash = sized_content(SIZE_100KB, seed=501)
|
||||
|
||||
upload_test_file(integration_client, project, package, content, tag="stream-hdr")
|
||||
upload_test_file(integration_client, project, package, content, version="stream-hdr")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/stream-hdr",
|
||||
@@ -410,7 +410,7 @@ class TestLargeFileStreaming:
|
||||
project, package = test_package
|
||||
content, _ = sized_content(SIZE_100KB, seed=502)
|
||||
|
||||
upload_test_file(integration_client, project, package, content, tag="range-large")
|
||||
upload_test_file(integration_client, project, package, content, version="range-large")
|
||||
|
||||
# Request a slice from the middle
|
||||
start = 50000
|
||||
@@ -433,7 +433,7 @@ class TestDownloadModes:
|
||||
"""Test proxy mode streams content through backend."""
|
||||
project, package = test_package
|
||||
content = b"proxy mode test content"
|
||||
upload_test_file(integration_client, project, package, content, tag="mode-proxy")
|
||||
upload_test_file(integration_client, project, package, content, version="mode-proxy")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/mode-proxy",
|
||||
@@ -447,7 +447,7 @@ class TestDownloadModes:
|
||||
"""Test presigned mode returns JSON with URL."""
|
||||
project, package = test_package
|
||||
content = b"presigned mode test"
|
||||
upload_test_file(integration_client, project, package, content, tag="mode-presign")
|
||||
upload_test_file(integration_client, project, package, content, version="mode-presign")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/mode-presign",
|
||||
@@ -464,7 +464,7 @@ class TestDownloadModes:
|
||||
"""Test redirect mode returns 302 to presigned URL."""
|
||||
project, package = test_package
|
||||
content = b"redirect mode test"
|
||||
upload_test_file(integration_client, project, package, content, tag="mode-redir")
|
||||
upload_test_file(integration_client, project, package, content, version="mode-redir")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/mode-redir",
|
||||
@@ -484,7 +484,7 @@ class TestIntegrityDuringStreaming:
|
||||
project, package = test_package
|
||||
content = b"integrity check content"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="integrity")
|
||||
upload_test_file(integration_client, project, package, content, version="integrity")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/integrity",
|
||||
@@ -505,7 +505,7 @@ class TestIntegrityDuringStreaming:
|
||||
project, package = test_package
|
||||
content = b"etag integrity test"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="etag-int")
|
||||
upload_test_file(integration_client, project, package, content, version="etag-int")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/etag-int",
|
||||
@@ -524,7 +524,7 @@ class TestIntegrityDuringStreaming:
|
||||
"""Test Digest header is present in RFC 3230 format."""
|
||||
project, package = test_package
|
||||
content = b"digest header test"
|
||||
upload_test_file(integration_client, project, package, content, tag="digest")
|
||||
upload_test_file(integration_client, project, package, content, version="digest")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/digest",
|
||||
|
||||
@@ -1,403 +0,0 @@
|
||||
"""
|
||||
Integration tests for tag API endpoints.
|
||||
|
||||
Tests cover:
|
||||
- Tag CRUD operations
|
||||
- Tag listing with pagination and search
|
||||
- Tag history tracking
|
||||
- ref_count behavior with tag operations
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from tests.factories import compute_sha256, upload_test_file
|
||||
|
||||
|
||||
class TestTagCRUD:
|
||||
"""Tests for tag create, read, delete operations."""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_create_tag_via_upload(self, integration_client, test_package):
|
||||
"""Test creating a tag via upload endpoint."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"tag create test",
|
||||
tag="v1.0.0",
|
||||
)
|
||||
|
||||
assert result["tag"] == "v1.0.0"
|
||||
assert result["artifact_id"]
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_create_tag_via_post(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test creating a tag via POST /tags endpoint."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
# First upload an artifact
|
||||
result = upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"artifact for tag",
|
||||
)
|
||||
artifact_id = result["artifact_id"]
|
||||
|
||||
# Create tag via POST
|
||||
tag_name = f"post-tag-{unique_test_id}"
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags",
|
||||
json={"name": tag_name, "artifact_id": artifact_id},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert data["name"] == tag_name
|
||||
assert data["artifact_id"] == artifact_id
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_get_tag(self, integration_client, test_package):
|
||||
"""Test getting a tag by name."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"get tag test",
|
||||
tag="get-tag",
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags/get-tag"
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert data["name"] == "get-tag"
|
||||
assert "artifact_id" in data
|
||||
assert "artifact_size" in data
|
||||
assert "artifact_content_type" in data
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_list_tags(self, integration_client, test_package):
|
||||
"""Test listing tags for a package."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
# Create some tags
|
||||
upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"list tags test",
|
||||
tag="list-v1",
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags"
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert "items" in data
|
||||
assert "pagination" in data
|
||||
|
||||
tag_names = [t["name"] for t in data["items"]]
|
||||
assert "list-v1" in tag_names
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_delete_tag(self, integration_client, test_package):
|
||||
"""Test deleting a tag."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"delete tag test",
|
||||
tag="to-delete",
|
||||
)
|
||||
|
||||
# Delete tag
|
||||
response = integration_client.delete(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags/to-delete"
|
||||
)
|
||||
assert response.status_code == 204
|
||||
|
||||
# Verify deleted
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags/to-delete"
|
||||
)
|
||||
assert response.status_code == 404
|
||||
|
||||
|
||||
class TestTagListingFilters:
|
||||
"""Tests for tag listing with filters and search."""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_tags_pagination(self, integration_client, test_package):
|
||||
"""Test tag listing respects pagination."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags?limit=5"
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert len(data["items"]) <= 5
|
||||
assert data["pagination"]["limit"] == 5
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_tags_search(self, integration_client, test_package, unique_test_id):
|
||||
"""Test tag search by name."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
tag_name = f"searchable-{unique_test_id}"
|
||||
upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"search test",
|
||||
tag=tag_name,
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags?search=searchable"
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
tag_names = [t["name"] for t in data["items"]]
|
||||
assert tag_name in tag_names
|
||||
|
||||
|
||||
class TestTagHistory:
|
||||
"""Tests for tag history tracking."""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_tag_history_on_create(self, integration_client, test_package):
|
||||
"""Test tag history is created when tag is created."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"history create test",
|
||||
tag="history-create",
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags/history-create/history"
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert len(data) >= 1
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_tag_history_on_update(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test tag history is created when tag is updated."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
tag_name = f"history-update-{unique_test_id}"
|
||||
|
||||
# Create tag with first artifact
|
||||
upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"first content",
|
||||
tag=tag_name,
|
||||
)
|
||||
|
||||
# Update tag with second artifact
|
||||
upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"second content",
|
||||
tag=tag_name,
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags/{tag_name}/history"
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
# Should have at least 2 history entries (create + update)
|
||||
assert len(data) >= 2
|
||||
|
||||
|
||||
class TestTagRefCount:
|
||||
"""Tests for ref_count behavior with tag operations."""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_ref_count_decrements_on_tag_delete(self, integration_client, test_package):
|
||||
"""Test ref_count decrements when a tag is deleted."""
|
||||
project_name, package_name = test_package
|
||||
content = b"ref count delete test"
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
# Upload with two tags
|
||||
upload_test_file(
|
||||
integration_client, project_name, package_name, content, tag="rc-v1"
|
||||
)
|
||||
upload_test_file(
|
||||
integration_client, project_name, package_name, content, tag="rc-v2"
|
||||
)
|
||||
|
||||
# Verify ref_count is 2
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||
assert response.json()["ref_count"] == 2
|
||||
|
||||
# Delete one tag
|
||||
delete_response = integration_client.delete(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags/rc-v1"
|
||||
)
|
||||
assert delete_response.status_code == 204
|
||||
|
||||
# Verify ref_count is now 1
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||
assert response.json()["ref_count"] == 1
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_ref_count_zero_after_all_tags_deleted(
|
||||
self, integration_client, test_package
|
||||
):
|
||||
"""Test ref_count goes to 0 when all tags are deleted."""
|
||||
project_name, package_name = test_package
|
||||
content = b"orphan test content"
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
# Upload with one tag
|
||||
upload_test_file(
|
||||
integration_client, project_name, package_name, content, tag="only-tag"
|
||||
)
|
||||
|
||||
# Delete the tag
|
||||
integration_client.delete(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags/only-tag"
|
||||
)
|
||||
|
||||
# Verify ref_count is 0
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||
assert response.json()["ref_count"] == 0
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_ref_count_adjusts_on_tag_update(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test ref_count adjusts when a tag is updated to point to different artifact."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
# Upload two different artifacts
|
||||
content1 = f"artifact one {unique_test_id}".encode()
|
||||
content2 = f"artifact two {unique_test_id}".encode()
|
||||
hash1 = compute_sha256(content1)
|
||||
hash2 = compute_sha256(content2)
|
||||
|
||||
# Upload first artifact with tag "latest"
|
||||
upload_test_file(
|
||||
integration_client, project_name, package_name, content1, tag="latest"
|
||||
)
|
||||
|
||||
# Verify first artifact has ref_count 1
|
||||
response = integration_client.get(f"/api/v1/artifact/{hash1}")
|
||||
assert response.json()["ref_count"] == 1
|
||||
|
||||
# Upload second artifact with different tag
|
||||
upload_test_file(
|
||||
integration_client, project_name, package_name, content2, tag="stable"
|
||||
)
|
||||
|
||||
# Now update "latest" tag to point to second artifact
|
||||
upload_test_file(
|
||||
integration_client, project_name, package_name, content2, tag="latest"
|
||||
)
|
||||
|
||||
# Verify first artifact ref_count decreased to 0
|
||||
response = integration_client.get(f"/api/v1/artifact/{hash1}")
|
||||
assert response.json()["ref_count"] == 0
|
||||
|
||||
# Verify second artifact ref_count increased to 2
|
||||
response = integration_client.get(f"/api/v1/artifact/{hash2}")
|
||||
assert response.json()["ref_count"] == 2
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_ref_count_unchanged_when_tag_same_artifact(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test ref_count doesn't change when tag is 'updated' to same artifact."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
content = f"same artifact {unique_test_id}".encode()
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
# Upload with tag
|
||||
upload_test_file(
|
||||
integration_client, project_name, package_name, content, tag="same-v1"
|
||||
)
|
||||
|
||||
# Verify ref_count is 1
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||
assert response.json()["ref_count"] == 1
|
||||
|
||||
# Upload same content with same tag (no-op)
|
||||
upload_test_file(
|
||||
integration_client, project_name, package_name, content, tag="same-v1"
|
||||
)
|
||||
|
||||
# Verify ref_count is still 1
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||
assert response.json()["ref_count"] == 1
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_tag_via_post_endpoint_increments_ref_count(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test creating tag via POST /tags endpoint increments ref_count."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
content = f"tag endpoint test {unique_test_id}".encode()
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
# Upload artifact without tag
|
||||
result = upload_test_file(
|
||||
integration_client, project_name, package_name, content, filename="test.bin"
|
||||
)
|
||||
artifact_id = result["artifact_id"]
|
||||
|
||||
# Verify ref_count is 0 (no tags yet)
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||
assert response.json()["ref_count"] == 0
|
||||
|
||||
# Create tag via POST endpoint
|
||||
tag_response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags",
|
||||
json={"name": "post-v1", "artifact_id": artifact_id},
|
||||
)
|
||||
assert tag_response.status_code == 200
|
||||
|
||||
# Verify ref_count is now 1
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||
assert response.json()["ref_count"] == 1
|
||||
|
||||
# Create another tag via POST endpoint
|
||||
tag_response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags",
|
||||
json={"name": "post-latest", "artifact_id": artifact_id},
|
||||
)
|
||||
assert tag_response.status_code == 200
|
||||
|
||||
# Verify ref_count is now 2
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||
assert response.json()["ref_count"] == 2
|
||||
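
For orientation, a minimal standalone sketch of the ref_count behaviour that the removed tag tests above exercised, assuming a running instance reachable over HTTP with an API key. The base URL, key, and project/package names are placeholders, and the "tag" form field reflects the pre-migration upload flow shown in the deleted file.

import hashlib
import io

import httpx

ORCHARD_URL = "https://orchard.example.internal"   # placeholder
API_KEY = "changeme"                                # placeholder
PROJECT, PACKAGE = "demo-project", "demo-package"   # assumed to exist already

content = b"ref count walkthrough"
sha256 = hashlib.sha256(content).hexdigest()

with httpx.Client(base_url=ORCHARD_URL, headers={"Authorization": f"Bearer {API_KEY}"}) as client:
    # Upload the same bytes under two different tags; both tags point at one artifact.
    for tag in ("rc-v1", "rc-v2"):
        files = {"file": ("demo.bin", io.BytesIO(content), "application/octet-stream")}
        client.post(f"/api/v1/project/{PROJECT}/{PACKAGE}/upload", files=files, data={"tag": tag})

    # The artifact record is keyed by content hash and tracks how many references exist.
    assert client.get(f"/api/v1/artifact/{sha256}").json()["ref_count"] == 2

    # Deleting one tag drops the count by one; deleting the last reference leaves it at zero.
    client.delete(f"/api/v1/project/{PROJECT}/{PACKAGE}/tags/rc-v1")
    assert client.get(f"/api/v1/artifact/{sha256}").json()["ref_count"] == 1
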
@@ -47,7 +47,7 @@ class TestUploadBasics:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project_name, package_name, content, tag="v1"
|
||||
integration_client, project_name, package_name, content, version="v1"
|
||||
)
|
||||
|
||||
assert result["artifact_id"] == expected_hash
|
||||
@@ -116,31 +116,23 @@ class TestUploadBasics:
|
||||
assert result["created_at"] is not None
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_upload_without_tag_succeeds(self, integration_client, test_package):
|
||||
"""Test upload without tag succeeds (no tag created)."""
|
||||
def test_upload_without_version_succeeds(self, integration_client, test_package):
|
||||
"""Test upload without version succeeds (no version created)."""
|
||||
project, package = test_package
|
||||
content = b"upload without tag test"
|
||||
content = b"upload without version test"
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
files = {"file": ("no_tag.bin", io.BytesIO(content), "application/octet-stream")}
|
||||
files = {"file": ("no_version.bin", io.BytesIO(content), "application/octet-stream")}
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
# No tag parameter
|
||||
# No version parameter
|
||||
)
|
||||
assert response.status_code == 200
|
||||
result = response.json()
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
# Verify no tag was created - list tags and check
|
||||
tags_response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/tags"
|
||||
)
|
||||
assert tags_response.status_code == 200
|
||||
tags = tags_response.json()
|
||||
# Filter for tags pointing to this artifact
|
||||
artifact_tags = [t for t in tags.get("items", tags) if t.get("artifact_id") == expected_hash]
|
||||
assert len(artifact_tags) == 0, "Tag should not be created when not specified"
|
||||
# Version should be None when not specified
|
||||
assert result.get("version") is None
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_upload_creates_artifact_in_database(self, integration_client, test_package):
|
||||
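
A rough sketch of the version-based upload flow this hunk moves to, assuming httpx against a running instance; the base URL, key, and project/package names are placeholders.

import hashlib
import io

import httpx

ORCHARD_URL = "https://orchard.example.internal"   # placeholder
API_KEY = "changeme"                                # placeholder
PROJECT, PACKAGE = "demo-project", "demo-package"   # assumed to exist already

payload = b"example payload"

with httpx.Client(base_url=ORCHARD_URL, headers={"Authorization": f"Bearer {API_KEY}"}) as client:
    # Upload with an explicit version: the response echoes the version back.
    files = {"file": ("app.bin", io.BytesIO(payload), "application/octet-stream")}
    with_version = client.post(
        f"/api/v1/project/{PROJECT}/{PACKAGE}/upload",
        files=files,
        data={"version": "1.0.0"},
    ).json()
    assert with_version["version"] == "1.0.0"
    assert with_version["artifact_id"] == hashlib.sha256(payload).hexdigest()

    # Upload without a version: the artifact is stored, but no version record is created.
    other = b"another payload"
    files = {"file": ("loose.bin", io.BytesIO(other), "application/octet-stream")}
    without_version = client.post(
        f"/api/v1/project/{PROJECT}/{PACKAGE}/upload", files=files
    ).json()
    assert without_version.get("version") is None
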
@@ -172,25 +164,29 @@ class TestUploadBasics:
|
||||
assert s3_object_exists(expected_hash), "S3 object should exist after upload"
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_upload_with_tag_creates_tag_record(self, integration_client, test_package):
|
||||
"""Test upload with tag creates tag record."""
|
||||
def test_upload_with_version_creates_version_record(self, integration_client, test_package):
|
||||
"""Test upload with version creates version record."""
|
||||
project, package = test_package
|
||||
content = b"tag creation test"
|
||||
content = b"version creation test"
|
||||
expected_hash = compute_sha256(content)
|
||||
tag_name = "my-tag-v1"
|
||||
version_name = "1.0.0"
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag=tag_name
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, version=version_name
|
||||
)
|
||||
|
||||
# Verify tag exists
|
||||
tags_response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/tags"
|
||||
# Verify version was created
|
||||
assert result.get("version") == version_name
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
# Verify version exists in versions list
|
||||
versions_response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/versions"
|
||||
)
|
||||
assert tags_response.status_code == 200
|
||||
tags = tags_response.json()
|
||||
tag_names = [t["name"] for t in tags.get("items", tags)]
|
||||
assert tag_name in tag_names
|
||||
assert versions_response.status_code == 200
|
||||
versions = versions_response.json()
|
||||
version_names = [v["version"] for v in versions.get("items", [])]
|
||||
assert version_name in version_names
|
||||
|
||||
|
||||
class TestDuplicateUploads:
|
||||
@@ -207,36 +203,44 @@ class TestDuplicateUploads:
|
||||
|
||||
# First upload
|
||||
result1 = upload_test_file(
|
||||
integration_client, project, package, content, tag="first"
|
||||
integration_client, project, package, content, version="first"
|
||||
)
|
||||
assert result1["artifact_id"] == expected_hash
|
||||
|
||||
# Second upload
|
||||
result2 = upload_test_file(
|
||||
integration_client, project, package, content, tag="second"
|
||||
integration_client, project, package, content, version="second"
|
||||
)
|
||||
assert result2["artifact_id"] == expected_hash
|
||||
assert result1["artifact_id"] == result2["artifact_id"]
|
||||
|
||||
@pytest.mark.integration
def test_same_file_twice_increments_ref_count(
def test_same_file_twice_returns_existing_version(
self, integration_client, test_package
):
"""Test uploading same file twice increments ref_count to 2."""
"""Test uploading same file twice in same package returns existing version.

Same artifact can only have one version per package. Uploading the same content
with a different version name returns the existing version, not a new one.
ref_count stays at 1 because there's still only one PackageVersion reference.
"""
project, package = test_package
content = b"content for ref count increment test"

# First upload
result1 = upload_test_file(
integration_client, project, package, content, tag="v1"
integration_client, project, package, content, version="v1"
)
assert result1["ref_count"] == 1

# Second upload
# Second upload with different version name returns existing version
result2 = upload_test_file(
integration_client, project, package, content, tag="v2"
integration_client, project, package, content, version="v2"
)
assert result2["ref_count"] == 2
# Same artifact, same package = same version returned, ref_count stays 1
assert result2["ref_count"] == 1
assert result2["deduplicated"] is True
assert result1["version"] == result2["version"] # Both return "v1"
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_same_file_different_packages_shares_artifact(
|
||||
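
A short sketch of the deduplication rules described in the docstring above: one version per artifact per package, with the artifact shared across packages. The base URL, key, project, and package names are placeholders; the cross-package ref_count value follows the concurrent-upload test later in this diff.

import io

import httpx

ORCHARD_URL = "https://orchard.example.internal"    # placeholder
API_KEY = "changeme"                                 # placeholder
PROJECT = "demo-project"                             # assumed to exist
PKG_A, PKG_B = "demo-lib-a", "demo-lib-b"            # assumed to exist

content = b"identical bytes uploaded twice"

def upload(client, package, version):
    files = {"file": ("dup.bin", io.BytesIO(content), "application/octet-stream")}
    return client.post(
        f"/api/v1/project/{PROJECT}/{package}/upload",
        files=files,
        data={"version": version},
    ).json()

with httpx.Client(base_url=ORCHARD_URL, headers={"Authorization": f"Bearer {API_KEY}"}) as client:
    first = upload(client, PKG_A, "v1")
    # Same bytes, same package: the existing version is returned, not a second one.
    again = upload(client, PKG_A, "v2")
    assert again["deduplicated"] is True
    assert again["version"] == first["version"]      # still "v1"
    assert again["ref_count"] == 1

    # Same bytes, different package: the artifact is shared and gains a second
    # reference (one PackageVersion per package).
    other_pkg = upload(client, PKG_B, "v1")
    assert other_pkg["deduplicated"] is True
    assert other_pkg["ref_count"] == 2
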
@@ -261,12 +265,12 @@ class TestDuplicateUploads:
|
||||
)
|
||||
|
||||
# Upload to first package
|
||||
result1 = upload_test_file(integration_client, project, pkg1, content, tag="v1")
|
||||
result1 = upload_test_file(integration_client, project, pkg1, content, version="v1")
|
||||
assert result1["artifact_id"] == expected_hash
|
||||
assert result1["deduplicated"] is False
|
||||
|
||||
# Upload to second package
|
||||
result2 = upload_test_file(integration_client, project, pkg2, content, tag="v1")
|
||||
result2 = upload_test_file(integration_client, project, pkg2, content, version="v1")
|
||||
assert result2["artifact_id"] == expected_hash
|
||||
assert result2["deduplicated"] is True
|
||||
|
||||
@@ -286,7 +290,7 @@ class TestDuplicateUploads:
|
||||
package,
|
||||
content,
|
||||
filename="file1.bin",
|
||||
tag="v1",
|
||||
version="v1",
|
||||
)
|
||||
assert result1["artifact_id"] == expected_hash
|
||||
|
||||
@@ -297,7 +301,7 @@ class TestDuplicateUploads:
|
||||
package,
|
||||
content,
|
||||
filename="file2.bin",
|
||||
tag="v2",
|
||||
version="v2",
|
||||
)
|
||||
assert result2["artifact_id"] == expected_hash
|
||||
assert result2["deduplicated"] is True
|
||||
@@ -307,17 +311,17 @@ class TestDownload:
|
||||
"""Tests for download functionality."""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_download_by_tag(self, integration_client, test_package):
|
||||
"""Test downloading artifact by tag name."""
|
||||
def test_download_by_version(self, integration_client, test_package):
|
||||
"""Test downloading artifact by version."""
|
||||
project, package = test_package
|
||||
original_content = b"download by tag test"
|
||||
original_content = b"download by version test"
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, original_content, tag="download-tag"
|
||||
integration_client, project, package, original_content, version="1.0.0"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/download-tag",
|
||||
f"/api/v1/project/{project}/{package}/+/1.0.0",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
@@ -340,29 +344,29 @@ class TestDownload:
|
||||
assert response.content == original_content
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_download_by_tag_prefix(self, integration_client, test_package):
|
||||
"""Test downloading artifact using tag: prefix."""
|
||||
def test_download_by_version_prefix(self, integration_client, test_package):
|
||||
"""Test downloading artifact using version: prefix."""
|
||||
project, package = test_package
|
||||
original_content = b"download by tag prefix test"
|
||||
original_content = b"download by version prefix test"
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, original_content, tag="prefix-tag"
|
||||
integration_client, project, package, original_content, version="2.0.0"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/tag:prefix-tag",
|
||||
f"/api/v1/project/{project}/{package}/+/version:2.0.0",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.content == original_content
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_download_nonexistent_tag(self, integration_client, test_package):
|
||||
"""Test downloading nonexistent tag returns 404."""
|
||||
def test_download_nonexistent_version(self, integration_client, test_package):
|
||||
"""Test downloading nonexistent version returns 404."""
|
||||
project, package = test_package
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/nonexistent-tag"
|
||||
f"/api/v1/project/{project}/{package}/+/nonexistent-version"
|
||||
)
|
||||
assert response.status_code == 404
|
||||
|
||||
@@ -400,7 +404,7 @@ class TestDownload:
|
||||
original_content = b"exact content verification test data 12345"
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, original_content, tag="verify"
|
||||
integration_client, project, package, original_content, version="verify"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -421,7 +425,7 @@ class TestDownloadHeaders:
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename="test.txt", tag="content-type-test"
|
||||
filename="test.txt", version="content-type-test"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -440,7 +444,7 @@ class TestDownloadHeaders:
|
||||
expected_length = len(content)
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag="content-length-test"
|
||||
integration_client, project, package, content, version="content-length-test"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -460,7 +464,7 @@ class TestDownloadHeaders:
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
filename=filename, tag="disposition-test"
|
||||
filename=filename, version="disposition-test"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -481,7 +485,7 @@ class TestDownloadHeaders:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag="checksum-headers"
|
||||
integration_client, project, package, content, version="checksum-headers"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -501,7 +505,7 @@ class TestDownloadHeaders:
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
upload_test_file(
|
||||
integration_client, project, package, content, tag="etag-test"
|
||||
integration_client, project, package, content, version="etag-test"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -519,17 +523,31 @@ class TestConcurrentUploads:
"""Tests for concurrent upload handling."""

@pytest.mark.integration
def test_concurrent_uploads_same_file(self, integration_client, test_package):
"""Test concurrent uploads of same file handle deduplication correctly."""
project, package = test_package
def test_concurrent_uploads_same_file(self, integration_client, test_project, unique_test_id):
"""Test concurrent uploads of same file to different packages handle deduplication correctly.

Same artifact can only have one version per package, so we create multiple packages
to test that concurrent uploads to different packages correctly increment ref_count.
"""
content = b"content for concurrent upload test"
|
||||
expected_hash = compute_sha256(content)
|
||||
num_concurrent = 5
|
||||
|
||||
# Create packages for each concurrent upload
|
||||
packages = []
|
||||
for i in range(num_concurrent):
|
||||
pkg_name = f"concurrent-pkg-{unique_test_id}-{i}"
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/packages",
|
||||
json={"name": pkg_name},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
packages.append(pkg_name)
|
||||
|
||||
# Create an API key for worker threads
|
||||
api_key_response = integration_client.post(
|
||||
"/api/v1/auth/keys",
|
||||
json={"name": "concurrent-test-key"},
|
||||
json={"name": f"concurrent-test-key-{unique_test_id}"},
|
||||
)
|
||||
assert api_key_response.status_code == 200, f"Failed to create API key: {api_key_response.text}"
|
||||
api_key = api_key_response.json()["key"]
|
||||
@@ -537,7 +555,7 @@ class TestConcurrentUploads:
|
||||
results = []
|
||||
errors = []
|
||||
|
||||
def upload_worker(tag_suffix):
|
||||
def upload_worker(idx):
|
||||
try:
|
||||
from httpx import Client
|
||||
|
||||
@@ -545,15 +563,15 @@ class TestConcurrentUploads:
|
||||
with Client(base_url=base_url, timeout=30.0) as client:
|
||||
files = {
|
||||
"file": (
|
||||
f"concurrent-{tag_suffix}.bin",
|
||||
f"concurrent-{idx}.bin",
|
||||
io.BytesIO(content),
|
||||
"application/octet-stream",
|
||||
)
|
||||
}
|
||||
response = client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
f"/api/v1/project/{test_project}/{packages[idx]}/upload",
|
||||
files=files,
|
||||
data={"tag": f"concurrent-{tag_suffix}"},
|
||||
data={"version": "1.0.0"},
|
||||
headers={"Authorization": f"Bearer {api_key}"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
@@ -576,7 +594,7 @@ class TestConcurrentUploads:
|
||||
assert len(artifact_ids) == 1
|
||||
assert expected_hash in artifact_ids
|
||||
|
||||
# Verify final ref_count
|
||||
# Verify final ref_count equals number of packages
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||
assert response.status_code == 200
|
||||
assert response.json()["ref_count"] == num_concurrent
|
||||
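
One way to drive the same kind of concurrent upload outside pytest, sketched with a thread pool. The endpoint and form fields match the test above; the URL, key, and package names are placeholders and the packages are assumed to exist already.

import io
from concurrent.futures import ThreadPoolExecutor

import httpx

ORCHARD_URL = "https://orchard.example.internal"        # placeholder
API_KEY = "changeme"                                     # placeholder
PROJECT = "demo-project"                                 # assumed to exist
PACKAGES = [f"concurrent-pkg-{i}" for i in range(5)]     # assumed to exist already

content = b"content for concurrent upload sketch"

def upload_to(package: str) -> str:
    # Each worker gets its own client; connections are not shared across threads here.
    with httpx.Client(base_url=ORCHARD_URL, timeout=30.0,
                      headers={"Authorization": f"Bearer {API_KEY}"}) as client:
        files = {"file": (f"{package}.bin", io.BytesIO(content), "application/octet-stream")}
        response = client.post(
            f"/api/v1/project/{PROJECT}/{package}/upload",
            files=files,
            data={"version": "1.0.0"},
        )
        response.raise_for_status()
        return response.json()["artifact_id"]

with ThreadPoolExecutor(max_workers=len(PACKAGES)) as pool:
    artifact_ids = set(pool.map(upload_to, PACKAGES))

# Every concurrent upload deduplicates onto the same content-addressed artifact.
assert len(artifact_ids) == 1
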
@@ -605,7 +623,7 @@ class TestFileSizeValidation:
|
||||
content = b"X"
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="tiny"
|
||||
integration_client, project, package, content, version="tiny"
|
||||
)
|
||||
|
||||
assert result["artifact_id"] is not None
|
||||
@@ -621,7 +639,7 @@ class TestFileSizeValidation:
|
||||
expected_size = len(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="size-test"
|
||||
integration_client, project, package, content, version="size-test"
|
||||
)
|
||||
|
||||
assert result["size"] == expected_size
|
||||
@@ -649,7 +667,7 @@ class TestUploadFailureCleanup:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/nonexistent-project-{unique_test_id}/nonexistent-pkg/upload",
|
||||
files=files,
|
||||
data={"tag": "test"},
|
||||
data={"version": "test"},
|
||||
)
|
||||
|
||||
assert response.status_code == 404
|
||||
@@ -672,7 +690,7 @@ class TestUploadFailureCleanup:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
|
||||
files=files,
|
||||
data={"tag": "test"},
|
||||
data={"version": "test"},
|
||||
)
|
||||
|
||||
assert response.status_code == 404
|
||||
@@ -693,7 +711,7 @@ class TestUploadFailureCleanup:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
|
||||
files=files,
|
||||
data={"tag": "test"},
|
||||
data={"version": "test"},
|
||||
)
|
||||
|
||||
assert response.status_code == 404
|
||||
@@ -719,7 +737,7 @@ class TestS3StorageVerification:
|
||||
|
||||
# Upload same content multiple times
|
||||
for tag in ["s3test1", "s3test2", "s3test3"]:
|
||||
upload_test_file(integration_client, project, package, content, tag=tag)
|
||||
upload_test_file(integration_client, project, package, content, version=tag)
|
||||
|
||||
# Verify only one S3 object exists
|
||||
s3_objects = list_s3_objects_by_hash(expected_hash)
|
||||
@@ -735,16 +753,26 @@ class TestS3StorageVerification:
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_artifact_table_single_row_after_duplicates(
|
||||
self, integration_client, test_package
|
||||
self, integration_client, test_project, unique_test_id
|
||||
):
|
||||
"""Test artifact table contains only one row after duplicate uploads."""
|
||||
project, package = test_package
|
||||
"""Test artifact table contains only one row after duplicate uploads to different packages.
|
||||
|
||||
Same artifact can only have one version per package, so we create multiple packages
|
||||
to test deduplication across packages.
|
||||
"""
|
||||
content = b"content for single row test"
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
# Upload same content multiple times
|
||||
for tag in ["v1", "v2", "v3"]:
|
||||
upload_test_file(integration_client, project, package, content, tag=tag)
|
||||
# Create 3 packages and upload same content to each
|
||||
for i in range(3):
|
||||
pkg_name = f"single-row-pkg-{unique_test_id}-{i}"
|
||||
integration_client.post(
|
||||
f"/api/v1/project/{test_project}/packages",
|
||||
json={"name": pkg_name},
|
||||
)
|
||||
upload_test_file(
|
||||
integration_client, test_project, pkg_name, content, version="1.0.0"
|
||||
)
|
||||
|
||||
# Query artifact
|
||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||
@@ -783,7 +811,7 @@ class TestSecurityPathTraversal:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": "traversal-test"},
|
||||
data={"version": "traversal-test"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
result = response.json()
|
||||
@@ -801,48 +829,16 @@ class TestSecurityPathTraversal:
|
||||
assert response.status_code in [400, 404, 422]
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_path_traversal_in_tag_name(self, integration_client, test_package):
|
||||
"""Test tag names with path traversal are handled safely."""
|
||||
def test_path_traversal_in_version_name(self, integration_client, test_package):
|
||||
"""Test version names with path traversal are handled safely."""
|
||||
project, package = test_package
|
||||
content = b"tag traversal test"
|
||||
content = b"version traversal test"
|
||||
|
||||
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": "../../../etc/passwd"},
|
||||
)
|
||||
assert response.status_code in [200, 400, 422]
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_download_path_traversal_in_ref(self, integration_client, test_package):
|
||||
"""Test download ref with path traversal is rejected."""
|
||||
project, package = test_package
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/../../../etc/passwd"
|
||||
)
|
||||
assert response.status_code in [400, 404, 422]
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_path_traversal_in_package_name(self, integration_client, test_project):
|
||||
"""Test package names with path traversal sequences are rejected."""
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{test_project}/packages/../../../etc/passwd"
|
||||
)
|
||||
assert response.status_code in [400, 404, 422]
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_path_traversal_in_tag_name(self, integration_client, test_package):
|
||||
"""Test tag names with path traversal are rejected or handled safely."""
|
||||
project, package = test_package
|
||||
content = b"tag traversal test"
|
||||
|
||||
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": "../../../etc/passwd"},
|
||||
data={"version": "../../../etc/passwd"},
|
||||
)
|
||||
assert response.status_code in [200, 400, 422]
|
||||
|
||||
@@ -867,7 +863,7 @@ class TestSecurityMalformedRequests:
|
||||
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
data={"tag": "no-file"},
|
||||
data={"version": "no-file"},
|
||||
)
|
||||
assert response.status_code == 422
|
||||
|
||||
|
||||
@@ -39,31 +39,6 @@ class TestVersionCreation:
|
||||
assert result.get("version") == "1.0.0"
|
||||
assert result.get("version_source") == "explicit"
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_upload_with_version_and_tag(self, integration_client, test_package):
|
||||
"""Test upload with both version and tag creates both records."""
|
||||
project, package = test_package
|
||||
content = b"version and tag test"
|
||||
|
||||
files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")}
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"version": "2.0.0", "tag": "latest"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
result = response.json()
|
||||
assert result.get("version") == "2.0.0"
|
||||
|
||||
# Verify tag was also created
|
||||
tags_response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/tags"
|
||||
)
|
||||
assert tags_response.status_code == 200
|
||||
tags = tags_response.json()
|
||||
tag_names = [t["name"] for t in tags.get("items", tags)]
|
||||
assert "latest" in tag_names
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_duplicate_version_same_content_succeeds(self, integration_client, test_package):
|
||||
"""Test uploading same version with same content succeeds (deduplication)."""
|
||||
@@ -262,11 +237,10 @@ class TestDownloadByVersion:
|
||||
assert response.status_code == 404
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_version_resolution_priority(self, integration_client, test_package):
|
||||
"""Test that version: prefix explicitly resolves to version, not tag."""
|
||||
def test_version_resolution_with_prefix(self, integration_client, test_package):
|
||||
"""Test that version: prefix explicitly resolves to version."""
|
||||
project, package = test_package
|
||||
version_content = b"this is the version content"
|
||||
tag_content = b"this is the tag content"
|
||||
|
||||
# Create a version 6.0.0
|
||||
files1 = {"file": ("app-v.tar.gz", io.BytesIO(version_content), "application/octet-stream")}
|
||||
@@ -276,14 +250,6 @@ class TestDownloadByVersion:
|
||||
data={"version": "6.0.0"},
|
||||
)
|
||||
|
||||
# Create a tag named "6.0.0" pointing to different content
|
||||
files2 = {"file": ("app-t.tar.gz", io.BytesIO(tag_content), "application/octet-stream")}
|
||||
integration_client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files2,
|
||||
data={"tag": "6.0.0"},
|
||||
)
|
||||
|
||||
# Download with version: prefix should get version content
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/version:6.0.0",
|
||||
@@ -292,14 +258,6 @@ class TestDownloadByVersion:
|
||||
assert response.status_code == 200
|
||||
assert response.content == version_content
|
||||
|
||||
# Download with tag: prefix should get tag content
|
||||
response2 = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/tag:6.0.0",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
assert response2.status_code == 200
|
||||
assert response2.content == tag_content
|
||||
|
||||
|
||||
class TestVersionDeletion:
|
||||
"""Tests for deleting versions."""
|
||||
|
||||
@@ -27,11 +27,9 @@ class TestVersionCreation:
|
||||
project_name,
|
||||
package_name,
|
||||
b"version create test",
|
||||
tag="latest",
|
||||
version="1.0.0",
|
||||
)
|
||||
|
||||
assert result["tag"] == "latest"
|
||||
assert result["version"] == "1.0.0"
|
||||
assert result["version_source"] == "explicit"
|
||||
assert result["artifact_id"]
|
||||
@@ -149,7 +147,6 @@ class TestVersionCRUD:
|
||||
package_name,
|
||||
b"version with info",
|
||||
version="1.0.0",
|
||||
tag="release",
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -166,8 +163,6 @@ class TestVersionCRUD:
|
||||
assert version_item is not None
|
||||
assert "size" in version_item
|
||||
assert "artifact_id" in version_item
|
||||
assert "tags" in version_item
|
||||
assert "release" in version_item["tags"]
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_get_version(self, integration_client, test_package):
|
||||
@@ -272,94 +267,9 @@ class TestVersionDownload:
|
||||
follow_redirects=False,
|
||||
)
|
||||
|
||||
# Should resolve version first (before tag)
|
||||
# Should resolve version
|
||||
assert response.status_code in [200, 302, 307]
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_version_takes_precedence_over_tag(self, integration_client, test_package):
|
||||
"""Test that version is checked before tag when resolving refs."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
# Upload with version "1.0"
|
||||
version_result = upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"version content",
|
||||
version="1.0",
|
||||
)
|
||||
|
||||
# Create a tag with the same name "1.0" pointing to different artifact
|
||||
tag_result = upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"tag content different",
|
||||
tag="1.0",
|
||||
)
|
||||
|
||||
# Download by "1.0" should resolve to version, not tag
|
||||
# Since version:1.0 artifact was uploaded first
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/1.0",
|
||||
follow_redirects=False,
|
||||
)
|
||||
|
||||
assert response.status_code in [200, 302, 307]
|
||||
|
||||
|
||||
class TestTagVersionEnrichment:
|
||||
"""Tests for tag responses including version information."""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_tag_response_includes_version(self, integration_client, test_package):
|
||||
"""Test that tag responses include version of the artifact."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
# Upload with both version and tag
|
||||
upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"enriched tag test",
|
||||
version="7.0.0",
|
||||
tag="stable",
|
||||
)
|
||||
|
||||
# Get tag and check version field
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags/stable"
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert data["name"] == "stable"
|
||||
assert data["version"] == "7.0.0"
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_tag_list_includes_versions(self, integration_client, test_package):
|
||||
"""Test that tag list responses include version for each tag."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
upload_test_file(
|
||||
integration_client,
|
||||
project_name,
|
||||
package_name,
|
||||
b"list version test",
|
||||
version="8.0.0",
|
||||
tag="latest",
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/tags"
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
tag_item = next((t for t in data["items"] if t["name"] == "latest"), None)
|
||||
assert tag_item is not None
|
||||
assert tag_item.get("version") == "8.0.0"
|
||||
|
||||
|
||||
class TestVersionPagination:
|
||||
"""Tests for version listing pagination and sorting."""
|
||||
|
||||
@@ -39,7 +39,7 @@ class TestDependencySchema:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"tag": f"v1.0.0-{unique_test_id}"},
|
||||
data={"version": f"v1.0.0-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -59,29 +59,17 @@ class TestDependencySchema:
integration_client.delete(f"/api/v1/projects/{dep_project_name}")

@pytest.mark.integration
def test_dependency_requires_version_or_tag(self, integration_client):
"""Test that dependency must have either version or tag, not both or neither."""
def test_dependency_requires_version(self, integration_client):
"""Test that dependency requires version."""
from app.schemas import DependencyCreate

# Test: neither version nor tag
with pytest.raises(ValidationError) as exc_info:
# Test: missing version
with pytest.raises(ValidationError):
DependencyCreate(project="proj", package="pkg")
assert "Either 'version' or 'tag' must be specified" in str(exc_info.value)

# Test: both version and tag
with pytest.raises(ValidationError) as exc_info:
DependencyCreate(project="proj", package="pkg", version="1.0.0", tag="stable")
assert "Cannot specify both 'version' and 'tag'" in str(exc_info.value)

# Test: valid with version
dep = DependencyCreate(project="proj", package="pkg", version="1.0.0")
assert dep.version == "1.0.0"
assert dep.tag is None

# Test: valid with tag
dep = DependencyCreate(project="proj", package="pkg", tag="stable")
assert dep.tag == "stable"
assert dep.version is None
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_dependency_unique_constraint(
|
||||
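
A hedged sketch of an upload carrying an orchard.ensure file after this change, where every dependency entry pins a version and "tag" is no longer accepted. The multipart part names and content types come from the tests; the URL, key, and project/package names are placeholders, and PyYAML is used as in the tests.

import io

import httpx
import yaml

ORCHARD_URL = "https://orchard.example.internal"   # placeholder
API_KEY = "changeme"                                # placeholder
PROJECT, PACKAGE = "demo-project", "demo-app"       # assumed to exist

# Every dependency must now pin a version.
ensure_doc = {
    "dependencies": [
        {"project": "shared-libs", "package": "lib-a", "version": "1.0.0"},
        {"project": "shared-libs", "package": "lib-b", "version": "2.0.0"},
    ]
}

artifact = b"application build output"
files = {
    "file": ("app.tar.gz", io.BytesIO(artifact), "application/gzip"),
    "ensure": ("orchard.ensure", io.BytesIO(yaml.dump(ensure_doc).encode()), "application/x-yaml"),
}

with httpx.Client(base_url=ORCHARD_URL, headers={"Authorization": f"Bearer {API_KEY}"}) as client:
    response = client.post(
        f"/api/v1/project/{PROJECT}/{PACKAGE}/upload",
        files=files,
        data={"version": "3.0.0"},
    )
    response.raise_for_status()
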
@@ -126,7 +114,7 @@ class TestEnsureFileParsing:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"tag": f"v1.0.0-{unique_test_id}"},
|
||||
data={"version": f"v1.0.0-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
@@ -162,7 +150,7 @@ class TestEnsureFileParsing:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"tag": f"v1.0.0-{unique_test_id}"},
|
||||
data={"version": f"v1.0.0-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 400
|
||||
assert "Invalid ensure file" in response.json().get("detail", "")
|
||||
@@ -188,7 +176,7 @@ class TestEnsureFileParsing:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"tag": f"v1.0.0-{unique_test_id}"},
|
||||
data={"version": f"v1.0.0-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 400
|
||||
assert "Project" in response.json().get("detail", "")
|
||||
@@ -208,7 +196,7 @@ class TestEnsureFileParsing:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"tag": f"v1.0.0-nodeps-{unique_test_id}"},
|
||||
data={"version": f"v1.0.0-nodeps-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -226,13 +214,14 @@ class TestEnsureFileParsing:
|
||||
assert response.status_code == 200
|
||||
|
||||
try:
|
||||
# Test with missing version field (version is now required)
|
||||
ensure_content = yaml.dump({
|
||||
"dependencies": [
|
||||
{"project": dep_project_name, "package": "pkg", "version": "1.0.0", "tag": "stable"}
|
||||
{"project": dep_project_name, "package": "pkg"} # Missing version
|
||||
]
|
||||
})
|
||||
|
||||
content = unique_content("test-both", unique_test_id, "constraint")
|
||||
content = unique_content("test-missing-version", unique_test_id, "constraint")
|
||||
files = {
|
||||
"file": ("test.tar.gz", BytesIO(content), "application/gzip"),
|
||||
"ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
|
||||
@@ -240,11 +229,10 @@ class TestEnsureFileParsing:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"tag": f"v1.0.0-{unique_test_id}"},
|
||||
data={"version": f"v1.0.0-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 400
|
||||
assert "both" in response.json().get("detail", "").lower() or \
|
||||
"version" in response.json().get("detail", "").lower()
|
||||
assert "version" in response.json().get("detail", "").lower()
|
||||
finally:
|
||||
integration_client.delete(f"/api/v1/projects/{dep_project_name}")
|
||||
|
||||
@@ -271,7 +259,7 @@ class TestDependencyQueryEndpoints:
|
||||
ensure_content = yaml.dump({
|
||||
"dependencies": [
|
||||
{"project": dep_project_name, "package": "lib-a", "version": "1.0.0"},
|
||||
{"project": dep_project_name, "package": "lib-b", "tag": "stable"},
|
||||
{"project": dep_project_name, "package": "lib-b", "version": "2.0.0"},
|
||||
]
|
||||
})
|
||||
|
||||
@@ -283,7 +271,7 @@ class TestDependencyQueryEndpoints:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"tag": f"v2.0.0-{unique_test_id}"},
|
||||
data={"version": f"v2.0.0-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
artifact_id = response.json()["artifact_id"]
|
||||
@@ -299,10 +287,8 @@ class TestDependencyQueryEndpoints:
|
||||
deps = {d["package"]: d for d in data["dependencies"]}
|
||||
assert "lib-a" in deps
|
||||
assert deps["lib-a"]["version"] == "1.0.0"
|
||||
assert deps["lib-a"]["tag"] is None
|
||||
assert "lib-b" in deps
|
||||
assert deps["lib-b"]["tag"] == "stable"
|
||||
assert deps["lib-b"]["version"] is None
|
||||
assert deps["lib-b"]["version"] == "2.0.0"
|
||||
|
||||
finally:
|
||||
integration_client.delete(f"/api/v1/projects/{dep_project_name}")
|
||||
@@ -336,7 +322,7 @@ class TestDependencyQueryEndpoints:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"tag": tag_name},
|
||||
data={"version": tag_name},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -381,7 +367,7 @@ class TestDependencyQueryEndpoints:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{dep_project_name}/target-lib/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -400,7 +386,7 @@ class TestDependencyQueryEndpoints:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"tag": f"v4.0.0-{unique_test_id}"},
|
||||
data={"version": f"v4.0.0-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -419,7 +405,6 @@ class TestDependencyQueryEndpoints:
|
||||
for dep in data["dependents"]:
|
||||
if dep["project"] == project_name:
|
||||
found = True
|
||||
assert dep["constraint_type"] == "version"
|
||||
assert dep["constraint_value"] == "1.0.0"
|
||||
break
|
||||
assert found, "Our package should be in the dependents list"
|
||||
@@ -442,7 +427,7 @@ class TestDependencyQueryEndpoints:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"tag": f"v5.0.0-nodeps-{unique_test_id}"},
|
||||
data={"version": f"v5.0.0-nodeps-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
artifact_id = response.json()["artifact_id"]
|
||||
@@ -482,7 +467,7 @@ class TestDependencyResolution:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_c}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -500,7 +485,7 @@ class TestDependencyResolution:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -518,7 +503,7 @@ class TestDependencyResolution:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -566,7 +551,7 @@ class TestDependencyResolution:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_d}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -584,7 +569,7 @@ class TestDependencyResolution:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -602,7 +587,7 @@ class TestDependencyResolution:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_c}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -621,7 +606,7 @@ class TestDependencyResolution:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -663,7 +648,7 @@ class TestDependencyResolution:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"tag": f"solo-{unique_test_id}"},
|
||||
data={"version": f"solo-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -698,17 +683,21 @@ class TestDependencyResolution:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"tag": f"missing-dep-{unique_test_id}"},
|
||||
data={"version": f"missing-dep-{unique_test_id}"},
|
||||
)
|
||||
# Should fail at upload time since package doesn't exist
|
||||
# OR succeed at upload but fail at resolution
|
||||
# Depending on implementation choice
|
||||
if response.status_code == 200:
|
||||
# Resolution should fail
|
||||
# Resolution should return missing dependencies
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/missing-dep-{unique_test_id}/resolve"
|
||||
)
|
||||
assert response.status_code == 404
|
||||
# Expect 200 with missing dependencies listed
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
# The missing dependency should be in the 'missing' list
|
||||
assert len(data.get("missing", [])) >= 1
|
||||
|
||||
|
||||
class TestCircularDependencyDetection:
|
||||
@@ -736,7 +725,7 @@ class TestCircularDependencyDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -754,7 +743,7 @@ class TestCircularDependencyDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -772,7 +761,7 @@ class TestCircularDependencyDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||
files=files,
|
||||
data={"tag": "2.0.0"},
|
||||
data={"version": "2.0.0"},
|
||||
)
|
||||
# Should be rejected with 400 (circular dependency)
|
||||
assert response.status_code == 400
|
||||
@@ -807,7 +796,7 @@ class TestCircularDependencyDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -825,7 +814,7 @@ class TestCircularDependencyDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -843,7 +832,7 @@ class TestCircularDependencyDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_c}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -861,7 +850,7 @@ class TestCircularDependencyDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||
files=files,
|
||||
data={"tag": "2.0.0"},
|
||||
data={"version": "2.0.0"},
|
||||
)
|
||||
assert response.status_code == 400
|
||||
data = response.json()
|
||||
@@ -884,10 +873,14 @@ class TestCircularDependencyDetection:
|
||||
|
||||
|
||||
class TestConflictDetection:
"""Tests for #81: Dependency Conflict Detection and Reporting"""
"""Tests for dependency conflict handling.

The resolver uses "first version wins" strategy for version conflicts,
allowing resolution to succeed rather than failing with an error.
"""

@pytest.mark.integration
def test_detect_version_conflict(
def test_version_conflict_uses_first_version(
self, integration_client, test_project, unique_test_id
):
"""Test conflict when two deps require different versions of same package."""
@@ -910,7 +903,7 @@ class TestConflictDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_common}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -920,7 +913,7 @@ class TestConflictDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_common}/upload",
|
||||
files=files,
|
||||
data={"tag": "2.0.0"},
|
||||
data={"version": "2.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -938,7 +931,7 @@ class TestConflictDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_lib_a}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -956,7 +949,7 @@ class TestConflictDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_lib_b}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -975,25 +968,23 @@ class TestConflictDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_app}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Try to resolve app - should report conflict
|
||||
# Try to resolve app - with lenient conflict handling, this should succeed
|
||||
# The resolver uses "first version wins" strategy for conflicting versions
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{test_project}/{pkg_app}/+/1.0.0/resolve"
|
||||
)
|
||||
assert response.status_code == 409
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
# Error details are nested in "detail" for HTTPException
|
||||
detail = data.get("detail", data)
|
||||
assert detail.get("error") == "dependency_conflict"
|
||||
assert len(detail.get("conflicts", [])) > 0
|
||||
|
||||
# Verify conflict details
|
||||
conflict = detail["conflicts"][0]
|
||||
assert conflict["package"] == pkg_common
|
||||
assert len(conflict["requirements"]) == 2
|
||||
# Resolution should succeed with first-encountered version of common
|
||||
assert data["artifact_count"] >= 1
|
||||
# Find the common package in resolved list
|
||||
common_resolved = [r for r in data["resolved"] if r["package"] == pkg_common]
|
||||
assert len(common_resolved) == 1 # Only one version should be included
|
||||
|
||||
finally:
|
||||
for pkg in [pkg_app, pkg_lib_a, pkg_lib_b, pkg_common]:
|
||||
@@ -1023,7 +1014,7 @@ class TestConflictDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_common}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -1042,7 +1033,7 @@ class TestConflictDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{lib_pkg}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -1061,7 +1052,7 @@ class TestConflictDetection:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_app}/upload",
|
||||
files=files,
|
||||
data={"tag": "1.0.0"},
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@@ -1078,3 +1069,277 @@ class TestConflictDetection:
|
||||
finally:
|
||||
for pkg in [pkg_app, pkg_lib_a, pkg_lib_b, pkg_common]:
|
||||
integration_client.delete(f"/api/v1/project/{test_project}/packages/{pkg}")
|
||||
|
||||
|
||||
class TestAutoFetchDependencies:
|
||||
"""Tests for auto-fetch functionality in dependency resolution.
|
||||
|
||||
These tests verify:
|
||||
- Resolution with auto_fetch=true (default) fetches missing dependencies from upstream
|
||||
- Resolution with auto_fetch=false skips network calls for fast resolution
|
||||
- Proper handling of missing/non-existent packages
|
||||
- Response schema includes fetched artifacts list
|
||||
"""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_resolve_auto_fetch_true_is_default(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test that auto_fetch=true is the default (no fetch needed when all deps cached)."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
# Upload a simple artifact without dependencies
|
||||
content = unique_content("autofetch-default", unique_test_id, "nodeps")
|
||||
files = {"file": ("default.tar.gz", BytesIO(content), "application/gzip")}
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"version": f"v1.0.0-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Resolve without auto_fetch param (auto_fetch defaults to true; nothing to fetch here)
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/v1.0.0-{unique_test_id}/resolve"
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Should have empty fetched list
|
||||
assert data.get("fetched", []) == []
|
||||
assert data["artifact_count"] == 1
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_resolve_auto_fetch_explicit_false(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test that auto_fetch=false works explicitly."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
content = unique_content("autofetch-explicit-false", unique_test_id, "nodeps")
|
||||
files = {"file": ("explicit.tar.gz", BytesIO(content), "application/gzip")}
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"version": f"v2.0.0-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Resolve with explicit auto_fetch=false
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/v2.0.0-{unique_test_id}/resolve",
|
||||
params={"auto_fetch": "false"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data.get("fetched", []) == []
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_resolve_auto_fetch_true_no_missing_deps(
|
||||
self, integration_client, test_project, unique_test_id
|
||||
):
|
||||
"""Test that auto_fetch=true works when all deps are already cached."""
|
||||
pkg_a = f"fetch-a-{unique_test_id}"
|
||||
pkg_b = f"fetch-b-{unique_test_id}"
|
||||
|
||||
for pkg in [pkg_a, pkg_b]:
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/packages",
|
||||
json={"name": pkg}
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
try:
|
||||
# Upload B (no deps)
|
||||
content_b = unique_content("B", unique_test_id, "fetch")
|
||||
files = {"file": ("b.tar.gz", BytesIO(content_b), "application/gzip")}
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
||||
files=files,
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Upload A (depends on B)
|
||||
ensure_a = yaml.dump({
|
||||
"dependencies": [
|
||||
{"project": test_project, "package": pkg_b, "version": "1.0.0"}
|
||||
]
|
||||
})
|
||||
content_a = unique_content("A", unique_test_id, "fetch")
|
||||
files = {
|
||||
"file": ("a.tar.gz", BytesIO(content_a), "application/gzip"),
|
||||
"ensure": ("orchard.ensure", BytesIO(ensure_a.encode()), "application/x-yaml"),
|
||||
}
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||
files=files,
|
||||
data={"version": "1.0.0"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Resolve with auto_fetch=true - should work since deps are cached
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{test_project}/{pkg_a}/+/1.0.0/resolve",
|
||||
params={"auto_fetch": "true"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Should resolve successfully
|
||||
assert data["artifact_count"] == 2
|
||||
# Nothing fetched since everything was cached
|
||||
assert len(data.get("fetched", [])) == 0
|
||||
# No missing deps
|
||||
assert len(data.get("missing", [])) == 0
|
||||
|
||||
finally:
|
||||
for pkg in [pkg_a, pkg_b]:
|
||||
integration_client.delete(f"/api/v1/project/{test_project}/packages/{pkg}")
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_resolve_missing_dep_with_auto_fetch_false(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test that missing deps are reported when auto_fetch=false."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
# Create _pypi system project if it doesn't exist
|
||||
response = integration_client.get("/api/v1/projects/_pypi")
|
||||
if response.status_code == 404:
|
||||
response = integration_client.post(
|
||||
"/api/v1/projects",
|
||||
json={"name": "_pypi", "description": "System project for PyPI packages"}
|
||||
)
|
||||
# May fail if already exists or can't create - that's ok
|
||||
|
||||
# Upload artifact with dependency on _pypi package that doesn't exist locally
|
||||
ensure_content = yaml.dump({
|
||||
"dependencies": [
|
||||
{"project": "_pypi", "package": "nonexistent-pkg-xyz123", "version": ">=1.0.0"}
|
||||
]
|
||||
})
|
||||
|
||||
content = unique_content("missing-pypi", unique_test_id, "dep")
|
||||
files = {
|
||||
"file": ("missing-pypi-dep.tar.gz", BytesIO(content), "application/gzip"),
|
||||
"ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
|
||||
}
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"version": f"v3.0.0-{unique_test_id}"},
|
||||
)
|
||||
# Upload should succeed - validation is loose for system projects
|
||||
if response.status_code == 200:
|
||||
# Resolve without auto_fetch - should report missing
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/v3.0.0-{unique_test_id}/resolve",
|
||||
params={"auto_fetch": "false"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Should have missing dependencies
|
||||
assert len(data.get("missing", [])) >= 1
|
||||
|
||||
# Verify missing dependency structure
|
||||
missing = data["missing"][0]
|
||||
assert missing["project"] == "_pypi"
|
||||
assert missing["package"] == "nonexistent-pkg-xyz123"
|
||||
# Without auto_fetch, these should be false/None
|
||||
assert missing.get("fetch_attempted", False) is False
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_resolve_response_schema_has_fetched_field(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test that the resolve response always includes the fetched field."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
content = unique_content("schema-check", unique_test_id, "nodeps")
|
||||
files = {"file": ("schema.tar.gz", BytesIO(content), "application/gzip")}
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"version": f"v4.0.0-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Check both auto_fetch modes include fetched field
|
||||
for auto_fetch in ["false", "true"]:
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/v4.0.0-{unique_test_id}/resolve",
|
||||
params={"auto_fetch": auto_fetch},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Required fields
|
||||
assert "requested" in data
|
||||
assert "resolved" in data
|
||||
assert "missing" in data
|
||||
assert "fetched" in data # New field
|
||||
assert "total_size" in data
|
||||
assert "artifact_count" in data
|
||||
|
||||
# Types
|
||||
assert isinstance(data["fetched"], list)
|
||||
assert isinstance(data["missing"], list)
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_missing_dep_schema_has_fetch_fields(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test that missing dependency entries have fetch_attempted and fetch_error fields."""
|
||||
project_name, package_name = test_package
|
||||
|
||||
# Create a dependency on a non-existent package in a real project
|
||||
dep_project_name = f"dep-test-{unique_test_id}"
|
||||
response = integration_client.post(
|
||||
"/api/v1/projects", json={"name": dep_project_name}
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
try:
|
||||
ensure_content = yaml.dump({
|
||||
"dependencies": [
|
||||
{"project": dep_project_name, "package": "nonexistent-pkg", "version": "1.0.0"}
|
||||
]
|
||||
})
|
||||
|
||||
content = unique_content("missing-schema", unique_test_id, "check")
|
||||
files = {
|
||||
"file": ("missing-schema.tar.gz", BytesIO(content), "application/gzip"),
|
||||
"ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
|
||||
}
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
files=files,
|
||||
data={"version": f"v5.0.0-{unique_test_id}"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Resolve
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/v5.0.0-{unique_test_id}/resolve",
|
||||
params={"auto_fetch": "true"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Should have missing dependencies
|
||||
assert len(data.get("missing", [])) >= 1
|
||||
|
||||
# Check schema for missing dependency
|
||||
missing = data["missing"][0]
|
||||
assert "project" in missing
|
||||
assert "package" in missing
|
||||
assert "constraint" in missing
|
||||
assert "required_by" in missing
|
||||
# New fields
|
||||
assert "fetch_attempted" in missing
|
||||
assert "fetch_error" in missing # May be None
|
||||
|
||||
finally:
|
||||
integration_client.delete(f"/api/v1/projects/{dep_project_name}")
|
||||
|
||||
@@ -26,16 +26,16 @@ def upload_test_file(integration_client):
|
||||
Factory fixture to upload a test file and return its artifact ID.
|
||||
|
||||
Usage:
|
||||
artifact_id = upload_test_file(project, package, content, tag="v1.0")
|
||||
artifact_id = upload_test_file(project, package, content, version="v1.0")
|
||||
"""
|
||||
|
||||
def _upload(project_name: str, package_name: str, content: bytes, tag: str = None):
|
||||
def _upload(project_name: str, package_name: str, content: bytes, version: str = None):
|
||||
files = {
|
||||
"file": ("test-file.bin", io.BytesIO(content), "application/octet-stream")
|
||||
}
|
||||
data = {}
|
||||
if tag:
|
||||
data["tag"] = tag
|
||||
if version:
|
||||
data["version"] = version
|
||||
|
||||
response = integration_client.post(
|
||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||
@@ -66,7 +66,7 @@ class TestDownloadChecksumHeaders:
|
||||
|
||||
# Upload file
|
||||
artifact_id = upload_test_file(
|
||||
project_name, package_name, content, tag="sha256-header-test"
|
||||
project_name, package_name, content, version="sha256-header-test"
|
||||
)
|
||||
|
||||
# Download with proxy mode
|
||||
@@ -88,7 +88,7 @@ class TestDownloadChecksumHeaders:
|
||||
content = b"Content for ETag header test"
|
||||
|
||||
artifact_id = upload_test_file(
|
||||
project_name, package_name, content, tag="etag-test"
|
||||
project_name, package_name, content, version="etag-test"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -110,7 +110,7 @@ class TestDownloadChecksumHeaders:
|
||||
content = b"Content for Digest header test"
|
||||
sha256 = hashlib.sha256(content).hexdigest()
|
||||
|
||||
upload_test_file(project_name, package_name, content, tag="digest-test")
|
||||
upload_test_file(project_name, package_name, content, version="digest-test")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/digest-test",
|
||||
@@ -137,7 +137,7 @@ class TestDownloadChecksumHeaders:
|
||||
project_name, package_name = test_package
|
||||
content = b"Content for X-Content-Length test"
|
||||
|
||||
upload_test_file(project_name, package_name, content, tag="content-length-test")
|
||||
upload_test_file(project_name, package_name, content, version="content-length-test")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/content-length-test",
|
||||
@@ -156,7 +156,7 @@ class TestDownloadChecksumHeaders:
|
||||
project_name, package_name = test_package
|
||||
content = b"Content for X-Verified false test"
|
||||
|
||||
upload_test_file(project_name, package_name, content, tag="verified-false-test")
|
||||
upload_test_file(project_name, package_name, content, version="verified-false-test")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/verified-false-test",
|
||||
@@ -184,7 +184,7 @@ class TestPreVerificationMode:
|
||||
project_name, package_name = test_package
|
||||
content = b"Content for pre-verification success test"
|
||||
|
||||
upload_test_file(project_name, package_name, content, tag="pre-verify-success")
|
||||
upload_test_file(project_name, package_name, content, version="pre-verify-success")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-success",
|
||||
@@ -205,7 +205,7 @@ class TestPreVerificationMode:
|
||||
# Use binary content to verify no corruption
|
||||
content = bytes(range(256)) * 10 # 2560 bytes of all byte values
|
||||
|
||||
upload_test_file(project_name, package_name, content, tag="pre-verify-content")
|
||||
upload_test_file(project_name, package_name, content, version="pre-verify-content")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-content",
|
||||
@@ -233,7 +233,7 @@ class TestStreamingVerificationMode:
|
||||
content = b"Content for streaming verification success test"
|
||||
|
||||
upload_test_file(
|
||||
project_name, package_name, content, tag="stream-verify-success"
|
||||
project_name, package_name, content, version="stream-verify-success"
|
||||
)
|
||||
|
||||
response = integration_client.get(
|
||||
@@ -255,7 +255,7 @@ class TestStreamingVerificationMode:
|
||||
# 100KB of content
|
||||
content = b"x" * (100 * 1024)
|
||||
|
||||
upload_test_file(project_name, package_name, content, tag="stream-verify-large")
|
||||
upload_test_file(project_name, package_name, content, version="stream-verify-large")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/stream-verify-large",
|
||||
@@ -283,7 +283,7 @@ class TestHeadRequestHeaders:
|
||||
content = b"Content for HEAD SHA256 test"
|
||||
|
||||
artifact_id = upload_test_file(
|
||||
project_name, package_name, content, tag="head-sha256-test"
|
||||
project_name, package_name, content, version="head-sha256-test"
|
||||
)
|
||||
|
||||
response = integration_client.head(
|
||||
@@ -303,7 +303,7 @@ class TestHeadRequestHeaders:
|
||||
content = b"Content for HEAD ETag test"
|
||||
|
||||
artifact_id = upload_test_file(
|
||||
project_name, package_name, content, tag="head-etag-test"
|
||||
project_name, package_name, content, version="head-etag-test"
|
||||
)
|
||||
|
||||
response = integration_client.head(
|
||||
@@ -322,7 +322,7 @@ class TestHeadRequestHeaders:
|
||||
project_name, package_name = test_package
|
||||
content = b"Content for HEAD Digest test"
|
||||
|
||||
upload_test_file(project_name, package_name, content, tag="head-digest-test")
|
||||
upload_test_file(project_name, package_name, content, version="head-digest-test")
|
||||
|
||||
response = integration_client.head(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/head-digest-test"
|
||||
@@ -340,7 +340,7 @@ class TestHeadRequestHeaders:
|
||||
project_name, package_name = test_package
|
||||
content = b"Content for HEAD Content-Length test"
|
||||
|
||||
upload_test_file(project_name, package_name, content, tag="head-length-test")
|
||||
upload_test_file(project_name, package_name, content, version="head-length-test")
|
||||
|
||||
response = integration_client.head(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/head-length-test"
|
||||
@@ -356,7 +356,7 @@ class TestHeadRequestHeaders:
|
||||
project_name, package_name = test_package
|
||||
content = b"Content for HEAD no-body test"
|
||||
|
||||
upload_test_file(project_name, package_name, content, tag="head-no-body-test")
|
||||
upload_test_file(project_name, package_name, content, version="head-no-body-test")
|
||||
|
||||
response = integration_client.head(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/head-no-body-test"
|
||||
@@ -382,7 +382,7 @@ class TestRangeRequestHeaders:
|
||||
project_name, package_name = test_package
|
||||
content = b"Content for range request checksum header test"
|
||||
|
||||
upload_test_file(project_name, package_name, content, tag="range-checksum-test")
|
||||
upload_test_file(project_name, package_name, content, version="range-checksum-test")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/range-checksum-test",
|
||||
@@ -412,7 +412,7 @@ class TestClientSideVerification:
|
||||
project_name, package_name = test_package
|
||||
content = b"Content for client-side verification test"
|
||||
|
||||
upload_test_file(project_name, package_name, content, tag="client-verify-test")
|
||||
upload_test_file(project_name, package_name, content, version="client-verify-test")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/client-verify-test",
|
||||
@@ -438,7 +438,7 @@ class TestClientSideVerification:
|
||||
project_name, package_name = test_package
|
||||
content = b"Content for Digest header verification"
|
||||
|
||||
upload_test_file(project_name, package_name, content, tag="digest-verify-test")
|
||||
upload_test_file(project_name, package_name, content, version="digest-verify-test")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project_name}/{package_name}/+/digest-verify-test",
|
||||
|
||||
@@ -192,7 +192,6 @@ class TestCacheSettingsModel:
|
||||
|
||||
settings = CacheSettings()
|
||||
assert hasattr(settings, 'id')
|
||||
assert hasattr(settings, 'allow_public_internet')
|
||||
assert hasattr(settings, 'auto_create_system_projects')
|
||||
|
||||
def test_model_with_values(self):
|
||||
@@ -201,11 +200,9 @@ class TestCacheSettingsModel:
|
||||
|
||||
settings = CacheSettings(
|
||||
id=1,
|
||||
allow_public_internet=False,
|
||||
auto_create_system_projects=True,
|
||||
)
|
||||
assert settings.id == 1
|
||||
assert settings.allow_public_internet is False
|
||||
assert settings.auto_create_system_projects is True
|
||||
|
||||
|
||||
@@ -365,16 +362,14 @@ class TestCacheSettingsSchemas:
|
||||
from app.schemas import CacheSettingsUpdate
|
||||
|
||||
update = CacheSettingsUpdate()
|
||||
assert update.allow_public_internet is None
|
||||
assert update.auto_create_system_projects is None
|
||||
|
||||
def test_update_schema_partial(self):
|
||||
"""Test CacheSettingsUpdate with partial fields."""
|
||||
from app.schemas import CacheSettingsUpdate
|
||||
|
||||
update = CacheSettingsUpdate(allow_public_internet=False)
|
||||
assert update.allow_public_internet is False
|
||||
assert update.auto_create_system_projects is None
|
||||
update = CacheSettingsUpdate(auto_create_system_projects=True)
|
||||
assert update.auto_create_system_projects is True
|
||||
|
||||
|
||||
class TestCacheRequestSchemas:
|
||||
@@ -388,7 +383,7 @@ class TestCacheRequestSchemas:
|
||||
url="https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
|
||||
source_type="npm",
|
||||
package_name="lodash",
|
||||
tag="4.17.21",
|
||||
version="4.17.21",
|
||||
)
|
||||
assert request.url == "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz"
|
||||
assert request.source_type == "npm"
|
||||
@@ -1137,7 +1132,7 @@ class TestCacheRequestValidation:
|
||||
url="https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
|
||||
source_type="npm",
|
||||
package_name="lodash",
|
||||
tag="4.17.21",
|
||||
version="4.17.21",
|
||||
)
|
||||
assert request.url == "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz"
|
||||
assert request.source_type == "npm"
|
||||
@@ -1604,11 +1599,9 @@ class TestCacheSettingsAdminAPI:
|
||||
|
||||
data = response.json()
|
||||
# Check expected fields exist
|
||||
assert "allow_public_internet" in data
|
||||
assert "auto_create_system_projects" in data
|
||||
|
||||
# Check types
|
||||
assert isinstance(data["allow_public_internet"], bool)
|
||||
assert isinstance(data["auto_create_system_projects"], bool)
|
||||
|
||||
@pytest.mark.integration
|
||||
@@ -1621,7 +1614,7 @@ class TestCacheSettingsAdminAPI:
|
||||
with httpx.Client(base_url=base_url, timeout=30.0) as unauthenticated_client:
|
||||
response = unauthenticated_client.put(
|
||||
"/api/v1/admin/cache-settings",
|
||||
json={"allow_public_internet": False},
|
||||
json={"auto_create_system_projects": False},
|
||||
)
|
||||
assert response.status_code in (401, 403)
|
||||
|
||||
@@ -1635,76 +1628,43 @@ class TestCacheSettingsAdminAPI:
|
||||
response = integration_client.put(
|
||||
"/api/v1/admin/cache-settings",
|
||||
json={
|
||||
"allow_public_internet": not original["allow_public_internet"],
|
||||
"auto_create_system_projects": not original["auto_create_system_projects"],
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert data["allow_public_internet"] == (not original["allow_public_internet"])
|
||||
assert data["auto_create_system_projects"] == (not original["auto_create_system_projects"])
|
||||
|
||||
# Restore original settings
|
||||
integration_client.put(
|
||||
"/api/v1/admin/cache-settings",
|
||||
json={
|
||||
"allow_public_internet": original["allow_public_internet"],
|
||||
"auto_create_system_projects": original["auto_create_system_projects"],
|
||||
},
|
||||
)
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_update_cache_settings_allow_public_internet(self, integration_client):
|
||||
"""Test enabling and disabling public internet access (air-gap mode)."""
|
||||
# First get current settings to restore later
|
||||
original = integration_client.get("/api/v1/admin/cache-settings").json()
|
||||
|
||||
# Disable public internet (enable air-gap mode)
|
||||
response = integration_client.put(
|
||||
"/api/v1/admin/cache-settings",
|
||||
json={"allow_public_internet": False},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json()["allow_public_internet"] is False
|
||||
|
||||
# Enable public internet (disable air-gap mode)
|
||||
response = integration_client.put(
|
||||
"/api/v1/admin/cache-settings",
|
||||
json={"allow_public_internet": True},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json()["allow_public_internet"] is True
|
||||
|
||||
# Restore original settings
|
||||
integration_client.put(
|
||||
"/api/v1/admin/cache-settings",
|
||||
json={"allow_public_internet": original["allow_public_internet"]},
|
||||
)
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_update_cache_settings_partial(self, integration_client):
|
||||
"""Test that partial updates only change specified fields."""
|
||||
# Get current settings
|
||||
original = integration_client.get("/api/v1/admin/cache-settings").json()
|
||||
|
||||
# Update only allow_public_internet
|
||||
new_value = not original["allow_public_internet"]
|
||||
# Update only auto_create_system_projects
|
||||
new_value = not original["auto_create_system_projects"]
|
||||
response = integration_client.put(
|
||||
"/api/v1/admin/cache-settings",
|
||||
json={"allow_public_internet": new_value},
|
||||
json={"auto_create_system_projects": new_value},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert data["allow_public_internet"] == new_value
|
||||
# Other field should be unchanged
|
||||
assert data["auto_create_system_projects"] == original["auto_create_system_projects"]
|
||||
assert data["auto_create_system_projects"] == new_value
|
||||
|
||||
# Restore
|
||||
integration_client.put(
|
||||
"/api/v1/admin/cache-settings",
|
||||
json={"allow_public_internet": original["allow_public_internet"]},
|
||||
json={"auto_create_system_projects": original["auto_create_system_projects"]},
|
||||
)
|
||||
|
||||
@pytest.mark.integration
|
||||
@@ -1942,5 +1902,4 @@ class TestCacheSettingsEnvOverride:
|
||||
|
||||
data = response.json()
|
||||
# These fields should exist (may be null if no env override)
|
||||
assert "allow_public_internet_env_override" in data
|
||||
assert "auto_create_system_projects_env_override" in data
|
||||
|
||||
374
backend/tests/unit/test_cache_service.py
Normal file
@@ -0,0 +1,374 @@
|
||||
"""Tests for CacheService."""
|
||||
import pytest
|
||||
from unittest.mock import MagicMock, AsyncMock, patch
|
||||
|
||||
|
||||
class TestCacheCategory:
|
||||
"""Tests for cache category enum."""
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_immutable_categories_have_no_ttl(self):
|
||||
"""Immutable categories should return None for TTL."""
|
||||
from app.cache_service import CacheCategory, get_category_ttl
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings()
|
||||
|
||||
assert get_category_ttl(CacheCategory.ARTIFACT_METADATA, settings) is None
|
||||
assert get_category_ttl(CacheCategory.ARTIFACT_DEPENDENCIES, settings) is None
|
||||
assert get_category_ttl(CacheCategory.DEPENDENCY_RESOLUTION, settings) is None
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_mutable_categories_have_ttl(self):
|
||||
"""Mutable categories should return configured TTL."""
|
||||
from app.cache_service import CacheCategory, get_category_ttl
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(
|
||||
cache_ttl_index=300,
|
||||
cache_ttl_upstream=3600,
|
||||
)
|
||||
|
||||
assert get_category_ttl(CacheCategory.PACKAGE_INDEX, settings) == 300
|
||||
assert get_category_ttl(CacheCategory.UPSTREAM_SOURCES, settings) == 3600
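# Illustrative sketch (an assumption, not part of this diff): a TTL lookup
# consistent with the two tests above — immutable categories return None,
# mutable ones read their TTL from settings. The function name is hypothetical.
def category_ttl_sketch(category, settings):
    if category.name in ("ARTIFACT_METADATA", "ARTIFACT_DEPENDENCIES",
                         "DEPENDENCY_RESOLUTION"):
        return None  # content-addressed data never changes, so no expiry
    if category.name == "PACKAGE_INDEX":
        return settings.cache_ttl_index
    if category.name == "UPSTREAM_SOURCES":
        return settings.cache_ttl_upstream
    return None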
|
||||
|
||||
|
||||
class TestCacheService:
|
||||
"""Tests for Redis cache service."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_disabled_cache_returns_none(self):
|
||||
"""When Redis disabled, get() should return None."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=False)
|
||||
cache = CacheService(settings)
|
||||
await cache.startup()
|
||||
|
||||
result = await cache.get(CacheCategory.PACKAGE_INDEX, "test-key")
|
||||
|
||||
assert result is None
|
||||
await cache.shutdown()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_disabled_cache_set_is_noop(self):
|
||||
"""When Redis disabled, set() should be a no-op."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=False)
|
||||
cache = CacheService(settings)
|
||||
await cache.startup()
|
||||
|
||||
# Should not raise
|
||||
await cache.set(CacheCategory.PACKAGE_INDEX, "test-key", b"test-value")
|
||||
|
||||
await cache.shutdown()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_cache_key_namespacing(self):
|
||||
"""Cache keys should be properly namespaced."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
|
||||
key = CacheService._make_key(CacheCategory.PACKAGE_INDEX, "pypi", "numpy")
|
||||
|
||||
assert key == "orchard:index:pypi:numpy"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_ping_returns_false_when_disabled(self):
|
||||
"""ping() should return False when Redis is disabled."""
|
||||
from app.cache_service import CacheService
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=False)
|
||||
cache = CacheService(settings)
|
||||
await cache.startup()
|
||||
|
||||
result = await cache.ping()
|
||||
|
||||
assert result is False
|
||||
await cache.shutdown()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_enabled_property(self):
|
||||
"""enabled property should reflect Redis state."""
|
||||
from app.cache_service import CacheService
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=False)
|
||||
cache = CacheService(settings)
|
||||
|
||||
assert cache.enabled is False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_delete_is_noop_when_disabled(self):
|
||||
"""delete() should be a no-op when Redis is disabled."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=False)
|
||||
cache = CacheService(settings)
|
||||
await cache.startup()
|
||||
|
||||
# Should not raise
|
||||
await cache.delete(CacheCategory.PACKAGE_INDEX, "test-key")
|
||||
|
||||
await cache.shutdown()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_invalidate_pattern_returns_zero_when_disabled(self):
|
||||
"""invalidate_pattern() should return 0 when Redis is disabled."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=False)
|
||||
cache = CacheService(settings)
|
||||
await cache.startup()
|
||||
|
||||
result = await cache.invalidate_pattern(CacheCategory.PACKAGE_INDEX)
|
||||
|
||||
assert result == 0
|
||||
await cache.shutdown()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_startup_already_started(self):
|
||||
"""startup() should be idempotent."""
|
||||
from app.cache_service import CacheService
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=False)
|
||||
cache = CacheService(settings)
|
||||
await cache.startup()
|
||||
await cache.startup() # Should not raise
|
||||
|
||||
assert cache._started is True
|
||||
await cache.shutdown()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_shutdown_not_started(self):
|
||||
"""shutdown() should handle not-started state."""
|
||||
from app.cache_service import CacheService
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=False)
|
||||
cache = CacheService(settings)
|
||||
|
||||
# Should not raise
|
||||
await cache.shutdown()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_make_key_with_default_protocol(self):
|
||||
"""_make_key should work with default protocol."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
|
||||
key = CacheService._make_key(CacheCategory.ARTIFACT_METADATA, "default", "abc123")
|
||||
|
||||
assert key == "orchard:artifact:default:abc123"
|
||||
|
||||
|
||||
class TestCacheServiceWithMockedRedis:
|
||||
"""Tests for CacheService with mocked Redis client."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_get_returns_cached_value(self):
|
||||
"""get() should return cached value when available."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=True)
|
||||
cache = CacheService(settings)
|
||||
|
||||
# Mock the redis client
|
||||
mock_redis = AsyncMock()
|
||||
mock_redis.get.return_value = b"cached-data"
|
||||
cache._redis = mock_redis
|
||||
cache._enabled = True
|
||||
cache._started = True
|
||||
|
||||
result = await cache.get(CacheCategory.PACKAGE_INDEX, "test-key", "pypi")
|
||||
|
||||
assert result == b"cached-data"
|
||||
mock_redis.get.assert_called_once_with("orchard:index:pypi:test-key")
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_set_with_ttl(self):
|
||||
"""set() should use setex for mutable categories."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=True, cache_ttl_index=300)
|
||||
cache = CacheService(settings)
|
||||
|
||||
mock_redis = AsyncMock()
|
||||
cache._redis = mock_redis
|
||||
cache._enabled = True
|
||||
cache._started = True
|
||||
|
||||
await cache.set(CacheCategory.PACKAGE_INDEX, "test-key", b"test-value", "pypi")
|
||||
|
||||
mock_redis.setex.assert_called_once_with(
|
||||
"orchard:index:pypi:test-key", 300, b"test-value"
|
||||
)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_set_without_ttl(self):
|
||||
"""set() should use set (no expiry) for immutable categories."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=True)
|
||||
cache = CacheService(settings)
|
||||
|
||||
mock_redis = AsyncMock()
|
||||
cache._redis = mock_redis
|
||||
cache._enabled = True
|
||||
cache._started = True
|
||||
|
||||
await cache.set(
|
||||
CacheCategory.ARTIFACT_METADATA, "abc123", b"metadata", "pypi"
|
||||
)
|
||||
|
||||
mock_redis.set.assert_called_once_with(
|
||||
"orchard:artifact:pypi:abc123", b"metadata"
|
||||
)
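# Illustrative sketch (an assumption, not part of this diff): the branching
# the two tests above imply — SETEX for categories with a TTL, plain SET for
# immutable categories. The function name is hypothetical; the redis calls
# are standard redis-py asyncio methods.
async def cache_set_sketch(redis, key: str, value: bytes, ttl: int | None) -> None:
    if ttl is not None:
        await redis.setex(key, ttl, value)  # expires after ttl seconds
    else:
        await redis.set(key, value)  # cached indefinitely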
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_delete_calls_redis_delete(self):
|
||||
"""delete() should call Redis delete."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=True)
|
||||
cache = CacheService(settings)
|
||||
|
||||
mock_redis = AsyncMock()
|
||||
cache._redis = mock_redis
|
||||
cache._enabled = True
|
||||
cache._started = True
|
||||
|
||||
await cache.delete(CacheCategory.PACKAGE_INDEX, "test-key", "pypi")
|
||||
|
||||
mock_redis.delete.assert_called_once_with("orchard:index:pypi:test-key")
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_invalidate_pattern_deletes_matching_keys(self):
|
||||
"""invalidate_pattern() should delete all matching keys."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=True)
|
||||
cache = CacheService(settings)
|
||||
|
||||
mock_redis = AsyncMock()
|
||||
|
||||
# Create an async generator for scan_iter
|
||||
async def mock_scan_iter(match=None):
|
||||
for key in [b"orchard:index:pypi:numpy", b"orchard:index:pypi:requests"]:
|
||||
yield key
|
||||
|
||||
mock_redis.scan_iter = mock_scan_iter
|
||||
mock_redis.delete.return_value = 2
|
||||
cache._redis = mock_redis
|
||||
cache._enabled = True
|
||||
cache._started = True
|
||||
|
||||
result = await cache.invalidate_pattern(CacheCategory.PACKAGE_INDEX, "*", "pypi")
|
||||
|
||||
assert result == 2
|
||||
mock_redis.delete.assert_called_once()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_ping_returns_true_when_connected(self):
|
||||
"""ping() should return True when Redis responds."""
|
||||
from app.cache_service import CacheService
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=True)
|
||||
cache = CacheService(settings)
|
||||
|
||||
mock_redis = AsyncMock()
|
||||
mock_redis.ping.return_value = True
|
||||
cache._redis = mock_redis
|
||||
cache._enabled = True
|
||||
cache._started = True
|
||||
|
||||
result = await cache.ping()
|
||||
|
||||
assert result is True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_get_handles_exception(self):
|
||||
"""get() should return None and log warning on exception."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=True)
|
||||
cache = CacheService(settings)
|
||||
|
||||
mock_redis = AsyncMock()
|
||||
mock_redis.get.side_effect = Exception("Connection lost")
|
||||
cache._redis = mock_redis
|
||||
cache._enabled = True
|
||||
cache._started = True
|
||||
|
||||
result = await cache.get(CacheCategory.PACKAGE_INDEX, "test-key")
|
||||
|
||||
assert result is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_set_handles_exception(self):
|
||||
"""set() should log warning on exception."""
|
||||
from app.cache_service import CacheService, CacheCategory
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=True, cache_ttl_index=300)
|
||||
cache = CacheService(settings)
|
||||
|
||||
mock_redis = AsyncMock()
|
||||
mock_redis.setex.side_effect = Exception("Connection lost")
|
||||
cache._redis = mock_redis
|
||||
cache._enabled = True
|
||||
cache._started = True
|
||||
|
||||
# Should not raise
|
||||
await cache.set(CacheCategory.PACKAGE_INDEX, "test-key", b"value")
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_ping_returns_false_on_exception(self):
|
||||
"""ping() should return False on exception."""
|
||||
from app.cache_service import CacheService
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(redis_enabled=True)
|
||||
cache = CacheService(settings)
|
||||
|
||||
mock_redis = AsyncMock()
|
||||
mock_redis.ping.side_effect = Exception("Connection lost")
|
||||
cache._redis = mock_redis
|
||||
cache._enabled = True
|
||||
cache._started = True
|
||||
|
||||
result = await cache.ping()
|
||||
|
||||
assert result is False
|
||||
|
||||
167
backend/tests/unit/test_db_utils.py
Normal file
@@ -0,0 +1,167 @@
|
||||
"""Tests for database utility functions."""
|
||||
import pytest
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
|
||||
class TestArtifactRepository:
|
||||
"""Tests for ArtifactRepository."""
|
||||
|
||||
def test_batch_dependency_values_formatting(self):
|
||||
"""batch_upsert_dependencies should format values correctly."""
|
||||
from app.db_utils import ArtifactRepository
|
||||
|
||||
deps = [
|
||||
("_pypi", "numpy", ">=1.21.0"),
|
||||
("_pypi", "requests", "*"),
|
||||
("myproject", "mylib", "==1.0.0"),
|
||||
]
|
||||
|
||||
values = ArtifactRepository._format_dependency_values("abc123", deps)
|
||||
|
||||
assert len(values) == 3
|
||||
assert values[0] == {
|
||||
"artifact_id": "abc123",
|
||||
"dependency_project": "_pypi",
|
||||
"dependency_package": "numpy",
|
||||
"version_constraint": ">=1.21.0",
|
||||
}
|
||||
assert values[2]["dependency_project"] == "myproject"
|
||||
|
||||
def test_empty_dependencies_returns_empty_list(self):
|
||||
"""Empty dependency list should return empty values."""
|
||||
from app.db_utils import ArtifactRepository
|
||||
|
||||
values = ArtifactRepository._format_dependency_values("abc123", [])
|
||||
|
||||
assert values == []
|
||||
|
||||
def test_format_dependency_values_preserves_special_characters(self):
|
||||
"""Version constraints with special characters should be preserved."""
|
||||
from app.db_utils import ArtifactRepository
|
||||
|
||||
deps = [
|
||||
("_pypi", "package-name", ">=1.0.0,<2.0.0"),
|
||||
("_pypi", "another_pkg", "~=1.4.2"),
|
||||
]
|
||||
|
||||
values = ArtifactRepository._format_dependency_values("hash123", deps)
|
||||
|
||||
assert values[0]["version_constraint"] == ">=1.0.0,<2.0.0"
|
||||
assert values[1]["version_constraint"] == "~=1.4.2"
|
||||
|
||||
def test_batch_upsert_dependencies_returns_zero_for_empty(self):
|
||||
"""batch_upsert_dependencies should return 0 for empty list without DB call."""
|
||||
from app.db_utils import ArtifactRepository
|
||||
|
||||
mock_db = MagicMock()
|
||||
repo = ArtifactRepository(mock_db)
|
||||
|
||||
result = repo.batch_upsert_dependencies("abc123", [])
|
||||
|
||||
assert result == 0
|
||||
# Verify no DB operations were performed
|
||||
mock_db.execute.assert_not_called()
|
||||
|
||||
def test_get_or_create_artifact_builds_correct_statement(self):
|
||||
"""get_or_create_artifact should use ON CONFLICT DO UPDATE."""
|
||||
from app.db_utils import ArtifactRepository
|
||||
from app.models import Artifact
|
||||
|
||||
mock_db = MagicMock()
|
||||
mock_result = MagicMock()
|
||||
mock_artifact = MagicMock()
|
||||
mock_artifact.ref_count = 1
|
||||
mock_result.scalar_one.return_value = mock_artifact
|
||||
mock_db.execute.return_value = mock_result
|
||||
|
||||
repo = ArtifactRepository(mock_db)
|
||||
artifact, created = repo.get_or_create_artifact(
|
||||
sha256="abc123def456",
|
||||
size=1024,
|
||||
filename="test.whl",
|
||||
content_type="application/zip",
|
||||
)
|
||||
|
||||
assert mock_db.execute.called
|
||||
assert created is True
|
||||
assert artifact == mock_artifact
|
||||
|
||||
def test_get_or_create_artifact_existing_not_created(self):
|
||||
"""get_or_create_artifact should return created=False for existing artifact."""
|
||||
from app.db_utils import ArtifactRepository
|
||||
|
||||
mock_db = MagicMock()
|
||||
mock_result = MagicMock()
|
||||
mock_artifact = MagicMock()
|
||||
mock_artifact.ref_count = 5 # Existing artifact with ref_count > 1
|
||||
mock_result.scalar_one.return_value = mock_artifact
|
||||
mock_db.execute.return_value = mock_result
|
||||
|
||||
repo = ArtifactRepository(mock_db)
|
||||
artifact, created = repo.get_or_create_artifact(
|
||||
sha256="abc123def456",
|
||||
size=1024,
|
||||
filename="test.whl",
|
||||
)
|
||||
|
||||
assert created is False
|
||||
|
||||
def test_get_cached_url_with_artifact_returns_tuple(self):
|
||||
"""get_cached_url_with_artifact should return (CachedUrl, Artifact) tuple."""
|
||||
from app.db_utils import ArtifactRepository
|
||||
|
||||
mock_db = MagicMock()
|
||||
mock_cached_url = MagicMock()
|
||||
mock_artifact = MagicMock()
|
||||
mock_db.query.return_value.join.return_value.filter.return_value.first.return_value = (
|
||||
mock_cached_url,
|
||||
mock_artifact,
|
||||
)
|
||||
|
||||
repo = ArtifactRepository(mock_db)
|
||||
result = repo.get_cached_url_with_artifact("url_hash_123")
|
||||
|
||||
assert result == (mock_cached_url, mock_artifact)
|
||||
|
||||
def test_get_cached_url_with_artifact_returns_none_when_not_found(self):
|
||||
"""get_cached_url_with_artifact should return None when URL not cached."""
|
||||
from app.db_utils import ArtifactRepository
|
||||
|
||||
mock_db = MagicMock()
|
||||
mock_db.query.return_value.join.return_value.filter.return_value.first.return_value = None
|
||||
|
||||
repo = ArtifactRepository(mock_db)
|
||||
result = repo.get_cached_url_with_artifact("nonexistent_hash")
|
||||
|
||||
assert result is None
|
||||
|
||||
def test_get_artifact_dependencies_returns_list(self):
|
||||
"""get_artifact_dependencies should return list of dependencies."""
|
||||
from app.db_utils import ArtifactRepository
|
||||
|
||||
mock_db = MagicMock()
|
||||
mock_dep1 = MagicMock()
|
||||
mock_dep2 = MagicMock()
|
||||
mock_db.query.return_value.filter.return_value.all.return_value = [
|
||||
mock_dep1,
|
||||
mock_dep2,
|
||||
]
|
||||
|
||||
repo = ArtifactRepository(mock_db)
|
||||
result = repo.get_artifact_dependencies("artifact_hash_123")
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0] == mock_dep1
|
||||
assert result[1] == mock_dep2
|
||||
|
||||
def test_get_artifact_dependencies_returns_empty_list(self):
|
||||
"""get_artifact_dependencies should return empty list when no dependencies."""
|
||||
from app.db_utils import ArtifactRepository
|
||||
|
||||
mock_db = MagicMock()
|
||||
mock_db.query.return_value.filter.return_value.all.return_value = []
|
||||
|
||||
repo = ArtifactRepository(mock_db)
|
||||
result = repo.get_artifact_dependencies("artifact_without_deps")
|
||||
|
||||
assert result == []
|
||||
194
backend/tests/unit/test_http_client.py
Normal file
@@ -0,0 +1,194 @@
|
||||
"""Tests for HttpClientManager."""
|
||||
import pytest
|
||||
from unittest.mock import MagicMock, AsyncMock, patch
|
||||
|
||||
|
||||
class TestHttpClientManager:
|
||||
"""Tests for HTTP client pool management."""
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_manager_initializes_with_settings(self):
|
||||
"""Manager should initialize with config settings."""
|
||||
from app.http_client import HttpClientManager
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(
|
||||
http_max_connections=50,
|
||||
http_connect_timeout=15.0,
|
||||
)
|
||||
manager = HttpClientManager(settings)
|
||||
|
||||
assert manager.max_connections == 50
|
||||
assert manager.connect_timeout == 15.0
|
||||
assert manager._default_client is None # Not started yet
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_startup_creates_client(self):
|
||||
"""Startup should create the default async client."""
|
||||
from app.http_client import HttpClientManager
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings()
|
||||
manager = HttpClientManager(settings)
|
||||
|
||||
await manager.startup()
|
||||
|
||||
assert manager._default_client is not None
|
||||
|
||||
await manager.shutdown()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_shutdown_closes_client(self):
|
||||
"""Shutdown should close all clients gracefully."""
|
||||
from app.http_client import HttpClientManager
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings()
|
||||
manager = HttpClientManager(settings)
|
||||
|
||||
await manager.startup()
|
||||
client = manager._default_client
|
||||
|
||||
await manager.shutdown()
|
||||
|
||||
assert manager._default_client is None
|
||||
assert client.is_closed
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_get_client_returns_default(self):
|
||||
"""get_client() should return the default client."""
|
||||
from app.http_client import HttpClientManager
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings()
|
||||
manager = HttpClientManager(settings)
|
||||
await manager.startup()
|
||||
|
||||
client = manager.get_client()
|
||||
|
||||
assert client is manager._default_client
|
||||
|
||||
await manager.shutdown()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_get_client_raises_if_not_started(self):
|
||||
"""get_client() should raise RuntimeError if manager not started."""
|
||||
from app.http_client import HttpClientManager
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings()
|
||||
manager = HttpClientManager(settings)
|
||||
|
||||
with pytest.raises(RuntimeError, match="not started"):
|
||||
manager.get_client()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_run_blocking_executes_in_thread_pool(self):
|
||||
"""run_blocking should execute sync functions in thread pool."""
|
||||
from app.http_client import HttpClientManager
|
||||
from app.config import Settings
|
||||
import threading
|
||||
|
||||
settings = Settings()
|
||||
manager = HttpClientManager(settings)
|
||||
await manager.startup()
|
||||
|
||||
main_thread = threading.current_thread()
|
||||
execution_thread = None
|
||||
|
||||
def blocking_func():
|
||||
nonlocal execution_thread
|
||||
execution_thread = threading.current_thread()
|
||||
return "result"
|
||||
|
||||
result = await manager.run_blocking(blocking_func)
|
||||
|
||||
assert result == "result"
|
||||
assert execution_thread is not main_thread
|
||||
|
||||
await manager.shutdown()
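# Illustrative sketch (an assumption, not part of this diff): run_blocking as
# exercised above can be a thin wrapper over loop.run_in_executor with the
# manager's own thread pool. The class name is hypothetical.
import asyncio
from concurrent.futures import ThreadPoolExecutor

class _BlockingRunner:
    def __init__(self, max_workers: int = 8):
        self._executor = ThreadPoolExecutor(max_workers=max_workers)

    async def run_blocking(self, func, *args):
        loop = asyncio.get_running_loop()
        # The sync call runs on an executor thread, keeping the event loop free.
        return await loop.run_in_executor(self._executor, func, *args)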
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_run_blocking_raises_if_not_started(self):
|
||||
"""run_blocking should raise RuntimeError if manager not started."""
|
||||
from app.http_client import HttpClientManager
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings()
|
||||
manager = HttpClientManager(settings)
|
||||
|
||||
with pytest.raises(RuntimeError, match="not started"):
|
||||
await manager.run_blocking(lambda: None)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_startup_idempotent(self):
|
||||
"""Calling startup multiple times should be safe."""
|
||||
from app.http_client import HttpClientManager
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings()
|
||||
manager = HttpClientManager(settings)
|
||||
|
||||
await manager.startup()
|
||||
client1 = manager._default_client
|
||||
|
||||
await manager.startup() # Should not create a new client
|
||||
client2 = manager._default_client
|
||||
|
||||
assert client1 is client2 # Same client instance
|
||||
|
||||
await manager.shutdown()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_shutdown_idempotent(self):
|
||||
"""Calling shutdown multiple times should be safe."""
|
||||
from app.http_client import HttpClientManager
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings()
|
||||
manager = HttpClientManager(settings)
|
||||
|
||||
await manager.startup()
|
||||
await manager.shutdown()
|
||||
await manager.shutdown() # Should not raise
|
||||
|
||||
assert manager._default_client is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_properties_return_configured_values(self):
|
||||
"""Properties should return configured values."""
|
||||
from app.http_client import HttpClientManager
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings(
|
||||
http_max_connections=75,
|
||||
http_worker_threads=16,
|
||||
)
|
||||
manager = HttpClientManager(settings)
|
||||
await manager.startup()
|
||||
|
||||
assert manager.pool_size == 75
|
||||
assert manager.executor_max == 16
|
||||
|
||||
await manager.shutdown()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.unit
|
||||
async def test_active_connections_when_not_started(self):
|
||||
"""active_connections should return 0 when not started."""
|
||||
from app.http_client import HttpClientManager
|
||||
from app.config import Settings
|
||||
|
||||
settings = Settings()
|
||||
manager = HttpClientManager(settings)
|
||||
|
||||
assert manager.active_connections == 0
|
||||
243
backend/tests/unit/test_metadata.py
Normal file
@@ -0,0 +1,243 @@
|
||||
"""Unit tests for metadata extraction functionality."""
|
||||
|
||||
import io
|
||||
import gzip
|
||||
import tarfile
|
||||
import zipfile
|
||||
import pytest
|
||||
from app.metadata import (
|
||||
extract_metadata,
|
||||
extract_deb_metadata,
|
||||
extract_wheel_metadata,
|
||||
extract_tarball_metadata,
|
||||
extract_jar_metadata,
|
||||
parse_deb_control,
|
||||
)
|
||||
|
||||
|
||||
class TestDebMetadata:
|
||||
"""Tests for Debian package metadata extraction."""
|
||||
|
||||
def test_parse_deb_control_basic(self):
|
||||
"""Test parsing a basic control file."""
|
||||
control = """Package: my-package
|
||||
Version: 1.2.3
|
||||
Architecture: amd64
|
||||
Maintainer: Test <test@example.com>
|
||||
Description: A test package
|
||||
"""
|
||||
result = parse_deb_control(control)
|
||||
assert result["package_name"] == "my-package"
|
||||
assert result["version"] == "1.2.3"
|
||||
assert result["architecture"] == "amd64"
|
||||
assert result["format"] == "deb"
|
||||
|
||||
def test_parse_deb_control_with_epoch(self):
|
||||
"""Test parsing version with epoch."""
|
||||
control = """Package: another-pkg
|
||||
Version: 2:1.0.0-1
|
||||
"""
|
||||
result = parse_deb_control(control)
|
||||
assert result["version"] == "2:1.0.0-1"
|
||||
assert result["package_name"] == "another-pkg"
|
||||
assert result["format"] == "deb"
|
||||
|
||||
def test_extract_deb_metadata_invalid_magic(self):
|
||||
"""Test that invalid ar magic returns empty dict."""
|
||||
file = io.BytesIO(b"not an ar archive")
|
||||
result = extract_deb_metadata(file)
|
||||
assert result == {}
|
||||
|
||||
def test_extract_deb_metadata_valid_ar_no_control(self):
|
||||
"""Test ar archive without control.tar returns empty."""
|
||||
# Create minimal ar archive with just debian-binary
|
||||
ar_data = b"!<arch>\n"
|
||||
ar_data += b"debian-binary/ 0 0 0 100644 4 `\n"
|
||||
ar_data += b"2.0\n"
|
||||
|
||||
file = io.BytesIO(ar_data)
|
||||
result = extract_deb_metadata(file)
|
||||
# Should return empty since no control.tar found
|
||||
assert result == {} or "version" not in result
|
||||
|
||||
|
||||
class TestWheelMetadata:
|
||||
"""Tests for Python wheel metadata extraction."""
|
||||
|
||||
def _create_wheel_with_metadata(self, metadata_content: str) -> io.BytesIO:
|
||||
"""Helper to create a wheel file with given METADATA content."""
|
||||
buf = io.BytesIO()
|
||||
with zipfile.ZipFile(buf, 'w') as zf:
|
||||
zf.writestr('package-1.0.0.dist-info/METADATA', metadata_content)
|
||||
buf.seek(0)
|
||||
return buf
|
||||
|
||||
def test_extract_wheel_version(self):
|
||||
"""Test extracting version from wheel METADATA."""
|
||||
metadata = """Metadata-Version: 2.1
|
||||
Name: my-package
|
||||
Version: 2.3.4
|
||||
Summary: A test package
|
||||
"""
|
||||
file = self._create_wheel_with_metadata(metadata)
|
||||
result = extract_wheel_metadata(file)
|
||||
assert result.get("version") == "2.3.4"
|
||||
assert result.get("package_name") == "my-package"
|
||||
assert result.get("format") == "wheel"
|
||||
|
||||
def test_extract_wheel_no_version(self):
|
||||
"""Test wheel without version field."""
|
||||
metadata = """Metadata-Version: 2.1
|
||||
Name: no-version-pkg
|
||||
"""
|
||||
file = self._create_wheel_with_metadata(metadata)
|
||||
result = extract_wheel_metadata(file)
|
||||
assert "version" not in result
|
||||
assert result.get("package_name") == "no-version-pkg"
|
||||
assert result.get("format") == "wheel"
|
||||
|
||||
def test_extract_wheel_invalid_zip(self):
|
||||
"""Test that invalid zip returns format-only dict."""
|
||||
file = io.BytesIO(b"not a zip file")
|
||||
result = extract_wheel_metadata(file)
|
||||
assert result == {"format": "wheel"}
|
||||
|
||||
def test_extract_wheel_no_metadata_file(self):
|
||||
"""Test wheel without METADATA file returns format-only dict."""
|
||||
buf = io.BytesIO()
|
||||
with zipfile.ZipFile(buf, 'w') as zf:
|
||||
zf.writestr('some_file.py', 'print("hello")')
|
||||
buf.seek(0)
|
||||
result = extract_wheel_metadata(buf)
|
||||
assert result == {"format": "wheel"}
|
||||
|
||||
|
||||
class TestTarballMetadata:
|
||||
"""Tests for tarball metadata extraction from filename."""
|
||||
|
||||
def test_extract_version_from_filename_standard(self):
|
||||
"""Test standard package-version.tar.gz format."""
|
||||
file = io.BytesIO(b"") # Content doesn't matter for filename extraction
|
||||
result = extract_tarball_metadata(file, "mypackage-1.2.3.tar.gz")
|
||||
assert result.get("version") == "1.2.3"
|
||||
assert result.get("package_name") == "mypackage"
|
||||
assert result.get("format") == "tarball"
|
||||
|
||||
def test_extract_version_with_v_prefix(self):
|
||||
"""Test version with v prefix."""
|
||||
file = io.BytesIO(b"")
|
||||
result = extract_tarball_metadata(file, "package-v2.0.0.tar.gz")
|
||||
assert result.get("version") == "2.0.0"
|
||||
assert result.get("package_name") == "package"
|
||||
assert result.get("format") == "tarball"
|
||||
|
||||
def test_extract_version_underscore_separator(self):
|
||||
"""Test package_version format."""
|
||||
file = io.BytesIO(b"")
|
||||
result = extract_tarball_metadata(file, "my_package_3.1.4.tar.gz")
|
||||
assert result.get("version") == "3.1.4"
|
||||
assert result.get("package_name") == "my_package"
|
||||
assert result.get("format") == "tarball"
|
||||
|
||||
def test_extract_version_complex(self):
|
||||
"""Test complex version string."""
|
||||
file = io.BytesIO(b"")
|
||||
result = extract_tarball_metadata(file, "package-1.0.0-beta.1.tar.gz")
|
||||
# The regex handles versions with suffix like -beta_1
|
||||
assert result.get("format") == "tarball"
|
||||
# May or may not extract version depending on regex match
|
||||
if "version" in result:
|
||||
assert result.get("package_name") == "package"
|
||||
|
||||
def test_extract_no_version_in_filename(self):
|
||||
"""Test filename without version returns format-only dict."""
|
||||
file = io.BytesIO(b"")
|
||||
result = extract_tarball_metadata(file, "package.tar.gz")
|
||||
# Should return format but no version
|
||||
assert result.get("version") is None
|
||||
assert result.get("format") == "tarball"
|
||||
|
||||
|
||||
class TestJarMetadata:
|
||||
"""Tests for JAR/Java metadata extraction."""
|
||||
|
||||
def _create_jar_with_manifest(self, manifest_content: str) -> io.BytesIO:
|
||||
"""Helper to create a JAR file with given MANIFEST.MF content."""
|
||||
buf = io.BytesIO()
|
||||
with zipfile.ZipFile(buf, 'w') as zf:
|
||||
zf.writestr('META-INF/MANIFEST.MF', manifest_content)
|
||||
buf.seek(0)
|
||||
return buf
|
||||
|
||||
def test_extract_jar_version_from_manifest(self):
|
||||
"""Test extracting version from MANIFEST.MF."""
|
||||
manifest = """Manifest-Version: 1.0
|
||||
Implementation-Title: my-library
|
||||
Implementation-Version: 4.5.6
|
||||
"""
|
||||
file = self._create_jar_with_manifest(manifest)
|
||||
result = extract_jar_metadata(file)
|
||||
assert result.get("version") == "4.5.6"
|
||||
assert result.get("package_name") == "my-library"
|
||||
assert result.get("format") == "jar"
|
||||
|
||||
def test_extract_jar_bundle_version(self):
|
||||
"""Test extracting OSGi Bundle-Version."""
|
||||
manifest = """Manifest-Version: 1.0
|
||||
Bundle-Version: 2.1.0
|
||||
Bundle-Name: Test Bundle
|
||||
"""
|
||||
file = self._create_jar_with_manifest(manifest)
|
||||
result = extract_jar_metadata(file)
|
||||
# Bundle-Version is stored in bundle_version, not version
|
||||
assert result.get("bundle_version") == "2.1.0"
|
||||
assert result.get("bundle_name") == "Test Bundle"
|
||||
assert result.get("format") == "jar"
|
||||
|
||||
def test_extract_jar_invalid_zip(self):
|
||||
"""Test that invalid JAR returns format-only dict."""
|
||||
file = io.BytesIO(b"not a jar file")
|
||||
result = extract_jar_metadata(file)
|
||||
assert result == {"format": "jar"}
|
||||
|
||||
|
||||
class TestExtractMetadataDispatch:
|
||||
"""Tests for the main extract_metadata dispatcher function."""
|
||||
|
||||
def test_dispatch_to_wheel(self):
|
||||
"""Test that .whl files use wheel extractor."""
|
||||
buf = io.BytesIO()
|
||||
with zipfile.ZipFile(buf, 'w') as zf:
|
||||
zf.writestr('pkg-1.0.dist-info/METADATA', 'Version: 1.0.0\nName: pkg')
|
||||
buf.seek(0)
|
||||
|
||||
result = extract_metadata(buf, "package-1.0.0-py3-none-any.whl")
|
||||
assert result.get("version") == "1.0.0"
|
||||
assert result.get("package_name") == "pkg"
|
||||
assert result.get("format") == "wheel"
|
||||
|
||||
def test_dispatch_to_tarball(self):
|
||||
"""Test that .tar.gz files use tarball extractor."""
|
||||
file = io.BytesIO(b"")
|
||||
result = extract_metadata(file, "mypackage-2.3.4.tar.gz")
|
||||
assert result.get("version") == "2.3.4"
|
||||
assert result.get("package_name") == "mypackage"
|
||||
assert result.get("format") == "tarball"
|
||||
|
||||
def test_dispatch_unknown_extension(self):
|
||||
"""Test that unknown extensions return empty dict."""
|
||||
file = io.BytesIO(b"some content")
|
||||
result = extract_metadata(file, "unknown.xyz")
|
||||
assert result == {}
|
||||
|
||||
def test_file_position_reset_after_extraction(self):
|
||||
"""Test that file position is reset to start after extraction."""
|
||||
buf = io.BytesIO()
|
||||
with zipfile.ZipFile(buf, 'w') as zf:
|
||||
zf.writestr('pkg-1.0.dist-info/METADATA', 'Version: 1.0.0\nName: pkg')
|
||||
buf.seek(0)
|
||||
|
||||
extract_metadata(buf, "package.whl")
|
||||
|
||||
# File should be back at position 0
|
||||
assert buf.tell() == 0
|
||||
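For orientation, a control-file parser with the behavior exercised above could look like this sketch (illustrative only, not the `parse_deb_control` shipped in `app/metadata.py`):

```python
# Illustrative sketch; field-to-key mapping mirrors the assertions above.
def parse_control_text(control: str) -> dict:
    """Map Debian control fields onto the metadata keys used by the tests."""
    fields = {}
    for line in control.splitlines():
        # Continuation lines start with whitespace; top-level fields are "Key: value".
        if ":" in line and not line[:1].isspace():
            key, _, value = line.partition(":")
            fields[key.strip().lower()] = value.strip()

    result = {"format": "deb"}
    for control_key, meta_key in (
        ("package", "package_name"),
        ("version", "version"),
        ("architecture", "architecture"),
    ):
        if control_key in fields:
            result[meta_key] = fields[control_key]
    return result
```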
@@ -145,54 +145,6 @@ class TestPackageModel:
        assert platform_col.default.arg == "any"


class TestTagModel:
    """Tests for the Tag model."""

    @pytest.mark.unit
    def test_tag_requires_package_id(self):
        """Test tag requires package_id."""
        from app.models import Tag

        tag = Tag(
            name="v1.0.0",
            package_id=uuid.uuid4(),
            artifact_id="f" * 64,
            created_by="test-user",
        )

        assert tag.package_id is not None
        assert tag.artifact_id == "f" * 64


class TestTagHistoryModel:
    """Tests for the TagHistory model."""

    @pytest.mark.unit
    def test_tag_history_default_change_type(self):
        """Test tag history change_type column has default value of 'update'."""
        from app.models import TagHistory

        # Check the column definition has the right default
        change_type_col = TagHistory.__table__.columns["change_type"]
        assert change_type_col.default is not None
        assert change_type_col.default.arg == "update"

    @pytest.mark.unit
    def test_tag_history_allows_null_old_artifact(self):
        """Test tag history allows null old_artifact_id (for create events)."""
        from app.models import TagHistory

        history = TagHistory(
            tag_id=uuid.uuid4(),
            old_artifact_id=None,
            new_artifact_id="h" * 64,
            change_type="create",
            changed_by="test-user",
        )

        assert history.old_artifact_id is None


class TestUploadModel:
    """Tests for the Upload model."""

85 backend/tests/unit/test_pypi_proxy.py Normal file
@@ -0,0 +1,85 @@
"""Unit tests for PyPI proxy functionality."""

import pytest
from app.pypi_proxy import _parse_requires_dist


class TestParseRequiresDist:
    """Tests for _parse_requires_dist function."""

    def test_simple_package(self):
        """Test parsing a simple package name."""
        name, version = _parse_requires_dist("numpy")
        assert name == "numpy"
        assert version is None

    def test_package_with_version(self):
        """Test parsing package with version constraint."""
        name, version = _parse_requires_dist("numpy>=1.21.0")
        assert name == "numpy"
        assert version == ">=1.21.0"

    def test_package_with_parenthesized_version(self):
        """Test parsing package with parenthesized version."""
        name, version = _parse_requires_dist("requests (>=2.25.0)")
        assert name == "requests"
        assert version == ">=2.25.0"

    def test_package_with_python_version_marker(self):
        """Test that python_version markers are preserved but marker stripped."""
        name, version = _parse_requires_dist("typing-extensions; python_version < '3.8'")
        assert name == "typing-extensions"
        assert version is None

    def test_filters_extra_dependencies(self):
        """Test that extra dependencies are filtered out."""
        # Extra dependencies should return (None, None)
        name, version = _parse_requires_dist("pytest; extra == 'test'")
        assert name is None
        assert version is None

        name, version = _parse_requires_dist("sphinx; extra == 'docs'")
        assert name is None
        assert version is None

    def test_filters_platform_specific_darwin(self):
        """Test that macOS-specific dependencies are filtered out."""
        name, version = _parse_requires_dist("pyobjc; sys_platform == 'darwin'")
        assert name is None
        assert version is None

    def test_filters_platform_specific_win32(self):
        """Test that Windows-specific dependencies are filtered out."""
        name, version = _parse_requires_dist("pywin32; sys_platform == 'win32'")
        assert name is None
        assert version is None

    def test_filters_platform_system_marker(self):
        """Test that platform_system markers are filtered out."""
        name, version = _parse_requires_dist("jaraco-windows; platform_system == 'Windows'")
        assert name is None
        assert version is None

    def test_normalizes_package_name(self):
        """Test that package names are normalized (PEP 503)."""
        name, version = _parse_requires_dist("Typing_Extensions>=3.7.4")
        assert name == "typing-extensions"
        assert version == ">=3.7.4"

    def test_complex_version_constraint(self):
        """Test parsing complex version constraints."""
        name, version = _parse_requires_dist("gast!=0.5.0,!=0.5.1,!=0.5.2,>=0.2.1")
        assert name == "gast"
        assert version == "!=0.5.0,!=0.5.1,!=0.5.2,>=0.2.1"

    def test_version_range(self):
        """Test parsing version range constraints."""
        name, version = _parse_requires_dist("grpcio<2.0,>=1.24.3")
        assert name == "grpcio"
        assert version == "<2.0,>=1.24.3"

    def test_tilde_version(self):
        """Test parsing tilde version constraints."""
        name, version = _parse_requires_dist("tensorboard~=2.20.0")
        assert name == "tensorboard"
        assert version == "~=2.20.0"
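For orientation, a parser with the behavior pinned down by these tests could look like the sketch below; it is not the `_parse_requires_dist` shipped in `app/pypi_proxy.py`, only an illustration of the marker filtering and PEP 503 normalization under test:

```python
# Illustrative sketch only; the module's actual _parse_requires_dist may differ.
import re
from typing import Optional, Tuple

# Environment markers that flag a dependency we do not want to mirror.
_SKIP_MARKERS = ("extra ==", "sys_platform ==", "platform_system ==")


def parse_requires_dist(requirement: str) -> Tuple[Optional[str], Optional[str]]:
    """Split a Requires-Dist entry into (normalized name, version constraint)."""
    spec, _, marker = requirement.partition(";")
    marker = marker.strip()
    if marker and any(m in marker for m in _SKIP_MARKERS):
        return None, None  # extras and platform-specific deps are filtered out

    # Drop parentheses around the constraint, e.g. "requests (>=2.25.0)".
    spec = spec.replace("(", " ").replace(")", " ").strip()

    match = re.match(r"^([A-Za-z0-9][A-Za-z0-9._-]*)\s*(.*)$", spec)
    if not match:
        return None, None
    raw_name, constraint = match.group(1), match.group(2).strip() or None

    # PEP 503: runs of '-', '_', '.' collapse to a single dash, lowercased.
    name = re.sub(r"[-_.]+", "-", raw_name).lower()
    return name, constraint
```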
65 backend/tests/unit/test_rate_limit.py Normal file
@@ -0,0 +1,65 @@
"""Unit tests for rate limiting configuration."""

import os
import pytest


class TestRateLimitConfiguration:
    """Tests for rate limit configuration."""

    def test_default_login_rate_limit(self):
        """Test default login rate limit is 5/minute."""
        # Import fresh to get default value
        import importlib
        import app.rate_limit as rate_limit_module

        # Save original env value
        original = os.environ.get("ORCHARD_LOGIN_RATE_LIMIT")

        try:
            # Clear env variable to test default
            if "ORCHARD_LOGIN_RATE_LIMIT" in os.environ:
                del os.environ["ORCHARD_LOGIN_RATE_LIMIT"]

            # Reload module to pick up new env
            importlib.reload(rate_limit_module)

            assert rate_limit_module.LOGIN_RATE_LIMIT == "5/minute"
        finally:
            # Restore original env value
            if original is not None:
                os.environ["ORCHARD_LOGIN_RATE_LIMIT"] = original
            importlib.reload(rate_limit_module)

    def test_custom_login_rate_limit(self):
        """Test custom login rate limit from environment."""
        import importlib
        import app.rate_limit as rate_limit_module

        # Save original env value
        original = os.environ.get("ORCHARD_LOGIN_RATE_LIMIT")

        try:
            # Set custom rate limit
            os.environ["ORCHARD_LOGIN_RATE_LIMIT"] = "10/minute"

            # Reload module to pick up new env
            importlib.reload(rate_limit_module)

            assert rate_limit_module.LOGIN_RATE_LIMIT == "10/minute"
        finally:
            # Restore original env value
            if original is not None:
                os.environ["ORCHARD_LOGIN_RATE_LIMIT"] = original
            else:
                if "ORCHARD_LOGIN_RATE_LIMIT" in os.environ:
                    del os.environ["ORCHARD_LOGIN_RATE_LIMIT"]
            importlib.reload(rate_limit_module)

    def test_limiter_exists(self):
        """Test that limiter object is created."""
        from app.rate_limit import limiter

        assert limiter is not None
        # Limiter should have a key_func set
        assert limiter._key_func is not None
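These tests reload the module because it reads its configuration at import time. A minimal sketch of such a module, assuming the slowapi library (the real `app/rate_limit.py` may differ):

```python
# Hypothetical app/rate_limit.py, assuming slowapi.
import os

from slowapi import Limiter
from slowapi.util import get_remote_address

# Read at import time, which is why the tests above reload the module.
LOGIN_RATE_LIMIT = os.environ.get("ORCHARD_LOGIN_RATE_LIMIT", "5/minute")

# Keyed by client IP; the tests only assert that a key_func is configured.
limiter = Limiter(key_func=get_remote_address)
```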
300 backend/tests/unit/test_registry_client.py Normal file
@@ -0,0 +1,300 @@
|
||||
"""Unit tests for registry client functionality."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
import httpx
|
||||
from packaging.specifiers import SpecifierSet
|
||||
|
||||
from app.registry_client import (
|
||||
PyPIRegistryClient,
|
||||
VersionInfo,
|
||||
FetchResult,
|
||||
get_registry_client,
|
||||
)
|
||||
|
||||
|
||||
class TestPyPIRegistryClient:
|
||||
"""Tests for PyPI registry client."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_http_client(self):
|
||||
"""Create a mock async HTTP client."""
|
||||
return AsyncMock(spec=httpx.AsyncClient)
|
||||
|
||||
@pytest.fixture
|
||||
def client(self, mock_http_client):
|
||||
"""Create a PyPI registry client with mocked HTTP."""
|
||||
return PyPIRegistryClient(
|
||||
http_client=mock_http_client,
|
||||
upstream_sources=[],
|
||||
pypi_api_url="https://pypi.org/pypi",
|
||||
)
|
||||
|
||||
def test_source_type(self, client):
|
||||
"""Test source_type returns 'pypi'."""
|
||||
assert client.source_type == "pypi"
|
||||
|
||||
def test_normalize_package_name(self, client):
|
||||
"""Test package name normalization per PEP 503."""
|
||||
assert client._normalize_package_name("My_Package") == "my-package"
|
||||
assert client._normalize_package_name("my.package") == "my-package"
|
||||
assert client._normalize_package_name("my-package") == "my-package"
|
||||
assert client._normalize_package_name("MY-PACKAGE") == "my-package"
|
||||
assert client._normalize_package_name("my__package") == "my-package"
|
||||
assert client._normalize_package_name("my..package") == "my-package"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_available_versions_success(self, client, mock_http_client):
|
||||
"""Test fetching available versions from PyPI."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"releases": {
|
||||
"1.0.0": [{"packagetype": "bdist_wheel"}],
|
||||
"1.1.0": [{"packagetype": "bdist_wheel"}],
|
||||
"2.0.0": [{"packagetype": "bdist_wheel"}],
|
||||
}
|
||||
}
|
||||
mock_http_client.get.return_value = mock_response
|
||||
|
||||
versions = await client.get_available_versions("test-package")
|
||||
|
||||
assert "1.0.0" in versions
|
||||
assert "1.1.0" in versions
|
||||
assert "2.0.0" in versions
|
||||
mock_http_client.get.assert_called_once()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_available_versions_empty(self, client, mock_http_client):
|
||||
"""Test handling package with no releases."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"releases": {}}
|
||||
mock_http_client.get.return_value = mock_response
|
||||
|
||||
versions = await client.get_available_versions("empty-package")
|
||||
|
||||
assert versions == []
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_available_versions_404(self, client, mock_http_client):
|
||||
"""Test handling non-existent package."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 404
|
||||
mock_http_client.get.return_value = mock_response
|
||||
|
||||
versions = await client.get_available_versions("nonexistent")
|
||||
|
||||
assert versions == []
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_resolve_constraint_wildcard(self, client, mock_http_client):
|
||||
"""Test resolving wildcard constraint returns latest."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"info": {"version": "2.0.0"},
|
||||
"releases": {
|
||||
"1.0.0": [
|
||||
{
|
||||
"packagetype": "bdist_wheel",
|
||||
"url": "https://files.pythonhosted.org/test-1.0.0.whl",
|
||||
"filename": "test-1.0.0.whl",
|
||||
"digests": {"sha256": "abc123"},
|
||||
"size": 1000,
|
||||
}
|
||||
],
|
||||
"2.0.0": [
|
||||
{
|
||||
"packagetype": "bdist_wheel",
|
||||
"url": "https://files.pythonhosted.org/test-2.0.0.whl",
|
||||
"filename": "test-2.0.0.whl",
|
||||
"digests": {"sha256": "def456"},
|
||||
"size": 2000,
|
||||
}
|
||||
],
|
||||
},
|
||||
}
|
||||
mock_http_client.get.return_value = mock_response
|
||||
|
||||
result = await client.resolve_constraint("test-package", "*")
|
||||
|
||||
assert result is not None
|
||||
assert result.version == "2.0.0"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_resolve_constraint_specific_version(self, client, mock_http_client):
|
||||
"""Test resolving specific version constraint."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"releases": {
|
||||
"1.0.0": [
|
||||
{
|
||||
"packagetype": "bdist_wheel",
|
||||
"url": "https://files.pythonhosted.org/test-1.0.0.whl",
|
||||
"filename": "test-1.0.0.whl",
|
||||
"digests": {"sha256": "abc123"},
|
||||
"size": 1000,
|
||||
}
|
||||
],
|
||||
"2.0.0": [
|
||||
{
|
||||
"packagetype": "bdist_wheel",
|
||||
"url": "https://files.pythonhosted.org/test-2.0.0.whl",
|
||||
"filename": "test-2.0.0.whl",
|
||||
}
|
||||
],
|
||||
},
|
||||
}
|
||||
mock_http_client.get.return_value = mock_response
|
||||
|
||||
result = await client.resolve_constraint("test-package", ">=1.0.0,<2.0.0")
|
||||
|
||||
assert result is not None
|
||||
assert result.version == "1.0.0"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_resolve_constraint_no_match(self, client, mock_http_client):
|
||||
"""Test resolving constraint with no matching version."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"releases": {
|
||||
"1.0.0": [
|
||||
{
|
||||
"packagetype": "bdist_wheel",
|
||||
"url": "https://files.pythonhosted.org/test-1.0.0.whl",
|
||||
"filename": "test-1.0.0.whl",
|
||||
}
|
||||
],
|
||||
},
|
||||
}
|
||||
mock_http_client.get.return_value = mock_response
|
||||
|
||||
result = await client.resolve_constraint("test-package", ">=5.0.0")
|
||||
|
||||
assert result is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_resolve_constraint_bare_version(self, client, mock_http_client):
|
||||
"""Test resolving bare version string as exact match."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"info": {"version": "2.0.0"},
|
||||
"releases": {
|
||||
"1.0.0": [
|
||||
{
|
||||
"packagetype": "bdist_wheel",
|
||||
"url": "https://files.pythonhosted.org/test-1.0.0.whl",
|
||||
"filename": "test-1.0.0.whl",
|
||||
"digests": {"sha256": "abc123"},
|
||||
"size": 1000,
|
||||
}
|
||||
],
|
||||
"2.0.0": [
|
||||
{
|
||||
"packagetype": "bdist_wheel",
|
||||
"url": "https://files.pythonhosted.org/test-2.0.0.whl",
|
||||
"filename": "test-2.0.0.whl",
|
||||
"digests": {"sha256": "def456"},
|
||||
"size": 2000,
|
||||
}
|
||||
],
|
||||
},
|
||||
}
|
||||
mock_http_client.get.return_value = mock_response
|
||||
|
||||
# Bare version "1.0.0" should resolve to exactly 1.0.0, not latest
|
||||
result = await client.resolve_constraint("test-package", "1.0.0")
|
||||
|
||||
assert result is not None
|
||||
assert result.version == "1.0.0"
|
||||
|
||||
|
||||
class TestVersionInfo:
|
||||
"""Tests for VersionInfo dataclass."""
|
||||
|
||||
def test_create_version_info(self):
|
||||
"""Test creating VersionInfo with all fields."""
|
||||
info = VersionInfo(
|
||||
version="1.0.0",
|
||||
download_url="https://example.com/pkg-1.0.0.whl",
|
||||
filename="pkg-1.0.0.whl",
|
||||
sha256="abc123",
|
||||
size=5000,
|
||||
content_type="application/zip",
|
||||
)
|
||||
assert info.version == "1.0.0"
|
||||
assert info.download_url == "https://example.com/pkg-1.0.0.whl"
|
||||
assert info.filename == "pkg-1.0.0.whl"
|
||||
assert info.sha256 == "abc123"
|
||||
assert info.size == 5000
|
||||
|
||||
def test_create_version_info_minimal(self):
|
||||
"""Test creating VersionInfo with only required fields."""
|
||||
info = VersionInfo(
|
||||
version="1.0.0",
|
||||
download_url="https://example.com/pkg.whl",
|
||||
filename="pkg.whl",
|
||||
)
|
||||
assert info.sha256 is None
|
||||
assert info.size is None
|
||||
|
||||
|
||||
class TestFetchResult:
|
||||
"""Tests for FetchResult dataclass."""
|
||||
|
||||
def test_create_fetch_result(self):
|
||||
"""Test creating FetchResult."""
|
||||
result = FetchResult(
|
||||
artifact_id="abc123def456",
|
||||
size=10000,
|
||||
version="2.0.0",
|
||||
filename="pkg-2.0.0.whl",
|
||||
already_cached=True,
|
||||
)
|
||||
assert result.artifact_id == "abc123def456"
|
||||
assert result.size == 10000
|
||||
assert result.version == "2.0.0"
|
||||
assert result.already_cached is True
|
||||
|
||||
def test_fetch_result_default_not_cached(self):
|
||||
"""Test FetchResult defaults to not cached."""
|
||||
result = FetchResult(
|
||||
artifact_id="xyz",
|
||||
size=100,
|
||||
version="1.0.0",
|
||||
filename="pkg.whl",
|
||||
)
|
||||
assert result.already_cached is False
|
||||
|
||||
|
||||
class TestGetRegistryClient:
|
||||
"""Tests for registry client factory function."""
|
||||
|
||||
def test_get_pypi_client(self):
|
||||
"""Test getting PyPI client."""
|
||||
mock_client = MagicMock()
|
||||
mock_sources = []
|
||||
|
||||
client = get_registry_client("pypi", mock_client, mock_sources)
|
||||
|
||||
assert isinstance(client, PyPIRegistryClient)
|
||||
|
||||
def test_get_unsupported_client(self):
|
||||
"""Test getting unsupported registry type returns None."""
|
||||
mock_client = MagicMock()
|
||||
|
||||
client = get_registry_client("npm", mock_client, [])
|
||||
|
||||
assert client is None
|
||||
|
||||
def test_get_unknown_client(self):
|
||||
"""Test getting unknown registry type returns None."""
|
||||
mock_client = MagicMock()
|
||||
|
||||
client = get_registry_client("unknown", mock_client, [])
|
||||
|
||||
assert client is None
|
||||
228 docs/plans/2026-02-04-pypi-proxy-performance-design.md Normal file
@@ -0,0 +1,228 @@
# PyPI Proxy Performance & Multi-Protocol Architecture Design

**Date:** 2026-02-04
**Status:** Approved
**Branch:** fix/pypi-proxy-timeout

## Overview

Comprehensive infrastructure overhaul to address latency, throughput, and resource consumption issues in the PyPI proxy, while establishing a foundation for npm, Maven, and other package protocols.

## Goals

1. **Reduce latency** - Eliminate per-request connection overhead, cache aggressively
2. **Increase throughput** - Handle hundreds of concurrent requests without degradation
3. **Lower resource usage** - Connection pooling, efficient DB queries, proper async I/O
4. **Enable multi-protocol** - Abstract base class ready for npm/Maven/etc.
5. **Maintain hermetic builds** - Immutable artifact content and metadata, mutable discovery data

## Architecture

```
┌─────────────────────────────────────────────────────────────────────┐
│ FastAPI Application │
│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │
│ │ PyPI Proxy │ │ npm Proxy │ │ Maven Proxy │ │ (future) │ │
│ │ Router │ │ Router │ │ Router │ │ │ │
│ └──────┬──────┘ └──────┬──────┘ └──────┬──────┘ └─────────────┘ │
│ │ │ │ │
│ └────────────────┼────────────────┘ │
│ ▼ │
│ ┌───────────────────────┐ │
│ │ PackageProxyBase │ ← Abstract base class │
│ │ - check_cache() │ │
│ │ - fetch_upstream() │ │
│ │ - store_artifact() │ │
│ │ - serve_artifact() │ │
│ └───────────┬───────────┘ │
│ │ │
│ ┌────────────────┼────────────────┐ │
│ ▼ ▼ ▼ │
│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │
│ │ HttpClient │ │ CacheService│ │ ThreadPool │ │
│ │ Manager │ │ (Redis) │ │ Executor │ │
│ └─────────────┘ └─────────────┘ └─────────────┘ │
│ │ │ │ │
└─────────┼────────────────┼────────────────┼──────────────────────────┘
▼ ▼ ▼
┌──────────┐ ┌──────────┐ ┌──────────────┐
│ Upstream │ │ Redis │ │ S3/MinIO │
│ Sources │ │ │ │ │
└──────────┘ └──────────┘ └──────────────┘
```

## Components

### 1. HttpClientManager

Manages httpx.AsyncClient pools with FastAPI lifespan integration.

**Features:**
- Default pool for general requests
- Per-upstream pools for sources needing specific config/auth
- Graceful shutdown drains in-flight requests
- Dedicated thread pool for blocking operations

**Configuration:**
```bash
ORCHARD_HTTP_MAX_CONNECTIONS=100       # Default pool size
ORCHARD_HTTP_KEEPALIVE_CONNECTIONS=20  # Keep-alive connections
ORCHARD_HTTP_CONNECT_TIMEOUT=30        # Connection timeout (seconds)
ORCHARD_HTTP_READ_TIMEOUT=60           # Read timeout (seconds)
ORCHARD_HTTP_WORKER_THREADS=32         # Thread pool size
```

**File:** `backend/app/http_client.py`

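A minimal sketch of what this manager could look like, built on httpx's pool and timeout primitives; the settings attribute names are assumptions mirroring the variables above, not the shipped code:

```python
# Sketch only: the settings attribute names (http_max_connections, ...) are
# assumptions mirroring the ORCHARD_HTTP_* variables above.
from concurrent.futures import ThreadPoolExecutor

import httpx


class HttpClientManager:
    """Owns pooled httpx.AsyncClient instances for the application's lifetime."""

    def __init__(self, settings):
        self._settings = settings
        self._client: httpx.AsyncClient | None = None
        self._executor: ThreadPoolExecutor | None = None
        self._active = 0  # simplified in-flight counter

    @property
    def active_connections(self) -> int:
        # 0 until start() has been called (matches the unit tests).
        return 0 if self._client is None else self._active

    async def start(self) -> None:
        limits = httpx.Limits(
            max_connections=self._settings.http_max_connections,
            max_keepalive_connections=self._settings.http_keepalive_connections,
        )
        timeout = httpx.Timeout(
            connect=self._settings.http_connect_timeout,
            read=self._settings.http_read_timeout,
            write=self._settings.http_read_timeout,
            pool=self._settings.http_connect_timeout,
        )
        self._client = httpx.AsyncClient(limits=limits, timeout=timeout)
        self._executor = ThreadPoolExecutor(max_workers=self._settings.http_worker_threads)

    async def stop(self) -> None:
        # Graceful shutdown: close the pool and stop accepting blocking work.
        if self._client is not None:
            await self._client.aclose()
            self._client = None
        if self._executor is not None:
            self._executor.shutdown(wait=True)
            self._executor = None

    @property
    def client(self) -> httpx.AsyncClient:
        assert self._client is not None, "HttpClientManager has not been started"
        return self._client
```
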
### 2. CacheService (Redis Layer)

Redis-backed caching with category-aware TTL and invalidation.

**Cache Categories:**

| Category | TTL | Invalidation | Purpose |
|----------|-----|--------------|---------|
| ARTIFACT_METADATA | Forever | Never (immutable) | Artifact info by SHA256 |
| ARTIFACT_DEPENDENCIES | Forever | Never (immutable) | Extracted deps by SHA256 |
| DEPENDENCY_RESOLUTION | Forever | Manual/refresh param | Resolution results |
| UPSTREAM_SOURCES | 1 hour | On DB change | Upstream config |
| PACKAGE_INDEX | 5 min | TTL only | PyPI/npm index pages |
| PACKAGE_VERSIONS | 5 min | TTL only | Version listings |

**Key format:** `orchard:{category}:{protocol}:{identifier}`

**Configuration:**
```bash
ORCHARD_REDIS_HOST=redis
ORCHARD_REDIS_PORT=6379
ORCHARD_REDIS_DB=0
ORCHARD_CACHE_TTL_INDEX=300      # Package index: 5 minutes
ORCHARD_CACHE_TTL_VERSIONS=300   # Version listings: 5 minutes
ORCHARD_CACHE_TTL_UPSTREAM=3600  # Upstream config: 1 hour
```

**File:** `backend/app/cache_service.py`

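To illustrate the key scheme and per-category TTLs, a sketch along these lines (assuming `redis.asyncio`; names are illustrative, not the shipped `CacheService`):

```python
# Illustrative sketch of the category-aware key scheme described above.
from enum import Enum
from typing import Optional

import redis.asyncio as redis


class CacheCategory(str, Enum):
    ARTIFACT_METADATA = "artifact_metadata"          # immutable, no TTL
    ARTIFACT_DEPENDENCIES = "artifact_dependencies"  # immutable, no TTL
    DEPENDENCY_RESOLUTION = "dependency_resolution"  # manual invalidation
    UPSTREAM_SOURCES = "upstream_sources"            # 1 hour
    PACKAGE_INDEX = "package_index"                  # 5 minutes
    PACKAGE_VERSIONS = "package_versions"            # 5 minutes


# None means "store without expiry", i.e. cache forever for immutable data.
DEFAULT_TTLS = {
    CacheCategory.ARTIFACT_METADATA: None,
    CacheCategory.ARTIFACT_DEPENDENCIES: None,
    CacheCategory.DEPENDENCY_RESOLUTION: None,
    CacheCategory.UPSTREAM_SOURCES: 3600,
    CacheCategory.PACKAGE_INDEX: 300,
    CacheCategory.PACKAGE_VERSIONS: 300,
}


def cache_key(category: CacheCategory, protocol: str, identifier: str) -> str:
    """Build keys of the form orchard:{category}:{protocol}:{identifier}."""
    return f"orchard:{category.value}:{protocol}:{identifier}"


class CacheService:
    def __init__(self, client: redis.Redis):
        self._redis = client

    async def get(self, category: CacheCategory, protocol: str, identifier: str) -> Optional[bytes]:
        return await self._redis.get(cache_key(category, protocol, identifier))

    async def set(self, category: CacheCategory, protocol: str, identifier: str, value: bytes) -> None:
        ttl = DEFAULT_TTLS[category]
        await self._redis.set(cache_key(category, protocol, identifier), value, ex=ttl)
```
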
### 3. PackageProxyBase

Abstract base class defining the cache→fetch→store→serve flow.

**Abstract methods (protocol-specific):**
- `get_protocol_name()` - Return 'pypi', 'npm', 'maven'
- `get_system_project_name()` - Return '_pypi', '_npm'
- `rewrite_index_html()` - Rewrite upstream index to Orchard URLs
- `extract_metadata()` - Extract deps from package file
- `parse_package_url()` - Parse URL into package/version/filename

**Concrete methods (shared):**
- `serve_index()` - Serve package index with caching
- `serve_artifact()` - Full cache→fetch→store→serve flow

**File:** `backend/app/proxy_base.py`

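The intended shape, sketched with `abc`; exact signatures are assumptions and the shared concrete bodies are elided:

```python
# Shape of the base class as described above; signatures are assumptions.
from abc import ABC, abstractmethod
from typing import Any, BinaryIO, Optional


class PackageProxyBase(ABC):
    """Shared cache -> fetch -> store -> serve flow for package protocols."""

    # Protocol-specific hooks -------------------------------------------
    @abstractmethod
    def get_protocol_name(self) -> str:
        """Return 'pypi', 'npm', 'maven', ..."""

    @abstractmethod
    def get_system_project_name(self) -> str:
        """Return the system project, e.g. '_pypi' or '_npm'."""

    @abstractmethod
    def rewrite_index_html(self, html: str) -> str:
        """Rewrite an upstream index page so links point back at Orchard."""

    @abstractmethod
    def extract_metadata(self, file: BinaryIO, filename: str) -> dict[str, Any]:
        """Extract dependency metadata from a package file."""

    @abstractmethod
    def parse_package_url(self, path: str) -> tuple[str, Optional[str], str]:
        """Split a request path into (package, version, filename)."""

    # Shared flow (concrete in the real class) --------------------------
    async def serve_index(self, package: str):
        """Serve the package index, consulting the Redis cache first."""
        ...

    async def serve_artifact(self, path: str):
        """check_cache() -> fetch_upstream() -> store_artifact() -> respond."""
        ...
```
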
### 4. ArtifactRepository (DB Optimization)

Optimized database operations eliminating N+1 queries.

**Key methods:**
- `get_or_create_artifact()` - Atomic upsert via ON CONFLICT
- `batch_upsert_dependencies()` - Single INSERT for all deps
- `get_cached_url_with_artifact()` - Joined query for cache lookup

**Query reduction:**

| Operation | Before | After |
|-----------|--------|-------|
| Cache hit check | 2 queries | 1 query (joined) |
| Store artifact | 3-4 queries | 1 query (upsert) |
| Store 50 deps | 50+ queries | 1 query (batch) |

**Configuration:**
```bash
ORCHARD_DATABASE_POOL_SIZE=20         # Base connections (up from 5)
ORCHARD_DATABASE_MAX_OVERFLOW=30      # Burst capacity (up from 10)
ORCHARD_DATABASE_POOL_TIMEOUT=30      # Wait timeout
ORCHARD_DATABASE_POOL_PRE_PING=false  # Disable in prod for performance
```

**File:** `backend/app/db_utils.py`

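As an illustration of the batch upsert, a single `INSERT ... ON CONFLICT DO NOTHING` statement replaces the per-row loop; the `Dependency` model and column names below are assumed for the sketch:

```python
# Sketch of the batched upsert; the Dependency ORM model and its column
# names are assumptions for illustration.
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.ext.asyncio import AsyncSession

from app.models import Dependency  # assumed model for the dependencies table


async def batch_upsert_dependencies(
    session: AsyncSession, artifact_id: str, deps: list[dict]
) -> None:
    """Insert every extracted dependency for one artifact in a single statement."""
    if not deps:
        return
    stmt = insert(Dependency).values(
        [
            {
                "artifact_id": artifact_id,
                "package_name": d["package_name"],
                "version_constraint": d.get("version_constraint"),
            }
            for d in deps
        ]
    )
    # Re-extracting the same artifact becomes a no-op instead of 50 duplicate-key errors.
    stmt = stmt.on_conflict_do_nothing(index_elements=["artifact_id", "package_name"])
    await session.execute(stmt)
```
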
### 5. Dependency Resolution Caching

Cache resolution results for ensure files and API queries.

**Cache key:** Hash of (artifact_id, max_depth, include_optional)

**Invalidation:** Manual only (immutable artifact deps mean cached resolutions stay valid)

**Refresh:** `?refresh=true` parameter forces fresh resolution

**File:** Updates to `backend/app/dependencies.py`

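Because all inputs to a resolution are immutable, the cache key can be a plain hash of the parameters, for example:

```python
# Illustrative only: derive a stable key from the immutable resolution inputs.
import hashlib
import json


def resolution_cache_key(artifact_id: str, max_depth: int, include_optional: bool) -> str:
    payload = json.dumps(
        {
            "artifact_id": artifact_id,
            "max_depth": max_depth,
            "include_optional": include_optional,
        },
        sort_keys=True,
    )
    digest = hashlib.sha256(payload.encode()).hexdigest()
    return f"orchard:dependency_resolution:pypi:{digest}"
```

A `?refresh=true` request would skip the lookup and overwrite the stored entry with a fresh result.
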
### 6. FastAPI Integration

Lifespan-managed infrastructure with dependency injection.

**Startup:**
1. Initialize HttpClientManager (connection pools)
2. Initialize CacheService (Redis connection)
3. Load upstream source configs

**Shutdown:**
1. Drain in-flight HTTP requests
2. Close Redis connections
3. Shutdown thread pool

**Health endpoint additions:**
- Database connection status
- Redis ping
- HTTP pool active/max connections
- Thread pool active/max workers

**File:** Updates to `backend/app/main.py`

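A sketch of the lifespan wiring, reusing the component shapes sketched above; `settings`, `create_redis_client()`, `load_upstream_sources()` and `CacheService.close()` are hypothetical helpers, not the shipped API:

```python
# Sketch only: helper names are hypothetical.
from contextlib import asynccontextmanager

from fastapi import FastAPI


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup: connection pools first, then the cache, then configuration.
    http_manager = HttpClientManager(settings)
    await http_manager.start()
    cache = CacheService(await create_redis_client(settings))

    app.state.http_manager = http_manager
    app.state.cache = cache
    app.state.upstream_sources = await load_upstream_sources()
    try:
        yield
    finally:
        # Shutdown: drain in-flight HTTP requests, close Redis, stop the pool.
        await http_manager.stop()
        await cache.close()


app = FastAPI(lifespan=lifespan)
```
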
## Files Summary

**New files:**
- `backend/app/http_client.py` - HttpClientManager
- `backend/app/cache_service.py` - CacheService
- `backend/app/proxy_base.py` - PackageProxyBase
- `backend/app/db_utils.py` - ArtifactRepository

**Modified files:**
- `backend/app/config.py` - New settings
- `backend/app/main.py` - Lifespan integration
- `backend/app/pypi_proxy.py` - Refactor to use base class
- `backend/app/dependencies.py` - Resolution caching
- `backend/app/routes.py` - Health endpoint, DI

## Hermetic Build Guarantees

**Immutable (cached forever):**
- Artifact content (by SHA256)
- Extracted dependencies for a specific artifact
- Dependency resolution results

**Mutable (TTL + event invalidation):**
- Package index listings
- Version discovery
- Upstream source configuration

Once an artifact is cached with SHA256 `abc123` and dependencies extracted, that data never changes.

## Performance Expectations

| Metric | Before | After |
|--------|--------|-------|
| HTTP connection setup | Per request (~100-500ms) | Pooled (~5ms) |
| Cache hit (index page) | N/A | ~5ms (Redis) |
| Store 50 dependencies | ~500ms (50 queries) | ~10ms (1 query) |
| Dependency resolution (cached) | N/A | ~5ms |
| Concurrent request capacity | ~15 (DB pool) | ~50 (configurable) |

## Testing Requirements

- Unit tests for each new component
- Integration tests for full proxy flow
- Load tests to verify pool sizing
- Cache hit/miss verification tests
1587 docs/plans/2026-02-04-pypi-proxy-performance-implementation.md Normal file (file diff suppressed because it is too large)
1117 frontend/package-lock.json generated (file diff suppressed because it is too large)
@@ -12,9 +12,12 @@
|
||||
"test:coverage": "vitest run --coverage"
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/dagre": "^0.7.53",
|
||||
"dagre": "^0.8.5",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-router-dom": "6.28.0"
|
||||
"react-router-dom": "6.28.0",
|
||||
"reactflow": "^11.11.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@testing-library/jest-dom": "^6.4.2",
|
||||
@@ -34,6 +37,15 @@
|
||||
"ufo": "1.5.4",
|
||||
"rollup": "4.52.4",
|
||||
"caniuse-lite": "1.0.30001692",
|
||||
"baseline-browser-mapping": "2.9.5"
|
||||
"baseline-browser-mapping": "2.9.5",
|
||||
"lodash": "4.17.21",
|
||||
"electron-to-chromium": "1.5.72",
|
||||
"@babel/core": "7.26.0",
|
||||
"@babel/traverse": "7.26.4",
|
||||
"@babel/types": "7.26.3",
|
||||
"@babel/compat-data": "7.26.3",
|
||||
"@babel/parser": "7.26.3",
|
||||
"@babel/generator": "7.26.3",
|
||||
"@babel/code-frame": "7.26.2"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
import {
|
||||
Project,
|
||||
Package,
|
||||
Tag,
|
||||
TagDetail,
|
||||
Artifact,
|
||||
ArtifactDetail,
|
||||
PackageArtifact,
|
||||
UploadResponse,
|
||||
PaginatedResponse,
|
||||
ListParams,
|
||||
TagListParams,
|
||||
PackageListParams,
|
||||
ArtifactListParams,
|
||||
ProjectListParams,
|
||||
@@ -78,7 +75,13 @@ export class ForbiddenError extends ApiError {
|
||||
async function handleResponse<T>(response: Response): Promise<T> {
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
|
||||
const message = error.detail || `HTTP ${response.status}`;
|
||||
// Handle detail as string or object (backend may return structured errors)
|
||||
let message: string;
|
||||
if (typeof error.detail === 'object') {
|
||||
message = JSON.stringify(error.detail);
|
||||
} else {
|
||||
message = error.detail || `HTTP ${response.status}`;
|
||||
}
|
||||
|
||||
if (response.status === 401) {
|
||||
throw new UnauthorizedError(message);
|
||||
@@ -234,32 +237,6 @@ export async function createPackage(projectName: string, data: { name: string; d
|
||||
return handleResponse<Package>(response);
|
||||
}
|
||||
|
||||
// Tag API
|
||||
export async function listTags(projectName: string, packageName: string, params: TagListParams = {}): Promise<PaginatedResponse<TagDetail>> {
|
||||
const query = buildQueryString(params as Record<string, unknown>);
|
||||
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags${query}`);
|
||||
return handleResponse<PaginatedResponse<TagDetail>>(response);
|
||||
}
|
||||
|
||||
export async function listTagsSimple(projectName: string, packageName: string, params: TagListParams = {}): Promise<TagDetail[]> {
|
||||
const data = await listTags(projectName, packageName, params);
|
||||
return data.items;
|
||||
}
|
||||
|
||||
export async function getTag(projectName: string, packageName: string, tagName: string): Promise<TagDetail> {
|
||||
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags/${tagName}`);
|
||||
return handleResponse<TagDetail>(response);
|
||||
}
|
||||
|
||||
export async function createTag(projectName: string, packageName: string, data: { name: string; artifact_id: string }): Promise<Tag> {
|
||||
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(data),
|
||||
});
|
||||
return handleResponse<Tag>(response);
|
||||
}
|
||||
|
||||
// Artifact API
|
||||
export async function getArtifact(artifactId: string): Promise<ArtifactDetail> {
|
||||
const response = await fetch(`${API_BASE}/artifact/${artifactId}`);
|
||||
@@ -270,10 +247,10 @@ export async function listPackageArtifacts(
|
||||
projectName: string,
|
||||
packageName: string,
|
||||
params: ArtifactListParams = {}
|
||||
): Promise<PaginatedResponse<Artifact & { tags: string[] }>> {
|
||||
): Promise<PaginatedResponse<PackageArtifact>> {
|
||||
const query = buildQueryString(params as Record<string, unknown>);
|
||||
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/artifacts${query}`);
|
||||
return handleResponse<PaginatedResponse<Artifact & { tags: string[] }>>(response);
|
||||
return handleResponse<PaginatedResponse<PackageArtifact>>(response);
|
||||
}
|
||||
|
||||
// Upload
|
||||
@@ -281,14 +258,10 @@ export async function uploadArtifact(
|
||||
projectName: string,
|
||||
packageName: string,
|
||||
file: File,
|
||||
tag?: string,
|
||||
version?: string
|
||||
): Promise<UploadResponse> {
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
if (tag) {
|
||||
formData.append('tag', tag);
|
||||
}
|
||||
if (version) {
|
||||
formData.append('version', version);
|
||||
}
|
||||
@@ -746,3 +719,4 @@ export async function testUpstreamSource(id: string): Promise<UpstreamSourceTest
|
||||
});
|
||||
return handleResponse<UpstreamSourceTestResult>(response);
|
||||
}
|
||||
|
||||
|
||||
@@ -55,6 +55,10 @@
|
||||
font-size: 0.8125rem;
|
||||
}
|
||||
|
||||
.missing-count {
|
||||
color: #f59e0b;
|
||||
}
|
||||
|
||||
.close-btn {
|
||||
background: transparent;
|
||||
border: none;
|
||||
@@ -72,171 +76,115 @@
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.dependency-graph-toolbar {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
padding: 12px 20px;
|
||||
border-bottom: 1px solid var(--border-primary);
|
||||
background: var(--bg-secondary);
|
||||
}
|
||||
|
||||
.zoom-level {
|
||||
margin-left: auto;
|
||||
font-size: 0.8125rem;
|
||||
color: var(--text-muted);
|
||||
font-family: 'JetBrains Mono', monospace;
|
||||
}
|
||||
|
||||
.dependency-graph-container {
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
position: relative;
|
||||
background:
|
||||
linear-gradient(90deg, var(--border-primary) 1px, transparent 1px),
|
||||
linear-gradient(var(--border-primary) 1px, transparent 1px);
|
||||
background-size: 20px 20px;
|
||||
background-position: center center;
|
||||
background: var(--bg-primary);
|
||||
}
|
||||
|
||||
.graph-canvas {
|
||||
padding: 40px;
|
||||
min-width: 100%;
|
||||
min-height: 100%;
|
||||
transform-origin: center center;
|
||||
transition: transform 0.1s ease-out;
|
||||
/* React Flow Customization */
|
||||
.react-flow__background {
|
||||
background-color: var(--bg-primary) !important;
|
||||
}
|
||||
|
||||
/* Graph Nodes */
|
||||
.graph-node-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
.react-flow__controls {
|
||||
background: var(--bg-tertiary);
|
||||
border: 1px solid var(--border-primary);
|
||||
border-radius: var(--radius-md);
|
||||
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3);
|
||||
}
|
||||
|
||||
.graph-node {
|
||||
.react-flow__controls-button {
|
||||
background: var(--bg-tertiary);
|
||||
border: none;
|
||||
border-bottom: 1px solid var(--border-primary);
|
||||
color: var(--text-secondary);
|
||||
width: 28px;
|
||||
height: 28px;
|
||||
}
|
||||
|
||||
.react-flow__controls-button:hover {
|
||||
background: var(--bg-hover);
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.react-flow__controls-button:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
.react-flow__controls-button svg {
|
||||
fill: currentColor;
|
||||
}
|
||||
|
||||
.react-flow__attribution {
|
||||
background: transparent !important;
|
||||
}
|
||||
|
||||
.react-flow__attribution a {
|
||||
color: var(--text-muted) !important;
|
||||
font-size: 10px;
|
||||
}
|
||||
|
||||
/* Custom Flow Nodes */
|
||||
.flow-node {
|
||||
background: var(--bg-tertiary);
|
||||
border: 2px solid var(--border-primary);
|
||||
border-radius: var(--radius-md);
|
||||
padding: 12px 16px;
|
||||
min-width: 200px;
|
||||
min-width: 160px;
|
||||
cursor: pointer;
|
||||
transition: all var(--transition-fast);
|
||||
position: relative;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.graph-node:hover {
|
||||
.flow-node:hover {
|
||||
border-color: var(--accent-primary);
|
||||
box-shadow: 0 4px 12px rgba(16, 185, 129, 0.2);
|
||||
}
|
||||
|
||||
.graph-node--root {
|
||||
.flow-node--root {
|
||||
background: linear-gradient(135deg, rgba(16, 185, 129, 0.15) 0%, rgba(5, 150, 105, 0.15) 100%);
|
||||
border-color: var(--accent-primary);
|
||||
}
|
||||
|
||||
.graph-node--hovered {
|
||||
transform: scale(1.02);
|
||||
}
|
||||
|
||||
.graph-node__header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
|
||||
.graph-node__name {
|
||||
.flow-node__name {
|
||||
font-weight: 600;
|
||||
color: var(--accent-primary);
|
||||
font-family: 'JetBrains Mono', monospace;
|
||||
font-size: 0.875rem;
|
||||
font-size: 0.8125rem;
|
||||
margin-bottom: 4px;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.graph-node__toggle {
|
||||
background: var(--bg-hover);
|
||||
border: 1px solid var(--border-primary);
|
||||
border-radius: 4px;
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
.flow-node__details {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
cursor: pointer;
|
||||
font-size: 0.875rem;
|
||||
color: var(--text-secondary);
|
||||
font-weight: 600;
|
||||
margin-left: auto;
|
||||
}
|
||||
|
||||
.graph-node__toggle:hover {
|
||||
background: var(--bg-tertiary);
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.graph-node__details {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
font-size: 0.75rem;
|
||||
gap: 8px;
|
||||
font-size: 0.6875rem;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
.graph-node__version {
|
||||
.flow-node__version {
|
||||
font-family: 'JetBrains Mono', monospace;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.graph-node__size {
|
||||
.flow-node__size {
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
/* Graph Children / Tree Structure */
|
||||
.graph-children {
|
||||
display: flex;
|
||||
padding-left: 24px;
|
||||
margin-top: 8px;
|
||||
position: relative;
|
||||
/* Flow Handles (connection points) */
|
||||
.flow-handle {
|
||||
width: 8px !important;
|
||||
height: 8px !important;
|
||||
background: var(--border-primary) !important;
|
||||
border: 2px solid var(--bg-tertiary) !important;
|
||||
}
|
||||
|
||||
.graph-connector {
|
||||
position: absolute;
|
||||
left: 12px;
|
||||
top: 0;
|
||||
bottom: 50%;
|
||||
width: 12px;
|
||||
border-left: 2px solid var(--border-primary);
|
||||
border-bottom: 2px solid var(--border-primary);
|
||||
border-bottom-left-radius: 8px;
|
||||
}
|
||||
|
||||
.graph-children-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.graph-children-list::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
left: -12px;
|
||||
top: 20px;
|
||||
bottom: 20px;
|
||||
border-left: 2px solid var(--border-primary);
|
||||
}
|
||||
|
||||
.graph-children-list > .graph-node-container {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.graph-children-list > .graph-node-container::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
left: -12px;
|
||||
top: 20px;
|
||||
width: 12px;
|
||||
border-top: 2px solid var(--border-primary);
|
||||
.flow-node:hover .flow-handle {
|
||||
background: var(--accent-primary) !important;
|
||||
}
|
||||
|
||||
/* Loading, Error, Empty States */
|
||||
@@ -279,39 +227,76 @@
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
/* Tooltip */
|
||||
.graph-tooltip {
|
||||
position: fixed;
|
||||
bottom: 24px;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
background: var(--bg-tertiary);
|
||||
border: 1px solid var(--border-primary);
|
||||
border-radius: var(--radius-md);
|
||||
padding: 12px 16px;
|
||||
font-size: 0.8125rem;
|
||||
box-shadow: 0 8px 24px rgba(0, 0, 0, 0.4);
|
||||
z-index: 1001;
|
||||
.graph-warning {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
padding: 8px 16px;
|
||||
background: rgba(245, 158, 11, 0.1);
|
||||
border-top: 1px solid rgba(245, 158, 11, 0.3);
|
||||
color: var(--warning-color, #f59e0b);
|
||||
font-size: 0.875rem;
|
||||
}
|
||||
|
||||
.graph-tooltip strong {
|
||||
display: block;
|
||||
color: var(--accent-primary);
|
||||
font-family: 'JetBrains Mono', monospace;
|
||||
margin-bottom: 4px;
|
||||
.graph-warning svg {
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.graph-tooltip div {
|
||||
color: var(--text-secondary);
|
||||
margin-top: 2px;
|
||||
}
|
||||
|
||||
.tooltip-hint {
|
||||
margin-top: 8px;
|
||||
padding-top: 8px;
|
||||
/* Missing Dependencies */
|
||||
.missing-dependencies {
|
||||
border-top: 1px solid var(--border-primary);
|
||||
color: var(--text-muted);
|
||||
padding: 16px 20px;
|
||||
background: rgba(245, 158, 11, 0.05);
|
||||
max-height: 200px;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.missing-dependencies h3 {
|
||||
margin: 0 0 8px 0;
|
||||
font-size: 0.875rem;
|
||||
font-weight: 600;
|
||||
color: #f59e0b;
|
||||
}
|
||||
|
||||
.missing-hint {
|
||||
margin: 0 0 12px 0;
|
||||
font-size: 0.75rem;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
.missing-list {
|
||||
list-style: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.missing-item {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
background: var(--bg-tertiary);
|
||||
border: 1px solid rgba(245, 158, 11, 0.3);
|
||||
border-radius: var(--radius-sm);
|
||||
padding: 4px 8px;
|
||||
font-size: 0.75rem;
|
||||
}
|
||||
|
||||
.missing-name {
|
||||
font-family: 'JetBrains Mono', monospace;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.missing-constraint {
|
||||
color: var(--text-muted);
|
||||
font-family: 'JetBrains Mono', monospace;
|
||||
}
|
||||
|
||||
.missing-required-by {
|
||||
color: var(--text-muted);
|
||||
font-size: 0.6875rem;
|
||||
}
|
||||
|
||||
/* Responsive */
|
||||
|
||||
@@ -1,5 +1,19 @@
|
||||
import { useState, useEffect, useCallback, useRef } from 'react';
|
||||
import { useState, useEffect, useCallback, useMemo } from 'react';
|
||||
import { useNavigate } from 'react-router-dom';
|
||||
import ReactFlow, {
|
||||
Node,
|
||||
Edge,
|
||||
Controls,
|
||||
Background,
|
||||
useNodesState,
|
||||
useEdgesState,
|
||||
MarkerType,
|
||||
NodeProps,
|
||||
Handle,
|
||||
Position,
|
||||
} from 'reactflow';
|
||||
import dagre from 'dagre';
|
||||
import 'reactflow/dist/style.css';
|
||||
import { ResolvedArtifact, DependencyResolutionResponse, Dependency } from '../types';
|
||||
import { resolveDependencies, getArtifactDependencies } from '../api';
|
||||
import './DependencyGraph.css';
|
||||
@@ -11,15 +25,14 @@ interface DependencyGraphProps {
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
interface GraphNode {
|
||||
id: string;
|
||||
interface NodeData {
|
||||
label: string;
|
||||
project: string;
|
||||
package: string;
|
||||
version: string | null;
|
||||
size: number;
|
||||
depth: number;
|
||||
children: GraphNode[];
|
||||
isRoot?: boolean;
|
||||
isRoot: boolean;
|
||||
onNavigate: (project: string, pkg: string) => void;
|
||||
}
|
||||
|
||||
function formatBytes(bytes: number): string {
|
||||
@@ -30,84 +43,185 @@ function formatBytes(bytes: number): string {
|
||||
return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
// Custom node component
|
||||
function DependencyNode({ data }: NodeProps<NodeData>) {
|
||||
return (
|
||||
<div
|
||||
className={`flow-node ${data.isRoot ? 'flow-node--root' : ''}`}
|
||||
onClick={() => data.onNavigate(data.project, data.package)}
|
||||
>
|
||||
<Handle type="target" position={Position.Top} className="flow-handle" />
|
||||
<div className="flow-node__name">{data.package}</div>
|
||||
<div className="flow-node__details">
|
||||
{data.version && <span className="flow-node__version">{data.version}</span>}
|
||||
<span className="flow-node__size">{formatBytes(data.size)}</span>
|
||||
</div>
|
||||
<Handle type="source" position={Position.Bottom} className="flow-handle" />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const nodeTypes = { dependency: DependencyNode };
|
||||
|
||||
// Dagre layout function
|
||||
function getLayoutedElements(
|
||||
nodes: Node<NodeData>[],
|
||||
edges: Edge[],
|
||||
direction: 'TB' | 'LR' = 'TB'
|
||||
) {
|
||||
const dagreGraph = new dagre.graphlib.Graph();
|
||||
dagreGraph.setDefaultEdgeLabel(() => ({}));
|
||||
|
||||
const nodeWidth = 180;
|
||||
const nodeHeight = 60;
|
||||
|
||||
dagreGraph.setGraph({ rankdir: direction, nodesep: 50, ranksep: 80 });
|
||||
|
||||
nodes.forEach((node) => {
|
||||
dagreGraph.setNode(node.id, { width: nodeWidth, height: nodeHeight });
|
||||
});
|
||||
|
||||
edges.forEach((edge) => {
|
||||
dagreGraph.setEdge(edge.source, edge.target);
|
||||
});
|
||||
|
||||
dagre.layout(dagreGraph);
|
||||
|
||||
const layoutedNodes = nodes.map((node) => {
|
||||
const nodeWithPosition = dagreGraph.node(node.id);
|
||||
return {
|
||||
...node,
|
||||
position: {
|
||||
x: nodeWithPosition.x - nodeWidth / 2,
|
||||
y: nodeWithPosition.y - nodeHeight / 2,
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
return { nodes: layoutedNodes, edges };
|
||||
}
|
||||
|
||||
function DependencyGraph({ projectName, packageName, tagName, onClose }: DependencyGraphProps) {
|
||||
const navigate = useNavigate();
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [warning, setWarning] = useState<string | null>(null);
|
||||
const [resolution, setResolution] = useState<DependencyResolutionResponse | null>(null);
|
||||
const [graphRoot, setGraphRoot] = useState<GraphNode | null>(null);
|
||||
const [hoveredNode, setHoveredNode] = useState<GraphNode | null>(null);
|
||||
const [zoom, setZoom] = useState(1);
|
||||
const [pan, setPan] = useState({ x: 0, y: 0 });
|
||||
const [isDragging, setIsDragging] = useState(false);
|
||||
const [dragStart, setDragStart] = useState({ x: 0, y: 0 });
|
||||
const [collapsedNodes, setCollapsedNodes] = useState<Set<string>>(new Set());
|
||||
const [nodes, setNodes, onNodesChange] = useNodesState<NodeData>([]);
|
||||
const [edges, setEdges, onEdgesChange] = useEdgesState([]);
|
||||
|
||||
const handleNavigate = useCallback((project: string, pkg: string) => {
|
||||
navigate(`/project/${project}/${pkg}`);
|
||||
onClose();
|
||||
}, [navigate, onClose]);
|
||||
|
||||
// Build graph structure from resolution data
|
||||
const buildGraph = useCallback(async (resolutionData: DependencyResolutionResponse) => {
|
||||
const buildFlowGraph = useCallback(async (
|
||||
resolutionData: DependencyResolutionResponse,
|
||||
onNavigate: (project: string, pkg: string) => void
|
||||
) => {
|
||||
const artifactMap = new Map<string, ResolvedArtifact>();
|
||||
resolutionData.resolved.forEach(artifact => {
|
||||
artifactMap.set(artifact.artifact_id, artifact);
|
||||
});
|
||||
|
||||
// Fetch dependencies for each artifact to build the tree
|
||||
// Fetch dependencies for each artifact
|
||||
const depsMap = new Map<string, Dependency[]>();
|
||||
const failedFetches: string[] = [];
|
||||
|
||||
for (const artifact of resolutionData.resolved) {
|
||||
try {
|
||||
const deps = await getArtifactDependencies(artifact.artifact_id);
|
||||
depsMap.set(artifact.artifact_id, deps.dependencies);
|
||||
} catch {
|
||||
} catch (err) {
|
||||
console.warn(`Failed to fetch dependencies for ${artifact.package}:`, err);
|
||||
failedFetches.push(artifact.package);
|
||||
depsMap.set(artifact.artifact_id, []);
|
||||
}
|
||||
}
|
||||
|
||||
// Find the root artifact (the requested one)
|
||||
// Report warning if some fetches failed
|
||||
if (failedFetches.length > 0) {
|
||||
setWarning(`Could not load dependency details for: ${failedFetches.slice(0, 3).join(', ')}${failedFetches.length > 3 ? ` and ${failedFetches.length - 3} more` : ''}`);
|
||||
}
|
||||
|
||||
// Find the root artifact
|
||||
const rootArtifact = resolutionData.resolved.find(
|
||||
a => a.project === resolutionData.requested.project &&
|
||||
a.package === resolutionData.requested.package
|
||||
);
|
||||
|
||||
if (!rootArtifact) {
|
||||
return null;
|
||||
return { nodes: [], edges: [] };
|
||||
}
|
||||
|
||||
// Build tree recursively
|
||||
const flowNodes: Node<NodeData>[] = [];
|
||||
const flowEdges: Edge[] = [];
|
||||
const visited = new Set<string>();
|
||||
const nodeIdMap = new Map<string, string>(); // artifact_id -> node id
|
||||
|
||||
// Build nodes and edges recursively
|
||||
const processNode = (artifact: ResolvedArtifact, isRoot: boolean) => {
|
||||
if (visited.has(artifact.artifact_id)) {
|
||||
return nodeIdMap.get(artifact.artifact_id);
|
||||
}
|
||||
|
||||
const buildNode = (artifact: ResolvedArtifact, depth: number): GraphNode => {
|
||||
const nodeId = `${artifact.project}/${artifact.package}`;
|
||||
visited.add(artifact.artifact_id);
|
||||
const nodeId = `node-${flowNodes.length}`;
|
||||
nodeIdMap.set(artifact.artifact_id, nodeId);
|
||||
|
||||
flowNodes.push({
|
||||
id: nodeId,
|
||||
type: 'dependency',
|
||||
position: { x: 0, y: 0 }, // Will be set by dagre
|
||||
data: {
|
||||
label: `${artifact.project}/${artifact.package}`,
|
||||
project: artifact.project,
|
||||
package: artifact.package,
|
||||
version: artifact.version,
|
||||
size: artifact.size,
|
||||
isRoot,
|
||||
onNavigate,
|
||||
},
|
||||
});
|
||||
|
||||
const deps = depsMap.get(artifact.artifact_id) || [];
|
||||
const children: GraphNode[] = [];
|
||||
|
||||
for (const dep of deps) {
|
||||
// Find the resolved artifact for this dependency
|
||||
const childArtifact = resolutionData.resolved.find(
|
||||
a => a.project === dep.project && a.package === dep.package
|
||||
);
|
||||
|
||||
if (childArtifact && !visited.has(childArtifact.artifact_id)) {
|
||||
children.push(buildNode(childArtifact, depth + 1));
|
||||
if (childArtifact) {
|
||||
const childNodeId = processNode(childArtifact, false);
|
||||
if (childNodeId) {
|
||||
flowEdges.push({
|
||||
id: `edge-${nodeId}-${childNodeId}`,
|
||||
source: nodeId,
|
||||
target: childNodeId,
|
||||
markerEnd: {
|
||||
type: MarkerType.ArrowClosed,
|
||||
width: 15,
|
||||
height: 15,
|
||||
color: 'var(--accent-primary)',
|
||||
},
|
||||
style: {
|
||||
stroke: 'var(--border-primary)',
|
||||
strokeWidth: 2,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
id: nodeId,
|
||||
project: artifact.project,
|
||||
package: artifact.package,
|
||||
version: artifact.version || artifact.tag,
|
||||
size: artifact.size,
|
||||
depth,
|
||||
children,
|
||||
isRoot: depth === 0,
|
||||
};
|
||||
return nodeId;
|
||||
};
|
||||
|
||||
return buildNode(rootArtifact, 0);
|
||||
processNode(rootArtifact, true);
|
||||
|
||||
// Apply dagre layout
|
||||
return getLayoutedElements(flowNodes, flowEdges);
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -117,13 +231,21 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende

try {
const result = await resolveDependencies(projectName, packageName, tagName);

// If only the root package (no dependencies) and no missing deps, close the modal
const hasDeps = result.artifact_count > 1 || (result.missing && result.missing.length > 0);
if (!hasDeps) {
onClose();
return;
}

setResolution(result);

const graph = await buildGraph(result);
setGraphRoot(graph);
const { nodes: layoutedNodes, edges: layoutedEdges } = await buildFlowGraph(result, handleNavigate);
setNodes(layoutedNodes);
setEdges(layoutedEdges);
} catch (err) {
if (err instanceof Error) {
// Check if it's a resolution error
try {
const errorData = JSON.parse(err.message);
if (errorData.error === 'circular_dependency') {
@@ -145,95 +267,9 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
}

loadData();
}, [projectName, packageName, tagName, buildGraph]);
}, [projectName, packageName, tagName, buildFlowGraph, handleNavigate, onClose, setNodes, setEdges]);

const handleNodeClick = (node: GraphNode) => {
navigate(`/project/${node.project}/${node.package}`);
onClose();
};

const handleNodeToggle = (node: GraphNode, e: React.MouseEvent) => {
e.stopPropagation();
setCollapsedNodes(prev => {
const next = new Set(prev);
if (next.has(node.id)) {
next.delete(node.id);
} else {
next.add(node.id);
}
return next;
});
};

const handleWheel = (e: React.WheelEvent) => {
e.preventDefault();
const delta = e.deltaY > 0 ? -0.1 : 0.1;
setZoom(z => Math.max(0.25, Math.min(2, z + delta)));
};

const handleMouseDown = (e: React.MouseEvent) => {
if (e.target === containerRef.current || (e.target as HTMLElement).classList.contains('graph-canvas')) {
setIsDragging(true);
setDragStart({ x: e.clientX - pan.x, y: e.clientY - pan.y });
}
};

const handleMouseMove = (e: React.MouseEvent) => {
if (isDragging) {
setPan({ x: e.clientX - dragStart.x, y: e.clientY - dragStart.y });
}
};

const handleMouseUp = () => {
setIsDragging(false);
};

const resetView = () => {
setZoom(1);
setPan({ x: 0, y: 0 });
};

const renderNode = (node: GraphNode, index: number = 0): JSX.Element => {
const isCollapsed = collapsedNodes.has(node.id);
const hasChildren = node.children.length > 0;

return (
<div key={`${node.id}-${index}`} className="graph-node-container">
<div
className={`graph-node ${node.isRoot ? 'graph-node--root' : ''} ${hoveredNode?.id === node.id ? 'graph-node--hovered' : ''}`}
onClick={() => handleNodeClick(node)}
onMouseEnter={() => setHoveredNode(node)}
onMouseLeave={() => setHoveredNode(null)}
>
<div className="graph-node__header">
<span className="graph-node__name">{node.project}/{node.package}</span>
{hasChildren && (
<button
className="graph-node__toggle"
onClick={(e) => handleNodeToggle(node, e)}
title={isCollapsed ? 'Expand' : 'Collapse'}
>
{isCollapsed ? '+' : '-'}
</button>
)}
</div>
<div className="graph-node__details">
{node.version && <span className="graph-node__version">@ {node.version}</span>}
<span className="graph-node__size">{formatBytes(node.size)}</span>
</div>
</div>

{hasChildren && !isCollapsed && (
<div className="graph-children">
<div className="graph-connector"></div>
<div className="graph-children-list">
{node.children.map((child, i) => renderNode(child, i))}
</div>
</div>
)}
</div>
);
};
const defaultViewport = useMemo(() => ({ x: 50, y: 50, zoom: 0.8 }), []);

return (
<div className="dependency-graph-modal" onClick={onClose}>
@@ -244,7 +280,11 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
<span>{projectName}/{packageName} @ {tagName}</span>
{resolution && (
<span className="graph-stats">
{resolution.artifact_count} packages • {formatBytes(resolution.total_size)} total
{resolution.artifact_count} cached
{resolution.missing && resolution.missing.length > 0 && (
<span className="missing-count"> • {resolution.missing.length} not cached</span>
)}
• {formatBytes(resolution.total_size)} total
</span>
)}
</div>
@@ -256,28 +296,7 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
</button>
</div>

<div className="dependency-graph-toolbar">
<button className="btn btn-secondary btn-small" onClick={() => setZoom(z => Math.min(2, z + 0.25))}>
Zoom In
</button>
<button className="btn btn-secondary btn-small" onClick={() => setZoom(z => Math.max(0.25, z - 0.25))}>
Zoom Out
</button>
<button className="btn btn-secondary btn-small" onClick={resetView}>
Reset View
</button>
<span className="zoom-level">{Math.round(zoom * 100)}%</span>
</div>

<div
ref={containerRef}
className="dependency-graph-container"
onWheel={handleWheel}
onMouseDown={handleMouseDown}
onMouseMove={handleMouseMove}
onMouseUp={handleMouseUp}
onMouseLeave={handleMouseUp}
>
<div className="dependency-graph-container">
{loading ? (
<div className="graph-loading">
<div className="spinner"></div>
@@ -292,27 +311,52 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
</svg>
<p>{error}</p>
</div>
) : graphRoot ? (
<div
className="graph-canvas"
style={{
transform: `translate(${pan.x}px, ${pan.y}px) scale(${zoom})`,
cursor: isDragging ? 'grabbing' : 'grab',
}}
) : nodes.length > 0 ? (
<ReactFlow
nodes={nodes}
edges={edges}
onNodesChange={onNodesChange}
onEdgesChange={onEdgesChange}
nodeTypes={nodeTypes}
defaultViewport={defaultViewport}
fitView
fitViewOptions={{ padding: 0.2 }}
minZoom={0.1}
maxZoom={2}
attributionPosition="bottom-left"
>
{renderNode(graphRoot)}
</div>
<Controls />
<Background color="var(--border-primary)" gap={20} />
</ReactFlow>
) : (
<div className="graph-empty">No dependencies to display</div>
)}
</div>

{hoveredNode && (
<div className="graph-tooltip">
<strong>{hoveredNode.project}/{hoveredNode.package}</strong>
{hoveredNode.version && <div>Version: {hoveredNode.version}</div>}
<div>Size: {formatBytes(hoveredNode.size)}</div>
<div className="tooltip-hint">Click to navigate</div>
{warning && (
<div className="graph-warning">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M10.29 3.86L1.82 18a2 2 0 0 0 1.71 3h16.94a2 2 0 0 0 1.71-3L13.71 3.86a2 2 0 0 0-3.42 0z"></path>
<line x1="12" y1="9" x2="12" y2="13"></line>
<line x1="12" y1="17" x2="12.01" y2="17"></line>
</svg>
<span>{warning}</span>
</div>
)}

{resolution && resolution.missing && resolution.missing.length > 0 && (
<div className="missing-dependencies">
<h3>Not Cached ({resolution.missing.length})</h3>
<p className="missing-hint">These dependencies are referenced but not yet cached on the server.</p>
<ul className="missing-list">
{resolution.missing.map((dep, i) => (
<li key={i} className="missing-item">
<span className="missing-name">{dep.project}/{dep.package}</span>
{dep.constraint && <span className="missing-constraint">@{dep.constraint}</span>}
{dep.required_by && <span className="missing-required-by">← {dep.required_by}</span>}
</li>
))}
</ul>
</div>
)}
</div>

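Note: the new graph code above places every node at { x: 0, y: 0 } and defers positioning to a getLayoutedElements helper that is not part of this excerpt. A minimal sketch of what such a dagre-based helper usually looks like with React Flow, assuming the reactflow and dagre packages and illustrative node dimensions (NODE_WIDTH and NODE_HEIGHT are not taken from the diff):

```typescript
import dagre from 'dagre';
import type { Edge, Node } from 'reactflow';

// Illustrative defaults; the real component may size nodes differently.
const NODE_WIDTH = 220;
const NODE_HEIGHT = 72;

export function getLayoutedElements<T>(
  nodes: Node<T>[],
  edges: Edge[]
): { nodes: Node<T>[]; edges: Edge[] } {
  const g = new dagre.graphlib.Graph();
  g.setDefaultEdgeLabel(() => ({}));
  g.setGraph({ rankdir: 'TB', nodesep: 40, ranksep: 60 }); // top-to-bottom tree

  nodes.forEach((n) => g.setNode(n.id, { width: NODE_WIDTH, height: NODE_HEIGHT }));
  edges.forEach((e) => g.setEdge(e.source, e.target));

  dagre.layout(g);

  // dagre reports centre coordinates; React Flow positions nodes by their top-left corner.
  const layouted = nodes.map((n) => {
    const { x, y } = g.node(n.id);
    return { ...n, position: { x: x - NODE_WIDTH / 2, y: y - NODE_HEIGHT / 2 } };
  });

  return { nodes: layouted, edges };
}
```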
@@ -290,20 +290,25 @@
color: var(--error-color, #dc3545);
}

/* Progress Bar */
.progress-bar {
/* Progress Bar - scoped to upload component */
.drag-drop-upload .progress-bar,
.upload-queue .progress-bar {
height: 8px;
background: var(--border-color, #ddd);
border-radius: 4px;
overflow: hidden;
width: 100%;
max-width: 100%;
}

.progress-bar--small {
.drag-drop-upload .progress-bar--small,
.upload-queue .progress-bar--small {
height: 4px;
margin-top: 0.25rem;
}

.progress-bar__fill {
.drag-drop-upload .progress-bar__fill,
.upload-queue .progress-bar__fill {
height: 100%;
background: var(--accent-color, #007bff);
border-radius: 4px;

@@ -504,42 +504,4 @@ describe('DragDropUpload', () => {
});
});
});

describe('Tag Support', () => {
it('includes tag in upload request', async () => {
let capturedFormData: FormData | null = null;

class MockXHR {
status = 200;
responseText = JSON.stringify({ artifact_id: 'abc123', size: 100 });
timeout = 0;
upload = { addEventListener: vi.fn() };
addEventListener = vi.fn((event: string, handler: () => void) => {
if (event === 'load') setTimeout(handler, 10);
});
open = vi.fn();
send = vi.fn((data: FormData) => {
capturedFormData = data;
});
}
vi.stubGlobal('XMLHttpRequest', MockXHR);

render(<DragDropUpload {...defaultProps} tag="v1.0.0" />);

const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('test.txt', 100, 'text/plain');

Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});

fireEvent.change(input);

await vi.advanceTimersByTimeAsync(100);

await waitFor(() => {
expect(capturedFormData?.get('tag')).toBe('v1.0.0');
});
});
});
});

@@ -13,7 +13,6 @@ interface StoredUploadState {
completedParts: number[];
project: string;
package: string;
tag?: string;
createdAt: number;
}

@@ -87,7 +86,6 @@ export interface DragDropUploadProps {
maxFileSize?: number; // in bytes
maxConcurrentUploads?: number;
maxRetries?: number;
tag?: string;
className?: string;
disabled?: boolean;
disabledReason?: string;
@@ -230,7 +228,6 @@ export function DragDropUpload({
maxFileSize,
maxConcurrentUploads = 3,
maxRetries = 3,
tag,
className = '',
disabled = false,
disabledReason,
@@ -368,7 +365,6 @@ export function DragDropUpload({
expected_hash: fileHash,
filename: item.file.name,
size: item.file.size,
tag: tag || undefined,
}),
}
);
@@ -392,7 +388,6 @@
completedParts: [],
project: projectName,
package: packageName,
tag: tag || undefined,
createdAt: Date.now(),
});

@@ -438,7 +433,6 @@
completedParts,
project: projectName,
package: packageName,
tag: tag || undefined,
createdAt: Date.now(),
});

@@ -459,7 +453,7 @@
{
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ tag: tag || undefined }),
body: JSON.stringify({}),
}
);

@@ -475,7 +469,7 @@
size: completeData.size,
deduplicated: false,
};
}, [projectName, packageName, tag, isOnline]);
}, [projectName, packageName, isOnline]);

const uploadFileSimple = useCallback((item: UploadItem): Promise<UploadResult> => {
return new Promise((resolve, reject) => {
@@ -484,9 +478,6 @@

const formData = new FormData();
formData.append('file', item.file);
if (tag) {
formData.append('tag', tag);
}

let lastLoaded = 0;
let lastTime = Date.now();
@@ -555,7 +546,7 @@
: u
));
});
}, [projectName, packageName, tag]);
}, [projectName, packageName]);

const uploadFile = useCallback((item: UploadItem): Promise<UploadResult> => {
if (item.file.size >= CHUNKED_UPLOAD_THRESHOLD) {

@@ -233,7 +233,7 @@ export function GlobalSearch() {
const flatIndex = results.projects.length + results.packages.length + index;
return (
<button
key={artifact.tag_id}
key={artifact.artifact_id}
className={`global-search__result ${selectedIndex === flatIndex ? 'selected' : ''}`}
onClick={() => navigateToResult({ type: 'artifact', item: artifact })}
onMouseEnter={() => setSelectedIndex(flatIndex)}
@@ -243,7 +243,7 @@
<line x1="7" y1="7" x2="7.01" y2="7" />
</svg>
<div className="global-search__result-content">
<span className="global-search__result-name">{artifact.tag_name}</span>
<span className="global-search__result-name">{artifact.version}</span>
<span className="global-search__result-path">
{artifact.project_name} / {artifact.package_name}
</span>

@@ -84,29 +84,6 @@ function Layout({ children }: LayoutProps) {
</svg>
Projects
</Link>
<Link to="/dashboard" className={location.pathname === '/dashboard' ? 'active' : ''}>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<rect x="3" y="3" width="7" height="7" rx="1"/>
<rect x="14" y="3" width="7" height="7" rx="1"/>
<rect x="3" y="14" width="7" height="7" rx="1"/>
<rect x="14" y="14" width="7" height="7" rx="1"/>
</svg>
Dashboard
</Link>
{user && userTeams.length > 0 && (
<Link
to={userTeams.length === 1 ? `/teams/${userTeams[0].slug}` : '/teams'}
className={location.pathname.startsWith('/teams') ? 'active' : ''}
>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
<circle cx="9" cy="7" r="4"/>
<path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
<path d="M16 3.13a4 4 0 0 1 0 7.75"/>
</svg>
{userTeams.length === 1 ? 'Team' : 'Teams'}
</Link>
)}
<a href="/docs" className="nav-link-muted">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"/>
@@ -148,6 +125,35 @@ function Layout({ children }: LayoutProps) {
)}
</div>
<div className="user-menu-divider"></div>
<NavLink
to="/dashboard"
className="user-menu-item"
onClick={() => setShowUserMenu(false)}
>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<rect x="3" y="3" width="7" height="7" rx="1"/>
<rect x="14" y="3" width="7" height="7" rx="1"/>
<rect x="3" y="14" width="7" height="7" rx="1"/>
<rect x="14" y="14" width="7" height="7" rx="1"/>
</svg>
Dashboard
</NavLink>
{userTeams.length > 0 && (
<NavLink
to={userTeams.length === 1 ? `/teams/${userTeams[0].slug}` : '/teams'}
className="user-menu-item"
onClick={() => setShowUserMenu(false)}
>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
<circle cx="9" cy="7" r="4"/>
<path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
<path d="M16 3.13a4 4 0 0 1 0 7.75"/>
</svg>
{userTeams.length === 1 ? 'Team' : 'Teams'}
</NavLink>
)}
<div className="user-menu-divider"></div>
<NavLink
to="/settings/api-keys"
className="user-menu-item"

@@ -132,6 +132,12 @@
color: #c62828;
}

.coming-soon-badge {
color: #9e9e9e;
font-style: italic;
font-size: 0.85em;
}

/* Actions */
.actions-cell {
white-space: nowrap;

@@ -12,6 +12,7 @@ import { UpstreamSource, SourceType, AuthType } from '../types';
import './AdminCachePage.css';

const SOURCE_TYPES: SourceType[] = ['npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic'];
const SUPPORTED_SOURCE_TYPES: Set<SourceType> = new Set(['pypi', 'generic']);
const AUTH_TYPES: AuthType[] = ['none', 'basic', 'bearer', 'api_key'];

function AdminCachePage() {
@@ -285,7 +286,12 @@ function AdminCachePage() {
<span className="env-badge" title="Defined via environment variable">ENV</span>
)}
</td>
<td>{source.source_type}</td>
<td>
{source.source_type}
{!SUPPORTED_SOURCE_TYPES.has(source.source_type) && (
<span className="coming-soon-badge"> (coming soon)</span>
)}
</td>
<td className="url-cell" title={source.url}>{source.url}</td>
<td>{source.priority}</td>
<td>
@@ -359,7 +365,7 @@ function AdminCachePage() {
>
{SOURCE_TYPES.map((type) => (
<option key={type} value={type}>
{type}
{type}{!SUPPORTED_SOURCE_TYPES.has(type) ? ' (coming soon)' : ''}
</option>
))}
</select>

@@ -249,7 +249,7 @@ function Home() {
key: 'created_by',
header: 'Owner',
className: 'cell-owner',
render: (project) => project.created_by,
render: (project) => project.team_name || project.created_by,
},
...(user
? [

@@ -185,56 +185,6 @@ h2 {
color: var(--warning-color, #f59e0b);
}

/* Usage Section */
.usage-section {
margin-top: 32px;
background: var(--bg-secondary);
}

.usage-section h3 {
margin-bottom: 12px;
color: var(--text-primary);
font-size: 1rem;
font-weight: 600;
}

.usage-section p {
color: var(--text-secondary);
margin-bottom: 12px;
font-size: 0.875rem;
}

.usage-section pre {
background: #0d0d0f;
border: 1px solid var(--border-primary);
padding: 16px 20px;
border-radius: var(--radius-md);
overflow-x: auto;
margin-bottom: 16px;
}

.usage-section code {
font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
font-size: 0.8125rem;
color: #e2e8f0;
}

/* Syntax highlighting for code blocks */
.usage-section pre {
position: relative;
}

.usage-section pre::before {
content: 'bash';
position: absolute;
top: 8px;
right: 12px;
font-size: 0.6875rem;
color: var(--text-muted);
text-transform: uppercase;
letter-spacing: 0.05em;
}

/* Copy button for code blocks (optional enhancement) */
.code-block {
position: relative;
@@ -642,6 +592,11 @@ tr:hover .copy-btn {
padding: 20px;
}

/* Ensure file modal needs higher z-index when opened from deps modal */
.modal-overlay:has(.ensure-file-modal) {
z-index: 1100;
}

.ensure-file-modal {
background: var(--bg-secondary);
border: 1px solid var(--border-primary);
@@ -793,4 +748,194 @@ tr:hover .copy-btn {
.ensure-file-modal {
max-height: 90vh;
}

.action-menu-dropdown {
right: 0;
left: auto;
}
}

/* Header upload button */
.header-upload-btn {
margin-left: auto;
}

/* Tag/Version cell */
.tag-version-cell {
display: flex;
flex-direction: column;
gap: 4px;
}

.tag-version-cell .version-badge {
font-size: 0.75rem;
color: var(--text-muted);
}

/* Icon buttons */
.btn-icon {
display: flex;
align-items: center;
justify-content: center;
width: 32px;
height: 32px;
padding: 0;
background: transparent;
border: 1px solid transparent;
border-radius: var(--radius-sm);
color: var(--text-secondary);
cursor: pointer;
transition: all var(--transition-fast);
}

.btn-icon:hover {
background: var(--bg-hover);
color: var(--text-primary);
}

/* Action menu */
.action-buttons {
display: flex;
align-items: center;
gap: 4px;
}

.action-menu {
position: relative;
}

/* Action menu backdrop for click-outside */
.action-menu-backdrop {
position: fixed;
top: 0;
left: 0;
right: 0;
bottom: 0;
z-index: 999;
}

.action-menu-dropdown {
position: fixed;
z-index: 1000;
min-width: 180px;
padding: 4px 0;
background: var(--bg-secondary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
}

.action-menu-dropdown button {
display: block;
width: 100%;
padding: 8px 12px;
background: none;
border: none;
text-align: left;
font-size: 0.875rem;
color: var(--text-primary);
cursor: pointer;
transition: background var(--transition-fast);
}

.action-menu-dropdown button:hover {
background: var(--bg-hover);
}

/* Upload Modal */
.upload-modal,
.create-tag-modal {
background: var(--bg-secondary);
border-radius: var(--radius-lg);
width: 90%;
max-width: 500px;
max-height: 90vh;
overflow: hidden;
}

.modal-header {
display: flex;
align-items: center;
justify-content: space-between;
padding: 16px 20px;
border-bottom: 1px solid var(--border-primary);
}

.modal-header h3 {
margin: 0;
font-size: 1.125rem;
font-weight: 600;
}

.modal-body {
padding: 20px;
}

.modal-description {
margin-bottom: 16px;
color: var(--text-secondary);
font-size: 0.875rem;
}

.modal-actions {
display: flex;
justify-content: flex-end;
gap: 12px;
margin-top: 20px;
padding-top: 16px;
border-top: 1px solid var(--border-primary);
}

/* Dependencies Modal */
.deps-modal {
background: var(--bg-secondary);
border-radius: var(--radius-lg);
width: 90%;
max-width: 600px;
max-height: 80vh;
overflow: hidden;
display: flex;
flex-direction: column;
}

.deps-modal .modal-body {
overflow-y: auto;
flex: 1;
}

.deps-modal-controls {
display: flex;
gap: 8px;
margin-bottom: 16px;
}

/* Artifact ID Modal */
.artifact-id-modal {
background: var(--bg-secondary);
border-radius: var(--radius-lg);
width: 90%;
max-width: 500px;
}

.artifact-id-display {
display: flex;
align-items: center;
gap: 12px;
padding: 16px;
background: var(--bg-tertiary);
border-radius: var(--radius-md);
border: 1px solid var(--border-primary);
}

.artifact-id-display code {
font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
font-size: 0.8125rem;
color: var(--text-primary);
word-break: break-all;
flex: 1;
}

.artifact-id-display .copy-btn {
opacity: 1;
flex-shrink: 0;
}

File diff suppressed because it is too large
@@ -214,7 +214,7 @@ function ProjectPage() {
</div>
</div>
<div className="page-header__actions">
{canAdmin && !project.team_id && (
{canAdmin && !project.team_id && !project.is_system && (
<button
className="btn btn-secondary"
onClick={() => navigate(`/project/${projectName}/settings`)}
@@ -227,11 +227,11 @@ function ProjectPage() {
Settings
</button>
)}
{canWrite ? (
{canWrite && !project.is_system ? (
<button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
{showForm ? 'Cancel' : '+ New Package'}
</button>
) : user ? (
) : user && !project.is_system ? (
<span className="text-muted" title="You have read-only access to this project">
Read-only access
</span>
@@ -294,18 +294,20 @@ function ProjectPage() {
placeholder="Filter packages..."
className="list-controls__search"
/>
<select
className="list-controls__select"
value={format}
onChange={(e) => handleFormatChange(e.target.value)}
>
<option value="">All formats</option>
{FORMAT_OPTIONS.map((f) => (
<option key={f} value={f}>
{f}
</option>
))}
</select>
{!project?.is_system && (
<select
className="list-controls__select"
value={format}
onChange={(e) => handleFormatChange(e.target.value)}
>
<option value="">All formats</option>
{FORMAT_OPTIONS.map((f) => (
<option key={f} value={f}>
{f}
</option>
))}
</select>
)}
</div>

{hasActiveFilters && (
@@ -341,19 +343,19 @@
className: 'cell-description',
render: (pkg) => pkg.description || '—',
},
{
...(!project?.is_system ? [{
key: 'format',
header: 'Format',
render: (pkg) => <Badge variant="default">{pkg.format}</Badge>,
},
{
key: 'tag_count',
header: 'Tags',
render: (pkg) => pkg.tag_count ?? '—',
},
render: (pkg: Package) => <Badge variant="default">{pkg.format}</Badge>,
}] : []),
...(!project?.is_system ? [{
key: 'version_count',
header: 'Versions',
render: (pkg: Package) => pkg.version_count ?? '—',
}] : []),
{
key: 'artifact_count',
header: 'Artifacts',
header: project?.is_system ? 'Versions' : 'Artifacts',
render: (pkg) => pkg.artifact_count ?? '—',
},
{
@@ -362,12 +364,12 @@
render: (pkg) =>
pkg.total_size !== undefined && pkg.total_size > 0 ? formatBytes(pkg.total_size) : '—',
},
{
key: 'latest_tag',
...(!project?.is_system ? [{
key: 'latest_version',
header: 'Latest',
render: (pkg) =>
pkg.latest_tag ? <strong style={{ color: 'var(--accent-primary)' }}>{pkg.latest_tag}</strong> : '—',
},
render: (pkg: Package) =>
pkg.latest_version ? <strong style={{ color: 'var(--accent-primary)' }}>{pkg.latest_version}</strong> : '—',
}] : []),
{
key: 'created_at',
header: 'Created',

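Aside: the column changes above rely on a conditional spread, ...(cond ? [col] : []), to include or drop whole table columns for system projects. A stripped-down illustration of the same idiom (the Column and PackageRow shapes here are simplified stand-ins, not the app's real types):

```typescript
interface Column<T> {
  key: string;
  header: string;
  render: (row: T) => string;
}

interface PackageRow {
  name: string;
  format: string;
  artifact_count?: number;
}

// `...(cond ? [col] : [])` splices a column in only when `cond` holds; this is
// how the diff hides the Format / Versions / Latest columns for system projects.
function buildColumns(isSystem: boolean): Column<PackageRow>[] {
  return [
    { key: 'name', header: 'Name', render: (pkg) => pkg.name },
    ...(!isSystem
      ? [{ key: 'format', header: 'Format', render: (pkg: PackageRow) => pkg.format }]
      : []),
    {
      key: 'artifact_count',
      header: isSystem ? 'Versions' : 'Artifacts',
      render: (pkg) => String(pkg.artifact_count ?? '—'),
    },
  ];
}
```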
@@ -19,12 +19,6 @@ export interface Project {
team_name?: string | null;
}

export interface TagSummary {
name: string;
artifact_id: string;
created_at: string;
}

export interface Package {
id: string;
project_id: string;
@@ -35,12 +29,11 @@ export interface Package {
created_at: string;
updated_at: string;
// Aggregated fields (from PackageDetailResponse)
tag_count?: number;
artifact_count?: number;
version_count?: number;
total_size?: number;
latest_tag?: string | null;
latest_upload_at?: string | null;
recent_tags?: TagSummary[];
latest_version?: string | null;
}

export interface Artifact {
@@ -53,22 +46,19 @@ export interface Artifact {
ref_count: number;
}

export interface Tag {
export interface PackageArtifact {
id: string;
package_id: string;
name: string;
artifact_id: string;
sha256: string;
size: number;
content_type: string | null;
original_name: string | null;
checksum_md5?: string | null;
checksum_sha1?: string | null;
s3_etag?: string | null;
created_at: string;
created_by: string;
}

export interface TagDetail extends Tag {
artifact_size: number;
artifact_content_type: string | null;
artifact_original_name: string | null;
artifact_created_at: string;
artifact_format_metadata: Record<string, unknown> | null;
version: string | null;
format_metadata?: Record<string, unknown> | null;
version?: string | null; // Version from PackageVersion if exists
}

export interface PackageVersion {
@@ -83,20 +73,9 @@ export interface PackageVersion {
size?: number;
content_type?: string | null;
original_name?: string | null;
tags?: string[];
}

export interface ArtifactTagInfo {
id: string;
name: string;
package_id: string;
package_name: string;
project_name: string;
}

export interface ArtifactDetail extends Artifact {
tags: ArtifactTagInfo[];
}
export interface ArtifactDetail extends Artifact {}

export interface PaginatedResponse<T> {
items: T[];
@@ -116,8 +95,6 @@ export interface ListParams {
order?: 'asc' | 'desc';
}

export interface TagListParams extends ListParams {}

export interface PackageListParams extends ListParams {
format?: string;
platform?: string;
@@ -142,7 +119,6 @@ export interface UploadResponse {
size: number;
project: string;
package: string;
tag: string | null;
version: string | null;
version_source: string | null;
}
@@ -165,9 +141,8 @@ export interface SearchResultPackage {
}

export interface SearchResultArtifact {
tag_id: string;
tag_name: string;
artifact_id: string;
version: string | null;
package_id: string;
package_name: string;
project_name: string;
@@ -390,8 +365,7 @@ export interface Dependency {
artifact_id: string;
project: string;
package: string;
version: string | null;
tag: string | null;
version: string;
created_at: string;
}

@@ -405,7 +379,6 @@ export interface DependentInfo {
project: string;
package: string;
version: string | null;
constraint_type: 'version' | 'tag';
constraint_value: string;
}

@@ -428,11 +401,17 @@ export interface ResolvedArtifact {
project: string;
package: string;
version: string | null;
tag: string | null;
size: number;
download_url: string;
}

export interface MissingDependency {
project: string;
package: string;
constraint: string | null;
required_by: string | null;
}

export interface DependencyResolutionResponse {
requested: {
project: string;
@@ -440,6 +419,7 @@ export interface DependencyResolutionResponse {
ref: string;
};
resolved: ResolvedArtifact[];
missing: MissingDependency[];
total_size: number;
artifact_count: number;
}

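For context, the reshaped DependencyResolutionResponse above is what feeds the "N cached • M not cached • X total" header in the DependencyGraph change earlier in this diff. A small sketch of that consumption, assuming the interfaces shown here; the import path and the formatBytes stand-in are illustrative, since the app's own helper is defined elsewhere:

```typescript
// Import path is an assumption for the sketch; adjust to wherever the types live.
import type { DependencyResolutionResponse } from './types';

// Trivial stand-in for the app's formatBytes helper (assumption, not from the diff).
function formatBytes(bytes: number): string {
  if (bytes < 1024) return `${bytes} B`;
  const units = ['KiB', 'MiB', 'GiB', 'TiB'];
  let value = bytes;
  let unit = 'B';
  for (const u of units) {
    if (value < 1024) break;
    value /= 1024;
    unit = u;
  }
  return `${value.toFixed(1)} ${unit}`;
}

// Mirrors the header line rendered by DependencyGraph: cached count, optional
// not-cached count, and the total size of everything that resolved.
export function summarizeResolution(res: DependencyResolutionResponse): string {
  const parts = [`${res.artifact_count} cached`];
  if (res.missing.length > 0) {
    parts.push(`${res.missing.length} not cached`);
  }
  parts.push(`${formatBytes(res.total_size)} total`);
  return parts.join(' • ');
}
```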
@@ -144,6 +144,20 @@ spec:
- name: ORCHARD_DATABASE_POOL_TIMEOUT
value: {{ .Values.orchard.database.poolTimeout | quote }}
{{- end }}
{{- if .Values.orchard.pypiCache }}
{{- if .Values.orchard.pypiCache.workers }}
- name: ORCHARD_PYPI_CACHE_WORKERS
value: {{ .Values.orchard.pypiCache.workers | quote }}
{{- end }}
{{- if .Values.orchard.pypiCache.maxDepth }}
- name: ORCHARD_PYPI_CACHE_MAX_DEPTH
value: {{ .Values.orchard.pypiCache.maxDepth | quote }}
{{- end }}
{{- if .Values.orchard.pypiCache.maxAttempts }}
- name: ORCHARD_PYPI_CACHE_MAX_ATTEMPTS
value: {{ .Values.orchard.pypiCache.maxAttempts | quote }}
{{- end }}
{{- end }}
{{- if .Values.orchard.auth }}
{{- if or .Values.orchard.auth.secretsManager .Values.orchard.auth.existingSecret .Values.orchard.auth.adminPassword }}
- name: ORCHARD_ADMIN_PASSWORD

@@ -59,10 +59,10 @@ ingress:
resources:
limits:
cpu: 500m
memory: 512Mi
memory: 1Gi
requests:
cpu: 200m
memory: 512Mi
memory: 1Gi

livenessProbe:
httpGet:
@@ -124,6 +124,12 @@ orchard:
mode: "presigned"
presignedUrlExpiry: 3600

# PyPI Cache Worker settings (reduced workers to limit memory usage)
pypiCache:
workers: 1
maxDepth: 10
maxAttempts: 3

# Relaxed rate limits for dev/feature environments (allows integration tests to run)
rateLimit:
login: "1000/minute" # Default is 5/minute, relaxed for CI integration tests
@@ -222,7 +228,7 @@ minioIngress:
secretName: minio-tls # Overridden by CI

redis:
enabled: false
enabled: true

waitForDatabase: true


@@ -57,10 +57,10 @@ ingress:
resources:
limits:
cpu: 500m
memory: 512Mi
memory: 768Mi
requests:
cpu: 500m
memory: 512Mi
memory: 768Mi

livenessProbe:
httpGet:
@@ -121,6 +121,12 @@ orchard:
mode: "presigned"
presignedUrlExpiry: 3600

# PyPI Cache Worker settings (reduced workers to limit memory usage)
pypiCache:
workers: 2
maxDepth: 10
maxAttempts: 3

# PostgreSQL subchart - disabled in prod, using RDS
postgresql:
enabled: false
@@ -134,7 +140,7 @@ minioIngress:
enabled: false

redis:
enabled: false
enabled: true

waitForDatabase: true


@@ -56,10 +56,10 @@ ingress:
resources:
limits:
cpu: 500m
memory: 512Mi
memory: 768Mi
requests:
cpu: 500m
memory: 512Mi
memory: 768Mi

livenessProbe:
httpGet:
@@ -122,6 +122,12 @@ orchard:
mode: "presigned" # presigned, redirect, or proxy
presignedUrlExpiry: 3600 # Presigned URL expiry in seconds

# PyPI Cache Worker settings (reduced workers to limit memory usage)
pypiCache:
workers: 2
maxDepth: 10
maxAttempts: 3

# Relaxed rate limits for stage (allows CI integration tests to run)
rateLimit:
login: "1000/minute" # Default is 5/minute, relaxed for CI integration tests
@@ -140,7 +146,7 @@ minioIngress:

# Redis subchart configuration (for future caching)
redis:
enabled: false
enabled: true
image:
registry: containers.global.bsf.tools
repository: bitnami/redis

@@ -54,10 +54,10 @@ ingress:
resources:
limits:
cpu: 500m
memory: 512Mi
memory: 768Mi
requests:
cpu: 500m
memory: 512Mi
memory: 768Mi

livenessProbe:
httpGet:
@@ -120,6 +120,12 @@ orchard:
mode: "presigned" # presigned, redirect, or proxy
presignedUrlExpiry: 3600 # Presigned URL expiry in seconds

# PyPI Cache Worker settings
pypiCache:
workers: 2 # Number of concurrent cache workers (reduced to limit memory usage)
maxDepth: 10 # Maximum recursion depth for dependency caching
maxAttempts: 3 # Maximum retry attempts for failed cache tasks

# Authentication settings
auth:
# Option 1: Plain admin password (creates K8s secret)

migrations/011_pypi_cache_tasks.sql (new file, 55 lines)
@@ -0,0 +1,55 @@
-- Migration: 011_pypi_cache_tasks
-- Description: Add table for tracking PyPI dependency caching tasks
-- Date: 2026-02-02

-- Table for tracking PyPI cache tasks with retry support
CREATE TABLE pypi_cache_tasks (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

-- What to cache
package_name VARCHAR(255) NOT NULL,
version_constraint VARCHAR(255),

-- Origin tracking
parent_task_id UUID REFERENCES pypi_cache_tasks(id) ON DELETE SET NULL,
depth INTEGER NOT NULL DEFAULT 0,
triggered_by_artifact VARCHAR(64) REFERENCES artifacts(id) ON DELETE SET NULL,

-- Status
status VARCHAR(20) NOT NULL DEFAULT 'pending',
attempts INTEGER NOT NULL DEFAULT 0,
max_attempts INTEGER NOT NULL DEFAULT 3,

-- Results
cached_artifact_id VARCHAR(64) REFERENCES artifacts(id) ON DELETE SET NULL,
error_message TEXT,

-- Timing
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
started_at TIMESTAMP WITH TIME ZONE,
completed_at TIMESTAMP WITH TIME ZONE,
next_retry_at TIMESTAMP WITH TIME ZONE,

-- Constraints
CONSTRAINT check_task_status CHECK (status IN ('pending', 'in_progress', 'completed', 'failed')),
CONSTRAINT check_depth_non_negative CHECK (depth >= 0),
CONSTRAINT check_attempts_non_negative CHECK (attempts >= 0)
);

-- Index for finding tasks ready to process (pending with retry time passed)
CREATE INDEX idx_pypi_cache_tasks_status_retry ON pypi_cache_tasks(status, next_retry_at);

-- Index for deduplication check (is this package already queued?)
CREATE INDEX idx_pypi_cache_tasks_package_status ON pypi_cache_tasks(package_name, status);

-- Index for tracing dependency chains
CREATE INDEX idx_pypi_cache_tasks_parent ON pypi_cache_tasks(parent_task_id);

-- Index for finding tasks by artifact that triggered them
CREATE INDEX idx_pypi_cache_tasks_triggered_by ON pypi_cache_tasks(triggered_by_artifact);

-- Index for finding tasks by cached artifact
CREATE INDEX idx_pypi_cache_tasks_cached_artifact ON pypi_cache_tasks(cached_artifact_id);

-- Index for sorting by depth and creation time (processing order)
CREATE INDEX idx_pypi_cache_tasks_depth_created ON pypi_cache_tasks(depth, created_at);
migrations/012_remove_tags.sql (new file, 33 lines)
@@ -0,0 +1,33 @@
-- Migration: Remove tag system
-- Date: 2026-02-03
-- Description: Remove tags table and related objects, keeping only versions for artifact references

-- Drop triggers on tags table
DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
DROP TRIGGER IF EXISTS tags_updated_at_trigger ON tags;
DROP TRIGGER IF EXISTS tag_changes_trigger ON tags;

-- Drop the tag change tracking function
DROP FUNCTION IF EXISTS track_tag_changes();

-- Remove tag_constraint from artifact_dependencies
-- First drop the constraint that requires either version or tag
ALTER TABLE artifact_dependencies DROP CONSTRAINT IF EXISTS check_constraint_type;

-- Remove the tag_constraint column
ALTER TABLE artifact_dependencies DROP COLUMN IF EXISTS tag_constraint;

-- Make version_constraint NOT NULL (now the only option)
UPDATE artifact_dependencies SET version_constraint = '*' WHERE version_constraint IS NULL;
ALTER TABLE artifact_dependencies ALTER COLUMN version_constraint SET NOT NULL;

-- Drop tag_history table first (depends on tags)
DROP TABLE IF EXISTS tag_history;

-- Drop tags table
DROP TABLE IF EXISTS tags;

-- Rename uploads.tag_name to uploads.version (historical data field)
ALTER TABLE uploads RENAME COLUMN tag_name TO version;