2 Commits

Author SHA1 Message Date
Mondo Diaz
ebf9926809 Fix YAML anchor ordering in CI pipeline 2026-01-29 11:37:57 -06:00
Mondo Diaz
a3a49ac9c3 Add upstream caching infrastructure and refactor CI pipeline
Upstream Caching (Epic #68-#75, #105):
- Add upstream_sources and cache_settings tables with migrations
- Add cache management API endpoints (CRUD for sources, settings)
- Add environment variable overrides for upstream sources and cache settings
- Add encryption module for storing credentials securely
- Add frontend Admin Cache Management page
- Add is_system field to projects for system cache distinction
- Add purge_seed_data for transitioning to production-like environments

CI Pipeline Refactoring:
- Remove reset jobs (reset_stage_pre, reset_stage)
- Add ephemeral orchard-test deployment for main branch testing
- Run integration tests on ephemeral deployment before promoting to stage
- Stage is now long-running pre-prod (smoke tests only)
- Disable prosper_setup for tag pipelines
2026-01-29 11:28:59 -06:00
66 changed files with 4597 additions and 6574 deletions
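The "encryption module for storing credentials securely" noted in the commit above pairs with the Fernet key setting (cache_encryption_key) in config.py and the password_encrypted BYTEA column in the migrations below. A minimal sketch of that pattern using the cryptography library; it is not the project's actual module, only the general shape:

from cryptography.fernet import Fernet

key = Fernet.generate_key()                    # auto-generated when cache_encryption_key is empty
fernet = Fernet(key)
token = fernet.encrypt(b"upstream-password")   # bytes, suitable for a BYTEA column
assert fernet.decrypt(token) == b"upstream-password"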

View File

@@ -213,74 +213,6 @@ integration_test_feature:
- if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
when: on_success
# Reset feature environment after integration tests
# Calls factory-reset to clean up test data created during integration tests
reset_feature:
stage: deploy
needs: [integration_test_feature]
image: deps.global.bsf.tools/docker/python:3.12-slim
timeout: 5m
before_script:
- pip install --index-url "$PIP_INDEX_URL" httpx
script:
# Debug: Check if variable is set at shell level
- echo "RESET_ADMIN_PASSWORD length at shell level:${#RESET_ADMIN_PASSWORD}"
- |
python - <<'RESET_SCRIPT'
import httpx
import os
import sys
BASE_URL = f"https://orchard-{os.environ['CI_COMMIT_REF_SLUG']}.common.global.bsf.tools"
PASSWORD_RAW = os.environ.get("RESET_ADMIN_PASSWORD")
if not PASSWORD_RAW:
print("ERROR: RESET_ADMIN_PASSWORD not set")
sys.exit(1)
# Debug: check for hidden characters
print(f"Raw password repr (first 3 chars): {repr(PASSWORD_RAW[:3])}")
print(f"Raw password repr (last 3 chars): {repr(PASSWORD_RAW[-3:])}")
print(f"Raw length: {len(PASSWORD_RAW)}")
# Strip any whitespace
PASSWORD = PASSWORD_RAW.strip()
print(f"Stripped length: {len(PASSWORD)}")
print(f"Resetting environment at {BASE_URL}")
client = httpx.Client(base_url=BASE_URL, timeout=60.0)
# Login as admin
login_resp = client.post("/api/v1/auth/login", json={
"username": "admin",
"password": PASSWORD
})
if login_resp.status_code != 200:
print(f"ERROR: Login failed: {login_resp.status_code}")
print(f"Response: {login_resp.text}")
sys.exit(1)
# Call factory reset
reset_resp = client.post(
"/api/v1/admin/factory-reset",
headers={"X-Confirm-Reset": "yes-delete-all-data"}
)
if reset_resp.status_code == 200:
print("SUCCESS: Factory reset completed")
print(reset_resp.json())
else:
print(f"ERROR: Factory reset failed: {reset_resp.status_code}")
print(reset_resp.text)
sys.exit(1)
RESET_SCRIPT
variables:
# Use same pattern as integration_test_feature - create new variable from CI variable
RESET_ADMIN_PASSWORD: $DEV_ADMIN_PASSWORD
rules:
- if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
when: on_success
allow_failure: true # Don't fail the pipeline if reset fails
# Run Python backend unit tests
python_unit_tests:
stage: test

View File

@@ -7,43 +7,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
### Added
- Added transparent PyPI proxy implementing PEP 503 Simple API (#108)
- `GET /pypi/simple/` - package index (proxied from upstream)
- `GET /pypi/simple/{package}/` - version list with rewritten download links
- `GET /pypi/simple/{package}/{filename}` - download with automatic caching
- Allows `pip install --index-url https://orchard.../pypi/simple/ <package>`
- Artifacts cached on first access through configured upstream sources
- Added `POST /api/v1/cache/resolve` endpoint to cache packages by coordinates instead of URL (#108)
### Changed
- Upstream sources table text is now centered under column headers (#108)
- ENV badge now appears inline with source name instead of separate column (#108)
- Test and Edit buttons now have more prominent button styling (#108)
- Reduced footer padding for cleaner layout (#108)
### Fixed
- Fixed purge_seed_data crash when deleting access permissions - was comparing UUID to VARCHAR column (#107)
### Changed
- Upstream source connectivity test no longer follows redirects, fixing "Exceeded maximum allowed redirects" error with Artifactory proxies (#107)
- Test runs automatically after saving a new or updated upstream source (#107)
- Test status now shows as colored dots (green=success, red=error) instead of text badges (#107)
- Clicking red dot shows error details in a modal (#107)
- Source name column no longer wraps text for better table layout (#107)
- Renamed "Cache Management" page to "Upstream Sources" (#107)
- Moved Delete button from table row to edit modal for cleaner table layout (#107)
### Removed
- Removed `is_public` field from upstream sources - all sources are now treated as internal/private (#107)
- Removed `allow_public_internet` (air-gap mode) setting from cache settings - not needed for enterprise proxy use case (#107)
- Removed seeding of public registry URLs (npm-public, pypi-public, maven-central, docker-hub) (#107)
- Removed "Public" badge and checkbox from upstream sources UI (#107)
- Removed "Allow Public Internet" toggle from cache settings UI (#107)
- Removed "Global Settings" section from cache management UI - auto-create system projects is always enabled (#107)
- Removed unused CacheSettings frontend types and API functions (#107)
### Added
- Added `ORCHARD_PURGE_SEED_DATA` environment variable support to stage helm values to remove seed data from long-running deployments (#107)
- Added frontend system projects visual distinction (#105)
- "Cache" badge for system projects in project list
- "System Cache" badge on project detail page

View File

@@ -51,7 +51,6 @@ class Settings(BaseSettings):
presigned_url_expiry: int = (
3600 # Presigned URL expiry in seconds (default: 1 hour)
)
pypi_download_mode: str = "redirect" # "redirect" (to S3) or "proxy" (stream through Orchard)
# Logging settings
log_level: str = "INFO" # DEBUG, INFO, WARNING, ERROR, CRITICAL
@@ -62,14 +61,10 @@ class Settings(BaseSettings):
# Cache settings
cache_encryption_key: str = "" # Fernet key for encrypting upstream credentials (auto-generated if empty)
# Global cache settings override (None = use DB value, True/False = override DB) # Global cache settings overrides (None = use DB value, True/False = override DB)
cache_allow_public_internet: Optional[bool] = None # Override allow_public_internet (air-gap mode)
cache_auto_create_system_projects: Optional[bool] = None # Override auto_create_system_projects
# PyPI Cache Worker settings
pypi_cache_workers: int = 5 # Number of concurrent cache workers
pypi_cache_max_depth: int = 10 # Maximum recursion depth for dependency caching
pypi_cache_max_attempts: int = 3 # Maximum retry attempts for failed cache tasks
# JWT Authentication settings (optional, for external identity providers)
jwt_enabled: bool = False # Enable JWT token validation
jwt_secret: str = "" # Secret key for HS256, or leave empty for RS256 with JWKS
@@ -94,24 +89,6 @@ class Settings(BaseSettings):
def is_production(self) -> bool:
return self.env.lower() == "production"
@property
def PORT(self) -> int:
"""Alias for server_port for compatibility."""
return self.server_port
# Uppercase aliases for PyPI cache settings (for backward compatibility)
@property
def PYPI_CACHE_WORKERS(self) -> int:
return self.pypi_cache_workers
@property
def PYPI_CACHE_MAX_DEPTH(self) -> int:
return self.pypi_cache_max_depth
@property
def PYPI_CACHE_MAX_ATTEMPTS(self) -> int:
return self.pypi_cache_max_attempts
class Config:
env_prefix = "ORCHARD_"
case_sensitive = False
@@ -131,6 +108,7 @@ class EnvUpstreamSource:
url: str,
source_type: str = "generic",
enabled: bool = True,
is_public: bool = True,
auth_type: str = "none",
username: Optional[str] = None,
password: Optional[str] = None,
@@ -140,6 +118,7 @@ class EnvUpstreamSource:
self.url = url
self.source_type = source_type
self.enabled = enabled
self.is_public = is_public
self.auth_type = auth_type
self.username = username
self.password = password
@@ -209,6 +188,7 @@ def parse_upstream_sources_from_env() -> list[EnvUpstreamSource]:
url=url,
source_type=data.get("TYPE", "generic").lower(),
enabled=parse_bool(data.get("ENABLED"), True),
is_public=parse_bool(data.get("IS_PUBLIC"), True),
auth_type=data.get("AUTH_TYPE", "none").lower(),
username=data.get("USERNAME"),
password=data.get("PASSWORD"),
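The Config block above (env_prefix = "ORCHARD_", case_sensitive = False) means any field can be overridden through an ORCHARD_-prefixed environment variable. A toy sketch of that behavior, assuming pydantic v1-style BaseSettings as the Config-class pattern suggests; the project may pin a different version:

import os
from pydantic import BaseSettings   # pydantic v1 import, matching the Config-class style above

class DemoSettings(BaseSettings):
    log_level: str = "INFO"
    jwt_enabled: bool = False

    class Config:
        env_prefix = "ORCHARD_"
        case_sensitive = False

os.environ["ORCHARD_LOG_LEVEL"] = "DEBUG"
os.environ["orchard_jwt_enabled"] = "true"    # matched case-insensitively
assert DemoSettings().log_level == "DEBUG"
assert DemoSettings().jwt_enabled is True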

View File

@@ -220,7 +220,17 @@ def _run_migrations():
CREATE UNIQUE INDEX idx_packages_project_name ON packages(project_id, name);
END IF;
-- Tag indexes removed: tags table no longer exists (removed in tag system removal) IF NOT EXISTS (
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tags_package_name'
) THEN
CREATE UNIQUE INDEX idx_tags_package_name ON tags(package_id, name);
END IF;
IF NOT EXISTS (
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tags_package_created_at'
) THEN
CREATE INDEX idx_tags_package_created_at ON tags(package_id, created_at);
END IF;
END $$;
""",
),
@@ -277,8 +287,27 @@ def _run_migrations():
Migration(
name="008_create_tags_ref_count_triggers",
sql="""
-- Tags table removed: triggers no longer needed (tag system removed) DO $$
DO $$ BEGIN NULL; END $$; BEGIN
DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
CREATE TRIGGER tags_ref_count_insert_trigger
AFTER INSERT ON tags
FOR EACH ROW
EXECUTE FUNCTION increment_artifact_ref_count();
DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
CREATE TRIGGER tags_ref_count_delete_trigger
AFTER DELETE ON tags
FOR EACH ROW
EXECUTE FUNCTION decrement_artifact_ref_count();
DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
CREATE TRIGGER tags_ref_count_update_trigger
AFTER UPDATE ON tags
FOR EACH ROW
WHEN (OLD.artifact_id IS DISTINCT FROM NEW.artifact_id)
EXECUTE FUNCTION update_artifact_ref_count();
END $$;
""",
),
Migration(
@@ -325,11 +354,9 @@ def _run_migrations():
Migration(
name="011_migrate_semver_tags_to_versions",
sql=r"""
-- Migrate semver tags to versions (only if both tables exist - for existing databases)
DO $$
BEGIN
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
AND EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'tags') THEN
INSERT INTO package_versions (id, package_id, artifact_id, version, version_source, created_by, created_at)
SELECT
gen_random_uuid(),
@@ -435,6 +462,7 @@ def _run_migrations():
source_type VARCHAR(50) NOT NULL DEFAULT 'generic',
url VARCHAR(2048) NOT NULL,
enabled BOOLEAN NOT NULL DEFAULT FALSE,
is_public BOOLEAN NOT NULL DEFAULT TRUE,
auth_type VARCHAR(20) NOT NULL DEFAULT 'none',
username VARCHAR(255),
password_encrypted BYTEA,
@@ -452,6 +480,7 @@ def _run_migrations():
);
CREATE INDEX IF NOT EXISTS idx_upstream_sources_enabled ON upstream_sources(enabled);
CREATE INDEX IF NOT EXISTS idx_upstream_sources_source_type ON upstream_sources(source_type);
CREATE INDEX IF NOT EXISTS idx_upstream_sources_is_public ON upstream_sources(is_public);
CREATE INDEX IF NOT EXISTS idx_upstream_sources_priority ON upstream_sources(priority);
""",
),
@@ -460,13 +489,14 @@ def _run_migrations():
sql="""
CREATE TABLE IF NOT EXISTS cache_settings (
id INTEGER PRIMARY KEY DEFAULT 1,
allow_public_internet BOOLEAN NOT NULL DEFAULT TRUE,
auto_create_system_projects BOOLEAN NOT NULL DEFAULT TRUE,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
CONSTRAINT check_cache_settings_singleton CHECK (id = 1)
);
INSERT INTO cache_settings (id, auto_create_system_projects) INSERT INTO cache_settings (id, allow_public_internet, auto_create_system_projects)
VALUES (1, TRUE) VALUES (1, TRUE, TRUE)
ON CONFLICT (id) DO NOTHING;
""",
),
@@ -492,106 +522,13 @@ def _run_migrations():
Migration(
name="020_seed_default_upstream_sources",
sql="""
-- Originally seeded public sources, but these are no longer used. INSERT INTO upstream_sources (id, name, source_type, url, enabled, is_public, auth_type, priority)
-- Migration 023 deletes any previously seeded sources. VALUES
-- This migration is now a no-op for fresh installs. (gen_random_uuid(), 'npm-public', 'npm', 'https://registry.npmjs.org', FALSE, TRUE, 'none', 100),
SELECT 1; (gen_random_uuid(), 'pypi-public', 'pypi', 'https://pypi.org/simple', FALSE, TRUE, 'none', 100),
""", (gen_random_uuid(), 'maven-central', 'maven', 'https://repo1.maven.org/maven2', FALSE, TRUE, 'none', 100),
), (gen_random_uuid(), 'docker-hub', 'docker', 'https://registry-1.docker.io', FALSE, TRUE, 'none', 100)
Migration( ON CONFLICT (name) DO NOTHING;
name="021_remove_is_public_from_upstream_sources",
sql="""
DO $$
BEGIN
-- Drop the index if it exists
DROP INDEX IF EXISTS idx_upstream_sources_is_public;
-- Drop the column if it exists
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'upstream_sources' AND column_name = 'is_public'
) THEN
ALTER TABLE upstream_sources DROP COLUMN is_public;
END IF;
END $$;
""",
),
Migration(
name="022_remove_allow_public_internet_from_cache_settings",
sql="""
DO $$
BEGIN
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'cache_settings' AND column_name = 'allow_public_internet'
) THEN
ALTER TABLE cache_settings DROP COLUMN allow_public_internet;
END IF;
END $$;
""",
),
Migration(
name="023_delete_seeded_public_sources",
sql="""
-- Delete the seeded public sources that were added by migration 020
DELETE FROM upstream_sources
WHERE name IN ('npm-public', 'pypi-public', 'maven-central', 'docker-hub');
""",
),
Migration(
name="024_remove_tags",
sql="""
-- Remove tag system, keeping only versions for artifact references
DO $$
BEGIN
-- Drop triggers on tags table (if they exist)
DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
DROP TRIGGER IF EXISTS tags_updated_at_trigger ON tags;
DROP TRIGGER IF EXISTS tag_changes_trigger ON tags;
-- Drop the tag change tracking function
DROP FUNCTION IF EXISTS track_tag_changes();
-- Remove tag_constraint from artifact_dependencies
IF EXISTS (
SELECT 1 FROM information_schema.table_constraints
WHERE constraint_name = 'check_constraint_type'
AND table_name = 'artifact_dependencies'
) THEN
ALTER TABLE artifact_dependencies DROP CONSTRAINT check_constraint_type;
END IF;
-- Remove the tag_constraint column if it exists
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'artifact_dependencies' AND column_name = 'tag_constraint'
) THEN
ALTER TABLE artifact_dependencies DROP COLUMN tag_constraint;
END IF;
-- Make version_constraint NOT NULL
UPDATE artifact_dependencies SET version_constraint = '*' WHERE version_constraint IS NULL;
ALTER TABLE artifact_dependencies ALTER COLUMN version_constraint SET NOT NULL;
-- Drop tag_history table first (depends on tags)
DROP TABLE IF EXISTS tag_history;
-- Drop tags table
DROP TABLE IF EXISTS tags;
-- Rename uploads.tag_name to version if it exists and version doesn't
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'uploads' AND column_name = 'tag_name'
) AND NOT EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'uploads' AND column_name = 'version'
) THEN
ALTER TABLE uploads RENAME COLUMN tag_name TO version;
END IF;
END $$;
""",
),
]
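The cache_settings table created above is a singleton: id is pinned to 1 by a CHECK constraint, and the seed INSERT is a no-op on re-run. A toy demonstration of the pattern with SQLite (3.24+ for ON CONFLICT support), separate from the project's migration runner:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("""
    CREATE TABLE cache_settings (
        id INTEGER PRIMARY KEY CHECK (id = 1),
        allow_public_internet BOOLEAN NOT NULL DEFAULT 1,
        auto_create_system_projects BOOLEAN NOT NULL DEFAULT 1
    )
""")
conn.execute("INSERT INTO cache_settings (id) VALUES (1) ON CONFLICT (id) DO NOTHING")
conn.execute("INSERT INTO cache_settings (id) VALUES (1) ON CONFLICT (id) DO NOTHING")  # idempotent
assert conn.execute("SELECT COUNT(*) FROM cache_settings").fetchone()[0] == 1
try:
    conn.execute("INSERT INTO cache_settings (id) VALUES (2)")
except sqlite3.IntegrityError:
    pass  # CHECK (id = 1) keeps the table a singleton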

View File

@@ -10,24 +10,16 @@ Handles:
- Conflict detection
"""
import re
import yaml
from typing import List, Dict, Any, Optional, Set, Tuple
from sqlalchemy.orm import Session
from sqlalchemy import and_
# Import packaging for PEP 440 version matching
try:
from packaging.specifiers import SpecifierSet, InvalidSpecifier
from packaging.version import Version, InvalidVersion
HAS_PACKAGING = True
except ImportError:
HAS_PACKAGING = False
from .models import (
Project,
Package,
Artifact,
Tag,
ArtifactDependency,
PackageVersion,
)
@@ -41,27 +33,10 @@ from .schemas import (
ResolvedArtifact,
DependencyResolutionResponse,
DependencyConflict,
MissingDependency,
PaginationMeta,
)
def _normalize_pypi_package_name(name: str) -> str:
"""
Normalize a PyPI package name for comparison.
- Strips extras brackets (e.g., "package[extra]" -> "package")
- Replaces sequences of hyphens, underscores, and dots with a single hyphen
- Lowercases the result
This follows PEP 503 normalization rules.
"""
# Strip extras brackets like [test], [dev], etc.
base_name = re.sub(r'\[.*\]', '', name)
# Normalize separators and lowercase
return re.sub(r'[-_.]+', '-', base_name).lower()
class DependencyError(Exception):
"""Base exception for dependency errors."""
pass
@@ -152,20 +127,26 @@ def parse_ensure_file(content: bytes) -> EnsureFileContent:
project = dep.get('project')
package = dep.get('package')
version = dep.get('version')
tag = dep.get('tag')
if not project:
raise InvalidEnsureFileError(f"Dependency {i} missing 'project'")
if not package:
raise InvalidEnsureFileError(f"Dependency {i} missing 'package'")
if not version: if not version and not tag:
raise InvalidEnsureFileError(
f"Dependency {i} must have 'version'" f"Dependency {i} must have either 'version' or 'tag'"
)
if version and tag:
raise InvalidEnsureFileError(
f"Dependency {i} cannot have both 'version' and 'tag'"
) )
dependencies.append(EnsureFileDependency(
project=project,
package=package,
version=version,
tag=tag,
))
return EnsureFileContent(dependencies=dependencies)
@@ -219,6 +200,7 @@ def store_dependencies(
dependency_project=dep.project,
dependency_package=dep.package,
version_constraint=dep.version,
tag_constraint=dep.tag,
)
db.add(artifact_dep)
created.append(artifact_dep)
@@ -284,21 +266,26 @@ def get_reverse_dependencies(
if not artifact:
continue
# Find which package this artifact belongs to via versions # Find which package this artifact belongs to via tags or versions
version_record = db.query(PackageVersion).filter( tag = db.query(Tag).filter(Tag.artifact_id == dep.artifact_id).first()
PackageVersion.artifact_id == dep.artifact_id, if tag:
).first() pkg = db.query(Package).filter(Package.id == tag.package_id).first()
if version_record:
pkg = db.query(Package).filter(Package.id == version_record.package_id).first()
if pkg:
proj = db.query(Project).filter(Project.id == pkg.project_id).first()
if proj:
# Get version if available
version_record = db.query(PackageVersion).filter(
PackageVersion.artifact_id == dep.artifact_id,
PackageVersion.package_id == pkg.id,
).first()
dependents.append(DependentInfo(
artifact_id=dep.artifact_id,
project=proj.name,
package=pkg.name,
version=version_record.version, version=version_record.version if version_record else None,
constraint_value=dep.version_constraint, constraint_type="version" if dep.version_constraint else "tag",
constraint_value=dep.version_constraint or dep.tag_constraint,
))
total_pages = (total + limit - 1) // limit
@@ -317,117 +304,25 @@ def get_reverse_dependencies(
)
def _is_version_constraint(version_str: str) -> bool:
"""Check if a version string contains constraint operators."""
if not version_str:
return False
# Check for common constraint operators
return any(op in version_str for op in ['>=', '<=', '!=', '~=', '>', '<', '==', '*'])
def _resolve_version_constraint(
db: Session,
package: Package,
constraint: str,
) -> Optional[Tuple[str, str, int]]:
"""
Resolve a version constraint (e.g., '>=1.9') to a specific version.
Uses PEP 440 version matching to find the best matching version.
Args:
db: Database session
package: Package to search versions in
constraint: Version constraint string (e.g., '>=1.9', '<2.0,>=1.5')
Returns:
Tuple of (artifact_id, resolved_version, size) or None if not found
"""
if not HAS_PACKAGING:
# Fallback: if packaging not available, can't do constraint matching
return None
# Handle wildcard - return latest version
if constraint == '*':
# Get the latest version by created_at
latest = db.query(PackageVersion).filter(
PackageVersion.package_id == package.id,
).order_by(PackageVersion.created_at.desc()).first()
if latest:
artifact = db.query(Artifact).filter(Artifact.id == latest.artifact_id).first()
if artifact:
return (artifact.id, latest.version, artifact.size)
return None
try:
specifier = SpecifierSet(constraint)
except InvalidSpecifier:
# Invalid constraint (e.g., ">=" without version) - treat as wildcard
# This can happen with malformed metadata from PyPI packages
latest = db.query(PackageVersion).filter(
PackageVersion.package_id == package.id,
).order_by(PackageVersion.created_at.desc()).first()
if latest:
artifact = db.query(Artifact).filter(Artifact.id == latest.artifact_id).first()
if artifact:
return (artifact.id, latest.version, artifact.size)
return None
# Get all versions for this package
all_versions = db.query(PackageVersion).filter(
PackageVersion.package_id == package.id,
).all()
if not all_versions:
return None
# Find matching versions
matching = []
for pv in all_versions:
try:
v = Version(pv.version)
if v in specifier:
matching.append((pv, v))
except InvalidVersion:
# Skip invalid versions
continue
if not matching:
return None
# Sort by version (descending) and return the latest matching
matching.sort(key=lambda x: x[1], reverse=True)
best_match = matching[0][0]
artifact = db.query(Artifact).filter(Artifact.id == best_match.artifact_id).first()
if artifact:
return (artifact.id, best_match.version, artifact.size)
return None
def _resolve_dependency_to_artifact(
db: Session,
project_name: str,
package_name: str,
version: str, version: Optional[str],
tag: Optional[str],
) -> Optional[Tuple[str, str, int]]:
"""
Resolve a dependency constraint to an artifact ID.
Supports:
- Exact version matching (e.g., '1.2.3')
- Version constraints (e.g., '>=1.9', '<2.0,>=1.5')
- Wildcard ('*' for any version)
Args:
db: Database session
project_name: Project name
package_name: Package name
version: Version or version constraint version: Version constraint (exact)
tag: Tag constraint
Returns:
Tuple of (artifact_id, resolved_version, size) or None if not found
Tuple of (artifact_id, resolved_version_or_tag, size) or None if not found
"""
# Get project and package
project = db.query(Project).filter(Project.name == project_name).first()
@@ -441,13 +336,8 @@ def _resolve_dependency_to_artifact(
if not package:
return None
# Check if this is a version constraint (>=, <, etc.) or exact version if version:
if _is_version_constraint(version): # Look up by version
result = _resolve_version_constraint(db, package, version)
if result:
return result
else:
# Look up by exact version
pkg_version = db.query(PackageVersion).filter(
PackageVersion.package_id == package.id,
PackageVersion.version == version,
@@ -459,6 +349,31 @@ def _resolve_dependency_to_artifact(
if artifact:
return (artifact.id, version, artifact.size)
# Also check if there's a tag with this exact name
tag_record = db.query(Tag).filter(
Tag.package_id == package.id,
Tag.name == version,
).first()
if tag_record:
artifact = db.query(Artifact).filter(
Artifact.id == tag_record.artifact_id
).first()
if artifact:
return (artifact.id, version, artifact.size)
if tag:
# Look up by tag
tag_record = db.query(Tag).filter(
Tag.package_id == package.id,
Tag.name == tag,
).first()
if tag_record:
artifact = db.query(Artifact).filter(
Artifact.id == tag_record.artifact_id
).first()
if artifact:
return (artifact.id, tag, artifact.size)
return None
@@ -488,16 +403,10 @@ def _detect_package_cycle(
Returns:
Cycle path if detected, None otherwise
"""
# Normalize names for comparison (handles extras like [test] and separators) pkg_key = f"{project_name}/{package_name}"
pkg_normalized = _normalize_pypi_package_name(package_name)
target_pkg_normalized = _normalize_pypi_package_name(target_package)
# Use normalized key for tracking
pkg_key = f"{project_name.lower()}/{pkg_normalized}"
# Check if we've reached the target package (cycle detected) # Check if we've reached the target package (cycle detected)
# Use normalized comparison to handle extras and naming variations if project_name == target_project and package_name == target_package:
if project_name.lower() == target_project.lower() and pkg_normalized == target_pkg_normalized:
return path + [pkg_key]
if pkg_key in visiting:
@@ -518,9 +427,9 @@ def _detect_package_cycle(
Package.name == package_name,
).first()
if package:
# Find all artifacts in this package via versions # Find all artifacts in this package via tags
versions = db.query(PackageVersion).filter(PackageVersion.package_id == package.id).all() tags = db.query(Tag).filter(Tag.package_id == package.id).all()
artifact_ids = {v.artifact_id for v in versions} artifact_ids = {t.artifact_id for t in tags}
# Get dependencies from all artifacts in this package
for artifact_id in artifact_ids:
@@ -563,8 +472,8 @@ def check_circular_dependencies(
db: Database session
artifact_id: The artifact that will have these dependencies
new_dependencies: Dependencies to be added
project_name: Project name (optional, will try to look up from version if not provided) project_name: Project name (optional, will try to look up from tag if not provided)
package_name: Package name (optional, will try to look up from version if not provided) package_name: Package name (optional, will try to look up from tag if not provided)
Returns:
Cycle path if detected, None otherwise
@@ -573,19 +482,17 @@ def check_circular_dependencies(
if project_name and package_name:
current_path = f"{project_name}/{package_name}"
else:
# Try to look up from version # Try to look up from tag
artifact = db.query(Artifact).filter(Artifact.id == artifact_id).first()
if not artifact:
return None
# Find package for this artifact via version # Find package for this artifact
version_record = db.query(PackageVersion).filter( tag = db.query(Tag).filter(Tag.artifact_id == artifact_id).first()
PackageVersion.artifact_id == artifact_id if not tag:
).first()
if not version_record:
return None
package = db.query(Package).filter(Package.id == version_record.package_id).first() package = db.query(Package).filter(Package.id == tag.package_id).first()
if not package:
return None
@@ -601,15 +508,12 @@ def check_circular_dependencies(
else:
return None
# Normalize the initial path for consistency with _detect_package_cycle
normalized_path = f"{target_project.lower()}/{_normalize_pypi_package_name(target_package)}"
# For each new dependency, check if it would create a cycle back to our package
for dep in new_dependencies:
# Check if this dependency (transitively) depends on us at the package level
visiting: Set[str] = set()
visited: Set[str] = set()
path: List[str] = [normalized_path] path: List[str] = [current_path]
# Check from the dependency's package
cycle = _detect_package_cycle(
@@ -642,7 +546,7 @@ def resolve_dependencies(
db: Database session
project_name: Project name
package_name: Package name
ref: Version reference (or artifact:hash) ref: Tag or version reference
base_url: Base URL for download URLs
Returns:
@@ -665,35 +569,22 @@ def resolve_dependencies(
if not package:
raise DependencyNotFoundError(project_name, package_name, ref)
# Handle artifact: prefix for direct artifact ID references # Try to find artifact by tag or version
if ref.startswith("artifact:"): resolved = _resolve_dependency_to_artifact(
artifact_id = ref[9:] db, project_name, package_name, ref, ref
artifact = db.query(Artifact).filter(Artifact.id == artifact_id).first() )
if not artifact: if not resolved:
raise DependencyNotFoundError(project_name, package_name, ref) raise DependencyNotFoundError(project_name, package_name, ref)
root_artifact_id = artifact.id
root_version = artifact_id[:12] # Use short hash as version display root_artifact_id, root_version, root_size = resolved
root_size = artifact.size
else:
# Try to find artifact by version
resolved = _resolve_dependency_to_artifact(
db, project_name, package_name, ref
)
if not resolved:
raise DependencyNotFoundError(project_name, package_name, ref)
root_artifact_id, root_version, root_size = resolved
# Track resolved artifacts and their versions
resolved_artifacts: Dict[str, ResolvedArtifact] = {}
# Track missing dependencies (not cached on server)
missing_dependencies: List[MissingDependency] = []
# Track version requirements for conflict detection
version_requirements: Dict[str, List[Dict[str, Any]]] = {} # pkg_key -> [(version, required_by)]
# Track visiting/visited for cycle detection
visiting: Set[str] = set()
visited: Set[str] = set()
# Track the current path for cycle reporting (artifact_id -> pkg_key)
current_path: Dict[str, str] = {}
# Resolution order (topological)
resolution_order: List[str] = []
@@ -715,10 +606,8 @@ def resolve_dependencies(
# Cycle detection (at artifact level)
if artifact_id in visiting:
# Build cycle path from current_path # Build cycle path
cycle_start = current_path.get(artifact_id, pkg_key) raise CircularDependencyError([pkg_key, pkg_key])
cycle = [cycle_start, pkg_key]
raise CircularDependencyError(cycle)
# Conflict detection - check if we've seen this package before with a different version
if pkg_key in version_requirements:
@@ -749,7 +638,6 @@ def resolve_dependencies(
return
visiting.add(artifact_id)
current_path[artifact_id] = pkg_key
# Track version requirement
if pkg_key not in version_requirements:
@@ -766,39 +654,23 @@ def resolve_dependencies(
# Resolve each dependency first (depth-first)
for dep in deps:
# Skip self-dependencies (can happen with PyPI extras like pytest[testing])
# Use normalized comparison for PyPI naming conventions (handles extras, separators)
dep_proj_normalized = dep.dependency_project.lower()
dep_pkg_normalized = _normalize_pypi_package_name(dep.dependency_package)
curr_proj_normalized = proj_name.lower()
curr_pkg_normalized = _normalize_pypi_package_name(pkg_name)
if dep_proj_normalized == curr_proj_normalized and dep_pkg_normalized == curr_pkg_normalized:
continue
resolved_dep = _resolve_dependency_to_artifact(
db,
dep.dependency_project,
dep.dependency_package,
dep.version_constraint,
dep.tag_constraint,
)
if not resolved_dep:
# Dependency not cached on server - track as missing but continue constraint = dep.version_constraint or dep.tag_constraint
constraint = dep.version_constraint raise DependencyNotFoundError(
missing_dependencies.append(MissingDependency( dep.dependency_project,
project=dep.dependency_project, dep.dependency_package,
package=dep.dependency_package, constraint,
constraint=constraint, )
required_by=pkg_key,
))
continue
dep_artifact_id, dep_version, dep_size = resolved_dep
# Skip if resolved to same artifact (self-dependency at artifact level)
if dep_artifact_id == artifact_id:
continue
_resolve_recursive(
dep_artifact_id,
dep.dependency_project,
@@ -810,7 +682,6 @@ def resolve_dependencies(
)
visiting.remove(artifact_id)
del current_path[artifact_id]
visited.add(artifact_id)
# Add to resolution order (dependencies before dependents)
@@ -847,7 +718,6 @@ def resolve_dependencies(
"ref": ref, "ref": ref,
},
resolved=resolved_list,
missing=missing_dependencies,
total_size=total_size,
artifact_count=len(resolved_list),
)
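The parse_ensure_file change earlier in this file accepts a dependency list where each entry names a project and package plus exactly one of version or tag. A minimal sketch of a file that satisfies those checks; the top-level dependencies key and the names are assumptions, not taken from the diff:

import yaml

ensure_yaml = """
dependencies:
  - project: tools
    package: cowsay
    version: "6.1"
  - project: tools
    package: helper
    tag: latest
"""
for i, dep in enumerate(yaml.safe_load(ensure_yaml)["dependencies"]):
    assert dep.get("project") and dep.get("package")
    assert bool(dep.get("version")) != bool(dep.get("tag"))  # exactly one of version/tag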

View File

@@ -11,7 +11,6 @@ from slowapi.errors import RateLimitExceeded
from .config import get_settings
from .database import init_db, SessionLocal
from .routes import router
from .pypi_proxy import router as pypi_router
from .seed import seed_database
from .auth import create_default_admin
from .rate_limit import limiter
@@ -50,6 +49,7 @@ async def lifespan(app: FastAPI):
logger.info(f"Running in {settings.env} mode - skipping seed data")
yield
# Shutdown: cleanup if needed
app = FastAPI(
@@ -65,7 +65,6 @@ app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
# Include API routes
app.include_router(router)
app.include_router(pypi_router)
# Serve static files (React build) if the directory exists
static_dir = os.path.join(os.path.dirname(__file__), "..", "..", "frontend", "dist")

View File

@@ -71,6 +71,7 @@ class Package(Base):
)
project = relationship("Project", back_populates="packages")
tags = relationship("Tag", back_populates="package", cascade="all, delete-orphan")
uploads = relationship(
"Upload", back_populates="package", cascade="all, delete-orphan"
)
@@ -119,6 +120,7 @@ class Artifact(Base):
ref_count = Column(Integer, default=1)
s3_key = Column(String(1024), nullable=False)
tags = relationship("Tag", back_populates="artifact")
uploads = relationship("Upload", back_populates="artifact")
versions = relationship("PackageVersion", back_populates="artifact")
dependencies = relationship(
@@ -149,6 +151,65 @@ class Artifact(Base):
)
class Tag(Base):
__tablename__ = "tags"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
package_id = Column(
UUID(as_uuid=True),
ForeignKey("packages.id", ondelete="CASCADE"),
nullable=False,
)
name = Column(String(255), nullable=False)
artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
updated_at = Column(
DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
)
created_by = Column(String(255), nullable=False)
package = relationship("Package", back_populates="tags")
artifact = relationship("Artifact", back_populates="tags")
history = relationship(
"TagHistory", back_populates="tag", cascade="all, delete-orphan"
)
__table_args__ = (
Index("idx_tags_package_id", "package_id"),
Index("idx_tags_artifact_id", "artifact_id"),
Index(
"idx_tags_package_name", "package_id", "name", unique=True
), # Composite unique index
Index(
"idx_tags_package_created_at", "package_id", "created_at"
), # For recent tags queries
)
class TagHistory(Base):
__tablename__ = "tag_history"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
tag_id = Column(
UUID(as_uuid=True), ForeignKey("tags.id", ondelete="CASCADE"), nullable=False
)
old_artifact_id = Column(String(64), ForeignKey("artifacts.id"))
new_artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
change_type = Column(String(20), nullable=False, default="update")
changed_at = Column(DateTime(timezone=True), default=datetime.utcnow)
changed_by = Column(String(255), nullable=False)
tag = relationship("Tag", back_populates="history")
__table_args__ = (
Index("idx_tag_history_tag_id", "tag_id"),
Index("idx_tag_history_changed_at", "changed_at"),
CheckConstraint(
"change_type IN ('create', 'update', 'delete')", name="check_change_type"
),
)
class PackageVersion(Base):
"""Immutable version record for a package-artifact relationship.
@@ -188,7 +249,7 @@ class Upload(Base):
artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
package_id = Column(UUID(as_uuid=True), ForeignKey("packages.id"), nullable=False)
original_name = Column(String(1024))
version = Column(String(255)) # Version assigned during upload tag_name = Column(String(255)) # Tag assigned during upload
user_agent = Column(String(512)) # Client identification
duration_ms = Column(Integer) # Upload timing in milliseconds
deduplicated = Column(Boolean, default=False) # Whether artifact was deduplicated
@@ -463,8 +524,8 @@ class PackageHistory(Base):
class ArtifactDependency(Base):
"""Dependency declared by an artifact on another package.
Each artifact can declare dependencies on other packages, specifying a version.
This enables recursive dependency resolution.
Each artifact can declare dependencies on other packages, specifying either
an exact version or a tag. This enables recursive dependency resolution.
"""
__tablename__ = "artifact_dependencies"
@@ -477,13 +538,20 @@ class ArtifactDependency(Base):
)
dependency_project = Column(String(255), nullable=False)
dependency_package = Column(String(255), nullable=False)
version_constraint = Column(String(255), nullable=False) version_constraint = Column(String(255), nullable=True)
tag_constraint = Column(String(255), nullable=True)
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
# Relationship to the artifact that declares this dependency
artifact = relationship("Artifact", back_populates="dependencies")
__table_args__ = (
# Exactly one of version_constraint or tag_constraint must be set
CheckConstraint(
"(version_constraint IS NOT NULL AND tag_constraint IS NULL) OR "
"(version_constraint IS NULL AND tag_constraint IS NOT NULL)",
name="check_constraint_type",
),
# Each artifact can only depend on a specific project/package once
Index(
"idx_artifact_dependencies_artifact_id",
@@ -599,6 +667,7 @@ class UpstreamSource(Base):
source_type = Column(String(50), default="generic", nullable=False)
url = Column(String(2048), nullable=False)
enabled = Column(Boolean, default=False, nullable=False)
is_public = Column(Boolean, default=True, nullable=False)
auth_type = Column(String(20), default="none", nullable=False)
username = Column(String(255))
password_encrypted = Column(LargeBinary)
@@ -615,6 +684,7 @@ class UpstreamSource(Base):
__table_args__ = (
Index("idx_upstream_sources_enabled", "enabled"),
Index("idx_upstream_sources_source_type", "source_type"),
Index("idx_upstream_sources_is_public", "is_public"),
Index("idx_upstream_sources_priority", "priority"),
CheckConstraint(
"source_type IN ('npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic')",
@@ -677,12 +747,13 @@ class UpstreamSource(Base):
class CacheSettings(Base):
"""Global cache settings (singleton table).
Controls behavior of the upstream caching system.
Controls behavior of the upstream caching system including air-gap mode.
"""
__tablename__ = "cache_settings"
id = Column(Integer, primary_key=True, default=1)
allow_public_internet = Column(Boolean, default=True, nullable=False)
auto_create_system_projects = Column(Boolean, default=True, nullable=False)
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
updated_at = Column(
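The check_constraint_type constraint above requires exactly one of version_constraint or tag_constraint to be set. A self-contained toy model (not the project's) showing the same rule enforced by SQLAlchemy against SQLite:

from sqlalchemy import create_engine, Column, Integer, String, CheckConstraint
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class DemoDependency(Base):
    __tablename__ = "demo_dependencies"
    id = Column(Integer, primary_key=True)
    version_constraint = Column(String, nullable=True)
    tag_constraint = Column(String, nullable=True)
    __table_args__ = (
        CheckConstraint(
            "(version_constraint IS NOT NULL AND tag_constraint IS NULL) OR "
            "(version_constraint IS NULL AND tag_constraint IS NOT NULL)",
            name="check_constraint_type",
        ),
    )

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(DemoDependency(tag_constraint="latest"))  # OK: exactly one constraint is set
    session.commit()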

View File

@@ -12,6 +12,7 @@ from .models import (
Project,
Package,
Artifact,
Tag,
Upload,
PackageVersion,
ArtifactDependency,
@@ -59,6 +60,7 @@ def purge_seed_data(db: Session) -> dict:
results = {
"dependencies_deleted": 0,
"tags_deleted": 0,
"versions_deleted": 0,
"uploads_deleted": 0,
"artifacts_deleted": 0,
@@ -101,7 +103,15 @@ def purge_seed_data(db: Session) -> dict:
results["dependencies_deleted"] = count
logger.info(f"Deleted {count} artifact dependencies")
# 2. Delete package versions # 2. Delete tags
if seed_package_ids:
count = db.query(Tag).filter(Tag.package_id.in_(seed_package_ids)).delete(
synchronize_session=False
)
results["tags_deleted"] = count
logger.info(f"Deleted {count} tags")
# 3. Delete package versions
if seed_package_ids:
count = db.query(PackageVersion).filter(
PackageVersion.package_id.in_(seed_package_ids)
@@ -109,7 +119,7 @@ def purge_seed_data(db: Session) -> dict:
results["versions_deleted"] = count
logger.info(f"Deleted {count} package versions")
# 3. Delete uploads # 4. Delete uploads
if seed_package_ids:
count = db.query(Upload).filter(Upload.package_id.in_(seed_package_ids)).delete(
synchronize_session=False
@@ -117,7 +127,7 @@ def purge_seed_data(db: Session) -> dict:
results["uploads_deleted"] = count
logger.info(f"Deleted {count} uploads")
# 4. Delete S3 objects for seed artifacts # 5. Delete S3 objects for seed artifacts
if seed_artifact_ids:
seed_artifacts = db.query(Artifact).filter(Artifact.id.in_(seed_artifact_ids)).all()
for artifact in seed_artifacts:
@@ -129,8 +139,8 @@ def purge_seed_data(db: Session) -> dict:
logger.warning(f"Failed to delete S3 object {artifact.s3_key}: {e}")
logger.info(f"Deleted {results['s3_objects_deleted']} S3 objects")
# 5. Delete artifacts (only those with ref_count that would be 0 after our deletions) # 6. Delete artifacts (only those with ref_count that would be 0 after our deletions)
# Since we deleted all versions pointing to these artifacts, we can delete them # Since we deleted all tags/versions pointing to these artifacts, we can delete them
if seed_artifact_ids:
count = db.query(Artifact).filter(Artifact.id.in_(seed_artifact_ids)).delete(
synchronize_session=False
@@ -138,7 +148,7 @@ def purge_seed_data(db: Session) -> dict:
results["artifacts_deleted"] = count
logger.info(f"Deleted {count} artifacts")
# 6. Delete packages # 7. Delete packages
if seed_package_ids:
count = db.query(Package).filter(Package.id.in_(seed_package_ids)).delete(
synchronize_session=False
@@ -146,7 +156,7 @@ def purge_seed_data(db: Session) -> dict:
results["packages_deleted"] = count
logger.info(f"Deleted {count} packages")
# 7. Delete access permissions for seed projects # 8. Delete access permissions for seed projects
if seed_project_ids:
count = db.query(AccessPermission).filter(
AccessPermission.project_id.in_(seed_project_ids)
@@ -154,14 +164,14 @@ def purge_seed_data(db: Session) -> dict:
results["permissions_deleted"] = count
logger.info(f"Deleted {count} access permissions")
# 8. Delete seed projects # 9. Delete seed projects
count = db.query(Project).filter(Project.name.in_(SEED_PROJECT_NAMES)).delete(
synchronize_session=False
)
results["projects_deleted"] = count
logger.info(f"Deleted {count} projects")
# 9. Find and delete seed team # 10. Find and delete seed team
seed_team = db.query(Team).filter(Team.slug == SEED_TEAM_SLUG).first()
if seed_team:
# Delete team memberships first
@@ -176,7 +186,7 @@ def purge_seed_data(db: Session) -> dict:
results["teams_deleted"] = 1
logger.info(f"Deleted team: {SEED_TEAM_SLUG}")
# 10. Delete seed users (but NOT admin) # 11. Delete seed users (but NOT admin)
seed_users = db.query(User).filter(User.username.in_(SEED_USERNAMES)).all()
for user in seed_users:
# Delete any remaining team memberships for this user
@@ -184,8 +194,7 @@ def purge_seed_data(db: Session) -> dict:
synchronize_session=False
)
# Delete any access permissions for this user
# Note: AccessPermission.user_id is VARCHAR (username), not UUID db.query(AccessPermission).filter(AccessPermission.user_id == user.id).delete(
db.query(AccessPermission).filter(AccessPermission.user_id == user.username).delete(
synchronize_session=False
)
db.delete(user)

View File

@@ -1,899 +0,0 @@
"""
Transparent PyPI proxy implementing PEP 503 (Simple API).
Provides endpoints that allow pip to use Orchard as a PyPI index URL.
Artifacts are cached on first access through configured upstream sources.
"""
import hashlib
import json
import logging
import os
import re
import tarfile
import tempfile
import zipfile
from io import BytesIO
from typing import Optional, List, Tuple
from urllib.parse import urljoin, urlparse, quote, unquote
import httpx
from fastapi import APIRouter, Depends, HTTPException, Request, Response
from fastapi.responses import StreamingResponse, HTMLResponse, RedirectResponse
from sqlalchemy.orm import Session
from .database import get_db
from .models import UpstreamSource, CachedUrl, Artifact, Project, Package, PackageVersion, ArtifactDependency
from .storage import S3Storage, get_storage
from .config import get_env_upstream_sources, get_settings
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/pypi", tags=["pypi-proxy"])
# Timeout configuration for proxy requests
PROXY_CONNECT_TIMEOUT = 30.0
PROXY_READ_TIMEOUT = 60.0
def _parse_requires_dist(requires_dist: str) -> Tuple[str, Optional[str]]:
"""Parse a Requires-Dist line into (package_name, version_constraint).
Examples:
"requests (>=2.25.0)" -> ("requests", ">=2.25.0")
"typing-extensions; python_version < '3.8'" -> ("typing-extensions", None)
"numpy>=1.21.0" -> ("numpy", ">=1.21.0")
"certifi" -> ("certifi", None)
Returns:
Tuple of (normalized_package_name, version_constraint or None)
"""
# Remove any environment markers (after semicolon)
if ';' in requires_dist:
requires_dist = requires_dist.split(';')[0].strip()
# Match patterns like "package (>=1.0)" or "package>=1.0" or "package"
match = re.match(
r'^([a-zA-Z0-9][-a-zA-Z0-9._]*)\s*(?:\(([^)]+)\)|([<>=!~][^\s;]+))?',
requires_dist.strip()
)
if not match:
return None, None
package_name = match.group(1)
# Version can be in parentheses (group 2) or directly after name (group 3)
version_constraint = match.group(2) or match.group(3)
# Normalize package name (PEP 503)
normalized_name = re.sub(r'[-_.]+', '-', package_name).lower()
# Clean up version constraint
if version_constraint:
version_constraint = version_constraint.strip()
return normalized_name, version_constraint
def _extract_requires_from_metadata(metadata_content: str) -> List[Tuple[str, Optional[str]]]:
"""Extract all Requires-Dist entries from METADATA/PKG-INFO content.
Args:
metadata_content: The content of a METADATA or PKG-INFO file
Returns:
List of (package_name, version_constraint) tuples
"""
dependencies = []
for line in metadata_content.split('\n'):
if line.startswith('Requires-Dist:'):
value = line[len('Requires-Dist:'):].strip()
pkg_name, version = _parse_requires_dist(value)
if pkg_name:
dependencies.append((pkg_name, version))
return dependencies
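As a quick illustration of the extraction helpers above, a minimal sketch; the METADATA fragment is invented for this example and is not taken from any real package:

# Hypothetical METADATA fragment; _extract_requires_from_metadata delegates to _parse_requires_dist.
sample_metadata = (
    "Metadata-Version: 2.1\n"
    "Name: demo\n"
    "Requires-Dist: requests (>=2.25.0)\n"
    "Requires-Dist: certifi\n"
)
print(_extract_requires_from_metadata(sample_metadata))
# Expected: [('requests', '>=2.25.0'), ('certifi', None)]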
def _extract_metadata_from_wheel(file_path: str) -> Optional[str]:
"""Extract METADATA file content from a wheel (zip) file.
Args:
file_path: Path to the wheel file
Returns:
METADATA file content as string, or None if not found
"""
try:
with zipfile.ZipFile(file_path) as zf:
for name in zf.namelist():
if name.endswith('.dist-info/METADATA'):
return zf.read(name).decode('utf-8', errors='replace')
except Exception as e:
logger.warning(f"Failed to extract metadata from wheel: {e}")
return None
def _extract_metadata_from_sdist(file_path: str) -> Optional[str]:
"""Extract PKG-INFO file content from a source distribution (.tar.gz).
Args:
file_path: Path to the tarball file
Returns:
PKG-INFO file content as string, or None if not found
"""
try:
with tarfile.open(file_path, mode='r:gz') as tf:
for member in tf.getmembers():
if member.name.endswith('/PKG-INFO') and member.name.count('/') == 1:
f = tf.extractfile(member)
if f:
return f.read().decode('utf-8', errors='replace')
except Exception as e:
logger.warning(f"Failed to extract metadata from sdist: {e}")
return None
def _extract_dependencies_from_file(file_path: str, filename: str) -> List[Tuple[str, Optional[str]]]:
"""Extract dependencies from a PyPI package file.
Supports wheel (.whl) and source distribution (.tar.gz) formats.
Args:
file_path: Path to the package file
filename: The original filename
Returns:
List of (package_name, version_constraint) tuples
"""
metadata = None
if filename.endswith('.whl'):
metadata = _extract_metadata_from_wheel(file_path)
elif filename.endswith('.tar.gz'):
metadata = _extract_metadata_from_sdist(file_path)
if metadata:
return _extract_requires_from_metadata(metadata)
return []
def _parse_upstream_error(response: httpx.Response) -> str:
"""Parse upstream error response to extract useful error details.
Handles JFrog/Artifactory policy errors and other common formats.
Returns a user-friendly error message.
"""
status = response.status_code
try:
body = response.text
except Exception:
return f"HTTP {status}"
# Try to parse as JSON (JFrog/Artifactory format)
try:
data = json.loads(body)
# JFrog Artifactory format: {"errors": [{"status": 403, "message": "..."}]}
if "errors" in data and isinstance(data["errors"], list):
messages = []
for err in data["errors"]:
if isinstance(err, dict) and "message" in err:
messages.append(err["message"])
if messages:
return "; ".join(messages)
# Alternative format: {"message": "..."}
if "message" in data:
return data["message"]
# Alternative format: {"error": "..."}
if "error" in data:
return data["error"]
except (json.JSONDecodeError, ValueError):
pass
# Check for policy-related keywords in plain text response
policy_keywords = ["policy", "blocked", "forbidden", "curation", "security"]
if any(kw in body.lower() for kw in policy_keywords):
# Truncate long responses but preserve the message
if len(body) > 500:
return body[:500] + "..."
return body
# Default: just return status code
return f"HTTP {status}"
def _extract_pypi_version(filename: str) -> Optional[str]:
"""Extract version from PyPI filename.
Handles formats like:
- cowsay-6.1-py3-none-any.whl
- cowsay-1.0.tar.gz
- some_package-1.2.3.post1-cp39-cp39-linux_x86_64.whl
"""
# Remove extension
if filename.endswith('.whl'):
# Wheel: name-version-pytag-abitag-platform.whl
parts = filename[:-4].split('-')
if len(parts) >= 2:
return parts[1]
elif filename.endswith('.tar.gz'):
# Source: name-version.tar.gz
base = filename[:-7]
# Find the last hyphen that precedes a version-like string
match = re.match(r'^(.+)-(\d+.*)$', base)
if match:
return match.group(2)
elif filename.endswith('.zip'):
# Egg/zip: name-version.zip
base = filename[:-4]
match = re.match(r'^(.+)-(\d+.*)$', base)
if match:
return match.group(2)
return None
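A small check of the filename handling described in the docstring, using the same example filenames:

# Expected results follow directly from the wheel/sdist/zip branches above.
assert _extract_pypi_version("cowsay-6.1-py3-none-any.whl") == "6.1"
assert _extract_pypi_version("cowsay-1.0.tar.gz") == "1.0"
assert _extract_pypi_version("some_package-1.2.3.post1-cp39-cp39-linux_x86_64.whl") == "1.2.3.post1"
assert _extract_pypi_version("README.md") is None  # unrecognised extension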
def _get_pypi_upstream_sources(db: Session) -> list[UpstreamSource]:
"""Get all enabled upstream sources configured for PyPI."""
# Get database sources
db_sources = (
db.query(UpstreamSource)
.filter(
UpstreamSource.source_type == "pypi",
UpstreamSource.enabled == True,
)
.order_by(UpstreamSource.priority)
.all()
)
# Get env sources
env_sources = [
s for s in get_env_upstream_sources()
if s.source_type == "pypi" and s.enabled
]
# Combine and sort by priority
all_sources = list(db_sources) + list(env_sources)
return sorted(all_sources, key=lambda s: s.priority)
def _build_auth_headers(source) -> dict:
"""Build authentication headers for an upstream source."""
headers = {}
if hasattr(source, 'auth_type'):
if source.auth_type == "bearer":
password = source.get_password() if hasattr(source, 'get_password') else getattr(source, 'password', None)
if password:
headers["Authorization"] = f"Bearer {password}"
elif source.auth_type == "api_key":
custom_headers = source.get_headers() if hasattr(source, 'get_headers') else {}
if custom_headers:
headers.update(custom_headers)
return headers
def _get_basic_auth(source) -> Optional[tuple[str, str]]:
"""Get basic auth credentials if applicable."""
if hasattr(source, 'auth_type') and source.auth_type == "basic":
username = getattr(source, 'username', None)
if username:
password = source.get_password() if hasattr(source, 'get_password') else getattr(source, 'password', '')
return (username, password or '')
return None
def _get_base_url(request: Request) -> str:
"""
Get the external base URL, respecting X-Forwarded-Proto header.
When behind a reverse proxy that terminates SSL, the request.base_url
will show http:// even though the external URL is https://. This function
checks the X-Forwarded-Proto header to determine the correct scheme.
"""
base_url = str(request.base_url).rstrip('/')
# Check for X-Forwarded-Proto header (set by reverse proxies)
forwarded_proto = request.headers.get('x-forwarded-proto')
if forwarded_proto:
# Replace the scheme with the forwarded protocol
parsed = urlparse(base_url)
base_url = f"{forwarded_proto}://{parsed.netloc}{parsed.path}"
return base_url
def _rewrite_package_links(html: str, base_url: str, package_name: str, upstream_base_url: str) -> str:
"""
Rewrite download links in a PyPI simple page to go through our proxy.
Args:
html: The HTML content from upstream
base_url: Our server's base URL
package_name: The package name for the URL path
upstream_base_url: The upstream URL used to fetch this page (for resolving relative URLs)
Returns:
HTML with rewritten download links
"""
# Pattern to match href attributes in anchor tags
# PyPI simple pages have links like:
# <a href="https://files.pythonhosted.org/packages/.../file.tar.gz#sha256=...">file.tar.gz</a>
# Or relative URLs from Artifactory like:
# <a href="../../packages/packages/62/35/.../requests-0.10.0.tar.gz#sha256=...">
def replace_href(match):
original_url = match.group(1)
# Resolve relative URLs to absolute using the upstream base URL
if not original_url.startswith(('http://', 'https://')):
# Split off fragment before resolving
url_without_fragment = original_url.split('#')[0]
fragment_part = original_url[len(url_without_fragment):]
absolute_url = urljoin(upstream_base_url, url_without_fragment) + fragment_part
else:
absolute_url = original_url
# Extract the filename from the URL
parsed = urlparse(absolute_url)
path_parts = parsed.path.split('/')
filename = path_parts[-1] if path_parts else ''
# Keep the hash fragment if present
fragment = f"#{parsed.fragment}" if parsed.fragment else ""
# Encode the absolute URL (without fragment) for safe transmission
encoded_url = quote(absolute_url.split('#')[0], safe='')
# Build new URL pointing to our proxy
new_url = f"{base_url}/pypi/simple/{package_name}/{filename}?upstream={encoded_url}{fragment}"
return f'href="{new_url}"'
# Match href="..." patterns
rewritten = re.sub(r'href="([^"]+)"', replace_href, html)
return rewritten
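A rough sketch of what the rewrite produces, assuming the proxy is served at https://orchard.example.com (the hostname and the pythonhosted URL are assumptions for illustration):

# Illustrative input/output for _rewrite_package_links (values are made up).
html_in = '<a href="https://files.pythonhosted.org/packages/ab/cd/requests-2.31.0.tar.gz#sha256=abc">requests-2.31.0.tar.gz</a>'
html_out = _rewrite_package_links(
    html_in,
    base_url="https://orchard.example.com",
    package_name="requests",
    upstream_base_url="https://pypi.org/simple/requests/",
)
# html_out now points at the proxy download endpoint, roughly:
# https://orchard.example.com/pypi/simple/requests/requests-2.31.0.tar.gz?upstream=https%3A%2F%2Ffiles...#sha256=abc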
@router.get("/simple/")
async def pypi_simple_index(
request: Request,
db: Session = Depends(get_db),
):
"""
PyPI Simple API index - lists all packages.
Proxies to the first available upstream PyPI source.
"""
sources = _get_pypi_upstream_sources(db)
if not sources:
raise HTTPException(
status_code=503,
detail="No PyPI upstream sources configured"
)
# Try each source in priority order
last_error = None
last_status = None
for source in sources:
try:
headers = {"User-Agent": "Orchard-PyPI-Proxy/1.0"}
headers.update(_build_auth_headers(source))
auth = _get_basic_auth(source)
# Use URL as-is - users should provide full path including /simple
simple_url = source.url.rstrip('/') + '/'
timeout = httpx.Timeout(PROXY_READ_TIMEOUT, connect=PROXY_CONNECT_TIMEOUT)
async with httpx.AsyncClient(timeout=timeout, follow_redirects=False) as client:
response = await client.get(
simple_url,
headers=headers,
auth=auth,
)
# Handle redirects manually to avoid loops
if response.status_code in (301, 302, 303, 307, 308):
redirect_url = response.headers.get('location')
if redirect_url:
# Follow the redirect once
response = await client.get(
redirect_url,
headers=headers,
auth=auth,
follow_redirects=False,
)
if response.status_code == 200:
# Return the index as-is (links are to package pages, not files)
# We could rewrite these too, but for now just proxy
content = response.text
# Rewrite package links to go through our proxy
base_url = _get_base_url(request)
content = re.sub(
r'href="([^"]+)/"',
lambda m: f'href="{base_url}/pypi/simple/{m.group(1)}/"',
content
)
return HTMLResponse(content=content)
# Parse upstream error for policy/blocking messages
last_error = _parse_upstream_error(response)
last_status = response.status_code
logger.warning(f"PyPI proxy: upstream returned {response.status_code}: {last_error}")
except httpx.ConnectError as e:
last_error = f"Connection failed: {e}"
last_status = 502
logger.warning(f"PyPI proxy: failed to connect to {source.url}: {e}")
except httpx.TimeoutException as e:
last_error = f"Timeout: {e}"
last_status = 504
logger.warning(f"PyPI proxy: timeout connecting to {source.url}: {e}")
except Exception as e:
last_error = str(e)
last_status = 502
logger.warning(f"PyPI proxy: error fetching from {source.url}: {e}")
# Pass through 4xx errors (like 403 policy blocks) so users understand why
status_code = last_status if last_status and 400 <= last_status < 500 else 502
raise HTTPException(
status_code=status_code,
detail=f"Upstream error: {last_error}"
)
@router.get("/simple/{package_name}/")
async def pypi_package_versions(
request: Request,
package_name: str,
db: Session = Depends(get_db),
):
"""
PyPI Simple API package page - lists all versions/files for a package.
Proxies to upstream and rewrites download links to go through our cache.
"""
sources = _get_pypi_upstream_sources(db)
if not sources:
raise HTTPException(
status_code=503,
detail="No PyPI upstream sources configured"
)
base_url = _get_base_url(request)
# Normalize package name (PEP 503)
normalized_name = re.sub(r'[-_.]+', '-', package_name).lower()
# Try each source in priority order
last_error = None
last_status = None
for source in sources:
try:
headers = {"User-Agent": "Orchard-PyPI-Proxy/1.0"}
headers.update(_build_auth_headers(source))
auth = _get_basic_auth(source)
# Use URL as-is - users should provide full path including /simple
package_url = source.url.rstrip('/') + f'/{normalized_name}/'
final_url = package_url # Track final URL after redirects
timeout = httpx.Timeout(PROXY_READ_TIMEOUT, connect=PROXY_CONNECT_TIMEOUT)
async with httpx.AsyncClient(timeout=timeout, follow_redirects=False) as client:
response = await client.get(
package_url,
headers=headers,
auth=auth,
)
# Handle redirects manually
redirect_count = 0
while response.status_code in (301, 302, 303, 307, 308) and redirect_count < 5:
redirect_url = response.headers.get('location')
if not redirect_url:
break
# Make redirect URL absolute if needed
if not redirect_url.startswith('http'):
redirect_url = urljoin(final_url, redirect_url)
final_url = redirect_url # Update final URL
response = await client.get(
redirect_url,
headers=headers,
auth=auth,
follow_redirects=False,
)
redirect_count += 1
if response.status_code == 200:
content = response.text
# Rewrite download links to go through our proxy
# Pass final_url so relative URLs can be resolved correctly
content = _rewrite_package_links(content, base_url, normalized_name, final_url)
return HTMLResponse(content=content)
if response.status_code == 404:
# Package not found in this source, try next
last_error = f"Package not found in {source.name}"
last_status = 404
continue
# Parse upstream error for policy/blocking messages
last_error = _parse_upstream_error(response)
last_status = response.status_code
logger.warning(f"PyPI proxy: upstream returned {response.status_code} for {package_name}: {last_error}")
except httpx.ConnectError as e:
last_error = f"Connection failed: {e}"
last_status = 502
logger.warning(f"PyPI proxy: failed to connect to {source.url}: {e}")
except httpx.TimeoutException as e:
last_error = f"Timeout: {e}"
last_status = 504
logger.warning(f"PyPI proxy: timeout connecting to {source.url}: {e}")
except Exception as e:
last_error = str(e)
last_status = 502
logger.warning(f"PyPI proxy: error fetching {package_name} from {source.url}: {e}")
# Pass through 4xx errors (like 403 policy blocks) so users understand why
status_code = last_status if last_status and 400 <= last_status < 500 else 404
raise HTTPException(
status_code=status_code,
detail=f"Package '{package_name}' error: {last_error}"
)
@router.get("/simple/{package_name}/{filename}")
async def pypi_download_file(
request: Request,
package_name: str,
filename: str,
upstream: Optional[str] = None,
db: Session = Depends(get_db),
storage: S3Storage = Depends(get_storage),
):
"""
Download a package file, caching it in Orchard.
Args:
package_name: The package name
filename: The filename to download
upstream: URL-encoded upstream URL to fetch from
"""
if not upstream:
raise HTTPException(
status_code=400,
detail="Missing 'upstream' query parameter with source URL"
)
# Decode the upstream URL
upstream_url = unquote(upstream)
# Check if we already have this URL cached
url_hash = hashlib.sha256(upstream_url.encode()).hexdigest()
cached_url = db.query(CachedUrl).filter(CachedUrl.url_hash == url_hash).first()
if cached_url:
# Serve from cache
artifact = db.query(Artifact).filter(Artifact.id == cached_url.artifact_id).first()
if artifact:
logger.info(f"PyPI proxy: serving cached {filename} (artifact {artifact.id[:12]})")
settings = get_settings()
try:
if settings.pypi_download_mode == "redirect":
# Redirect to S3 presigned URL - client downloads directly from S3
presigned_url = storage.generate_presigned_url(artifact.s3_key)
return RedirectResponse(
url=presigned_url,
status_code=302,
headers={
"X-Checksum-SHA256": artifact.id,
"X-Cache": "HIT",
}
)
else:
# Proxy mode - stream from S3 through Orchard
stream, content_length, _ = storage.get_stream(artifact.s3_key)
def stream_content():
"""Generator that yields chunks from the S3 stream."""
try:
for chunk in stream.iter_chunks():
yield chunk
finally:
stream.close()
return StreamingResponse(
stream_content(),
media_type=artifact.content_type or "application/octet-stream",
headers={
"Content-Disposition": f'attachment; filename="{filename}"',
"Content-Length": str(content_length),
"X-Checksum-SHA256": artifact.id,
"X-Cache": "HIT",
}
)
except Exception as e:
logger.error(f"PyPI proxy: error serving cached artifact: {e}")
# Fall through to fetch from upstream
# Not cached - fetch from upstream
sources = _get_pypi_upstream_sources(db)
# Use the first available source for authentication headers
# Note: The upstream URL may point to files.pythonhosted.org or other CDNs,
# not the configured source URL directly, so we can't strictly validate the host
matched_source = sources[0] if sources else None
try:
headers = {"User-Agent": "Orchard-PyPI-Proxy/1.0"}
if matched_source:
headers.update(_build_auth_headers(matched_source))
auth = _get_basic_auth(matched_source) if matched_source else None
timeout = httpx.Timeout(300.0, connect=PROXY_CONNECT_TIMEOUT) # 5 minutes for large files
# Initialize extracted dependencies list
extracted_deps = []
# Fetch the file
logger.info(f"PyPI proxy: fetching {filename} from {upstream_url}")
async with httpx.AsyncClient(timeout=timeout, follow_redirects=False) as client:
response = await client.get(
upstream_url,
headers=headers,
auth=auth,
)
# Handle redirects manually
redirect_count = 0
while response.status_code in (301, 302, 303, 307, 308) and redirect_count < 5:
redirect_url = response.headers.get('location')
if not redirect_url:
break
if not redirect_url.startswith('http'):
redirect_url = urljoin(upstream_url, redirect_url)
logger.info(f"PyPI proxy: following redirect to {redirect_url}")
# Don't send auth to different hosts
redirect_headers = {"User-Agent": "Orchard-PyPI-Proxy/1.0"}
redirect_auth = None
if urlparse(redirect_url).netloc == urlparse(upstream_url).netloc:
redirect_headers.update(headers)
redirect_auth = auth
response = await client.get(
redirect_url,
headers=redirect_headers,
auth=redirect_auth,
follow_redirects=False,
)
redirect_count += 1
if response.status_code != 200:
# Parse upstream error for policy/blocking messages
error_detail = _parse_upstream_error(response)
logger.warning(f"PyPI proxy: upstream returned {response.status_code} for {filename}: {error_detail}")
raise HTTPException(
status_code=response.status_code,
detail=f"Upstream error: {error_detail}"
)
content_type = response.headers.get('content-type', 'application/octet-stream')
# Stream to temp file to avoid loading large packages into memory
# This keeps memory usage constant regardless of package size
# Using async iteration to avoid blocking the event loop
tmp_path = None
try:
with tempfile.NamedTemporaryFile(delete=False, suffix=f"_{filename}") as tmp_file:
tmp_path = tmp_file.name
async for chunk in response.aiter_bytes(chunk_size=65536): # 64KB chunks
tmp_file.write(chunk)
# Store in S3 from temp file (computes hash and deduplicates automatically)
with open(tmp_path, 'rb') as f:
result = storage.store(f)
sha256 = result.sha256
size = result.size
s3_key = result.s3_key
# Extract dependencies from the temp file before cleaning up
extracted_deps = _extract_dependencies_from_file(tmp_path, filename)
if extracted_deps:
logger.info(f"PyPI proxy: extracted {len(extracted_deps)} dependencies from {filename}")
logger.info(f"PyPI proxy: downloaded {filename}, {size} bytes, sha256={sha256[:12]}")
finally:
# Clean up temp file
if tmp_path and os.path.exists(tmp_path):
os.unlink(tmp_path)
# Check if artifact already exists
existing = db.query(Artifact).filter(Artifact.id == sha256).first()
if existing:
# Increment ref count
existing.ref_count += 1
db.flush()
else:
# Create artifact record
new_artifact = Artifact(
id=sha256,
original_name=filename,
content_type=content_type,
size=size,
ref_count=1,
created_by="pypi-proxy",
s3_key=result.s3_key,
checksum_md5=result.md5,
checksum_sha1=result.sha1,
s3_etag=result.s3_etag,
)
db.add(new_artifact)
db.flush()
# Create/get system project and package
system_project = db.query(Project).filter(Project.name == "_pypi").first()
if not system_project:
system_project = Project(
name="_pypi",
description="System project for cached PyPI packages",
is_public=True,
is_system=True,
created_by="pypi-proxy",
)
db.add(system_project)
db.flush()
elif not system_project.is_system:
# Ensure existing project is marked as system
system_project.is_system = True
db.flush()
# Normalize package name
normalized_name = re.sub(r'[-_.]+', '-', package_name).lower()
package = db.query(Package).filter(
Package.project_id == system_project.id,
Package.name == normalized_name,
).first()
if not package:
package = Package(
project_id=system_project.id,
name=normalized_name,
description=f"PyPI package: {normalized_name}",
format="pypi",
)
db.add(package)
db.flush()
# Extract and create version
# Only create version for actual package files, not .metadata files
version = _extract_pypi_version(filename)
if version and not filename.endswith('.metadata'):
# Check by version string (the unique constraint is on package_id + version)
existing_version = db.query(PackageVersion).filter(
PackageVersion.package_id == package.id,
PackageVersion.version == version,
).first()
if not existing_version:
pkg_version = PackageVersion(
package_id=package.id,
artifact_id=sha256,
version=version,
version_source="filename",
created_by="pypi-proxy",
)
db.add(pkg_version)
# Cache the URL mapping
existing_cached = db.query(CachedUrl).filter(CachedUrl.url_hash == url_hash).first()
if not existing_cached:
cached_url_record = CachedUrl(
url_hash=url_hash,
url=upstream_url,
artifact_id=sha256,
)
db.add(cached_url_record)
# Store extracted dependencies (deduplicate first - METADATA can list same dep under multiple extras)
if extracted_deps:
# Deduplicate: keep first version constraint seen for each package name
seen_deps: dict[str, str] = {}
for dep_name, dep_version in extracted_deps:
if dep_name not in seen_deps:
seen_deps[dep_name] = dep_version if dep_version else "*"
for dep_name, dep_version in seen_deps.items():
# Check if this dependency already exists for this artifact
existing_dep = db.query(ArtifactDependency).filter(
ArtifactDependency.artifact_id == sha256,
ArtifactDependency.dependency_project == "_pypi",
ArtifactDependency.dependency_package == dep_name,
).first()
if not existing_dep:
dep = ArtifactDependency(
artifact_id=sha256,
dependency_project="_pypi",
dependency_package=dep_name,
version_constraint=dep_version,
)
db.add(dep)
db.commit()
# Serve the file from S3
settings = get_settings()
try:
if settings.pypi_download_mode == "redirect":
# Redirect to S3 presigned URL - client downloads directly from S3
presigned_url = storage.generate_presigned_url(s3_key)
return RedirectResponse(
url=presigned_url,
status_code=302,
headers={
"X-Checksum-SHA256": sha256,
"X-Cache": "MISS",
}
)
else:
# Proxy mode - stream from S3 through Orchard
stream, content_length, _ = storage.get_stream(s3_key)
def stream_content():
"""Generator that yields chunks from the S3 stream."""
try:
for chunk in stream.iter_chunks():
yield chunk
finally:
stream.close()
return StreamingResponse(
stream_content(),
media_type=content_type,
headers={
"Content-Disposition": f'attachment; filename="{filename}"',
"Content-Length": str(size),
"X-Checksum-SHA256": sha256,
"X-Cache": "MISS",
}
)
except Exception as e:
logger.error(f"PyPI proxy: error serving from S3: {e}")
raise HTTPException(status_code=500, detail=f"Error serving file: {e}")
except httpx.ConnectError as e:
raise HTTPException(status_code=502, detail=f"Connection failed: {e}")
except httpx.TimeoutException as e:
raise HTTPException(status_code=504, detail=f"Timeout: {e}")
except HTTPException:
raise
except Exception as e:
logger.exception(f"PyPI proxy: error downloading {filename}")
raise HTTPException(status_code=500, detail=str(e))
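For orientation, the module above is what lets a client treat Orchard as a PyPI index; a rough client-side sketch follows, where the hostname and package name are assumptions, not part of this change:

# Hypothetical smoke check against a running deployment (URL is an assumption).
import httpx

base = "https://orchard.example.com"
page = httpx.get(f"{base}/pypi/simple/requests/", follow_redirects=True)
page.raise_for_status()
# The returned HTML has hrefs rewritten to .../pypi/simple/requests/<file>?upstream=<encoded-url>
# pip equivalent (shell): pip install --index-url https://orchard.example.com/pypi/simple/ requests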

View File

@@ -9,6 +9,7 @@ from .base import BaseRepository
from .project import ProjectRepository from .project import ProjectRepository
from .package import PackageRepository from .package import PackageRepository
from .artifact import ArtifactRepository from .artifact import ArtifactRepository
from .tag import TagRepository
from .upload import UploadRepository from .upload import UploadRepository
__all__ = [ __all__ = [
@@ -16,5 +17,6 @@ __all__ = [
"ProjectRepository", "ProjectRepository",
"PackageRepository", "PackageRepository",
"ArtifactRepository", "ArtifactRepository",
"TagRepository",
"UploadRepository", "UploadRepository",
] ]

View File

@@ -8,7 +8,7 @@ from sqlalchemy import func, or_
from uuid import UUID from uuid import UUID
from .base import BaseRepository from .base import BaseRepository
from ..models import Artifact, PackageVersion, Upload, Package, Project from ..models import Artifact, Tag, Upload, Package, Project
class ArtifactRepository(BaseRepository[Artifact]): class ArtifactRepository(BaseRepository[Artifact]):
@@ -77,14 +77,14 @@ class ArtifactRepository(BaseRepository[Artifact]):
.all() .all()
) )
def get_artifacts_without_versions(self, limit: int = 100) -> List[Artifact]: def get_artifacts_without_tags(self, limit: int = 100) -> List[Artifact]:
"""Get artifacts that have no versions pointing to them.""" """Get artifacts that have no tags pointing to them."""
# Subquery to find artifact IDs that have versions # Subquery to find artifact IDs that have tags
versioned_artifacts = self.db.query(PackageVersion.artifact_id).distinct().subquery() tagged_artifacts = self.db.query(Tag.artifact_id).distinct().subquery()
return ( return (
self.db.query(Artifact) self.db.query(Artifact)
.filter(~Artifact.id.in_(versioned_artifacts)) .filter(~Artifact.id.in_(tagged_artifacts))
.limit(limit) .limit(limit)
.all() .all()
) )
@@ -115,34 +115,34 @@ class ArtifactRepository(BaseRepository[Artifact]):
return artifacts, total return artifacts, total
def get_referencing_versions(self, artifact_id: str) -> List[Tuple[PackageVersion, Package, Project]]: def get_referencing_tags(self, artifact_id: str) -> List[Tuple[Tag, Package, Project]]:
"""Get all versions referencing this artifact with package and project info.""" """Get all tags referencing this artifact with package and project info."""
return ( return (
self.db.query(PackageVersion, Package, Project) self.db.query(Tag, Package, Project)
.join(Package, PackageVersion.package_id == Package.id) .join(Package, Tag.package_id == Package.id)
.join(Project, Package.project_id == Project.id) .join(Project, Package.project_id == Project.id)
.filter(PackageVersion.artifact_id == artifact_id) .filter(Tag.artifact_id == artifact_id)
.all() .all()
) )
def search(self, query_str: str, limit: int = 10) -> List[Tuple[PackageVersion, Artifact, str, str]]: def search(self, query_str: str, limit: int = 10) -> List[Tuple[Tag, Artifact, str, str]]:
""" """
Search artifacts by version or original filename. Search artifacts by tag name or original filename.
Returns (version, artifact, package_name, project_name) tuples. Returns (tag, artifact, package_name, project_name) tuples.
""" """
search_lower = query_str.lower() search_lower = query_str.lower()
return ( return (
self.db.query(PackageVersion, Artifact, Package.name, Project.name) self.db.query(Tag, Artifact, Package.name, Project.name)
.join(Artifact, PackageVersion.artifact_id == Artifact.id) .join(Artifact, Tag.artifact_id == Artifact.id)
.join(Package, PackageVersion.package_id == Package.id) .join(Package, Tag.package_id == Package.id)
.join(Project, Package.project_id == Project.id) .join(Project, Package.project_id == Project.id)
.filter( .filter(
or_( or_(
func.lower(PackageVersion.version).contains(search_lower), func.lower(Tag.name).contains(search_lower),
func.lower(Artifact.original_name).contains(search_lower) func.lower(Artifact.original_name).contains(search_lower)
) )
) )
.order_by(PackageVersion.version) .order_by(Tag.name)
.limit(limit) .limit(limit)
.all() .all()
) )

View File

@@ -8,7 +8,7 @@ from sqlalchemy import func, or_, asc, desc
from uuid import UUID from uuid import UUID
from .base import BaseRepository from .base import BaseRepository
from ..models import Package, Project, PackageVersion, Upload, Artifact from ..models import Package, Project, Tag, Upload, Artifact
class PackageRepository(BaseRepository[Package]): class PackageRepository(BaseRepository[Package]):
@@ -136,10 +136,10 @@ class PackageRepository(BaseRepository[Package]):
return self.update(package, **updates) return self.update(package, **updates)
def get_stats(self, package_id: UUID) -> dict: def get_stats(self, package_id: UUID) -> dict:
"""Get package statistics (version count, artifact count, total size).""" """Get package statistics (tag count, artifact count, total size)."""
version_count = ( tag_count = (
self.db.query(func.count(PackageVersion.id)) self.db.query(func.count(Tag.id))
.filter(PackageVersion.package_id == package_id) .filter(Tag.package_id == package_id)
.scalar() or 0 .scalar() or 0
) )
@@ -154,7 +154,7 @@ class PackageRepository(BaseRepository[Package]):
) )
return { return {
"version_count": version_count, "tag_count": tag_count,
"artifact_count": artifact_stats[0] if artifact_stats else 0, "artifact_count": artifact_stats[0] if artifact_stats else 0,
"total_size": artifact_stats[1] if artifact_stats else 0, "total_size": artifact_stats[1] if artifact_stats else 0,
} }

View File

@@ -0,0 +1,168 @@
"""
Tag repository for data access operations.
"""
from typing import Optional, List, Tuple
from sqlalchemy.orm import Session
from sqlalchemy import func, or_, asc, desc
from uuid import UUID
from .base import BaseRepository
from ..models import Tag, TagHistory, Artifact, Package, Project
class TagRepository(BaseRepository[Tag]):
"""Repository for Tag entity operations."""
model = Tag
def get_by_name(self, package_id: UUID, name: str) -> Optional[Tag]:
"""Get tag by name within a package."""
return (
self.db.query(Tag)
.filter(Tag.package_id == package_id, Tag.name == name)
.first()
)
def get_with_artifact(self, package_id: UUID, name: str) -> Optional[Tuple[Tag, Artifact]]:
"""Get tag with its artifact."""
return (
self.db.query(Tag, Artifact)
.join(Artifact, Tag.artifact_id == Artifact.id)
.filter(Tag.package_id == package_id, Tag.name == name)
.first()
)
def exists_by_name(self, package_id: UUID, name: str) -> bool:
"""Check if tag with name exists in package."""
return self.db.query(
self.db.query(Tag)
.filter(Tag.package_id == package_id, Tag.name == name)
.exists()
).scalar()
def list_by_package(
self,
package_id: UUID,
page: int = 1,
limit: int = 20,
search: Optional[str] = None,
sort: str = "name",
order: str = "asc",
) -> Tuple[List[Tuple[Tag, Artifact]], int]:
"""
List tags in a package with artifact metadata.
Returns tuple of ((tag, artifact) tuples, total_count).
"""
query = (
self.db.query(Tag, Artifact)
.join(Artifact, Tag.artifact_id == Artifact.id)
.filter(Tag.package_id == package_id)
)
# Apply search filter (tag name or artifact original filename)
if search:
search_lower = search.lower()
query = query.filter(
or_(
func.lower(Tag.name).contains(search_lower),
func.lower(Artifact.original_name).contains(search_lower)
)
)
# Get total count
total = query.count()
# Apply sorting
sort_columns = {
"name": Tag.name,
"created_at": Tag.created_at,
}
sort_column = sort_columns.get(sort, Tag.name)
if order == "desc":
query = query.order_by(desc(sort_column))
else:
query = query.order_by(asc(sort_column))
# Apply pagination
offset = (page - 1) * limit
results = query.offset(offset).limit(limit).all()
return results, total
def create_tag(
self,
package_id: UUID,
name: str,
artifact_id: str,
created_by: str,
) -> Tag:
"""Create a new tag."""
return self.create(
package_id=package_id,
name=name,
artifact_id=artifact_id,
created_by=created_by,
)
def update_artifact(
self,
tag: Tag,
new_artifact_id: str,
changed_by: str,
record_history: bool = True,
) -> Tag:
"""
Update tag to point to a different artifact.
Optionally records change in tag history.
"""
old_artifact_id = tag.artifact_id
if record_history and old_artifact_id != new_artifact_id:
history = TagHistory(
tag_id=tag.id,
old_artifact_id=old_artifact_id,
new_artifact_id=new_artifact_id,
changed_by=changed_by,
)
self.db.add(history)
tag.artifact_id = new_artifact_id
tag.created_by = changed_by
self.db.flush()
return tag
def get_history(self, tag_id: UUID) -> List[TagHistory]:
"""Get tag change history."""
return (
self.db.query(TagHistory)
.filter(TagHistory.tag_id == tag_id)
.order_by(TagHistory.changed_at.desc())
.all()
)
def get_latest_in_package(self, package_id: UUID) -> Optional[Tag]:
"""Get the most recently created/updated tag in a package."""
return (
self.db.query(Tag)
.filter(Tag.package_id == package_id)
.order_by(Tag.created_at.desc())
.first()
)
def get_by_artifact(self, artifact_id: str) -> List[Tag]:
"""Get all tags pointing to an artifact."""
return (
self.db.query(Tag)
.filter(Tag.artifact_id == artifact_id)
.all()
)
def count_by_artifact(self, artifact_id: str) -> int:
"""Count tags pointing to an artifact."""
return (
self.db.query(func.count(Tag.id))
.filter(Tag.artifact_id == artifact_id)
.scalar() or 0
)
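A rough usage sketch for the repository above, assuming a SQLAlchemy session `db`, an existing package, and an artifact digest `sha256`; all identifiers here are placeholders:

# Hypothetical call site, e.g. inside an API handler (ids are placeholders).
repo = TagRepository(db)
existing = repo.get_by_name(package.id, "latest")
if existing is None:
    repo.create_tag(package.id, "latest", artifact_id=sha256, created_by="admin")
else:
    # Repointing an existing tag records a TagHistory row by default.
    repo.update_artifact(existing, new_artifact_id=sha256, changed_by="admin")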

File diff suppressed because it is too large.

View File

@@ -33,7 +33,6 @@ class ProjectResponse(BaseModel):
name: str name: str
description: Optional[str] description: Optional[str]
is_public: bool is_public: bool
is_system: bool = False
created_at: datetime created_at: datetime
updated_at: datetime updated_at: datetime
created_by: str created_by: str
@@ -114,6 +113,14 @@ class PackageUpdate(BaseModel):
platform: Optional[str] = None platform: Optional[str] = None
class TagSummary(BaseModel):
"""Lightweight tag info for embedding in package responses"""
name: str
artifact_id: str
created_at: datetime
class PackageDetailResponse(BaseModel): class PackageDetailResponse(BaseModel):
"""Package with aggregated metadata""" """Package with aggregated metadata"""
@@ -126,9 +133,13 @@ class PackageDetailResponse(BaseModel):
created_at: datetime created_at: datetime
updated_at: datetime updated_at: datetime
# Aggregated fields # Aggregated fields
tag_count: int = 0
artifact_count: int = 0 artifact_count: int = 0
total_size: int = 0 total_size: int = 0
latest_tag: Optional[str] = None
latest_upload_at: Optional[datetime] = None latest_upload_at: Optional[datetime] = None
# Recent tags (limit 5)
recent_tags: List[TagSummary] = []
class Config: class Config:
from_attributes = True from_attributes = True
@@ -153,6 +164,79 @@ class ArtifactResponse(BaseModel):
from_attributes = True from_attributes = True
# Tag schemas
class TagCreate(BaseModel):
name: str
artifact_id: str
class TagResponse(BaseModel):
id: UUID
package_id: UUID
name: str
artifact_id: str
created_at: datetime
created_by: str
version: Optional[str] = None # Version of the artifact this tag points to
class Config:
from_attributes = True
class TagDetailResponse(BaseModel):
"""Tag with embedded artifact metadata"""
id: UUID
package_id: UUID
name: str
artifact_id: str
created_at: datetime
created_by: str
version: Optional[str] = None # Version of the artifact this tag points to
# Artifact metadata
artifact_size: int
artifact_content_type: Optional[str]
artifact_original_name: Optional[str]
artifact_created_at: datetime
artifact_format_metadata: Optional[Dict[str, Any]] = None
class Config:
from_attributes = True
class TagHistoryResponse(BaseModel):
"""History entry for tag changes"""
id: UUID
tag_id: UUID
old_artifact_id: Optional[str]
new_artifact_id: str
changed_at: datetime
changed_by: str
class Config:
from_attributes = True
class TagHistoryDetailResponse(BaseModel):
"""Tag history with artifact metadata for each version"""
id: UUID
tag_id: UUID
tag_name: str
old_artifact_id: Optional[str]
new_artifact_id: str
changed_at: datetime
changed_by: str
# Artifact metadata for new artifact
artifact_size: int
artifact_original_name: Optional[str]
artifact_content_type: Optional[str]
class Config:
from_attributes = True
# Audit log schemas # Audit log schemas
class AuditLogResponse(BaseModel): class AuditLogResponse(BaseModel):
"""Audit log entry response""" """Audit log entry response"""
@@ -179,7 +263,7 @@ class UploadHistoryResponse(BaseModel):
package_name: str package_name: str
project_name: str project_name: str
original_name: Optional[str] original_name: Optional[str]
version: Optional[str] tag_name: Optional[str]
uploaded_at: datetime uploaded_at: datetime
uploaded_by: str uploaded_by: str
source_ip: Optional[str] source_ip: Optional[str]
@@ -210,10 +294,10 @@ class ArtifactProvenanceResponse(BaseModel):
# Usage statistics # Usage statistics
upload_count: int upload_count: int
# References # References
packages: List[Dict[str, Any]] # List of {project_name, package_name, versions} packages: List[Dict[str, Any]] # List of {project_name, package_name, tag_names}
versions: List[ tags: List[
Dict[str, Any] Dict[str, Any]
] # List of {project_name, package_name, version, created_at} ] # List of {project_name, package_name, tag_name, created_at}
# Upload history # Upload history
uploads: List[Dict[str, Any]] # List of upload events uploads: List[Dict[str, Any]] # List of upload events
@@ -221,8 +305,18 @@ class ArtifactProvenanceResponse(BaseModel):
from_attributes = True from_attributes = True
class ArtifactTagInfo(BaseModel):
"""Tag info for embedding in artifact responses"""
id: UUID
name: str
package_id: UUID
package_name: str
project_name: str
class ArtifactDetailResponse(BaseModel): class ArtifactDetailResponse(BaseModel):
"""Artifact with metadata""" """Artifact with list of tags/packages referencing it"""
id: str id: str
sha256: str # Explicit SHA256 field (same as id) sha256: str # Explicit SHA256 field (same as id)
@@ -236,14 +330,14 @@ class ArtifactDetailResponse(BaseModel):
created_by: str created_by: str
ref_count: int ref_count: int
format_metadata: Optional[Dict[str, Any]] = None format_metadata: Optional[Dict[str, Any]] = None
versions: List[Dict[str, Any]] = [] # List of {version, package_name, project_name} tags: List[ArtifactTagInfo] = []
class Config: class Config:
from_attributes = True from_attributes = True
class PackageArtifactResponse(BaseModel): class PackageArtifactResponse(BaseModel):
"""Artifact for package artifact listing""" """Artifact with tags for package artifact listing"""
id: str id: str
sha256: str # Explicit SHA256 field (same as id) sha256: str # Explicit SHA256 field (same as id)
@@ -256,7 +350,7 @@ class PackageArtifactResponse(BaseModel):
created_at: datetime created_at: datetime
created_by: str created_by: str
format_metadata: Optional[Dict[str, Any]] = None format_metadata: Optional[Dict[str, Any]] = None
version: Optional[str] = None # Version from PackageVersion if exists tags: List[str] = [] # Tag names pointing to this artifact
class Config: class Config:
from_attributes = True from_attributes = True
@@ -274,9 +368,28 @@ class GlobalArtifactResponse(BaseModel):
created_by: str created_by: str
format_metadata: Optional[Dict[str, Any]] = None format_metadata: Optional[Dict[str, Any]] = None
ref_count: int = 0 ref_count: int = 0
# Context from versions/packages # Context from tags/packages
projects: List[str] = [] # List of project names containing this artifact projects: List[str] = [] # List of project names containing this artifact
packages: List[str] = [] # List of "project/package" paths packages: List[str] = [] # List of "project/package" paths
tags: List[str] = [] # List of "project/package:tag" references
class Config:
from_attributes = True
class GlobalTagResponse(BaseModel):
"""Tag with project/package context for global listing"""
id: UUID
name: str
artifact_id: str
created_at: datetime
created_by: str
project_name: str
package_name: str
artifact_size: Optional[int] = None
artifact_content_type: Optional[str] = None
version: Optional[str] = None # Version of the artifact this tag points to
class Config: class Config:
from_attributes = True from_attributes = True
@@ -289,6 +402,7 @@ class UploadResponse(BaseModel):
size: int size: int
project: str project: str
package: str package: str
tag: Optional[str]
version: Optional[str] = None # Version assigned to this artifact version: Optional[str] = None # Version assigned to this artifact
version_source: Optional[str] = None # How version was determined: 'explicit', 'filename', 'metadata' version_source: Optional[str] = None # How version was determined: 'explicit', 'filename', 'metadata'
checksum_md5: Optional[str] = None checksum_md5: Optional[str] = None
@@ -315,6 +429,7 @@ class ResumableUploadInitRequest(BaseModel):
filename: str filename: str
content_type: Optional[str] = None content_type: Optional[str] = None
size: int size: int
tag: Optional[str] = None
version: Optional[str] = None # Explicit version (auto-detected if not provided) version: Optional[str] = None # Explicit version (auto-detected if not provided)
@field_validator("expected_hash") @field_validator("expected_hash")
@@ -349,7 +464,7 @@ class ResumableUploadPartResponse(BaseModel):
class ResumableUploadCompleteRequest(BaseModel): class ResumableUploadCompleteRequest(BaseModel):
"""Request to complete a resumable upload""" """Request to complete a resumable upload"""
pass tag: Optional[str] = None
class ResumableUploadCompleteResponse(BaseModel): class ResumableUploadCompleteResponse(BaseModel):
@@ -359,6 +474,7 @@ class ResumableUploadCompleteResponse(BaseModel):
size: int size: int
project: str project: str
package: str package: str
tag: Optional[str]
class ResumableUploadStatusResponse(BaseModel): class ResumableUploadStatusResponse(BaseModel):
@@ -411,6 +527,7 @@ class PackageVersionResponse(BaseModel):
size: Optional[int] = None size: Optional[int] = None
content_type: Optional[str] = None content_type: Optional[str] = None
original_name: Optional[str] = None original_name: Optional[str] = None
tags: List[str] = [] # Tag names pointing to this artifact
class Config: class Config:
from_attributes = True from_attributes = True
@@ -452,10 +569,11 @@ class SearchResultPackage(BaseModel):
class SearchResultArtifact(BaseModel): class SearchResultArtifact(BaseModel):
"""Artifact result for global search""" """Artifact/tag result for global search"""
tag_id: UUID
tag_name: str
artifact_id: str artifact_id: str
version: Optional[str]
package_id: UUID package_id: UUID
package_name: str package_name: str
project_name: str project_name: str
@@ -568,7 +686,7 @@ class ProjectStatsResponse(BaseModel):
project_id: str project_id: str
project_name: str project_name: str
package_count: int package_count: int
version_count: int tag_count: int
artifact_count: int artifact_count: int
total_size_bytes: int total_size_bytes: int
upload_count: int upload_count: int
@@ -583,7 +701,7 @@ class PackageStatsResponse(BaseModel):
package_id: str package_id: str
package_name: str package_name: str
project_name: str project_name: str
version_count: int tag_count: int
artifact_count: int artifact_count: int
total_size_bytes: int total_size_bytes: int
upload_count: int upload_count: int
@@ -600,9 +718,9 @@ class ArtifactStatsResponse(BaseModel):
size: int size: int
ref_count: int ref_count: int
storage_savings: int # (ref_count - 1) * size storage_savings: int # (ref_count - 1) * size
tags: List[Dict[str, Any]] # Tags referencing this artifact
projects: List[str] # Projects using this artifact projects: List[str] # Projects using this artifact
packages: List[str] # Packages using this artifact packages: List[str] # Packages using this artifact
versions: List[Dict[str, Any]] = [] # List of {version, package_name, project_name}
first_uploaded: Optional[datetime] = None first_uploaded: Optional[datetime] = None
last_referenced: Optional[datetime] = None last_referenced: Optional[datetime] = None
@@ -811,7 +929,20 @@ class DependencyCreate(BaseModel):
"""Schema for creating a dependency""" """Schema for creating a dependency"""
project: str project: str
package: str package: str
version: str version: Optional[str] = None
tag: Optional[str] = None
@field_validator('version', 'tag')
@classmethod
def validate_constraint(cls, v, info):
return v
def model_post_init(self, __context):
"""Validate that exactly one of version or tag is set"""
if self.version is None and self.tag is None:
raise ValueError("Either 'version' or 'tag' must be specified")
if self.version is not None and self.tag is not None:
raise ValueError("Cannot specify both 'version' and 'tag'")
class DependencyResponse(BaseModel): class DependencyResponse(BaseModel):
@@ -820,7 +951,8 @@ class DependencyResponse(BaseModel):
artifact_id: str artifact_id: str
project: str project: str
package: str package: str
version: str version: Optional[str] = None
tag: Optional[str] = None
created_at: datetime created_at: datetime
class Config: class Config:
@@ -835,6 +967,7 @@ class DependencyResponse(BaseModel):
project=dep.dependency_project, project=dep.dependency_project,
package=dep.dependency_package, package=dep.dependency_package,
version=dep.version_constraint, version=dep.version_constraint,
tag=dep.tag_constraint,
created_at=dep.created_at, created_at=dep.created_at,
) )
@@ -851,6 +984,7 @@ class DependentInfo(BaseModel):
project: str project: str
package: str package: str
version: Optional[str] = None version: Optional[str] = None
constraint_type: str # 'version' or 'tag'
constraint_value: str constraint_value: str
@@ -866,7 +1000,20 @@ class EnsureFileDependency(BaseModel):
"""Dependency entry from orchard.ensure file""" """Dependency entry from orchard.ensure file"""
project: str project: str
package: str package: str
version: str version: Optional[str] = None
tag: Optional[str] = None
@field_validator('version', 'tag')
@classmethod
def validate_constraint(cls, v, info):
return v
def model_post_init(self, __context):
"""Validate that exactly one of version or tag is set"""
if self.version is None and self.tag is None:
raise ValueError("Either 'version' or 'tag' must be specified")
if self.version is not None and self.tag is not None:
raise ValueError("Cannot specify both 'version' and 'tag'")
class EnsureFileContent(BaseModel): class EnsureFileContent(BaseModel):
@@ -880,23 +1027,15 @@ class ResolvedArtifact(BaseModel):
project: str project: str
package: str package: str
version: Optional[str] = None version: Optional[str] = None
tag: Optional[str] = None
size: int size: int
download_url: str download_url: str
class MissingDependency(BaseModel):
"""A dependency that could not be resolved (not cached on server)"""
project: str
package: str
constraint: Optional[str] = None
required_by: Optional[str] = None
class DependencyResolutionResponse(BaseModel): class DependencyResolutionResponse(BaseModel):
"""Response from dependency resolution endpoint""" """Response from dependency resolution endpoint"""
requested: Dict[str, str] # project, package, ref requested: Dict[str, str] # project, package, ref
resolved: List[ResolvedArtifact] resolved: List[ResolvedArtifact]
missing: List[MissingDependency] = []
total_size: int total_size: int
artifact_count: int artifact_count: int
@@ -905,7 +1044,7 @@ class DependencyConflict(BaseModel):
"""Details about a dependency conflict""" """Details about a dependency conflict"""
project: str project: str
package: str package: str
requirements: List[Dict[str, Any]] # version and required_by info requirements: List[Dict[str, Any]] # version/tag and required_by info
class DependencyConflictError(BaseModel): class DependencyConflictError(BaseModel):
@@ -1075,6 +1214,7 @@ class UpstreamSourceCreate(BaseModel):
source_type: str = "generic" source_type: str = "generic"
url: str url: str
enabled: bool = False enabled: bool = False
is_public: bool = True
auth_type: str = "none" auth_type: str = "none"
username: Optional[str] = None username: Optional[str] = None
password: Optional[str] = None # Write-only password: Optional[str] = None # Write-only
@@ -1131,6 +1271,7 @@ class UpstreamSourceUpdate(BaseModel):
source_type: Optional[str] = None source_type: Optional[str] = None
url: Optional[str] = None url: Optional[str] = None
enabled: Optional[bool] = None enabled: Optional[bool] = None
is_public: Optional[bool] = None
auth_type: Optional[str] = None auth_type: Optional[str] = None
username: Optional[str] = None username: Optional[str] = None
password: Optional[str] = None # Write-only, None = keep existing, empty string = clear password: Optional[str] = None # Write-only, None = keep existing, empty string = clear
@@ -1190,6 +1331,7 @@ class UpstreamSourceResponse(BaseModel):
source_type: str source_type: str
url: str url: str
enabled: bool enabled: bool
is_public: bool
auth_type: str auth_type: str
username: Optional[str] username: Optional[str]
has_password: bool # True if password is set has_password: bool # True if password is set
@@ -1205,7 +1347,9 @@ class UpstreamSourceResponse(BaseModel):
class CacheSettingsResponse(BaseModel): class CacheSettingsResponse(BaseModel):
"""Global cache settings response""" """Global cache settings response"""
allow_public_internet: bool
auto_create_system_projects: bool auto_create_system_projects: bool
allow_public_internet_env_override: Optional[bool] = None # Set if overridden by env var
auto_create_system_projects_env_override: Optional[bool] = None # Set if overridden by env var auto_create_system_projects_env_override: Optional[bool] = None # Set if overridden by env var
created_at: Optional[datetime] = None # May be None for legacy data created_at: Optional[datetime] = None # May be None for legacy data
updated_at: Optional[datetime] = None # May be None for legacy data updated_at: Optional[datetime] = None # May be None for legacy data
@@ -1216,6 +1360,7 @@ class CacheSettingsResponse(BaseModel):
class CacheSettingsUpdate(BaseModel): class CacheSettingsUpdate(BaseModel):
"""Update cache settings (partial)""" """Update cache settings (partial)"""
allow_public_internet: Optional[bool] = None
auto_create_system_projects: Optional[bool] = None auto_create_system_projects: Optional[bool] = None
@@ -1239,10 +1384,10 @@ class CacheRequest(BaseModel):
url: str url: str
source_type: str source_type: str
package_name: Optional[str] = None # Auto-derived from URL if not provided package_name: Optional[str] = None # Auto-derived from URL if not provided
version: Optional[str] = None # Auto-derived from URL if not provided tag: Optional[str] = None # Auto-derived from URL if not provided
user_project: Optional[str] = None # Cross-reference to user project user_project: Optional[str] = None # Cross-reference to user project
user_package: Optional[str] = None user_package: Optional[str] = None
user_version: Optional[str] = None user_tag: Optional[str] = None
expected_hash: Optional[str] = None # Verify downloaded content expected_hash: Optional[str] = None # Verify downloaded content
@field_validator('url') @field_validator('url')
@@ -1289,45 +1434,8 @@ class CacheResponse(BaseModel):
source_name: Optional[str] source_name: Optional[str]
system_project: str system_project: str
system_package: str system_package: str
system_version: Optional[str] system_tag: Optional[str]
user_reference: Optional[str] = None # e.g., "my-app/npm-deps/+/4.17.21" user_reference: Optional[str] = None # e.g., "my-app/npm-deps:lodash-4.17.21"
class CacheResolveRequest(BaseModel):
"""Request to cache an artifact by package coordinates (no URL required).
The server will construct the appropriate URL based on source_type and
configured upstream sources.
"""
source_type: str
package: str
version: str
user_project: Optional[str] = None
user_package: Optional[str] = None
user_version: Optional[str] = None
@field_validator('source_type')
@classmethod
def validate_source_type(cls, v: str) -> str:
if v not in SOURCE_TYPES:
raise ValueError(f"source_type must be one of: {', '.join(SOURCE_TYPES)}")
return v
@field_validator('package')
@classmethod
def validate_package(cls, v: str) -> str:
v = v.strip()
if not v:
raise ValueError("package cannot be empty")
return v
@field_validator('version')
@classmethod
def validate_version(cls, v: str) -> str:
v = v.strip()
if not v:
raise ValueError("version cannot be empty")
return v

View File

@@ -5,7 +5,7 @@ import hashlib
import logging import logging
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from .models import Project, Package, Artifact, Upload, PackageVersion, ArtifactDependency, Team, TeamMembership, User from .models import Project, Package, Artifact, Tag, Upload, PackageVersion, ArtifactDependency, Team, TeamMembership, User
from .storage import get_storage from .storage import get_storage
from .auth import hash_password from .auth import hash_password
@@ -125,14 +125,14 @@ TEST_ARTIFACTS = [
] ]
# Dependencies to create (source artifact -> dependency) # Dependencies to create (source artifact -> dependency)
# Format: (source_project, source_package, source_version, dep_project, dep_package, version_constraint) # Format: (source_project, source_package, source_version, dep_project, dep_package, version_constraint, tag_constraint)
TEST_DEPENDENCIES = [ TEST_DEPENDENCIES = [
# ui-components v1.1.0 depends on design-tokens v1.0.0 # ui-components v1.1.0 depends on design-tokens v1.0.0
("frontend-libs", "ui-components", "1.1.0", "frontend-libs", "design-tokens", "1.0.0"), ("frontend-libs", "ui-components", "1.1.0", "frontend-libs", "design-tokens", "1.0.0", None),
# auth-lib v1.0.0 depends on common-utils v2.0.0 # auth-lib v1.0.0 depends on common-utils v2.0.0
("backend-services", "auth-lib", "1.0.0", "backend-services", "common-utils", "2.0.0"), ("backend-services", "auth-lib", "1.0.0", "backend-services", "common-utils", "2.0.0", None),
# auth-lib v1.0.0 also depends on design-tokens v1.0.0 # auth-lib v1.0.0 also depends on design-tokens (stable tag)
("backend-services", "auth-lib", "1.0.0", "frontend-libs", "design-tokens", "1.0.0"), ("backend-services", "auth-lib", "1.0.0", "frontend-libs", "design-tokens", None, "latest"),
] ]
@@ -252,8 +252,9 @@ def seed_database(db: Session) -> None:
logger.info(f"Created {len(project_map)} projects and {len(package_map)} packages (assigned to {demo_team.slug})") logger.info(f"Created {len(project_map)} projects and {len(package_map)} packages (assigned to {demo_team.slug})")
# Create artifacts and versions # Create artifacts, tags, and versions
artifact_count = 0 artifact_count = 0
tag_count = 0
version_count = 0 version_count = 0
for artifact_data in TEST_ARTIFACTS: for artifact_data in TEST_ARTIFACTS:
@@ -315,12 +316,23 @@ def seed_database(db: Session) -> None:
db.add(version) db.add(version)
version_count += 1 version_count += 1
# Create tags
for tag_name in artifact_data["tags"]:
tag = Tag(
package_id=package.id,
name=tag_name,
artifact_id=sha256_hash,
created_by=team_owner_username,
)
db.add(tag)
tag_count += 1
db.flush() db.flush()
# Create dependencies # Create dependencies
dependency_count = 0 dependency_count = 0
for dep_data in TEST_DEPENDENCIES: for dep_data in TEST_DEPENDENCIES:
src_project, src_package, src_version, dep_project, dep_package, version_constraint = dep_data src_project, src_package, src_version, dep_project, dep_package, version_constraint, tag_constraint = dep_data
# Find the source artifact by looking up its version # Find the source artifact by looking up its version
src_pkg = package_map.get((src_project, src_package)) src_pkg = package_map.get((src_project, src_package))
@@ -344,10 +356,11 @@ def seed_database(db: Session) -> None:
dependency_project=dep_project, dependency_project=dep_project,
dependency_package=dep_package, dependency_package=dep_package,
version_constraint=version_constraint, version_constraint=version_constraint,
tag_constraint=tag_constraint,
) )
db.add(dependency) db.add(dependency)
dependency_count += 1 dependency_count += 1
db.commit() db.commit()
logger.info(f"Created {artifact_count} artifacts, {version_count} versions, and {dependency_count} dependencies") logger.info(f"Created {artifact_count} artifacts, {tag_count} tags, {version_count} versions, and {dependency_count} dependencies")
logger.info("Database seeding complete") logger.info("Database seeding complete")

View File

@@ -6,8 +6,9 @@ from typing import List, Optional, Tuple
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
import logging import logging
from ..models import Artifact, PackageVersion from ..models import Artifact, Tag
from ..repositories.artifact import ArtifactRepository from ..repositories.artifact import ArtifactRepository
from ..repositories.tag import TagRepository
from ..storage import S3Storage from ..storage import S3Storage
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -20,8 +21,8 @@ class ArtifactCleanupService:
Reference counting rules: Reference counting rules:
- ref_count starts at 1 when artifact is first uploaded - ref_count starts at 1 when artifact is first uploaded
- ref_count increments when the same artifact is uploaded again (deduplication) - ref_count increments when the same artifact is uploaded again (deduplication)
- ref_count decrements when a version is deleted or updated to point elsewhere - ref_count decrements when a tag is deleted or updated to point elsewhere
- ref_count decrements when a package is deleted (for each version pointing to artifact) - ref_count decrements when a package is deleted (for each tag pointing to artifact)
- When ref_count reaches 0, artifact is a candidate for deletion from S3 - When ref_count reaches 0, artifact is a candidate for deletion from S3
""" """
@@ -29,11 +30,12 @@ class ArtifactCleanupService:
self.db = db self.db = db
self.storage = storage self.storage = storage
self.artifact_repo = ArtifactRepository(db) self.artifact_repo = ArtifactRepository(db)
self.tag_repo = TagRepository(db)
def on_version_deleted(self, artifact_id: str) -> Artifact: def on_tag_deleted(self, artifact_id: str) -> Artifact:
""" """
Called when a version is deleted. Called when a tag is deleted.
Decrements ref_count for the artifact the version was pointing to. Decrements ref_count for the artifact the tag was pointing to.
""" """
artifact = self.artifact_repo.get_by_sha256(artifact_id) artifact = self.artifact_repo.get_by_sha256(artifact_id)
if artifact: if artifact:
@@ -43,11 +45,11 @@ class ArtifactCleanupService:
) )
return artifact return artifact
def on_version_updated( def on_tag_updated(
self, old_artifact_id: str, new_artifact_id: str self, old_artifact_id: str, new_artifact_id: str
) -> Tuple[Optional[Artifact], Optional[Artifact]]: ) -> Tuple[Optional[Artifact], Optional[Artifact]]:
""" """
Called when a version is updated to point to a different artifact. Called when a tag is updated to point to a different artifact.
Decrements ref_count for old artifact, increments for new (if different). Decrements ref_count for old artifact, increments for new (if different).
Returns (old_artifact, new_artifact) tuple. Returns (old_artifact, new_artifact) tuple.
@@ -77,21 +79,21 @@ class ArtifactCleanupService:
def on_package_deleted(self, package_id) -> List[str]: def on_package_deleted(self, package_id) -> List[str]:
""" """
Called when a package is deleted. Called when a package is deleted.
Decrements ref_count for all artifacts that had versions in the package. Decrements ref_count for all artifacts that had tags in the package.
Returns list of artifact IDs that were affected. Returns list of artifact IDs that were affected.
""" """
# Get all versions in the package before deletion # Get all tags in the package before deletion
versions = self.db.query(PackageVersion).filter(PackageVersion.package_id == package_id).all() tags = self.db.query(Tag).filter(Tag.package_id == package_id).all()
affected_artifacts = [] affected_artifacts = []
for version in versions: for tag in tags:
artifact = self.artifact_repo.get_by_sha256(version.artifact_id) artifact = self.artifact_repo.get_by_sha256(tag.artifact_id)
if artifact: if artifact:
self.artifact_repo.decrement_ref_count(artifact) self.artifact_repo.decrement_ref_count(artifact)
affected_artifacts.append(version.artifact_id) affected_artifacts.append(tag.artifact_id)
logger.info( logger.info(
f"Decremented ref_count for artifact {version.artifact_id} (package delete)" f"Decremented ref_count for artifact {tag.artifact_id} (package delete)"
) )
return affected_artifacts return affected_artifacts
@@ -150,7 +152,7 @@ class ArtifactCleanupService:
def verify_ref_counts(self, fix: bool = False) -> List[dict]: def verify_ref_counts(self, fix: bool = False) -> List[dict]:
""" """
Verify that ref_counts match actual version references. Verify that ref_counts match actual tag references.
Args: Args:
fix: If True, fix any mismatched ref_counts fix: If True, fix any mismatched ref_counts
@@ -160,28 +162,28 @@ class ArtifactCleanupService:
""" """
from sqlalchemy import func from sqlalchemy import func
# Get actual version counts per artifact # Get actual tag counts per artifact
version_counts = ( tag_counts = (
self.db.query(PackageVersion.artifact_id, func.count(PackageVersion.id).label("version_count")) self.db.query(Tag.artifact_id, func.count(Tag.id).label("tag_count"))
.group_by(PackageVersion.artifact_id) .group_by(Tag.artifact_id)
.all() .all()
) )
version_count_map = {artifact_id: count for artifact_id, count in version_counts} tag_count_map = {artifact_id: count for artifact_id, count in tag_counts}
# Check all artifacts # Check all artifacts
artifacts = self.db.query(Artifact).all() artifacts = self.db.query(Artifact).all()
mismatches = [] mismatches = []
for artifact in artifacts: for artifact in artifacts:
actual_count = version_count_map.get(artifact.id, 0) actual_count = tag_count_map.get(artifact.id, 0)
# ref_count should be at least 1 (initial upload) + additional uploads # ref_count should be at least 1 (initial upload) + additional uploads
# But versions are the primary reference, so we check against version count # But tags are the primary reference, so we check against tag count
if artifact.ref_count < actual_count: if artifact.ref_count < actual_count:
mismatch = { mismatch = {
"artifact_id": artifact.id, "artifact_id": artifact.id,
"stored_ref_count": artifact.ref_count, "stored_ref_count": artifact.ref_count,
"actual_version_count": actual_count, "actual_tag_count": actual_count,
} }
mismatches.append(mismatch) mismatches.append(mismatch)
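As a minimal sketch of the tag-based reference counting described in the docstring above (the constructor arguments and the plain-string artifact ids are placeholders for illustration):

cleanup = ArtifactCleanupService(db, storage)  # db: SQLAlchemy Session, storage: S3Storage

# Deleting a tag drops one reference from the artifact it pointed to.
cleanup.on_tag_deleted(artifact_id="sha256-of-old-content")  # placeholder id

# Repointing a tag moves a reference from the old artifact to the new one.
old_art, new_art = cleanup.on_tag_updated(
    old_artifact_id="sha256-of-old-content",
    new_artifact_id="sha256-of-new-content",
)

# Reconcile stored ref_counts against actual Tag rows; fix=True rewrites mismatches.
mismatches = cleanup.verify_ref_counts(fix=True)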

View File

@@ -57,6 +57,10 @@ class UpstreamSSLError(UpstreamError):
pass pass
class AirGapError(UpstreamError):
"""Request blocked due to air-gap mode."""
pass
class FileSizeExceededError(UpstreamError): class FileSizeExceededError(UpstreamError):
@@ -152,6 +156,12 @@ class UpstreamClient:
# Sort sources by priority (lower = higher priority) # Sort sources by priority (lower = higher priority)
self.sources = sorted(self.sources, key=lambda s: s.priority) self.sources = sorted(self.sources, key=lambda s: s.priority)
def _get_allow_public_internet(self) -> bool:
"""Get the allow_public_internet setting."""
if self.cache_settings is None:
return True # Default to allowing if no settings provided
return self.cache_settings.allow_public_internet
def _match_source(self, url: str) -> Optional[UpstreamSource]: def _match_source(self, url: str) -> Optional[UpstreamSource]:
""" """
Find the upstream source that matches the given URL. Find the upstream source that matches the given URL.
@@ -278,6 +288,7 @@ class UpstreamClient:
FetchResult with content, hash, size, and headers. FetchResult with content, hash, size, and headers.
Raises: Raises:
AirGapError: If air-gap mode blocks the request.
SourceDisabledError: If the matching source is disabled. SourceDisabledError: If the matching source is disabled.
UpstreamConnectionError: On connection failures. UpstreamConnectionError: On connection failures.
UpstreamTimeoutError: On timeout. UpstreamTimeoutError: On timeout.
@@ -290,6 +301,19 @@ class UpstreamClient:
# Match URL to source # Match URL to source
source = self._match_source(url) source = self._match_source(url)
# Check air-gap mode
allow_public = self._get_allow_public_internet()
if not allow_public:
if source is None:
raise AirGapError(
f"Air-gap mode enabled: URL does not match any configured upstream source: {url}"
)
if source.is_public:
raise AirGapError(
f"Air-gap mode enabled: Cannot fetch from public source '{source.name}'"
)
# Check if source is enabled (if we have a match) # Check if source is enabled (if we have a match)
if source is not None and not source.enabled: if source is not None and not source.enabled:
raise SourceDisabledError( raise SourceDisabledError(
@@ -512,8 +536,7 @@ class UpstreamClient:
Test connectivity to an upstream source. Test connectivity to an upstream source.
Performs a HEAD request to the source URL to verify connectivity Performs a HEAD request to the source URL to verify connectivity
and authentication. Does not follow redirects - a 3xx response and authentication.
is considered successful since it proves the server is reachable.
Args: Args:
source: The upstream source to test. source: The upstream source to test.
@@ -541,7 +564,7 @@ class UpstreamClient:
source.url, source.url,
headers=headers, headers=headers,
auth=auth, auth=auth,
follow_redirects=False, follow_redirects=True,
) )
# Consider 2xx and 3xx as success, also 405 (Method Not Allowed) # Consider 2xx and 3xx as success, also 405 (Method Not Allowed)
# since some servers don't support HEAD # since some servers don't support HEAD
@@ -559,7 +582,5 @@ class UpstreamClient:
return (False, f"Connection timed out: {e}", None) return (False, f"Connection timed out: {e}", None)
except httpx.ReadTimeout as e: except httpx.ReadTimeout as e:
return (False, f"Read timed out: {e}", None) return (False, f"Read timed out: {e}", None)
except httpx.TooManyRedirects as e:
return (False, f"Too many redirects: {e}", None)
except Exception as e: except Exception as e:
return (False, f"Error: {e}", None) return (False, f"Error: {e}", None)

View File

@@ -1 +0,0 @@
# Scripts package

View File

@@ -1,262 +0,0 @@
#!/usr/bin/env python3
"""
Backfill script to extract dependencies from cached PyPI packages.
This script scans all artifacts in the _pypi project and extracts
Requires-Dist metadata from wheel and sdist files that don't already
have dependencies recorded.
Usage:
# From within the container:
python -m scripts.backfill_pypi_dependencies
# Or with docker exec:
docker exec orchard_orchard-server_1 python -m scripts.backfill_pypi_dependencies
# Dry run (preview only):
docker exec orchard_orchard-server_1 python -m scripts.backfill_pypi_dependencies --dry-run
"""
import argparse
import logging
import re
import sys
import tarfile
import zipfile
from io import BytesIO
from typing import List, Optional, Tuple
# Add parent directory to path for imports
sys.path.insert(0, "/app")
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from backend.app.config import get_settings
from backend.app.models import (
Artifact,
ArtifactDependency,
Package,
Project,
Tag,
)
from backend.app.storage import get_storage
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)
def parse_requires_dist(requires_dist: str) -> Tuple[Optional[str], Optional[str]]:
"""Parse a Requires-Dist line into (package_name, version_constraint)."""
# Remove any environment markers (after semicolon)
if ";" in requires_dist:
requires_dist = requires_dist.split(";")[0].strip()
# Match patterns like "package (>=1.0)" or "package>=1.0" or "package"
match = re.match(
r"^([a-zA-Z0-9][-a-zA-Z0-9._]*)\s*(?:\(([^)]+)\)|([<>=!~][^\s;]+))?",
requires_dist.strip(),
)
if not match:
return None, None
package_name = match.group(1)
version_constraint = match.group(2) or match.group(3)
# Normalize package name (PEP 503)
normalized_name = re.sub(r"[-_.]+", "-", package_name).lower()
if version_constraint:
version_constraint = version_constraint.strip()
return normalized_name, version_constraint
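Illustrative inputs and outputs for this parser (examples added here, not part of the original script):

# parse_requires_dist("requests (>=2.0) ; python_version >= '3.8'")  -> ("requests", ">=2.0")
# parse_requires_dist("charset-normalizer<4,>=2")                    -> ("charset-normalizer", "<4,>=2")
# parse_requires_dist("Flask_SQLAlchemy")                            -> ("flask-sqlalchemy", None)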
def extract_requires_from_metadata(metadata_content: str) -> List[Tuple[str, Optional[str]]]:
"""Extract all Requires-Dist entries from METADATA/PKG-INFO content."""
dependencies = []
for line in metadata_content.split("\n"):
if line.startswith("Requires-Dist:"):
value = line[len("Requires-Dist:"):].strip()
pkg_name, version = parse_requires_dist(value)
if pkg_name:
dependencies.append((pkg_name, version))
return dependencies
def extract_metadata_from_wheel(content: bytes) -> Optional[str]:
"""Extract METADATA file content from a wheel (zip) file."""
try:
with zipfile.ZipFile(BytesIO(content)) as zf:
for name in zf.namelist():
if name.endswith(".dist-info/METADATA"):
return zf.read(name).decode("utf-8", errors="replace")
except Exception as e:
logger.warning(f"Failed to extract metadata from wheel: {e}")
return None
def extract_metadata_from_sdist(content: bytes) -> Optional[str]:
"""Extract PKG-INFO file content from a source distribution (.tar.gz)."""
try:
with tarfile.open(fileobj=BytesIO(content), mode="r:gz") as tf:
for member in tf.getmembers():
if member.name.endswith("/PKG-INFO") and member.name.count("/") == 1:
f = tf.extractfile(member)
if f:
return f.read().decode("utf-8", errors="replace")
except Exception as e:
logger.warning(f"Failed to extract metadata from sdist: {e}")
return None
def extract_dependencies(content: bytes, filename: str) -> List[Tuple[str, Optional[str]]]:
"""Extract dependencies from a PyPI package file."""
metadata = None
if filename.endswith(".whl"):
metadata = extract_metadata_from_wheel(content)
elif filename.endswith(".tar.gz"):
metadata = extract_metadata_from_sdist(content)
if metadata:
return extract_requires_from_metadata(metadata)
return []
def backfill_dependencies(dry_run: bool = False):
"""Main backfill function."""
settings = get_settings()
# Create database connection
engine = create_engine(settings.database_url)
Session = sessionmaker(bind=engine)
db = Session()
# Create storage client
storage = get_storage()
try:
# Find the _pypi project
pypi_project = db.query(Project).filter(Project.name == "_pypi").first()
if not pypi_project:
logger.info("No _pypi project found. Nothing to backfill.")
return
# Get all packages in _pypi
packages = db.query(Package).filter(Package.project_id == pypi_project.id).all()
logger.info(f"Found {len(packages)} packages in _pypi project")
total_artifacts = 0
artifacts_with_deps = 0
artifacts_processed = 0
dependencies_added = 0
for package in packages:
# Get all tags (each tag points to an artifact)
tags = db.query(Tag).filter(Tag.package_id == package.id).all()
for tag in tags:
total_artifacts += 1
filename = tag.name
# Skip non-package files (like .metadata files)
if not (filename.endswith(".whl") or filename.endswith(".tar.gz")):
continue
# Check if this artifact already has dependencies
existing_deps = db.query(ArtifactDependency).filter(
ArtifactDependency.artifact_id == tag.artifact_id
).count()
if existing_deps > 0:
artifacts_with_deps += 1
continue
# Get the artifact
artifact = db.query(Artifact).filter(Artifact.id == tag.artifact_id).first()
if not artifact:
logger.warning(f"Artifact {tag.artifact_id} not found for tag {filename}")
continue
logger.info(f"Processing {package.name}/{filename}...")
if dry_run:
logger.info(f" [DRY RUN] Would extract dependencies from {filename}")
artifacts_processed += 1
continue
# Download the artifact from S3
try:
content = storage.get(artifact.s3_key)
except Exception as e:
logger.error(f" Failed to download {filename}: {e}")
continue
# Extract dependencies
deps = extract_dependencies(content, filename)
if deps:
logger.info(f" Found {len(deps)} dependencies")
for dep_name, dep_version in deps:
# Check if already exists (race condition protection)
existing = db.query(ArtifactDependency).filter(
ArtifactDependency.artifact_id == tag.artifact_id,
ArtifactDependency.dependency_project == "_pypi",
ArtifactDependency.dependency_package == dep_name,
).first()
if not existing:
dep = ArtifactDependency(
artifact_id=tag.artifact_id,
dependency_project="_pypi",
dependency_package=dep_name,
version_constraint=dep_version if dep_version else "*",
)
db.add(dep)
dependencies_added += 1
logger.info(f" + {dep_name} {dep_version or '*'}")
db.commit()
else:
logger.info(f" No dependencies found")
artifacts_processed += 1
logger.info("")
logger.info("=" * 50)
logger.info("Backfill complete!")
logger.info(f" Total artifacts: {total_artifacts}")
logger.info(f" Already had deps: {artifacts_with_deps}")
logger.info(f" Processed: {artifacts_processed}")
logger.info(f" Dependencies added: {dependencies_added}")
if dry_run:
logger.info(" (DRY RUN - no changes made)")
finally:
db.close()
def main():
parser = argparse.ArgumentParser(
description="Backfill dependencies for cached PyPI packages"
)
parser.add_argument(
"--dry-run",
action="store_true",
help="Preview what would be done without making changes",
)
args = parser.parse_args()
backfill_dependencies(dry_run=args.dry_run)
if __name__ == "__main__":
main()

View File

@@ -96,6 +96,7 @@ def upload_test_file(
package: str, package: str,
content: bytes, content: bytes,
filename: str = "test.bin", filename: str = "test.bin",
tag: Optional[str] = None,
version: Optional[str] = None, version: Optional[str] = None,
) -> dict: ) -> dict:
""" """
@@ -107,6 +108,7 @@ def upload_test_file(
package: Package name package: Package name
content: File content as bytes content: File content as bytes
filename: Original filename filename: Original filename
tag: Optional tag to assign
version: Optional version to assign version: Optional version to assign
Returns: Returns:
@@ -114,6 +116,8 @@ def upload_test_file(
""" """
files = {"file": (filename, io.BytesIO(content), "application/octet-stream")} files = {"file": (filename, io.BytesIO(content), "application/octet-stream")}
data = {} data = {}
if tag:
data["tag"] = tag
if version: if version:
data["version"] = version data["version"] = version

View File

@@ -25,7 +25,7 @@ class TestArtifactRetrieval:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
upload_test_file( upload_test_file(
integration_client, project_name, package_name, content, version="v1" integration_client, project_name, package_name, content, tag="v1"
) )
response = integration_client.get(f"/api/v1/artifact/{expected_hash}") response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -46,27 +46,27 @@ class TestArtifactRetrieval:
assert response.status_code == 404 assert response.status_code == 404
@pytest.mark.integration @pytest.mark.integration
def test_artifact_includes_versions(self, integration_client, test_package): def test_artifact_includes_tags(self, integration_client, test_package):
"""Test artifact response includes versions pointing to it.""" """Test artifact response includes tags pointing to it."""
project_name, package_name = test_package project_name, package_name = test_package
content = b"artifact with versions test" content = b"artifact with tags test"
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
upload_test_file( upload_test_file(
integration_client, project_name, package_name, content, version="1.0.0" integration_client, project_name, package_name, content, tag="tagged-v1"
) )
response = integration_client.get(f"/api/v1/artifact/{expected_hash}") response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200 assert response.status_code == 200
data = response.json() data = response.json()
assert "versions" in data assert "tags" in data
assert len(data["versions"]) >= 1 assert len(data["tags"]) >= 1
version = data["versions"][0] tag = data["tags"][0]
assert "version" in version assert "name" in tag
assert "package_name" in version assert "package_name" in tag
assert "project_name" in version assert "project_name" in tag
class TestArtifactStats: class TestArtifactStats:
@@ -82,7 +82,7 @@ class TestArtifactStats:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
upload_test_file( upload_test_file(
integration_client, project, package, content, version=f"art-{unique_test_id}" integration_client, project, package, content, tag=f"art-{unique_test_id}"
) )
response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats") response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
@@ -94,7 +94,7 @@ class TestArtifactStats:
assert "size" in data assert "size" in data
assert "ref_count" in data assert "ref_count" in data
assert "storage_savings" in data assert "storage_savings" in data
assert "versions" in data assert "tags" in data
assert "projects" in data assert "projects" in data
assert "packages" in data assert "packages" in data
@@ -136,8 +136,8 @@ class TestArtifactStats:
) )
# Upload same content to both projects # Upload same content to both projects
upload_test_file(integration_client, proj1, "pkg", content, version="v1") upload_test_file(integration_client, proj1, "pkg", content, tag="v1")
upload_test_file(integration_client, proj2, "pkg", content, version="v1") upload_test_file(integration_client, proj2, "pkg", content, tag="v1")
# Check artifact stats # Check artifact stats
response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats") response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
@@ -203,7 +203,7 @@ class TestArtifactProvenance:
assert "first_uploaded_by" in data assert "first_uploaded_by" in data
assert "upload_count" in data assert "upload_count" in data
assert "packages" in data assert "packages" in data
assert "versions" in data assert "tags" in data
assert "uploads" in data assert "uploads" in data
@pytest.mark.integration @pytest.mark.integration
@@ -214,17 +214,17 @@ class TestArtifactProvenance:
assert response.status_code == 404 assert response.status_code == 404
@pytest.mark.integration @pytest.mark.integration
def test_artifact_history_with_version(self, integration_client, test_package): def test_artifact_history_with_tag(self, integration_client, test_package):
"""Test artifact history includes version information when versioned.""" """Test artifact history includes tag information when tagged."""
project_name, package_name = test_package project_name, package_name = test_package
upload_result = upload_test_file( upload_result = upload_test_file(
integration_client, integration_client,
project_name, project_name,
package_name, package_name,
b"versioned provenance test", b"tagged provenance test",
"versioned.txt", "tagged.txt",
version="v1.0.0", tag="v1.0.0",
) )
artifact_id = upload_result["artifact_id"] artifact_id = upload_result["artifact_id"]
@@ -232,12 +232,12 @@ class TestArtifactProvenance:
assert response.status_code == 200 assert response.status_code == 200
data = response.json() data = response.json()
assert len(data["versions"]) >= 1 assert len(data["tags"]) >= 1
version = data["versions"][0] tag = data["tags"][0]
assert "project_name" in version assert "project_name" in tag
assert "package_name" in version assert "package_name" in tag
assert "version" in version assert "tag_name" in tag
class TestArtifactUploads: class TestArtifactUploads:
@@ -306,24 +306,24 @@ class TestOrphanedArtifacts:
assert len(response.json()) <= 5 assert len(response.json()) <= 5
@pytest.mark.integration @pytest.mark.integration
def test_artifact_becomes_orphaned_when_version_deleted( def test_artifact_becomes_orphaned_when_tag_deleted(
self, integration_client, test_package, unique_test_id self, integration_client, test_package, unique_test_id
): ):
"""Test artifact appears in orphaned list after version is deleted.""" """Test artifact appears in orphaned list after tag is deleted."""
project, package = test_package project, package = test_package
content = f"orphan test {unique_test_id}".encode() content = f"orphan test {unique_test_id}".encode()
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
# Upload with version # Upload with tag
upload_test_file(integration_client, project, package, content, version="1.0.0-temp") upload_test_file(integration_client, project, package, content, tag="temp-tag")
# Verify not in orphaned list # Verify not in orphaned list
response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000") response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
orphaned_ids = [a["id"] for a in response.json()] orphaned_ids = [a["id"] for a in response.json()]
assert expected_hash not in orphaned_ids assert expected_hash not in orphaned_ids
# Delete the version # Delete the tag
integration_client.delete(f"/api/v1/project/{project}/{package}/versions/1.0.0-temp") integration_client.delete(f"/api/v1/project/{project}/{package}/tags/temp-tag")
# Verify now in orphaned list # Verify now in orphaned list
response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000") response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
@@ -356,9 +356,9 @@ class TestGarbageCollection:
content = f"dry run test {unique_test_id}".encode() content = f"dry run test {unique_test_id}".encode()
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
# Upload and delete version to create orphan # Upload and delete tag to create orphan
upload_test_file(integration_client, project, package, content, version="1.0.0-dryrun") upload_test_file(integration_client, project, package, content, tag="dry-run")
integration_client.delete(f"/api/v1/project/{project}/{package}/versions/1.0.0-dryrun") integration_client.delete(f"/api/v1/project/{project}/{package}/tags/dry-run")
# Verify artifact exists # Verify artifact exists
response = integration_client.get(f"/api/v1/artifact/{expected_hash}") response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -385,7 +385,7 @@ class TestGarbageCollection:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
# Upload with tag (ref_count=1) # Upload with tag (ref_count=1)
upload_test_file(integration_client, project, package, content, version="keep-this") upload_test_file(integration_client, project, package, content, tag="keep-this")
# Verify artifact exists with ref_count=1 # Verify artifact exists with ref_count=1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}") response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -534,6 +534,50 @@ class TestGlobalArtifacts:
assert response.status_code == 400 assert response.status_code == 400
class TestGlobalTags:
"""Tests for global tags endpoint."""
@pytest.mark.integration
def test_global_tags_returns_200(self, integration_client):
"""Test global tags endpoint returns 200."""
response = integration_client.get("/api/v1/tags")
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
@pytest.mark.integration
def test_global_tags_pagination(self, integration_client):
"""Test global tags endpoint respects pagination."""
response = integration_client.get("/api/v1/tags?limit=5&page=1")
assert response.status_code == 200
data = response.json()
assert len(data["items"]) <= 5
assert data["pagination"]["limit"] == 5
@pytest.mark.integration
def test_global_tags_has_project_context(self, integration_client):
"""Test global tags response includes project/package context."""
response = integration_client.get("/api/v1/tags?limit=1")
assert response.status_code == 200
data = response.json()
if len(data["items"]) > 0:
item = data["items"][0]
assert "project_name" in item
assert "package_name" in item
assert "artifact_id" in item
@pytest.mark.integration
def test_global_tags_search_with_wildcard(self, integration_client):
"""Test global tags search supports wildcards."""
response = integration_client.get("/api/v1/tags?search=v*")
assert response.status_code == 200
# Just verify it doesn't error; results may vary
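Taken together, the new tests above imply a listing call roughly like this (only the fields asserted in the tests are shown):

resp = integration_client.get("/api/v1/tags?search=v*&limit=5&page=1")
assert resp.status_code == 200
data = resp.json()
for item in data["items"]:
    print(item["project_name"], item["package_name"], item["artifact_id"])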
class TestAuditLogs: class TestAuditLogs:
"""Tests for global audit logs endpoint.""" """Tests for global audit logs endpoint."""

View File

@@ -63,7 +63,7 @@ class TestConcurrentUploads:
response = client.post( response = client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
data={"version": f"concurrent-{idx}"}, data={"tag": f"concurrent-{idx}"},
headers={"Authorization": f"Bearer {api_key}"}, headers={"Authorization": f"Bearer {api_key}"},
) )
if response.status_code == 200: if response.status_code == 200:
@@ -117,7 +117,7 @@ class TestConcurrentUploads:
response = client.post( response = client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
data={"version": f"concurrent5-{idx}"}, data={"tag": f"concurrent5-{idx}"},
headers={"Authorization": f"Bearer {api_key}"}, headers={"Authorization": f"Bearer {api_key}"},
) )
if response.status_code == 200: if response.status_code == 200:
@@ -171,7 +171,7 @@ class TestConcurrentUploads:
response = client.post( response = client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
data={"version": f"concurrent10-{idx}"}, data={"tag": f"concurrent10-{idx}"},
headers={"Authorization": f"Bearer {api_key}"}, headers={"Authorization": f"Bearer {api_key}"},
) )
if response.status_code == 200: if response.status_code == 200:
@@ -195,38 +195,19 @@ class TestConcurrentUploads:
@pytest.mark.integration @pytest.mark.integration
@pytest.mark.concurrent @pytest.mark.concurrent
def test_concurrent_uploads_same_file_deduplication( def test_concurrent_uploads_same_file_deduplication(self, integration_client, test_package):
self, integration_client, test_project, unique_test_id """Test concurrent uploads of same file handle deduplication correctly."""
): project, package = test_package
"""Test concurrent uploads of same file handle deduplication correctly.
Same content uploaded to different packages should result in:
- Same artifact_id (content-addressable)
- ref_count = number of packages (one version per package)
"""
project = test_project
api_key = get_api_key(integration_client) api_key = get_api_key(integration_client)
assert api_key, "Failed to create API key" assert api_key, "Failed to create API key"
num_concurrent = 5
package_names = []
# Create multiple packages for concurrent uploads
for i in range(num_concurrent):
pkg_name = f"dedup-pkg-{unique_test_id}-{i}"
response = integration_client.post(
f"/api/v1/project/{project}/packages",
json={"name": pkg_name, "description": f"Dedup test package {i}"},
)
assert response.status_code == 200
package_names.append(pkg_name)
content, expected_hash = generate_content_with_hash(4096, seed=999) content, expected_hash = generate_content_with_hash(4096, seed=999)
num_concurrent = 5
results = [] results = []
errors = [] errors = []
def upload_worker(idx, package): def upload_worker(idx):
try: try:
from httpx import Client from httpx import Client
base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080") base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
@@ -238,7 +219,7 @@ class TestConcurrentUploads:
response = client.post( response = client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": f"dedup-{idx}"},
headers={"Authorization": f"Bearer {api_key}"}, headers={"Authorization": f"Bearer {api_key}"},
) )
if response.status_code == 200: if response.status_code == 200:
@@ -249,10 +230,7 @@ class TestConcurrentUploads:
errors.append(f"Worker {idx}: {str(e)}") errors.append(f"Worker {idx}: {str(e)}")
with ThreadPoolExecutor(max_workers=num_concurrent) as executor: with ThreadPoolExecutor(max_workers=num_concurrent) as executor:
futures = [ futures = [executor.submit(upload_worker, i) for i in range(num_concurrent)]
executor.submit(upload_worker, i, package_names[i])
for i in range(num_concurrent)
]
for future in as_completed(futures): for future in as_completed(futures):
pass pass
@@ -264,7 +242,7 @@ class TestConcurrentUploads:
assert len(artifact_ids) == 1 assert len(artifact_ids) == 1
assert expected_hash in artifact_ids assert expected_hash in artifact_ids
# Verify final ref_count equals number of packages # Verify final ref_count equals number of uploads
response = integration_client.get(f"/api/v1/artifact/{expected_hash}") response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200 assert response.status_code == 200
assert response.json()["ref_count"] == num_concurrent assert response.json()["ref_count"] == num_concurrent
@@ -309,7 +287,7 @@ class TestConcurrentUploads:
response = client.post( response = client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
data={"version": "latest"}, data={"tag": "latest"},
headers={"Authorization": f"Bearer {api_key}"}, headers={"Authorization": f"Bearer {api_key}"},
) )
if response.status_code == 200: if response.status_code == 200:
@@ -343,7 +321,7 @@ class TestConcurrentDownloads:
content, expected_hash = generate_content_with_hash(2048, seed=400) content, expected_hash = generate_content_with_hash(2048, seed=400)
# Upload first # Upload first
upload_test_file(integration_client, project, package, content, version="download-test") upload_test_file(integration_client, project, package, content, tag="download-test")
results = [] results = []
errors = [] errors = []
@@ -384,7 +362,7 @@ class TestConcurrentDownloads:
project, package = test_package project, package = test_package
content, expected_hash = generate_content_with_hash(4096, seed=500) content, expected_hash = generate_content_with_hash(4096, seed=500)
upload_test_file(integration_client, project, package, content, version="download5-test") upload_test_file(integration_client, project, package, content, tag="download5-test")
num_downloads = 5 num_downloads = 5
results = [] results = []
@@ -425,7 +403,7 @@ class TestConcurrentDownloads:
project, package = test_package project, package = test_package
content, expected_hash = generate_content_with_hash(8192, seed=600) content, expected_hash = generate_content_with_hash(8192, seed=600)
upload_test_file(integration_client, project, package, content, version="download10-test") upload_test_file(integration_client, project, package, content, tag="download10-test")
num_downloads = 10 num_downloads = 10
results = [] results = []
@@ -472,7 +450,7 @@ class TestConcurrentDownloads:
content, expected_hash = generate_content_with_hash(1024, seed=700 + i) content, expected_hash = generate_content_with_hash(1024, seed=700 + i)
upload_test_file( upload_test_file(
integration_client, project, package, content, integration_client, project, package, content,
version=f"multi-download-{i}" tag=f"multi-download-{i}"
) )
uploads.append((f"multi-download-{i}", content)) uploads.append((f"multi-download-{i}", content))
@@ -524,7 +502,7 @@ class TestMixedConcurrentOperations:
# Upload initial content # Upload initial content
content1, hash1 = generate_content_with_hash(10240, seed=800) # 10KB content1, hash1 = generate_content_with_hash(10240, seed=800) # 10KB
upload_test_file(integration_client, project, package, content1, version="initial") upload_test_file(integration_client, project, package, content1, tag="initial")
# New content for upload during download # New content for upload during download
content2, hash2 = generate_content_with_hash(10240, seed=801) content2, hash2 = generate_content_with_hash(10240, seed=801)
@@ -561,7 +539,7 @@ class TestMixedConcurrentOperations:
response = client.post( response = client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
data={"version": "during-download"}, data={"tag": "during-download"},
headers={"Authorization": f"Bearer {api_key}"}, headers={"Authorization": f"Bearer {api_key}"},
) )
if response.status_code == 200: if response.status_code == 200:
@@ -601,7 +579,7 @@ class TestMixedConcurrentOperations:
existing_files = [] existing_files = []
for i in range(3): for i in range(3):
content, hash = generate_content_with_hash(2048, seed=900 + i) content, hash = generate_content_with_hash(2048, seed=900 + i)
upload_test_file(integration_client, project, package, content, version=f"existing-{i}") upload_test_file(integration_client, project, package, content, tag=f"existing-{i}")
existing_files.append((f"existing-{i}", content)) existing_files.append((f"existing-{i}", content))
# New files for uploading # New files for uploading
@@ -641,7 +619,7 @@ class TestMixedConcurrentOperations:
response = client.post( response = client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
data={"version": f"new-{idx}"}, data={"tag": f"new-{idx}"},
headers={"Authorization": f"Bearer {api_key}"}, headers={"Authorization": f"Bearer {api_key}"},
) )
if response.status_code == 200: if response.status_code == 200:
@@ -711,7 +689,7 @@ class TestMixedConcurrentOperations:
upload_resp = client.post( upload_resp = client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
data={"version": f"pattern-{idx}"}, data={"tag": f"pattern-{idx}"},
headers={"Authorization": f"Bearer {api_key}"}, headers={"Authorization": f"Bearer {api_key}"},
) )
if upload_resp.status_code != 200: if upload_resp.status_code != 200:

View File

@@ -68,7 +68,7 @@ class TestUploadErrorHandling:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
data={"version": "no-file-provided"}, data={"tag": "no-file-provided"},
) )
assert response.status_code == 422 assert response.status_code == 422
@@ -200,7 +200,7 @@ class TestTimeoutBehavior:
start_time = time.time() start_time = time.time()
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="timeout-test" integration_client, project, package, content, tag="timeout-test"
) )
elapsed = time.time() - start_time elapsed = time.time() - start_time
@@ -219,7 +219,7 @@ class TestTimeoutBehavior:
# First upload # First upload
upload_test_file( upload_test_file(
integration_client, project, package, content, version="download-timeout-test" integration_client, project, package, content, tag="download-timeout-test"
) )
# Then download and time it # Then download and time it

View File

@@ -41,7 +41,7 @@ class TestRoundTripVerification:
# Upload and capture returned hash # Upload and capture returned hash
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="roundtrip" integration_client, project, package, content, tag="roundtrip"
) )
uploaded_hash = result["artifact_id"] uploaded_hash = result["artifact_id"]
@@ -84,7 +84,7 @@ class TestRoundTripVerification:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
upload_test_file( upload_test_file(
integration_client, project, package, content, version="header-check" integration_client, project, package, content, tag="header-check"
) )
response = integration_client.get( response = integration_client.get(
@@ -102,7 +102,7 @@ class TestRoundTripVerification:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
upload_test_file( upload_test_file(
integration_client, project, package, content, version="etag-check" integration_client, project, package, content, tag="etag-check"
) )
response = integration_client.get( response = integration_client.get(
@@ -186,7 +186,7 @@ class TestClientSideVerificationWorkflow:
content = b"Client post-download verification" content = b"Client post-download verification"
upload_test_file( upload_test_file(
integration_client, project, package, content, version="verify-after" integration_client, project, package, content, tag="verify-after"
) )
response = integration_client.get( response = integration_client.get(
@@ -215,7 +215,7 @@ class TestIntegritySizeVariants:
content, expected_hash = sized_content(SIZE_1KB, seed=100) content, expected_hash = sized_content(SIZE_1KB, seed=100)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="int-1kb" integration_client, project, package, content, tag="int-1kb"
) )
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
@@ -234,7 +234,7 @@ class TestIntegritySizeVariants:
content, expected_hash = sized_content(SIZE_100KB, seed=101) content, expected_hash = sized_content(SIZE_100KB, seed=101)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="int-100kb" integration_client, project, package, content, tag="int-100kb"
) )
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
@@ -253,7 +253,7 @@ class TestIntegritySizeVariants:
content, expected_hash = sized_content(SIZE_1MB, seed=102) content, expected_hash = sized_content(SIZE_1MB, seed=102)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="int-1mb" integration_client, project, package, content, tag="int-1mb"
) )
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
@@ -273,7 +273,7 @@ class TestIntegritySizeVariants:
content, expected_hash = sized_content(SIZE_10MB, seed=103) content, expected_hash = sized_content(SIZE_10MB, seed=103)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="int-10mb" integration_client, project, package, content, tag="int-10mb"
) )
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
@@ -323,13 +323,7 @@ class TestConsistencyCheck:
@pytest.mark.integration @pytest.mark.integration
def test_consistency_check_after_upload(self, integration_client, test_package): def test_consistency_check_after_upload(self, integration_client, test_package):
"""Test consistency check runs successfully after a valid upload. """Test consistency check passes after valid upload."""
Note: We don't assert healthy=True because other tests (especially
corruption detection tests) may leave orphaned S3 objects behind.
This test validates the consistency check endpoint works and the
uploaded artifact is included in the check count.
"""
project, package = test_package project, package = test_package
content = b"Consistency check test content" content = b"Consistency check test content"
@@ -341,10 +335,9 @@ class TestConsistencyCheck:
assert response.status_code == 200 assert response.status_code == 200
data = response.json() data = response.json()
# Verify check ran - at least 1 artifact was checked # Verify check ran and no issues
assert data["total_artifacts_checked"] >= 1 assert data["total_artifacts_checked"] >= 1
# Verify no missing S3 objects (uploaded artifact should exist) assert data["healthy"] is True
assert data["missing_s3_objects"] == 0
@pytest.mark.integration @pytest.mark.integration
def test_consistency_check_limit_parameter(self, integration_client): def test_consistency_check_limit_parameter(self, integration_client):
@@ -373,7 +366,7 @@ class TestDigestHeader:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
upload_test_file( upload_test_file(
integration_client, project, package, content, version="digest-test" integration_client, project, package, content, tag="digest-test"
) )
response = integration_client.get( response = integration_client.get(
@@ -397,7 +390,7 @@ class TestDigestHeader:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
upload_test_file( upload_test_file(
integration_client, project, package, content, version="digest-b64" integration_client, project, package, content, tag="digest-b64"
) )
response = integration_client.get( response = integration_client.get(
@@ -427,7 +420,7 @@ class TestVerificationModes:
content = b"Pre-verification mode test" content = b"Pre-verification mode test"
upload_test_file( upload_test_file(
integration_client, project, package, content, version="pre-verify" integration_client, project, package, content, tag="pre-verify"
) )
response = integration_client.get( response = integration_client.get(
@@ -447,7 +440,7 @@ class TestVerificationModes:
content = b"Stream verification mode test" content = b"Stream verification mode test"
upload_test_file( upload_test_file(
integration_client, project, package, content, version="stream-verify" integration_client, project, package, content, tag="stream-verify"
) )
response = integration_client.get( response = integration_client.get(
@@ -484,7 +477,7 @@ class TestArtifactIntegrityEndpoint:
expected_size = len(content) expected_size = len(content)
upload_test_file( upload_test_file(
integration_client, project, package, content, version="content-len" integration_client, project, package, content, tag="content-len"
) )
response = integration_client.get( response = integration_client.get(
@@ -520,7 +513,7 @@ class TestCorruptionDetection:
# Upload original content # Upload original content
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="corrupt-test" integration_client, project, package, content, tag="corrupt-test"
) )
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
@@ -562,7 +555,7 @@ class TestCorruptionDetection:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="bitflip-test" integration_client, project, package, content, tag="bitflip-test"
) )
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
@@ -599,7 +592,7 @@ class TestCorruptionDetection:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="truncate-test" integration_client, project, package, content, tag="truncate-test"
) )
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
@@ -634,7 +627,7 @@ class TestCorruptionDetection:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="append-test" integration_client, project, package, content, tag="append-test"
) )
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
@@ -677,7 +670,7 @@ class TestCorruptionDetection:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="client-detect" integration_client, project, package, content, tag="client-detect"
) )
# Corrupt the S3 object # Corrupt the S3 object
@@ -720,7 +713,7 @@ class TestCorruptionDetection:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="size-mismatch" integration_client, project, package, content, tag="size-mismatch"
) )
# Modify S3 object to have different size # Modify S3 object to have different size
@@ -754,7 +747,7 @@ class TestCorruptionDetection:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="missing-s3" integration_client, project, package, content, tag="missing-s3"
) )
# Delete the S3 object # Delete the S3 object

View File

@@ -41,7 +41,7 @@ class TestUploadMetrics:
content = b"duration test content" content = b"duration test content"
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="duration-test" integration_client, project, package, content, tag="duration-test"
) )
assert "duration_ms" in result assert "duration_ms" in result
@@ -55,7 +55,7 @@ class TestUploadMetrics:
content = b"throughput test content" content = b"throughput test content"
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="throughput-test" integration_client, project, package, content, tag="throughput-test"
) )
assert "throughput_mbps" in result assert "throughput_mbps" in result
@@ -72,7 +72,7 @@ class TestUploadMetrics:
start = time.time() start = time.time()
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="duration-check" integration_client, project, package, content, tag="duration-check"
) )
actual_duration = (time.time() - start) * 1000 # ms actual_duration = (time.time() - start) * 1000 # ms
@@ -92,7 +92,7 @@ class TestLargeFileUploads:
content, expected_hash = sized_content(SIZE_10MB, seed=200) content, expected_hash = sized_content(SIZE_10MB, seed=200)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="large-10mb" integration_client, project, package, content, tag="large-10mb"
) )
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
@@ -109,7 +109,7 @@ class TestLargeFileUploads:
content, expected_hash = sized_content(SIZE_100MB, seed=300) content, expected_hash = sized_content(SIZE_100MB, seed=300)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="large-100mb" integration_client, project, package, content, tag="large-100mb"
) )
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
@@ -126,7 +126,7 @@ class TestLargeFileUploads:
content, expected_hash = sized_content(SIZE_1GB, seed=400) content, expected_hash = sized_content(SIZE_1GB, seed=400)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="large-1gb" integration_client, project, package, content, tag="large-1gb"
) )
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
@@ -147,14 +147,14 @@ class TestLargeFileUploads:
# First upload # First upload
result1 = upload_test_file( result1 = upload_test_file(
integration_client, project, package, content, version=f"dedup-{unique_test_id}-1" integration_client, project, package, content, tag=f"dedup-{unique_test_id}-1"
) )
# Note: may be True if previous test uploaded same content # Note: may be True if previous test uploaded same content
first_dedupe = result1["deduplicated"] first_dedupe = result1["deduplicated"]
# Second upload of same content # Second upload of same content
result2 = upload_test_file( result2 = upload_test_file(
integration_client, project, package, content, version=f"dedup-{unique_test_id}-2" integration_client, project, package, content, tag=f"dedup-{unique_test_id}-2"
) )
assert result2["artifact_id"] == expected_hash assert result2["artifact_id"] == expected_hash
# Second upload MUST be deduplicated # Second upload MUST be deduplicated
@@ -277,7 +277,7 @@ class TestUploadSizeLimits:
content = b"X" content = b"X"
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="min-size" integration_client, project, package, content, tag="min-size"
) )
assert result["size"] == 1 assert result["size"] == 1
@@ -289,7 +289,7 @@ class TestUploadSizeLimits:
content = b"content length verification test" content = b"content length verification test"
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="content-length-test" integration_client, project, package, content, tag="content-length-test"
) )
# Size in response should match actual content length # Size in response should match actual content length
@@ -336,7 +336,7 @@ class TestUploadErrorHandling:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
data={"version": "no-file"}, data={"tag": "no-file"},
) )
assert response.status_code == 422 assert response.status_code == 422
@@ -459,7 +459,7 @@ class TestUploadTimeout:
# httpx client should handle this quickly # httpx client should handle this quickly
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="timeout-small" integration_client, project, package, content, tag="timeout-small"
) )
assert result["artifact_id"] is not None assert result["artifact_id"] is not None
@@ -474,7 +474,7 @@ class TestUploadTimeout:
start = time.time() start = time.time()
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="timeout-check" integration_client, project, package, content, tag="timeout-check"
) )
duration = time.time() - start duration = time.time() - start
@@ -525,7 +525,7 @@ class TestConcurrentUploads:
response = client.post( response = client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
data={"version": f"concurrent-diff-{idx}"}, data={"tag": f"concurrent-diff-{idx}"},
headers={"Authorization": f"Bearer {api_key}"}, headers={"Authorization": f"Bearer {api_key}"},
) )
if response.status_code == 200: if response.status_code == 200:

View File

@@ -175,7 +175,7 @@ class TestPackageStats:
assert "package_id" in data assert "package_id" in data
assert "package_name" in data assert "package_name" in data
assert "project_name" in data assert "project_name" in data
assert "version_count" in data assert "tag_count" in data
assert "artifact_count" in data assert "artifact_count" in data
assert "total_size_bytes" in data assert "total_size_bytes" in data
assert "upload_count" in data assert "upload_count" in data
@@ -234,11 +234,7 @@ class TestPackageCascadeDelete:
def test_ref_count_decrements_on_package_delete( def test_ref_count_decrements_on_package_delete(
self, integration_client, unique_test_id self, integration_client, unique_test_id
): ):
"""Test ref_count decrements when package is deleted. """Test ref_count decrements for all tags when package is deleted."""
Each package can only have one version per artifact (same content = same version).
This test verifies that deleting a package decrements the artifact's ref_count.
"""
project_name = f"cascade-pkg-{unique_test_id}" project_name = f"cascade-pkg-{unique_test_id}"
package_name = f"test-pkg-{unique_test_id}" package_name = f"test-pkg-{unique_test_id}"
@@ -260,17 +256,23 @@ class TestPackageCascadeDelete:
) )
assert response.status_code == 200 assert response.status_code == 200
# Upload content with version # Upload content with multiple tags
content = f"cascade delete test {unique_test_id}".encode() content = f"cascade delete test {unique_test_id}".encode()
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
upload_test_file( upload_test_file(
integration_client, project_name, package_name, content, version="1.0.0" integration_client, project_name, package_name, content, tag="v1"
)
upload_test_file(
integration_client, project_name, package_name, content, tag="v2"
)
upload_test_file(
integration_client, project_name, package_name, content, tag="v3"
) )
# Verify ref_count is 1 # Verify ref_count is 3
response = integration_client.get(f"/api/v1/artifact/{expected_hash}") response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1 assert response.json()["ref_count"] == 3
# Delete the package # Delete the package
delete_response = integration_client.delete( delete_response = integration_client.delete(

View File

@@ -128,9 +128,7 @@ class TestProjectListingFilters:
assert response.status_code == 200
data = response.json()
-# Filter out system projects (names starting with "_") as they may have
-# collation-specific sort behavior and aren't part of the test data
-names = [p["name"] for p in data["items"] if not p["name"].startswith("_")]
+names = [p["name"] for p in data["items"]]
assert names == sorted(names)
@@ -149,7 +147,7 @@ class TestProjectStats:
assert "project_id" in data
assert "project_name" in data
assert "package_count" in data
-assert "version_count" in data
+assert "tag_count" in data
assert "artifact_count" in data
assert "total_size_bytes" in data
assert "upload_count" in data
@@ -229,11 +227,7 @@ class TestProjectCascadeDelete:
def test_ref_count_decrements_on_project_delete(
self, integration_client, unique_test_id
):
-"""Test ref_count decrements for all versions when project is deleted.
-Each package can only have one version per artifact (same content = same version).
-With 2 packages, ref_count should be 2, and go to 0 when project is deleted.
-"""
+"""Test ref_count decrements for all tags when project is deleted."""
project_name = f"cascade-proj-{unique_test_id}"
package1_name = f"pkg1-{unique_test_id}"
package2_name = f"pkg2-{unique_test_id}"
@@ -257,20 +251,26 @@ class TestProjectCascadeDelete:
)
assert response.status_code == 200
-# Upload same content to both packages
+# Upload same content with tags in both packages
content = f"project cascade test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
upload_test_file(
-integration_client, project_name, package1_name, content, version="1.0.0"
+integration_client, project_name, package1_name, content, tag="v1"
)
upload_test_file(
-integration_client, project_name, package2_name, content, version="1.0.0"
+integration_client, project_name, package1_name, content, tag="v2"
+)
+upload_test_file(
+integration_client, project_name, package2_name, content, tag="latest"
+)
+upload_test_file(
+integration_client, project_name, package2_name, content, tag="stable"
)
-# Verify ref_count is 2 (1 version in each of 2 packages)
+# Verify ref_count is 4 (2 tags in each of 2 packages)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
-assert response.json()["ref_count"] == 2
+assert response.json()["ref_count"] == 4
# Delete the project
delete_response = integration_client.delete(f"/api/v1/projects/{project_name}")

View File

@@ -1,137 +0,0 @@
"""Integration tests for PyPI transparent proxy."""
import os
import pytest
import httpx
def get_base_url():
"""Get the base URL for the Orchard server from environment."""
return os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
class TestPyPIProxyEndpoints:
"""Tests for PyPI proxy endpoints.
These endpoints are public (no auth required) since pip needs to use them.
"""
@pytest.mark.integration
def test_pypi_simple_index(self):
"""Test that /pypi/simple/ returns HTML response."""
with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
response = client.get("/pypi/simple/")
# Returns 200 if sources configured, 503 if not
assert response.status_code in (200, 503)
if response.status_code == 200:
assert "text/html" in response.headers.get("content-type", "")
else:
assert "No PyPI upstream sources configured" in response.json()["detail"]
@pytest.mark.integration
def test_pypi_package_endpoint(self):
"""Test that /pypi/simple/{package}/ returns appropriate response."""
with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
response = client.get("/pypi/simple/requests/")
# Returns 200 if sources configured and package found,
# 404 if package not found, 503 if no sources
assert response.status_code in (200, 404, 503)
if response.status_code == 200:
assert "text/html" in response.headers.get("content-type", "")
elif response.status_code == 404:
assert "not found" in response.json()["detail"].lower()
else: # 503
assert "No PyPI upstream sources configured" in response.json()["detail"]
@pytest.mark.integration
def test_pypi_download_missing_upstream_param(self):
"""Test that /pypi/simple/{package}/{filename} requires upstream param."""
with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
response = client.get("/pypi/simple/requests/requests-2.31.0.tar.gz")
assert response.status_code == 400
assert "upstream" in response.json()["detail"].lower()
class TestPyPILinkRewriting:
"""Tests for URL rewriting in PyPI proxy responses."""
def test_rewrite_package_links(self):
"""Test that download links are rewritten to go through proxy."""
from app.pypi_proxy import _rewrite_package_links
html = '''
<html>
<body>
<a href="https://files.pythonhosted.org/packages/ab/cd/requests-2.31.0.tar.gz#sha256=abc123">requests-2.31.0.tar.gz</a>
<a href="https://files.pythonhosted.org/packages/ef/gh/requests-2.31.0-py3-none-any.whl#sha256=def456">requests-2.31.0-py3-none-any.whl</a>
</body>
</html>
'''
# upstream_base_url is used to resolve relative URLs (not needed here since URLs are absolute)
result = _rewrite_package_links(
html,
"http://localhost:8080",
"requests",
"https://pypi.org/simple/requests/"
)
# Links should be rewritten to go through our proxy
assert "/pypi/simple/requests/requests-2.31.0.tar.gz?upstream=" in result
assert "/pypi/simple/requests/requests-2.31.0-py3-none-any.whl?upstream=" in result
# Original URLs should be encoded in upstream param
assert "files.pythonhosted.org" in result
# Hash fragments should be preserved
assert "#sha256=abc123" in result
assert "#sha256=def456" in result
def test_rewrite_relative_links(self):
"""Test that relative URLs are resolved to absolute URLs."""
from app.pypi_proxy import _rewrite_package_links
# Artifactory-style relative URLs
html = '''
<html>
<body>
<a href="../../packages/ab/cd/requests-2.31.0.tar.gz#sha256=abc123">requests-2.31.0.tar.gz</a>
</body>
</html>
'''
result = _rewrite_package_links(
html,
"https://orchard.example.com",
"requests",
"https://artifactory.example.com/api/pypi/pypi-remote/simple/requests/"
)
# The relative URL should be resolved to absolute
# ../../packages/ab/cd/... from /api/pypi/pypi-remote/simple/requests/ resolves to /api/pypi/pypi-remote/packages/ab/cd/...
assert "upstream=https%3A%2F%2Fartifactory.example.com%2Fapi%2Fpypi%2Fpypi-remote%2Fpackages" in result
# Hash fragment should be preserved
assert "#sha256=abc123" in result
class TestPyPIPackageNormalization:
"""Tests for PyPI package name normalization."""
@pytest.mark.integration
def test_package_name_normalized(self):
"""Test that package names are normalized per PEP 503.
Different capitalizations/separators should all be valid paths.
The endpoint normalizes to lowercase with hyphens before lookup.
"""
with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
# Test various name formats - all should be valid endpoint paths
for package_name in ["Requests", "some_package", "some-package"]:
response = client.get(f"/pypi/simple/{package_name}/")
# 200 = found, 404 = not found, 503 = no sources configured
assert response.status_code in (200, 404, 503), \
f"Unexpected status {response.status_code} for {package_name}"
# Verify response is appropriate for the status code
if response.status_code == 200:
assert "text/html" in response.headers.get("content-type", "")
elif response.status_code == 503:
assert "No PyPI upstream sources configured" in response.json()["detail"]

View File

@@ -48,7 +48,7 @@ class TestSmallFileSizes:
result = upload_test_file(
integration_client, project, package, content,
-filename="1byte.bin", version="1byte"
+filename="1byte.bin", tag="1byte"
)
assert result["artifact_id"] == expected_hash
assert result["size"] == SIZE_1B
@@ -70,7 +70,7 @@ class TestSmallFileSizes:
result = upload_test_file(
integration_client, project, package, content,
-filename="1kb.bin", version="1kb"
+filename="1kb.bin", tag="1kb"
)
assert result["artifact_id"] == expected_hash
assert result["size"] == SIZE_1KB
@@ -90,7 +90,7 @@ class TestSmallFileSizes:
result = upload_test_file(
integration_client, project, package, content,
-filename="10kb.bin", version="10kb"
+filename="10kb.bin", tag="10kb"
)
assert result["artifact_id"] == expected_hash
assert result["size"] == SIZE_10KB
@@ -110,7 +110,7 @@ class TestSmallFileSizes:
result = upload_test_file(
integration_client, project, package, content,
-filename="100kb.bin", version="100kb"
+filename="100kb.bin", tag="100kb"
)
assert result["artifact_id"] == expected_hash
assert result["size"] == SIZE_100KB
@@ -134,7 +134,7 @@ class TestMediumFileSizes:
result = upload_test_file(
integration_client, project, package, content,
-filename="1mb.bin", version="1mb"
+filename="1mb.bin", tag="1mb"
)
assert result["artifact_id"] == expected_hash
assert result["size"] == SIZE_1MB
@@ -155,7 +155,7 @@ class TestMediumFileSizes:
result = upload_test_file(
integration_client, project, package, content,
-filename="5mb.bin", version="5mb"
+filename="5mb.bin", tag="5mb"
)
assert result["artifact_id"] == expected_hash
assert result["size"] == SIZE_5MB
@@ -177,7 +177,7 @@ class TestMediumFileSizes:
result = upload_test_file(
integration_client, project, package, content,
-filename="10mb.bin", version="10mb"
+filename="10mb.bin", tag="10mb"
)
assert result["artifact_id"] == expected_hash
assert result["size"] == SIZE_10MB
@@ -200,7 +200,7 @@ class TestMediumFileSizes:
start_time = time.time()
result = upload_test_file(
integration_client, project, package, content,
-filename="50mb.bin", version="50mb"
+filename="50mb.bin", tag="50mb"
)
upload_time = time.time() - start_time
@@ -240,7 +240,7 @@ class TestLargeFileSizes:
start_time = time.time()
result = upload_test_file(
integration_client, project, package, content,
-filename="100mb.bin", version="100mb"
+filename="100mb.bin", tag="100mb"
)
upload_time = time.time() - start_time
@@ -271,7 +271,7 @@ class TestLargeFileSizes:
start_time = time.time()
result = upload_test_file(
integration_client, project, package, content,
-filename="250mb.bin", version="250mb"
+filename="250mb.bin", tag="250mb"
)
upload_time = time.time() - start_time
@@ -302,7 +302,7 @@ class TestLargeFileSizes:
start_time = time.time()
result = upload_test_file(
integration_client, project, package, content,
-filename="500mb.bin", version="500mb"
+filename="500mb.bin", tag="500mb"
)
upload_time = time.time() - start_time
@@ -336,7 +336,7 @@ class TestLargeFileSizes:
start_time = time.time()
result = upload_test_file(
integration_client, project, package, content,
-filename="1gb.bin", version="1gb"
+filename="1gb.bin", tag="1gb"
)
upload_time = time.time() - start_time
@@ -368,7 +368,7 @@ class TestChunkBoundaries:
result = upload_test_file(
integration_client, project, package, content,
-filename="chunk.bin", version="chunk-exact"
+filename="chunk.bin", tag="chunk-exact"
)
assert result["artifact_id"] == expected_hash
assert result["size"] == CHUNK_SIZE
@@ -389,7 +389,7 @@ class TestChunkBoundaries:
result = upload_test_file(
integration_client, project, package, content,
-filename="chunk_plus.bin", version="chunk-plus"
+filename="chunk_plus.bin", tag="chunk-plus"
)
assert result["artifact_id"] == expected_hash
assert result["size"] == size
@@ -410,7 +410,7 @@ class TestChunkBoundaries:
result = upload_test_file(
integration_client, project, package, content,
-filename="chunk_minus.bin", version="chunk-minus"
+filename="chunk_minus.bin", tag="chunk-minus"
)
assert result["artifact_id"] == expected_hash
assert result["size"] == size
@@ -431,7 +431,7 @@ class TestChunkBoundaries:
result = upload_test_file(
integration_client, project, package, content,
-filename="multi_chunk.bin", version="multi-chunk"
+filename="multi_chunk.bin", tag="multi-chunk"
)
assert result["artifact_id"] == expected_hash
assert result["size"] == size
@@ -457,7 +457,7 @@ class TestDataIntegrity:
result = upload_test_file(
integration_client, project, package, content,
-filename="binary.bin", version="binary"
+filename="binary.bin", tag="binary"
)
assert result["artifact_id"] == expected_hash
@@ -477,7 +477,7 @@ class TestDataIntegrity:
result = upload_test_file(
integration_client, project, package, content,
-filename="text.txt", version="text"
+filename="text.txt", tag="text"
)
assert result["artifact_id"] == expected_hash
@@ -498,7 +498,7 @@ class TestDataIntegrity:
result = upload_test_file(
integration_client, project, package, content,
-filename="nulls.bin", version="nulls"
+filename="nulls.bin", tag="nulls"
)
assert result["artifact_id"] == expected_hash
@@ -519,7 +519,7 @@ class TestDataIntegrity:
result = upload_test_file(
integration_client, project, package, content,
-filename="文件名.txt", version="unicode-name"
+filename="文件名.txt", tag="unicode-name"
)
assert result["artifact_id"] == expected_hash
assert result["original_name"] == "文件名.txt"
@@ -543,7 +543,7 @@ class TestDataIntegrity:
result = upload_test_file(
integration_client, project, package, content,
-filename="data.gz", version="compressed"
+filename="data.gz", tag="compressed"
)
assert result["artifact_id"] == expected_hash
@@ -568,7 +568,7 @@ class TestDataIntegrity:
result = upload_test_file(
integration_client, project, package, content,
-filename=f"hash_test_{size}.bin", version=f"hash-{size}"
+filename=f"hash_test_{size}.bin", tag=f"hash-{size}"
)
# Verify artifact_id matches expected hash

View File

@@ -32,7 +32,7 @@ class TestRangeRequests:
"""Test range request for first N bytes."""
project, package = test_package
content = b"0123456789" * 100 # 1000 bytes
-upload_test_file(integration_client, project, package, content, version="range-test")
+upload_test_file(integration_client, project, package, content, tag="range-test")
# Request first 10 bytes
response = integration_client.get(
@@ -50,7 +50,7 @@ class TestRangeRequests:
"""Test range request for bytes in the middle."""
project, package = test_package
content = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-upload_test_file(integration_client, project, package, content, version="range-mid")
+upload_test_file(integration_client, project, package, content, tag="range-mid")
# Request bytes 10-19 (KLMNOPQRST)
response = integration_client.get(
@@ -66,7 +66,7 @@ class TestRangeRequests:
"""Test range request for last N bytes (suffix range)."""
project, package = test_package
content = b"0123456789ABCDEF" # 16 bytes
-upload_test_file(integration_client, project, package, content, version="range-suffix")
+upload_test_file(integration_client, project, package, content, tag="range-suffix")
# Request last 4 bytes
response = integration_client.get(
@@ -82,7 +82,7 @@ class TestRangeRequests:
"""Test range request from offset to end."""
project, package = test_package
content = b"0123456789"
-upload_test_file(integration_client, project, package, content, version="range-open")
+upload_test_file(integration_client, project, package, content, tag="range-open")
# Request from byte 5 to end
response = integration_client.get(
@@ -100,7 +100,7 @@ class TestRangeRequests:
"""Test that range requests include Accept-Ranges header."""
project, package = test_package
content = b"test content"
-upload_test_file(integration_client, project, package, content, version="accept-ranges")
+upload_test_file(integration_client, project, package, content, tag="accept-ranges")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/accept-ranges",
@@ -117,7 +117,7 @@ class TestRangeRequests:
"""Test that full downloads advertise range support."""
project, package = test_package
content = b"test content"
-upload_test_file(integration_client, project, package, content, version="full-accept")
+upload_test_file(integration_client, project, package, content, tag="full-accept")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/full-accept",
@@ -136,7 +136,7 @@ class TestConditionalRequests:
project, package = test_package
content = b"conditional request test content"
expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, version="cond-etag")
+upload_test_file(integration_client, project, package, content, tag="cond-etag")
# Request with matching ETag
response = integration_client.get(
@@ -153,7 +153,7 @@ class TestConditionalRequests:
project, package = test_package
content = b"etag no quotes test"
expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, version="cond-noquote")
+upload_test_file(integration_client, project, package, content, tag="cond-noquote")
# Request with ETag without quotes
response = integration_client.get(
@@ -168,7 +168,7 @@ class TestConditionalRequests:
"""Test If-None-Match with non-matching ETag returns 200."""
project, package = test_package
content = b"etag mismatch test"
-upload_test_file(integration_client, project, package, content, version="cond-mismatch")
+upload_test_file(integration_client, project, package, content, tag="cond-mismatch")
# Request with different ETag
response = integration_client.get(
@@ -184,7 +184,7 @@ class TestConditionalRequests:
"""Test If-Modified-Since with future date returns 304."""
project, package = test_package
content = b"modified since test"
-upload_test_file(integration_client, project, package, content, version="cond-modified")
+upload_test_file(integration_client, project, package, content, tag="cond-modified")
# Request with future date (artifact was definitely created before this)
future_date = formatdate(time.time() + 86400, usegmt=True) # Tomorrow
@@ -202,7 +202,7 @@ class TestConditionalRequests:
"""Test If-Modified-Since with old date returns 200."""
project, package = test_package
content = b"old date test"
-upload_test_file(integration_client, project, package, content, version="cond-old")
+upload_test_file(integration_client, project, package, content, tag="cond-old")
# Request with old date (2020-01-01)
old_date = "Wed, 01 Jan 2020 00:00:00 GMT"
@@ -220,7 +220,7 @@ class TestConditionalRequests:
project, package = test_package
content = b"304 etag test"
expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, version="304-etag")
+upload_test_file(integration_client, project, package, content, tag="304-etag")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/304-etag",
@@ -236,7 +236,7 @@ class TestConditionalRequests:
project, package = test_package
content = b"304 cache test"
expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, version="304-cache")
+upload_test_file(integration_client, project, package, content, tag="304-cache")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/304-cache",
@@ -255,7 +255,7 @@ class TestCachingHeaders:
"""Test download response includes Cache-Control header."""
project, package = test_package
content = b"cache control test"
-upload_test_file(integration_client, project, package, content, version="cache-ctl")
+upload_test_file(integration_client, project, package, content, tag="cache-ctl")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/cache-ctl",
@@ -272,7 +272,7 @@ class TestCachingHeaders:
"""Test download response includes Last-Modified header."""
project, package = test_package
content = b"last modified test"
-upload_test_file(integration_client, project, package, content, version="last-mod")
+upload_test_file(integration_client, project, package, content, tag="last-mod")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/last-mod",
@@ -290,7 +290,7 @@ class TestCachingHeaders:
project, package = test_package
content = b"etag header test"
expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, version="etag-hdr")
+upload_test_file(integration_client, project, package, content, tag="etag-hdr")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/etag-hdr",
@@ -308,7 +308,7 @@ class TestDownloadResume:
"""Test resuming download from where it left off."""
project, package = test_package
content = b"ABCDEFGHIJ" * 100 # 1000 bytes
-upload_test_file(integration_client, project, package, content, version="resume-test")
+upload_test_file(integration_client, project, package, content, tag="resume-test")
# Simulate partial download (first 500 bytes)
response1 = integration_client.get(
@@ -340,7 +340,7 @@ class TestDownloadResume:
project, package = test_package
content = b"resume etag verification test content"
expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, version="resume-etag")
+upload_test_file(integration_client, project, package, content, tag="resume-etag")
# Get ETag from first request
response1 = integration_client.get(
@@ -373,7 +373,7 @@ class TestLargeFileStreaming:
project, package = test_package
content, expected_hash = sized_content(SIZE_1MB, seed=500)
-upload_test_file(integration_client, project, package, content, version="stream-1mb")
+upload_test_file(integration_client, project, package, content, tag="stream-1mb")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/stream-1mb",
@@ -391,7 +391,7 @@ class TestLargeFileStreaming:
project, package = test_package
content, expected_hash = sized_content(SIZE_100KB, seed=501)
-upload_test_file(integration_client, project, package, content, version="stream-hdr")
+upload_test_file(integration_client, project, package, content, tag="stream-hdr")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/stream-hdr",
@@ -410,7 +410,7 @@ class TestLargeFileStreaming:
project, package = test_package
content, _ = sized_content(SIZE_100KB, seed=502)
-upload_test_file(integration_client, project, package, content, version="range-large")
+upload_test_file(integration_client, project, package, content, tag="range-large")
# Request a slice from the middle
start = 50000
@@ -433,7 +433,7 @@ class TestDownloadModes:
"""Test proxy mode streams content through backend."""
project, package = test_package
content = b"proxy mode test content"
-upload_test_file(integration_client, project, package, content, version="mode-proxy")
+upload_test_file(integration_client, project, package, content, tag="mode-proxy")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/mode-proxy",
@@ -447,7 +447,7 @@ class TestDownloadModes:
"""Test presigned mode returns JSON with URL."""
project, package = test_package
content = b"presigned mode test"
-upload_test_file(integration_client, project, package, content, version="mode-presign")
+upload_test_file(integration_client, project, package, content, tag="mode-presign")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/mode-presign",
@@ -464,7 +464,7 @@ class TestDownloadModes:
"""Test redirect mode returns 302 to presigned URL."""
project, package = test_package
content = b"redirect mode test"
-upload_test_file(integration_client, project, package, content, version="mode-redir")
+upload_test_file(integration_client, project, package, content, tag="mode-redir")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/mode-redir",
@@ -484,7 +484,7 @@ class TestIntegrityDuringStreaming:
project, package = test_package
content = b"integrity check content"
expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, version="integrity")
+upload_test_file(integration_client, project, package, content, tag="integrity")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/integrity",
@@ -505,7 +505,7 @@ class TestIntegrityDuringStreaming:
project, package = test_package
content = b"etag integrity test"
expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, version="etag-int")
+upload_test_file(integration_client, project, package, content, tag="etag-int")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/etag-int",
@@ -524,7 +524,7 @@ class TestIntegrityDuringStreaming:
"""Test Digest header is present in RFC 3230 format."""
project, package = test_package
content = b"digest header test"
-upload_test_file(integration_client, project, package, content, version="digest")
+upload_test_file(integration_client, project, package, content, tag="digest")
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/digest",

View File

@@ -0,0 +1,403 @@
"""
Integration tests for tag API endpoints.
Tests cover:
- Tag CRUD operations
- Tag listing with pagination and search
- Tag history tracking
- ref_count behavior with tag operations
"""
import pytest
from tests.factories import compute_sha256, upload_test_file
class TestTagCRUD:
"""Tests for tag create, read, delete operations."""
@pytest.mark.integration
def test_create_tag_via_upload(self, integration_client, test_package):
"""Test creating a tag via upload endpoint."""
project_name, package_name = test_package
result = upload_test_file(
integration_client,
project_name,
package_name,
b"tag create test",
tag="v1.0.0",
)
assert result["tag"] == "v1.0.0"
assert result["artifact_id"]
@pytest.mark.integration
def test_create_tag_via_post(
self, integration_client, test_package, unique_test_id
):
"""Test creating a tag via POST /tags endpoint."""
project_name, package_name = test_package
# First upload an artifact
result = upload_test_file(
integration_client,
project_name,
package_name,
b"artifact for tag",
)
artifact_id = result["artifact_id"]
# Create tag via POST
tag_name = f"post-tag-{unique_test_id}"
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/tags",
json={"name": tag_name, "artifact_id": artifact_id},
)
assert response.status_code == 200
data = response.json()
assert data["name"] == tag_name
assert data["artifact_id"] == artifact_id
@pytest.mark.integration
def test_get_tag(self, integration_client, test_package):
"""Test getting a tag by name."""
project_name, package_name = test_package
upload_test_file(
integration_client,
project_name,
package_name,
b"get tag test",
tag="get-tag",
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags/get-tag"
)
assert response.status_code == 200
data = response.json()
assert data["name"] == "get-tag"
assert "artifact_id" in data
assert "artifact_size" in data
assert "artifact_content_type" in data
@pytest.mark.integration
def test_list_tags(self, integration_client, test_package):
"""Test listing tags for a package."""
project_name, package_name = test_package
# Create some tags
upload_test_file(
integration_client,
project_name,
package_name,
b"list tags test",
tag="list-v1",
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags"
)
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
tag_names = [t["name"] for t in data["items"]]
assert "list-v1" in tag_names
@pytest.mark.integration
def test_delete_tag(self, integration_client, test_package):
"""Test deleting a tag."""
project_name, package_name = test_package
upload_test_file(
integration_client,
project_name,
package_name,
b"delete tag test",
tag="to-delete",
)
# Delete tag
response = integration_client.delete(
f"/api/v1/project/{project_name}/{package_name}/tags/to-delete"
)
assert response.status_code == 204
# Verify deleted
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags/to-delete"
)
assert response.status_code == 404
class TestTagListingFilters:
"""Tests for tag listing with filters and search."""
@pytest.mark.integration
def test_tags_pagination(self, integration_client, test_package):
"""Test tag listing respects pagination."""
project_name, package_name = test_package
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags?limit=5"
)
assert response.status_code == 200
data = response.json()
assert len(data["items"]) <= 5
assert data["pagination"]["limit"] == 5
@pytest.mark.integration
def test_tags_search(self, integration_client, test_package, unique_test_id):
"""Test tag search by name."""
project_name, package_name = test_package
tag_name = f"searchable-{unique_test_id}"
upload_test_file(
integration_client,
project_name,
package_name,
b"search test",
tag=tag_name,
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags?search=searchable"
)
assert response.status_code == 200
data = response.json()
tag_names = [t["name"] for t in data["items"]]
assert tag_name in tag_names
class TestTagHistory:
"""Tests for tag history tracking."""
@pytest.mark.integration
def test_tag_history_on_create(self, integration_client, test_package):
"""Test tag history is created when tag is created."""
project_name, package_name = test_package
upload_test_file(
integration_client,
project_name,
package_name,
b"history create test",
tag="history-create",
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags/history-create/history"
)
assert response.status_code == 200
data = response.json()
assert len(data) >= 1
@pytest.mark.integration
def test_tag_history_on_update(
self, integration_client, test_package, unique_test_id
):
"""Test tag history is created when tag is updated."""
project_name, package_name = test_package
tag_name = f"history-update-{unique_test_id}"
# Create tag with first artifact
upload_test_file(
integration_client,
project_name,
package_name,
b"first content",
tag=tag_name,
)
# Update tag with second artifact
upload_test_file(
integration_client,
project_name,
package_name,
b"second content",
tag=tag_name,
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags/{tag_name}/history"
)
assert response.status_code == 200
data = response.json()
# Should have at least 2 history entries (create + update)
assert len(data) >= 2
class TestTagRefCount:
"""Tests for ref_count behavior with tag operations."""
@pytest.mark.integration
def test_ref_count_decrements_on_tag_delete(self, integration_client, test_package):
"""Test ref_count decrements when a tag is deleted."""
project_name, package_name = test_package
content = b"ref count delete test"
expected_hash = compute_sha256(content)
# Upload with two tags
upload_test_file(
integration_client, project_name, package_name, content, tag="rc-v1"
)
upload_test_file(
integration_client, project_name, package_name, content, tag="rc-v2"
)
# Verify ref_count is 2
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 2
# Delete one tag
delete_response = integration_client.delete(
f"/api/v1/project/{project_name}/{package_name}/tags/rc-v1"
)
assert delete_response.status_code == 204
# Verify ref_count is now 1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
@pytest.mark.integration
def test_ref_count_zero_after_all_tags_deleted(
self, integration_client, test_package
):
"""Test ref_count goes to 0 when all tags are deleted."""
project_name, package_name = test_package
content = b"orphan test content"
expected_hash = compute_sha256(content)
# Upload with one tag
upload_test_file(
integration_client, project_name, package_name, content, tag="only-tag"
)
# Delete the tag
integration_client.delete(
f"/api/v1/project/{project_name}/{package_name}/tags/only-tag"
)
# Verify ref_count is 0
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 0
@pytest.mark.integration
def test_ref_count_adjusts_on_tag_update(
self, integration_client, test_package, unique_test_id
):
"""Test ref_count adjusts when a tag is updated to point to different artifact."""
project_name, package_name = test_package
# Upload two different artifacts
content1 = f"artifact one {unique_test_id}".encode()
content2 = f"artifact two {unique_test_id}".encode()
hash1 = compute_sha256(content1)
hash2 = compute_sha256(content2)
# Upload first artifact with tag "latest"
upload_test_file(
integration_client, project_name, package_name, content1, tag="latest"
)
# Verify first artifact has ref_count 1
response = integration_client.get(f"/api/v1/artifact/{hash1}")
assert response.json()["ref_count"] == 1
# Upload second artifact with different tag
upload_test_file(
integration_client, project_name, package_name, content2, tag="stable"
)
# Now update "latest" tag to point to second artifact
upload_test_file(
integration_client, project_name, package_name, content2, tag="latest"
)
# Verify first artifact ref_count decreased to 0
response = integration_client.get(f"/api/v1/artifact/{hash1}")
assert response.json()["ref_count"] == 0
# Verify second artifact ref_count increased to 2
response = integration_client.get(f"/api/v1/artifact/{hash2}")
assert response.json()["ref_count"] == 2
@pytest.mark.integration
def test_ref_count_unchanged_when_tag_same_artifact(
self, integration_client, test_package, unique_test_id
):
"""Test ref_count doesn't change when tag is 'updated' to same artifact."""
project_name, package_name = test_package
content = f"same artifact {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload with tag
upload_test_file(
integration_client, project_name, package_name, content, tag="same-v1"
)
# Verify ref_count is 1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
# Upload same content with same tag (no-op)
upload_test_file(
integration_client, project_name, package_name, content, tag="same-v1"
)
# Verify ref_count is still 1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
@pytest.mark.integration
def test_tag_via_post_endpoint_increments_ref_count(
self, integration_client, test_package, unique_test_id
):
"""Test creating tag via POST /tags endpoint increments ref_count."""
project_name, package_name = test_package
content = f"tag endpoint test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload artifact without tag
result = upload_test_file(
integration_client, project_name, package_name, content, filename="test.bin"
)
artifact_id = result["artifact_id"]
# Verify ref_count is 0 (no tags yet)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 0
# Create tag via POST endpoint
tag_response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/tags",
json={"name": "post-v1", "artifact_id": artifact_id},
)
assert tag_response.status_code == 200
# Verify ref_count is now 1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
# Create another tag via POST endpoint
tag_response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/tags",
json={"name": "post-latest", "artifact_id": artifact_id},
)
assert tag_response.status_code == 200
# Verify ref_count is now 2
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 2
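Taken together, these tests describe a small workflow: upload with a tag, point further tags at the same artifact, and watch ref_count track the number of tags. A condensed sketch, not from the repository; the base URL, project and package names are illustrative and auth is omitted:

import io
import httpx

client = httpx.Client(base_url="http://localhost:8080", timeout=30.0)

# Upload an artifact and attach the first tag in one call.
files = {"file": ("app.bin", io.BytesIO(b"payload"), "application/octet-stream")}
upload = client.post(
    "/api/v1/project/demo/app/upload", files=files, data={"tag": "v1"}
).json()

# Point a second tag at the same artifact via POST /tags.
client.post(
    "/api/v1/project/demo/app/tags",
    json={"name": "latest", "artifact_id": upload["artifact_id"]},
)

# ref_count now reflects both tags.
print(client.get(f"/api/v1/artifact/{upload['artifact_id']}").json()["ref_count"])  # 2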

View File

@@ -47,7 +47,7 @@ class TestUploadBasics:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
result = upload_test_file( result = upload_test_file(
integration_client, project_name, package_name, content, version="v1" integration_client, project_name, package_name, content, tag="v1"
) )
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
@@ -116,23 +116,31 @@ class TestUploadBasics:
assert result["created_at"] is not None assert result["created_at"] is not None
@pytest.mark.integration @pytest.mark.integration
def test_upload_without_version_succeeds(self, integration_client, test_package): def test_upload_without_tag_succeeds(self, integration_client, test_package):
"""Test upload without version succeeds (no version created).""" """Test upload without tag succeeds (no tag created)."""
project, package = test_package project, package = test_package
content = b"upload without version test" content = b"upload without tag test"
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
files = {"file": ("no_version.bin", io.BytesIO(content), "application/octet-stream")} files = {"file": ("no_tag.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
# No version parameter # No tag parameter
) )
assert response.status_code == 200 assert response.status_code == 200
result = response.json() result = response.json()
assert result["artifact_id"] == expected_hash assert result["artifact_id"] == expected_hash
# Version should be None when not specified
assert result.get("version") is None # Verify no tag was created - list tags and check
tags_response = integration_client.get(
f"/api/v1/project/{project}/{package}/tags"
)
assert tags_response.status_code == 200
tags = tags_response.json()
# Filter for tags pointing to this artifact
artifact_tags = [t for t in tags.get("items", tags) if t.get("artifact_id") == expected_hash]
assert len(artifact_tags) == 0, "Tag should not be created when not specified"
@pytest.mark.integration @pytest.mark.integration
def test_upload_creates_artifact_in_database(self, integration_client, test_package): def test_upload_creates_artifact_in_database(self, integration_client, test_package):
@@ -164,29 +172,25 @@ class TestUploadBasics:
assert s3_object_exists(expected_hash), "S3 object should exist after upload" assert s3_object_exists(expected_hash), "S3 object should exist after upload"
@pytest.mark.integration @pytest.mark.integration
def test_upload_with_version_creates_version_record(self, integration_client, test_package): def test_upload_with_tag_creates_tag_record(self, integration_client, test_package):
"""Test upload with version creates version record.""" """Test upload with tag creates tag record."""
project, package = test_package project, package = test_package
content = b"version creation test" content = b"tag creation test"
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
version_name = "1.0.0" tag_name = "my-tag-v1"
result = upload_test_file( upload_test_file(
integration_client, project, package, content, version=version_name integration_client, project, package, content, tag=tag_name
) )
# Verify version was created # Verify tag exists
assert result.get("version") == version_name tags_response = integration_client.get(
assert result["artifact_id"] == expected_hash f"/api/v1/project/{project}/{package}/tags"
# Verify version exists in versions list
versions_response = integration_client.get(
f"/api/v1/project/{project}/{package}/versions"
) )
assert versions_response.status_code == 200 assert tags_response.status_code == 200
versions = versions_response.json() tags = tags_response.json()
version_names = [v["version"] for v in versions.get("items", [])] tag_names = [t["name"] for t in tags.get("items", tags)]
assert version_name in version_names assert tag_name in tag_names
class TestDuplicateUploads: class TestDuplicateUploads:
@@ -203,44 +207,36 @@ class TestDuplicateUploads:
# First upload # First upload
result1 = upload_test_file( result1 = upload_test_file(
integration_client, project, package, content, version="first" integration_client, project, package, content, tag="first"
) )
assert result1["artifact_id"] == expected_hash assert result1["artifact_id"] == expected_hash
# Second upload # Second upload
result2 = upload_test_file( result2 = upload_test_file(
integration_client, project, package, content, version="second" integration_client, project, package, content, tag="second"
) )
assert result2["artifact_id"] == expected_hash assert result2["artifact_id"] == expected_hash
assert result1["artifact_id"] == result2["artifact_id"] assert result1["artifact_id"] == result2["artifact_id"]
@pytest.mark.integration @pytest.mark.integration
def test_same_file_twice_returns_existing_version( def test_same_file_twice_increments_ref_count(
self, integration_client, test_package self, integration_client, test_package
): ):
"""Test uploading same file twice in same package returns existing version. """Test uploading same file twice increments ref_count to 2."""
Same artifact can only have one version per package. Uploading the same content
with a different version name returns the existing version, not a new one.
ref_count stays at 1 because there's still only one PackageVersion reference.
"""
project, package = test_package project, package = test_package
content = b"content for ref count increment test" content = b"content for ref count increment test"
# First upload # First upload
result1 = upload_test_file( result1 = upload_test_file(
integration_client, project, package, content, version="v1" integration_client, project, package, content, tag="v1"
) )
assert result1["ref_count"] == 1 assert result1["ref_count"] == 1
# Second upload with different version name returns existing version # Second upload
result2 = upload_test_file( result2 = upload_test_file(
integration_client, project, package, content, version="v2" integration_client, project, package, content, tag="v2"
) )
# Same artifact, same package = same version returned, ref_count stays 1 assert result2["ref_count"] == 2
assert result2["ref_count"] == 1
assert result2["deduplicated"] is True
assert result1["version"] == result2["version"] # Both return "v1"
@pytest.mark.integration @pytest.mark.integration
def test_same_file_different_packages_shares_artifact( def test_same_file_different_packages_shares_artifact(
@@ -265,12 +261,12 @@ class TestDuplicateUploads:
) )
# Upload to first package # Upload to first package
result1 = upload_test_file(integration_client, project, pkg1, content, version="v1") result1 = upload_test_file(integration_client, project, pkg1, content, tag="v1")
assert result1["artifact_id"] == expected_hash assert result1["artifact_id"] == expected_hash
assert result1["deduplicated"] is False assert result1["deduplicated"] is False
# Upload to second package # Upload to second package
result2 = upload_test_file(integration_client, project, pkg2, content, version="v1") result2 = upload_test_file(integration_client, project, pkg2, content, tag="v1")
assert result2["artifact_id"] == expected_hash assert result2["artifact_id"] == expected_hash
assert result2["deduplicated"] is True assert result2["deduplicated"] is True
@@ -290,7 +286,7 @@ class TestDuplicateUploads:
package, package,
content, content,
filename="file1.bin", filename="file1.bin",
version="v1", tag="v1",
) )
assert result1["artifact_id"] == expected_hash assert result1["artifact_id"] == expected_hash
@@ -301,7 +297,7 @@ class TestDuplicateUploads:
package, package,
content, content,
filename="file2.bin", filename="file2.bin",
version="v2", tag="v2",
) )
assert result2["artifact_id"] == expected_hash assert result2["artifact_id"] == expected_hash
assert result2["deduplicated"] is True assert result2["deduplicated"] is True
@@ -311,17 +307,17 @@ class TestDownload:
"""Tests for download functionality.""" """Tests for download functionality."""
@pytest.mark.integration @pytest.mark.integration
def test_download_by_version(self, integration_client, test_package): def test_download_by_tag(self, integration_client, test_package):
"""Test downloading artifact by version.""" """Test downloading artifact by tag name."""
project, package = test_package project, package = test_package
original_content = b"download by version test" original_content = b"download by tag test"
upload_test_file( upload_test_file(
integration_client, project, package, original_content, version="1.0.0" integration_client, project, package, original_content, tag="download-tag"
) )
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/1.0.0", f"/api/v1/project/{project}/{package}/+/download-tag",
params={"mode": "proxy"}, params={"mode": "proxy"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -344,29 +340,29 @@ class TestDownload:
assert response.content == original_content assert response.content == original_content
@pytest.mark.integration @pytest.mark.integration
def test_download_by_version_prefix(self, integration_client, test_package): def test_download_by_tag_prefix(self, integration_client, test_package):
"""Test downloading artifact using version: prefix.""" """Test downloading artifact using tag: prefix."""
project, package = test_package project, package = test_package
original_content = b"download by version prefix test" original_content = b"download by tag prefix test"
upload_test_file( upload_test_file(
integration_client, project, package, original_content, version="2.0.0" integration_client, project, package, original_content, tag="prefix-tag"
) )
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/version:2.0.0", f"/api/v1/project/{project}/{package}/+/tag:prefix-tag",
params={"mode": "proxy"}, params={"mode": "proxy"},
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.content == original_content assert response.content == original_content
@pytest.mark.integration @pytest.mark.integration
def test_download_nonexistent_version(self, integration_client, test_package): def test_download_nonexistent_tag(self, integration_client, test_package):
"""Test downloading nonexistent version returns 404.""" """Test downloading nonexistent tag returns 404."""
project, package = test_package project, package = test_package
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/nonexistent-version" f"/api/v1/project/{project}/{package}/+/nonexistent-tag"
) )
assert response.status_code == 404 assert response.status_code == 404
@@ -404,7 +400,7 @@ class TestDownload:
original_content = b"exact content verification test data 12345" original_content = b"exact content verification test data 12345"
upload_test_file( upload_test_file(
integration_client, project, package, original_content, version="verify" integration_client, project, package, original_content, tag="verify"
) )
response = integration_client.get( response = integration_client.get(
@@ -425,7 +421,7 @@ class TestDownloadHeaders:
upload_test_file( upload_test_file(
integration_client, project, package, content, integration_client, project, package, content,
filename="test.txt", version="content-type-test" filename="test.txt", tag="content-type-test"
) )
response = integration_client.get( response = integration_client.get(
@@ -444,7 +440,7 @@ class TestDownloadHeaders:
expected_length = len(content) expected_length = len(content)
upload_test_file( upload_test_file(
integration_client, project, package, content, version="content-length-test" integration_client, project, package, content, tag="content-length-test"
) )
response = integration_client.get( response = integration_client.get(
@@ -464,7 +460,7 @@ class TestDownloadHeaders:
upload_test_file( upload_test_file(
integration_client, project, package, content, integration_client, project, package, content,
filename=filename, version="disposition-test" filename=filename, tag="disposition-test"
) )
response = integration_client.get( response = integration_client.get(
@@ -485,7 +481,7 @@ class TestDownloadHeaders:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
upload_test_file( upload_test_file(
integration_client, project, package, content, version="checksum-headers" integration_client, project, package, content, tag="checksum-headers"
) )
response = integration_client.get( response = integration_client.get(
@@ -505,7 +501,7 @@ class TestDownloadHeaders:
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
upload_test_file( upload_test_file(
integration_client, project, package, content, version="etag-test" integration_client, project, package, content, tag="etag-test"
) )
response = integration_client.get( response = integration_client.get(
@@ -523,31 +519,17 @@ class TestConcurrentUploads:
"""Tests for concurrent upload handling.""" """Tests for concurrent upload handling."""
@pytest.mark.integration @pytest.mark.integration
def test_concurrent_uploads_same_file(self, integration_client, test_project, unique_test_id): def test_concurrent_uploads_same_file(self, integration_client, test_package):
"""Test concurrent uploads of same file to different packages handle deduplication correctly. """Test concurrent uploads of same file handle deduplication correctly."""
project, package = test_package
Same artifact can only have one version per package, so we create multiple packages
to test that concurrent uploads to different packages correctly increment ref_count.
"""
content = b"content for concurrent upload test" content = b"content for concurrent upload test"
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
num_concurrent = 5 num_concurrent = 5
# Create packages for each concurrent upload
packages = []
for i in range(num_concurrent):
pkg_name = f"concurrent-pkg-{unique_test_id}-{i}"
response = integration_client.post(
f"/api/v1/project/{test_project}/packages",
json={"name": pkg_name},
)
assert response.status_code == 200
packages.append(pkg_name)
# Create an API key for worker threads # Create an API key for worker threads
api_key_response = integration_client.post( api_key_response = integration_client.post(
"/api/v1/auth/keys", "/api/v1/auth/keys",
json={"name": f"concurrent-test-key-{unique_test_id}"}, json={"name": "concurrent-test-key"},
) )
assert api_key_response.status_code == 200, f"Failed to create API key: {api_key_response.text}" assert api_key_response.status_code == 200, f"Failed to create API key: {api_key_response.text}"
api_key = api_key_response.json()["key"] api_key = api_key_response.json()["key"]
@@ -555,7 +537,7 @@ class TestConcurrentUploads:
results = [] results = []
errors = [] errors = []
def upload_worker(idx): def upload_worker(tag_suffix):
try: try:
from httpx import Client from httpx import Client
@@ -563,15 +545,15 @@ class TestConcurrentUploads:
with Client(base_url=base_url, timeout=30.0) as client: with Client(base_url=base_url, timeout=30.0) as client:
files = { files = {
"file": ( "file": (
f"concurrent-{idx}.bin", f"concurrent-{tag_suffix}.bin",
io.BytesIO(content), io.BytesIO(content),
"application/octet-stream", "application/octet-stream",
) )
} }
response = client.post( response = client.post(
f"/api/v1/project/{test_project}/{packages[idx]}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": f"concurrent-{tag_suffix}"},
headers={"Authorization": f"Bearer {api_key}"}, headers={"Authorization": f"Bearer {api_key}"},
) )
if response.status_code == 200: if response.status_code == 200:
@@ -594,7 +576,7 @@ class TestConcurrentUploads:
assert len(artifact_ids) == 1 assert len(artifact_ids) == 1
assert expected_hash in artifact_ids assert expected_hash in artifact_ids
# Verify final ref_count equals number of packages # Verify final ref_count
response = integration_client.get(f"/api/v1/artifact/{expected_hash}") response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200 assert response.status_code == 200
assert response.json()["ref_count"] == num_concurrent assert response.json()["ref_count"] == num_concurrent
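The hunk above relies on Orchard's content-addressed deduplication: identical bytes map to a single artifact whose ref_count counts every version or tag that references it. A minimal sketch of the same flow against a live instance; the base URL, project, package and API key below are placeholders rather than values from this diff:

import hashlib
import io

import httpx

BASE_URL = "http://localhost:8080"   # placeholder; point at a running instance
API_KEY = "orchard-api-key"          # placeholder; uploads are authenticated
PROJECT, PACKAGE = "demo-project", "demo-package"  # assumed to exist already

content = b"identical bytes uploaded twice"
expected_hash = hashlib.sha256(content).hexdigest()

with httpx.Client(base_url=BASE_URL, timeout=30.0) as client:
    headers = {"Authorization": f"Bearer {API_KEY}"}
    # Upload the same bytes under two different tags.
    for tag in ("first", "second"):
        files = {"file": (f"{tag}.bin", io.BytesIO(content), "application/octet-stream")}
        resp = client.post(
            f"/api/v1/project/{PROJECT}/{PACKAGE}/upload",
            files=files,
            data={"tag": tag},
            headers=headers,
        )
        resp.raise_for_status()
        # Content-addressed storage: both uploads report the same artifact id (the SHA-256).
        assert resp.json()["artifact_id"] == expected_hash

    # Only one artifact exists; its ref_count reflects both references.
    artifact = client.get(f"/api/v1/artifact/{expected_hash}", headers=headers).json()
    assert artifact["ref_count"] == 2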
@@ -623,7 +605,7 @@ class TestFileSizeValidation:
content = b"X" content = b"X"
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="tiny" integration_client, project, package, content, tag="tiny"
) )
assert result["artifact_id"] is not None assert result["artifact_id"] is not None
@@ -639,7 +621,7 @@ class TestFileSizeValidation:
expected_size = len(content) expected_size = len(content)
result = upload_test_file( result = upload_test_file(
integration_client, project, package, content, version="size-test" integration_client, project, package, content, tag="size-test"
) )
assert result["size"] == expected_size assert result["size"] == expected_size
@@ -667,7 +649,7 @@ class TestUploadFailureCleanup:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/nonexistent-project-{unique_test_id}/nonexistent-pkg/upload", f"/api/v1/project/nonexistent-project-{unique_test_id}/nonexistent-pkg/upload",
files=files, files=files,
data={"version": "test"}, data={"tag": "test"},
) )
assert response.status_code == 404 assert response.status_code == 404
@@ -690,7 +672,7 @@ class TestUploadFailureCleanup:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload", f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
files=files, files=files,
data={"version": "test"}, data={"tag": "test"},
) )
assert response.status_code == 404 assert response.status_code == 404
@@ -711,7 +693,7 @@ class TestUploadFailureCleanup:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload", f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
files=files, files=files,
data={"version": "test"}, data={"tag": "test"},
) )
assert response.status_code == 404 assert response.status_code == 404
@@ -737,7 +719,7 @@ class TestS3StorageVerification:
# Upload same content multiple times # Upload same content multiple times
for tag in ["s3test1", "s3test2", "s3test3"]: for tag in ["s3test1", "s3test2", "s3test3"]:
upload_test_file(integration_client, project, package, content, version=tag) upload_test_file(integration_client, project, package, content, tag=tag)
# Verify only one S3 object exists # Verify only one S3 object exists
s3_objects = list_s3_objects_by_hash(expected_hash) s3_objects = list_s3_objects_by_hash(expected_hash)
@@ -753,26 +735,16 @@ class TestS3StorageVerification:
@pytest.mark.integration @pytest.mark.integration
def test_artifact_table_single_row_after_duplicates( def test_artifact_table_single_row_after_duplicates(
self, integration_client, test_project, unique_test_id self, integration_client, test_package
): ):
"""Test artifact table contains only one row after duplicate uploads to different packages. """Test artifact table contains only one row after duplicate uploads."""
project, package = test_package
Same artifact can only have one version per package, so we create multiple packages
to test deduplication across packages.
"""
content = b"content for single row test" content = b"content for single row test"
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
# Create 3 packages and upload same content to each # Upload same content multiple times
for i in range(3): for tag in ["v1", "v2", "v3"]:
pkg_name = f"single-row-pkg-{unique_test_id}-{i}" upload_test_file(integration_client, project, package, content, tag=tag)
integration_client.post(
f"/api/v1/project/{test_project}/packages",
json={"name": pkg_name},
)
upload_test_file(
integration_client, test_project, pkg_name, content, version="1.0.0"
)
# Query artifact # Query artifact
response = integration_client.get(f"/api/v1/artifact/{expected_hash}") response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -811,7 +783,7 @@ class TestSecurityPathTraversal:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
data={"version": "traversal-test"}, data={"tag": "traversal-test"},
) )
assert response.status_code == 200 assert response.status_code == 200
result = response.json() result = response.json()
@@ -829,16 +801,48 @@ class TestSecurityPathTraversal:
assert response.status_code in [400, 404, 422] assert response.status_code in [400, 404, 422]
@pytest.mark.integration @pytest.mark.integration
def test_path_traversal_in_version_name(self, integration_client, test_package): def test_path_traversal_in_tag_name(self, integration_client, test_package):
"""Test version names with path traversal are handled safely.""" """Test tag names with path traversal are handled safely."""
project, package = test_package project, package = test_package
content = b"version traversal test" content = b"tag traversal test"
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")} files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
files=files, files=files,
data={"version": "../../../etc/passwd"}, data={"tag": "../../../etc/passwd"},
)
assert response.status_code in [200, 400, 422]
@pytest.mark.integration
def test_download_path_traversal_in_ref(self, integration_client, test_package):
"""Test download ref with path traversal is rejected."""
project, package = test_package
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/../../../etc/passwd"
)
assert response.status_code in [400, 404, 422]
@pytest.mark.integration
def test_path_traversal_in_package_name(self, integration_client, test_project):
"""Test package names with path traversal sequences are rejected."""
response = integration_client.get(
f"/api/v1/project/{test_project}/packages/../../../etc/passwd"
)
assert response.status_code in [400, 404, 422]
@pytest.mark.integration
def test_path_traversal_in_tag_name(self, integration_client, test_package):
"""Test tag names with path traversal are rejected or handled safely."""
project, package = test_package
content = b"tag traversal test"
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
data={"tag": "../../../etc/passwd"},
) )
assert response.status_code in [200, 400, 422] assert response.status_code in [200, 400, 422]
@@ -863,7 +867,7 @@ class TestSecurityMalformedRequests:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload", f"/api/v1/project/{project}/{package}/upload",
data={"version": "no-file"}, data={"tag": "no-file"},
) )
assert response.status_code == 422 assert response.status_code == 422
@@ -39,6 +39,31 @@ class TestVersionCreation:
assert result.get("version") == "1.0.0" assert result.get("version") == "1.0.0"
assert result.get("version_source") == "explicit" assert result.get("version_source") == "explicit"
@pytest.mark.integration
def test_upload_with_version_and_tag(self, integration_client, test_package):
"""Test upload with both version and tag creates both records."""
project, package = test_package
content = b"version and tag test"
files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
data={"version": "2.0.0", "tag": "latest"},
)
assert response.status_code == 200
result = response.json()
assert result.get("version") == "2.0.0"
# Verify tag was also created
tags_response = integration_client.get(
f"/api/v1/project/{project}/{package}/tags"
)
assert tags_response.status_code == 200
tags = tags_response.json()
tag_names = [t["name"] for t in tags.get("items", tags)]
assert "latest" in tag_names
@pytest.mark.integration @pytest.mark.integration
def test_duplicate_version_same_content_succeeds(self, integration_client, test_package): def test_duplicate_version_same_content_succeeds(self, integration_client, test_package):
"""Test uploading same version with same content succeeds (deduplication).""" """Test uploading same version with same content succeeds (deduplication)."""
@@ -237,10 +262,11 @@ class TestDownloadByVersion:
assert response.status_code == 404 assert response.status_code == 404
@pytest.mark.integration @pytest.mark.integration
def test_version_resolution_with_prefix(self, integration_client, test_package): def test_version_resolution_priority(self, integration_client, test_package):
"""Test that version: prefix explicitly resolves to version.""" """Test that version: prefix explicitly resolves to version, not tag."""
project, package = test_package project, package = test_package
version_content = b"this is the version content" version_content = b"this is the version content"
tag_content = b"this is the tag content"
# Create a version 6.0.0 # Create a version 6.0.0
files1 = {"file": ("app-v.tar.gz", io.BytesIO(version_content), "application/octet-stream")} files1 = {"file": ("app-v.tar.gz", io.BytesIO(version_content), "application/octet-stream")}
@@ -250,6 +276,14 @@ class TestDownloadByVersion:
data={"version": "6.0.0"}, data={"version": "6.0.0"},
) )
# Create a tag named "6.0.0" pointing to different content
files2 = {"file": ("app-t.tar.gz", io.BytesIO(tag_content), "application/octet-stream")}
integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files2,
data={"tag": "6.0.0"},
)
# Download with version: prefix should get version content # Download with version: prefix should get version content
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/version:6.0.0", f"/api/v1/project/{project}/{package}/+/version:6.0.0",
@@ -258,6 +292,14 @@ class TestDownloadByVersion:
assert response.status_code == 200 assert response.status_code == 200
assert response.content == version_content assert response.content == version_content
# Download with tag: prefix should get tag content
response2 = integration_client.get(
f"/api/v1/project/{project}/{package}/+/tag:6.0.0",
params={"mode": "proxy"},
)
assert response2.status_code == 200
assert response2.content == tag_content
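The resolution rules exercised above reduce to three download forms for the same artifact. A small sketch of the request side (authentication omitted for brevity; the base URL and project/package names are placeholders, not values from this diff):

import io

import httpx

BASE_URL = "http://localhost:8080"   # placeholder
PROJECT, PACKAGE = "demo-project", "demo-package"

with httpx.Client(base_url=BASE_URL, timeout=30.0) as client:
    # One upload can create a version record and a tag pointing at the same artifact.
    files = {"file": ("app.tar.gz", io.BytesIO(b"payload"), "application/octet-stream")}
    client.post(
        f"/api/v1/project/{PROJECT}/{PACKAGE}/upload",
        files=files,
        data={"version": "2.0.0", "tag": "latest"},
    ).raise_for_status()

    # A bare ref is tried as a version before it is tried as a tag ...
    bare = client.get(f"/api/v1/project/{PROJECT}/{PACKAGE}/+/2.0.0", params={"mode": "proxy"})
    # ... while the prefixes make the lookup explicit.
    by_version = client.get(f"/api/v1/project/{PROJECT}/{PACKAGE}/+/version:2.0.0", params={"mode": "proxy"})
    by_tag = client.get(f"/api/v1/project/{PROJECT}/{PACKAGE}/+/tag:latest", params={"mode": "proxy"})
    assert bare.content == by_version.content == by_tag.content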
class TestVersionDeletion: class TestVersionDeletion:
"""Tests for deleting versions.""" """Tests for deleting versions."""
@@ -27,9 +27,11 @@ class TestVersionCreation:
project_name, project_name,
package_name, package_name,
b"version create test", b"version create test",
tag="latest",
version="1.0.0", version="1.0.0",
) )
assert result["tag"] == "latest"
assert result["version"] == "1.0.0" assert result["version"] == "1.0.0"
assert result["version_source"] == "explicit" assert result["version_source"] == "explicit"
assert result["artifact_id"] assert result["artifact_id"]
@@ -147,6 +149,7 @@ class TestVersionCRUD:
package_name, package_name,
b"version with info", b"version with info",
version="1.0.0", version="1.0.0",
tag="release",
) )
response = integration_client.get( response = integration_client.get(
@@ -163,6 +166,8 @@ class TestVersionCRUD:
assert version_item is not None assert version_item is not None
assert "size" in version_item assert "size" in version_item
assert "artifact_id" in version_item assert "artifact_id" in version_item
assert "tags" in version_item
assert "release" in version_item["tags"]
@pytest.mark.integration @pytest.mark.integration
def test_get_version(self, integration_client, test_package): def test_get_version(self, integration_client, test_package):
@@ -267,9 +272,94 @@ class TestVersionDownload:
follow_redirects=False, follow_redirects=False,
) )
# Should resolve version # Should resolve version first (before tag)
assert response.status_code in [200, 302, 307] assert response.status_code in [200, 302, 307]
@pytest.mark.integration
def test_version_takes_precedence_over_tag(self, integration_client, test_package):
"""Test that version is checked before tag when resolving refs."""
project_name, package_name = test_package
# Upload with version "1.0"
version_result = upload_test_file(
integration_client,
project_name,
package_name,
b"version content",
version="1.0",
)
# Create a tag with the same name "1.0" pointing to different artifact
tag_result = upload_test_file(
integration_client,
project_name,
package_name,
b"tag content different",
tag="1.0",
)
# Download by "1.0" should resolve to version, not tag
# Since version:1.0 artifact was uploaded first
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/1.0",
follow_redirects=False,
)
assert response.status_code in [200, 302, 307]
class TestTagVersionEnrichment:
"""Tests for tag responses including version information."""
@pytest.mark.integration
def test_tag_response_includes_version(self, integration_client, test_package):
"""Test that tag responses include version of the artifact."""
project_name, package_name = test_package
# Upload with both version and tag
upload_test_file(
integration_client,
project_name,
package_name,
b"enriched tag test",
version="7.0.0",
tag="stable",
)
# Get tag and check version field
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags/stable"
)
assert response.status_code == 200
data = response.json()
assert data["name"] == "stable"
assert data["version"] == "7.0.0"
@pytest.mark.integration
def test_tag_list_includes_versions(self, integration_client, test_package):
"""Test that tag list responses include version for each tag."""
project_name, package_name = test_package
upload_test_file(
integration_client,
project_name,
package_name,
b"list version test",
version="8.0.0",
tag="latest",
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags"
)
assert response.status_code == 200
data = response.json()
tag_item = next((t for t in data["items"] if t["name"] == "latest"), None)
assert tag_item is not None
assert tag_item.get("version") == "8.0.0"
class TestVersionPagination: class TestVersionPagination:
"""Tests for version listing pagination and sorting.""" """Tests for version listing pagination and sorting."""
@@ -39,7 +39,7 @@ class TestDependencySchema:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
files=files, files=files,
data={"version": f"v1.0.0-{unique_test_id}"}, data={"tag": f"v1.0.0-{unique_test_id}"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -59,17 +59,29 @@ class TestDependencySchema:
integration_client.delete(f"/api/v1/projects/{dep_project_name}") integration_client.delete(f"/api/v1/projects/{dep_project_name}")
@pytest.mark.integration @pytest.mark.integration
def test_dependency_requires_version(self, integration_client): def test_dependency_requires_version_or_tag(self, integration_client):
"""Test that dependency requires version.""" """Test that dependency must have either version or tag, not both or neither."""
from app.schemas import DependencyCreate from app.schemas import DependencyCreate
# Test: missing version # Test: neither version nor tag
with pytest.raises(ValidationError): with pytest.raises(ValidationError) as exc_info:
DependencyCreate(project="proj", package="pkg") DependencyCreate(project="proj", package="pkg")
assert "Either 'version' or 'tag' must be specified" in str(exc_info.value)
# Test: both version and tag
with pytest.raises(ValidationError) as exc_info:
DependencyCreate(project="proj", package="pkg", version="1.0.0", tag="stable")
assert "Cannot specify both 'version' and 'tag'" in str(exc_info.value)
# Test: valid with version # Test: valid with version
dep = DependencyCreate(project="proj", package="pkg", version="1.0.0") dep = DependencyCreate(project="proj", package="pkg", version="1.0.0")
assert dep.version == "1.0.0" assert dep.version == "1.0.0"
assert dep.tag is None
# Test: valid with tag
dep = DependencyCreate(project="proj", package="pkg", tag="stable")
assert dep.tag == "stable"
assert dep.version is None
@pytest.mark.integration @pytest.mark.integration
def test_dependency_unique_constraint( def test_dependency_unique_constraint(
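The either/or rule these assertions pin down could be enforced with a Pydantic model validator roughly like the sketch below. The field names and error strings come from the tests; the validator body itself is an assumption about app.schemas, not a copy of it:

from typing import Optional

from pydantic import BaseModel, model_validator

class DependencyCreate(BaseModel):
    # Shape inferred from the tests above; the real schema lives in app.schemas.
    project: str
    package: str
    version: Optional[str] = None
    tag: Optional[str] = None

    @model_validator(mode="after")
    def _exactly_one_ref(self) -> "DependencyCreate":
        if self.version is None and self.tag is None:
            raise ValueError("Either 'version' or 'tag' must be specified")
        if self.version is not None and self.tag is not None:
            raise ValueError("Cannot specify both 'version' and 'tag'")
        return self

Raising ValueError inside the validator surfaces as the pydantic ValidationError the tests expect.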
@@ -114,7 +126,7 @@ class TestEnsureFileParsing:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
files=files, files=files,
data={"version": f"v1.0.0-{unique_test_id}"}, data={"tag": f"v1.0.0-{unique_test_id}"},
) )
assert response.status_code == 200 assert response.status_code == 200
data = response.json() data = response.json()
@@ -150,7 +162,7 @@ class TestEnsureFileParsing:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
files=files, files=files,
data={"version": f"v1.0.0-{unique_test_id}"}, data={"tag": f"v1.0.0-{unique_test_id}"},
) )
assert response.status_code == 400 assert response.status_code == 400
assert "Invalid ensure file" in response.json().get("detail", "") assert "Invalid ensure file" in response.json().get("detail", "")
@@ -176,7 +188,7 @@ class TestEnsureFileParsing:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
files=files, files=files,
data={"version": f"v1.0.0-{unique_test_id}"}, data={"tag": f"v1.0.0-{unique_test_id}"},
) )
assert response.status_code == 400 assert response.status_code == 400
assert "Project" in response.json().get("detail", "") assert "Project" in response.json().get("detail", "")
@@ -196,7 +208,7 @@ class TestEnsureFileParsing:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
files=files, files=files,
data={"version": f"v1.0.0-nodeps-{unique_test_id}"}, data={"tag": f"v1.0.0-nodeps-{unique_test_id}"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -214,14 +226,13 @@ class TestEnsureFileParsing:
assert response.status_code == 200 assert response.status_code == 200
try: try:
# Test with missing version field (version is now required)
ensure_content = yaml.dump({ ensure_content = yaml.dump({
"dependencies": [ "dependencies": [
{"project": dep_project_name, "package": "pkg"} # Missing version {"project": dep_project_name, "package": "pkg", "version": "1.0.0", "tag": "stable"}
] ]
}) })
content = unique_content("test-missing-version", unique_test_id, "constraint") content = unique_content("test-both", unique_test_id, "constraint")
files = { files = {
"file": ("test.tar.gz", BytesIO(content), "application/gzip"), "file": ("test.tar.gz", BytesIO(content), "application/gzip"),
"ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"), "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
@@ -229,10 +240,11 @@ class TestEnsureFileParsing:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
files=files, files=files,
data={"version": f"v1.0.0-{unique_test_id}"}, data={"tag": f"v1.0.0-{unique_test_id}"},
) )
assert response.status_code == 400 assert response.status_code == 400
assert "version" in response.json().get("detail", "").lower() assert "both" in response.json().get("detail", "").lower() or \
"version" in response.json().get("detail", "").lower()
finally: finally:
integration_client.delete(f"/api/v1/projects/{dep_project_name}") integration_client.delete(f"/api/v1/projects/{dep_project_name}")
@@ -259,7 +271,7 @@ class TestDependencyQueryEndpoints:
ensure_content = yaml.dump({ ensure_content = yaml.dump({
"dependencies": [ "dependencies": [
{"project": dep_project_name, "package": "lib-a", "version": "1.0.0"}, {"project": dep_project_name, "package": "lib-a", "version": "1.0.0"},
{"project": dep_project_name, "package": "lib-b", "version": "2.0.0"}, {"project": dep_project_name, "package": "lib-b", "tag": "stable"},
] ]
}) })
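For context, the ensure payload built in this hunk is a YAML document with a dependencies list in which each entry pins exactly one of version or tag. A sketch of assembling one the same way the tests do; the project and package names here are placeholders:

from io import BytesIO

import yaml

ensure_content = yaml.dump({
    "dependencies": [
        # Each dependency names a project/package plus exactly one of version or tag.
        {"project": "shared-libs", "package": "lib-a", "version": "1.0.0"},
        {"project": "shared-libs", "package": "lib-b", "tag": "stable"},
    ]
})

# The ensure file rides along with the artifact as a second multipart part
# on the same POST .../upload request used throughout these tests.
files = {
    "file": ("app.tar.gz", BytesIO(b"build output"), "application/gzip"),
    "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
}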
@@ -271,7 +283,7 @@ class TestDependencyQueryEndpoints:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
files=files, files=files,
data={"version": f"v2.0.0-{unique_test_id}"}, data={"tag": f"v2.0.0-{unique_test_id}"},
) )
assert response.status_code == 200 assert response.status_code == 200
artifact_id = response.json()["artifact_id"] artifact_id = response.json()["artifact_id"]
@@ -287,8 +299,10 @@ class TestDependencyQueryEndpoints:
deps = {d["package"]: d for d in data["dependencies"]} deps = {d["package"]: d for d in data["dependencies"]}
assert "lib-a" in deps assert "lib-a" in deps
assert deps["lib-a"]["version"] == "1.0.0" assert deps["lib-a"]["version"] == "1.0.0"
assert deps["lib-a"]["tag"] is None
assert "lib-b" in deps assert "lib-b" in deps
assert deps["lib-b"]["version"] == "2.0.0" assert deps["lib-b"]["tag"] == "stable"
assert deps["lib-b"]["version"] is None
finally: finally:
integration_client.delete(f"/api/v1/projects/{dep_project_name}") integration_client.delete(f"/api/v1/projects/{dep_project_name}")
@@ -322,7 +336,7 @@ class TestDependencyQueryEndpoints:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
files=files, files=files,
data={"version": tag_name}, data={"tag": tag_name},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -367,7 +381,7 @@ class TestDependencyQueryEndpoints:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{dep_project_name}/target-lib/upload", f"/api/v1/project/{dep_project_name}/target-lib/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -386,7 +400,7 @@ class TestDependencyQueryEndpoints:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
files=files, files=files,
data={"version": f"v4.0.0-{unique_test_id}"}, data={"tag": f"v4.0.0-{unique_test_id}"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -405,6 +419,7 @@ class TestDependencyQueryEndpoints:
for dep in data["dependents"]: for dep in data["dependents"]:
if dep["project"] == project_name: if dep["project"] == project_name:
found = True found = True
assert dep["constraint_type"] == "version"
assert dep["constraint_value"] == "1.0.0" assert dep["constraint_value"] == "1.0.0"
break break
assert found, "Our package should be in the dependents list" assert found, "Our package should be in the dependents list"
@@ -427,7 +442,7 @@ class TestDependencyQueryEndpoints:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
files=files, files=files,
data={"version": f"v5.0.0-nodeps-{unique_test_id}"}, data={"tag": f"v5.0.0-nodeps-{unique_test_id}"},
) )
assert response.status_code == 200 assert response.status_code == 200
artifact_id = response.json()["artifact_id"] artifact_id = response.json()["artifact_id"]
@@ -467,7 +482,7 @@ class TestDependencyResolution:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_c}/upload", f"/api/v1/project/{test_project}/{pkg_c}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -485,7 +500,7 @@ class TestDependencyResolution:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_b}/upload", f"/api/v1/project/{test_project}/{pkg_b}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -503,7 +518,7 @@ class TestDependencyResolution:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_a}/upload", f"/api/v1/project/{test_project}/{pkg_a}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -551,7 +566,7 @@ class TestDependencyResolution:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_d}/upload", f"/api/v1/project/{test_project}/{pkg_d}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -569,7 +584,7 @@ class TestDependencyResolution:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_b}/upload", f"/api/v1/project/{test_project}/{pkg_b}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -587,7 +602,7 @@ class TestDependencyResolution:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_c}/upload", f"/api/v1/project/{test_project}/{pkg_c}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -606,7 +621,7 @@ class TestDependencyResolution:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_a}/upload", f"/api/v1/project/{test_project}/{pkg_a}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -648,7 +663,7 @@ class TestDependencyResolution:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
files=files, files=files,
data={"version": f"solo-{unique_test_id}"}, data={"tag": f"solo-{unique_test_id}"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -683,21 +698,17 @@ class TestDependencyResolution:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
files=files, files=files,
data={"version": f"missing-dep-{unique_test_id}"}, data={"tag": f"missing-dep-{unique_test_id}"},
) )
# Should fail at upload time since package doesn't exist # Should fail at upload time since package doesn't exist
# OR succeed at upload but fail at resolution # OR succeed at upload but fail at resolution
# Depending on implementation choice # Depending on implementation choice
if response.status_code == 200: if response.status_code == 200:
# Resolution should return missing dependencies # Resolution should fail
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/missing-dep-{unique_test_id}/resolve" f"/api/v1/project/{project_name}/{package_name}/+/missing-dep-{unique_test_id}/resolve"
) )
# Expect 200 with missing dependencies listed assert response.status_code == 404
assert response.status_code == 200
data = response.json()
# The missing dependency should be in the 'missing' list
assert len(data.get("missing", [])) >= 1
class TestCircularDependencyDetection: class TestCircularDependencyDetection:
@@ -725,7 +736,7 @@ class TestCircularDependencyDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_a}/upload", f"/api/v1/project/{test_project}/{pkg_a}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -743,7 +754,7 @@ class TestCircularDependencyDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_b}/upload", f"/api/v1/project/{test_project}/{pkg_b}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -761,7 +772,7 @@ class TestCircularDependencyDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_a}/upload", f"/api/v1/project/{test_project}/{pkg_a}/upload",
files=files, files=files,
data={"version": "2.0.0"}, data={"tag": "2.0.0"},
) )
# Should be rejected with 400 (circular dependency) # Should be rejected with 400 (circular dependency)
assert response.status_code == 400 assert response.status_code == 400
@@ -796,7 +807,7 @@ class TestCircularDependencyDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_a}/upload", f"/api/v1/project/{test_project}/{pkg_a}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -814,7 +825,7 @@ class TestCircularDependencyDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_b}/upload", f"/api/v1/project/{test_project}/{pkg_b}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -832,7 +843,7 @@ class TestCircularDependencyDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_c}/upload", f"/api/v1/project/{test_project}/{pkg_c}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -850,7 +861,7 @@ class TestCircularDependencyDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_a}/upload", f"/api/v1/project/{test_project}/{pkg_a}/upload",
files=files, files=files,
data={"version": "2.0.0"}, data={"tag": "2.0.0"},
) )
assert response.status_code == 400 assert response.status_code == 400
data = response.json() data = response.json()
@@ -899,7 +910,7 @@ class TestConflictDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_common}/upload", f"/api/v1/project/{test_project}/{pkg_common}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -909,7 +920,7 @@ class TestConflictDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_common}/upload", f"/api/v1/project/{test_project}/{pkg_common}/upload",
files=files, files=files,
data={"version": "2.0.0"}, data={"tag": "2.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -927,7 +938,7 @@ class TestConflictDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_lib_a}/upload", f"/api/v1/project/{test_project}/{pkg_lib_a}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -945,7 +956,7 @@ class TestConflictDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_lib_b}/upload", f"/api/v1/project/{test_project}/{pkg_lib_b}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -964,7 +975,7 @@ class TestConflictDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_app}/upload", f"/api/v1/project/{test_project}/{pkg_app}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -1012,7 +1023,7 @@ class TestConflictDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_common}/upload", f"/api/v1/project/{test_project}/{pkg_common}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -1031,7 +1042,7 @@ class TestConflictDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{lib_pkg}/upload", f"/api/v1/project/{test_project}/{lib_pkg}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -1050,7 +1061,7 @@ class TestConflictDetection:
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_app}/upload", f"/api/v1/project/{test_project}/{pkg_app}/upload",
files=files, files=files,
data={"version": "1.0.0"}, data={"tag": "1.0.0"},
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -26,16 +26,16 @@ def upload_test_file(integration_client):
Factory fixture to upload a test file and return its artifact ID. Factory fixture to upload a test file and return its artifact ID.
Usage: Usage:
artifact_id = upload_test_file(project, package, content, version="v1.0") artifact_id = upload_test_file(project, package, content, tag="v1.0")
""" """
def _upload(project_name: str, package_name: str, content: bytes, version: str = None): def _upload(project_name: str, package_name: str, content: bytes, tag: str = None):
files = { files = {
"file": ("test-file.bin", io.BytesIO(content), "application/octet-stream") "file": ("test-file.bin", io.BytesIO(content), "application/octet-stream")
} }
data = {} data = {}
if version: if tag:
data["version"] = version data["tag"] = tag
response = integration_client.post( response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload", f"/api/v1/project/{project_name}/{package_name}/upload",
@@ -66,7 +66,7 @@ class TestDownloadChecksumHeaders:
# Upload file # Upload file
artifact_id = upload_test_file( artifact_id = upload_test_file(
project_name, package_name, content, version="sha256-header-test" project_name, package_name, content, tag="sha256-header-test"
) )
# Download with proxy mode # Download with proxy mode
@@ -88,7 +88,7 @@ class TestDownloadChecksumHeaders:
content = b"Content for ETag header test" content = b"Content for ETag header test"
artifact_id = upload_test_file( artifact_id = upload_test_file(
project_name, package_name, content, version="etag-test" project_name, package_name, content, tag="etag-test"
) )
response = integration_client.get( response = integration_client.get(
@@ -110,7 +110,7 @@ class TestDownloadChecksumHeaders:
content = b"Content for Digest header test" content = b"Content for Digest header test"
sha256 = hashlib.sha256(content).hexdigest() sha256 = hashlib.sha256(content).hexdigest()
upload_test_file(project_name, package_name, content, version="digest-test") upload_test_file(project_name, package_name, content, tag="digest-test")
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/digest-test", f"/api/v1/project/{project_name}/{package_name}/+/digest-test",
@@ -137,7 +137,7 @@ class TestDownloadChecksumHeaders:
project_name, package_name = test_package project_name, package_name = test_package
content = b"Content for X-Content-Length test" content = b"Content for X-Content-Length test"
upload_test_file(project_name, package_name, content, version="content-length-test") upload_test_file(project_name, package_name, content, tag="content-length-test")
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/content-length-test", f"/api/v1/project/{project_name}/{package_name}/+/content-length-test",
@@ -156,7 +156,7 @@ class TestDownloadChecksumHeaders:
project_name, package_name = test_package project_name, package_name = test_package
content = b"Content for X-Verified false test" content = b"Content for X-Verified false test"
upload_test_file(project_name, package_name, content, version="verified-false-test") upload_test_file(project_name, package_name, content, tag="verified-false-test")
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/verified-false-test", f"/api/v1/project/{project_name}/{package_name}/+/verified-false-test",
@@ -184,7 +184,7 @@ class TestPreVerificationMode:
project_name, package_name = test_package project_name, package_name = test_package
content = b"Content for pre-verification success test" content = b"Content for pre-verification success test"
upload_test_file(project_name, package_name, content, version="pre-verify-success") upload_test_file(project_name, package_name, content, tag="pre-verify-success")
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-success", f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-success",
@@ -205,7 +205,7 @@ class TestPreVerificationMode:
# Use binary content to verify no corruption # Use binary content to verify no corruption
content = bytes(range(256)) * 10 # 2560 bytes of all byte values content = bytes(range(256)) * 10 # 2560 bytes of all byte values
upload_test_file(project_name, package_name, content, version="pre-verify-content") upload_test_file(project_name, package_name, content, tag="pre-verify-content")
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-content", f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-content",
@@ -233,7 +233,7 @@ class TestStreamingVerificationMode:
content = b"Content for streaming verification success test" content = b"Content for streaming verification success test"
upload_test_file( upload_test_file(
project_name, package_name, content, version="stream-verify-success" project_name, package_name, content, tag="stream-verify-success"
) )
response = integration_client.get( response = integration_client.get(
@@ -255,7 +255,7 @@ class TestStreamingVerificationMode:
# 100KB of content # 100KB of content
content = b"x" * (100 * 1024) content = b"x" * (100 * 1024)
upload_test_file(project_name, package_name, content, version="stream-verify-large") upload_test_file(project_name, package_name, content, tag="stream-verify-large")
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/stream-verify-large", f"/api/v1/project/{project_name}/{package_name}/+/stream-verify-large",
@@ -283,7 +283,7 @@ class TestHeadRequestHeaders:
content = b"Content for HEAD SHA256 test" content = b"Content for HEAD SHA256 test"
artifact_id = upload_test_file( artifact_id = upload_test_file(
project_name, package_name, content, version="head-sha256-test" project_name, package_name, content, tag="head-sha256-test"
) )
response = integration_client.head( response = integration_client.head(
@@ -303,7 +303,7 @@ class TestHeadRequestHeaders:
content = b"Content for HEAD ETag test" content = b"Content for HEAD ETag test"
artifact_id = upload_test_file( artifact_id = upload_test_file(
project_name, package_name, content, version="head-etag-test" project_name, package_name, content, tag="head-etag-test"
) )
response = integration_client.head( response = integration_client.head(
@@ -322,7 +322,7 @@ class TestHeadRequestHeaders:
project_name, package_name = test_package project_name, package_name = test_package
content = b"Content for HEAD Digest test" content = b"Content for HEAD Digest test"
upload_test_file(project_name, package_name, content, version="head-digest-test") upload_test_file(project_name, package_name, content, tag="head-digest-test")
response = integration_client.head( response = integration_client.head(
f"/api/v1/project/{project_name}/{package_name}/+/head-digest-test" f"/api/v1/project/{project_name}/{package_name}/+/head-digest-test"
@@ -340,7 +340,7 @@ class TestHeadRequestHeaders:
project_name, package_name = test_package project_name, package_name = test_package
content = b"Content for HEAD Content-Length test" content = b"Content for HEAD Content-Length test"
upload_test_file(project_name, package_name, content, version="head-length-test") upload_test_file(project_name, package_name, content, tag="head-length-test")
response = integration_client.head( response = integration_client.head(
f"/api/v1/project/{project_name}/{package_name}/+/head-length-test" f"/api/v1/project/{project_name}/{package_name}/+/head-length-test"
@@ -356,7 +356,7 @@ class TestHeadRequestHeaders:
project_name, package_name = test_package project_name, package_name = test_package
content = b"Content for HEAD no-body test" content = b"Content for HEAD no-body test"
upload_test_file(project_name, package_name, content, version="head-no-body-test") upload_test_file(project_name, package_name, content, tag="head-no-body-test")
response = integration_client.head( response = integration_client.head(
f"/api/v1/project/{project_name}/{package_name}/+/head-no-body-test" f"/api/v1/project/{project_name}/{package_name}/+/head-no-body-test"
@@ -382,7 +382,7 @@ class TestRangeRequestHeaders:
project_name, package_name = test_package project_name, package_name = test_package
content = b"Content for range request checksum header test" content = b"Content for range request checksum header test"
upload_test_file(project_name, package_name, content, version="range-checksum-test") upload_test_file(project_name, package_name, content, tag="range-checksum-test")
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/range-checksum-test", f"/api/v1/project/{project_name}/{package_name}/+/range-checksum-test",
@@ -412,7 +412,7 @@ class TestClientSideVerification:
project_name, package_name = test_package project_name, package_name = test_package
content = b"Content for client-side verification test" content = b"Content for client-side verification test"
upload_test_file(project_name, package_name, content, version="client-verify-test") upload_test_file(project_name, package_name, content, tag="client-verify-test")
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/client-verify-test", f"/api/v1/project/{project_name}/{package_name}/+/client-verify-test",
@@ -438,7 +438,7 @@ class TestClientSideVerification:
project_name, package_name = test_package project_name, package_name = test_package
content = b"Content for Digest header verification" content = b"Content for Digest header verification"
upload_test_file(project_name, package_name, content, version="digest-verify-test") upload_test_file(project_name, package_name, content, tag="digest-verify-test")
response = integration_client.get( response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/digest-verify-test", f"/api/v1/project/{project_name}/{package_name}/+/digest-verify-test",
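Because the artifact id returned at upload time is the SHA-256 of the content, a client can always re-verify a proxied download locally, which is the guarantee these header tests lean on. A minimal sketch with placeholder values; the expected digest is whatever the original upload reported:

import hashlib

import httpx

BASE_URL = "http://localhost:8080"   # placeholder
PROJECT, PACKAGE, REF = "demo-project", "demo-package", "client-verify-test"
EXPECTED_SHA256 = "<artifact_id returned by the original upload>"  # placeholder

with httpx.Client(base_url=BASE_URL, timeout=30.0) as client:
    resp = client.get(
        f"/api/v1/project/{PROJECT}/{PACKAGE}/+/{REF}",
        params={"mode": "proxy"},
    )
    resp.raise_for_status()
    # Recompute the digest over the proxied bytes and compare with the known artifact id.
    actual = hashlib.sha256(resp.content).hexdigest()
    if actual != EXPECTED_SHA256:
        raise RuntimeError(f"checksum mismatch: got {actual}")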
@@ -91,6 +91,7 @@ class TestUpstreamSourceModel:
assert hasattr(source, 'source_type') assert hasattr(source, 'source_type')
assert hasattr(source, 'url') assert hasattr(source, 'url')
assert hasattr(source, 'enabled') assert hasattr(source, 'enabled')
assert hasattr(source, 'is_public')
assert hasattr(source, 'auth_type') assert hasattr(source, 'auth_type')
assert hasattr(source, 'username') assert hasattr(source, 'username')
assert hasattr(source, 'password_encrypted') assert hasattr(source, 'password_encrypted')
@@ -106,6 +107,7 @@ class TestUpstreamSourceModel:
source_type="npm", source_type="npm",
url="https://npm.example.com", url="https://npm.example.com",
enabled=True, enabled=True,
is_public=False,
auth_type="basic", auth_type="basic",
username="admin", username="admin",
priority=50, priority=50,
@@ -114,6 +116,7 @@ class TestUpstreamSourceModel:
assert source.source_type == "npm" assert source.source_type == "npm"
assert source.url == "https://npm.example.com" assert source.url == "https://npm.example.com"
assert source.enabled is True assert source.enabled is True
assert source.is_public is False
assert source.auth_type == "basic" assert source.auth_type == "basic"
assert source.username == "admin" assert source.username == "admin"
assert source.priority == 50 assert source.priority == 50
@@ -192,6 +195,7 @@ class TestCacheSettingsModel:
settings = CacheSettings() settings = CacheSettings()
assert hasattr(settings, 'id') assert hasattr(settings, 'id')
assert hasattr(settings, 'allow_public_internet')
assert hasattr(settings, 'auto_create_system_projects') assert hasattr(settings, 'auto_create_system_projects')
def test_model_with_values(self): def test_model_with_values(self):
@@ -200,9 +204,11 @@ class TestCacheSettingsModel:
settings = CacheSettings( settings = CacheSettings(
id=1, id=1,
allow_public_internet=False,
auto_create_system_projects=True, auto_create_system_projects=True,
) )
assert settings.id == 1 assert settings.id == 1
assert settings.allow_public_internet is False
assert settings.auto_create_system_projects is True assert settings.auto_create_system_projects is True
@@ -254,6 +260,7 @@ class TestUpstreamSourceSchemas:
source_type="npm", source_type="npm",
url="https://npm.example.com", url="https://npm.example.com",
enabled=True, enabled=True,
is_public=False,
auth_type="basic", auth_type="basic",
username="admin", username="admin",
password="secret", password="secret",
@@ -274,6 +281,7 @@ class TestUpstreamSourceSchemas:
) )
assert source.source_type == "generic" assert source.source_type == "generic"
assert source.enabled is False assert source.enabled is False
assert source.is_public is True
assert source.auth_type == "none" assert source.auth_type == "none"
assert source.priority == 100 assert source.priority == 100
@@ -362,14 +370,16 @@ class TestCacheSettingsSchemas:
from app.schemas import CacheSettingsUpdate from app.schemas import CacheSettingsUpdate
update = CacheSettingsUpdate() update = CacheSettingsUpdate()
assert update.allow_public_internet is None
assert update.auto_create_system_projects is None assert update.auto_create_system_projects is None
def test_update_schema_partial(self): def test_update_schema_partial(self):
"""Test CacheSettingsUpdate with partial fields.""" """Test CacheSettingsUpdate with partial fields."""
from app.schemas import CacheSettingsUpdate from app.schemas import CacheSettingsUpdate
update = CacheSettingsUpdate(auto_create_system_projects=True) update = CacheSettingsUpdate(allow_public_internet=False)
assert update.auto_create_system_projects is True assert update.allow_public_internet is False
assert update.auto_create_system_projects is None
class TestCacheRequestSchemas: class TestCacheRequestSchemas:
@@ -383,7 +393,7 @@ class TestCacheRequestSchemas:
url="https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", url="https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
source_type="npm", source_type="npm",
package_name="lodash", package_name="lodash",
version="4.17.21", tag="4.17.21",
) )
assert request.url == "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" assert request.url == "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz"
assert request.source_type == "npm" assert request.source_type == "npm"
@@ -568,6 +578,7 @@ class TestUpstreamClientSourceMatching:
name="npm-public", name="npm-public",
url="https://registry.npmjs.org", url="https://registry.npmjs.org",
enabled=True, enabled=True,
is_public=True,
auth_type="none", auth_type="none",
priority=100, priority=100,
) )
@@ -592,6 +603,7 @@ class TestUpstreamClientSourceMatching:
name="npm-private", name="npm-private",
url="https://registry.npmjs.org", url="https://registry.npmjs.org",
enabled=True, enabled=True,
is_public=False,
auth_type="basic", auth_type="basic",
priority=50, priority=50,
) )
@@ -599,6 +611,7 @@ class TestUpstreamClientSourceMatching:
name="npm-public", name="npm-public",
url="https://registry.npmjs.org", url="https://registry.npmjs.org",
enabled=True, enabled=True,
is_public=True,
auth_type="none", auth_type="none",
priority=100, priority=100,
) )
@@ -698,6 +711,89 @@ class TestUpstreamClientAuthHeaders:
assert auth is None assert auth is None
class TestUpstreamClientAirGapMode:
"""Tests for air-gap mode enforcement."""
def test_airgap_blocks_public_source(self):
"""Test that air-gap mode blocks public sources."""
from app.models import UpstreamSource, CacheSettings
from app.upstream import UpstreamClient, AirGapError
source = UpstreamSource(
name="npm-public",
url="https://registry.npmjs.org",
enabled=True,
is_public=True,
auth_type="none",
priority=100,
)
settings = CacheSettings(allow_public_internet=False)
client = UpstreamClient(sources=[source], cache_settings=settings)
with pytest.raises(AirGapError) as exc_info:
client.fetch("https://registry.npmjs.org/lodash")
assert "Air-gap mode enabled" in str(exc_info.value)
assert "public source" in str(exc_info.value)
def test_airgap_blocks_unmatched_url(self):
"""Test that air-gap mode blocks URLs not matching any source."""
from app.models import CacheSettings
from app.upstream import UpstreamClient, AirGapError
settings = CacheSettings(allow_public_internet=False)
client = UpstreamClient(sources=[], cache_settings=settings)
with pytest.raises(AirGapError) as exc_info:
client.fetch("https://example.com/file.tgz")
assert "Air-gap mode enabled" in str(exc_info.value)
assert "does not match any configured" in str(exc_info.value)
def test_airgap_allows_private_source(self):
"""Test that air-gap mode allows private sources."""
from app.models import UpstreamSource, CacheSettings
from app.upstream import UpstreamClient, SourceDisabledError
source = UpstreamSource(
name="npm-private",
url="https://npm.internal.corp",
enabled=False, # Disabled, but would pass air-gap check
is_public=False,
auth_type="none",
priority=100,
)
settings = CacheSettings(allow_public_internet=False)
client = UpstreamClient(sources=[source], cache_settings=settings)
# Should fail due to disabled source, not air-gap
with pytest.raises(SourceDisabledError):
client.fetch("https://npm.internal.corp/package.tgz")
def test_allow_public_internet_true(self):
"""Test that public internet is allowed when setting is true."""
from app.models import UpstreamSource, CacheSettings
from app.upstream import UpstreamClient, SourceDisabledError
source = UpstreamSource(
name="npm-public",
url="https://registry.npmjs.org",
enabled=False, # Disabled
is_public=True,
auth_type="none",
priority=100,
)
settings = CacheSettings(allow_public_internet=True)
client = UpstreamClient(sources=[source], cache_settings=settings)
# Should fail due to disabled source, not air-gap
with pytest.raises(SourceDisabledError):
client.fetch("https://registry.npmjs.org/lodash")
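Taken together, the air-gap tests above describe a gate that runs before any outbound fetch. One plausible shape of that check, reconstructed from the expected exceptions rather than copied from app.upstream, so treat it as an assumption:

from typing import Optional, Sequence

class AirGapError(Exception):
    """Raised when a fetch would leave the air-gapped environment."""

class SourceDisabledError(Exception):
    """Raised when the matched upstream source is disabled."""

def check_fetch_allowed(url: str, sources: Sequence, settings) -> Optional[object]:
    """Return the source matching url, enforcing air-gap and enabled flags first."""
    # Simple first-prefix match; the real matcher may be more elaborate.
    source = next((s for s in sources if url.startswith(s.url)), None)

    if not settings.allow_public_internet:
        # Air-gap mode: only explicitly private, configured sources may be contacted.
        if source is None:
            raise AirGapError(
                "Air-gap mode enabled: URL does not match any configured upstream source"
            )
        if source.is_public:
            raise AirGapError("Air-gap mode enabled: refusing to contact public source")

    if source is not None and not source.enabled:
        raise SourceDisabledError(f"Upstream source '{source.name}' is disabled")
    return source

The allow_public_internet flag itself is flipped through PUT /api/v1/admin/cache-settings, which the admin API tests later in this diff toggle and restore.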
class TestUpstreamClientSourceDisabled: class TestUpstreamClientSourceDisabled:
"""Tests for disabled source handling.""" """Tests for disabled source handling."""
@@ -710,6 +806,7 @@ class TestUpstreamClientSourceDisabled:
name="npm-public", name="npm-public",
url="https://registry.npmjs.org", url="https://registry.npmjs.org",
enabled=False, enabled=False,
is_public=True,
auth_type="none", auth_type="none",
priority=100, priority=100,
) )
@@ -882,6 +979,13 @@ class TestUpstreamExceptions:
assert error.status_code == 404 assert error.status_code == 404
assert error.response_headers == {"x-custom": "value"} assert error.response_headers == {"x-custom": "value"}
def test_airgap_error(self):
"""Test AirGapError."""
from app.upstream import AirGapError
error = AirGapError("Blocked by air-gap")
assert "Blocked by air-gap" in str(error)
def test_source_not_found_error(self): def test_source_not_found_error(self):
"""Test SourceNotFoundError.""" """Test SourceNotFoundError."""
from app.upstream import SourceNotFoundError from app.upstream import SourceNotFoundError
@@ -1132,7 +1236,7 @@ class TestCacheRequestValidation:
url="https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", url="https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
source_type="npm", source_type="npm",
package_name="lodash", package_name="lodash",
version="4.17.21", tag="4.17.21",
) )
assert request.url == "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" assert request.url == "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz"
assert request.source_type == "npm" assert request.source_type == "npm"
@@ -1316,6 +1420,7 @@ class TestUpstreamSourcesAdminAPI:
"source_type": "generic", "source_type": "generic",
"url": "https://example.com/packages", "url": "https://example.com/packages",
"enabled": False, "enabled": False,
"is_public": False,
"auth_type": "none", "auth_type": "none",
"priority": 200, "priority": 200,
}, },
@@ -1327,6 +1432,7 @@ class TestUpstreamSourcesAdminAPI:
assert data["source_type"] == "generic" assert data["source_type"] == "generic"
assert data["url"] == "https://example.com/packages" assert data["url"] == "https://example.com/packages"
assert data["enabled"] is False assert data["enabled"] is False
assert data["is_public"] is False
assert data["priority"] == 200 assert data["priority"] == 200
assert "id" in data assert "id" in data
@@ -1346,6 +1452,7 @@ class TestUpstreamSourcesAdminAPI:
"source_type": "npm", "source_type": "npm",
"url": "https://npm.internal.corp", "url": "https://npm.internal.corp",
"enabled": False, "enabled": False,
"is_public": False,
"auth_type": "basic", "auth_type": "basic",
"username": "reader", "username": "reader",
"password": "secret123", "password": "secret123",
@@ -1599,9 +1706,11 @@ class TestCacheSettingsAdminAPI:
data = response.json() data = response.json()
# Check expected fields exist # Check expected fields exist
assert "allow_public_internet" in data
assert "auto_create_system_projects" in data assert "auto_create_system_projects" in data
# Check types # Check types
assert isinstance(data["allow_public_internet"], bool)
assert isinstance(data["auto_create_system_projects"], bool) assert isinstance(data["auto_create_system_projects"], bool)
@pytest.mark.integration @pytest.mark.integration
@@ -1614,7 +1723,7 @@ class TestCacheSettingsAdminAPI:
with httpx.Client(base_url=base_url, timeout=30.0) as unauthenticated_client: with httpx.Client(base_url=base_url, timeout=30.0) as unauthenticated_client:
response = unauthenticated_client.put( response = unauthenticated_client.put(
"/api/v1/admin/cache-settings", "/api/v1/admin/cache-settings",
json={"auto_create_system_projects": False}, json={"allow_public_internet": False},
) )
assert response.status_code in (401, 403) assert response.status_code in (401, 403)
@@ -1628,43 +1737,76 @@ class TestCacheSettingsAdminAPI:
response = integration_client.put( response = integration_client.put(
"/api/v1/admin/cache-settings", "/api/v1/admin/cache-settings",
json={ json={
"allow_public_internet": not original["allow_public_internet"],
"auto_create_system_projects": not original["auto_create_system_projects"], "auto_create_system_projects": not original["auto_create_system_projects"],
}, },
) )
assert response.status_code == 200 assert response.status_code == 200
data = response.json() data = response.json()
assert data["allow_public_internet"] == (not original["allow_public_internet"])
assert data["auto_create_system_projects"] == (not original["auto_create_system_projects"]) assert data["auto_create_system_projects"] == (not original["auto_create_system_projects"])
# Restore original settings # Restore original settings
integration_client.put( integration_client.put(
"/api/v1/admin/cache-settings", "/api/v1/admin/cache-settings",
json={ json={
"allow_public_internet": original["allow_public_internet"],
"auto_create_system_projects": original["auto_create_system_projects"], "auto_create_system_projects": original["auto_create_system_projects"],
}, },
) )
@pytest.mark.integration
def test_update_cache_settings_allow_public_internet(self, integration_client):
"""Test enabling and disabling public internet access (air-gap mode)."""
# First get current settings to restore later
original = integration_client.get("/api/v1/admin/cache-settings").json()
# Disable public internet (enable air-gap mode)
response = integration_client.put(
"/api/v1/admin/cache-settings",
json={"allow_public_internet": False},
)
assert response.status_code == 200
assert response.json()["allow_public_internet"] is False
# Enable public internet (disable air-gap mode)
response = integration_client.put(
"/api/v1/admin/cache-settings",
json={"allow_public_internet": True},
)
assert response.status_code == 200
assert response.json()["allow_public_internet"] is True
# Restore original settings
integration_client.put(
"/api/v1/admin/cache-settings",
json={"allow_public_internet": original["allow_public_internet"]},
)
@pytest.mark.integration @pytest.mark.integration
def test_update_cache_settings_partial(self, integration_client): def test_update_cache_settings_partial(self, integration_client):
"""Test that partial updates only change specified fields.""" """Test that partial updates only change specified fields."""
# Get current settings # Get current settings
original = integration_client.get("/api/v1/admin/cache-settings").json() original = integration_client.get("/api/v1/admin/cache-settings").json()
# Update only auto_create_system_projects # Update only allow_public_internet
new_value = not original["auto_create_system_projects"] new_value = not original["allow_public_internet"]
response = integration_client.put( response = integration_client.put(
"/api/v1/admin/cache-settings", "/api/v1/admin/cache-settings",
json={"auto_create_system_projects": new_value}, json={"allow_public_internet": new_value},
) )
assert response.status_code == 200 assert response.status_code == 200
data = response.json() data = response.json()
assert data["auto_create_system_projects"] == new_value assert data["allow_public_internet"] == new_value
# Other field should be unchanged
assert data["auto_create_system_projects"] == original["auto_create_system_projects"]
# Restore # Restore
integration_client.put( integration_client.put(
"/api/v1/admin/cache-settings", "/api/v1/admin/cache-settings",
json={"auto_create_system_projects": original["auto_create_system_projects"]}, json={"allow_public_internet": original["allow_public_internet"]},
) )
@pytest.mark.integration @pytest.mark.integration
@@ -1816,6 +1958,7 @@ class TestEnvVarUpstreamSourcesParsing:
# Check defaults # Check defaults
assert test_source.source_type == "generic" assert test_source.source_type == "generic"
assert test_source.enabled is True assert test_source.enabled is True
assert test_source.is_public is True
assert test_source.auth_type == "none" assert test_source.auth_type == "none"
assert test_source.priority == 100 assert test_source.priority == 100
finally: finally:
@@ -1838,6 +1981,7 @@ class TestEnvSourceToResponse:
url="https://example.com", url="https://example.com",
source_type="npm", source_type="npm",
enabled=True, enabled=True,
is_public=False,
auth_type="basic", auth_type="basic",
username="user", username="user",
password="pass", password="pass",
@@ -1848,6 +1992,7 @@ class TestEnvSourceToResponse:
assert source.url == "https://example.com" assert source.url == "https://example.com"
assert source.source_type == "npm" assert source.source_type == "npm"
assert source.enabled is True assert source.enabled is True
assert source.is_public is False
assert source.auth_type == "basic" assert source.auth_type == "basic"
assert source.username == "user" assert source.username == "user"
assert source.password == "pass" assert source.password == "pass"
@@ -1902,4 +2047,5 @@ class TestCacheSettingsEnvOverride:
data = response.json() data = response.json()
# These fields should exist (may be null if no env override) # These fields should exist (may be null if no env override)
assert "allow_public_internet_env_override" in data
assert "auto_create_system_projects_env_override" in data assert "auto_create_system_projects_env_override" in data


@@ -145,6 +145,54 @@ class TestPackageModel:
assert platform_col.default.arg == "any" assert platform_col.default.arg == "any"
class TestTagModel:
"""Tests for the Tag model."""
@pytest.mark.unit
def test_tag_requires_package_id(self):
"""Test tag requires package_id."""
from app.models import Tag
tag = Tag(
name="v1.0.0",
package_id=uuid.uuid4(),
artifact_id="f" * 64,
created_by="test-user",
)
assert tag.package_id is not None
assert tag.artifact_id == "f" * 64
class TestTagHistoryModel:
"""Tests for the TagHistory model."""
@pytest.mark.unit
def test_tag_history_default_change_type(self):
"""Test tag history change_type column has default value of 'update'."""
from app.models import TagHistory
# Check the column definition has the right default
change_type_col = TagHistory.__table__.columns["change_type"]
assert change_type_col.default is not None
assert change_type_col.default.arg == "update"
@pytest.mark.unit
def test_tag_history_allows_null_old_artifact(self):
"""Test tag history allows null old_artifact_id (for create events)."""
from app.models import TagHistory
history = TagHistory(
tag_id=uuid.uuid4(),
old_artifact_id=None,
new_artifact_id="h" * 64,
change_type="create",
changed_by="test-user",
)
assert history.old_artifact_id is None
class TestUploadModel: class TestUploadModel:
"""Tests for the Upload model.""" """Tests for the Upload model."""


@@ -1,228 +0,0 @@
# PyPI Proxy Performance & Multi-Protocol Architecture Design
**Date:** 2026-02-04
**Status:** Approved
**Branch:** fix/pypi-proxy-timeout
## Overview
Comprehensive infrastructure overhaul to address latency, throughput, and resource consumption issues in the PyPI proxy, while establishing a foundation for npm, Maven, and other package protocols.
## Goals
1. **Reduce latency** - Eliminate per-request connection overhead, cache aggressively
2. **Increase throughput** - Handle hundreds of concurrent requests without degradation
3. **Lower resource usage** - Connection pooling, efficient DB queries, proper async I/O
4. **Enable multi-protocol** - Abstract base class ready for npm/Maven/etc.
5. **Maintain hermetic builds** - Immutable artifact content and metadata, mutable discovery data
## Architecture
```
┌─────────────────────────────────────────────────────────────────────┐
│ FastAPI Application │
│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │
│ │ PyPI Proxy │ │ npm Proxy │ │ Maven Proxy │ │ (future) │ │
│ │ Router │ │ Router │ │ Router │ │ │ │
│ └──────┬──────┘ └──────┬──────┘ └──────┬──────┘ └─────────────┘ │
│ │ │ │ │
│ └────────────────┼────────────────┘ │
│ ▼ │
│ ┌───────────────────────┐ │
│ │ PackageProxyBase │ ← Abstract base class │
│ │ - check_cache() │ │
│ │ - fetch_upstream() │ │
│ │ - store_artifact() │ │
│ │ - serve_artifact() │ │
│ └───────────┬───────────┘ │
│ │ │
│ ┌────────────────┼────────────────┐ │
│ ▼ ▼ ▼ │
│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │
│ │ HttpClient │ │ CacheService│ │ ThreadPool │ │
│ │ Manager │ │ (Redis) │ │ Executor │ │
│ └─────────────┘ └─────────────┘ └─────────────┘ │
│ │ │ │ │
└─────────┼────────────────┼────────────────┼──────────────────────────┘
▼ ▼ ▼
┌──────────┐ ┌──────────┐ ┌──────────────┐
│ Upstream │ │ Redis │ │ S3/MinIO │
│ Sources │ │ │ │ │
└──────────┘ └──────────┘ └──────────────┘
```
## Components
### 1. HttpClientManager
Manages httpx.AsyncClient pools with FastAPI lifespan integration.
**Features:**
- Default pool for general requests
- Per-upstream pools for sources needing specific config/auth
- Graceful shutdown drains in-flight requests
- Dedicated thread pool for blocking operations
**Configuration:**
```bash
ORCHARD_HTTP_MAX_CONNECTIONS=100 # Default pool size
ORCHARD_HTTP_KEEPALIVE_CONNECTIONS=20 # Keep-alive connections
ORCHARD_HTTP_CONNECT_TIMEOUT=30 # Connection timeout (seconds)
ORCHARD_HTTP_READ_TIMEOUT=60 # Read timeout (seconds)
ORCHARD_HTTP_WORKER_THREADS=32 # Thread pool size
```
**File:** `backend/app/http_client.py`
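For illustration, a minimal sketch of what such a manager could look like, assuming httpx for the pools and the environment variables above for sizing; the class and method names are placeholders, not the actual `http_client.py` contents:

```python
# Illustrative sketch only -- not the actual backend/app/http_client.py.
# Assumes httpx; class and method names are placeholders.
import os

import httpx


class HttpClientManager:
    """Default connection pool plus optional per-upstream pools."""

    def __init__(self) -> None:
        limits = httpx.Limits(
            max_connections=int(os.getenv("ORCHARD_HTTP_MAX_CONNECTIONS", "100")),
            max_keepalive_connections=int(os.getenv("ORCHARD_HTTP_KEEPALIVE_CONNECTIONS", "20")),
        )
        timeout = httpx.Timeout(
            connect=float(os.getenv("ORCHARD_HTTP_CONNECT_TIMEOUT", "30")),
            read=float(os.getenv("ORCHARD_HTTP_READ_TIMEOUT", "60")),
            write=60.0,
            pool=30.0,
        )
        self._default = httpx.AsyncClient(limits=limits, timeout=timeout)
        self._per_upstream: dict[str, httpx.AsyncClient] = {}

    def register_upstream(self, name: str, **client_kwargs) -> None:
        """Create a dedicated pool for an upstream that needs its own auth or limits."""
        self._per_upstream[name] = httpx.AsyncClient(**client_kwargs)

    def get_client(self, upstream_name: str | None = None) -> httpx.AsyncClient:
        if upstream_name is not None:
            return self._per_upstream.get(upstream_name, self._default)
        return self._default

    async def aclose(self) -> None:
        # Called from the shutdown path once the server has stopped accepting requests.
        for client in (self._default, *self._per_upstream.values()):
            await client.aclose()
```

A lifespan-style hook (sketched under the FastAPI Integration section below) would own creating and closing this manager.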
### 2. CacheService (Redis Layer)
Redis-backed caching with category-aware TTL and invalidation.
**Cache Categories:**
| Category | TTL | Invalidation | Purpose |
|----------|-----|--------------|---------|
| ARTIFACT_METADATA | Forever | Never (immutable) | Artifact info by SHA256 |
| ARTIFACT_DEPENDENCIES | Forever | Never (immutable) | Extracted deps by SHA256 |
| DEPENDENCY_RESOLUTION | Forever | Manual/refresh param | Resolution results |
| UPSTREAM_SOURCES | 1 hour | On DB change | Upstream config |
| PACKAGE_INDEX | 5 min | TTL only | PyPI/npm index pages |
| PACKAGE_VERSIONS | 5 min | TTL only | Version listings |
**Key format:** `orchard:{category}:{protocol}:{identifier}`
**Configuration:**
```bash
ORCHARD_REDIS_HOST=redis
ORCHARD_REDIS_PORT=6379
ORCHARD_REDIS_DB=0
ORCHARD_CACHE_TTL_INDEX=300 # Package index: 5 minutes
ORCHARD_CACHE_TTL_VERSIONS=300 # Version listings: 5 minutes
ORCHARD_CACHE_TTL_UPSTREAM=3600 # Upstream config: 1 hour
```
**File:** `backend/app/cache_service.py`
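As a sketch of the key format and per-category TTLs, assuming `redis.asyncio` from redis-py; the class and enum names are illustrative and only a few categories are shown:

```python
# Illustrative sketch of the category-aware key/TTL scheme; not the real cache_service.py.
import json
from enum import Enum

import redis.asyncio as redis

FOREVER = None  # no TTL: entry is immutable


class CacheCategory(str, Enum):
    ARTIFACT_METADATA = "artifact_metadata"
    PACKAGE_INDEX = "package_index"
    UPSTREAM_SOURCES = "upstream_sources"


TTL_SECONDS = {
    CacheCategory.ARTIFACT_METADATA: FOREVER,  # immutable, keyed by SHA256
    CacheCategory.PACKAGE_INDEX: 300,          # ORCHARD_CACHE_TTL_INDEX default
    CacheCategory.UPSTREAM_SOURCES: 3600,      # ORCHARD_CACHE_TTL_UPSTREAM default
}


class CacheService:
    def __init__(self, client: redis.Redis) -> None:
        self._redis = client

    @staticmethod
    def key(category: CacheCategory, protocol: str, identifier: str) -> str:
        # Documented format: orchard:{category}:{protocol}:{identifier}
        return f"orchard:{category.value}:{protocol}:{identifier}"

    async def get(self, category: CacheCategory, protocol: str, identifier: str):
        raw = await self._redis.get(self.key(category, protocol, identifier))
        return json.loads(raw) if raw is not None else None

    async def set(self, category: CacheCategory, protocol: str, identifier: str, value) -> None:
        await self._redis.set(
            self.key(category, protocol, identifier),
            json.dumps(value),
            ex=TTL_SECONDS[category],  # ex=None means "keep forever"
        )
```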
### 3. PackageProxyBase
Abstract base class defining the cache→fetch→store→serve flow.
**Abstract methods (protocol-specific):**
- `get_protocol_name()` - Return 'pypi', 'npm', 'maven'
- `get_system_project_name()` - Return '_pypi', '_npm'
- `rewrite_index_html()` - Rewrite upstream index to Orchard URLs
- `extract_metadata()` - Extract deps from package file
- `parse_package_url()` - Parse URL into package/version/filename
**Concrete methods (shared):**
- `serve_index()` - Serve package index with caching
- `serve_artifact()` - Full cache→fetch→store→serve flow
**File:** `backend/app/proxy_base.py`
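A schematic outline of the base class and the shared `serve_artifact()` flow follows; the hook signatures mirror the list above, but the bodies and the collaborator interfaces (`http`, `cache`, `repo`) are assumptions:

```python
# Schematic only -- method names match the design above, collaborators are assumed.
from abc import ABC, abstractmethod


class PackageProxyBase(ABC):
    """Shared cache -> fetch -> store -> serve flow; subclasses supply protocol details."""

    def __init__(self, http, cache, repo):
        self.http = http    # HttpClientManager (assumed)
        self.cache = cache  # CacheService (assumed)
        self.repo = repo    # ArtifactRepository (assumed)

    # Protocol-specific hooks
    @abstractmethod
    def get_protocol_name(self) -> str: ...        # 'pypi', 'npm', 'maven'

    @abstractmethod
    def get_system_project_name(self) -> str: ...  # '_pypi', '_npm'

    @abstractmethod
    def parse_package_url(self, url: str) -> tuple[str, str, str]:
        """Return (package, version, filename) for an upstream URL."""

    @abstractmethod
    async def extract_metadata(self, content: bytes) -> dict:
        """Extract dependency metadata from a downloaded package file."""

    # Shared flow
    async def serve_artifact(self, url: str) -> bytes:
        cached = await self.repo.get_cached_url_with_artifact(url)  # assumed: bytes | None
        if cached is not None:
            return cached

        resp = await self.http.get_client().get(url)
        resp.raise_for_status()
        content = resp.content

        package, version, filename = self.parse_package_url(url)
        await self.repo.get_or_create_artifact(
            protocol=self.get_protocol_name(),
            package=package,
            version=version,
            filename=filename,
            content=content,
            metadata=await self.extract_metadata(content),
        )
        return content
```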
### 4. ArtifactRepository (DB Optimization)
Optimized database operations eliminating N+1 queries.
**Key methods:**
- `get_or_create_artifact()` - Atomic upsert via ON CONFLICT
- `batch_upsert_dependencies()` - Single INSERT for all deps
- `get_cached_url_with_artifact()` - Joined query for cache lookup
**Query reduction:**
| Operation | Before | After |
|-----------|--------|-------|
| Cache hit check | 2 queries | 1 query (joined) |
| Store artifact | 3-4 queries | 1 query (upsert) |
| Store 50 deps | 50+ queries | 1 query (batch) |
**Configuration:**
```bash
ORCHARD_DATABASE_POOL_SIZE=20 # Base connections (up from 5)
ORCHARD_DATABASE_MAX_OVERFLOW=30 # Burst capacity (up from 10)
ORCHARD_DATABASE_POOL_TIMEOUT=30 # Wait timeout
ORCHARD_DATABASE_POOL_PRE_PING=false # Disable in prod for performance
```
**File:** `backend/app/db_utils.py`
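As an example of the single-statement dependency upsert, a sketch using SQLAlchemy's PostgreSQL `INSERT ... ON CONFLICT`; the `Dependency` model, its columns, and its unique constraint are assumptions:

```python
# Sketch of the one-round-trip dependency upsert; the Dependency model and its
# unique constraint on (artifact_id, project, package) are assumptions.
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.ext.asyncio import AsyncSession

from app.models import Dependency  # assumed ORM model


async def batch_upsert_dependencies(
    session: AsyncSession, artifact_id: str, deps: list[dict]
) -> None:
    """Insert every extracted dependency for an artifact with a single INSERT."""
    if not deps:
        return
    stmt = insert(Dependency).values(
        [
            {
                "artifact_id": artifact_id,
                "project": d["project"],
                "package": d["package"],
                "constraint": d.get("constraint"),
            }
            for d in deps
        ]
    )
    # Dependencies for a given artifact are immutable, so conflicts are simply ignored.
    stmt = stmt.on_conflict_do_nothing(index_elements=["artifact_id", "project", "package"])
    await session.execute(stmt)
```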
### 5. Dependency Resolution Caching
Cache resolution results for ensure files and API queries.
**Cache key:** Hash of (artifact_id, max_depth, include_optional)
**Invalidation:** Manual only (immutable artifact deps mean cached resolutions stay valid)
**Refresh:** `?refresh=true` parameter forces fresh resolution
**File:** Updates to `backend/app/dependencies.py`
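A sketch of how the cache key and refresh flag could fit together; the key layout reuses the `orchard:{category}:...` format, and the injected `cache` and `resolver` collaborators are assumptions:

```python
# Sketch only; `cache` (async get/set by raw key) and `resolver` are assumed collaborators.
import hashlib
import json


def resolution_cache_key(artifact_id: str, max_depth: int, include_optional: bool) -> str:
    """Deterministic key derived from the resolution inputs."""
    payload = json.dumps(
        {"artifact_id": artifact_id, "max_depth": max_depth, "include_optional": include_optional},
        sort_keys=True,
    )
    digest = hashlib.sha256(payload.encode()).hexdigest()
    return f"orchard:dependency_resolution:any:{digest}"


async def resolve_with_cache(cache, resolver, artifact_id: str, max_depth: int = 5,
                             include_optional: bool = False, refresh: bool = False) -> dict:
    key = resolution_cache_key(artifact_id, max_depth, include_optional)
    if not refresh:                     # ?refresh=true bypasses the cached result
        cached = await cache.get(key)
        if cached is not None:
            return cached
    result = await resolver(artifact_id, max_depth, include_optional)
    await cache.set(key, result)        # no TTL: immutable deps keep cached resolutions valid
    return result
```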
### 6. FastAPI Integration
Lifespan-managed infrastructure with dependency injection.
**Startup:**
1. Initialize HttpClientManager (connection pools)
2. Initialize CacheService (Redis connection)
3. Load upstream source configs
**Shutdown:**
1. Drain in-flight HTTP requests
2. Close Redis connections
3. Shutdown thread pool
**Health endpoint additions:**
- Database connection status
- Redis ping
- HTTP pool active/max connections
- Thread pool active/max workers
**File:** Updates to `backend/app/main.py`
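Tying the pieces together, a sketch of the startup/shutdown ordering; a bare `httpx.AsyncClient` stands in for the manager sketched earlier, and the state attribute names, health path, and sizing are assumptions:

```python
# Sketch of lifespan ordering; state names, paths, and sizing are assumptions.
from concurrent.futures import ThreadPoolExecutor
from contextlib import asynccontextmanager

import httpx
import redis.asyncio as redis
from fastapi import FastAPI


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup: connection pools, then cache, then anything that depends on them.
    app.state.http = httpx.AsyncClient()                      # stands in for HttpClientManager
    app.state.redis = redis.Redis(host="redis", port=6379, db=0)
    app.state.executor = ThreadPoolExecutor(max_workers=32)   # ORCHARD_HTTP_WORKER_THREADS
    try:
        yield
    finally:
        # Shutdown in reverse order: drain HTTP, close Redis, stop worker threads.
        await app.state.http.aclose()
        await app.state.redis.aclose()  # close() on older redis-py releases
        app.state.executor.shutdown(wait=True)


app = FastAPI(lifespan=lifespan)


@app.get("/healthz")  # illustrative path; the real health endpoint lives in routes.py
async def healthz() -> dict:
    redis_ok = bool(await app.state.redis.ping())
    return {"redis": redis_ok, "thread_pool_max_workers": 32}
```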
## Files Summary
**New files:**
- `backend/app/http_client.py` - HttpClientManager
- `backend/app/cache_service.py` - CacheService
- `backend/app/proxy_base.py` - PackageProxyBase
- `backend/app/db_utils.py` - ArtifactRepository
**Modified files:**
- `backend/app/config.py` - New settings
- `backend/app/main.py` - Lifespan integration
- `backend/app/pypi_proxy.py` - Refactor to use base class
- `backend/app/dependencies.py` - Resolution caching
- `backend/app/routes.py` - Health endpoint, DI
## Hermetic Build Guarantees
**Immutable (cached forever):**
- Artifact content (by SHA256)
- Extracted dependencies for a specific artifact
- Dependency resolution results
**Mutable (TTL + event invalidation):**
- Package index listings
- Version discovery
- Upstream source configuration
Once an artifact is cached with SHA256 `abc123` and dependencies extracted, that data never changes.
## Performance Expectations
| Metric | Before | After |
|--------|--------|-------|
| HTTP connection setup | Per request (~100-500ms) | Pooled (~5ms) |
| Cache hit (index page) | N/A | ~5ms (Redis) |
| Store 50 dependencies | ~500ms (50 queries) | ~10ms (1 query) |
| Dependency resolution (cached) | N/A | ~5ms |
| Concurrent request capacity | ~15 (DB pool) | ~50 (configurable) |
## Testing Requirements
- Unit tests for each new component
- Integration tests for full proxy flow
- Load tests to verify pool sizing
- Cache hit/miss verification tests

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -12,12 +12,9 @@
"test:coverage": "vitest run --coverage" "test:coverage": "vitest run --coverage"
}, },
"dependencies": { "dependencies": {
"@types/dagre": "^0.7.53",
"dagre": "^0.8.5",
"react": "^18.2.0", "react": "^18.2.0",
"react-dom": "^18.2.0", "react-dom": "^18.2.0",
"react-router-dom": "6.28.0", "react-router-dom": "6.28.0"
"reactflow": "^11.11.4"
}, },
"devDependencies": { "devDependencies": {
"@testing-library/jest-dom": "^6.4.2", "@testing-library/jest-dom": "^6.4.2",
@@ -37,15 +34,6 @@
"ufo": "1.5.4", "ufo": "1.5.4",
"rollup": "4.52.4", "rollup": "4.52.4",
"caniuse-lite": "1.0.30001692", "caniuse-lite": "1.0.30001692",
"baseline-browser-mapping": "2.9.5", "baseline-browser-mapping": "2.9.5"
"lodash": "4.17.21",
"electron-to-chromium": "1.5.72",
"@babel/core": "7.26.0",
"@babel/traverse": "7.26.4",
"@babel/types": "7.26.3",
"@babel/compat-data": "7.26.3",
"@babel/parser": "7.26.3",
"@babel/generator": "7.26.3",
"@babel/code-frame": "7.26.2"
} }
} }


@@ -1,11 +1,14 @@
import { import {
Project, Project,
Package, Package,
Tag,
TagDetail,
Artifact,
ArtifactDetail, ArtifactDetail,
PackageArtifact,
UploadResponse, UploadResponse,
PaginatedResponse, PaginatedResponse,
ListParams, ListParams,
TagListParams,
PackageListParams, PackageListParams,
ArtifactListParams, ArtifactListParams,
ProjectListParams, ProjectListParams,
@@ -43,6 +46,8 @@ import {
UpstreamSourceCreate, UpstreamSourceCreate,
UpstreamSourceUpdate, UpstreamSourceUpdate,
UpstreamSourceTestResult, UpstreamSourceTestResult,
CacheSettings,
CacheSettingsUpdate,
} from './types'; } from './types';
const API_BASE = '/api/v1'; const API_BASE = '/api/v1';
@@ -75,13 +80,7 @@ export class ForbiddenError extends ApiError {
async function handleResponse<T>(response: Response): Promise<T> { async function handleResponse<T>(response: Response): Promise<T> {
if (!response.ok) { if (!response.ok) {
const error = await response.json().catch(() => ({ detail: 'Unknown error' })); const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
// Handle detail as string or object (backend may return structured errors) const message = error.detail || `HTTP ${response.status}`;
let message: string;
if (typeof error.detail === 'object') {
message = JSON.stringify(error.detail);
} else {
message = error.detail || `HTTP ${response.status}`;
}
if (response.status === 401) { if (response.status === 401) {
throw new UnauthorizedError(message); throw new UnauthorizedError(message);
@@ -237,6 +236,32 @@ export async function createPackage(projectName: string, data: { name: string; d
return handleResponse<Package>(response); return handleResponse<Package>(response);
} }
// Tag API
export async function listTags(projectName: string, packageName: string, params: TagListParams = {}): Promise<PaginatedResponse<TagDetail>> {
const query = buildQueryString(params as Record<string, unknown>);
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags${query}`);
return handleResponse<PaginatedResponse<TagDetail>>(response);
}
export async function listTagsSimple(projectName: string, packageName: string, params: TagListParams = {}): Promise<TagDetail[]> {
const data = await listTags(projectName, packageName, params);
return data.items;
}
export async function getTag(projectName: string, packageName: string, tagName: string): Promise<TagDetail> {
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags/${tagName}`);
return handleResponse<TagDetail>(response);
}
export async function createTag(projectName: string, packageName: string, data: { name: string; artifact_id: string }): Promise<Tag> {
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data),
});
return handleResponse<Tag>(response);
}
// Artifact API // Artifact API
export async function getArtifact(artifactId: string): Promise<ArtifactDetail> { export async function getArtifact(artifactId: string): Promise<ArtifactDetail> {
const response = await fetch(`${API_BASE}/artifact/${artifactId}`); const response = await fetch(`${API_BASE}/artifact/${artifactId}`);
@@ -247,10 +272,10 @@ export async function listPackageArtifacts(
projectName: string, projectName: string,
packageName: string, packageName: string,
params: ArtifactListParams = {} params: ArtifactListParams = {}
): Promise<PaginatedResponse<PackageArtifact>> { ): Promise<PaginatedResponse<Artifact & { tags: string[] }>> {
const query = buildQueryString(params as Record<string, unknown>); const query = buildQueryString(params as Record<string, unknown>);
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/artifacts${query}`); const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/artifacts${query}`);
return handleResponse<PaginatedResponse<PackageArtifact>>(response); return handleResponse<PaginatedResponse<Artifact & { tags: string[] }>>(response);
} }
// Upload // Upload
@@ -258,10 +283,14 @@ export async function uploadArtifact(
projectName: string, projectName: string,
packageName: string, packageName: string,
file: File, file: File,
tag?: string,
version?: string version?: string
): Promise<UploadResponse> { ): Promise<UploadResponse> {
const formData = new FormData(); const formData = new FormData();
formData.append('file', file); formData.append('file', file);
if (tag) {
formData.append('tag', tag);
}
if (version) { if (version) {
formData.append('version', version); formData.append('version', version);
} }
@@ -720,3 +749,20 @@ export async function testUpstreamSource(id: string): Promise<UpstreamSourceTest
return handleResponse<UpstreamSourceTestResult>(response); return handleResponse<UpstreamSourceTestResult>(response);
} }
// Cache Settings Admin API
export async function getCacheSettings(): Promise<CacheSettings> {
const response = await fetch(`${API_BASE}/admin/cache-settings`, {
credentials: 'include',
});
return handleResponse<CacheSettings>(response);
}
export async function updateCacheSettings(data: CacheSettingsUpdate): Promise<CacheSettings> {
const response = await fetch(`${API_BASE}/admin/cache-settings`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data),
credentials: 'include',
});
return handleResponse<CacheSettings>(response);
}


@@ -55,10 +55,6 @@
font-size: 0.8125rem; font-size: 0.8125rem;
} }
.missing-count {
color: #f59e0b;
}
.close-btn { .close-btn {
background: transparent; background: transparent;
border: none; border: none;
@@ -76,115 +72,171 @@
color: var(--text-primary); color: var(--text-primary);
} }
.dependency-graph-toolbar {
display: flex;
align-items: center;
gap: 8px;
padding: 12px 20px;
border-bottom: 1px solid var(--border-primary);
background: var(--bg-secondary);
}
.zoom-level {
margin-left: auto;
font-size: 0.8125rem;
color: var(--text-muted);
font-family: 'JetBrains Mono', monospace;
}
.dependency-graph-container { .dependency-graph-container {
flex: 1; flex: 1;
overflow: hidden; overflow: hidden;
position: relative; position: relative;
background: var(--bg-primary); background:
linear-gradient(90deg, var(--border-primary) 1px, transparent 1px),
linear-gradient(var(--border-primary) 1px, transparent 1px);
background-size: 20px 20px;
background-position: center center;
} }
/* React Flow Customization */ .graph-canvas {
.react-flow__background { padding: 40px;
background-color: var(--bg-primary) !important; min-width: 100%;
min-height: 100%;
transform-origin: center center;
transition: transform 0.1s ease-out;
} }
.react-flow__controls { /* Graph Nodes */
background: var(--bg-tertiary); .graph-node-container {
border: 1px solid var(--border-primary); display: flex;
border-radius: var(--radius-md); flex-direction: column;
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3); align-items: flex-start;
} }
.react-flow__controls-button { .graph-node {
background: var(--bg-tertiary);
border: none;
border-bottom: 1px solid var(--border-primary);
color: var(--text-secondary);
width: 28px;
height: 28px;
}
.react-flow__controls-button:hover {
background: var(--bg-hover);
color: var(--text-primary);
}
.react-flow__controls-button:last-child {
border-bottom: none;
}
.react-flow__controls-button svg {
fill: currentColor;
}
.react-flow__attribution {
background: transparent !important;
}
.react-flow__attribution a {
color: var(--text-muted) !important;
font-size: 10px;
}
/* Custom Flow Nodes */
.flow-node {
background: var(--bg-tertiary); background: var(--bg-tertiary);
border: 2px solid var(--border-primary); border: 2px solid var(--border-primary);
border-radius: var(--radius-md); border-radius: var(--radius-md);
padding: 12px 16px; padding: 12px 16px;
min-width: 160px; min-width: 200px;
cursor: pointer; cursor: pointer;
transition: all var(--transition-fast); transition: all var(--transition-fast);
text-align: center; position: relative;
} }
.flow-node:hover { .graph-node:hover {
border-color: var(--accent-primary); border-color: var(--accent-primary);
box-shadow: 0 4px 12px rgba(16, 185, 129, 0.2); box-shadow: 0 4px 12px rgba(16, 185, 129, 0.2);
} }
.flow-node--root { .graph-node--root {
background: linear-gradient(135deg, rgba(16, 185, 129, 0.15) 0%, rgba(5, 150, 105, 0.15) 100%); background: linear-gradient(135deg, rgba(16, 185, 129, 0.15) 0%, rgba(5, 150, 105, 0.15) 100%);
border-color: var(--accent-primary); border-color: var(--accent-primary);
} }
.flow-node__name { .graph-node--hovered {
transform: scale(1.02);
}
.graph-node__header {
display: flex;
align-items: center;
gap: 8px;
margin-bottom: 4px;
}
.graph-node__name {
font-weight: 600; font-weight: 600;
color: var(--accent-primary); color: var(--accent-primary);
font-family: 'JetBrains Mono', monospace; font-family: 'JetBrains Mono', monospace;
font-size: 0.8125rem; font-size: 0.875rem;
margin-bottom: 4px;
word-break: break-word;
} }
.flow-node__details { .graph-node__toggle {
background: var(--bg-hover);
border: 1px solid var(--border-primary);
border-radius: 4px;
width: 20px;
height: 20px;
display: flex; display: flex;
align-items: center; align-items: center;
justify-content: center; justify-content: center;
gap: 8px; cursor: pointer;
font-size: 0.6875rem; font-size: 0.875rem;
color: var(--text-secondary);
font-weight: 600;
margin-left: auto;
}
.graph-node__toggle:hover {
background: var(--bg-tertiary);
color: var(--text-primary);
}
.graph-node__details {
display: flex;
align-items: center;
gap: 12px;
font-size: 0.75rem;
color: var(--text-muted); color: var(--text-muted);
} }
.flow-node__version { .graph-node__version {
font-family: 'JetBrains Mono', monospace; font-family: 'JetBrains Mono', monospace;
color: var(--text-secondary); color: var(--text-secondary);
} }
.flow-node__size { .graph-node__size {
color: var(--text-muted); color: var(--text-muted);
} }
/* Flow Handles (connection points) */ /* Graph Children / Tree Structure */
.flow-handle { .graph-children {
width: 8px !important; display: flex;
height: 8px !important; padding-left: 24px;
background: var(--border-primary) !important; margin-top: 8px;
border: 2px solid var(--bg-tertiary) !important; position: relative;
} }
.flow-node:hover .flow-handle { .graph-connector {
background: var(--accent-primary) !important; position: absolute;
left: 12px;
top: 0;
bottom: 50%;
width: 12px;
border-left: 2px solid var(--border-primary);
border-bottom: 2px solid var(--border-primary);
border-bottom-left-radius: 8px;
}
.graph-children-list {
display: flex;
flex-direction: column;
gap: 8px;
position: relative;
}
.graph-children-list::before {
content: '';
position: absolute;
left: -12px;
top: 20px;
bottom: 20px;
border-left: 2px solid var(--border-primary);
}
.graph-children-list > .graph-node-container {
position: relative;
}
.graph-children-list > .graph-node-container::before {
content: '';
position: absolute;
left: -12px;
top: 20px;
width: 12px;
border-top: 2px solid var(--border-primary);
} }
/* Loading, Error, Empty States */ /* Loading, Error, Empty States */
@@ -227,61 +279,39 @@
line-height: 1.5; line-height: 1.5;
} }
/* Missing Dependencies */ /* Tooltip */
.missing-dependencies { .graph-tooltip {
border-top: 1px solid var(--border-primary); position: fixed;
padding: 16px 20px; bottom: 24px;
background: rgba(245, 158, 11, 0.05); left: 50%;
max-height: 200px; transform: translateX(-50%);
overflow-y: auto;
}
.missing-dependencies h3 {
margin: 0 0 8px 0;
font-size: 0.875rem;
font-weight: 600;
color: #f59e0b;
}
.missing-hint {
margin: 0 0 12px 0;
font-size: 0.75rem;
color: var(--text-muted);
}
.missing-list {
list-style: none;
padding: 0;
margin: 0;
display: flex;
flex-wrap: wrap;
gap: 8px;
}
.missing-item {
display: inline-flex;
align-items: center;
gap: 4px;
background: var(--bg-tertiary); background: var(--bg-tertiary);
border: 1px solid rgba(245, 158, 11, 0.3); border: 1px solid var(--border-primary);
border-radius: var(--radius-sm); border-radius: var(--radius-md);
padding: 4px 8px; padding: 12px 16px;
font-size: 0.75rem; font-size: 0.8125rem;
box-shadow: 0 8px 24px rgba(0, 0, 0, 0.4);
z-index: 1001;
} }
.missing-name { .graph-tooltip strong {
display: block;
color: var(--accent-primary);
font-family: 'JetBrains Mono', monospace; font-family: 'JetBrains Mono', monospace;
margin-bottom: 4px;
}
.graph-tooltip div {
color: var(--text-secondary); color: var(--text-secondary);
margin-top: 2px;
} }
.missing-constraint { .tooltip-hint {
margin-top: 8px;
padding-top: 8px;
border-top: 1px solid var(--border-primary);
color: var(--text-muted); color: var(--text-muted);
font-family: 'JetBrains Mono', monospace; font-size: 0.75rem;
}
.missing-required-by {
color: var(--text-muted);
font-size: 0.6875rem;
} }
/* Responsive */ /* Responsive */


@@ -1,19 +1,5 @@
import { useState, useEffect, useCallback, useMemo } from 'react'; import { useState, useEffect, useCallback, useRef } from 'react';
import { useNavigate } from 'react-router-dom'; import { useNavigate } from 'react-router-dom';
import ReactFlow, {
Node,
Edge,
Controls,
Background,
useNodesState,
useEdgesState,
MarkerType,
NodeProps,
Handle,
Position,
} from 'reactflow';
import dagre from 'dagre';
import 'reactflow/dist/style.css';
import { ResolvedArtifact, DependencyResolutionResponse, Dependency } from '../types'; import { ResolvedArtifact, DependencyResolutionResponse, Dependency } from '../types';
import { resolveDependencies, getArtifactDependencies } from '../api'; import { resolveDependencies, getArtifactDependencies } from '../api';
import './DependencyGraph.css'; import './DependencyGraph.css';
@@ -25,14 +11,15 @@ interface DependencyGraphProps {
onClose: () => void; onClose: () => void;
} }
interface NodeData { interface GraphNode {
label: string; id: string;
project: string; project: string;
package: string; package: string;
version: string | null; version: string | null;
size: number; size: number;
isRoot: boolean; depth: number;
onNavigate: (project: string, pkg: string) => void; children: GraphNode[];
isRoot?: boolean;
} }
function formatBytes(bytes: number): string { function formatBytes(bytes: number): string {
@@ -43,89 +30,29 @@ function formatBytes(bytes: number): string {
return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i]; return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
} }
// Custom node component
function DependencyNode({ data }: NodeProps<NodeData>) {
return (
<div
className={`flow-node ${data.isRoot ? 'flow-node--root' : ''}`}
onClick={() => data.onNavigate(data.project, data.package)}
>
<Handle type="target" position={Position.Top} className="flow-handle" />
<div className="flow-node__name">{data.package}</div>
<div className="flow-node__details">
{data.version && <span className="flow-node__version">{data.version}</span>}
<span className="flow-node__size">{formatBytes(data.size)}</span>
</div>
<Handle type="source" position={Position.Bottom} className="flow-handle" />
</div>
);
}
const nodeTypes = { dependency: DependencyNode };
// Dagre layout function
function getLayoutedElements(
nodes: Node<NodeData>[],
edges: Edge[],
direction: 'TB' | 'LR' = 'TB'
) {
const dagreGraph = new dagre.graphlib.Graph();
dagreGraph.setDefaultEdgeLabel(() => ({}));
const nodeWidth = 180;
const nodeHeight = 60;
dagreGraph.setGraph({ rankdir: direction, nodesep: 50, ranksep: 80 });
nodes.forEach((node) => {
dagreGraph.setNode(node.id, { width: nodeWidth, height: nodeHeight });
});
edges.forEach((edge) => {
dagreGraph.setEdge(edge.source, edge.target);
});
dagre.layout(dagreGraph);
const layoutedNodes = nodes.map((node) => {
const nodeWithPosition = dagreGraph.node(node.id);
return {
...node,
position: {
x: nodeWithPosition.x - nodeWidth / 2,
y: nodeWithPosition.y - nodeHeight / 2,
},
};
});
return { nodes: layoutedNodes, edges };
}
function DependencyGraph({ projectName, packageName, tagName, onClose }: DependencyGraphProps) { function DependencyGraph({ projectName, packageName, tagName, onClose }: DependencyGraphProps) {
const navigate = useNavigate(); const navigate = useNavigate();
const containerRef = useRef<HTMLDivElement>(null);
const [loading, setLoading] = useState(true); const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null); const [error, setError] = useState<string | null>(null);
const [resolution, setResolution] = useState<DependencyResolutionResponse | null>(null); const [resolution, setResolution] = useState<DependencyResolutionResponse | null>(null);
const [nodes, setNodes, onNodesChange] = useNodesState<NodeData>([]); const [graphRoot, setGraphRoot] = useState<GraphNode | null>(null);
const [edges, setEdges, onEdgesChange] = useEdgesState([]); const [hoveredNode, setHoveredNode] = useState<GraphNode | null>(null);
const [zoom, setZoom] = useState(1);
const handleNavigate = useCallback((project: string, pkg: string) => { const [pan, setPan] = useState({ x: 0, y: 0 });
navigate(`/project/${project}/${pkg}`); const [isDragging, setIsDragging] = useState(false);
onClose(); const [dragStart, setDragStart] = useState({ x: 0, y: 0 });
}, [navigate, onClose]); const [collapsedNodes, setCollapsedNodes] = useState<Set<string>>(new Set());
// Build graph structure from resolution data // Build graph structure from resolution data
const buildFlowGraph = useCallback(async ( const buildGraph = useCallback(async (resolutionData: DependencyResolutionResponse) => {
resolutionData: DependencyResolutionResponse,
onNavigate: (project: string, pkg: string) => void
) => {
const artifactMap = new Map<string, ResolvedArtifact>(); const artifactMap = new Map<string, ResolvedArtifact>();
resolutionData.resolved.forEach(artifact => { resolutionData.resolved.forEach(artifact => {
artifactMap.set(artifact.artifact_id, artifact); artifactMap.set(artifact.artifact_id, artifact);
}); });
// Fetch dependencies for each artifact // Fetch dependencies for each artifact to build the tree
const depsMap = new Map<string, Dependency[]>(); const depsMap = new Map<string, Dependency[]>();
for (const artifact of resolutionData.resolved) { for (const artifact of resolutionData.resolved) {
@@ -137,82 +64,50 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
} }
} }
// Find the root artifact // Find the root artifact (the requested one)
const rootArtifact = resolutionData.resolved.find( const rootArtifact = resolutionData.resolved.find(
a => a.project === resolutionData.requested.project && a => a.project === resolutionData.requested.project &&
a.package === resolutionData.requested.package a.package === resolutionData.requested.package
); );
if (!rootArtifact) { if (!rootArtifact) {
return { nodes: [], edges: [] }; return null;
} }
const flowNodes: Node<NodeData>[] = []; // Build tree recursively
const flowEdges: Edge[] = [];
const visited = new Set<string>(); const visited = new Set<string>();
const nodeIdMap = new Map<string, string>(); // artifact_id -> node id
// Build nodes and edges recursively
const processNode = (artifact: ResolvedArtifact, isRoot: boolean) => {
if (visited.has(artifact.artifact_id)) {
return nodeIdMap.get(artifact.artifact_id);
}
const buildNode = (artifact: ResolvedArtifact, depth: number): GraphNode => {
const nodeId = `${artifact.project}/${artifact.package}`;
visited.add(artifact.artifact_id); visited.add(artifact.artifact_id);
const nodeId = `node-${flowNodes.length}`;
nodeIdMap.set(artifact.artifact_id, nodeId);
flowNodes.push({
id: nodeId,
type: 'dependency',
position: { x: 0, y: 0 }, // Will be set by dagre
data: {
label: `${artifact.project}/${artifact.package}`,
project: artifact.project,
package: artifact.package,
version: artifact.version,
size: artifact.size,
isRoot,
onNavigate,
},
});
const deps = depsMap.get(artifact.artifact_id) || []; const deps = depsMap.get(artifact.artifact_id) || [];
const children: GraphNode[] = [];
for (const dep of deps) { for (const dep of deps) {
// Find the resolved artifact for this dependency
const childArtifact = resolutionData.resolved.find( const childArtifact = resolutionData.resolved.find(
a => a.project === dep.project && a.package === dep.package a => a.project === dep.project && a.package === dep.package
); );
if (childArtifact) { if (childArtifact && !visited.has(childArtifact.artifact_id)) {
const childNodeId = processNode(childArtifact, false); children.push(buildNode(childArtifact, depth + 1));
if (childNodeId) {
flowEdges.push({
id: `edge-${nodeId}-${childNodeId}`,
source: nodeId,
target: childNodeId,
markerEnd: {
type: MarkerType.ArrowClosed,
width: 15,
height: 15,
color: 'var(--accent-primary)',
},
style: {
stroke: 'var(--border-primary)',
strokeWidth: 2,
},
});
}
} }
} }
return nodeId; return {
id: nodeId,
project: artifact.project,
package: artifact.package,
version: artifact.version || artifact.tag,
size: artifact.size,
depth,
children,
isRoot: depth === 0,
};
}; };
processNode(rootArtifact, true); return buildNode(rootArtifact, 0);
// Apply dagre layout
return getLayoutedElements(flowNodes, flowEdges);
}, []); }, []);
useEffect(() => { useEffect(() => {
@@ -222,21 +117,13 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
try { try {
const result = await resolveDependencies(projectName, packageName, tagName); const result = await resolveDependencies(projectName, packageName, tagName);
// If only the root package (no dependencies) and no missing deps, close the modal
const hasDeps = result.artifact_count > 1 || (result.missing && result.missing.length > 0);
if (!hasDeps) {
onClose();
return;
}
setResolution(result); setResolution(result);
const { nodes: layoutedNodes, edges: layoutedEdges } = await buildFlowGraph(result, handleNavigate); const graph = await buildGraph(result);
setNodes(layoutedNodes); setGraphRoot(graph);
setEdges(layoutedEdges);
} catch (err) { } catch (err) {
if (err instanceof Error) { if (err instanceof Error) {
// Check if it's a resolution error
try { try {
const errorData = JSON.parse(err.message); const errorData = JSON.parse(err.message);
if (errorData.error === 'circular_dependency') { if (errorData.error === 'circular_dependency') {
@@ -258,9 +145,95 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
} }
loadData(); loadData();
}, [projectName, packageName, tagName, buildFlowGraph, handleNavigate, onClose, setNodes, setEdges]); }, [projectName, packageName, tagName, buildGraph]);
const defaultViewport = useMemo(() => ({ x: 50, y: 50, zoom: 0.8 }), []); const handleNodeClick = (node: GraphNode) => {
navigate(`/project/${node.project}/${node.package}`);
onClose();
};
const handleNodeToggle = (node: GraphNode, e: React.MouseEvent) => {
e.stopPropagation();
setCollapsedNodes(prev => {
const next = new Set(prev);
if (next.has(node.id)) {
next.delete(node.id);
} else {
next.add(node.id);
}
return next;
});
};
const handleWheel = (e: React.WheelEvent) => {
e.preventDefault();
const delta = e.deltaY > 0 ? -0.1 : 0.1;
setZoom(z => Math.max(0.25, Math.min(2, z + delta)));
};
const handleMouseDown = (e: React.MouseEvent) => {
if (e.target === containerRef.current || (e.target as HTMLElement).classList.contains('graph-canvas')) {
setIsDragging(true);
setDragStart({ x: e.clientX - pan.x, y: e.clientY - pan.y });
}
};
const handleMouseMove = (e: React.MouseEvent) => {
if (isDragging) {
setPan({ x: e.clientX - dragStart.x, y: e.clientY - dragStart.y });
}
};
const handleMouseUp = () => {
setIsDragging(false);
};
const resetView = () => {
setZoom(1);
setPan({ x: 0, y: 0 });
};
const renderNode = (node: GraphNode, index: number = 0): JSX.Element => {
const isCollapsed = collapsedNodes.has(node.id);
const hasChildren = node.children.length > 0;
return (
<div key={`${node.id}-${index}`} className="graph-node-container">
<div
className={`graph-node ${node.isRoot ? 'graph-node--root' : ''} ${hoveredNode?.id === node.id ? 'graph-node--hovered' : ''}`}
onClick={() => handleNodeClick(node)}
onMouseEnter={() => setHoveredNode(node)}
onMouseLeave={() => setHoveredNode(null)}
>
<div className="graph-node__header">
<span className="graph-node__name">{node.project}/{node.package}</span>
{hasChildren && (
<button
className="graph-node__toggle"
onClick={(e) => handleNodeToggle(node, e)}
title={isCollapsed ? 'Expand' : 'Collapse'}
>
{isCollapsed ? '+' : '-'}
</button>
)}
</div>
<div className="graph-node__details">
{node.version && <span className="graph-node__version">@ {node.version}</span>}
<span className="graph-node__size">{formatBytes(node.size)}</span>
</div>
</div>
{hasChildren && !isCollapsed && (
<div className="graph-children">
<div className="graph-connector"></div>
<div className="graph-children-list">
{node.children.map((child, i) => renderNode(child, i))}
</div>
</div>
)}
</div>
);
};
return ( return (
<div className="dependency-graph-modal" onClick={onClose}> <div className="dependency-graph-modal" onClick={onClose}>
@@ -271,11 +244,7 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
<span>{projectName}/{packageName} @ {tagName}</span> <span>{projectName}/{packageName} @ {tagName}</span>
{resolution && ( {resolution && (
<span className="graph-stats"> <span className="graph-stats">
{resolution.artifact_count} cached {resolution.artifact_count} packages {formatBytes(resolution.total_size)} total
{resolution.missing && resolution.missing.length > 0 && (
<span className="missing-count"> {resolution.missing.length} not cached</span>
)}
{formatBytes(resolution.total_size)} total
</span> </span>
)} )}
</div> </div>
@@ -287,7 +256,28 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
</button> </button>
</div> </div>
<div className="dependency-graph-container"> <div className="dependency-graph-toolbar">
<button className="btn btn-secondary btn-small" onClick={() => setZoom(z => Math.min(2, z + 0.25))}>
Zoom In
</button>
<button className="btn btn-secondary btn-small" onClick={() => setZoom(z => Math.max(0.25, z - 0.25))}>
Zoom Out
</button>
<button className="btn btn-secondary btn-small" onClick={resetView}>
Reset View
</button>
<span className="zoom-level">{Math.round(zoom * 100)}%</span>
</div>
<div
ref={containerRef}
className="dependency-graph-container"
onWheel={handleWheel}
onMouseDown={handleMouseDown}
onMouseMove={handleMouseMove}
onMouseUp={handleMouseUp}
onMouseLeave={handleMouseUp}
>
{loading ? ( {loading ? (
<div className="graph-loading"> <div className="graph-loading">
<div className="spinner"></div> <div className="spinner"></div>
@@ -302,41 +292,27 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
</svg> </svg>
<p>{error}</p> <p>{error}</p>
</div> </div>
) : nodes.length > 0 ? ( ) : graphRoot ? (
<ReactFlow <div
nodes={nodes} className="graph-canvas"
edges={edges} style={{
onNodesChange={onNodesChange} transform: `translate(${pan.x}px, ${pan.y}px) scale(${zoom})`,
onEdgesChange={onEdgesChange} cursor: isDragging ? 'grabbing' : 'grab',
nodeTypes={nodeTypes} }}
defaultViewport={defaultViewport}
fitView
fitViewOptions={{ padding: 0.2 }}
minZoom={0.1}
maxZoom={2}
attributionPosition="bottom-left"
> >
<Controls /> {renderNode(graphRoot)}
<Background color="var(--border-primary)" gap={20} /> </div>
</ReactFlow>
) : ( ) : (
<div className="graph-empty">No dependencies to display</div> <div className="graph-empty">No dependencies to display</div>
)} )}
</div> </div>
{resolution && resolution.missing && resolution.missing.length > 0 && ( {hoveredNode && (
<div className="missing-dependencies"> <div className="graph-tooltip">
<h3>Not Cached ({resolution.missing.length})</h3> <strong>{hoveredNode.project}/{hoveredNode.package}</strong>
<p className="missing-hint">These dependencies are referenced but not yet cached on the server.</p> {hoveredNode.version && <div>Version: {hoveredNode.version}</div>}
<ul className="missing-list"> <div>Size: {formatBytes(hoveredNode.size)}</div>
{resolution.missing.map((dep, i) => ( <div className="tooltip-hint">Click to navigate</div>
<li key={i} className="missing-item">
<span className="missing-name">{dep.project}/{dep.package}</span>
{dep.constraint && <span className="missing-constraint">@{dep.constraint}</span>}
{dep.required_by && <span className="missing-required-by"> {dep.required_by}</span>}
</li>
))}
</ul>
</div> </div>
)} )}
</div> </div>


@@ -290,25 +290,20 @@
color: var(--error-color, #dc3545); color: var(--error-color, #dc3545);
} }
/* Progress Bar - scoped to upload component */ /* Progress Bar */
.drag-drop-upload .progress-bar, .progress-bar {
.upload-queue .progress-bar {
height: 8px; height: 8px;
background: var(--border-color, #ddd); background: var(--border-color, #ddd);
border-radius: 4px; border-radius: 4px;
overflow: hidden; overflow: hidden;
width: 100%;
max-width: 100%;
} }
.drag-drop-upload .progress-bar--small, .progress-bar--small {
.upload-queue .progress-bar--small {
height: 4px; height: 4px;
margin-top: 0.25rem; margin-top: 0.25rem;
} }
.drag-drop-upload .progress-bar__fill, .progress-bar__fill {
.upload-queue .progress-bar__fill {
height: 100%; height: 100%;
background: var(--accent-color, #007bff); background: var(--accent-color, #007bff);
border-radius: 4px; border-radius: 4px;


@@ -504,4 +504,42 @@ describe('DragDropUpload', () => {
}); });
}); });
}); });
describe('Tag Support', () => {
it('includes tag in upload request', async () => {
let capturedFormData: FormData | null = null;
class MockXHR {
status = 200;
responseText = JSON.stringify({ artifact_id: 'abc123', size: 100 });
timeout = 0;
upload = { addEventListener: vi.fn() };
addEventListener = vi.fn((event: string, handler: () => void) => {
if (event === 'load') setTimeout(handler, 10);
});
open = vi.fn();
send = vi.fn((data: FormData) => {
capturedFormData = data;
});
}
vi.stubGlobal('XMLHttpRequest', MockXHR);
render(<DragDropUpload {...defaultProps} tag="v1.0.0" />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('test.txt', 100, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await vi.advanceTimersByTimeAsync(100);
await waitFor(() => {
expect(capturedFormData?.get('tag')).toBe('v1.0.0');
});
});
});
}); });


@@ -13,6 +13,7 @@ interface StoredUploadState {
completedParts: number[]; completedParts: number[];
project: string; project: string;
package: string; package: string;
tag?: string;
createdAt: number; createdAt: number;
} }
@@ -86,6 +87,7 @@ export interface DragDropUploadProps {
maxFileSize?: number; // in bytes maxFileSize?: number; // in bytes
maxConcurrentUploads?: number; maxConcurrentUploads?: number;
maxRetries?: number; maxRetries?: number;
tag?: string;
className?: string; className?: string;
disabled?: boolean; disabled?: boolean;
disabledReason?: string; disabledReason?: string;
@@ -228,6 +230,7 @@ export function DragDropUpload({
maxFileSize, maxFileSize,
maxConcurrentUploads = 3, maxConcurrentUploads = 3,
maxRetries = 3, maxRetries = 3,
tag,
className = '', className = '',
disabled = false, disabled = false,
disabledReason, disabledReason,
@@ -365,6 +368,7 @@ export function DragDropUpload({
expected_hash: fileHash, expected_hash: fileHash,
filename: item.file.name, filename: item.file.name,
size: item.file.size, size: item.file.size,
tag: tag || undefined,
}), }),
} }
); );
@@ -388,6 +392,7 @@ export function DragDropUpload({
completedParts: [], completedParts: [],
project: projectName, project: projectName,
package: packageName, package: packageName,
tag: tag || undefined,
createdAt: Date.now(), createdAt: Date.now(),
}); });
@@ -433,6 +438,7 @@ export function DragDropUpload({
completedParts, completedParts,
project: projectName, project: projectName,
package: packageName, package: packageName,
tag: tag || undefined,
createdAt: Date.now(), createdAt: Date.now(),
}); });
@@ -453,7 +459,7 @@ export function DragDropUpload({
{ {
method: 'POST', method: 'POST',
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({}), body: JSON.stringify({ tag: tag || undefined }),
} }
); );
@@ -469,7 +475,7 @@ export function DragDropUpload({
size: completeData.size, size: completeData.size,
deduplicated: false, deduplicated: false,
}; };
}, [projectName, packageName, isOnline]); }, [projectName, packageName, tag, isOnline]);
const uploadFileSimple = useCallback((item: UploadItem): Promise<UploadResult> => { const uploadFileSimple = useCallback((item: UploadItem): Promise<UploadResult> => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
@@ -478,6 +484,9 @@ export function DragDropUpload({
const formData = new FormData(); const formData = new FormData();
formData.append('file', item.file); formData.append('file', item.file);
if (tag) {
formData.append('tag', tag);
}
let lastLoaded = 0; let lastLoaded = 0;
let lastTime = Date.now(); let lastTime = Date.now();
@@ -546,7 +555,7 @@ export function DragDropUpload({
: u : u
)); ));
}); });
}, [projectName, packageName]); }, [projectName, packageName, tag]);
const uploadFile = useCallback((item: UploadItem): Promise<UploadResult> => { const uploadFile = useCallback((item: UploadItem): Promise<UploadResult> => {
if (item.file.size >= CHUNKED_UPLOAD_THRESHOLD) { if (item.file.size >= CHUNKED_UPLOAD_THRESHOLD) {


@@ -233,7 +233,7 @@ export function GlobalSearch() {
const flatIndex = results.projects.length + results.packages.length + index; const flatIndex = results.projects.length + results.packages.length + index;
return ( return (
<button <button
key={artifact.artifact_id} key={artifact.tag_id}
className={`global-search__result ${selectedIndex === flatIndex ? 'selected' : ''}`} className={`global-search__result ${selectedIndex === flatIndex ? 'selected' : ''}`}
onClick={() => navigateToResult({ type: 'artifact', item: artifact })} onClick={() => navigateToResult({ type: 'artifact', item: artifact })}
onMouseEnter={() => setSelectedIndex(flatIndex)} onMouseEnter={() => setSelectedIndex(flatIndex)}
@@ -243,7 +243,7 @@ export function GlobalSearch() {
<line x1="7" y1="7" x2="7.01" y2="7" /> <line x1="7" y1="7" x2="7.01" y2="7" />
</svg> </svg>
<div className="global-search__result-content"> <div className="global-search__result-content">
<span className="global-search__result-name">{artifact.version}</span> <span className="global-search__result-name">{artifact.tag_name}</span>
<span className="global-search__result-path"> <span className="global-search__result-path">
{artifact.project_name} / {artifact.package_name} {artifact.project_name} / {artifact.package_name}
</span> </span>


@@ -272,7 +272,7 @@
.footer { .footer {
background: var(--bg-secondary); background: var(--bg-secondary);
border-top: 1px solid var(--border-primary); border-top: 1px solid var(--border-primary);
padding: 12px 0; padding: 24px 0;
} }
.footer-content { .footer-content {


@@ -84,6 +84,29 @@ function Layout({ children }: LayoutProps) {
</svg> </svg>
Projects Projects
</Link> </Link>
<Link to="/dashboard" className={location.pathname === '/dashboard' ? 'active' : ''}>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<rect x="3" y="3" width="7" height="7" rx="1"/>
<rect x="14" y="3" width="7" height="7" rx="1"/>
<rect x="3" y="14" width="7" height="7" rx="1"/>
<rect x="14" y="14" width="7" height="7" rx="1"/>
</svg>
Dashboard
</Link>
{user && userTeams.length > 0 && (
<Link
to={userTeams.length === 1 ? `/teams/${userTeams[0].slug}` : '/teams'}
className={location.pathname.startsWith('/teams') ? 'active' : ''}
>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
<circle cx="9" cy="7" r="4"/>
<path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
<path d="M16 3.13a4 4 0 0 1 0 7.75"/>
</svg>
{userTeams.length === 1 ? 'Team' : 'Teams'}
</Link>
)}
<a href="/docs" className="nav-link-muted"> <a href="/docs" className="nav-link-muted">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2"> <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"/> <path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"/>
@@ -125,35 +148,6 @@ function Layout({ children }: LayoutProps) {
)} )}
</div> </div>
<div className="user-menu-divider"></div> <div className="user-menu-divider"></div>
<NavLink
to="/dashboard"
className="user-menu-item"
onClick={() => setShowUserMenu(false)}
>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<rect x="3" y="3" width="7" height="7" rx="1"/>
<rect x="14" y="3" width="7" height="7" rx="1"/>
<rect x="3" y="14" width="7" height="7" rx="1"/>
<rect x="14" y="14" width="7" height="7" rx="1"/>
</svg>
Dashboard
</NavLink>
{userTeams.length > 0 && (
<NavLink
to={userTeams.length === 1 ? `/teams/${userTeams[0].slug}` : '/teams'}
className="user-menu-item"
onClick={() => setShowUserMenu(false)}
>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
<circle cx="9" cy="7" r="4"/>
<path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
<path d="M16 3.13a4 4 0 0 1 0 7.75"/>
</svg>
{userTeams.length === 1 ? 'Team' : 'Teams'}
</NavLink>
)}
<div className="user-menu-divider"></div>
<NavLink <NavLink
to="/settings/api-keys" to="/settings/api-keys"
className="user-menu-item" className="user-menu-item"


@@ -34,6 +34,74 @@
margin-bottom: 1rem; margin-bottom: 1rem;
} }
/* Settings Section */
.settings-section {
background: var(--bg-secondary);
border: 1px solid var(--border-color);
border-radius: 8px;
padding: 1.5rem;
margin-bottom: 2rem;
}
.settings-grid {
display: flex;
flex-direction: column;
gap: 1rem;
}
.setting-item {
display: flex;
justify-content: space-between;
align-items: center;
padding: 1rem;
background: var(--bg-primary);
border: 1px solid var(--border-color);
border-radius: 4px;
}
.toggle-label {
display: flex;
flex-direction: column;
gap: 0.25rem;
}
.setting-name {
font-weight: 500;
color: var(--text-primary);
display: flex;
align-items: center;
gap: 0.5rem;
}
.setting-description {
font-size: 0.85rem;
color: var(--text-secondary);
}
.toggle-button {
padding: 0.5rem 1rem;
border: none;
border-radius: 4px;
cursor: pointer;
font-weight: 500;
min-width: 100px;
}
.toggle-button.on {
background-color: #28a745;
color: white;
}
.toggle-button.off {
background-color: #dc3545;
color: white;
}
.toggle-button:disabled {
opacity: 0.6;
cursor: not-allowed;
}
/* Sources Section */ /* Sources Section */
.sources-section { .sources-section {
background: var(--bg-secondary); background: var(--bg-secondary);
@@ -65,7 +133,7 @@
.sources-table th, .sources-table th,
.sources-table td { .sources-table td {
padding: 0.75rem 1rem; padding: 0.75rem 1rem;
text-align: center; text-align: left;
border-bottom: 1px solid var(--border-color); border-bottom: 1px solid var(--border-color);
} }
@@ -88,12 +156,6 @@
.source-name { .source-name {
font-weight: 500; font-weight: 500;
color: var(--text-primary); color: var(--text-primary);
white-space: nowrap;
}
/* Name column should be left-aligned */
.sources-table td:first-child {
text-align: left;
} }
.url-cell { .url-cell {
@@ -103,10 +165,10 @@
overflow: hidden; overflow: hidden;
text-overflow: ellipsis; text-overflow: ellipsis;
white-space: nowrap; white-space: nowrap;
text-align: left;
} }
/* Badges */ /* Badges */
.public-badge,
.env-badge, .env-badge,
.status-badge { .status-badge {
display: inline-block; display: inline-block;
@@ -117,6 +179,11 @@
margin-left: 0.5rem; margin-left: 0.5rem;
} }
.public-badge {
background-color: #e3f2fd;
color: #1976d2;
}
.env-badge { .env-badge {
background-color: #fff3e0; background-color: #fff3e0;
color: #e65100; color: #e65100;
@@ -132,12 +199,6 @@
color: #c62828; color: #c62828;
} }
.coming-soon-badge {
color: #9e9e9e;
font-style: italic;
font-size: 0.85em;
}
/* Actions */ /* Actions */
.actions-cell { .actions-cell {
white-space: nowrap; white-space: nowrap;
@@ -151,67 +212,18 @@
margin-right: 0; margin-right: 0;
} }
.test-cell { .test-result {
text-align: center; display: inline-block;
width: 2rem; margin-left: 0.5rem;
font-size: 0.85rem;
} }
.test-dot { .test-result.success {
font-size: 1rem;
cursor: default;
}
.test-dot.success {
color: #2e7d32; color: #2e7d32;
} }
.test-dot.failure { .test-result.failure {
color: #c62828; color: #c62828;
cursor: pointer;
}
.test-dot.failure:hover {
color: #b71c1c;
}
.test-dot.testing {
color: #1976d2;
animation: pulse 1s infinite;
}
@keyframes pulse {
0%, 100% { opacity: 1; }
50% { opacity: 0.4; }
}
/* Error Modal */
.error-modal-content {
background: var(--bg-primary);
border-radius: 8px;
padding: 2rem;
width: 100%;
max-width: 500px;
}
.error-modal-content h3 {
margin-top: 0;
color: #c62828;
}
.error-modal-content .error-details {
background: var(--bg-tertiary);
padding: 1rem;
border-radius: 4px;
font-family: monospace;
font-size: 0.9rem;
word-break: break-word;
white-space: pre-wrap;
}
.error-modal-content .modal-actions {
display: flex;
justify-content: flex-end;
margin-top: 1.5rem;
} }
/* Buttons */ /* Buttons */
@@ -255,22 +267,10 @@
} }
.btn-sm { .btn-sm {
padding: 0.25rem 0.75rem; padding: 0.25rem 0.5rem;
font-size: 0.8rem; font-size: 0.8rem;
} }
.btn-secondary {
background-color: var(--bg-tertiary);
border-color: var(--border-color);
color: var(--text-primary);
font-weight: 500;
}
.btn-secondary:hover {
background-color: var(--bg-secondary);
border-color: var(--text-secondary);
}
.empty-message { .empty-message {
color: var(--text-secondary); color: var(--text-secondary);
font-style: italic; font-style: italic;
@@ -364,14 +364,9 @@
.form-actions { .form-actions {
display: flex; display: flex;
justify-content: space-between; justify-content: flex-end;
align-items: center; gap: 0.5rem;
margin-top: 1.5rem; margin-top: 1.5rem;
padding-top: 1rem; padding-top: 1rem;
border-top: 1px solid var(--border-color); border-top: 1px solid var(--border-color);
} }
.form-actions-right {
display: flex;
gap: 0.5rem;
}

View File

@@ -7,12 +7,13 @@ import {
updateUpstreamSource, updateUpstreamSource,
deleteUpstreamSource, deleteUpstreamSource,
testUpstreamSource, testUpstreamSource,
getCacheSettings,
updateCacheSettings,
} from '../api'; } from '../api';
import { UpstreamSource, SourceType, AuthType } from '../types'; import { UpstreamSource, CacheSettings, SourceType, AuthType } from '../types';
import './AdminCachePage.css'; import './AdminCachePage.css';
const SOURCE_TYPES: SourceType[] = ['npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic']; const SOURCE_TYPES: SourceType[] = ['npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic'];
const SUPPORTED_SOURCE_TYPES: Set<SourceType> = new Set(['pypi', 'generic']);
const AUTH_TYPES: AuthType[] = ['none', 'basic', 'bearer', 'api_key']; const AUTH_TYPES: AuthType[] = ['none', 'basic', 'bearer', 'api_key'];
function AdminCachePage() { function AdminCachePage() {
@@ -24,6 +25,11 @@ function AdminCachePage() {
const [loadingSources, setLoadingSources] = useState(true); const [loadingSources, setLoadingSources] = useState(true);
const [sourcesError, setSourcesError] = useState<string | null>(null); const [sourcesError, setSourcesError] = useState<string | null>(null);
// Cache settings state
const [settings, setSettings] = useState<CacheSettings | null>(null);
const [loadingSettings, setLoadingSettings] = useState(true);
const [settingsError, setSettingsError] = useState<string | null>(null);
// Create/Edit form state // Create/Edit form state
const [showForm, setShowForm] = useState(false); const [showForm, setShowForm] = useState(false);
const [editingSource, setEditingSource] = useState<UpstreamSource | null>(null); const [editingSource, setEditingSource] = useState<UpstreamSource | null>(null);
@@ -32,6 +38,7 @@ function AdminCachePage() {
source_type: 'generic' as SourceType, source_type: 'generic' as SourceType,
url: '', url: '',
enabled: true, enabled: true,
is_public: true,
auth_type: 'none' as AuthType, auth_type: 'none' as AuthType,
username: '', username: '',
password: '', password: '',
@@ -47,13 +54,12 @@ function AdminCachePage() {
// Delete confirmation state // Delete confirmation state
const [deletingId, setDeletingId] = useState<string | null>(null); const [deletingId, setDeletingId] = useState<string | null>(null);
// Settings update state
const [updatingSettings, setUpdatingSettings] = useState(false);
// Success message // Success message
const [successMessage, setSuccessMessage] = useState<string | null>(null); const [successMessage, setSuccessMessage] = useState<string | null>(null);
// Error modal state
const [showErrorModal, setShowErrorModal] = useState(false);
const [selectedError, setSelectedError] = useState<{ sourceName: string; error: string } | null>(null);
useEffect(() => { useEffect(() => {
if (!authLoading && !user) { if (!authLoading && !user) {
navigate('/login', { state: { from: '/admin/cache' } }); navigate('/login', { state: { from: '/admin/cache' } });
@@ -63,6 +69,7 @@ function AdminCachePage() {
useEffect(() => { useEffect(() => {
if (user && user.is_admin) { if (user && user.is_admin) {
loadSources(); loadSources();
loadSettings();
} }
}, [user]); }, [user]);
@@ -86,6 +93,19 @@ function AdminCachePage() {
} }
} }
async function loadSettings() {
setLoadingSettings(true);
setSettingsError(null);
try {
const data = await getCacheSettings();
setSettings(data);
} catch (err) {
setSettingsError(err instanceof Error ? err.message : 'Failed to load settings');
} finally {
setLoadingSettings(false);
}
}
function openCreateForm() { function openCreateForm() {
setEditingSource(null); setEditingSource(null);
setFormData({ setFormData({
@@ -93,6 +113,7 @@ function AdminCachePage() {
source_type: 'generic', source_type: 'generic',
url: '', url: '',
enabled: true, enabled: true,
is_public: true,
auth_type: 'none', auth_type: 'none',
username: '', username: '',
password: '', password: '',
@@ -109,6 +130,7 @@ function AdminCachePage() {
source_type: source.source_type, source_type: source.source_type,
url: source.url, url: source.url,
enabled: source.enabled, enabled: source.enabled,
is_public: source.is_public,
auth_type: source.auth_type, auth_type: source.auth_type,
username: source.username || '', username: source.username || '',
password: '', password: '',
@@ -133,8 +155,6 @@ function AdminCachePage() {
setFormError(null); setFormError(null);
try { try {
let savedSourceId: string | null = null;
if (editingSource) { if (editingSource) {
// Update existing source // Update existing source
await updateUpstreamSource(editingSource.id, { await updateUpstreamSource(editingSource.id, {
@@ -142,35 +162,30 @@ function AdminCachePage() {
source_type: formData.source_type, source_type: formData.source_type,
url: formData.url.trim(), url: formData.url.trim(),
enabled: formData.enabled, enabled: formData.enabled,
is_public: formData.is_public,
auth_type: formData.auth_type, auth_type: formData.auth_type,
username: formData.username.trim() || undefined, username: formData.username.trim() || undefined,
password: formData.password || undefined, password: formData.password || undefined,
priority: formData.priority, priority: formData.priority,
}); });
savedSourceId = editingSource.id;
setSuccessMessage('Source updated successfully'); setSuccessMessage('Source updated successfully');
} else { } else {
// Create new source // Create new source
const newSource = await createUpstreamSource({ await createUpstreamSource({
name: formData.name.trim(), name: formData.name.trim(),
source_type: formData.source_type, source_type: formData.source_type,
url: formData.url.trim(), url: formData.url.trim(),
enabled: formData.enabled, enabled: formData.enabled,
is_public: formData.is_public,
auth_type: formData.auth_type, auth_type: formData.auth_type,
username: formData.username.trim() || undefined, username: formData.username.trim() || undefined,
password: formData.password || undefined, password: formData.password || undefined,
priority: formData.priority, priority: formData.priority,
}); });
savedSourceId = newSource.id;
setSuccessMessage('Source created successfully'); setSuccessMessage('Source created successfully');
} }
setShowForm(false); setShowForm(false);
await loadSources(); await loadSources();
// Auto-test the source after save
if (savedSourceId) {
testSourceById(savedSourceId);
}
} catch (err) { } catch (err) {
setFormError(err instanceof Error ? err.message : 'Failed to save source'); setFormError(err instanceof Error ? err.message : 'Failed to save source');
} finally { } finally {
@@ -196,28 +211,24 @@ function AdminCachePage() {
} }
async function handleTest(source: UpstreamSource) { async function handleTest(source: UpstreamSource) {
testSourceById(source.id); setTestingId(source.id);
} setTestResults((prev) => ({ ...prev, [source.id]: { success: true, message: 'Testing...' } }));
async function testSourceById(sourceId: string) {
setTestingId(sourceId);
setTestResults((prev) => ({ ...prev, [sourceId]: { success: true, message: 'Testing...' } }));
try { try {
const result = await testUpstreamSource(sourceId); const result = await testUpstreamSource(source.id);
setTestResults((prev) => ({ setTestResults((prev) => ({
...prev, ...prev,
[sourceId]: { [source.id]: {
success: result.success, success: result.success,
message: result.success message: result.success
? `OK (${result.elapsed_ms}ms)` ? `Connected (${result.elapsed_ms}ms)`
: result.error || `HTTP ${result.status_code}`, : result.error || `HTTP ${result.status_code}`,
}, },
})); }));
} catch (err) { } catch (err) {
setTestResults((prev) => ({ setTestResults((prev) => ({
...prev, ...prev,
[sourceId]: { [source.id]: {
success: false, success: false,
message: err instanceof Error ? err.message : 'Test failed', message: err instanceof Error ? err.message : 'Test failed',
}, },
@@ -227,9 +238,30 @@ function AdminCachePage() {
} }
} }
function showError(sourceName: string, error: string) { async function handleSettingsToggle(field: 'allow_public_internet' | 'auto_create_system_projects') {
setSelectedError({ sourceName, error }); if (!settings) return;
setShowErrorModal(true);
// Check if env override is active
const isOverridden =
(field === 'allow_public_internet' && settings.allow_public_internet_env_override !== null) ||
(field === 'auto_create_system_projects' && settings.auto_create_system_projects_env_override !== null);
if (isOverridden) {
alert('This setting is overridden by an environment variable and cannot be changed via UI.');
return;
}
setUpdatingSettings(true);
try {
const update = { [field]: !settings[field] };
const newSettings = await updateCacheSettings(update);
setSettings(newSettings);
setSuccessMessage(`Setting "${field}" updated`);
} catch (err) {
setSettingsError(err instanceof Error ? err.message : 'Failed to update settings');
} finally {
setUpdatingSettings(false);
}
} }
if (authLoading) { if (authLoading) {
@@ -246,13 +278,71 @@ function AdminCachePage() {
return ( return (
<div className="admin-cache-page"> <div className="admin-cache-page">
<h1>Upstream Sources</h1> <h1>Cache Management</h1>
{successMessage && <div className="success-message">{successMessage}</div>} {successMessage && <div className="success-message">{successMessage}</div>}
{/* Cache Settings Section */}
<section className="settings-section">
<h2>Global Settings</h2>
{loadingSettings ? (
<p>Loading settings...</p>
) : settingsError ? (
<div className="error-message">{settingsError}</div>
) : settings ? (
<div className="settings-grid">
<div className="setting-item">
<label className="toggle-label">
<span className="setting-name">
Allow Public Internet
{settings.allow_public_internet_env_override !== null && (
<span className="env-badge" title="Overridden by environment variable">
ENV
</span>
)}
</span>
<span className="setting-description">
When disabled (air-gap mode), requests to public sources are blocked.
</span>
</label>
<button
className={`toggle-button ${settings.allow_public_internet ? 'on' : 'off'}`}
onClick={() => handleSettingsToggle('allow_public_internet')}
disabled={updatingSettings || settings.allow_public_internet_env_override !== null}
>
{settings.allow_public_internet ? 'Enabled' : 'Disabled'}
</button>
</div>
<div className="setting-item">
<label className="toggle-label">
<span className="setting-name">
Auto-create System Projects
{settings.auto_create_system_projects_env_override !== null && (
<span className="env-badge" title="Overridden by environment variable">
ENV
</span>
)}
</span>
<span className="setting-description">
Automatically create system projects (_npm, _pypi, etc.) on first cache request.
</span>
</label>
<button
className={`toggle-button ${settings.auto_create_system_projects ? 'on' : 'off'}`}
onClick={() => handleSettingsToggle('auto_create_system_projects')}
disabled={updatingSettings || settings.auto_create_system_projects_env_override !== null}
>
{settings.auto_create_system_projects ? 'Enabled' : 'Disabled'}
</button>
</div>
</div>
) : null}
</section>
{/* Upstream Sources Section */} {/* Upstream Sources Section */}
<section className="sources-section"> <section className="sources-section">
<div className="section-header"> <div className="section-header">
<h2>Upstream Sources</h2>
<button className="btn btn-primary" onClick={openCreateForm}> <button className="btn btn-primary" onClick={openCreateForm}>
Add Source Add Source
</button> </button>
@@ -273,7 +363,7 @@ function AdminCachePage() {
<th>URL</th> <th>URL</th>
<th>Priority</th> <th>Priority</th>
<th>Status</th> <th>Status</th>
<th>Test</th> <th>Source</th>
<th>Actions</th> <th>Actions</th>
</tr> </tr>
</thead> </thead>
@@ -282,50 +372,51 @@ function AdminCachePage() {
<tr key={source.id} className={source.enabled ? '' : 'disabled-row'}> <tr key={source.id} className={source.enabled ? '' : 'disabled-row'}>
<td> <td>
<span className="source-name">{source.name}</span> <span className="source-name">{source.name}</span>
{source.source === 'env' && ( {source.is_public && <span className="public-badge">Public</span>}
<span className="env-badge" title="Defined via environment variable">ENV</span>
)}
</td> </td>
<td> <td>{source.source_type}</td>
{source.source_type} <td className="url-cell">{source.url}</td>
{!SUPPORTED_SOURCE_TYPES.has(source.source_type) && (
<span className="coming-soon-badge"> (coming soon)</span>
)}
</td>
<td className="url-cell" title={source.url}>{source.url}</td>
<td>{source.priority}</td> <td>{source.priority}</td>
<td> <td>
<span className={`status-badge ${source.enabled ? 'enabled' : 'disabled'}`}> <span className={`status-badge ${source.enabled ? 'enabled' : 'disabled'}`}>
{source.enabled ? 'Enabled' : 'Disabled'} {source.enabled ? 'Enabled' : 'Disabled'}
</span> </span>
</td> </td>
<td className="test-cell"> <td>
{testingId === source.id ? ( {source.source === 'env' ? (
<span className="test-dot testing" title="Testing..."></span> <span className="env-badge" title="Defined via environment variable">
) : testResults[source.id] ? ( ENV
testResults[source.id].success ? ( </span>
<span className="test-dot success" title={testResults[source.id].message}></span> ) : (
) : ( 'Database'
<span )}
className="test-dot failure"
title="Click to see error"
onClick={() => showError(source.name, testResults[source.id].message)}
></span>
)
) : null}
</td> </td>
<td className="actions-cell"> <td className="actions-cell">
<button <button
className="btn btn-sm btn-secondary" className="btn btn-sm"
onClick={() => handleTest(source)} onClick={() => handleTest(source)}
disabled={testingId === source.id} disabled={testingId === source.id}
> >
Test {testingId === source.id ? 'Testing...' : 'Test'}
</button> </button>
{source.source !== 'env' && ( {source.source !== 'env' && (
<button className="btn btn-sm btn-secondary" onClick={() => openEditForm(source)}> <>
Edit <button className="btn btn-sm" onClick={() => openEditForm(source)}>
</button> Edit
</button>
<button
className="btn btn-sm btn-danger"
onClick={() => handleDelete(source)}
disabled={deletingId === source.id}
>
{deletingId === source.id ? 'Deleting...' : 'Delete'}
</button>
</>
)}
{testResults[source.id] && (
<span className={`test-result ${testResults[source.id].success ? 'success' : 'failure'}`}>
{testResults[source.id].message}
</span>
)} )}
</td> </td>
</tr> </tr>
@@ -365,7 +456,7 @@ function AdminCachePage() {
> >
{SOURCE_TYPES.map((type) => ( {SOURCE_TYPES.map((type) => (
<option key={type} value={type}> <option key={type} value={type}>
{type}{!SUPPORTED_SOURCE_TYPES.has(type) ? ' (coming soon)' : ''} {type}
</option> </option>
))} ))}
</select> </select>
@@ -407,6 +498,16 @@ function AdminCachePage() {
Enabled Enabled
</label> </label>
</div> </div>
<div className="form-group checkbox-group">
<label>
<input
type="checkbox"
checked={formData.is_public}
onChange={(e) => setFormData({ ...formData, is_public: e.target.checked })}
/>
Public Internet Source
</label>
</div>
</div> </div>
<div className="form-group"> <div className="form-group">
@@ -461,47 +562,17 @@ function AdminCachePage() {
)} )}
<div className="form-actions"> <div className="form-actions">
{editingSource && ( <button type="button" className="btn" onClick={() => setShowForm(false)}>
<button Cancel
type="button" </button>
className="btn btn-danger" <button type="submit" className="btn btn-primary" disabled={isSaving}>
onClick={() => { {isSaving ? 'Saving...' : editingSource ? 'Update' : 'Create'}
handleDelete(editingSource); </button>
setShowForm(false);
}}
disabled={deletingId === editingSource.id}
>
{deletingId === editingSource.id ? 'Deleting...' : 'Delete'}
</button>
)}
<div className="form-actions-right">
<button type="button" className="btn" onClick={() => setShowForm(false)}>
Cancel
</button>
<button type="submit" className="btn btn-primary" disabled={isSaving}>
{isSaving ? 'Saving...' : editingSource ? 'Update' : 'Create'}
</button>
</div>
</div> </div>
</form> </form>
</div> </div>
</div> </div>
)} )}
{/* Error Details Modal */}
{showErrorModal && selectedError && (
<div className="modal-overlay" onClick={() => setShowErrorModal(false)}>
<div className="error-modal-content" onClick={(e) => e.stopPropagation()}>
<h3>Connection Error: {selectedError.sourceName}</h3>
<div className="error-details">{selectedError.error}</div>
<div className="modal-actions">
<button className="btn" onClick={() => setShowErrorModal(false)}>
Close
</button>
</div>
</div>
</div>
)}
</div> </div>
); );
} }
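The settings hunks above amount to a small read-check-update flow: load the cache settings, refuse to toggle a field whose *_env_override is non-null, otherwise send the inverted value. Below is a minimal TypeScript sketch of that flow outside the React component; it assumes the getCacheSettings and updateCacheSettings helpers imported from '../api' in this file and the CacheSettings/CacheSettingsUpdate shapes added to types.ts further down, while the standalone function name toggleCacheSetting is purely illustrative.

// Illustrative sketch only; mirrors handleSettingsToggle from AdminCachePage.tsx.
// Assumes the '../api' helpers and the types shown elsewhere in this change.
import { getCacheSettings, updateCacheSettings } from '../api';
import { CacheSettings, CacheSettingsUpdate } from '../types';

type ToggleableSetting = 'allow_public_internet' | 'auto_create_system_projects';

async function toggleCacheSetting(field: ToggleableSetting): Promise<CacheSettings> {
  const settings = await getCacheSettings();

  // Environment-variable overrides win; the UI disables the toggle in that case.
  const override =
    field === 'allow_public_internet'
      ? settings.allow_public_internet_env_override
      : settings.auto_create_system_projects_env_override;
  if (override !== null) {
    throw new Error(`"${field}" is pinned by an environment variable`);
  }

  // Flip only the requested field; the server returns the updated settings.
  const update: CacheSettingsUpdate =
    field === 'allow_public_internet'
      ? { allow_public_internet: !settings.allow_public_internet }
      : { auto_create_system_projects: !settings.auto_create_system_projects };
  return updateCacheSettings(update);
}

Calling toggleCacheSetting('allow_public_internet') would then flip air-gap mode unless the deployment has pinned it via an environment variable.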

View File

@@ -249,7 +249,7 @@ function Home() {
key: 'created_by', key: 'created_by',
header: 'Owner', header: 'Owner',
className: 'cell-owner', className: 'cell-owner',
render: (project) => project.team_name || project.created_by, render: (project) => project.created_by,
}, },
...(user ...(user
? [ ? [

View File

@@ -642,11 +642,6 @@ tr:hover .copy-btn {
padding: 20px; padding: 20px;
} }
/* Ensure file modal needs higher z-index when opened from deps modal */
.modal-overlay:has(.ensure-file-modal) {
z-index: 1100;
}
.ensure-file-modal { .ensure-file-modal {
background: var(--bg-secondary); background: var(--bg-secondary);
border: 1px solid var(--border-primary); border: 1px solid var(--border-primary);
@@ -798,194 +793,4 @@ tr:hover .copy-btn {
.ensure-file-modal { .ensure-file-modal {
max-height: 90vh; max-height: 90vh;
} }
.action-menu-dropdown {
right: 0;
left: auto;
}
}
/* Header upload button */
.header-upload-btn {
margin-left: auto;
}
/* Tag/Version cell */
.tag-version-cell {
display: flex;
flex-direction: column;
gap: 4px;
}
.tag-version-cell .version-badge {
font-size: 0.75rem;
color: var(--text-muted);
}
/* Icon buttons */
.btn-icon {
display: flex;
align-items: center;
justify-content: center;
width: 32px;
height: 32px;
padding: 0;
background: transparent;
border: 1px solid transparent;
border-radius: var(--radius-sm);
color: var(--text-secondary);
cursor: pointer;
transition: all var(--transition-fast);
}
.btn-icon:hover {
background: var(--bg-hover);
color: var(--text-primary);
}
/* Action menu */
.action-buttons {
display: flex;
align-items: center;
gap: 4px;
}
.action-menu {
position: relative;
}
/* Action menu backdrop for click-outside */
.action-menu-backdrop {
position: fixed;
top: 0;
left: 0;
right: 0;
bottom: 0;
z-index: 999;
}
.action-menu-dropdown {
position: fixed;
z-index: 1000;
min-width: 180px;
padding: 4px 0;
background: var(--bg-secondary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
}
.action-menu-dropdown button {
display: block;
width: 100%;
padding: 8px 12px;
background: none;
border: none;
text-align: left;
font-size: 0.875rem;
color: var(--text-primary);
cursor: pointer;
transition: background var(--transition-fast);
}
.action-menu-dropdown button:hover {
background: var(--bg-hover);
}
/* Upload Modal */
.upload-modal,
.create-tag-modal {
background: var(--bg-secondary);
border-radius: var(--radius-lg);
width: 90%;
max-width: 500px;
max-height: 90vh;
overflow: hidden;
}
.modal-header {
display: flex;
align-items: center;
justify-content: space-between;
padding: 16px 20px;
border-bottom: 1px solid var(--border-primary);
}
.modal-header h3 {
margin: 0;
font-size: 1.125rem;
font-weight: 600;
}
.modal-body {
padding: 20px;
}
.modal-description {
margin-bottom: 16px;
color: var(--text-secondary);
font-size: 0.875rem;
}
.modal-actions {
display: flex;
justify-content: flex-end;
gap: 12px;
margin-top: 20px;
padding-top: 16px;
border-top: 1px solid var(--border-primary);
}
/* Dependencies Modal */
.deps-modal {
background: var(--bg-secondary);
border-radius: var(--radius-lg);
width: 90%;
max-width: 600px;
max-height: 80vh;
overflow: hidden;
display: flex;
flex-direction: column;
}
.deps-modal .modal-body {
overflow-y: auto;
flex: 1;
}
.deps-modal-controls {
display: flex;
gap: 8px;
margin-bottom: 16px;
}
/* Artifact ID Modal */
.artifact-id-modal {
background: var(--bg-secondary);
border-radius: var(--radius-lg);
width: 90%;
max-width: 500px;
}
.artifact-id-display {
display: flex;
align-items: center;
gap: 12px;
padding: 16px;
background: var(--bg-tertiary);
border-radius: var(--radius-md);
border: 1px solid var(--border-primary);
}
.artifact-id-display code {
font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
font-size: 0.8125rem;
color: var(--text-primary);
word-break: break-all;
flex: 1;
}
.artifact-id-display .copy-btn {
opacity: 1;
flex-shrink: 0;
} }

File diff suppressed because it is too large

View File

@@ -214,7 +214,7 @@ function ProjectPage() {
</div> </div>
</div> </div>
<div className="page-header__actions"> <div className="page-header__actions">
{canAdmin && !project.team_id && !project.is_system && ( {canAdmin && !project.team_id && (
<button <button
className="btn btn-secondary" className="btn btn-secondary"
onClick={() => navigate(`/project/${projectName}/settings`)} onClick={() => navigate(`/project/${projectName}/settings`)}
@@ -227,11 +227,11 @@ function ProjectPage() {
Settings Settings
</button> </button>
)} )}
{canWrite && !project.is_system ? ( {canWrite ? (
<button className="btn btn-primary" onClick={() => setShowForm(!showForm)}> <button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
{showForm ? 'Cancel' : '+ New Package'} {showForm ? 'Cancel' : '+ New Package'}
</button> </button>
) : user && !project.is_system ? ( ) : user ? (
<span className="text-muted" title="You have read-only access to this project"> <span className="text-muted" title="You have read-only access to this project">
Read-only access Read-only access
</span> </span>
@@ -294,20 +294,18 @@ function ProjectPage() {
placeholder="Filter packages..." placeholder="Filter packages..."
className="list-controls__search" className="list-controls__search"
/> />
{!project?.is_system && ( <select
<select className="list-controls__select"
className="list-controls__select" value={format}
value={format} onChange={(e) => handleFormatChange(e.target.value)}
onChange={(e) => handleFormatChange(e.target.value)} >
> <option value="">All formats</option>
<option value="">All formats</option> {FORMAT_OPTIONS.map((f) => (
{FORMAT_OPTIONS.map((f) => ( <option key={f} value={f}>
<option key={f} value={f}> {f}
{f} </option>
</option> ))}
))} </select>
</select>
)}
</div> </div>
{hasActiveFilters && ( {hasActiveFilters && (
@@ -343,19 +341,19 @@ function ProjectPage() {
className: 'cell-description', className: 'cell-description',
render: (pkg) => pkg.description || '—', render: (pkg) => pkg.description || '—',
}, },
...(!project?.is_system ? [{ {
key: 'format', key: 'format',
header: 'Format', header: 'Format',
render: (pkg: Package) => <Badge variant="default">{pkg.format}</Badge>, render: (pkg) => <Badge variant="default">{pkg.format}</Badge>,
}] : []), },
...(!project?.is_system ? [{ {
key: 'version_count', key: 'tag_count',
header: 'Versions', header: 'Tags',
render: (pkg: Package) => pkg.version_count ?? '—', render: (pkg) => pkg.tag_count ?? '—',
}] : []), },
{ {
key: 'artifact_count', key: 'artifact_count',
header: project?.is_system ? 'Versions' : 'Artifacts', header: 'Artifacts',
render: (pkg) => pkg.artifact_count ?? '—', render: (pkg) => pkg.artifact_count ?? '—',
}, },
{ {
@@ -364,12 +362,12 @@ function ProjectPage() {
render: (pkg) => render: (pkg) =>
pkg.total_size !== undefined && pkg.total_size > 0 ? formatBytes(pkg.total_size) : '—', pkg.total_size !== undefined && pkg.total_size > 0 ? formatBytes(pkg.total_size) : '—',
}, },
...(!project?.is_system ? [{ {
key: 'latest_version', key: 'latest_tag',
header: 'Latest', header: 'Latest',
render: (pkg: Package) => render: (pkg) =>
pkg.latest_version ? <strong style={{ color: 'var(--accent-primary)' }}>{pkg.latest_version}</strong> : '—', pkg.latest_tag ? <strong style={{ color: 'var(--accent-primary)' }}>{pkg.latest_tag}</strong> : '—',
}] : []), },
{ {
key: 'created_at', key: 'created_at',
header: 'Created', header: 'Created',
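The column-definition hunk above leans on a spread-an-empty-array idiom to include or omit table columns for system cache projects. A small, self-contained sketch of that idiom follows; the Column shape here is simplified and hypothetical, not the actual table component's interface.

// Simplified, illustrative Column type; not the real table component's props.
interface Column {
  key: string;
  header: string;
}

function buildPackageColumns(isSystemProject: boolean): Column[] {
  return [
    { key: 'name', header: 'Name' },
    // Spreading an empty array omits the entry entirely, so system cache
    // projects simply never render the Format column.
    ...(!isSystemProject ? [{ key: 'format', header: 'Format' }] : []),
    { key: 'artifact_count', header: isSystemProject ? 'Versions' : 'Artifacts' },
  ];
}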

View File

@@ -19,6 +19,12 @@ export interface Project {
team_name?: string | null; team_name?: string | null;
} }
export interface TagSummary {
name: string;
artifact_id: string;
created_at: string;
}
export interface Package { export interface Package {
id: string; id: string;
project_id: string; project_id: string;
@@ -29,11 +35,12 @@ export interface Package {
created_at: string; created_at: string;
updated_at: string; updated_at: string;
// Aggregated fields (from PackageDetailResponse) // Aggregated fields (from PackageDetailResponse)
tag_count?: number;
artifact_count?: number; artifact_count?: number;
version_count?: number;
total_size?: number; total_size?: number;
latest_tag?: string | null;
latest_upload_at?: string | null; latest_upload_at?: string | null;
latest_version?: string | null; recent_tags?: TagSummary[];
} }
export interface Artifact { export interface Artifact {
@@ -46,19 +53,22 @@ export interface Artifact {
ref_count: number; ref_count: number;
} }
export interface PackageArtifact { export interface Tag {
id: string; id: string;
sha256: string; package_id: string;
size: number; name: string;
content_type: string | null; artifact_id: string;
original_name: string | null;
checksum_md5?: string | null;
checksum_sha1?: string | null;
s3_etag?: string | null;
created_at: string; created_at: string;
created_by: string; created_by: string;
format_metadata?: Record<string, unknown> | null; }
version?: string | null; // Version from PackageVersion if exists
export interface TagDetail extends Tag {
artifact_size: number;
artifact_content_type: string | null;
artifact_original_name: string | null;
artifact_created_at: string;
artifact_format_metadata: Record<string, unknown> | null;
version: string | null;
} }
export interface PackageVersion { export interface PackageVersion {
@@ -73,9 +83,20 @@ export interface PackageVersion {
size?: number; size?: number;
content_type?: string | null; content_type?: string | null;
original_name?: string | null; original_name?: string | null;
tags?: string[];
} }
export interface ArtifactDetail extends Artifact {} export interface ArtifactTagInfo {
id: string;
name: string;
package_id: string;
package_name: string;
project_name: string;
}
export interface ArtifactDetail extends Artifact {
tags: ArtifactTagInfo[];
}
export interface PaginatedResponse<T> { export interface PaginatedResponse<T> {
items: T[]; items: T[];
@@ -95,6 +116,8 @@ export interface ListParams {
order?: 'asc' | 'desc'; order?: 'asc' | 'desc';
} }
export interface TagListParams extends ListParams {}
export interface PackageListParams extends ListParams { export interface PackageListParams extends ListParams {
format?: string; format?: string;
platform?: string; platform?: string;
@@ -119,6 +142,7 @@ export interface UploadResponse {
size: number; size: number;
project: string; project: string;
package: string; package: string;
tag: string | null;
version: string | null; version: string | null;
version_source: string | null; version_source: string | null;
} }
@@ -141,8 +165,9 @@ export interface SearchResultPackage {
} }
export interface SearchResultArtifact { export interface SearchResultArtifact {
tag_id: string;
tag_name: string;
artifact_id: string; artifact_id: string;
version: string | null;
package_id: string; package_id: string;
package_name: string; package_name: string;
project_name: string; project_name: string;
@@ -365,7 +390,8 @@ export interface Dependency {
artifact_id: string; artifact_id: string;
project: string; project: string;
package: string; package: string;
version: string; version: string | null;
tag: string | null;
created_at: string; created_at: string;
} }
@@ -379,6 +405,7 @@ export interface DependentInfo {
project: string; project: string;
package: string; package: string;
version: string | null; version: string | null;
constraint_type: 'version' | 'tag';
constraint_value: string; constraint_value: string;
} }
@@ -401,17 +428,11 @@ export interface ResolvedArtifact {
project: string; project: string;
package: string; package: string;
version: string | null; version: string | null;
tag: string | null;
size: number; size: number;
download_url: string; download_url: string;
} }
export interface MissingDependency {
project: string;
package: string;
constraint: string | null;
required_by: string | null;
}
export interface DependencyResolutionResponse { export interface DependencyResolutionResponse {
requested: { requested: {
project: string; project: string;
@@ -419,7 +440,6 @@ export interface DependencyResolutionResponse {
ref: string; ref: string;
}; };
resolved: ResolvedArtifact[]; resolved: ResolvedArtifact[];
missing: MissingDependency[];
total_size: number; total_size: number;
artifact_count: number; artifact_count: number;
} }
@@ -495,6 +515,7 @@ export interface UpstreamSource {
source_type: SourceType; source_type: SourceType;
url: string; url: string;
enabled: boolean; enabled: boolean;
is_public: boolean;
auth_type: AuthType; auth_type: AuthType;
username: string | null; username: string | null;
has_password: boolean; has_password: boolean;
@@ -510,6 +531,7 @@ export interface UpstreamSourceCreate {
source_type: SourceType; source_type: SourceType;
url: string; url: string;
enabled?: boolean; enabled?: boolean;
is_public?: boolean;
auth_type?: AuthType; auth_type?: AuthType;
username?: string; username?: string;
password?: string; password?: string;
@@ -522,6 +544,7 @@ export interface UpstreamSourceUpdate {
source_type?: SourceType; source_type?: SourceType;
url?: string; url?: string;
enabled?: boolean; enabled?: boolean;
is_public?: boolean;
auth_type?: AuthType; auth_type?: AuthType;
username?: string; username?: string;
password?: string; password?: string;
@@ -537,3 +560,18 @@ export interface UpstreamSourceTestResult {
source_id: string; source_id: string;
source_name: string; source_name: string;
} }
// Cache Settings types
export interface CacheSettings {
allow_public_internet: boolean;
auto_create_system_projects: boolean;
allow_public_internet_env_override: boolean | null;
auto_create_system_projects_env_override: boolean | null;
created_at: string | null;
updated_at: string | null;
}
export interface CacheSettingsUpdate {
allow_public_internet?: boolean;
auto_create_system_projects?: boolean;
}
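As a usage sketch of the is_public flag added to the upstream-source types above: registering a public index through createUpstreamSource, the helper already imported in AdminCachePage.tsx. The concrete name, URL, and priority are illustrative; when allow_public_internet is disabled in the cache settings, requests to sources marked public are blocked.

// Illustrative only; field values are examples, not seeded configuration.
import { createUpstreamSource } from '../api';
import { UpstreamSourceCreate } from '../types';

async function addPublicPypiMirror(): Promise<string> {
  const body: UpstreamSourceCreate = {
    name: 'pypi-public',            // example name
    source_type: 'pypi',
    url: 'https://pypi.org/simple/',
    enabled: true,
    is_public: true,                // new flag from this change
    auth_type: 'none',
    priority: 100,
  };
  const source = await createUpstreamSource(body);
  return source.id;
}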

View File

@@ -128,10 +128,6 @@ spec:
value: {{ .Values.orchard.rateLimit.login | quote }} value: {{ .Values.orchard.rateLimit.login | quote }}
{{- end }} {{- end }}
{{- end }} {{- end }}
{{- if .Values.orchard.purgeSeedData }}
- name: ORCHARD_PURGE_SEED_DATA
value: "true"
{{- end }}
{{- if .Values.orchard.database.poolSize }} {{- if .Values.orchard.database.poolSize }}
- name: ORCHARD_DATABASE_POOL_SIZE - name: ORCHARD_DATABASE_POOL_SIZE
value: {{ .Values.orchard.database.poolSize | quote }} value: {{ .Values.orchard.database.poolSize | quote }}
@@ -144,20 +140,6 @@ spec:
- name: ORCHARD_DATABASE_POOL_TIMEOUT - name: ORCHARD_DATABASE_POOL_TIMEOUT
value: {{ .Values.orchard.database.poolTimeout | quote }} value: {{ .Values.orchard.database.poolTimeout | quote }}
{{- end }} {{- end }}
{{- if .Values.orchard.pypiCache }}
{{- if .Values.orchard.pypiCache.workers }}
- name: ORCHARD_PYPI_CACHE_WORKERS
value: {{ .Values.orchard.pypiCache.workers | quote }}
{{- end }}
{{- if .Values.orchard.pypiCache.maxDepth }}
- name: ORCHARD_PYPI_CACHE_MAX_DEPTH
value: {{ .Values.orchard.pypiCache.maxDepth | quote }}
{{- end }}
{{- if .Values.orchard.pypiCache.maxAttempts }}
- name: ORCHARD_PYPI_CACHE_MAX_ATTEMPTS
value: {{ .Values.orchard.pypiCache.maxAttempts | quote }}
{{- end }}
{{- end }}
{{- if .Values.orchard.auth }} {{- if .Values.orchard.auth }}
{{- if or .Values.orchard.auth.secretsManager .Values.orchard.auth.existingSecret .Values.orchard.auth.adminPassword }} {{- if or .Values.orchard.auth.secretsManager .Values.orchard.auth.existingSecret .Values.orchard.auth.adminPassword }}
- name: ORCHARD_ADMIN_PASSWORD - name: ORCHARD_ADMIN_PASSWORD

View File

@@ -59,10 +59,10 @@ ingress:
resources: resources:
limits: limits:
cpu: 500m cpu: 500m
memory: 1Gi memory: 512Mi
requests: requests:
cpu: 200m cpu: 200m
memory: 1Gi memory: 512Mi
livenessProbe: livenessProbe:
httpGet: httpGet:
@@ -124,12 +124,6 @@ orchard:
mode: "presigned" mode: "presigned"
presignedUrlExpiry: 3600 presignedUrlExpiry: 3600
# PyPI Cache Worker settings (reduced workers to limit memory usage)
pypiCache:
workers: 1
maxDepth: 10
maxAttempts: 3
# Relaxed rate limits for dev/feature environments (allows integration tests to run) # Relaxed rate limits for dev/feature environments (allows integration tests to run)
rateLimit: rateLimit:
login: "1000/minute" # Default is 5/minute, relaxed for CI integration tests login: "1000/minute" # Default is 5/minute, relaxed for CI integration tests

View File

@@ -57,10 +57,10 @@ ingress:
resources: resources:
limits: limits:
cpu: 500m cpu: 500m
memory: 768Mi memory: 512Mi
requests: requests:
cpu: 500m cpu: 500m
memory: 768Mi memory: 512Mi
livenessProbe: livenessProbe:
httpGet: httpGet:
@@ -121,12 +121,6 @@ orchard:
mode: "presigned" mode: "presigned"
presignedUrlExpiry: 3600 presignedUrlExpiry: 3600
# PyPI Cache Worker settings (reduced workers to limit memory usage)
pypiCache:
workers: 2
maxDepth: 10
maxAttempts: 3
# PostgreSQL subchart - disabled in prod, using RDS # PostgreSQL subchart - disabled in prod, using RDS
postgresql: postgresql:
enabled: false enabled: false

View File

@@ -56,10 +56,10 @@ ingress:
resources: resources:
limits: limits:
cpu: 500m cpu: 500m
memory: 768Mi memory: 512Mi
requests: requests:
cpu: 500m cpu: 500m
memory: 768Mi memory: 512Mi
livenessProbe: livenessProbe:
httpGet: httpGet:
@@ -91,7 +91,6 @@ affinity: {}
# Orchard server configuration # Orchard server configuration
orchard: orchard:
env: "development" # Allows seed data for testing env: "development" # Allows seed data for testing
purgeSeedData: true # Remove public seed data (npm-public, pypi-public, etc.)
server: server:
host: "0.0.0.0" host: "0.0.0.0"
port: 8080 port: 8080
@@ -122,12 +121,6 @@ orchard:
mode: "presigned" # presigned, redirect, or proxy mode: "presigned" # presigned, redirect, or proxy
presignedUrlExpiry: 3600 # Presigned URL expiry in seconds presignedUrlExpiry: 3600 # Presigned URL expiry in seconds
# PyPI Cache Worker settings (reduced workers to limit memory usage)
pypiCache:
workers: 2
maxDepth: 10
maxAttempts: 3
# Relaxed rate limits for stage (allows CI integration tests to run) # Relaxed rate limits for stage (allows CI integration tests to run)
rateLimit: rateLimit:
login: "1000/minute" # Default is 5/minute, relaxed for CI integration tests login: "1000/minute" # Default is 5/minute, relaxed for CI integration tests

View File

@@ -54,10 +54,10 @@ ingress:
resources: resources:
limits: limits:
cpu: 500m cpu: 500m
memory: 768Mi memory: 512Mi
requests: requests:
cpu: 500m cpu: 500m
memory: 768Mi memory: 512Mi
livenessProbe: livenessProbe:
httpGet: httpGet:
@@ -120,12 +120,6 @@ orchard:
mode: "presigned" # presigned, redirect, or proxy mode: "presigned" # presigned, redirect, or proxy
presignedUrlExpiry: 3600 # Presigned URL expiry in seconds presignedUrlExpiry: 3600 # Presigned URL expiry in seconds
# PyPI Cache Worker settings
pypiCache:
workers: 2 # Number of concurrent cache workers (reduced to limit memory usage)
maxDepth: 10 # Maximum recursion depth for dependency caching
maxAttempts: 3 # Maximum retry attempts for failed cache tasks
# Authentication settings # Authentication settings
auth: auth:
# Option 1: Plain admin password (creates K8s secret) # Option 1: Plain admin password (creates K8s secret)

View File

@@ -1,55 +0,0 @@
-- Migration: 011_pypi_cache_tasks
-- Description: Add table for tracking PyPI dependency caching tasks
-- Date: 2026-02-02
-- Table for tracking PyPI cache tasks with retry support
CREATE TABLE pypi_cache_tasks (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-- What to cache
package_name VARCHAR(255) NOT NULL,
version_constraint VARCHAR(255),
-- Origin tracking
parent_task_id UUID REFERENCES pypi_cache_tasks(id) ON DELETE SET NULL,
depth INTEGER NOT NULL DEFAULT 0,
triggered_by_artifact VARCHAR(64) REFERENCES artifacts(id) ON DELETE SET NULL,
-- Status
status VARCHAR(20) NOT NULL DEFAULT 'pending',
attempts INTEGER NOT NULL DEFAULT 0,
max_attempts INTEGER NOT NULL DEFAULT 3,
-- Results
cached_artifact_id VARCHAR(64) REFERENCES artifacts(id) ON DELETE SET NULL,
error_message TEXT,
-- Timing
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
started_at TIMESTAMP WITH TIME ZONE,
completed_at TIMESTAMP WITH TIME ZONE,
next_retry_at TIMESTAMP WITH TIME ZONE,
-- Constraints
CONSTRAINT check_task_status CHECK (status IN ('pending', 'in_progress', 'completed', 'failed')),
CONSTRAINT check_depth_non_negative CHECK (depth >= 0),
CONSTRAINT check_attempts_non_negative CHECK (attempts >= 0)
);
-- Index for finding tasks ready to process (pending with retry time passed)
CREATE INDEX idx_pypi_cache_tasks_status_retry ON pypi_cache_tasks(status, next_retry_at);
-- Index for deduplication check (is this package already queued?)
CREATE INDEX idx_pypi_cache_tasks_package_status ON pypi_cache_tasks(package_name, status);
-- Index for tracing dependency chains
CREATE INDEX idx_pypi_cache_tasks_parent ON pypi_cache_tasks(parent_task_id);
-- Index for finding tasks by artifact that triggered them
CREATE INDEX idx_pypi_cache_tasks_triggered_by ON pypi_cache_tasks(triggered_by_artifact);
-- Index for finding tasks by cached artifact
CREATE INDEX idx_pypi_cache_tasks_cached_artifact ON pypi_cache_tasks(cached_artifact_id);
-- Index for sorting by depth and creation time (processing order)
CREATE INDEX idx_pypi_cache_tasks_depth_created ON pypi_cache_tasks(depth, created_at);

View File

@@ -1,33 +0,0 @@
-- Migration: Remove tag system
-- Date: 2026-02-03
-- Description: Remove tags table and related objects, keeping only versions for artifact references
-- Drop triggers on tags table
DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
DROP TRIGGER IF EXISTS tags_updated_at_trigger ON tags;
DROP TRIGGER IF EXISTS tag_changes_trigger ON tags;
-- Drop the tag change tracking function
DROP FUNCTION IF EXISTS track_tag_changes();
-- Remove tag_constraint from artifact_dependencies
-- First drop the constraint that requires either version or tag
ALTER TABLE artifact_dependencies DROP CONSTRAINT IF EXISTS check_constraint_type;
-- Remove the tag_constraint column
ALTER TABLE artifact_dependencies DROP COLUMN IF EXISTS tag_constraint;
-- Make version_constraint NOT NULL (now the only option)
UPDATE artifact_dependencies SET version_constraint = '*' WHERE version_constraint IS NULL;
ALTER TABLE artifact_dependencies ALTER COLUMN version_constraint SET NOT NULL;
-- Drop tag_history table first (depends on tags)
DROP TABLE IF EXISTS tag_history;
-- Drop tags table
DROP TABLE IF EXISTS tags;
-- Rename uploads.tag_name to uploads.version (historical data field)
ALTER TABLE uploads RENAME COLUMN tag_name TO version;