Add upstream caching infrastructure and refactor CI pipeline

Mondo Diaz
2026-01-29 11:55:15 -06:00
parent c92895ffe9
commit 1d51c856b0
24 changed files with 7285 additions and 117 deletions


@@ -12,6 +12,7 @@ from sqlalchemy import (
    Index,
    JSON,
    ARRAY,
    LargeBinary,
)
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship, declarative_base
@@ -27,6 +28,7 @@ class Project(Base):
    name = Column(String(255), unique=True, nullable=False)
    description = Column(Text)
    is_public = Column(Boolean, default=True)
    is_system = Column(Boolean, default=False, nullable=False)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    updated_at = Column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
@@ -46,6 +48,7 @@ class Project(Base):
        Index("idx_projects_name", "name"),
        Index("idx_projects_created_by", "created_by"),
        Index("idx_projects_team_id", "team_id"),
        Index("idx_projects_is_system", "is_system"),
    )
@@ -637,3 +640,169 @@ class TeamMembership(Base):
            name="check_team_role",
        ),
    )


# =============================================================================
# Upstream Caching Models
# =============================================================================

# Valid source types for upstream registries
SOURCE_TYPES = ["npm", "pypi", "maven", "docker", "helm", "nuget", "deb", "rpm", "generic"]

# Valid authentication types
AUTH_TYPES = ["none", "basic", "bearer", "api_key"]
class UpstreamSource(Base):
    """Configuration for an upstream artifact registry.

    Stores connection details and authentication for upstream registries
    like npm, PyPI, Maven Central, or private Artifactory instances.
    """

    __tablename__ = "upstream_sources"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = Column(String(255), unique=True, nullable=False)
    source_type = Column(String(50), default="generic", nullable=False)
    url = Column(String(2048), nullable=False)
    enabled = Column(Boolean, default=False, nullable=False)
    is_public = Column(Boolean, default=True, nullable=False)
    auth_type = Column(String(20), default="none", nullable=False)
    username = Column(String(255))
    password_encrypted = Column(LargeBinary)
    headers_encrypted = Column(LargeBinary)
    priority = Column(Integer, default=100, nullable=False)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    updated_at = Column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
    )

    # Relationships
    cached_urls = relationship("CachedUrl", back_populates="source")

    __table_args__ = (
        Index("idx_upstream_sources_enabled", "enabled"),
        Index("idx_upstream_sources_source_type", "source_type"),
        Index("idx_upstream_sources_is_public", "is_public"),
        Index("idx_upstream_sources_priority", "priority"),
        CheckConstraint(
            "source_type IN ('npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic')",
            name="check_source_type",
        ),
        CheckConstraint(
            "auth_type IN ('none', 'basic', 'bearer', 'api_key')",
            name="check_auth_type",
        ),
        CheckConstraint("priority > 0", name="check_priority_positive"),
    )
    def set_password(self, password: str) -> None:
        """Encrypt and store a password/token."""
        from .encryption import encrypt_value

        if password:
            self.password_encrypted = encrypt_value(password)
        else:
            self.password_encrypted = None

    def get_password(self) -> str | None:
        """Decrypt and return the stored password/token."""
        from .encryption import decrypt_value

        if self.password_encrypted:
            try:
                return decrypt_value(self.password_encrypted)
            except Exception:
                return None
        return None

    def has_password(self) -> bool:
        """Check if a password/token is stored."""
        return self.password_encrypted is not None

    def set_headers(self, headers: dict) -> None:
        """Encrypt and store custom headers as JSON."""
        from .encryption import encrypt_value
        import json

        if headers:
            self.headers_encrypted = encrypt_value(json.dumps(headers))
        else:
            self.headers_encrypted = None

    def get_headers(self) -> dict | None:
        """Decrypt and return custom headers."""
        from .encryption import decrypt_value
        import json

        if self.headers_encrypted:
            try:
                return json.loads(decrypt_value(self.headers_encrypted))
            except Exception:
                return None
        return None
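

# Illustrative sketch, not part of this commit: how a caller might register a
# PyPI upstream with encrypted credentials via the helpers above. The `session`
# argument and all literal values ("pypi-proxy", the token, the header) are
# assumptions for illustration only.
def _example_register_pypi_upstream(session):
    """Hypothetical helper showing UpstreamSource credential handling."""
    source = UpstreamSource(
        name="pypi-proxy",
        source_type="pypi",
        url="https://pypi.org/simple/",
        enabled=True,
        auth_type="basic",
        username="mirror-bot",
    )
    source.set_password("example-token")        # encrypted before storage
    source.set_headers({"X-Org": "internal"})   # stored as an encrypted JSON blob
    session.add(source)
    session.commit()
    return source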
class CacheSettings(Base):
    """Global cache settings (singleton table).

    Controls behavior of the upstream caching system including air-gap mode.
    """

    __tablename__ = "cache_settings"

    id = Column(Integer, primary_key=True, default=1)
    allow_public_internet = Column(Boolean, default=True, nullable=False)
    auto_create_system_projects = Column(Boolean, default=True, nullable=False)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    updated_at = Column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
    )

    __table_args__ = (
        CheckConstraint("id = 1", name="check_cache_settings_singleton"),
    )
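

# Illustrative sketch, not part of this commit: a get-or-create helper for the
# singleton cache_settings row. The `session` argument is an assumption; the
# fixed id of 1 mirrors the check_cache_settings_singleton constraint above.
def _example_get_cache_settings(session):
    """Hypothetical helper returning the single CacheSettings row, creating it if missing."""
    settings = session.get(CacheSettings, 1)
    if settings is None:
        settings = CacheSettings(id=1)
        session.add(settings)
        session.commit()
    return settings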
class CachedUrl(Base):
    """Tracks URL to artifact mappings for provenance.

    Records which URLs have been cached and maps them to their stored artifacts.
    Enables "is this URL already cached?" lookups and audit trails.
    """

    __tablename__ = "cached_urls"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    url = Column(String(4096), nullable=False)
    url_hash = Column(String(64), unique=True, nullable=False)
    artifact_id = Column(
        String(64), ForeignKey("artifacts.id"), nullable=False
    )
    source_id = Column(
        UUID(as_uuid=True),
        ForeignKey("upstream_sources.id", ondelete="SET NULL"),
    )
    fetched_at = Column(DateTime(timezone=True), default=datetime.utcnow, nullable=False)
    response_headers = Column(JSON, default=dict)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)

    # Relationships
    artifact = relationship("Artifact")
    source = relationship("UpstreamSource", back_populates="cached_urls")

    __table_args__ = (
        Index("idx_cached_urls_url_hash", "url_hash"),
        Index("idx_cached_urls_artifact_id", "artifact_id"),
        Index("idx_cached_urls_source_id", "source_id"),
        Index("idx_cached_urls_fetched_at", "fetched_at"),
    )

    @staticmethod
    def compute_url_hash(url: str) -> str:
        """Compute SHA256 hash of a URL for fast lookups."""
        import hashlib

        return hashlib.sha256(url.encode("utf-8")).hexdigest()
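

# Illustrative sketch, not part of this commit: an "is this URL already cached?"
# lookup built on compute_url_hash() and the unique url_hash column. The
# `session` argument is an assumption for illustration only.
def _example_lookup_cached_url(session, url: str):
    """Hypothetical helper returning the CachedUrl row for `url`, or None if uncached."""
    url_hash = CachedUrl.compute_url_hash(url)
    return (
        session.query(CachedUrl)
        .filter(CachedUrl.url_hash == url_hash)
        .one_or_none()
    )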