diff --git a/backend/app/dependencies.py b/backend/app/dependencies.py index e43790c..b04d288 100644 --- a/backend/app/dependencies.py +++ b/backend/app/dependencies.py @@ -28,7 +28,6 @@ from .models import ( Project, Package, Artifact, - Tag, ArtifactDependency, PackageVersion, ) @@ -153,26 +152,20 @@ def parse_ensure_file(content: bytes) -> EnsureFileContent: project = dep.get('project') package = dep.get('package') version = dep.get('version') - tag = dep.get('tag') if not project: raise InvalidEnsureFileError(f"Dependency {i} missing 'project'") if not package: raise InvalidEnsureFileError(f"Dependency {i} missing 'package'") - if not version and not tag: + if not version: raise InvalidEnsureFileError( - f"Dependency {i} must have either 'version' or 'tag'" - ) - if version and tag: - raise InvalidEnsureFileError( - f"Dependency {i} cannot have both 'version' and 'tag'" + f"Dependency {i} must have 'version'" ) dependencies.append(EnsureFileDependency( project=project, package=package, version=version, - tag=tag, )) return EnsureFileContent(dependencies=dependencies) @@ -226,7 +219,6 @@ def store_dependencies( dependency_project=dep.project, dependency_package=dep.package, version_constraint=dep.version, - tag_constraint=dep.tag, ) db.add(artifact_dep) created.append(artifact_dep) @@ -292,26 +284,21 @@ def get_reverse_dependencies( if not artifact: continue - # Find which package this artifact belongs to via tags or versions - tag = db.query(Tag).filter(Tag.artifact_id == dep.artifact_id).first() - if tag: - pkg = db.query(Package).filter(Package.id == tag.package_id).first() + # Find which package this artifact belongs to via versions + version_record = db.query(PackageVersion).filter( + PackageVersion.artifact_id == dep.artifact_id, + ).first() + if version_record: + pkg = db.query(Package).filter(Package.id == version_record.package_id).first() if pkg: proj = db.query(Project).filter(Project.id == pkg.project_id).first() if proj: - # Get version if available 
- version_record = db.query(PackageVersion).filter( - PackageVersion.artifact_id == dep.artifact_id, - PackageVersion.package_id == pkg.id, - ).first() - dependents.append(DependentInfo( artifact_id=dep.artifact_id, project=proj.name, package=pkg.name, - version=version_record.version if version_record else None, - constraint_type="version" if dep.version_constraint else "tag", - constraint_value=dep.version_constraint or dep.tag_constraint, + version=version_record.version, + constraint_value=dep.version_constraint, )) total_pages = (total + limit - 1) // limit @@ -423,8 +410,7 @@ def _resolve_dependency_to_artifact( db: Session, project_name: str, package_name: str, - version: Optional[str], - tag: Optional[str], + version: str, ) -> Optional[Tuple[str, str, int]]: """ Resolve a dependency constraint to an artifact ID. @@ -432,7 +418,6 @@ def _resolve_dependency_to_artifact( Supports: - Exact version matching (e.g., '1.2.3') - Version constraints (e.g., '>=1.9', '<2.0,>=1.5') - - Tag matching - Wildcard ('*' for any version) Args: @@ -440,10 +425,9 @@ def _resolve_dependency_to_artifact( project_name: Project name package_name: Package name version: Version or version constraint - tag: Tag constraint Returns: - Tuple of (artifact_id, resolved_version_or_tag, size) or None if not found + Tuple of (artifact_id, resolved_version, size) or None if not found """ # Get project and package project = db.query(Project).filter(Project.name == project_name).first() @@ -457,50 +441,24 @@ def _resolve_dependency_to_artifact( if not package: return None - if version: - # Check if this is a version constraint (>=, <, etc.) 
or exact version - if _is_version_constraint(version): - result = _resolve_version_constraint(db, package, version) - if result: - return result - else: - # Look up by exact version - pkg_version = db.query(PackageVersion).filter( - PackageVersion.package_id == package.id, - PackageVersion.version == version, - ).first() - if pkg_version: - artifact = db.query(Artifact).filter( - Artifact.id == pkg_version.artifact_id - ).first() - if artifact: - return (artifact.id, version, artifact.size) - - # Also check if there's a tag with this exact name - tag_record = db.query(Tag).filter( - Tag.package_id == package.id, - Tag.name == version, + # Check if this is a version constraint (>=, <, etc.) or exact version + if _is_version_constraint(version): + result = _resolve_version_constraint(db, package, version) + if result: + return result + else: + # Look up by exact version + pkg_version = db.query(PackageVersion).filter( + PackageVersion.package_id == package.id, + PackageVersion.version == version, ).first() - if tag_record: + if pkg_version: artifact = db.query(Artifact).filter( - Artifact.id == tag_record.artifact_id + Artifact.id == pkg_version.artifact_id ).first() if artifact: return (artifact.id, version, artifact.size) - if tag: - # Look up by tag - tag_record = db.query(Tag).filter( - Tag.package_id == package.id, - Tag.name == tag, - ).first() - if tag_record: - artifact = db.query(Artifact).filter( - Artifact.id == tag_record.artifact_id - ).first() - if artifact: - return (artifact.id, tag, artifact.size) - return None @@ -560,9 +518,9 @@ def _detect_package_cycle( Package.name == package_name, ).first() if package: - # Find all artifacts in this package via tags - tags = db.query(Tag).filter(Tag.package_id == package.id).all() - artifact_ids = {t.artifact_id for t in tags} + # Find all artifacts in this package via versions + versions = db.query(PackageVersion).filter(PackageVersion.package_id == package.id).all() + artifact_ids = {v.artifact_id for v in 
versions} # Get dependencies from all artifacts in this package for artifact_id in artifact_ids: @@ -605,8 +563,8 @@ def check_circular_dependencies( db: Database session artifact_id: The artifact that will have these dependencies new_dependencies: Dependencies to be added - project_name: Project name (optional, will try to look up from tag if not provided) - package_name: Package name (optional, will try to look up from tag if not provided) + project_name: Project name (optional, will try to look up from version if not provided) + package_name: Package name (optional, will try to look up from version if not provided) Returns: Cycle path if detected, None otherwise @@ -615,17 +573,19 @@ def check_circular_dependencies( if project_name and package_name: current_path = f"{project_name}/{package_name}" else: - # Try to look up from tag + # Try to look up from version artifact = db.query(Artifact).filter(Artifact.id == artifact_id).first() if not artifact: return None - # Find package for this artifact - tag = db.query(Tag).filter(Tag.artifact_id == artifact_id).first() - if not tag: + # Find package for this artifact via version + version_record = db.query(PackageVersion).filter( + PackageVersion.artifact_id == artifact_id + ).first() + if not version_record: return None - package = db.query(Package).filter(Package.id == tag.package_id).first() + package = db.query(Package).filter(Package.id == version_record.package_id).first() if not package: return None @@ -682,7 +642,7 @@ def resolve_dependencies( db: Database session project_name: Project name package_name: Package name - ref: Tag or version reference + ref: Version reference (or artifact:hash) base_url: Base URL for download URLs Returns: @@ -715,7 +675,7 @@ def resolve_dependencies( root_version = artifact_id[:12] # Use short hash as version display root_size = artifact.size else: - # Try to find artifact by tag or version + # Try to find artifact by version resolved = _resolve_dependency_to_artifact( db, 
project_name, package_name, ref ) @@ -820,12 +780,11 @@ dep.dependency_project, dep.dependency_package, dep.version_constraint, - dep.tag_constraint, ) if not resolved_dep: # Dependency not cached on server - track as missing but continue - constraint = dep.version_constraint or dep.tag_constraint + constraint = dep.version_constraint missing_dependencies.append(MissingDependency( project=dep.dependency_project, package=dep.dependency_package, diff --git a/backend/app/models.py b/backend/app/models.py index 500d318..b01877c 100644 --- a/backend/app/models.py +++ b/backend/app/models.py @@ -71,7 +71,6 @@ class Package(Base): ) project = relationship("Project", back_populates="packages") - tags = relationship("Tag", back_populates="package", cascade="all, delete-orphan") uploads = relationship( "Upload", back_populates="package", cascade="all, delete-orphan" ) @@ -120,7 +119,6 @@ class Artifact(Base): ref_count = Column(Integer, default=1) s3_key = Column(String(1024), nullable=False) - tags = relationship("Tag", back_populates="artifact") uploads = relationship("Upload", back_populates="artifact") versions = relationship("PackageVersion", back_populates="artifact") dependencies = relationship( @@ -151,65 +149,6 @@ ) -class Tag(Base): - __tablename__ = "tags" - - id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - package_id = Column( - UUID(as_uuid=True), - ForeignKey("packages.id", ondelete="CASCADE"), - nullable=False, - ) - name = Column(String(255), nullable=False) - artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False) - created_at = Column(DateTime(timezone=True), default=datetime.utcnow) - updated_at = Column( - DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow - ) - created_by = Column(String(255), nullable=False) - - package = relationship("Package", back_populates="tags") - artifact = relationship("Artifact", back_populates="tags") - 
history = relationship( - "TagHistory", back_populates="tag", cascade="all, delete-orphan" - ) - - __table_args__ = ( - Index("idx_tags_package_id", "package_id"), - Index("idx_tags_artifact_id", "artifact_id"), - Index( - "idx_tags_package_name", "package_id", "name", unique=True - ), # Composite unique index - Index( - "idx_tags_package_created_at", "package_id", "created_at" - ), # For recent tags queries - ) - - -class TagHistory(Base): - __tablename__ = "tag_history" - - id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - tag_id = Column( - UUID(as_uuid=True), ForeignKey("tags.id", ondelete="CASCADE"), nullable=False - ) - old_artifact_id = Column(String(64), ForeignKey("artifacts.id")) - new_artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False) - change_type = Column(String(20), nullable=False, default="update") - changed_at = Column(DateTime(timezone=True), default=datetime.utcnow) - changed_by = Column(String(255), nullable=False) - - tag = relationship("Tag", back_populates="history") - - __table_args__ = ( - Index("idx_tag_history_tag_id", "tag_id"), - Index("idx_tag_history_changed_at", "changed_at"), - CheckConstraint( - "change_type IN ('create', 'update', 'delete')", name="check_change_type" - ), - ) - - class PackageVersion(Base): """Immutable version record for a package-artifact relationship. 
@@ -249,7 +188,7 @@ class Upload(Base): artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False) package_id = Column(UUID(as_uuid=True), ForeignKey("packages.id"), nullable=False) original_name = Column(String(1024)) - tag_name = Column(String(255)) # Tag assigned during upload + version = Column(String(255)) # Version assigned during upload user_agent = Column(String(512)) # Client identification duration_ms = Column(Integer) # Upload timing in milliseconds deduplicated = Column(Boolean, default=False) # Whether artifact was deduplicated @@ -524,8 +463,8 @@ class PackageHistory(Base): class ArtifactDependency(Base): """Dependency declared by an artifact on another package. - Each artifact can declare dependencies on other packages, specifying either - an exact version or a tag. This enables recursive dependency resolution. + Each artifact can declare dependencies on other packages, specifying a version. + This enables recursive dependency resolution. """ __tablename__ = "artifact_dependencies" @@ -538,20 +477,13 @@ class ArtifactDependency(Base): ) dependency_project = Column(String(255), nullable=False) dependency_package = Column(String(255), nullable=False) - version_constraint = Column(String(255), nullable=True) - tag_constraint = Column(String(255), nullable=True) + version_constraint = Column(String(255), nullable=False) created_at = Column(DateTime(timezone=True), default=datetime.utcnow) # Relationship to the artifact that declares this dependency artifact = relationship("Artifact", back_populates="dependencies") __table_args__ = ( - # Exactly one of version_constraint or tag_constraint must be set - CheckConstraint( - "(version_constraint IS NOT NULL AND tag_constraint IS NULL) OR " - "(version_constraint IS NULL AND tag_constraint IS NOT NULL)", - name="check_constraint_type", - ), # Each artifact can only depend on a specific project/package once Index( "idx_artifact_dependencies_artifact_id", diff --git 
a/backend/app/purge_seed_data.py b/backend/app/purge_seed_data.py index b4f5698..1b8d184 100644 --- a/backend/app/purge_seed_data.py +++ b/backend/app/purge_seed_data.py @@ -12,7 +12,6 @@ from .models import ( Project, Package, Artifact, - Tag, Upload, PackageVersion, ArtifactDependency, @@ -60,7 +59,6 @@ def purge_seed_data(db: Session) -> dict: results = { "dependencies_deleted": 0, - "tags_deleted": 0, "versions_deleted": 0, "uploads_deleted": 0, "artifacts_deleted": 0, @@ -103,15 +101,7 @@ def purge_seed_data(db: Session) -> dict: results["dependencies_deleted"] = count logger.info(f"Deleted {count} artifact dependencies") - # 2. Delete tags - if seed_package_ids: - count = db.query(Tag).filter(Tag.package_id.in_(seed_package_ids)).delete( - synchronize_session=False - ) - results["tags_deleted"] = count - logger.info(f"Deleted {count} tags") - - # 3. Delete package versions + # 2. Delete package versions if seed_package_ids: count = db.query(PackageVersion).filter( PackageVersion.package_id.in_(seed_package_ids) @@ -119,7 +109,7 @@ def purge_seed_data(db: Session) -> dict: results["versions_deleted"] = count logger.info(f"Deleted {count} package versions") - # 4. Delete uploads + # 3. Delete uploads if seed_package_ids: count = db.query(Upload).filter(Upload.package_id.in_(seed_package_ids)).delete( synchronize_session=False @@ -127,7 +117,7 @@ def purge_seed_data(db: Session) -> dict: results["uploads_deleted"] = count logger.info(f"Deleted {count} uploads") - # 5. Delete S3 objects for seed artifacts + # 4. Delete S3 objects for seed artifacts if seed_artifact_ids: seed_artifacts = db.query(Artifact).filter(Artifact.id.in_(seed_artifact_ids)).all() for artifact in seed_artifacts: @@ -139,8 +129,8 @@ def purge_seed_data(db: Session) -> dict: logger.warning(f"Failed to delete S3 object {artifact.s3_key}: {e}") logger.info(f"Deleted {results['s3_objects_deleted']} S3 objects") - # 6. 
Delete artifacts (only those with ref_count that would be 0 after our deletions) - # Since we deleted all tags/versions pointing to these artifacts, we can delete them + # 5. Delete artifacts (only those with ref_count that would be 0 after our deletions) + # Since we deleted all versions pointing to these artifacts, we can delete them if seed_artifact_ids: count = db.query(Artifact).filter(Artifact.id.in_(seed_artifact_ids)).delete( synchronize_session=False @@ -148,7 +138,7 @@ def purge_seed_data(db: Session) -> dict: results["artifacts_deleted"] = count logger.info(f"Deleted {count} artifacts") - # 7. Delete packages + # 6. Delete packages if seed_package_ids: count = db.query(Package).filter(Package.id.in_(seed_package_ids)).delete( synchronize_session=False @@ -156,7 +146,7 @@ def purge_seed_data(db: Session) -> dict: results["packages_deleted"] = count logger.info(f"Deleted {count} packages") - # 8. Delete access permissions for seed projects + # 7. Delete access permissions for seed projects if seed_project_ids: count = db.query(AccessPermission).filter( AccessPermission.project_id.in_(seed_project_ids) @@ -164,14 +154,14 @@ def purge_seed_data(db: Session) -> dict: results["permissions_deleted"] = count logger.info(f"Deleted {count} access permissions") - # 9. Delete seed projects + # 8. Delete seed projects count = db.query(Project).filter(Project.name.in_(SEED_PROJECT_NAMES)).delete( synchronize_session=False ) results["projects_deleted"] = count logger.info(f"Deleted {count} projects") - # 10. Find and delete seed team + # 9. Find and delete seed team seed_team = db.query(Team).filter(Team.slug == SEED_TEAM_SLUG).first() if seed_team: # Delete team memberships first @@ -186,7 +176,7 @@ def purge_seed_data(db: Session) -> dict: results["teams_deleted"] = 1 logger.info(f"Deleted team: {SEED_TEAM_SLUG}") - # 11. Delete seed users (but NOT admin) + # 10. 
Delete seed users (but NOT admin) seed_users = db.query(User).filter(User.username.in_(SEED_USERNAMES)).all() for user in seed_users: # Delete any remaining team memberships for this user diff --git a/backend/app/pypi_proxy.py b/backend/app/pypi_proxy.py index 46011be..7f07e89 100644 --- a/backend/app/pypi_proxy.py +++ b/backend/app/pypi_proxy.py @@ -20,7 +20,7 @@ from fastapi.responses import StreamingResponse, HTMLResponse from sqlalchemy.orm import Session from .database import get_db -from .models import UpstreamSource, CachedUrl, Artifact, Project, Package, Tag, PackageVersion +from .models import UpstreamSource, CachedUrl, Artifact, Project, Package, PackageVersion from .storage import S3Storage, get_storage from .config import get_env_upstream_sources @@ -646,20 +646,6 @@ async def pypi_download_file( db.add(package) db.flush() - # Create tag with filename - existing_tag = db.query(Tag).filter( - Tag.package_id == package.id, - Tag.name == filename, - ).first() - if not existing_tag: - tag = Tag( - package_id=package.id, - name=filename, - artifact_id=sha256, - created_by="pypi-proxy", - ) - db.add(tag) - # Extract and create version # Only create version for actual package files, not .metadata files version = _extract_pypi_version(filename) diff --git a/backend/app/repositories/__init__.py b/backend/app/repositories/__init__.py index 822b730..8284aef 100644 --- a/backend/app/repositories/__init__.py +++ b/backend/app/repositories/__init__.py @@ -9,7 +9,6 @@ from .base import BaseRepository from .project import ProjectRepository from .package import PackageRepository from .artifact import ArtifactRepository -from .tag import TagRepository from .upload import UploadRepository __all__ = [ @@ -17,6 +16,5 @@ __all__ = [ "ProjectRepository", "PackageRepository", "ArtifactRepository", - "TagRepository", "UploadRepository", ] diff --git a/backend/app/repositories/artifact.py b/backend/app/repositories/artifact.py index 8145407..ce84c45 100644 --- 
a/backend/app/repositories/artifact.py +++ b/backend/app/repositories/artifact.py @@ -8,7 +8,7 @@ from sqlalchemy import func, or_ from uuid import UUID from .base import BaseRepository -from ..models import Artifact, Tag, Upload, Package, Project +from ..models import Artifact, PackageVersion, Upload, Package, Project class ArtifactRepository(BaseRepository[Artifact]): @@ -77,14 +77,14 @@ class ArtifactRepository(BaseRepository[Artifact]): .all() ) - def get_artifacts_without_tags(self, limit: int = 100) -> List[Artifact]: - """Get artifacts that have no tags pointing to them.""" - # Subquery to find artifact IDs that have tags - tagged_artifacts = self.db.query(Tag.artifact_id).distinct().subquery() + def get_artifacts_without_versions(self, limit: int = 100) -> List[Artifact]: + """Get artifacts that have no versions pointing to them.""" + # Subquery to find artifact IDs that have versions + versioned_artifacts = self.db.query(PackageVersion.artifact_id).distinct().subquery() return ( self.db.query(Artifact) - .filter(~Artifact.id.in_(tagged_artifacts)) + .filter(~Artifact.id.in_(versioned_artifacts)) .limit(limit) .all() ) @@ -115,34 +115,34 @@ class ArtifactRepository(BaseRepository[Artifact]): return artifacts, total - def get_referencing_tags(self, artifact_id: str) -> List[Tuple[Tag, Package, Project]]: - """Get all tags referencing this artifact with package and project info.""" + def get_referencing_versions(self, artifact_id: str) -> List[Tuple[PackageVersion, Package, Project]]: + """Get all versions referencing this artifact with package and project info.""" return ( - self.db.query(Tag, Package, Project) - .join(Package, Tag.package_id == Package.id) + self.db.query(PackageVersion, Package, Project) + .join(Package, PackageVersion.package_id == Package.id) .join(Project, Package.project_id == Project.id) - .filter(Tag.artifact_id == artifact_id) + .filter(PackageVersion.artifact_id == artifact_id) .all() ) - def search(self, query_str: str, limit: int 
= 10) -> List[Tuple[Tag, Artifact, str, str]]: + def search(self, query_str: str, limit: int = 10) -> List[Tuple[PackageVersion, Artifact, str, str]]: """ - Search artifacts by tag name or original filename. - Returns (tag, artifact, package_name, project_name) tuples. + Search artifacts by version or original filename. + Returns (version, artifact, package_name, project_name) tuples. """ search_lower = query_str.lower() return ( - self.db.query(Tag, Artifact, Package.name, Project.name) - .join(Artifact, Tag.artifact_id == Artifact.id) - .join(Package, Tag.package_id == Package.id) + self.db.query(PackageVersion, Artifact, Package.name, Project.name) + .join(Artifact, PackageVersion.artifact_id == Artifact.id) + .join(Package, PackageVersion.package_id == Package.id) .join(Project, Package.project_id == Project.id) .filter( or_( - func.lower(Tag.name).contains(search_lower), + func.lower(PackageVersion.version).contains(search_lower), func.lower(Artifact.original_name).contains(search_lower) ) ) - .order_by(Tag.name) + .order_by(PackageVersion.version) .limit(limit) .all() ) diff --git a/backend/app/repositories/package.py b/backend/app/repositories/package.py index ffce857..fe4c7be 100644 --- a/backend/app/repositories/package.py +++ b/backend/app/repositories/package.py @@ -8,7 +8,7 @@ from sqlalchemy import func, or_, asc, desc from uuid import UUID from .base import BaseRepository -from ..models import Package, Project, Tag, Upload, Artifact +from ..models import Package, Project, PackageVersion, Upload, Artifact class PackageRepository(BaseRepository[Package]): @@ -136,10 +136,10 @@ class PackageRepository(BaseRepository[Package]): return self.update(package, **updates) def get_stats(self, package_id: UUID) -> dict: - """Get package statistics (tag count, artifact count, total size).""" - tag_count = ( - self.db.query(func.count(Tag.id)) - .filter(Tag.package_id == package_id) + """Get package statistics (version count, artifact count, total size).""" + 
version_count = ( + self.db.query(func.count(PackageVersion.id)) + .filter(PackageVersion.package_id == package_id) .scalar() or 0 ) @@ -154,7 +154,7 @@ class PackageRepository(BaseRepository[Package]): ) return { - "tag_count": tag_count, + "version_count": version_count, "artifact_count": artifact_stats[0] if artifact_stats else 0, "total_size": artifact_stats[1] if artifact_stats else 0, } diff --git a/backend/app/repositories/tag.py b/backend/app/repositories/tag.py deleted file mode 100644 index 4a87798..0000000 --- a/backend/app/repositories/tag.py +++ /dev/null @@ -1,168 +0,0 @@ -""" -Tag repository for data access operations. -""" - -from typing import Optional, List, Tuple -from sqlalchemy.orm import Session -from sqlalchemy import func, or_, asc, desc -from uuid import UUID - -from .base import BaseRepository -from ..models import Tag, TagHistory, Artifact, Package, Project - - -class TagRepository(BaseRepository[Tag]): - """Repository for Tag entity operations.""" - - model = Tag - - def get_by_name(self, package_id: UUID, name: str) -> Optional[Tag]: - """Get tag by name within a package.""" - return ( - self.db.query(Tag) - .filter(Tag.package_id == package_id, Tag.name == name) - .first() - ) - - def get_with_artifact(self, package_id: UUID, name: str) -> Optional[Tuple[Tag, Artifact]]: - """Get tag with its artifact.""" - return ( - self.db.query(Tag, Artifact) - .join(Artifact, Tag.artifact_id == Artifact.id) - .filter(Tag.package_id == package_id, Tag.name == name) - .first() - ) - - def exists_by_name(self, package_id: UUID, name: str) -> bool: - """Check if tag with name exists in package.""" - return self.db.query( - self.db.query(Tag) - .filter(Tag.package_id == package_id, Tag.name == name) - .exists() - ).scalar() - - def list_by_package( - self, - package_id: UUID, - page: int = 1, - limit: int = 20, - search: Optional[str] = None, - sort: str = "name", - order: str = "asc", - ) -> Tuple[List[Tuple[Tag, Artifact]], int]: - """ - List tags in 
a package with artifact metadata. - - Returns tuple of ((tag, artifact) tuples, total_count). - """ - query = ( - self.db.query(Tag, Artifact) - .join(Artifact, Tag.artifact_id == Artifact.id) - .filter(Tag.package_id == package_id) - ) - - # Apply search filter (tag name or artifact original filename) - if search: - search_lower = search.lower() - query = query.filter( - or_( - func.lower(Tag.name).contains(search_lower), - func.lower(Artifact.original_name).contains(search_lower) - ) - ) - - # Get total count - total = query.count() - - # Apply sorting - sort_columns = { - "name": Tag.name, - "created_at": Tag.created_at, - } - sort_column = sort_columns.get(sort, Tag.name) - if order == "desc": - query = query.order_by(desc(sort_column)) - else: - query = query.order_by(asc(sort_column)) - - # Apply pagination - offset = (page - 1) * limit - results = query.offset(offset).limit(limit).all() - - return results, total - - def create_tag( - self, - package_id: UUID, - name: str, - artifact_id: str, - created_by: str, - ) -> Tag: - """Create a new tag.""" - return self.create( - package_id=package_id, - name=name, - artifact_id=artifact_id, - created_by=created_by, - ) - - def update_artifact( - self, - tag: Tag, - new_artifact_id: str, - changed_by: str, - record_history: bool = True, - ) -> Tag: - """ - Update tag to point to a different artifact. - Optionally records change in tag history. 
- """ - old_artifact_id = tag.artifact_id - - if record_history and old_artifact_id != new_artifact_id: - history = TagHistory( - tag_id=tag.id, - old_artifact_id=old_artifact_id, - new_artifact_id=new_artifact_id, - changed_by=changed_by, - ) - self.db.add(history) - - tag.artifact_id = new_artifact_id - tag.created_by = changed_by - self.db.flush() - return tag - - def get_history(self, tag_id: UUID) -> List[TagHistory]: - """Get tag change history.""" - return ( - self.db.query(TagHistory) - .filter(TagHistory.tag_id == tag_id) - .order_by(TagHistory.changed_at.desc()) - .all() - ) - - def get_latest_in_package(self, package_id: UUID) -> Optional[Tag]: - """Get the most recently created/updated tag in a package.""" - return ( - self.db.query(Tag) - .filter(Tag.package_id == package_id) - .order_by(Tag.created_at.desc()) - .first() - ) - - def get_by_artifact(self, artifact_id: str) -> List[Tag]: - """Get all tags pointing to an artifact.""" - return ( - self.db.query(Tag) - .filter(Tag.artifact_id == artifact_id) - .all() - ) - - def count_by_artifact(self, artifact_id: str) -> int: - """Count tags pointing to an artifact.""" - return ( - self.db.query(func.count(Tag.id)) - .filter(Tag.artifact_id == artifact_id) - .scalar() or 0 - ) diff --git a/backend/app/routes.py b/backend/app/routes.py index decd1d0..df42f41 100644 --- a/backend/app/routes.py +++ b/backend/app/routes.py @@ -38,8 +38,6 @@ from .models import ( Project, Package, Artifact, - Tag, - TagHistory, Upload, UploadLock, Consumer, @@ -63,17 +61,10 @@ from .schemas import ( PackageUpdate, PackageResponse, PackageDetailResponse, - TagSummary, PACKAGE_FORMATS, PACKAGE_PLATFORMS, ArtifactDetailResponse, - ArtifactTagInfo, PackageArtifactResponse, - TagCreate, - TagResponse, - TagDetailResponse, - TagHistoryResponse, - TagHistoryDetailResponse, AuditLogResponse, UploadHistoryResponse, ArtifactProvenanceResponse, @@ -106,7 +97,6 @@ from .schemas import ( TimeBasedStatsResponse, StatsReportResponse, 
GlobalArtifactResponse, - GlobalTagResponse, LoginRequest, LoginResponse, ChangePasswordRequest, @@ -410,97 +400,6 @@ def _create_or_update_version( return pkg_version -def _create_or_update_tag( - db: Session, - package_id: str, - tag_name: str, - new_artifact_id: str, - user_id: str, -) -> tuple[Tag, bool, Optional[str]]: - """ - Create or update a tag, handling ref_count and history. - - Uses SELECT FOR UPDATE to prevent race conditions during concurrent uploads. - - Returns: - tuple of (tag, is_new, old_artifact_id) - - tag: The created/updated Tag object - - is_new: True if tag was created, False if updated - - old_artifact_id: Previous artifact_id if tag was updated, None otherwise - """ - # Use with_for_update() to lock the row and prevent race conditions - # during concurrent uploads to the same tag - existing_tag = ( - db.query(Tag) - .filter(Tag.package_id == package_id, Tag.name == tag_name) - .with_for_update() - .first() - ) - - if existing_tag: - old_artifact_id = existing_tag.artifact_id - - # Only process if artifact actually changed - if old_artifact_id != new_artifact_id: - # Record history - history = TagHistory( - tag_id=existing_tag.id, - old_artifact_id=old_artifact_id, - new_artifact_id=new_artifact_id, - change_type="update", - changed_by=user_id, - ) - db.add(history) - - # Update tag to point to new artifact - # NOTE: SQL trigger (tags_ref_count_update_trigger) handles ref_count: - # - Decrements old artifact's ref_count - # - Increments new artifact's ref_count - existing_tag.artifact_id = new_artifact_id - existing_tag.created_by = user_id - - logger.info( - f"Tag '{tag_name}' updated: {old_artifact_id[:12]}... -> {new_artifact_id[:12]}..." 
- ) - - return existing_tag, False, old_artifact_id - else: - # Same artifact, no change needed - return existing_tag, False, None - else: - # Create new tag with race condition handling - from sqlalchemy.exc import IntegrityError - - new_tag = Tag( - package_id=package_id, - name=tag_name, - artifact_id=new_artifact_id, - created_by=user_id, - ) - db.add(new_tag) - - try: - db.flush() # Get the tag ID - may fail if concurrent insert happened - except IntegrityError: - # Another request created the tag concurrently - # Rollback the failed insert and retry as update - db.rollback() - logger.info(f"Tag '{tag_name}' created concurrently, retrying as update") - return _create_or_update_tag(db, package_id, tag_name, new_artifact_id, user_id) - - # Record history for creation - history = TagHistory( - tag_id=new_tag.id, - old_artifact_id=None, - new_artifact_id=new_artifact_id, - change_type="create", - changed_by=user_id, - ) - db.add(history) - - return new_tag, True, None - - def _log_audit( db: Session, action: str, @@ -1358,7 +1257,7 @@ def global_search( db: Session = Depends(get_db), ): """ - Search across all entity types (projects, packages, artifacts/tags). + Search across all entity types (projects, packages, artifacts). Returns limited results for each type plus total counts. 
""" user_id = get_user_id(request) @@ -1390,27 +1289,27 @@ def global_search( package_count = package_query.count() package_results = package_query.order_by(Package.name).limit(limit).all() - # Search tags/artifacts (tag name and original filename) + # Search artifacts (version and original filename) artifact_query = ( db.query( - Tag, + PackageVersion, Artifact, Package.name.label("package_name"), Project.name.label("project_name"), ) - .join(Artifact, Tag.artifact_id == Artifact.id) - .join(Package, Tag.package_id == Package.id) + .join(Artifact, PackageVersion.artifact_id == Artifact.id) + .join(Package, PackageVersion.package_id == Package.id) .join(Project, Package.project_id == Project.id) .filter( or_(Project.is_public == True, Project.created_by == user_id), or_( - func.lower(Tag.name).contains(search_lower), + func.lower(PackageVersion.version).contains(search_lower), func.lower(Artifact.original_name).contains(search_lower), ), ) ) artifact_count = artifact_query.count() - artifact_results = artifact_query.order_by(Tag.name).limit(limit).all() + artifact_results = artifact_query.order_by(PackageVersion.version).limit(limit).all() return GlobalSearchResponse( query=q, @@ -1433,15 +1332,14 @@ def global_search( ], artifacts=[ SearchResultArtifact( - tag_id=tag.id, - tag_name=tag.name, + version=pkg_version.version, artifact_id=artifact.id, - package_id=tag.package_id, + package_id=pkg_version.package_id, package_name=package_name, project_name=project_name, original_name=artifact.original_name, ) - for tag, artifact, package_name, project_name in artifact_results + for pkg_version, artifact, package_name, project_name in artifact_results ], counts={ "projects": project_count, @@ -1786,7 +1684,7 @@ def delete_project( """ Delete a project and all its packages. Requires admin access. - Decrements ref_count for all artifacts referenced by tags in all packages + Decrements ref_count for all artifacts referenced by versions in all packages within this project. 
""" check_project_access(db, project_name, current_user, "admin") @@ -1807,21 +1705,21 @@ def delete_project( packages = db.query(Package).filter(Package.project_id == project.id).all() package_count = len(packages) - total_tags = 0 + total_versions = 0 artifact_ids = set() for package in packages: - tags = db.query(Tag).filter(Tag.package_id == package.id).all() - total_tags += len(tags) - for tag in tags: - artifact_ids.add(tag.artifact_id) + versions = db.query(PackageVersion).filter(PackageVersion.package_id == package.id).all() + total_versions += len(versions) + for version in versions: + artifact_ids.add(version.artifact_id) logger.info( f"Project '{project_name}' deletion: {package_count} packages, " - f"{total_tags} tags affecting {len(artifact_ids)} artifacts" + f"{total_versions} versions affecting {len(artifact_ids)} artifacts" ) - # Delete the project (cascade will delete packages, tags, etc.) - # NOTE: SQL triggers (tags_ref_count_delete_trigger) handle ref_count automatically + # Delete the project (cascade will delete packages, versions, etc.) 
+ # NOTE: SQL triggers handle ref_count automatically db.delete(project) db.commit() @@ -1834,7 +1732,7 @@ def delete_project( source_ip=request.client.host if request.client else None, details={ "packages_deleted": package_count, - "tags_deleted": total_tags, + "versions_deleted": total_versions, "artifacts_affected": list(artifact_ids), }, ) @@ -2825,33 +2723,32 @@ def list_packages( # Build detailed responses with aggregated data detailed_packages = [] for pkg in packages: - # Get tag count - tag_count = ( - db.query(func.count(Tag.id)).filter(Tag.package_id == pkg.id).scalar() or 0 + # Get version count + version_count = ( + db.query(func.count(PackageVersion.id)).filter(PackageVersion.package_id == pkg.id).scalar() or 0 ) - # Get unique artifact count and total size via tags - # (PyPI proxy creates tags without uploads, so query from tags) + # Get unique artifact count and total size via versions artifact_stats = ( db.query( - func.count(func.distinct(Tag.artifact_id)), + func.count(func.distinct(PackageVersion.artifact_id)), func.coalesce(func.sum(Artifact.size), 0), ) - .join(Artifact, Tag.artifact_id == Artifact.id) - .filter(Tag.package_id == pkg.id) + .join(Artifact, PackageVersion.artifact_id == Artifact.id) + .filter(PackageVersion.package_id == pkg.id) .first() ) artifact_count = artifact_stats[0] if artifact_stats else 0 total_size = artifact_stats[1] if artifact_stats else 0 - # Get latest tag - latest_tag_obj = ( - db.query(Tag) - .filter(Tag.package_id == pkg.id) - .order_by(Tag.created_at.desc()) + # Get latest version + latest_version_obj = ( + db.query(PackageVersion) + .filter(PackageVersion.package_id == pkg.id) + .order_by(PackageVersion.created_at.desc()) .first() ) - latest_tag = latest_tag_obj.name if latest_tag_obj else None + latest_version = latest_version_obj.version if latest_version_obj else None # Get latest upload timestamp latest_upload = ( @@ -2860,19 +2757,6 @@ def list_packages( .scalar() ) - # Get recent tags (limit 5) - 
recent_tags_objs = ( - db.query(Tag) - .filter(Tag.package_id == pkg.id) - .order_by(Tag.created_at.desc()) - .limit(5) - .all() - ) - recent_tags = [ - TagSummary(name=t.name, artifact_id=t.artifact_id, created_at=t.created_at) - for t in recent_tags_objs - ] - detailed_packages.append( PackageDetailResponse( id=pkg.id, @@ -2883,12 +2767,11 @@ def list_packages( platform=pkg.platform, created_at=pkg.created_at, updated_at=pkg.updated_at, - tag_count=tag_count, + version_count=version_count, artifact_count=artifact_count, total_size=total_size, - latest_tag=latest_tag, + latest_version=latest_version, latest_upload_at=latest_upload, - recent_tags=recent_tags, ) ) @@ -2911,9 +2794,6 @@ def list_packages( def get_package( project_name: str, package_name: str, - include_tags: bool = Query( - default=False, description="Include all tags (not just recent 5)" - ), db: Session = Depends(get_db), ): """Get a single package with full metadata""" @@ -2929,33 +2809,32 @@ def get_package( if not pkg: raise HTTPException(status_code=404, detail="Package not found") - # Get tag count - tag_count = ( - db.query(func.count(Tag.id)).filter(Tag.package_id == pkg.id).scalar() or 0 + # Get version count + version_count = ( + db.query(func.count(PackageVersion.id)).filter(PackageVersion.package_id == pkg.id).scalar() or 0 ) - # Get unique artifact count and total size via tags - # (PyPI proxy creates tags without uploads, so query from tags) + # Get unique artifact count and total size via versions artifact_stats = ( db.query( - func.count(func.distinct(Tag.artifact_id)), + func.count(func.distinct(PackageVersion.artifact_id)), func.coalesce(func.sum(Artifact.size), 0), ) - .join(Artifact, Tag.artifact_id == Artifact.id) - .filter(Tag.package_id == pkg.id) + .join(Artifact, PackageVersion.artifact_id == Artifact.id) + .filter(PackageVersion.package_id == pkg.id) .first() ) artifact_count = artifact_stats[0] if artifact_stats else 0 total_size = artifact_stats[1] if artifact_stats else 
0 - # Get latest tag - latest_tag_obj = ( - db.query(Tag) - .filter(Tag.package_id == pkg.id) - .order_by(Tag.created_at.desc()) + # Get latest version + latest_version_obj = ( + db.query(PackageVersion) + .filter(PackageVersion.package_id == pkg.id) + .order_by(PackageVersion.created_at.desc()) .first() ) - latest_tag = latest_tag_obj.name if latest_tag_obj else None + latest_version = latest_version_obj.version if latest_version_obj else None # Get latest upload timestamp latest_upload = ( @@ -2964,18 +2843,6 @@ def get_package( .scalar() ) - # Get tags (all if include_tags=true, else limit 5) - tags_query = ( - db.query(Tag).filter(Tag.package_id == pkg.id).order_by(Tag.created_at.desc()) - ) - if not include_tags: - tags_query = tags_query.limit(5) - tags_objs = tags_query.all() - recent_tags = [ - TagSummary(name=t.name, artifact_id=t.artifact_id, created_at=t.created_at) - for t in tags_objs - ] - return PackageDetailResponse( id=pkg.id, project_id=pkg.project_id, @@ -2985,12 +2852,11 @@ def get_package( platform=pkg.platform, created_at=pkg.created_at, updated_at=pkg.updated_at, - tag_count=tag_count, + version_count=version_count, artifact_count=artifact_count, total_size=total_size, - latest_tag=latest_tag, + latest_version=latest_version, latest_upload_at=latest_upload, - recent_tags=recent_tags, ) @@ -3148,9 +3014,9 @@ def delete_package( db: Session = Depends(get_db), ): """ - Delete a package and all its tags. + Delete a package and all its versions. - Decrements ref_count for all artifacts referenced by tags in this package. + Decrements ref_count for all artifacts referenced by versions in this package. The package's uploads records are preserved for audit purposes but will have null package_id after cascade. 
""" @@ -3168,18 +3034,18 @@ def delete_package( if not package: raise HTTPException(status_code=404, detail="Package not found") - # Get tags count and affected artifacts for logging - tags = db.query(Tag).filter(Tag.package_id == package.id).all() - artifact_ids = list(set(tag.artifact_id for tag in tags)) - tag_count = len(tags) + # Get version count and affected artifacts for logging + versions = db.query(PackageVersion).filter(PackageVersion.package_id == package.id).all() + artifact_ids = list(set(v.artifact_id for v in versions)) + version_count = len(versions) logger.info( - f"Package '{package_name}' deletion: {tag_count} tags affecting " + f"Package '{package_name}' deletion: {version_count} versions affecting " f"{len(artifact_ids)} artifacts" ) - # Delete the package (cascade will delete tags, which triggers ref_count decrements) - # NOTE: SQL triggers (tags_ref_count_delete_trigger) handle ref_count automatically + # Delete the package (cascade will delete versions, which triggers ref_count decrements) + # NOTE: SQL triggers handle ref_count automatically db.delete(package) db.commit() @@ -3191,7 +3057,7 @@ def delete_package( user_id=user_id, source_ip=request.client.host if request.client else None, details={ - "tags_deleted": tag_count, + "versions_deleted": version_count, "artifacts_affected": artifact_ids, }, ) @@ -3211,7 +3077,6 @@ def upload_artifact( request: Request, file: UploadFile = File(...), ensure: Optional[UploadFile] = File(None, description="Optional orchard.ensure file with dependencies"), - tag: Optional[str] = Form(None), version: Optional[str] = Form(None), db: Session = Depends(get_db), storage: S3Storage = Depends(get_storage), @@ -3250,15 +3115,11 @@ def upload_artifact( dependencies: - project: some-project package: some-lib - version: "1.2.3" # Exact version (mutually exclusive with tag) - - - project: another-project - package: another-lib - tag: stable # Tag reference (mutually exclusive with version) + version: "1.2.3" ``` 
**Dependency validation:** - - Each dependency must specify either `version` or `tag`, not both + - Each dependency must specify a version - Referenced projects must exist (packages are not validated at upload time) - Circular dependencies are rejected at upload time @@ -3267,7 +3128,7 @@ def upload_artifact( curl -X POST "http://localhost:8080/api/v1/project/myproject/mypackage/upload" \\ -H "Authorization: Bearer " \\ -F "file=@myfile.tar.gz" \\ - -F "tag=v1.0.0" + -F "version=1.0.0" ``` **Example with dependencies (curl):** @@ -3276,7 +3137,7 @@ def upload_artifact( -H "Authorization: Bearer " \\ -F "file=@myfile.tar.gz" \\ -F "ensure=@orchard.ensure" \\ - -F "tag=v1.0.0" + -F "version=1.0.0" ``` **Example (Python requests):** @@ -3286,7 +3147,7 @@ def upload_artifact( response = requests.post( 'http://localhost:8080/api/v1/project/myproject/mypackage/upload', files={'file': f}, - data={'tag': 'v1.0.0'}, + data={'version': '1.0.0'}, headers={'Authorization': 'Bearer '} ) ``` @@ -3295,7 +3156,7 @@ def upload_artifact( ```javascript const formData = new FormData(); formData.append('file', fileInput.files[0]); - formData.append('tag', 'v1.0.0'); + formData.append('version', '1.0.0'); const response = await fetch('/api/v1/project/myproject/mypackage/upload', { method: 'POST', headers: { 'Authorization': 'Bearer ' }, @@ -3491,8 +3352,7 @@ def upload_artifact( ) if artifact: # Artifact exists - this is a deduplicated upload - # NOTE: ref_count is managed by SQL triggers on tag INSERT/DELETE - # We don't manually increment here - the tag creation will trigger the increment + # NOTE: ref_count is managed by SQL triggers on PackageVersion INSERT/DELETE deduplicated = True saved_bytes = storage_result.size # Merge metadata if new metadata was extracted @@ -3511,8 +3371,8 @@ def upload_artifact( db.refresh(artifact) else: # Create new artifact with ref_count=0 - # NOTE: ref_count is managed by SQL triggers on tag INSERT/DELETE - # When a tag is created for this artifact, 
the trigger will increment ref_count + # NOTE: ref_count is managed by SQL triggers on PackageVersion INSERT/DELETE + # When a version is created for this artifact, the trigger will increment ref_count from sqlalchemy.exc import IntegrityError artifact = Artifact( @@ -3554,7 +3414,6 @@ def upload_artifact( artifact_id=storage_result.sha256, package_id=package.id, original_name=file.filename, - tag_name=tag, user_agent=user_agent[:512] if user_agent else None, # Truncate if too long duration_ms=duration_ms, deduplicated=deduplicated, @@ -3567,10 +3426,6 @@ def upload_artifact( db.add(upload) db.flush() # Flush to get upload ID - # Create or update tag if provided (with ref_count management and history) - if tag: - _create_or_update_tag(db, package.id, tag, storage_result.sha256, user_id) - # Create version record if version was detected pkg_version = None if detected_version: @@ -3648,7 +3503,6 @@ def upload_artifact( "size": storage_result.size, "deduplicated": deduplicated, "saved_bytes": saved_bytes, - "tag": tag, "duration_ms": duration_ms, "client_checksum_provided": client_checksum is not None, }, @@ -3688,7 +3542,6 @@ def upload_artifact( size=storage_result.size, project=project_name, package=package_name, - tag=tag, version=detected_version, version_source=version_source, checksum_md5=storage_result.md5, @@ -3758,7 +3611,7 @@ def init_resumable_upload( curl -X POST "http://localhost:8080/api/v1/project/myproject/mypackage/upload//complete" \\ -H "Authorization: Bearer " \\ -H "Content-Type: application/json" \\ - -d '{"tag": "v1.0.0"}' + -d '{}' ``` """ user_id = get_user_id(request) @@ -3795,11 +3648,7 @@ def init_resumable_upload( ) if existing_artifact: # File already exists - deduplicated upload - # NOTE: ref_count is managed by SQL triggers on tag INSERT/DELETE/UPDATE - # We do NOT manually increment here because: - # 1. If a tag is provided, _create_or_update_tag will create/update a tag - # and the SQL trigger will handle ref_count - # 2. 
If no tag is provided, ref_count shouldn't change (no new reference) + # NOTE: ref_count is managed by SQL triggers on PackageVersion INSERT/DELETE # Record the upload upload = Upload( @@ -3812,12 +3661,6 @@ def init_resumable_upload( ) db.add(upload) - # Create or update tag if provided (with ref_count management and history) - if init_request.tag: - _create_or_update_tag( - db, package.id, init_request.tag, init_request.expected_hash, user_id - ) - # Log deduplication event logger.info( f"Deduplication (resumable init): artifact {init_request.expected_hash[:12]}... " @@ -3836,7 +3679,6 @@ def init_resumable_upload( "size": init_request.size, "deduplicated": True, "saved_bytes": init_request.size, - "tag": init_request.tag, "resumable": True, }, ) @@ -4037,25 +3879,6 @@ def complete_resumable_upload( ) db.add(upload) - # Create tag if provided - if complete_request.tag: - existing_tag = ( - db.query(Tag) - .filter(Tag.package_id == package.id, Tag.name == complete_request.tag) - .first() - ) - if existing_tag: - existing_tag.artifact_id = sha256_hash - existing_tag.created_by = user_id - else: - new_tag = Tag( - package_id=package.id, - name=complete_request.tag, - artifact_id=sha256_hash, - created_by=user_id, - ) - db.add(new_tag) - db.commit() return ResumableUploadCompleteResponse( @@ -4063,7 +3886,6 @@ def complete_resumable_upload( size=size, project=project_name, package=package_name, - tag=complete_request.tag, ) @@ -4110,12 +3932,11 @@ def _resolve_artifact_ref( package: Package, db: Session, ) -> Optional[Artifact]: - """Resolve a reference (tag name, version, artifact:hash, tag:name, version:X.Y.Z) to an artifact. + """Resolve a reference (version, artifact:hash, version:X.Y.Z) to an artifact. Resolution order for implicit refs (no prefix): 1. Version (immutable) - 2. Tag (mutable) - 3. Artifact ID (direct hash) + 2. 
Artifact ID (direct hash) """ artifact = None @@ -4132,17 +3953,8 @@ def _resolve_artifact_ref( ) if pkg_version: artifact = db.query(Artifact).filter(Artifact.id == pkg_version.artifact_id).first() - elif ref.startswith("tag:"): - tag_name = ref[4:] - tag = ( - db.query(Tag) - .filter(Tag.package_id == package.id, Tag.name == tag_name) - .first() - ) - if tag: - artifact = db.query(Artifact).filter(Artifact.id == tag.artifact_id).first() else: - # Implicit ref: try version first, then tag, then artifact ID + # Implicit ref: try version first, then artifact ID # Try as version first pkg_version = ( db.query(PackageVersion) @@ -4152,15 +3964,8 @@ def _resolve_artifact_ref( if pkg_version: artifact = db.query(Artifact).filter(Artifact.id == pkg_version.artifact_id).first() else: - # Try as tag name - tag = ( - db.query(Tag).filter(Tag.package_id == package.id, Tag.name == ref).first() - ) - if tag: - artifact = db.query(Artifact).filter(Artifact.id == tag.artifact_id).first() - else: - # Try as direct artifact ID - artifact = db.query(Artifact).filter(Artifact.id == ref).first() + # Try as direct artifact ID + artifact = db.query(Artifact).filter(Artifact.id == ref).first() return artifact @@ -4192,7 +3997,7 @@ def download_artifact( ), ): """ - Download an artifact by reference (tag name, artifact:hash, tag:name). + Download an artifact by reference (version, artifact:hash). 
Supports conditional requests: - If-None-Match: Returns 304 Not Modified if ETag matches @@ -4734,17 +4539,8 @@ def list_versions( offset = (page - 1) * limit results = query.offset(offset).limit(limit).all() - # Get tags for each version's artifact version_responses = [] for pkg_version, artifact in results: - # Get tags pointing to this artifact in this package - tags = ( - db.query(Tag.name) - .filter(Tag.package_id == package.id, Tag.artifact_id == artifact.id) - .all() - ) - tag_names = [t[0] for t in tags] - version_responses.append( PackageVersionResponse( id=pkg_version.id, @@ -4757,7 +4553,6 @@ def list_versions( size=artifact.size, content_type=artifact.content_type, original_name=artifact.original_name, - tags=tag_names, ) ) @@ -4809,14 +4604,6 @@ def get_version( artifact = db.query(Artifact).filter(Artifact.id == pkg_version.artifact_id).first() - # Get tags pointing to this artifact - tags = ( - db.query(Tag.name) - .filter(Tag.package_id == package.id, Tag.artifact_id == artifact.id) - .all() - ) - tag_names = [t[0] for t in tags] - return PackageVersionDetailResponse( id=pkg_version.id, package_id=pkg_version.package_id, @@ -4828,7 +4615,6 @@ def get_version( size=artifact.size, content_type=artifact.content_type, original_name=artifact.original_name, - tags=tag_names, format_metadata=artifact.artifact_metadata, checksum_md5=artifact.checksum_md5, checksum_sha1=artifact.checksum_sha1, @@ -4887,421 +4673,6 @@ def delete_version( return Response(status_code=204) -# Tag routes -@router.get( - "/api/v1/project/{project_name}/{package_name}/tags", - response_model=PaginatedResponse[TagDetailResponse], -) -def list_tags( - project_name: str, - package_name: str, - page: int = Query(default=1, ge=1, description="Page number"), - limit: int = Query(default=20, ge=1, le=100, description="Items per page"), - search: Optional[str] = Query(default=None, description="Search by tag name"), - sort: str = Query(default="name", description="Sort field (name, 
created_at)"), - order: str = Query(default="asc", description="Sort order (asc, desc)"), - from_date: Optional[datetime] = Query( - default=None, alias="from", description="Filter tags created after this date" - ), - to_date: Optional[datetime] = Query( - default=None, alias="to", description="Filter tags created before this date" - ), - db: Session = Depends(get_db), -): - project = db.query(Project).filter(Project.name == project_name).first() - if not project: - raise HTTPException(status_code=404, detail="Project not found") - - package = ( - db.query(Package) - .filter(Package.project_id == project.id, Package.name == package_name) - .first() - ) - if not package: - raise HTTPException(status_code=404, detail="Package not found") - - # Validate sort field - valid_sort_fields = {"name": Tag.name, "created_at": Tag.created_at} - if sort not in valid_sort_fields: - raise HTTPException( - status_code=400, - detail=f"Invalid sort field. Must be one of: {', '.join(valid_sort_fields.keys())}", - ) - - # Validate order - if order not in ("asc", "desc"): - raise HTTPException( - status_code=400, detail="Invalid order. 
Must be 'asc' or 'desc'" - ) - - # Base query with JOIN to artifact for metadata and LEFT JOIN to version - query = ( - db.query(Tag, Artifact, PackageVersion.version) - .join(Artifact, Tag.artifact_id == Artifact.id) - .outerjoin( - PackageVersion, - and_( - PackageVersion.package_id == Tag.package_id, - PackageVersion.artifact_id == Tag.artifact_id, - ), - ) - .filter(Tag.package_id == package.id) - ) - - # Apply search filter (case-insensitive on tag name OR artifact original filename) - if search: - search_lower = search.lower() - query = query.filter( - or_( - func.lower(Tag.name).contains(search_lower), - func.lower(Artifact.original_name).contains(search_lower), - ) - ) - - # Apply date range filters - if from_date: - query = query.filter(Tag.created_at >= from_date) - if to_date: - query = query.filter(Tag.created_at <= to_date) - - # Get total count before pagination - total = query.count() - - # Apply sorting - sort_column = valid_sort_fields[sort] - if order == "desc": - query = query.order_by(sort_column.desc()) - else: - query = query.order_by(sort_column.asc()) - - # Apply pagination - offset = (page - 1) * limit - results = query.offset(offset).limit(limit).all() - - # Calculate total pages - total_pages = math.ceil(total / limit) if total > 0 else 1 - - # Build detailed responses with artifact metadata and version - detailed_tags = [] - for tag, artifact, version in results: - detailed_tags.append( - TagDetailResponse( - id=tag.id, - package_id=tag.package_id, - name=tag.name, - artifact_id=tag.artifact_id, - created_at=tag.created_at, - created_by=tag.created_by, - artifact_size=artifact.size, - artifact_content_type=artifact.content_type, - artifact_original_name=artifact.original_name, - artifact_created_at=artifact.created_at, - artifact_format_metadata=artifact.format_metadata, - version=version, - ) - ) - - return PaginatedResponse( - items=detailed_tags, - pagination=PaginationMeta( - page=page, - limit=limit, - total=total, - 
total_pages=total_pages, - has_more=page < total_pages, - ), - ) - - -@router.post( - "/api/v1/project/{project_name}/{package_name}/tags", response_model=TagResponse -) -def create_tag( - project_name: str, - package_name: str, - tag: TagCreate, - request: Request, - db: Session = Depends(get_db), - current_user: Optional[User] = Depends(get_current_user_optional), -): - """Create or update a tag. Requires write access.""" - project = check_project_access(db, project_name, current_user, "write") - user_id = current_user.username if current_user else get_user_id(request) - - package = ( - db.query(Package) - .filter(Package.project_id == project.id, Package.name == package_name) - .first() - ) - if not package: - raise HTTPException(status_code=404, detail="Package not found") - - # Verify artifact exists - artifact = db.query(Artifact).filter(Artifact.id == tag.artifact_id).first() - if not artifact: - raise HTTPException(status_code=404, detail="Artifact not found") - - # Create or update tag - existing = ( - db.query(Tag).filter(Tag.package_id == package.id, Tag.name == tag.name).first() - ) - if existing: - old_artifact_id = existing.artifact_id - existing.artifact_id = tag.artifact_id - existing.created_by = user_id - - # Audit log for tag update - _log_audit( - db=db, - action="tag.update", - resource=f"project/{project_name}/{package_name}/tag/{tag.name}", - user_id=user_id, - source_ip=request.client.host if request.client else None, - details={ - "old_artifact_id": old_artifact_id, - "new_artifact_id": tag.artifact_id, - }, - ) - - db.commit() - db.refresh(existing) - return existing - - db_tag = Tag( - package_id=package.id, - name=tag.name, - artifact_id=tag.artifact_id, - created_by=user_id, - ) - db.add(db_tag) - - # Audit log for tag create - _log_audit( - db=db, - action="tag.create", - resource=f"project/{project_name}/{package_name}/tag/{tag.name}", - user_id=user_id, - source_ip=request.client.host if request.client else None, - 
details={"artifact_id": tag.artifact_id}, - ) - - db.commit() - db.refresh(db_tag) - return db_tag - - -@router.get( - "/api/v1/project/{project_name}/{package_name}/tags/{tag_name}", - response_model=TagDetailResponse, -) -def get_tag( - project_name: str, - package_name: str, - tag_name: str, - db: Session = Depends(get_db), -): - """Get a single tag with full artifact metadata""" - project = db.query(Project).filter(Project.name == project_name).first() - if not project: - raise HTTPException(status_code=404, detail="Project not found") - - package = ( - db.query(Package) - .filter(Package.project_id == project.id, Package.name == package_name) - .first() - ) - if not package: - raise HTTPException(status_code=404, detail="Package not found") - - result = ( - db.query(Tag, Artifact, PackageVersion.version) - .join(Artifact, Tag.artifact_id == Artifact.id) - .outerjoin( - PackageVersion, - and_( - PackageVersion.package_id == Tag.package_id, - PackageVersion.artifact_id == Tag.artifact_id, - ), - ) - .filter(Tag.package_id == package.id, Tag.name == tag_name) - .first() - ) - - if not result: - raise HTTPException(status_code=404, detail="Tag not found") - - tag, artifact, version = result - return TagDetailResponse( - id=tag.id, - package_id=tag.package_id, - name=tag.name, - artifact_id=tag.artifact_id, - created_at=tag.created_at, - created_by=tag.created_by, - artifact_size=artifact.size, - artifact_content_type=artifact.content_type, - artifact_original_name=artifact.original_name, - artifact_created_at=artifact.created_at, - artifact_format_metadata=artifact.format_metadata, - version=version, - ) - - -@router.get( - "/api/v1/project/{project_name}/{package_name}/tags/{tag_name}/history", - response_model=PaginatedResponse[TagHistoryDetailResponse], -) -def get_tag_history( - project_name: str, - package_name: str, - tag_name: str, - page: int = Query(default=1, ge=1), - limit: int = Query(default=20, ge=1, le=100), - db: Session = Depends(get_db), -): - 
"""Get the history of artifact assignments for a tag with artifact metadata""" - project = db.query(Project).filter(Project.name == project_name).first() - if not project: - raise HTTPException(status_code=404, detail="Project not found") - - package = ( - db.query(Package) - .filter(Package.project_id == project.id, Package.name == package_name) - .first() - ) - if not package: - raise HTTPException(status_code=404, detail="Package not found") - - tag = ( - db.query(Tag).filter(Tag.package_id == package.id, Tag.name == tag_name).first() - ) - if not tag: - raise HTTPException(status_code=404, detail="Tag not found") - - # Get total count - total = ( - db.query(func.count(TagHistory.id)).filter(TagHistory.tag_id == tag.id).scalar() - or 0 - ) - - # Get paginated history with artifact metadata - offset = (page - 1) * limit - history_items = ( - db.query(TagHistory, Artifact) - .outerjoin(Artifact, TagHistory.new_artifact_id == Artifact.id) - .filter(TagHistory.tag_id == tag.id) - .order_by(TagHistory.changed_at.desc()) - .offset(offset) - .limit(limit) - .all() - ) - - # Build response with artifact metadata - items = [] - for history, artifact in history_items: - items.append( - TagHistoryDetailResponse( - id=history.id, - tag_id=history.tag_id, - tag_name=tag.name, - old_artifact_id=history.old_artifact_id, - new_artifact_id=history.new_artifact_id, - changed_at=history.changed_at, - changed_by=history.changed_by, - artifact_size=artifact.size if artifact else 0, - artifact_original_name=artifact.original_name if artifact else None, - artifact_content_type=artifact.content_type if artifact else None, - ) - ) - - total_pages = math.ceil(total / limit) if limit > 0 else 0 - return PaginatedResponse( - items=items, - pagination=PaginationMeta( - page=page, - limit=limit, - total=total, - total_pages=total_pages, - has_more=page < total_pages, - ), - ) - - -@router.delete( - "/api/v1/project/{project_name}/{package_name}/tags/{tag_name}", - status_code=204, -) -def 
delete_tag( - project_name: str, - package_name: str, - tag_name: str, - request: Request, - db: Session = Depends(get_db), -): - """ - Delete a tag and decrement the artifact's ref_count. - - Records the deletion in tag history before removing the tag. - """ - user_id = get_user_id(request) - - project = db.query(Project).filter(Project.name == project_name).first() - if not project: - raise HTTPException(status_code=404, detail="Project not found") - - package = ( - db.query(Package) - .filter(Package.project_id == project.id, Package.name == package_name) - .first() - ) - if not package: - raise HTTPException(status_code=404, detail="Package not found") - - tag = ( - db.query(Tag).filter(Tag.package_id == package.id, Tag.name == tag_name).first() - ) - if not tag: - raise HTTPException(status_code=404, detail="Tag not found") - - artifact_id = tag.artifact_id - - # Record deletion in history - history = TagHistory( - tag_id=tag.id, - old_artifact_id=artifact_id, - new_artifact_id=artifact_id, # Same artifact for delete record - change_type="delete", - changed_by=user_id, - ) - db.add(history) - db.flush() # Flush history before deleting tag (cascade will delete history) - - # NOTE: ref_count decrement is handled by SQL trigger (tags_ref_count_delete_trigger) - # when the tag is deleted below - logger.info(f"Tag '{tag_name}' deleted for artifact {artifact_id[:12]}...") - - # Delete the tag (SQL trigger will decrement ref_count) - db.delete(tag) - db.commit() - - # Audit log (after commit so we can query the updated ref_count) - artifact = db.query(Artifact).filter(Artifact.id == artifact_id).first() - _log_audit( - db, - action="tag.delete", - resource=f"project/{project_name}/{package_name}/tag/{tag_name}", - user_id=user_id, - source_ip=request.client.host if request.client else None, - details={ - "artifact_id": artifact_id, - "ref_count_after": artifact.ref_count if artifact else 0, - }, - ) - db.commit() # Commit the audit log - - return None - - # Consumer 
routes @router.get( "/api/v1/project/{project_name}/{package_name}/consumers", @@ -5428,16 +4799,16 @@ def list_package_artifacts( # Calculate total pages total_pages = math.ceil(total / limit) if total > 0 else 1 - # Build responses with tag info + # Build responses with version info artifact_responses = [] for artifact in artifacts: - # Get tags pointing to this artifact in this package - tags = ( - db.query(Tag.name) - .filter(Tag.package_id == package.id, Tag.artifact_id == artifact.id) - .all() + # Get version for this artifact in this package + version_obj = ( + db.query(PackageVersion.version) + .filter(PackageVersion.package_id == package.id, PackageVersion.artifact_id == artifact.id) + .first() ) - tag_names = [t.name for t in tags] + version = version_obj[0] if version_obj else None artifact_responses.append( PackageArtifactResponse( @@ -5448,7 +4819,7 @@ def list_package_artifacts( created_at=artifact.created_at, created_by=artifact.created_by, format_metadata=artifact.format_metadata, - tags=tag_names, + version=version, ) ) @@ -5472,9 +4843,9 @@ def list_package_artifacts( def list_all_artifacts( project: Optional[str] = Query(None, description="Filter by project name"), package: Optional[str] = Query(None, description="Filter by package name"), - tag: Optional[str] = Query( + version: Optional[str] = Query( None, - description="Filter by tag name. Supports wildcards (*) and comma-separated values", + description="Filter by version. Supports wildcards (*) and comma-separated values", ), content_type: Optional[str] = Query(None, description="Filter by content type"), min_size: Optional[int] = Query(None, ge=0, description="Minimum size in bytes"), @@ -5490,47 +4861,47 @@ def list_all_artifacts( db: Session = Depends(get_db), ): """ - List all artifacts globally with filtering by project, package, tag, etc. + List all artifacts globally with filtering by project, package, version, etc. 
- Returns artifacts with context about which projects/packages/tags reference them. + Returns artifacts with context about which projects/packages reference them. """ # Start with base query query = db.query(Artifact) - # If filtering by project/package/tag, need to join through tags - if project or package or tag: + # If filtering by project/package/version, need to join through versions + if project or package or version: # Subquery to get artifact IDs that match the filters - tag_query = ( - db.query(Tag.artifact_id) - .join(Package, Tag.package_id == Package.id) + version_query = ( + db.query(PackageVersion.artifact_id) + .join(Package, PackageVersion.package_id == Package.id) .join(Project, Package.project_id == Project.id) ) if project: - tag_query = tag_query.filter(Project.name == project) + version_query = version_query.filter(Project.name == project) if package: - tag_query = tag_query.filter(Package.name == package) - if tag: + version_query = version_query.filter(Package.name == package) + if version: # Support multiple values (comma-separated) and wildcards (*) - tag_values = [t.strip() for t in tag.split(",") if t.strip()] - if len(tag_values) == 1: - tag_val = tag_values[0] - if "*" in tag_val: + version_values = [v.strip() for v in version.split(",") if v.strip()] + if len(version_values) == 1: + version_val = version_values[0] + if "*" in version_val: # Wildcard: convert * to SQL LIKE % - tag_query = tag_query.filter( - Tag.name.ilike(tag_val.replace("*", "%")) + version_query = version_query.filter( + PackageVersion.version.ilike(version_val.replace("*", "%")) ) else: - tag_query = tag_query.filter(Tag.name == tag_val) + version_query = version_query.filter(PackageVersion.version == version_val) else: # Multiple values: check if any match (with wildcard support) - tag_conditions = [] - for tag_val in tag_values: - if "*" in tag_val: - tag_conditions.append(Tag.name.ilike(tag_val.replace("*", "%"))) + version_conditions = [] + for version_val in 
version_values: + if "*" in version_val: + version_conditions.append(PackageVersion.version.ilike(version_val.replace("*", "%"))) else: - tag_conditions.append(Tag.name == tag_val) - tag_query = tag_query.filter(or_(*tag_conditions)) - artifact_ids = tag_query.distinct().subquery() + version_conditions.append(PackageVersion.version == version_val) + version_query = version_query.filter(or_(*version_conditions)) + artifact_ids = version_query.distinct().subquery() query = query.filter(Artifact.id.in_(artifact_ids)) # Apply content type filter @@ -5573,18 +4944,18 @@ def list_all_artifacts( # Build responses with context items = [] for artifact in artifacts: - # Get all tags referencing this artifact with project/package info - tags_info = ( - db.query(Tag, Package, Project) - .join(Package, Tag.package_id == Package.id) + # Get all versions referencing this artifact with project/package info + versions_info = ( + db.query(PackageVersion, Package, Project) + .join(Package, PackageVersion.package_id == Package.id) .join(Project, Package.project_id == Project.id) - .filter(Tag.artifact_id == artifact.id) + .filter(PackageVersion.artifact_id == artifact.id) .all() ) - projects = list(set(proj.name for _, _, proj in tags_info)) - packages = list(set(f"{proj.name}/{pkg.name}" for _, pkg, proj in tags_info)) - tags = [f"{proj.name}/{pkg.name}:{t.name}" for t, pkg, proj in tags_info] + projects = list(set(proj.name for _, _, proj in versions_info)) + packages = list(set(f"{proj.name}/{pkg.name}" for _, pkg, proj in versions_info)) + versions = [f"{proj.name}/{pkg.name}:{v.version}" for v, pkg, proj in versions_info] items.append( GlobalArtifactResponse( @@ -5599,7 +4970,7 @@ def list_all_artifacts( ref_count=artifact.ref_count, projects=projects, packages=packages, - tags=tags, + versions=versions, ) ) @@ -5615,151 +4986,14 @@ def list_all_artifacts( ) -# Global tags listing -@router.get( - "/api/v1/tags", - response_model=PaginatedResponse[GlobalTagResponse], -) -def 
list_all_tags( - project: Optional[str] = Query(None, description="Filter by project name"), - package: Optional[str] = Query(None, description="Filter by package name"), - search: Optional[str] = Query( - None, - description="Search by tag name. Supports wildcards (*) and comma-separated values", - ), - from_date: Optional[datetime] = Query( - None, alias="from", description="Created after" - ), - to_date: Optional[datetime] = Query(None, alias="to", description="Created before"), - sort: Optional[str] = Query(None, description="Sort field: name, created_at"), - order: Optional[str] = Query("desc", description="Sort order: asc or desc"), - page: int = Query(1, ge=1), - limit: int = Query(20, ge=1, le=100), - db: Session = Depends(get_db), -): - """ - List all tags globally with filtering by project, package, name, etc. - """ - query = ( - db.query(Tag, Package, Project, Artifact, PackageVersion.version) - .join(Package, Tag.package_id == Package.id) - .join(Project, Package.project_id == Project.id) - .join(Artifact, Tag.artifact_id == Artifact.id) - .outerjoin( - PackageVersion, - and_( - PackageVersion.package_id == Tag.package_id, - PackageVersion.artifact_id == Tag.artifact_id, - ), - ) - ) - - # Apply filters - if project: - query = query.filter(Project.name == project) - if package: - query = query.filter(Package.name == package) - if search: - # Support multiple values (comma-separated) and wildcards (*) - search_values = [s.strip() for s in search.split(",") if s.strip()] - if len(search_values) == 1: - search_val = search_values[0] - if "*" in search_val: - query = query.filter(Tag.name.ilike(search_val.replace("*", "%"))) - else: - query = query.filter(Tag.name.ilike(f"%{search_val}%")) - else: - search_conditions = [] - for search_val in search_values: - if "*" in search_val: - search_conditions.append( - Tag.name.ilike(search_val.replace("*", "%")) - ) - else: - search_conditions.append(Tag.name.ilike(f"%{search_val}%")) - query = 
query.filter(or_(*search_conditions)) - if from_date: - query = query.filter(Tag.created_at >= from_date) - if to_date: - query = query.filter(Tag.created_at <= to_date) - - # Validate and apply sorting - valid_sort_fields = {"name": Tag.name, "created_at": Tag.created_at} - if sort and sort not in valid_sort_fields: - raise HTTPException( - status_code=400, - detail=f"Invalid sort field. Valid options: {', '.join(valid_sort_fields.keys())}", - ) - sort_column = valid_sort_fields.get(sort, Tag.created_at) - if order and order.lower() not in ("asc", "desc"): - raise HTTPException( - status_code=400, detail="Invalid order. Valid options: asc, desc" - ) - sort_order = ( - sort_column.asc() if order and order.lower() == "asc" else sort_column.desc() - ) - - total = query.count() - total_pages = math.ceil(total / limit) if total > 0 else 1 - - results = query.order_by(sort_order).offset((page - 1) * limit).limit(limit).all() - - items = [ - GlobalTagResponse( - id=tag.id, - name=tag.name, - artifact_id=tag.artifact_id, - created_at=tag.created_at, - created_by=tag.created_by, - project_name=proj.name, - package_name=pkg.name, - artifact_size=artifact.size, - artifact_content_type=artifact.content_type, - version=version, - ) - for tag, pkg, proj, artifact, version in results - ] - - return PaginatedResponse( - items=items, - pagination=PaginationMeta( - page=page, - limit=limit, - total=total, - total_pages=total_pages, - has_more=page < total_pages, - ), - ) - - # Artifact by ID @router.get("/api/v1/artifact/{artifact_id}", response_model=ArtifactDetailResponse) def get_artifact(artifact_id: str, db: Session = Depends(get_db)): - """Get artifact metadata including list of packages/tags referencing it""" + """Get artifact metadata including list of packages/versions referencing it""" artifact = db.query(Artifact).filter(Artifact.id == artifact_id).first() if not artifact: raise HTTPException(status_code=404, detail="Artifact not found") - # Get all tags referencing this 
artifact with package and project info - tags_with_context = ( - db.query(Tag, Package, Project) - .join(Package, Tag.package_id == Package.id) - .join(Project, Package.project_id == Project.id) - .filter(Tag.artifact_id == artifact_id) - .all() - ) - - tag_infos = [ - ArtifactTagInfo( - id=tag.id, - name=tag.name, - package_id=package.id, - package_name=package.name, - project_name=project.name, - ) - for tag, package, project in tags_with_context - ] - return ArtifactDetailResponse( id=artifact.id, sha256=artifact.id, # SHA256 hash is the artifact ID @@ -5773,7 +5007,6 @@ def get_artifact(artifact_id: str, db: Session = Depends(get_db)): created_by=artifact.created_by, ref_count=artifact.ref_count, format_metadata=artifact.format_metadata, - tags=tag_infos, ) @@ -5794,9 +5027,9 @@ def list_orphaned_artifacts( db: Session = Depends(get_db), ): """ - List artifacts with ref_count=0 (orphaned artifacts not referenced by any tag). + List artifacts with ref_count=0 (orphaned artifacts not referenced by any version). - These artifacts can be safely cleaned up as they are not referenced by any tag. + These artifacts can be safely cleaned up as they are not referenced by any version. 
""" orphaned = ( db.query(Artifact) @@ -6200,9 +5433,9 @@ def get_project_stats( db.query(Package.id).filter(Package.project_id == project.id).subquery() ) - # Tag count - tag_count = ( - db.query(func.count(Tag.id)).filter(Tag.package_id.in_(package_ids)).scalar() + # Version count + version_count = ( + db.query(func.count(PackageVersion.id)).filter(PackageVersion.package_id.in_(package_ids)).scalar() or 0 ) @@ -6243,7 +5476,7 @@ def get_project_stats( project_id=str(project.id), project_name=project.name, package_count=package_count, - tag_count=tag_count, + version_count=version_count, artifact_count=artifact_count, total_size_bytes=total_size_bytes, upload_count=upload_count, @@ -6280,19 +5513,19 @@ def get_package_stats( if not package: raise HTTPException(status_code=404, detail="Package not found") - # Tag count - tag_count = ( - db.query(func.count(Tag.id)).filter(Tag.package_id == package.id).scalar() or 0 + # Version count + version_count = ( + db.query(func.count(PackageVersion.id)).filter(PackageVersion.package_id == package.id).scalar() or 0 ) - # Artifact stats via tags (tags exist for both user uploads and PyPI proxy) + # Artifact stats via versions artifact_stats = ( db.query( - func.count(func.distinct(Tag.artifact_id)), + func.count(func.distinct(PackageVersion.artifact_id)), func.coalesce(func.sum(Artifact.size), 0), ) - .join(Artifact, Tag.artifact_id == Artifact.id) - .filter(Tag.package_id == package.id) + .join(Artifact, PackageVersion.artifact_id == Artifact.id) + .filter(PackageVersion.package_id == package.id) .first() ) artifact_count = artifact_stats[0] if artifact_stats else 0 @@ -6321,7 +5554,7 @@ def get_package_stats( package_id=str(package.id), package_name=package.name, project_name=project.name, - tag_count=tag_count, + version_count=version_count, artifact_count=artifact_count, total_size_bytes=total_size_bytes, upload_count=upload_count, @@ -6348,28 +5581,28 @@ def get_artifact_stats( if not artifact: raise 
HTTPException(status_code=404, detail="Artifact not found") - # Get all tags referencing this artifact - tags = ( - db.query(Tag, Package, Project) - .join(Package, Tag.package_id == Package.id) + # Get all versions referencing this artifact + versions = ( + db.query(PackageVersion, Package, Project) + .join(Package, PackageVersion.package_id == Package.id) .join(Project, Package.project_id == Project.id) - .filter(Tag.artifact_id == artifact_id) + .filter(PackageVersion.artifact_id == artifact_id) .all() ) - tag_list = [ + version_list = [ { - "tag_name": tag.name, + "version": v.version, "package_name": pkg.name, "project_name": proj.name, - "created_at": tag.created_at.isoformat() if tag.created_at else None, + "created_at": v.created_at.isoformat() if v.created_at else None, } - for tag, pkg, proj in tags + for v, pkg, proj in versions ] # Get unique projects and packages - projects = list(set(proj.name for _, _, proj in tags)) - packages = list(set(f"{proj.name}/{pkg.name}" for _, pkg, proj in tags)) + projects = list(set(proj.name for _, _, proj in versions)) + packages = list(set(f"{proj.name}/{pkg.name}" for _, pkg, proj in versions)) # Get first and last upload times upload_times = ( @@ -6386,7 +5619,7 @@ def get_artifact_stats( storage_savings=(artifact.ref_count - 1) * artifact.size if artifact.ref_count > 1 else 0, - tags=tag_list, + versions=version_list, projects=projects, packages=packages, first_uploaded=upload_times[0] if upload_times else None, @@ -6940,10 +6173,6 @@ def list_all_uploads( None, description="Filter by deduplication status" ), search: Optional[str] = Query(None, description="Search by original filename"), - tag: Optional[str] = Query( - None, - description="Filter by tag name. 
Supports wildcards (*) and comma-separated values", - ), sort: Optional[str] = Query( None, description="Sort field: uploaded_at, original_name, size" ), @@ -6962,7 +6191,6 @@ def list_all_uploads( - from/to: Filter by timestamp range - deduplicated: Filter by deduplication status - search: Search by original filename (case-insensitive) - - tag: Filter by tag name """ query = ( db.query(Upload, Package, Project, Artifact) @@ -6986,25 +6214,6 @@ def list_all_uploads( query = query.filter(Upload.deduplicated == deduplicated) if search: query = query.filter(Upload.original_name.ilike(f"%{search}%")) - if tag: - # Support multiple values (comma-separated) and wildcards (*) - tag_values = [t.strip() for t in tag.split(",") if t.strip()] - if len(tag_values) == 1: - tag_val = tag_values[0] - if "*" in tag_val: - query = query.filter(Upload.tag_name.ilike(tag_val.replace("*", "%"))) - else: - query = query.filter(Upload.tag_name == tag_val) - else: - tag_conditions = [] - for tag_val in tag_values: - if "*" in tag_val: - tag_conditions.append( - Upload.tag_name.ilike(tag_val.replace("*", "%")) - ) - else: - tag_conditions.append(Upload.tag_name == tag_val) - query = query.filter(or_(*tag_conditions)) # Validate and apply sorting valid_sort_fields = { @@ -7039,7 +6248,7 @@ def list_all_uploads( package_name=pkg.name, project_name=proj.name, original_name=upload.original_name, - tag_name=upload.tag_name, + version=upload.version, uploaded_at=upload.uploaded_at, uploaded_by=upload.uploaded_by, source_ip=upload.source_ip, @@ -7134,7 +6343,7 @@ def list_project_uploads( package_name=pkg.name, project_name=project_name, original_name=upload.original_name, - tag_name=upload.tag_name, + version=upload.version, uploaded_at=upload.uploaded_at, uploaded_by=upload.uploaded_by, source_ip=upload.source_ip, @@ -7212,7 +6421,7 @@ def list_package_uploads( package_name=package_name, project_name=project_name, original_name=upload.original_name, - tag_name=upload.tag_name, + 
version=upload.version, uploaded_at=upload.uploaded_at, uploaded_by=upload.uploaded_by, source_ip=upload.source_ip, @@ -7278,7 +6487,7 @@ def list_artifact_uploads( package_name=package.name if package else "unknown", project_name=project.name if project else "unknown", original_name=upload.original_name, - tag_name=upload.tag_name, + version=upload.version, uploaded_at=upload.uploaded_at, uploaded_by=upload.uploaded_by, source_ip=upload.source_ip, @@ -7336,15 +6545,15 @@ def get_artifact_provenance( # Get first upload info first_upload = uploads[0] if uploads else None - # Get all tags referencing this artifact - tags = db.query(Tag).filter(Tag.artifact_id == artifact_id).all() + # Get all versions referencing this artifact + versions = db.query(PackageVersion).filter(PackageVersion.artifact_id == artifact_id).all() - # Build package list with tags - package_map = {} # package_id -> {project_name, package_name, tag_names} - tag_list = [] + # Build package list with versions + package_map = {} # package_id -> {project_name, package_name, versions} + version_list = [] - for tag in tags: - package = db.query(Package).filter(Package.id == tag.package_id).first() + for version in versions: + package = db.query(Package).filter(Package.id == version.package_id).first() if package: project = db.query(Project).filter(Project.id == package.project_id).first() project_name = project.name if project else "unknown" @@ -7355,18 +6564,18 @@ def get_artifact_provenance( package_map[pkg_key] = { "project_name": project_name, "package_name": package.name, - "tag_names": [], + "versions": [], } - package_map[pkg_key]["tag_names"].append(tag.name) + package_map[pkg_key]["versions"].append(version.version) - # Add to tag list - tag_list.append( + # Add to version list + version_list.append( { "project_name": project_name, "package_name": package.name, - "tag_name": tag.name, - "created_at": tag.created_at.isoformat() - if tag.created_at + "version": version.version, + "created_at": 
version.created_at.isoformat() + if version.created_at else None, } ) @@ -7386,7 +6595,6 @@ def get_artifact_provenance( "project_name": project.name if project else "unknown", "package_name": package.name if package else "unknown", "original_name": upload.original_name, - "tag_name": upload.tag_name, "uploaded_at": upload.uploaded_at.isoformat() if upload.uploaded_at else None, @@ -7412,7 +6620,7 @@ def get_artifact_provenance( else artifact.created_by, upload_count=len(uploads), packages=list(package_map.values()), - tags=tag_list, + versions=version_list, uploads=upload_history, ) @@ -7583,7 +6791,7 @@ def get_dependencies_by_ref( """ Get dependencies for an artifact by project/package/ref. - The ref can be a tag name or version. + The ref can be a version or artifact ID prefix. """ # Check project access (handles private project authorization) project = check_project_access(db, project_name, current_user, "read") @@ -7598,22 +6806,13 @@ def get_dependencies_by_ref( # Try to resolve ref to an artifact artifact_id = None - # Try as tag first - tag = db.query(Tag).filter( - Tag.package_id == package.id, - Tag.name == ref, + # Try as version first + version_record = db.query(PackageVersion).filter( + PackageVersion.package_id == package.id, + PackageVersion.version == ref, ).first() - if tag: - artifact_id = tag.artifact_id - - # Try as version if not found as tag - if not artifact_id: - version_record = db.query(PackageVersion).filter( - PackageVersion.package_id == package.id, - PackageVersion.version == ref, - ).first() - if version_record: - artifact_id = version_record.artifact_id + if version_record: + artifact_id = version_record.artifact_id # Try as artifact ID prefix if not artifact_id and len(ref) >= 8: @@ -7667,22 +6866,13 @@ def get_ensure_file( # Resolve ref to artifact artifact_id = None - # Try as tag first - tag = db.query(Tag).filter( - Tag.package_id == package.id, - Tag.name == ref, + # Try as version first + version = 
db.query(PackageVersion).filter( + PackageVersion.package_id == package.id, + PackageVersion.version == ref, ).first() - if tag: - artifact_id = tag.artifact_id - - # Try as version - if not artifact_id: - version = db.query(PackageVersion).filter( - PackageVersion.package_id == package.id, - PackageVersion.version == ref, - ).first() - if version: - artifact_id = version.artifact_id + if version: + artifact_id = version.artifact_id # Try as artifact ID prefix if not artifact_id and len(ref) >= 8: @@ -7738,8 +6928,6 @@ def get_ensure_file( lines.append(f" project: {dep.project} # Cross-project dependency") if dep.version: lines.append(f" version: \"{dep.version}\"") - elif dep.tag: - lines.append(f" tag: {dep.tag}") # Suggest a path based on package name lines.append(f" path: {dep.package}/") else: @@ -8009,20 +7197,20 @@ def cache_artifact( - `url` (required): URL to fetch the artifact from - `source_type` (required): Type of source (npm, pypi, maven, docker, helm, nuget, deb, rpm, generic) - `package_name` (optional): Package name in system project (auto-derived from URL if not provided) - - `tag` (optional): Tag name in system project (auto-derived from URL if not provided) + - `version` (optional): Version in system project (auto-derived from URL if not provided) - `user_project` (optional): Also create reference in this user project - `user_package` (optional): Package name in user project (required if user_project specified) - - `user_tag` (optional): Tag name in user project (defaults to system tag) + - `user_version` (optional): Version in user project (defaults to system version) - `expected_hash` (optional): Verify downloaded content matches this SHA256 hash **Behavior:** 1. Checks if URL is already cached (fast lookup by URL hash) - 2. If cached: Returns existing artifact info, optionally creates user tag + 2. If cached: Returns existing artifact info, optionally creates user version 3. 
If not cached: - Fetches via configured upstream source (with auth if configured) - Stores artifact in S3 (content-addressable) - - Creates system project/package/tag (e.g., _npm/lodash:4.17.21) - - Optionally creates tag in user project + - Creates system project/package/version (e.g., _npm/lodash/+/4.17.21) + - Optionally creates version in user project - Records URL mapping for provenance **Example (curl):** @@ -8043,7 +7231,7 @@ def cache_artifact( # Parse URL to extract package info parsed_url = parse_url(cache_request.url, cache_request.source_type) package_name = cache_request.package_name or parsed_url.package_name - tag_name = cache_request.tag or parsed_url.version + version_str = cache_request.version or parsed_url.version # Check if URL is already cached url_hash = CachedUrl.compute_url_hash(cache_request.url) @@ -8068,7 +7256,7 @@ def cache_artifact( db=db, user_project_name=cache_request.user_project, user_package_name=cache_request.user_package, - user_tag_name=cache_request.user_tag or tag_name, + user_version=cache_request.user_version or version_str, artifact_id=artifact.id, current_user=current_user, ) @@ -8099,7 +7287,7 @@ def cache_artifact( source_name=cached_url.source.name if cached_url.source else None, system_project=system_project_name, system_package=package_name, - system_tag=tag_name, + system_version=version_str, user_reference=user_reference, ) @@ -8187,10 +7375,10 @@ def cache_artifact( db, system_project, package_name, cache_request.source_type ) - # Create tag in system package - if tag_name: - _create_or_update_tag( - db, system_package.id, tag_name, artifact.id, "system" + # Create version in system package + if version_str: + _create_or_update_version( + db, system_package.id, artifact.id, version_str, "cache", "system" ) # Find the matched source for provenance @@ -8217,7 +7405,7 @@ def cache_artifact( db=db, user_project_name=cache_request.user_project, user_package_name=cache_request.user_package, - 
user_tag_name=cache_request.user_tag or tag_name, + user_version=cache_request.user_version or version_str, artifact_id=artifact.id, current_user=current_user, ) @@ -8236,7 +7424,7 @@ def cache_artifact( "source_name": matched_source.name if matched_source else None, "system_project": system_project.name, "system_package": system_package.name, - "system_tag": tag_name, + "system_version": version_str, }, ) @@ -8252,7 +7440,7 @@ def cache_artifact( source_name=matched_source.name if matched_source else None, system_project=system_project.name, system_package=system_package.name, - system_tag=tag_name, + system_version=version_str, user_reference=user_reference, ) @@ -8274,7 +7462,7 @@ def _create_user_cache_reference( db: Session, user_project_name: str, user_package_name: str, - user_tag_name: str, + user_version: str, artifact_id: str, current_user: User, ) -> str: @@ -8285,12 +7473,12 @@ def _create_user_cache_reference( db: Database session. user_project_name: User's project name. user_package_name: Package name in user's project. - user_tag_name: Tag name in user's project. + user_version: Version in user's project. artifact_id: The artifact ID to reference. current_user: The current user (for auth check). 
Returns: - Reference string like "my-app/npm-deps:lodash-4.17.21" + Reference string like "my-app/npm-deps/+/4.17.21" """ # Check user has write access to the project user_project = check_project_access(db, user_project_name, current_user, "write") @@ -8300,12 +7488,12 @@ def _create_user_cache_reference( db, user_project, user_package_name, "generic" ) - # Create tag - if user_tag_name: - _create_or_update_tag( - db, user_package.id, user_tag_name, artifact_id, current_user.username + # Create version + if user_version: + _create_or_update_version( + db, user_package.id, artifact_id, user_version, "cache", current_user.username ) - return f"{user_project_name}/{user_package_name}:{user_tag_name}" + return f"{user_project_name}/{user_package_name}/+/{user_version}" return f"{user_project_name}/{user_package_name}" @@ -8340,7 +7528,7 @@ def cache_resolve( - `version` (required): Package version - `user_project` (optional): Also create reference in this user project - `user_package` (optional): Package name in user project - - `user_tag` (optional): Tag name in user project + - `user_version` (optional): Version in user project **Example (curl):** ```bash @@ -8488,10 +7676,10 @@ def cache_resolve( url=download_url, source_type="pypi", package_name=normalized_package, - tag=matched_filename or resolve_request.version, + version=matched_filename or resolve_request.version, user_project=resolve_request.user_project, user_package=resolve_request.user_package, - user_tag=resolve_request.user_tag, + user_version=resolve_request.user_version, ) # Call the cache logic diff --git a/backend/app/schemas.py b/backend/app/schemas.py index 481891a..46931b4 100644 --- a/backend/app/schemas.py +++ b/backend/app/schemas.py @@ -114,14 +114,6 @@ class PackageUpdate(BaseModel): platform: Optional[str] = None -class TagSummary(BaseModel): - """Lightweight tag info for embedding in package responses""" - - name: str - artifact_id: str - created_at: datetime - - class 
PackageDetailResponse(BaseModel): """Package with aggregated metadata""" @@ -134,13 +126,9 @@ class PackageDetailResponse(BaseModel): created_at: datetime updated_at: datetime # Aggregated fields - tag_count: int = 0 artifact_count: int = 0 total_size: int = 0 - latest_tag: Optional[str] = None latest_upload_at: Optional[datetime] = None - # Recent tags (limit 5) - recent_tags: List[TagSummary] = [] class Config: from_attributes = True @@ -165,79 +153,6 @@ class ArtifactResponse(BaseModel): from_attributes = True -# Tag schemas -class TagCreate(BaseModel): - name: str - artifact_id: str - - -class TagResponse(BaseModel): - id: UUID - package_id: UUID - name: str - artifact_id: str - created_at: datetime - created_by: str - version: Optional[str] = None # Version of the artifact this tag points to - - class Config: - from_attributes = True - - -class TagDetailResponse(BaseModel): - """Tag with embedded artifact metadata""" - - id: UUID - package_id: UUID - name: str - artifact_id: str - created_at: datetime - created_by: str - version: Optional[str] = None # Version of the artifact this tag points to - # Artifact metadata - artifact_size: int - artifact_content_type: Optional[str] - artifact_original_name: Optional[str] - artifact_created_at: datetime - artifact_format_metadata: Optional[Dict[str, Any]] = None - - class Config: - from_attributes = True - - -class TagHistoryResponse(BaseModel): - """History entry for tag changes""" - - id: UUID - tag_id: UUID - old_artifact_id: Optional[str] - new_artifact_id: str - changed_at: datetime - changed_by: str - - class Config: - from_attributes = True - - -class TagHistoryDetailResponse(BaseModel): - """Tag history with artifact metadata for each version""" - - id: UUID - tag_id: UUID - tag_name: str - old_artifact_id: Optional[str] - new_artifact_id: str - changed_at: datetime - changed_by: str - # Artifact metadata for new artifact - artifact_size: int - artifact_original_name: Optional[str] - artifact_content_type: 
Optional[str] - - class Config: - from_attributes = True - - # Audit log schemas class AuditLogResponse(BaseModel): """Audit log entry response""" @@ -264,7 +179,7 @@ class UploadHistoryResponse(BaseModel): package_name: str project_name: str original_name: Optional[str] - tag_name: Optional[str] + version: Optional[str] uploaded_at: datetime uploaded_by: str source_ip: Optional[str] @@ -306,18 +221,8 @@ class ArtifactProvenanceResponse(BaseModel): from_attributes = True -class ArtifactTagInfo(BaseModel): - """Tag info for embedding in artifact responses""" - - id: UUID - name: str - package_id: UUID - package_name: str - project_name: str - - class ArtifactDetailResponse(BaseModel): - """Artifact with list of tags/packages referencing it""" + """Artifact with metadata""" id: str sha256: str # Explicit SHA256 field (same as id) @@ -331,14 +236,13 @@ class ArtifactDetailResponse(BaseModel): created_by: str ref_count: int format_metadata: Optional[Dict[str, Any]] = None - tags: List[ArtifactTagInfo] = [] class Config: from_attributes = True class PackageArtifactResponse(BaseModel): - """Artifact with tags for package artifact listing""" + """Artifact for package artifact listing""" id: str sha256: str # Explicit SHA256 field (same as id) @@ -351,7 +255,6 @@ class PackageArtifactResponse(BaseModel): created_at: datetime created_by: str format_metadata: Optional[Dict[str, Any]] = None - tags: List[str] = [] # Tag names pointing to this artifact class Config: from_attributes = True @@ -369,28 +272,9 @@ class GlobalArtifactResponse(BaseModel): created_by: str format_metadata: Optional[Dict[str, Any]] = None ref_count: int = 0 - # Context from tags/packages + # Context from versions/packages projects: List[str] = [] # List of project names containing this artifact packages: List[str] = [] # List of "project/package" paths - tags: List[str] = [] # List of "project/package:tag" references - - class Config: - from_attributes = True - - -class GlobalTagResponse(BaseModel): - 
"""Tag with project/package context for global listing""" - - id: UUID - name: str - artifact_id: str - created_at: datetime - created_by: str - project_name: str - package_name: str - artifact_size: Optional[int] = None - artifact_content_type: Optional[str] = None - version: Optional[str] = None # Version of the artifact this tag points to class Config: from_attributes = True @@ -403,7 +287,6 @@ class UploadResponse(BaseModel): size: int project: str package: str - tag: Optional[str] version: Optional[str] = None # Version assigned to this artifact version_source: Optional[str] = None # How version was determined: 'explicit', 'filename', 'metadata' checksum_md5: Optional[str] = None @@ -430,7 +313,6 @@ class ResumableUploadInitRequest(BaseModel): filename: str content_type: Optional[str] = None size: int - tag: Optional[str] = None version: Optional[str] = None # Explicit version (auto-detected if not provided) @field_validator("expected_hash") @@ -465,7 +347,7 @@ class ResumableUploadPartResponse(BaseModel): class ResumableUploadCompleteRequest(BaseModel): """Request to complete a resumable upload""" - tag: Optional[str] = None + pass class ResumableUploadCompleteResponse(BaseModel): @@ -475,7 +357,6 @@ class ResumableUploadCompleteResponse(BaseModel): size: int project: str package: str - tag: Optional[str] class ResumableUploadStatusResponse(BaseModel): @@ -528,7 +409,6 @@ class PackageVersionResponse(BaseModel): size: Optional[int] = None content_type: Optional[str] = None original_name: Optional[str] = None - tags: List[str] = [] # Tag names pointing to this artifact class Config: from_attributes = True @@ -570,11 +450,10 @@ class SearchResultPackage(BaseModel): class SearchResultArtifact(BaseModel): - """Artifact/tag result for global search""" + """Artifact result for global search""" - tag_id: UUID - tag_name: str artifact_id: str + version: Optional[str] package_id: UUID package_name: str project_name: str @@ -687,7 +566,6 @@ class 
ProjectStatsResponse(BaseModel): project_id: str project_name: str package_count: int - tag_count: int artifact_count: int total_size_bytes: int upload_count: int @@ -702,7 +580,6 @@ class PackageStatsResponse(BaseModel): package_id: str package_name: str project_name: str - tag_count: int artifact_count: int total_size_bytes: int upload_count: int @@ -719,7 +596,6 @@ class ArtifactStatsResponse(BaseModel): size: int ref_count: int storage_savings: int # (ref_count - 1) * size - tags: List[Dict[str, Any]] # Tags referencing this artifact projects: List[str] # Projects using this artifact packages: List[str] # Packages using this artifact first_uploaded: Optional[datetime] = None @@ -930,20 +806,7 @@ class DependencyCreate(BaseModel): """Schema for creating a dependency""" project: str package: str - version: Optional[str] = None - tag: Optional[str] = None - - @field_validator('version', 'tag') - @classmethod - def validate_constraint(cls, v, info): - return v - - def model_post_init(self, __context): - """Validate that exactly one of version or tag is set""" - if self.version is None and self.tag is None: - raise ValueError("Either 'version' or 'tag' must be specified") - if self.version is not None and self.tag is not None: - raise ValueError("Cannot specify both 'version' and 'tag'") + version: str class DependencyResponse(BaseModel): @@ -952,8 +815,7 @@ class DependencyResponse(BaseModel): artifact_id: str project: str package: str - version: Optional[str] = None - tag: Optional[str] = None + version: str created_at: datetime class Config: @@ -968,7 +830,6 @@ class DependencyResponse(BaseModel): project=dep.dependency_project, package=dep.dependency_package, version=dep.version_constraint, - tag=dep.tag_constraint, created_at=dep.created_at, ) @@ -985,7 +846,6 @@ class DependentInfo(BaseModel): project: str package: str version: Optional[str] = None - constraint_type: str # 'version' or 'tag' constraint_value: str @@ -1001,20 +861,7 @@ class 
EnsureFileDependency(BaseModel): """Dependency entry from orchard.ensure file""" project: str package: str - version: Optional[str] = None - tag: Optional[str] = None - - @field_validator('version', 'tag') - @classmethod - def validate_constraint(cls, v, info): - return v - - def model_post_init(self, __context): - """Validate that exactly one of version or tag is set""" - if self.version is None and self.tag is None: - raise ValueError("Either 'version' or 'tag' must be specified") - if self.version is not None and self.tag is not None: - raise ValueError("Cannot specify both 'version' and 'tag'") + version: str class EnsureFileContent(BaseModel): @@ -1028,7 +875,6 @@ class ResolvedArtifact(BaseModel): project: str package: str version: Optional[str] = None - tag: Optional[str] = None size: int download_url: str @@ -1054,7 +900,7 @@ class DependencyConflict(BaseModel): """Details about a dependency conflict""" project: str package: str - requirements: List[Dict[str, Any]] # version/tag and required_by info + requirements: List[Dict[str, Any]] # version and required_by info class DependencyConflictError(BaseModel): @@ -1388,10 +1234,10 @@ class CacheRequest(BaseModel): url: str source_type: str package_name: Optional[str] = None # Auto-derived from URL if not provided - tag: Optional[str] = None # Auto-derived from URL if not provided + version: Optional[str] = None # Auto-derived from URL if not provided user_project: Optional[str] = None # Cross-reference to user project user_package: Optional[str] = None - user_tag: Optional[str] = None + user_version: Optional[str] = None expected_hash: Optional[str] = None # Verify downloaded content @field_validator('url') @@ -1438,8 +1284,8 @@ class CacheResponse(BaseModel): source_name: Optional[str] system_project: str system_package: str - system_tag: Optional[str] - user_reference: Optional[str] = None # e.g., "my-app/npm-deps:lodash-4.17.21" + system_version: Optional[str] + user_reference: Optional[str] = None # e.g., 
"my-app/npm-deps/+/4.17.21" class CacheResolveRequest(BaseModel): @@ -1453,7 +1299,7 @@ class CacheResolveRequest(BaseModel): version: str user_project: Optional[str] = None user_package: Optional[str] = None - user_tag: Optional[str] = None + user_version: Optional[str] = None @field_validator('source_type') @classmethod diff --git a/backend/app/seed.py b/backend/app/seed.py index 9a18e66..7861a54 100644 --- a/backend/app/seed.py +++ b/backend/app/seed.py @@ -5,7 +5,7 @@ import hashlib import logging from sqlalchemy.orm import Session -from .models import Project, Package, Artifact, Tag, Upload, PackageVersion, ArtifactDependency, Team, TeamMembership, User +from .models import Project, Package, Artifact, Upload, PackageVersion, ArtifactDependency, Team, TeamMembership, User from .storage import get_storage from .auth import hash_password @@ -125,14 +125,14 @@ TEST_ARTIFACTS = [ ] # Dependencies to create (source artifact -> dependency) -# Format: (source_project, source_package, source_version, dep_project, dep_package, version_constraint, tag_constraint) +# Format: (source_project, source_package, source_version, dep_project, dep_package, version_constraint) TEST_DEPENDENCIES = [ # ui-components v1.1.0 depends on design-tokens v1.0.0 - ("frontend-libs", "ui-components", "1.1.0", "frontend-libs", "design-tokens", "1.0.0", None), + ("frontend-libs", "ui-components", "1.1.0", "frontend-libs", "design-tokens", "1.0.0"), # auth-lib v1.0.0 depends on common-utils v2.0.0 - ("backend-services", "auth-lib", "1.0.0", "backend-services", "common-utils", "2.0.0", None), - # auth-lib v1.0.0 also depends on design-tokens (stable tag) - ("backend-services", "auth-lib", "1.0.0", "frontend-libs", "design-tokens", None, "latest"), + ("backend-services", "auth-lib", "1.0.0", "backend-services", "common-utils", "2.0.0"), + # auth-lib v1.0.0 also depends on design-tokens v1.0.0 + ("backend-services", "auth-lib", "1.0.0", "frontend-libs", "design-tokens", "1.0.0"), ] @@ -252,9 +252,8 
@@ def seed_database(db: Session) -> None: logger.info(f"Created {len(project_map)} projects and {len(package_map)} packages (assigned to {demo_team.slug})") - # Create artifacts, tags, and versions + # Create artifacts and versions artifact_count = 0 - tag_count = 0 version_count = 0 for artifact_data in TEST_ARTIFACTS: @@ -316,23 +315,12 @@ def seed_database(db: Session) -> None: db.add(version) version_count += 1 - # Create tags - for tag_name in artifact_data["tags"]: - tag = Tag( - package_id=package.id, - name=tag_name, - artifact_id=sha256_hash, - created_by=team_owner_username, - ) - db.add(tag) - tag_count += 1 - db.flush() # Create dependencies dependency_count = 0 for dep_data in TEST_DEPENDENCIES: - src_project, src_package, src_version, dep_project, dep_package, version_constraint, tag_constraint = dep_data + src_project, src_package, src_version, dep_project, dep_package, version_constraint = dep_data # Find the source artifact by looking up its version src_pkg = package_map.get((src_project, src_package)) @@ -356,11 +344,10 @@ def seed_database(db: Session) -> None: dependency_project=dep_project, dependency_package=dep_package, version_constraint=version_constraint, - tag_constraint=tag_constraint, ) db.add(dependency) dependency_count += 1 db.commit() - logger.info(f"Created {artifact_count} artifacts, {tag_count} tags, {version_count} versions, and {dependency_count} dependencies") + logger.info(f"Created {artifact_count} artifacts, {version_count} versions, and {dependency_count} dependencies") logger.info("Database seeding complete") diff --git a/backend/app/services/artifact_cleanup.py b/backend/app/services/artifact_cleanup.py index 0857155..0000261 100644 --- a/backend/app/services/artifact_cleanup.py +++ b/backend/app/services/artifact_cleanup.py @@ -6,9 +6,8 @@ from typing import List, Optional, Tuple from sqlalchemy.orm import Session import logging -from ..models import Artifact, Tag +from ..models import Artifact, PackageVersion from 
..repositories.artifact import ArtifactRepository -from ..repositories.tag import TagRepository from ..storage import S3Storage logger = logging.getLogger(__name__) @@ -21,8 +20,8 @@ class ArtifactCleanupService: Reference counting rules: - ref_count starts at 1 when artifact is first uploaded - ref_count increments when the same artifact is uploaded again (deduplication) - - ref_count decrements when a tag is deleted or updated to point elsewhere - - ref_count decrements when a package is deleted (for each tag pointing to artifact) + - ref_count decrements when a version is deleted or updated to point elsewhere + - ref_count decrements when a package is deleted (for each version pointing to artifact) - When ref_count reaches 0, artifact is a candidate for deletion from S3 """ @@ -30,12 +29,11 @@ class ArtifactCleanupService: self.db = db self.storage = storage self.artifact_repo = ArtifactRepository(db) - self.tag_repo = TagRepository(db) - def on_tag_deleted(self, artifact_id: str) -> Artifact: + def on_version_deleted(self, artifact_id: str) -> Artifact: """ - Called when a tag is deleted. - Decrements ref_count for the artifact the tag was pointing to. + Called when a version is deleted. + Decrements ref_count for the artifact the version was pointing to. """ artifact = self.artifact_repo.get_by_sha256(artifact_id) if artifact: @@ -45,11 +43,11 @@ class ArtifactCleanupService: ) return artifact - def on_tag_updated( + def on_version_updated( self, old_artifact_id: str, new_artifact_id: str ) -> Tuple[Optional[Artifact], Optional[Artifact]]: """ - Called when a tag is updated to point to a different artifact. + Called when a version is updated to point to a different artifact. Decrements ref_count for old artifact, increments for new (if different). Returns (old_artifact, new_artifact) tuple. @@ -79,21 +77,21 @@ class ArtifactCleanupService: def on_package_deleted(self, package_id) -> List[str]: """ Called when a package is deleted. 
- Decrements ref_count for all artifacts that had tags in the package. + Decrements ref_count for all artifacts that had versions in the package. Returns list of artifact IDs that were affected. """ - # Get all tags in the package before deletion - tags = self.db.query(Tag).filter(Tag.package_id == package_id).all() + # Get all versions in the package before deletion + versions = self.db.query(PackageVersion).filter(PackageVersion.package_id == package_id).all() affected_artifacts = [] - for tag in tags: - artifact = self.artifact_repo.get_by_sha256(tag.artifact_id) + for version in versions: + artifact = self.artifact_repo.get_by_sha256(version.artifact_id) if artifact: self.artifact_repo.decrement_ref_count(artifact) - affected_artifacts.append(tag.artifact_id) + affected_artifacts.append(version.artifact_id) logger.info( - f"Decremented ref_count for artifact {tag.artifact_id} (package delete)" + f"Decremented ref_count for artifact {version.artifact_id} (package delete)" ) return affected_artifacts @@ -152,7 +150,7 @@ class ArtifactCleanupService: def verify_ref_counts(self, fix: bool = False) -> List[dict]: """ - Verify that ref_counts match actual tag references. + Verify that ref_counts match actual version references. 
Args: fix: If True, fix any mismatched ref_counts @@ -162,28 +160,28 @@ class ArtifactCleanupService: """ from sqlalchemy import func - # Get actual tag counts per artifact - tag_counts = ( - self.db.query(Tag.artifact_id, func.count(Tag.id).label("tag_count")) - .group_by(Tag.artifact_id) + # Get actual version counts per artifact + version_counts = ( + self.db.query(PackageVersion.artifact_id, func.count(PackageVersion.id).label("version_count")) + .group_by(PackageVersion.artifact_id) .all() ) - tag_count_map = {artifact_id: count for artifact_id, count in tag_counts} + version_count_map = {artifact_id: count for artifact_id, count in version_counts} # Check all artifacts artifacts = self.db.query(Artifact).all() mismatches = [] for artifact in artifacts: - actual_count = tag_count_map.get(artifact.id, 0) + actual_count = version_count_map.get(artifact.id, 0) # ref_count should be at least 1 (initial upload) + additional uploads - # But tags are the primary reference, so we check against tag count + # But versions are the primary reference, so we check against version count if artifact.ref_count < actual_count: mismatch = { "artifact_id": artifact.id, "stored_ref_count": artifact.ref_count, - "actual_tag_count": actual_count, + "actual_version_count": actual_count, } mismatches.append(mismatch) diff --git a/backend/tests/integration/test_tags_api.py b/backend/tests/integration/test_tags_api.py deleted file mode 100644 index 2b8db6e..0000000 --- a/backend/tests/integration/test_tags_api.py +++ /dev/null @@ -1,403 +0,0 @@ -""" -Integration tests for tag API endpoints. 
- -Tests cover: -- Tag CRUD operations -- Tag listing with pagination and search -- Tag history tracking -- ref_count behavior with tag operations -""" - -import pytest -from tests.factories import compute_sha256, upload_test_file - - -class TestTagCRUD: - """Tests for tag create, read, delete operations.""" - - @pytest.mark.integration - def test_create_tag_via_upload(self, integration_client, test_package): - """Test creating a tag via upload endpoint.""" - project_name, package_name = test_package - - result = upload_test_file( - integration_client, - project_name, - package_name, - b"tag create test", - tag="v1.0.0", - ) - - assert result["tag"] == "v1.0.0" - assert result["artifact_id"] - - @pytest.mark.integration - def test_create_tag_via_post( - self, integration_client, test_package, unique_test_id - ): - """Test creating a tag via POST /tags endpoint.""" - project_name, package_name = test_package - - # First upload an artifact - result = upload_test_file( - integration_client, - project_name, - package_name, - b"artifact for tag", - ) - artifact_id = result["artifact_id"] - - # Create tag via POST - tag_name = f"post-tag-{unique_test_id}" - response = integration_client.post( - f"/api/v1/project/{project_name}/{package_name}/tags", - json={"name": tag_name, "artifact_id": artifact_id}, - ) - assert response.status_code == 200 - - data = response.json() - assert data["name"] == tag_name - assert data["artifact_id"] == artifact_id - - @pytest.mark.integration - def test_get_tag(self, integration_client, test_package): - """Test getting a tag by name.""" - project_name, package_name = test_package - - upload_test_file( - integration_client, - project_name, - package_name, - b"get tag test", - tag="get-tag", - ) - - response = integration_client.get( - f"/api/v1/project/{project_name}/{package_name}/tags/get-tag" - ) - assert response.status_code == 200 - - data = response.json() - assert data["name"] == "get-tag" - assert "artifact_id" in data - assert 
"artifact_size" in data - assert "artifact_content_type" in data - - @pytest.mark.integration - def test_list_tags(self, integration_client, test_package): - """Test listing tags for a package.""" - project_name, package_name = test_package - - # Create some tags - upload_test_file( - integration_client, - project_name, - package_name, - b"list tags test", - tag="list-v1", - ) - - response = integration_client.get( - f"/api/v1/project/{project_name}/{package_name}/tags" - ) - assert response.status_code == 200 - - data = response.json() - assert "items" in data - assert "pagination" in data - - tag_names = [t["name"] for t in data["items"]] - assert "list-v1" in tag_names - - @pytest.mark.integration - def test_delete_tag(self, integration_client, test_package): - """Test deleting a tag.""" - project_name, package_name = test_package - - upload_test_file( - integration_client, - project_name, - package_name, - b"delete tag test", - tag="to-delete", - ) - - # Delete tag - response = integration_client.delete( - f"/api/v1/project/{project_name}/{package_name}/tags/to-delete" - ) - assert response.status_code == 204 - - # Verify deleted - response = integration_client.get( - f"/api/v1/project/{project_name}/{package_name}/tags/to-delete" - ) - assert response.status_code == 404 - - -class TestTagListingFilters: - """Tests for tag listing with filters and search.""" - - @pytest.mark.integration - def test_tags_pagination(self, integration_client, test_package): - """Test tag listing respects pagination.""" - project_name, package_name = test_package - - response = integration_client.get( - f"/api/v1/project/{project_name}/{package_name}/tags?limit=5" - ) - assert response.status_code == 200 - - data = response.json() - assert len(data["items"]) <= 5 - assert data["pagination"]["limit"] == 5 - - @pytest.mark.integration - def test_tags_search(self, integration_client, test_package, unique_test_id): - """Test tag search by name.""" - project_name, package_name = 
test_package - - tag_name = f"searchable-{unique_test_id}" - upload_test_file( - integration_client, - project_name, - package_name, - b"search test", - tag=tag_name, - ) - - response = integration_client.get( - f"/api/v1/project/{project_name}/{package_name}/tags?search=searchable" - ) - assert response.status_code == 200 - - data = response.json() - tag_names = [t["name"] for t in data["items"]] - assert tag_name in tag_names - - -class TestTagHistory: - """Tests for tag history tracking.""" - - @pytest.mark.integration - def test_tag_history_on_create(self, integration_client, test_package): - """Test tag history is created when tag is created.""" - project_name, package_name = test_package - - upload_test_file( - integration_client, - project_name, - package_name, - b"history create test", - tag="history-create", - ) - - response = integration_client.get( - f"/api/v1/project/{project_name}/{package_name}/tags/history-create/history" - ) - assert response.status_code == 200 - - data = response.json() - assert len(data) >= 1 - - @pytest.mark.integration - def test_tag_history_on_update( - self, integration_client, test_package, unique_test_id - ): - """Test tag history is created when tag is updated.""" - project_name, package_name = test_package - - tag_name = f"history-update-{unique_test_id}" - - # Create tag with first artifact - upload_test_file( - integration_client, - project_name, - package_name, - b"first content", - tag=tag_name, - ) - - # Update tag with second artifact - upload_test_file( - integration_client, - project_name, - package_name, - b"second content", - tag=tag_name, - ) - - response = integration_client.get( - f"/api/v1/project/{project_name}/{package_name}/tags/{tag_name}/history" - ) - assert response.status_code == 200 - - data = response.json() - # Should have at least 2 history entries (create + update) - assert len(data) >= 2 - - -class TestTagRefCount: - """Tests for ref_count behavior with tag operations.""" - - 
@pytest.mark.integration - def test_ref_count_decrements_on_tag_delete(self, integration_client, test_package): - """Test ref_count decrements when a tag is deleted.""" - project_name, package_name = test_package - content = b"ref count delete test" - expected_hash = compute_sha256(content) - - # Upload with two tags - upload_test_file( - integration_client, project_name, package_name, content, tag="rc-v1" - ) - upload_test_file( - integration_client, project_name, package_name, content, tag="rc-v2" - ) - - # Verify ref_count is 2 - response = integration_client.get(f"/api/v1/artifact/{expected_hash}") - assert response.json()["ref_count"] == 2 - - # Delete one tag - delete_response = integration_client.delete( - f"/api/v1/project/{project_name}/{package_name}/tags/rc-v1" - ) - assert delete_response.status_code == 204 - - # Verify ref_count is now 1 - response = integration_client.get(f"/api/v1/artifact/{expected_hash}") - assert response.json()["ref_count"] == 1 - - @pytest.mark.integration - def test_ref_count_zero_after_all_tags_deleted( - self, integration_client, test_package - ): - """Test ref_count goes to 0 when all tags are deleted.""" - project_name, package_name = test_package - content = b"orphan test content" - expected_hash = compute_sha256(content) - - # Upload with one tag - upload_test_file( - integration_client, project_name, package_name, content, tag="only-tag" - ) - - # Delete the tag - integration_client.delete( - f"/api/v1/project/{project_name}/{package_name}/tags/only-tag" - ) - - # Verify ref_count is 0 - response = integration_client.get(f"/api/v1/artifact/{expected_hash}") - assert response.json()["ref_count"] == 0 - - @pytest.mark.integration - def test_ref_count_adjusts_on_tag_update( - self, integration_client, test_package, unique_test_id - ): - """Test ref_count adjusts when a tag is updated to point to different artifact.""" - project_name, package_name = test_package - - # Upload two different artifacts - content1 = f"artifact 
one {unique_test_id}".encode() - content2 = f"artifact two {unique_test_id}".encode() - hash1 = compute_sha256(content1) - hash2 = compute_sha256(content2) - - # Upload first artifact with tag "latest" - upload_test_file( - integration_client, project_name, package_name, content1, tag="latest" - ) - - # Verify first artifact has ref_count 1 - response = integration_client.get(f"/api/v1/artifact/{hash1}") - assert response.json()["ref_count"] == 1 - - # Upload second artifact with different tag - upload_test_file( - integration_client, project_name, package_name, content2, tag="stable" - ) - - # Now update "latest" tag to point to second artifact - upload_test_file( - integration_client, project_name, package_name, content2, tag="latest" - ) - - # Verify first artifact ref_count decreased to 0 - response = integration_client.get(f"/api/v1/artifact/{hash1}") - assert response.json()["ref_count"] == 0 - - # Verify second artifact ref_count increased to 2 - response = integration_client.get(f"/api/v1/artifact/{hash2}") - assert response.json()["ref_count"] == 2 - - @pytest.mark.integration - def test_ref_count_unchanged_when_tag_same_artifact( - self, integration_client, test_package, unique_test_id - ): - """Test ref_count doesn't change when tag is 'updated' to same artifact.""" - project_name, package_name = test_package - - content = f"same artifact {unique_test_id}".encode() - expected_hash = compute_sha256(content) - - # Upload with tag - upload_test_file( - integration_client, project_name, package_name, content, tag="same-v1" - ) - - # Verify ref_count is 1 - response = integration_client.get(f"/api/v1/artifact/{expected_hash}") - assert response.json()["ref_count"] == 1 - - # Upload same content with same tag (no-op) - upload_test_file( - integration_client, project_name, package_name, content, tag="same-v1" - ) - - # Verify ref_count is still 1 - response = integration_client.get(f"/api/v1/artifact/{expected_hash}") - assert response.json()["ref_count"] == 1 
- - @pytest.mark.integration - def test_tag_via_post_endpoint_increments_ref_count( - self, integration_client, test_package, unique_test_id - ): - """Test creating tag via POST /tags endpoint increments ref_count.""" - project_name, package_name = test_package - - content = f"tag endpoint test {unique_test_id}".encode() - expected_hash = compute_sha256(content) - - # Upload artifact without tag - result = upload_test_file( - integration_client, project_name, package_name, content, filename="test.bin" - ) - artifact_id = result["artifact_id"] - - # Verify ref_count is 0 (no tags yet) - response = integration_client.get(f"/api/v1/artifact/{expected_hash}") - assert response.json()["ref_count"] == 0 - - # Create tag via POST endpoint - tag_response = integration_client.post( - f"/api/v1/project/{project_name}/{package_name}/tags", - json={"name": "post-v1", "artifact_id": artifact_id}, - ) - assert tag_response.status_code == 200 - - # Verify ref_count is now 1 - response = integration_client.get(f"/api/v1/artifact/{expected_hash}") - assert response.json()["ref_count"] == 1 - - # Create another tag via POST endpoint - tag_response = integration_client.post( - f"/api/v1/project/{project_name}/{package_name}/tags", - json={"name": "post-latest", "artifact_id": artifact_id}, - ) - assert tag_response.status_code == 200 - - # Verify ref_count is now 2 - response = integration_client.get(f"/api/v1/artifact/{expected_hash}") - assert response.json()["ref_count"] == 2 diff --git a/frontend/src/api.ts b/frontend/src/api.ts index e0730fa..de5d739 100644 --- a/frontend/src/api.ts +++ b/frontend/src/api.ts @@ -1,14 +1,11 @@ import { Project, Package, - Tag, - TagDetail, ArtifactDetail, PackageArtifact, UploadResponse, PaginatedResponse, ListParams, - TagListParams, PackageListParams, ArtifactListParams, ProjectListParams, @@ -240,32 +237,6 @@ export async function createPackage(projectName: string, data: { name: string; d return handleResponse(response); } -// Tag API -export 
async function listTags(projectName: string, packageName: string, params: TagListParams = {}): Promise<PaginatedResponse<Tag>> { - const query = buildQueryString(params as Record<string, unknown>); - const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags${query}`); - return handleResponse<PaginatedResponse<Tag>>(response); -} - -export async function listTagsSimple(projectName: string, packageName: string, params: TagListParams = {}): Promise<Tag[]> { - const data = await listTags(projectName, packageName, params); - return data.items; -} - -export async function getTag(projectName: string, packageName: string, tagName: string): Promise<TagDetail> { - const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags/${tagName}`); - return handleResponse<TagDetail>(response); -} - -export async function createTag(projectName: string, packageName: string, data: { name: string; artifact_id: string }): Promise<Tag> { - const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(data), - }); - return handleResponse<Tag>(response); -} - // Artifact API export async function getArtifact(artifactId: string): Promise<ArtifactDetail> { const response = await fetch(`${API_BASE}/artifact/${artifactId}`); @@ -287,14 +258,10 @@ export async function uploadArtifact( projectName: string, packageName: string, file: File, - tag?: string, version?: string ): Promise<UploadResponse> { const formData = new FormData(); formData.append('file', file); - if (tag) { - formData.append('tag', tag); - } if (version) { formData.append('version', version); } diff --git a/frontend/src/components/DependencyGraph.tsx b/frontend/src/components/DependencyGraph.tsx index 487e942..a9312b0 100644 --- a/frontend/src/components/DependencyGraph.tsx +++ b/frontend/src/components/DependencyGraph.tsx @@ -170,7 +170,7 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende label: `${artifact.project}/${artifact.package}`, project:
artifact.project, package: artifact.package, - version: artifact.version || artifact.tag, + version: artifact.version, size: artifact.size, isRoot, onNavigate, diff --git a/frontend/src/components/DragDropUpload.test.tsx b/frontend/src/components/DragDropUpload.test.tsx index babe4c2..a091bf7 100644 --- a/frontend/src/components/DragDropUpload.test.tsx +++ b/frontend/src/components/DragDropUpload.test.tsx @@ -524,7 +524,7 @@ describe('DragDropUpload', () => { } vi.stubGlobal('XMLHttpRequest', MockXHR); - render(); + render(); const input = document.querySelector('input[type="file"]') as HTMLInputElement; const file = createMockFile('test.txt', 100, 'text/plain'); diff --git a/frontend/src/components/DragDropUpload.tsx b/frontend/src/components/DragDropUpload.tsx index e9f6a90..e3d95cc 100644 --- a/frontend/src/components/DragDropUpload.tsx +++ b/frontend/src/components/DragDropUpload.tsx @@ -13,7 +13,6 @@ interface StoredUploadState { completedParts: number[]; project: string; package: string; - tag?: string; createdAt: number; } @@ -87,7 +86,6 @@ export interface DragDropUploadProps { maxFileSize?: number; // in bytes maxConcurrentUploads?: number; maxRetries?: number; - tag?: string; className?: string; disabled?: boolean; disabledReason?: string; @@ -230,7 +228,6 @@ export function DragDropUpload({ maxFileSize, maxConcurrentUploads = 3, maxRetries = 3, - tag, className = '', disabled = false, disabledReason, @@ -368,7 +365,6 @@ export function DragDropUpload({ expected_hash: fileHash, filename: item.file.name, size: item.file.size, - tag: tag || undefined, }), } ); @@ -392,7 +388,6 @@ export function DragDropUpload({ completedParts: [], project: projectName, package: packageName, - tag: tag || undefined, createdAt: Date.now(), }); @@ -438,7 +433,6 @@ export function DragDropUpload({ completedParts, project: projectName, package: packageName, - tag: tag || undefined, createdAt: Date.now(), }); @@ -459,7 +453,7 @@ export function DragDropUpload({ { method: 'POST', 
headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ tag: tag || undefined }), + body: JSON.stringify({}), } ); @@ -475,18 +469,15 @@ export function DragDropUpload({ size: completeData.size, deduplicated: false, }; - }, [projectName, packageName, tag, isOnline]); + }, [projectName, packageName, isOnline]); const uploadFileSimple = useCallback((item: UploadItem): Promise => { return new Promise((resolve, reject) => { const xhr = new XMLHttpRequest(); xhrMapRef.current.set(item.id, xhr); - + const formData = new FormData(); formData.append('file', item.file); - if (tag) { - formData.append('tag', tag); - } let lastLoaded = 0; let lastTime = Date.now(); @@ -549,13 +540,13 @@ export function DragDropUpload({ xhr.timeout = 300000; xhr.send(formData); - setUploadQueue(prev => prev.map(u => - u.id === item.id + setUploadQueue(prev => prev.map(u => + u.id === item.id ? { ...u, status: 'uploading' as UploadStatus, startTime: Date.now() } : u )); }); - }, [projectName, packageName, tag]); + }, [projectName, packageName]); const uploadFile = useCallback((item: UploadItem): Promise => { if (item.file.size >= CHUNKED_UPLOAD_THRESHOLD) { diff --git a/frontend/src/components/GlobalSearch.tsx b/frontend/src/components/GlobalSearch.tsx index 3716d5e..57b21eb 100644 --- a/frontend/src/components/GlobalSearch.tsx +++ b/frontend/src/components/GlobalSearch.tsx @@ -233,7 +233,7 @@ export function GlobalSearch() { const flatIndex = results.projects.length + results.packages.length + index; return ( - {a.tags.length > 0 && ( - - )} - {canWrite && !isSystemProject && ( - - )} + @@ -623,13 +581,8 @@ function PackagePage() { )} - {pkg && (pkg.tag_count !== undefined || pkg.artifact_count !== undefined) && ( + {pkg && pkg.artifact_count !== undefined && (
- {!isSystemProject && pkg.tag_count !== undefined && ( - - {pkg.tag_count} tags - - )} {pkg.artifact_count !== undefined && ( {pkg.artifact_count} {isSystemProject ? 'versions' : 'artifacts'} @@ -640,11 +593,6 @@ function PackagePage() { {formatBytes(pkg.total_size)} total )} - {!isSystemProject && pkg.latest_tag && ( - - Latest: {pkg.latest_tag} - - )}
)} @@ -655,7 +603,7 @@ function PackagePage() {
-

{isSystemProject ? 'Versions' : 'Tags / Versions'}

+

{isSystemProject ? 'Versions' : 'Artifacts'}

@@ -754,9 +702,9 @@ function PackagePage() {
           curl -O {window.location.origin}/api/v1/project/{projectName}/{packageName}/+/latest
         
-

Or with a specific tag:

+

Or with a specific version:

-          curl -O {window.location.origin}/api/v1/project/{projectName}/{packageName}/+/v1.0.0
+          curl -O {window.location.origin}/api/v1/project/{projectName}/{packageName}/+/1.0.0
         
@@ -765,7 +713,7 @@ function PackagePage() { 0 ? selectedArtifact.tags[0] : `artifact:${selectedArtifact.id}`} + tagName={getArtifactVersion(selectedArtifact) || `artifact:${selectedArtifact.id}`} onClose={() => setShowGraph(false)} /> )} @@ -788,24 +736,12 @@ function PackagePage() {
-
- - setUploadTag(e.target.value)} - placeholder="v1.0.0, latest, stable..." - /> -
{ handleUploadComplete(result); setShowUploadModal(false); - setUploadTag(''); }} onUploadError={handleUploadError} /> @@ -814,74 +750,6 @@ function PackagePage() {
)} - {/* Create/Update Tag Modal */} - {showCreateTagModal && ( -
setShowCreateTagModal(false)}> -
e.stopPropagation()}> -
-

Create / Update Tag

- -
-
-

Point a tag at an artifact by its ID

-
{ handleCreateTag(e); setShowCreateTagModal(false); }}> -
- - setCreateTagName(e.target.value)} - placeholder="latest, stable, v1.0.0..." - disabled={createTagLoading} - /> -
-
- - setCreateTagArtifactId(e.target.value.toLowerCase().replace(/[^a-f0-9]/g, '').slice(0, 64))} - placeholder="SHA256 hash (64 hex characters)" - className="artifact-id-input" - disabled={createTagLoading} - /> - {createTagArtifactId.length > 0 && createTagArtifactId.length !== 64 && ( -

{createTagArtifactId.length}/64 characters

- )} -
-
- - -
-
-
-
-
- )} - {/* Ensure File Modal */} {showEnsureFile && (
setShowEnsureFile(false)}> @@ -943,15 +811,13 @@ function PackagePage() {
- {selectedArtifact?.tags && selectedArtifact.tags.length > 0 && ( - - )} +