# Compare commits

**33 Commits**, comparing `feature/pa...` to `11131c2071`:

`11131c2071`, `994f166de8`, `8999552949`, `2df97ae94a`, `caa0c5af0c`, `3fd2747ae4`, `96367da448`, `2686fdcb89`, `0eb2deb4ca`, `3fe421f31d`, `68660eacf6`, `b52c8840f1`, `4afcdf5cda`, `bc3da14d50`, `2843335f6d`, `2097865874`, `0e1474bf6c`, `9604540dd3`, `a6df5aba5a`, `096887d4da`, `7d80bef39a`, `96198dc127`, `fd06dfb3ce`, `11852adc66`, `21555d64a3`, `b83f19aa52`, `5d0122fc36`, `81b423e0ea`, `e89947f3d3`, `459867abdb`, `2b5bc60a69`, `8b7b523aa8`, `dea03c4a12`
**.gitlab-ci.yml**

```yaml
@@ -1,26 +1,21 @@
stages:
  - test
  - build
  - publish
  # - deploy

include:
  - project: 'esv/bsf/pypi/prosper'
    ref: v0.64.1
    file: '/prosper/templates/projects/docker.yml'

variables:
  # Container registry settings
  REGISTRY: ${CI_REGISTRY}
  IMAGE_NAME: ${CI_REGISTRY_IMAGE}
  # Buildah settings
  STORAGE_DRIVER: vfs
  BUILDAH_FORMAT: docker
  BUILDAH_ISOLATION: chroot
  # renovate: datasource=gitlab-tags depName=esv/bsf/pypi/prosper versioning=semver registryUrl=https://gitlab.global.bsf.tools
  PROSPER_VERSION: v0.64.1

kics:
  allow_failure: true

hadolint:
  allow_failure: true

.buildah-base:
  image: deps.global.bsf.tools/quay.io/buildah/stable:latest
  before_script:
    - buildah version
    - buildah login -u ${CI_REGISTRY_USER} -p ${CI_REGISTRY_PASSWORD} ${CI_REGISTRY}

# Run Python tests
test:
python_tests:
  stage: test
  image: deps.global.bsf.tools/docker/python:3.12-slim
  before_script:
@@ -29,47 +24,6 @@ test:
  script:
    - cd backend
    - python -m pytest -v || echo "No tests yet"
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

# Build container image for merge requests (no push)
build:
  stage: build
  extends: .buildah-base
  script:
    - |
      buildah build \
        --build-arg NPM_REGISTRY=https://deps.global.bsf.tools/artifactory/api/npm/registry.npmjs.org/ \
        --tag ${IMAGE_NAME}:${CI_COMMIT_SHORT_SHA} \
        --label org.opencontainers.image.source=${CI_PROJECT_URL} \
        --label org.opencontainers.image.revision=${CI_COMMIT_SHA} \
        --label org.opencontainers.image.created=$(date -u +%Y-%m-%dT%H:%M:%SZ) \
        .
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"

# Build and push on main branch
publish:
  stage: publish
  extends: .buildah-base
  script:
    - |
      buildah build \
        --build-arg NPM_REGISTRY=https://deps.global.bsf.tools/artifactory/api/npm/registry.npmjs.org/ \
        --tag ${IMAGE_NAME}:${CI_COMMIT_SHORT_SHA} \
        --tag ${IMAGE_NAME}:${CI_COMMIT_REF_SLUG} \
        --tag ${IMAGE_NAME}:latest \
        --label org.opencontainers.image.source=${CI_PROJECT_URL} \
        --label org.opencontainers.image.revision=${CI_COMMIT_SHA} \
        --label org.opencontainers.image.created=$(date -u +%Y-%m-%dT%H:%M:%SZ) \
        .
    - buildah push ${IMAGE_NAME}:${CI_COMMIT_SHORT_SHA}
    - buildah push ${IMAGE_NAME}:${CI_COMMIT_REF_SLUG}
    - buildah push ${IMAGE_NAME}:latest
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH


# deploy_helm_charts:
#   stage: deploy
```
**CHANGELOG.md** (new file, 53 lines)

# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]
### Fixed
- Fixed Helm chart `minio.ingress` conflicting with Bitnami MinIO subchart by renaming to `minioIngress` (#48)

## [0.3.0] - 2025-12-15
### Changed
- Changed default download mode from `proxy` to `presigned` for better performance (#48)
### Added
- Added presigned URL support for direct S3 downloads (#48)
- Added `ORCHARD_DOWNLOAD_MODE` config option (`presigned`, `redirect`, `proxy`) (#48)
- Added `ORCHARD_PRESIGNED_URL_EXPIRY` config option (default: 3600 seconds) (#48)
- Added `?mode=` query parameter to override download mode per-request (#48)
- Added `/api/v1/project/{project}/{package}/+/{ref}/url` endpoint for getting presigned URLs (#48)
- Added `PresignedUrlResponse` schema with URL, expiry, checksums, and artifact metadata (#48)
- Added MinIO ingress support in Helm chart for presigned URL access (#48)
- Added `orchard.download.mode` and `orchard.download.presignedUrlExpiry` Helm values (#48)
- Added integrity verification workflow design document (#24)
- Added `sha256` field to API responses for clarity (alias of `id`) (#25)
- Added `checksum_sha1` field to artifacts table for compatibility (#25)
- Added `s3_etag` field to artifacts table for S3 verification (#25)
- Compute and store MD5, SHA1, and S3 ETag alongside SHA256 during upload (#25)
- Added `Dockerfile.local` and `docker-compose.local.yml` for local development (#25)
- Added migration script `003_checksum_fields.sql` for existing databases (#25)

## [0.2.0] - 2025-12-15
### Added
- Added `format` and `platform` fields to packages table (#16)
- Added `checksum_md5` and `metadata` JSONB fields to artifacts table (#16)
- Added `updated_at` field to tags table (#16)
- Added `tag_name`, `user_agent`, `duration_ms`, `deduplicated`, `checksum_verified` fields to uploads table (#16)
- Added `change_type` field to tag_history table (#16)
- Added composite indexes for common query patterns (#16)
- Added GIN indexes on JSONB fields for efficient JSON queries (#16)
- Added partial index for public projects (#16)
- Added database triggers for `updated_at` timestamps (#16)
- Added database triggers for maintaining artifact `ref_count` accuracy (#16)
- Added CHECK constraints for data integrity (`size > 0`, `ref_count >= 0`) (#16)
- Added migration script `002_schema_enhancements.sql` for existing databases (#16)
### Changed
- Updated images to use internal container BSF proxy (#46)

## [0.1.0] - 2025-12-12
### Added
- Added Prosper docker template config (#45)
### Changed
- Changed the Dockerfile npm build arg to use the deps.global.bsf.tools URL as the default registry (#45)
**Dockerfile**

```diff
@@ -1,7 +1,7 @@
 # Frontend build stage
-FROM node:20-alpine AS frontend-builder
+FROM containers.global.bsf.tools/node:20-alpine AS frontend-builder
 
-ARG NPM_REGISTRY
+ARG NPM_REGISTRY=https://deps.global.bsf.tools/artifactory/api/npm/registry.npmjs.org/
 
 WORKDIR /app/frontend
 
@@ -19,7 +19,10 @@ COPY frontend/ ./
 RUN npm run build
 
 # Runtime stage
-FROM python:3.12-slim
+FROM containers.global.bsf.tools/python:3.12-slim
 
+# Disable proxy cache
+RUN echo 'Acquire::http::Pipeline-Depth 0;\nAcquire::http::No-Cache true;\nAcquire::BrokenProxy true;\n' > /etc/apt/apt.conf.d/99fixbadproxy
+
 # Install system dependencies
 RUN apt-get update && apt-get install -y --no-install-recommends \
```
**Dockerfile.local** (new file, 50 lines)

```dockerfile
# Frontend build stage
FROM node:20-alpine AS frontend-builder

WORKDIR /app/frontend

# Copy package files
COPY frontend/package*.json ./
RUN npm install

# Copy frontend source
COPY frontend/ ./

# Build frontend
RUN npm run build

# Runtime stage
FROM python:3.12-slim

# Install system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Create non-root user
RUN groupadd -g 1000 orchard && \
    useradd -u 1000 -g orchard -s /bin/bash -m orchard

WORKDIR /app

# Copy requirements and install Python dependencies
COPY backend/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy backend source
COPY backend/ ./backend/

# Copy frontend build
COPY --from=frontend-builder /app/frontend/dist ./frontend/dist

# Set ownership
RUN chown -R orchard:orchard /app

USER orchard

EXPOSE 8080

HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8080/health || exit 1

CMD ["uvicorn", "backend.app.main:app", "--host", "0.0.0.0", "--port", "8080"]
```
**README.md** (280 lines)

...

- **Package** - Named collection within a project
- **Artifact** - Specific content instance identified by SHA256
- **Tags** - Alias system for referencing artifacts by human-readable names (e.g., `v1.0.0`, `latest`, `stable`)
- **Package Formats & Platforms** - Packages can be tagged with format (npm, pypi, docker, deb, rpm, etc.) and platform (linux, darwin, windows, etc.)
- **Rich Package Metadata** - Package listings include aggregated stats (tag count, artifact count, total size, latest tag)
- **S3-Compatible Backend** - Uses MinIO (or any S3-compatible storage) for artifact storage
- **PostgreSQL Metadata** - Relational database for metadata, access control, and audit trails
- **REST API** - Full HTTP API for all operations
- **Web UI** - React-based interface for managing artifacts with:
  - Hierarchical navigation (Projects → Packages → Tags/Artifacts)
  - Search, sort, and filter capabilities on all list views
  - URL-based state persistence for filters and pagination
  - Keyboard navigation (Backspace to go up hierarchy)
  - Copy-to-clipboard for artifact IDs
  - Responsive design for mobile and desktop
- **Docker Compose Setup** - Easy local development environment
- **Helm Chart** - Kubernetes deployment with PostgreSQL, MinIO, and Redis subcharts
- **Multipart Upload** - Automatic multipart upload for files larger than 100MB
...

| Method | Endpoint | Description |
|---|---|---|
| `GET` | `/api/v1/projects` | List all projects |
| `POST` | `/api/v1/projects` | Create a new project |
| `GET` | `/api/v1/projects/:project` | Get project details |
| `GET` | `/api/v1/project/:project/packages` | List packages (with pagination, search, filtering) |
| `GET` | `/api/v1/project/:project/packages/:package` | Get single package with metadata |
| `POST` | `/api/v1/project/:project/packages` | Create a new package |
| `POST` | `/api/v1/project/:project/:package/upload` | Upload an artifact |
| `GET` | `/api/v1/project/:project/:package/+/:ref` | Download an artifact (supports Range header, mode param) |
| `GET` | `/api/v1/project/:project/:package/+/:ref/url` | Get presigned URL for direct S3 download |
| `HEAD` | `/api/v1/project/:project/:package/+/:ref` | Get artifact metadata without downloading |
| `GET` | `/api/v1/project/:project/:package/tags` | List tags (with pagination, search, sorting, artifact metadata) |
| `POST` | `/api/v1/project/:project/:package/tags` | Create a tag |
| `GET` | `/api/v1/project/:project/:package/tags/:tag_name` | Get single tag with artifact metadata |
| `GET` | `/api/v1/project/:project/:package/tags/:tag_name/history` | Get tag change history |
| `GET` | `/api/v1/project/:project/:package/artifacts` | List artifacts in package (with filtering) |
| `GET` | `/api/v1/project/:project/:package/consumers` | List consumers of a package |
| `GET` | `/api/v1/artifact/:id` | Get artifact metadata with referencing tags |

#### Resumable Upload Endpoints
...

```bash
curl -X POST http://localhost:8080/api/v1/project/my-project/packages \
  -H "Content-Type: application/json" \
  -d '{"name": "releases", "description": "Release builds", "format": "generic", "platform": "any"}'
```

Supported formats: `generic`, `npm`, `pypi`, `docker`, `deb`, `rpm`, `maven`, `nuget`, `helm`

Supported platforms: `any`, `linux`, `darwin`, `windows`, `linux-amd64`, `linux-arm64`, `darwin-amd64`, `darwin-arm64`, `windows-amd64`
### List Packages

```bash
# Basic listing
curl http://localhost:8080/api/v1/project/my-project/packages

# With pagination
curl "http://localhost:8080/api/v1/project/my-project/packages?page=1&limit=10"

# With search
curl "http://localhost:8080/api/v1/project/my-project/packages?search=release"

# With sorting
curl "http://localhost:8080/api/v1/project/my-project/packages?sort=created_at&order=desc"

# Filter by format/platform
curl "http://localhost:8080/api/v1/project/my-project/packages?format=npm&platform=linux"
```

Response includes aggregated metadata:
```json
{
  "items": [
    {
      "id": "uuid",
      "name": "releases",
      "description": "Release builds",
      "format": "generic",
      "platform": "any",
      "tag_count": 5,
      "artifact_count": 3,
      "total_size": 1048576,
      "latest_tag": "v1.0.0",
      "latest_upload_at": "2025-01-01T00:00:00Z",
      "recent_tags": [...]
    }
  ],
  "pagination": {"page": 1, "limit": 20, "total": 1, "total_pages": 1}
}
```

### Get Single Package

```bash
curl http://localhost:8080/api/v1/project/my-project/packages/releases

# Include all tags (not just recent 5)
curl "http://localhost:8080/api/v1/project/my-project/packages/releases?include_tags=true"
```

### Upload an Artifact
...

### Download an Artifact

```bash
# By tag (use -OJ to save with the correct filename from Content-Disposition header)
curl -OJ http://localhost:8080/api/v1/project/my-project/releases/+/v1.0.0

# By artifact ID
curl -OJ http://localhost:8080/api/v1/project/my-project/releases/+/artifact:a3f5d8e12b4c6789...

# Using the short URL pattern
curl -OJ http://localhost:8080/project/my-project/releases/+/latest

# Save to a specific filename
curl -o myfile.tar.gz http://localhost:8080/api/v1/project/my-project/releases/+/v1.0.0

# Partial download (range request)
curl -H "Range: bytes=0-1023" http://localhost:8080/api/v1/project/my-project/releases/+/v1.0.0

# Check file info without downloading (HEAD request)
curl -I http://localhost:8080/api/v1/project/my-project/releases/+/v1.0.0

# Download with specific mode (presigned, redirect, or proxy)
curl "http://localhost:8080/api/v1/project/my-project/releases/+/v1.0.0?mode=proxy"

# Get presigned URL for direct S3 download
curl http://localhost:8080/api/v1/project/my-project/releases/+/v1.0.0/url
```

> **Note on curl flags:**
> - `-O` saves the file using the URL path as the filename (e.g., `latest`, `v1.0.0`)
> - `-J` tells curl to use the filename from the `Content-Disposition` header (e.g., `app-v1.0.0.tar.gz`)
> - `-OJ` combines both: download to a file using the server-provided filename
> - `-o <filename>` saves to a specific filename you choose

#### Download Modes

Orchard supports three download modes, configurable via `ORCHARD_DOWNLOAD_MODE` or per-request with `?mode=`:

| Mode | Description | Use Case |
|------|-------------|----------|
| `presigned` (default) | Returns JSON with a presigned S3 URL | Clients that handle redirects themselves, web UIs |
| `redirect` | Returns HTTP 302 redirect to presigned S3 URL | Simple clients, browsers, wget |
| `proxy` | Streams content through the backend | When S3 isn't directly accessible to clients |

**Presigned URL Response:**
```json
{
  "url": "https://minio.example.com/bucket/...",
  "expires_at": "2025-01-01T01:00:00Z",
  "method": "GET",
  "artifact_id": "a3f5d8e...",
  "size": 1048576,
  "content_type": "application/gzip",
  "original_name": "app-v1.0.0.tar.gz",
  "checksum_sha256": "a3f5d8e...",
  "checksum_md5": "d41d8cd..."
}
```

> **Note:** For presigned URLs to work, clients must be able to reach the S3 endpoint directly. In Kubernetes, this requires exposing MinIO via ingress (see Helm configuration below).
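The presigned flow is easy to script against. Below is a minimal client sketch in Python, assuming the `requests` library is available and the `/url` endpoint returns the response fields shown above:

```python
import hashlib
import requests  # assumption: the requests library is installed

BASE = "http://localhost:8080/api/v1"

def download_via_presigned_url(project: str, package: str, ref: str, dest: str) -> None:
    """Ask Orchard for a presigned URL, download directly from S3,
    and verify the SHA256 checksum reported in the response."""
    meta = requests.get(f"{BASE}/project/{project}/{package}/+/{ref}/url").json()

    sha256 = hashlib.sha256()
    with requests.get(meta["url"], stream=True) as resp:
        resp.raise_for_status()
        with open(dest, "wb") as f:
            for chunk in resp.iter_content(chunk_size=1 << 20):
                f.write(chunk)
                sha256.update(chunk)

    if sha256.hexdigest() != meta["checksum_sha256"]:
        raise ValueError(f"checksum mismatch for {dest}")

download_via_presigned_url("my-project", "releases", "v1.0.0", "app-v1.0.0.tar.gz")
```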
### Create a Tag

```bash
curl -X POST http://localhost:8080/api/v1/project/my-project/releases/tags \
  -d '{"name": "stable", "artifact_id": "a3f5d8e12b4c6789..."}'
```

### List Tags

```bash
# Basic listing with artifact metadata
curl http://localhost:8080/api/v1/project/my-project/releases/tags

# With pagination
curl "http://localhost:8080/api/v1/project/my-project/releases/tags?page=1&limit=10"

# Search by tag name
curl "http://localhost:8080/api/v1/project/my-project/releases/tags?search=v1"

# Sort by created_at descending
curl "http://localhost:8080/api/v1/project/my-project/releases/tags?sort=created_at&order=desc"
```

Response includes artifact metadata:
```json
{
  "items": [
    {
      "id": "uuid",
      "package_id": "uuid",
      "name": "v1.0.0",
      "artifact_id": "a3f5d8e...",
      "created_at": "2025-01-01T00:00:00Z",
      "created_by": "user",
      "artifact_size": 1048576,
      "artifact_content_type": "application/gzip",
      "artifact_original_name": "app-v1.0.0.tar.gz",
      "artifact_created_at": "2025-01-01T00:00:00Z",
      "artifact_format_metadata": {}
    }
  ],
  "pagination": {"page": 1, "limit": 20, "total": 1, "total_pages": 1}
}
```

### Get Single Tag

```bash
curl http://localhost:8080/api/v1/project/my-project/releases/tags/v1.0.0
```

### Get Tag History

```bash
curl http://localhost:8080/api/v1/project/my-project/releases/tags/latest/history
```

Returns list of artifact changes for the tag (most recent first).
### List Artifacts in Package

```bash
# Basic listing
curl http://localhost:8080/api/v1/project/my-project/releases/artifacts

# Filter by content type
curl "http://localhost:8080/api/v1/project/my-project/releases/artifacts?content_type=application/gzip"

# Filter by date range
curl "http://localhost:8080/api/v1/project/my-project/releases/artifacts?created_after=2025-01-01T00:00:00Z"
```

Response includes tags pointing to each artifact:
```json
{
  "items": [
    {
      "id": "a3f5d8e...",
      "size": 1048576,
      "content_type": "application/gzip",
      "original_name": "app-v1.0.0.tar.gz",
      "created_at": "2025-01-01T00:00:00Z",
      "created_by": "user",
      "format_metadata": {},
      "tags": ["v1.0.0", "latest", "stable"]
    }
  ],
  "pagination": {"page": 1, "limit": 20, "total": 1, "total_pages": 1}
}
```

### Get Artifact by ID

```bash
curl http://localhost:8080/api/v1/artifact/a3f5d8e12b4c67890abcdef1234567890abcdef1234567890abcdef12345678
```

Response includes all tags/packages referencing the artifact:
```json
{
  "id": "a3f5d8e...",
  "size": 1048576,
  "content_type": "application/gzip",
  "original_name": "app-v1.0.0.tar.gz",
  "created_at": "2025-01-01T00:00:00Z",
  "created_by": "user",
  "ref_count": 2,
  "format_metadata": {},
  "tags": [
    {
      "id": "uuid",
      "name": "v1.0.0",
      "package_id": "uuid",
      "package_name": "releases",
      "project_name": "my-project"
    }
  ]
}
```
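Because the artifact ID is the SHA256 of the content, a client can test for deduplication before uploading. A sketch, assuming `GET /api/v1/artifact/:id` answers 404 for hashes Orchard has never seen:

```python
import hashlib
import requests

BASE = "http://localhost:8080/api/v1"

def sha256_of(path: str) -> str:
    """Stream the file through SHA256 without loading it into memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

def already_stored(path: str) -> bool:
    """True if Orchard already holds this exact content (assumed 404 otherwise)."""
    resp = requests.get(f"{BASE}/artifact/{sha256_of(path)}")
    return resp.status_code == 200
```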
## Project Structure

```
orchard/
...
│   └── requirements.txt
├── frontend/
│   ├── src/
│   │   ├── components/      # Reusable UI components
│   │   │   ├── Badge.tsx        # Status/type badges
│   │   │   ├── Breadcrumb.tsx   # Navigation breadcrumbs
│   │   │   ├── Card.tsx         # Card containers
│   │   │   ├── DataTable.tsx    # Sortable data tables
│   │   │   ├── FilterChip.tsx   # Active filter chips
│   │   │   ├── Pagination.tsx   # Page navigation
│   │   │   ├── SearchInput.tsx  # Debounced search
│   │   │   └── SortDropdown.tsx # Sort field selector
│   │   ├── pages/           # Page components
│   │   │   ├── Home.tsx         # Project list
│   │   │   ├── ProjectPage.tsx  # Package list within project
│   │   │   └── PackagePage.tsx  # Tag/artifact list within package
│   │   ├── api.ts           # API client with pagination support
│   │   ├── types.ts         # TypeScript interfaces
│   │   ├── App.tsx
│   │   └── main.tsx
│   ├── index.html
...
```

...

Configuration is provided via environment variables prefixed with `ORCHARD_`:

| Variable | Description | Default |
|---|---|---|
| `ORCHARD_S3_BUCKET` | S3 bucket name | `orchard-artifacts` |
| `ORCHARD_S3_ACCESS_KEY_ID` | S3 access key | - |
| `ORCHARD_S3_SECRET_ACCESS_KEY` | S3 secret key | - |
| `ORCHARD_DOWNLOAD_MODE` | Download mode: `presigned`, `redirect`, or `proxy` | `presigned` |
| `ORCHARD_PRESIGNED_URL_EXPIRY` | Presigned URL expiry in seconds | `3600` |

## Kubernetes Deployment

...

```bash
helm install orchard ./helm/orchard -n orchard --create-namespace
helm install orchard ./helm/orchard -f my-values.yaml
```

### Helm Configuration

Key configuration options in `values.yaml`:

```yaml
orchard:
  # Download configuration
  download:
    mode: "presigned"  # presigned, redirect, or proxy
    presignedUrlExpiry: 3600

# MinIO ingress (required for presigned URL downloads)
minioIngress:
  enabled: true
  className: "nginx"
  annotations:
    cert-manager.io/cluster-issuer: "letsencrypt"
  host: "minio.your-domain.com"
  tls:
    enabled: true
    secretName: minio-tls
```

When `minioIngress.enabled` is `true`, the S3 endpoint automatically uses the external URL (`https://minio.your-domain.com`), making presigned URLs accessible to external clients.

See `helm/orchard/values.yaml` for all configuration options.

## Database Schema

...
**backend/alembic.ini** (new file, 83 lines)

```ini
# Alembic Configuration File

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during the 'revision' command,
# regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without a source .py file
# to be detected as revisions in the versions/ directory
# sourceless = false

# version location specification
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator
# version_path_separator = :

# set to 'true' to search source files recursively
# in each "version_locations" directory
# recursive_version_locations = false

# the output encoding used when revision files are written from script.py.mako
# output_encoding = utf-8

# Database URL - will be overridden by env.py
sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts.

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
```
**backend/alembic/README** (new file, 27 lines)

```text
Alembic Migrations for Orchard

This directory contains database migration scripts managed by Alembic.

Common Commands:
    # Generate a new migration (autogenerate from model changes)
    alembic revision --autogenerate -m "description of changes"

    # Apply all pending migrations
    alembic upgrade head

    # Rollback one migration
    alembic downgrade -1

    # Show current migration status
    alembic current

    # Show migration history
    alembic history

    # Generate SQL without applying (for review)
    alembic upgrade head --sql

Notes:
- Always review autogenerated migrations before applying
- Test migrations in development before applying to production
- Migrations are stored in the versions/ directory
```
**backend/alembic/env.py** (new file, 95 lines)

```python
"""
Alembic migration environment configuration.
"""

from logging.config import fileConfig
import sys
from pathlib import Path

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# Add the app directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))

from app.config import get_settings
from app.models import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Get database URL from settings
settings = get_settings()
config.set_main_option("sqlalchemy.url", settings.database_url)

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,  # Detect column type changes
            compare_server_default=True,  # Detect default value changes
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
```
**backend/alembic/script.py.mako** (new file, 26 lines)

```mako
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
```
**backend/app/config.py**

```diff
@@ -18,6 +18,12 @@ class Settings(BaseSettings):
     database_dbname: str = "orchard"
     database_sslmode: str = "disable"
 
+    # Database connection pool settings
+    database_pool_size: int = 5  # Number of connections to keep open
+    database_max_overflow: int = 10  # Max additional connections beyond pool_size
+    database_pool_timeout: int = 30  # Seconds to wait for a connection from pool
+    database_pool_recycle: int = 1800  # Recycle connections after this many seconds (30 min)
+
     # S3
     s3_endpoint: str = ""
     s3_region: str = "us-east-1"
@@ -26,6 +32,10 @@ class Settings(BaseSettings):
     s3_secret_access_key: str = ""
     s3_use_path_style: bool = True
 
+    # Download settings
+    download_mode: str = "presigned"  # "presigned", "redirect", or "proxy"
+    presigned_url_expiry: int = 3600  # Presigned URL expiry in seconds (default: 1 hour)
+
     @property
     def database_url(self) -> str:
         sslmode = f"?sslmode={self.database_sslmode}" if self.database_sslmode else ""
```
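The `ORCHARD_` environment prefix from the README maps onto these fields through pydantic settings. A minimal sketch of the mechanism, assuming pydantic-settings v2 with `env_prefix="ORCHARD_"` (the real `Settings` class has many more fields):

```python
import os
from pydantic_settings import BaseSettings, SettingsConfigDict

class Settings(BaseSettings):
    # env_prefix makes ORCHARD_DOWNLOAD_MODE populate download_mode, and so on
    model_config = SettingsConfigDict(env_prefix="ORCHARD_")

    download_mode: str = "presigned"
    presigned_url_expiry: int = 3600

os.environ["ORCHARD_DOWNLOAD_MODE"] = "proxy"
print(Settings().download_mode)  # -> "proxy"
```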
**backend/app/database.py**

```python
@@ -1,7 +1,10 @@
from sqlalchemy import create_engine, text
from sqlalchemy import create_engine, text, event
from sqlalchemy.orm import sessionmaker, Session
from sqlalchemy.pool import QueuePool
from typing import Generator
from contextlib import contextmanager
import logging
import time

from .config import get_settings
from .models import Base
```
```python
@@ -9,10 +12,44 @@ from .models import Base
settings = get_settings()
logger = logging.getLogger(__name__)

engine = create_engine(settings.database_url, pool_pre_ping=True)
# Create engine with connection pool configuration
engine = create_engine(
    settings.database_url,
    pool_pre_ping=True,  # Check connection health before using
    poolclass=QueuePool,
    pool_size=settings.database_pool_size,
    max_overflow=settings.database_max_overflow,
    pool_timeout=settings.database_pool_timeout,
    pool_recycle=settings.database_pool_recycle,
)

SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


# Connection pool monitoring
@event.listens_for(engine, "checkout")
def receive_checkout(dbapi_connection, connection_record, connection_proxy):
    """Log when a connection is checked out from the pool"""
    logger.debug(f"Connection checked out from pool: {id(dbapi_connection)}")


@event.listens_for(engine, "checkin")
def receive_checkin(dbapi_connection, connection_record):
    """Log when a connection is returned to the pool"""
    logger.debug(f"Connection returned to pool: {id(dbapi_connection)}")


def get_pool_status() -> dict:
    """Get current connection pool status for monitoring"""
    pool = engine.pool
    return {
        "pool_size": pool.size(),
        "checked_out": pool.checkedout(),
        "overflow": pool.overflow(),
        "checked_in": pool.checkedin(),
    }


def init_db():
    """Create all tables and run migrations"""
    Base.metadata.create_all(bind=engine)
```
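`get_pool_status()` is intended for monitoring; one way to surface it is a small health route. A sketch assuming a FastAPI app in the same package; the route path is hypothetical and not part of the documented API:

```python
from fastapi import APIRouter

from .database import get_pool_status  # the helper defined above

router = APIRouter()

@router.get("/health/db-pool")  # hypothetical route, not in the documented API
def db_pool_health() -> dict:
    # e.g. {"pool_size": 5, "checked_out": 1, "overflow": 0, "checked_in": 4}
    return get_pool_status()
```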
```python
@@ -36,6 +73,77 @@ def _run_migrations():
            END IF;
        END $$;
        """,
        # Add format column to packages table
        """
        DO $$
        BEGIN
            IF NOT EXISTS (
                SELECT 1 FROM information_schema.columns
                WHERE table_name = 'packages' AND column_name = 'format'
            ) THEN
                ALTER TABLE packages ADD COLUMN format VARCHAR(50) DEFAULT 'generic' NOT NULL;
                CREATE INDEX IF NOT EXISTS idx_packages_format ON packages(format);
            END IF;
        END $$;
        """,
        # Add platform column to packages table
        """
        DO $$
        BEGIN
            IF NOT EXISTS (
                SELECT 1 FROM information_schema.columns
                WHERE table_name = 'packages' AND column_name = 'platform'
            ) THEN
                ALTER TABLE packages ADD COLUMN platform VARCHAR(50) DEFAULT 'any' NOT NULL;
                CREATE INDEX IF NOT EXISTS idx_packages_platform ON packages(platform);
            END IF;
        END $$;
        """,
        # Add ref_count index and constraints for artifacts
        """
        DO $$
        BEGIN
            -- Add ref_count index
            IF NOT EXISTS (
                SELECT 1 FROM pg_indexes WHERE indexname = 'idx_artifacts_ref_count'
            ) THEN
                CREATE INDEX idx_artifacts_ref_count ON artifacts(ref_count);
            END IF;

            -- Add ref_count >= 0 constraint
            IF NOT EXISTS (
                SELECT 1 FROM pg_constraint WHERE conname = 'check_ref_count_non_negative'
            ) THEN
                ALTER TABLE artifacts ADD CONSTRAINT check_ref_count_non_negative CHECK (ref_count >= 0);
            END IF;
        END $$;
        """,
        # Add composite indexes for packages and tags
        """
        DO $$
        BEGIN
            -- Composite index for package lookup by project and name
            IF NOT EXISTS (
                SELECT 1 FROM pg_indexes WHERE indexname = 'idx_packages_project_name'
            ) THEN
                CREATE UNIQUE INDEX idx_packages_project_name ON packages(project_id, name);
            END IF;

            -- Composite index for tag lookup by package and name
            IF NOT EXISTS (
                SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tags_package_name'
            ) THEN
                CREATE UNIQUE INDEX idx_tags_package_name ON tags(package_id, name);
            END IF;

            -- Composite index for recent tags queries
            IF NOT EXISTS (
                SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tags_package_created_at'
            ) THEN
                CREATE INDEX idx_tags_package_created_at ON tags(package_id, created_at);
            END IF;
        END $$;
        """,
    ]

    with engine.connect() as conn:
```
```python
@@ -54,3 +162,75 @@ def get_db() -> Generator[Session, None, None]:
        yield db
    finally:
        db.close()


@contextmanager
def transaction(db: Session):
    """
    Context manager for explicit transaction management with savepoint support.

    Usage:
        with transaction(db):
            # operations here
            # automatically commits on success, rolls back on exception
    """
    try:
        yield db
        db.commit()
    except Exception:
        db.rollback()
        raise


@contextmanager
def savepoint(db: Session, name: str = None):
    """
    Create a savepoint for partial rollback support.

    Usage:
        with savepoint(db, "my_savepoint"):
            # operations here
            # rolls back to savepoint on exception, but doesn't rollback whole transaction
    """
    savepoint_obj = db.begin_nested()
    try:
        yield savepoint_obj
        savepoint_obj.commit()
    except Exception:
        savepoint_obj.rollback()
        raise


def retry_on_deadlock(func, max_retries: int = 3, delay: float = 0.1):
    """
    Decorator/wrapper to retry operations on deadlock detection.

    Usage:
        @retry_on_deadlock
        def my_operation(db):
            ...

    Or:
        retry_on_deadlock(lambda: my_operation(db))()
    """
    import functools
    from sqlalchemy.exc import OperationalError

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        last_exception = None
        for attempt in range(max_retries):
            try:
                return func(*args, **kwargs)
            except OperationalError as e:
                # Check for deadlock error codes (PostgreSQL: 40P01, MySQL: 1213)
                error_str = str(e).lower()
                if "deadlock" in error_str or "40p01" in error_str:
                    last_exception = e
                    logger.warning(f"Deadlock detected, retrying (attempt {attempt + 1}/{max_retries})")
                    time.sleep(delay * (attempt + 1))  # Back off a little longer on each retry
                else:
                    raise
        raise last_exception

    return wrapper
```
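Combined with `transaction`, the retry helper wraps a whole unit of work. A usage sketch, assuming it sits in the same package as `database.py`; `move_tag` and its body are hypothetical:

```python
from sqlalchemy.orm import Session

from .database import SessionLocal, transaction, retry_on_deadlock

@retry_on_deadlock
def move_tag(db: Session, tag_id: str, new_artifact_id: str) -> None:
    # Hypothetical unit of work touching both tags and artifacts rows,
    # the kind of operation that can deadlock with a concurrent upload.
    with transaction(db):
        ...  # update the tag row, adjust ref_count on the old and new artifacts

db = SessionLocal()
try:
    move_tag(db, "tag-uuid", "a3f5d8e...")
finally:
    db.close()
```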
**backend/app/models.py**

```python
@@ -38,6 +38,8 @@ class Package(Base):
    project_id = Column(UUID(as_uuid=True), ForeignKey("projects.id", ondelete="CASCADE"), nullable=False)
    name = Column(String(255), nullable=False)
    description = Column(Text)
    format = Column(String(50), default="generic", nullable=False)
    platform = Column(String(50), default="any", nullable=False)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    updated_at = Column(DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow)

@@ -49,6 +51,17 @@ class Package(Base):
    __table_args__ = (
        Index("idx_packages_project_id", "project_id"),
        Index("idx_packages_name", "name"),
        Index("idx_packages_format", "format"),
        Index("idx_packages_platform", "platform"),
        Index("idx_packages_project_name", "project_id", "name", unique=True),  # Composite unique index
        CheckConstraint(
            "format IN ('generic', 'npm', 'pypi', 'docker', 'deb', 'rpm', 'maven', 'nuget', 'helm')",
            name="check_package_format"
        ),
        CheckConstraint(
            "platform IN ('any', 'linux', 'darwin', 'windows', 'linux-amd64', 'linux-arm64', 'darwin-amd64', 'darwin-arm64', 'windows-amd64')",
            name="check_package_platform"
        ),
        {"extend_existing": True},
    )
```
```python
@@ -60,18 +73,39 @@ class Artifact(Base):
    size = Column(BigInteger, nullable=False)
    content_type = Column(String(255))
    original_name = Column(String(1024))
    checksum_md5 = Column(String(32))  # MD5 hash for additional verification
    checksum_sha1 = Column(String(40))  # SHA1 hash for compatibility
    s3_etag = Column(String(64))  # S3 ETag for verification
    artifact_metadata = Column("metadata", JSON, default=dict)  # Format-specific metadata (column name is 'metadata')
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    created_by = Column(String(255), nullable=False)
    ref_count = Column(Integer, default=1)
    s3_key = Column(String(1024), nullable=False)
    format_metadata = Column(JSON, default=dict)  # Format-specific metadata (version, etc.)

    tags = relationship("Tag", back_populates="artifact")
    uploads = relationship("Upload", back_populates="artifact")

    @property
    def sha256(self) -> str:
        """Alias for id - the SHA256 hash of the artifact content"""
        return self.id

    @property
    def format_metadata(self):
        """Alias for artifact_metadata - backward compatibility"""
        return self.artifact_metadata

    @format_metadata.setter
    def format_metadata(self, value):
        """Alias setter for artifact_metadata - backward compatibility"""
        self.artifact_metadata = value

    __table_args__ = (
        Index("idx_artifacts_created_at", "created_at"),
        Index("idx_artifacts_created_by", "created_by"),
        Index("idx_artifacts_ref_count", "ref_count"),  # For cleanup queries
        CheckConstraint("ref_count >= 0", name="check_ref_count_non_negative"),
        CheckConstraint("size > 0", name="check_size_positive"),
    )
```
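The checksum columns above (`checksum_md5`, `checksum_sha1`, plus the SHA256 primary key) can all be computed in a single pass over the upload stream, as the changelog describes. An illustrative sketch, independent of Orchard's actual upload handler:

```python
import hashlib

def compute_checksums(stream, chunk_size: int = 1 << 20) -> dict:
    """Compute SHA256, MD5, and SHA1 in one pass over a binary stream."""
    sha256, md5, sha1 = hashlib.sha256(), hashlib.md5(), hashlib.sha1()
    for chunk in iter(lambda: stream.read(chunk_size), b""):
        sha256.update(chunk)
        md5.update(chunk)
        sha1.update(chunk)
    return {
        "id": sha256.hexdigest(),       # primary key: the content address
        "checksum_md5": md5.hexdigest(),
        "checksum_sha1": sha1.hexdigest(),
        # s3_etag comes back from the S3 PutObject/multipart response instead
    }

with open("app-v1.0.0.tar.gz", "rb") as f:
    print(compute_checksums(f))
```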
```python
@@ -83,6 +117,7 @@ class Tag(Base):
    name = Column(String(255), nullable=False)
    artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    updated_at = Column(DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow)
    created_by = Column(String(255), nullable=False)

    package = relationship("Package", back_populates="tags")
@@ -92,6 +127,8 @@ class Tag(Base):
    __table_args__ = (
        Index("idx_tags_package_id", "package_id"),
        Index("idx_tags_artifact_id", "artifact_id"),
        Index("idx_tags_package_name", "package_id", "name", unique=True),  # Composite unique index
        Index("idx_tags_package_created_at", "package_id", "created_at"),  # For recent tags queries
    )


@@ -102,6 +139,7 @@ class TagHistory(Base):
    tag_id = Column(UUID(as_uuid=True), ForeignKey("tags.id", ondelete="CASCADE"), nullable=False)
    old_artifact_id = Column(String(64), ForeignKey("artifacts.id"))
    new_artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
    change_type = Column(String(20), nullable=False, default="update")
    changed_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    changed_by = Column(String(255), nullable=False)

@@ -109,6 +147,8 @@ class TagHistory(Base):
    __table_args__ = (
        Index("idx_tag_history_tag_id", "tag_id"),
        Index("idx_tag_history_changed_at", "changed_at"),
        CheckConstraint("change_type IN ('create', 'update', 'delete')", name="check_change_type"),
    )


@@ -119,6 +159,11 @@ class Upload(Base):
    artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
    package_id = Column(UUID(as_uuid=True), ForeignKey("packages.id"), nullable=False)
    original_name = Column(String(1024))
    tag_name = Column(String(255))  # Tag assigned during upload
    user_agent = Column(String(512))  # Client identification
    duration_ms = Column(Integer)  # Upload timing in milliseconds
    deduplicated = Column(Boolean, default=False)  # Whether artifact was deduplicated
    checksum_verified = Column(Boolean, default=True)  # Whether checksum was verified
    uploaded_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    uploaded_by = Column(String(255), nullable=False)
    source_ip = Column(String(45))
@@ -130,6 +175,8 @@ class Upload(Base):
        Index("idx_uploads_artifact_id", "artifact_id"),
        Index("idx_uploads_package_id", "package_id"),
        Index("idx_uploads_uploaded_at", "uploaded_at"),
        Index("idx_uploads_package_uploaded_at", "package_id", "uploaded_at"),
        Index("idx_uploads_uploaded_by_at", "uploaded_by", "uploaded_at"),
    )


@@ -202,4 +249,6 @@ class AuditLog(Base):
        Index("idx_audit_logs_resource", "resource"),
        Index("idx_audit_logs_user_id", "user_id"),
        Index("idx_audit_logs_timestamp", "timestamp"),
        Index("idx_audit_logs_resource_timestamp", "resource", "timestamp"),
        Index("idx_audit_logs_user_timestamp", "user_id", "timestamp"),
    )
```
**backend/app/repositories/__init__.py** (new file, 22 lines)

```python
"""
Repository pattern implementation for data access layer.

Repositories abstract database operations from business logic,
providing clean interfaces for CRUD operations on each entity.
"""

from .base import BaseRepository
from .project import ProjectRepository
from .package import PackageRepository
from .artifact import ArtifactRepository
from .tag import TagRepository
from .upload import UploadRepository

__all__ = [
    "BaseRepository",
    "ProjectRepository",
    "PackageRepository",
    "ArtifactRepository",
    "TagRepository",
    "UploadRepository",
]
```
**backend/app/repositories/artifact.py** (new file, 157 lines)

```python
"""
Artifact repository for data access operations.
"""

from typing import Optional, List, Tuple
from sqlalchemy.orm import Session
from sqlalchemy import func, or_
from uuid import UUID

from .base import BaseRepository
from ..models import Artifact, Tag, Upload, Package, Project


class ArtifactRepository(BaseRepository[Artifact]):
    """Repository for Artifact entity operations."""

    model = Artifact

    def get_by_sha256(self, sha256: str) -> Optional[Artifact]:
        """Get artifact by SHA256 hash (primary key)."""
        return self.db.query(Artifact).filter(Artifact.id == sha256).first()

    def exists_by_sha256(self, sha256: str) -> bool:
        """Check if artifact with SHA256 exists."""
        return self.db.query(
            self.db.query(Artifact).filter(Artifact.id == sha256).exists()
        ).scalar()

    def create_artifact(
        self,
        sha256: str,
        size: int,
        s3_key: str,
        created_by: str,
        content_type: Optional[str] = None,
        original_name: Optional[str] = None,
        format_metadata: Optional[dict] = None,
    ) -> Artifact:
        """Create a new artifact."""
        artifact = Artifact(
            id=sha256,
            size=size,
            s3_key=s3_key,
            created_by=created_by,
            content_type=content_type,
            original_name=original_name,
            format_metadata=format_metadata or {},
            ref_count=1,
        )
        self.db.add(artifact)
        self.db.flush()
        return artifact

    def increment_ref_count(self, artifact: Artifact) -> Artifact:
        """Increment artifact reference count."""
        artifact.ref_count += 1
        self.db.flush()
        return artifact

    def decrement_ref_count(self, artifact: Artifact) -> Artifact:
        """
        Decrement artifact reference count.
        Returns the artifact with updated count.
        Does not delete the artifact even if ref_count reaches 0.
        """
        if artifact.ref_count > 0:
            artifact.ref_count -= 1
            self.db.flush()
        return artifact

    def get_orphaned_artifacts(self, limit: int = 100) -> List[Artifact]:
        """Get artifacts with ref_count = 0 (candidates for cleanup)."""
        return (
            self.db.query(Artifact)
            .filter(Artifact.ref_count == 0)
            .limit(limit)
            .all()
        )

    def get_artifacts_without_tags(self, limit: int = 100) -> List[Artifact]:
        """Get artifacts that have no tags pointing to them."""
        # Subquery to find artifact IDs that have tags
        tagged_artifacts = self.db.query(Tag.artifact_id).distinct().subquery()

        return (
            self.db.query(Artifact)
            .filter(~Artifact.id.in_(tagged_artifacts))
            .limit(limit)
            .all()
        )

    def find_by_package(
        self,
        package_id: UUID,
        page: int = 1,
        limit: int = 20,
        content_type: Optional[str] = None,
    ) -> Tuple[List[Artifact], int]:
        """Find artifacts uploaded to a package."""
        # Get distinct artifact IDs from uploads
        artifact_ids_subquery = (
            self.db.query(func.distinct(Upload.artifact_id))
            .filter(Upload.package_id == package_id)
            .subquery()
        )

        query = self.db.query(Artifact).filter(Artifact.id.in_(artifact_ids_subquery))

        if content_type:
            query = query.filter(Artifact.content_type == content_type)

        total = query.count()
        offset = (page - 1) * limit
        artifacts = query.order_by(Artifact.created_at.desc()).offset(offset).limit(limit).all()

        return artifacts, total

    def get_referencing_tags(self, artifact_id: str) -> List[Tuple[Tag, Package, Project]]:
        """Get all tags referencing this artifact with package and project info."""
        return (
            self.db.query(Tag, Package, Project)
            .join(Package, Tag.package_id == Package.id)
            .join(Project, Package.project_id == Project.id)
            .filter(Tag.artifact_id == artifact_id)
            .all()
        )

    def search(self, query_str: str, limit: int = 10) -> List[Tuple[Tag, Artifact, str, str]]:
        """
        Search artifacts by tag name or original filename.
        Returns (tag, artifact, package_name, project_name) tuples.
        """
        search_lower = query_str.lower()
        return (
            self.db.query(Tag, Artifact, Package.name, Project.name)
            .join(Artifact, Tag.artifact_id == Artifact.id)
            .join(Package, Tag.package_id == Package.id)
            .join(Project, Package.project_id == Project.id)
            .filter(
                or_(
                    func.lower(Tag.name).contains(search_lower),
                    func.lower(Artifact.original_name).contains(search_lower)
                )
            )
            .order_by(Tag.name)
            .limit(limit)
            .all()
        )

    def update_metadata(self, artifact: Artifact, metadata: dict) -> Artifact:
        """Update or merge format metadata."""
        if artifact.format_metadata:
            artifact.format_metadata = {**artifact.format_metadata, **metadata}
        else:
            artifact.format_metadata = metadata
        self.db.flush()
        return artifact
```
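`get_orphaned_artifacts` pairs naturally with a periodic cleanup job. A sketch, assuming a `delete_object(s3_key)` callable for the S3 side (no such helper appears in this diff):

```python
from sqlalchemy.orm import Session

from .artifact import ArtifactRepository

def cleanup_orphans(db: Session, delete_object) -> int:
    """Delete S3 objects and rows for artifacts that nothing references anymore."""
    repo = ArtifactRepository(db)
    removed = 0
    for artifact in repo.get_orphaned_artifacts(limit=100):
        delete_object(artifact.s3_key)  # assumed S3 helper, not part of this diff
        repo.delete(artifact)           # BaseRepository.delete flushes the session
        removed += 1
    db.commit()
    return removed
```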
**backend/app/repositories/base.py** (new file, 96 lines)

```python
"""
Base repository class with common CRUD operations.
"""

from typing import TypeVar, Generic, Type, Optional, List, Any, Dict
from sqlalchemy.orm import Session
from sqlalchemy import func, asc, desc
from uuid import UUID

from ..models import Base

T = TypeVar("T", bound=Base)


class BaseRepository(Generic[T]):
    """
    Base repository providing common CRUD operations.

    Subclasses should set `model` class attribute to the SQLAlchemy model.
    """

    model: Type[T]

    def __init__(self, db: Session):
        self.db = db

    def get_by_id(self, id: Any) -> Optional[T]:
        """Get entity by primary key."""
        return self.db.query(self.model).filter(self.model.id == id).first()

    def get_all(
        self,
        skip: int = 0,
        limit: int = 100,
        order_by: str = None,
        order_desc: bool = False,
    ) -> List[T]:
        """Get all entities with pagination and optional ordering."""
        query = self.db.query(self.model)

        if order_by and hasattr(self.model, order_by):
            column = getattr(self.model, order_by)
            query = query.order_by(desc(column) if order_desc else asc(column))

        return query.offset(skip).limit(limit).all()

    def count(self) -> int:
        """Count total entities."""
        return self.db.query(func.count(self.model.id)).scalar() or 0

    def create(self, **kwargs) -> T:
        """Create a new entity."""
        entity = self.model(**kwargs)
        self.db.add(entity)
        self.db.flush()  # Flush to get ID without committing
        return entity

    def update(self, entity: T, **kwargs) -> T:
        """Update an existing entity."""
        for key, value in kwargs.items():
            if hasattr(entity, key):
                setattr(entity, key, value)
        self.db.flush()
        return entity

    def delete(self, entity: T) -> None:
        """Delete an entity."""
        self.db.delete(entity)
        self.db.flush()

    def delete_by_id(self, id: Any) -> bool:
        """Delete entity by ID. Returns True if deleted, False if not found."""
        entity = self.get_by_id(id)
        if entity:
            self.delete(entity)
            return True
        return False

    def exists(self, id: Any) -> bool:
        """Check if entity exists by ID."""
        return self.db.query(
            self.db.query(self.model).filter(self.model.id == id).exists()
        ).scalar()

    def commit(self) -> None:
        """Commit the current transaction."""
        self.db.commit()

    def rollback(self) -> None:
        """Rollback the current transaction."""
        self.db.rollback()

    def refresh(self, entity: T) -> T:
        """Refresh entity from database."""
        self.db.refresh(entity)
        return entity
```
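A caller composes these repositories over a single session and owns the commit, since repository methods only flush. A usage sketch with illustrative values:

```python
from .database import SessionLocal
from .repositories import PackageRepository, ProjectRepository

db = SessionLocal()
try:
    projects = ProjectRepository(db)
    packages = PackageRepository(db)

    project = projects.get_by_id("00000000-0000-0000-0000-000000000000")  # illustrative UUID
    if project is not None:
        items, total = packages.list_by_project(project.id, page=1, limit=20, format="npm")
        print(total, [p.name for p in items])
    db.commit()  # repositories only flush; the caller owns the commit
finally:
    db.close()
```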
177
backend/app/repositories/package.py
Normal file
177
backend/app/repositories/package.py
Normal file
@@ -0,0 +1,177 @@
"""
Package repository for data access operations.
"""

from typing import Optional, List, Tuple
from sqlalchemy.orm import Session
from sqlalchemy import func, or_, asc, desc
from uuid import UUID

from .base import BaseRepository
from ..models import Package, Project, Tag, Upload, Artifact


class PackageRepository(BaseRepository[Package]):
    """Repository for Package entity operations."""

    model = Package

    def get_by_name(self, project_id: UUID, name: str) -> Optional[Package]:
        """Get package by name within a project."""
        return (
            self.db.query(Package)
            .filter(Package.project_id == project_id, Package.name == name)
            .first()
        )

    def get_by_project_and_name(self, project_name: str, package_name: str) -> Optional[Package]:
        """Get package by project name and package name."""
        return (
            self.db.query(Package)
            .join(Project, Package.project_id == Project.id)
            .filter(Project.name == project_name, Package.name == package_name)
            .first()
        )

    def exists_by_name(self, project_id: UUID, name: str) -> bool:
        """Check if package with name exists in project."""
        return self.db.query(
            self.db.query(Package)
            .filter(Package.project_id == project_id, Package.name == name)
            .exists()
        ).scalar()

    def list_by_project(
        self,
        project_id: UUID,
        page: int = 1,
        limit: int = 20,
        search: Optional[str] = None,
        format: Optional[str] = None,
        platform: Optional[str] = None,
        sort: str = "name",
        order: str = "asc",
    ) -> Tuple[List[Package], int]:
        """
        List packages in a project with filtering and pagination.

        Returns tuple of (packages, total_count).
        """
        query = self.db.query(Package).filter(Package.project_id == project_id)

        # Apply search filter
        if search:
            search_lower = search.lower()
            query = query.filter(
                or_(
                    func.lower(Package.name).contains(search_lower),
                    func.lower(Package.description).contains(search_lower)
                )
            )

        # Apply format filter
        if format:
            query = query.filter(Package.format == format)

        # Apply platform filter
        if platform:
            query = query.filter(Package.platform == platform)

        # Get total count
        total = query.count()

        # Apply sorting
        sort_columns = {
            "name": Package.name,
            "created_at": Package.created_at,
            "updated_at": Package.updated_at,
        }
        sort_column = sort_columns.get(sort, Package.name)
        if order == "desc":
            query = query.order_by(desc(sort_column))
        else:
            query = query.order_by(asc(sort_column))

        # Apply pagination
        offset = (page - 1) * limit
        packages = query.offset(offset).limit(limit).all()

        return packages, total

    def create_package(
        self,
        project_id: UUID,
        name: str,
        description: Optional[str] = None,
        format: str = "generic",
        platform: str = "any",
    ) -> Package:
        """Create a new package."""
        return self.create(
            project_id=project_id,
            name=name,
            description=description,
            format=format,
            platform=platform,
        )

    def update_package(
        self,
        package: Package,
        name: Optional[str] = None,
        description: Optional[str] = None,
        format: Optional[str] = None,
        platform: Optional[str] = None,
    ) -> Package:
        """Update package fields."""
        updates = {}
        if name is not None:
            updates["name"] = name
        if description is not None:
            updates["description"] = description
        if format is not None:
            updates["format"] = format
        if platform is not None:
            updates["platform"] = platform
        return self.update(package, **updates)

    def get_stats(self, package_id: UUID) -> dict:
        """Get package statistics (tag count, artifact count, total size)."""
        tag_count = (
            self.db.query(func.count(Tag.id))
            .filter(Tag.package_id == package_id)
            .scalar() or 0
        )

        artifact_stats = (
            self.db.query(
                func.count(func.distinct(Upload.artifact_id)),
                func.coalesce(func.sum(Artifact.size), 0)
            )
            .join(Artifact, Upload.artifact_id == Artifact.id)
            .filter(Upload.package_id == package_id)
            .first()
        )

        return {
            "tag_count": tag_count,
            "artifact_count": artifact_stats[0] if artifact_stats else 0,
            "total_size": artifact_stats[1] if artifact_stats else 0,
        }

    def search(self, query_str: str, limit: int = 10) -> List[Tuple[Package, str]]:
        """Search packages by name or description. Returns (package, project_name) tuples."""
        search_lower = query_str.lower()
        return (
            self.db.query(Package, Project.name)
            .join(Project, Package.project_id == Project.id)
            .filter(
                or_(
                    func.lower(Package.name).contains(search_lower),
                    func.lower(Package.description).contains(search_lower)
                )
            )
            .order_by(Package.name)
            .limit(limit)
            .all()
        )
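
Usage note: a short sketch of the list/paginate contract above. The `first_page_of_packages` helper is hypothetical; `total` is counted before pagination, so callers can derive page counts from it:

```python
import math
from uuid import UUID
from sqlalchemy.orm import Session


def first_page_of_packages(db: Session, project_id: UUID):
    repo = PackageRepository(db)
    packages, total = repo.list_by_project(
        project_id, page=1, limit=20, sort="updated_at", order="desc"
    )
    pages = math.ceil(total / 20) if total else 0  # total is pre-pagination
    return packages, pages
```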
backend/app/repositories/project.py (new file, 132 lines)
@@ -0,0 +1,132 @@
"""
Project repository for data access operations.
"""

from typing import Optional, List, Tuple
from sqlalchemy.orm import Session
from sqlalchemy import func, or_, asc, desc
from uuid import UUID

from .base import BaseRepository
from ..models import Project


class ProjectRepository(BaseRepository[Project]):
    """Repository for Project entity operations."""

    model = Project

    def get_by_name(self, name: str) -> Optional[Project]:
        """Get project by unique name."""
        return self.db.query(Project).filter(Project.name == name).first()

    def exists_by_name(self, name: str) -> bool:
        """Check if project with name exists."""
        return self.db.query(
            self.db.query(Project).filter(Project.name == name).exists()
        ).scalar()

    def list_accessible(
        self,
        user_id: str,
        page: int = 1,
        limit: int = 20,
        search: Optional[str] = None,
        visibility: Optional[str] = None,
        sort: str = "name",
        order: str = "asc",
    ) -> Tuple[List[Project], int]:
        """
        List projects accessible to user with filtering and pagination.

        Returns tuple of (projects, total_count).
        """
        # Base query - filter by access
        query = self.db.query(Project).filter(
            or_(Project.is_public == True, Project.created_by == user_id)
        )

        # Apply visibility filter
        if visibility == "public":
            query = query.filter(Project.is_public == True)
        elif visibility == "private":
            query = query.filter(Project.is_public == False, Project.created_by == user_id)

        # Apply search filter
        if search:
            search_lower = search.lower()
            query = query.filter(
                or_(
                    func.lower(Project.name).contains(search_lower),
                    func.lower(Project.description).contains(search_lower)
                )
            )

        # Get total count before pagination
        total = query.count()

        # Apply sorting
        sort_columns = {
            "name": Project.name,
            "created_at": Project.created_at,
            "updated_at": Project.updated_at,
        }
        sort_column = sort_columns.get(sort, Project.name)
        if order == "desc":
            query = query.order_by(desc(sort_column))
        else:
            query = query.order_by(asc(sort_column))

        # Apply pagination
        offset = (page - 1) * limit
        projects = query.offset(offset).limit(limit).all()

        return projects, total

    def create_project(
        self,
        name: str,
        created_by: str,
        description: Optional[str] = None,
        is_public: bool = True,
    ) -> Project:
        """Create a new project."""
        return self.create(
            name=name,
            description=description,
            is_public=is_public,
            created_by=created_by,
        )

    def update_project(
        self,
        project: Project,
        name: Optional[str] = None,
        description: Optional[str] = None,
        is_public: Optional[bool] = None,
    ) -> Project:
        """Update project fields."""
        updates = {}
        if name is not None:
            updates["name"] = name
        if description is not None:
            updates["description"] = description
        if is_public is not None:
            updates["is_public"] = is_public
        return self.update(project, **updates)

    def search(self, query_str: str, limit: int = 10) -> List[Project]:
        """Search projects by name or description."""
        search_lower = query_str.lower()
        return (
            self.db.query(Project)
            .filter(
                or_(
                    func.lower(Project.name).contains(search_lower),
                    func.lower(Project.description).contains(search_lower)
                )
            )
            .order_by(Project.name)
            .limit(limit)
            .all()
        )
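
Usage note: a hypothetical sketch of the two access paths through `list_accessible` above, showing how the public-or-owner filter composes with the `visibility` parameter:

```python
from sqlalchemy.orm import Session


def dashboard_projects(db: Session, user_id: str):
    repo = ProjectRepository(db)
    # public projects OR projects the user created
    visible, total = repo.list_accessible(user_id=user_id)
    # narrowed to only the user's own private projects
    private_only, _ = repo.list_accessible(user_id=user_id, visibility="private")
    return visible, private_only, total
```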
backend/app/repositories/tag.py (new file, 168 lines)
@@ -0,0 +1,168 @@
"""
Tag repository for data access operations.
"""

from typing import Optional, List, Tuple
from sqlalchemy.orm import Session
from sqlalchemy import func, or_, asc, desc
from uuid import UUID

from .base import BaseRepository
from ..models import Tag, TagHistory, Artifact, Package, Project


class TagRepository(BaseRepository[Tag]):
    """Repository for Tag entity operations."""

    model = Tag

    def get_by_name(self, package_id: UUID, name: str) -> Optional[Tag]:
        """Get tag by name within a package."""
        return (
            self.db.query(Tag)
            .filter(Tag.package_id == package_id, Tag.name == name)
            .first()
        )

    def get_with_artifact(self, package_id: UUID, name: str) -> Optional[Tuple[Tag, Artifact]]:
        """Get tag with its artifact."""
        return (
            self.db.query(Tag, Artifact)
            .join(Artifact, Tag.artifact_id == Artifact.id)
            .filter(Tag.package_id == package_id, Tag.name == name)
            .first()
        )

    def exists_by_name(self, package_id: UUID, name: str) -> bool:
        """Check if tag with name exists in package."""
        return self.db.query(
            self.db.query(Tag)
            .filter(Tag.package_id == package_id, Tag.name == name)
            .exists()
        ).scalar()

    def list_by_package(
        self,
        package_id: UUID,
        page: int = 1,
        limit: int = 20,
        search: Optional[str] = None,
        sort: str = "name",
        order: str = "asc",
    ) -> Tuple[List[Tuple[Tag, Artifact]], int]:
        """
        List tags in a package with artifact metadata.

        Returns tuple of ((tag, artifact) tuples, total_count).
        """
        query = (
            self.db.query(Tag, Artifact)
            .join(Artifact, Tag.artifact_id == Artifact.id)
            .filter(Tag.package_id == package_id)
        )

        # Apply search filter (tag name or artifact original filename)
        if search:
            search_lower = search.lower()
            query = query.filter(
                or_(
                    func.lower(Tag.name).contains(search_lower),
                    func.lower(Artifact.original_name).contains(search_lower)
                )
            )

        # Get total count
        total = query.count()

        # Apply sorting
        sort_columns = {
            "name": Tag.name,
            "created_at": Tag.created_at,
        }
        sort_column = sort_columns.get(sort, Tag.name)
        if order == "desc":
            query = query.order_by(desc(sort_column))
        else:
            query = query.order_by(asc(sort_column))

        # Apply pagination
        offset = (page - 1) * limit
        results = query.offset(offset).limit(limit).all()

        return results, total

    def create_tag(
        self,
        package_id: UUID,
        name: str,
        artifact_id: str,
        created_by: str,
    ) -> Tag:
        """Create a new tag."""
        return self.create(
            package_id=package_id,
            name=name,
            artifact_id=artifact_id,
            created_by=created_by,
        )

    def update_artifact(
        self,
        tag: Tag,
        new_artifact_id: str,
        changed_by: str,
        record_history: bool = True,
    ) -> Tag:
        """
        Update tag to point to a different artifact.
        Optionally records change in tag history.
        """
        old_artifact_id = tag.artifact_id

        if record_history and old_artifact_id != new_artifact_id:
            history = TagHistory(
                tag_id=tag.id,
                old_artifact_id=old_artifact_id,
                new_artifact_id=new_artifact_id,
                changed_by=changed_by,
            )
            self.db.add(history)

        tag.artifact_id = new_artifact_id
        tag.created_by = changed_by
        self.db.flush()
        return tag

    def get_history(self, tag_id: UUID) -> List[TagHistory]:
        """Get tag change history."""
        return (
            self.db.query(TagHistory)
            .filter(TagHistory.tag_id == tag_id)
            .order_by(TagHistory.changed_at.desc())
            .all()
        )

    def get_latest_in_package(self, package_id: UUID) -> Optional[Tag]:
        """Get the most recently created/updated tag in a package."""
        return (
            self.db.query(Tag)
            .filter(Tag.package_id == package_id)
            .order_by(Tag.created_at.desc())
            .first()
        )

    def get_by_artifact(self, artifact_id: str) -> List[Tag]:
        """Get all tags pointing to an artifact."""
        return (
            self.db.query(Tag)
            .filter(Tag.artifact_id == artifact_id)
            .all()
        )

    def count_by_artifact(self, artifact_id: str) -> int:
        """Count tags pointing to an artifact."""
        return (
            self.db.query(func.count(Tag.id))
            .filter(Tag.artifact_id == artifact_id)
            .scalar() or 0
        )
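
Usage note: a sketch of the retarget-and-audit flow that `update_artifact` and `get_history` provide. The `retag_latest` helper is hypothetical:

```python
from uuid import UUID
from sqlalchemy.orm import Session


def retag_latest(db: Session, package_id: UUID, new_artifact_id: str, user: str):
    repo = TagRepository(db)
    tag = repo.get_by_name(package_id, "latest")
    if tag is None:
        return []
    # Repoints the tag and records a TagHistory row in the same transaction
    repo.update_artifact(tag, new_artifact_id=new_artifact_id, changed_by=user)
    repo.commit()
    return repo.get_history(tag.id)  # newest change first
```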
backend/app/repositories/upload.py (new file, 136 lines)
@@ -0,0 +1,136 @@
"""
Upload repository for data access operations.
"""

from typing import Optional, List, Tuple
from datetime import datetime
from sqlalchemy.orm import Session
from sqlalchemy import func, desc
from uuid import UUID

from .base import BaseRepository
from ..models import Upload, Artifact, Package, Project


class UploadRepository(BaseRepository[Upload]):
    """Repository for Upload entity operations."""

    model = Upload

    def create_upload(
        self,
        artifact_id: str,
        package_id: UUID,
        uploaded_by: str,
        original_name: Optional[str] = None,
        source_ip: Optional[str] = None,
    ) -> Upload:
        """Record a new upload event."""
        return self.create(
            artifact_id=artifact_id,
            package_id=package_id,
            original_name=original_name,
            uploaded_by=uploaded_by,
            source_ip=source_ip,
        )

    def list_by_package(
        self,
        package_id: UUID,
        page: int = 1,
        limit: int = 20,
    ) -> Tuple[List[Upload], int]:
        """List uploads for a package with pagination."""
        query = self.db.query(Upload).filter(Upload.package_id == package_id)

        total = query.count()
        offset = (page - 1) * limit
        uploads = query.order_by(Upload.uploaded_at.desc()).offset(offset).limit(limit).all()

        return uploads, total

    def list_by_artifact(self, artifact_id: str) -> List[Upload]:
        """List all uploads of a specific artifact."""
        return (
            self.db.query(Upload)
            .filter(Upload.artifact_id == artifact_id)
            .order_by(Upload.uploaded_at.desc())
            .all()
        )

    def get_latest_for_package(self, package_id: UUID) -> Optional[Upload]:
        """Get the most recent upload for a package."""
        return (
            self.db.query(Upload)
            .filter(Upload.package_id == package_id)
            .order_by(Upload.uploaded_at.desc())
            .first()
        )

    def get_latest_timestamp(self, package_id: UUID) -> Optional[datetime]:
        """Get timestamp of most recent upload for a package."""
        result = (
            self.db.query(func.max(Upload.uploaded_at))
            .filter(Upload.package_id == package_id)
            .scalar()
        )
        return result

    def count_by_artifact(self, artifact_id: str) -> int:
        """Count uploads of a specific artifact."""
        return (
            self.db.query(func.count(Upload.id))
            .filter(Upload.artifact_id == artifact_id)
            .scalar() or 0
        )

    def count_by_package(self, package_id: UUID) -> int:
        """Count total uploads for a package."""
        return (
            self.db.query(func.count(Upload.id))
            .filter(Upload.package_id == package_id)
            .scalar() or 0
        )

    def get_distinct_artifacts_count(self, package_id: UUID) -> int:
        """Count distinct artifacts uploaded to a package."""
        return (
            self.db.query(func.count(func.distinct(Upload.artifact_id)))
            .filter(Upload.package_id == package_id)
            .scalar() or 0
        )

    def get_uploads_by_user(
        self,
        user_id: str,
        page: int = 1,
        limit: int = 20,
    ) -> Tuple[List[Upload], int]:
        """List uploads by a specific user."""
        query = self.db.query(Upload).filter(Upload.uploaded_by == user_id)

        total = query.count()
        offset = (page - 1) * limit
        uploads = query.order_by(Upload.uploaded_at.desc()).offset(offset).limit(limit).all()

        return uploads, total

    def get_upload_stats(self, package_id: UUID) -> dict:
        """Get upload statistics for a package."""
        stats = (
            self.db.query(
                func.count(Upload.id),
                func.count(func.distinct(Upload.artifact_id)),
                func.min(Upload.uploaded_at),
                func.max(Upload.uploaded_at),
            )
            .filter(Upload.package_id == package_id)
            .first()
        )

        return {
            "total_uploads": stats[0] if stats else 0,
            "unique_artifacts": stats[1] if stats else 0,
            "first_upload": stats[2] if stats else None,
            "last_upload": stats[3] if stats else None,
        }
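
Usage note: the aggregate query above returns a single row of counters; a hypothetical sketch of consuming it:

```python
from uuid import UUID
from sqlalchemy.orm import Session


def describe_uploads(db: Session, package_id: UUID) -> str:
    stats = UploadRepository(db).get_upload_stats(package_id)
    return (
        f"{stats['total_uploads']} uploads of {stats['unique_artifacts']} artifacts "
        f"between {stats['first_upload']} and {stats['last_upload']}"
    )
```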
File diff suppressed because it is too large
@@ -39,10 +39,17 @@ class ProjectResponse(BaseModel):
        from_attributes = True


# Package format and platform enums
PACKAGE_FORMATS = ["generic", "npm", "pypi", "docker", "deb", "rpm", "maven", "nuget", "helm"]
PACKAGE_PLATFORMS = ["any", "linux", "darwin", "windows", "linux-amd64", "linux-arm64", "darwin-amd64", "darwin-arm64", "windows-amd64"]


# Package schemas
class PackageCreate(BaseModel):
    name: str
    description: Optional[str] = None
    format: str = "generic"
    platform: str = "any"


class PackageResponse(BaseModel):
@@ -50,6 +57,8 @@ class PackageResponse(BaseModel):
    project_id: UUID
    name: str
    description: Optional[str]
    format: str
    platform: str
    created_at: datetime
    updated_at: datetime

@@ -57,12 +66,46 @@ class PackageResponse(BaseModel):
        from_attributes = True


class TagSummary(BaseModel):
    """Lightweight tag info for embedding in package responses"""
    name: str
    artifact_id: str
    created_at: datetime


class PackageDetailResponse(BaseModel):
    """Package with aggregated metadata"""
    id: UUID
    project_id: UUID
    name: str
    description: Optional[str]
    format: str
    platform: str
    created_at: datetime
    updated_at: datetime
    # Aggregated fields
    tag_count: int = 0
    artifact_count: int = 0
    total_size: int = 0
    latest_tag: Optional[str] = None
    latest_upload_at: Optional[datetime] = None
    # Recent tags (limit 5)
    recent_tags: List[TagSummary] = []

    class Config:
        from_attributes = True


# Artifact schemas
class ArtifactResponse(BaseModel):
    id: str
    sha256: str  # Explicit SHA256 field (same as id)
    size: int
    content_type: Optional[str]
    original_name: Optional[str]
    checksum_md5: Optional[str] = None
    checksum_sha1: Optional[str] = None
    s3_etag: Optional[str] = None
    created_at: datetime
    created_by: str
    ref_count: int
@@ -90,13 +133,97 @@ class TagResponse(BaseModel):
        from_attributes = True


class TagDetailResponse(BaseModel):
    """Tag with embedded artifact metadata"""
    id: UUID
    package_id: UUID
    name: str
    artifact_id: str
    created_at: datetime
    created_by: str
    # Artifact metadata
    artifact_size: int
    artifact_content_type: Optional[str]
    artifact_original_name: Optional[str]
    artifact_created_at: datetime
    artifact_format_metadata: Optional[Dict[str, Any]] = None

    class Config:
        from_attributes = True


class TagHistoryResponse(BaseModel):
    """History entry for tag changes"""
    id: UUID
    tag_id: UUID
    old_artifact_id: Optional[str]
    new_artifact_id: str
    changed_at: datetime
    changed_by: str

    class Config:
        from_attributes = True


class ArtifactTagInfo(BaseModel):
    """Tag info for embedding in artifact responses"""
    id: UUID
    name: str
    package_id: UUID
    package_name: str
    project_name: str


class ArtifactDetailResponse(BaseModel):
    """Artifact with list of tags/packages referencing it"""
    id: str
    sha256: str  # Explicit SHA256 field (same as id)
    size: int
    content_type: Optional[str]
    original_name: Optional[str]
    checksum_md5: Optional[str] = None
    checksum_sha1: Optional[str] = None
    s3_etag: Optional[str] = None
    created_at: datetime
    created_by: str
    ref_count: int
    format_metadata: Optional[Dict[str, Any]] = None
    tags: List[ArtifactTagInfo] = []

    class Config:
        from_attributes = True


class PackageArtifactResponse(BaseModel):
    """Artifact with tags for package artifact listing"""
    id: str
    sha256: str  # Explicit SHA256 field (same as id)
    size: int
    content_type: Optional[str]
    original_name: Optional[str]
    checksum_md5: Optional[str] = None
    checksum_sha1: Optional[str] = None
    s3_etag: Optional[str] = None
    created_at: datetime
    created_by: str
    format_metadata: Optional[Dict[str, Any]] = None
    tags: List[str] = []  # Tag names pointing to this artifact

    class Config:
        from_attributes = True


# Upload response
class UploadResponse(BaseModel):
    artifact_id: str
    sha256: str  # Explicit SHA256 field (same as artifact_id)
    size: int
    project: str
    package: str
    tag: Optional[str]
    checksum_md5: Optional[str] = None
    checksum_sha1: Optional[str] = None
    s3_etag: Optional[str] = None
    format_metadata: Optional[Dict[str, Any]] = None
    deduplicated: bool = False

@@ -158,6 +285,65 @@ class ConsumerResponse(BaseModel):
        from_attributes = True


# Global search schemas
class SearchResultProject(BaseModel):
    """Project result for global search"""
    id: UUID
    name: str
    description: Optional[str]
    is_public: bool

    class Config:
        from_attributes = True


class SearchResultPackage(BaseModel):
    """Package result for global search"""
    id: UUID
    project_id: UUID
    project_name: str
    name: str
    description: Optional[str]
    format: str

    class Config:
        from_attributes = True


class SearchResultArtifact(BaseModel):
    """Artifact/tag result for global search"""
    tag_id: UUID
    tag_name: str
    artifact_id: str
    package_id: UUID
    package_name: str
    project_name: str
    original_name: Optional[str]


class GlobalSearchResponse(BaseModel):
    """Combined search results across all entity types"""
    query: str
    projects: List[SearchResultProject]
    packages: List[SearchResultPackage]
    artifacts: List[SearchResultArtifact]
    counts: Dict[str, int]  # Total counts for each type


# Presigned URL response
class PresignedUrlResponse(BaseModel):
    """Response containing a presigned URL for direct S3 download"""
    url: str
    expires_at: datetime
    method: str = "GET"
    artifact_id: str
    size: int
    content_type: Optional[str] = None
    original_name: Optional[str] = None
    checksum_sha256: Optional[str] = None
    checksum_md5: Optional[str] = None


# Health check
class HealthResponse(BaseModel):
    status: str
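
Usage note: these response models all set `from_attributes = True`, so they can be populated straight from ORM rows. A minimal sketch, assuming Pydantic v2's `model_validate` (on v1 the equivalent is `from_orm`):

```python
from uuid import UUID
from sqlalchemy.orm import Session


def package_payload(db: Session, package_id: UUID) -> dict:
    pkg = PackageRepository(db).get_by_id(package_id)  # SQLAlchemy instance
    resp = PackageResponse.model_validate(pkg)         # reads attributes, not keys
    return resp.model_dump()                           # plain dict for the JSON body
```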
backend/app/services/__init__.py (new file, 9 lines)
@@ -0,0 +1,9 @@
"""
Service layer for business logic.
"""

from .artifact_cleanup import ArtifactCleanupService

__all__ = [
    "ArtifactCleanupService",
]
backend/app/services/artifact_cleanup.py (new file, 181 lines)
@@ -0,0 +1,181 @@
"""
Service for artifact reference counting and cleanup.
"""

from typing import List, Optional, Tuple
from sqlalchemy.orm import Session
import logging

from ..models import Artifact, Tag, Upload, Package
from ..repositories.artifact import ArtifactRepository
from ..repositories.tag import TagRepository
from ..storage import S3Storage

logger = logging.getLogger(__name__)


class ArtifactCleanupService:
    """
    Service for managing artifact reference counts and cleaning up orphaned artifacts.

    Reference counting rules:
    - ref_count starts at 1 when artifact is first uploaded
    - ref_count increments when the same artifact is uploaded again (deduplication)
    - ref_count decrements when a tag is deleted or updated to point elsewhere
    - ref_count decrements when a package is deleted (for each tag pointing to artifact)
    - When ref_count reaches 0, artifact is a candidate for deletion from S3
    """

    def __init__(self, db: Session, storage: Optional[S3Storage] = None):
        self.db = db
        self.storage = storage
        self.artifact_repo = ArtifactRepository(db)
        self.tag_repo = TagRepository(db)

    def on_tag_deleted(self, artifact_id: str) -> Optional[Artifact]:
        """
        Called when a tag is deleted.
        Decrements ref_count for the artifact the tag was pointing to.
        """
        artifact = self.artifact_repo.get_by_sha256(artifact_id)
        if artifact:
            artifact = self.artifact_repo.decrement_ref_count(artifact)
            logger.info(f"Decremented ref_count for artifact {artifact_id}: now {artifact.ref_count}")
        return artifact

    def on_tag_updated(self, old_artifact_id: str, new_artifact_id: str) -> Tuple[Optional[Artifact], Optional[Artifact]]:
        """
        Called when a tag is updated to point to a different artifact.
        Decrements ref_count for old artifact, increments for new (if different).

        Returns (old_artifact, new_artifact) tuple.
        """
        old_artifact = None
        new_artifact = None

        if old_artifact_id != new_artifact_id:
            # Decrement old artifact ref_count
            old_artifact = self.artifact_repo.get_by_sha256(old_artifact_id)
            if old_artifact:
                old_artifact = self.artifact_repo.decrement_ref_count(old_artifact)
                logger.info(f"Decremented ref_count for old artifact {old_artifact_id}: now {old_artifact.ref_count}")

            # Increment new artifact ref_count
            new_artifact = self.artifact_repo.get_by_sha256(new_artifact_id)
            if new_artifact:
                new_artifact = self.artifact_repo.increment_ref_count(new_artifact)
                logger.info(f"Incremented ref_count for new artifact {new_artifact_id}: now {new_artifact.ref_count}")

        return old_artifact, new_artifact

    def on_package_deleted(self, package_id) -> List[str]:
        """
        Called when a package is deleted.
        Decrements ref_count for all artifacts that had tags in the package.

        Returns list of artifact IDs that were affected.
        """
        # Get all tags in the package before deletion
        tags = self.db.query(Tag).filter(Tag.package_id == package_id).all()

        affected_artifacts = []
        for tag in tags:
            artifact = self.artifact_repo.get_by_sha256(tag.artifact_id)
            if artifact:
                self.artifact_repo.decrement_ref_count(artifact)
                affected_artifacts.append(tag.artifact_id)
                logger.info(f"Decremented ref_count for artifact {tag.artifact_id} (package delete)")

        return affected_artifacts

    def cleanup_orphaned_artifacts(self, batch_size: int = 100, dry_run: bool = False) -> List[str]:
        """
        Find and delete artifacts with ref_count = 0.

        Args:
            batch_size: Maximum number of artifacts to process
            dry_run: If True, only report what would be deleted without actually deleting

        Returns:
            List of artifact IDs that were (or would be) deleted
        """
        orphaned = self.artifact_repo.get_orphaned_artifacts(limit=batch_size)

        deleted_ids = []
        for artifact in orphaned:
            if dry_run:
                logger.info(f"[DRY RUN] Would delete orphaned artifact: {artifact.id}")
                deleted_ids.append(artifact.id)
            else:
                try:
                    # Delete from S3 first
                    if self.storage:
                        self.storage.delete(artifact.s3_key)
                        logger.info(f"Deleted artifact from S3: {artifact.s3_key}")

                    # Then delete from database
                    self.artifact_repo.delete(artifact)
                    deleted_ids.append(artifact.id)
                    logger.info(f"Deleted orphaned artifact from database: {artifact.id}")
                except Exception as e:
                    logger.error(f"Failed to delete artifact {artifact.id}: {e}")

        if not dry_run and deleted_ids:
            self.db.commit()

        return deleted_ids

    def get_orphaned_count(self) -> int:
        """Get count of artifacts with ref_count = 0."""
        from sqlalchemy import func
        return (
            self.db.query(func.count(Artifact.id))
            .filter(Artifact.ref_count == 0)
            .scalar() or 0
        )

    def verify_ref_counts(self, fix: bool = False) -> List[dict]:
        """
        Verify that ref_counts match actual tag references.

        Args:
            fix: If True, fix any mismatched ref_counts

        Returns:
            List of artifacts with mismatched ref_counts
        """
        from sqlalchemy import func

        # Get actual tag counts per artifact
        tag_counts = (
            self.db.query(Tag.artifact_id, func.count(Tag.id).label("tag_count"))
            .group_by(Tag.artifact_id)
            .all()
        )
        tag_count_map = {artifact_id: count for artifact_id, count in tag_counts}

        # Check all artifacts
        artifacts = self.db.query(Artifact).all()
        mismatches = []

        for artifact in artifacts:
            actual_count = tag_count_map.get(artifact.id, 0)
            # ref_count should be at least 1 (initial upload) + additional uploads
            # But tags are the primary reference, so we check against tag count

            if artifact.ref_count < actual_count:
                mismatch = {
                    "artifact_id": artifact.id,
                    "stored_ref_count": artifact.ref_count,
                    "actual_tag_count": actual_count,
                }
                mismatches.append(mismatch)

                if fix:
                    artifact.ref_count = max(actual_count, 1)
                    logger.warning(f"Fixed ref_count for artifact {artifact.id}: {mismatch['stored_ref_count']} -> {artifact.ref_count}")

        if fix and mismatches:
            self.db.commit()

        return mismatches
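
Usage note: a hypothetical sketch of the dry-run-then-delete pattern the service supports. Passing `storage=None` skips the S3 delete and only removes database rows, per the `if self.storage` guard above:

```python
from sqlalchemy.orm import Session


def purge_orphans(db: Session) -> list:
    svc = ArtifactCleanupService(db, storage=None)
    would_delete = svc.cleanup_orphaned_artifacts(dry_run=True)  # report only
    if would_delete:
        return svc.cleanup_orphaned_artifacts()  # delete and commit
    return []
```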
@@ -1,6 +1,6 @@
import hashlib
import logging
from typing import BinaryIO, Tuple, Optional, Dict, Any, Generator
from typing import BinaryIO, Tuple, Optional, Dict, Any, Generator, NamedTuple
import boto3
from botocore.config import Config
from botocore.exceptions import ClientError
@@ -18,6 +18,16 @@ MULTIPART_CHUNK_SIZE = 10 * 1024 * 1024
HASH_CHUNK_SIZE = 8 * 1024 * 1024


class StorageResult(NamedTuple):
    """Result of storing a file with all computed checksums"""
    sha256: str
    size: int
    s3_key: str
    md5: Optional[str] = None
    sha1: Optional[str] = None
    s3_etag: Optional[str] = None


class S3Storage:
    def __init__(self):
        config = Config(s3={"addressing_style": "path"} if settings.s3_use_path_style else {})
@@ -34,9 +44,9 @@ class S3Storage:
        # Store active multipart uploads for resumable support
        self._active_uploads: Dict[str, Dict[str, Any]] = {}

    def store(self, file: BinaryIO, content_length: Optional[int] = None) -> Tuple[str, int, str]:
    def store(self, file: BinaryIO, content_length: Optional[int] = None) -> StorageResult:
        """
        Store a file and return its SHA256 hash, size, and s3_key.
        Store a file and return StorageResult with all checksums.
        Content-addressable: if the file already exists, just return the hash.
        Uses multipart upload for files larger than MULTIPART_THRESHOLD.
        """
@@ -46,45 +56,76 @@ class S3Storage:
        else:
            return self._store_multipart(file, content_length)

    def _store_simple(self, file: BinaryIO) -> Tuple[str, int, str]:
    def _store_simple(self, file: BinaryIO) -> StorageResult:
        """Store a small file using simple put_object"""
        # Read file and compute hash
        # Read file and compute all hashes
        content = file.read()
        sha256_hash = hashlib.sha256(content).hexdigest()
        md5_hash = hashlib.md5(content).hexdigest()
        sha1_hash = hashlib.sha1(content).hexdigest()
        size = len(content)

        # Check if already exists
        s3_key = f"fruits/{sha256_hash[:2]}/{sha256_hash[2:4]}/{sha256_hash}"
        s3_etag = None

        if not self._exists(s3_key):
            self.client.put_object(
            response = self.client.put_object(
                Bucket=self.bucket,
                Key=s3_key,
                Body=content,
            )
            s3_etag = response.get("ETag", "").strip('"')
        else:
            # Get existing ETag
            obj_info = self.get_object_info(s3_key)
            if obj_info:
                s3_etag = obj_info.get("etag", "").strip('"')

        return sha256_hash, size, s3_key
        return StorageResult(
            sha256=sha256_hash,
            size=size,
            s3_key=s3_key,
            md5=md5_hash,
            sha1=sha1_hash,
            s3_etag=s3_etag,
        )

    def _store_multipart(self, file: BinaryIO, content_length: int) -> Tuple[str, int, str]:
    def _store_multipart(self, file: BinaryIO, content_length: int) -> StorageResult:
        """Store a large file using S3 multipart upload with streaming hash computation"""
        # First pass: compute hash by streaming through file
        hasher = hashlib.sha256()
        # First pass: compute all hashes by streaming through file
        sha256_hasher = hashlib.sha256()
        md5_hasher = hashlib.md5()
        sha1_hasher = hashlib.sha1()
        size = 0

        # Read file in chunks to compute hash
        # Read file in chunks to compute hashes
        while True:
            chunk = file.read(HASH_CHUNK_SIZE)
            if not chunk:
                break
            hasher.update(chunk)
            sha256_hasher.update(chunk)
            md5_hasher.update(chunk)
            sha1_hasher.update(chunk)
            size += len(chunk)

        sha256_hash = hasher.hexdigest()
        sha256_hash = sha256_hasher.hexdigest()
        md5_hash = md5_hasher.hexdigest()
        sha1_hash = sha1_hasher.hexdigest()
        s3_key = f"fruits/{sha256_hash[:2]}/{sha256_hash[2:4]}/{sha256_hash}"

        # Check if already exists (deduplication)
        if self._exists(s3_key):
            return sha256_hash, size, s3_key
            obj_info = self.get_object_info(s3_key)
            s3_etag = obj_info.get("etag", "").strip('"') if obj_info else None
            return StorageResult(
                sha256=sha256_hash,
                size=size,
                s3_key=s3_key,
                md5=md5_hash,
                sha1=sha1_hash,
                s3_etag=s3_etag,
            )

        # Seek back to start for upload
        file.seek(0)
@@ -116,14 +157,22 @@ class S3Storage:
            part_number += 1

        # Complete multipart upload
        self.client.complete_multipart_upload(
        complete_response = self.client.complete_multipart_upload(
            Bucket=self.bucket,
            Key=s3_key,
            UploadId=upload_id,
            MultipartUpload={"Parts": parts},
        )
        s3_etag = complete_response.get("ETag", "").strip('"')

        return sha256_hash, size, s3_key
        return StorageResult(
            sha256=sha256_hash,
            size=size,
            s3_key=s3_key,
            md5=md5_hash,
            sha1=sha1_hash,
            s3_etag=s3_etag,
        )

        except Exception as e:
            # Abort multipart upload on failure
@@ -135,33 +184,50 @@ class S3Storage:
            )
            raise

    def store_streaming(self, chunks: Generator[bytes, None, None]) -> Tuple[str, int, str]:
    def store_streaming(self, chunks: Generator[bytes, None, None]) -> StorageResult:
        """
        Store a file from a stream of chunks.
        First accumulates to compute hash, then uploads.
        For truly large files, consider using initiate_resumable_upload instead.
        """
        # Accumulate chunks and compute hash
        hasher = hashlib.sha256()
        # Accumulate chunks and compute all hashes
        sha256_hasher = hashlib.sha256()
        md5_hasher = hashlib.md5()
        sha1_hasher = hashlib.sha1()
        all_chunks = []
        size = 0

        for chunk in chunks:
            hasher.update(chunk)
            sha256_hasher.update(chunk)
            md5_hasher.update(chunk)
            sha1_hasher.update(chunk)
            all_chunks.append(chunk)
            size += len(chunk)

        sha256_hash = hasher.hexdigest()
        sha256_hash = sha256_hasher.hexdigest()
        md5_hash = md5_hasher.hexdigest()
        sha1_hash = sha1_hasher.hexdigest()
        s3_key = f"fruits/{sha256_hash[:2]}/{sha256_hash[2:4]}/{sha256_hash}"
        s3_etag = None

        # Check if already exists
        if self._exists(s3_key):
            return sha256_hash, size, s3_key
            obj_info = self.get_object_info(s3_key)
            s3_etag = obj_info.get("etag", "").strip('"') if obj_info else None
            return StorageResult(
                sha256=sha256_hash,
                size=size,
                s3_key=s3_key,
                md5=md5_hash,
                sha1=sha1_hash,
                s3_etag=s3_etag,
            )

        # Upload based on size
        if size < MULTIPART_THRESHOLD:
            content = b"".join(all_chunks)
            self.client.put_object(Bucket=self.bucket, Key=s3_key, Body=content)
            response = self.client.put_object(Bucket=self.bucket, Key=s3_key, Body=content)
            s3_etag = response.get("ETag", "").strip('"')
        else:
            # Use multipart for large files
            mpu = self.client.create_multipart_upload(Bucket=self.bucket, Key=s3_key)
@@ -205,12 +271,13 @@ class S3Storage:
                    "ETag": response["ETag"],
                })

                self.client.complete_multipart_upload(
                complete_response = self.client.complete_multipart_upload(
                    Bucket=self.bucket,
                    Key=s3_key,
                    UploadId=upload_id,
                    MultipartUpload={"Parts": parts},
                )
                s3_etag = complete_response.get("ETag", "").strip('"')

            except Exception as e:
                logger.error(f"Streaming multipart upload failed: {e}")
@@ -221,7 +288,14 @@ class S3Storage:
                )
                raise

        return sha256_hash, size, s3_key
        return StorageResult(
            sha256=sha256_hash,
            size=size,
            s3_key=s3_key,
            md5=md5_hash,
            sha1=sha1_hash,
            s3_etag=s3_etag,
        )

    def initiate_resumable_upload(self, expected_hash: str) -> Dict[str, Any]:
        """
@@ -376,6 +450,46 @@ class S3Storage:
        except ClientError:
            return False

    def generate_presigned_url(
        self,
        s3_key: str,
        expiry: Optional[int] = None,
        response_content_type: Optional[str] = None,
        response_content_disposition: Optional[str] = None,
    ) -> str:
        """
        Generate a presigned URL for downloading an object.

        Args:
            s3_key: The S3 key of the object
            expiry: URL expiry in seconds (defaults to settings.presigned_url_expiry)
            response_content_type: Override Content-Type header in response
            response_content_disposition: Override Content-Disposition header in response

        Returns:
            Presigned URL string
        """
        if expiry is None:
            expiry = settings.presigned_url_expiry

        params = {
            "Bucket": self.bucket,
            "Key": s3_key,
        }

        # Add response header overrides if specified
        if response_content_type:
            params["ResponseContentType"] = response_content_type
        if response_content_disposition:
            params["ResponseContentDisposition"] = response_content_disposition

        url = self.client.generate_presigned_url(
            "get_object",
            Params=params,
            ExpiresIn=expiry,
        )
        return url


# Singleton instance
_storage = None
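
Migration note: since `store()` now returns a `StorageResult` instead of a 3-tuple, call sites unpack named fields. A minimal before/after sketch (the `upload_and_describe` helper is hypothetical):

```python
from typing import BinaryIO


def upload_and_describe(storage: S3Storage, file: BinaryIO) -> str:
    # Before this change: sha256, size, s3_key = storage.store(file)
    result = storage.store(file)
    return (
        f"{result.s3_key} ({result.size} bytes) "
        f"sha256={result.sha256} md5={result.md5} etag={result.s3_etag}"
    )
```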
container-test.sh (new executable file, 7 lines)
@@ -0,0 +1,7 @@
#!/bin/sh

echo "testing container"

# Without a sleep, local testing shows no output because attaching to the logs
# happens after the container is done executing this script.
sleep 1
docker-compose.local.yml (new file, 122 lines)
@@ -0,0 +1,122 @@
version: '3.8'

services:
  orchard-server:
    build:
      context: .
      dockerfile: Dockerfile.local
    ports:
      - "8080:8080"
    environment:
      - ORCHARD_SERVER_HOST=0.0.0.0
      - ORCHARD_SERVER_PORT=8080
      - ORCHARD_DATABASE_HOST=postgres
      - ORCHARD_DATABASE_PORT=5432
      - ORCHARD_DATABASE_USER=orchard
      - ORCHARD_DATABASE_PASSWORD=orchard_secret
      - ORCHARD_DATABASE_DBNAME=orchard
      - ORCHARD_DATABASE_SSLMODE=disable
      - ORCHARD_S3_ENDPOINT=http://minio:9000
      - ORCHARD_S3_REGION=us-east-1
      - ORCHARD_S3_BUCKET=orchard-artifacts
      - ORCHARD_S3_ACCESS_KEY_ID=minioadmin
      - ORCHARD_S3_SECRET_ACCESS_KEY=minioadmin
      - ORCHARD_S3_USE_PATH_STYLE=true
      - ORCHARD_REDIS_HOST=redis
      - ORCHARD_REDIS_PORT=6379
    depends_on:
      postgres:
        condition: service_healthy
      minio:
        condition: service_healthy
      redis:
        condition: service_healthy
    networks:
      - orchard-network
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
      interval: 30s
      timeout: 3s
      start_period: 10s
      retries: 3

  postgres:
    image: postgres:16-alpine
    environment:
      - POSTGRES_USER=orchard
      - POSTGRES_PASSWORD=orchard_secret
      - POSTGRES_DB=orchard
    volumes:
      - postgres-data-local:/var/lib/postgresql/data
      - ./migrations:/docker-entrypoint-initdb.d:ro
    ports:
      - "5432:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U orchard -d orchard"]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      - orchard-network
    restart: unless-stopped

  minio:
    image: minio/minio:latest
    command: server /data --console-address ":9001"
    environment:
      - MINIO_ROOT_USER=minioadmin
      - MINIO_ROOT_PASSWORD=minioadmin
    volumes:
      - minio-data-local:/data
    ports:
      - "9000:9000"
      - "9001:9001"
    healthcheck:
      test: ["CMD", "mc", "ready", "local"]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      - orchard-network
    restart: unless-stopped

  minio-init:
    image: minio/mc:latest
    depends_on:
      minio:
        condition: service_healthy
    entrypoint: >
      /bin/sh -c "
      mc alias set myminio http://minio:9000 minioadmin minioadmin;
      mc mb myminio/orchard-artifacts --ignore-existing;
      mc anonymous set download myminio/orchard-artifacts;
      exit 0;
      "
    networks:
      - orchard-network

  redis:
    image: redis:7-alpine
    command: redis-server --appendonly yes
    volumes:
      - redis-data-local:/data
    ports:
      - "6379:6379"
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      - orchard-network
    restart: unless-stopped

volumes:
  postgres-data-local:
  minio-data-local:
  redis-data-local:

networks:
  orchard-network:
    driver: bridge
@@ -36,7 +36,7 @@ services:
    restart: unless-stopped

  postgres:
    image: postgres:16-alpine
    image: containers.global.bsf.tools/postgres:16-alpine
    environment:
      - POSTGRES_USER=orchard
      - POSTGRES_PASSWORD=orchard_secret
@@ -56,7 +56,7 @@ services:
    restart: unless-stopped

  minio:
    image: minio/minio:latest
    image: containers.global.bsf.tools/minio/minio:latest
    command: server /data --console-address ":9001"
    environment:
      - MINIO_ROOT_USER=minioadmin
@@ -76,7 +76,7 @@ services:
    restart: unless-stopped

  minio-init:
    image: minio/mc:latest
    image: containers.global.bsf.tools/minio/mc:latest
    depends_on:
      minio:
        condition: service_healthy
@@ -91,7 +91,7 @@ services:
      - orchard-network

  redis:
    image: redis:7-alpine
    image: containers.global.bsf.tools/redis:7-alpine
    command: redis-server --appendonly yes
    volumes:
      - redis-data:/data
docs/design/integrity-verification.md (new file, 504 lines)
@@ -0,0 +1,504 @@
# Integrity Verification Workflow Design

This document defines the process for SHA256 checksum verification on artifact downloads, including failure handling and retry mechanisms.

## Overview

Orchard uses content-addressable storage where the artifact ID is the SHA256 hash of the content. This design leverages that property to provide configurable integrity verification during downloads.

## Current State

| Aspect | Status |
|--------|--------|
| Download streams content directly from S3 | ✅ Implemented |
| Artifact ID is the SHA256 hash | ✅ Implemented |
| S3 key derived from SHA256 hash | ✅ Implemented |
| Verification during download | ❌ Not implemented |
| Checksum headers in response | ❌ Not implemented |
| Retry mechanism on failure | ❌ Not implemented |
| Failure handling beyond S3 errors | ❌ Not implemented |

## Verification Modes

The verification mode is selected via the `?verify=<mode>` query parameter, or as a server-wide default via `ORCHARD_VERIFY_MODE`.

| Mode | Performance | Integrity | Use Case |
|------|-------------|-----------|----------|
| `none` | ⚡ Fastest | Client-side | Trusted networks, high throughput |
| `header` | ⚡ Fast | Client-side | Standard downloads, client verification |
| `stream` | 🔄 Moderate | Post-hoc server | Logging/auditing, non-blocking |
| `pre` | 🐢 Slower | Guaranteed | Critical downloads, untrusted storage |
| `strict` | 🐢 Slower | Guaranteed + Alert | Security-sensitive, compliance |

### Mode: None (Default)

**Behavior:**
- Stream content directly from S3 with no server-side processing
- Maximum download performance
- Client is responsible for verification

**Headers Returned:**
```
X-Checksum-SHA256: <expected_hash>
Content-Length: <expected_size>
```

**Flow:**
```
Client Request → Lookup Artifact → Stream from S3 → Client
```

### Mode: Header

**Behavior:**
- Stream content directly from S3
- Include comprehensive checksum headers
- Client performs verification using headers

**Headers Returned:**
```
X-Checksum-SHA256: <expected_hash>
Content-Length: <expected_size>
Digest: sha-256=<base64_encoded_hash>
ETag: "<sha256_hash>"
X-Content-SHA256: <expected_hash>
```

**Flow:**
```
Client Request → Lookup Artifact → Add Headers → Stream from S3 → Client Verifies
```

**Client Verification Example:**
```bash
# Download and verify
curl -OJ https://orchard/project/foo/bar/+/v1.0.0
EXPECTED=$(curl -sI https://orchard/project/foo/bar/+/v1.0.0 | grep X-Checksum-SHA256 | cut -d' ' -f2)
ACTUAL=$(sha256sum downloaded_file | cut -d' ' -f1)
[ "$EXPECTED" = "$ACTUAL" ] && echo "OK" || echo "MISMATCH"
```

### Mode: Stream (Post-Hoc Verification)

**Behavior:**
- Wrap S3 stream with `HashingStreamWrapper`
- Compute SHA256 incrementally while streaming to client
- Verify hash after stream completes
- Log verification result
- Cannot reject content (already sent to client)

**Headers Returned:**
```
X-Checksum-SHA256: <expected_hash>
Content-Length: <expected_size>
X-Verify-Mode: stream
Trailer: X-Verified
```

**Trailers (if client supports):**
```
X-Verified: true|false
X-Computed-SHA256: <computed_hash>
```

**Flow:**
```
Client Request → Lookup Artifact → Wrap Stream → Stream to Client
                                        ↓
                            Compute Hash Incrementally
                                        ↓
                          Verify After Complete → Log Result
```

**Implementation:**
```python
import hashlib
from typing import Callable


class HashingStreamWrapper:
    def __init__(self, stream, expected_hash: str, on_complete: Callable):
        self.stream = stream
        self.hasher = hashlib.sha256()
        self.expected_hash = expected_hash
        self.on_complete = on_complete

    def __iter__(self):
        for chunk in self.stream:
            self.hasher.update(chunk)
            yield chunk
        # Stream complete, verify
        computed = self.hasher.hexdigest()
        self.on_complete(computed == self.expected_hash, computed)
```

### Mode: Pre-Verify (Blocking)

**Behavior:**
- Download entire content from S3 to memory/temp file
- Compute SHA256 hash before sending to client
- On match: stream verified content to client
- On mismatch: retry from S3 (up to N times)
- If retries exhausted: return 500 error

**Headers Returned:**
```
X-Checksum-SHA256: <expected_hash>
Content-Length: <expected_size>
X-Verify-Mode: pre
X-Verified: true
```

**Flow:**
```
Client Request → Lookup Artifact → Download from S3 → Compute Hash
                                                           ↓
                                                     Hash Matches?
                                                     ↓           ↓
                                                    Yes          No
                                                     ↓           ↓
                                             Stream to Client  Retry?
                                                                 ↓
                                                           Yes → Loop
                                                           No → 500 Error
```

**Memory Considerations:**
- For files < `ORCHARD_VERIFY_MEMORY_LIMIT` (default 100MB): buffer in memory
- For larger files: use temporary file with streaming hash computation (see the sketch below)
- Cleanup temp files after response sent
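A minimal sketch of the buffering rule above, assuming `SpooledTemporaryFile` is an acceptable way to get the spill-to-disk behavior; the `pre_verify` helper and its `chunks` parameter are illustrative, not part of the implementation:

```python
import hashlib
import tempfile

MEMORY_LIMIT = 100 * 1024 * 1024  # mirrors ORCHARD_VERIFY_MEMORY_LIMIT's default


def pre_verify(chunks, expected_hash: str):
    """Buffer and hash `chunks` (an iterable of bytes) before anything is sent.

    Small payloads stay in memory; SpooledTemporaryFile spills to disk once
    more than MEMORY_LIMIT bytes are written, matching the rule above.
    """
    hasher = hashlib.sha256()
    buf = tempfile.SpooledTemporaryFile(max_size=MEMORY_LIMIT)
    for chunk in chunks:
        hasher.update(chunk)
        buf.write(chunk)
    if hasher.hexdigest() != expected_hash:
        buf.close()
        raise ValueError("integrity verification failed")
    buf.seek(0)
    return buf  # verified content, ready to stream to the client
```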
### Mode: Strict

**Behavior:**
- Same as pre-verify but with no retries
- Fail immediately on any mismatch
- Quarantine artifact on failure (mark as potentially corrupted)
- Trigger alert/notification on failure
- For security-critical downloads

**Headers Returned (on success):**
```
X-Checksum-SHA256: <expected_hash>
Content-Length: <expected_size>
X-Verify-Mode: strict
X-Verified: true
```

**Error Response (on failure):**
```json
{
  "error": "integrity_verification_failed",
  "message": "Artifact content does not match expected checksum",
  "expected_hash": "<expected>",
  "computed_hash": "<computed>",
  "artifact_id": "<id>",
  "action_taken": "quarantined"
}
```

**Quarantine Process:**
1. Mark artifact `status = 'quarantined'` in database
2. Log security event to audit_logs
3. Optionally notify via webhook/email
4. Artifact becomes unavailable for download until resolved

## Failure Detection

### Failure Types

| Failure Type | Detection Method | Severity |
|--------------|------------------|----------|
| Hash mismatch | Computed SHA256 ≠ Expected | Critical |
| Size mismatch | Actual bytes ≠ `Content-Length` | High |
| S3 read error | boto3 exception | Medium |
| Truncated content | Stream ends early | High |
| S3 object missing | `NoSuchKey` error | Critical |
| ETag mismatch | S3 ETag ≠ expected | Medium |

### Detection Implementation

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class VerificationResult:
    success: bool
    failure_type: Optional[str]  # hash_mismatch, size_mismatch, etc.
    expected_hash: str
    computed_hash: Optional[str]
    expected_size: int
    actual_size: Optional[int]
    error_message: Optional[str]
    retry_count: int
```

## Retry Mechanism

### Configuration

| Environment Variable | Default | Description |
|---------------------|---------|-------------|
| `ORCHARD_VERIFY_MAX_RETRIES` | 3 | Maximum retry attempts |
| `ORCHARD_VERIFY_RETRY_DELAY_MS` | 100 | Base delay between retries |
| `ORCHARD_VERIFY_RETRY_BACKOFF` | 2.0 | Exponential backoff multiplier |
| `ORCHARD_VERIFY_RETRY_MAX_DELAY_MS` | 5000 | Maximum delay cap |

### Backoff Formula

```
delay = min(base_delay * (backoff ^ attempt), max_delay)
```

Example with defaults:
- Attempt 1: 100ms
- Attempt 2: 200ms
- Attempt 3: 400ms
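The `calculate_backoff` helper referenced in the retry flow below could be as simple as this sketch of the formula (the defaults are the documented ones; the function itself is illustrative):

```python
def calculate_backoff(
    attempt: int,
    base_delay_ms: float = 100,
    backoff: float = 2.0,
    max_delay_ms: float = 5000,
) -> float:
    """delay = min(base_delay * backoff**attempt, max_delay), in milliseconds."""
    return min(base_delay_ms * (backoff ** attempt), max_delay_ms)

# first retry waits 100ms, then 200ms, then 400ms, capped at 5000ms
```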
### Retry Flow
|
||||
|
||||
```python
|
||||
async def download_with_retry(artifact, max_retries=3):
|
||||
for attempt in range(max_retries + 1):
|
||||
try:
|
||||
content = await fetch_from_s3(artifact.s3_key)
|
||||
computed_hash = compute_sha256(content)
|
||||
|
||||
if computed_hash == artifact.id:
|
||||
return content # Success
|
||||
|
||||
# Hash mismatch
|
||||
log.warning(f"Verification failed, attempt {attempt + 1}/{max_retries + 1}")
|
||||
|
||||
if attempt < max_retries:
|
||||
delay = calculate_backoff(attempt)
|
||||
await asyncio.sleep(delay / 1000)
|
||||
else:
|
||||
raise IntegrityError("Max retries exceeded")
|
||||
|
||||
except S3Error as e:
|
||||
if attempt < max_retries:
|
||||
delay = calculate_backoff(attempt)
|
||||
await asyncio.sleep(delay / 1000)
|
||||
else:
|
||||
raise
|
||||
```
|
||||
|
||||
### Retryable vs Non-Retryable Failures

**Retryable:**
- S3 read timeout
- S3 connection error
- Hash mismatch (may be a transient S3 issue)
- Truncated content

**Non-Retryable** (classifier sketched below):
- S3 object not found (404)
- S3 access denied (403)
- Artifact not in database
- Strict mode failures
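A sketch of a failure classifier matching the two lists; the failure-type strings are assumptions chosen to line up with `VerificationResult.failure_type`, not confirmed identifiers:

```python
# Assumed failure-type identifiers; the real set lives wherever
# VerificationResult.failure_type is populated.
RETRYABLE = {"s3_timeout", "s3_connection_error", "hash_mismatch", "truncated_content"}

def is_retryable(failure_type: str, strict_mode: bool = False) -> bool:
    if strict_mode:
        return False  # strict mode failures are never retried
    return failure_type in RETRYABLE
```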
## Configuration Reference

### Environment Variables

```bash
# Verification mode (none, header, stream, pre, strict)
ORCHARD_VERIFY_MODE=none

# Retry settings
ORCHARD_VERIFY_MAX_RETRIES=3
ORCHARD_VERIFY_RETRY_DELAY_MS=100
ORCHARD_VERIFY_RETRY_BACKOFF=2.0
ORCHARD_VERIFY_RETRY_MAX_DELAY_MS=5000

# Memory limit for pre-verify buffering (bytes)
ORCHARD_VERIFY_MEMORY_LIMIT=104857600  # 100MB

# Strict mode settings
ORCHARD_VERIFY_QUARANTINE_ON_FAILURE=true
ORCHARD_VERIFY_ALERT_WEBHOOK=https://alerts.example.com/webhook

# Allow per-request mode override
ORCHARD_VERIFY_ALLOW_OVERRIDE=true
```
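One way these settings might be read at startup; a sketch using only the standard library, with defaults mirroring the values above:

```python
import os

def _int_env(name: str, default: int) -> int:
    return int(os.getenv(name, str(default)))

VERIFY_MODE = os.getenv("ORCHARD_VERIFY_MODE", "none")
MAX_RETRIES = _int_env("ORCHARD_VERIFY_MAX_RETRIES", 3)
RETRY_DELAY_MS = _int_env("ORCHARD_VERIFY_RETRY_DELAY_MS", 100)
RETRY_BACKOFF = float(os.getenv("ORCHARD_VERIFY_RETRY_BACKOFF", "2.0"))
RETRY_MAX_DELAY_MS = _int_env("ORCHARD_VERIFY_RETRY_MAX_DELAY_MS", 5000)
MEMORY_LIMIT = _int_env("ORCHARD_VERIFY_MEMORY_LIMIT", 100 * 1024 * 1024)
ALLOW_OVERRIDE = os.getenv("ORCHARD_VERIFY_ALLOW_OVERRIDE", "false").lower() == "true"
```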
### Per-Request Override

When `ORCHARD_VERIFY_ALLOW_OVERRIDE=true`, clients can specify the verification mode per request:

```
GET /api/v1/project/foo/bar/+/v1.0.0?verify=pre
GET /api/v1/project/foo/bar/+/v1.0.0?verify=none
```
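A sketch of how the effective mode might be resolved against server configuration; the handler wiring is not specified in this document, so the function shape is an assumption:

```python
from typing import Optional

VALID_MODES = {"none", "header", "stream", "pre", "strict"}

def resolve_verify_mode(config_mode: str, query_mode: Optional[str], allow_override: bool) -> str:
    """Pick the verification mode for one request."""
    if query_mode is None or not allow_override:
        return config_mode
    if query_mode not in VALID_MODES:
        raise ValueError(f"invalid verify mode: {query_mode}")
    return query_mode
```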
## API Changes

### Download Endpoint

**Request:**
```
GET /api/v1/project/{project}/{package}/+/{ref}?verify={mode}
```

**New Query Parameters:**

| Parameter | Type | Default | Description |
|-----------|------|---------|-------------|
| `verify` | string | from config | Verification mode |

**New Response Headers:**

| Header | Description |
|--------|-------------|
| `X-Checksum-SHA256` | Expected SHA256 hash |
| `X-Verify-Mode` | Active verification mode |
| `X-Verified` | `true` if server verified content |
| `Digest` | RFC 3230 digest header |

### New Endpoint: Verify Artifact

**Request:**
```
POST /api/v1/project/{project}/{package}/+/{ref}/verify
```

**Response:**
```json
{
  "artifact_id": "abc123...",
  "verified": true,
  "expected_hash": "abc123...",
  "computed_hash": "abc123...",
  "size_match": true,
  "expected_size": 1048576,
  "actual_size": 1048576,
  "verification_time_ms": 45
}
```
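Calling the verify endpoint from Python might look like this (a sketch assuming the `requests` library is available and the example host from the client section):

```python
import requests

resp = requests.post(
    "https://orchard.example.com/api/v1/project/foo/bar/+/v1.0.0/verify",
    timeout=30,
)
resp.raise_for_status()
report = resp.json()
if not (report["verified"] and report["size_match"]):
    raise RuntimeError(f"artifact failed verification: {report}")
```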
## Logging and Monitoring

### Log Events

| Event | Level | When |
|-------|-------|------|
| `verification.success` | INFO | Hash verified successfully |
| `verification.failure` | ERROR | Hash mismatch detected |
| `verification.retry` | WARN | Retry attempt initiated |
| `verification.quarantine` | ERROR | Artifact quarantined |
| `verification.skip` | DEBUG | Verification skipped (mode=none) |

### Metrics

| Metric | Type | Description |
|--------|------|-------------|
| `orchard_verification_total` | Counter | Total verification attempts |
| `orchard_verification_failures` | Counter | Failed verifications |
| `orchard_verification_retries` | Counter | Retry attempts |
| `orchard_verification_duration_ms` | Histogram | Verification time |
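These could be registered with the standard Prometheus client; whether the service actually uses `prometheus_client` (and these bucket boundaries) is an assumption:

```python
from prometheus_client import Counter, Histogram

VERIFICATION_TOTAL = Counter(
    "orchard_verification_total", "Total verification attempts")
VERIFICATION_FAILURES = Counter(
    "orchard_verification_failures", "Failed verifications")
VERIFICATION_RETRIES = Counter(
    "orchard_verification_retries", "Retry attempts")
VERIFICATION_DURATION_MS = Histogram(
    "orchard_verification_duration_ms", "Verification time in milliseconds",
    buckets=(5, 10, 25, 50, 100, 250, 500, 1000, 2500, 5000))
```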
### Audit Log Entry

```json
{
  "action": "artifact.download.verified",
  "resource": "project/foo/package/bar/artifact/abc123",
  "user_id": "user@example.com",
  "details": {
    "verification_mode": "pre",
    "verified": true,
    "retry_count": 0,
    "duration_ms": 45
  }
}
```
## Security Considerations

1. **Strict Mode for Sensitive Data**: Use strict mode for artifacts containing credentials, certificates, or security-critical code.

2. **Quarantine Isolation**: Quarantined artifacts should be moved to a separate S3 prefix or bucket for forensic analysis.

3. **Alert on Repeated Failures**: Multiple verification failures for the same artifact may indicate storage corruption or tampering.

4. **Audit Trail**: All verification events should be logged for compliance and forensic purposes.

5. **Client Trust**: In `none` and `header` modes, clients must implement their own verification for security guarantees.
## Implementation Phases

### Phase 1: Headers Only
- Add `X-Checksum-SHA256` header to all downloads
- Add `verify=header` mode support
- Add configuration options

### Phase 2: Stream Verification
- Implement `HashingStreamWrapper` (see the sketch below)
- Add `verify=stream` mode
- Add verification logging
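A possible shape for `HashingStreamWrapper`: hash chunks as they pass through and fail at end-of-stream on mismatch. The class name comes from this document; the interface details are assumptions, and note that in stream mode the client has already received the bytes by the time a mismatch is detected:

```python
import hashlib
from typing import AsyncIterator

class IntegrityError(Exception):
    pass

class HashingStreamWrapper:
    """Wrap an async byte stream, verifying its SHA256 after the last chunk."""

    def __init__(self, chunks: AsyncIterator[bytes], expected_hash: str):
        self._chunks = chunks
        self._expected = expected_hash
        self._hasher = hashlib.sha256()

    async def __aiter__(self):
        async for chunk in self._chunks:
            self._hasher.update(chunk)
            yield chunk
        # Mismatch can only be raised here, after the body was streamed out.
        if self._hasher.hexdigest() != self._expected:
            raise IntegrityError("stream hash mismatch after transfer")
```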
### Phase 3: Pre-Verification
- Implement buffered verification
- Add retry mechanism
- Add `verify=pre` mode

### Phase 4: Strict Mode
- Implement quarantine mechanism
- Add alerting integration
- Add `verify=strict` mode
## Client Integration Examples

### curl with Verification

```bash
#!/bin/bash
URL="https://orchard.example.com/api/v1/project/myproject/mypackage/+/v1.0.0"

# Get expected hash from headers
EXPECTED=$(curl -sI "$URL" | grep -i "X-Checksum-SHA256" | tr -d '\r' | cut -d' ' -f2)

# Download file
curl -sO "$URL"
FILENAME=$(basename "$URL")

# Verify
ACTUAL=$(sha256sum "$FILENAME" | cut -d' ' -f1)

if [ "$EXPECTED" = "$ACTUAL" ]; then
    echo "✓ Verification passed"
else
    echo "✗ Verification FAILED"
    echo "  Expected: $EXPECTED"
    echo "  Actual:   $ACTUAL"
    exit 1
fi
```
### Python Client

```python
import hashlib
import requests

def download_verified(url: str) -> bytes:
    # Get headers first
    head = requests.head(url)
    head.raise_for_status()
    expected_hash = head.headers.get('X-Checksum-SHA256')
    expected_size = int(head.headers.get('Content-Length', 0))

    # Download content
    response = requests.get(url)
    response.raise_for_status()
    content = response.content

    # Verify size
    if len(content) != expected_size:
        raise ValueError(f"Size mismatch: {len(content)} != {expected_size}")

    # Verify hash
    actual_hash = hashlib.sha256(content).hexdigest()
    if actual_hash != expected_hash:
        raise ValueError(f"Hash mismatch: {actual_hash} != {expected_hash}")

    return content
```
### Server-Side Verification

```bash
# Force server to verify before sending
curl -O "https://orchard.example.com/api/v1/project/myproject/mypackage/+/v1.0.0?verify=pre"

# Check if verification was performed
curl -I "https://orchard.example.com/api/v1/project/myproject/mypackage/+/v1.0.0?verify=pre" | grep X-Verified
# X-Verified: true
```
@@ -1,4 +1,19 @@
import { Project, Package, Tag, Artifact, UploadResponse } from './types';
import {
  Project,
  Package,
  Tag,
  TagDetail,
  Artifact,
  ArtifactDetail,
  UploadResponse,
  PaginatedResponse,
  ListParams,
  TagListParams,
  PackageListParams,
  ArtifactListParams,
  ProjectListParams,
  GlobalSearchResponse,
} from './types';

const API_BASE = '/api/v1';

@@ -10,21 +25,33 @@ async function handleResponse<T>(response: Response): Promise<T> {
  return response.json();
}

// Paginated response type
interface PaginatedResponse<T> {
  items: T[];
  pagination: {
    page: number;
    limit: number;
    total: number;
    total_pages: number;
  };
function buildQueryString(params: Record<string, unknown>): string {
  const searchParams = new URLSearchParams();
  Object.entries(params).forEach(([key, value]) => {
    if (value !== undefined && value !== null && value !== '') {
      searchParams.append(key, String(value));
    }
  });
  const query = searchParams.toString();
  return query ? `?${query}` : '';
}

// Global Search API
export async function globalSearch(query: string, limit: number = 5): Promise<GlobalSearchResponse> {
  const params = buildQueryString({ q: query, limit });
  const response = await fetch(`${API_BASE}/search${params}`);
  return handleResponse<GlobalSearchResponse>(response);
}

// Project API
export async function listProjects(): Promise<Project[]> {
  const response = await fetch(`${API_BASE}/projects`);
  const data = await handleResponse<PaginatedResponse<Project>>(response);
export async function listProjects(params: ProjectListParams = {}): Promise<PaginatedResponse<Project>> {
  const query = buildQueryString(params as Record<string, unknown>);
  const response = await fetch(`${API_BASE}/projects${query}`);
  return handleResponse<PaginatedResponse<Project>>(response);
}

export async function listProjectsSimple(params: ListParams = {}): Promise<Project[]> {
  const data = await listProjects(params);
  return data.items;
}

@@ -43,9 +70,20 @@ export async function getProject(name: string): Promise<Project> {
}

// Package API
export async function listPackages(projectName: string): Promise<Package[]> {
  const response = await fetch(`${API_BASE}/project/${projectName}/packages`);
  return handleResponse<Package[]>(response);
export async function listPackages(projectName: string, params: PackageListParams = {}): Promise<PaginatedResponse<Package>> {
  const query = buildQueryString(params as Record<string, unknown>);
  const response = await fetch(`${API_BASE}/project/${projectName}/packages${query}`);
  return handleResponse<PaginatedResponse<Package>>(response);
}

export async function listPackagesSimple(projectName: string, params: PackageListParams = {}): Promise<Package[]> {
  const data = await listPackages(projectName, params);
  return data.items;
}

export async function getPackage(projectName: string, packageName: string): Promise<Package> {
  const response = await fetch(`${API_BASE}/project/${projectName}/packages/${packageName}`);
  return handleResponse<Package>(response);
}

export async function createPackage(projectName: string, data: { name: string; description?: string }): Promise<Package> {
@@ -58,9 +96,20 @@ export async function createPackage(projectName: string, data: { name: string; d
}

// Tag API
export async function listTags(projectName: string, packageName: string): Promise<Tag[]> {
  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags`);
  return handleResponse<Tag[]>(response);
export async function listTags(projectName: string, packageName: string, params: TagListParams = {}): Promise<PaginatedResponse<TagDetail>> {
  const query = buildQueryString(params as Record<string, unknown>);
  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags${query}`);
  return handleResponse<PaginatedResponse<TagDetail>>(response);
}

export async function listTagsSimple(projectName: string, packageName: string, params: TagListParams = {}): Promise<TagDetail[]> {
  const data = await listTags(projectName, packageName, params);
  return data.items;
}

export async function getTag(projectName: string, packageName: string, tagName: string): Promise<TagDetail> {
  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags/${tagName}`);
  return handleResponse<TagDetail>(response);
}

export async function createTag(projectName: string, packageName: string, data: { name: string; artifact_id: string }): Promise<Tag> {
@@ -73,9 +122,19 @@ export async function createTag(projectName: string, packageName: string, data:
}

// Artifact API
export async function getArtifact(artifactId: string): Promise<Artifact> {
export async function getArtifact(artifactId: string): Promise<ArtifactDetail> {
  const response = await fetch(`${API_BASE}/artifact/${artifactId}`);
  return handleResponse<Artifact>(response);
  return handleResponse<ArtifactDetail>(response);
}

export async function listPackageArtifacts(
  projectName: string,
  packageName: string,
  params: ArtifactListParams = {}
): Promise<PaginatedResponse<Artifact & { tags: string[] }>> {
  const query = buildQueryString(params as Record<string, unknown>);
  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/artifacts${query}`);
  return handleResponse<PaginatedResponse<Artifact & { tags: string[] }>>(response);
}

// Upload
43 frontend/src/components/Badge.css Normal file
@@ -0,0 +1,43 @@
/* Badge Component */
.badge {
  display: inline-flex;
  align-items: center;
  padding: 4px 10px;
  border-radius: 100px;
  font-weight: 500;
  font-size: 0.75rem;
  text-transform: uppercase;
  letter-spacing: 0.02em;
}

.badge--default {
  background: var(--bg-tertiary);
  color: var(--text-secondary);
  border: 1px solid var(--border-primary);
}

.badge--success,
.badge--public {
  background: var(--success-bg);
  color: var(--success);
  border: 1px solid rgba(34, 197, 94, 0.2);
}

.badge--warning,
.badge--private {
  background: var(--warning-bg);
  color: var(--warning);
  border: 1px solid rgba(245, 158, 11, 0.2);
}

.badge--error {
  background: var(--error-bg);
  color: var(--error);
  border: 1px solid rgba(239, 68, 68, 0.2);
}

.badge--info {
  background: rgba(59, 130, 246, 0.1);
  color: #3b82f6;
  border: 1px solid rgba(59, 130, 246, 0.2);
}
17 frontend/src/components/Badge.tsx Normal file
@@ -0,0 +1,17 @@
import './Badge.css';

type BadgeVariant = 'default' | 'success' | 'warning' | 'error' | 'info' | 'public' | 'private';

interface BadgeProps {
  children: React.ReactNode;
  variant?: BadgeVariant;
  className?: string;
}

export function Badge({ children, variant = 'default', className = '' }: BadgeProps) {
  return (
    <span className={`badge badge--${variant} ${className}`.trim()}>
      {children}
    </span>
  );
}
38 frontend/src/components/Breadcrumb.css Normal file
@@ -0,0 +1,38 @@
/* Breadcrumb Component */
.breadcrumb {
  margin-bottom: 24px;
}

.breadcrumb__list {
  display: flex;
  align-items: center;
  gap: 8px;
  list-style: none;
  padding: 0;
  margin: 0;
  font-size: 0.875rem;
}

.breadcrumb__item {
  display: flex;
  align-items: center;
  gap: 8px;
}

.breadcrumb__link {
  color: var(--text-secondary);
  transition: color var(--transition-fast);
}

.breadcrumb__link:hover {
  color: var(--accent-primary);
}

.breadcrumb__separator {
  color: var(--text-muted);
}

.breadcrumb__current {
  color: var(--text-primary);
  font-weight: 500;
}
38 frontend/src/components/Breadcrumb.tsx Normal file
@@ -0,0 +1,38 @@
import { Link } from 'react-router-dom';
import './Breadcrumb.css';

interface BreadcrumbItem {
  label: string;
  href?: string;
}

interface BreadcrumbProps {
  items: BreadcrumbItem[];
  className?: string;
}

export function Breadcrumb({ items, className = '' }: BreadcrumbProps) {
  return (
    <nav className={`breadcrumb ${className}`.trim()} aria-label="Breadcrumb">
      <ol className="breadcrumb__list">
        {items.map((item, index) => {
          const isLast = index === items.length - 1;
          return (
            <li key={index} className="breadcrumb__item">
              {!isLast && item.href ? (
                <>
                  <Link to={item.href} className="breadcrumb__link">
                    {item.label}
                  </Link>
                  <span className="breadcrumb__separator">/</span>
                </>
              ) : (
                <span className="breadcrumb__current">{item.label}</span>
              )}
            </li>
          );
        })}
      </ol>
    </nav>
  );
}
78 frontend/src/components/Card.css Normal file
@@ -0,0 +1,78 @@
/* Card Component */
.card {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  padding: 24px;
  transition: all var(--transition-normal);
}

.card--elevated {
  box-shadow: var(--shadow-md);
}

.card--accent {
  background: linear-gradient(135deg, rgba(16, 185, 129, 0.05) 0%, rgba(5, 150, 105, 0.05) 100%);
  border: 1px solid rgba(16, 185, 129, 0.2);
}

.card--clickable {
  display: block;
  color: inherit;
  position: relative;
  overflow: hidden;
  cursor: pointer;
}

.card--clickable::before {
  content: '';
  position: absolute;
  inset: 0;
  background: var(--accent-gradient);
  opacity: 0;
  transition: opacity var(--transition-normal);
  border-radius: var(--radius-lg);
}

.card--clickable:hover {
  border-color: var(--border-secondary);
  transform: translateY(-2px);
  box-shadow: var(--shadow-lg);
  color: inherit;
}

.card--clickable:hover::before {
  opacity: 0.03;
}

.card__header {
  margin-bottom: 16px;
}

.card__header h3 {
  color: var(--text-primary);
  font-size: 1.125rem;
  font-weight: 600;
  margin-bottom: 4px;
}

.card__header p {
  color: var(--text-secondary);
  font-size: 0.875rem;
  line-height: 1.5;
}

.card__body {
  color: var(--text-secondary);
  font-size: 0.875rem;
}

.card__footer {
  display: flex;
  justify-content: space-between;
  align-items: center;
  font-size: 0.75rem;
  padding-top: 16px;
  border-top: 1px solid var(--border-primary);
  margin-top: 16px;
}
59 frontend/src/components/Card.tsx Normal file
@@ -0,0 +1,59 @@
import { ReactNode } from 'react';
import './Card.css';

interface CardProps {
  children: ReactNode;
  className?: string;
  onClick?: () => void;
  href?: string;
  variant?: 'default' | 'elevated' | 'accent';
}

export function Card({ children, className = '', onClick, href, variant = 'default' }: CardProps) {
  const baseClass = `card card--${variant} ${className}`.trim();

  if (href) {
    return (
      <a href={href} className={`${baseClass} card--clickable`}>
        {children}
      </a>
    );
  }

  if (onClick) {
    return (
      <div className={`${baseClass} card--clickable`} onClick={onClick} role="button" tabIndex={0}>
        {children}
      </div>
    );
  }

  return <div className={baseClass}>{children}</div>;
}

interface CardHeaderProps {
  children: ReactNode;
  className?: string;
}

export function CardHeader({ children, className = '' }: CardHeaderProps) {
  return <div className={`card__header ${className}`.trim()}>{children}</div>;
}

interface CardBodyProps {
  children: ReactNode;
  className?: string;
}

export function CardBody({ children, className = '' }: CardBodyProps) {
  return <div className={`card__body ${className}`.trim()}>{children}</div>;
}

interface CardFooterProps {
  children: ReactNode;
  className?: string;
}

export function CardFooter({ children, className = '' }: CardFooterProps) {
  return <div className={`card__footer ${className}`.trim()}>{children}</div>;
}
100 frontend/src/components/DataTable.css Normal file
@@ -0,0 +1,100 @@
/* DataTable Component */
.data-table {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  overflow: hidden;
}

.data-table table {
  width: 100%;
  border-collapse: collapse;
}

.data-table th,
.data-table td {
  padding: 14px 20px;
  text-align: left;
  border-bottom: 1px solid var(--border-primary);
}

.data-table th {
  background: var(--bg-tertiary);
  font-weight: 600;
  font-size: 0.75rem;
  text-transform: uppercase;
  letter-spacing: 0.05em;
  color: var(--text-tertiary);
}

.data-table__th--sortable {
  cursor: pointer;
  user-select: none;
  transition: color var(--transition-fast);
}

.data-table__th--sortable:hover {
  color: var(--text-primary);
}

.data-table__th-content {
  display: flex;
  align-items: center;
  gap: 6px;
}

.data-table__sort-icon {
  transition: transform var(--transition-fast);
}

.data-table__sort-icon--desc {
  transform: rotate(180deg);
}

.data-table tbody tr:last-child td {
  border-bottom: none;
}

.data-table tbody tr {
  transition: background var(--transition-fast);
}

.data-table tbody tr:hover {
  background: var(--bg-tertiary);
}

.data-table td strong {
  color: var(--accent-primary);
  font-weight: 600;
}

/* Empty state */
.data-table__empty {
  text-align: center;
  padding: 48px 32px;
  color: var(--text-tertiary);
  background: var(--bg-secondary);
  border: 1px dashed var(--border-secondary);
  border-radius: var(--radius-lg);
}

.data-table__empty p {
  font-size: 0.9375rem;
}

/* Utility classes for cells */
.data-table .cell-mono {
  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
  font-size: 0.8125rem;
  color: var(--text-tertiary);
  background: var(--bg-tertiary);
  padding: 4px 8px;
  border-radius: var(--radius-sm);
}

.data-table .cell-truncate {
  max-width: 200px;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}
86 frontend/src/components/DataTable.tsx Normal file
@@ -0,0 +1,86 @@
import { ReactNode } from 'react';
import './DataTable.css';

interface Column<T> {
  key: string;
  header: string;
  render: (item: T) => ReactNode;
  className?: string;
  sortable?: boolean;
}

interface DataTableProps<T> {
  data: T[];
  columns: Column<T>[];
  keyExtractor: (item: T) => string;
  emptyMessage?: string;
  className?: string;
  onSort?: (key: string) => void;
  sortKey?: string;
  sortOrder?: 'asc' | 'desc';
}

export function DataTable<T>({
  data,
  columns,
  keyExtractor,
  emptyMessage = 'No data available',
  className = '',
  onSort,
  sortKey,
  sortOrder,
}: DataTableProps<T>) {
  if (data.length === 0) {
    return (
      <div className="data-table__empty">
        <p>{emptyMessage}</p>
      </div>
    );
  }

  return (
    <div className={`data-table ${className}`.trim()}>
      <table>
        <thead>
          <tr>
            {columns.map((column) => (
              <th
                key={column.key}
                className={`${column.className || ''} ${column.sortable ? 'data-table__th--sortable' : ''}`}
                onClick={() => column.sortable && onSort?.(column.key)}
              >
                <span className="data-table__th-content">
                  {column.header}
                  {column.sortable && sortKey === column.key && (
                    <svg
                      className={`data-table__sort-icon ${sortOrder === 'desc' ? 'data-table__sort-icon--desc' : ''}`}
                      width="12"
                      height="12"
                      viewBox="0 0 24 24"
                      fill="none"
                      stroke="currentColor"
                      strokeWidth="2"
                    >
                      <polyline points="18 15 12 9 6 15" />
                    </svg>
                  )}
                </span>
              </th>
            ))}
          </tr>
        </thead>
        <tbody>
          {data.map((item) => (
            <tr key={keyExtractor(item)}>
              {columns.map((column) => (
                <td key={column.key} className={column.className}>
                  {column.render(item)}
                </td>
              ))}
            </tr>
          ))}
        </tbody>
      </table>
    </div>
  );
}
63 frontend/src/components/FilterChip.css Normal file
@@ -0,0 +1,63 @@
/* FilterChip Component */
.filter-chip {
  display: inline-flex;
  align-items: center;
  gap: 6px;
  padding: 4px 8px 4px 10px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: 100px;
  font-size: 0.75rem;
}

.filter-chip__label {
  color: var(--text-muted);
}

.filter-chip__value {
  color: var(--text-primary);
  font-weight: 500;
}

.filter-chip__remove {
  display: flex;
  align-items: center;
  justify-content: center;
  width: 18px;
  height: 18px;
  padding: 0;
  background: transparent;
  border: none;
  border-radius: 50%;
  color: var(--text-muted);
  cursor: pointer;
  transition: all var(--transition-fast);
}

.filter-chip__remove:hover {
  background: var(--bg-hover);
  color: var(--text-primary);
}

/* FilterChipGroup */
.filter-chip-group {
  display: flex;
  flex-wrap: wrap;
  align-items: center;
  gap: 8px;
  margin-bottom: 16px;
}

.filter-chip-group__clear {
  padding: 4px 10px;
  background: transparent;
  border: none;
  font-size: 0.75rem;
  color: var(--text-muted);
  cursor: pointer;
  transition: color var(--transition-fast);
}

.filter-chip-group__clear:hover {
  color: var(--error);
}
47 frontend/src/components/FilterChip.tsx Normal file
@@ -0,0 +1,47 @@
import './FilterChip.css';

interface FilterChipProps {
  label: string;
  value: string;
  onRemove: () => void;
  className?: string;
}

export function FilterChip({ label, value, onRemove, className = '' }: FilterChipProps) {
  return (
    <span className={`filter-chip ${className}`.trim()}>
      <span className="filter-chip__label">{label}:</span>
      <span className="filter-chip__value">{value}</span>
      <button
        type="button"
        className="filter-chip__remove"
        onClick={onRemove}
        aria-label={`Remove ${label} filter`}
      >
        <svg width="12" height="12" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
          <line x1="18" y1="6" x2="6" y2="18" />
          <line x1="6" y1="6" x2="18" y2="18" />
        </svg>
      </button>
    </span>
  );
}

interface FilterChipGroupProps {
  children: React.ReactNode;
  onClearAll?: () => void;
  className?: string;
}

export function FilterChipGroup({ children, onClearAll, className = '' }: FilterChipGroupProps) {
  return (
    <div className={`filter-chip-group ${className}`.trim()}>
      {children}
      {onClearAll && (
        <button type="button" className="filter-chip-group__clear" onClick={onClearAll}>
          Clear all
        </button>
      )}
    </div>
  );
}
75 frontend/src/components/FilterDropdown.css Normal file
@@ -0,0 +1,75 @@
.filter-dropdown {
  position: relative;
}

.filter-dropdown__trigger {
  display: flex;
  align-items: center;
  gap: 8px;
  padding: 8px 12px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  color: var(--text-secondary);
  font-size: 0.875rem;
  cursor: pointer;
  transition: all var(--transition-fast);
}

.filter-dropdown__trigger:hover {
  background: var(--bg-hover);
  color: var(--text-primary);
}

.filter-dropdown__trigger--active {
  border-color: var(--accent-primary);
  color: var(--text-primary);
}

.filter-dropdown__chevron {
  transition: transform var(--transition-fast);
}

.filter-dropdown__chevron--open {
  transform: rotate(180deg);
}

.filter-dropdown__menu {
  position: absolute;
  top: calc(100% + 4px);
  left: 0;
  min-width: 150px;
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  box-shadow: var(--shadow-lg);
  z-index: 50;
  overflow: hidden;
}

.filter-dropdown__option {
  display: flex;
  align-items: center;
  justify-content: space-between;
  width: 100%;
  padding: 8px 12px;
  background: transparent;
  border: none;
  color: var(--text-primary);
  font-size: 0.875rem;
  text-align: left;
  cursor: pointer;
  transition: background var(--transition-fast);
}

.filter-dropdown__option:hover {
  background: var(--bg-hover);
}

.filter-dropdown__option--selected {
  color: var(--accent-primary);
}

.filter-dropdown__option svg {
  color: var(--accent-primary);
}
80 frontend/src/components/FilterDropdown.tsx Normal file
@@ -0,0 +1,80 @@
import { useState, useRef, useEffect } from 'react';
import './FilterDropdown.css';

export interface FilterOption {
  value: string;
  label: string;
}

interface FilterDropdownProps {
  label: string;
  options: FilterOption[];
  value: string;
  onChange: (value: string) => void;
  className?: string;
}

export function FilterDropdown({ label, options, value, onChange, className = '' }: FilterDropdownProps) {
  const [isOpen, setIsOpen] = useState(false);
  const dropdownRef = useRef<HTMLDivElement>(null);

  const selectedOption = options.find((o) => o.value === value);

  useEffect(() => {
    function handleClickOutside(event: MouseEvent) {
      if (dropdownRef.current && !dropdownRef.current.contains(event.target as Node)) {
        setIsOpen(false);
      }
    }

    document.addEventListener('mousedown', handleClickOutside);
    return () => document.removeEventListener('mousedown', handleClickOutside);
  }, []);

  return (
    <div className={`filter-dropdown ${className}`.trim()} ref={dropdownRef}>
      <button
        type="button"
        className={`filter-dropdown__trigger ${value ? 'filter-dropdown__trigger--active' : ''}`}
        onClick={() => setIsOpen(!isOpen)}
        aria-expanded={isOpen}
      >
        <span>{selectedOption ? selectedOption.label : label}</span>
        <svg
          className={`filter-dropdown__chevron ${isOpen ? 'filter-dropdown__chevron--open' : ''}`}
          width="14"
          height="14"
          viewBox="0 0 24 24"
          fill="none"
          stroke="currentColor"
          strokeWidth="2"
        >
          <polyline points="6 9 12 15 18 9" />
        </svg>
      </button>

      {isOpen && (
        <div className="filter-dropdown__menu">
          {options.map((option) => (
            <button
              key={option.value}
              type="button"
              className={`filter-dropdown__option ${option.value === value ? 'filter-dropdown__option--selected' : ''}`}
              onClick={() => {
                onChange(option.value);
                setIsOpen(false);
              }}
            >
              {option.label}
              {option.value === value && (
                <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                  <polyline points="20 6 9 17 4 12" />
                </svg>
              )}
            </button>
          ))}
        </div>
      )}
    </div>
  );
}
216 frontend/src/components/GlobalSearch.css Normal file
@@ -0,0 +1,216 @@
.global-search {
  position: relative;
  flex: 1;
  max-width: 400px;
  margin: 0 24px;
}

.global-search__input-wrapper {
  position: relative;
  display: flex;
  align-items: center;
}

.global-search__icon {
  position: absolute;
  left: 12px;
  color: var(--text-secondary);
  pointer-events: none;
}

.global-search__input {
  width: 100%;
  padding: 8px 40px 8px 36px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  color: var(--text-primary);
  font-size: 0.875rem;
  transition: all var(--transition-fast);
}

.global-search__input:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
}

.global-search__input::placeholder {
  color: var(--text-muted);
}

.global-search__shortcut {
  position: absolute;
  right: 8px;
  padding: 2px 6px;
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-sm);
  color: var(--text-muted);
  font-family: inherit;
  font-size: 0.75rem;
  pointer-events: none;
}

.global-search__spinner {
  position: absolute;
  right: 36px;
  width: 14px;
  height: 14px;
  border: 2px solid var(--border-primary);
  border-top-color: var(--accent-primary);
  border-radius: 50%;
  animation: spin 0.6s linear infinite;
}

@keyframes spin {
  to {
    transform: rotate(360deg);
  }
}

/* Dropdown */
.global-search__dropdown {
  position: absolute;
  top: calc(100% + 8px);
  left: 0;
  right: 0;
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  box-shadow: var(--shadow-lg);
  max-height: 400px;
  overflow-y: auto;
  z-index: 1000;
}

.global-search__empty {
  padding: 24px;
  text-align: center;
  color: var(--text-secondary);
  font-size: 0.875rem;
}

/* Sections */
.global-search__section {
  padding: 8px 0;
  border-bottom: 1px solid var(--border-primary);
}

.global-search__section:last-child {
  border-bottom: none;
}

.global-search__section-header {
  display: flex;
  align-items: center;
  justify-content: space-between;
  padding: 4px 12px 8px;
  color: var(--text-secondary);
  font-size: 0.75rem;
  font-weight: 600;
  text-transform: uppercase;
  letter-spacing: 0.05em;
}

.global-search__count {
  background: var(--bg-tertiary);
  padding: 2px 6px;
  border-radius: var(--radius-sm);
  font-size: 0.7rem;
}

/* Results */
.global-search__result {
  display: flex;
  align-items: flex-start;
  gap: 12px;
  width: 100%;
  padding: 8px 12px;
  background: transparent;
  border: none;
  text-align: left;
  color: var(--text-primary);
  cursor: pointer;
  transition: background var(--transition-fast);
}

.global-search__result:hover,
.global-search__result.selected {
  background: var(--bg-hover);
}

.global-search__result svg {
  flex-shrink: 0;
  margin-top: 2px;
  color: var(--text-secondary);
}

.global-search__result-content {
  flex: 1;
  min-width: 0;
  display: flex;
  flex-direction: column;
  gap: 2px;
}

.global-search__result-name {
  font-weight: 500;
  color: var(--text-primary);
}

.global-search__result-path {
  font-size: 0.75rem;
  color: var(--text-secondary);
}

.global-search__result-desc {
  font-size: 0.75rem;
  color: var(--text-muted);
  white-space: nowrap;
  overflow: hidden;
  text-overflow: ellipsis;
}

/* Badges */
.global-search__badge {
  flex-shrink: 0;
  padding: 2px 8px;
  border-radius: var(--radius-sm);
  font-size: 0.7rem;
  font-weight: 500;
  text-transform: uppercase;
}

.global-search__badge.public {
  background: rgba(16, 185, 129, 0.15);
  color: var(--accent-primary);
}

.global-search__badge.private {
  background: rgba(234, 179, 8, 0.15);
  color: #eab308;
}

.global-search__badge.format {
  background: var(--bg-tertiary);
  color: var(--text-secondary);
}

/* Responsive */
@media (max-width: 768px) {
  .global-search {
    max-width: none;
    margin: 0 12px;
  }

  .global-search__shortcut {
    display: none;
  }
}

@media (max-width: 640px) {
  .global-search {
    display: none;
  }
}
265 frontend/src/components/GlobalSearch.tsx Normal file
@@ -0,0 +1,265 @@
import { useState, useEffect, useRef, useCallback } from 'react';
import { useNavigate } from 'react-router-dom';
import { globalSearch } from '../api';
import { GlobalSearchResponse } from '../types';
import './GlobalSearch.css';

export function GlobalSearch() {
  const navigate = useNavigate();
  const [query, setQuery] = useState('');
  const [results, setResults] = useState<GlobalSearchResponse | null>(null);
  const [loading, setLoading] = useState(false);
  const [isOpen, setIsOpen] = useState(false);
  const [selectedIndex, setSelectedIndex] = useState(-1);
  const inputRef = useRef<HTMLInputElement>(null);
  const containerRef = useRef<HTMLDivElement>(null);

  // Build flat list of results for keyboard navigation
  const flatResults = results
    ? [
        ...results.projects.map((p) => ({ type: 'project' as const, item: p })),
        ...results.packages.map((p) => ({ type: 'package' as const, item: p })),
        ...results.artifacts.map((a) => ({ type: 'artifact' as const, item: a })),
      ]
    : [];

  const handleSearch = useCallback(async (searchQuery: string) => {
    if (!searchQuery.trim()) {
      setResults(null);
      setIsOpen(false);
      return;
    }

    setLoading(true);
    try {
      const data = await globalSearch(searchQuery);
      setResults(data);
      setIsOpen(true);
      setSelectedIndex(-1);
    } catch (err) {
      console.error('Search failed:', err);
      setResults(null);
    } finally {
      setLoading(false);
    }
  }, []);

  // Debounced search
  useEffect(() => {
    const timer = setTimeout(() => {
      handleSearch(query);
    }, 300);

    return () => clearTimeout(timer);
  }, [query, handleSearch]);

  // Close on click outside
  useEffect(() => {
    function handleClickOutside(event: MouseEvent) {
      if (containerRef.current && !containerRef.current.contains(event.target as Node)) {
        setIsOpen(false);
      }
    }

    document.addEventListener('mousedown', handleClickOutside);
    return () => document.removeEventListener('mousedown', handleClickOutside);
  }, []);

  // Keyboard navigation
  useEffect(() => {
    function handleKeyDown(event: KeyboardEvent) {
      if (event.key === '/' && !['INPUT', 'TEXTAREA'].includes((event.target as HTMLElement).tagName)) {
        event.preventDefault();
        inputRef.current?.focus();
      }

      if (!isOpen) return;

      switch (event.key) {
        case 'ArrowDown':
          event.preventDefault();
          setSelectedIndex((prev) => Math.min(prev + 1, flatResults.length - 1));
          break;
        case 'ArrowUp':
          event.preventDefault();
          setSelectedIndex((prev) => Math.max(prev - 1, -1));
          break;
        case 'Enter':
          if (selectedIndex >= 0 && flatResults[selectedIndex]) {
            event.preventDefault();
            navigateToResult(flatResults[selectedIndex]);
          }
          break;
        case 'Escape':
          setIsOpen(false);
          inputRef.current?.blur();
          break;
      }
    }

    document.addEventListener('keydown', handleKeyDown);
    return () => document.removeEventListener('keydown', handleKeyDown);
  }, [isOpen, selectedIndex, flatResults]);

  function navigateToResult(result: (typeof flatResults)[0]) {
    setIsOpen(false);
    setQuery('');

    switch (result.type) {
      case 'project':
        navigate(`/project/${result.item.name}`);
        break;
      case 'package':
        navigate(`/project/${result.item.project_name}/${result.item.name}`);
        break;
      case 'artifact':
        navigate(`/project/${result.item.project_name}/${result.item.package_name}`);
        break;
    }
  }

  const hasResults = results && results.counts.total > 0;

  return (
    <div className="global-search" ref={containerRef}>
      <div className="global-search__input-wrapper">
        <svg
          className="global-search__icon"
          width="16"
          height="16"
          viewBox="0 0 24 24"
          fill="none"
          stroke="currentColor"
          strokeWidth="2"
        >
          <circle cx="11" cy="11" r="8" />
          <line x1="21" y1="21" x2="16.65" y2="16.65" />
        </svg>
        <input
          ref={inputRef}
          type="text"
          value={query}
          onChange={(e) => setQuery(e.target.value)}
          onFocus={() => query && results && setIsOpen(true)}
          placeholder="Search projects, packages, artifacts..."
          className="global-search__input"
        />
        <kbd className="global-search__shortcut">/</kbd>
        {loading && <span className="global-search__spinner" />}
      </div>

      {isOpen && (
        <div className="global-search__dropdown">
          {!hasResults && query && (
            <div className="global-search__empty">No results found for "{query}"</div>
          )}

          {hasResults && (
            <>
              {results.projects.length > 0 && (
                <div className="global-search__section">
                  <div className="global-search__section-header">
                    Projects
                    <span className="global-search__count">{results.counts.projects}</span>
                  </div>
                  {results.projects.map((project, index) => {
                    const flatIndex = index;
                    return (
                      <button
                        key={project.id}
                        className={`global-search__result ${selectedIndex === flatIndex ? 'selected' : ''}`}
                        onClick={() => navigateToResult({ type: 'project', item: project })}
                        onMouseEnter={() => setSelectedIndex(flatIndex)}
                      >
                        <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                          <path d="M22 19a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h5l2 3h9a2 2 0 0 1 2 2z" />
                        </svg>
                        <div className="global-search__result-content">
                          <span className="global-search__result-name">{project.name}</span>
                          {project.description && (
                            <span className="global-search__result-desc">{project.description}</span>
                          )}
                        </div>
                        <span className={`global-search__badge ${project.is_public ? 'public' : 'private'}`}>
                          {project.is_public ? 'Public' : 'Private'}
                        </span>
                      </button>
                    );
                  })}
                </div>
              )}

              {results.packages.length > 0 && (
                <div className="global-search__section">
                  <div className="global-search__section-header">
                    Packages
                    <span className="global-search__count">{results.counts.packages}</span>
                  </div>
                  {results.packages.map((pkg, index) => {
                    const flatIndex = results.projects.length + index;
                    return (
                      <button
                        key={pkg.id}
                        className={`global-search__result ${selectedIndex === flatIndex ? 'selected' : ''}`}
                        onClick={() => navigateToResult({ type: 'package', item: pkg })}
                        onMouseEnter={() => setSelectedIndex(flatIndex)}
                      >
                        <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                          <path d="M16.5 9.4l-9-5.19M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z" />
                          <polyline points="3.27 6.96 12 12.01 20.73 6.96" />
                          <line x1="12" y1="22.08" x2="12" y2="12" />
                        </svg>
                        <div className="global-search__result-content">
                          <span className="global-search__result-name">{pkg.name}</span>
                          <span className="global-search__result-path">{pkg.project_name}</span>
                          {pkg.description && (
                            <span className="global-search__result-desc">{pkg.description}</span>
                          )}
                        </div>
                        <span className="global-search__badge format">{pkg.format}</span>
                      </button>
                    );
                  })}
                </div>
              )}

              {results.artifacts.length > 0 && (
                <div className="global-search__section">
                  <div className="global-search__section-header">
                    Artifacts / Tags
                    <span className="global-search__count">{results.counts.artifacts}</span>
                  </div>
                  {results.artifacts.map((artifact, index) => {
                    const flatIndex = results.projects.length + results.packages.length + index;
                    return (
                      <button
                        key={artifact.tag_id}
                        className={`global-search__result ${selectedIndex === flatIndex ? 'selected' : ''}`}
                        onClick={() => navigateToResult({ type: 'artifact', item: artifact })}
                        onMouseEnter={() => setSelectedIndex(flatIndex)}
                      >
                        <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                          <path d="M20.59 13.41l-7.17 7.17a2 2 0 0 1-2.83 0L2 12V2h10l8.59 8.59a2 2 0 0 1 0 2.82z" />
                          <line x1="7" y1="7" x2="7.01" y2="7" />
                        </svg>
                        <div className="global-search__result-content">
                          <span className="global-search__result-name">{artifact.tag_name}</span>
                          <span className="global-search__result-path">
                            {artifact.project_name} / {artifact.package_name}
                          </span>
                          {artifact.original_name && (
                            <span className="global-search__result-desc">{artifact.original_name}</span>
                          )}
                        </div>
                      </button>
                    );
                  })}
                </div>
              )}
            </>
          )}
        </div>
      )}
    </div>
  );
}
@@ -1,5 +1,6 @@
import { ReactNode } from 'react';
import { Link, useLocation } from 'react-router-dom';
import { GlobalSearch } from './GlobalSearch';
import './Layout.css';

interface LayoutProps {
@@ -32,6 +33,7 @@ function Layout({ children }: LayoutProps) {
          </div>
          <span className="logo-text">Orchard</span>
        </Link>
        <GlobalSearch />
        <nav className="nav">
          <Link to="/" className={location.pathname === '/' ? 'active' : ''}>
            <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
64 frontend/src/components/Pagination.css Normal file
@@ -0,0 +1,64 @@
/* Pagination Component */
.pagination {
  display: flex;
  justify-content: space-between;
  align-items: center;
  padding: 16px 0;
  margin-top: 16px;
}

.pagination__info {
  font-size: 0.8125rem;
  color: var(--text-muted);
}

.pagination__controls {
  display: flex;
  align-items: center;
  gap: 4px;
}

.pagination__btn {
  display: flex;
  align-items: center;
  justify-content: center;
  min-width: 32px;
  height: 32px;
  padding: 0 8px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.8125rem;
  color: var(--text-secondary);
  cursor: pointer;
  transition: all var(--transition-fast);
}

.pagination__btn:hover:not(:disabled) {
  background: var(--bg-hover);
  color: var(--text-primary);
  border-color: var(--border-secondary);
}

.pagination__btn:disabled {
  opacity: 0.4;
  cursor: not-allowed;
}

.pagination__page--active {
  background: var(--accent-primary);
  border-color: var(--accent-primary);
  color: white;
}

.pagination__page--active:hover {
  background: var(--accent-primary);
  border-color: var(--accent-primary);
  color: white;
}

.pagination__ellipsis {
  padding: 0 8px;
  color: var(--text-muted);
  font-size: 0.8125rem;
}
98 frontend/src/components/Pagination.tsx Normal file
@@ -0,0 +1,98 @@
import './Pagination.css';

interface PaginationProps {
  page: number;
  totalPages: number;
  total: number;
  limit: number;
  onPageChange: (page: number) => void;
  className?: string;
}

export function Pagination({ page, totalPages, total, limit, onPageChange, className = '' }: PaginationProps) {
  const start = (page - 1) * limit + 1;
  const end = Math.min(page * limit, total);

  if (totalPages <= 1) {
    return null;
  }

  const getPageNumbers = (): (number | 'ellipsis')[] => {
    const pages: (number | 'ellipsis')[] = [];
    const showEllipsisStart = page > 3;
    const showEllipsisEnd = page < totalPages - 2;

    pages.push(1);

    if (showEllipsisStart) {
      pages.push('ellipsis');
    }

    for (let i = Math.max(2, page - 1); i <= Math.min(totalPages - 1, page + 1); i++) {
      if (!pages.includes(i)) {
        pages.push(i);
      }
    }

    if (showEllipsisEnd) {
      pages.push('ellipsis');
    }

    if (totalPages > 1 && !pages.includes(totalPages)) {
      pages.push(totalPages);
    }

    return pages;
  };

  return (
    <div className={`pagination ${className}`.trim()}>
      <span className="pagination__info">
        Showing {start}-{end} of {total}
      </span>

      <div className="pagination__controls">
        <button
          type="button"
          className="pagination__btn"
          onClick={() => onPageChange(page - 1)}
          disabled={page <= 1}
          aria-label="Previous page"
        >
          <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <polyline points="15 18 9 12 15 6" />
          </svg>
        </button>

        {getPageNumbers().map((pageNum, index) =>
          pageNum === 'ellipsis' ? (
            <span key={`ellipsis-${index}`} className="pagination__ellipsis">
              ...
            </span>
          ) : (
            <button
              key={pageNum}
              type="button"
              className={`pagination__btn pagination__page ${pageNum === page ? 'pagination__page--active' : ''}`}
              onClick={() => onPageChange(pageNum)}
            >
              {pageNum}
            </button>
          )
        )}

        <button
          type="button"
          className="pagination__btn"
          onClick={() => onPageChange(page + 1)}
          disabled={page >= totalPages}
          aria-label="Next page"
        >
          <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <polyline points="9 18 15 12 9 6" />
          </svg>
        </button>
      </div>
    </div>
  );
}
57 frontend/src/components/SearchInput.css Normal file
@@ -0,0 +1,57 @@
/* SearchInput Component */
.search-input {
  position: relative;
  display: flex;
  align-items: center;
}

.search-input__icon {
  position: absolute;
  left: 12px;
  color: var(--text-muted);
  pointer-events: none;
}

.search-input__field {
  width: 100%;
  padding: 10px 36px 10px 40px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  color: var(--text-primary);
  transition: all var(--transition-fast);
}

.search-input__field::placeholder {
  color: var(--text-muted);
}

.search-input__field:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
  background: var(--bg-elevated);
}

.search-input__clear {
  position: absolute;
  right: 8px;
  display: flex;
  align-items: center;
  justify-content: center;
  width: 24px;
  height: 24px;
  padding: 0;
  background: transparent;
  border: none;
  border-radius: var(--radius-sm);
  color: var(--text-muted);
  cursor: pointer;
  transition: all var(--transition-fast);
}

.search-input__clear:hover {
  background: var(--bg-hover);
  color: var(--text-primary);
}
74
frontend/src/components/SearchInput.tsx
Normal file
74
frontend/src/components/SearchInput.tsx
Normal file
@@ -0,0 +1,74 @@
import { useState, useEffect } from 'react';
import './SearchInput.css';

interface SearchInputProps {
  value: string;
  onChange: (value: string) => void;
  placeholder?: string;
  debounceMs?: number;
  className?: string;
}

export function SearchInput({
  value,
  onChange,
  placeholder = 'Search...',
  debounceMs = 300,
  className = '',
}: SearchInputProps) {
  const [localValue, setLocalValue] = useState(value);

  useEffect(() => {
    setLocalValue(value);
  }, [value]);

  useEffect(() => {
    const timer = setTimeout(() => {
      if (localValue !== value) {
        onChange(localValue);
      }
    }, debounceMs);

    return () => clearTimeout(timer);
  }, [localValue, debounceMs, onChange, value]);

  return (
    <div className={`search-input ${className}`.trim()}>
      <svg
        className="search-input__icon"
        width="16"
        height="16"
        viewBox="0 0 24 24"
        fill="none"
        stroke="currentColor"
        strokeWidth="2"
      >
        <circle cx="11" cy="11" r="8" />
        <line x1="21" y1="21" x2="16.65" y2="16.65" />
      </svg>
      <input
        type="text"
        value={localValue}
        onChange={(e) => setLocalValue(e.target.value)}
        placeholder={placeholder}
        className="search-input__field"
      />
      {localValue && (
        <button
          type="button"
          className="search-input__clear"
          onClick={() => {
            setLocalValue('');
            onChange('');
          }}
          aria-label="Clear search"
        >
          <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <line x1="18" y1="6" x2="6" y2="18" />
            <line x1="6" y1="6" x2="18" y2="18" />
          </svg>
        </button>
      )}
    </div>
  );
}
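A note on the component above: the two-effect debounce keeps a local value for instant typing feedback and only propagates upward after debounceMs of quiet, so parents re-fetch at most once per pause. A minimal usage sketch (handleSearchChange is a hypothetical parent handler; the pages later in this diff wire onChange to a URL-param updater the same way):

<SearchInput
  value={search}
  onChange={(value) => handleSearchChange(value)} // assumed parent handler, e.g. updates ?search= in the URL
  placeholder="Filter tags..."
  debounceMs={300}
/>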
95
frontend/src/components/SortDropdown.css
Normal file
@@ -0,0 +1,95 @@
/* SortDropdown Component */
.sort-dropdown {
  display: flex;
  align-items: center;
  gap: 4px;
  position: relative;
}

.sort-dropdown__trigger {
  display: flex;
  align-items: center;
  gap: 8px;
  padding: 8px 12px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.8125rem;
  color: var(--text-secondary);
  cursor: pointer;
  transition: all var(--transition-fast);
}

.sort-dropdown__trigger:hover {
  background: var(--bg-hover);
  color: var(--text-primary);
  border-color: var(--border-secondary);
}

.sort-dropdown__chevron {
  transition: transform var(--transition-fast);
}

.sort-dropdown__chevron--open {
  transform: rotate(180deg);
}

.sort-dropdown__order {
  display: flex;
  align-items: center;
  justify-content: center;
  width: 32px;
  height: 32px;
  padding: 0;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  color: var(--text-secondary);
  cursor: pointer;
  transition: all var(--transition-fast);
}

.sort-dropdown__order:hover {
  background: var(--bg-hover);
  color: var(--text-primary);
  border-color: var(--border-secondary);
}

.sort-dropdown__menu {
  position: absolute;
  top: 100%;
  left: 0;
  margin-top: 4px;
  min-width: 180px;
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  box-shadow: var(--shadow-lg);
  z-index: 100;
  overflow: hidden;
}

.sort-dropdown__option {
  display: flex;
  align-items: center;
  justify-content: space-between;
  width: 100%;
  padding: 10px 14px;
  background: transparent;
  border: none;
  font-size: 0.8125rem;
  color: var(--text-secondary);
  cursor: pointer;
  transition: all var(--transition-fast);
  text-align: left;
}

.sort-dropdown__option:hover {
  background: var(--bg-hover);
  color: var(--text-primary);
}

.sort-dropdown__option--selected {
  color: var(--accent-primary);
  font-weight: 500;
}
108
frontend/src/components/SortDropdown.tsx
Normal file
@@ -0,0 +1,108 @@
import { useState, useRef, useEffect } from 'react';
import './SortDropdown.css';

export interface SortOption {
  value: string;
  label: string;
}

interface SortDropdownProps {
  options: SortOption[];
  value: string;
  order: 'asc' | 'desc';
  onChange: (value: string, order: 'asc' | 'desc') => void;
  className?: string;
}

export function SortDropdown({ options, value, order, onChange, className = '' }: SortDropdownProps) {
  const [isOpen, setIsOpen] = useState(false);
  const dropdownRef = useRef<HTMLDivElement>(null);

  const selectedOption = options.find((o) => o.value === value) || options[0];

  useEffect(() => {
    function handleClickOutside(event: MouseEvent) {
      if (dropdownRef.current && !dropdownRef.current.contains(event.target as Node)) {
        setIsOpen(false);
      }
    }

    document.addEventListener('mousedown', handleClickOutside);
    return () => document.removeEventListener('mousedown', handleClickOutside);
  }, []);

  const toggleOrder = () => {
    onChange(value, order === 'asc' ? 'desc' : 'asc');
  };

  return (
    <div className={`sort-dropdown ${className}`.trim()} ref={dropdownRef}>
      <button
        type="button"
        className="sort-dropdown__trigger"
        onClick={() => setIsOpen(!isOpen)}
        aria-expanded={isOpen}
      >
        <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
          <line x1="4" y1="6" x2="20" y2="6" />
          <line x1="4" y1="12" x2="14" y2="12" />
          <line x1="4" y1="18" x2="8" y2="18" />
        </svg>
        <span>Sort: {selectedOption.label}</span>
        <svg
          className={`sort-dropdown__chevron ${isOpen ? 'sort-dropdown__chevron--open' : ''}`}
          width="14"
          height="14"
          viewBox="0 0 24 24"
          fill="none"
          stroke="currentColor"
          strokeWidth="2"
        >
          <polyline points="6 9 12 15 18 9" />
        </svg>
      </button>

      <button
        type="button"
        className="sort-dropdown__order"
        onClick={toggleOrder}
        title={order === 'asc' ? 'Ascending' : 'Descending'}
      >
        {order === 'asc' ? (
          <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <line x1="12" y1="19" x2="12" y2="5" />
            <polyline points="5 12 12 5 19 12" />
          </svg>
        ) : (
          <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <line x1="12" y1="5" x2="12" y2="19" />
            <polyline points="19 12 12 19 5 12" />
          </svg>
        )}
      </button>

      {isOpen && (
        <div className="sort-dropdown__menu">
          {options.map((option) => (
            <button
              key={option.value}
              type="button"
              className={`sort-dropdown__option ${option.value === value ? 'sort-dropdown__option--selected' : ''}`}
              onClick={() => {
                onChange(option.value, order);
                setIsOpen(false);
              }}
            >
              {option.label}
              {option.value === value && (
                <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                  <polyline points="20 6 9 17 4 12" />
                </svg>
              )}
            </button>
          ))}
        </div>
      )}
    </div>
  );
}
12
frontend/src/components/index.ts
Normal file
@@ -0,0 +1,12 @@
export { Card, CardHeader, CardBody, CardFooter } from './Card';
export { Badge } from './Badge';
export { Breadcrumb } from './Breadcrumb';
export { SearchInput } from './SearchInput';
export { SortDropdown } from './SortDropdown';
export type { SortOption } from './SortDropdown';
export { FilterDropdown } from './FilterDropdown';
export type { FilterOption } from './FilterDropdown';
export { FilterChip, FilterChipGroup } from './FilterChip';
export { DataTable } from './DataTable';
export { Pagination } from './Pagination';
export { GlobalSearch } from './GlobalSearch';
@@ -272,6 +272,179 @@
  color: var(--text-muted);
}

.owner {
  color: var(--text-muted);
  font-size: 0.75rem;
}

.project-meta__dates {
  display: flex;
  flex-direction: column;
  gap: 2px;
  text-align: right;
}

.project-meta__owner {
  margin-top: 8px;
  padding-top: 8px;
  border-top: 1px solid var(--border-primary);
}

/* List Controls */
.list-controls {
  display: flex;
  gap: 12px;
  margin-bottom: 20px;
  flex-wrap: wrap;
}

.list-controls__search {
  flex: 1;
  min-width: 200px;
  max-width: 400px;
}

/* Stats in project cards */
.project-stats {
  display: flex;
  gap: 16px;
  margin-top: 12px;
  padding-top: 12px;
  border-top: 1px solid var(--border-primary);
}

.project-stats__item {
  display: flex;
  flex-direction: column;
  gap: 2px;
}

.project-stats__value {
  font-size: 1rem;
  font-weight: 600;
  color: var(--text-primary);
}

.project-stats__label {
  font-size: 0.6875rem;
  color: var(--text-muted);
  text-transform: uppercase;
  letter-spacing: 0.05em;
}

/* Page header enhancements */
.page-header__info {
  flex: 1;
}

.page-header__title-row {
  display: flex;
  align-items: center;
  gap: 12px;
  margin-bottom: 4px;
}

.page-header__meta {
  display: flex;
  gap: 16px;
  margin-top: 8px;
  font-size: 0.8125rem;
  color: var(--text-muted);
}

.meta-item {
  display: flex;
  align-items: center;
  gap: 4px;
}

/* Package card styles */
.package-card__header {
  display: flex;
  align-items: center;
  justify-content: space-between;
  gap: 12px;
  margin-bottom: 8px;
}

.package-card__header h3 {
  margin-bottom: 0;
}

.package-stats {
  display: flex;
  gap: 20px;
  margin: 16px 0;
  padding: 12px 0;
  border-top: 1px solid var(--border-primary);
  border-bottom: 1px solid var(--border-primary);
}

.package-stats__item {
  display: flex;
  flex-direction: column;
  gap: 2px;
}

.package-stats__value {
  font-size: 1.125rem;
  font-weight: 600;
  color: var(--text-primary);
}

.package-stats__label {
  font-size: 0.6875rem;
  color: var(--text-muted);
  text-transform: uppercase;
  letter-spacing: 0.05em;
}

.latest-tag {
  font-size: 0.75rem;
  color: var(--text-secondary);
}

.latest-tag strong {
  color: var(--accent-primary);
}

/* List controls select */
.list-controls__select {
  padding: 8px 32px 8px 12px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.8125rem;
  color: var(--text-secondary);
  cursor: pointer;
  transition: all var(--transition-fast);
  appearance: none;
  background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 24 24' fill='none' stroke='%236b7280' stroke-width='2'%3E%3Cpolyline points='6 9 12 15 18 9'%3E%3C/polyline%3E%3C/svg%3E");
  background-repeat: no-repeat;
  background-position: right 10px center;
}

.list-controls__select:hover {
  background-color: var(--bg-hover);
  border-color: var(--border-secondary);
}

.list-controls__select:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
}

/* Form row for side-by-side inputs */
.form-row {
  display: flex;
  gap: 16px;
}

.form-row .form-group {
  flex: 1;
}

/* Breadcrumb */
.breadcrumb {
  display: flex;
@@ -1,33 +1,78 @@
import { useState, useEffect } from 'react';
import { Link } from 'react-router-dom';
import { Project } from '../types';
import { useState, useEffect, useCallback } from 'react';
import { Link, useSearchParams } from 'react-router-dom';
import { Project, PaginatedResponse } from '../types';
import { listProjects, createProject } from '../api';
import { Badge } from '../components/Badge';
import { SortDropdown, SortOption } from '../components/SortDropdown';
import { FilterDropdown, FilterOption } from '../components/FilterDropdown';
import { FilterChip, FilterChipGroup } from '../components/FilterChip';
import { Pagination } from '../components/Pagination';
import './Home.css';

const SORT_OPTIONS: SortOption[] = [
  { value: 'name', label: 'Name' },
  { value: 'created_at', label: 'Created' },
  { value: 'updated_at', label: 'Updated' },
];

const VISIBILITY_OPTIONS: FilterOption[] = [
  { value: '', label: 'All Projects' },
  { value: 'public', label: 'Public Only' },
  { value: 'private', label: 'Private Only' },
];

function Home() {
  const [projects, setProjects] = useState<Project[]>([]);
  const [searchParams, setSearchParams] = useSearchParams();

  const [projectsData, setProjectsData] = useState<PaginatedResponse<Project> | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const [showForm, setShowForm] = useState(false);
  const [newProject, setNewProject] = useState({ name: '', description: '', is_public: true });
  const [creating, setCreating] = useState(false);

  useEffect(() => {
    loadProjects();
  }, []);
  // Get params from URL
  const page = parseInt(searchParams.get('page') || '1', 10);
  const sort = searchParams.get('sort') || 'name';
  const order = (searchParams.get('order') || 'asc') as 'asc' | 'desc';
  const visibility = searchParams.get('visibility') || '';

  async function loadProjects() {
  const updateParams = useCallback(
    (updates: Record<string, string | undefined>) => {
      const newParams = new URLSearchParams(searchParams);
      Object.entries(updates).forEach(([key, value]) => {
        if (value === undefined || value === '' || (key === 'page' && value === '1')) {
          newParams.delete(key);
        } else {
          newParams.set(key, value);
        }
      });
      setSearchParams(newParams);
    },
    [searchParams, setSearchParams]
  );

  const loadProjects = useCallback(async () => {
    try {
      setLoading(true);
      const data = await listProjects();
      setProjects(data);
      const data = await listProjects({
        page,
        sort,
        order,
        visibility: visibility as 'public' | 'private' | undefined || undefined,
      });
      setProjectsData(data);
      setError(null);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load projects');
    } finally {
      setLoading(false);
    }
  }
  }, [page, sort, order, visibility]);

  useEffect(() => {
    loadProjects();
  }, [loadProjects]);

  async function handleCreateProject(e: React.FormEvent) {
    e.preventDefault();
@@ -44,7 +89,27 @@ function Home() {
    }
  }

  if (loading) {
  const handleSortChange = (newSort: string, newOrder: 'asc' | 'desc') => {
    updateParams({ sort: newSort, order: newOrder, page: '1' });
  };

  const handleVisibilityChange = (value: string) => {
    updateParams({ visibility: value, page: '1' });
  };

  const handlePageChange = (newPage: number) => {
    updateParams({ page: String(newPage) });
  };

  const clearFilters = () => {
    setSearchParams({});
  };

  const hasActiveFilters = visibility !== '';
  const projects = projectsData?.items || [];
  const pagination = projectsData?.pagination;

  if (loading && !projectsData) {
    return <div className="loading">Loading projects...</div>;
  }

@@ -99,27 +164,71 @@ function Home() {
        </form>
      )}

      <div className="list-controls">
        <FilterDropdown
          label="Visibility"
          options={VISIBILITY_OPTIONS}
          value={visibility}
          onChange={handleVisibilityChange}
        />
        <SortDropdown options={SORT_OPTIONS} value={sort} order={order} onChange={handleSortChange} />
      </div>

      {hasActiveFilters && (
        <FilterChipGroup onClearAll={clearFilters}>
          {visibility && (
            <FilterChip
              label="Visibility"
              value={visibility === 'public' ? 'Public' : 'Private'}
              onRemove={() => handleVisibilityChange('')}
            />
          )}
        </FilterChipGroup>
      )}

      {projects.length === 0 ? (
        <div className="empty-state">
          {hasActiveFilters ? (
            <p>No projects match your filters. Try adjusting your search.</p>
          ) : (
            <p>No projects yet. Create your first project to get started!</p>
          )}
        </div>
      ) : (
        <>
          <div className="project-grid">
            {projects.map((project) => (
              <Link to={`/project/${project.name}`} key={project.id} className="project-card card">
                <h3>{project.name}</h3>
                {project.description && <p>{project.description}</p>}
                <div className="project-meta">
                  <span className={`badge ${project.is_public ? 'badge-public' : 'badge-private'}`}>
                  <Badge variant={project.is_public ? 'public' : 'private'}>
                    {project.is_public ? 'Public' : 'Private'}
                  </span>
                  <span className="date">
                    Created {new Date(project.created_at).toLocaleDateString()}
                  </span>
                  </Badge>
                  <div className="project-meta__dates">
                    <span className="date">Created {new Date(project.created_at).toLocaleDateString()}</span>
                    {project.updated_at !== project.created_at && (
                      <span className="date">Updated {new Date(project.updated_at).toLocaleDateString()}</span>
                    )}
                  </div>
                </div>
                <div className="project-meta__owner">
                  <span className="owner">by {project.created_by}</span>
                </div>
              </Link>
            ))}
          </div>

          {pagination && pagination.total_pages > 1 && (
            <Pagination
              page={pagination.page}
              totalPages={pagination.total_pages}
              total={pagination.total}
              limit={pagination.limit}
              onPageChange={handlePageChange}
            />
          )}
        </>
      )}
    </div>
  );

@@ -206,6 +206,92 @@ h2 {
  color: var(--text-primary);
}

/* Section header */
.section-header {
  margin-bottom: 16px;
}

.section-header h2 {
  margin-bottom: 0;
}

/* Package header stats */
.package-header-stats {
  display: flex;
  gap: 20px;
  margin-top: 12px;
  font-size: 0.875rem;
  color: var(--text-secondary);
}

.stat-item strong {
  color: var(--text-primary);
}

.stat-item strong.accent {
  color: var(--accent-primary);
}

/* Artifact ID cell */
.artifact-id-cell {
  display: flex;
  align-items: center;
  gap: 8px;
}

/* Copy button */
.copy-btn {
  display: flex;
  align-items: center;
  justify-content: center;
  width: 24px;
  height: 24px;
  padding: 0;
  background: transparent;
  border: none;
  border-radius: var(--radius-sm);
  color: var(--text-muted);
  cursor: pointer;
  opacity: 0;
  transition: all var(--transition-fast);
}

.artifact-id-cell:hover .copy-btn,
tr:hover .copy-btn {
  opacity: 1;
}

.copy-btn:hover {
  background: var(--bg-hover);
  color: var(--accent-primary);
}

/* Content type */
.content-type {
  font-size: 0.75rem;
  color: var(--text-muted);
}

/* Created cell */
.created-cell {
  display: flex;
  flex-direction: column;
  gap: 2px;
}

.created-by {
  font-size: 0.75rem;
  color: var(--text-muted);
}

/* Cell truncate */
.cell-truncate {
  max-width: 150px;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

/* Responsive adjustments */
@media (max-width: 768px) {
  .upload-form {
@@ -217,11 +303,8 @@ h2 {
    min-width: 100%;
  }

  .tags-table {
    overflow-x: auto;
  }

  .tags-table table {
    min-width: 500px;
  .package-header-stats {
    flex-wrap: wrap;
    gap: 12px;
  }
}

@@ -1,13 +1,64 @@
import { useState, useEffect, useRef } from 'react';
import { useParams, Link } from 'react-router-dom';
import { Tag } from '../types';
import { listTags, uploadArtifact, getDownloadUrl } from '../api';
import { useState, useEffect, useRef, useCallback } from 'react';
import { useParams, useSearchParams, useNavigate } from 'react-router-dom';
import { TagDetail, Package, PaginatedResponse } from '../types';
import { listTags, uploadArtifact, getDownloadUrl, getPackage } from '../api';
import { Breadcrumb } from '../components/Breadcrumb';
import { Badge } from '../components/Badge';
import { SearchInput } from '../components/SearchInput';
import { SortDropdown, SortOption } from '../components/SortDropdown';
import { FilterChip, FilterChipGroup } from '../components/FilterChip';
import { DataTable } from '../components/DataTable';
import { Pagination } from '../components/Pagination';
import './Home.css';
import './PackagePage.css';

const SORT_OPTIONS: SortOption[] = [
  { value: 'name', label: 'Name' },
  { value: 'created_at', label: 'Created' },
];

function formatBytes(bytes: number): string {
  if (bytes === 0) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
}

function CopyButton({ text }: { text: string }) {
  const [copied, setCopied] = useState(false);

  const handleCopy = async (e: React.MouseEvent) => {
    e.preventDefault();
    e.stopPropagation();
    await navigator.clipboard.writeText(text);
    setCopied(true);
    setTimeout(() => setCopied(false), 2000);
  };

  return (
    <button className="copy-btn" onClick={handleCopy} title="Copy to clipboard">
      {copied ? (
        <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
          <polyline points="20 6 9 17 4 12" />
        </svg>
      ) : (
        <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
          <rect x="9" y="9" width="13" height="13" rx="2" ry="2" />
          <path d="M5 15H4a2 2 0 0 1-2-2V4a2 2 0 0 1 2-2h9a2 2 0 0 1 2 2v1" />
        </svg>
      )}
    </button>
  );
}

function PackagePage() {
  const { projectName, packageName } = useParams<{ projectName: string; packageName: string }>();
  const [tags, setTags] = useState<Tag[]>([]);
  const navigate = useNavigate();
  const [searchParams, setSearchParams] = useSearchParams();

  const [pkg, setPkg] = useState<Package | null>(null);
  const [tagsData, setTagsData] = useState<PaginatedResponse<TagDetail> | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const [uploading, setUploading] = useState(false);
@@ -15,24 +66,61 @@ function PackagePage() {
  const [tag, setTag] = useState('');
  const fileInputRef = useRef<HTMLInputElement>(null);

  useEffect(() => {
    if (projectName && packageName) {
      loadTags();
    }
  }, [projectName, packageName]);
  // Get params from URL
  const page = parseInt(searchParams.get('page') || '1', 10);
  const search = searchParams.get('search') || '';
  const sort = searchParams.get('sort') || 'name';
  const order = (searchParams.get('order') || 'asc') as 'asc' | 'desc';

  const updateParams = useCallback(
    (updates: Record<string, string | undefined>) => {
      const newParams = new URLSearchParams(searchParams);
      Object.entries(updates).forEach(([key, value]) => {
        if (value === undefined || value === '' || (key === 'page' && value === '1')) {
          newParams.delete(key);
        } else {
          newParams.set(key, value);
        }
      });
      setSearchParams(newParams);
    },
    [searchParams, setSearchParams]
  );

  const loadData = useCallback(async () => {
    if (!projectName || !packageName) return;

  async function loadTags() {
    try {
      setLoading(true);
      const data = await listTags(projectName!, packageName!);
      setTags(data);
      const [pkgData, tagsResult] = await Promise.all([
        getPackage(projectName, packageName),
        listTags(projectName, packageName, { page, search, sort, order }),
      ]);
      setPkg(pkgData);
      setTagsData(tagsResult);
      setError(null);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load tags');
      setError(err instanceof Error ? err.message : 'Failed to load data');
    } finally {
      setLoading(false);
    }
  }, [projectName, packageName, page, search, sort, order]);

  useEffect(() => {
    loadData();
  }, [loadData]);

  // Keyboard navigation - go back with backspace
  useEffect(() => {
    const handleKeyDown = (e: KeyboardEvent) => {
      if (e.key === 'Backspace' && !['INPUT', 'TEXTAREA'].includes((e.target as HTMLElement).tagName)) {
        e.preventDefault();
        navigate(`/project/${projectName}`);
      }
    };
    window.addEventListener('keydown', handleKeyDown);
    return () => window.removeEventListener('keydown', handleKeyDown);
  }, [navigate, projectName]);

  async function handleUpload(e: React.FormEvent) {
    e.preventDefault();
@@ -51,7 +139,7 @@ function PackagePage() {
      if (fileInputRef.current) {
        fileInputRef.current.value = '';
      }
      loadTags();
      loadData();
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Upload failed');
    } finally {
@@ -59,18 +147,148 @@ function PackagePage() {
    }
  }

  if (loading) {
  const handleSearchChange = (value: string) => {
    updateParams({ search: value, page: '1' });
  };

  const handleSortChange = (newSort: string, newOrder: 'asc' | 'desc') => {
    updateParams({ sort: newSort, order: newOrder, page: '1' });
  };

  const handlePageChange = (newPage: number) => {
    updateParams({ page: String(newPage) });
  };

  const clearFilters = () => {
    setSearchParams({});
  };

  const hasActiveFilters = search !== '';
  const tags = tagsData?.items || [];
  const pagination = tagsData?.pagination;

  const columns = [
    {
      key: 'name',
      header: 'Tag',
      sortable: true,
      render: (t: TagDetail) => <strong>{t.name}</strong>,
    },
    {
      key: 'artifact_id',
      header: 'Artifact ID',
      render: (t: TagDetail) => (
        <div className="artifact-id-cell">
          <code className="artifact-id">{t.artifact_id.substring(0, 12)}...</code>
          <CopyButton text={t.artifact_id} />
        </div>
      ),
    },
    {
      key: 'size',
      header: 'Size',
      render: (t: TagDetail) => <span>{formatBytes(t.artifact_size)}</span>,
    },
    {
      key: 'content_type',
      header: 'Type',
      render: (t: TagDetail) => (
        <span className="content-type">{t.artifact_content_type || '-'}</span>
      ),
    },
    {
      key: 'original_name',
      header: 'Filename',
      className: 'cell-truncate',
      render: (t: TagDetail) => (
        <span title={t.artifact_original_name || undefined}>{t.artifact_original_name || '-'}</span>
      ),
    },
    {
      key: 'created_at',
      header: 'Created',
      sortable: true,
      render: (t: TagDetail) => (
        <div className="created-cell">
          <span>{new Date(t.created_at).toLocaleString()}</span>
          <span className="created-by">by {t.created_by}</span>
        </div>
      ),
    },
    {
      key: 'actions',
      header: 'Actions',
      render: (t: TagDetail) => (
        <a
          href={getDownloadUrl(projectName!, packageName!, t.name)}
          className="btn btn-secondary btn-small"
          download
        >
          Download
        </a>
      ),
    },
  ];

  if (loading && !tagsData) {
    return <div className="loading">Loading...</div>;
  }

  return (
    <div className="home">
      <nav className="breadcrumb">
        <Link to="/">Projects</Link> / <Link to={`/project/${projectName}`}>{projectName}</Link> / <span>{packageName}</span>
      </nav>
      <Breadcrumb
        items={[
          { label: 'Projects', href: '/' },
          { label: projectName!, href: `/project/${projectName}` },
          { label: packageName! },
        ]}
      />

      <div className="page-header">
        <div className="page-header__info">
          <div className="page-header__title-row">
            <h1>{packageName}</h1>
            {pkg && <Badge variant="default">{pkg.format}</Badge>}
          </div>
          {pkg?.description && <p className="description">{pkg.description}</p>}
          <div className="page-header__meta">
            <span className="meta-item">
              in <a href={`/project/${projectName}`}>{projectName}</a>
            </span>
            {pkg && (
              <>
                <span className="meta-item">Created {new Date(pkg.created_at).toLocaleDateString()}</span>
                {pkg.updated_at !== pkg.created_at && (
                  <span className="meta-item">Updated {new Date(pkg.updated_at).toLocaleDateString()}</span>
                )}
              </>
            )}
          </div>
          {pkg && (pkg.tag_count !== undefined || pkg.artifact_count !== undefined) && (
            <div className="package-header-stats">
              {pkg.tag_count !== undefined && (
                <span className="stat-item">
                  <strong>{pkg.tag_count}</strong> tags
                </span>
              )}
              {pkg.artifact_count !== undefined && (
                <span className="stat-item">
                  <strong>{pkg.artifact_count}</strong> artifacts
                </span>
              )}
              {pkg.total_size !== undefined && pkg.total_size > 0 && (
                <span className="stat-item">
                  <strong>{formatBytes(pkg.total_size)}</strong> total
                </span>
              )}
              {pkg.latest_tag && (
                <span className="stat-item">
                  Latest: <strong className="accent">{pkg.latest_tag}</strong>
                </span>
              )}
            </div>
          )}
        </div>
      </div>

      {error && <div className="error-message">{error}</div>}
@@ -81,12 +299,7 @@ function PackagePage() {
      <form onSubmit={handleUpload} className="upload-form">
        <div className="form-group">
          <label htmlFor="file">File</label>
          <input
            id="file"
            type="file"
            ref={fileInputRef}
            required
          />
          <input id="file" type="file" ref={fileInputRef} required />
        </div>
        <div className="form-group">
          <label htmlFor="tag">Tag (optional)</label>
@@ -104,42 +317,54 @@ function PackagePage() {
        </form>
      </div>

      <div className="section-header">
        <h2>Tags / Versions</h2>
      {tags.length === 0 ? (
        <div className="empty-state">
          <p>No tags yet. Upload an artifact with a tag to create one!</p>
        </div>
      ) : (
        <div className="tags-table">
          <table>
            <thead>
              <tr>
                <th>Tag</th>
                <th>Artifact ID</th>
                <th>Created</th>
                <th>Actions</th>
              </tr>
            </thead>
            <tbody>
              {tags.map((t) => (
                <tr key={t.id}>
                  <td><strong>{t.name}</strong></td>
                  <td className="artifact-id">{t.artifact_id.substring(0, 12)}...</td>
                  <td>{new Date(t.created_at).toLocaleString()}</td>
                  <td>
                    <a
                      href={getDownloadUrl(projectName!, packageName!, t.name)}
                      className="btn btn-secondary btn-small"
                      download
                    >
                      Download
                    </a>
                  </td>
                </tr>
              ))}
            </tbody>
          </table>

      <div className="list-controls">
        <SearchInput
          value={search}
          onChange={handleSearchChange}
          placeholder="Filter tags..."
          className="list-controls__search"
        />
        <SortDropdown options={SORT_OPTIONS} value={sort} order={order} onChange={handleSortChange} />
      </div>

      {hasActiveFilters && (
        <FilterChipGroup onClearAll={clearFilters}>
          {search && <FilterChip label="Filter" value={search} onRemove={() => handleSearchChange('')} />}
        </FilterChipGroup>
      )}

      <DataTable
        data={tags}
        columns={columns}
        keyExtractor={(t) => t.id}
        emptyMessage={
          hasActiveFilters
            ? 'No tags match your filters. Try adjusting your search.'
            : 'No tags yet. Upload an artifact with a tag to create one!'
        }
        onSort={(key) => {
          if (key === sort) {
            handleSortChange(key, order === 'asc' ? 'desc' : 'asc');
          } else {
            handleSortChange(key, 'asc');
          }
        }}
        sortKey={sort}
        sortOrder={order}
      />

      {pagination && pagination.total_pages > 1 && (
        <Pagination
          page={pagination.page}
          totalPages={pagination.total_pages}
          total={pagination.total}
          limit={pagination.limit}
          onPageChange={handlePageChange}
        />
      )}

      <div className="usage-section card">

@@ -1,48 +1,107 @@
import { useState, useEffect } from 'react';
import { useParams, Link } from 'react-router-dom';
import { Project, Package } from '../types';
import { useState, useEffect, useCallback } from 'react';
import { useParams, Link, useSearchParams, useNavigate } from 'react-router-dom';
import { Project, Package, PaginatedResponse } from '../types';
import { getProject, listPackages, createPackage } from '../api';
import { Breadcrumb } from '../components/Breadcrumb';
import { Badge } from '../components/Badge';
import { SearchInput } from '../components/SearchInput';
import { SortDropdown, SortOption } from '../components/SortDropdown';
import { FilterChip, FilterChipGroup } from '../components/FilterChip';
import { Pagination } from '../components/Pagination';
import './Home.css';

const SORT_OPTIONS: SortOption[] = [
  { value: 'name', label: 'Name' },
  { value: 'created_at', label: 'Created' },
  { value: 'updated_at', label: 'Updated' },
];

const FORMAT_OPTIONS = ['generic', 'npm', 'pypi', 'docker', 'deb', 'rpm', 'maven', 'nuget', 'helm'];

function formatBytes(bytes: number): string {
  if (bytes === 0) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
}

function ProjectPage() {
  const { projectName } = useParams<{ projectName: string }>();
  const navigate = useNavigate();
  const [searchParams, setSearchParams] = useSearchParams();

  const [project, setProject] = useState<Project | null>(null);
  const [packages, setPackages] = useState<Package[]>([]);
  const [packagesData, setPackagesData] = useState<PaginatedResponse<Package> | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const [showForm, setShowForm] = useState(false);
  const [newPackage, setNewPackage] = useState({ name: '', description: '' });
  const [newPackage, setNewPackage] = useState({ name: '', description: '', format: 'generic', platform: 'any' });
  const [creating, setCreating] = useState(false);

  useEffect(() => {
    if (projectName) {
      loadData();
    }
  }, [projectName]);
  // Get params from URL
  const page = parseInt(searchParams.get('page') || '1', 10);
  const search = searchParams.get('search') || '';
  const sort = searchParams.get('sort') || 'name';
  const order = (searchParams.get('order') || 'asc') as 'asc' | 'desc';
  const format = searchParams.get('format') || '';

  const updateParams = useCallback(
    (updates: Record<string, string | undefined>) => {
      const newParams = new URLSearchParams(searchParams);
      Object.entries(updates).forEach(([key, value]) => {
        if (value === undefined || value === '' || (key === 'page' && value === '1')) {
          newParams.delete(key);
        } else {
          newParams.set(key, value);
        }
      });
      setSearchParams(newParams);
    },
    [searchParams, setSearchParams]
  );

  const loadData = useCallback(async () => {
    if (!projectName) return;

  async function loadData() {
    try {
      setLoading(true);
      const [projectData, packagesData] = await Promise.all([
        getProject(projectName!),
        listPackages(projectName!),
      const [projectData, packagesResult] = await Promise.all([
        getProject(projectName),
        listPackages(projectName, { page, search, sort, order, format: format || undefined }),
      ]);
      setProject(projectData);
      setPackages(packagesData);
      setPackagesData(packagesResult);
      setError(null);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load data');
    } finally {
      setLoading(false);
    }
  }, [projectName, page, search, sort, order, format]);

  useEffect(() => {
    loadData();
  }, [loadData]);

  // Keyboard navigation - go back with backspace
  useEffect(() => {
    const handleKeyDown = (e: KeyboardEvent) => {
      if (e.key === 'Backspace' && !['INPUT', 'TEXTAREA'].includes((e.target as HTMLElement).tagName)) {
        e.preventDefault();
        navigate('/');
      }
    };
    window.addEventListener('keydown', handleKeyDown);
    return () => window.removeEventListener('keydown', handleKeyDown);
  }, [navigate]);

  async function handleCreatePackage(e: React.FormEvent) {
    e.preventDefault();
    try {
      setCreating(true);
      await createPackage(projectName!, newPackage);
      setNewPackage({ name: '', description: '' });
      setNewPackage({ name: '', description: '', format: 'generic', platform: 'any' });
      setShowForm(false);
      loadData();
    } catch (err) {
@@ -52,7 +111,31 @@ function ProjectPage() {
    }
  }

  if (loading) {
  const handleSearchChange = (value: string) => {
    updateParams({ search: value, page: '1' });
  };

  const handleSortChange = (newSort: string, newOrder: 'asc' | 'desc') => {
    updateParams({ sort: newSort, order: newOrder, page: '1' });
  };

  const handleFormatChange = (value: string) => {
    updateParams({ format: value, page: '1' });
  };

  const handlePageChange = (newPage: number) => {
    updateParams({ page: String(newPage) });
  };

  const clearFilters = () => {
    setSearchParams({});
  };

  const hasActiveFilters = search !== '' || format !== '';
  const packages = packagesData?.items || [];
  const pagination = packagesData?.pagination;

  if (loading && !packagesData) {
    return <div className="loading">Loading...</div>;
  }

@@ -62,14 +145,29 @@ function ProjectPage() {

  return (
    <div className="home">
      <nav className="breadcrumb">
        <Link to="/">Projects</Link> / <span>{project.name}</span>
      </nav>
      <Breadcrumb
        items={[
          { label: 'Projects', href: '/' },
          { label: project.name },
        ]}
      />

      <div className="page-header">
        <div>
        <div className="page-header__info">
          <div className="page-header__title-row">
            <h1>{project.name}</h1>
            <Badge variant={project.is_public ? 'public' : 'private'}>
              {project.is_public ? 'Public' : 'Private'}
            </Badge>
          </div>
          {project.description && <p className="description">{project.description}</p>}
          <div className="page-header__meta">
            <span className="meta-item">Created {new Date(project.created_at).toLocaleDateString()}</span>
            {project.updated_at !== project.created_at && (
              <span className="meta-item">Updated {new Date(project.updated_at).toLocaleDateString()}</span>
            )}
            <span className="meta-item">by {project.created_by}</span>
          </div>
        </div>
        <button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
          {showForm ? 'Cancel' : '+ New Package'}
@@ -81,6 +179,7 @@ function ProjectPage() {
      {showForm && (
        <form className="form card" onSubmit={handleCreatePackage}>
          <h3>Create New Package</h3>
          <div className="form-row">
            <div className="form-group">
              <label htmlFor="name">Name</label>
              <input
@@ -92,6 +191,21 @@ function ProjectPage() {
                required
              />
            </div>
            <div className="form-group">
              <label htmlFor="format">Format</label>
              <select
                id="format"
                value={newPackage.format}
                onChange={(e) => setNewPackage({ ...newPackage, format: e.target.value })}
              >
                {FORMAT_OPTIONS.map((f) => (
                  <option key={f} value={f}>
                    {f}
                  </option>
                ))}
              </select>
            </div>
          </div>
          <div className="form-group">
            <label htmlFor="description">Description</label>
            <input
@@ -108,24 +222,99 @@ function ProjectPage() {
        </form>
      )}

      <div className="list-controls">
        <SearchInput
          value={search}
          onChange={handleSearchChange}
          placeholder="Filter packages..."
          className="list-controls__search"
        />
        <select
          className="list-controls__select"
          value={format}
          onChange={(e) => handleFormatChange(e.target.value)}
        >
          <option value="">All formats</option>
          {FORMAT_OPTIONS.map((f) => (
            <option key={f} value={f}>
              {f}
            </option>
          ))}
        </select>
        <SortDropdown options={SORT_OPTIONS} value={sort} order={order} onChange={handleSortChange} />
      </div>

      {hasActiveFilters && (
        <FilterChipGroup onClearAll={clearFilters}>
          {search && <FilterChip label="Filter" value={search} onRemove={() => handleSearchChange('')} />}
          {format && <FilterChip label="Format" value={format} onRemove={() => handleFormatChange('')} />}
        </FilterChipGroup>
      )}

      {packages.length === 0 ? (
        <div className="empty-state">
          {hasActiveFilters ? (
            <p>No packages match your filters. Try adjusting your search.</p>
          ) : (
            <p>No packages yet. Create your first package to start uploading artifacts!</p>
          )}
        </div>
      ) : (
        <>
          <div className="project-grid">
            {packages.map((pkg) => (
              <Link to={`/project/${projectName}/${pkg.name}`} key={pkg.id} className="project-card card">
                <div className="package-card__header">
                  <h3>{pkg.name}</h3>
                  <Badge variant="default">{pkg.format}</Badge>
                </div>
                {pkg.description && <p>{pkg.description}</p>}

                {(pkg.tag_count !== undefined || pkg.artifact_count !== undefined) && (
                  <div className="package-stats">
                    {pkg.tag_count !== undefined && (
                      <div className="package-stats__item">
                        <span className="package-stats__value">{pkg.tag_count}</span>
                        <span className="package-stats__label">Tags</span>
                      </div>
                    )}
                    {pkg.artifact_count !== undefined && (
                      <div className="package-stats__item">
                        <span className="package-stats__value">{pkg.artifact_count}</span>
                        <span className="package-stats__label">Artifacts</span>
                      </div>
                    )}
                    {pkg.total_size !== undefined && pkg.total_size > 0 && (
                      <div className="package-stats__item">
                        <span className="package-stats__value">{formatBytes(pkg.total_size)}</span>
                        <span className="package-stats__label">Size</span>
                      </div>
                    )}
                  </div>
                )}

                <div className="project-meta">
                  <span className="date">
                    Created {new Date(pkg.created_at).toLocaleDateString()}
                  {pkg.latest_tag && (
                    <span className="latest-tag">
                      Latest: <strong>{pkg.latest_tag}</strong>
                    </span>
                  )}
                  <span className="date">Created {new Date(pkg.created_at).toLocaleDateString()}</span>
                </div>
              </Link>
            ))}
          </div>

          {pagination && pagination.total_pages > 1 && (
            <Pagination
              page={pagination.page}
              totalPages={pagination.total_pages}
              total={pagination.total}
              limit={pagination.limit}
              onPageChange={handlePageChange}
            />
          )}
        </>
      )}
    </div>
  );

@@ -8,13 +8,28 @@ export interface Project {
  created_by: string;
}

export interface TagSummary {
  name: string;
  artifact_id: string;
  created_at: string;
}

export interface Package {
  id: string;
  project_id: string;
  name: string;
  description: string | null;
  format: string;
  platform: string;
  created_at: string;
  updated_at: string;
  // Aggregated fields (from PackageDetailResponse)
  tag_count?: number;
  artifact_count?: number;
  total_size?: number;
  latest_tag?: string | null;
  latest_upload_at?: string | null;
  recent_tags?: TagSummary[];
}

export interface Artifact {
@@ -36,6 +51,57 @@ export interface Tag {
  created_by: string;
}

export interface TagDetail extends Tag {
  artifact_size: number;
  artifact_content_type: string | null;
  artifact_original_name: string | null;
  artifact_created_at: string;
  artifact_format_metadata: Record<string, unknown> | null;
}

export interface ArtifactTagInfo {
  id: string;
  name: string;
  package_id: string;
  package_name: string;
  project_name: string;
}

export interface ArtifactDetail extends Artifact {
  tags: ArtifactTagInfo[];
}

export interface PaginatedResponse<T> {
  items: T[];
  pagination: {
    page: number;
    limit: number;
    total: number;
    total_pages: number;
  };
}

export interface ListParams {
  page?: number;
  limit?: number;
  search?: string;
  sort?: string;
  order?: 'asc' | 'desc';
}

export interface TagListParams extends ListParams {}

export interface PackageListParams extends ListParams {
  format?: string;
  platform?: string;
}

export interface ArtifactListParams extends ListParams {
  content_type?: string;
  created_after?: string;
  created_before?: string;
}

export interface Consumer {
  id: string;
  package_id: string;
@@ -51,3 +117,47 @@ export interface UploadResponse {
  package: string;
  tag: string | null;
}

// Global search types
export interface SearchResultProject {
  id: string;
  name: string;
  description: string | null;
  is_public: boolean;
}

export interface SearchResultPackage {
  id: string;
  project_id: string;
  project_name: string;
  name: string;
  description: string | null;
  format: string;
}

export interface SearchResultArtifact {
  tag_id: string;
  tag_name: string;
  artifact_id: string;
  package_id: string;
  package_name: string;
  project_name: string;
  original_name: string | null;
}

export interface GlobalSearchResponse {
  query: string;
  projects: SearchResultProject[];
  packages: SearchResultPackage[];
  artifacts: SearchResultArtifact[];
  counts: {
    projects: number;
    packages: number;
    artifacts: number;
    total: number;
  };
}

export interface ProjectListParams extends ListParams {
  visibility?: 'public' | 'private';
}
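The PaginatedResponse<T> envelope and the *ListParams interfaces above describe the shape the list endpoints accept and return. A minimal sketch of a client helper consuming them (the /api/v1/projects path and the exact query-parameter names are assumptions, not confirmed by this diff):

import { PaginatedResponse, Project, ProjectListParams } from './types';

// Hypothetical fetch wrapper: serializes ListParams into a query string
// and parses the paginated envelope defined above.
export async function fetchProjects(
  params: ProjectListParams = {}
): Promise<PaginatedResponse<Project>> {
  const query = new URLSearchParams();
  Object.entries(params).forEach(([key, value]) => {
    if (value !== undefined && value !== '') query.set(key, String(value));
  });
  const res = await fetch(`/api/v1/projects?${query.toString()}`);
  if (!res.ok) throw new Error(`Request failed: ${res.status}`);
  return res.json() as Promise<PaginatedResponse<Project>>;
}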
@@ -62,5 +62,3 @@ Orchard has been installed!
Endpoint: {{ include "orchard.minio.host" . }}
Bucket: {{ .Values.orchard.s3.bucket }}
{{- end }}

For more information, visit: https://git.bitstorm.ca/bitforge/orchard

@@ -97,10 +97,27 @@ password
{{- end }}

{{/*
MinIO host
MinIO internal host (for server-side operations)
*/}}
{{- define "orchard.minio.internalHost" -}}
{{- if .Values.minio.enabled }}
{{- printf "http://%s-minio:9000" .Release.Name }}
{{- else }}
{{- .Values.orchard.s3.endpoint }}
{{- end }}
{{- end }}

{{/*
MinIO host (uses external URL if ingress enabled, for presigned URLs)
*/}}
{{- define "orchard.minio.host" -}}
{{- if .Values.minio.enabled }}
{{- if and .Values.minio.enabled .Values.minioIngress.enabled .Values.minioIngress.host }}
{{- if .Values.minioIngress.tls.enabled }}
{{- printf "https://%s" .Values.minioIngress.host }}
{{- else }}
{{- printf "http://%s" .Values.minioIngress.host }}
{{- end }}
{{- else if .Values.minio.enabled }}
{{- printf "http://%s-minio:9000" .Release.Name }}
{{- else }}
{{- .Values.orchard.s3.endpoint }}

@@ -92,6 +92,10 @@ spec:
                secretKeyRef:
                  name: {{ include "orchard.minio.secretName" . }}
                  key: {{ if .Values.minio.enabled }}root-password{{ else }}{{ .Values.orchard.s3.existingSecretSecretKeyKey }}{{ end }}
            - name: ORCHARD_DOWNLOAD_MODE
              value: {{ .Values.orchard.download.mode | quote }}
            - name: ORCHARD_PRESIGNED_URL_EXPIRY
              value: {{ .Values.orchard.download.presignedUrlExpiry | quote }}
          livenessProbe:
            {{- toYaml .Values.livenessProbe | nindent 12 }}
          readinessProbe:

34
helm/orchard/templates/minio-ingress.yaml
Normal file
@@ -0,0 +1,34 @@
{{- if and .Values.minio.enabled .Values.minioIngress.enabled -}}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: {{ include "orchard.fullname" . }}-minio
  labels:
    {{- include "orchard.labels" . | nindent 4 }}
    app.kubernetes.io/component: minio
  {{- with .Values.minioIngress.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
spec:
  {{- if .Values.minioIngress.className }}
  ingressClassName: {{ .Values.minioIngress.className }}
  {{- end }}
  {{- if .Values.minioIngress.tls.enabled }}
  tls:
    - hosts:
        - {{ .Values.minioIngress.host | quote }}
      secretName: {{ .Values.minioIngress.tls.secretName }}
  {{- end }}
  rules:
    - host: {{ .Values.minioIngress.host | quote }}
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: {{ .Release.Name }}-minio
                port:
                  number: 9000
{{- end }}
@@ -115,6 +115,11 @@ orchard:
    existingSecretAccessKeyKey: "access-key-id"
    existingSecretSecretKeyKey: "secret-access-key"

  # Download configuration
  download:
    mode: "presigned" # presigned, redirect, or proxy
    presignedUrlExpiry: 3600 # Presigned URL expiry in seconds

# PostgreSQL subchart configuration
postgresql:
  enabled: true

@@ -148,6 +153,18 @@ minio:
    enabled: false
    size: 50Gi

# MinIO external ingress for presigned URL access (separate from subchart ingress)
minioIngress:
  enabled: true
  className: "nginx"
  annotations:
    cert-manager.io/cluster-issuer: "letsencrypt"
    nginx.ingress.kubernetes.io/proxy-body-size: "0" # Disable body size limit for uploads
  host: "minio-orch-dev.common.global.bsf.tools"
  tls:
    enabled: true
    secretName: minio-tls

# Redis subchart configuration (for future caching)
redis:
  enabled: false

@@ -14,6 +14,7 @@ CREATE TABLE IF NOT EXISTS projects (

CREATE INDEX idx_projects_name ON projects(name);
CREATE INDEX idx_projects_created_by ON projects(created_by);
CREATE INDEX idx_projects_public ON projects(name) WHERE is_public = true;

-- Packages (collections within projects)
CREATE TABLE IF NOT EXISTS packages (
@@ -21,6 +22,8 @@ CREATE TABLE IF NOT EXISTS packages (
    project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
    name VARCHAR(255) NOT NULL,
    description TEXT,
    format VARCHAR(50) DEFAULT 'generic', -- package type: generic, npm, pypi, docker, etc.
    platform VARCHAR(50) DEFAULT 'any', -- target platform: any, linux, darwin, windows, etc.
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    UNIQUE(project_id, name)
@@ -28,21 +31,30 @@ CREATE TABLE IF NOT EXISTS packages (

CREATE INDEX idx_packages_project_id ON packages(project_id);
CREATE INDEX idx_packages_name ON packages(name);
CREATE INDEX idx_packages_format ON packages(format);
CREATE INDEX idx_packages_platform ON packages(platform);

-- Artifacts (Content-Addressable)
CREATE TABLE IF NOT EXISTS artifacts (
    id VARCHAR(64) PRIMARY KEY, -- SHA256 hash
    size BIGINT NOT NULL,
    size BIGINT NOT NULL CHECK (size > 0),
    content_type VARCHAR(255),
    original_name VARCHAR(1024),
    checksum_md5 VARCHAR(32), -- MD5 hash for additional verification
    checksum_sha1 VARCHAR(40), -- SHA1 hash for compatibility
    s3_etag VARCHAR(64), -- S3 ETag for verification
    metadata JSONB, -- format-specific metadata
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by VARCHAR(255) NOT NULL,
    ref_count INTEGER DEFAULT 1,
    ref_count INTEGER DEFAULT 1 CHECK (ref_count >= 0),
    s3_key VARCHAR(1024) NOT NULL
);

CREATE INDEX idx_artifacts_created_at ON artifacts(created_at);
CREATE INDEX idx_artifacts_created_by ON artifacts(created_by);
CREATE INDEX idx_artifacts_metadata ON artifacts USING GIN (metadata);
CREATE INDEX idx_artifacts_checksum_md5 ON artifacts(checksum_md5) WHERE checksum_md5 IS NOT NULL;
CREATE INDEX idx_artifacts_checksum_sha1 ON artifacts(checksum_sha1) WHERE checksum_sha1 IS NOT NULL;
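
-- Example (illustration only, not part of this schema): because the primary
-- key is the SHA256 digest of the content, re-uploading identical bytes can
-- collapse into a no-op insert, and the uploads table can then record the
-- event with deduplicated = true. Hash and s3_key literals are placeholders;
-- whether the application uses exactly this statement is an assumption.
INSERT INTO artifacts (id, size, content_type, original_name, created_by, s3_key)
VALUES ('deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef',
        1048576, 'application/octet-stream', 'app.tar.gz', 'ci-bot',
        'artifacts/deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef')
ON CONFLICT (id) DO NOTHING;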

-- Tags (Aliases pointing to artifacts)
CREATE TABLE IF NOT EXISTS tags (
@@ -51,12 +63,14 @@ CREATE TABLE IF NOT EXISTS tags (
    name VARCHAR(255) NOT NULL,
    artifact_id VARCHAR(64) NOT NULL REFERENCES artifacts(id),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by VARCHAR(255) NOT NULL,
    UNIQUE(package_id, name)
);

CREATE INDEX idx_tags_package_id ON tags(package_id);
CREATE INDEX idx_tags_artifact_id ON tags(artifact_id);
CREATE INDEX idx_tags_package_created_at ON tags(package_id, created_at DESC);

-- Tag History (for rollback capability)
CREATE TABLE IF NOT EXISTS tag_history (
@@ -64,11 +78,13 @@ CREATE TABLE IF NOT EXISTS tag_history (
    tag_id UUID NOT NULL REFERENCES tags(id) ON DELETE CASCADE,
    old_artifact_id VARCHAR(64) REFERENCES artifacts(id),
    new_artifact_id VARCHAR(64) NOT NULL REFERENCES artifacts(id),
    change_type VARCHAR(20) NOT NULL DEFAULT 'update' CHECK (change_type IN ('create', 'update', 'delete')),
    changed_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    changed_by VARCHAR(255) NOT NULL
);

CREATE INDEX idx_tag_history_tag_id ON tag_history(tag_id);
CREATE INDEX idx_tag_history_changed_at ON tag_history(changed_at);
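
-- Example (illustration only): tag_history is what makes tag moves
-- reversible. A rollback re-points the tag at its previous artifact; the
-- UPDATE fires track_tag_changes (defined below), so the rollback itself is
-- recorded as another history row. The tag UUID is a placeholder, and this
-- sketch assumes the tag has at least one prior artifact change.
UPDATE tags
SET artifact_id = (
    SELECT old_artifact_id
    FROM tag_history
    WHERE tag_id = tags.id
      AND old_artifact_id IS NOT NULL
    ORDER BY changed_at DESC
    LIMIT 1
)
WHERE id = '5b3f0d2e-8c41-4f6a-9c7d-1a2b3c4d5e6f';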

-- Uploads (upload event records)
CREATE TABLE IF NOT EXISTS uploads (
@@ -76,6 +92,11 @@ CREATE TABLE IF NOT EXISTS uploads (
    artifact_id VARCHAR(64) NOT NULL REFERENCES artifacts(id),
    package_id UUID NOT NULL REFERENCES packages(id),
    original_name VARCHAR(1024),
    tag_name VARCHAR(255), -- tag assigned during upload
    user_agent VARCHAR(512), -- client identification
    duration_ms INTEGER, -- upload timing in milliseconds
    deduplicated BOOLEAN DEFAULT false, -- whether artifact was deduplicated
    checksum_verified BOOLEAN DEFAULT true, -- whether checksum was verified
    uploaded_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    uploaded_by VARCHAR(255) NOT NULL,
    source_ip VARCHAR(45)
@@ -84,6 +105,8 @@ CREATE TABLE IF NOT EXISTS uploads (

CREATE INDEX idx_uploads_artifact_id ON uploads(artifact_id);
CREATE INDEX idx_uploads_package_id ON uploads(package_id);
CREATE INDEX idx_uploads_uploaded_at ON uploads(uploaded_at);
CREATE INDEX idx_uploads_package_uploaded_at ON uploads(package_id, uploaded_at DESC);
CREATE INDEX idx_uploads_uploaded_by_at ON uploads(uploaded_by, uploaded_at DESC);

-- Consumers (Dependency tracking)
CREATE TABLE IF NOT EXISTS consumers (
@@ -141,14 +164,17 @@ CREATE INDEX idx_audit_logs_action ON audit_logs(action);
CREATE INDEX idx_audit_logs_resource ON audit_logs(resource);
CREATE INDEX idx_audit_logs_user_id ON audit_logs(user_id);
CREATE INDEX idx_audit_logs_timestamp ON audit_logs(timestamp);
CREATE INDEX idx_audit_logs_resource_timestamp ON audit_logs(resource, timestamp DESC);
CREATE INDEX idx_audit_logs_user_timestamp ON audit_logs(user_id, timestamp DESC);
CREATE INDEX idx_audit_logs_details ON audit_logs USING GIN (details);

-- Trigger to update tag history on changes
CREATE OR REPLACE FUNCTION track_tag_changes()
RETURNS TRIGGER AS $$
BEGIN
    IF TG_OP = 'UPDATE' AND OLD.artifact_id != NEW.artifact_id THEN
        INSERT INTO tag_history (id, tag_id, old_artifact_id, new_artifact_id, changed_at, changed_by)
        VALUES (gen_random_uuid(), NEW.id, OLD.artifact_id, NEW.artifact_id, NOW(), NEW.created_by);
        INSERT INTO tag_history (id, tag_id, old_artifact_id, new_artifact_id, change_type, changed_at, changed_by)
        VALUES (gen_random_uuid(), NEW.id, OLD.artifact_id, NEW.artifact_id, 'update', NOW(), NEW.created_by);
    END IF;
    RETURN NEW;
END;
@@ -158,3 +184,72 @@ CREATE TRIGGER tag_changes_trigger
AFTER UPDATE ON tags
FOR EACH ROW
EXECUTE FUNCTION track_tag_changes();

-- Trigger to auto-update updated_at timestamps
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER projects_updated_at_trigger
BEFORE UPDATE ON projects
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

CREATE TRIGGER packages_updated_at_trigger
BEFORE UPDATE ON packages
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

CREATE TRIGGER tags_updated_at_trigger
BEFORE UPDATE ON tags
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

-- Triggers for maintaining artifact ref_count accuracy
CREATE OR REPLACE FUNCTION increment_artifact_ref_count()
RETURNS TRIGGER AS $$
BEGIN
    UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION decrement_artifact_ref_count()
RETURNS TRIGGER AS $$
BEGIN
    UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
    RETURN OLD;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION update_artifact_ref_count()
RETURNS TRIGGER AS $$
BEGIN
    IF OLD.artifact_id != NEW.artifact_id THEN
        UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
        UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Note: ref_count triggers on tags table
-- These track how many tags reference each artifact
CREATE TRIGGER tags_ref_count_insert_trigger
AFTER INSERT ON tags
FOR EACH ROW
EXECUTE FUNCTION increment_artifact_ref_count();

CREATE TRIGGER tags_ref_count_delete_trigger
AFTER DELETE ON tags
FOR EACH ROW
EXECUTE FUNCTION decrement_artifact_ref_count();

CREATE TRIGGER tags_ref_count_update_trigger
AFTER UPDATE ON tags
FOR EACH ROW
EXECUTE FUNCTION update_artifact_ref_count();
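
-- Example (illustration only): since the triggers above keep ref_count equal
-- to the number of tags referencing an artifact, garbage collection reduces
-- to a scan for zero-reference rows; s3_key identifies the blob to remove
-- from storage. The 24-hour grace period is an assumed policy, not something
-- defined in this schema.
SELECT id, s3_key, size
FROM artifacts
WHERE ref_count = 0
  AND created_at < NOW() - INTERVAL '24 hours';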

170 migrations/002_schema_enhancements.sql Normal file
@@ -0,0 +1,170 @@
-- Migration 002: Schema Enhancements
-- Adds new fields, indexes, and triggers for improved functionality

-- ============================================
-- Packages: Add format and platform fields
-- ============================================
ALTER TABLE packages ADD COLUMN IF NOT EXISTS format VARCHAR(50) DEFAULT 'generic';
ALTER TABLE packages ADD COLUMN IF NOT EXISTS platform VARCHAR(50) DEFAULT 'any';

CREATE INDEX IF NOT EXISTS idx_packages_format ON packages(format);
CREATE INDEX IF NOT EXISTS idx_packages_platform ON packages(platform);

-- ============================================
-- Artifacts: Add checksum_md5, metadata, and CHECK constraints
-- ============================================
ALTER TABLE artifacts ADD COLUMN IF NOT EXISTS checksum_md5 VARCHAR(32);
ALTER TABLE artifacts ADD COLUMN IF NOT EXISTS metadata JSONB;

-- Add CHECK constraints (will fail if data violates them)
DO $$
BEGIN
    IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'artifacts_ref_count_check') THEN
        ALTER TABLE artifacts ADD CONSTRAINT artifacts_ref_count_check CHECK (ref_count >= 0);
    END IF;
    IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'artifacts_size_check') THEN
        ALTER TABLE artifacts ADD CONSTRAINT artifacts_size_check CHECK (size > 0);
    END IF;
END $$;
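
-- Example (illustration only): the pg_constraint guard above makes the ALTER
-- statements safe to re-run; a second application of this migration finds the
-- constraint names already registered and skips them. A read-only catalog
-- probe confirms both constraints exist with the expected definitions:
SELECT conname, pg_get_constraintdef(oid) AS definition
FROM pg_constraint
WHERE conrelid = 'artifacts'::regclass
  AND conname IN ('artifacts_ref_count_check', 'artifacts_size_check');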

CREATE INDEX IF NOT EXISTS idx_artifacts_metadata ON artifacts USING GIN (metadata);

-- ============================================
-- Tags: Add updated_at and composite index
-- ============================================
ALTER TABLE tags ADD COLUMN IF NOT EXISTS updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW();

CREATE INDEX IF NOT EXISTS idx_tags_package_created_at ON tags(package_id, created_at DESC);

-- ============================================
-- Tag History: Add change_type and index
-- ============================================
ALTER TABLE tag_history ADD COLUMN IF NOT EXISTS change_type VARCHAR(20) DEFAULT 'update';

DO $$
BEGIN
    IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'tag_history_change_type_check') THEN
        ALTER TABLE tag_history ADD CONSTRAINT tag_history_change_type_check
            CHECK (change_type IN ('create', 'update', 'delete'));
    END IF;
END $$;

CREATE INDEX IF NOT EXISTS idx_tag_history_changed_at ON tag_history(changed_at);

-- ============================================
-- Uploads: Add new fields and composite indexes
-- ============================================
ALTER TABLE uploads ADD COLUMN IF NOT EXISTS tag_name VARCHAR(255);
ALTER TABLE uploads ADD COLUMN IF NOT EXISTS user_agent VARCHAR(512);
ALTER TABLE uploads ADD COLUMN IF NOT EXISTS duration_ms INTEGER;
ALTER TABLE uploads ADD COLUMN IF NOT EXISTS deduplicated BOOLEAN DEFAULT false;
ALTER TABLE uploads ADD COLUMN IF NOT EXISTS checksum_verified BOOLEAN DEFAULT true;

CREATE INDEX IF NOT EXISTS idx_uploads_package_uploaded_at ON uploads(package_id, uploaded_at DESC);
CREATE INDEX IF NOT EXISTS idx_uploads_uploaded_by_at ON uploads(uploaded_by, uploaded_at DESC);

-- ============================================
-- Audit Logs: Add composite indexes and GIN index
-- ============================================
CREATE INDEX IF NOT EXISTS idx_audit_logs_resource_timestamp ON audit_logs(resource, timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_audit_logs_user_timestamp ON audit_logs(user_id, timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_audit_logs_details ON audit_logs USING GIN (details);

-- ============================================
-- Projects: Add partial index for public projects
-- ============================================
CREATE INDEX IF NOT EXISTS idx_projects_public ON projects(name) WHERE is_public = true;

-- ============================================
-- Triggers: Update tag_changes trigger for change_type
-- ============================================
CREATE OR REPLACE FUNCTION track_tag_changes()
RETURNS TRIGGER AS $$
BEGIN
    IF TG_OP = 'UPDATE' AND OLD.artifact_id != NEW.artifact_id THEN
        INSERT INTO tag_history (id, tag_id, old_artifact_id, new_artifact_id, change_type, changed_at, changed_by)
        VALUES (gen_random_uuid(), NEW.id, OLD.artifact_id, NEW.artifact_id, 'update', NOW(), NEW.created_by);
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- ============================================
-- Triggers: Auto-update updated_at timestamps
-- ============================================
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Drop triggers if they exist, then recreate
DROP TRIGGER IF EXISTS projects_updated_at_trigger ON projects;
CREATE TRIGGER projects_updated_at_trigger
BEFORE UPDATE ON projects
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

DROP TRIGGER IF EXISTS packages_updated_at_trigger ON packages;
CREATE TRIGGER packages_updated_at_trigger
BEFORE UPDATE ON packages
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

DROP TRIGGER IF EXISTS tags_updated_at_trigger ON tags;
CREATE TRIGGER tags_updated_at_trigger
BEFORE UPDATE ON tags
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

-- ============================================
-- Triggers: Maintain artifact ref_count accuracy
-- ============================================
CREATE OR REPLACE FUNCTION increment_artifact_ref_count()
RETURNS TRIGGER AS $$
BEGIN
    UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION decrement_artifact_ref_count()
RETURNS TRIGGER AS $$
BEGIN
    UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
    RETURN OLD;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION update_artifact_ref_count()
RETURNS TRIGGER AS $$
BEGIN
    IF OLD.artifact_id != NEW.artifact_id THEN
        UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
        UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Note: ref_count triggers on tags table
-- These track how many tags reference each artifact
DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
CREATE TRIGGER tags_ref_count_insert_trigger
AFTER INSERT ON tags
FOR EACH ROW
EXECUTE FUNCTION increment_artifact_ref_count();

DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
CREATE TRIGGER tags_ref_count_delete_trigger
AFTER DELETE ON tags
FOR EACH ROW
EXECUTE FUNCTION decrement_artifact_ref_count();

DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
CREATE TRIGGER tags_ref_count_update_trigger
AFTER UPDATE ON tags
FOR EACH ROW
EXECUTE FUNCTION update_artifact_ref_count();

12 migrations/003_checksum_fields.sql Normal file
@@ -0,0 +1,12 @@
-- Migration 003: Additional Checksum Fields
-- Adds checksum_sha1 and s3_etag fields to artifacts table

-- ============================================
-- Artifacts: Add checksum_sha1 and s3_etag fields
-- ============================================
ALTER TABLE artifacts ADD COLUMN IF NOT EXISTS checksum_sha1 VARCHAR(40);
ALTER TABLE artifacts ADD COLUMN IF NOT EXISTS s3_etag VARCHAR(64);

-- Create indexes for checksum lookups (optional, for verification queries)
CREATE INDEX IF NOT EXISTS idx_artifacts_checksum_md5 ON artifacts(checksum_md5) WHERE checksum_md5 IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_artifacts_checksum_sha1 ON artifacts(checksum_sha1) WHERE checksum_sha1 IS NOT NULL;
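
-- Example (illustration only): the partial indexes exclude NULL checksums, so
-- a digest lookup of this shape can be served from the index, since the
-- equality predicate implies the index's WHERE clause. The MD5 literal is a
-- placeholder value for a client-supplied hash.
SELECT id, size, checksum_sha1, s3_etag
FROM artifacts
WHERE checksum_md5 = '9e107d9d372bb6826bd81d3542a419d6';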