Compare commits

Comparing 72e988dda1...feature/pa (49 commits)

Commits in this range (author and date columns were empty in the capture; SHAs only):
ba7cd96107, 6c8b922818, 99d28cf9c6, b5579f1643, fafa03e4ce, d4b2da3232, 7b04bbdf05,
3a807870a3, f966fde7df, 133d9cbfd6, 276b4f2743, 67ac6bb3f8, b0bb3ed569, 1ac75e1017,
693613f111, 9da4ae8c0d, 7ffdc64364, 6abc0c88b0, e96dc5cde8, cba5bac383, 535280a783,
c9026e1950, fedbd95cf4, 255e25d66d, 427d2fec70, 199821b34d, 584acd1e90, f7ffc1c877,
b93d5a9c68, a98ac154d5, 823dfcb400, 0ad106a141, 5d5a054452, f3a817f8a5, f212864647,
e8f26e9976, 32162c4ec7, 1bb0c4e911, 179503c68b, 2f3c44b58e, 4b3d2fd41d, 7cfad28f67,
37666e41a7, 0cc4f25362, 5c9da9003b, 90bb2a3a39, 617bcbe89c, 1cbd335443, 10d3694794
.gitignore (vendored, 1 change)
@@ -65,3 +65,4 @@ temp/
 .claude/
 CLAUDE.md
 AGENTS.md
+PROSPER-NOTES.md
.gitlab-ci.yml (558 changes)
@@ -6,46 +6,542 @@ include:
 variables:
   # renovate: datasource=gitlab-tags depName=esv/bsf/pypi/prosper versioning=semver registryUrl=https://gitlab.global.bsf.tools
   PROSPER_VERSION: v0.64.1
+  # Use internal PyPI proxy instead of public internet
+  PIP_INDEX_URL: https://deps.global.bsf.tools/artifactory/api/pypi/pypi.org/simple
+  # Environment URLs (used by deploy and test jobs)
+  STAGE_URL: https://orchard-stage.common.global.bsf.tools
+  PROD_URL: https://orchard.common.global.bsf.tools
+  # Stage environment AWS resources (used by reset job)
+  STAGE_RDS_HOST: orchard-stage.cluster-cvw3jzjkozoc.us-gov-west-1.rds.amazonaws.com
+  STAGE_RDS_DBNAME: postgres
+  STAGE_SECRET_ARN: "arn:aws-us-gov:secretsmanager:us-gov-west-1:052673043337:secret:rds!cluster-a573672b-1a38-4665-a654-1b7df37b5297-IaeFQL"
+  STAGE_S3_BUCKET: orchard-artifacts-stage
+  AWS_REGION: us-gov-west-1
+  # Shared pip cache directory
+  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.pip-cache"
 
-kics:
-  allow_failure: true
+# Prevent duplicate pipelines for MRs
+workflow:
+  rules:
+    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+      when: never
+    - when: always
+
+# Define stages - extends Prosper's stages with our custom ones
+stages:
+  - .pre
+  - lint
+  - build
+  - test
+  - analyze
+  - deploy
+
+# Override Prosper template jobs to exclude tag pipelines
+# Tags only run deploy_prod and smoke_test_prod (image already built on main)
+build_image:
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+    - when: on_success
+
+test_image:
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+    - when: on_success
 
 hadolint:
-  allow_failure: true
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+    - when: on_success
+
+kics:
+  variables:
+    KICS_CONFIG: kics.config
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+    - when: on_success
 
 secrets:
-  allow_failure: true
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+    - when: on_success
 
-# Run Python tests
-python_tests:
-  stage: test
+app_deps_scan:
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+    - when: on_success
+
+cve_scan:
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+    - when: on_success
+
+app_sbom_analysis:
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+    - when: on_success
+
+cve_sbom_analysis:
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+    - when: on_success
+
+# Override release job to wait for stage integration tests before creating tag
+# This ensures the tag (which triggers prod deploy) is only created after stage passes
+release:
+  needs: [integration_test_stage, changelog]
+
+# Full integration test suite template (for feature/stage deployments)
+# Runs the complete pytest integration test suite against the deployed environment
+.integration_test_template: &integration_test_template
+  stage: deploy  # Runs in deploy stage, but after deployment due to 'needs'
   image: deps.global.bsf.tools/docker/python:3.12-slim
+  timeout: 20m  # Full suite takes longer than smoke tests
+  interruptible: true  # Cancel if new pipeline starts
+  retry: 1  # Retry once on failure (network flakiness)
+  cache:
+    key: pip-$CI_COMMIT_REF_SLUG
+    paths:
+      - .pip-cache/
+    policy: pull-push
   before_script:
-    - pip install -r backend/requirements.txt
-    - pip install pytest pytest-asyncio httpx
+    - pip install --index-url "$PIP_INDEX_URL" -r backend/requirements.txt
+    - pip install --index-url "$PIP_INDEX_URL" pytest pytest-asyncio httpx
   script:
     - cd backend
-    - python -m pytest -v || echo "No tests yet"
+    # Run full integration test suite, excluding:
+    #   - large/slow tests
+    #   - requires_direct_s3 tests (can't access MinIO from outside K8s cluster)
+    # ORCHARD_TEST_URL tells the tests which server to connect to
+    # Note: Auth tests work because dev/stage deployments have relaxed rate limits
+    - |
+      python -m pytest tests/integration/ -v \
+        --junitxml=integration-report.xml \
+        -m "not large and not slow and not requires_direct_s3" \
+        --tb=short
+  artifacts:
+    when: always
+    expire_in: 1 week
+    paths:
+      - backend/integration-report.xml
+    reports:
+      junit: backend/integration-report.xml
 
-# deploy_helm_charts:
-#   stage: deploy
-#   image:
-#     name: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12
-#   parallel:
-#     matrix:
-#       # - ENV: "prod"
-#       #   VALUES_FILE: "helm/values-prod.yaml"
-#       #   CONTEXT: "esv/bsf/bsf-services/gitlab-kaas-agent-config:services-prod-agent"
-#       #   NAMESPACE: "bsf-services-namespace"
-#       #   ONLY: "main"
-#       - ENV: "dev"
-#         VALUES_FILE: "helm/orchard/values.yaml"
-#         CONTEXT: "esv/bsf/bsf-services/gitlab-kaas-agent-config:services-prod-agent"
-#         NAMESPACE: "bsf-services-dev-namespace"
-#         # ONLY: ["branches", "!main"]
-#   script:
-#     - kubectl config use-context $CONTEXT
-#     - echo "Deploy - buildah push ${IMAGE_NAME}:latest"
-#     - |
-#       helm upgrade --install orchard-dev ./helm/orchard --namespace $NAMESPACE -f $VALUES_FILE
+# Lightweight smoke test template (for production - no test data creation)
+.smoke_test_template: &smoke_test_template
+  stage: deploy
+  image: deps.global.bsf.tools/docker/python:3.12-slim
+  timeout: 5m
+  before_script:
+    - pip install --index-url "$PIP_INDEX_URL" httpx
+  script:
+    - |
+      python - <<'PYTEST_SCRIPT'
+      import httpx
+      import os
+      import sys
+
+      BASE_URL = os.environ.get("ORCHARD_TEST_URL")
+      if not BASE_URL:
+          print("ERROR: ORCHARD_TEST_URL not set")
+          sys.exit(1)
+
+      print(f"Running smoke tests against {BASE_URL}")
+      client = httpx.Client(base_url=BASE_URL, timeout=30.0)
+
+      errors = []
+
+      # Test 1: Health endpoint
+      print("\n=== Test 1: Health endpoint ===")
+      r = client.get("/health")
+      if r.status_code == 200:
+          print("PASS: Health check passed")
+      else:
+          errors.append(f"Health check failed: {r.status_code}")
+
+      # Test 2: API responds (list projects)
+      print("\n=== Test 2: API responds ===")
+      r = client.get("/api/v1/projects")
+      if r.status_code == 200:
+          projects = r.json()
+          print(f"PASS: API responding, found {len(projects)} project(s)")
+      else:
+          errors.append(f"API check failed: {r.status_code}")
+
+      # Test 3: Frontend served
+      print("\n=== Test 3: Frontend served ===")
+      r = client.get("/")
+      if r.status_code == 200 and "</html>" in r.text:
+          print("PASS: Frontend is being served")
+      else:
+          errors.append(f"Frontend check failed: {r.status_code}")
+
+      # Report results
+      print("\n" + "=" * 50)
+      if errors:
+          print(f"FAILED: {len(errors)} error(s)")
+          for e in errors:
+              print(f"  FAIL: {e}")
+          sys.exit(1)
+      else:
+          print("SUCCESS: All smoke tests passed!")
+          sys.exit(0)
+      PYTEST_SCRIPT
+
+# Reset stage template - shared by pre and post test reset jobs
+# Calls the /api/v1/admin/factory-reset endpoint which handles DB and S3 cleanup
+.reset_stage_template: &reset_stage_template
+  stage: deploy
+  image: deps.global.bsf.tools/docker/python:3.12-slim
+  timeout: 5m
+  retry: 1  # Retry once on transient failures
+  before_script:
+    - pip install --index-url "$PIP_INDEX_URL" httpx
+  script:
+    - |
+      python - <<'RESET_SCRIPT'
+      import httpx
+      import sys
+      import os
+      import time
+
+      BASE_URL = os.environ.get("STAGE_URL", "")
+      ADMIN_USER = "admin"
+      ADMIN_PASS = "changeme123"  # Default admin password
+      MAX_RETRIES = 3
+      RETRY_DELAY = 5  # seconds
+
+      if not BASE_URL:
+          print("ERROR: STAGE_URL environment variable not set")
+          sys.exit(1)
+
+      print(f"=== Resetting stage environment at {BASE_URL} ===")
+
+      def do_reset():
+          with httpx.Client(base_url=BASE_URL, timeout=120.0) as client:
+              # Login as admin
+              print("Logging in as admin...")
+              login_response = client.post(
+                  "/api/v1/auth/login",
+                  json={"username": ADMIN_USER, "password": ADMIN_PASS},
+              )
+              if login_response.status_code != 200:
+                  raise Exception(f"Login failed: {login_response.status_code} - {login_response.text}")
+              print("Login successful")
+
+              # Call factory reset endpoint
+              print("Calling factory reset endpoint...")
+              reset_response = client.post(
+                  "/api/v1/admin/factory-reset",
+                  headers={"X-Confirm-Reset": "yes-delete-all-data"},
+              )
+
+              if reset_response.status_code == 200:
+                  result = reset_response.json()
+                  print("Factory reset successful!")
+                  print(f"  Database tables dropped: {result['results']['database_tables_dropped']}")
+                  print(f"  S3 objects deleted: {result['results']['s3_objects_deleted']}")
+                  print(f"  Database reinitialized: {result['results']['database_reinitialized']}")
+                  print(f"  Seeded: {result['results']['seeded']}")
+                  return True
+              else:
+                  raise Exception(f"Factory reset failed: {reset_response.status_code} - {reset_response.text}")
+
+      # Retry loop
+      for attempt in range(1, MAX_RETRIES + 1):
+          try:
+              print(f"Attempt {attempt}/{MAX_RETRIES}")
+              if do_reset():
+                  sys.exit(0)
+          except Exception as e:
+              print(f"Attempt {attempt} failed: {e}")
+              if attempt < MAX_RETRIES:
+                  print(f"Retrying in {RETRY_DELAY} seconds...")
+                  time.sleep(RETRY_DELAY)
+              else:
+                  print("All retry attempts failed")
+                  sys.exit(1)
+      RESET_SCRIPT
+  rules:
+    - if: '$CI_COMMIT_BRANCH == "main"'
+      when: on_success
+
+# Reset stage BEFORE integration tests (ensure known state)
+reset_stage_pre:
+  <<: *reset_stage_template
+  needs: [deploy_stage]
+
+# Integration tests for stage deployment (full suite)
+integration_test_stage:
+  <<: *integration_test_template
+  needs: [reset_stage_pre]
+  variables:
+    ORCHARD_TEST_URL: $STAGE_URL
+  rules:
+    - if: '$CI_COMMIT_BRANCH == "main"'
+      when: on_success
+
+# Reset stage AFTER integration tests (clean slate for next run)
+reset_stage:
+  <<: *reset_stage_template
+  needs: [integration_test_stage]
+  allow_failure: true  # Don't fail pipeline if reset has issues
+
+# Integration tests for feature deployment (full suite)
+integration_test_feature:
+  <<: *integration_test_template
+  needs: [deploy_feature]
+  variables:
+    ORCHARD_TEST_URL: https://orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools
+  rules:
+    - if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
+      when: on_success
+
+# Run Python backend unit tests
+python_unit_tests:
+  stage: test
+  needs: []  # Run in parallel with build
+  image: deps.global.bsf.tools/docker/python:3.12-slim
+  timeout: 15m
+  interruptible: true  # Cancel if new pipeline starts
+  cache:
+    key: pip-$CI_COMMIT_REF_SLUG
+    paths:
+      - .pip-cache/
+    policy: pull-push
+  before_script:
+    - pip install --index-url "$PIP_INDEX_URL" -r backend/requirements.txt
+    - pip install --index-url "$PIP_INDEX_URL" pytest pytest-asyncio pytest-cov httpx
+  script:
+    - cd backend
+    # Run unit tests (integration tests run post-deployment against live environment)
+    - python -m pytest tests/unit/ -v --cov=app --cov-report=term --cov-report=xml:coverage.xml --cov-report=html:coverage_html --junitxml=pytest-report.xml
+  artifacts:
+    when: always
+    expire_in: 1 week
+    paths:
+      - backend/coverage.xml
+      - backend/coverage_html/
+      - backend/pytest-report.xml
+    reports:
+      junit: backend/pytest-report.xml
+      coverage_report:
+        coverage_format: cobertura
+        path: backend/coverage.xml
+  coverage: '/TOTAL.*\s+(\d+%)/'
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+    - when: on_success
+
+# Run frontend tests
+frontend_tests:
+  stage: test
+  needs: []  # Run in parallel with build
+  image: deps.global.bsf.tools/docker/node:20-alpine
+  timeout: 15m
+  interruptible: true  # Cancel if new pipeline starts
+  cache:
+    key: npm-$CI_COMMIT_REF_SLUG
+    paths:
+      - frontend/node_modules/
+    policy: pull-push
+  before_script:
+    - cd frontend
+    - npm config set registry https://deps.global.bsf.tools/artifactory/api/npm/registry.npmjs.org
+    - npm ci --verbose
+  script:
+    - npm run test -- --run --reporter=verbose --coverage
+  artifacts:
+    when: always
+    expire_in: 1 week
+    paths:
+      - frontend/coverage/
+    reports:
+      coverage_report:
+        coverage_format: cobertura
+        path: frontend/coverage/cobertura-coverage.xml
+  coverage: '/All files[^|]*\|[^|]*\s+([\d\.]+)/'
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+    - when: on_success
+
+# Shared deploy configuration
+.deploy_template: &deploy_template
+  stage: deploy
+  needs: [build_image, test_image, kics, hadolint, python_unit_tests, frontend_tests, secrets, app_deps_scan, cve_scan, cve_sbom_analysis, app_sbom_analysis]
+  image: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12
+
+.helm_setup: &helm_setup
+  - helm version
+  - cd helm/orchard
+  # OCI-based charts from internal registry - no repo add needed
+  - helm dependency update
+
+# Simplified deployment verification - just health check
+# Full API/frontend checks are done by integration tests post-deployment
+.verify_deployment: &verify_deployment |
+  echo "=== Waiting for health endpoint (certs may take a few minutes) ==="
+  for i in $(seq 1 30); do
+    if curl -sf --max-time 10 "$BASE_URL/health" > /dev/null 2>&1; then
+      echo "Health check passed!"
+      echo "Deployment URL: $BASE_URL"
+      exit 0
+    fi
+    echo "Attempt $i/30 - waiting 10s..."
+    sleep 10
+  done
+  echo "Health check failed after 30 attempts"
+  exit 1
+
+# Deploy to stage (main branch)
+deploy_stage:
+  <<: *deploy_template
+  variables:
+    NAMESPACE: orch-stage-namespace
+    VALUES_FILE: helm/orchard/values-stage.yaml
+    BASE_URL: $STAGE_URL
+  before_script:
+    - kubectl config use-context esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
+    - *helm_setup
+  script:
+    - echo "Deploying to stage environment"
+    - cd $CI_PROJECT_DIR
+    - |
+      helm upgrade --install orchard-stage ./helm/orchard \
+        --namespace $NAMESPACE \
+        -f $VALUES_FILE \
+        --set image.tag=git.linux-amd64-$CI_COMMIT_SHA \
+        --wait \
+        --atomic \
+        --timeout 10m
+    - kubectl rollout status deployment/orchard-stage-server -n $NAMESPACE --timeout=10m
+    - *verify_deployment
+  environment:
+    name: stage
+    url: $STAGE_URL
+    kubernetes:
+      agent: esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
+  rules:
+    - if: '$CI_COMMIT_BRANCH == "main"'
+      when: on_success
+
+# Deploy feature branch to dev namespace
+deploy_feature:
+  <<: *deploy_template
+  variables:
+    NAMESPACE: orch-dev-namespace
+    VALUES_FILE: helm/orchard/values-dev.yaml
+  before_script:
+    - kubectl config use-context esv/bsf/bsf-integration/orchard/orchard-mvp:orchard
+    - *helm_setup
+  script:
+    - echo "Deploying feature branch $CI_COMMIT_REF_SLUG"
+    - cd $CI_PROJECT_DIR
+    - |
+      helm upgrade --install orchard-$CI_COMMIT_REF_SLUG ./helm/orchard \
+        --namespace $NAMESPACE \
+        -f $VALUES_FILE \
+        --set image.tag=git.linux-amd64-$CI_COMMIT_SHA \
+        --set ingress.hosts[0].host=orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools \
+        --set ingress.tls[0].hosts[0]=orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools \
+        --set ingress.tls[0].secretName=orchard-$CI_COMMIT_REF_SLUG-tls \
+        --set minioIngress.host=minio-$CI_COMMIT_REF_SLUG.common.global.bsf.tools \
+        --set minioIngress.tls.secretName=minio-$CI_COMMIT_REF_SLUG-tls \
+        --wait \
+        --atomic \
+        --timeout 10m
+    - kubectl rollout status deployment/orchard-$CI_COMMIT_REF_SLUG-server -n $NAMESPACE --timeout=10m
+    - export BASE_URL="https://orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools"
+    - *verify_deployment
+  environment:
+    name: review/$CI_COMMIT_REF_SLUG
+    url: https://orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools
+    on_stop: cleanup_feature
+    auto_stop_in: 1 week
+    kubernetes:
+      agent: esv/bsf/bsf-integration/orchard/orchard-mvp:orchard
+  rules:
+    - if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
+      when: on_success
+
+# Cleanup feature branch deployment (standalone - doesn't need deploy dependencies)
+cleanup_feature:
+  stage: deploy
+  needs: []
+  image: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12
+  timeout: 5m
+  variables:
+    NAMESPACE: orch-dev-namespace
+    GIT_STRATEGY: none  # No source needed, branch may be deleted
+  before_script:
+    - kubectl config use-context esv/bsf/bsf-integration/orchard/orchard-mvp:orchard
+  script:
+    - echo "Cleaning up feature deployment orchard-$CI_COMMIT_REF_SLUG"
+    - helm uninstall orchard-$CI_COMMIT_REF_SLUG --namespace $NAMESPACE || true
+  environment:
+    name: review/$CI_COMMIT_REF_SLUG
+    action: stop
+    kubernetes:
+      agent: esv/bsf/bsf-integration/orchard/orchard-mvp:orchard
+  rules:
+    - if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
+      when: manual
+      allow_failure: true
+
+# Deploy to production (version tags only)
+deploy_prod:
+  stage: deploy
+  # For tag pipelines, no other jobs run - image was already built when commit was on main
+  needs: []
+  image: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12
+  variables:
+    NAMESPACE: orch-namespace
+    VALUES_FILE: helm/orchard/values-prod.yaml
+    BASE_URL: $PROD_URL
+  before_script:
+    - kubectl config use-context esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-prod
+    - *helm_setup
+  script:
+    - echo "Deploying to PRODUCTION - version $CI_COMMIT_TAG"
+    - cd $CI_PROJECT_DIR
+    - |
+      helm upgrade --install orchard-prod ./helm/orchard \
+        --namespace $NAMESPACE \
+        -f $VALUES_FILE \
+        --set image.tag=git.linux-amd64-$CI_COMMIT_SHA \
+        --wait \
+        --atomic \
+        --timeout 10m
+    - kubectl rollout status deployment/orchard-prod-server -n $NAMESPACE --timeout=10m
+    - *verify_deployment
+  environment:
+    name: production
+    url: $PROD_URL
+    kubernetes:
+      agent: esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-prod
+  rules:
+    # Only run on semantic version tags (v1.0.0, v1.2.3, etc.)
+    - if: '$CI_COMMIT_TAG =~ /^v\d+\.\d+\.\d+$/'
+      when: on_success
+      allow_failure: false
+
+# Smoke tests for production deployment (read-only, no test data creation)
+smoke_test_prod:
+  <<: *smoke_test_template
+  needs: [deploy_prod]
+  variables:
+    ORCHARD_TEST_URL: $PROD_URL
+  rules:
+    - if: '$CI_COMMIT_TAG =~ /^v\d+\.\d+\.\d+$/'
+      when: on_success
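Note: the reset jobs above drive all cleanup through the API rather than touching RDS or S3 directly. A minimal sketch of the same call run from a workstation, assuming the stage URL and the default admin credentials from the pipeline config are still valid (both are assumptions):

```python
# Sketch of the factory reset the CI job performs, run outside CI.
# Assumes: ORCHARD_URL points at a dev/stage deployment and the default
# admin credentials from the pipeline config have not been rotated.
import os
import httpx

base_url = os.environ["ORCHARD_URL"]  # e.g. the stage URL from the CI variables
with httpx.Client(base_url=base_url, timeout=120.0) as client:
    # The session cookie from login authenticates the follow-up admin call
    r = client.post("/api/v1/auth/login",
                    json={"username": "admin", "password": "changeme123"})
    r.raise_for_status()
    # The confirmation header is required; without it the endpoint refuses to run
    r = client.post("/api/v1/admin/factory-reset",
                    headers={"X-Confirm-Reset": "yes-delete-all-data"})
    r.raise_for_status()
    print(r.json()["results"])
```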
.gitlab/.gitkeep (new file, empty)

.gitlab/agents/orchard-stage/config.yaml (new file)
@@ -0,0 +1,4 @@
+# GitLab Agent configuration for stage deployments
+ci_access:
+  projects:
+    - id: esv/bsf/bsf-integration/orchard/orchard-mvp

.gitlab/agents/orchard/config.yaml (new file)
@@ -0,0 +1,4 @@
+# GitLab Agent configuration for dev/feature deployments
+ci_access:
+  projects:
+    - id: esv/bsf/bsf-integration/orchard/orchard-mvp

.gitlab/orchard/.gitkeep (new file, empty)

.gitlab/orchard/config.yaml (new file)
@@ -0,0 +1,3 @@
+ci_access:
+  projects:
+    - id: esv/bsf/bsf-integration/orchard/orchard-mvp

.gitleaks.toml (new file)
@@ -0,0 +1,8 @@
+# Gitleaks configuration
+# https://github.com/gitleaks/gitleaks#configuration
+
+[allowlist]
+# Test files that contain variable names matching secret patterns (e.g., s3_key)
+paths = [
+    '''backend/tests/.*\.py''',
+]

.gitleaksignore (new file)
@@ -0,0 +1,19 @@
+# Gitleaks ignore file
+# https://github.com/gitleaks/gitleaks#gitleaksignore
+#
+# False positive: s3_key is an attribute name in test assertions, not a secret
+# These are historical commits - files have since been deleted or updated with inline comments
+7e68baed0886a3c928644cd01aa3b39f92d4f976:backend/tests/test_duplicate_detection.py:generic-api-key:154
+81458b3bcb5ace97109ba4c16f4afa6e55b1b8bd:backend/tests/test_duplicate_detection.py:generic-api-key:154
+2f1891cf0126ec0e7d4c789d872a2cb2dd3a1745:backend/tests/unit/test_storage.py:generic-api-key:381
+10d36947948de796f0bacea3827f4531529c405d:backend/tests/unit/test_storage.py:generic-api-key:381
+bccbc71c13570d14b8b26a11335c45f102fe3072:backend/tests/unit/test_storage.py:generic-api-key:381
+5c9da9003b844a2d655cce74a7c82c57e74f27c4:backend/tests/unit/test_storage.py:generic-api-key:381
+90bb2a3a393d2361dc3136ee8d761debb0726d8a:backend/tests/unit/test_storage.py:generic-api-key:381
+37666e41a72d2a4f34447c0d1a8728e1d7271d24:backend/tests/unit/test_storage.py:generic-api-key:381
+0cc4f253621a9601c5193f6ae1e7ae33f0e7fc9b:backend/tests/unit/test_storage.py:generic-api-key:381
+35fda65d381acc5ab59bc592ee3013f75906c197:backend/tests/unit/test_storage.py:generic-api-key:381
+08dce6cbb836b687002751fed4159bfc2da61f8b:backend/tests/unit/test_storage.py:generic-api-key:381
+617bcbe89cff9a009d77e4f1f1864efed1820e63:backend/tests/unit/test_storage.py:generic-api-key:381
+1cbd33544388e0fe6db752fa8886fab33cf9ce7c:backend/tests/unit/test_storage.py:generic-api-key:381
+7cfad28f678f5a5b8b927d694a17b9ba446b7138:backend/tests/unit/test_storage.py:generic-api-key:381
CHANGELOG.md (234 changes)
@@ -7,6 +7,240 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 ### Added
+- Added Project Settings page accessible to project admins (#65)
+  - General settings section for editing description and visibility
+  - Access Management section (moved from project page)
+  - Danger Zone section with inline delete confirmation requiring project name
+  - Settings button (gear icon) on project page header for admins
+- Added artifact dependency management system (#76, #77, #78, #79, #80, #81)
+  - `artifact_dependencies` table with version/tag constraints and check constraints
+  - `ArtifactDependency` SQLAlchemy model with indexes for fast lookups
+  - Ensure file parsing (`orchard.ensure` YAML format) during artifact upload
+  - Circular dependency detection at upload time (rejected with 400)
+  - Dependency conflict detection at resolution time (409 with conflict details)
+- Added dependency API endpoints (#78, #79):
+  - `GET /api/v1/artifact/{artifact_id}/dependencies` - Get dependencies by artifact ID
+  - `GET /api/v1/project/{project}/{package}/+/{ref}/dependencies` - Get dependencies by ref
+  - `GET /api/v1/project/{project}/{package}/reverse-dependencies` - Get reverse dependencies (paginated)
+  - `GET /api/v1/project/{project}/{package}/+/{ref}/resolve` - Resolve full dependency tree
+- Added dependency resolution with topological sorting (#79)
+  - Returns flat list of all artifacts needed in dependency order
+  - Includes download URLs, sizes, and version info for each artifact
+- Added frontend dependency visualization (#84, #85, #86):
+  - Dependencies section on package page showing direct dependencies for selected tag
+  - Tag/version selector to switch between artifacts
+  - "Used By" section showing reverse dependencies with pagination
+  - Interactive dependency graph modal with:
+    - Tree visualization with collapsible nodes
+    - Zoom (mouse wheel + buttons) and pan (click-drag)
+    - Click to navigate to package
+    - Hover tooltip with package details
+  - Error display for circular dependencies and conflicts
+- Added migration `008_artifact_dependencies.sql` for dependency schema
+- Added `dependencies.py` module with parsing, validation, and resolution logic
+- Added comprehensive integration tests for all dependency features
+
+### Changed
+- Added pre-test stage reset to ensure known environment state before integration tests (#54)
+- Upload endpoint now accepts optional `ensure` file parameter for declaring dependencies
+- Updated upload API documentation with ensure file format and examples
+
+## [0.5.1] - 2026-01-23
+### Changed
+- Simplified tag pipeline to only run deploy and smoke tests (image already built on main) (#54)
+
+### Fixed
+- Fixed production CI deployment namespace to use correct `orch-namespace` (#54)
+- Added gitleaks config to allowlist test files from secret scanning (#54)
+
+## [0.5.0] - 2026-01-23
+### Added
+- Added factory reset endpoint `POST /api/v1/admin/factory-reset` for test environment cleanup (#54)
+  - Requires admin authentication and `X-Confirm-Reset: yes-delete-all-data` header
+  - Drops all database tables, clears S3 bucket, reinitializes schema, re-seeds default data
+  - CI pipeline automatically calls this after integration tests on stage
+- Added `delete_all()` method to storage backend for bulk S3 object deletion (#54)
+- Added AWS Secrets Manager CSI driver support for database credentials (#54)
+- Added SecretProviderClass template for Secrets Manager integration (#54)
+- Added IRSA service account annotations for prod and stage environments (#54)
+- Added comprehensive upload/download tests for size boundaries (1B to 1GB) (#38)
+- Added concurrent upload/download tests (2, 5, 10 parallel operations) (#38)
+- Added data integrity tests (binary, text, unicode, compressed content) (#38)
+- Added chunk boundary tests for edge cases (#38)
+- Added `@pytest.mark.large` and `@pytest.mark.concurrent` test markers (#38)
+- Added `generate_content()` and `generate_content_with_hash()` test helpers (#38)
+- Added `sized_content` fixture for generating test content of specific sizes (#38)
+- Added upload API tests: upload without tag, artifact creation verification, S3 object creation (#38)
+- Added download API tests: tag: prefix resolution, 404 for nonexistent project/package/artifact (#38)
+- Added download header tests: Content-Type, Content-Length, Content-Disposition, ETag, X-Checksum-SHA256 (#38)
+- Added error handling tests: timeout behavior, checksum validation, resource cleanup, graceful error responses (#38)
+- Added version API tests: version creation, auto-detection, listing, download by version prefix (#38)
+- Added integrity verification tests: round-trip hash verification, client-side verification workflow, size variants (1KB-10MB) (#40)
+- Added consistency check endpoint tests with response format validation (#40)
+- Added corruption detection tests: bit flip, truncation, appended content, size mismatch, missing S3 objects (#40)
+- Added Digest header tests (RFC 3230) and verification mode tests (#40)
+- Added integrity verification documentation (`docs/integrity-verification.md`) (#40)
+- Added conditional request support for downloads (If-None-Match, If-Modified-Since) returning 304 Not Modified (#42)
+- Added caching headers to downloads: Cache-Control (immutable), Last-Modified (#42)
+- Added 416 Range Not Satisfiable response for invalid range requests (#42)
+- Added download completion logging with bytes transferred and throughput (#42)
+- Added client disconnect handling during streaming downloads (#42)
+- Added streaming download tests: range requests, conditional requests, caching headers, download resume (#42)
+- Added upload duration and throughput metrics (`duration_ms`, `throughput_mbps`) to upload response (#43)
+- Added upload progress logging for large files (hash computation and multipart upload phases) (#43)
+- Added client disconnect handling during uploads with proper cleanup (#43)
+- Added upload progress tracking endpoint `GET /upload/{upload_id}/progress` for resumable uploads (#43)
+- Added large file upload tests (10MB, 100MB, 1GB) with multipart upload verification (#43)
+- Added upload cancellation and timeout handling tests (#43)
+- Added comprehensive API documentation for upload endpoints with curl, Python, and JavaScript examples (#43)
+- Added `package_versions` table for immutable version tracking separate from mutable tags (#56)
+  - Versions are set at upload time via explicit `version` parameter or auto-detected from filename/metadata
+  - Version detection priority: explicit parameter > package metadata > filename pattern
+  - Versions are immutable once created (unlike tags which can be moved)
+- Added version API endpoints (#56):
+  - `GET /api/v1/project/{project}/{package}/versions` - List all versions for a package
+  - `GET /api/v1/project/{project}/{package}/versions/{version}` - Get specific version details
+  - `DELETE /api/v1/project/{project}/{package}/versions/{version}` - Delete a version (admin only)
+- Added version support to upload endpoint via `version` form parameter (#56)
+- Added `version:X.Y.Z` prefix for explicit version resolution in download refs (#56)
+- Added version field to tag responses (shows which version the artifact has, if any) (#56)
+- Added migration `007_package_versions.sql` with ref_count triggers and data migration from semver tags (#56)
+- Added production deployment job triggered by semantic version tags (v1.0.0) with manual approval gate (#63)
+- Added production Helm values file with persistence enabled (20Gi PostgreSQL, 100Gi MinIO) (#63)
+- Added integration tests for production deployment (#63)
+- Added GitLab CI pipeline for feature branch deployments to dev namespace (#51)
+- Added `deploy_feature` job with dynamic hostnames and unique release names (#51)
+- Added `cleanup_feature` job with `on_stop` for automatic cleanup on merge (#51)
+- Added `values-dev.yaml` Helm values for lightweight ephemeral environments (#51)
+- Added main branch deployment to stage environment (#51)
+- Added post-deployment integration tests (#51)
+- Added internal proxy configuration for npm, pip, helm, and apt (#51)
+
+### Changed
+- Configured stage and prod to use AWS RDS instead of PostgreSQL subchart (#54)
+- Configured stage and prod to use AWS S3 instead of MinIO subchart (#54)
+- Changed prod deployment from manual to automatic on version tags (#54)
+- Updated S3 client to support IRSA credentials when no explicit keys provided (#54)
+- Changed prod image pullPolicy to Always (#54)
+- Added proxy-body-size annotation to prod ingress for large uploads (#54)
+- CI integration tests now run full pytest suite (~350 tests) against deployed environment instead of 3 smoke tests
+- CI production deployment uses lightweight smoke tests only (no test data creation in prod)
+- CI pipeline improvements: shared pip cache, `interruptible` flag on test jobs, retry on integration tests
+- Simplified deploy verification to health check only (full checks done by integration tests)
+- Extracted environment URLs to global variables for maintainability
+- Made `cleanup_feature` job standalone (no longer inherits deploy template dependencies)
+- Renamed `integration_test_prod` to `smoke_test_prod` for clarity
+- Updated download ref resolution to check versions before tags (version → tag → artifact ID) (#56)
+- Deploy jobs now require all security scans to pass before deployment (added test_image, app_deps_scan, cve_scan, cve_sbom_analysis, app_sbom_analysis to dependencies) (#63)
+- Increased deploy job timeout from 5m to 10m (#63)
+- Added `--atomic` flag to Helm deployments for automatic rollback on failure
+- Adjusted dark mode color palette to use lighter background tones for better readability and reduced eye strain (#52)
+- Replaced project card grid with sortable data table on Home page for better handling of large project lists
+- Replaced package card grid with sortable data table on Project page for consistency
+- Replaced SortDropdown with table header sorting on Package page for consistency
+- Enabled sorting on supported table columns (name, created, updated) via clickable headers
+- Updated browser tab title to "Orchard" with custom favicon
+- Improved pod naming: Orchard pods now named `orchard-{env}-server-*` for clarity (#51)
+
+### Fixed
+- Fixed factory reset not creating default admin user after reset (#60)
+  - Admin user was only created at server startup, not after factory reset
+  - CI reset job would fail to login because admin user didn't exist
+- Improved reset_stage CI job reliability (#60)
+  - Added application-level retry logic (3 attempts with 5s delay)
+  - Added job-level retry for transient failures
+  - Fixed httpx client to use proper context manager
+  - Increased timeout to 120s for reset operations
+- Fixed CI integration test rate limiting: added configurable `ORCHARD_LOGIN_RATE_LIMIT` env var, relaxed to 1000/minute for dev/stage
+- Fixed duplicate `TestSecurityEdgeCases` class definition in test_auth_api.py
+- Fixed integration tests auth: session-scoped client, configurable credentials via env vars, fail-fast on auth errors
+- Fixed 413 Request Entity Too Large errors on uploads by adding `proxy-body-size: "0"` nginx annotation to Orchard ingress
+- Fixed CI tests that require direct S3 access: added `@pytest.mark.requires_direct_s3` marker and excluded from CI
+- Fixed ref_count triggers not being created: added auto-migration for tags ref_count trigger functions
+- Fixed Content-Disposition header encoding for non-ASCII filenames using RFC 5987 (#38)
+- Fixed deploy jobs running even when tests or security scans fail (changed rules from `when: always` to `when: on_success`) (#63)
+- Fixed python_tests job not using internal PyPI proxy (#63)
+- Fixed `cleanup_feature` job failing when branch is deleted (`GIT_STRATEGY: none`) (#51)
+- Fixed gitleaks false positives with fingerprints for historical commits (#51)
+- Fixed integration tests running when deploy fails (`when: on_success`) (#51)
+- Fixed static file serving for favicon and other files in frontend dist root
+- Fixed deploy jobs running when secrets scan fails (added `secrets` to deploy dependencies)
+- Fixed dev environment memory requests to equal limits per cluster Kyverno policy
+- Fixed init containers missing resource limits (Kyverno policy compliance)
+- Fixed Python SyntaxWarning for invalid escape sequence in database migration regex pattern
+
+### Removed
+- Removed unused `store_streaming()` method from storage.py (#51)
+- Disabled PostgreSQL subchart for stage and prod environments (#54)
+- Disabled MinIO subchart for stage and prod environments (#54)
+
+## [0.4.0] - 2026-01-12
+### Added
+- Added user authentication system with session-based login (#50)
+  - `users` table with password hashing (bcrypt), admin flag, active status
+  - `sessions` table for web login sessions (24-hour expiry)
+  - `auth_settings` table for future OIDC configuration
+  - Default admin user created on first boot (username: admin, password: admin)
+- Added auth API endpoints (#50)
+  - `POST /api/v1/auth/login` - Login with username/password
+  - `POST /api/v1/auth/logout` - Logout and clear session
+  - `GET /api/v1/auth/me` - Get current user info
+  - `POST /api/v1/auth/change-password` - Change own password
+- Added API key management with user ownership (#50)
+  - `POST /api/v1/auth/keys` - Create API key (format: `orch_<random>`)
+  - `GET /api/v1/auth/keys` - List user's API keys
+  - `DELETE /api/v1/auth/keys/{id}` - Revoke API key
+  - Added `owner_id`, `scopes`, `description` columns to `api_keys` table
+- Added admin user management endpoints (#50)
+  - `GET /api/v1/admin/users` - List all users
+  - `POST /api/v1/admin/users` - Create user
+  - `GET /api/v1/admin/users/{username}` - Get user details
+  - `PUT /api/v1/admin/users/{username}` - Update user (admin/active status)
+  - `POST /api/v1/admin/users/{username}/reset-password` - Reset password
+- Added `auth.py` module with AuthService class and FastAPI dependencies (#50)
+- Added auth schemas: LoginRequest, LoginResponse, UserResponse, APIKeyResponse (#50)
+- Added migration `006_auth_tables.sql` for auth database tables (#50)
+- Added frontend Login page with session management (#50)
+- Added frontend API Keys management page (#50)
+- Added frontend Admin Users page (admin-only) (#50)
+- Added AuthContext for frontend session state (#50)
+- Added user menu to Layout header with login/logout (#50)
+- Added 15 integration tests for auth system (#50)
+- Added reusable `DragDropUpload` component for artifact uploads (#8)
+  - Drag-and-drop file selection with visual feedback
+  - Click-to-browse fallback
+  - Multiple file upload support with queue management
+  - Real-time progress indicators with speed and ETA
+  - File type and size validation (configurable)
+  - Concurrent upload handling (configurable max concurrent)
+  - Automatic retry with exponential backoff for network errors
+  - Individual file status (pending, uploading, complete, failed)
+  - Retry and remove actions per file
+  - Auto-dismiss success messages after 5 seconds
+- Integrated DragDropUpload into PackagePage replacing basic file input (#8)
+- Added frontend testing infrastructure with Vitest and React Testing Library (#14)
+  - Configured Vitest for React/TypeScript with jsdom
+  - Added 24 unit tests for DragDropUpload component
+  - Tests cover: rendering, drag-drop events, file validation, upload queue, progress, errors
+- Added chunked upload support for large files (#9)
+  - Files >100MB automatically use chunked upload API (10MB chunks)
+  - Client-side SHA256 hash computation via Web Crypto API
+  - localStorage persistence for resume after browser close
+  - Deduplication check at upload init phase
+- Added offline detection and network resilience (#12)
+  - Automatic pause when browser goes offline
+  - Auto-resume when connection restored
+  - Offline banner UI with status message
+  - XHR abort on network loss to prevent hung requests
+- Added download by artifact ID feature (#10)
+  - Direct artifact ID input field on package page
+  - Hex-only input validation with character count
+  - File size and filename displayed in tag list
+- Added backend security tests (#15)
+  - Path traversal prevention tests for upload/download
+  - Malformed request handling tests
+  - Checksum validation tests
+  - 10 new security-focused integration tests
 - Added download verification with `verify` and `verify_mode` query parameters (#26)
   - `?verify=true&verify_mode=pre` - Pre-verification: verify before streaming (guaranteed no corrupt data)
   - `?verify=true&verify_mode=stream` - Streaming verification: verify while streaming (logs error if mismatch)
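Note: the conditional-request support listed above (If-None-Match / If-Modified-Since returning 304 Not Modified) can be exercised with any HTTP client. A sketch with httpx, where the download path is hypothetical, modeled on the `/+/{ref}` endpoints elsewhere in this changelog:

```python
# Sketch of a cache-revalidation round trip against a download endpoint.
# The URL shape below is an assumption; only the ETag/If-None-Match/304
# behavior is documented in the changelog entries above.
import httpx

url = "http://localhost:8080/api/v1/project/my-project/releases/+/latest"

first = httpx.get(url, follow_redirects=True)
etag = first.headers.get("ETag")

# A replay carrying If-None-Match should come back 304 with no body
second = httpx.get(url, headers={"If-None-Match": etag} if etag else {})
print(second.status_code)  # expected: 304 when the artifact is unchanged
```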
Dockerfile (14 changes)
@@ -1,7 +1,7 @@
 # Frontend build stage
 FROM containers.global.bsf.tools/node:20-alpine AS frontend-builder
 
-ARG NPM_REGISTRY=https://deps.global.bsf.tools/artifactory/api/npm/registry.npmjs.org/
+ARG NPM_REGISTRY=https://deps.global.bsf.tools/artifactory/api/npm/registry.npmjs.org
 
 WORKDIR /app/frontend
 
@@ -21,10 +21,18 @@ RUN npm run build
 # Runtime stage
 FROM containers.global.bsf.tools/python:3.12-slim
 
+ARG PIP_INDEX_URL=https://deps.global.bsf.tools/artifactory/api/pypi/pypi.org/simple
+
+# Configure apt to use internal Debian mirrors only (trixie = Debian testing)
+RUN printf 'deb https://deps.global.bsf.tools/artifactory/deb.debian.org-debian trixie main\n\
+deb https://deps.global.bsf.tools/artifactory/security.debian.org-debian-security trixie-security main\n' > /etc/apt/sources.list \
+    && rm -rf /etc/apt/sources.list.d/* /var/lib/apt/lists/*
+
 # Disable proxy cache
-RUN echo 'Acquire::http::Pipeline-Depth 0;\nAcquire::http::No-Cache true;\nAcquire::BrokenProxy true;\n' > /etc/apt/apt.conf.d/99fixbadproxy
+RUN printf 'Acquire::http::Pipeline-Depth 0;\nAcquire::http::No-Cache true;\nAcquire::BrokenProxy true;\n' > /etc/apt/apt.conf.d/99fixbadproxy
 
 # Install system dependencies
+# hadolint ignore=DL3008
 RUN apt-get update && apt-get install -y --no-install-recommends \
     curl \
     && rm -rf /var/lib/apt/lists/*
@@ -37,7 +45,7 @@ WORKDIR /app
 
 # Copy requirements and install Python dependencies
 COPY backend/requirements.txt .
-RUN pip install --no-cache-dir -r requirements.txt
+RUN pip install --no-cache-dir --index-url "$PIP_INDEX_URL" -r requirements.txt
 
 # Copy backend source
 COPY backend/ ./backend/
README.md (119 changes)
@@ -22,6 +22,7 @@ Orchard is a centralized binary artifact storage system that provides content-ad
 - **Package** - Named collection within a project
 - **Artifact** - Specific content instance identified by SHA256
 - **Tags** - Alias system for referencing artifacts by human-readable names (e.g., `v1.0.0`, `latest`, `stable`)
+- **Versions** - Immutable version records set at upload time (explicit or auto-detected from filename/metadata), separate from mutable tags
 - **Package Formats & Platforms** - Packages can be tagged with format (npm, pypi, docker, deb, rpm, etc.) and platform (linux, darwin, windows, etc.)
 - **Rich Package Metadata** - Package listings include aggregated stats (tag count, artifact count, total size, latest tag)
 - **S3-Compatible Backend** - Uses MinIO (or any S3-compatible storage) for artifact storage
@@ -46,6 +47,12 @@ Orchard is a centralized binary artifact storage system that provides content-ad
 - `.whl` - Python wheels (name, version, author)
 - `.jar` - Java JARs (manifest info, Maven coordinates)
 - `.zip` - ZIP files (file count, uncompressed size)
+- **Authentication** - Multiple authentication methods:
+  - Session-based login with username/password
+  - API keys for programmatic access (`orch_` prefixed tokens)
+  - OIDC integration for SSO
+  - Admin user management
+- **Garbage Collection** - Clean up orphaned artifacts (ref_count=0) via admin API
 
 ### API Endpoints
 
@@ -67,6 +74,9 @@ Orchard is a centralized binary artifact storage system that provides content-ad
 | `POST` | `/api/v1/project/:project/:package/tags` | Create a tag |
 | `GET` | `/api/v1/project/:project/:package/tags/:tag_name` | Get single tag with artifact metadata |
 | `GET` | `/api/v1/project/:project/:package/tags/:tag_name/history` | Get tag change history |
+| `GET` | `/api/v1/project/:project/:package/versions` | List all versions for a package |
+| `GET` | `/api/v1/project/:project/:package/versions/:version` | Get specific version details |
+| `DELETE` | `/api/v1/project/:project/:package/versions/:version` | Delete a version (admin only) |
 | `GET` | `/api/v1/project/:project/:package/artifacts` | List artifacts in package (with filtering) |
 | `GET` | `/api/v1/project/:project/:package/consumers` | List consumers of a package |
 | `GET` | `/api/v1/artifact/:id` | Get artifact metadata with referencing tags |
@@ -87,12 +97,14 @@ For large files, use the resumable upload API:
 
 When downloading artifacts, the `:ref` parameter supports multiple formats:
 
-- `latest` - Tag name directly
-- `v1.0.0` - Version tag
+- `latest` - Implicit lookup (checks version first, then tag, then artifact ID)
+- `v1.0.0` - Implicit lookup (version takes precedence over tag with same name)
+- `version:1.0.0` - Explicit version reference
 - `tag:stable` - Explicit tag reference
-- `version:2024.1` - Version reference
 - `artifact:a3f5d8e12b4c6789...` - Direct SHA256 hash reference
 
+**Resolution order for implicit refs:** version → tag → artifact ID
+
 ## Quick Start
 
 ### Prerequisites
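Note: since bare refs now resolve version-first, the explicit prefixes above are the unambiguous way to pin a lookup. A minimal sketch with httpx, assuming a hypothetical download path of the `/+/{ref}` form (the exact download route is not shown in this diff):

```python
# Sketch: explicit ref prefixes sidestep the implicit version -> tag -> artifact
# resolution order. The download path shape is an assumption, not confirmed here.
import httpx

base = "http://localhost:8080/api/v1/project/my-project/releases"

# Explicit prefixes always hit exactly one namespace
for ref in ("version:1.0.0", "tag:stable", "artifact:a3f5d8e12b4c6789"):
    r = httpx.get(f"{base}/+/{ref}", follow_redirects=True)
    print(ref, "->", r.status_code)

# A bare ref like "v1.0.0" goes through the implicit order:
# version first, then tag, then artifact ID
r = httpx.get(f"{base}/+/v1.0.0", follow_redirects=True)
```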
@@ -224,9 +236,16 @@ curl "http://localhost:8080/api/v1/project/my-project/packages/releases?include_
 ### Upload an Artifact
 
 ```bash
+# Upload with tag only (version auto-detected from filename)
 curl -X POST http://localhost:8080/api/v1/project/my-project/releases/upload \
   -F "file=@./build/app-v1.0.0.tar.gz" \
-  -F "tag=v1.0.0"
+  -F "tag=latest"
+
+# Upload with explicit version and tag
+curl -X POST http://localhost:8080/api/v1/project/my-project/releases/upload \
+  -F "file=@./build/app-v1.0.0.tar.gz" \
+  -F "tag=latest" \
+  -F "version=1.0.0"
 ```
 
 Response:
@@ -236,7 +255,9 @@ Response:
   "size": 1048576,
   "project": "my-project",
   "package": "releases",
-  "tag": "v1.0.0",
+  "tag": "latest",
+  "version": "1.0.0",
+  "version_source": "explicit",
   "format_metadata": {
     "format": "tarball",
     "package_name": "app",
@@ -394,6 +415,38 @@ curl http://localhost:8080/api/v1/project/my-project/releases/tags/latest/histor
 
 Returns list of artifact changes for the tag (most recent first).
 
+### List Versions
+
+```bash
+# Basic listing
+curl http://localhost:8080/api/v1/project/my-project/releases/versions
+
+# With pagination and sorting
+curl "http://localhost:8080/api/v1/project/my-project/releases/versions?sort=version&order=desc"
+```
+
+Response includes tags pointing to each version's artifact:
+```json
+{
+  "items": [
+    {
+      "id": "uuid",
+      "package_id": "uuid",
+      "version": "1.0.0",
+      "version_source": "explicit",
+      "artifact_id": "a3f5d8e...",
+      "size": 1048576,
+      "content_type": "application/gzip",
+      "original_name": "app-v1.0.0.tar.gz",
+      "created_at": "2025-01-01T00:00:00Z",
+      "created_by": "user",
+      "tags": ["latest", "stable"]
+    }
+  ],
+  "pagination": {"page": 1, "limit": 20, "total": 1, "total_pages": 1}
+}
+```
+
 ### List Artifacts in Package
 
 ```bash
@@ -522,15 +575,48 @@ Configuration is provided via environment variables prefixed with `ORCHARD_`:

 | `ORCHARD_DOWNLOAD_MODE` | Download mode: `presigned`, `redirect`, or `proxy` | `presigned` |
 | `ORCHARD_PRESIGNED_URL_EXPIRY` | Presigned URL expiry in seconds | `3600` |

+## CI/CD Pipeline
+
+The GitLab CI/CD pipeline automates building, testing, and deploying Orchard.
+
+### Pipeline Stages
+
+| Stage | Jobs | Description |
+|-------|------|-------------|
+| lint | `kics`, `hadolint`, `secrets` | Security and code quality scanning |
+| build | `build_image` | Build and push Docker image |
+| test | `python_tests`, `frontend_tests` | Run unit tests with coverage |
+| deploy | `deploy_stage`, `deploy_feature` | Deploy to Kubernetes |
+| deploy | `integration_test_*` | Post-deployment integration tests |
+
+### Environments
+
+| Environment | Branch | Namespace | URL |
+|-------------|--------|-----------|-----|
+| Stage | `main` | `orch-stage-namespace` | `orchard-stage.common.global.bsf.tools` |
+| Feature | `*` (non-main) | `orch-dev-namespace` | `orchard-{branch}.common.global.bsf.tools` |
+
+### Feature Branch Workflow
+
+1. Push a feature branch
+2. Pipeline builds, tests, and deploys to an isolated environment
+3. Integration tests run against the deployed environment
+4. GitLab UI shows the environment link for manual testing
+5. On merge to main, the environment is automatically cleaned up
+6. Environments also auto-expire after 1 week if the branch is not deleted
+
+### Manual Cleanup
+
+Feature environments can be manually cleaned up via:
+
+- GitLab UI: Environments → Stop environment
+- CLI: `helm uninstall orchard-{branch} -n orch-dev-namespace`
+
 ## Kubernetes Deployment

 ### Using Helm

 ```bash
-# Add Bitnami repo for dependencies
-helm repo add bitnami https://charts.bitnami.com/bitnami
-
-# Update dependencies
+# Update dependencies (uses internal OCI registry)
 cd helm/orchard
 helm dependency update
@@ -575,7 +661,8 @@ See `helm/orchard/values.yaml` for all configuration options.

 - **projects** - Top-level organizational containers
 - **packages** - Collections within projects
 - **artifacts** - Content-addressable artifacts (SHA256)
-- **tags** - Aliases pointing to artifacts
+- **tags** - Mutable aliases pointing to artifacts
+- **package_versions** - Immutable version records (set at upload time)
 - **tag_history** - Audit trail for tag changes
 - **uploads** - Upload event records
 - **consumers** - Dependency tracking
@@ -593,10 +680,16 @@ The following features are planned but not yet implemented:

 - [ ] Export/Import for air-gapped systems
 - [ ] Consumer notification
 - [ ] Automated update propagation
-- [ ] OIDC/SAML authentication
+- [ ] SAML authentication
-- [ ] API key management
 - [ ] Redis caching layer
-- [ ] Garbage collection for orphaned artifacts
+- [ ] Download integrity verification (see `docs/design/integrity-verification.md`)
+
+### Recently Implemented
+
+- [x] OIDC authentication
+- [x] API key management
+- [x] Garbage collection for orphaned artifacts
+- [x] User authentication with sessions

 ## License
backend/app/auth.py (new file, 1208 lines)
File diff suppressed because it is too large
@@ -25,6 +25,7 @@ class Settings(BaseSettings):

     database_pool_recycle: int = (
         1800  # Recycle connections after this many seconds (30 min)
     )
+    database_query_timeout: int = 30  # Query timeout in seconds (0 = no timeout)

     # S3
     s3_endpoint: str = ""
@@ -52,6 +53,17 @@ class Settings(BaseSettings):

     log_level: str = "INFO"  # DEBUG, INFO, WARNING, ERROR, CRITICAL
     log_format: str = "auto"  # "json", "standard", or "auto" (json in production)

+    # JWT Authentication settings (optional, for external identity providers)
+    jwt_enabled: bool = False  # Enable JWT token validation
+    jwt_secret: str = ""  # Secret key for HS256, or leave empty for RS256 with JWKS
+    jwt_algorithm: str = "HS256"  # HS256 or RS256
+    jwt_issuer: str = ""  # Expected issuer (iss claim), leave empty to skip validation
+    jwt_audience: str = ""  # Expected audience (aud claim), leave empty to skip validation
+    jwt_jwks_url: str = ""  # JWKS URL for RS256 (e.g., https://auth.example.com/.well-known/jwks.json)
+    jwt_username_claim: str = (
+        "sub"  # JWT claim to use as username (sub, email, preferred_username, etc.)
+    )

     @property
     def database_url(self) -> str:
         sslmode = f"?sslmode={self.database_sslmode}" if self.database_sslmode else ""
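
A minimal sketch (not in the diff) of turning HS256 validation on through the environment. It assumes the `Settings` class maps these fields to `ORCHARD_`-prefixed variables, as the README's configuration section suggests; the import path and values are illustrative, not a recommended secret.

```python
# Hedged sketch: enabling JWT validation via ORCHARD_-prefixed environment variables.
import os

os.environ["ORCHARD_JWT_ENABLED"] = "true"
os.environ["ORCHARD_JWT_SECRET"] = "dev-only-secret"            # HS256 shared secret
os.environ["ORCHARD_JWT_ISSUER"] = "https://auth.example.com"   # checked against iss

from app.config import get_settings  # import path is illustrative

settings = get_settings()
print(settings.jwt_enabled, settings.jwt_algorithm)  # True HS256
```
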
@@ -12,6 +12,12 @@ from .models import Base

 settings = get_settings()
 logger = logging.getLogger(__name__)

+# Build connect_args with query timeout if configured
+connect_args = {}
+if settings.database_query_timeout > 0:
+    # PostgreSQL statement_timeout is in milliseconds
+    connect_args["options"] = f"-c statement_timeout={settings.database_query_timeout * 1000}"

 # Create engine with connection pool configuration
 engine = create_engine(
     settings.database_url,
@@ -21,6 +27,7 @@ engine = create_engine(

     max_overflow=settings.database_max_overflow,
     pool_timeout=settings.database_pool_timeout,
     pool_recycle=settings.database_pool_recycle,
+    connect_args=connect_args,
 )

 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
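
A short sketch (not in the diff) of what this wiring buys in practice: with `database_query_timeout = 30`, PostgreSQL cancels any statement running past 30 seconds, and SQLAlchemy surfaces the cancellation as an `OperationalError`. The session setup is assumed.

```python
# Hedged sketch: a query exceeding statement_timeout is cancelled server-side.
from sqlalchemy import text
from sqlalchemy.exc import OperationalError

def demo(session):
    try:
        session.execute(text("SELECT pg_sleep(60)"))  # exceeds the 30 s limit
    except OperationalError as exc:
        session.rollback()
        print("query cancelled by statement_timeout:", exc)
```
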
@@ -144,6 +151,140 @@ def _run_migrations():

             END IF;
         END $$;
         """,
+        # Add package_versions indexes and triggers (007_package_versions.sql)
+        """
+        DO $$
+        BEGIN
+            -- Create indexes for package_versions if table exists
+            IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
+                -- Indexes for common queries
+                IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_package_versions_package_id') THEN
+                    CREATE INDEX idx_package_versions_package_id ON package_versions(package_id);
+                END IF;
+                IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_package_versions_artifact_id') THEN
+                    CREATE INDEX idx_package_versions_artifact_id ON package_versions(artifact_id);
+                END IF;
+                IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_package_versions_package_version') THEN
+                    CREATE INDEX idx_package_versions_package_version ON package_versions(package_id, version);
+                END IF;
+            END IF;
+        END $$;
+        """,
+        # Create ref_count trigger functions for tags (ensures triggers exist even if initial migration wasn't run)
+        """
+        CREATE OR REPLACE FUNCTION increment_artifact_ref_count()
+        RETURNS TRIGGER AS $$
+        BEGIN
+            UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
+            RETURN NEW;
+        END;
+        $$ LANGUAGE plpgsql;
+        """,
+        """
+        CREATE OR REPLACE FUNCTION decrement_artifact_ref_count()
+        RETURNS TRIGGER AS $$
+        BEGIN
+            UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
+            RETURN OLD;
+        END;
+        $$ LANGUAGE plpgsql;
+        """,
+        """
+        CREATE OR REPLACE FUNCTION update_artifact_ref_count()
+        RETURNS TRIGGER AS $$
+        BEGIN
+            IF OLD.artifact_id != NEW.artifact_id THEN
+                UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
+                UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
+            END IF;
+            RETURN NEW;
+        END;
+        $$ LANGUAGE plpgsql;
+        """,
+        # Create triggers for tags ref_count management
+        """
+        DO $$
+        BEGIN
+            -- Drop and recreate triggers to ensure they're current
+            DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
+            CREATE TRIGGER tags_ref_count_insert_trigger
+                AFTER INSERT ON tags
+                FOR EACH ROW
+                EXECUTE FUNCTION increment_artifact_ref_count();
+
+            DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
+            CREATE TRIGGER tags_ref_count_delete_trigger
+                AFTER DELETE ON tags
+                FOR EACH ROW
+                EXECUTE FUNCTION decrement_artifact_ref_count();
+
+            DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
+            CREATE TRIGGER tags_ref_count_update_trigger
+                AFTER UPDATE ON tags
+                FOR EACH ROW
+                WHEN (OLD.artifact_id IS DISTINCT FROM NEW.artifact_id)
+                EXECUTE FUNCTION update_artifact_ref_count();
+        END $$;
+        """,
+        # Create ref_count trigger functions for package_versions
+        """
+        CREATE OR REPLACE FUNCTION increment_version_ref_count()
+        RETURNS TRIGGER AS $$
+        BEGIN
+            UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
+            RETURN NEW;
+        END;
+        $$ LANGUAGE plpgsql;
+        """,
+        """
+        CREATE OR REPLACE FUNCTION decrement_version_ref_count()
+        RETURNS TRIGGER AS $$
+        BEGIN
+            UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
+            RETURN OLD;
+        END;
+        $$ LANGUAGE plpgsql;
+        """,
+        # Create triggers for package_versions ref_count
+        """
+        DO $$
+        BEGIN
+            IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
+                -- Drop and recreate triggers to ensure they're current
+                DROP TRIGGER IF EXISTS package_versions_ref_count_insert ON package_versions;
+                CREATE TRIGGER package_versions_ref_count_insert
+                    AFTER INSERT ON package_versions
+                    FOR EACH ROW
+                    EXECUTE FUNCTION increment_version_ref_count();
+
+                DROP TRIGGER IF EXISTS package_versions_ref_count_delete ON package_versions;
+                CREATE TRIGGER package_versions_ref_count_delete
+                    AFTER DELETE ON package_versions
+                    FOR EACH ROW
+                    EXECUTE FUNCTION decrement_version_ref_count();
+            END IF;
+        END $$;
+        """,
+        # Migrate existing semver tags to package_versions
+        r"""
+        DO $$
+        BEGIN
+            IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
+                -- Migrate tags that look like versions (v1.0.0, 1.2.3, 2.0.0-beta, etc.)
+                INSERT INTO package_versions (package_id, artifact_id, version, version_source, created_by, created_at)
+                SELECT
+                    t.package_id,
+                    t.artifact_id,
+                    CASE WHEN t.name LIKE 'v%' THEN substring(t.name from 2) ELSE t.name END,
+                    'migrated_from_tag',
+                    t.created_by,
+                    t.created_at
+                FROM tags t
+                WHERE t.name ~ '^v?[0-9]+\.[0-9]+(\.[0-9]+)?([-.][a-zA-Z0-9]+)?$'
+                ON CONFLICT (package_id, version) DO NOTHING;
+            END IF;
+        END $$;
+        """,
     ]

     with engine.connect() as conn:
backend/app/dependencies.py (new file, 723 lines)
@@ -0,0 +1,723 @@
+"""
+Dependency management module for artifact dependencies.
+
+Handles:
+- Parsing orchard.ensure files
+- Storing dependencies in the database
+- Querying dependencies and reverse dependencies
+- Dependency resolution with topological sorting
+- Circular dependency detection
+- Conflict detection
+"""
+
+import yaml
+from typing import List, Dict, Any, Optional, Set, Tuple
+from sqlalchemy.orm import Session
+from sqlalchemy import and_
+
+from .models import (
+    Project,
+    Package,
+    Artifact,
+    Tag,
+    ArtifactDependency,
+    PackageVersion,
+)
+from .schemas import (
+    EnsureFileContent,
+    EnsureFileDependency,
+    DependencyResponse,
+    ArtifactDependenciesResponse,
+    DependentInfo,
+    ReverseDependenciesResponse,
+    ResolvedArtifact,
+    DependencyResolutionResponse,
+    DependencyConflict,
+    PaginationMeta,
+)
+
+
+class DependencyError(Exception):
+    """Base exception for dependency errors."""
+    pass
+
+
+class CircularDependencyError(DependencyError):
+    """Raised when a circular dependency is detected."""
+    def __init__(self, cycle: List[str]):
+        self.cycle = cycle
+        super().__init__(f"Circular dependency detected: {' -> '.join(cycle)}")
+
+
+class DependencyConflictError(DependencyError):
+    """Raised when conflicting dependency versions are detected."""
+    def __init__(self, conflicts: List[DependencyConflict]):
+        self.conflicts = conflicts
+        super().__init__(f"Dependency conflicts detected: {len(conflicts)} conflict(s)")
+
+
+class DependencyNotFoundError(DependencyError):
+    """Raised when a dependency cannot be resolved."""
+    def __init__(self, project: str, package: str, constraint: str):
+        self.project = project
+        self.package = package
+        self.constraint = constraint
+        super().__init__(f"Dependency not found: {project}/{package}@{constraint}")
+
+
+class InvalidEnsureFileError(DependencyError):
+    """Raised when the ensure file is invalid."""
+    pass
+
+
+class DependencyDepthExceededError(DependencyError):
+    """Raised when dependency resolution exceeds max depth."""
+    def __init__(self, max_depth: int):
+        self.max_depth = max_depth
+        super().__init__(f"Dependency resolution exceeded maximum depth of {max_depth}")
+
+
+# Safety limits to prevent DoS attacks
+MAX_DEPENDENCY_DEPTH = 50  # Maximum levels of nested dependencies
+MAX_DEPENDENCIES_PER_ARTIFACT = 200  # Maximum direct dependencies per artifact
+
+
+def parse_ensure_file(content: bytes) -> EnsureFileContent:
+    """
+    Parse an orchard.ensure file.
+
+    Args:
+        content: Raw bytes of the ensure file
+
+    Returns:
+        Parsed EnsureFileContent
+
+    Raises:
+        InvalidEnsureFileError: If the file is invalid YAML or has wrong structure
+    """
+    try:
+        data = yaml.safe_load(content.decode('utf-8'))
+    except yaml.YAMLError as e:
+        raise InvalidEnsureFileError(f"Invalid YAML: {e}")
+    except UnicodeDecodeError as e:
+        raise InvalidEnsureFileError(f"Invalid encoding: {e}")
+
+    if data is None:
+        return EnsureFileContent(dependencies=[])
+
+    if not isinstance(data, dict):
+        raise InvalidEnsureFileError("Ensure file must be a YAML dictionary")
+
+    dependencies = []
+    deps_data = data.get('dependencies', [])
+
+    if not isinstance(deps_data, list):
+        raise InvalidEnsureFileError("'dependencies' must be a list")
+
+    # Safety limit: prevent DoS through excessive dependencies
+    if len(deps_data) > MAX_DEPENDENCIES_PER_ARTIFACT:
+        raise InvalidEnsureFileError(
+            f"Too many dependencies: {len(deps_data)} exceeds maximum of {MAX_DEPENDENCIES_PER_ARTIFACT}"
+        )
+
+    for i, dep in enumerate(deps_data):
+        if not isinstance(dep, dict):
+            raise InvalidEnsureFileError(f"Dependency {i} must be a dictionary")
+
+        project = dep.get('project')
+        package = dep.get('package')
+        version = dep.get('version')
+        tag = dep.get('tag')
+
+        if not project:
+            raise InvalidEnsureFileError(f"Dependency {i} missing 'project'")
+        if not package:
+            raise InvalidEnsureFileError(f"Dependency {i} missing 'package'")
+        if not version and not tag:
+            raise InvalidEnsureFileError(
+                f"Dependency {i} must have either 'version' or 'tag'"
+            )
+        if version and tag:
+            raise InvalidEnsureFileError(
+                f"Dependency {i} cannot have both 'version' and 'tag'"
+            )
+
+        dependencies.append(EnsureFileDependency(
+            project=project,
+            package=package,
+            version=version,
+            tag=tag,
+        ))
+
+    return EnsureFileContent(dependencies=dependencies)
+
+
+def validate_dependencies(
+    db: Session,
+    dependencies: List[EnsureFileDependency],
+) -> List[str]:
+    """
+    Validate that all dependency projects exist.
+
+    Args:
+        db: Database session
+        dependencies: List of dependencies to validate
+
+    Returns:
+        List of error messages (empty if all valid)
+    """
+    errors = []
+
+    for dep in dependencies:
+        project = db.query(Project).filter(Project.name == dep.project).first()
+        if not project:
+            errors.append(f"Project '{dep.project}' not found")
+
+    return errors
+
+
+def store_dependencies(
+    db: Session,
+    artifact_id: str,
+    dependencies: List[EnsureFileDependency],
+) -> List[ArtifactDependency]:
+    """
+    Store dependencies for an artifact.
+
+    Args:
+        db: Database session
+        artifact_id: The artifact ID that has these dependencies
+        dependencies: List of dependencies to store
+
+    Returns:
+        List of created ArtifactDependency objects
+    """
+    created = []
+
+    for dep in dependencies:
+        artifact_dep = ArtifactDependency(
+            artifact_id=artifact_id,
+            dependency_project=dep.project,
+            dependency_package=dep.package,
+            version_constraint=dep.version,
+            tag_constraint=dep.tag,
+        )
+        db.add(artifact_dep)
+        created.append(artifact_dep)
+
+    return created
+
+
+def get_artifact_dependencies(
+    db: Session,
+    artifact_id: str,
+) -> List[DependencyResponse]:
+    """
+    Get all dependencies for an artifact.
+
+    Args:
+        db: Database session
+        artifact_id: The artifact ID
+
+    Returns:
+        List of DependencyResponse objects
+    """
+    deps = db.query(ArtifactDependency).filter(
+        ArtifactDependency.artifact_id == artifact_id
+    ).all()
+
+    return [DependencyResponse.from_orm_model(dep) for dep in deps]
+
+
+def get_reverse_dependencies(
+    db: Session,
+    project_name: str,
+    package_name: str,
+    page: int = 1,
+    limit: int = 50,
+) -> ReverseDependenciesResponse:
+    """
+    Get all artifacts that depend on a given package.
+
+    Args:
+        db: Database session
+        project_name: Target project name
+        package_name: Target package name
+        page: Page number (1-indexed)
+        limit: Results per page
+
+    Returns:
+        ReverseDependenciesResponse with dependents and pagination
+    """
+    # Query dependencies that point to this project/package
+    query = db.query(ArtifactDependency).filter(
+        ArtifactDependency.dependency_project == project_name,
+        ArtifactDependency.dependency_package == package_name,
+    )
+
+    total = query.count()
+    offset = (page - 1) * limit
+    deps = query.offset(offset).limit(limit).all()
+
+    dependents = []
+    for dep in deps:
+        # Get artifact info to find the project/package/version
+        artifact = db.query(Artifact).filter(Artifact.id == dep.artifact_id).first()
+        if not artifact:
+            continue
+
+        # Find which package this artifact belongs to via tags or versions
+        tag = db.query(Tag).filter(Tag.artifact_id == dep.artifact_id).first()
+        if tag:
+            pkg = db.query(Package).filter(Package.id == tag.package_id).first()
+            if pkg:
+                proj = db.query(Project).filter(Project.id == pkg.project_id).first()
+                if proj:
+                    # Get version if available
+                    version_record = db.query(PackageVersion).filter(
+                        PackageVersion.artifact_id == dep.artifact_id,
+                        PackageVersion.package_id == pkg.id,
+                    ).first()
+
+                    dependents.append(DependentInfo(
+                        artifact_id=dep.artifact_id,
+                        project=proj.name,
+                        package=pkg.name,
+                        version=version_record.version if version_record else None,
+                        constraint_type="version" if dep.version_constraint else "tag",
+                        constraint_value=dep.version_constraint or dep.tag_constraint,
+                    ))
+
+    total_pages = (total + limit - 1) // limit
+
+    return ReverseDependenciesResponse(
+        project=project_name,
+        package=package_name,
+        dependents=dependents,
+        pagination=PaginationMeta(
+            page=page,
+            limit=limit,
+            total=total,
+            total_pages=total_pages,
+            has_more=page < total_pages,
+        ),
+    )
+
+
+def _resolve_dependency_to_artifact(
+    db: Session,
+    project_name: str,
+    package_name: str,
+    version: Optional[str],
+    tag: Optional[str],
+) -> Optional[Tuple[str, str, int]]:
+    """
+    Resolve a dependency constraint to an artifact ID.
+
+    Args:
+        db: Database session
+        project_name: Project name
+        package_name: Package name
+        version: Version constraint (exact)
+        tag: Tag constraint
+
+    Returns:
+        Tuple of (artifact_id, resolved_version_or_tag, size) or None if not found
+    """
+    # Get project and package
+    project = db.query(Project).filter(Project.name == project_name).first()
+    if not project:
+        return None
+
+    package = db.query(Package).filter(
+        Package.project_id == project.id,
+        Package.name == package_name,
+    ).first()
+    if not package:
+        return None
+
+    if version:
+        # Look up by version
+        pkg_version = db.query(PackageVersion).filter(
+            PackageVersion.package_id == package.id,
+            PackageVersion.version == version,
+        ).first()
+        if pkg_version:
+            artifact = db.query(Artifact).filter(
+                Artifact.id == pkg_version.artifact_id
+            ).first()
+            if artifact:
+                return (artifact.id, version, artifact.size)
+
+        # Also check if there's a tag with this exact name
+        tag_record = db.query(Tag).filter(
+            Tag.package_id == package.id,
+            Tag.name == version,
+        ).first()
+        if tag_record:
+            artifact = db.query(Artifact).filter(
+                Artifact.id == tag_record.artifact_id
+            ).first()
+            if artifact:
+                return (artifact.id, version, artifact.size)
+
+    if tag:
+        # Look up by tag
+        tag_record = db.query(Tag).filter(
+            Tag.package_id == package.id,
+            Tag.name == tag,
+        ).first()
+        if tag_record:
+            artifact = db.query(Artifact).filter(
+                Artifact.id == tag_record.artifact_id
+            ).first()
+            if artifact:
+                return (artifact.id, tag, artifact.size)
+
+    return None
+
+
+def _detect_package_cycle(
+    db: Session,
+    project_name: str,
+    package_name: str,
+    target_project: str,
+    target_package: str,
+    visiting: Set[str],
+    visited: Set[str],
+    path: List[str],
+) -> Optional[List[str]]:
+    """
+    Detect cycles at the package level using DFS.
+
+    Args:
+        db: Database session
+        project_name: Current project being visited
+        package_name: Current package being visited
+        target_project: The project we're checking for cycles back to
+        target_package: The package we're checking for cycles back to
+        visiting: Set of package keys currently in the recursion stack
+        visited: Set of fully processed package keys
+        path: Current path for cycle reporting
+
+    Returns:
+        Cycle path if detected, None otherwise
+    """
+    pkg_key = f"{project_name}/{package_name}"
+
+    # Check if we've reached the target package (cycle detected)
+    if project_name == target_project and package_name == target_package:
+        return path + [pkg_key]
+
+    if pkg_key in visiting:
+        # Unexpected internal cycle
+        return None
+
+    if pkg_key in visited:
+        return None
+
+    visiting.add(pkg_key)
+    path.append(pkg_key)
+
+    # Get the package and find any artifacts with dependencies
+    project = db.query(Project).filter(Project.name == project_name).first()
+    if project:
+        package = db.query(Package).filter(
+            Package.project_id == project.id,
+            Package.name == package_name,
+        ).first()
+        if package:
+            # Find all artifacts in this package via tags
+            tags = db.query(Tag).filter(Tag.package_id == package.id).all()
+            artifact_ids = {t.artifact_id for t in tags}
+
+            # Get dependencies from all artifacts in this package
+            for artifact_id in artifact_ids:
+                deps = db.query(ArtifactDependency).filter(
+                    ArtifactDependency.artifact_id == artifact_id
+                ).all()
+
+                for dep in deps:
+                    cycle = _detect_package_cycle(
+                        db,
+                        dep.dependency_project,
+                        dep.dependency_package,
+                        target_project,
+                        target_package,
+                        visiting,
+                        visited,
+                        path,
+                    )
+                    if cycle:
+                        return cycle
+
+    path.pop()
+    visiting.remove(pkg_key)
+    visited.add(pkg_key)
+
+    return None
+
+
+def check_circular_dependencies(
+    db: Session,
+    artifact_id: str,
+    new_dependencies: List[EnsureFileDependency],
+    project_name: Optional[str] = None,
+    package_name: Optional[str] = None,
+) -> Optional[List[str]]:
+    """
+    Check if adding the new dependencies would create a circular dependency.
+
+    Args:
+        db: Database session
+        artifact_id: The artifact that will have these dependencies
+        new_dependencies: Dependencies to be added
+        project_name: Project name (optional, will try to look up from tag if not provided)
+        package_name: Package name (optional, will try to look up from tag if not provided)
+
+    Returns:
+        Cycle path if detected, None otherwise
+    """
+    # First, get the package info for this artifact to build path labels
+    if project_name and package_name:
+        current_path = f"{project_name}/{package_name}"
+    else:
+        # Try to look up from tag
+        artifact = db.query(Artifact).filter(Artifact.id == artifact_id).first()
+        if not artifact:
+            return None
+
+        # Find package for this artifact
+        tag = db.query(Tag).filter(Tag.artifact_id == artifact_id).first()
+        if not tag:
+            return None
+
+        package = db.query(Package).filter(Package.id == tag.package_id).first()
+        if not package:
+            return None
+
+        project = db.query(Project).filter(Project.id == package.project_id).first()
+        if not project:
+            return None
+
+        current_path = f"{project.name}/{package.name}"
+
+    # Extract target project and package from current_path
+    if "/" in current_path:
+        target_project, target_package = current_path.split("/", 1)
+    else:
+        return None
+
+    # For each new dependency, check if it would create a cycle back to our package
+    for dep in new_dependencies:
+        # Check if this dependency (transitively) depends on us at the package level
+        visiting: Set[str] = set()
+        visited: Set[str] = set()
+        path: List[str] = [current_path]
+
+        # Check from the dependency's package
+        cycle = _detect_package_cycle(
+            db,
+            dep.project,
+            dep.package,
+            target_project,
+            target_package,
+            visiting,
+            visited,
+            path,
+        )
+        if cycle:
+            return cycle
+
+    return None
+
+
+def resolve_dependencies(
+    db: Session,
+    project_name: str,
+    package_name: str,
+    ref: str,
+    base_url: str,
+) -> DependencyResolutionResponse:
+    """
+    Resolve all dependencies for an artifact recursively.
+
+    Args:
+        db: Database session
+        project_name: Project name
+        package_name: Package name
+        ref: Tag or version reference
+        base_url: Base URL for download URLs
+
+    Returns:
+        DependencyResolutionResponse with all resolved artifacts
+
+    Raises:
+        DependencyNotFoundError: If a dependency cannot be resolved
+        CircularDependencyError: If circular dependencies are detected
+        DependencyConflictError: If conflicting versions are required
+    """
+    # Resolve the initial artifact
+    project = db.query(Project).filter(Project.name == project_name).first()
+    if not project:
+        raise DependencyNotFoundError(project_name, package_name, ref)
+
+    package = db.query(Package).filter(
+        Package.project_id == project.id,
+        Package.name == package_name,
+    ).first()
+    if not package:
+        raise DependencyNotFoundError(project_name, package_name, ref)
+
+    # Try to find artifact by tag or version
+    resolved = _resolve_dependency_to_artifact(
+        db, project_name, package_name, ref, ref
+    )
+    if not resolved:
+        raise DependencyNotFoundError(project_name, package_name, ref)
+
+    root_artifact_id, root_version, root_size = resolved
+
+    # Track resolved artifacts and their versions
+    resolved_artifacts: Dict[str, ResolvedArtifact] = {}
+    # Track version requirements for conflict detection
+    version_requirements: Dict[str, List[Dict[str, Any]]] = {}  # pkg_key -> [(version, required_by)]
+    # Track visiting/visited for cycle detection
+    visiting: Set[str] = set()
+    visited: Set[str] = set()
+    # Resolution order (topological)
+    resolution_order: List[str] = []
+
+    def _resolve_recursive(
+        artifact_id: str,
+        proj_name: str,
+        pkg_name: str,
+        version_or_tag: str,
+        size: int,
+        required_by: Optional[str],
+        depth: int = 0,
+    ):
+        """Recursively resolve dependencies with cycle/conflict detection."""
+        # Safety limit: prevent DoS through deeply nested dependencies
+        if depth > MAX_DEPENDENCY_DEPTH:
+            raise DependencyDepthExceededError(MAX_DEPENDENCY_DEPTH)
+
+        pkg_key = f"{proj_name}/{pkg_name}"
+
+        # Cycle detection (at artifact level)
+        if artifact_id in visiting:
+            # Build cycle path
+            raise CircularDependencyError([pkg_key, pkg_key])
+
+        # Conflict detection - check if we've seen this package before with a different version
+        if pkg_key in version_requirements:
+            existing_versions = {r["version"] for r in version_requirements[pkg_key]}
+            if version_or_tag not in existing_versions:
+                # Conflict detected - same package, different version
+                requirements = version_requirements[pkg_key] + [
+                    {"version": version_or_tag, "required_by": required_by}
+                ]
+                raise DependencyConflictError([
+                    DependencyConflict(
+                        project=proj_name,
+                        package=pkg_name,
+                        requirements=[
+                            {
+                                "version": r["version"],
+                                "required_by": [{"path": r["required_by"]}] if r["required_by"] else []
+                            }
+                            for r in requirements
+                        ],
+                    )
+                ])
+            # Same version already resolved - skip
+            if artifact_id in visited:
+                return
+
+        if artifact_id in visited:
+            return
+
+        visiting.add(artifact_id)
+
+        # Track version requirement
+        if pkg_key not in version_requirements:
+            version_requirements[pkg_key] = []
+        version_requirements[pkg_key].append({
+            "version": version_or_tag,
+            "required_by": required_by,
+        })
+
+        # Get dependencies
+        deps = db.query(ArtifactDependency).filter(
+            ArtifactDependency.artifact_id == artifact_id
+        ).all()
+
+        # Resolve each dependency first (depth-first)
+        for dep in deps:
+            resolved_dep = _resolve_dependency_to_artifact(
+                db,
+                dep.dependency_project,
+                dep.dependency_package,
+                dep.version_constraint,
+                dep.tag_constraint,
+            )
+
+            if not resolved_dep:
+                constraint = dep.version_constraint or dep.tag_constraint
+                raise DependencyNotFoundError(
+                    dep.dependency_project,
+                    dep.dependency_package,
+                    constraint,
+                )
+
+            dep_artifact_id, dep_version, dep_size = resolved_dep
+            _resolve_recursive(
+                dep_artifact_id,
+                dep.dependency_project,
+                dep.dependency_package,
+                dep_version,
+                dep_size,
+                pkg_key,
+                depth + 1,
+            )
+
+        visiting.remove(artifact_id)
+        visited.add(artifact_id)
+
+        # Add to resolution order (dependencies before dependents)
+        resolution_order.append(artifact_id)
+
+        # Store resolved artifact info
+        resolved_artifacts[artifact_id] = ResolvedArtifact(
+            artifact_id=artifact_id,
+            project=proj_name,
+            package=pkg_name,
+            version=version_or_tag,
+            size=size,
+            download_url=f"{base_url}/api/v1/project/{proj_name}/{pkg_name}/+/{version_or_tag}",
+        )
+
+    # Start resolution from root
+    _resolve_recursive(
+        root_artifact_id,
+        project_name,
+        package_name,
+        root_version,
+        root_size,
+        None,
+    )
+
+    # Build response in topological order
+    resolved_list = [resolved_artifacts[aid] for aid in resolution_order]
+    total_size = sum(r.size for r in resolved_list)
+
+    return DependencyResolutionResponse(
+        requested={
+            "project": project_name,
+            "package": package_name,
+            "ref": ref,
+        },
+        resolved=resolved_list,
+        total_size=total_size,
+        artifact_count=len(resolved_list),
+    )
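
A minimal sketch (not part of the diff) of the round trip the new module supports. The `orchard.ensure` layout below follows exactly what `parse_ensure_file` validates (a `dependencies` list whose entries name a project, a package, and exactly one of `version` or `tag`); the field values are illustrative.

```python
# Hedged sketch: feeding a minimal orchard.ensure through parse_ensure_file.
content = b"""
dependencies:
  - project: my-project
    package: releases
    version: "1.0.0"
  - project: shared
    package: common-libs
    tag: stable
"""

parsed = parse_ensure_file(content)
for dep in parsed.dependencies:
    # Each dependency carries exactly one of version or tag
    print(dep.project, dep.package, dep.version or dep.tag)
```
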
@@ -1,14 +1,19 @@
-from fastapi import FastAPI
+from fastapi import FastAPI, Request
 from fastapi.staticfiles import StaticFiles
 from fastapi.responses import FileResponse
 from contextlib import asynccontextmanager
 import logging
 import os

+from slowapi import _rate_limit_exceeded_handler
+from slowapi.errors import RateLimitExceeded
+
 from .config import get_settings
 from .database import init_db, SessionLocal
 from .routes import router
 from .seed import seed_database
+from .auth import create_default_admin
+from .rate_limit import limiter

 settings = get_settings()
 logging.basicConfig(level=logging.INFO)
@@ -20,6 +25,18 @@ async def lifespan(app: FastAPI):

     # Startup: initialize database
     init_db()

+    # Create default admin user if no users exist
+    db = SessionLocal()
+    try:
+        admin = create_default_admin(db)
+        if admin:
+            logger.warning(
+                "Default admin user created with username 'admin' and password 'changeme123'. "
+                "CHANGE THIS PASSWORD IMMEDIATELY!"
+            )
+    finally:
+        db.close()
+
     # Seed test data in development mode
     if settings.is_development:
         logger.info(f"Running in {settings.env} mode - checking for seed data")
@@ -42,13 +59,21 @@ app = FastAPI(

     lifespan=lifespan,
 )

+# Set up rate limiting
+app.state.limiter = limiter
+app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
+
 # Include API routes
 app.include_router(router)

 # Serve static files (React build) if the directory exists
 static_dir = os.path.join(os.path.dirname(__file__), "..", "..", "frontend", "dist")
 if os.path.exists(static_dir):
-    app.mount("/assets", StaticFiles(directory=os.path.join(static_dir, "assets")), name="assets")
+    app.mount(
+        "/assets",
+        StaticFiles(directory=os.path.join(static_dir, "assets")),
+        name="assets",
+    )

     @app.get("/")
     async def serve_spa():
@@ -60,12 +85,19 @@ if os.path.exists(static_dir):

         # Don't catch API routes or health endpoint
         if full_path.startswith("api/") or full_path.startswith("health"):
             from fastapi import HTTPException

             raise HTTPException(status_code=404, detail="Not found")

+        # Check if requesting a static file from dist root (favicon, etc.)
+        static_file_path = os.path.join(static_dir, full_path)
+        if os.path.isfile(static_file_path) and not full_path.startswith("."):
+            return FileResponse(static_file_path)
+
         # Serve SPA for all other routes (including /project/*)
         index_path = os.path.join(static_dir, "index.html")
         if os.path.exists(index_path):
             return FileResponse(index_path)

         from fastapi import HTTPException

         raise HTTPException(status_code=404, detail="Not found")
@@ -245,9 +245,10 @@ def extract_tarball_metadata(file: BinaryIO, filename: str) -> Dict[str, Any]:

             break

     # Try to split name and version
+    # Handle optional 'v' prefix on version (e.g., package-v1.0.0)
     patterns = [
-        r"^(.+)-(\d+\.\d+(?:\.\d+)?(?:[-._]\w+)?)$",  # name-version
+        r"^(.+)-v?(\d+\.\d+(?:\.\d+)?(?:[-_]\w+)?)$",  # name-version or name-vversion
-        r"^(.+)_(\d+\.\d+(?:\.\d+)?(?:[-._]\w+)?)$",  # name_version
+        r"^(.+)_v?(\d+\.\d+(?:\.\d+)?(?:[-_]\w+)?)$",  # name_version or name_vversion
     ]

     for pattern in patterns:
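
A quick sketch (not in the diff) of what the widened pattern accepts. The regex is copied from the hunk above; the sample filename stems are illustrative.

```python
# Hedged sketch: the 'v' prefix is now consumed outside the version group.
import re

pattern = r"^(.+)-v?(\d+\.\d+(?:\.\d+)?(?:[-_]\w+)?)$"
for stem in ("app-1.0.0", "app-v1.0.0", "app-v2.1-beta"):
    m = re.match(pattern, stem)
    print(stem, "->", m.groups() if m else None)
# app-1.0.0     -> ('app', '1.0.0')
# app-v1.0.0    -> ('app', '1.0.0')     # 'v' stripped from the version group
# app-v2.1-beta -> ('app', '2.1-beta')
```
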
@@ -11,6 +11,7 @@ from sqlalchemy import (

     CheckConstraint,
     Index,
     JSON,
+    ARRAY,
 )
 from sqlalchemy.dialects.postgresql import UUID
 from sqlalchemy.orm import relationship, declarative_base
@@ -71,6 +72,9 @@ class Package(Base):

     consumers = relationship(
         "Consumer", back_populates="package", cascade="all, delete-orphan"
     )
+    versions = relationship(
+        "PackageVersion", back_populates="package", cascade="all, delete-orphan"
+    )

     __table_args__ = (
         Index("idx_packages_project_id", "project_id"),
@@ -112,6 +116,10 @@ class Artifact(Base):

     tags = relationship("Tag", back_populates="artifact")
     uploads = relationship("Upload", back_populates="artifact")
+    versions = relationship("PackageVersion", back_populates="artifact")
+    dependencies = relationship(
+        "ArtifactDependency", back_populates="artifact", cascade="all, delete-orphan"
+    )

     @property
     def sha256(self) -> str:
@@ -196,6 +204,38 @@ class TagHistory(Base):

     )


+class PackageVersion(Base):
+    """Immutable version record for a package-artifact relationship.
+
+    Separates versions (immutable, set at upload) from tags (mutable labels).
+    Each artifact in a package can have at most one version.
+    """
+
+    __tablename__ = "package_versions"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    package_id = Column(
+        UUID(as_uuid=True),
+        ForeignKey("packages.id", ondelete="CASCADE"),
+        nullable=False,
+    )
+    artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
+    version = Column(String(255), nullable=False)
+    version_source = Column(String(50))  # 'explicit', 'filename', 'metadata', 'migrated_from_tag'
+    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
+    created_by = Column(String(255), nullable=False)
+
+    package = relationship("Package", back_populates="versions")
+    artifact = relationship("Artifact", back_populates="versions")
+
+    __table_args__ = (
+        Index("idx_package_versions_package_id", "package_id"),
+        Index("idx_package_versions_artifact_id", "artifact_id"),
+        Index("idx_package_versions_package_version", "package_id", "version", unique=True),
+        Index("idx_package_versions_package_artifact", "package_id", "artifact_id", unique=True),
+    )
+
+
 class Upload(Base):
     __tablename__ = "uploads"
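
A short sketch (not in the diff) of the immutability the unique indexes enforce: inserting the same `(package_id, version)` pair twice fails at the database level. It assumes `PackageVersion` is imported from the models module; the session and IDs are illustrative.

```python
# Hedged sketch: a second record for the same package/version violates
# idx_package_versions_package_version and is rejected by PostgreSQL.
from sqlalchemy.exc import IntegrityError

def demo(session, package_id, artifact_a, artifact_b):
    session.add(PackageVersion(package_id=package_id, artifact_id=artifact_a,
                               version="1.0.0", created_by="user"))
    session.commit()

    session.add(PackageVersion(package_id=package_id, artifact_id=artifact_b,
                               version="1.0.0", created_by="user"))
    try:
        session.commit()
    except IntegrityError:
        session.rollback()
        print("version 1.0.0 already exists for this package")
```
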
@@ -302,20 +342,104 @@ class AccessPermission(Base):

     )


+class User(Base):
+    """User account for authentication."""
+
+    __tablename__ = "users"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    username = Column(String(255), unique=True, nullable=False)
+    password_hash = Column(String(255))  # NULL if OIDC-only user
+    email = Column(String(255))
+    is_admin = Column(Boolean, default=False)
+    is_active = Column(Boolean, default=True)
+    must_change_password = Column(Boolean, default=False)
+    oidc_subject = Column(String(255))  # OIDC subject claim
+    oidc_issuer = Column(String(512))  # OIDC issuer URL
+    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
+    updated_at = Column(
+        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
+    )
+    last_login = Column(DateTime(timezone=True))
+
+    # Relationships
+    api_keys = relationship(
+        "APIKey", back_populates="owner", cascade="all, delete-orphan"
+    )
+    sessions = relationship(
+        "Session", back_populates="user", cascade="all, delete-orphan"
+    )
+
+    __table_args__ = (
+        Index("idx_users_username", "username"),
+        Index("idx_users_email", "email"),
+        Index("idx_users_oidc_subject", "oidc_subject"),
+    )
+
+
+class Session(Base):
+    """User session for web login."""
+
+    __tablename__ = "sessions"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    user_id = Column(
+        UUID(as_uuid=True),
+        ForeignKey("users.id", ondelete="CASCADE"),
+        nullable=False,
+    )
+    token_hash = Column(String(64), unique=True, nullable=False)
+    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
+    expires_at = Column(DateTime(timezone=True), nullable=False)
+    last_accessed = Column(DateTime(timezone=True), default=datetime.utcnow)
+    user_agent = Column(String(512))
+    ip_address = Column(String(45))
+
+    user = relationship("User", back_populates="sessions")
+
+    __table_args__ = (
+        Index("idx_sessions_user_id", "user_id"),
+        Index("idx_sessions_token_hash", "token_hash"),
+        Index("idx_sessions_expires_at", "expires_at"),
+    )
+
+
+class AuthSettings(Base):
+    """Authentication settings for OIDC configuration."""
+
+    __tablename__ = "auth_settings"
+
+    key = Column(String(255), primary_key=True)
+    value = Column(Text, nullable=False)
+    updated_at = Column(DateTime(timezone=True), default=datetime.utcnow)
+
+
 class APIKey(Base):
     __tablename__ = "api_keys"

     id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
     key_hash = Column(String(64), unique=True, nullable=False)
     name = Column(String(255), nullable=False)
-    user_id = Column(String(255), nullable=False)
+    user_id = Column(
+        String(255), nullable=False
+    )  # Legacy field, kept for compatibility
+    owner_id = Column(
+        UUID(as_uuid=True),
+        ForeignKey("users.id", ondelete="CASCADE"),
+        nullable=True,  # Nullable for migration compatibility
+    )
+    description = Column(Text)
+    scopes = Column(ARRAY(String), default=["read", "write"])
     created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
     expires_at = Column(DateTime(timezone=True))
     last_used = Column(DateTime(timezone=True))
+
+    owner = relationship("User", back_populates="api_keys")

     __table_args__ = (
         Index("idx_api_keys_user_id", "user_id"),
         Index("idx_api_keys_key_hash", "key_hash"),
+        Index("idx_api_keys_owner_id", "owner_id"),
     )
@@ -386,3 +510,54 @@ class PackageHistory(Base):

     Index("idx_package_history_changed_at", "changed_at"),
     Index("idx_package_history_package_changed_at", "package_id", "changed_at"),
 )


+class ArtifactDependency(Base):
+    """Dependency declared by an artifact on another package.
+
+    Each artifact can declare dependencies on other packages, specifying either
+    an exact version or a tag. This enables recursive dependency resolution.
+    """
+
+    __tablename__ = "artifact_dependencies"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    artifact_id = Column(
+        String(64),
+        ForeignKey("artifacts.id", ondelete="CASCADE"),
+        nullable=False,
+    )
+    dependency_project = Column(String(255), nullable=False)
+    dependency_package = Column(String(255), nullable=False)
+    version_constraint = Column(String(255), nullable=True)
+    tag_constraint = Column(String(255), nullable=True)
+    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
+
+    # Relationship to the artifact that declares this dependency
+    artifact = relationship("Artifact", back_populates="dependencies")
+
+    __table_args__ = (
+        # Exactly one of version_constraint or tag_constraint must be set
+        CheckConstraint(
+            "(version_constraint IS NOT NULL AND tag_constraint IS NULL) OR "
+            "(version_constraint IS NULL AND tag_constraint IS NOT NULL)",
+            name="check_constraint_type",
+        ),
+        # Each artifact can only depend on a specific project/package once
+        Index(
+            "idx_artifact_dependencies_artifact_id",
+            "artifact_id",
+        ),
+        Index(
+            "idx_artifact_dependencies_target",
+            "dependency_project",
+            "dependency_package",
+        ),
+        Index(
+            "idx_artifact_dependencies_unique",
+            "artifact_id",
+            "dependency_project",
+            "dependency_package",
+            unique=True,
+        ),
+    )
backend/app/rate_limit.py (new file, 16 lines)
@@ -0,0 +1,16 @@
+"""Rate limiting configuration for Orchard API.
+
+Uses slowapi for rate limiting with IP-based keys.
+"""
+
+import os
+from slowapi import Limiter
+from slowapi.util import get_remote_address
+
+# Rate limiter - uses IP address as key
+limiter = Limiter(key_func=get_remote_address)
+
+# Rate limit strings - configurable via environment for testing
+# Default: 5 login attempts per minute per IP
+# In tests: set ORCHARD_LOGIN_RATE_LIMIT to a high value like "1000/minute"
+LOGIN_RATE_LIMIT = os.environ.get("ORCHARD_LOGIN_RATE_LIMIT", "5/minute")
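
A minimal sketch (not in the diff) of how a route would opt in to this limit. slowapi's decorator requires the `Request` object in the handler signature; the route path here is illustrative, not necessarily the project's actual login endpoint.

```python
# Hedged sketch: applying LOGIN_RATE_LIMIT to a login handler.
from fastapi import APIRouter, Request

from .rate_limit import limiter, LOGIN_RATE_LIMIT

router = APIRouter()

@router.post("/api/v1/auth/login")
@limiter.limit(LOGIN_RATE_LIMIT)  # returns 429 after 5 attempts/minute per client IP
async def login(request: Request):
    ...
```
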
File diff suppressed because it is too large
@@ -47,6 +47,13 @@ class ProjectUpdate(BaseModel):

     is_public: Optional[bool] = None


+class ProjectWithAccessResponse(ProjectResponse):
+    """Project response with user's access level included"""
+
+    access_level: Optional[str] = None  # 'read', 'write', 'admin', or None
+    is_owner: bool = False
+
+
 # Package format and platform enums
 PACKAGE_FORMATS = [
     "generic",
@@ -166,6 +173,7 @@ class TagResponse(BaseModel):

     artifact_id: str
     created_at: datetime
     created_by: str
+    version: Optional[str] = None  # Version of the artifact this tag points to

     class Config:
         from_attributes = True
@@ -180,6 +188,7 @@ class TagDetailResponse(BaseModel):

     artifact_id: str
     created_at: datetime
     created_by: str
+    version: Optional[str] = None  # Version of the artifact this tag points to
     # Artifact metadata
     artifact_size: int
     artifact_content_type: Optional[str]
@@ -376,6 +385,7 @@ class GlobalTagResponse(BaseModel):

     package_name: str
     artifact_size: Optional[int] = None
     artifact_content_type: Optional[str] = None
+    version: Optional[str] = None  # Version of the artifact this tag points to

     class Config:
         from_attributes = True
@@ -389,6 +399,8 @@ class UploadResponse(BaseModel):

     project: str
     package: str
     tag: Optional[str]
+    version: Optional[str] = None  # Version assigned to this artifact
+    version_source: Optional[str] = None  # How version was determined: 'explicit', 'filename', 'metadata'
     checksum_md5: Optional[str] = None
     checksum_sha1: Optional[str] = None
     s3_etag: Optional[str] = None
@@ -400,6 +412,9 @@ class UploadResponse(BaseModel):

     content_type: Optional[str] = None
     original_name: Optional[str] = None
     created_at: Optional[datetime] = None
+    # Upload metrics (Issue #43)
+    duration_ms: Optional[int] = None  # Upload duration in milliseconds
+    throughput_mbps: Optional[float] = None  # Upload throughput in MB/s


 # Resumable upload schemas
@@ -411,6 +426,7 @@ class ResumableUploadInitRequest(BaseModel):

     content_type: Optional[str] = None
     size: int
     tag: Optional[str] = None
+    version: Optional[str] = None  # Explicit version (auto-detected if not provided)

     @field_validator("expected_hash")
     @classmethod
@@ -465,6 +481,21 @@ class ResumableUploadStatusResponse(BaseModel):
|
|||||||
total_uploaded_bytes: int
|
total_uploaded_bytes: int
|
||||||
|
|
||||||
|
|
||||||
|
class UploadProgressResponse(BaseModel):
|
||||||
|
"""Progress information for an in-flight upload"""
|
||||||
|
|
||||||
|
upload_id: str
|
||||||
|
status: str # 'in_progress', 'completed', 'failed', 'not_found'
|
||||||
|
bytes_uploaded: int = 0
|
||||||
|
bytes_total: Optional[int] = None
|
||||||
|
percent_complete: Optional[float] = None
|
||||||
|
parts_uploaded: int = 0
|
||||||
|
parts_total: Optional[int] = None
|
||||||
|
started_at: Optional[datetime] = None
|
||||||
|
elapsed_seconds: Optional[float] = None
|
||||||
|
throughput_mbps: Optional[float] = None
|
||||||
|
|
||||||
|
|
||||||
# Consumer schemas
|
# Consumer schemas
|
||||||
class ConsumerResponse(BaseModel):
|
class ConsumerResponse(BaseModel):
|
||||||
id: UUID
|
id: UUID
|
||||||
@@ -477,6 +508,35 @@ class ConsumerResponse(BaseModel):
|
|||||||
from_attributes = True
|
from_attributes = True
|
||||||
|
|
||||||
|
|
||||||
|
# Package version schemas
|
||||||
|
class PackageVersionResponse(BaseModel):
|
||||||
|
"""Immutable version record for an artifact in a package"""
|
||||||
|
|
||||||
|
id: UUID
|
||||||
|
package_id: UUID
|
||||||
|
artifact_id: str
|
||||||
|
version: str
|
||||||
|
version_source: Optional[str] = None # 'explicit', 'filename', 'metadata', 'migrated_from_tag'
|
||||||
|
created_at: datetime
|
||||||
|
created_by: str
|
||||||
|
# Enriched fields from joins
|
||||||
|
size: Optional[int] = None
|
||||||
|
content_type: Optional[str] = None
|
||||||
|
original_name: Optional[str] = None
|
||||||
|
tags: List[str] = [] # Tag names pointing to this artifact
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
from_attributes = True
|
||||||
|
|
||||||
|
|
||||||
|
class PackageVersionDetailResponse(PackageVersionResponse):
|
||||||
|
"""Version with full artifact metadata"""
|
||||||
|
|
||||||
|
format_metadata: Optional[Dict[str, Any]] = None
|
||||||
|
checksum_md5: Optional[str] = None
|
||||||
|
checksum_sha1: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
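As a quick illustration (not part of the diff), `from_attributes = True` lets PackageVersionResponse hydrate from ORM rows; it also validates plain dicts. A self-contained sketch with a hypothetical payload whose field names mirror the schema above:

    from datetime import datetime, timezone
    from uuid import uuid4

    row = {
        "id": uuid4(),
        "package_id": uuid4(),
        "artifact_id": "a" * 64,     # placeholder content hash
        "version": "1.1.0",
        "version_source": "explicit",
        "created_at": datetime.now(timezone.utc),
        "created_by": "seed-user",
        "tags": ["v1.1.0"],
    }
    resp = PackageVersionResponse.model_validate(row)
    print(resp.version, resp.tags)  # 1.1.0 ['v1.1.0']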
 # Global search schemas
 class SearchResultProject(BaseModel):
     """Project result for global search"""
@@ -686,3 +746,310 @@ class StatsReportResponse(BaseModel):
     format: str  # "json", "csv", "markdown"
     generated_at: datetime
     content: str  # The report content
+
+
+# Authentication schemas
+class LoginRequest(BaseModel):
+    """Login request with username and password"""
+    username: str
+    password: str
+
+
+class LoginResponse(BaseModel):
+    """Login response with user info"""
+    id: UUID
+    username: str
+    email: Optional[str]
+    is_admin: bool
+    must_change_password: bool
+
+
+class ChangePasswordRequest(BaseModel):
+    """Change password request"""
+    current_password: str
+    new_password: str
+
+
+class UserResponse(BaseModel):
+    """User information response"""
+    id: UUID
+    username: str
+    email: Optional[str]
+    is_admin: bool
+    is_active: bool
+    must_change_password: bool
+    created_at: datetime
+    last_login: Optional[datetime]
+
+    class Config:
+        from_attributes = True
+
+
+class UserCreate(BaseModel):
+    """Create user request (admin only)"""
+    username: str
+    password: str
+    email: Optional[str] = None
+    is_admin: bool = False
+
+
+class UserUpdate(BaseModel):
+    """Update user request (admin only)"""
+    email: Optional[str] = None
+    is_admin: Optional[bool] = None
+    is_active: Optional[bool] = None
+
+
+class ResetPasswordRequest(BaseModel):
+    """Reset password request (admin only)"""
+    new_password: str
+
+
+class APIKeyCreate(BaseModel):
+    """Create API key request"""
+    name: str
+    description: Optional[str] = None
+    scopes: Optional[List[str]] = None
+
+
+class APIKeyResponse(BaseModel):
+    """API key response (without the secret key)"""
+    id: UUID
+    name: str
+    description: Optional[str]
+    scopes: Optional[List[str]]
+    created_at: datetime
+    expires_at: Optional[datetime]
+    last_used: Optional[datetime]
+
+    class Config:
+        from_attributes = True
+
+
+class APIKeyCreateResponse(BaseModel):
+    """API key creation response (includes the secret key - only shown once)"""
+    id: UUID
+    name: str
+    description: Optional[str]
+    scopes: Optional[List[str]]
+    key: str  # The actual API key - only returned on creation
+    created_at: datetime
+    expires_at: Optional[datetime]
+
+
+# OIDC Configuration schemas
+class OIDCConfigResponse(BaseModel):
+    """OIDC configuration response (hides client secret)"""
+    enabled: bool
+    issuer_url: str
+    client_id: str
+    has_client_secret: bool  # True if secret is configured, but don't expose it
+    scopes: List[str]
+    auto_create_users: bool
+    admin_group: str
+
+
+class OIDCConfigUpdate(BaseModel):
+    """Update OIDC configuration"""
+    enabled: Optional[bool] = None
+    issuer_url: Optional[str] = None
+    client_id: Optional[str] = None
+    client_secret: Optional[str] = None  # Only set if changing
+    scopes: Optional[List[str]] = None
+    auto_create_users: Optional[bool] = None
+    admin_group: Optional[str] = None
+
+
+class OIDCStatusResponse(BaseModel):
+    """Public OIDC status response"""
+    enabled: bool
+    issuer_url: Optional[str] = None  # Only included if enabled
+
+
+class OIDCLoginResponse(BaseModel):
+    """OIDC login initiation response"""
+    authorization_url: str
+
+
+# Access Permission schemas
+class AccessPermissionCreate(BaseModel):
+    """Grant access to a user for a project"""
+    username: str
+    level: str  # 'read', 'write', or 'admin'
+    expires_at: Optional[datetime] = None
+
+    @field_validator('level')
+    @classmethod
+    def validate_level(cls, v):
+        if v not in ('read', 'write', 'admin'):
+            raise ValueError("level must be 'read', 'write', or 'admin'")
+        return v
+
+
+class AccessPermissionUpdate(BaseModel):
+    """Update access permission"""
+    level: Optional[str] = None
+    expires_at: Optional[datetime] = None
+
+    @field_validator('level')
+    @classmethod
+    def validate_level(cls, v):
+        if v is not None and v not in ('read', 'write', 'admin'):
+            raise ValueError("level must be 'read', 'write', or 'admin'")
+        return v
+
+
+class AccessPermissionResponse(BaseModel):
+    """Access permission response"""
+    id: UUID
+    project_id: UUID
+    user_id: str
+    level: str
+    created_at: datetime
+    expires_at: Optional[datetime]
+
+    class Config:
+        from_attributes = True
+
+
+class ProjectWithAccessResponse(ProjectResponse):
+    """Project response with user's access level"""
+    user_access_level: Optional[str] = None
+
+
+# Artifact Dependency schemas
+class DependencyCreate(BaseModel):
+    """Schema for creating a dependency"""
+    project: str
+    package: str
+    version: Optional[str] = None
+    tag: Optional[str] = None
+
+    @field_validator('version', 'tag')
+    @classmethod
+    def validate_constraint(cls, v, info):
+        return v
+
+    def model_post_init(self, __context):
+        """Validate that exactly one of version or tag is set"""
+        if self.version is None and self.tag is None:
+            raise ValueError("Either 'version' or 'tag' must be specified")
+        if self.version is not None and self.tag is not None:
+            raise ValueError("Cannot specify both 'version' and 'tag'")
+
+
+class DependencyResponse(BaseModel):
+    """Schema for dependency response"""
+    id: UUID
+    artifact_id: str
+    project: str
+    package: str
+    version: Optional[str] = None
+    tag: Optional[str] = None
+    created_at: datetime
+
+    class Config:
+        from_attributes = True
+
+    @classmethod
+    def from_orm_model(cls, dep) -> "DependencyResponse":
+        """Create from ORM model with field mapping"""
+        return cls(
+            id=dep.id,
+            artifact_id=dep.artifact_id,
+            project=dep.dependency_project,
+            package=dep.dependency_package,
+            version=dep.version_constraint,
+            tag=dep.tag_constraint,
+            created_at=dep.created_at,
+        )
+
+
+class ArtifactDependenciesResponse(BaseModel):
+    """Response containing all dependencies for an artifact"""
+    artifact_id: str
+    dependencies: List[DependencyResponse]
+
+
+class DependentInfo(BaseModel):
+    """Information about an artifact that depends on a package"""
+    artifact_id: str
+    project: str
+    package: str
+    version: Optional[str] = None
+    constraint_type: str  # 'version' or 'tag'
+    constraint_value: str
+
+
+class ReverseDependenciesResponse(BaseModel):
+    """Response containing packages that depend on a given package"""
+    project: str
+    package: str
+    dependents: List[DependentInfo]
+    pagination: PaginationMeta
+
+
+class EnsureFileDependency(BaseModel):
+    """Dependency entry from orchard.ensure file"""
+    project: str
+    package: str
+    version: Optional[str] = None
+    tag: Optional[str] = None
+
+    @field_validator('version', 'tag')
+    @classmethod
+    def validate_constraint(cls, v, info):
+        return v
+
+    def model_post_init(self, __context):
+        """Validate that exactly one of version or tag is set"""
+        if self.version is None and self.tag is None:
+            raise ValueError("Either 'version' or 'tag' must be specified")
+        if self.version is not None and self.tag is not None:
+            raise ValueError("Cannot specify both 'version' and 'tag'")
+
+
+class EnsureFileContent(BaseModel):
+    """Parsed content of orchard.ensure file"""
+    dependencies: List[EnsureFileDependency] = []
+
+
+class ResolvedArtifact(BaseModel):
+    """A resolved artifact in the dependency tree"""
+    artifact_id: str
+    project: str
+    package: str
+    version: Optional[str] = None
+    tag: Optional[str] = None
+    size: int
+    download_url: str
+
+
+class DependencyResolutionResponse(BaseModel):
+    """Response from dependency resolution endpoint"""
+    requested: Dict[str, str]  # project, package, ref
+    resolved: List[ResolvedArtifact]
+    total_size: int
+    artifact_count: int
+
+
+class DependencyConflict(BaseModel):
+    """Details about a dependency conflict"""
+    project: str
+    package: str
+    requirements: List[Dict[str, Any]]  # version/tag and required_by info
+
+
+class DependencyConflictError(BaseModel):
+    """Error response for dependency conflicts"""
+    error: str = "dependency_conflict"
+    message: str
+    conflicts: List[DependencyConflict]
+
+
+class CircularDependencyError(BaseModel):
+    """Error response for circular dependencies"""
+    error: str = "circular_dependency"
+    message: str
+    cycle: List[str]  # List of "project/package" strings showing the cycle
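A short sketch of the exactly-one-constraint rule enforced by model_post_init above. The import path is an assumption, and depending on the pydantic version the error may surface directly or wrapped as a ValidationError:

    from app.schemas import DependencyCreate  # import path assumed

    DependencyCreate(project="frontend-libs", package="design-tokens", version="1.0.0")  # ok
    DependencyCreate(project="frontend-libs", package="design-tokens", tag="latest")     # ok

    try:
        # Neither version nor tag set: rejected by model_post_init
        DependencyCreate(project="frontend-libs", package="design-tokens")
    except Exception as exc:  # ValueError, possibly wrapped by pydantic
        print(exc)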
@@ -5,7 +5,7 @@ import hashlib
 import logging
 from sqlalchemy.orm import Session
 
-from .models import Project, Package, Artifact, Tag, Upload
+from .models import Project, Package, Artifact, Tag, Upload, PackageVersion, ArtifactDependency
 from .storage import get_storage
 
 logger = logging.getLogger(__name__)
@@ -74,7 +74,7 @@ TEST_PROJECTS = [
     },
 ]
 
-# Sample artifacts to create (content, tags)
+# Sample artifacts to create (content, tags, version)
 TEST_ARTIFACTS = [
     {
         "project": "frontend-libs",
@@ -83,6 +83,7 @@ TEST_ARTIFACTS = [
         "filename": "ui-components-1.0.0.js",
         "content_type": "application/javascript",
         "tags": ["v1.0.0", "latest"],
+        "version": "1.0.0",
     },
     {
         "project": "frontend-libs",
@@ -91,6 +92,7 @@ TEST_ARTIFACTS = [
         "filename": "ui-components-1.1.0.js",
         "content_type": "application/javascript",
         "tags": ["v1.1.0"],
+        "version": "1.1.0",
     },
     {
         "project": "frontend-libs",
@@ -99,6 +101,7 @@ TEST_ARTIFACTS = [
         "filename": "tokens.json",
         "content_type": "application/json",
         "tags": ["v1.0.0", "latest"],
+        "version": "1.0.0",
     },
     {
         "project": "backend-services",
@@ -107,6 +110,7 @@ TEST_ARTIFACTS = [
         "filename": "utils-2.0.0.py",
         "content_type": "text/x-python",
         "tags": ["v2.0.0", "stable", "latest"],
+        "version": "2.0.0",
     },
     {
         "project": "backend-services",
@@ -115,9 +119,21 @@ TEST_ARTIFACTS = [
         "filename": "auth-lib-1.0.0.go",
         "content_type": "text/x-go",
         "tags": ["v1.0.0", "latest"],
+        "version": "1.0.0",
     },
 ]
 
+# Dependencies to create (source artifact -> dependency)
+# Format: (source_project, source_package, source_version, dep_project, dep_package, version_constraint, tag_constraint)
+TEST_DEPENDENCIES = [
+    # ui-components v1.1.0 depends on design-tokens v1.0.0
+    ("frontend-libs", "ui-components", "1.1.0", "frontend-libs", "design-tokens", "1.0.0", None),
+    # auth-lib v1.0.0 depends on common-utils v2.0.0
+    ("backend-services", "auth-lib", "1.0.0", "backend-services", "common-utils", "2.0.0", None),
+    # auth-lib v1.0.0 also depends on design-tokens (latest tag)
+    ("backend-services", "auth-lib", "1.0.0", "frontend-libs", "design-tokens", None, "latest"),
+]
+
 
 def is_database_empty(db: Session) -> bool:
     """Check if the database has any projects."""
@@ -160,9 +176,10 @@ def seed_database(db: Session) -> None:
 
     logger.info(f"Created {len(project_map)} projects and {len(package_map)} packages")
 
-    # Create artifacts and tags
+    # Create artifacts, tags, and versions
     artifact_count = 0
     tag_count = 0
+    version_count = 0
 
     for artifact_data in TEST_ARTIFACTS:
         project = project_map[artifact_data["project"]]
@@ -184,6 +201,11 @@ def seed_database(db: Session) -> None:
             logger.warning(f"Failed to store artifact in S3: {e}")
             continue
 
+        # Calculate ref_count: tags + version (if present)
+        ref_count = len(artifact_data["tags"])
+        if artifact_data.get("version"):
+            ref_count += 1
+
         # Create artifact record
         artifact = Artifact(
             id=sha256_hash,
@@ -192,7 +214,7 @@ def seed_database(db: Session) -> None:
             original_name=artifact_data["filename"],
             created_by="seed-user",
             s3_key=s3_key,
-            ref_count=len(artifact_data["tags"]),
+            ref_count=ref_count,
         )
         db.add(artifact)
 
@@ -206,6 +228,18 @@ def seed_database(db: Session) -> None:
         db.add(upload)
         artifact_count += 1
 
+        # Create version record if specified
+        if artifact_data.get("version"):
+            version = PackageVersion(
+                package_id=package.id,
+                artifact_id=sha256_hash,
+                version=artifact_data["version"],
+                version_source="explicit",
+                created_by="seed-user",
+            )
+            db.add(version)
+            version_count += 1
+
         # Create tags
         for tag_name in artifact_data["tags"]:
             tag = Tag(
@@ -217,6 +251,40 @@ def seed_database(db: Session) -> None:
             db.add(tag)
             tag_count += 1
 
+    db.flush()
+
+    # Create dependencies
+    dependency_count = 0
+    for dep_data in TEST_DEPENDENCIES:
+        src_project, src_package, src_version, dep_project, dep_package, version_constraint, tag_constraint = dep_data
+
+        # Find the source artifact by looking up its version
+        src_pkg = package_map.get((src_project, src_package))
+        if not src_pkg:
+            logger.warning(f"Source package not found: {src_project}/{src_package}")
+            continue
+
+        # Find the artifact for this version
+        src_version_record = db.query(PackageVersion).filter(
+            PackageVersion.package_id == src_pkg.id,
+            PackageVersion.version == src_version,
+        ).first()
+
+        if not src_version_record:
+            logger.warning(f"Source version not found: {src_project}/{src_package}@{src_version}")
+            continue
+
+        # Create the dependency
+        dependency = ArtifactDependency(
+            artifact_id=src_version_record.artifact_id,
+            dependency_project=dep_project,
+            dependency_package=dep_package,
+            version_constraint=version_constraint,
+            tag_constraint=tag_constraint,
+        )
+        db.add(dependency)
+        dependency_count += 1
+
     db.commit()
-    logger.info(f"Created {artifact_count} artifacts and {tag_count} tags")
+    logger.info(f"Created {artifact_count} artifacts, {tag_count} tags, {version_count} versions, and {dependency_count} dependencies")
     logger.info("Database seeding complete")
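For context, a plausible way the seeding entry point is invoked at startup. The session factory and module paths are assumptions; both helper functions appear in the diff above:

    from app.database import SessionLocal  # assumed session factory
    from app.seed import is_database_empty, seed_database  # module path assumed

    db = SessionLocal()
    try:
        # Only seed a fresh database; never overwrite existing data.
        if is_database_empty(db):
            seed_database(db)
    finally:
        db.close()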
@@ -6,7 +6,6 @@ from typing import (
     Optional,
     Dict,
     Any,
-    Generator,
     NamedTuple,
     Protocol,
     runtime_checkable,
@@ -243,15 +242,19 @@ class S3Storage:
             },
         )
 
-        self.client = boto3.client(
-            "s3",
-            endpoint_url=settings.s3_endpoint if settings.s3_endpoint else None,
-            region_name=settings.s3_region,
-            aws_access_key_id=settings.s3_access_key_id,
-            aws_secret_access_key=settings.s3_secret_access_key,
-            config=config,
-            verify=settings.s3_verify_ssl,  # SSL/TLS verification
-        )
+        # Build client kwargs - only include credentials if explicitly provided
+        # This allows IRSA/IAM role credentials to be used when no explicit creds are set
+        client_kwargs = {
+            "endpoint_url": settings.s3_endpoint if settings.s3_endpoint else None,
+            "region_name": settings.s3_region,
+            "config": config,
+            "verify": settings.s3_verify_ssl,
+        }
+        if settings.s3_access_key_id and settings.s3_secret_access_key:
+            client_kwargs["aws_access_key_id"] = settings.s3_access_key_id
+            client_kwargs["aws_secret_access_key"] = settings.s3_secret_access_key
+
+        self.client = boto3.client("s3", **client_kwargs)
         self.bucket = settings.s3_bucket
         # Store active multipart uploads for resumable support
         self._active_uploads: Dict[str, Dict[str, Any]] = {}
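The point of the kwargs change is that boto3 falls back to its default credential chain (environment variables, shared config, then container or instance roles such as IRSA on EKS) whenever no explicit keys are passed. A minimal sketch of the two paths; the key strings are placeholders:

    import boto3

    # Explicit credentials: static keys take precedence over everything else.
    client = boto3.client(
        "s3",
        aws_access_key_id="AKIA_PLACEHOLDER",       # placeholder, not a real key
        aws_secret_access_key="SECRET_PLACEHOLDER",
    )

    # No credentials passed: boto3 resolves env vars, ~/.aws/config,
    # and finally the pod/instance role automatically.
    client = boto3.client("s3", region_name="us-east-1")  # example region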
@@ -379,10 +382,16 @@ class S3Storage:
         """
         # First pass: compute all hashes by streaming through file
         try:
+            import time
             sha256_hasher = hashlib.sha256()
             md5_hasher = hashlib.md5()
             sha1_hasher = hashlib.sha1()
             size = 0
+            hash_start_time = time.time()
+            last_log_time = hash_start_time
+            log_interval_seconds = 5  # Log progress every 5 seconds
+
+            logger.info(f"Computing hashes for large file: expected_size={content_length}")
+
             # Read file in chunks to compute hashes
             while True:
@@ -394,6 +403,18 @@ class S3Storage:
                 sha1_hasher.update(chunk)
                 size += len(chunk)
+
+                # Log hash computation progress periodically
+                current_time = time.time()
+                if current_time - last_log_time >= log_interval_seconds:
+                    elapsed = current_time - hash_start_time
+                    percent = (size / content_length) * 100 if content_length > 0 else 0
+                    throughput = (size / (1024 * 1024)) / elapsed if elapsed > 0 else 0
+                    logger.info(
+                        f"Hash computation progress: bytes={size}/{content_length} ({percent:.1f}%) "
+                        f"throughput={throughput:.2f}MB/s"
+                    )
+                    last_log_time = current_time
+
                 # Enforce file size limit during streaming (protection against spoofing)
                 if size > settings.max_file_size:
                     raise FileSizeExceededError(
@@ -406,6 +427,14 @@ class S3Storage:
             sha256_hash = sha256_hasher.hexdigest()
             md5_hash = md5_hasher.hexdigest()
             sha1_hash = sha1_hasher.hexdigest()
+
+            # Log hash computation completion
+            hash_elapsed = time.time() - hash_start_time
+            hash_throughput = (size / (1024 * 1024)) / hash_elapsed if hash_elapsed > 0 else 0
+            logger.info(
+                f"Hash computation completed: hash={sha256_hash[:16]}... "
+                f"size={size} duration={hash_elapsed:.2f}s throughput={hash_throughput:.2f}MB/s"
+            )
         except (HashComputationError, FileSizeExceededError):
             raise
         except Exception as e:
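The throughput figures in these log lines are plain MB-per-second arithmetic; a quick worked check of the formula used above:

    size = 500 * 1024 * 1024   # 500 MB hashed so far
    elapsed = 10.0             # seconds since hash_start_time
    throughput = (size / (1024 * 1024)) / elapsed
    print(f"{throughput:.2f}MB/s")  # -> 50.00MB/s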
@@ -459,8 +488,19 @@ class S3Storage:
         upload_id = mpu["UploadId"]
 
         try:
+            import time
             parts = []
             part_number = 1
+            bytes_uploaded = 0
+            upload_start_time = time.time()
+            last_log_time = upload_start_time
+            log_interval_seconds = 5  # Log progress every 5 seconds
+
+            total_parts = (content_length + MULTIPART_CHUNK_SIZE - 1) // MULTIPART_CHUNK_SIZE
+            logger.info(
+                f"Starting multipart upload: hash={sha256_hash[:16]}... "
+                f"size={content_length} parts={total_parts}"
+            )
+
             while True:
                 chunk = file.read(MULTIPART_CHUNK_SIZE)
@@ -480,8 +520,32 @@ class S3Storage:
                         "ETag": response["ETag"],
                     }
                 )
+                bytes_uploaded += len(chunk)
+
+                # Log progress periodically
+                current_time = time.time()
+                if current_time - last_log_time >= log_interval_seconds:
+                    elapsed = current_time - upload_start_time
+                    percent = (bytes_uploaded / content_length) * 100
+                    throughput = (bytes_uploaded / (1024 * 1024)) / elapsed if elapsed > 0 else 0
+                    logger.info(
+                        f"Upload progress: hash={sha256_hash[:16]}... "
+                        f"part={part_number}/{total_parts} "
+                        f"bytes={bytes_uploaded}/{content_length} ({percent:.1f}%) "
+                        f"throughput={throughput:.2f}MB/s"
+                    )
+                    last_log_time = current_time
+
                 part_number += 1
 
+            # Log completion
+            total_elapsed = time.time() - upload_start_time
+            final_throughput = (content_length / (1024 * 1024)) / total_elapsed if total_elapsed > 0 else 0
+            logger.info(
+                f"Multipart upload completed: hash={sha256_hash[:16]}... "
+                f"size={content_length} duration={total_elapsed:.2f}s throughput={final_throughput:.2f}MB/s"
+            )
+
             # Complete multipart upload
             complete_response = self.client.complete_multipart_upload(
                 Bucket=self.bucket,
@@ -503,134 +567,29 @@ class S3Storage:
 
         except Exception as e:
             # Abort multipart upload on failure
-            logger.error(f"Multipart upload failed: {e}")
-            self.client.abort_multipart_upload(
-                Bucket=self.bucket,
-                Key=s3_key,
-                UploadId=upload_id,
-            )
-            raise
-
-    def store_streaming(self, chunks: Generator[bytes, None, None]) -> StorageResult:
-        """
-        Store a file from a stream of chunks.
-        First accumulates to compute hash, then uploads.
-        For truly large files, consider using initiate_resumable_upload instead.
-        """
-        # Accumulate chunks and compute all hashes
-        sha256_hasher = hashlib.sha256()
-        md5_hasher = hashlib.md5()
-        sha1_hasher = hashlib.sha1()
-        all_chunks = []
-        size = 0
-
-        for chunk in chunks:
-            sha256_hasher.update(chunk)
-            md5_hasher.update(chunk)
-            sha1_hasher.update(chunk)
-            all_chunks.append(chunk)
-            size += len(chunk)
-
-        sha256_hash = sha256_hasher.hexdigest()
-        md5_hash = md5_hasher.hexdigest()
-        sha1_hash = sha1_hasher.hexdigest()
-        s3_key = f"fruits/{sha256_hash[:2]}/{sha256_hash[2:4]}/{sha256_hash}"
-        s3_etag = None
-
-        # Check if already exists
-        if self._exists(s3_key):
-            obj_info = self.get_object_info(s3_key)
-            s3_etag = obj_info.get("etag", "").strip('"') if obj_info else None
-            return StorageResult(
-                sha256=sha256_hash,
-                size=size,
-                s3_key=s3_key,
-                md5=md5_hash,
-                sha1=sha1_hash,
-                s3_etag=s3_etag,
-                already_existed=True,
-            )
-
-        # Upload based on size
-        if size < MULTIPART_THRESHOLD:
-            content = b"".join(all_chunks)
-            response = self.client.put_object(
-                Bucket=self.bucket, Key=s3_key, Body=content
-            )
-            s3_etag = response.get("ETag", "").strip('"')
-        else:
-            # Use multipart for large files
-            mpu = self.client.create_multipart_upload(Bucket=self.bucket, Key=s3_key)
-            upload_id = mpu["UploadId"]
-
-            try:
-                parts = []
-                part_number = 1
-                buffer = b""
-
-                for chunk in all_chunks:
-                    buffer += chunk
-                    while len(buffer) >= MULTIPART_CHUNK_SIZE:
-                        part_data = buffer[:MULTIPART_CHUNK_SIZE]
-                        buffer = buffer[MULTIPART_CHUNK_SIZE:]
-
-                        response = self.client.upload_part(
-                            Bucket=self.bucket,
-                            Key=s3_key,
-                            UploadId=upload_id,
-                            PartNumber=part_number,
-                            Body=part_data,
-                        )
-                        parts.append(
-                            {
-                                "PartNumber": part_number,
-                                "ETag": response["ETag"],
-                            }
-                        )
-                        part_number += 1
-
-                # Upload remaining buffer
-                if buffer:
-                    response = self.client.upload_part(
-                        Bucket=self.bucket,
-                        Key=s3_key,
-                        UploadId=upload_id,
-                        PartNumber=part_number,
-                        Body=buffer,
-                    )
-                    parts.append(
-                        {
-                            "PartNumber": part_number,
-                            "ETag": response["ETag"],
-                        }
-                    )
-
-                complete_response = self.client.complete_multipart_upload(
-                    Bucket=self.bucket,
-                    Key=s3_key,
-                    UploadId=upload_id,
-                    MultipartUpload={"Parts": parts},
-                )
-                s3_etag = complete_response.get("ETag", "").strip('"')
-
-            except Exception as e:
-                logger.error(f"Streaming multipart upload failed: {e}")
-                self.client.abort_multipart_upload(
-                    Bucket=self.bucket,
-                    Key=s3_key,
-                    UploadId=upload_id,
-                )
-                raise
-
-        return StorageResult(
-            sha256=sha256_hash,
-            size=size,
-            s3_key=s3_key,
-            md5=md5_hash,
-            sha1=sha1_hash,
-            s3_etag=s3_etag,
-            already_existed=False,
-        )
+            error_str = str(e).lower()
+            is_client_disconnect = (
+                isinstance(e, (ConnectionResetError, BrokenPipeError)) or
+                "connection" in error_str or "broken pipe" in error_str or "reset" in error_str
+            )
+            if is_client_disconnect:
+                logger.warning(
+                    f"Multipart upload aborted (client disconnect): hash={sha256_hash[:16]}... "
+                    f"parts_uploaded={len(parts)} bytes_uploaded={bytes_uploaded}"
+                )
+            else:
+                logger.error(f"Multipart upload failed: hash={sha256_hash[:16]}... error={e}")
+
+            try:
+                self.client.abort_multipart_upload(
+                    Bucket=self.bucket,
+                    Key=s3_key,
+                    UploadId=upload_id,
+                )
+                logger.info(f"Multipart upload aborted and cleaned up: upload_id={upload_id[:16]}...")
+            except Exception as abort_error:
+                logger.error(f"Failed to abort multipart upload: {abort_error}")
+            raise
 
     def initiate_resumable_upload(self, expected_hash: str) -> Dict[str, Any]:
         """
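total_parts above uses the usual ceiling-division idiom. A quick check with made-up numbers; the real MULTIPART_CHUNK_SIZE constant is defined elsewhere in this file and is not shown in the diff, so the value here is illustrative only:

    MULTIPART_CHUNK_SIZE = 8 * 1024 * 1024  # assumed value for illustration only

    content_length = 100 * 1024 * 1024  # 100 MB payload
    total_parts = (content_length + MULTIPART_CHUNK_SIZE - 1) // MULTIPART_CHUNK_SIZE
    print(total_parts)  # 13: twelve full 8 MB parts plus one 4 MB remainder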
@@ -651,12 +610,17 @@ class S3Storage:
         mpu = self.client.create_multipart_upload(Bucket=self.bucket, Key=s3_key)
         upload_id = mpu["UploadId"]
 
+        import time
         session = {
             "upload_id": upload_id,
             "s3_key": s3_key,
             "already_exists": False,
             "parts": [],
             "expected_hash": expected_hash,
+            "started_at": time.time(),
+            "bytes_uploaded": 0,
+            "expected_size": None,  # Set when init provides size
+            "status": "in_progress",
         }
         self._active_uploads[upload_id] = session
         return session
@@ -683,10 +647,57 @@ class S3Storage:
         part_info = {
             "PartNumber": part_number,
             "ETag": response["ETag"],
+            "size": len(data),
         }
         session["parts"].append(part_info)
+        session["bytes_uploaded"] = session.get("bytes_uploaded", 0) + len(data)
         return part_info
 
+    def get_upload_progress(self, upload_id: str) -> Optional[Dict[str, Any]]:
+        """
+        Get progress information for a resumable upload.
+        Returns None if upload not found.
+        """
+        import time
+        session = self._active_uploads.get(upload_id)
+        if not session:
+            return None
+
+        bytes_uploaded = session.get("bytes_uploaded", 0)
+        expected_size = session.get("expected_size")
+        started_at = session.get("started_at")
+
+        progress = {
+            "upload_id": upload_id,
+            "status": session.get("status", "in_progress"),
+            "bytes_uploaded": bytes_uploaded,
+            "bytes_total": expected_size,
+            "parts_uploaded": len(session.get("parts", [])),
+            "parts_total": None,
+            "started_at": started_at,
+            "elapsed_seconds": None,
+            "percent_complete": None,
+            "throughput_mbps": None,
+        }
+
+        if expected_size and expected_size > 0:
+            progress["percent_complete"] = round((bytes_uploaded / expected_size) * 100, 2)
+            progress["parts_total"] = (expected_size + MULTIPART_CHUNK_SIZE - 1) // MULTIPART_CHUNK_SIZE
+
+        if started_at:
+            elapsed = time.time() - started_at
+            progress["elapsed_seconds"] = round(elapsed, 2)
+            if elapsed > 0 and bytes_uploaded > 0:
+                progress["throughput_mbps"] = round((bytes_uploaded / (1024 * 1024)) / elapsed, 2)
+
+        return progress
+
+    def set_upload_expected_size(self, upload_id: str, size: int):
+        """Set the expected size for an upload (for progress tracking)."""
+        session = self._active_uploads.get(upload_id)
+        if session:
+            session["expected_size"] = size
+
     def complete_resumable_upload(self, upload_id: str) -> Tuple[str, str]:
         """
         Complete a resumable upload.
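A sketch of how a caller might poll progress during a resumable upload. The part-upload step happens elsewhere and its method name is not visible in this hunk, so this only shows the bookkeeping calls; the hash value is a truncated placeholder:

    storage = get_storage()  # factory also used in seed.py above
    session = storage.initiate_resumable_upload(expected_hash="e3b0c4...")  # placeholder hash
    storage.set_upload_expected_size(session["upload_id"], 250 * 1024 * 1024)

    # ... parts are uploaded by the resumable-upload endpoint ...

    progress = storage.get_upload_progress(session["upload_id"])
    if progress:
        print(progress["percent_complete"], progress["throughput_mbps"])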
@@ -824,6 +835,36 @@ class S3Storage:
         except ClientError:
             return False
 
+    def delete_all(self) -> int:
+        """
+        Delete all objects in the bucket.
+
+        Returns:
+            Number of objects deleted
+        """
+        deleted_count = 0
+        try:
+            paginator = self.client.get_paginator("list_objects_v2")
+            for page in paginator.paginate(Bucket=self.bucket):
+                objects = page.get("Contents", [])
+                if not objects:
+                    continue
+
+                # Delete objects in batches of 1000 (S3 limit)
+                delete_keys = [{"Key": obj["Key"]} for obj in objects]
+                if delete_keys:
+                    self.client.delete_objects(
+                        Bucket=self.bucket, Delete={"Objects": delete_keys}
+                    )
+                    deleted_count += len(delete_keys)
+                    logger.info(f"Deleted {len(delete_keys)} objects from S3")
+
+            logger.info(f"Total objects deleted from S3: {deleted_count}")
+            return deleted_count
+        except ClientError as e:
+            logger.error(f"Failed to delete all S3 objects: {e}")
+            raise
+
     def generate_presigned_url(
         self,
         s3_key: str,
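delete_all is destructive by design and looks intended for environment resets, so callers would normally gate it. A guarded usage sketch; the environment variable is hypothetical and the module path is assumed:

    import os

    from app.storage import get_storage  # module path assumed

    if os.environ.get("ORCHARD_ENV") == "stage":  # hypothetical guard variable
        deleted = get_storage().delete_all()
        print(f"removed {deleted} objects")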
@@ -12,6 +12,8 @@ markers =
     unit: Unit tests (no external dependencies)
     integration: Integration tests (require database/storage)
     slow: Slow tests (skip with -m "not slow")
+    large: Large file tests (100MB+, skip with -m "not large")
+    concurrent: Concurrent operation tests
 
 # Coverage configuration
 [coverage:run]
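With the new markers registered, the heavy suites can be deselected; `pytest -m "not large and not slow"` on the command line, or the equivalent programmatic invocation:

    import pytest

    # Skip the 100MB+ and slow suites locally; run everything else.
    raise SystemExit(pytest.main(["-m", "not large and not slow"]))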
@@ -9,6 +9,8 @@ pydantic==2.5.3
 pydantic-settings==2.1.0
 python-jose[cryptography]==3.3.0
 passlib[bcrypt]==1.7.4
+bcrypt==4.0.1
+slowapi==0.1.9
 
 # Test dependencies
 pytest>=7.4.0
@@ -9,6 +9,37 @@ This module provides:
 
 import os
 import pytest
+
+
+# =============================================================================
+# Pytest Markers
+# =============================================================================
+
+
+def pytest_configure(config):
+    """Register custom pytest markers."""
+    config.addinivalue_line(
+        "markers",
+        "auth_intensive: marks tests that make many login requests (excluded from CI integration tests due to rate limiting)",
+    )
+    config.addinivalue_line(
+        "markers",
+        "integration: marks tests as integration tests",
+    )
+    config.addinivalue_line(
+        "markers",
+        "large: marks tests that handle large files (slow)",
+    )
+    config.addinivalue_line(
+        "markers",
+        "slow: marks tests as slow running",
+    )
+    config.addinivalue_line(
+        "markers",
+        "requires_direct_s3: marks tests that require direct S3/MinIO access (skipped in CI where S3 is not directly accessible)",
+    )
+
+
 import io
 from typing import Generator
 from unittest.mock import MagicMock
@@ -32,6 +63,8 @@ from tests.factories import (
     compute_md5,
     compute_sha1,
     upload_test_file,
+    generate_content,
+    generate_content_with_hash,
     TEST_CONTENT_HELLO,
     TEST_HASH_HELLO,
     TEST_MD5_HELLO,
@@ -179,19 +212,64 @@ def test_app():
 # =============================================================================
 
 
-@pytest.fixture
+@pytest.fixture(scope="session")
 def integration_client():
     """
-    Create a test client for integration tests.
+    Create an authenticated test client for integration tests.
 
-    Uses the real database and MinIO from docker-compose.local.yml.
+    Uses the real database and MinIO from docker-compose.local.yml or deployed environment.
+    Authenticates as admin for write operations. Session-scoped to reuse login across tests.
+
+    Environment variables:
+        ORCHARD_TEST_URL: Base URL of the Orchard server (default: http://localhost:8080)
+        ORCHARD_TEST_USERNAME: Admin username for authentication (default: admin)
+        ORCHARD_TEST_PASSWORD: Admin password for authentication (default: changeme123)
     """
-    from httpx import Client
+    import httpx
 
-    # Connect to the running orchard-server container
+    # Connect to the running orchard-server container or deployed environment
     base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
+    username = os.environ.get("ORCHARD_TEST_USERNAME", "admin")
+    password = os.environ.get("ORCHARD_TEST_PASSWORD", "changeme123")
 
-    with Client(base_url=base_url, timeout=30.0) as client:
+    with httpx.Client(base_url=base_url, timeout=30.0) as client:
+        # Login as admin to enable write operations
+        login_response = client.post(
+            "/api/v1/auth/login",
+            json={"username": username, "password": password},
+        )
+        if login_response.status_code != 200:
+            pytest.fail(
+                f"Authentication failed against {base_url}: {login_response.status_code} - {login_response.text}. "
+                f"Set ORCHARD_TEST_USERNAME and ORCHARD_TEST_PASSWORD environment variables if using non-default credentials."
+            )
+
+        # Verify cookie was set
+        if not client.cookies:
+            pytest.fail(
+                f"Login succeeded but no session cookie was set. Response headers: {login_response.headers}"
+            )
+
+        yield client
+
+
+@pytest.fixture
+def auth_client():
+    """
+    Create a function-scoped test client for authentication tests.
+
+    Unlike integration_client (session-scoped), this creates a fresh client
+    for each test. Use this for tests that manipulate authentication state
+    (login, logout, cookie clearing) to avoid polluting other tests.
+
+    Environment variables:
+        ORCHARD_TEST_URL: Base URL of the Orchard server (default: http://localhost:8080)
+    """
+    import httpx
+
+    base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
+
+    with httpx.Client(base_url=base_url, timeout=30.0) as client:
         yield client
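A minimal test using the session-scoped fixture; the /api/v1/auth/me endpoint and its fields are exercised the same way in test_auth_api.py below:

    import pytest

    @pytest.mark.integration
    def test_me_returns_admin(integration_client):
        # integration_client already holds the admin session cookie.
        response = integration_client.get("/api/v1/auth/me")
        assert response.status_code == 200
        assert response.json()["is_admin"] is True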
@@ -261,3 +339,41 @@ def test_content():
     content = f"test-content-{uuid.uuid4().hex}".encode()
     sha256 = compute_sha256(content)
     return (content, sha256)
+
+
+@pytest.fixture
+def sized_content():
+    """
+    Factory fixture for generating content of specific sizes.
+
+    Usage:
+        def test_example(sized_content):
+            content, hash = sized_content(1024)  # 1KB
+            content, hash = sized_content(1024 * 1024)  # 1MB
+    """
+    def _generate(size: int, seed: int = None):
+        return generate_content_with_hash(size, seed)
+    return _generate
+
+
+# =============================================================================
+# Size Constants for Tests
+# =============================================================================
+
+# Common file sizes for boundary testing
+SIZE_1B = 1
+SIZE_1KB = 1024
+SIZE_10KB = 10 * 1024
+SIZE_100KB = 100 * 1024
+SIZE_1MB = 1024 * 1024
+SIZE_5MB = 5 * 1024 * 1024
+SIZE_10MB = 10 * 1024 * 1024
+SIZE_50MB = 50 * 1024 * 1024
+SIZE_100MB = 100 * 1024 * 1024
+SIZE_250MB = 250 * 1024 * 1024
+SIZE_500MB = 500 * 1024 * 1024
+SIZE_1GB = 1024 * 1024 * 1024
+
+# Chunk size boundaries (based on typical S3 multipart chunk sizes)
+CHUNK_SIZE = 64 * 1024  # 64KB typical chunk
+MULTIPART_THRESHOLD = 100 * 1024 * 1024  # 100MB multipart threshold
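The new pieces compose naturally; a sketch of a large-file test tying sized_content, the size constants, and upload_test_file together, marked large so the default filters skip it. The assertion about the response echoing the content hash is an assumption, since that field of UploadResponse is not shown in this diff:

    import pytest

    from tests.conftest import SIZE_100MB       # import path assumed
    from tests.factories import upload_test_file

    @pytest.mark.integration
    @pytest.mark.large
    def test_upload_100mb(integration_client, sized_content):
        content, sha256 = sized_content(SIZE_100MB, seed=7)  # deterministic payload
        result = upload_test_file(
            integration_client, "frontend-libs", "ui-components", content,
            filename="big.bin", version="9.9.9",
        )
        assert result["version"] == "9.9.9"
        # Assumed: the upload response includes the content's SHA256 identifier.
        assert sha256 in str(result)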
@@ -97,6 +97,7 @@ def upload_test_file(
     content: bytes,
     filename: str = "test.bin",
     tag: Optional[str] = None,
+    version: Optional[str] = None,
 ) -> dict:
     """
     Helper function to upload a test file via the API.
@@ -108,6 +109,7 @@ def upload_test_file(
         content: File content as bytes
         filename: Original filename
         tag: Optional tag to assign
+        version: Optional version to assign
 
     Returns:
         The upload response as a dict
@@ -116,6 +118,8 @@ def upload_test_file(
     data = {}
     if tag:
         data["tag"] = tag
+    if version:
+        data["version"] = version
 
     response = client.post(
         f"/api/v1/project/{project}/{package}/upload",
@@ -126,6 +130,41 @@ def upload_test_file(
     return response.json()
 
 
+def generate_content(size: int, seed: Optional[int] = None) -> bytes:
+    """
+    Generate deterministic or random content of a specified size.
+
+    Args:
+        size: Size of content in bytes
+        seed: Optional seed for reproducible content (None for random)
+
+    Returns:
+        Bytes of the specified size
+    """
+    if size == 0:
+        return b""
+    if seed is not None:
+        import random
+        rng = random.Random(seed)
+        return bytes(rng.randint(0, 255) for _ in range(size))
+    return os.urandom(size)
+
+
+def generate_content_with_hash(size: int, seed: Optional[int] = None) -> tuple[bytes, str]:
+    """
+    Generate content of specified size and compute its SHA256 hash.
+
+    Args:
+        size: Size of content in bytes
+        seed: Optional seed for reproducible content
+
+    Returns:
+        Tuple of (content_bytes, sha256_hash)
+    """
+    content = generate_content(size, seed)
+    return content, compute_sha256(content)
+
+
 # =============================================================================
 # Project/Package Factories
 # =============================================================================
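Seeded generation is reproducible, which is what keeps hash assertions in large-file tests stable; a quick check of the helpers above:

    c1, h1 = generate_content_with_hash(1024, seed=42)
    c2, h2 = generate_content_with_hash(1024, seed=42)
    assert c1 == c2 and h1 == h2  # same seed, same bytes, same SHA256

    c3, h3 = generate_content_with_hash(1024)  # unseeded: os.urandom, almost surely different
    assert h3 != h1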
615
backend/tests/integration/test_auth_api.py
Normal file
615
backend/tests/integration/test_auth_api.py
Normal file
@@ -0,0 +1,615 @@
|
|||||||
|
"""Integration tests for authentication API endpoints.
|
||||||
|
|
||||||
|
Note: These tests are marked as auth_intensive because they make many login
|
||||||
|
requests. Dev/stage deployments have relaxed rate limits (1000/minute) to
|
||||||
|
allow these tests to run. Production uses strict rate limits (5/minute).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
|
||||||
|
# Mark all tests in this module as auth_intensive (informational, not excluded from CI)
|
||||||
|
pytestmark = pytest.mark.auth_intensive


class TestAuthLogin:
    """Tests for login endpoint."""

    @pytest.mark.integration
    def test_login_success(self, auth_client):
        """Test successful login with default admin credentials."""
        response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        assert response.status_code == 200
        data = response.json()
        assert data["username"] == "admin"
        assert data["is_admin"] is True
        assert "orchard_session" in response.cookies

    @pytest.mark.integration
    def test_login_invalid_password(self, auth_client):
        """Test login with wrong password."""
        response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "wrongpassword"},
        )
        assert response.status_code == 401
        assert "Invalid username or password" in response.json()["detail"]

    @pytest.mark.integration
    def test_login_nonexistent_user(self, auth_client):
        """Test login with non-existent user."""
        response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": "nonexistent", "password": "password"},
        )
        assert response.status_code == 401


class TestAuthLogout:
    """Tests for logout endpoint."""

    @pytest.mark.integration
    def test_logout_success(self, auth_client):
        """Test successful logout."""
        # First login
        login_response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        assert login_response.status_code == 200

        # Then logout
        logout_response = auth_client.post("/api/v1/auth/logout")
        assert logout_response.status_code == 200
        assert "Logged out successfully" in logout_response.json()["message"]

    @pytest.mark.integration
    def test_logout_without_session(self, auth_client):
        """Test logout without being logged in."""
        response = auth_client.post("/api/v1/auth/logout")
        # Should succeed even without session
        assert response.status_code == 200


class TestAuthMe:
    """Tests for the get-current-user endpoint."""

    @pytest.mark.integration
    def test_get_me_authenticated(self, auth_client):
        """Test getting current user when authenticated."""
        # Login first
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )

        response = auth_client.get("/api/v1/auth/me")
        assert response.status_code == 200
        data = response.json()
        assert data["username"] == "admin"
        assert data["is_admin"] is True
        assert "id" in data
        assert "created_at" in data

    @pytest.mark.integration
    def test_get_me_unauthenticated(self, auth_client):
        """Test getting current user without authentication."""
        # Clear any existing cookies
        auth_client.cookies.clear()

        response = auth_client.get("/api/v1/auth/me")
        assert response.status_code == 401
        assert "Not authenticated" in response.json()["detail"]


class TestAuthChangePassword:
    """Tests for change password endpoint.

    Note: These tests use dedicated test users instead of admin to avoid
    invalidating the integration_client session (which uses admin).
    """

    @pytest.mark.integration
    def test_change_password_success(self, auth_client):
        """Test successful password change."""
        # Login as admin to create a test user
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        test_username = f"pwchange_{uuid4().hex[:8]}"
        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "oldpassword123"},
        )

        # Login as test user
        auth_client.cookies.clear()
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "oldpassword123"},
        )

        # Change password
        response = auth_client.post(
            "/api/v1/auth/change-password",
            json={"current_password": "oldpassword123", "new_password": "newpassword123"},
        )
        assert response.status_code == 200

        # Verify old password no longer works
        auth_client.cookies.clear()
        response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "oldpassword123"},
        )
        assert response.status_code == 401

        # Verify new password works
        response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "newpassword123"},
        )
        assert response.status_code == 200

    @pytest.mark.integration
    def test_change_password_wrong_current(self, auth_client):
        """Test password change with wrong current password."""
        # Login as admin to create a test user
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        test_username = f"pwwrong_{uuid4().hex[:8]}"
        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password123"},
        )

        # Login as test user
        auth_client.cookies.clear()
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "password123"},
        )

        response = auth_client.post(
            "/api/v1/auth/change-password",
            json={"current_password": "wrongpassword", "new_password": "newpassword"},
        )
        assert response.status_code == 400
        assert "Current password is incorrect" in response.json()["detail"]


class TestAPIKeys:
    """Tests for API key management endpoints."""

    @pytest.mark.integration
    def test_create_and_list_api_key(self, auth_client):
        """Test creating and listing API keys."""
        # Login first
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )

        # Create API key
        create_response = auth_client.post(
            "/api/v1/auth/keys",
            json={"name": "test-key", "description": "Test API key"},
        )
        assert create_response.status_code == 200
        data = create_response.json()
        assert data["name"] == "test-key"
        assert data["description"] == "Test API key"
        assert "key" in data
        assert data["key"].startswith("orch_")
        key_id = data["id"]
        api_key = data["key"]

        # List API keys
        list_response = auth_client.get("/api/v1/auth/keys")
        assert list_response.status_code == 200
        keys = list_response.json()
        assert any(k["id"] == key_id for k in keys)

        # Clean up - delete the key
        auth_client.delete(f"/api/v1/auth/keys/{key_id}")

    @pytest.mark.integration
    def test_use_api_key_for_auth(self, auth_client):
        """Test using API key for authentication."""
        # Login and create API key
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        create_response = auth_client.post(
            "/api/v1/auth/keys",
            json={"name": "auth-test-key"},
        )
        api_key = create_response.json()["key"]
        key_id = create_response.json()["id"]

        # Clear cookies and use API key
        auth_client.cookies.clear()
        response = auth_client.get(
            "/api/v1/auth/me",
            headers={"Authorization": f"Bearer {api_key}"},
        )
        assert response.status_code == 200
        assert response.json()["username"] == "admin"

        # Clean up
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        auth_client.delete(f"/api/v1/auth/keys/{key_id}")

    @pytest.mark.integration
    def test_delete_api_key(self, auth_client):
        """Test revoking an API key."""
        # Login and create API key
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        create_response = auth_client.post(
            "/api/v1/auth/keys",
            json={"name": "delete-test-key"},
        )
        key_id = create_response.json()["id"]
        api_key = create_response.json()["key"]

        # Delete the key
        delete_response = auth_client.delete(f"/api/v1/auth/keys/{key_id}")
        assert delete_response.status_code == 200

        # Verify key no longer works
        auth_client.cookies.clear()
        response = auth_client.get(
            "/api/v1/auth/me",
            headers={"Authorization": f"Bearer {api_key}"},
        )
        assert response.status_code == 401


class TestAdminUserManagement:
    """Tests for admin user management endpoints."""

    @pytest.mark.integration
    def test_list_users(self, auth_client):
        """Test listing users as admin."""
        # Login as admin
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )

        response = auth_client.get("/api/v1/admin/users")
        assert response.status_code == 200
        users = response.json()
        assert len(users) >= 1
        assert any(u["username"] == "admin" for u in users)

    @pytest.mark.integration
    def test_create_user(self, auth_client):
        """Test creating a new user as admin."""
        # Login as admin
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )

        # Create new user
        test_username = f"testuser_{uuid4().hex[:8]}"
        response = auth_client.post(
            "/api/v1/admin/users",
            json={
                "username": test_username,
                "password": "testpassword",
                "email": "test@example.com",
            },
        )
        assert response.status_code == 200
        data = response.json()
        assert data["username"] == test_username
        assert data["email"] == "test@example.com"
        assert data["is_admin"] is False

        # Verify new user can login
        auth_client.cookies.clear()
        login_response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "testpassword"},
        )
        assert login_response.status_code == 200

    @pytest.mark.integration
    def test_update_user(self, auth_client):
        """Test updating a user as admin."""
        # Login as admin
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )

        # Create a test user
        test_username = f"updateuser_{uuid4().hex[:8]}"
        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password"},
        )

        # Update the user
        response = auth_client.put(
            f"/api/v1/admin/users/{test_username}",
            json={"email": "updated@example.com", "is_admin": True},
        )
        assert response.status_code == 200
        data = response.json()
        assert data["email"] == "updated@example.com"
        assert data["is_admin"] is True

    @pytest.mark.integration
    def test_reset_user_password(self, auth_client):
        """Test resetting a user's password as admin."""
        # Login as admin
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )

        # Create a test user
        test_username = f"resetuser_{uuid4().hex[:8]}"
        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "oldpassword"},
        )

        # Reset password
        response = auth_client.post(
            f"/api/v1/admin/users/{test_username}/reset-password",
            json={"new_password": "newpassword"},
        )
        assert response.status_code == 200

        # Verify new password works
        auth_client.cookies.clear()
        login_response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "newpassword"},
        )
        assert login_response.status_code == 200

    @pytest.mark.integration
    def test_non_admin_cannot_access_admin_endpoints(self, auth_client):
        """Test that non-admin users cannot access admin endpoints."""
        # Login as admin and create non-admin user
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        test_username = f"nonadmin_{uuid4().hex[:8]}"
        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password", "is_admin": False},
        )

        # Login as non-admin
        auth_client.cookies.clear()
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "password"},
        )

        # Try to access admin endpoints
        response = auth_client.get("/api/v1/admin/users")
        assert response.status_code == 403
        assert "Admin privileges required" in response.json()["detail"]


class TestSecurityEdgeCases:
    """Tests for security edge cases and validation."""

    @pytest.mark.integration
    def test_login_inactive_user(self, auth_client):
        """Test that inactive users cannot login."""
        # Login as admin and create a user
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        test_username = f"inactive_{uuid4().hex[:8]}"
        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password123"},
        )

        # Deactivate the user
        auth_client.put(
            f"/api/v1/admin/users/{test_username}",
            json={"is_active": False},
        )

        # Try to login as inactive user
        auth_client.cookies.clear()
        response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "password123"},
        )
        assert response.status_code == 401
        assert "Invalid username or password" in response.json()["detail"]

    @pytest.mark.integration
    def test_password_too_short_on_create(self, auth_client):
        """Test that short passwords are rejected when creating users."""
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )

        response = auth_client.post(
            "/api/v1/admin/users",
            json={"username": f"shortpw_{uuid4().hex[:8]}", "password": "short"},
        )
        assert response.status_code == 400
        assert "at least 8 characters" in response.json()["detail"]

    @pytest.mark.integration
    def test_password_too_short_on_change(self, auth_client):
        """Test that short passwords are rejected when changing password."""
        # Create test user
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        test_username = f"shortchange_{uuid4().hex[:8]}"
        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password123"},
        )

        # Login as test user
        auth_client.cookies.clear()
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "password123"},
        )

        response = auth_client.post(
            "/api/v1/auth/change-password",
            json={"current_password": "password123", "new_password": "short"},
        )
        assert response.status_code == 400
        assert "at least 8 characters" in response.json()["detail"]

    @pytest.mark.integration
    def test_password_too_short_on_reset(self, auth_client):
        """Test that short passwords are rejected when resetting password."""
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )

        # Create a test user first
        test_username = f"resetshort_{uuid4().hex[:8]}"
        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password123"},
        )

        response = auth_client.post(
            f"/api/v1/admin/users/{test_username}/reset-password",
            json={"new_password": "short"},
        )
        assert response.status_code == 400
        assert "at least 8 characters" in response.json()["detail"]

    @pytest.mark.integration
    def test_duplicate_username_rejected(self, auth_client):
        """Test that duplicate usernames are rejected."""
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )

        test_username = f"duplicate_{uuid4().hex[:8]}"
        # Create user first time
        response1 = auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password123"},
        )
        assert response1.status_code == 200

        # Try to create same username again
        response2 = auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password456"},
        )
        assert response2.status_code == 409
        assert "already exists" in response2.json()["detail"]

    @pytest.mark.integration
    def test_cannot_delete_other_users_api_key(self, auth_client):
        """Test that users cannot delete API keys owned by other users."""
        # Login as admin and create an API key
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        create_response = auth_client.post(
            "/api/v1/auth/keys",
            json={"name": "admin-key"},
        )
        admin_key_id = create_response.json()["id"]

        # Create a non-admin user
        test_username = f"nonadmin_{uuid4().hex[:8]}"
        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password123"},
        )

        # Login as non-admin
        auth_client.cookies.clear()
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "password123"},
        )

        # Try to delete admin's API key
        response = auth_client.delete(f"/api/v1/auth/keys/{admin_key_id}")
        assert response.status_code == 403
        assert "Cannot delete another user's API key" in response.json()["detail"]

        # Cleanup: login as admin and delete the key
        auth_client.cookies.clear()
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        auth_client.delete(f"/api/v1/auth/keys/{admin_key_id}")

    @pytest.mark.integration
    def test_sessions_invalidated_on_password_change(self, auth_client):
        """Test that all sessions are invalidated when password is changed."""
        # Create a test user
        auth_client.post(
            "/api/v1/auth/login",
            json={"username": "admin", "password": "changeme123"},
        )
        test_username = f"sessiontest_{uuid4().hex[:8]}"
        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password123"},
        )

        # Login as test user
        auth_client.cookies.clear()
        login_response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "password123"},
        )
        assert login_response.status_code == 200

        # Verify session works
        me_response = auth_client.get("/api/v1/auth/me")
        assert me_response.status_code == 200

        # Change password
        auth_client.post(
            "/api/v1/auth/change-password",
            json={"current_password": "password123", "new_password": "newpassword123"},
        )

        # Old session should be invalidated - try to access /me
        # (note: the change-password call itself may have cleared the session cookie)
        me_response2 = auth_client.get("/api/v1/auth/me")
        # This should fail because all sessions were invalidated
        assert me_response2.status_code == 401
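
        # Design note (inferred from this test, not from server docs): dropping
        # every session on password change is what keeps a stolen session
        # cookie from outliving a password rotation.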
737
backend/tests/integration/test_concurrent_operations.py
Normal file
@@ -0,0 +1,737 @@
"""
Integration tests for concurrent upload and download operations.

Tests cover:
- Concurrent uploads of different files
- Concurrent uploads of same file (deduplication race)
- Concurrent downloads of same artifact
- Concurrent downloads of different artifacts
- Mixed concurrent uploads and downloads
- Data corruption prevention under concurrency
"""

import pytest
import io
import os
from concurrent.futures import ThreadPoolExecutor, as_completed
from tests.factories import (
    compute_sha256,
    upload_test_file,
    generate_content_with_hash,
)


def get_api_key(integration_client):
    """Create an API key for concurrent test workers."""
    import uuid
    response = integration_client.post(
        "/api/v1/auth/keys",
        json={"name": f"concurrent-test-{uuid.uuid4().hex[:8]}"},
    )
    if response.status_code == 200:
        return response.json()["key"]
    return None
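

# Note on the worker pattern used throughout this module: each thread opens its
# own httpx.Client and authenticates with a Bearer API key rather than reusing
# the shared integration_client's session cookie, so workers stay independent
# of each other's cookie state. (An observation about the test design, not a
# documented httpx requirement.)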


class TestConcurrentUploads:
    """Tests for concurrent upload operations."""

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_2_concurrent_uploads_different_files(self, integration_client, test_package):
        """Test 2 concurrent uploads of different files."""
        project, package = test_package
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        files_data = [
            generate_content_with_hash(1024, seed=i) for i in range(2)
        ]

        results = []
        errors = []

        def upload_worker(idx, content, expected_hash):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    files = {
                        "file": (f"file-{idx}.bin", io.BytesIO(content), "application/octet-stream")
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"concurrent-{idx}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        result = response.json()
                        results.append((idx, result, expected_hash))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}: {response.text}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=2) as executor:
            futures = [
                executor.submit(upload_worker, i, content, hash)
                for i, (content, hash) in enumerate(files_data)
            ]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == 2

        # Verify each upload returned correct artifact_id
        for idx, result, expected_hash in results:
            assert result["artifact_id"] == expected_hash

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_5_concurrent_uploads_different_files(self, integration_client, test_package):
        """Test 5 concurrent uploads of different files."""
        project, package = test_package
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        num_files = 5
        files_data = [
            generate_content_with_hash(2048, seed=100 + i) for i in range(num_files)
        ]

        results = []
        errors = []

        def upload_worker(idx, content, expected_hash):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    files = {
                        "file": (f"file-{idx}.bin", io.BytesIO(content), "application/octet-stream")
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"concurrent5-{idx}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        result = response.json()
                        results.append((idx, result, expected_hash))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_files) as executor:
            futures = [
                executor.submit(upload_worker, i, content, hash)
                for i, (content, hash) in enumerate(files_data)
            ]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_files

        # Verify all uploads have unique artifact_ids
        artifact_ids = set(r[1]["artifact_id"] for r in results)
        assert len(artifact_ids) == num_files

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_10_concurrent_uploads_different_files(self, integration_client, test_package):
        """Test 10 concurrent uploads of different files."""
        project, package = test_package
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        num_files = 10
        files_data = [
            generate_content_with_hash(1024, seed=200 + i) for i in range(num_files)
        ]

        results = []
        errors = []

        def upload_worker(idx, content, expected_hash):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    files = {
                        "file": (f"file-{idx}.bin", io.BytesIO(content), "application/octet-stream")
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"concurrent10-{idx}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        result = response.json()
                        results.append((idx, result, expected_hash))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_files) as executor:
            futures = [
                executor.submit(upload_worker, i, content, hash)
                for i, (content, hash) in enumerate(files_data)
            ]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_files

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_concurrent_uploads_same_file_deduplication(self, integration_client, test_package):
        """Test that concurrent uploads of the same file are deduplicated correctly."""
        project, package = test_package
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        content, expected_hash = generate_content_with_hash(4096, seed=999)
        num_concurrent = 5

        results = []
        errors = []

        def upload_worker(idx):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    files = {
                        "file": (f"same-{idx}.bin", io.BytesIO(content), "application/octet-stream")
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"dedup-{idx}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        results.append(response.json())
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_concurrent) as executor:
            futures = [executor.submit(upload_worker, i) for i in range(num_concurrent)]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_concurrent

        # All should have same artifact_id
        artifact_ids = set(r["artifact_id"] for r in results)
        assert len(artifact_ids) == 1
        assert expected_hash in artifact_ids

        # Verify final ref_count equals number of uploads
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.status_code == 200
        assert response.json()["ref_count"] == num_concurrent
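
        # Interpretation (inferred from the assertions above, not from server
        # docs): artifact_id is the SHA-256 of the content, so the five
        # identical uploads collapse to a single stored artifact, while
        # ref_count counts the tag references created, one per upload.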

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_concurrent_uploads_to_different_packages(self, integration_client, test_project, unique_test_id):
        """Test concurrent uploads to different packages."""
        project = test_project
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        num_packages = 3
        package_names = []

        # Create multiple packages
        for i in range(num_packages):
            pkg_name = f"pkg-{unique_test_id}-{i}"
            response = integration_client.post(
                f"/api/v1/project/{project}/packages",
                json={"name": pkg_name, "description": f"Package {i}"},
            )
            assert response.status_code == 200
            package_names.append(pkg_name)

        files_data = [
            generate_content_with_hash(1024, seed=300 + i) for i in range(num_packages)
        ]

        results = []
        errors = []

        def upload_worker(idx, package, content, expected_hash):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    files = {
                        "file": (f"file-{idx}.bin", io.BytesIO(content), "application/octet-stream")
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": "latest"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        result = response.json()
                        results.append((package, result, expected_hash))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_packages) as executor:
            futures = [
                executor.submit(upload_worker, i, package_names[i], content, hash)
                for i, (content, hash) in enumerate(files_data)
            ]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_packages


class TestConcurrentDownloads:
    """Tests for concurrent download operations."""

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_2_concurrent_downloads_same_artifact(self, integration_client, test_package):
        """Test 2 concurrent downloads of same artifact."""
        project, package = test_package
        content, expected_hash = generate_content_with_hash(2048, seed=400)

        # Upload first
        upload_test_file(integration_client, project, package, content, tag="download-test")

        results = []
        errors = []

        def download_worker(idx):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    response = client.get(
                        f"/api/v1/project/{project}/{package}/+/download-test",
                        params={"mode": "proxy"},
                    )
                    if response.status_code == 200:
                        results.append((idx, response.content))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=2) as executor:
            futures = [executor.submit(download_worker, i) for i in range(2)]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == 2

        # All downloads should match original
        for idx, downloaded in results:
            assert downloaded == content

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_5_concurrent_downloads_same_artifact(self, integration_client, test_package):
        """Test 5 concurrent downloads of same artifact."""
        project, package = test_package
        content, expected_hash = generate_content_with_hash(4096, seed=500)

        upload_test_file(integration_client, project, package, content, tag="download5-test")

        num_downloads = 5
        results = []
        errors = []

        def download_worker(idx):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    response = client.get(
                        f"/api/v1/project/{project}/{package}/+/download5-test",
                        params={"mode": "proxy"},
                    )
                    if response.status_code == 200:
                        results.append((idx, response.content))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_downloads) as executor:
            futures = [executor.submit(download_worker, i) for i in range(num_downloads)]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_downloads

        for idx, downloaded in results:
            assert downloaded == content

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_10_concurrent_downloads_same_artifact(self, integration_client, test_package):
        """Test 10 concurrent downloads of same artifact."""
        project, package = test_package
        content, expected_hash = generate_content_with_hash(8192, seed=600)

        upload_test_file(integration_client, project, package, content, tag="download10-test")

        num_downloads = 10
        results = []
        errors = []

        def download_worker(idx):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    response = client.get(
                        f"/api/v1/project/{project}/{package}/+/download10-test",
                        params={"mode": "proxy"},
                    )
                    if response.status_code == 200:
                        results.append((idx, response.content))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_downloads) as executor:
            futures = [executor.submit(download_worker, i) for i in range(num_downloads)]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_downloads

        for idx, downloaded in results:
            assert downloaded == content

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_concurrent_downloads_different_artifacts(self, integration_client, test_package):
        """Test concurrent downloads of different artifacts."""
        project, package = test_package

        # Upload multiple files
        num_files = 5
        uploads = []
        for i in range(num_files):
            content, expected_hash = generate_content_with_hash(1024, seed=700 + i)
            upload_test_file(
                integration_client, project, package, content,
                tag=f"multi-download-{i}"
            )
            uploads.append((f"multi-download-{i}", content))

        results = []
        errors = []

        def download_worker(tag, expected_content):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    response = client.get(
                        f"/api/v1/project/{project}/{package}/+/{tag}",
                        params={"mode": "proxy"},
                    )
                    if response.status_code == 200:
                        results.append((tag, response.content, expected_content))
                    else:
                        errors.append(f"Tag {tag}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Tag {tag}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_files) as executor:
            futures = [
                executor.submit(download_worker, tag, content)
                for tag, content in uploads
            ]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_files

        for tag, downloaded, expected in results:
            assert downloaded == expected, f"Content mismatch for {tag}"


class TestMixedConcurrentOperations:
    """Tests for mixed concurrent upload and download operations."""

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_upload_while_download_in_progress(self, integration_client, test_package):
        """Test uploading while a download is in progress."""
        project, package = test_package
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        # Upload initial content
        content1, hash1 = generate_content_with_hash(10240, seed=800)  # 10KB
        upload_test_file(integration_client, project, package, content1, tag="initial")

        # New content for upload during download
        content2, hash2 = generate_content_with_hash(10240, seed=801)

        results = {"downloads": [], "uploads": []}
        errors = []

        def download_worker():
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    response = client.get(
                        f"/api/v1/project/{project}/{package}/+/initial",
                        params={"mode": "proxy"},
                    )
                    if response.status_code == 200:
                        results["downloads"].append(response.content)
                    else:
                        errors.append(f"Download: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Download: {str(e)}")

        def upload_worker():
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    files = {
                        "file": ("new.bin", io.BytesIO(content2), "application/octet-stream")
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": "during-download"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        results["uploads"].append(response.json())
                    else:
                        errors.append(f"Upload: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Upload: {str(e)}")

        with ThreadPoolExecutor(max_workers=2) as executor:
            futures = [
                executor.submit(download_worker),
                executor.submit(upload_worker),
            ]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results["downloads"]) == 1
        assert len(results["uploads"]) == 1

        # Verify download got correct content
        assert results["downloads"][0] == content1

        # Verify upload succeeded
        assert results["uploads"][0]["artifact_id"] == hash2
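
        # Because the two operations use distinct tags ("initial" vs
        # "during-download"), the download must observe content1 unchanged no
        # matter how the requests interleave; the test checks that in-flight
        # reads are isolated from concurrent writes.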

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_multiple_uploads_and_downloads_simultaneously(self, integration_client, test_package):
        """Test multiple uploads and downloads running simultaneously."""
        project, package = test_package
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        # Pre-upload some files for downloading
        existing_files = []
        for i in range(3):
            content, hash = generate_content_with_hash(2048, seed=900 + i)
            upload_test_file(integration_client, project, package, content, tag=f"existing-{i}")
            existing_files.append((f"existing-{i}", content))

        # New files for uploading
        new_files = [
            generate_content_with_hash(2048, seed=910 + i) for i in range(3)
        ]

        results = {"downloads": [], "uploads": []}
        errors = []

        def download_worker(tag, expected):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    response = client.get(
                        f"/api/v1/project/{project}/{package}/+/{tag}",
                        params={"mode": "proxy"},
                    )
                    if response.status_code == 200:
                        results["downloads"].append((tag, response.content, expected))
                    else:
                        errors.append(f"Download {tag}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Download {tag}: {str(e)}")

        def upload_worker(idx, content, expected_hash):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    files = {
                        "file": (f"new-{idx}.bin", io.BytesIO(content), "application/octet-stream")
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"new-{idx}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        results["uploads"].append((idx, response.json(), expected_hash))
                    else:
                        errors.append(f"Upload {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Upload {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=6) as executor:
            futures = []

            # Submit downloads
            for tag, content in existing_files:
                futures.append(executor.submit(download_worker, tag, content))

            # Submit uploads
            for i, (content, hash) in enumerate(new_files):
                futures.append(executor.submit(upload_worker, i, content, hash))

            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results["downloads"]) == 3
        assert len(results["uploads"]) == 3

        # Verify downloads
        for tag, downloaded, expected in results["downloads"]:
            assert downloaded == expected, f"Download mismatch for {tag}"

        # Verify uploads
        for idx, result, expected_hash in results["uploads"]:
            assert result["artifact_id"] == expected_hash

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_no_data_corruption_under_concurrency(self, integration_client, test_package):
        """Test that no data corruption occurs under concurrent operations."""
        project, package = test_package
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        # Create content with recognizable patterns
        num_files = 5
        files_data = []
        for i in range(num_files):
            # Each file has a unique repeating pattern for easy corruption detection
            pattern = bytes([i] * 256)
            content = pattern * 40  # 10KB each
            hash = compute_sha256(content)
            files_data.append((content, hash))

        results = []
        errors = []

        def upload_and_verify(idx, content, expected_hash):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    # Upload
                    files = {
                        "file": (f"pattern-{idx}.bin", io.BytesIO(content), "application/octet-stream")
                    }
                    upload_resp = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"pattern-{idx}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if upload_resp.status_code != 200:
                        errors.append(f"Upload {idx}: Status {upload_resp.status_code}")
                        return

                    upload_result = upload_resp.json()
                    if upload_result["artifact_id"] != expected_hash:
                        errors.append(f"Upload {idx}: Hash mismatch")
                        return

                    # Immediately download and verify
                    download_resp = client.get(
                        f"/api/v1/project/{project}/{package}/+/pattern-{idx}",
                        params={"mode": "proxy"},
                    )
                    if download_resp.status_code != 200:
                        errors.append(f"Download {idx}: Status {download_resp.status_code}")
                        return

                    if download_resp.content != content:
                        errors.append(f"Worker {idx}: DATA CORRUPTION DETECTED")
                        return

                    # Verify the downloaded content hash
                    downloaded_hash = compute_sha256(download_resp.content)
                    if downloaded_hash != expected_hash:
                        errors.append(f"Worker {idx}: Hash verification failed")
                        return

                    results.append(idx)

            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_files) as executor:
            futures = [
                executor.submit(upload_and_verify, i, content, hash)
                for i, (content, hash) in enumerate(files_data)
            ]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_files
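
        # The byte-for-byte comparison inside the worker already implies the
        # hash re-check that follows it; the extra verification documents the
        # end-to-end integrity intent rather than adding coverage.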
322
backend/tests/integration/test_error_handling.py
Normal file
@@ -0,0 +1,322 @@
"""
Integration tests for error handling in upload and download operations.

Tests cover:
- Timeout handling
- Invalid request handling
- Resource cleanup on failures
- Graceful error responses
"""

import pytest
import io
import time
from tests.factories import (
    compute_sha256,
    upload_test_file,
    generate_content_with_hash,
)
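
# Helper semantics assumed from usage in this module (the factories themselves
# are outside this diff): generate_content_with_hash(size, seed) returns a
# (bytes, sha256-hex) pair, and upload_test_file returns the parsed JSON body
# of a successful upload, exposing "artifact_id".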


class TestUploadErrorHandling:
    """Tests for upload error handling."""

    @pytest.mark.integration
    def test_upload_to_nonexistent_project_returns_404(
        self, integration_client, unique_test_id
    ):
        """Test upload to nonexistent project returns 404."""
        content = b"test content for nonexistent project"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/nonexistent-project-{unique_test_id}/nonexistent-pkg/upload",
            files=files,
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_upload_to_nonexistent_package_returns_404(
        self, integration_client, test_project, unique_test_id
    ):
        """Test upload to nonexistent package returns 404."""
        content = b"test content for nonexistent package"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
            files=files,
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_upload_empty_file_rejected(self, integration_client, test_package):
        """Test empty file upload is rejected."""
        project, package = test_package

        files = {"file": ("empty.bin", io.BytesIO(b""), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code in [400, 422]

    @pytest.mark.integration
    def test_upload_missing_file_returns_422(self, integration_client, test_package):
        """Test upload without file field returns 422."""
        project, package = test_package

        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            data={"tag": "no-file-provided"},
        )
        assert response.status_code == 422

    @pytest.mark.integration
    def test_upload_invalid_checksum_format_returns_400(
        self, integration_client, test_package
    ):
        """Test upload with invalid checksum format returns 400."""
        project, package = test_package
        content = b"checksum format test"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": "invalid-hash-format"},
        )
        assert response.status_code == 400

    @pytest.mark.integration
    def test_upload_checksum_mismatch_returns_422(
        self, integration_client, test_package
    ):
        """Test upload with mismatched checksum returns 422."""
        project, package = test_package
        content = b"checksum mismatch test"
        wrong_hash = "0" * 64  # Valid format but wrong hash

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": wrong_hash},
        )
        assert response.status_code == 422

    @pytest.mark.integration
    def test_upload_with_correct_checksum_succeeds(
        self, integration_client, test_package
    ):
        """Test upload with correct checksum succeeds."""
        project, package = test_package
        content = b"correct checksum test"
        correct_hash = compute_sha256(content)

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": correct_hash},
        )
        assert response.status_code == 200
        assert response.json()["artifact_id"] == correct_hash
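
        # Taken together, the three checksum tests pin down the
        # X-Checksum-SHA256 contract: malformed header -> 400, well-formed but
        # wrong digest -> 422, matching digest -> accepted with artifact_id
        # equal to the digest. A client opting in would send:
        #
        #     headers = {"X-Checksum-SHA256": compute_sha256(payload)}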


class TestDownloadErrorHandling:
    """Tests for download error handling."""

    @pytest.mark.integration
    def test_download_nonexistent_tag_returns_404(
        self, integration_client, test_package
    ):
        """Test download of nonexistent tag returns 404."""
        project, package = test_package

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/nonexistent-tag-xyz"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_nonexistent_artifact_returns_404(
        self, integration_client, test_package
    ):
        """Test download of nonexistent artifact ID returns 404."""
        project, package = test_package
        fake_hash = "a" * 64

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/artifact:{fake_hash}"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_invalid_artifact_id_format(
        self, integration_client, test_package
    ):
        """Test download with invalid artifact ID format."""
        project, package = test_package

        # Too short
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/artifact:abc123"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_from_nonexistent_project_returns_404(
        self, integration_client, unique_test_id
    ):
        """Test download from nonexistent project returns 404."""
        response = integration_client.get(
            f"/api/v1/project/nonexistent-{unique_test_id}/pkg/+/tag"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_from_nonexistent_package_returns_404(
        self, integration_client, test_project, unique_test_id
    ):
        """Test download from nonexistent package returns 404."""
        response = integration_client.get(
            f"/api/v1/project/{test_project}/nonexistent-{unique_test_id}/+/tag"
        )
        assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
class TestTimeoutBehavior:
|
||||||
|
"""Tests for timeout behavior (integration level)."""
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
@pytest.mark.slow
|
||||||
|
def test_large_upload_completes_within_reasonable_time(
|
||||||
|
self, integration_client, test_package, sized_content
|
||||||
|
):
|
||||||
|
"""Test that a 10MB upload completes within reasonable time."""
|
||||||
|
project, package = test_package
|
||||||
|
content, expected_hash = sized_content(10 * 1024 * 1024, seed=999) # 10MB
|
||||||
|
|
||||||
|
start_time = time.time()
|
||||||
|
result = upload_test_file(
|
||||||
|
integration_client, project, package, content, tag="timeout-test"
|
||||||
|
)
|
||||||
|
elapsed = time.time() - start_time
|
||||||
|
|
||||||
|
assert result["artifact_id"] == expected_hash
|
||||||
|
# Should complete within 60 seconds for 10MB on local docker
|
||||||
|
assert elapsed < 60, f"Upload took too long: {elapsed:.2f}s"
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
@pytest.mark.slow
|
||||||
|
def test_large_download_completes_within_reasonable_time(
|
||||||
|
self, integration_client, test_package, sized_content
|
||||||
|
):
|
||||||
|
"""Test that a 10MB download completes within reasonable time."""
|
||||||
|
project, package = test_package
|
||||||
|
content, expected_hash = sized_content(10 * 1024 * 1024, seed=998) # 10MB
|
||||||
|
|
||||||
|
# First upload
|
||||||
|
upload_test_file(
|
||||||
|
integration_client, project, package, content, tag="download-timeout-test"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Then download and time it
|
||||||
|
start_time = time.time()
|
||||||
|
response = integration_client.get(
|
||||||
|
f"/api/v1/project/{project}/{package}/+/download-timeout-test",
|
||||||
|
params={"mode": "proxy"},
|
||||||
|
)
|
||||||
|
elapsed = time.time() - start_time
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert len(response.content) == len(content)
|
||||||
|
# Should complete within 60 seconds for 10MB on local docker
|
||||||
|
assert elapsed < 60, f"Download took too long: {elapsed:.2f}s"
|
||||||
|
|
||||||
|
|
||||||
|
class TestResourceCleanup:
|
||||||
|
"""Tests for proper resource cleanup on failures.
|
||||||
|
|
||||||
|
Note: More comprehensive cleanup tests are in test_upload_download_api.py
|
||||||
|
(TestUploadFailureCleanup class) including S3 object cleanup verification.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_checksum_mismatch_no_orphaned_artifact(
|
||||||
|
self, integration_client, test_package, unique_test_id
|
||||||
|
):
|
||||||
|
"""Test checksum mismatch doesn't leave orphaned artifact."""
|
||||||
|
project, package = test_package
|
||||||
|
# Use unique content to ensure artifact doesn't exist from prior tests
|
||||||
|
content = f"checksum mismatch orphan test {unique_test_id}".encode()
|
||||||
|
wrong_hash = "0" * 64
|
||||||
|
actual_hash = compute_sha256(content)
|
||||||
|
|
||||||
|
# Verify artifact doesn't exist before test
|
||||||
|
pre_check = integration_client.get(f"/api/v1/artifact/{actual_hash}")
|
||||||
|
assert pre_check.status_code == 404, "Artifact should not exist before test"
|
||||||
|
|
||||||
|
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
|
||||||
|
response = integration_client.post(
|
||||||
|
f"/api/v1/project/{project}/{package}/upload",
|
||||||
|
files=files,
|
||||||
|
headers={"X-Checksum-SHA256": wrong_hash},
|
||||||
|
)
|
||||||
|
assert response.status_code == 422
|
||||||
|
|
||||||
|
# Verify no artifact was created with either hash
|
||||||
|
response1 = integration_client.get(f"/api/v1/artifact/{wrong_hash}")
|
||||||
|
response2 = integration_client.get(f"/api/v1/artifact/{actual_hash}")
|
||||||
|
assert response1.status_code == 404
|
||||||
|
assert response2.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
class TestGracefulErrorResponses:
|
||||||
|
"""Tests for graceful and informative error responses."""
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_404_response_has_detail_message(
|
||||||
|
self, integration_client, test_package
|
||||||
|
):
|
||||||
|
"""Test 404 responses include a detail message."""
|
||||||
|
project, package = test_package
|
||||||
|
|
||||||
|
response = integration_client.get(
|
||||||
|
f"/api/v1/project/{project}/{package}/+/nonexistent-tag"
|
||||||
|
)
|
||||||
|
assert response.status_code == 404
|
||||||
|
data = response.json()
|
||||||
|
assert "detail" in data
|
||||||
|
assert len(data["detail"]) > 0
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_422_response_has_detail_message(self, integration_client, test_package):
|
||||||
|
"""Test 422 responses include a detail message."""
|
||||||
|
project, package = test_package
|
||||||
|
|
||||||
|
# Upload with mismatched checksum
|
||||||
|
content = b"detail message test"
|
||||||
|
wrong_hash = "0" * 64
|
||||||
|
|
||||||
|
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
|
||||||
|
response = integration_client.post(
|
||||||
|
f"/api/v1/project/{project}/{package}/upload",
|
||||||
|
files=files,
|
||||||
|
headers={"X-Checksum-SHA256": wrong_hash},
|
||||||
|
)
|
||||||
|
assert response.status_code == 422
|
||||||
|
data = response.json()
|
||||||
|
assert "detail" in data
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_error_response_is_json(self, integration_client, unique_test_id):
|
||||||
|
"""Test error responses are valid JSON."""
|
||||||
|
response = integration_client.get(
|
||||||
|
f"/api/v1/project/nonexistent-{unique_test_id}/pkg/+/tag"
|
||||||
|
)
|
||||||
|
assert response.status_code == 404
|
||||||
|
# Should not raise exception - valid JSON
|
||||||
|
data = response.json()
|
||||||
|
assert isinstance(data, dict)
|
||||||
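
# A hedged note: the `detail` assertions above match FastAPI's default error
# shape, where raising HTTPException(status_code=404, detail="Tag not found")
# serializes to {"detail": "Tag not found"}. The backend framework itself is
# an assumption, not something these tests pin down.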
768
backend/tests/integration/test_integrity_verification.py
Normal file
@@ -0,0 +1,768 @@
"""
Integration tests for artifact integrity verification.

Tests cover:
- Round-trip verification (upload -> download -> verify hash)
- Consistency check endpoint
- Header-based verification
- Integrity verification across file sizes
- Client-side verification workflow
"""

import pytest
import io
import hashlib
from tests.factories import (
    compute_sha256,
    upload_test_file,
    generate_content_with_hash,
    s3_object_exists,
    get_s3_client,
    get_s3_bucket,
)
from tests.conftest import (
    SIZE_1KB,
    SIZE_10KB,
    SIZE_100KB,
    SIZE_1MB,
    SIZE_10MB,
)


class TestRoundTripVerification:
    """Tests for complete round-trip integrity verification."""

    @pytest.mark.integration
    def test_upload_download_hash_matches(self, integration_client, test_package):
        """Test that upload -> download round trip preserves content integrity."""
        project, package = test_package
        content = b"Round trip integrity test content"
        expected_hash = compute_sha256(content)

        # Upload and capture returned hash
        result = upload_test_file(
            integration_client, project, package, content, tag="roundtrip"
        )
        uploaded_hash = result["artifact_id"]

        # Verify upload returned correct hash
        assert uploaded_hash == expected_hash

        # Download artifact
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/roundtrip",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        # Compute hash of downloaded content
        downloaded_hash = compute_sha256(response.content)

        # All three hashes should match
        assert downloaded_hash == expected_hash
        assert downloaded_hash == uploaded_hash

    @pytest.mark.integration
    def test_upload_response_contains_hash(self, integration_client, test_package):
        """Test upload response contains artifact_id which is the SHA256 hash."""
        project, package = test_package
        content = b"Upload response hash test"
        expected_hash = compute_sha256(content)

        result = upload_test_file(integration_client, project, package, content)

        assert "artifact_id" in result
        assert result["artifact_id"] == expected_hash
        assert len(result["artifact_id"]) == 64
        assert all(c in "0123456789abcdef" for c in result["artifact_id"])

    @pytest.mark.integration
    def test_download_header_matches_artifact_id(self, integration_client, test_package):
        """Test X-Checksum-SHA256 header matches artifact ID."""
        project, package = test_package
        content = b"Header verification test"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="header-check"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/header-check",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.headers.get("X-Checksum-SHA256") == expected_hash

    @pytest.mark.integration
    def test_etag_matches_artifact_id(self, integration_client, test_package):
        """Test ETag header matches artifact ID."""
        project, package = test_package
        content = b"ETag verification test"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="etag-check"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/etag-check",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        etag = response.headers.get("ETag", "").strip('"')
        assert etag == expected_hash

    @pytest.mark.integration
    def test_artifact_endpoint_returns_correct_hash(self, integration_client, test_package):
        """Test artifact endpoint returns correct hash/ID."""
        project, package = test_package
        content = b"Artifact endpoint hash test"
        expected_hash = compute_sha256(content)

        upload_test_file(integration_client, project, package, content)

        # Query artifact directly
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.status_code == 200
        data = response.json()
        assert data["id"] == expected_hash
        assert data.get("sha256") == expected_hash


class TestClientSideVerificationWorkflow:
    """Tests for client-side verification workflow."""

    @pytest.mark.integration
    def test_client_can_verify_before_upload(self, integration_client, test_package):
        """Test client can compute hash before upload and verify response matches."""
        project, package = test_package
        content = b"Client pre-upload verification test"

        # Client computes hash locally before upload
        client_hash = compute_sha256(content)

        # Upload
        result = upload_test_file(integration_client, project, package, content)

        # Client verifies server returned the same hash
        assert result["artifact_id"] == client_hash

    @pytest.mark.integration
    def test_client_can_provide_checksum_header(self, integration_client, test_package):
        """Test client can provide X-Checksum-SHA256 header for verification."""
        project, package = test_package
        content = b"Client checksum header test"
        client_hash = compute_sha256(content)

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": client_hash},
        )
        assert response.status_code == 200
        assert response.json()["artifact_id"] == client_hash

    @pytest.mark.integration
    def test_checksum_mismatch_rejected(self, integration_client, test_package):
        """Test upload with wrong client checksum is rejected."""
        project, package = test_package
        content = b"Checksum mismatch test"
        wrong_hash = "0" * 64

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": wrong_hash},
        )
        assert response.status_code == 422

    @pytest.mark.integration
    def test_client_can_verify_after_download(self, integration_client, test_package):
        """Test client can verify downloaded content matches header hash."""
        project, package = test_package
        content = b"Client post-download verification"

        upload_test_file(
            integration_client, project, package, content, tag="verify-after"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/verify-after",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        # Client gets hash from header
        header_hash = response.headers.get("X-Checksum-SHA256")

        # Client computes hash of downloaded content
        downloaded_hash = compute_sha256(response.content)

        # Client verifies they match
        assert downloaded_hash == header_hash

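# A minimal sketch of the client-side check the workflow above exercises,
# assuming an httpx/requests-style response object; `verify_download` is an
# illustrative helper, not part of the API under test.
def verify_download(response) -> bool:
    """Return True when the downloaded bytes match the advertised hash."""
    expected = response.headers.get("X-Checksum-SHA256")
    actual = hashlib.sha256(response.content).hexdigest()
    return expected is not None and actual == expected
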

class TestIntegritySizeVariants:
    """Tests for integrity verification across different file sizes."""

    @pytest.mark.integration
    def test_integrity_1kb(self, integration_client, test_package, sized_content):
        """Test integrity verification for 1KB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1KB, seed=100)

        result = upload_test_file(
            integration_client, project, package, content, tag="int-1kb"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/int-1kb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert compute_sha256(response.content) == expected_hash
        assert response.headers.get("X-Checksum-SHA256") == expected_hash

    @pytest.mark.integration
    def test_integrity_100kb(self, integration_client, test_package, sized_content):
        """Test integrity verification for 100KB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100KB, seed=101)

        result = upload_test_file(
            integration_client, project, package, content, tag="int-100kb"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/int-100kb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert compute_sha256(response.content) == expected_hash
        assert response.headers.get("X-Checksum-SHA256") == expected_hash

    @pytest.mark.integration
    def test_integrity_1mb(self, integration_client, test_package, sized_content):
        """Test integrity verification for 1MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1MB, seed=102)

        result = upload_test_file(
            integration_client, project, package, content, tag="int-1mb"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/int-1mb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert compute_sha256(response.content) == expected_hash
        assert response.headers.get("X-Checksum-SHA256") == expected_hash

    @pytest.mark.integration
    @pytest.mark.slow
    def test_integrity_10mb(self, integration_client, test_package, sized_content):
        """Test integrity verification for 10MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_10MB, seed=103)

        result = upload_test_file(
            integration_client, project, package, content, tag="int-10mb"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/int-10mb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert compute_sha256(response.content) == expected_hash
        assert response.headers.get("X-Checksum-SHA256") == expected_hash


class TestConsistencyCheck:
    """Tests for the admin consistency check endpoint."""

    @pytest.mark.integration
    def test_consistency_check_returns_200(self, integration_client):
        """Test consistency check endpoint returns 200."""
        response = integration_client.get("/api/v1/admin/consistency-check")
        assert response.status_code == 200

    @pytest.mark.integration
    def test_consistency_check_response_format(self, integration_client):
        """Test consistency check returns expected response format."""
        response = integration_client.get("/api/v1/admin/consistency-check")
        assert response.status_code == 200
        data = response.json()

        # Check expected fields
        assert "total_artifacts_checked" in data
        assert "orphaned_s3_objects" in data
        assert "missing_s3_objects" in data
        assert "size_mismatches" in data
        assert "healthy" in data
        assert "orphaned_s3_keys" in data
        assert "missing_s3_keys" in data
        assert "size_mismatch_artifacts" in data
        # Verify types
        assert isinstance(data["total_artifacts_checked"], int)
        assert isinstance(data["orphaned_s3_objects"], int)
        assert isinstance(data["missing_s3_objects"], int)
        assert isinstance(data["size_mismatches"], int)
        assert isinstance(data["healthy"], bool)
        assert isinstance(data["orphaned_s3_keys"], list)
        assert isinstance(data["missing_s3_keys"], list)
        assert isinstance(data["size_mismatch_artifacts"], list)

    @pytest.mark.integration
    def test_consistency_check_after_upload(self, integration_client, test_package):
        """Test consistency check passes after valid upload."""
        project, package = test_package
        content = b"Consistency check test content"

        # Upload artifact
        upload_test_file(integration_client, project, package, content)

        # Run consistency check
        response = integration_client.get("/api/v1/admin/consistency-check")
        assert response.status_code == 200
        data = response.json()

        # Verify check ran and no issues
        assert data["total_artifacts_checked"] >= 1
        assert data["healthy"] is True

    @pytest.mark.integration
    def test_consistency_check_limit_parameter(self, integration_client):
        """Test consistency check respects limit parameter."""
        response = integration_client.get(
            "/api/v1/admin/consistency-check",
            params={"limit": 10}
        )
        assert response.status_code == 200
        data = response.json()

        # Lists should not exceed limit
        assert len(data["orphaned_s3_keys"]) <= 10
        assert len(data["missing_s3_keys"]) <= 10
        assert len(data["size_mismatch_artifacts"]) <= 10

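# A hedged operator-side sketch of consuming the report shape verified above;
# `client` is assumed to be any httpx-compatible client pointed at the API.
def report_consistency(client) -> None:
    data = client.get("/api/v1/admin/consistency-check").json()
    if not data["healthy"]:
        print(
            f"orphaned={data['orphaned_s3_objects']} "
            f"missing={data['missing_s3_objects']} "
            f"size_mismatches={data['size_mismatches']}"
        )
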

class TestDigestHeader:
    """Tests for RFC 3230 Digest header."""

    @pytest.mark.integration
    def test_download_includes_digest_header(self, integration_client, test_package):
        """Test download includes Digest header in RFC 3230 format."""
        project, package = test_package
        content = b"Digest header test"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="digest-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/digest-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "Digest" in response.headers

        # Verify Digest format (sha-256=base64hash)
        digest = response.headers["Digest"]
        assert digest.startswith("sha-256=")

    @pytest.mark.integration
    def test_digest_header_base64_valid(self, integration_client, test_package):
        """Test Digest header contains valid base64 encoding."""
        import base64

        project, package = test_package
        content = b"Digest base64 test"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="digest-b64"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/digest-b64",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        digest = response.headers["Digest"]
        base64_part = digest.split("=", 1)[1]

        # Should be valid base64
        try:
            decoded = base64.b64decode(base64_part)
            assert len(decoded) == 32  # SHA256 is 32 bytes
        except Exception as e:
            pytest.fail(f"Invalid base64 in Digest header: {e}")

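# The Digest header verified above carries base64 of the raw 32 SHA-256
# bytes rather than the hex string. A minimal conversion sketch
# (`digest_from_hex` is illustrative, not part of the API under test):
def digest_from_hex(sha256_hex: str) -> str:
    import base64

    return "sha-256=" + base64.b64encode(bytes.fromhex(sha256_hex)).decode()
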

class TestVerificationModes:
    """Tests for download verification modes."""

    @pytest.mark.integration
    def test_pre_verification_mode(self, integration_client, test_package):
        """Test pre-verification mode verifies before streaming."""
        project, package = test_package
        content = b"Pre-verification mode test"

        upload_test_file(
            integration_client, project, package, content, tag="pre-verify"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/pre-verify",
            params={"mode": "proxy", "verify": "true", "verify_mode": "pre"},
        )
        assert response.status_code == 200
        assert response.content == content

        # X-Verified header should be true
        assert response.headers.get("X-Verified") == "true"

    @pytest.mark.integration
    def test_stream_verification_mode(self, integration_client, test_package):
        """Test streaming verification mode."""
        project, package = test_package
        content = b"Stream verification mode test"

        upload_test_file(
            integration_client, project, package, content, tag="stream-verify"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/stream-verify",
            params={"mode": "proxy", "verify": "true", "verify_mode": "stream"},
        )
        assert response.status_code == 200
        assert response.content == content


class TestArtifactIntegrityEndpoint:
    """Tests for artifact-specific integrity operations."""

    @pytest.mark.integration
    def test_artifact_size_matches(self, integration_client, test_package):
        """Test artifact endpoint returns correct size."""
        project, package = test_package
        content = b"Artifact size test content"
        expected_size = len(content)

        result = upload_test_file(integration_client, project, package, content)
        artifact_id = result["artifact_id"]

        response = integration_client.get(f"/api/v1/artifact/{artifact_id}")
        assert response.status_code == 200
        data = response.json()
        assert data["size"] == expected_size

    @pytest.mark.integration
    def test_content_length_header_matches_size(self, integration_client, test_package):
        """Test Content-Length header matches artifact size."""
        project, package = test_package
        content = b"Content-Length header test"
        expected_size = len(content)

        upload_test_file(
            integration_client, project, package, content, tag="content-len"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/content-len",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert int(response.headers.get("Content-Length", 0)) == expected_size
        assert len(response.content) == expected_size


@pytest.mark.requires_direct_s3
class TestCorruptionDetection:
    """Tests for detecting corrupted S3 objects.

    These tests directly manipulate S3 objects to simulate corruption
    and verify that the system can detect hash mismatches.

    Note: These tests require direct S3/MinIO access and are skipped in CI
    where S3 is not directly accessible from the test runner.
    """
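
    # The tests below locate objects through the content-addressed layout
    # used throughout this file, e.g. for a 64-char hex hash h:
    #
    #     s3_key = f"fruits/{h[:2]}/{h[2:4]}/{h}"  # -> "fruits/ab/cd/abcd..."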

    @pytest.mark.integration
    def test_detection_of_corrupted_content(self, integration_client, test_package):
        """Test that corrupted S3 content is detected via hash mismatch.

        Uploads content, then directly modifies the S3 object, then
        verifies that the downloaded content hash doesn't match.
        """
        project, package = test_package
        content = b"Original content for corruption test"
        expected_hash = compute_sha256(content)

        # Upload original content
        result = upload_test_file(
            integration_client, project, package, content, tag="corrupt-test"
        )
        assert result["artifact_id"] == expected_hash

        # Get the S3 object and corrupt it
        s3_client = get_s3_client()
        bucket = get_s3_bucket()
        s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"

        # Replace with corrupted content
        corrupted_content = b"Corrupted content - different from original!"
        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=corrupted_content)

        # Download via proxy (bypasses hash verification)
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/corrupt-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        # Verify the downloaded content doesn't match original hash
        downloaded_hash = compute_sha256(response.content)
        assert downloaded_hash != expected_hash, "Corruption was not detected - hashes match"
        assert response.content == corrupted_content

        # The X-Checksum-SHA256 header should still show the original hash (from DB)
        # but the actual content hash is different
        header_hash = response.headers.get("X-Checksum-SHA256")
        assert header_hash == expected_hash  # Header shows expected hash
        assert downloaded_hash != header_hash  # But content is corrupted

        # Restore original content for cleanup
        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)

    @pytest.mark.integration
    def test_detection_of_single_bit_flip(self, integration_client, test_package):
        """Test detection of a single bit flip in S3 object content."""
        project, package = test_package
        content = b"Content for single bit flip detection test"
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content, tag="bitflip-test"
        )
        assert result["artifact_id"] == expected_hash

        # Get S3 object and flip a single bit
        s3_client = get_s3_client()
        bucket = get_s3_bucket()
        s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"

        # Flip the first bit of the first byte
        corrupted_content = bytearray(content)
        corrupted_content[0] ^= 0x01
        corrupted_content = bytes(corrupted_content)

        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=corrupted_content)

        # Download and verify hash mismatch
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/bitflip-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        downloaded_hash = compute_sha256(response.content)
        assert downloaded_hash != expected_hash, "Single bit flip not detected"

        # Restore original
        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)

    @pytest.mark.integration
    def test_detection_of_truncated_content(self, integration_client, test_package):
        """Test detection of truncated S3 object."""
        project, package = test_package
        content = b"This is content that will be truncated for testing purposes"
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content, tag="truncate-test"
        )
        assert result["artifact_id"] == expected_hash

        # Get S3 object and truncate it
        s3_client = get_s3_client()
        bucket = get_s3_bucket()
        s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"

        # Truncate to half the original size
        truncated_content = content[: len(content) // 2]
        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=truncated_content)

        # Download and verify hash mismatch
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/truncate-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        downloaded_hash = compute_sha256(response.content)
        assert downloaded_hash != expected_hash, "Truncation not detected"
        assert len(response.content) < len(content), "Content was not truncated"

        # Restore original
        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)

    @pytest.mark.integration
    def test_detection_of_appended_content(self, integration_client, test_package):
        """Test detection of content with extra bytes appended."""
        project, package = test_package
        content = b"Original content"
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content, tag="append-test"
        )
        assert result["artifact_id"] == expected_hash

        # Get S3 object and append extra bytes
        s3_client = get_s3_client()
        bucket = get_s3_bucket()
        s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"

        appended_content = content + b" - extra bytes appended"
        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=appended_content)

        # Download and verify hash mismatch
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/append-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        downloaded_hash = compute_sha256(response.content)
        assert downloaded_hash != expected_hash, "Appended content not detected"
        assert len(response.content) > len(content), "Content was not extended"

        # Restore original
        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)

    @pytest.mark.integration
    def test_client_detects_hash_mismatch_post_download(
        self, integration_client, test_package
    ):
        """Test that a client can detect hash mismatch after downloading corrupted content.

        This simulates the full client verification workflow:
        1. Download content
        2. Get expected hash from header
        3. Compute actual hash of content
        4. Verify they match (or detect corruption)
        """
        project, package = test_package
        content = b"Content for client-side corruption detection"
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content, tag="client-detect"
        )

        # Corrupt the S3 object
        s3_client = get_s3_client()
        bucket = get_s3_bucket()
        s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"
        corrupted = b"This is completely different content"
        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=corrupted)

        # Simulate client download and verification
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/client-detect",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        # Client gets expected hash from header
        header_hash = response.headers.get("X-Checksum-SHA256")

        # Client computes hash of downloaded content
        actual_hash = compute_sha256(response.content)

        # Client detects the mismatch
        corruption_detected = actual_hash != header_hash
        assert corruption_detected, "Client should detect hash mismatch"

        # Restore original
        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)

    @pytest.mark.integration
    def test_consistency_check_detects_size_mismatch(
        self, integration_client, test_package, unique_test_id
    ):
        """Test that consistency check detects size mismatches.

        Uploads content, modifies S3 object size, then runs consistency check.
        """
        project, package = test_package
        content = b"Content for size mismatch consistency check test " + unique_test_id.encode()
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content, tag="size-mismatch"
        )

        # Modify S3 object to have different size
        s3_client = get_s3_client()
        bucket = get_s3_bucket()
        s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"
        different_size_content = content + b"extra extra extra"
        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=different_size_content)

        # Run consistency check
        response = integration_client.get("/api/v1/admin/consistency-check")
        assert response.status_code == 200
        data = response.json()

        # Should detect the size mismatch
        assert data["size_mismatches"] >= 1 or len(data["size_mismatch_artifacts"]) >= 1

        # Restore original
        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)

    @pytest.mark.integration
    def test_consistency_check_detects_missing_s3_object(
        self, integration_client, test_package, unique_test_id
    ):
        """Test that consistency check detects missing S3 objects.

        Uploads content, deletes S3 object, then runs consistency check.
        """
        project, package = test_package
        content = b"Content for missing S3 object test " + unique_test_id.encode()
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content, tag="missing-s3"
        )

        # Delete the S3 object
        s3_client = get_s3_client()
        bucket = get_s3_bucket()
        s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"
        s3_client.delete_object(Bucket=bucket, Key=s3_key)

        # Run consistency check
        response = integration_client.get("/api/v1/admin/consistency-check")
        assert response.status_code == 200
        data = response.json()

        # Should detect the missing S3 object
        assert data["missing_s3_objects"] >= 1 or len(data["missing_s3_keys"]) >= 1

        # Restore the object for cleanup
        s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)
552
backend/tests/integration/test_large_uploads.py
Normal file
@@ -0,0 +1,552 @@
"""
Integration tests for large file upload functionality.

Tests cover:
- Large file uploads (100MB, 1GB)
- Multipart upload behavior
- Upload metrics (duration, throughput)
- Memory efficiency during uploads
- Upload progress tracking

Note: Large tests are marked with @pytest.mark.slow and will be skipped
by default. Run with `pytest --run-slow` to include them.
"""

import os
import pytest
import io
import time
from tests.factories import (
    compute_sha256,
    upload_test_file,
    s3_object_exists,
)
from tests.conftest import (
    SIZE_1KB,
    SIZE_100KB,
    SIZE_1MB,
    SIZE_10MB,
    SIZE_100MB,
    SIZE_1GB,
)
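
# The `--run-slow` opt-in mentioned in the docstring is assumed to follow the
# standard pytest pattern in conftest.py — a hedged sketch, not necessarily
# the project's actual hook:
#
#     def pytest_addoption(parser):
#         parser.addoption("--run-slow", action="store_true", default=False)
#
#     def pytest_collection_modifyitems(config, items):
#         if config.getoption("--run-slow"):
#             return
#         skip_slow = pytest.mark.skip(reason="needs --run-slow")
#         for item in items:
#             if "slow" in item.keywords:
#                 item.add_marker(skip_slow)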


class TestUploadMetrics:
    """Tests for upload duration and throughput metrics."""

    @pytest.mark.integration
    def test_upload_response_includes_duration_ms(self, integration_client, test_package):
        """Test upload response includes duration_ms field."""
        project, package = test_package
        content = b"duration test content"

        result = upload_test_file(
            integration_client, project, package, content, tag="duration-test"
        )

        assert "duration_ms" in result
        assert result["duration_ms"] is not None
        assert result["duration_ms"] >= 0

    @pytest.mark.integration
    def test_upload_response_includes_throughput(self, integration_client, test_package):
        """Test upload response includes throughput_mbps field."""
        project, package = test_package
        content = b"throughput test content"

        result = upload_test_file(
            integration_client, project, package, content, tag="throughput-test"
        )

        assert "throughput_mbps" in result
        # For small files throughput may be very high or None
        # Just verify the field exists

    @pytest.mark.integration
    def test_upload_duration_reasonable(
        self, integration_client, test_package, sized_content
    ):
        """Test upload duration is reasonable for file size."""
        project, package = test_package
        content, _ = sized_content(SIZE_1MB, seed=100)

        start = time.time()
        result = upload_test_file(
            integration_client, project, package, content, tag="duration-check"
        )
        actual_duration = (time.time() - start) * 1000  # ms

        # Reported duration should be close to actual
        assert result["duration_ms"] is not None
        # Allow some variance (network overhead)
        assert result["duration_ms"] <= actual_duration + 1000  # Within 1s

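# How throughput_mbps plausibly relates to the other fields — a hedged sketch;
# the server's exact definition (bits vs. bytes per second) is not pinned
# down by these tests:
#
#     size_bytes = result["size"]
#     throughput_mbps = (size_bytes * 8 / 1_000_000) / (result["duration_ms"] / 1000)
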
|
||||||
|
|
||||||
|
class TestLargeFileUploads:
|
||||||
|
"""Tests for large file uploads using multipart."""
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_upload_10mb_file(self, integration_client, test_package, sized_content):
|
||||||
|
"""Test uploading a 10MB file."""
|
||||||
|
project, package = test_package
|
||||||
|
content, expected_hash = sized_content(SIZE_10MB, seed=200)
|
||||||
|
|
||||||
|
result = upload_test_file(
|
||||||
|
integration_client, project, package, content, tag="large-10mb"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result["artifact_id"] == expected_hash
|
||||||
|
assert result["size"] == SIZE_10MB
|
||||||
|
assert result["duration_ms"] is not None
|
||||||
|
assert result["throughput_mbps"] is not None
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
@pytest.mark.slow
|
||||||
|
@pytest.mark.requires_direct_s3
|
||||||
|
def test_upload_100mb_file(self, integration_client, test_package, sized_content):
|
||||||
|
"""Test uploading a 100MB file (triggers multipart upload)."""
|
||||||
|
project, package = test_package
|
||||||
|
content, expected_hash = sized_content(SIZE_100MB, seed=300)
|
||||||
|
|
||||||
|
result = upload_test_file(
|
||||||
|
integration_client, project, package, content, tag="large-100mb"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result["artifact_id"] == expected_hash
|
||||||
|
assert result["size"] == SIZE_100MB
|
||||||
|
# Verify S3 object exists
|
||||||
|
assert s3_object_exists(expected_hash)
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
@pytest.mark.slow
|
||||||
|
@pytest.mark.large
|
||||||
|
def test_upload_1gb_file(self, integration_client, test_package, sized_content):
|
||||||
|
"""Test uploading a 1GB file."""
|
||||||
|
project, package = test_package
|
||||||
|
content, expected_hash = sized_content(SIZE_1GB, seed=400)
|
||||||
|
|
||||||
|
result = upload_test_file(
|
||||||
|
integration_client, project, package, content, tag="large-1gb"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result["artifact_id"] == expected_hash
|
||||||
|
assert result["size"] == SIZE_1GB
|
||||||
|
# Should have measurable throughput
|
||||||
|
assert result["throughput_mbps"] is not None
|
||||||
|
assert result["throughput_mbps"] > 0
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_large_file_deduplication(
|
||||||
|
self, integration_client, test_package, sized_content, unique_test_id
|
||||||
|
):
|
||||||
|
"""Test deduplication works for large files."""
|
||||||
|
project, package = test_package
|
||||||
|
# Use unique_test_id to ensure unique content per test run
|
||||||
|
seed = hash(unique_test_id) % 10000
|
||||||
|
content, expected_hash = sized_content(SIZE_10MB, seed=seed)
|
||||||
|
|
||||||
|
# First upload
|
||||||
|
result1 = upload_test_file(
|
||||||
|
integration_client, project, package, content, tag=f"dedup-{unique_test_id}-1"
|
||||||
|
)
|
||||||
|
# Note: may be True if previous test uploaded same content
|
||||||
|
first_dedupe = result1["deduplicated"]
|
||||||
|
|
||||||
|
# Second upload of same content
|
||||||
|
result2 = upload_test_file(
|
||||||
|
integration_client, project, package, content, tag=f"dedup-{unique_test_id}-2"
|
||||||
|
)
|
||||||
|
assert result2["artifact_id"] == expected_hash
|
||||||
|
# Second upload MUST be deduplicated
|
||||||
|
assert result2["deduplicated"] is True
|
||||||
|
|
||||||
|
|
||||||
|
class TestUploadProgress:
|
||||||
|
"""Tests for upload progress tracking endpoint."""
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_progress_endpoint_returns_not_found_for_invalid_id(
|
||||||
|
self, integration_client, test_package
|
||||||
|
):
|
||||||
|
"""Test progress endpoint returns not_found status for invalid upload ID."""
|
||||||
|
project, package = test_package
|
||||||
|
|
||||||
|
response = integration_client.get(
|
||||||
|
f"/api/v1/project/{project}/{package}/upload/invalid-upload-id/progress"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["status"] == "not_found"
|
||||||
|
assert data["upload_id"] == "invalid-upload-id"
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_progress_endpoint_requires_valid_project(
|
||||||
|
self, integration_client, unique_test_id
|
||||||
|
):
|
||||||
|
"""Test progress endpoint validates project exists."""
|
||||||
|
response = integration_client.get(
|
||||||
|
f"/api/v1/project/nonexistent-{unique_test_id}/pkg/upload/upload-id/progress"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_progress_endpoint_requires_valid_package(
|
||||||
|
self, integration_client, test_project, unique_test_id
|
||||||
|
):
|
||||||
|
"""Test progress endpoint validates package exists."""
|
||||||
|
response = integration_client.get(
|
||||||
|
f"/api/v1/project/{test_project}/nonexistent-{unique_test_id}/upload/upload-id/progress"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
class TestResumableUploadProgress:
|
||||||
|
"""Tests for progress tracking during resumable uploads."""
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_resumable_upload_init_and_progress(
|
||||||
|
self, integration_client, test_package, sized_content
|
||||||
|
):
|
||||||
|
"""Test initializing resumable upload and checking progress."""
|
||||||
|
project, package = test_package
|
||||||
|
content, expected_hash = sized_content(SIZE_100KB, seed=600)
|
||||||
|
|
||||||
|
# Get API key for auth
|
||||||
|
api_key_response = integration_client.post(
|
||||||
|
"/api/v1/auth/keys",
|
||||||
|
json={"name": "progress-test-key"},
|
||||||
|
)
|
||||||
|
assert api_key_response.status_code == 200
|
||||||
|
api_key = api_key_response.json()["key"]
|
||||||
|
|
||||||
|
# Initialize resumable upload
|
||||||
|
init_response = integration_client.post(
|
||||||
|
f"/api/v1/project/{project}/{package}/upload/init",
|
||||||
|
json={
|
||||||
|
"expected_hash": expected_hash,
|
||||||
|
"filename": "progress-test.bin",
|
||||||
|
"size": SIZE_100KB,
|
||||||
|
},
|
||||||
|
headers={"Authorization": f"Bearer {api_key}"},
|
||||||
|
)
|
||||||
|
assert init_response.status_code == 200
|
||||||
|
upload_id = init_response.json().get("upload_id")
|
||||||
|
|
||||||
|
if upload_id:
|
||||||
|
# Check initial progress
|
||||||
|
progress_response = integration_client.get(
|
||||||
|
f"/api/v1/project/{project}/{package}/upload/{upload_id}/progress",
|
||||||
|
headers={"Authorization": f"Bearer {api_key}"},
|
||||||
|
)
|
||||||
|
assert progress_response.status_code == 200
|
||||||
|
progress = progress_response.json()
|
||||||
|
assert progress["status"] == "in_progress"
|
||||||
|
assert progress["bytes_uploaded"] == 0
|
||||||
|
assert progress["bytes_total"] == SIZE_100KB
|
||||||
|
|
||||||
|
# Abort to clean up
|
||||||
|
integration_client.delete(
|
||||||
|
f"/api/v1/project/{project}/{package}/upload/{upload_id}",
|
||||||
|
headers={"Authorization": f"Bearer {api_key}"},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestUploadSizeLimits:
|
||||||
|
"""Tests for upload size limit enforcement."""
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_empty_file_rejected(self, integration_client, test_package):
|
||||||
|
"""Test empty files are rejected."""
|
||||||
|
project, package = test_package
|
||||||
|
|
||||||
|
files = {"file": ("empty.txt", io.BytesIO(b""), "application/octet-stream")}
|
||||||
|
response = integration_client.post(
|
||||||
|
f"/api/v1/project/{project}/{package}/upload",
|
||||||
|
files=files,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code in [400, 422]
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_minimum_size_accepted(self, integration_client, test_package):
|
||||||
|
"""Test 1-byte file is accepted."""
|
||||||
|
project, package = test_package
|
||||||
|
content = b"X"
|
||||||
|
|
||||||
|
result = upload_test_file(
|
||||||
|
integration_client, project, package, content, tag="min-size"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result["size"] == 1
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_content_length_header_used_in_response(self, integration_client, test_package):
|
||||||
|
"""Test that upload response size matches Content-Length."""
|
||||||
|
project, package = test_package
|
||||||
|
content = b"content length verification test"
|
||||||
|
|
||||||
|
result = upload_test_file(
|
||||||
|
integration_client, project, package, content, tag="content-length-test"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Size in response should match actual content length
|
||||||
|
assert result["size"] == len(content)
|
||||||
|
|
||||||
|
|
||||||
|
class TestUploadErrorHandling:
|
||||||
|
"""Tests for upload error handling."""
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_upload_to_nonexistent_project_returns_404(
|
||||||
|
self, integration_client, unique_test_id
|
||||||
|
):
|
||||||
|
"""Test upload to nonexistent project returns 404."""
|
||||||
|
content = b"test content"
|
||||||
|
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
|
||||||
|
|
||||||
|
response = integration_client.post(
|
||||||
|
f"/api/v1/project/nonexistent-{unique_test_id}/pkg/upload",
|
||||||
|
files=files,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
@pytest.mark.integration
|
||||||
|
def test_upload_to_nonexistent_package_returns_404(
|
||||||
|
self, integration_client, test_project, unique_test_id
|
||||||
|
):
|
||||||
|
"""Test upload to nonexistent package returns 404."""
|
||||||
|
content = b"test content"
|
||||||
|
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
|
||||||
|
|
||||||
|
response = integration_client.post(
|
||||||
|
f"/api/v1/project/{test_project}/nonexistent-{unique_test_id}/upload",
|
||||||
|
            files=files,
        )

        assert response.status_code == 404

    @pytest.mark.integration
    def test_upload_without_file_returns_422(self, integration_client, test_package):
        """Test upload without file field returns 422."""
        project, package = test_package

        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            data={"tag": "no-file"},
        )

        assert response.status_code == 422

    @pytest.mark.integration
    def test_upload_with_invalid_checksum_rejected(
        self, integration_client, test_package
    ):
        """Test upload with invalid checksum header format is rejected."""
        project, package = test_package
        content = b"checksum test"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": "invalid-checksum"},
        )

        assert response.status_code == 400

    @pytest.mark.integration
    def test_upload_with_mismatched_checksum_rejected(
        self, integration_client, test_package
    ):
        """Test upload with wrong checksum is rejected."""
        project, package = test_package
        content = b"mismatch test"
        wrong_hash = "0" * 64

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": wrong_hash},
        )

        assert response.status_code == 422
        assert "verification failed" in response.json().get("detail", "").lower()


class TestResumableUploadCancellation:
    """Tests for resumable upload cancellation."""

    @pytest.mark.integration
    def test_abort_resumable_upload(self, integration_client, test_package, sized_content):
        """Test aborting a resumable upload cleans up properly."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100KB, seed=700)

        # Get API key for auth
        api_key_response = integration_client.post(
            "/api/v1/auth/keys",
            json={"name": "abort-test-key"},
        )
        assert api_key_response.status_code == 200
        api_key = api_key_response.json()["key"]

        # Initialize resumable upload
        init_response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload/init",
            json={
                "expected_hash": expected_hash,
                "filename": "abort-test.bin",
                "size": SIZE_100KB,
            },
            headers={"Authorization": f"Bearer {api_key}"},
        )
        assert init_response.status_code == 200
        upload_id = init_response.json().get("upload_id")

        if upload_id:
            # Abort the upload (without uploading any parts)
            abort_response = integration_client.delete(
                f"/api/v1/project/{project}/{package}/upload/{upload_id}",
                headers={"Authorization": f"Bearer {api_key}"},
            )
            assert abort_response.status_code in [200, 204]

            # Verify progress shows not_found after abort
            progress_response = integration_client.get(
                f"/api/v1/project/{project}/{package}/upload/{upload_id}/progress",
                headers={"Authorization": f"Bearer {api_key}"},
            )
            assert progress_response.status_code == 200
            assert progress_response.json()["status"] == "not_found"

    @pytest.mark.integration
    def test_abort_nonexistent_upload(self, integration_client, test_package):
        """Test aborting nonexistent upload returns appropriate error."""
        project, package = test_package

        # Get API key for auth
        api_key_response = integration_client.post(
            "/api/v1/auth/keys",
            json={"name": "abort-nonexistent-key"},
        )
        assert api_key_response.status_code == 200
        api_key = api_key_response.json()["key"]

        response = integration_client.delete(
            f"/api/v1/project/{project}/{package}/upload/nonexistent-upload-id",
            headers={"Authorization": f"Bearer {api_key}"},
        )

        # Should return 404, or 200/204 (idempotent delete)
        assert response.status_code in [200, 204, 404]
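# Illustrative sketch (not part of this diff): the cancellation flow exercised
# above, expressed as a standalone helper. It assumes only the endpoints
# visible in these tests (POST .../upload/init, DELETE .../upload/{id},
# GET .../upload/{id}/progress) and bearer-token auth; the helper name and
# base_url handling are hypothetical.
import httpx

def abort_resumable_upload(base_url: str, api_key: str, project: str,
                           package: str, upload_id: str) -> bool:
    """Abort an in-flight resumable upload and confirm the server forgot it."""
    headers = {"Authorization": f"Bearer {api_key}"}
    with httpx.Client(base_url=base_url, headers=headers, timeout=30.0) as client:
        # The tests treat delete as idempotent: 200 or 204 both pass.
        resp = client.delete(f"/api/v1/project/{project}/{package}/upload/{upload_id}")
        assert resp.status_code in (200, 204)
        # After a successful abort, the progress endpoint reports "not_found".
        progress = client.get(
            f"/api/v1/project/{project}/{package}/upload/{upload_id}/progress"
        )
        return progress.json().get("status") == "not_found"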
class TestUploadTimeout:
    """Tests for upload timeout handling."""

    @pytest.mark.integration
    def test_upload_with_short_timeout_succeeds_for_small_file(
        self, integration_client, test_package
    ):
        """Test small file upload succeeds with reasonable timeout."""
        project, package = test_package
        content = b"small timeout test"

        # httpx client should handle this quickly
        result = upload_test_file(
            integration_client, project, package, content, tag="timeout-small"
        )

        assert result["artifact_id"] is not None

    @pytest.mark.integration
    def test_upload_response_duration_under_timeout(
        self, integration_client, test_package, sized_content
    ):
        """Test upload completes within reasonable time."""
        project, package = test_package
        content, _ = sized_content(SIZE_1MB, seed=800)

        start = time.time()
        result = upload_test_file(
            integration_client, project, package, content, tag="timeout-check"
        )
        duration = time.time() - start

        # 1MB should upload in well under 60 seconds on local
        assert duration < 60
        assert result["artifact_id"] is not None


class TestConcurrentUploads:
    """Tests for concurrent upload handling."""

    @pytest.mark.integration
    def test_concurrent_different_files(
        self, integration_client, test_package, sized_content
    ):
        """Test concurrent uploads of different files succeed."""
        from concurrent.futures import ThreadPoolExecutor, as_completed

        project, package = test_package

        # Get API key for auth
        api_key_response = integration_client.post(
            "/api/v1/auth/keys",
            json={"name": "concurrent-diff-key"},
        )
        assert api_key_response.status_code == 200
        api_key = api_key_response.json()["key"]

        num_uploads = 3
        results = []
        errors = []

        def upload_unique_file(idx):
            try:
                from httpx import Client

                content, expected_hash = sized_content(SIZE_100KB, seed=900 + idx)

                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
                with Client(base_url=base_url, timeout=30.0) as client:
                    files = {
                        "file": (
                            f"concurrent-{idx}.bin",
                            io.BytesIO(content),
                            "application/octet-stream",
                        )
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"concurrent-diff-{idx}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        results.append((idx, response.json(), expected_hash))
                    else:
                        errors.append(f"Upload {idx}: {response.status_code} - {response.text}")
            except Exception as e:
                errors.append(f"Upload {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_uploads) as executor:
            futures = [executor.submit(upload_unique_file, i) for i in range(num_uploads)]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Concurrent upload errors: {errors}"
        assert len(results) == num_uploads

        # Each upload should have unique artifact ID
        artifact_ids = set(r[1]["artifact_id"] for r in results)
        assert len(artifact_ids) == num_uploads

        # Each should match expected hash
        for idx, result, expected_hash in results:
            assert result["artifact_id"] == expected_hash
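# Illustrative sketch: every "artifact_id == expected_hash" assertion above
# relies on content-addressable IDs. Assuming compute_sha256 in tests.factories
# is a thin hashlib wrapper (the factory itself is not shown in this diff),
# the expected ID is simply the hex SHA-256 of the raw bytes:
import hashlib

def compute_sha256(data: bytes) -> str:
    # Identical bytes always map to the same artifact_id, regardless of the
    # filename or tag supplied at upload time; that is what enables dedup.
    return hashlib.sha256(data).hexdigest()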
@@ -59,7 +59,8 @@ class TestProjectCRUD:
     @pytest.mark.integration
     def test_list_projects(self, integration_client, test_project):
         """Test listing projects includes created project."""
-        response = integration_client.get("/api/v1/projects")
+        # Search specifically for our test project to avoid pagination issues
+        response = integration_client.get(f"/api/v1/projects?search={test_project}")
         assert response.status_code == 200

         data = response.json()
@@ -107,9 +108,11 @@ class TestProjectListingFilters:
     @pytest.mark.integration
     def test_projects_search(self, integration_client, test_project):
         """Test project search by name."""
-        # Search for our test project
+        # Search using the unique portion of our test project name
+        # test_project format is "test-project-test-{uuid[:8]}"
+        unique_part = test_project.split("-")[-1]  # Get the UUID portion
         response = integration_client.get(
-            f"/api/v1/projects?search={test_project[:10]}"
+            f"/api/v1/projects?search={unique_part}"
         )
         assert response.status_code == 200

583 backend/tests/integration/test_size_boundary.py Normal file
@@ -0,0 +1,583 @@
"""
Integration tests for upload/download with various file sizes.

Tests cover:
- Small files (0B - 100KB)
- Medium files (1MB - 50MB)
- Large files (100MB - 1GB) - marked as slow/large
- Exact chunk boundaries
- Data integrity verification across all sizes
"""

import pytest
import io
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from tests.factories import (
    compute_sha256,
    upload_test_file,
    generate_content,
    generate_content_with_hash,
)
from tests.conftest import (
    SIZE_1B,
    SIZE_1KB,
    SIZE_10KB,
    SIZE_100KB,
    SIZE_1MB,
    SIZE_5MB,
    SIZE_10MB,
    SIZE_50MB,
    SIZE_100MB,
    SIZE_250MB,
    SIZE_500MB,
    SIZE_1GB,
    CHUNK_SIZE,
    MULTIPART_THRESHOLD,
)
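# Illustrative sketch: sized_content is a conftest fixture not shown in this
# diff. For the seeded calls below to be reproducible, it plausibly returns
# deterministic pseudo-random bytes plus their SHA-256, along these lines
# (the name make_sized_content is hypothetical):
import hashlib
import random

def make_sized_content(size: int, seed: int = 0) -> tuple[bytes, str]:
    # Same (size, seed) always yields the same payload, so expected hashes
    # stay stable across test runs and processes.
    data = random.Random(seed).randbytes(size)
    return data, hashlib.sha256(data).hexdigest()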
class TestSmallFileSizes:
    """Tests for small file uploads/downloads (0B - 100KB)."""

    @pytest.mark.integration
    def test_upload_download_1_byte(self, integration_client, test_package, sized_content):
        """Test upload/download of 1 byte file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1B, seed=1)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="1byte.bin", tag="1byte"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_1B

        # Download and verify
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/1byte",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content
        assert len(response.content) == SIZE_1B

    @pytest.mark.integration
    def test_upload_download_1kb(self, integration_client, test_package, sized_content):
        """Test upload/download of 1KB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1KB, seed=2)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="1kb.bin", tag="1kb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_1KB

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/1kb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_upload_download_10kb(self, integration_client, test_package, sized_content):
        """Test upload/download of 10KB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_10KB, seed=3)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="10kb.bin", tag="10kb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_10KB

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/10kb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_upload_download_100kb(self, integration_client, test_package, sized_content):
        """Test upload/download of 100KB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100KB, seed=4)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="100kb.bin", tag="100kb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_100KB

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/100kb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content


class TestMediumFileSizes:
    """Tests for medium file uploads/downloads (1MB - 50MB)."""

    @pytest.mark.integration
    def test_upload_download_1mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 1MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1MB, seed=10)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="1mb.bin", tag="1mb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_1MB

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/1mb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert len(response.content) == SIZE_1MB
        assert compute_sha256(response.content) == expected_hash

    @pytest.mark.integration
    def test_upload_download_5mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 5MB file (multipart threshold boundary area)."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_5MB, seed=11)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="5mb.bin", tag="5mb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_5MB

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/5mb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert len(response.content) == SIZE_5MB
        assert compute_sha256(response.content) == expected_hash

    @pytest.mark.integration
    @pytest.mark.slow
    def test_upload_download_10mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 10MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_10MB, seed=12)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="10mb.bin", tag="10mb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_10MB

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/10mb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert len(response.content) == SIZE_10MB
        assert compute_sha256(response.content) == expected_hash

    @pytest.mark.integration
    @pytest.mark.slow
    def test_upload_download_50mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 50MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_50MB, seed=13)

        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
            filename="50mb.bin", tag="50mb"
        )
        upload_time = time.time() - start_time

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_50MB

        start_time = time.time()
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/50mb",
            params={"mode": "proxy"},
        )
        download_time = time.time() - start_time

        assert response.status_code == 200
        assert len(response.content) == SIZE_50MB
        assert compute_sha256(response.content) == expected_hash

        # Log timing for performance tracking
        print(f"\n50MB upload: {upload_time:.2f}s, download: {download_time:.2f}s")


class TestLargeFileSizes:
    """Tests for large file uploads/downloads (100MB - 1GB).

    These tests are marked as slow and large, and skipped by default.
    Run with: pytest -m "large" to include these tests.
    """

    @pytest.mark.integration
    @pytest.mark.slow
    @pytest.mark.large
    def test_upload_download_100mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 100MB file (multipart threshold)."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100MB, seed=100)

        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
            filename="100mb.bin", tag="100mb"
        )
        upload_time = time.time() - start_time

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_100MB

        start_time = time.time()
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/100mb",
            params={"mode": "proxy"},
        )
        download_time = time.time() - start_time

        assert response.status_code == 200
        assert len(response.content) == SIZE_100MB
        assert compute_sha256(response.content) == expected_hash

        print(f"\n100MB upload: {upload_time:.2f}s, download: {download_time:.2f}s")

    @pytest.mark.integration
    @pytest.mark.slow
    @pytest.mark.large
    def test_upload_download_250mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 250MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_250MB, seed=250)

        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
            filename="250mb.bin", tag="250mb"
        )
        upload_time = time.time() - start_time

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_250MB

        start_time = time.time()
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/250mb",
            params={"mode": "proxy"},
        )
        download_time = time.time() - start_time

        assert response.status_code == 200
        assert len(response.content) == SIZE_250MB
        assert compute_sha256(response.content) == expected_hash

        print(f"\n250MB upload: {upload_time:.2f}s, download: {download_time:.2f}s")

    @pytest.mark.integration
    @pytest.mark.slow
    @pytest.mark.large
    def test_upload_download_500mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 500MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_500MB, seed=500)

        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
            filename="500mb.bin", tag="500mb"
        )
        upload_time = time.time() - start_time

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_500MB

        start_time = time.time()
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/500mb",
            params={"mode": "proxy"},
        )
        download_time = time.time() - start_time

        assert response.status_code == 200
        assert len(response.content) == SIZE_500MB
        assert compute_sha256(response.content) == expected_hash

        print(f"\n500MB upload: {upload_time:.2f}s, download: {download_time:.2f}s")

    @pytest.mark.integration
    @pytest.mark.slow
    @pytest.mark.large
    def test_upload_download_1gb(self, integration_client, test_package, sized_content):
        """Test upload/download of 1GB file.

        This test may take several minutes depending on network/disk speed.
        """
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1GB, seed=1024)

        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
            filename="1gb.bin", tag="1gb"
        )
        upload_time = time.time() - start_time

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_1GB

        start_time = time.time()
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/1gb",
            params={"mode": "proxy"},
        )
        download_time = time.time() - start_time

        assert response.status_code == 200
        assert len(response.content) == SIZE_1GB
        assert compute_sha256(response.content) == expected_hash

        print(f"\n1GB upload: {upload_time:.2f}s, download: {download_time:.2f}s")
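# Illustrative sketch: the boundary sizes exercised by the class below are the
# classic off-by-one traps for chunked streaming. One hypothetical way to
# enumerate them in one place, assuming the 64KB CHUNK_SIZE from conftest:
CHUNK_BOUNDARY_SIZES = [
    CHUNK_SIZE - 1,          # final chunk one byte short
    CHUNK_SIZE,              # exactly one full chunk
    CHUNK_SIZE + 1,          # one byte spills into a second chunk
    CHUNK_SIZE * 3 + 1000,   # several full chunks plus a partial tail
]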
class TestChunkBoundaries:
    """Tests for exact chunk size boundaries."""

    @pytest.mark.integration
    def test_upload_download_at_chunk_size(self, integration_client, test_package, sized_content):
        """Test upload/download at exact chunk size (64KB)."""
        project, package = test_package
        content, expected_hash = sized_content(CHUNK_SIZE, seed=64)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="chunk.bin", tag="chunk-exact"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == CHUNK_SIZE

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/chunk-exact",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_upload_download_chunk_size_plus_1(self, integration_client, test_package, sized_content):
        """Test upload/download at chunk size + 1 byte."""
        project, package = test_package
        size = CHUNK_SIZE + 1
        content, expected_hash = sized_content(size, seed=65)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="chunk_plus.bin", tag="chunk-plus"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == size

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/chunk-plus",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_upload_download_chunk_size_minus_1(self, integration_client, test_package, sized_content):
        """Test upload/download at chunk size - 1 byte."""
        project, package = test_package
        size = CHUNK_SIZE - 1
        content, expected_hash = sized_content(size, seed=63)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="chunk_minus.bin", tag="chunk-minus"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == size

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/chunk-minus",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_upload_download_multiple_chunks(self, integration_client, test_package, sized_content):
        """Test upload/download spanning multiple chunks."""
        project, package = test_package
        size = CHUNK_SIZE * 3 + 1000  # 3 full chunks + partial
        content, expected_hash = sized_content(size, seed=300)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="multi_chunk.bin", tag="multi-chunk"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == size

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/multi-chunk",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content
class TestDataIntegrity:
    """Tests for data integrity with various content types."""

    @pytest.mark.integration
    def test_binary_content_integrity(self, integration_client, test_package):
        """Test binary content (all byte values 0-255) integrity."""
        project, package = test_package
        # Content with all 256 possible byte values
        content = bytes(range(256)) * 100  # 25.6KB
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="binary.bin", tag="binary"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/binary",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_text_content_integrity(self, integration_client, test_package):
        """Test UTF-8 text content integrity."""
        project, package = test_package
        content = "Hello, World! 你好世界 🌍 مرحبا العالم".encode("utf-8")
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="text.txt", tag="text"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/text",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content
        assert response.content.decode("utf-8") == "Hello, World! 你好世界 🌍 مرحبا العالم"

    @pytest.mark.integration
    def test_null_bytes_content_integrity(self, integration_client, test_package):
        """Test content with null bytes."""
        project, package = test_package
        content = b"before\x00null\x00bytes\x00after"
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="nulls.bin", tag="nulls"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/nulls",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content
        assert b"\x00" in response.content

    @pytest.mark.integration
    def test_unicode_filename_integrity(self, integration_client, test_package):
        """Test file with unicode filename."""
        project, package = test_package
        content = b"unicode filename test"
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="文件名.txt", tag="unicode-name"
        )
        assert result["artifact_id"] == expected_hash
        assert result["original_name"] == "文件名.txt"

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/unicode-name",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_compressed_content_integrity(self, integration_client, test_package):
        """Test gzip-compressed content integrity."""
        import gzip

        project, package = test_package
        original = b"This is some text that will be compressed " * 100
        content = gzip.compress(original)
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="data.gz", tag="compressed"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/compressed",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content
        # Verify we can decompress
        assert gzip.decompress(response.content) == original

    @pytest.mark.integration
    def test_hash_verification_matches(self, integration_client, test_package, sized_content):
        """Test that computed hash matches artifact_id for various sizes."""
        project, package = test_package

        sizes = [SIZE_1B, SIZE_1KB, SIZE_10KB, SIZE_100KB, SIZE_1MB]

        for i, size in enumerate(sizes):
            content, expected_hash = sized_content(size, seed=1000 + i)

            result = upload_test_file(
                integration_client, project, package, content,
                filename=f"hash_test_{size}.bin", tag=f"hash-{size}"
            )

            # Verify artifact_id matches expected hash
            assert result["artifact_id"] == expected_hash

            # Download and verify hash of downloaded content
            response = integration_client.get(
                f"/api/v1/project/{project}/{package}/+/hash-{size}",
                params={"mode": "proxy"},
            )
            downloaded_hash = compute_sha256(response.content)
            assert downloaded_hash == expected_hash
535 backend/tests/integration/test_streaming_download.py Normal file
@@ -0,0 +1,535 @@
"""
Integration tests for streaming download functionality.

Tests cover:
- HTTP Range requests (partial downloads, resume)
- Conditional requests (If-None-Match, If-Modified-Since)
- Caching headers (Cache-Control, Last-Modified, Accept-Ranges)
- Large file streaming
- Download modes (proxy, redirect, presigned)
"""

import pytest
import io
import time
from email.utils import formatdate
from tests.factories import (
    compute_sha256,
    upload_test_file,
)
from tests.conftest import (
    SIZE_1KB,
    SIZE_100KB,
    SIZE_1MB,
)
class TestRangeRequests:
    """Tests for HTTP Range request support (partial downloads)."""

    @pytest.mark.integration
    def test_range_request_first_bytes(self, integration_client, test_package):
        """Test range request for first N bytes."""
        project, package = test_package
        content = b"0123456789" * 100  # 1000 bytes
        upload_test_file(integration_client, project, package, content, tag="range-test")

        # Request first 10 bytes
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/range-test",
            params={"mode": "proxy"},
            headers={"Range": "bytes=0-9"},
        )
        assert response.status_code == 206  # Partial Content
        assert response.content == b"0123456789"
        assert "Content-Range" in response.headers
        assert response.headers["Content-Range"].startswith("bytes 0-9/")

    @pytest.mark.integration
    def test_range_request_middle_bytes(self, integration_client, test_package):
        """Test range request for bytes in the middle."""
        project, package = test_package
        content = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
        upload_test_file(integration_client, project, package, content, tag="range-mid")

        # Request bytes 10-19 (KLMNOPQRST)
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/range-mid",
            params={"mode": "proxy"},
            headers={"Range": "bytes=10-19"},
        )
        assert response.status_code == 206
        assert response.content == b"KLMNOPQRST"

    @pytest.mark.integration
    def test_range_request_suffix_bytes(self, integration_client, test_package):
        """Test range request for last N bytes (suffix range)."""
        project, package = test_package
        content = b"0123456789ABCDEF"  # 16 bytes
        upload_test_file(integration_client, project, package, content, tag="range-suffix")

        # Request last 4 bytes
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/range-suffix",
            params={"mode": "proxy"},
            headers={"Range": "bytes=-4"},
        )
        assert response.status_code == 206
        assert response.content == b"CDEF"

    @pytest.mark.integration
    def test_range_request_open_ended(self, integration_client, test_package):
        """Test range request from offset to end."""
        project, package = test_package
        content = b"0123456789"
        upload_test_file(integration_client, project, package, content, tag="range-open")

        # Request from byte 5 to end
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/range-open",
            params={"mode": "proxy"},
            headers={"Range": "bytes=5-"},
        )
        assert response.status_code == 206
        assert response.content == b"56789"

    @pytest.mark.integration
    def test_range_request_includes_accept_ranges_header(
        self, integration_client, test_package
    ):
        """Test that range requests include Accept-Ranges header."""
        project, package = test_package
        content = b"test content"
        upload_test_file(integration_client, project, package, content, tag="accept-ranges")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/accept-ranges",
            params={"mode": "proxy"},
            headers={"Range": "bytes=0-4"},
        )
        assert response.status_code == 206
        assert response.headers.get("Accept-Ranges") == "bytes"

    @pytest.mark.integration
    def test_full_download_advertises_accept_ranges(
        self, integration_client, test_package
    ):
        """Test that full downloads advertise range support."""
        project, package = test_package
        content = b"test content"
        upload_test_file(integration_client, project, package, content, tag="full-accept")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/full-accept",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.headers.get("Accept-Ranges") == "bytes"
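# Illustrative sketch: taken together, the assertions above (206 Partial
# Content, Content-Range, Accept-Ranges: bytes) are exactly what a resuming
# client needs. A minimal hypothetical resume loop against proxy mode; the
# chunk size and helper name are illustrative:
import httpx

def download_with_resume(client: httpx.Client, path: str, chunk: int = 65536) -> bytes:
    buf = bytearray()
    while True:
        resp = client.get(
            path,
            params={"mode": "proxy"},
            headers={"Range": f"bytes={len(buf)}-{len(buf) + chunk - 1}"},
        )
        if resp.status_code == 206:
            buf.extend(resp.content)
            # Content-Range is "bytes start-end/total"; stop once complete.
            total = int(resp.headers["Content-Range"].split("/")[-1])
            if len(buf) >= total:
                return bytes(buf)
        elif resp.status_code == 200:
            # Server ignored the Range header and sent the whole body.
            return resp.content
        else:
            resp.raise_for_status()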
class TestConditionalRequests:
    """Tests for conditional request handling (304 Not Modified)."""

    @pytest.mark.integration
    def test_if_none_match_returns_304(self, integration_client, test_package):
        """Test If-None-Match with matching ETag returns 304."""
        project, package = test_package
        content = b"conditional request test content"
        expected_hash = compute_sha256(content)
        upload_test_file(integration_client, project, package, content, tag="cond-etag")

        # Request with matching ETag
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/cond-etag",
            params={"mode": "proxy"},
            headers={"If-None-Match": f'"{expected_hash}"'},
        )
        assert response.status_code == 304
        assert response.content == b""  # No body for 304

    @pytest.mark.integration
    def test_if_none_match_without_quotes(self, integration_client, test_package):
        """Test If-None-Match works with or without quotes."""
        project, package = test_package
        content = b"etag no quotes test"
        expected_hash = compute_sha256(content)
        upload_test_file(integration_client, project, package, content, tag="cond-noquote")

        # Request with ETag without quotes
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/cond-noquote",
            params={"mode": "proxy"},
            headers={"If-None-Match": expected_hash},
        )
        assert response.status_code == 304

    @pytest.mark.integration
    def test_if_none_match_mismatch_returns_200(self, integration_client, test_package):
        """Test If-None-Match with non-matching ETag returns 200."""
        project, package = test_package
        content = b"etag mismatch test"
        upload_test_file(integration_client, project, package, content, tag="cond-mismatch")

        # Request with different ETag
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/cond-mismatch",
            params={"mode": "proxy"},
            headers={"If-None-Match": '"different-etag-value"'},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_if_modified_since_returns_304(self, integration_client, test_package):
        """Test If-Modified-Since with future date returns 304."""
        project, package = test_package
        content = b"modified since test"
        upload_test_file(integration_client, project, package, content, tag="cond-modified")

        # Request with future date (artifact was definitely created before this)
        future_date = formatdate(time.time() + 86400, usegmt=True)  # Tomorrow
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/cond-modified",
            params={"mode": "proxy"},
            headers={"If-Modified-Since": future_date},
        )
        assert response.status_code == 304

    @pytest.mark.integration
    def test_if_modified_since_old_date_returns_200(
        self, integration_client, test_package
    ):
        """Test If-Modified-Since with old date returns 200."""
        project, package = test_package
        content = b"old date test"
        upload_test_file(integration_client, project, package, content, tag="cond-old")

        # Request with old date (2020-01-01)
        old_date = "Wed, 01 Jan 2020 00:00:00 GMT"
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/cond-old",
            params={"mode": "proxy"},
            headers={"If-Modified-Since": old_date},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_304_includes_etag(self, integration_client, test_package):
        """Test 304 response includes ETag header."""
        project, package = test_package
        content = b"304 etag test"
        expected_hash = compute_sha256(content)
        upload_test_file(integration_client, project, package, content, tag="304-etag")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/304-etag",
            params={"mode": "proxy"},
            headers={"If-None-Match": f'"{expected_hash}"'},
        )
        assert response.status_code == 304
        assert response.headers.get("ETag") == f'"{expected_hash}"'

    @pytest.mark.integration
    def test_304_includes_cache_control(self, integration_client, test_package):
        """Test 304 response includes Cache-Control header."""
        project, package = test_package
        content = b"304 cache test"
        expected_hash = compute_sha256(content)
        upload_test_file(integration_client, project, package, content, tag="304-cache")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/304-cache",
            params={"mode": "proxy"},
            headers={"If-None-Match": f'"{expected_hash}"'},
        )
        assert response.status_code == 304
        assert "immutable" in response.headers.get("Cache-Control", "")
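# Illustrative sketch: the 304 behavior above enables a trivial client-side
# cache keyed on ETag. Hypothetical helper; cached is an (etag, body) pair
# from a previous fetch, or None on the first request:
import httpx

def fetch_if_changed(client: httpx.Client, path: str, cached=None):
    headers = {"If-None-Match": cached[0]} if cached else {}
    resp = client.get(path, params={"mode": "proxy"}, headers=headers)
    if resp.status_code == 304:
        # Not modified: reuse the cached body (a 304 carries no payload).
        return cached
    resp.raise_for_status()
    return resp.headers.get("ETag", ""), resp.content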
class TestCachingHeaders:
    """Tests for caching headers on download responses."""

    @pytest.mark.integration
    def test_download_includes_cache_control(self, integration_client, test_package):
        """Test download response includes Cache-Control header."""
        project, package = test_package
        content = b"cache control test"
        upload_test_file(integration_client, project, package, content, tag="cache-ctl")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/cache-ctl",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        cache_control = response.headers.get("Cache-Control", "")
        assert "public" in cache_control
        assert "immutable" in cache_control
        assert "max-age" in cache_control

    @pytest.mark.integration
    def test_download_includes_last_modified(self, integration_client, test_package):
        """Test download response includes Last-Modified header."""
        project, package = test_package
        content = b"last modified test"
        upload_test_file(integration_client, project, package, content, tag="last-mod")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/last-mod",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "Last-Modified" in response.headers
        # Should be in RFC 7231 format
        last_modified = response.headers["Last-Modified"]
        assert "GMT" in last_modified

    @pytest.mark.integration
    def test_download_includes_etag(self, integration_client, test_package):
        """Test download response includes ETag header."""
        project, package = test_package
        content = b"etag header test"
        expected_hash = compute_sha256(content)
        upload_test_file(integration_client, project, package, content, tag="etag-hdr")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/etag-hdr",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.headers.get("ETag") == f'"{expected_hash}"'
class TestDownloadResume:
    """Tests for download resume functionality using range requests."""

    @pytest.mark.integration
    def test_resume_download_after_partial(self, integration_client, test_package):
        """Test resuming download from where it left off."""
        project, package = test_package
        content = b"ABCDEFGHIJ" * 100  # 1000 bytes
        upload_test_file(integration_client, project, package, content, tag="resume-test")

        # Simulate partial download (first 500 bytes)
        response1 = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/resume-test",
            params={"mode": "proxy"},
            headers={"Range": "bytes=0-499"},
        )
        assert response1.status_code == 206
        first_half = response1.content
        assert len(first_half) == 500

        # Resume from byte 500
        response2 = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/resume-test",
            params={"mode": "proxy"},
            headers={"Range": "bytes=500-"},
        )
        assert response2.status_code == 206
        second_half = response2.content
        assert len(second_half) == 500

        # Combine and verify
        combined = first_half + second_half
        assert combined == content

    @pytest.mark.integration
    def test_resume_with_etag_verification(self, integration_client, test_package):
        """Test that resumed download can verify content hasn't changed."""
        project, package = test_package
        content = b"resume etag verification test content"
        expected_hash = compute_sha256(content)
        upload_test_file(integration_client, project, package, content, tag="resume-etag")

        # Get ETag from first request
        response1 = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/resume-etag",
            params={"mode": "proxy"},
            headers={"Range": "bytes=0-9"},
        )
        assert response1.status_code == 206
        etag = response1.headers.get("ETag")
        assert etag == f'"{expected_hash}"'

        # Resume with If-Match to ensure content hasn't changed
        # (Note: If-Match would fail and return 412 if content changed)
        response2 = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/resume-etag",
            params={"mode": "proxy"},
            headers={"Range": "bytes=10-"},
        )
        assert response2.status_code == 206
        # ETag should be the same
        assert response2.headers.get("ETag") == etag
class TestLargeFileStreaming:
    """Tests for streaming large files."""

    @pytest.mark.integration
    def test_stream_1mb_file(self, integration_client, test_package, sized_content):
        """Test streaming a 1MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1MB, seed=500)

        upload_test_file(integration_client, project, package, content, tag="stream-1mb")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/stream-1mb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert len(response.content) == SIZE_1MB
        assert compute_sha256(response.content) == expected_hash

    @pytest.mark.integration
    def test_stream_large_file_has_correct_headers(
        self, integration_client, test_package, sized_content
    ):
        """Test that large file streaming has correct headers."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100KB, seed=501)

        upload_test_file(integration_client, project, package, content, tag="stream-hdr")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/stream-hdr",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert int(response.headers.get("Content-Length", 0)) == SIZE_100KB
        assert response.headers.get("X-Checksum-SHA256") == expected_hash
        assert response.headers.get("Accept-Ranges") == "bytes"

    @pytest.mark.integration
    def test_range_request_on_large_file(
        self, integration_client, test_package, sized_content
    ):
        """Test range request on a larger file."""
        project, package = test_package
        content, _ = sized_content(SIZE_100KB, seed=502)

        upload_test_file(integration_client, project, package, content, tag="range-large")

        # Request a slice from the middle
        start = 50000
        end = 50999
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/range-large",
            params={"mode": "proxy"},
            headers={"Range": f"bytes={start}-{end}"},
        )
        assert response.status_code == 206
        assert len(response.content) == 1000
        assert response.content == content[start : end + 1]
class TestDownloadModes:
    """Tests for different download modes."""

    @pytest.mark.integration
    def test_proxy_mode_streams_content(self, integration_client, test_package):
        """Test proxy mode streams content through backend."""
        project, package = test_package
        content = b"proxy mode test content"
        upload_test_file(integration_client, project, package, content, tag="mode-proxy")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/mode-proxy",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_presigned_mode_returns_url(self, integration_client, test_package):
        """Test presigned mode returns JSON with URL."""
        project, package = test_package
        content = b"presigned mode test"
        upload_test_file(integration_client, project, package, content, tag="mode-presign")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/mode-presign",
            params={"mode": "presigned"},
        )
        assert response.status_code == 200
        data = response.json()
        assert "url" in data
        assert "expires_at" in data
        assert data["url"].startswith("http")

    @pytest.mark.integration
    def test_redirect_mode_returns_302(self, integration_client, test_package):
        """Test redirect mode returns 302 to presigned URL."""
        project, package = test_package
        content = b"redirect mode test"
        upload_test_file(integration_client, project, package, content, tag="mode-redir")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/mode-redir",
            params={"mode": "redirect"},
            follow_redirects=False,
        )
        assert response.status_code == 302
        assert "Location" in response.headers
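# Illustrative sketch: the three modes above trade backend load for URL
# lifetime. Proxy streams through the API, while presigned/redirect hand the
# client a short-lived direct storage URL. Consuming presigned mode, given the
# JSON shape asserted above ({"url": ..., "expires_at": ...}):
import httpx

def download_via_presigned(client: httpx.Client, path: str) -> bytes:
    meta = client.get(path, params={"mode": "presigned"}).json()
    # The presigned URL points at storage directly, bypassing the API.
    direct = httpx.get(meta["url"])
    direct.raise_for_status()
    return direct.content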
class TestIntegrityDuringStreaming:
    """Tests for data integrity during streaming downloads."""

    @pytest.mark.integration
    def test_checksum_header_matches_content(self, integration_client, test_package):
        """Test X-Checksum-SHA256 header matches actual downloaded content."""
        project, package = test_package
        content = b"integrity check content"
        expected_hash = compute_sha256(content)
        upload_test_file(integration_client, project, package, content, tag="integrity")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/integrity",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        header_hash = response.headers.get("X-Checksum-SHA256")
        actual_hash = compute_sha256(response.content)

        assert header_hash == expected_hash
        assert actual_hash == expected_hash
        assert header_hash == actual_hash

    @pytest.mark.integration
    def test_etag_matches_content_hash(self, integration_client, test_package):
        """Test ETag header matches content hash."""
        project, package = test_package
        content = b"etag integrity test"
        expected_hash = compute_sha256(content)
        upload_test_file(integration_client, project, package, content, tag="etag-int")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/etag-int",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        etag = response.headers.get("ETag", "").strip('"')
        actual_hash = compute_sha256(response.content)

        assert etag == expected_hash
        assert actual_hash == expected_hash

    @pytest.mark.integration
    def test_digest_header_present(self, integration_client, test_package):
        """Test Digest header is present in RFC 3230 format."""
        project, package = test_package
        content = b"digest header test"
        upload_test_file(integration_client, project, package, content, tag="digest")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/digest",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "Digest" in response.headers
        assert response.headers["Digest"].startswith("sha-256=")
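# Illustrative sketch: RFC 3230 Digest values are base64 of the raw digest
# bytes, unlike the hex encoding used by X-Checksum-SHA256 and ETag above.
# Verifying both representations against the same downloaded content, as a
# client might (helper name is hypothetical):
import base64
import hashlib

def verify_download_digests(content: bytes, headers: dict) -> bool:
    raw = hashlib.sha256(content).digest()
    hex_ok = headers.get("X-Checksum-SHA256") == raw.hex()
    b64_ok = headers.get("Digest") == "sha-256=" + base64.b64encode(raw).decode()
    return hex_ok and b64_ok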
@@ -10,6 +10,7 @@ Tests cover:
 - S3 storage verification
 """

+import os
 import pytest
 import io
 import threading
@@ -25,6 +26,19 @@ from tests.factories import (
 class TestUploadBasics:
     """Tests for basic upload functionality."""

+    @pytest.mark.integration
+    def test_upload_returns_200(self, integration_client, test_package):
+        """Test upload with valid file returns 200."""
+        project, package = test_package
+        content = b"valid file upload test"
+
+        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
+        response = integration_client.post(
+            f"/api/v1/project/{project}/{package}/upload",
+            files=files,
+        )
+        assert response.status_code == 200
+
     @pytest.mark.integration
     def test_upload_returns_artifact_id(self, integration_client, test_package):
         """Test upload returns the artifact ID (SHA256 hash)."""
@@ -101,6 +115,83 @@ class TestUploadBasics:
         assert "created_at" in result
         assert result["created_at"] is not None

+    @pytest.mark.integration
+    def test_upload_without_tag_succeeds(self, integration_client, test_package):
+        """Test upload without tag succeeds (no tag created)."""
+        project, package = test_package
+        content = b"upload without tag test"
+        expected_hash = compute_sha256(content)
+
+        files = {"file": ("no_tag.bin", io.BytesIO(content), "application/octet-stream")}
+        response = integration_client.post(
+            f"/api/v1/project/{project}/{package}/upload",
+            files=files,
+            # No tag parameter
+        )
+        assert response.status_code == 200
+        result = response.json()
+        assert result["artifact_id"] == expected_hash
+
+        # Verify no tag was created - list tags and check
+        tags_response = integration_client.get(
+            f"/api/v1/project/{project}/{package}/tags"
+        )
+        assert tags_response.status_code == 200
+        tags = tags_response.json()
+        # Filter for tags pointing to this artifact
+        artifact_tags = [t for t in tags.get("items", tags) if t.get("artifact_id") == expected_hash]
+        assert len(artifact_tags) == 0, "Tag should not be created when not specified"
+
+    @pytest.mark.integration
+    def test_upload_creates_artifact_in_database(self, integration_client, test_package):
+        """Test upload creates artifact record in database."""
+        project, package = test_package
+        content = b"database artifact test"
+        expected_hash = compute_sha256(content)
+
+        upload_test_file(integration_client, project, package, content)
+
+        # Verify artifact exists via API
+        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
+        assert response.status_code == 200
+        artifact = response.json()
+        assert artifact["id"] == expected_hash
+        assert artifact["size"] == len(content)
+
+    @pytest.mark.integration
+    @pytest.mark.requires_direct_s3
+    def test_upload_creates_object_in_s3(self, integration_client, test_package):
+        """Test upload creates object in S3 storage."""
+        project, package = test_package
+        content = b"s3 object creation test"
+        expected_hash = compute_sha256(content)
+
+        upload_test_file(integration_client, project, package, content)
+
+        # Verify S3 object exists
+        assert s3_object_exists(expected_hash), "S3 object should exist after upload"
+
+    @pytest.mark.integration
+    def test_upload_with_tag_creates_tag_record(self, integration_client, test_package):
+        """Test upload with tag creates tag record."""
+        project, package = test_package
+        content = b"tag creation test"
+        expected_hash = compute_sha256(content)
+        tag_name = "my-tag-v1"
+
+        upload_test_file(
+            integration_client, project, package, content, tag=tag_name
+        )
+
+        # Verify tag exists
+        tags_response = integration_client.get(
+            f"/api/v1/project/{project}/{package}/tags"
+        )
+        assert tags_response.status_code == 200
+        tags = tags_response.json()
+        tag_names = [t["name"] for t in tags.get("items", tags)]
+        assert tag_name in tag_names
+
+
 class TestDuplicateUploads:
     """Tests for duplicate upload deduplication behavior."""
@@ -248,6 +339,23 @@ class TestDownload:
        assert response.status_code == 200
        assert response.content == original_content

    @pytest.mark.integration
    def test_download_by_tag_prefix(self, integration_client, test_package):
        """Test downloading artifact using tag: prefix."""
        project, package = test_package
        original_content = b"download by tag prefix test"

        upload_test_file(
            integration_client, project, package, original_content, tag="prefix-tag"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/tag:prefix-tag",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == original_content

    @pytest.mark.integration
    def test_download_nonexistent_tag(self, integration_client, test_package):
        """Test downloading nonexistent tag returns 404."""
@@ -258,6 +366,33 @@ class TestDownload:
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_nonexistent_artifact(self, integration_client, test_package):
        """Test downloading nonexistent artifact ID returns 404."""
        project, package = test_package
        fake_hash = "0" * 64

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/artifact:{fake_hash}"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_from_nonexistent_project(self, integration_client, unique_test_id):
        """Test downloading from nonexistent project returns 404."""
        response = integration_client.get(
            f"/api/v1/project/nonexistent-project-{unique_test_id}/somepackage/+/sometag"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_from_nonexistent_package(self, integration_client, test_project, unique_test_id):
        """Test downloading from nonexistent package returns 404."""
        response = integration_client.get(
            f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/+/sometag"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_content_matches_original(self, integration_client, test_package):
        """Test downloaded content matches original exactly."""
@@ -275,6 +410,111 @@ class TestDownload:
        assert response.content == original_content


class TestDownloadHeaders:
    """Tests for download response headers."""

    @pytest.mark.integration
    def test_download_content_type_header(self, integration_client, test_package):
        """Test download returns correct Content-Type header."""
        project, package = test_package
        content = b"content type header test"

        upload_test_file(
            integration_client, project, package, content,
            filename="test.txt", tag="content-type-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/content-type-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        # Content-Type should be set (either text/plain or application/octet-stream)
        assert "content-type" in response.headers

    @pytest.mark.integration
    def test_download_content_length_header(self, integration_client, test_package):
        """Test download returns correct Content-Length header."""
        project, package = test_package
        content = b"content length header test - exactly 46 bytes!"
        expected_length = len(content)

        upload_test_file(
            integration_client, project, package, content, tag="content-length-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/content-length-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "content-length" in response.headers
        assert int(response.headers["content-length"]) == expected_length

    @pytest.mark.integration
    def test_download_content_disposition_header(self, integration_client, test_package):
        """Test download returns correct Content-Disposition header."""
        project, package = test_package
        content = b"content disposition test"
        filename = "my-test-file.bin"

        upload_test_file(
            integration_client, project, package, content,
            filename=filename, tag="disposition-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/disposition-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "content-disposition" in response.headers
        disposition = response.headers["content-disposition"]
        assert "attachment" in disposition
        assert filename in disposition

    @pytest.mark.integration
    def test_download_checksum_headers(self, integration_client, test_package):
        """Test download returns checksum headers."""
        project, package = test_package
        content = b"checksum header test content"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="checksum-headers"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/checksum-headers",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        # Check for checksum headers
        assert "x-checksum-sha256" in response.headers
        assert response.headers["x-checksum-sha256"] == expected_hash

    @pytest.mark.integration
    def test_download_etag_header(self, integration_client, test_package):
        """Test download returns ETag header (artifact ID)."""
        project, package = test_package
        content = b"etag header test"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="etag-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/etag-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "etag" in response.headers
        # ETag should contain the artifact ID (hash)
        etag = response.headers["etag"].strip('"')
        assert etag == expected_hash
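        # Because the ETag is the content hash, a client can revalidate a
        # cached copy cheaply (sketch; assumes the server honors conditional
        # requests):
        #
        #     r = client.get(url, headers={"If-None-Match": f'"{etag}"'})
        #     assert r.status_code == 304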

class TestConcurrentUploads:
    """Tests for concurrent upload handling."""

@@ -286,6 +526,14 @@ class TestConcurrentUploads:
        expected_hash = compute_sha256(content)
        num_concurrent = 5

        # Create an API key for worker threads
        api_key_response = integration_client.post(
            "/api/v1/auth/keys",
            json={"name": "concurrent-test-key"},
        )
        assert api_key_response.status_code == 200, f"Failed to create API key: {api_key_response.text}"
        api_key = api_key_response.json()["key"]
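        # The worker threads below open their own httpx.Client against the
        # server URL, so they cannot reuse this fixture client's session auth;
        # each worker authenticates with the key above via a Bearer header.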

        results = []
        errors = []

@@ -293,7 +541,7 @@ class TestConcurrentUploads:
            try:
                from httpx import Client

-               base_url = "http://localhost:8080"
+               base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
                with Client(base_url=base_url, timeout=30.0) as client:
                    files = {
                        "file": (
@@ -306,6 +554,7 @@ class TestConcurrentUploads:
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"concurrent-{tag_suffix}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        results.append(response.json())
@@ -388,6 +637,7 @@ class TestUploadFailureCleanup:
    """Tests for cleanup when uploads fail."""

    @pytest.mark.integration
    @pytest.mark.requires_direct_s3
    def test_upload_failure_invalid_project_no_orphaned_s3(
        self, integration_client, unique_test_id
    ):
@@ -410,6 +660,7 @@ class TestUploadFailureCleanup:
        )

    @pytest.mark.integration
    @pytest.mark.requires_direct_s3
    def test_upload_failure_invalid_package_no_orphaned_s3(
        self, integration_client, test_project, unique_test_id
    ):
@@ -457,6 +708,7 @@ class TestS3StorageVerification:
    """Tests to verify S3 storage behavior."""

    @pytest.mark.integration
    @pytest.mark.requires_direct_s3
    def test_s3_single_object_after_duplicates(
        self, integration_client, test_package, unique_test_id
    ):
@@ -500,3 +752,211 @@ class TestS3StorageVerification:
        artifact = response.json()
        assert artifact["id"] == expected_hash
        assert artifact["ref_count"] == 3


class TestSecurityPathTraversal:
    """Tests for path traversal attack prevention.

    Note: Orchard uses content-addressable storage where files are stored by
    SHA256 hash, not filename. Filenames are metadata only and never used in
    file path construction, so path traversal in filenames is not a security
    vulnerability. These tests verify the system handles unusual inputs safely.
    """
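    # A minimal sketch of the idea (the key scheme shown is illustrative, not
    # the actual storage code): the object key is derived from the digest
    # alone, so a hostile filename never reaches path construction.
    #
    #     import hashlib
    #     def object_key(content: bytes) -> str:
    #         digest = hashlib.sha256(content).hexdigest()
    #         return f"{digest[:2]}/{digest}"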
    @pytest.mark.integration
    @pytest.mark.requires_direct_s3
    def test_path_traversal_in_filename_stored_safely(
        self, integration_client, test_package
    ):
        """Test filenames with path traversal are stored safely (as metadata only)."""
        project, package = test_package
        content = b"path traversal test content"
        expected_hash = compute_sha256(content)

        files = {
            "file": (
                "../../../etc/passwd",
                io.BytesIO(content),
                "application/octet-stream",
            )
        }
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"tag": "traversal-test"},
        )
        assert response.status_code == 200
        result = response.json()
        assert result["artifact_id"] == expected_hash
        s3_objects = list_s3_objects_by_hash(expected_hash)
        assert len(s3_objects) == 1
        assert ".." not in s3_objects[0]

    @pytest.mark.integration
    def test_path_traversal_in_package_name(self, integration_client, test_project):
        """Test package names with path traversal sequences are rejected."""
        response = integration_client.get(
            f"/api/v1/project/{test_project}/packages/../../../etc/passwd"
        )
        assert response.status_code in [400, 404, 422]

    @pytest.mark.integration
    def test_path_traversal_in_tag_name(self, integration_client, test_package):
        """Test tag names with path traversal are rejected or handled safely."""
        project, package = test_package
        content = b"tag traversal test"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"tag": "../../../etc/passwd"},
        )
        assert response.status_code in [200, 400, 422]

    @pytest.mark.integration
    def test_download_path_traversal_in_ref(self, integration_client, test_package):
        """Test download ref with path traversal is rejected."""
        project, package = test_package

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/../../../etc/passwd"
        )
        assert response.status_code in [400, 404, 422]


class TestSecurityMalformedRequests:
    """Tests for malformed request handling."""

    @pytest.mark.integration
    def test_upload_missing_file_field(self, integration_client, test_package):
        """Test upload without file field returns appropriate error."""
        project, package = test_package

        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            data={"tag": "no-file"},
        )
        assert response.status_code == 422

    @pytest.mark.integration
    def test_upload_null_bytes_in_filename(self, integration_client, test_package):
        """Test filename with null bytes is handled safely."""
        project, package = test_package
        content = b"null byte test"

        files = {
            "file": ("test\x00.bin", io.BytesIO(content), "application/octet-stream")
        }
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code in [200, 400, 422]

    @pytest.mark.integration
    def test_upload_very_long_filename(self, integration_client, test_package):
        """Test very long filename is handled (truncated or rejected)."""
        project, package = test_package
        content = b"long filename test"
        long_filename = "a" * 1000 + ".bin"

        files = {
            "file": (long_filename, io.BytesIO(content), "application/octet-stream")
        }
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code in [200, 400, 413, 422]

    @pytest.mark.integration
    def test_upload_special_characters_in_filename(
        self, integration_client, test_package
    ):
        """Test filenames with special characters are handled safely."""
        project, package = test_package
        content = b"special char test"

        special_filenames = [
            "test<script>.bin",
            'test"quote.bin',
            "test'apostrophe.bin",
            "test;semicolon.bin",
            "test|pipe.bin",
        ]

        for filename in special_filenames:
            files = {
                "file": (filename, io.BytesIO(content), "application/octet-stream")
            }
            response = integration_client.post(
                f"/api/v1/project/{project}/{package}/upload",
                files=files,
            )
            assert response.status_code in [200, 400, 422], (
                f"Unexpected status {response.status_code} for filename: {filename}"
            )

    @pytest.mark.integration
    def test_invalid_checksum_header_format(self, integration_client, test_package):
        """Test invalid X-Checksum-SHA256 header format is rejected."""
        project, package = test_package
        content = b"checksum test"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": "not-a-valid-hash"},
        )
        assert response.status_code == 400
        assert "Invalid" in response.json().get("detail", "")

    @pytest.mark.integration
    def test_checksum_mismatch_rejected(self, integration_client, test_package):
        """Test upload with wrong checksum is rejected."""
        project, package = test_package
        content = b"checksum mismatch test"
        wrong_hash = "0" * 64

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": wrong_hash},
        )
        assert response.status_code == 422
        assert "verification failed" in response.json().get("detail", "").lower()
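        # Correct client-side use of the header, for contrast (sketch; "url"
        # stands in for the upload endpoint above):
        #
        #     import hashlib
        #     digest = hashlib.sha256(content).hexdigest()
        #     integration_client.post(url, files=files,
        #                             headers={"X-Checksum-SHA256": digest})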
347
backend/tests/integration/test_version_api.py
Normal file
@@ -0,0 +1,347 @@
"""
Integration tests for package version API endpoints.

Tests cover:
- Version creation via upload
- Version auto-detection from filename
- Version listing and retrieval
- Download by version prefix
- Version deletion
"""

import pytest
import io
from tests.factories import (
    compute_sha256,
    upload_test_file,
)


class TestVersionCreation:
    """Tests for creating versions via upload."""

    @pytest.mark.integration
    def test_upload_with_explicit_version(self, integration_client, test_package):
        """Test upload with explicit version parameter creates version record."""
        project, package = test_package
        content = b"version creation test"
        expected_hash = compute_sha256(content)

        files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"version": "1.0.0"},
        )
        assert response.status_code == 200
        result = response.json()
        assert result["artifact_id"] == expected_hash
        assert result.get("version") == "1.0.0"
        assert result.get("version_source") == "explicit"

    @pytest.mark.integration
    def test_upload_with_version_and_tag(self, integration_client, test_package):
        """Test upload with both version and tag creates both records."""
        project, package = test_package
        content = b"version and tag test"

        files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"version": "2.0.0", "tag": "latest"},
        )
        assert response.status_code == 200
        result = response.json()
        assert result.get("version") == "2.0.0"

        # Verify tag was also created
        tags_response = integration_client.get(
            f"/api/v1/project/{project}/{package}/tags"
        )
        assert tags_response.status_code == 200
        tags = tags_response.json()
        tag_names = [t["name"] for t in tags.get("items", tags)]
        assert "latest" in tag_names

    @pytest.mark.integration
    def test_duplicate_version_same_content_succeeds(self, integration_client, test_package):
        """Test uploading same version with same content succeeds (deduplication)."""
        project, package = test_package
        content = b"version dedup test"

        # First upload with version
        files1 = {"file": ("app1.tar.gz", io.BytesIO(content), "application/octet-stream")}
        response1 = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files1,
            data={"version": "3.0.0"},
        )
        assert response1.status_code == 200

        # Second upload with same version and same content succeeds
        files2 = {"file": ("app2.tar.gz", io.BytesIO(content), "application/octet-stream")}
        response2 = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files2,
            data={"version": "3.0.0"},
        )
        # This succeeds because it's the same artifact (deduplication)
        assert response2.status_code == 200


class TestVersionAutoDetection:
    """Tests for automatic version detection from filename."""

    @pytest.mark.integration
    def test_version_detected_from_filename_tarball(self, integration_client, test_package):
        """Test version is auto-detected from tarball filename or metadata."""
        project, package = test_package
        content = b"auto detect version tarball"

        files = {"file": ("myapp-1.2.3.tar.gz", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code == 200
        result = response.json()
        assert result.get("version") == "1.2.3"
        # Version source can be 'filename' or 'metadata' depending on detection order
        assert result.get("version_source") in ["filename", "metadata"]
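        # A sketch of the filename pattern at play (illustrative regex; the
        # real extractor may differ):
        #
        #     import re
        #     m = re.search(r"-v?(\d+(?:\.\d+)+)\.(?:tar\.gz|tgz|zip)$", filename)
        #     version = m.group(1) if m else None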

    @pytest.mark.integration
    def test_version_detected_from_filename_zip(self, integration_client, test_package):
        """Test version is auto-detected from zip filename."""
        project, package = test_package
        content = b"auto detect version zip"

        files = {"file": ("package-2.0.0.zip", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code == 200
        result = response.json()
        assert result.get("version") == "2.0.0"
        assert result.get("version_source") == "filename"

    @pytest.mark.integration
    def test_explicit_version_overrides_filename(self, integration_client, test_package):
        """Test explicit version parameter overrides filename detection."""
        project, package = test_package
        content = b"explicit override test"

        files = {"file": ("myapp-1.0.0.tar.gz", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"version": "9.9.9"},
        )
        assert response.status_code == 200
        result = response.json()
        assert result.get("version") == "9.9.9"
        assert result.get("version_source") == "explicit"

    @pytest.mark.integration
    def test_no_version_detected_from_plain_filename(self, integration_client, test_package):
        """Test no version is created for filenames without version pattern."""
        project, package = test_package
        content = b"no version in filename"

        files = {"file": ("plain-file.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code == 200
        result = response.json()
        # Version should be None or not present
        assert result.get("version") is None


class TestVersionListing:
    """Tests for listing and retrieving versions."""

    @pytest.mark.integration
    def test_list_versions(self, integration_client, test_package):
        """Test listing all versions for a package."""
        project, package = test_package

        # Create multiple versions
        for ver in ["1.0.0", "1.1.0", "2.0.0"]:
            content = f"version {ver} content".encode()
            files = {"file": (f"app-{ver}.tar.gz", io.BytesIO(content), "application/octet-stream")}
            response = integration_client.post(
                f"/api/v1/project/{project}/{package}/upload",
                files=files,
                data={"version": ver},
            )
            assert response.status_code == 200

        # List versions
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/versions"
        )
        assert response.status_code == 200
        data = response.json()
        versions = [v["version"] for v in data.get("items", data)]
        assert "1.0.0" in versions
        assert "1.1.0" in versions
        assert "2.0.0" in versions

    @pytest.mark.integration
    def test_get_specific_version(self, integration_client, test_package):
        """Test getting details for a specific version."""
        project, package = test_package
        content = b"specific version test"
        expected_hash = compute_sha256(content)

        # Create version
        files = {"file": ("app-4.0.0.tar.gz", io.BytesIO(content), "application/octet-stream")}
        integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"version": "4.0.0"},
        )

        # Get version details
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/versions/4.0.0"
        )
        assert response.status_code == 200
        data = response.json()
        assert data["version"] == "4.0.0"
        assert data["artifact_id"] == expected_hash

    @pytest.mark.integration
    def test_get_nonexistent_version_returns_404(self, integration_client, test_package):
        """Test getting nonexistent version returns 404."""
        project, package = test_package

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/versions/99.99.99"
        )
        assert response.status_code == 404


class TestDownloadByVersion:
    """Tests for downloading artifacts by version."""

    @pytest.mark.integration
    def test_download_by_version_prefix(self, integration_client, test_package):
        """Test downloading artifact using version: prefix."""
        project, package = test_package
        content = b"download by version test"
        expected_hash = compute_sha256(content)

        # Upload with version
        files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")}
        integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"version": "5.0.0"},
        )

        # Download by version prefix
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/version:5.0.0",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_download_nonexistent_version_returns_404(self, integration_client, test_package):
        """Test downloading nonexistent version returns 404."""
        project, package = test_package

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/version:99.0.0"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_version_resolution_priority(self, integration_client, test_package):
        """Test that version: prefix explicitly resolves to version, not tag."""
        project, package = test_package
        version_content = b"this is the version content"
        tag_content = b"this is the tag content"

        # Create a version 6.0.0
        files1 = {"file": ("app-v.tar.gz", io.BytesIO(version_content), "application/octet-stream")}
        integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files1,
            data={"version": "6.0.0"},
        )

        # Create a tag named "6.0.0" pointing to different content
        files2 = {"file": ("app-t.tar.gz", io.BytesIO(tag_content), "application/octet-stream")}
        integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files2,
            data={"tag": "6.0.0"},
        )

        # Download with version: prefix should get version content
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/version:6.0.0",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == version_content

        # Download with tag: prefix should get tag content
        response2 = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/tag:6.0.0",
            params={"mode": "proxy"},
        )
        assert response2.status_code == 200
        assert response2.content == tag_content


class TestVersionDeletion:
    """Tests for deleting versions."""

    @pytest.mark.integration
    def test_delete_version(self, integration_client, test_package):
        """Test deleting a version."""
        project, package = test_package
        content = b"delete version test"

        # Create version
        files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")}
        integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"version": "7.0.0"},
        )

        # Verify version exists
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/versions/7.0.0"
        )
        assert response.status_code == 200

        # Delete version - returns 204 No Content on success
        delete_response = integration_client.delete(
            f"/api/v1/project/{project}/{package}/versions/7.0.0"
        )
        assert delete_response.status_code == 204

        # Verify version no longer exists
        response2 = integration_client.get(
            f"/api/v1/project/{project}/{package}/versions/7.0.0"
        )
        assert response2.status_code == 404

    @pytest.mark.integration
    def test_delete_nonexistent_version_returns_404(self, integration_client, test_package):
        """Test deleting nonexistent version returns 404."""
        project, package = test_package

        response = integration_client.delete(
            f"/api/v1/project/{project}/{package}/versions/99.0.0"
        )
        assert response.status_code == 404
412
backend/tests/integration/test_versions_api.py
Normal file
@@ -0,0 +1,412 @@
"""
Integration tests for version API endpoints.

Tests cover:
- Version creation via upload
- Version auto-detection from filename
- Version listing with pagination
- Version deletion
- Download by version ref
- ref_count behavior with version operations
"""

import pytest
from tests.factories import upload_test_file


class TestVersionCreation:
    """Tests for version creation during upload."""

    @pytest.mark.integration
    def test_upload_with_explicit_version(self, integration_client, test_package):
        """Test creating a version via explicit version parameter."""
        project_name, package_name = test_package

        result = upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"version create test",
            tag="latest",
            version="1.0.0",
        )

        assert result["tag"] == "latest"
        assert result["version"] == "1.0.0"
        assert result["version_source"] == "explicit"
        assert result["artifact_id"]

    @pytest.mark.integration
    def test_upload_with_version_auto_detect_from_tarball(
        self, integration_client, test_package
    ):
        """Test version auto-detection from tarball filename pattern."""
        project_name, package_name = test_package

        result = upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"auto version test",
            filename="myapp-2.1.0.tar.gz",
        )

        assert result["version"] == "2.1.0"
        # Tarball metadata extractor parses version from filename
        assert result["version_source"] == "metadata"

    @pytest.mark.integration
    def test_upload_with_version_auto_detect_v_prefix(
        self, integration_client, test_package
    ):
        """Test version auto-detection strips 'v' prefix from tarball filename."""
        project_name, package_name = test_package

        result = upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"v prefix test",
            filename="package-v3.0.0.tar.gz",
        )

        assert result["version"] == "3.0.0"
        # Tarball metadata extractor parses version from filename
        assert result["version_source"] == "metadata"

    @pytest.mark.integration
    def test_upload_duplicate_version_warning(self, integration_client, test_package):
        """Test that duplicate version during upload returns response without error."""
        project_name, package_name = test_package

        # Upload with version 1.0.0
        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"first upload",
            version="1.0.0",
        )

        # Upload different content with same version - should succeed but no new version
        result = upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"second upload different content",
            version="1.0.0",
        )

        # Upload succeeds but version may not be set (duplicate)
        assert result["artifact_id"]


class TestVersionCRUD:
    """Tests for version list, get, delete operations."""

    @pytest.mark.integration
    def test_list_versions(self, integration_client, test_package):
        """Test listing versions for a package."""
        project_name, package_name = test_package

        # Create some versions
        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"v1 content",
            version="1.0.0",
        )
        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"v2 content",
            version="2.0.0",
        )

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/versions"
        )
        assert response.status_code == 200

        data = response.json()
        assert "items" in data
        assert "pagination" in data

        versions = [v["version"] for v in data["items"]]
        assert "1.0.0" in versions
        assert "2.0.0" in versions

    @pytest.mark.integration
    def test_list_versions_with_artifact_info(self, integration_client, test_package):
        """Test that version list includes artifact metadata."""
        project_name, package_name = test_package

        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"version with info",
            version="1.0.0",
            tag="release",
        )

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/versions"
        )
        assert response.status_code == 200

        data = response.json()
        assert len(data["items"]) >= 1

        version_item = next(
            (v for v in data["items"] if v["version"] == "1.0.0"), None
        )
        assert version_item is not None
        assert "size" in version_item
        assert "artifact_id" in version_item
        assert "tags" in version_item
        assert "release" in version_item["tags"]

    @pytest.mark.integration
    def test_get_version(self, integration_client, test_package):
        """Test getting a specific version."""
        project_name, package_name = test_package

        upload_result = upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"get version test",
            version="3.0.0",
        )

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/versions/3.0.0"
        )
        assert response.status_code == 200

        data = response.json()
        assert data["version"] == "3.0.0"
        assert data["artifact_id"] == upload_result["artifact_id"]
        assert data["version_source"] == "explicit"

    @pytest.mark.integration
    def test_get_version_not_found(self, integration_client, test_package):
        """Test getting a non-existent version returns 404."""
        project_name, package_name = test_package

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/versions/99.99.99"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_delete_version(self, integration_client, test_package):
        """Test deleting a version."""
        project_name, package_name = test_package

        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"delete version test",
            version="4.0.0",
        )

        # Delete version
        response = integration_client.delete(
            f"/api/v1/project/{project_name}/{package_name}/versions/4.0.0"
        )
        assert response.status_code == 204

        # Verify deleted
        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/versions/4.0.0"
        )
        assert response.status_code == 404


class TestVersionDownload:
    """Tests for downloading artifacts by version reference."""

    @pytest.mark.integration
    def test_download_by_version_prefix(self, integration_client, test_package):
        """Test downloading an artifact using version: prefix."""
        project_name, package_name = test_package
        content = b"download by version test"

        upload_test_file(
            integration_client,
            project_name,
            package_name,
            content,
            version="5.0.0",
        )

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/version:5.0.0",
            follow_redirects=False,
        )

        # Should either redirect or return content
        assert response.status_code in [200, 302, 307]

    @pytest.mark.integration
    def test_download_by_implicit_version(self, integration_client, test_package):
        """Test downloading an artifact using version number directly (no prefix)."""
        project_name, package_name = test_package
        content = b"implicit version download test"

        upload_test_file(
            integration_client,
            project_name,
            package_name,
            content,
            version="6.0.0",
        )

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/6.0.0",
            follow_redirects=False,
        )

        # Should resolve version first (before tag)
        assert response.status_code in [200, 302, 307]

    @pytest.mark.integration
    def test_version_takes_precedence_over_tag(self, integration_client, test_package):
        """Test that version is checked before tag when resolving refs."""
        project_name, package_name = test_package

        # Upload with version "1.0"
        version_result = upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"version content",
            version="1.0",
        )

        # Create a tag with the same name "1.0" pointing to a different artifact
        tag_result = upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"tag content different",
            tag="1.0",
        )

        # Download by "1.0" should resolve to the version, not the tag,
        # since version lookup happens before tag lookup
        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/1.0",
            follow_redirects=False,
        )

        assert response.status_code in [200, 302, 307]
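    # Ref resolution order implied by these tests (sketch; the helper names
    # are hypothetical, the actual server code may differ):
    #
    #     def resolve(ref):
    #         if ":" in ref:  # "version:", "tag:", "artifact:" prefixes win
    #             return resolve_prefixed(ref)
    #         return lookup_version(ref) or lookup_tag(ref) or lookup_artifact(ref)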


class TestTagVersionEnrichment:
    """Tests for tag responses including version information."""

    @pytest.mark.integration
    def test_tag_response_includes_version(self, integration_client, test_package):
        """Test that tag responses include version of the artifact."""
        project_name, package_name = test_package

        # Upload with both version and tag
        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"enriched tag test",
            version="7.0.0",
            tag="stable",
        )

        # Get tag and check version field
        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/tags/stable"
        )
        assert response.status_code == 200

        data = response.json()
        assert data["name"] == "stable"
        assert data["version"] == "7.0.0"

    @pytest.mark.integration
    def test_tag_list_includes_versions(self, integration_client, test_package):
        """Test that tag list responses include version for each tag."""
        project_name, package_name = test_package

        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"list version test",
            version="8.0.0",
            tag="latest",
        )

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/tags"
        )
        assert response.status_code == 200

        data = response.json()
        tag_item = next((t for t in data["items"] if t["name"] == "latest"), None)
        assert tag_item is not None
        assert tag_item.get("version") == "8.0.0"


class TestVersionPagination:
    """Tests for version listing pagination and sorting."""

    @pytest.mark.integration
    def test_versions_pagination(self, integration_client, test_package):
        """Test version listing respects pagination."""
        project_name, package_name = test_package

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/versions?limit=5"
        )
        assert response.status_code == 200

        data = response.json()
        assert "pagination" in data
        assert data["pagination"]["limit"] == 5
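        # Assumed response envelope, inferred from these assertions (fields
        # beyond "items", "pagination", and "limit" are illustrative):
        #
        #     {"items": [...], "pagination": {"limit": 5, "offset": 0, "total": 12}}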

    @pytest.mark.integration
    def test_versions_sorting(self, integration_client, test_package):
        """Test version listing can be sorted."""
        project_name, package_name = test_package

        # Create versions with different timestamps
        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"sort test 1",
            version="1.0.0",
        )
        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"sort test 2",
            version="2.0.0",
        )

        # Test ascending sort
        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/versions?sort=version&order=asc"
        )
        assert response.status_code == 200

        data = response.json()
        versions = [v["version"] for v in data["items"]]
        # 1.0.0 should come before 2.0.0 when sorted ascending
        if len(versions) >= 2:
            assert versions.index("1.0.0") < versions.index("2.0.0")
1080
backend/tests/test_dependencies.py
Normal file
File diff suppressed because it is too large
@@ -378,7 +378,7 @@ class TestDeduplicationBehavior:
        result2 = mock_storage._store_simple(file2)

        assert result1.sha256 == result2.sha256
-       assert result1.s3_key == result2.s3_key
+       assert result1.s3_key == result2.s3_key  # gitleaks:allow

    @pytest.mark.unit
    def test_different_content_different_keys(self, mock_storage):
@@ -393,7 +393,7 @@ class TestDeduplicationBehavior:
        result2 = mock_storage._store_simple(file2)

        assert result1.sha256 != result2.sha256
-       assert result1.s3_key != result2.s3_key
+       assert result1.s3_key != result2.s3_key  # gitleaks:allow


# =============================================================================
@@ -6,7 +6,7 @@ services:
      context: .
      dockerfile: Dockerfile.local
    ports:
-      - "8080:8080"
+      - "0.0.0.0:8080:8080"
    environment:
      - ORCHARD_SERVER_HOST=0.0.0.0
      - ORCHARD_SERVER_PORT=8080
@@ -24,6 +24,8 @@ services:
      - ORCHARD_S3_USE_PATH_STYLE=true
      - ORCHARD_REDIS_HOST=redis
      - ORCHARD_REDIS_PORT=6379
      # Higher rate limit for local development/testing
      - ORCHARD_LOGIN_RATE_LIMIT=1000/minute
    depends_on:
      postgres:
        condition: service_healthy
@@ -40,6 +42,15 @@ services:
      timeout: 3s
      start_period: 10s
      retries: 3
    security_opt:
      - no-new-privileges:true
    cap_drop:
      - ALL
    deploy:
      resources:
        limits:
          cpus: '1.0'
          memory: 1G
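    # Hardening rationale (sketch): no-new-privileges blocks setuid/setgid
    # escalation inside the container, and cap_drop: ALL removes every default
    # Linux capability, none of which a plain HTTP service on an unprivileged
    # port needs. The deploy.resources limits apply only with newer Compose
    # releases (or swarm/compatibility mode), so read them as stated intent.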
postgres:
|
postgres:
|
||||||
image: postgres:16-alpine
|
image: postgres:16-alpine
|
||||||
@@ -51,7 +62,7 @@ services:
|
|||||||
- postgres-data-local:/var/lib/postgresql/data
|
- postgres-data-local:/var/lib/postgresql/data
|
||||||
- ./migrations:/docker-entrypoint-initdb.d:ro
|
- ./migrations:/docker-entrypoint-initdb.d:ro
|
||||||
ports:
|
ports:
|
||||||
- "5432:5432"
|
- "127.0.0.1:5432:5432"
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD-SHELL", "pg_isready -U orchard -d orchard"]
|
test: ["CMD-SHELL", "pg_isready -U orchard -d orchard"]
|
||||||
interval: 10s
|
interval: 10s
|
||||||
@@ -60,6 +71,11 @@ services:
|
|||||||
networks:
|
networks:
|
||||||
- orchard-network
|
- orchard-network
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
cpus: '0.5'
|
||||||
|
memory: 512M
|
||||||
|
|
||||||
minio:
|
minio:
|
||||||
image: minio/minio:latest
|
image: minio/minio:latest
|
||||||
@@ -70,8 +86,8 @@ services:
|
|||||||
volumes:
|
volumes:
|
||||||
- minio-data-local:/data
|
- minio-data-local:/data
|
||||||
ports:
|
ports:
|
||||||
- "9000:9000"
|
- "127.0.0.1:9000:9000"
|
||||||
- "9001:9001"
|
- "127.0.0.1:9001:9001"
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "mc", "ready", "local"]
|
test: ["CMD", "mc", "ready", "local"]
|
||||||
interval: 10s
|
interval: 10s
|
||||||
@@ -80,6 +96,11 @@ services:
|
|||||||
networks:
|
networks:
|
||||||
- orchard-network
|
- orchard-network
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
cpus: '0.5'
|
||||||
|
memory: 512M
|
||||||
|
|
||||||
minio-init:
|
minio-init:
|
||||||
image: minio/mc:latest
|
image: minio/mc:latest
|
||||||
@@ -95,6 +116,11 @@ services:
|
|||||||
"
|
"
|
||||||
networks:
|
networks:
|
||||||
- orchard-network
|
- orchard-network
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
cpus: '0.25'
|
||||||
|
memory: 128M
|
||||||
|
|
||||||
redis:
|
redis:
|
||||||
image: redis:7-alpine
|
image: redis:7-alpine
|
||||||
@@ -102,7 +128,7 @@ services:
|
|||||||
volumes:
|
volumes:
|
||||||
- redis-data-local:/data
|
- redis-data-local:/data
|
||||||
ports:
|
ports:
|
||||||
- "6379:6379"
|
- "127.0.0.1:6379:6379"
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "redis-cli", "ping"]
|
test: ["CMD", "redis-cli", "ping"]
|
||||||
interval: 10s
|
interval: 10s
|
||||||
@@ -111,6 +137,11 @@ services:
|
|||||||
networks:
|
networks:
|
||||||
- orchard-network
|
- orchard-network
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
cpus: '0.25'
|
||||||
|
memory: 256M
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
postgres-data-local:
|
postgres-data-local:
|
||||||
|
@@ -6,7 +6,7 @@ services:
       context: .
       dockerfile: Dockerfile
     ports:
-      - "8080:8080"
+      - "127.0.0.1:8080:8080"
     environment:
       - ORCHARD_SERVER_HOST=0.0.0.0
       - ORCHARD_SERVER_PORT=8080
@@ -34,6 +34,21 @@ services:
     networks:
       - orchard-network
     restart: unless-stopped
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
+      interval: 30s
+      timeout: 3s
+      start_period: 10s
+      retries: 3
+    security_opt:
+      - no-new-privileges:true
+    cap_drop:
+      - ALL
+    deploy:
+      resources:
+        limits:
+          cpus: '1.0'
+          memory: 1G

   postgres:
     image: containers.global.bsf.tools/postgres:16-alpine
@@ -45,7 +60,7 @@ services:
       - postgres-data:/var/lib/postgresql/data
       - ./migrations:/docker-entrypoint-initdb.d:ro
     ports:
-      - "5432:5432"
+      - "127.0.0.1:5432:5432"
     healthcheck:
       test: ["CMD-SHELL", "pg_isready -U orchard -d orchard"]
       interval: 10s
@@ -54,6 +69,15 @@ services:
     networks:
       - orchard-network
     restart: unless-stopped
+    security_opt:
+      - no-new-privileges:true
+    cap_drop:
+      - ALL
+    deploy:
+      resources:
+        limits:
+          cpus: '0.5'
+          memory: 512M

   minio:
     image: containers.global.bsf.tools/minio/minio:latest
@@ -64,8 +88,8 @@ services:
     volumes:
       - minio-data:/data
     ports:
-      - "9000:9000"
-      - "9001:9001"
+      - "127.0.0.1:9000:9000"
+      - "127.0.0.1:9001:9001"
     healthcheck:
       test: ["CMD", "mc", "ready", "local"]
       interval: 10s
@@ -74,6 +98,15 @@ services:
     networks:
       - orchard-network
     restart: unless-stopped
+    security_opt:
+      - no-new-privileges:true
+    cap_drop:
+      - ALL
+    deploy:
+      resources:
+        limits:
+          cpus: '0.5'
+          memory: 512M

   minio-init:
     image: containers.global.bsf.tools/minio/mc:latest
@@ -89,6 +122,15 @@ services:
       "
     networks:
       - orchard-network
+    security_opt:
+      - no-new-privileges:true
+    cap_drop:
+      - ALL
+    deploy:
+      resources:
+        limits:
+          cpus: '0.25'
+          memory: 128M

   redis:
     image: containers.global.bsf.tools/redis:7-alpine
@@ -96,7 +138,7 @@ services:
     volumes:
       - redis-data:/data
     ports:
-      - "6379:6379"
+      - "127.0.0.1:6379:6379"
     healthcheck:
       test: ["CMD", "redis-cli", "ping"]
       interval: 10s
@@ -105,6 +147,15 @@ services:
     networks:
       - orchard-network
     restart: unless-stopped
+    security_opt:
+      - no-new-privileges:true
+    cap_drop:
+      - ALL
+    deploy:
+      resources:
+        limits:
+          cpus: '0.25'
+          memory: 256M

 volumes:
   postgres-data:
docs/integrity-verification.md (new file, 294 lines)
@@ -0,0 +1,294 @@
# Integrity Verification

Orchard uses content-addressable storage with SHA256 hashing to ensure artifact integrity. This document describes how integrity verification works and how to use it.

## How It Works

### Content-Addressable Storage

Orchard stores artifacts using their SHA256 hash as the unique identifier. This provides several benefits:

1. **Automatic deduplication**: Identical content is stored only once
2. **Built-in integrity**: The artifact ID *is* the content hash
3. **Tamper detection**: Any modification changes the hash, making corruption detectable

When you upload a file:

1. Orchard computes the SHA256 hash of the content
2. The hash becomes the artifact ID (64-character hex string)
3. The file is stored in S3 at `fruits/{hash[0:2]}/{hash[2:4]}/{hash}` (see the sketch below)
4. The hash and metadata are recorded in the database
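As a minimal sketch of steps 1-3 (the helper names here are illustrative, not Orchard's internal API):

```python
import hashlib

def artifact_id(content: bytes) -> str:
    # Steps 1-2: the SHA256 hex digest doubles as the artifact ID
    return hashlib.sha256(content).hexdigest()

def s3_key(artifact_hash: str) -> str:
    # Step 3: the first two byte pairs of the hash shard the key space
    return f"fruits/{artifact_hash[0:2]}/{artifact_hash[2:4]}/{artifact_hash}"

h = artifact_id(b"hello world\n")
print(h)          # a948904f2f0f479b8f8197694b30184b0d2ed1c1cd2a1ec0fb85d299a192a447
print(s3_key(h))  # fruits/a9/48/a948904f...
```

Because the key is derived purely from content, two uploads of the same bytes land on the same key, which is what gives Orchard deduplication for free.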
### Hash Format

- Algorithm: SHA256
- Format: 64-character lowercase hexadecimal string
- Example: `dffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f`

## Client-Side Verification

### Before Upload

Compute the hash locally before uploading to verify the server received your content correctly:

```python
import hashlib

import requests

def compute_sha256(content: bytes) -> str:
    return hashlib.sha256(content).hexdigest()

# Compute hash before upload
content = open("myfile.tar.gz", "rb").read()
local_hash = compute_sha256(content)

# Upload the file
response = requests.post(
    f"{base_url}/api/v1/project/{project}/{package}/upload",
    files={"file": ("myfile.tar.gz", content)},
)
result = response.json()

# Verify server computed the same hash
assert result["artifact_id"] == local_hash, "Hash mismatch!"
```

### Providing Expected Hash on Upload

You can provide the expected hash in the upload request. The server will reject the upload if the computed hash doesn't match:

```python
response = requests.post(
    f"{base_url}/api/v1/project/{project}/{package}/upload",
    files={"file": ("myfile.tar.gz", content)},
    headers={"X-Checksum-SHA256": local_hash},
)

# Returns 422 if hash doesn't match
if response.status_code == 422:
    print("Checksum mismatch - upload rejected")
```

### After Download

Verify downloaded content matches the expected hash using response headers:

```python
response = requests.get(
    f"{base_url}/api/v1/project/{project}/{package}/+/{tag}",
    params={"mode": "proxy"},
)

# Get expected hash from header
expected_hash = response.headers.get("X-Checksum-SHA256")

# Compute hash of downloaded content
actual_hash = compute_sha256(response.content)

# Verify
if actual_hash != expected_hash:
    raise Exception(f"Integrity check failed! Expected {expected_hash}, got {actual_hash}")
```

### Response Headers for Verification

Download responses include multiple headers for verification:

| Header | Format | Description |
|--------|--------|-------------|
| `X-Checksum-SHA256` | Hex string | SHA256 hash (64 chars) |
| `ETag` | `"<hash>"` | SHA256 hash in quotes |
| `Digest` | `sha-256=<base64>` | RFC 3230 format (base64-encoded) |
| `Content-Length` | Integer | File size in bytes |
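The hex headers and the `Digest` header carry the same digest in different encodings; a minimal sketch of the conversion (the payload bytes here are a placeholder):

```python
import base64
import hashlib

content = b"example artifact bytes"  # placeholder payload
digest = hashlib.sha256(content)

hex_form = digest.hexdigest()                          # X-Checksum-SHA256 / ETag value
b64_form = base64.b64encode(digest.digest()).decode()  # Digest: sha-256=<this>

# Round-trip: decoding the base64 form recovers the hex form
assert base64.b64decode(b64_form).hex() == hex_form
print(f"Digest: sha-256={b64_form}")
```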
### Server-Side Verification on Download

Request server-side verification during download:

```bash
# Pre-verification: Server verifies before streaming (returns 500 if corrupt)
curl "${base_url}/api/v1/project/${project}/${package}/+/${tag}?mode=proxy&verify=true&verify_mode=pre"

# Stream verification: Server verifies while streaming (logs error if corrupt)
curl "${base_url}/api/v1/project/${project}/${package}/+/${tag}?mode=proxy&verify=true&verify_mode=stream"
```

The `X-Verified` header indicates whether server-side verification was performed:

- `X-Verified: true` - Content was verified by the server
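For example, a client can refuse a download that the server did not attest to (same placeholder variables as the earlier examples):

```python
import requests

# Ask for pre-verification, then confirm the server's attestation header
response = requests.get(
    f"{base_url}/api/v1/project/{project}/{package}/+/{tag}",
    params={"mode": "proxy", "verify": "true", "verify_mode": "pre"},
)
if response.headers.get("X-Verified") != "true":
    raise RuntimeError("Server did not verify this download")
```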
## Server-Side Consistency Check

### Consistency Check Endpoint

Administrators can run a consistency check to verify all stored artifacts:

```bash
curl "${base_url}/api/v1/admin/consistency-check"
```

Response:

```json
{
  "total_artifacts_checked": 1234,
  "healthy": true,
  "orphaned_s3_objects": 0,
  "missing_s3_objects": 0,
  "size_mismatches": 0,
  "orphaned_s3_keys": [],
  "missing_s3_keys": [],
  "size_mismatch_artifacts": []
}
```

### What the Check Verifies

1. **Missing S3 objects**: Database records with no corresponding S3 object
2. **Orphaned S3 objects**: S3 objects with no database record
3. **Size mismatches**: S3 object size doesn't match the database record (all three comparisons are sketched below)
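Conceptually, the check reduces to set comparisons between the database index and a bucket listing. A hedged sketch of that idea, not Orchard's actual implementation:

```python
def consistency_report(db_artifacts: dict[str, int], s3_objects: dict[str, int]) -> dict:
    """Compare DB records (s3_key -> recorded size) against a bucket listing
    (s3_key -> actual size). Both input shapes are assumptions for illustration."""
    missing = [k for k in db_artifacts if k not in s3_objects]
    orphaned = [k for k in s3_objects if k not in db_artifacts]
    mismatched = [
        k for k, size in db_artifacts.items()
        if k in s3_objects and s3_objects[k] != size
    ]
    return {
        "healthy": not (missing or orphaned or mismatched),
        "missing_s3_keys": missing,
        "orphaned_s3_keys": orphaned,
        "size_mismatch_artifacts": mismatched,
    }
```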
### Running Consistency Checks

**Manual check:**

```bash
# Check all artifacts
curl "${base_url}/api/v1/admin/consistency-check"

# Limit results (for large deployments)
curl "${base_url}/api/v1/admin/consistency-check?limit=100"
```

**Scheduled checks (recommended):**

Set up a cron job or Kubernetes CronJob to run periodic checks:

```yaml
# Kubernetes CronJob example
apiVersion: batch/v1
kind: CronJob
metadata:
  name: orchard-consistency-check
spec:
  schedule: "0 2 * * *"  # Daily at 2 AM
  jobTemplate:
    spec:
      template:
        spec:
          containers:
            - name: check
              # Note: the stock curl image does not ship jq; use an image that
              # provides both curl and jq for the script below.
              image: curlimages/curl
              command:
                - /bin/sh
                - -c
                - |
                  response=$(curl -s "${ORCHARD_URL}/api/v1/admin/consistency-check")
                  healthy=$(echo "$response" | jq -r '.healthy')
                  if [ "$healthy" != "true" ]; then
                    echo "ALERT: Consistency check failed!"
                    echo "$response"
                    exit 1
                  fi
                  echo "Consistency check passed"
          restartPolicy: OnFailure
```

## Recovery Procedures

### Corrupted Artifact (Size Mismatch)

If the consistency check reports size mismatches:

1. **Identify affected artifacts:**

   ```bash
   curl "${base_url}/api/v1/admin/consistency-check" | jq '.size_mismatch_artifacts'
   ```

2. **Check if artifact can be re-uploaded:**
   - If the original content is available, delete the corrupted artifact and re-upload
   - The same content will produce the same artifact ID

3. **If original content is lost:**
   - The artifact data is corrupted and cannot be recovered
   - Delete the artifact record and notify affected users
   - Consider restoring from backup if available

### Missing S3 Object

If database records exist but S3 objects are missing:

1. **Identify affected artifacts:**

   ```bash
   curl "${base_url}/api/v1/admin/consistency-check" | jq '.missing_s3_keys'
   ```

2. **Check S3 bucket:**
   - Verify the S3 bucket exists and is accessible
   - Check S3 access logs for deletion events
   - Check if objects were moved or lifecycle-deleted

3. **Recovery options:**
   - Restore from S3 versioning (if enabled)
   - Restore from backup
   - Re-upload original content (if available)
   - Delete orphaned database records

### Orphaned S3 Objects

If S3 objects exist without database records:

1. **Identify orphaned objects:**

   ```bash
   curl "${base_url}/api/v1/admin/consistency-check" | jq '.orphaned_s3_keys'
   ```

2. **Investigate cause:**
   - Upload interrupted before database commit?
   - Database record deleted but S3 cleanup failed?

3. **Resolution:**
   - If content is needed, create the database record manually
   - If content is not needed, delete the S3 object to reclaim storage

### Preventive Measures

1. **Enable S3 versioning** to recover from accidental deletions
2. **Regular backups** of both database and S3 bucket
3. **Scheduled consistency checks** to detect issues early
4. **Monitoring and alerting** on consistency check failures
5. **Audit logging** to track all artifact operations

## Verification in CI/CD

### Verifying Artifacts in Pipelines

```bash
#!/bin/bash
# Download and verify artifact in CI pipeline

ARTIFACT_URL="${ORCHARD_URL}/api/v1/project/${PROJECT}/${PACKAGE}/+/${TAG}"

# Download with verification headers
response=$(curl -s -D - "${ARTIFACT_URL}?mode=proxy" -o artifact.tar.gz)
expected_hash=$(echo "$response" | grep -i "X-Checksum-SHA256" | cut -d: -f2 | tr -d ' \r')

# Compute actual hash
actual_hash=$(sha256sum artifact.tar.gz | cut -d' ' -f1)

# Verify
if [ "$actual_hash" != "$expected_hash" ]; then
  echo "ERROR: Integrity check failed!"
  echo "Expected: $expected_hash"
  echo "Actual: $actual_hash"
  exit 1
fi

echo "Integrity verified: $actual_hash"
```

### Using Server-Side Verification

For critical deployments, use server-side pre-verification:

```bash
# Server verifies before streaming - returns 500 if corrupt
curl -f "${ARTIFACT_URL}?mode=proxy&verify=true&verify_mode=pre" -o artifact.tar.gz
```

This ensures the artifact is verified before any bytes are streamed to your pipeline.
@@ -2,9 +2,9 @@
 <html lang="en">
   <head>
     <meta charset="UTF-8" />
-    <link rel="icon" type="image/svg+xml" href="/vite.svg" />
+    <link rel="icon" type="image/svg+xml" href="/orchard.svg" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <title>Orchard - Content-Addressable Storage</title>
+    <title>Orchard</title>
   </head>
   <body>
     <div id="root"></div>
frontend/package-lock.json (new generated file, 4717 lines; diff suppressed because it is too large)
@@ -6,18 +6,34 @@
   "scripts": {
     "dev": "vite",
     "build": "tsc && vite build",
-    "preview": "vite preview"
+    "preview": "vite preview",
+    "test": "vitest",
+    "test:run": "vitest run",
+    "test:coverage": "vitest run --coverage"
   },
   "dependencies": {
     "react": "^18.2.0",
     "react-dom": "^18.2.0",
-    "react-router-dom": "^6.21.3"
+    "react-router-dom": "6.28.0"
   },
   "devDependencies": {
+    "@testing-library/jest-dom": "^6.4.2",
+    "@testing-library/react": "^14.2.1",
+    "@testing-library/user-event": "^14.5.2",
     "@types/react": "^18.2.48",
     "@types/react-dom": "^18.2.18",
     "@vitejs/plugin-react": "^4.2.1",
+    "@vitest/coverage-v8": "^1.3.1",
+    "jsdom": "^24.0.0",
     "typescript": "^5.3.3",
-    "vite": "^5.0.12"
+    "vite": "^5.0.12",
+    "vitest": "^1.3.1"
+  },
+  "overrides": {
+    "ws": "8.18.0",
+    "ufo": "1.5.4",
+    "rollup": "4.52.4",
+    "caniuse-lite": "1.0.30001692",
+    "baseline-browser-mapping": "2.9.5"
   }
 }
frontend/public/orchard.svg (new file, 18 lines)
@@ -0,0 +1,18 @@
<svg width="32" height="32" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
  <!-- Green background -->
  <rect width="24" height="24" rx="4" fill="#4CAF50"/>
  <!-- Three fruit trees representing an orchard - shifted down to center -->
  <g transform="translate(0, 2)">
    <!-- Left tree - rounded canopy -->
    <path d="M6 14 Q6 8 3 8 Q6 4 6 4 Q6 4 9 8 Q6 8 6 14" fill="white" opacity="0.7"/>
    <rect x="5.25" y="13" width="1.5" height="4" fill="white" opacity="0.7"/>
    <!-- Center tree - larger rounded canopy -->
    <path d="M12 12 Q12 5 8 5 Q12 1 12 1 Q12 1 16 5 Q12 5 12 12" fill="white"/>
    <rect x="11.25" y="11" width="1.5" height="5" fill="white"/>
    <!-- Right tree - rounded canopy -->
    <path d="M18 14 Q18 8 15 8 Q18 4 18 4 Q18 4 21 8 Q18 8 18 14" fill="white" opacity="0.7"/>
    <rect x="17.25" y="13" width="1.5" height="4" fill="white" opacity="0.7"/>
    <!-- Ground -->
    <ellipse cx="12" cy="18" rx="8" ry="1.5" fill="white" opacity="0.4"/>
  </g>
</svg>
@@ -1,20 +1,67 @@
-import { Routes, Route } from 'react-router-dom';
+import { Routes, Route, Navigate, useLocation } from 'react-router-dom';
+import { AuthProvider, useAuth } from './contexts/AuthContext';
 import Layout from './components/Layout';
 import Home from './pages/Home';
 import ProjectPage from './pages/ProjectPage';
 import PackagePage from './pages/PackagePage';
 import Dashboard from './pages/Dashboard';
+import LoginPage from './pages/LoginPage';
+import ChangePasswordPage from './pages/ChangePasswordPage';
+import APIKeysPage from './pages/APIKeysPage';
+import AdminUsersPage from './pages/AdminUsersPage';
+import AdminOIDCPage from './pages/AdminOIDCPage';
+import ProjectSettingsPage from './pages/ProjectSettingsPage';
+
+// Component that checks if user must change password
+function RequirePasswordChange({ children }: { children: React.ReactNode }) {
+  const { user, loading } = useAuth();
+  const location = useLocation();
+
+  if (loading) {
+    return null;
+  }
+
+  // If user is logged in and must change password, redirect to change password page
+  if (user?.must_change_password && location.pathname !== '/change-password') {
+    return <Navigate to="/change-password" replace />;
+  }
+
+  return <>{children}</>;
+}
+
+function AppRoutes() {
+  return (
+    <Routes>
+      <Route path="/login" element={<LoginPage />} />
+      <Route path="/change-password" element={<ChangePasswordPage />} />
+      <Route
+        path="*"
+        element={
+          <RequirePasswordChange>
+            <Layout>
+              <Routes>
+                <Route path="/" element={<Home />} />
+                <Route path="/dashboard" element={<Dashboard />} />
+                <Route path="/settings/api-keys" element={<APIKeysPage />} />
+                <Route path="/admin/users" element={<AdminUsersPage />} />
+                <Route path="/admin/oidc" element={<AdminOIDCPage />} />
+                <Route path="/project/:projectName" element={<ProjectPage />} />
+                <Route path="/project/:projectName/settings" element={<ProjectSettingsPage />} />
+                <Route path="/project/:projectName/:packageName" element={<PackagePage />} />
+              </Routes>
+            </Layout>
+          </RequirePasswordChange>
+        }
+      />
+    </Routes>
+  );
+}
+
 function App() {
   return (
-    <Layout>
-      <Routes>
-        <Route path="/" element={<Home />} />
-        <Route path="/dashboard" element={<Dashboard />} />
-        <Route path="/project/:projectName" element={<ProjectPage />} />
-        <Route path="/project/:projectName/:packageName" element={<PackagePage />} />
-      </Routes>
-    </Layout>
+    <AuthProvider>
+      <AppRoutes />
+    </AuthProvider>
   );
 }
@@ -17,14 +17,66 @@ import {
   DeduplicationStats,
   TimelineStats,
   CrossProjectStats,
+  User,
+  LoginCredentials,
+  APIKey,
+  APIKeyCreate,
+  APIKeyCreateResponse,
+  AdminUser,
+  UserCreate,
+  UserUpdate,
+  AccessPermission,
+  AccessPermissionCreate,
+  AccessPermissionUpdate,
+  AccessLevel,
+  OIDCConfig,
+  OIDCConfigUpdate,
+  OIDCStatus,
+  PackageVersion,
+  ArtifactDependenciesResponse,
+  ReverseDependenciesResponse,
+  DependencyResolutionResponse,
 } from './types';

 const API_BASE = '/api/v1';

+// Custom error classes for better error handling
+export class ApiError extends Error {
+  status: number;
+
+  constructor(message: string, status: number) {
+    super(message);
+    this.name = 'ApiError';
+    this.status = status;
+  }
+}
+
+export class UnauthorizedError extends ApiError {
+  constructor(message: string = 'Not authenticated') {
+    super(message, 401);
+    this.name = 'UnauthorizedError';
+  }
+}
+
+export class ForbiddenError extends ApiError {
+  constructor(message: string = 'Access denied') {
+    super(message, 403);
+    this.name = 'ForbiddenError';
+  }
+}
+
 async function handleResponse<T>(response: Response): Promise<T> {
   if (!response.ok) {
     const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
-    throw new Error(error.detail || `HTTP ${response.status}`);
+    const message = error.detail || `HTTP ${response.status}`;
+
+    if (response.status === 401) {
+      throw new UnauthorizedError(message);
+    }
+    if (response.status === 403) {
+      throw new ForbiddenError(message);
+    }
+    throw new ApiError(message, response.status);
   }
   return response.json();
 }
@@ -40,6 +92,55 @@ function buildQueryString(params: Record<string, unknown>): string {
   return query ? `?${query}` : '';
 }

+// Auth API
+export async function login(credentials: LoginCredentials): Promise<User> {
+  const response = await fetch(`${API_BASE}/auth/login`, {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify(credentials),
+    credentials: 'include',
+  });
+  return handleResponse<User>(response);
+}
+
+export async function logout(): Promise<void> {
+  const response = await fetch(`${API_BASE}/auth/logout`, {
+    method: 'POST',
+    credentials: 'include',
+  });
+  if (!response.ok) {
+    const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
+    throw new Error(error.detail || `HTTP ${response.status}`);
+  }
+}
+
+export async function changePassword(currentPassword: string, newPassword: string): Promise<void> {
+  const response = await fetch(`${API_BASE}/auth/change-password`, {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ current_password: currentPassword, new_password: newPassword }),
+    credentials: 'include',
+  });
+  if (!response.ok) {
+    const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
+    throw new Error(error.detail || `HTTP ${response.status}`);
+  }
+}
+
+export async function getCurrentUser(): Promise<User | null> {
+  try {
+    const response = await fetch(`${API_BASE}/auth/me`, {
+      credentials: 'include',
+    });
+    if (response.status === 401) {
+      return null;
+    }
+    return handleResponse<User>(response);
+  } catch {
+    return null;
+  }
+}
+
 // Global Search API
 export async function globalSearch(query: string, limit: number = 5): Promise<GlobalSearchResponse> {
   const params = buildQueryString({ q: query, limit });
@@ -73,6 +174,30 @@ export async function getProject(name: string): Promise<Project> {
   return handleResponse<Project>(response);
 }

+export async function updateProject(
+  projectName: string,
+  data: { description?: string; is_public?: boolean }
+): Promise<Project> {
+  const response = await fetch(`${API_BASE}/projects/${projectName}`, {
+    method: 'PUT',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify(data),
+    credentials: 'include',
+  });
+  return handleResponse<Project>(response);
+}
+
+export async function deleteProject(projectName: string): Promise<void> {
+  const response = await fetch(`${API_BASE}/projects/${projectName}`, {
+    method: 'DELETE',
+    credentials: 'include',
+  });
+  if (!response.ok) {
+    const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
+    throw new Error(error.detail || `HTTP ${response.status}`);
+  }
+}
+
 // Package API
 export async function listPackages(projectName: string, params: PackageListParams = {}): Promise<PaginatedResponse<Package>> {
   const query = buildQueryString(params as Record<string, unknown>);
@@ -142,12 +267,21 @@ export async function listPackageArtifacts(
 }

 // Upload
-export async function uploadArtifact(projectName: string, packageName: string, file: File, tag?: string): Promise<UploadResponse> {
+export async function uploadArtifact(
+  projectName: string,
+  packageName: string,
+  file: File,
+  tag?: string,
+  version?: string
+): Promise<UploadResponse> {
   const formData = new FormData();
   formData.append('file', file);
   if (tag) {
     formData.append('tag', tag);
   }
+  if (version) {
+    formData.append('version', version);
+  }
+
   const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/upload`, {
     method: 'POST',
@@ -186,3 +320,245 @@ export async function getCrossProjectStats(): Promise<CrossProjectStats> {
   const response = await fetch(`${API_BASE}/stats/cross-project`);
   return handleResponse<CrossProjectStats>(response);
 }
+
+export async function listAPIKeys(): Promise<APIKey[]> {
+  const response = await fetch(`${API_BASE}/auth/keys`, {
+    credentials: 'include',
+  });
+  return handleResponse<APIKey[]>(response);
+}
+
+export async function createAPIKey(data: APIKeyCreate): Promise<APIKeyCreateResponse> {
+  const response = await fetch(`${API_BASE}/auth/keys`, {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify(data),
+    credentials: 'include',
+  });
+  return handleResponse<APIKeyCreateResponse>(response);
+}
+
+export async function deleteAPIKey(id: string): Promise<void> {
+  const response = await fetch(`${API_BASE}/auth/keys/${id}`, {
+    method: 'DELETE',
+    credentials: 'include',
+  });
+  if (!response.ok) {
+    const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
+    throw new Error(error.detail || `HTTP ${response.status}`);
+  }
+}
+
+// Admin User Management API
+export async function listUsers(): Promise<AdminUser[]> {
+  const response = await fetch(`${API_BASE}/admin/users`, {
+    credentials: 'include',
+  });
+  return handleResponse<AdminUser[]>(response);
+}
+
+export async function createUser(data: UserCreate): Promise<AdminUser> {
+  const response = await fetch(`${API_BASE}/admin/users`, {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify(data),
+    credentials: 'include',
+  });
+  return handleResponse<AdminUser>(response);
+}
+
+export async function updateUser(username: string, data: UserUpdate): Promise<AdminUser> {
+  const response = await fetch(`${API_BASE}/admin/users/${username}`, {
+    method: 'PUT',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify(data),
+    credentials: 'include',
+  });
+  return handleResponse<AdminUser>(response);
+}
+
+export async function resetUserPassword(username: string, newPassword: string): Promise<void> {
+  const response = await fetch(`${API_BASE}/admin/users/${username}/reset-password`, {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ new_password: newPassword }),
+    credentials: 'include',
+  });
+  if (!response.ok) {
+    const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
+    throw new Error(error.detail || `HTTP ${response.status}`);
+  }
+}
+
+// Access Permission API
+export interface MyAccessResponse {
+  project: string;
+  access_level: AccessLevel | null;
+  is_owner: boolean;
+}
+
+export async function getMyProjectAccess(projectName: string): Promise<MyAccessResponse> {
+  const response = await fetch(`${API_BASE}/project/${projectName}/my-access`, {
+    credentials: 'include',
+  });
+  return handleResponse<MyAccessResponse>(response);
+}
+
+export async function listProjectPermissions(projectName: string): Promise<AccessPermission[]> {
+  const response = await fetch(`${API_BASE}/project/${projectName}/permissions`, {
+    credentials: 'include',
+  });
+  return handleResponse<AccessPermission[]>(response);
+}
+
+export async function grantProjectAccess(
+  projectName: string,
+  data: AccessPermissionCreate
+): Promise<AccessPermission> {
+  const response = await fetch(`${API_BASE}/project/${projectName}/permissions`, {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify(data),
+    credentials: 'include',
+  });
+  return handleResponse<AccessPermission>(response);
+}
+
+export async function updateProjectAccess(
+  projectName: string,
+  username: string,
+  data: AccessPermissionUpdate
+): Promise<AccessPermission> {
+  const response = await fetch(`${API_BASE}/project/${projectName}/permissions/${username}`, {
+    method: 'PUT',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify(data),
+    credentials: 'include',
+  });
+  return handleResponse<AccessPermission>(response);
+}
+
+export async function revokeProjectAccess(projectName: string, username: string): Promise<void> {
+  const response = await fetch(`${API_BASE}/project/${projectName}/permissions/${username}`, {
+    method: 'DELETE',
+    credentials: 'include',
+  });
+  if (!response.ok) {
+    const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
+    throw new Error(error.detail || `HTTP ${response.status}`);
+  }
+}
+
+// OIDC API
+export async function getOIDCStatus(): Promise<OIDCStatus> {
+  const response = await fetch(`${API_BASE}/auth/oidc/status`);
+  return handleResponse<OIDCStatus>(response);
+}
+
+export async function getOIDCConfig(): Promise<OIDCConfig> {
+  const response = await fetch(`${API_BASE}/auth/oidc/config`, {
+    credentials: 'include',
+  });
+  return handleResponse<OIDCConfig>(response);
+}
+
+export async function updateOIDCConfig(data: OIDCConfigUpdate): Promise<OIDCConfig> {
+  const response = await fetch(`${API_BASE}/auth/oidc/config`, {
+    method: 'PUT',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify(data),
+    credentials: 'include',
+  });
+  return handleResponse<OIDCConfig>(response);
+}
+
+export function getOIDCLoginUrl(returnTo?: string): string {
+  const params = new URLSearchParams();
+  if (returnTo) {
+    params.set('return_to', returnTo);
+  }
+  const query = params.toString();
+  return `${API_BASE}/auth/oidc/login${query ? `?${query}` : ''}`;
+}
+
+// Version API
+export async function listVersions(
+  projectName: string,
+  packageName: string,
+  params: ListParams = {}
+): Promise<PaginatedResponse<PackageVersion>> {
+  const query = buildQueryString(params as Record<string, unknown>);
+  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/versions${query}`);
+  return handleResponse<PaginatedResponse<PackageVersion>>(response);
+}
+
+export async function getVersion(
+  projectName: string,
+  packageName: string,
+  version: string
+): Promise<PackageVersion> {
+  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/versions/${version}`);
+  return handleResponse<PackageVersion>(response);
+}
+
+export async function deleteVersion(
+  projectName: string,
+  packageName: string,
+  version: string
+): Promise<void> {
+  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/versions/${version}`, {
+    method: 'DELETE',
+    credentials: 'include',
+  });
+  if (!response.ok) {
+    const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
+    throw new Error(error.detail || `HTTP ${response.status}`);
+  }
+}
+
+// Dependency API
+export async function getArtifactDependencies(artifactId: string): Promise<ArtifactDependenciesResponse> {
+  const response = await fetch(`${API_BASE}/artifact/${artifactId}/dependencies`);
+  return handleResponse<ArtifactDependenciesResponse>(response);
+}
+
+export async function getDependenciesByRef(
+  projectName: string,
+  packageName: string,
+  ref: string
+): Promise<ArtifactDependenciesResponse> {
+  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/+/${ref}/dependencies`);
+  return handleResponse<ArtifactDependenciesResponse>(response);
+}
+
+export async function getReverseDependencies(
+  projectName: string,
+  packageName: string,
+  params: { page?: number; limit?: number } = {}
+): Promise<ReverseDependenciesResponse> {
+  const query = buildQueryString(params as Record<string, unknown>);
+  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/reverse-dependencies${query}`);
+  return handleResponse<ReverseDependenciesResponse>(response);
+}
+
+export async function resolveDependencies(
+  projectName: string,
+  packageName: string,
+  ref: string
+): Promise<DependencyResolutionResponse> {
+  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/+/${ref}/resolve`);
+  return handleResponse<DependencyResolutionResponse>(response);
+}
+
+export async function getEnsureFile(
+  projectName: string,
+  packageName: string,
+  ref: string
+): Promise<string> {
+  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/+/${ref}/ensure`);
+  if (!response.ok) {
+    const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
+    throw new ApiError(error.detail || `HTTP ${response.status}`, response.status);
+  }
+  return response.text();
+}
frontend/src/components/AccessManagement.css (new file, 116 lines)
@@ -0,0 +1,116 @@
.access-management {
  margin-top: 1.5rem;
}

.access-management__header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 1rem;
}

.access-management__header h3 {
  margin: 0;
}

.access-management__form {
  background: var(--bg-tertiary);
  padding: 1rem;
  border-radius: 6px;
  margin-bottom: 1rem;
}

.access-management__form .form-row {
  display: flex;
  gap: 1rem;
  align-items: flex-end;
}

.access-management__form .form-group {
  flex: 1;
}

.access-management__form .form-group:last-of-type {
  flex: 0 0 auto;
}

.access-management__list {
  margin-top: 1rem;
}

.access-table {
  width: 100%;
  border-collapse: collapse;
}

.access-table th,
.access-table td {
  padding: 0.75rem;
  text-align: left;
  border-bottom: 1px solid var(--border-color);
}

.access-table th {
  font-weight: 600;
  color: var(--text-secondary);
  font-size: 0.875rem;
}

.access-table td.actions {
  display: flex;
  gap: 0.5rem;
}

.access-badge {
  display: inline-block;
  padding: 0.25rem 0.5rem;
  border-radius: 4px;
  font-size: 0.75rem;
  font-weight: 600;
  text-transform: capitalize;
}

.access-badge--read {
  background: var(--bg-tertiary);
  color: var(--text-secondary);
}

.access-badge--write {
  background: var(--color-info-bg);
  color: var(--color-info);
}

.access-badge--admin {
  background: var(--color-success-bg);
  color: var(--color-success);
}

.btn-sm {
  padding: 0.25rem 0.5rem;
  font-size: 0.875rem;
}

.btn-danger {
  background: var(--color-error);
  color: white;
}

.btn-danger:hover {
  background: #c0392b;
}

/* Expired permission styling */
.expired {
  color: var(--color-error);
  font-weight: 500;
}

/* Date input styling in table */
.access-table input[type="date"] {
  padding: 0.25rem 0.5rem;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: 4px;
  font-size: 0.875rem;
  color: var(--text-primary);
}
frontend/src/components/AccessManagement.tsx (new file, 296 lines)
@@ -0,0 +1,296 @@
|
import { useState, useEffect, useCallback } from 'react';
|
||||||
|
import { AccessPermission, AccessLevel } from '../types';
|
||||||
|
import {
|
||||||
|
listProjectPermissions,
|
||||||
|
grantProjectAccess,
|
||||||
|
updateProjectAccess,
|
||||||
|
revokeProjectAccess,
|
||||||
|
} from '../api';
|
||||||
|
import './AccessManagement.css';
|
||||||
|
|
||||||
|
interface AccessManagementProps {
|
||||||
|
projectName: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function AccessManagement({ projectName }: AccessManagementProps) {
|
||||||
|
const [permissions, setPermissions] = useState<AccessPermission[]>([]);
|
||||||
|
const [loading, setLoading] = useState(true);
|
||||||
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
const [success, setSuccess] = useState<string | null>(null);
|
||||||
|
|
||||||
|
// Form state
|
||||||
|
const [showAddForm, setShowAddForm] = useState(false);
|
||||||
|
const [newUsername, setNewUsername] = useState('');
|
||||||
|
const [newLevel, setNewLevel] = useState<AccessLevel>('read');
|
||||||
|
const [newExpiresAt, setNewExpiresAt] = useState('');
|
||||||
|
const [submitting, setSubmitting] = useState(false);
|
||||||
|
|
||||||
|
// Edit state
|
||||||
|
const [editingUser, setEditingUser] = useState<string | null>(null);
|
||||||
|
const [editLevel, setEditLevel] = useState<AccessLevel>('read');
|
||||||
|
const [editExpiresAt, setEditExpiresAt] = useState('');
|
||||||
|
|
||||||
|
const loadPermissions = useCallback(async () => {
|
||||||
|
try {
|
||||||
|
setLoading(true);
|
||||||
|
const data = await listProjectPermissions(projectName);
|
||||||
|
setPermissions(data);
|
||||||
|
setError(null);
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Failed to load permissions');
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
}, [projectName]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
loadPermissions();
|
||||||
|
}, [loadPermissions]);
|
||||||
|
|
||||||
|
const handleGrant = async (e: React.FormEvent) => {
|
||||||
|
e.preventDefault();
|
||||||
|
if (!newUsername.trim()) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
setSubmitting(true);
|
||||||
|
setError(null);
|
||||||
|
await grantProjectAccess(projectName, {
|
||||||
|
username: newUsername.trim(),
|
||||||
|
level: newLevel,
|
||||||
|
expires_at: newExpiresAt || undefined,
|
||||||
|
});
|
||||||
|
setSuccess(`Access granted to ${newUsername}`);
|
||||||
|
setNewUsername('');
|
||||||
|
setNewLevel('read');
|
||||||
|
setNewExpiresAt('');
|
||||||
|
setShowAddForm(false);
|
||||||
|
await loadPermissions();
|
||||||
|
setTimeout(() => setSuccess(null), 3000);
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Failed to grant access');
|
||||||
|
} finally {
|
||||||
|
setSubmitting(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleUpdate = async (username: string) => {
|
||||||
|
try {
|
||||||
|
setSubmitting(true);
|
||||||
|
setError(null);
|
||||||
|
await updateProjectAccess(projectName, username, {
|
||||||
|
level: editLevel,
|
||||||
|
expires_at: editExpiresAt || null,
|
||||||
|
});
|
||||||
|
setSuccess(`Updated access for ${username}`);
|
||||||
|
setEditingUser(null);
|
||||||
|
await loadPermissions();
|
||||||
|
setTimeout(() => setSuccess(null), 3000);
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Failed to update access');
|
||||||
|
} finally {
|
||||||
|
setSubmitting(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleRevoke = async (username: string) => {
|
||||||
|
if (!confirm(`Revoke access for ${username}?`)) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
setSubmitting(true);
|
||||||
|
setError(null);
|
||||||
|
await revokeProjectAccess(projectName, username);
|
||||||
|
setSuccess(`Access revoked for ${username}`);
|
||||||
|
await loadPermissions();
|
||||||
|
setTimeout(() => setSuccess(null), 3000);
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Failed to revoke access');
|
||||||
|
} finally {
|
||||||
|
setSubmitting(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const startEdit = (permission: AccessPermission) => {
|
||||||
|
setEditingUser(permission.user_id);
|
||||||
|
setEditLevel(permission.level as AccessLevel);
|
||||||
|
// Convert ISO date to local date format for date input
|
||||||
|
setEditExpiresAt(permission.expires_at ? permission.expires_at.split('T')[0] : '');
|
||||||
|
};
|
||||||
|
|
||||||
|
const cancelEdit = () => {
|
||||||
|
setEditingUser(null);
|
||||||
|
setEditExpiresAt('');
|
||||||
|
};
|
||||||
|
|
||||||
|
const formatExpiration = (expiresAt: string | null) => {
|
||||||
|
if (!expiresAt) return 'Never';
|
||||||
|
const date = new Date(expiresAt);
|
||||||
|
const now = new Date();
|
||||||
|
const isExpired = date < now;
|
||||||
|
return (
|
||||||
|
<span className={isExpired ? 'expired' : ''}>
|
||||||
|
{date.toLocaleDateString()}
|
||||||
|
{isExpired && ' (Expired)'}
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
if (loading) {
|
||||||
|
return <div className="access-management loading">Loading permissions...</div>;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="access-management card">
|
||||||
|
<div className="access-management__header">
|
||||||
|
<h3>Access Management</h3>
|
||||||
|
<button
|
||||||
|
className="btn btn-primary btn-sm"
|
||||||
|
onClick={() => setShowAddForm(!showAddForm)}
|
||||||
|
>
|
||||||
|
{showAddForm ? 'Cancel' : '+ Add User'}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{error && <div className="error-message">{error}</div>}
|
||||||
|
{success && <div className="success-message">{success}</div>}
|
||||||
|
|
||||||
|
{showAddForm && (
|
||||||
|
<form className="access-management__form" onSubmit={handleGrant}>
|
||||||
|
<div className="form-row">
|
||||||
|
<div className="form-group">
|
||||||
|
<label htmlFor="username">Username</label>
|
||||||
|
<input
|
||||||
|
id="username"
|
||||||
|
type="text"
|
||||||
|
value={newUsername}
|
||||||
|
onChange={(e) => setNewUsername(e.target.value)}
|
||||||
|
placeholder="Enter username"
|
||||||
|
required
|
||||||
|
disabled={submitting}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<div className="form-group">
|
||||||
|
<label htmlFor="level">Access Level</label>
|
||||||
|
<select
|
||||||
|
id="level"
|
||||||
|
value={newLevel}
|
||||||
|
onChange={(e) => setNewLevel(e.target.value as AccessLevel)}
|
||||||
|
disabled={submitting}
|
||||||
|
>
|
||||||
|
<option value="read">Read</option>
|
||||||
|
<option value="write">Write</option>
|
||||||
|
<option value="admin">Admin</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
<div className="form-group">
|
||||||
|
<label htmlFor="expires_at">Expires (optional)</label>
|
||||||
|
<input
|
||||||
|
id="expires_at"
|
||||||
|
type="date"
|
||||||
|
value={newExpiresAt}
|
||||||
|
onChange={(e) => setNewExpiresAt(e.target.value)}
|
||||||
|
disabled={submitting}
|
||||||
|
min={new Date().toISOString().split('T')[0]}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<button type="submit" className="btn btn-primary" disabled={submitting}>
|
||||||
|
{submitting ? 'Granting...' : 'Grant Access'}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<div className="access-management__list">
|
||||||
|
{permissions.length === 0 ? (
|
||||||
|
<p className="text-muted">No explicit permissions set. Only the project owner has access.</p>
|
||||||
|
) : (
|
||||||
|
<table className="access-table">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>User</th>
|
||||||
|
<th>Access Level</th>
|
||||||
|
<th>Granted</th>
|
||||||
|
<th>Expires</th>
|
||||||
|
<th>Actions</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{permissions.map((p) => (
|
||||||
|
<tr key={p.id}>
|
||||||
|
<td>{p.user_id}</td>
|
||||||
|
<td>
|
||||||
|
{editingUser === p.user_id ? (
|
||||||
|
<select
|
||||||
|
value={editLevel}
|
||||||
|
onChange={(e) => setEditLevel(e.target.value as AccessLevel)}
|
||||||
|
disabled={submitting}
|
||||||
|
>
|
||||||
|
<option value="read">Read</option>
|
||||||
|
<option value="write">Write</option>
|
||||||
|
<option value="admin">Admin</option>
|
||||||
|
</select>
|
||||||
|
) : (
|
||||||
|
<span className={`access-badge access-badge--${p.level}`}>
|
||||||
|
{p.level}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
<td>{new Date(p.created_at).toLocaleDateString()}</td>
|
||||||
|
<td>
|
||||||
|
{editingUser === p.user_id ? (
|
||||||
|
<input
|
||||||
|
type="date"
|
||||||
|
value={editExpiresAt}
|
||||||
|
onChange={(e) => setEditExpiresAt(e.target.value)}
|
||||||
|
disabled={submitting}
|
||||||
|
min={new Date().toISOString().split('T')[0]}
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
formatExpiration(p.expires_at)
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
<td className="actions">
|
||||||
|
{editingUser === p.user_id ? (
|
||||||
|
<>
|
||||||
|
<button
|
||||||
|
className="btn btn-sm btn-primary"
|
||||||
|
onClick={() => handleUpdate(p.user_id)}
|
||||||
|
disabled={submitting}
|
||||||
|
>
|
||||||
|
Save
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
className="btn btn-sm"
|
||||||
|
onClick={cancelEdit}
|
||||||
|
disabled={submitting}
|
||||||
|
>
|
||||||
|
Cancel
|
||||||
|
</button>
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<button
|
||||||
|
className="btn btn-sm"
|
||||||
|
onClick={() => startEdit(p)}
|
||||||
|
disabled={submitting}
|
||||||
|
>
|
||||||
|
Edit
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
className="btn btn-sm btn-danger"
|
||||||
|
onClick={() => handleRevoke(p.user_id)}
|
||||||
|
disabled={submitting}
|
||||||
|
>
|
||||||
|
Revoke
|
||||||
|
</button>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
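For reference, the component above leans on an AccessLevel union and a formatExpiration helper defined elsewhere in the frontend. A minimal sketch of what they might look like — these exact signatures are an assumption for illustration, not part of this diff:

// Assumed union; matches the <option> values used in the form above.
type AccessLevel = 'read' | 'write' | 'admin';

// Hypothetical helper: render an ISO timestamp as a locale date, or a dash when unset.
function formatExpiration(expiresAt: string | null): string {
  if (!expiresAt) return '—';
  return new Date(expiresAt).toLocaleDateString();
}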
@@ -98,3 +98,58 @@
   text-overflow: ellipsis;
   white-space: nowrap;
 }
+
+/* Clickable rows */
+.data-table__row--clickable {
+  cursor: pointer;
+}
+
+.data-table__row--clickable:hover {
+  background: var(--bg-hover);
+}
+
+/* Responsive table wrapper */
+.data-table--responsive {
+  overflow-x: auto;
+  -webkit-overflow-scrolling: touch;
+}
+
+.data-table--responsive table {
+  min-width: 800px;
+}
+
+/* Cell with name and icon */
+.data-table .cell-name {
+  display: flex;
+  align-items: center;
+  gap: 8px;
+  font-weight: 500;
+  color: var(--text-primary);
+}
+
+.data-table .cell-name:hover {
+  color: var(--accent-primary);
+}
+
+/* Date cells */
+.data-table .cell-date {
+  color: var(--text-tertiary);
+  font-size: 0.8125rem;
+  white-space: nowrap;
+}
+
+/* Description cell */
+.data-table .cell-description {
+  max-width: 300px;
+  overflow: hidden;
+  text-overflow: ellipsis;
+  white-space: nowrap;
+  color: var(--text-secondary);
+  font-size: 0.875rem;
+}
+
+/* Owner cell */
+.data-table .cell-owner {
+  color: var(--text-secondary);
+  font-size: 0.875rem;
+}
@@ -18,6 +18,7 @@ interface DataTableProps<T> {
   onSort?: (key: string) => void;
   sortKey?: string;
   sortOrder?: 'asc' | 'desc';
+  onRowClick?: (item: T) => void;
 }
 
 export function DataTable<T>({
@@ -29,6 +30,7 @@ export function DataTable<T>({
   onSort,
   sortKey,
   sortOrder,
+  onRowClick,
 }: DataTableProps<T>) {
   if (data.length === 0) {
     return (
@@ -71,7 +73,11 @@ export function DataTable<T>({
         </thead>
         <tbody>
           {data.map((item) => (
-            <tr key={keyExtractor(item)}>
+            <tr
+              key={keyExtractor(item)}
+              onClick={() => onRowClick?.(item)}
+              className={onRowClick ? 'data-table__row--clickable' : ''}
+            >
               {columns.map((column) => (
                 <td key={column.key} className={column.className}>
                   {column.render(item)}
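A minimal usage sketch of the new onRowClick prop. The Project row type, the column definitions, and the navigate callback are hypothetical stand-ins invented for illustration; the column shape uses only the fields visible in this hunk (key, render, className), and the real DataTableProps may require more:

// Hypothetical caller of DataTable with clickable rows.
interface Project { id: string; name: string; created_at: string; }

function ProjectList({ projects }: { projects: Project[] }) {
  const navigate = useNavigate();
  return (
    <DataTable<Project>
      data={projects}
      keyExtractor={(p) => p.id}
      columns={[
        { key: 'name', render: (p) => p.name, className: 'cell-name' },
        { key: 'created', render: (p) => new Date(p.created_at).toLocaleDateString(), className: 'cell-date' },
      ]}
      onRowClick={(p) => navigate(`/project/${p.name}`)} // rows get the clickable styling automatically
    />
  );
}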
338
frontend/src/components/DependencyGraph.css
Normal file
@@ -0,0 +1,338 @@
/* Dependency Graph Modal */
.dependency-graph-modal {
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background: rgba(0, 0, 0, 0.8);
  display: flex;
  align-items: center;
  justify-content: center;
  z-index: 1000;
  padding: 24px;
}

.dependency-graph-content {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  width: 100%;
  max-width: 1200px;
  height: 80vh;
  display: flex;
  flex-direction: column;
  overflow: hidden;
}

.dependency-graph-header {
  display: flex;
  align-items: center;
  gap: 16px;
  padding: 16px 20px;
  border-bottom: 1px solid var(--border-primary);
  background: var(--bg-tertiary);
}

.dependency-graph-header h2 {
  margin: 0;
  font-size: 1.125rem;
  font-weight: 600;
  color: var(--text-primary);
}

.dependency-graph-info {
  display: flex;
  align-items: center;
  gap: 12px;
  flex: 1;
  font-size: 0.875rem;
  color: var(--text-secondary);
}

.graph-stats {
  color: var(--text-muted);
  font-size: 0.8125rem;
}

.close-btn {
  background: transparent;
  border: none;
  color: var(--text-secondary);
  cursor: pointer;
  padding: 4px;
  border-radius: var(--radius-sm);
  display: flex;
  align-items: center;
  justify-content: center;
}

.close-btn:hover {
  background: var(--bg-hover);
  color: var(--text-primary);
}

.dependency-graph-toolbar {
  display: flex;
  align-items: center;
  gap: 8px;
  padding: 12px 20px;
  border-bottom: 1px solid var(--border-primary);
  background: var(--bg-secondary);
}

.zoom-level {
  margin-left: auto;
  font-size: 0.8125rem;
  color: var(--text-muted);
  font-family: 'JetBrains Mono', monospace;
}

.dependency-graph-container {
  flex: 1;
  overflow: hidden;
  position: relative;
  background:
    linear-gradient(90deg, var(--border-primary) 1px, transparent 1px),
    linear-gradient(var(--border-primary) 1px, transparent 1px);
  background-size: 20px 20px;
  background-position: center center;
}

.graph-canvas {
  padding: 40px;
  min-width: 100%;
  min-height: 100%;
  transform-origin: center center;
  transition: transform 0.1s ease-out;
}

/* Graph Nodes */
.graph-node-container {
  display: flex;
  flex-direction: column;
  align-items: flex-start;
}

.graph-node {
  background: var(--bg-tertiary);
  border: 2px solid var(--border-primary);
  border-radius: var(--radius-md);
  padding: 12px 16px;
  min-width: 200px;
  cursor: pointer;
  transition: all var(--transition-fast);
  position: relative;
}

.graph-node:hover {
  border-color: var(--accent-primary);
  box-shadow: 0 4px 12px rgba(16, 185, 129, 0.2);
}

.graph-node--root {
  background: linear-gradient(135deg, rgba(16, 185, 129, 0.15) 0%, rgba(5, 150, 105, 0.15) 100%);
  border-color: var(--accent-primary);
}

.graph-node--hovered {
  transform: scale(1.02);
}

.graph-node__header {
  display: flex;
  align-items: center;
  gap: 8px;
  margin-bottom: 4px;
}

.graph-node__name {
  font-weight: 600;
  color: var(--accent-primary);
  font-family: 'JetBrains Mono', monospace;
  font-size: 0.875rem;
}

.graph-node__toggle {
  background: var(--bg-hover);
  border: 1px solid var(--border-primary);
  border-radius: 4px;
  width: 20px;
  height: 20px;
  display: flex;
  align-items: center;
  justify-content: center;
  cursor: pointer;
  font-size: 0.875rem;
  color: var(--text-secondary);
  font-weight: 600;
  margin-left: auto;
}

.graph-node__toggle:hover {
  background: var(--bg-tertiary);
  color: var(--text-primary);
}

.graph-node__details {
  display: flex;
  align-items: center;
  gap: 12px;
  font-size: 0.75rem;
  color: var(--text-muted);
}

.graph-node__version {
  font-family: 'JetBrains Mono', monospace;
  color: var(--text-secondary);
}

.graph-node__size {
  color: var(--text-muted);
}

/* Graph Children / Tree Structure */
.graph-children {
  display: flex;
  padding-left: 24px;
  margin-top: 8px;
  position: relative;
}

.graph-connector {
  position: absolute;
  left: 12px;
  top: 0;
  bottom: 50%;
  width: 12px;
  border-left: 2px solid var(--border-primary);
  border-bottom: 2px solid var(--border-primary);
  border-bottom-left-radius: 8px;
}

.graph-children-list {
  display: flex;
  flex-direction: column;
  gap: 8px;
  position: relative;
}

.graph-children-list::before {
  content: '';
  position: absolute;
  left: -12px;
  top: 20px;
  bottom: 20px;
  border-left: 2px solid var(--border-primary);
}

.graph-children-list > .graph-node-container {
  position: relative;
}

.graph-children-list > .graph-node-container::before {
  content: '';
  position: absolute;
  left: -12px;
  top: 20px;
  width: 12px;
  border-top: 2px solid var(--border-primary);
}

/* Loading, Error, Empty States */
.graph-loading,
.graph-error,
.graph-empty {
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  height: 100%;
  gap: 16px;
  color: var(--text-muted);
}

.graph-loading .spinner {
  width: 32px;
  height: 32px;
  border: 3px solid var(--border-primary);
  border-top-color: var(--accent-primary);
  border-radius: 50%;
  animation: spin 1s linear infinite;
}

@keyframes spin {
  to { transform: rotate(360deg); }
}

.graph-error {
  color: var(--error-color, #ef4444);
}

.graph-error svg {
  opacity: 0.6;
}

.graph-error p {
  max-width: 400px;
  text-align: center;
  line-height: 1.5;
}

/* Tooltip */
.graph-tooltip {
  position: fixed;
  bottom: 24px;
  left: 50%;
  transform: translateX(-50%);
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  padding: 12px 16px;
  font-size: 0.8125rem;
  box-shadow: 0 8px 24px rgba(0, 0, 0, 0.4);
  z-index: 1001;
}

.graph-tooltip strong {
  display: block;
  color: var(--accent-primary);
  font-family: 'JetBrains Mono', monospace;
  margin-bottom: 4px;
}

.graph-tooltip div {
  color: var(--text-secondary);
  margin-top: 2px;
}

.tooltip-hint {
  margin-top: 8px;
  padding-top: 8px;
  border-top: 1px solid var(--border-primary);
  color: var(--text-muted);
  font-size: 0.75rem;
}

/* Responsive */
@media (max-width: 768px) {
  .dependency-graph-modal {
    padding: 0;
  }

  .dependency-graph-content {
    height: 100vh;
    border-radius: 0;
    max-width: none;
  }

  .dependency-graph-header {
    flex-wrap: wrap;
  }

  .dependency-graph-info {
    flex-basis: 100%;
    order: 3;
    margin-top: 8px;
  }
}
323
frontend/src/components/DependencyGraph.tsx
Normal file
@@ -0,0 +1,323 @@
import { useState, useEffect, useCallback, useRef } from 'react';
import { useNavigate } from 'react-router-dom';
import { ResolvedArtifact, DependencyResolutionResponse, Dependency } from '../types';
import { resolveDependencies, getArtifactDependencies } from '../api';
import './DependencyGraph.css';

interface DependencyGraphProps {
  projectName: string;
  packageName: string;
  tagName: string;
  onClose: () => void;
}

interface GraphNode {
  id: string;
  project: string;
  package: string;
  version: string | null;
  size: number;
  depth: number;
  children: GraphNode[];
  isRoot?: boolean;
}

function formatBytes(bytes: number): string {
  if (bytes === 0) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
}

function DependencyGraph({ projectName, packageName, tagName, onClose }: DependencyGraphProps) {
  const navigate = useNavigate();
  const containerRef = useRef<HTMLDivElement>(null);

  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const [resolution, setResolution] = useState<DependencyResolutionResponse | null>(null);
  const [graphRoot, setGraphRoot] = useState<GraphNode | null>(null);
  const [hoveredNode, setHoveredNode] = useState<GraphNode | null>(null);
  const [zoom, setZoom] = useState(1);
  const [pan, setPan] = useState({ x: 0, y: 0 });
  const [isDragging, setIsDragging] = useState(false);
  const [dragStart, setDragStart] = useState({ x: 0, y: 0 });
  const [collapsedNodes, setCollapsedNodes] = useState<Set<string>>(new Set());

  // Build graph structure from resolution data
  const buildGraph = useCallback(async (resolutionData: DependencyResolutionResponse) => {
    const artifactMap = new Map<string, ResolvedArtifact>();
    resolutionData.resolved.forEach(artifact => {
      artifactMap.set(artifact.artifact_id, artifact);
    });

    // Fetch dependencies for each artifact to build the tree
    const depsMap = new Map<string, Dependency[]>();

    for (const artifact of resolutionData.resolved) {
      try {
        const deps = await getArtifactDependencies(artifact.artifact_id);
        depsMap.set(artifact.artifact_id, deps.dependencies);
      } catch {
        depsMap.set(artifact.artifact_id, []);
      }
    }

    // Find the root artifact (the requested one)
    const rootArtifact = resolutionData.resolved.find(
      a => a.project === resolutionData.requested.project &&
        a.package === resolutionData.requested.package
    );

    if (!rootArtifact) {
      return null;
    }

    // Build tree recursively
    const visited = new Set<string>();

    const buildNode = (artifact: ResolvedArtifact, depth: number): GraphNode => {
      const nodeId = `${artifact.project}/${artifact.package}`;
      visited.add(artifact.artifact_id);

      const deps = depsMap.get(artifact.artifact_id) || [];
      const children: GraphNode[] = [];

      for (const dep of deps) {
        // Find the resolved artifact for this dependency
        const childArtifact = resolutionData.resolved.find(
          a => a.project === dep.project && a.package === dep.package
        );

        if (childArtifact && !visited.has(childArtifact.artifact_id)) {
          children.push(buildNode(childArtifact, depth + 1));
        }
      }

      return {
        id: nodeId,
        project: artifact.project,
        package: artifact.package,
        version: artifact.version || artifact.tag,
        size: artifact.size,
        depth,
        children,
        isRoot: depth === 0,
      };
    };

    return buildNode(rootArtifact, 0);
  }, []);

  useEffect(() => {
    async function loadData() {
      setLoading(true);
      setError(null);

      try {
        const result = await resolveDependencies(projectName, packageName, tagName);
        setResolution(result);

        const graph = await buildGraph(result);
        setGraphRoot(graph);
      } catch (err) {
        if (err instanceof Error) {
          // Check if it's a resolution error
          try {
            const errorData = JSON.parse(err.message);
            if (errorData.error === 'circular_dependency') {
              setError(`Circular dependency detected: ${errorData.cycle?.join(' → ')}`);
            } else if (errorData.error === 'dependency_conflict') {
              setError(`Dependency conflict: ${errorData.message}`);
            } else {
              setError(err.message);
            }
          } catch {
            setError(err.message);
          }
        } else {
          setError('Failed to load dependency graph');
        }
      } finally {
        setLoading(false);
      }
    }

    loadData();
  }, [projectName, packageName, tagName, buildGraph]);

  const handleNodeClick = (node: GraphNode) => {
    navigate(`/project/${node.project}/${node.package}`);
    onClose();
  };

  const handleNodeToggle = (node: GraphNode, e: React.MouseEvent) => {
    e.stopPropagation();
    setCollapsedNodes(prev => {
      const next = new Set(prev);
      if (next.has(node.id)) {
        next.delete(node.id);
      } else {
        next.add(node.id);
      }
      return next;
    });
  };

  const handleWheel = (e: React.WheelEvent) => {
    e.preventDefault();
    const delta = e.deltaY > 0 ? -0.1 : 0.1;
    setZoom(z => Math.max(0.25, Math.min(2, z + delta)));
  };

  const handleMouseDown = (e: React.MouseEvent) => {
    if (e.target === containerRef.current || (e.target as HTMLElement).classList.contains('graph-canvas')) {
      setIsDragging(true);
      setDragStart({ x: e.clientX - pan.x, y: e.clientY - pan.y });
    }
  };

  const handleMouseMove = (e: React.MouseEvent) => {
    if (isDragging) {
      setPan({ x: e.clientX - dragStart.x, y: e.clientY - dragStart.y });
    }
  };

  const handleMouseUp = () => {
    setIsDragging(false);
  };

  const resetView = () => {
    setZoom(1);
    setPan({ x: 0, y: 0 });
  };

  const renderNode = (node: GraphNode, index: number = 0): JSX.Element => {
    const isCollapsed = collapsedNodes.has(node.id);
    const hasChildren = node.children.length > 0;

    return (
      <div key={`${node.id}-${index}`} className="graph-node-container">
        <div
          className={`graph-node ${node.isRoot ? 'graph-node--root' : ''} ${hoveredNode?.id === node.id ? 'graph-node--hovered' : ''}`}
          onClick={() => handleNodeClick(node)}
          onMouseEnter={() => setHoveredNode(node)}
          onMouseLeave={() => setHoveredNode(null)}
        >
          <div className="graph-node__header">
            <span className="graph-node__name">{node.project}/{node.package}</span>
            {hasChildren && (
              <button
                className="graph-node__toggle"
                onClick={(e) => handleNodeToggle(node, e)}
                title={isCollapsed ? 'Expand' : 'Collapse'}
              >
                {isCollapsed ? '+' : '-'}
              </button>
            )}
          </div>
          <div className="graph-node__details">
            {node.version && <span className="graph-node__version">@ {node.version}</span>}
            <span className="graph-node__size">{formatBytes(node.size)}</span>
          </div>
        </div>

        {hasChildren && !isCollapsed && (
          <div className="graph-children">
            <div className="graph-connector"></div>
            <div className="graph-children-list">
              {node.children.map((child, i) => renderNode(child, i))}
            </div>
          </div>
        )}
      </div>
    );
  };

  return (
    <div className="dependency-graph-modal" onClick={onClose}>
      <div className="dependency-graph-content" onClick={e => e.stopPropagation()}>
        <div className="dependency-graph-header">
          <h2>Dependency Graph</h2>
          <div className="dependency-graph-info">
            <span>{projectName}/{packageName} @ {tagName}</span>
            {resolution && (
              <span className="graph-stats">
                {resolution.artifact_count} packages • {formatBytes(resolution.total_size)} total
              </span>
            )}
          </div>
          <button className="close-btn" onClick={onClose} title="Close">
            <svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
              <line x1="18" y1="6" x2="6" y2="18"></line>
              <line x1="6" y1="6" x2="18" y2="18"></line>
            </svg>
          </button>
        </div>

        <div className="dependency-graph-toolbar">
          <button className="btn btn-secondary btn-small" onClick={() => setZoom(z => Math.min(2, z + 0.25))}>
            Zoom In
          </button>
          <button className="btn btn-secondary btn-small" onClick={() => setZoom(z => Math.max(0.25, z - 0.25))}>
            Zoom Out
          </button>
          <button className="btn btn-secondary btn-small" onClick={resetView}>
            Reset View
          </button>
          <span className="zoom-level">{Math.round(zoom * 100)}%</span>
        </div>

        <div
          ref={containerRef}
          className="dependency-graph-container"
          onWheel={handleWheel}
          onMouseDown={handleMouseDown}
          onMouseMove={handleMouseMove}
          onMouseUp={handleMouseUp}
          onMouseLeave={handleMouseUp}
        >
          {loading ? (
            <div className="graph-loading">
              <div className="spinner"></div>
              <span>Resolving dependencies...</span>
            </div>
          ) : error ? (
            <div className="graph-error">
              <svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <circle cx="12" cy="12" r="10"></circle>
                <line x1="12" y1="8" x2="12" y2="12"></line>
                <line x1="12" y1="16" x2="12.01" y2="16"></line>
              </svg>
              <p>{error}</p>
            </div>
          ) : graphRoot ? (
            <div
              className="graph-canvas"
              style={{
                transform: `translate(${pan.x}px, ${pan.y}px) scale(${zoom})`,
                cursor: isDragging ? 'grabbing' : 'grab',
              }}
            >
              {renderNode(graphRoot)}
            </div>
          ) : (
            <div className="graph-empty">No dependencies to display</div>
          )}
        </div>

        {hoveredNode && (
          <div className="graph-tooltip">
            <strong>{hoveredNode.project}/{hoveredNode.package}</strong>
            {hoveredNode.version && <div>Version: {hoveredNode.version}</div>}
            <div>Size: {formatBytes(hoveredNode.size)}</div>
            <div className="tooltip-hint">Click to navigate</div>
          </div>
        )}
      </div>
    </div>
  );
}

export default DependencyGraph;
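A sketch of how the modal might be mounted from a package page. The props match the DependencyGraphProps interface above; the surrounding component, its showGraph state, and the literal prop values are hypothetical:

// Hypothetical caller: toggle the dependency-graph modal from local state.
const [showGraph, setShowGraph] = useState(false);

{showGraph && (
  <DependencyGraph
    projectName="my-project"
    packageName="my-package"
    tagName="latest"
    onClose={() => setShowGraph(false)}
  />
)}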
332
frontend/src/components/DragDropUpload.css
Normal file
@@ -0,0 +1,332 @@
.drag-drop-upload {
  width: 100%;
}

/* Offline Banner */
.offline-banner {
  display: flex;
  align-items: center;
  gap: 0.5rem;
  padding: 0.75rem 1rem;
  background: var(--warning-bg, #fff3cd);
  border: 1px solid var(--warning-border, #ffc107);
  border-radius: 8px;
  margin-bottom: 1rem;
  color: var(--warning-text, #856404);
  font-size: 0.875rem;
}

.offline-banner svg {
  flex-shrink: 0;
}

/* Drop Zone */
.drop-zone {
  border: 2px dashed var(--border-color, #ddd);
  border-radius: 8px;
  padding: 2rem;
  text-align: center;
  cursor: pointer;
  transition: all 0.2s ease;
  background: var(--bg-secondary, #f9f9f9);
}

.drop-zone:hover {
  border-color: var(--accent-color, #007bff);
  background: var(--bg-hover, #f0f7ff);
}

.drop-zone--active {
  border-color: var(--accent-color, #007bff);
  background: var(--bg-active, #e6f0ff);
  border-style: solid;
}

.drop-zone--disabled {
  cursor: not-allowed;
  opacity: 0.6;
  background: var(--bg-disabled, #f5f5f5);
}

.drop-zone--disabled:hover {
  border-color: var(--border-color, #ddd);
  background: var(--bg-disabled, #f5f5f5);
}

.drop-zone__input {
  display: none;
}

.drop-zone__content {
  display: flex;
  flex-direction: column;
  align-items: center;
  gap: 0.75rem;
  color: var(--text-secondary, #666);
}

.drop-zone__content svg {
  opacity: 0.5;
}

.drop-zone--active .drop-zone__content svg {
  opacity: 1;
  color: var(--accent-color, #007bff);
}

.drop-zone__text {
  margin: 0;
  font-size: 1rem;
}

.drop-zone__text strong {
  color: var(--text-primary, #333);
}

.drop-zone__hint {
  margin: 0;
  font-size: 0.8rem;
  opacity: 0.7;
}

/* Upload Queue */
.upload-queue {
  margin-top: 1rem;
  border: 1px solid var(--border-color, #ddd);
  border-radius: 8px;
  overflow: hidden;
}

.upload-queue__header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  padding: 0.75rem 1rem;
  background: var(--bg-secondary, #f9f9f9);
  border-bottom: 1px solid var(--border-color, #ddd);
}

.upload-queue__title {
  font-size: 0.875rem;
  font-weight: 500;
  color: var(--text-primary, #333);
}

.upload-queue__clear {
  padding: 0.25rem 0.5rem;
  font-size: 0.75rem;
  border: none;
  background: none;
  color: var(--accent-color, #007bff);
  cursor: pointer;
}

.upload-queue__clear:hover {
  text-decoration: underline;
}

.upload-queue__overall {
  display: flex;
  align-items: center;
  gap: 0.75rem;
  padding: 0.5rem 1rem;
  background: var(--bg-secondary, #f9f9f9);
  border-bottom: 1px solid var(--border-color, #ddd);
}

.upload-queue__overall .progress-bar {
  flex: 1;
}

.upload-queue__overall .progress-bar__text {
  font-size: 0.75rem;
  font-weight: 500;
  color: var(--text-secondary, #666);
  min-width: 3rem;
  text-align: right;
}

.upload-queue__list {
  list-style: none;
  margin: 0;
  padding: 0;
  max-height: 300px;
  overflow-y: auto;
}

/* Upload Item */
.upload-item {
  display: flex;
  align-items: center;
  gap: 0.75rem;
  padding: 0.75rem 1rem;
  border-bottom: 1px solid var(--border-color-light, #eee);
}

.upload-item:last-child {
  border-bottom: none;
}

.upload-item__icon {
  flex-shrink: 0;
  width: 24px;
  height: 24px;
  display: flex;
  align-items: center;
  justify-content: center;
  color: var(--text-secondary, #666);
}

.upload-item--complete .upload-item__icon {
  color: var(--success-color, #28a745);
}

.upload-item--failed .upload-item__icon {
  color: var(--error-color, #dc3545);
}

.upload-item--uploading .upload-item__icon {
  color: var(--accent-color, #007bff);
}

.upload-item--paused .upload-item__icon {
  color: var(--warning-color, #ffc107);
}

.upload-item--validating .upload-item__icon {
  color: var(--accent-color, #007bff);
}

.spinner-icon {
  animation: spin 1s linear infinite;
}

@keyframes spin {
  from { transform: rotate(0deg); }
  to { transform: rotate(360deg); }
}

.upload-item__info {
  flex: 1;
  min-width: 0;
  display: flex;
  flex-direction: column;
  gap: 0.25rem;
}

.upload-item__name {
  font-size: 0.875rem;
  font-weight: 500;
  color: var(--text-primary, #333);
  white-space: nowrap;
  overflow: hidden;
  text-overflow: ellipsis;
}

.upload-item__meta {
  display: flex;
  flex-wrap: wrap;
  gap: 0.5rem;
  font-size: 0.75rem;
  color: var(--text-secondary, #666);
}

.upload-item__size {
  color: var(--text-secondary, #666);
}

.upload-item__speed,
.upload-item__eta {
  color: var(--accent-color, #007bff);
}

.upload-item__artifact {
  color: var(--success-color, #28a745);
  font-family: monospace;
}

.upload-item__error {
  color: var(--error-color, #dc3545);
}

.upload-item__retry-count {
  color: var(--warning-color, #ffc107);
}

.upload-item__validating {
  color: var(--accent-color, #007bff);
  font-style: italic;
}

.upload-item__actions {
  display: flex;
  gap: 0.25rem;
  flex-shrink: 0;
}

.upload-item__btn {
  width: 28px;
  height: 28px;
  border: none;
  background: none;
  cursor: pointer;
  border-radius: 4px;
  display: flex;
  align-items: center;
  justify-content: center;
  color: var(--text-secondary, #666);
  transition: all 0.15s ease;
}

.upload-item__btn:hover {
  background: var(--bg-hover, #f0f0f0);
}

.upload-item__btn--retry:hover {
  color: var(--accent-color, #007bff);
}

.upload-item__btn--remove:hover {
  color: var(--error-color, #dc3545);
}

/* Progress Bar */
.progress-bar {
  height: 8px;
  background: var(--border-color, #ddd);
  border-radius: 4px;
  overflow: hidden;
}

.progress-bar--small {
  height: 4px;
  margin-top: 0.25rem;
}

.progress-bar__fill {
  height: 100%;
  background: var(--accent-color, #007bff);
  border-radius: 4px;
  transition: width 0.2s ease;
}

.upload-item--complete .progress-bar__fill {
  background: var(--success-color, #28a745);
}

/* Responsive */
@media (max-width: 480px) {
  .drop-zone {
    padding: 1.5rem 1rem;
  }

  .upload-item__meta {
    flex-direction: column;
    gap: 0.125rem;
  }

  .upload-item__speed,
  .upload-item__eta {
    display: none;
  }
}
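Every rule above reads its colors through var(--name, fallback), so the widget picks up an app theme when the custom properties are defined and still renders with the hardcoded fallbacks otherwise. A theme could set them at runtime; a minimal sketch, with illustrative values only:

// Sketch: override the stylesheet's custom properties for a dark theme.
const root = document.documentElement;
root.style.setProperty('--accent-color', '#10b981');  // illustrative value
root.style.setProperty('--bg-secondary', '#161616');  // illustrative value
root.style.setProperty('--border-color', '#2a2a2a');  // illustrative value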
545
frontend/src/components/DragDropUpload.test.tsx
Normal file
@@ -0,0 +1,545 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { render, screen, fireEvent, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { DragDropUpload } from './DragDropUpload';

function createMockFile(name: string, size: number, type: string): File {
  const content = new Array(size).fill('a').join('');
  return new File([content], name, { type });
}

function createMockXHR(options: {
  status?: number;
  response?: object;
  progressEvents?: { loaded: number; total: number }[];
  shouldError?: boolean;
  shouldTimeout?: boolean;
} = {}) {
  const {
    status = 200,
    response = { artifact_id: 'abc123', size: 100 },
    progressEvents = [],
    shouldError = false,
    shouldTimeout = false,
  } = options;

  return class MockXHR {
    status = status;
    responseText = JSON.stringify(response);
    timeout = 0;
    upload = {
      addEventListener: vi.fn((event: string, handler: (e: ProgressEvent) => void) => {
        if (event === 'progress') {
          progressEvents.forEach((p, i) => {
            setTimeout(() => {
              handler({ lengthComputable: true, loaded: p.loaded, total: p.total } as ProgressEvent);
            }, i * 10);
          });
        }
      }),
    };
    addEventListener = vi.fn((event: string, handler: () => void) => {
      if (event === 'load' && !shouldError && !shouldTimeout) {
        setTimeout(handler, progressEvents.length * 10 + 10);
      }
      if (event === 'error' && shouldError) {
        setTimeout(handler, 10);
      }
      if (event === 'timeout' && shouldTimeout) {
        setTimeout(handler, 10);
      }
    });
    open = vi.fn();
    send = vi.fn();
  };
}

describe('DragDropUpload', () => {
  const defaultProps = {
    projectName: 'test-project',
    packageName: 'test-package',
  };

  beforeEach(() => {
    vi.useFakeTimers({ shouldAdvanceTime: true });
  });

  afterEach(() => {
    vi.useRealTimers();
    vi.restoreAllMocks();
  });

  describe('Rendering', () => {
    it('renders drop zone with instructional text', () => {
      render(<DragDropUpload {...defaultProps} />);

      expect(screen.getByText(/drag files here/i)).toBeInTheDocument();
      expect(screen.getByText(/click to browse/i)).toBeInTheDocument();
    });

    it('renders hidden file input', () => {
      render(<DragDropUpload {...defaultProps} />);

      const input = document.querySelector('input[type="file"]');
      expect(input).toBeInTheDocument();
      expect(input).toHaveClass('drop-zone__input');
    });

    it('shows max file size hint when provided', () => {
      render(<DragDropUpload {...defaultProps} maxFileSize={1024 * 1024} />);

      expect(screen.getByText(/max file size: 1 mb/i)).toBeInTheDocument();
    });

    it('shows allowed types hint when provided', () => {
      render(<DragDropUpload {...defaultProps} allowedTypes={['.zip', '.tar.gz']} allowAllTypes={false} />);

      expect(screen.getByText(/\.zip, \.tar\.gz/i)).toBeInTheDocument();
    });
  });

  describe('Click to Browse', () => {
    it('opens file picker when drop zone is clicked', async () => {
      render(<DragDropUpload {...defaultProps} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const clickSpy = vi.spyOn(input, 'click');

      const dropZone = screen.getByRole('button');
      await userEvent.click(dropZone);

      expect(clickSpy).toHaveBeenCalled();
    });

    it('opens file picker on Enter key', () => {
      render(<DragDropUpload {...defaultProps} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const clickSpy = vi.spyOn(input, 'click');

      const dropZone = screen.getByRole('button');
      fireEvent.keyDown(dropZone, { key: 'Enter' });

      expect(clickSpy).toHaveBeenCalled();
    });
  });

  describe('Drag and Drop Events', () => {
    it('shows visual feedback on drag over', () => {
      render(<DragDropUpload {...defaultProps} />);

      const dropZone = screen.getByRole('button');

      fireEvent.dragEnter(dropZone, {
        dataTransfer: { items: [{}] },
      });

      expect(dropZone).toHaveClass('drop-zone--active');
    });

    it('removes visual feedback on drag leave', () => {
      render(<DragDropUpload {...defaultProps} />);

      const dropZone = screen.getByRole('button');

      fireEvent.dragEnter(dropZone, { dataTransfer: { items: [{}] } });
      expect(dropZone).toHaveClass('drop-zone--active');

      fireEvent.dragLeave(dropZone);
      expect(dropZone).not.toHaveClass('drop-zone--active');
    });

    it('accepts dropped files', async () => {
      const MockXHR = createMockXHR();
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      render(<DragDropUpload {...defaultProps} />);

      const dropZone = screen.getByRole('button');
      const file = createMockFile('test.txt', 100, 'text/plain');

      const dataTransfer = new DataTransfer();
      Object.defineProperty(dataTransfer, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.drop(dropZone, { dataTransfer });

      await waitFor(() => {
        expect(screen.getByText('test.txt')).toBeInTheDocument();
      });
    });
  });

  describe('File Validation', () => {
    it('rejects files exceeding max size', async () => {
      render(<DragDropUpload {...defaultProps} maxFileSize={100} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('large.txt', 200, 'text/plain');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await waitFor(() => {
        expect(screen.getByText(/exceeds.*limit/i)).toBeInTheDocument();
      });
    });

    it('rejects files with invalid type when allowAllTypes is false', async () => {
      render(<DragDropUpload {...defaultProps} allowedTypes={['.zip']} allowAllTypes={false} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('test.txt', 100, 'text/plain');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await waitFor(() => {
        expect(screen.getByText(/not allowed/i)).toBeInTheDocument();
      });
    });

    it('rejects empty files', async () => {
      render(<DragDropUpload {...defaultProps} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('empty.txt', 0, 'text/plain');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await waitFor(() => {
        expect(screen.getByText(/empty file/i)).toBeInTheDocument();
      });
    });

    it('accepts valid files when allowAllTypes is true', async () => {
      const MockXHR = createMockXHR();
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      render(<DragDropUpload {...defaultProps} allowAllTypes={true} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('test.txt', 100, 'text/plain');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await waitFor(() => {
        expect(screen.getByText('test.txt')).toBeInTheDocument();
        expect(screen.queryByText(/not allowed/i)).not.toBeInTheDocument();
      });
    });
  });

  describe('Upload Queue', () => {
    it('shows file in queue after selection', async () => {
      const MockXHR = createMockXHR();
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      render(<DragDropUpload {...defaultProps} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('document.pdf', 1024, 'application/pdf');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await waitFor(() => {
        expect(screen.getByText('document.pdf')).toBeInTheDocument();
        expect(screen.getByText('1 KB')).toBeInTheDocument();
      });
    });

    it('handles multiple files', async () => {
      const MockXHR = createMockXHR();
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      render(<DragDropUpload {...defaultProps} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const files = [
        createMockFile('file1.txt', 100, 'text/plain'),
        createMockFile('file2.txt', 200, 'text/plain'),
        createMockFile('file3.txt', 300, 'text/plain'),
      ];

      Object.defineProperty(input, 'files', {
        value: Object.assign(files, { item: (i: number) => files[i] }),
      });

      fireEvent.change(input);

      await waitFor(() => {
        expect(screen.getByText('file1.txt')).toBeInTheDocument();
        expect(screen.getByText('file2.txt')).toBeInTheDocument();
        expect(screen.getByText('file3.txt')).toBeInTheDocument();
      });
    });

    it('shows overall progress for multiple files', async () => {
      const MockXHR = createMockXHR();
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      render(<DragDropUpload {...defaultProps} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const files = [
        createMockFile('file1.txt', 100, 'text/plain'),
        createMockFile('file2.txt', 100, 'text/plain'),
      ];

      Object.defineProperty(input, 'files', {
        value: Object.assign(files, { item: (i: number) => files[i] }),
      });

      fireEvent.change(input);

      await waitFor(() => {
        expect(screen.getByText(/uploading.*of.*files/i)).toBeInTheDocument();
      });
    });
  });

  describe('Upload Progress', () => {
    it('shows progress bar during upload', async () => {
      const MockXHR = createMockXHR({
        progressEvents: [
          { loaded: 50, total: 100 },
        ],
      });
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      render(<DragDropUpload {...defaultProps} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('test.txt', 100, 'text/plain');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await waitFor(() => {
        const progressBar = document.querySelector('.progress-bar__fill');
        expect(progressBar).toBeInTheDocument();
      });
    });
  });

  describe('Upload Completion', () => {
    it('shows success state when upload completes', async () => {
      const MockXHR = createMockXHR({
        response: { artifact_id: 'abc123def456', size: 100 },
      });
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      const onComplete = vi.fn();
      render(<DragDropUpload {...defaultProps} onUploadComplete={onComplete} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('test.txt', 100, 'text/plain');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await vi.advanceTimersByTimeAsync(100);

      await waitFor(() => {
        expect(screen.getByText(/abc123def456/i)).toBeInTheDocument();
      });
    });

    it('calls onUploadComplete callback with results', async () => {
      const MockXHR = createMockXHR({
        response: { artifact_id: 'test-artifact-id', size: 100 },
      });
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      const onComplete = vi.fn();
      render(<DragDropUpload {...defaultProps} onUploadComplete={onComplete} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('test.txt', 100, 'text/plain');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await vi.advanceTimersByTimeAsync(100);

      await waitFor(() => {
        expect(onComplete).toHaveBeenCalledWith([
          expect.objectContaining({ artifact_id: 'test-artifact-id' }),
        ]);
      });
    });
  });

  describe('Upload Errors', () => {
    it('shows error state when upload fails after retries exhausted', async () => {
      const MockXHR = createMockXHR({
        status: 500,
        response: { detail: 'Server error' },
        shouldError: true,
      });
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      render(<DragDropUpload {...defaultProps} maxRetries={0} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('test.txt', 100, 'text/plain');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await vi.advanceTimersByTimeAsync(100);

      await waitFor(() => {
        expect(screen.getByText(/network error/i)).toBeInTheDocument();
      });
    });

    it('calls onUploadError callback when retries exhausted', async () => {
      const MockXHR = createMockXHR({ shouldError: true });
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      const onError = vi.fn();
      render(<DragDropUpload {...defaultProps} maxRetries={0} onUploadError={onError} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('test.txt', 100, 'text/plain');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await vi.advanceTimersByTimeAsync(100);

      await waitFor(() => {
        expect(onError).toHaveBeenCalled();
      });
    });
  });

  describe('Queue Actions', () => {
    it('removes item from queue when remove button clicked', async () => {
      const MockXHR = createMockXHR();
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      render(<DragDropUpload {...defaultProps} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('test.txt', 100, 'text/plain');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await vi.advanceTimersByTimeAsync(100);

      await waitFor(() => {
        expect(screen.getByText('test.txt')).toBeInTheDocument();
      });

      const removeButton = screen.getByTitle('Remove');
      fireEvent.click(removeButton);

      await waitFor(() => {
        expect(screen.queryByText('test.txt')).not.toBeInTheDocument();
      });
    });

    it('clears completed items when clear button clicked', async () => {
      const MockXHR = createMockXHR();
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      render(<DragDropUpload {...defaultProps} />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('test.txt', 100, 'text/plain');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await vi.advanceTimersByTimeAsync(100);

      await waitFor(() => {
        const clearButton = screen.queryByText(/clear finished/i);
        if (clearButton) {
          fireEvent.click(clearButton);
        }
      });
    });
  });

  describe('Tag Support', () => {
    it('includes tag in upload request', async () => {
      let capturedFormData: FormData | null = null;

      class MockXHR {
        status = 200;
        responseText = JSON.stringify({ artifact_id: 'abc123', size: 100 });
        timeout = 0;
        upload = { addEventListener: vi.fn() };
        addEventListener = vi.fn((event: string, handler: () => void) => {
          if (event === 'load') setTimeout(handler, 10);
        });
        open = vi.fn();
        send = vi.fn((data: FormData) => {
          capturedFormData = data;
        });
      }
      vi.stubGlobal('XMLHttpRequest', MockXHR);

      render(<DragDropUpload {...defaultProps} tag="v1.0.0" />);

      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
      const file = createMockFile('test.txt', 100, 'text/plain');

      Object.defineProperty(input, 'files', {
        value: Object.assign([file], { item: (i: number) => [file][i] }),
      });

      fireEvent.change(input);

      await vi.advanceTimersByTimeAsync(100);

      await waitFor(() => {
        expect(capturedFormData?.get('tag')).toBe('v1.0.0');
      });
    });
  });
});
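The suite stubs the global XMLHttpRequest rather than mocking the network layer, so the same createMockXHR factory can drive paths the tests above do not yet cover. For example, the factory accepts shouldTimeout; a sketch of a timeout-path test in the same style (the asserted /timed out/ copy is an assumption about the component's message, not confirmed by this diff):

it('shows a timeout state', async () => {
  const MockXHR = createMockXHR({ shouldTimeout: true });
  vi.stubGlobal('XMLHttpRequest', MockXHR);

  render(<DragDropUpload {...defaultProps} maxRetries={0} />);

  const input = document.querySelector('input[type="file"]') as HTMLInputElement;
  const file = createMockFile('test.txt', 100, 'text/plain');
  Object.defineProperty(input, 'files', {
    value: Object.assign([file], { item: (i: number) => [file][i] }),
  });

  fireEvent.change(input);
  await vi.advanceTimersByTimeAsync(100);

  await waitFor(() => {
    expect(screen.getByText(/timed out/i)).toBeInTheDocument(); // assumed error copy
  });
});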
912
frontend/src/components/DragDropUpload.tsx
Normal file
912
frontend/src/components/DragDropUpload.tsx
Normal file
@@ -0,0 +1,912 @@
import { useState, useRef, useCallback, useEffect } from 'react';
import './DragDropUpload.css';

const CHUNK_SIZE = 10 * 1024 * 1024;
const CHUNKED_UPLOAD_THRESHOLD = 100 * 1024 * 1024;
const UPLOAD_STATE_PREFIX = 'orchard_upload_';

interface StoredUploadState {
  uploadId: string;
  fileHash: string;
  filename: string;
  fileSize: number;
  completedParts: number[];
  project: string;
  package: string;
  tag?: string;
  createdAt: number;
}

function getUploadStateKey(project: string, pkg: string, fileHash: string): string {
  return `${UPLOAD_STATE_PREFIX}${project}_${pkg}_${fileHash}`;
}

function saveUploadState(state: StoredUploadState): void {
  try {
    const key = getUploadStateKey(state.project, state.package, state.fileHash);
    localStorage.setItem(key, JSON.stringify(state));
  } catch {
    // localStorage might be full or unavailable
  }
}

function loadUploadState(project: string, pkg: string, fileHash: string): StoredUploadState | null {
  try {
    const key = getUploadStateKey(project, pkg, fileHash);
    const stored = localStorage.getItem(key);
    if (!stored) return null;
    const state = JSON.parse(stored) as StoredUploadState;
    const oneDay = 24 * 60 * 60 * 1000;
    if (Date.now() - state.createdAt > oneDay) {
      localStorage.removeItem(key);
      return null;
    }
    return state;
  } catch {
    return null;
  }
}

function clearUploadState(project: string, pkg: string, fileHash: string): void {
  try {
    const key = getUploadStateKey(project, pkg, fileHash);
    localStorage.removeItem(key);
  } catch {
    // ignore
  }
}

// Types
export type UploadStatus = 'pending' | 'uploading' | 'complete' | 'failed' | 'validating' | 'paused';

export interface UploadItem {
  id: string;
  file: File;
  status: UploadStatus;
  progress: number;
  speed: number; // bytes per second
  error?: string;
  artifactId?: string;
  retryCount: number;
  startTime?: number;
}

export interface UploadResult {
  artifact_id: string;
  size: number;
  deduplicated?: boolean;
}

export interface DragDropUploadProps {
  projectName: string;
  packageName: string;
  onUploadComplete?: (results: UploadResult[]) => void;
  onUploadError?: (error: string) => void;
  allowedTypes?: string[]; // e.g., ['.tar.gz', '.zip', '.deb']
  allowAllTypes?: boolean;
  maxFileSize?: number; // in bytes
  maxConcurrentUploads?: number;
  maxRetries?: number;
  tag?: string;
  className?: string;
  disabled?: boolean;
  disabledReason?: string;
}

// Utility functions
function generateId(): string {
  return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
}

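// e.g. formatBytes(1536) === '1.5 KB'; formatSpeed(1536) === '1.5 KB/s'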
function formatBytes(bytes: number): string {
  if (bytes === 0) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
}

function formatSpeed(bytesPerSecond: number): string {
  return `${formatBytes(bytesPerSecond)}/s`;
}

function formatTimeRemaining(seconds: number): string {
  if (!isFinite(seconds) || seconds < 0) return '--:--';
  if (seconds < 60) return `${Math.round(seconds)}s`;
  if (seconds < 3600) return `${Math.floor(seconds / 60)}m ${Math.round(seconds % 60)}s`;
  return `${Math.floor(seconds / 3600)}h ${Math.floor((seconds % 3600) / 60)}m`;
}

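// Handles compound archive extensions: getFileExtension('app.tar.gz') returns '.tar.gz', not '.gz'.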
function getFileExtension(filename: string): string {
  const parts = filename.toLowerCase().split('.');
  if (parts.length >= 3 && parts[parts.length - 2] === 'tar') {
    return `.${parts.slice(-2).join('.')}`;
  }
  return parts.length > 1 ? `.${parts[parts.length - 1]}` : '';
}

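// Note: arrayBuffer() loads the whole file into memory before hashing; Web Crypto's
// digest() is one-shot, so an incremental hash is not possible with this API.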
async function computeSHA256(file: File): Promise<string> {
  const buffer = await file.arrayBuffer();
  const hashBuffer = await crypto.subtle.digest('SHA-256', buffer);
  const hashArray = Array.from(new Uint8Array(hashBuffer));
  return hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
}

// Icons
function UploadIcon() {
  return (
    <svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
      <path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4" />
      <polyline points="17 8 12 3 7 8" />
      <line x1="12" y1="3" x2="12" y2="15" />
    </svg>
  );
}

function CheckIcon() {
  return (
    <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
      <polyline points="20 6 9 17 4 12" />
    </svg>
  );
}

function ErrorIcon() {
  return (
    <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
      <circle cx="12" cy="12" r="10" />
      <line x1="15" y1="9" x2="9" y2="15" />
      <line x1="9" y1="9" x2="15" y2="15" />
    </svg>
  );
}

function RetryIcon() {
  return (
    <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
      <polyline points="23 4 23 10 17 10" />
      <path d="M20.49 15a9 9 0 1 1-2.12-9.36L23 10" />
    </svg>
  );
}

function RemoveIcon() {
  return (
    <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
      <line x1="18" y1="6" x2="6" y2="18" />
      <line x1="6" y1="6" x2="18" y2="18" />
    </svg>
  );
}

function FileIcon() {
  return (
    <svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
      <path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z" />
      <polyline points="14 2 14 8 20 8" />
    </svg>
  );
}

function PauseIcon() {
  return (
    <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
      <rect x="6" y="4" width="4" height="16" />
      <rect x="14" y="4" width="4" height="16" />
    </svg>
  );
}

function WifiOffIcon() {
  return (
    <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
      <line x1="1" y1="1" x2="23" y2="23" />
      <path d="M16.72 11.06A10.94 10.94 0 0 1 19 12.55" />
      <path d="M5 12.55a10.94 10.94 0 0 1 5.17-2.39" />
      <path d="M10.71 5.05A16 16 0 0 1 22.58 9" />
      <path d="M1.42 9a15.91 15.91 0 0 1 4.7-2.88" />
      <path d="M8.53 16.11a6 6 0 0 1 6.95 0" />
      <line x1="12" y1="20" x2="12.01" y2="20" />
    </svg>
  );
}

function SpinnerIcon() {
  return (
    <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" className="spinner-icon">
      <circle cx="12" cy="12" r="10" strokeOpacity="0.25" />
      <path d="M12 2a10 10 0 0 1 10 10" />
    </svg>
  );
}

export function DragDropUpload({
  projectName,
  packageName,
  onUploadComplete,
  onUploadError,
  allowedTypes,
  allowAllTypes = true,
  maxFileSize,
  maxConcurrentUploads = 3,
  maxRetries = 3,
  tag,
  className = '',
  disabled = false,
  disabledReason,
}: DragDropUploadProps) {
  const [isDragOver, setIsDragOver] = useState(false);
  const [uploadQueue, setUploadQueue] = useState<UploadItem[]>([]);
  const [isOnline, setIsOnline] = useState(navigator.onLine);
  const fileInputRef = useRef<HTMLInputElement>(null);
  const dragCounterRef = useRef(0);
  const activeUploadsRef = useRef(0);
  const xhrMapRef = useRef<Map<string, XMLHttpRequest>>(new Map());

  // Online/Offline detection
  useEffect(() => {
    const handleOnline = () => {
      setIsOnline(true);
      // Resume paused uploads
      setUploadQueue(prev => prev.map(item =>
        item.status === 'paused'
          ? { ...item, status: 'pending' as UploadStatus, error: undefined }
          : item
      ));
    };

    const handleOffline = () => {
      setIsOnline(false);
      // Pause uploading items and cancel their XHR requests
      setUploadQueue(prev => prev.map(item => {
        if (item.status === 'uploading') {
          // Abort the XHR request
          const xhr = xhrMapRef.current.get(item.id);
          if (xhr) {
            xhr.abort();
            xhrMapRef.current.delete(item.id);
          }
          return { ...item, status: 'paused' as UploadStatus, error: 'Network offline - will resume when connection is restored', progress: 0 };
        }
        if (item.status === 'pending') {
          return { ...item, status: 'paused' as UploadStatus, error: 'Network offline - waiting for connection' };
        }
        return item;
      }));
    };

    window.addEventListener('online', handleOnline);
    window.addEventListener('offline', handleOffline);

    return () => {
      window.removeEventListener('online', handleOnline);
      window.removeEventListener('offline', handleOffline);
    };
  }, []);

  // Validate a single file
  const validateFile = useCallback((file: File): string | null => {
    // Check file size
    if (maxFileSize && file.size > maxFileSize) {
      return `File exceeds ${formatBytes(maxFileSize)} limit`;
    }

    // Check file type if not allowing all types
    if (!allowAllTypes && allowedTypes && allowedTypes.length > 0) {
      const ext = getFileExtension(file.name);
      if (!allowedTypes.some(t => t.toLowerCase() === ext)) {
        return `File type ${ext || 'unknown'} not allowed. Accepted: ${allowedTypes.join(', ')}`;
      }
    }

    // Check for empty file
    if (file.size === 0) {
      return 'Cannot upload empty file';
    }

    return null;
  }, [allowedTypes, allowAllTypes, maxFileSize]);

  // Add files to queue
  const addFiles = useCallback((files: FileList | File[]) => {
    const newItems: UploadItem[] = Array.from(files).map(file => {
      const validationError = validateFile(file);
      return {
        id: generateId(),
        file,
        status: validationError ? 'failed' : 'pending',
        progress: 0,
        speed: 0,
        error: validationError || undefined,
        retryCount: 0,
      };
    });

    setUploadQueue(prev => [...prev, ...newItems]);
  }, [validateFile]);

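  // Chunked upload flow: hash the file, init (or resume) a multipart upload,
  // PUT each 10 MB part, then POST /complete. Completed part numbers are
  // persisted to localStorage so an interrupted upload can resume within 24h.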
  const uploadFileChunked = useCallback(async (item: UploadItem): Promise<UploadResult> => {
    setUploadQueue(prev => prev.map(u =>
      u.id === item.id
        ? { ...u, status: 'validating' as UploadStatus, startTime: Date.now() }
        : u
    ));

    const fileHash = await computeSHA256(item.file);

    const storedState = loadUploadState(projectName, packageName, fileHash);
    let uploadId: string;
    let completedParts: number[] = [];

    if (storedState && storedState.fileSize === item.file.size && storedState.filename === item.file.name) {
      try {
        const statusResponse = await fetch(
          `/api/v1/project/${projectName}/${packageName}/upload/${storedState.uploadId}/status`
        );
        if (statusResponse.ok) {
          const statusData = await statusResponse.json();
          uploadId = storedState.uploadId;
          completedParts = statusData.uploaded_parts || [];
        } else {
          throw new Error('Stored upload no longer valid');
        }
      } catch {
        clearUploadState(projectName, packageName, fileHash);
        uploadId = await initNewUpload();
      }
    } else {
      uploadId = await initNewUpload();
    }

    async function initNewUpload(): Promise<string> {
      const initResponse = await fetch(
        `/api/v1/project/${projectName}/${packageName}/upload/init`,
        {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            expected_hash: fileHash,
            filename: item.file.name,
            size: item.file.size,
            tag: tag || undefined,
          }),
        }
      );

      if (!initResponse.ok) {
        const error = await initResponse.json().catch(() => ({}));
        throw new Error(error.detail || `Init failed: ${initResponse.status}`);
      }

      const initData = await initResponse.json();

      if (initData.already_exists) {
        throw { deduplicated: true, artifact_id: initData.artifact_id };
      }

      saveUploadState({
        uploadId: initData.upload_id,
        fileHash,
        filename: item.file.name,
        fileSize: item.file.size,
        completedParts: [],
        project: projectName,
        package: packageName,
        tag: tag || undefined,
        createdAt: Date.now(),
      });

      return initData.upload_id;
    }

    const totalChunks = Math.ceil(item.file.size / CHUNK_SIZE);
    let uploadedBytes = completedParts.length * CHUNK_SIZE;
    if (uploadedBytes > item.file.size) uploadedBytes = item.file.size - (item.file.size % CHUNK_SIZE);
    const startTime = Date.now();

    for (let partNumber = 1; partNumber <= totalChunks; partNumber++) {
      if (completedParts.includes(partNumber)) {
        continue;
      }

      if (!isOnline) {
        throw new Error('Network offline');
      }

      const start = (partNumber - 1) * CHUNK_SIZE;
      const end = Math.min(start + CHUNK_SIZE, item.file.size);
      const chunk = item.file.slice(start, end);

      const partResponse = await fetch(
        `/api/v1/project/${projectName}/${packageName}/upload/${uploadId}/part/${partNumber}`,
        {
          method: 'PUT',
          body: chunk,
        }
      );

      if (!partResponse.ok) {
        throw new Error(`Part ${partNumber} upload failed: ${partResponse.status}`);
      }

      completedParts.push(partNumber);
      saveUploadState({
        uploadId,
        fileHash,
        filename: item.file.name,
        fileSize: item.file.size,
        completedParts,
        project: projectName,
        package: packageName,
        tag: tag || undefined,
        createdAt: Date.now(),
      });

      uploadedBytes += chunk.size;
      const elapsed = (Date.now() - startTime) / 1000;
      const speed = elapsed > 0 ? uploadedBytes / elapsed : 0;
      const progress = Math.round((uploadedBytes / item.file.size) * 100);

      setUploadQueue(prev => prev.map(u =>
        u.id === item.id
          ? { ...u, progress, speed, status: 'uploading' as UploadStatus }
          : u
      ));
    }

    const completeResponse = await fetch(
      `/api/v1/project/${projectName}/${packageName}/upload/${uploadId}/complete`,
      {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ tag: tag || undefined }),
      }
    );

    if (!completeResponse.ok) {
      throw new Error(`Complete failed: ${completeResponse.status}`);
    }

    clearUploadState(projectName, packageName, fileHash);

    const completeData = await completeResponse.json();
    return {
      artifact_id: completeData.artifact_id,
      size: completeData.size,
      deduplicated: false,
    };
  }, [projectName, packageName, tag, isOnline]);

  const uploadFileSimple = useCallback((item: UploadItem): Promise<UploadResult> => {
    return new Promise((resolve, reject) => {
      const xhr = new XMLHttpRequest();
      xhrMapRef.current.set(item.id, xhr);

      const formData = new FormData();
      formData.append('file', item.file);
      if (tag) {
        formData.append('tag', tag);
      }

      let lastLoaded = 0;
      let lastTime = Date.now();

      xhr.upload.addEventListener('progress', (e) => {
        if (e.lengthComputable) {
          const now = Date.now();
          const timeDiff = (now - lastTime) / 1000;
          const loadedDiff = e.loaded - lastLoaded;

          const speed = timeDiff > 0 ? loadedDiff / timeDiff : 0;
          const progress = Math.round((e.loaded / e.total) * 100);

          setUploadQueue(prev => prev.map(u =>
            u.id === item.id
              ? { ...u, progress, speed, status: 'uploading' as UploadStatus }
              : u
          ));

          lastLoaded = e.loaded;
          lastTime = now;
        }
      });

      xhr.addEventListener('load', () => {
        xhrMapRef.current.delete(item.id);
        if (xhr.status >= 200 && xhr.status < 300) {
          try {
            const result = JSON.parse(xhr.responseText) as UploadResult;
            resolve(result);
          } catch {
            reject(new Error('Invalid response from server'));
          }
        } else {
          try {
            const error = JSON.parse(xhr.responseText);
            reject(new Error(error.detail || `Upload failed: ${xhr.status}`));
          } catch {
            reject(new Error(`Upload failed: ${xhr.status}`));
          }
        }
      });

      xhr.addEventListener('error', () => {
        xhrMapRef.current.delete(item.id);
        reject(new Error('Network error - check your connection'));
      });

      xhr.addEventListener('timeout', () => {
        xhrMapRef.current.delete(item.id);
        reject(new Error('Upload timed out'));
      });

      xhr.addEventListener('abort', () => {
        xhrMapRef.current.delete(item.id);
        reject(new Error('Upload cancelled'));
      });

      xhr.open('POST', `/api/v1/project/${projectName}/${packageName}/upload`);
      xhr.timeout = 300000;
      xhr.send(formData);

      setUploadQueue(prev => prev.map(u =>
        u.id === item.id
          ? { ...u, status: 'uploading' as UploadStatus, startTime: Date.now() }
          : u
      ));
    });
  }, [projectName, packageName, tag]);

  const uploadFile = useCallback((item: UploadItem): Promise<UploadResult> => {
    if (item.file.size >= CHUNKED_UPLOAD_THRESHOLD) {
      return uploadFileChunked(item);
    }
    return uploadFileSimple(item);
  }, [uploadFileChunked, uploadFileSimple]);

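  // Drain the pending queue while respecting maxConcurrentUploads;
  // activeUploadsRef acts as a simple counter-semaphore shared across effect re-runs.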
  const processQueue = useCallback(async () => {
    if (!isOnline) return;

    const pendingItems = uploadQueue.filter(item => item.status === 'pending');

    for (const item of pendingItems) {
      if (activeUploadsRef.current >= maxConcurrentUploads) {
        break;
      }

      activeUploadsRef.current++;

      // Start upload
      setUploadQueue(prev => prev.map(u =>
        u.id === item.id ? { ...u, status: 'uploading' as UploadStatus } : u
      ));

      try {
        const result = await uploadFile(item);

        setUploadQueue(prev => prev.map(u =>
          u.id === item.id
            ? { ...u, status: 'complete' as UploadStatus, progress: 100, artifactId: result.artifact_id }
            : u
        ));
      } catch (err: unknown) {
        const dedupErr = err as { deduplicated?: boolean; artifact_id?: string };
        if (dedupErr.deduplicated && dedupErr.artifact_id) {
          setUploadQueue(prev => prev.map(u =>
            u.id === item.id
              ? { ...u, status: 'complete' as UploadStatus, progress: 100, artifactId: dedupErr.artifact_id }
              : u
          ));
        } else {
          const errorMessage = err instanceof Error ? err.message : 'Upload failed';
          const shouldRetry = item.retryCount < maxRetries &&
            (errorMessage.includes('Network') || errorMessage.includes('timeout'));

          if (shouldRetry) {
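            // Exponential backoff: 1s, 2s, 4s, ... before the item is re-queued as pending.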
            const delay = Math.pow(2, item.retryCount) * 1000;
            setTimeout(() => {
              setUploadQueue(prev => prev.map(u =>
                u.id === item.id
                  ? { ...u, status: 'pending' as UploadStatus, retryCount: u.retryCount + 1, progress: 0 }
                  : u
              ));
            }, delay);
          } else {
            setUploadQueue(prev => prev.map(u =>
              u.id === item.id
                ? { ...u, status: 'failed' as UploadStatus, error: errorMessage }
                : u
            ));
            onUploadError?.(errorMessage);
          }
        }
      } finally {
        activeUploadsRef.current--;
      }
    }
  }, [uploadQueue, maxConcurrentUploads, maxRetries, uploadFile, onUploadError, isOnline]);

  useEffect(() => {
    const hasPending = uploadQueue.some(item => item.status === 'pending');
    if (hasPending && activeUploadsRef.current < maxConcurrentUploads && isOnline) {
      processQueue();
    }

    const allComplete = uploadQueue.length > 0 &&
      uploadQueue.every(item => item.status === 'complete' || item.status === 'failed');

    if (allComplete) {
      const completedResults = uploadQueue
        .filter(item => item.status === 'complete' && item.artifactId)
        .map(item => ({
          artifact_id: item.artifactId!,
          size: item.file.size,
        }));

      if (completedResults.length > 0) {
        onUploadComplete?.(completedResults);
      }
    }
  }, [uploadQueue, maxConcurrentUploads, processQueue, onUploadComplete, isOnline]);

  // Drag event handlers
  const handleDragEnter = useCallback((e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    if (disabled) return;
    dragCounterRef.current++;
    if (e.dataTransfer.items && e.dataTransfer.items.length > 0) {
      setIsDragOver(true);
    }
  }, [disabled]);

  const handleDragLeave = useCallback((e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    if (disabled) return;
    dragCounterRef.current--;
    if (dragCounterRef.current === 0) {
      setIsDragOver(false);
    }
  }, [disabled]);

  const handleDragOver = useCallback((e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
  }, []);

  const handleDrop = useCallback((e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragOver(false);
    dragCounterRef.current = 0;

    if (disabled) return;

    const files = e.dataTransfer.files;
    if (files && files.length > 0) {
      addFiles(files);
    }
  }, [addFiles, disabled]);

  // Click to browse
  const handleClick = useCallback(() => {
    if (disabled) return;
    fileInputRef.current?.click();
  }, [disabled]);

  const handleFileChange = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
    if (disabled) return;
    const files = e.target.files;
    if (files && files.length > 0) {
      addFiles(files);
    }
    // Reset input so same file can be selected again
    if (fileInputRef.current) {
      fileInputRef.current.value = '';
    }
  }, [addFiles, disabled]);

  // Remove item from queue
  const removeItem = useCallback((id: string) => {
    setUploadQueue(prev => prev.filter(item => item.id !== id));
  }, []);

  // Retry failed upload
  const retryItem = useCallback((id: string) => {
    setUploadQueue(prev => prev.map(item =>
      item.id === id
        ? { ...item, status: 'pending' as UploadStatus, error: undefined, progress: 0, retryCount: 0 }
        : item
    ));
  }, []);

  // Clear completed/failed items
  const clearCompleted = useCallback(() => {
    setUploadQueue(prev => prev.filter(item =>
      item.status !== 'complete' && item.status !== 'failed'
    ));
  }, []);

  // Calculate overall progress
  const overallProgress = uploadQueue.length > 0
    ? Math.round(uploadQueue.reduce((sum, item) => sum + item.progress, 0) / uploadQueue.length)
    : 0;

  const completedCount = uploadQueue.filter(item => item.status === 'complete').length;
  const failedCount = uploadQueue.filter(item => item.status === 'failed').length;
  const uploadingCount = uploadQueue.filter(item => item.status === 'uploading').length;
  const pausedCount = uploadQueue.filter(item => item.status === 'paused').length;

  return (
    <div className={`drag-drop-upload ${className}`}>
      {!isOnline && (
        <div className="offline-banner">
          <WifiOffIcon />
          <span>You're offline. Uploads will resume when connection is restored.</span>
        </div>
      )}

      <div
        className={`drop-zone ${isDragOver ? 'drop-zone--active' : ''} ${disabled ? 'drop-zone--disabled' : ''}`}
        onDragEnter={handleDragEnter}
        onDragLeave={handleDragLeave}
        onDragOver={handleDragOver}
        onDrop={handleDrop}
        onClick={handleClick}
        role="button"
        tabIndex={disabled ? -1 : 0}
        onKeyDown={(e) => e.key === 'Enter' && handleClick()}
        aria-disabled={disabled}
        title={disabled ? disabledReason : undefined}
      >
        <input
          ref={fileInputRef}
          type="file"
          multiple
          onChange={handleFileChange}
          className="drop-zone__input"
          accept={!allowAllTypes && allowedTypes ? allowedTypes.join(',') : undefined}
          disabled={disabled}
        />
        <div className="drop-zone__content">
          <UploadIcon />
          <p className="drop-zone__text">
            {disabled ? (
              <span>{disabledReason || 'Upload disabled'}</span>
            ) : (
              <><strong>Drag files here</strong> or click to browse</>
            )}
          </p>
          {!disabled && (
            <p className="drop-zone__hint">
              {maxFileSize && `Max file size: ${formatBytes(maxFileSize)}`}
              {!allowAllTypes && allowedTypes && ` • Accepted: ${allowedTypes.join(', ')}`}
            </p>
          )}
        </div>
      </div>

      {/* Upload Queue */}
      {uploadQueue.length > 0 && (
        <div className="upload-queue">
          <div className="upload-queue__header">
            <span className="upload-queue__title">
              {pausedCount > 0 && !isOnline
                ? `${pausedCount} uploads paused (offline)`
                : uploadingCount > 0
                  ? `Uploading ${uploadingCount} of ${uploadQueue.length} files`
                  : `${completedCount} of ${uploadQueue.length} files uploaded`
              }
              {failedCount > 0 && ` (${failedCount} failed)`}
            </span>
            {(completedCount > 0 || failedCount > 0) && (
              <button
                className="upload-queue__clear"
                onClick={clearCompleted}
                type="button"
              >
                Clear finished
              </button>
            )}
          </div>

          {/* Overall progress bar */}
          {uploadingCount > 0 && (
            <div className="upload-queue__overall">
              <div className="progress-bar">
                <div
                  className="progress-bar__fill"
                  style={{ width: `${overallProgress}%` }}
                />
              </div>
              <span className="progress-bar__text">{overallProgress}%</span>
            </div>
          )}

          {/* Individual file items */}
          <ul className="upload-queue__list">
            {uploadQueue.map(item => (
              <li key={item.id} className={`upload-item upload-item--${item.status}`}>
                <div className="upload-item__icon">
                  {item.status === 'complete' ? <CheckIcon /> :
                   item.status === 'failed' ? <ErrorIcon /> :
                   item.status === 'paused' ? <PauseIcon /> :
                   item.status === 'validating' ? <SpinnerIcon /> :
                   <FileIcon />}
                </div>

                <div className="upload-item__info">
                  <div className="upload-item__name" title={item.file.name}>
                    {item.file.name}
                  </div>
                  <div className="upload-item__meta">
                    <span className="upload-item__size">{formatBytes(item.file.size)}</span>
                    {item.status === 'uploading' && item.speed > 0 && (
                      <>
                        <span className="upload-item__speed">{formatSpeed(item.speed)}</span>
                        {item.startTime && (
                          <span className="upload-item__eta">
                            {formatTimeRemaining(
                              (item.file.size - (item.file.size * item.progress / 100)) / item.speed
                            )} remaining
                          </span>
                        )}
                      </>
                    )}
                    {item.status === 'complete' && item.artifactId && (
                      <span className="upload-item__artifact">
                        ID: {item.artifactId.substring(0, 12)}...
                      </span>
                    )}
                    {item.error && (
                      <span className="upload-item__error">{item.error}</span>
                    )}
                    {item.retryCount > 0 && item.status === 'uploading' && (
                      <span className="upload-item__retry-count">Retry {item.retryCount}</span>
                    )}
                    {item.status === 'validating' && (
                      <span className="upload-item__validating">Computing hash...</span>
                    )}
                  </div>

                  {item.status === 'uploading' && (
                    <div className="progress-bar progress-bar--small">
                      <div
                        className="progress-bar__fill"
                        style={{ width: `${item.progress}%` }}
                      />
                    </div>
                  )}
                </div>

                <div className="upload-item__actions">
                  {(item.status === 'failed' || (item.status === 'paused' && isOnline)) && (
                    <button
                      className="upload-item__btn upload-item__btn--retry"
                      onClick={() => retryItem(item.id)}
                      title="Retry upload"
                      type="button"
                    >
                      <RetryIcon />
                    </button>
                  )}
                  {(item.status === 'complete' || item.status === 'failed' || item.status === 'pending' || item.status === 'paused') && (
                    <button
                      className="upload-item__btn upload-item__btn--remove"
                      onClick={() => removeItem(item.id)}
                      title="Remove"
                      type="button"
                    >
                      <RemoveIcon />
                    </button>
                  )}
                </div>
              </li>
            ))}
          </ul>
        </div>
      )}
    </div>
  );
}

@@ -14,7 +14,7 @@
   top: 0;
   z-index: 100;
   backdrop-filter: blur(12px);
-  background: rgba(17, 17, 19, 0.85);
+  background: rgba(37, 37, 41, 0.85);
 }
 
 .header-content {
@@ -98,6 +98,170 @@
   opacity: 0.7;
 }
 
+/* Login link */
+.nav-login {
+  display: flex;
+  align-items: center;
+  gap: 8px;
+  padding: 8px 16px;
+  color: var(--text-primary);
+  font-size: 0.875rem;
+  font-weight: 500;
+  border-radius: var(--radius-md);
+  transition: all var(--transition-fast);
+  margin-left: 8px;
+  border: 1px solid var(--border-primary);
+}
+
+.nav-login:hover {
+  color: var(--text-primary);
+  background: var(--bg-hover);
+  border-color: var(--border-secondary);
+}
+
+/* User Menu */
+.user-menu {
+  position: relative;
+  margin-left: 8px;
+}
+
+.user-menu-trigger {
+  display: flex;
+  align-items: center;
+  gap: 8px;
+  padding: 6px 12px;
+  background: transparent;
+  border: 1px solid var(--border-primary);
+  border-radius: var(--radius-md);
+  color: var(--text-primary);
+  font-size: 0.875rem;
+  font-weight: 500;
+  cursor: pointer;
+  transition: all var(--transition-fast);
+}
+
+.user-menu-trigger:hover {
+  background: var(--bg-hover);
+  border-color: var(--border-secondary);
+}
+
+.user-avatar {
+  width: 28px;
+  height: 28px;
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  background: var(--accent-gradient);
+  border-radius: var(--radius-sm);
+  color: white;
+  font-weight: 600;
+  font-size: 0.8125rem;
+}
+
+.user-name {
+  max-width: 120px;
+  overflow: hidden;
+  text-overflow: ellipsis;
+  white-space: nowrap;
+}
+
+.user-menu-dropdown {
+  position: absolute;
+  top: 100%;
+  right: 0;
+  margin-top: 8px;
+  min-width: 200px;
+  background: var(--bg-secondary);
+  border: 1px solid var(--border-primary);
+  border-radius: var(--radius-md);
+  box-shadow: var(--shadow-lg);
+  z-index: 200;
+  overflow: hidden;
+}
+
+.user-menu-header {
+  display: flex;
+  align-items: center;
+  justify-content: space-between;
+  padding: 12px 16px;
+}
+
+.user-menu-username {
+  font-size: 0.875rem;
+  font-weight: 500;
+  color: var(--text-primary);
+}
+
+.user-menu-badge {
+  padding: 2px 8px;
+  background: var(--accent-gradient);
+  border-radius: 100px;
+  font-size: 0.6875rem;
+  font-weight: 600;
+  color: white;
+  text-transform: uppercase;
+  letter-spacing: 0.02em;
+}
+
+.user-menu-divider {
+  height: 1px;
+  background: var(--border-primary);
+}
+
+.user-menu-item {
+  display: flex;
+  align-items: center;
+  gap: 10px;
+  width: 100%;
+  padding: 12px 16px;
+  background: transparent;
+  border: none;
+  color: var(--text-secondary);
+  font-size: 0.875rem;
+  cursor: pointer;
+  transition: all var(--transition-fast);
+  text-align: left;
+  text-decoration: none;
+}
+
+.user-menu-item:hover {
+  background: var(--bg-hover);
+  color: var(--text-primary);
+}
+
+.user-menu-item svg {
+  opacity: 0.7;
+}
+
+.user-menu-item:hover svg {
+  opacity: 1;
+}
+
+/* User menu loading state */
+.user-menu-loading {
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  width: 40px;
+  height: 40px;
+  margin-left: 8px;
+}
+
+.user-menu-spinner {
+  width: 16px;
+  height: 16px;
+  border: 2px solid var(--border-secondary);
+  border-top-color: var(--accent-primary);
+  border-radius: 50%;
+  animation: user-menu-spin 0.6s linear infinite;
+}
+
+@keyframes user-menu-spin {
+  to {
+    transform: rotate(360deg);
+  }
+}
+
 /* Main content */
 .main {
   flex: 1;
@@ -1,5 +1,6 @@
-import { ReactNode } from 'react';
-import { Link, useLocation } from 'react-router-dom';
+import { ReactNode, useState, useRef, useEffect } from 'react';
+import { Link, NavLink, useLocation, useNavigate } from 'react-router-dom';
+import { useAuth } from '../contexts/AuthContext';
 import { GlobalSearch } from './GlobalSearch';
 import './Layout.css';
 
@@ -9,6 +10,31 @@ interface LayoutProps {
 
 function Layout({ children }: LayoutProps) {
   const location = useLocation();
+  const navigate = useNavigate();
+  const { user, loading, logout } = useAuth();
+  const [showUserMenu, setShowUserMenu] = useState(false);
+  const menuRef = useRef<HTMLDivElement>(null);
+
+  // Close menu when clicking outside
+  useEffect(() => {
+    function handleClickOutside(event: MouseEvent) {
+      if (menuRef.current && !menuRef.current.contains(event.target as Node)) {
+        setShowUserMenu(false);
+      }
+    }
+    document.addEventListener('mousedown', handleClickOutside);
+    return () => document.removeEventListener('mousedown', handleClickOutside);
+  }, []);
+
+  async function handleLogout() {
+    try {
+      await logout();
+      setShowUserMenu(false);
+      navigate('/');
+    } catch {
+      // Error handled in context
+    }
+  }
+
   return (
     <div className="layout">
@@ -60,6 +86,97 @@ function Layout({ children }: LayoutProps) {
             </svg>
             Docs
           </a>
+
+          {/* User Menu */}
+          {loading ? (
+            <div className="user-menu-loading">
+              <div className="user-menu-spinner"></div>
+            </div>
+          ) : user ? (
+            <div className="user-menu" ref={menuRef}>
+              <button
+                className="user-menu-trigger"
+                onClick={() => setShowUserMenu(!showUserMenu)}
+                aria-expanded={showUserMenu}
+                aria-haspopup="true"
+              >
+                <div className="user-avatar">
+                  {user.username.charAt(0).toUpperCase()}
+                </div>
+                <span className="user-name">{user.display_name || user.username}</span>
+                <svg width="12" height="12" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
+                  <polyline points="6 9 12 15 18 9"/>
+                </svg>
+              </button>
+
+              {showUserMenu && (
+                <div className="user-menu-dropdown">
+                  <div className="user-menu-header">
+                    <span className="user-menu-username">{user.username}</span>
+                    {user.is_admin && (
+                      <span className="user-menu-badge">Admin</span>
+                    )}
+                  </div>
+                  <div className="user-menu-divider"></div>
+                  <NavLink
+                    to="/settings/api-keys"
+                    className="user-menu-item"
+                    onClick={() => setShowUserMenu(false)}
+                  >
+                    <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
+                      <path d="M21 2l-2 2m-7.61 7.61a5.5 5.5 0 1 1-7.778 7.778 5.5 5.5 0 0 1 7.777-7.777zm0 0L15.5 7.5m0 0l3 3L22 7l-3-3m-3.5 3.5L19 4"/>
+                    </svg>
+                    API Keys
+                  </NavLink>
+                  {user.is_admin && (
+                    <>
+                      <NavLink
+                        to="/admin/users"
+                        className="user-menu-item"
+                        onClick={() => setShowUserMenu(false)}
+                      >
+                        <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
+                          <path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
+                          <circle cx="9" cy="7" r="4"/>
+                          <path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
+                          <path d="M16 3.13a4 4 0 0 1 0 7.75"/>
+                        </svg>
+                        User Management
+                      </NavLink>
+                      <NavLink
+                        to="/admin/oidc"
+                        className="user-menu-item"
+                        onClick={() => setShowUserMenu(false)}
+                      >
+                        <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
+                          <path d="M12 22s8-4 8-10V5l-8-3-8 3v7c0 6 8 10 8 10z"/>
+                        </svg>
+                        SSO Configuration
+                      </NavLink>
+                    </>
+                  )}
+                  <div className="user-menu-divider"></div>
+                  <button className="user-menu-item" onClick={handleLogout}>
+                    <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
+                      <path d="M9 21H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h4"/>
+                      <polyline points="16 17 21 12 16 7"/>
+                      <line x1="21" y1="12" x2="9" y2="12"/>
+                    </svg>
+                    Sign out
+                  </button>
+                </div>
+              )}
+            </div>
+          ) : (
+            <Link to="/login" className="nav-login">
+              <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
+                <path d="M15 3h4a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2h-4"/>
+                <polyline points="10 17 15 12 10 7"/>
+                <line x1="15" y1="12" x2="3" y2="12"/>
+              </svg>
+              Login
+            </Link>
+          )}
         </nav>
       </div>
     </header>
@@ -76,7 +193,6 @@ function Layout({ children }: LayoutProps) {
           </div>
           <div className="footer-links">
             <a href="/docs">Documentation</a>
-            <a href="/api/v1">API</a>
           </div>
         </div>
       </footer>

@@ -10,3 +10,5 @@ export { FilterChip, FilterChipGroup } from './FilterChip';
 export { DataTable } from './DataTable';
 export { Pagination } from './Pagination';
 export { GlobalSearch } from './GlobalSearch';
+export { DragDropUpload } from './DragDropUpload';
+export type { DragDropUploadProps, UploadItem, UploadResult, UploadStatus } from './DragDropUpload';

166  frontend/src/contexts/AuthContext.tsx  Normal file
@@ -0,0 +1,166 @@
import { createContext, useContext, useState, useEffect, useCallback, useRef, ReactNode } from 'react';
import { User, AccessLevel } from '../types';
import { getCurrentUser, login as apiLogin, logout as apiLogout, getMyProjectAccess } from '../api';

interface PermissionCacheEntry {
  accessLevel: AccessLevel | null;
  timestamp: number;
}

interface AuthContextType {
  user: User | null;
  loading: boolean;
  error: string | null;
  login: (username: string, password: string) => Promise<void>;
  logout: () => Promise<void>;
  refreshUser: () => Promise<void>;
  clearError: () => void;
  getProjectPermission: (projectName: string) => Promise<AccessLevel | null>;
  invalidatePermissionCache: (projectName?: string) => void;
}

const AuthContext = createContext<AuthContextType | undefined>(undefined);

interface AuthProviderProps {
  children: ReactNode;
}

// Cache TTL in milliseconds (5 minutes)
const PERMISSION_CACHE_TTL = 5 * 60 * 1000;

export function AuthProvider({ children }: AuthProviderProps) {
  const [user, setUser] = useState<User | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const permissionCacheRef = useRef<Map<string, PermissionCacheEntry>>(new Map());

  // Clear permission cache
  const clearPermissionCache = useCallback(() => {
    permissionCacheRef.current.clear();
  }, []);

  // Check session on initial load
  useEffect(() => {
    async function checkAuth() {
      try {
        const currentUser = await getCurrentUser();
        setUser(currentUser);
      } catch {
        setUser(null);
      } finally {
        setLoading(false);
      }
    }
    checkAuth();
  }, []);

  const login = useCallback(async (username: string, password: string) => {
    setLoading(true);
    setError(null);
    try {
      const loggedInUser = await apiLogin({ username, password });
      setUser(loggedInUser);
      // Clear permission cache on login - permissions may have changed
      clearPermissionCache();
    } catch (err) {
      const message = err instanceof Error ? err.message : 'Login failed';
      setError(message);
      throw err;
    } finally {
      setLoading(false);
    }
  }, [clearPermissionCache]);

  const logout = useCallback(async () => {
    setLoading(true);
    setError(null);
    try {
      await apiLogout();
      setUser(null);
      // Clear permission cache on logout
      clearPermissionCache();
    } catch (err) {
      const message = err instanceof Error ? err.message : 'Logout failed';
      setError(message);
      throw err;
    } finally {
      setLoading(false);
    }
  }, [clearPermissionCache]);

  const clearError = useCallback(() => {
    setError(null);
  }, []);

  const refreshUser = useCallback(async () => {
    try {
      const currentUser = await getCurrentUser();
      setUser(currentUser);
    } catch {
      setUser(null);
    }
  }, []);

  // Get project permission with caching
  const getProjectPermission = useCallback(async (projectName: string): Promise<AccessLevel | null> => {
    const cached = permissionCacheRef.current.get(projectName);
    const now = Date.now();

    // Return cached value if still valid
    if (cached && (now - cached.timestamp) < PERMISSION_CACHE_TTL) {
      return cached.accessLevel;
    }

    // Fetch fresh permission
    try {
      const result = await getMyProjectAccess(projectName);
      const entry: PermissionCacheEntry = {
        accessLevel: result.access_level,
        timestamp: now,
      };
      permissionCacheRef.current.set(projectName, entry);
      return result.access_level;
    } catch {
      // On error, cache null to avoid repeated failed requests
      const entry: PermissionCacheEntry = {
        accessLevel: null,
        timestamp: now,
      };
      permissionCacheRef.current.set(projectName, entry);
      return null;
    }
  }, []);

  // Invalidate permission cache for a specific project or all projects
  const invalidatePermissionCache = useCallback((projectName?: string) => {
    if (projectName) {
      permissionCacheRef.current.delete(projectName);
    } else {
      clearPermissionCache();
    }
  }, [clearPermissionCache]);

  return (
    <AuthContext.Provider value={{
      user,
      loading,
      error,
      login,
      logout,
      refreshUser,
      clearError,
      getProjectPermission,
      invalidatePermissionCache,
    }}>
      {children}
    </AuthContext.Provider>
  );
}

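// Convenience hook; throws when called outside an <AuthProvider> so misuse fails fast.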
export function useAuth() {
  const context = useContext(AuthContext);
  if (context === undefined) {
    throw new Error('useAuth must be used within an AuthProvider');
  }
  return context;
}

@@ -5,12 +5,12 @@
 }
 
 :root {
-  /* Dark mode color palette */
-  --bg-primary: #0a0a0b;
-  --bg-secondary: #111113;
-  --bg-tertiary: #1a1a1d;
-  --bg-elevated: #222225;
-  --bg-hover: #2a2a2e;
+  /* Dark mode color palette - lighter tones for better readability */
+  --bg-primary: #1e1e22;
+  --bg-secondary: #252529;
+  --bg-tertiary: #2d2d32;
+  --bg-elevated: #35353a;
+  --bg-hover: #3d3d42;
 
   /* Accent colors - Green/Emerald theme */
   --accent-primary: #10b981;
@@ -24,9 +24,9 @@
   --text-tertiary: #9ca3af;
   --text-muted: #6b7280;
 
-  /* Border colors */
-  --border-primary: #27272a;
-  --border-secondary: #3f3f46;
+  /* Border colors - slightly more visible */
+  --border-primary: #37373d;
+  --border-secondary: #48484e;
   --border-accent: #10b981;
 
   /* Status colors */

580  frontend/src/pages/APIKeysPage.css  Normal file
@@ -0,0 +1,580 @@
.api-keys-page {
  max-width: 900px;
  margin: 0 auto;
}

.api-keys-header {
  display: flex;
  justify-content: space-between;
  align-items: flex-start;
  margin-bottom: 32px;
  gap: 24px;
}

.api-keys-header-content h1 {
  font-size: 1.75rem;
  font-weight: 600;
  color: var(--text-primary);
  margin-bottom: 8px;
  letter-spacing: -0.02em;
}

.api-keys-subtitle {
  color: var(--text-tertiary);
  font-size: 0.9375rem;
}

.api-keys-create-button {
  display: flex;
  align-items: center;
  gap: 8px;
  padding: 12px 20px;
  background: var(--accent-gradient);
  border: none;
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  color: white;
  cursor: pointer;
  transition: all var(--transition-fast);
  box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
  flex-shrink: 0;
}

.api-keys-create-button:hover:not(:disabled) {
  transform: translateY(-1px);
  box-shadow: var(--shadow-md), 0 0 30px rgba(16, 185, 129, 0.3);
}

.api-keys-create-button:disabled {
  opacity: 0.5;
  cursor: not-allowed;
  transform: none;
}

.api-keys-error {
  display: flex;
  align-items: center;
  gap: 10px;
  background: var(--error-bg);
  border: 1px solid rgba(239, 68, 68, 0.2);
  color: var(--error);
  padding: 12px 16px;
  border-radius: var(--radius-md);
  margin-bottom: 24px;
  font-size: 0.875rem;
}

.api-keys-error svg {
  flex-shrink: 0;
}

.api-keys-error span {
  flex: 1;
}

.api-keys-error-dismiss {
  background: transparent;
  border: none;
  padding: 4px;
  color: var(--error);
  cursor: pointer;
  opacity: 0.7;
  transition: opacity var(--transition-fast);
}

.api-keys-error-dismiss:hover {
  opacity: 1;
}

.api-keys-new-key-banner {
  background: linear-gradient(135deg, rgba(16, 185, 129, 0.12) 0%, rgba(5, 150, 105, 0.08) 100%);
  border: 1px solid rgba(16, 185, 129, 0.3);
  border-radius: var(--radius-lg);
  padding: 24px;
  margin-bottom: 24px;
}

.api-keys-new-key-header {
  display: flex;
  align-items: center;
  gap: 10px;
  margin-bottom: 12px;
  color: var(--accent-primary);
}

.api-keys-new-key-title {
  font-size: 1rem;
  font-weight: 600;
}

.api-keys-new-key-warning {
  background: var(--warning-bg);
  border: 1px solid rgba(245, 158, 11, 0.3);
  color: var(--warning);
  padding: 10px 14px;
  border-radius: var(--radius-md);
  font-size: 0.8125rem;
  font-weight: 500;
  margin-bottom: 16px;
}

.api-keys-new-key-value-container {
  display: flex;
  align-items: center;
  gap: 12px;
  margin-bottom: 16px;
}

.api-keys-new-key-value {
  flex: 1;
  background: var(--bg-primary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  padding: 14px 16px;
  font-family: 'JetBrains Mono', 'Fira Code', 'SF Mono', Monaco, monospace;
  font-size: 0.8125rem;
  color: var(--text-primary);
  word-break: break-all;
  line-height: 1.5;
}

.api-keys-copy-button {
  display: flex;
  align-items: center;
  gap: 6px;
  padding: 10px 16px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  color: var(--text-secondary);
  font-size: 0.8125rem;
  font-weight: 500;
  cursor: pointer;
  transition: all var(--transition-fast);
  flex-shrink: 0;
}

.api-keys-copy-button:hover {
  background: var(--bg-hover);
  border-color: var(--border-secondary);
  color: var(--text-primary);
}

.api-keys-done-button {
  padding: 10px 20px;
  background: var(--accent-gradient);
  border: none;
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  color: white;
  cursor: pointer;
  transition: all var(--transition-fast);
}

.api-keys-done-button:hover {
  transform: translateY(-1px);
  box-shadow: var(--shadow-sm);
}

.api-keys-create-form-card {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  padding: 24px;
  margin-bottom: 24px;
}

.api-keys-create-form-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 20px;
}

.api-keys-create-form-header h2 {
  font-size: 1.125rem;
  font-weight: 600;
  color: var(--text-primary);
}

.api-keys-create-form-close {
  background: transparent;
  border: none;
  padding: 4px;
  color: var(--text-tertiary);
  cursor: pointer;
  border-radius: var(--radius-sm);
  transition: all var(--transition-fast);
}

.api-keys-create-form-close:hover {
  background: var(--bg-hover);
  color: var(--text-primary);
}

.api-keys-create-error {
  background: var(--error-bg);
  border: 1px solid rgba(239, 68, 68, 0.2);
  color: var(--error);
  padding: 10px 14px;
  border-radius: var(--radius-md);
  font-size: 0.8125rem;
  margin-bottom: 16px;
}

.api-keys-create-form {
  display: flex;
  flex-direction: column;
  gap: 16px;
}

.api-keys-form-group {
  display: flex;
  flex-direction: column;
  gap: 6px;
}

.api-keys-form-group label {
  font-size: 0.8125rem;
  font-weight: 500;
  color: var(--text-secondary);
}

.api-keys-form-group input {
  padding: 12px 14px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  color: var(--text-primary);
  transition: all var(--transition-fast);
}

.api-keys-form-group input::placeholder {
  color: var(--text-muted);
}

.api-keys-form-group input:hover:not(:disabled) {
  border-color: var(--border-secondary);
|
||||||
|
background: var(--bg-elevated);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-form-group input:focus {
|
||||||
|
outline: none;
|
||||||
|
border-color: var(--accent-primary);
|
||||||
|
box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
|
||||||
|
background: var(--bg-elevated);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-form-group input:disabled {
|
||||||
|
opacity: 0.6;
|
||||||
|
cursor: not-allowed;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-form-actions {
|
||||||
|
display: flex;
|
||||||
|
justify-content: flex-end;
|
||||||
|
gap: 12px;
|
||||||
|
margin-top: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-cancel-button {
|
||||||
|
padding: 10px 18px;
|
||||||
|
background: transparent;
|
||||||
|
border: 1px solid var(--border-primary);
|
||||||
|
border-radius: var(--radius-md);
|
||||||
|
font-size: 0.875rem;
|
||||||
|
font-weight: 500;
|
||||||
|
color: var(--text-secondary);
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all var(--transition-fast);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-cancel-button:hover:not(:disabled) {
|
||||||
|
background: var(--bg-hover);
|
||||||
|
border-color: var(--border-secondary);
|
||||||
|
color: var(--text-primary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-cancel-button:disabled {
|
||||||
|
opacity: 0.5;
|
||||||
|
cursor: not-allowed;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-submit-button {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
gap: 8px;
|
||||||
|
padding: 10px 18px;
|
||||||
|
background: var(--accent-gradient);
|
||||||
|
border: none;
|
||||||
|
border-radius: var(--radius-md);
|
||||||
|
font-size: 0.875rem;
|
||||||
|
font-weight: 500;
|
||||||
|
color: white;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all var(--transition-fast);
|
||||||
|
min-width: 110px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-submit-button:hover:not(:disabled) {
|
||||||
|
transform: translateY(-1px);
|
||||||
|
box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-submit-button:disabled {
|
||||||
|
opacity: 0.5;
|
||||||
|
cursor: not-allowed;
|
||||||
|
transform: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-button-spinner {
|
||||||
|
width: 14px;
|
||||||
|
height: 14px;
|
||||||
|
border: 2px solid rgba(255, 255, 255, 0.3);
|
||||||
|
border-top-color: white;
|
||||||
|
border-radius: 50%;
|
||||||
|
animation: api-keys-spin 0.6s linear infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes api-keys-spin {
|
||||||
|
to {
|
||||||
|
transform: rotate(360deg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-list-container {
|
||||||
|
background: var(--bg-secondary);
|
||||||
|
border: 1px solid var(--border-primary);
|
||||||
|
border-radius: var(--radius-lg);
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-list-loading,
|
||||||
|
.api-keys-loading {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
gap: 12px;
|
||||||
|
padding: 64px 24px;
|
||||||
|
color: var(--text-tertiary);
|
||||||
|
font-size: 0.9375rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-spinner {
|
||||||
|
width: 20px;
|
||||||
|
height: 20px;
|
||||||
|
border: 2px solid var(--border-secondary);
|
||||||
|
border-top-color: var(--accent-primary);
|
||||||
|
border-radius: 50%;
|
||||||
|
animation: api-keys-spin 0.6s linear infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-empty {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
padding: 64px 24px;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-empty-icon {
|
||||||
|
color: var(--text-muted);
|
||||||
|
margin-bottom: 16px;
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-empty h3 {
|
||||||
|
font-size: 1.125rem;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--text-primary);
|
||||||
|
margin-bottom: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-empty p {
|
||||||
|
color: var(--text-tertiary);
|
||||||
|
font-size: 0.875rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-list {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-list-header {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: 1fr 160px 160px 140px;
|
||||||
|
gap: 16px;
|
||||||
|
padding: 14px 20px;
|
||||||
|
background: var(--bg-tertiary);
|
||||||
|
border-bottom: 1px solid var(--border-primary);
|
||||||
|
font-size: 0.75rem;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--text-tertiary);
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.04em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-list-item {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: 1fr 160px 160px 140px;
|
||||||
|
gap: 16px;
|
||||||
|
padding: 16px 20px;
|
||||||
|
align-items: center;
|
||||||
|
border-bottom: 1px solid var(--border-primary);
|
||||||
|
transition: background var(--transition-fast);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-list-item:last-child {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-list-item:hover {
|
||||||
|
background: var(--bg-tertiary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-item-name {
|
||||||
|
font-weight: 500;
|
||||||
|
color: var(--text-primary);
|
||||||
|
font-size: 0.9375rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-item-description {
|
||||||
|
color: var(--text-tertiary);
|
||||||
|
font-size: 0.8125rem;
|
||||||
|
margin-top: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-col-created,
|
||||||
|
.api-keys-col-used {
|
||||||
|
color: var(--text-secondary);
|
||||||
|
font-size: 0.8125rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-col-actions {
|
||||||
|
display: flex;
|
||||||
|
justify-content: flex-end;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-revoke-button {
|
||||||
|
padding: 6px 14px;
|
||||||
|
background: transparent;
|
||||||
|
border: 1px solid rgba(239, 68, 68, 0.3);
|
||||||
|
border-radius: var(--radius-md);
|
||||||
|
font-size: 0.8125rem;
|
||||||
|
font-weight: 500;
|
||||||
|
color: var(--error);
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all var(--transition-fast);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-revoke-button:hover {
|
||||||
|
background: var(--error-bg);
|
||||||
|
border-color: rgba(239, 68, 68, 0.5);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-delete-confirm {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 8px;
|
||||||
|
font-size: 0.8125rem;
|
||||||
|
color: var(--text-secondary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-confirm-yes {
|
||||||
|
padding: 4px 12px;
|
||||||
|
background: var(--error);
|
||||||
|
border: none;
|
||||||
|
border-radius: var(--radius-sm);
|
||||||
|
font-size: 0.75rem;
|
||||||
|
font-weight: 500;
|
||||||
|
color: white;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all var(--transition-fast);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-confirm-yes:hover:not(:disabled) {
|
||||||
|
opacity: 0.9;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-confirm-yes:disabled {
|
||||||
|
opacity: 0.5;
|
||||||
|
cursor: not-allowed;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-confirm-no {
|
||||||
|
padding: 4px 12px;
|
||||||
|
background: var(--bg-tertiary);
|
||||||
|
border: 1px solid var(--border-primary);
|
||||||
|
border-radius: var(--radius-sm);
|
||||||
|
font-size: 0.75rem;
|
||||||
|
font-weight: 500;
|
||||||
|
color: var(--text-secondary);
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all var(--transition-fast);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-confirm-no:hover:not(:disabled) {
|
||||||
|
background: var(--bg-hover);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-confirm-no:disabled {
|
||||||
|
opacity: 0.5;
|
||||||
|
cursor: not-allowed;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.api-keys-header {
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: stretch;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-create-button {
|
||||||
|
align-self: flex-start;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-list-header {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-list-item {
|
||||||
|
grid-template-columns: 1fr;
|
||||||
|
gap: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-col-name {
|
||||||
|
order: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-col-created,
|
||||||
|
.api-keys-col-used {
|
||||||
|
font-size: 0.75rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-col-created::before {
|
||||||
|
content: 'Created: ';
|
||||||
|
color: var(--text-muted);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-col-used::before {
|
||||||
|
content: 'Last used: ';
|
||||||
|
color: var(--text-muted);
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-col-actions {
|
||||||
|
justify-content: flex-start;
|
||||||
|
margin-top: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-new-key-value-container {
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-keys-copy-button {
|
||||||
|
align-self: flex-start;
|
||||||
|
}
|
||||||
|
}
|
||||||
371
frontend/src/pages/APIKeysPage.tsx
Normal file
@@ -0,0 +1,371 @@
import { useState, useEffect } from 'react';
import { useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { listAPIKeys, createAPIKey, deleteAPIKey } from '../api';
import { APIKey, APIKeyCreateResponse } from '../types';
import './APIKeysPage.css';

function APIKeysPage() {
  const { user, loading: authLoading } = useAuth();
  const navigate = useNavigate();

  const [keys, setKeys] = useState<APIKey[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);

  const [showCreateForm, setShowCreateForm] = useState(false);
  const [createName, setCreateName] = useState('');
  const [createDescription, setCreateDescription] = useState('');
  const [isCreating, setIsCreating] = useState(false);
  const [createError, setCreateError] = useState<string | null>(null);

  const [newlyCreatedKey, setNewlyCreatedKey] = useState<APIKeyCreateResponse | null>(null);
  const [copied, setCopied] = useState(false);

  const [deleteConfirmId, setDeleteConfirmId] = useState<string | null>(null);
  const [isDeleting, setIsDeleting] = useState(false);

  useEffect(() => {
    if (!authLoading && !user) {
      navigate('/login', { state: { from: '/settings/api-keys' } });
    }
  }, [user, authLoading, navigate]);

  useEffect(() => {
    if (user) {
      loadKeys();
    }
  }, [user]);

  async function loadKeys() {
    setLoading(true);
    setError(null);
    try {
      const data = await listAPIKeys();
      setKeys(data);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load API keys');
    } finally {
      setLoading(false);
    }
  }

  async function handleCreate(e: React.FormEvent) {
    e.preventDefault();
    if (!createName.trim()) {
      setCreateError('Name is required');
      return;
    }

    setIsCreating(true);
    setCreateError(null);
    try {
      const response = await createAPIKey({
        name: createName.trim(),
        description: createDescription.trim() || undefined,
      });
      setNewlyCreatedKey(response);
      setShowCreateForm(false);
      setCreateName('');
      setCreateDescription('');
      await loadKeys();
    } catch (err) {
      setCreateError(err instanceof Error ? err.message : 'Failed to create API key');
    } finally {
      setIsCreating(false);
    }
  }

  async function handleDelete(id: string) {
    setIsDeleting(true);
    try {
      await deleteAPIKey(id);
      setDeleteConfirmId(null);
      await loadKeys();
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to revoke API key');
    } finally {
      setIsDeleting(false);
    }
  }

  async function handleCopyKey() {
    if (newlyCreatedKey) {
      try {
        await navigator.clipboard.writeText(newlyCreatedKey.key);
        setCopied(true);
        setTimeout(() => setCopied(false), 2000);
      } catch {
        setError('Failed to copy to clipboard');
      }
    }
  }

  function handleDismissNewKey() {
    setNewlyCreatedKey(null);
    setCopied(false);
  }

  function formatDate(dateString: string | null): string {
    if (!dateString) return 'Never';
    return new Date(dateString).toLocaleDateString('en-US', {
      year: 'numeric',
      month: 'short',
      day: 'numeric',
      hour: '2-digit',
      minute: '2-digit',
    });
  }

  if (authLoading) {
    return (
      <div className="api-keys-page">
        <div className="api-keys-loading">
          <div className="api-keys-spinner"></div>
          <span>Loading...</span>
        </div>
      </div>
    );
  }

  if (!user) {
    return null;
  }

  return (
    <div className="api-keys-page">
      <div className="api-keys-header">
        <div className="api-keys-header-content">
          <h1>API Keys</h1>
          <p className="api-keys-subtitle">
            Manage API keys for programmatic access to Orchard
          </p>
        </div>
        <button
          className="api-keys-create-button"
          onClick={() => setShowCreateForm(true)}
          disabled={showCreateForm}
        >
          <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <line x1="12" y1="5" x2="12" y2="19"/>
            <line x1="5" y1="12" x2="19" y2="12"/>
          </svg>
          Create New Key
        </button>
      </div>

      {error && (
        <div className="api-keys-error">
          <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <circle cx="12" cy="12" r="10"/>
            <line x1="12" y1="8" x2="12" y2="12"/>
            <line x1="12" y1="16" x2="12.01" y2="16"/>
          </svg>
          <span>{error}</span>
          <button onClick={() => setError(null)} className="api-keys-error-dismiss">
            <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
              <line x1="18" y1="6" x2="6" y2="18"/>
              <line x1="6" y1="6" x2="18" y2="18"/>
            </svg>
          </button>
        </div>
      )}

      {newlyCreatedKey && (
        <div className="api-keys-new-key-banner">
          <div className="api-keys-new-key-header">
            <svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
              <path d="M12 22s8-4 8-10V5l-8-3-8 3v7c0 6 8 10 8 10z"/>
            </svg>
            <span className="api-keys-new-key-title">New API Key Created</span>
          </div>
          <div className="api-keys-new-key-warning">
            Copy this key now! It won't be shown again.
          </div>
          <div className="api-keys-new-key-value-container">
            <code className="api-keys-new-key-value">{newlyCreatedKey.key}</code>
            <button
              className="api-keys-copy-button"
              onClick={handleCopyKey}
              title="Copy to clipboard"
            >
              {copied ? (
                <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                  <polyline points="20 6 9 17 4 12"/>
                </svg>
              ) : (
                <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                  <rect x="9" y="9" width="13" height="13" rx="2" ry="2"/>
                  <path d="M5 15H4a2 2 0 0 1-2-2V4a2 2 0 0 1 2-2h9a2 2 0 0 1 2 2v1"/>
                </svg>
              )}
              {copied ? 'Copied!' : 'Copy'}
            </button>
          </div>
          <button className="api-keys-done-button" onClick={handleDismissNewKey}>
            Done
          </button>
        </div>
      )}

      {showCreateForm && (
        <div className="api-keys-create-form-card">
          <div className="api-keys-create-form-header">
            <h2>Create New API Key</h2>
            <button
              className="api-keys-create-form-close"
              onClick={() => {
                setShowCreateForm(false);
                setCreateName('');
                setCreateDescription('');
                setCreateError(null);
              }}
            >
              <svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <line x1="18" y1="6" x2="6" y2="18"/>
                <line x1="6" y1="6" x2="18" y2="18"/>
              </svg>
            </button>
          </div>

          {createError && (
            <div className="api-keys-create-error">
              {createError}
            </div>
          )}

          <form onSubmit={handleCreate} className="api-keys-create-form">
            <div className="api-keys-form-group">
              <label htmlFor="key-name">Name</label>
              <input
                id="key-name"
                type="text"
                value={createName}
                onChange={(e) => setCreateName(e.target.value)}
                placeholder="e.g., CI/CD Pipeline, Local Development"
                autoFocus
                disabled={isCreating}
              />
            </div>

            <div className="api-keys-form-group">
              <label htmlFor="key-description">Description (optional)</label>
              <input
                id="key-description"
                type="text"
                value={createDescription}
                onChange={(e) => setCreateDescription(e.target.value)}
                placeholder="What will this key be used for?"
                disabled={isCreating}
              />
            </div>

            <div className="api-keys-form-actions">
              <button
                type="button"
                className="api-keys-cancel-button"
                onClick={() => {
                  setShowCreateForm(false);
                  setCreateName('');
                  setCreateDescription('');
                  setCreateError(null);
                }}
                disabled={isCreating}
              >
                Cancel
              </button>
              <button
                type="submit"
                className="api-keys-submit-button"
                disabled={isCreating || !createName.trim()}
              >
                {isCreating ? (
                  <>
                    <span className="api-keys-button-spinner"></span>
                    Creating...
                  </>
                ) : (
                  'Create Key'
                )}
              </button>
            </div>
          </form>
        </div>
      )}

      <div className="api-keys-list-container">
        {loading ? (
          <div className="api-keys-list-loading">
            <div className="api-keys-spinner"></div>
            <span>Loading API keys...</span>
          </div>
        ) : keys.length === 0 ? (
          <div className="api-keys-empty">
            <div className="api-keys-empty-icon">
              <svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
                <path d="M21 2l-2 2m-7.61 7.61a5.5 5.5 0 1 1-7.778 7.778 5.5 5.5 0 0 1 7.777-7.777zm0 0L15.5 7.5m0 0l3 3L22 7l-3-3m-3.5 3.5L19 4"/>
              </svg>
            </div>
            <h3>No API Keys</h3>
            <p>Create an API key to access Orchard programmatically</p>
          </div>
        ) : (
          <div className="api-keys-list">
            <div className="api-keys-list-header">
              <span className="api-keys-col-name">Name</span>
              <span className="api-keys-col-created">Created</span>
              <span className="api-keys-col-used">Last Used</span>
              <span className="api-keys-col-actions">Actions</span>
            </div>
            {keys.map((key) => (
              <div key={key.id} className="api-keys-list-item">
                <div className="api-keys-col-name">
                  <div className="api-keys-item-name">{key.name}</div>
                  {key.description && (
                    <div className="api-keys-item-description">{key.description}</div>
                  )}
                </div>
                <div className="api-keys-col-created">
                  {formatDate(key.created_at)}
                </div>
                <div className="api-keys-col-used">
                  {formatDate(key.last_used)}
                </div>
                <div className="api-keys-col-actions">
                  {deleteConfirmId === key.id ? (
                    <div className="api-keys-delete-confirm">
                      <span>Revoke?</span>
                      <button
                        className="api-keys-confirm-yes"
                        onClick={() => handleDelete(key.id)}
                        disabled={isDeleting}
                      >
                        {isDeleting ? 'Revoking...' : 'Yes'}
                      </button>
                      <button
                        className="api-keys-confirm-no"
                        onClick={() => setDeleteConfirmId(null)}
                        disabled={isDeleting}
                      >
                        No
                      </button>
                    </div>
                  ) : (
                    <button
                      className="api-keys-revoke-button"
                      onClick={() => setDeleteConfirmId(key.id)}
                    >
                      Revoke
                    </button>
                  )}
                </div>
              </div>
            ))}
          </div>
        )}
      </div>
    </div>
  );
}

export default APIKeysPage;
405
frontend/src/pages/AdminOIDCPage.css
Normal file
@@ -0,0 +1,405 @@
.admin-oidc-page {
  max-width: 800px;
  margin: 0 auto;
}

.admin-oidc-header {
  margin-bottom: 32px;
}

.admin-oidc-header-content h1 {
  font-size: 1.75rem;
  font-weight: 600;
  color: var(--text-primary);
  margin-bottom: 8px;
  letter-spacing: -0.02em;
}

.admin-oidc-subtitle {
  color: var(--text-tertiary);
  font-size: 0.9375rem;
}

.admin-oidc-success {
  display: flex;
  align-items: center;
  gap: 10px;
  background: var(--success-bg);
  border: 1px solid rgba(34, 197, 94, 0.2);
  color: var(--success);
  padding: 12px 16px;
  border-radius: var(--radius-md);
  margin-bottom: 24px;
  font-size: 0.875rem;
  animation: admin-oidc-fade-in 0.2s ease;
}

@keyframes admin-oidc-fade-in {
  from {
    opacity: 0;
    transform: translateY(-8px);
  }
  to {
    opacity: 1;
    transform: translateY(0);
  }
}

.admin-oidc-error {
  display: flex;
  align-items: center;
  gap: 10px;
  background: var(--error-bg);
  border: 1px solid rgba(239, 68, 68, 0.2);
  color: var(--error);
  padding: 12px 16px;
  border-radius: var(--radius-md);
  margin-bottom: 24px;
  font-size: 0.875rem;
}

.admin-oidc-error svg {
  flex-shrink: 0;
}

.admin-oidc-error span {
  flex: 1;
}

.admin-oidc-error-dismiss {
  background: transparent;
  border: none;
  padding: 4px;
  color: var(--error);
  cursor: pointer;
  opacity: 0.7;
  transition: opacity var(--transition-fast);
}

.admin-oidc-error-dismiss:hover {
  opacity: 1;
}

.admin-oidc-access-denied {
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  padding: 80px 24px;
  text-align: center;
}

.admin-oidc-access-denied-icon {
  color: var(--error);
  margin-bottom: 24px;
  opacity: 0.8;
}

.admin-oidc-access-denied h2 {
  font-size: 1.5rem;
  font-weight: 600;
  color: var(--text-primary);
  margin-bottom: 12px;
}

.admin-oidc-access-denied p {
  color: var(--text-tertiary);
  font-size: 0.9375rem;
  max-width: 400px;
}

.admin-oidc-card {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  padding: 24px;
  margin-bottom: 24px;
}

.admin-oidc-section {
  margin-bottom: 32px;
  padding-bottom: 24px;
  border-bottom: 1px solid var(--border-primary);
}

.admin-oidc-section:last-of-type {
  margin-bottom: 0;
  padding-bottom: 0;
  border-bottom: none;
}

.admin-oidc-section h2 {
  font-size: 1rem;
  font-weight: 600;
  color: var(--text-primary);
  margin-bottom: 16px;
}

.admin-oidc-form-group {
  margin-bottom: 16px;
}

.admin-oidc-form-group:last-child {
  margin-bottom: 0;
}

.admin-oidc-form-group label {
  display: block;
  font-size: 0.8125rem;
  font-weight: 500;
  color: var(--text-secondary);
  margin-bottom: 6px;
}

.admin-oidc-form-group input[type="text"],
.admin-oidc-form-group input[type="password"],
.admin-oidc-form-group input[type="url"] {
  width: 100%;
  padding: 12px 14px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  color: var(--text-primary);
  transition: all var(--transition-fast);
}

.admin-oidc-form-group input::placeholder {
  color: var(--text-muted);
}

.admin-oidc-form-group input:hover:not(:disabled) {
  border-color: var(--border-secondary);
  background: var(--bg-elevated);
}

.admin-oidc-form-group input:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
  background: var(--bg-elevated);
}

.admin-oidc-form-group input:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

.admin-oidc-form-row {
  display: grid;
  grid-template-columns: 1fr 1fr;
  gap: 16px;
}

.admin-oidc-field-help {
  margin-top: 6px;
  font-size: 0.75rem;
  color: var(--text-muted);
  line-height: 1.4;
}

.admin-oidc-field-help code {
  background: var(--bg-tertiary);
  padding: 1px 4px;
  border-radius: 3px;
  font-size: 0.6875rem;
}

.admin-oidc-secret-status {
  color: var(--success);
  font-weight: 400;
  font-size: 0.75rem;
}

.admin-oidc-toggle-group {
  margin-bottom: 16px;
}

.admin-oidc-toggle-label {
  display: flex;
  align-items: center;
  gap: 12px;
  cursor: pointer;
  font-size: 0.875rem;
  font-weight: 500;
  color: var(--text-primary);
  user-select: none;
}

.admin-oidc-toggle-label input[type="checkbox"] {
  position: absolute;
  opacity: 0;
  width: 0;
  height: 0;
}

.admin-oidc-toggle-custom {
  width: 44px;
  height: 24px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-secondary);
  border-radius: 12px;
  transition: all var(--transition-fast);
  position: relative;
  flex-shrink: 0;
}

.admin-oidc-toggle-custom::after {
  content: '';
  position: absolute;
  left: 2px;
  top: 2px;
  width: 18px;
  height: 18px;
  background: var(--text-muted);
  border-radius: 50%;
  transition: all var(--transition-fast);
}

.admin-oidc-toggle-label input[type="checkbox"]:checked + .admin-oidc-toggle-custom {
  background: var(--accent-primary);
  border-color: var(--accent-primary);
}

.admin-oidc-toggle-label input[type="checkbox"]:checked + .admin-oidc-toggle-custom::after {
  left: 22px;
  background: white;
}

.admin-oidc-toggle-label input[type="checkbox"]:focus + .admin-oidc-toggle-custom {
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
}

.admin-oidc-toggle-label:hover .admin-oidc-toggle-custom {
  border-color: var(--accent-primary);
}

.admin-oidc-form-actions {
  display: flex;
  justify-content: flex-end;
  gap: 12px;
  margin-top: 24px;
  padding-top: 24px;
  border-top: 1px solid var(--border-primary);
}

.admin-oidc-cancel-button {
  padding: 10px 18px;
  background: transparent;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  color: var(--text-secondary);
  cursor: pointer;
  transition: all var(--transition-fast);
}

.admin-oidc-cancel-button:hover:not(:disabled) {
  background: var(--bg-hover);
  border-color: var(--border-secondary);
  color: var(--text-primary);
}

.admin-oidc-cancel-button:disabled {
  opacity: 0.5;
  cursor: not-allowed;
}

.admin-oidc-submit-button {
  display: flex;
  align-items: center;
  justify-content: center;
  gap: 8px;
  padding: 10px 18px;
  background: var(--accent-gradient);
  border: none;
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  color: white;
  cursor: pointer;
  transition: all var(--transition-fast);
  min-width: 160px;
}

.admin-oidc-submit-button:hover:not(:disabled) {
  transform: translateY(-1px);
  box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
}

.admin-oidc-submit-button:disabled {
  opacity: 0.5;
  cursor: not-allowed;
  transform: none;
}

.admin-oidc-button-spinner {
  width: 14px;
  height: 14px;
  border: 2px solid rgba(255, 255, 255, 0.3);
  border-top-color: white;
  border-radius: 50%;
  animation: admin-oidc-spin 0.6s linear infinite;
}

@keyframes admin-oidc-spin {
  to {
    transform: rotate(360deg);
  }
}

.admin-oidc-loading {
  display: flex;
  align-items: center;
  justify-content: center;
  gap: 12px;
  padding: 64px 24px;
  color: var(--text-tertiary);
  font-size: 0.9375rem;
}

.admin-oidc-spinner {
  width: 20px;
  height: 20px;
  border: 2px solid var(--border-secondary);
  border-top-color: var(--accent-primary);
  border-radius: 50%;
  animation: admin-oidc-spin 0.6s linear infinite;
}

.admin-oidc-info-card {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  padding: 20px 24px;
}

.admin-oidc-info-card h3 {
  font-size: 0.875rem;
  font-weight: 600;
  color: var(--text-primary);
  margin-bottom: 8px;
}

.admin-oidc-info-card p {
  font-size: 0.8125rem;
  color: var(--text-tertiary);
  margin-bottom: 12px;
}

.admin-oidc-callback-url {
  display: block;
  background: var(--bg-tertiary);
  padding: 12px 16px;
  border-radius: var(--radius-md);
  font-size: 0.8125rem;
  color: var(--text-primary);
  word-break: break-all;
}

@media (max-width: 640px) {
  .admin-oidc-form-row {
    grid-template-columns: 1fr;
  }
}
342
frontend/src/pages/AdminOIDCPage.tsx
Normal file
@@ -0,0 +1,342 @@
import { useState, useEffect } from 'react';
import { useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { getOIDCConfig, updateOIDCConfig } from '../api';
import { OIDCConfig } from '../types';
import './AdminOIDCPage.css';

function AdminOIDCPage() {
  const { user, loading: authLoading } = useAuth();
  const navigate = useNavigate();

  const [config, setConfig] = useState<OIDCConfig | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const [successMessage, setSuccessMessage] = useState<string | null>(null);

  // Form state
  const [enabled, setEnabled] = useState(false);
  const [issuerUrl, setIssuerUrl] = useState('');
  const [clientId, setClientId] = useState('');
  const [clientSecret, setClientSecret] = useState('');
  const [scopes, setScopes] = useState('openid profile email');
  const [autoCreateUsers, setAutoCreateUsers] = useState(true);
  const [adminGroup, setAdminGroup] = useState('');
  const [isSaving, setIsSaving] = useState(false);

  useEffect(() => {
    if (!authLoading && !user) {
      navigate('/login', { state: { from: '/admin/oidc' } });
    }
  }, [user, authLoading, navigate]);

  useEffect(() => {
    if (user && user.is_admin) {
      loadConfig();
    }
  }, [user]);

  useEffect(() => {
    if (successMessage) {
      const timer = setTimeout(() => setSuccessMessage(null), 3000);
      return () => clearTimeout(timer);
    }
  }, [successMessage]);

  async function loadConfig() {
    setLoading(true);
    setError(null);
    try {
      const data = await getOIDCConfig();
      setConfig(data);
      setEnabled(data.enabled);
      setIssuerUrl(data.issuer_url);
      setClientId(data.client_id);
      setScopes(data.scopes.join(' '));
      setAutoCreateUsers(data.auto_create_users);
      setAdminGroup(data.admin_group);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load OIDC configuration');
    } finally {
      setLoading(false);
    }
  }

  async function handleSave(e: React.FormEvent) {
    e.preventDefault();

    if (enabled && !issuerUrl.trim()) {
      setError('Issuer URL is required when OIDC is enabled');
      return;
    }
    if (enabled && !clientId.trim()) {
      setError('Client ID is required when OIDC is enabled');
      return;
    }

    setIsSaving(true);
    setError(null);

    try {
      const scopesList = scopes.split(/\s+/).filter(s => s.length > 0);
      const updateData: Record<string, unknown> = {
        enabled,
        issuer_url: issuerUrl.trim(),
        client_id: clientId.trim(),
        scopes: scopesList,
        auto_create_users: autoCreateUsers,
        admin_group: adminGroup.trim(),
      };

      if (clientSecret) {
        updateData.client_secret = clientSecret;
      }

      await updateOIDCConfig(updateData);
      setSuccessMessage('OIDC configuration saved successfully');
      setClientSecret('');
      await loadConfig();
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to save OIDC configuration');
    } finally {
      setIsSaving(false);
    }
  }

  if (authLoading) {
    return (
      <div className="admin-oidc-page">
        <div className="admin-oidc-loading">
          <div className="admin-oidc-spinner"></div>
          <span>Loading...</span>
        </div>
      </div>
    );
  }

  if (!user) {
    return null;
  }

  if (!user.is_admin) {
    return (
      <div className="admin-oidc-page">
        <div className="admin-oidc-access-denied">
          <div className="admin-oidc-access-denied-icon">
            <svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
              <circle cx="12" cy="12" r="10"/>
              <line x1="4.93" y1="4.93" x2="19.07" y2="19.07"/>
            </svg>
          </div>
          <h2>Access Denied</h2>
          <p>You do not have permission to access this page. Admin privileges are required.</p>
        </div>
      </div>
    );
  }

  return (
    <div className="admin-oidc-page">
      <div className="admin-oidc-header">
        <div className="admin-oidc-header-content">
          <h1>Single Sign-On (OIDC)</h1>
          <p className="admin-oidc-subtitle">
            Configure OpenID Connect for SSO authentication
          </p>
        </div>
      </div>

      {successMessage && (
        <div className="admin-oidc-success">
          <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <path d="M22 11.08V12a10 10 0 1 1-5.93-9.14"/>
            <polyline points="22 4 12 14.01 9 11.01"/>
          </svg>
          <span>{successMessage}</span>
        </div>
      )}

      {error && (
        <div className="admin-oidc-error">
          <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <circle cx="12" cy="12" r="10"/>
            <line x1="12" y1="8" x2="12" y2="12"/>
            <line x1="12" y1="16" x2="12.01" y2="16"/>
          </svg>
          <span>{error}</span>
          <button onClick={() => setError(null)} className="admin-oidc-error-dismiss">
            <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
              <line x1="18" y1="6" x2="6" y2="18"/>
              <line x1="6" y1="6" x2="18" y2="18"/>
            </svg>
          </button>
        </div>
      )}

      {loading ? (
        <div className="admin-oidc-card">
          <div className="admin-oidc-loading">
            <div className="admin-oidc-spinner"></div>
            <span>Loading configuration...</span>
          </div>
        </div>
      ) : (
        <form onSubmit={handleSave} className="admin-oidc-card">
          <div className="admin-oidc-section">
            <h2>Status</h2>
            <div className="admin-oidc-toggle-group">
              <label className="admin-oidc-toggle-label">
                <input
                  type="checkbox"
                  checked={enabled}
                  onChange={(e) => setEnabled(e.target.checked)}
                  disabled={isSaving}
                />
                <span className="admin-oidc-toggle-custom"></span>
                Enable OIDC Authentication
              </label>
              <p className="admin-oidc-field-help">
                When enabled, users can sign in using your organization's identity provider.
              </p>
            </div>
          </div>

          <div className="admin-oidc-section">
            <h2>Provider Configuration</h2>

            <div className="admin-oidc-form-group">
              <label htmlFor="issuer-url">Issuer URL</label>
              <input
                id="issuer-url"
                type="url"
                value={issuerUrl}
                onChange={(e) => setIssuerUrl(e.target.value)}
                placeholder="https://your-provider.com"
                disabled={isSaving}
              />
              <p className="admin-oidc-field-help">
                The base URL of your OIDC provider. Discovery document will be fetched from <code>/.well-known/openid-configuration</code>.
              </p>
            </div>

            <div className="admin-oidc-form-row">
              <div className="admin-oidc-form-group">
                <label htmlFor="client-id">Client ID</label>
                <input
                  id="client-id"
                  type="text"
                  value={clientId}
                  onChange={(e) => setClientId(e.target.value)}
                  placeholder="your-client-id"
                  disabled={isSaving}
                />
              </div>

              <div className="admin-oidc-form-group">
                <label htmlFor="client-secret">
                  Client Secret
                  {config?.has_client_secret && (
                    <span className="admin-oidc-secret-status"> (configured)</span>
                  )}
                </label>
                <input
                  id="client-secret"
                  type="password"
                  value={clientSecret}
                  onChange={(e) => setClientSecret(e.target.value)}
                  placeholder={config?.has_client_secret ? 'Leave blank to keep current' : 'Enter client secret'}
                  disabled={isSaving}
                />
              </div>
            </div>

            <div className="admin-oidc-form-group">
              <label htmlFor="scopes">Scopes</label>
              <input
                id="scopes"
                type="text"
                value={scopes}
                onChange={(e) => setScopes(e.target.value)}
                placeholder="openid profile email"
                disabled={isSaving}
              />
              <p className="admin-oidc-field-help">
                Space-separated list of OIDC scopes to request. Common scopes: openid, profile, email, groups.
              </p>
            </div>
          </div>

          <div className="admin-oidc-section">
            <h2>User Provisioning</h2>

            <div className="admin-oidc-toggle-group">
              <label className="admin-oidc-toggle-label">
                <input
                  type="checkbox"
                  checked={autoCreateUsers}
                  onChange={(e) => setAutoCreateUsers(e.target.checked)}
                  disabled={isSaving}
                />
                <span className="admin-oidc-toggle-custom"></span>
                Auto-create users on first login
              </label>
              <p className="admin-oidc-field-help">
                When enabled, new users will be created automatically when they sign in via OIDC for the first time.
              </p>
            </div>

            <div className="admin-oidc-form-group">
              <label htmlFor="admin-group">Admin Group (optional)</label>
              <input
                id="admin-group"
                type="text"
                value={adminGroup}
                onChange={(e) => setAdminGroup(e.target.value)}
                placeholder="admin, orchard-admins"
                disabled={isSaving}
              />
              <p className="admin-oidc-field-help">
                Users in this group (from the groups claim) will be granted admin privileges. Leave blank to disable automatic admin assignment.
              </p>
            </div>
          </div>

          <div className="admin-oidc-form-actions">
            <button
              type="button"
              className="admin-oidc-cancel-button"
              onClick={loadConfig}
              disabled={isSaving}
            >
              Reset
            </button>
            <button
              type="submit"
              className="admin-oidc-submit-button"
              disabled={isSaving}
            >
              {isSaving ? (
                <>
                  <span className="admin-oidc-button-spinner"></span>
                  Saving...
                </>
              ) : (
                'Save Configuration'
              )}
            </button>
          </div>
        </form>
      )}

      <div className="admin-oidc-info-card">
        <h3>Callback URL</h3>
        <p>Configure your identity provider with the following callback URL:</p>
        <code className="admin-oidc-callback-url">
          {window.location.origin}/api/v1/auth/oidc/callback
        </code>
      </div>
    </div>
  );
}

export default AdminOIDCPage;
667
frontend/src/pages/AdminUsersPage.css
Normal file
@@ -0,0 +1,667 @@
.admin-users-page {
  max-width: 1100px;
  margin: 0 auto;
}

.admin-users-header {
  display: flex;
  justify-content: space-between;
  align-items: flex-start;
  margin-bottom: 32px;
  gap: 24px;
}

.admin-users-header-content h1 {
  font-size: 1.75rem;
  font-weight: 600;
  color: var(--text-primary);
  margin-bottom: 8px;
  letter-spacing: -0.02em;
}

.admin-users-subtitle {
  color: var(--text-tertiary);
  font-size: 0.9375rem;
}

.admin-users-create-button {
  display: flex;
  align-items: center;
  gap: 8px;
  padding: 12px 20px;
  background: var(--accent-gradient);
  border: none;
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  color: white;
  cursor: pointer;
  transition: all var(--transition-fast);
  box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
  flex-shrink: 0;
}

.admin-users-create-button:hover:not(:disabled) {
  transform: translateY(-1px);
  box-shadow: var(--shadow-md), 0 0 30px rgba(16, 185, 129, 0.3);
}

.admin-users-create-button:disabled {
  opacity: 0.5;
  cursor: not-allowed;
  transform: none;
}

.admin-users-success {
  display: flex;
  align-items: center;
  gap: 10px;
  background: var(--success-bg);
  border: 1px solid rgba(34, 197, 94, 0.2);
  color: var(--success);
  padding: 12px 16px;
  border-radius: var(--radius-md);
  margin-bottom: 24px;
  font-size: 0.875rem;
  animation: admin-users-fade-in 0.2s ease;
}

@keyframes admin-users-fade-in {
  from {
    opacity: 0;
    transform: translateY(-8px);
  }
  to {
    opacity: 1;
    transform: translateY(0);
  }
}

.admin-users-error {
  display: flex;
  align-items: center;
  gap: 10px;
  background: var(--error-bg);
  border: 1px solid rgba(239, 68, 68, 0.2);
  color: var(--error);
  padding: 12px 16px;
  border-radius: var(--radius-md);
  margin-bottom: 24px;
  font-size: 0.875rem;
}

.admin-users-error svg {
  flex-shrink: 0;
}

.admin-users-error span {
  flex: 1;
}

.admin-users-error-dismiss {
  background: transparent;
  border: none;
  padding: 4px;
  color: var(--error);
  cursor: pointer;
  opacity: 0.7;
  transition: opacity var(--transition-fast);
}

.admin-users-error-dismiss:hover {
  opacity: 1;
}

.admin-users-access-denied {
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  padding: 80px 24px;
  text-align: center;
}

.admin-users-access-denied-icon {
  color: var(--error);
  margin-bottom: 24px;
  opacity: 0.8;
}

.admin-users-access-denied h2 {
  font-size: 1.5rem;
  font-weight: 600;
  color: var(--text-primary);
  margin-bottom: 12px;
}

.admin-users-access-denied p {
  color: var(--text-tertiary);
  font-size: 0.9375rem;
  max-width: 400px;
}

.admin-users-create-form-card,
.admin-users-reset-password-card {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  padding: 24px;
  margin-bottom: 24px;
}

.admin-users-create-form-header,
.admin-users-reset-password-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 20px;
}

.admin-users-create-form-header h2,
.admin-users-reset-password-header h2 {
  font-size: 1.125rem;
  font-weight: 600;
  color: var(--text-primary);
}

.admin-users-create-form-close {
  background: transparent;
  border: none;
  padding: 4px;
  color: var(--text-tertiary);
  cursor: pointer;
  border-radius: var(--radius-sm);
  transition: all var(--transition-fast);
}

.admin-users-create-form-close:hover {
  background: var(--bg-hover);
  color: var(--text-primary);
}

.admin-users-reset-password-info {
  color: var(--text-secondary);
  font-size: 0.875rem;
  margin-bottom: 16px;
}

.admin-users-reset-password-info strong {
  color: var(--text-primary);
}

.admin-users-create-error {
  background: var(--error-bg);
  border: 1px solid rgba(239, 68, 68, 0.2);
  color: var(--error);
  padding: 10px 14px;
  border-radius: var(--radius-md);
  font-size: 0.8125rem;
  margin-bottom: 16px;
}

.admin-users-create-form,
.admin-users-reset-password-form {
  display: flex;
  flex-direction: column;
  gap: 16px;
}

.admin-users-form-group {
  display: flex;
  flex-direction: column;
  gap: 6px;
}

.admin-users-form-group label {
  font-size: 0.8125rem;
  font-weight: 500;
  color: var(--text-secondary);
}

.admin-users-form-group input[type="text"],
.admin-users-form-group input[type="password"],
.admin-users-form-group input[type="email"] {
  padding: 12px 14px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  color: var(--text-primary);
  transition: all var(--transition-fast);
}

.admin-users-form-group input::placeholder {
  color: var(--text-muted);
}

.admin-users-form-group input:hover:not(:disabled) {
  border-color: var(--border-secondary);
  background: var(--bg-elevated);
}

.admin-users-form-group input:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
  background: var(--bg-elevated);
}

.admin-users-form-group input:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

.admin-users-checkbox-group {
  flex-direction: row;
  align-items: center;
}

.admin-users-checkbox-label {
  display: flex;
  align-items: center;
  gap: 10px;
  cursor: pointer;
  font-size: 0.875rem;
  font-weight: 400;
  color: var(--text-secondary);
  user-select: none;
}

.admin-users-checkbox-label input[type="checkbox"] {
  position: absolute;
  opacity: 0;
  width: 0;
  height: 0;
}

.admin-users-checkbox-custom {
  width: 18px;
  height: 18px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-secondary);
  border-radius: var(--radius-sm);
  transition: all var(--transition-fast);
  position: relative;
}

.admin-users-checkbox-label input[type="checkbox"]:checked + .admin-users-checkbox-custom {
  background: var(--accent-primary);
  border-color: var(--accent-primary);
}

.admin-users-checkbox-label input[type="checkbox"]:checked + .admin-users-checkbox-custom::after {
  content: '';
  position: absolute;
  left: 5px;
  top: 2px;
  width: 5px;
  height: 9px;
  border: solid white;
  border-width: 0 2px 2px 0;
  transform: rotate(45deg);
}

.admin-users-checkbox-label input[type="checkbox"]:focus + .admin-users-checkbox-custom {
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
}

.admin-users-checkbox-label:hover .admin-users-checkbox-custom {
  border-color: var(--accent-primary);
}

.admin-users-form-actions {
  display: flex;
  justify-content: flex-end;
  gap: 12px;
  margin-top: 8px;
}

.admin-users-cancel-button {
|
||||||
|
padding: 10px 18px;
|
||||||
|
background: transparent;
|
||||||
|
border: 1px solid var(--border-primary);
|
||||||
|
border-radius: var(--radius-md);
|
||||||
|
font-size: 0.875rem;
|
||||||
|
font-weight: 500;
|
||||||
|
color: var(--text-secondary);
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all var(--transition-fast);
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-cancel-button:hover:not(:disabled) {
|
||||||
|
background: var(--bg-hover);
|
||||||
|
border-color: var(--border-secondary);
|
||||||
|
color: var(--text-primary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-cancel-button:disabled {
|
||||||
|
opacity: 0.5;
|
||||||
|
cursor: not-allowed;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-submit-button {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
gap: 8px;
|
||||||
|
padding: 10px 18px;
|
||||||
|
background: var(--accent-gradient);
|
||||||
|
border: none;
|
||||||
|
border-radius: var(--radius-md);
|
||||||
|
font-size: 0.875rem;
|
||||||
|
font-weight: 500;
|
||||||
|
color: white;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all var(--transition-fast);
|
||||||
|
min-width: 120px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-submit-button:hover:not(:disabled) {
|
||||||
|
transform: translateY(-1px);
|
||||||
|
box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-submit-button:disabled {
|
||||||
|
opacity: 0.5;
|
||||||
|
cursor: not-allowed;
|
||||||
|
transform: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-button-spinner {
|
||||||
|
width: 14px;
|
||||||
|
height: 14px;
|
||||||
|
border: 2px solid rgba(255, 255, 255, 0.3);
|
||||||
|
border-top-color: white;
|
||||||
|
border-radius: 50%;
|
||||||
|
animation: admin-users-spin 0.6s linear infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes admin-users-spin {
|
||||||
|
to {
|
||||||
|
transform: rotate(360deg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-list-container {
|
||||||
|
background: var(--bg-secondary);
|
||||||
|
border: 1px solid var(--border-primary);
|
||||||
|
border-radius: var(--radius-lg);
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-list-loading,
|
||||||
|
.admin-users-loading {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
gap: 12px;
|
||||||
|
padding: 64px 24px;
|
||||||
|
color: var(--text-tertiary);
|
||||||
|
font-size: 0.9375rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-spinner {
|
||||||
|
width: 20px;
|
||||||
|
height: 20px;
|
||||||
|
border: 2px solid var(--border-secondary);
|
||||||
|
border-top-color: var(--accent-primary);
|
||||||
|
border-radius: 50%;
|
||||||
|
animation: admin-users-spin 0.6s linear infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-empty {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
padding: 64px 24px;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-empty-icon {
|
||||||
|
color: var(--text-muted);
|
||||||
|
margin-bottom: 16px;
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-empty h3 {
|
||||||
|
font-size: 1.125rem;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--text-primary);
|
||||||
|
margin-bottom: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-empty p {
|
||||||
|
color: var(--text-tertiary);
|
||||||
|
font-size: 0.875rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-list {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-list-header {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: 2fr 100px 140px 140px 1fr;
|
||||||
|
gap: 16px;
|
||||||
|
padding: 14px 20px;
|
||||||
|
background: var(--bg-tertiary);
|
||||||
|
border-bottom: 1px solid var(--border-primary);
|
||||||
|
font-size: 0.75rem;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--text-tertiary);
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.04em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-list-item {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: 2fr 100px 140px 140px 1fr;
|
||||||
|
gap: 16px;
|
||||||
|
padding: 16px 20px;
|
||||||
|
align-items: center;
|
||||||
|
border-bottom: 1px solid var(--border-primary);
|
||||||
|
transition: background var(--transition-fast);
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-list-item:last-child {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-list-item:hover {
|
||||||
|
background: var(--bg-tertiary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-list-item.admin-users-inactive {
|
||||||
|
opacity: 0.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-col-user {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-item-avatar {
|
||||||
|
width: 36px;
|
||||||
|
height: 36px;
|
||||||
|
border-radius: 50%;
|
||||||
|
background: var(--accent-gradient);
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
color: white;
|
||||||
|
font-weight: 600;
|
||||||
|
font-size: 0.875rem;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-item-info {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
min-width: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-item-username {
|
||||||
|
font-weight: 500;
|
||||||
|
color: var(--text-primary);
|
||||||
|
font-size: 0.9375rem;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-admin-badge {
|
||||||
|
display: inline-flex;
|
||||||
|
padding: 2px 8px;
|
||||||
|
background: linear-gradient(135deg, rgba(16, 185, 129, 0.15) 0%, rgba(5, 150, 105, 0.1) 100%);
|
||||||
|
border: 1px solid rgba(16, 185, 129, 0.3);
|
||||||
|
border-radius: 20px;
|
||||||
|
font-size: 0.6875rem;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--accent-primary);
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.03em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-item-email {
|
||||||
|
color: var(--text-tertiary);
|
||||||
|
font-size: 0.8125rem;
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-col-status {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-status-badge {
|
||||||
|
display: inline-flex;
|
||||||
|
padding: 4px 10px;
|
||||||
|
border-radius: 20px;
|
||||||
|
font-size: 0.75rem;
|
||||||
|
font-weight: 500;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-status-badge.active {
|
||||||
|
background: var(--success-bg);
|
||||||
|
color: var(--success);
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-status-badge.inactive {
|
||||||
|
background: var(--error-bg);
|
||||||
|
color: var(--error);
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-col-created,
|
||||||
|
.admin-users-col-login {
|
||||||
|
color: var(--text-secondary);
|
||||||
|
font-size: 0.8125rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-col-actions {
|
||||||
|
display: flex;
|
||||||
|
justify-content: flex-end;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-actions-menu {
|
||||||
|
display: flex;
|
||||||
|
gap: 6px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-action-button {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 4px;
|
||||||
|
padding: 6px 10px;
|
||||||
|
background: var(--bg-tertiary);
|
||||||
|
border: 1px solid var(--border-primary);
|
||||||
|
border-radius: var(--radius-sm);
|
||||||
|
font-size: 0.75rem;
|
||||||
|
font-weight: 500;
|
||||||
|
color: var(--text-secondary);
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all var(--transition-fast);
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-action-button:hover:not(:disabled) {
|
||||||
|
background: var(--bg-hover);
|
||||||
|
border-color: var(--border-secondary);
|
||||||
|
color: var(--text-primary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-action-button:disabled {
|
||||||
|
opacity: 0.4;
|
||||||
|
cursor: not-allowed;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-action-spinner {
|
||||||
|
width: 12px;
|
||||||
|
height: 12px;
|
||||||
|
border: 2px solid var(--border-secondary);
|
||||||
|
border-top-color: var(--accent-primary);
|
||||||
|
border-radius: 50%;
|
||||||
|
animation: admin-users-spin 0.6s linear infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (max-width: 1024px) {
|
||||||
|
.admin-users-list-header {
|
||||||
|
grid-template-columns: 2fr 100px 1fr;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-list-item {
|
||||||
|
grid-template-columns: 2fr 100px 1fr;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-col-created,
|
||||||
|
.admin-users-col-login {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-list-header .admin-users-col-created,
|
||||||
|
.admin-users-list-header .admin-users-col-login {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.admin-users-header {
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: stretch;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-create-button {
|
||||||
|
align-self: flex-start;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-list-header {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-list-item {
|
||||||
|
grid-template-columns: 1fr;
|
||||||
|
gap: 12px;
|
||||||
|
padding: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-col-user {
|
||||||
|
order: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-col-status {
|
||||||
|
order: 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-col-actions {
|
||||||
|
order: 3;
|
||||||
|
justify-content: flex-start;
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-users-actions-menu {
|
||||||
|
flex-wrap: wrap;
|
||||||
|
}
|
||||||
|
}
|
||||||
frontend/src/pages/AdminUsersPage.tsx (new file, 529 lines)
@@ -0,0 +1,529 @@
import { useState, useEffect } from 'react';
import { useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { listUsers, createUser, updateUser, resetUserPassword } from '../api';
import { AdminUser } from '../types';
import './AdminUsersPage.css';

function AdminUsersPage() {
  const { user, loading: authLoading } = useAuth();
  const navigate = useNavigate();

  const [users, setUsers] = useState<AdminUser[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);

  const [showCreateForm, setShowCreateForm] = useState(false);
  const [createUsername, setCreateUsername] = useState('');
  const [createPassword, setCreatePassword] = useState('');
  const [createEmail, setCreateEmail] = useState('');
  const [createIsAdmin, setCreateIsAdmin] = useState(false);
  const [isCreating, setIsCreating] = useState(false);
  const [createError, setCreateError] = useState<string | null>(null);

  const [resetPasswordUsername, setResetPasswordUsername] = useState<string | null>(null);
  const [newPassword, setNewPassword] = useState('');
  const [isResetting, setIsResetting] = useState(false);

  const [togglingUser, setTogglingUser] = useState<string | null>(null);

  const [successMessage, setSuccessMessage] = useState<string | null>(null);

  useEffect(() => {
    if (!authLoading && !user) {
      navigate('/login', { state: { from: '/admin/users' } });
    }
  }, [user, authLoading, navigate]);

  useEffect(() => {
    if (user && user.is_admin) {
      loadUsers();
    }
  }, [user]);

  useEffect(() => {
    if (successMessage) {
      const timer = setTimeout(() => setSuccessMessage(null), 3000);
      return () => clearTimeout(timer);
    }
  }, [successMessage]);

  async function loadUsers() {
    setLoading(true);
    setError(null);
    try {
      const data = await listUsers();
      setUsers(data);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load users');
    } finally {
      setLoading(false);
    }
  }

  async function handleCreate(e: React.FormEvent) {
    e.preventDefault();
    if (!createUsername.trim()) {
      setCreateError('Username is required');
      return;
    }
    if (!createPassword.trim()) {
      setCreateError('Password is required');
      return;
    }

    setIsCreating(true);
    setCreateError(null);
    try {
      await createUser({
        username: createUsername.trim(),
        password: createPassword,
        email: createEmail.trim() || undefined,
        is_admin: createIsAdmin,
      });
      setShowCreateForm(false);
      setCreateUsername('');
      setCreatePassword('');
      setCreateEmail('');
      setCreateIsAdmin(false);
      setSuccessMessage('User created successfully');
      await loadUsers();
    } catch (err) {
      setCreateError(err instanceof Error ? err.message : 'Failed to create user');
    } finally {
      setIsCreating(false);
    }
  }

  async function handleToggleAdmin(targetUser: AdminUser) {
    setTogglingUser(targetUser.username);
    try {
      await updateUser(targetUser.username, { is_admin: !targetUser.is_admin });
      setSuccessMessage(`${targetUser.username} is ${!targetUser.is_admin ? 'now' : 'no longer'} an admin`);
      await loadUsers();
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to update user');
    } finally {
      setTogglingUser(null);
    }
  }

  async function handleToggleActive(targetUser: AdminUser) {
    setTogglingUser(targetUser.username);
    try {
      await updateUser(targetUser.username, { is_active: !targetUser.is_active });
      setSuccessMessage(`${targetUser.username} has been ${!targetUser.is_active ? 'enabled' : 'disabled'}`);
      await loadUsers();
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to update user');
    } finally {
      setTogglingUser(null);
    }
  }

  async function handleResetPassword(e: React.FormEvent) {
    e.preventDefault();
    if (!resetPasswordUsername || !newPassword.trim()) {
      return;
    }

    setIsResetting(true);
    try {
      await resetUserPassword(resetPasswordUsername, newPassword);
      setResetPasswordUsername(null);
      setNewPassword('');
      setSuccessMessage(`Password reset for ${resetPasswordUsername}`);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to reset password');
    } finally {
      setIsResetting(false);
    }
  }

  function formatDate(dateString: string | null): string {
    if (!dateString) return 'Never';
    return new Date(dateString).toLocaleDateString('en-US', {
      year: 'numeric',
      month: 'short',
      day: 'numeric',
      hour: '2-digit',
      minute: '2-digit',
    });
  }

  if (authLoading) {
    return (
      <div className="admin-users-page">
        <div className="admin-users-loading">
          <div className="admin-users-spinner"></div>
          <span>Loading...</span>
        </div>
      </div>
    );
  }

  if (!user) {
    return null;
  }

  if (!user.is_admin) {
    return (
      <div className="admin-users-page">
        <div className="admin-users-access-denied">
          <div className="admin-users-access-denied-icon">
            <svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
              <circle cx="12" cy="12" r="10"/>
              <line x1="4.93" y1="4.93" x2="19.07" y2="19.07"/>
            </svg>
          </div>
          <h2>Access Denied</h2>
          <p>You do not have permission to access this page. Admin privileges are required.</p>
        </div>
      </div>
    );
  }

  return (
    <div className="admin-users-page">
      <div className="admin-users-header">
        <div className="admin-users-header-content">
          <h1>User Management</h1>
          <p className="admin-users-subtitle">
            Manage user accounts and permissions
          </p>
        </div>
        <button
          className="admin-users-create-button"
          onClick={() => setShowCreateForm(true)}
          disabled={showCreateForm}
        >
          <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <line x1="12" y1="5" x2="12" y2="19"/>
            <line x1="5" y1="12" x2="19" y2="12"/>
          </svg>
          Create User
        </button>
      </div>

      {successMessage && (
        <div className="admin-users-success">
          <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <path d="M22 11.08V12a10 10 0 1 1-5.93-9.14"/>
            <polyline points="22 4 12 14.01 9 11.01"/>
          </svg>
          <span>{successMessage}</span>
        </div>
      )}

      {error && (
        <div className="admin-users-error">
          <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <circle cx="12" cy="12" r="10"/>
            <line x1="12" y1="8" x2="12" y2="12"/>
            <line x1="12" y1="16" x2="12.01" y2="16"/>
          </svg>
          <span>{error}</span>
          <button onClick={() => setError(null)} className="admin-users-error-dismiss">
            <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
              <line x1="18" y1="6" x2="6" y2="18"/>
              <line x1="6" y1="6" x2="18" y2="18"/>
            </svg>
          </button>
        </div>
      )}

      {showCreateForm && (
        <div className="admin-users-create-form-card">
          <div className="admin-users-create-form-header">
            <h2>Create New User</h2>
            <button
              className="admin-users-create-form-close"
              onClick={() => {
                setShowCreateForm(false);
                setCreateUsername('');
                setCreatePassword('');
                setCreateEmail('');
                setCreateIsAdmin(false);
                setCreateError(null);
              }}
            >
              <svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <line x1="18" y1="6" x2="6" y2="18"/>
                <line x1="6" y1="6" x2="18" y2="18"/>
              </svg>
            </button>
          </div>

          {createError && (
            <div className="admin-users-create-error">
              {createError}
            </div>
          )}

          <form onSubmit={handleCreate} className="admin-users-create-form">
            <div className="admin-users-form-group">
              <label htmlFor="username">Username</label>
              <input
                id="username"
                type="text"
                value={createUsername}
                onChange={(e) => setCreateUsername(e.target.value)}
                placeholder="Enter username"
                autoFocus
                disabled={isCreating}
              />
            </div>

            <div className="admin-users-form-group">
              <label htmlFor="password">Password</label>
              <input
                id="password"
                type="password"
                value={createPassword}
                onChange={(e) => setCreatePassword(e.target.value)}
                placeholder="Enter password"
                disabled={isCreating}
              />
            </div>

            <div className="admin-users-form-group">
              <label htmlFor="email">Email (optional)</label>
              <input
                id="email"
                type="email"
                value={createEmail}
                onChange={(e) => setCreateEmail(e.target.value)}
                placeholder="user@example.com"
                disabled={isCreating}
              />
            </div>

            <div className="admin-users-form-group admin-users-checkbox-group">
              <label className="admin-users-checkbox-label">
                <input
                  type="checkbox"
                  checked={createIsAdmin}
                  onChange={(e) => setCreateIsAdmin(e.target.checked)}
                  disabled={isCreating}
                />
                <span className="admin-users-checkbox-custom"></span>
                Grant admin privileges
              </label>
            </div>

            <div className="admin-users-form-actions">
              <button
                type="button"
                className="admin-users-cancel-button"
                onClick={() => {
                  setShowCreateForm(false);
                  setCreateUsername('');
                  setCreatePassword('');
                  setCreateEmail('');
                  setCreateIsAdmin(false);
                  setCreateError(null);
                }}
                disabled={isCreating}
              >
                Cancel
              </button>
              <button
                type="submit"
                className="admin-users-submit-button"
                disabled={isCreating || !createUsername.trim() || !createPassword.trim()}
              >
                {isCreating ? (
                  <>
                    <span className="admin-users-button-spinner"></span>
                    Creating...
                  </>
                ) : (
                  'Create User'
                )}
              </button>
            </div>
          </form>
        </div>
      )}

      {resetPasswordUsername && (
        <div className="admin-users-reset-password-card">
          <div className="admin-users-reset-password-header">
            <h2>Reset Password</h2>
            <button
              className="admin-users-create-form-close"
              onClick={() => {
                setResetPasswordUsername(null);
                setNewPassword('');
              }}
            >
              <svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <line x1="18" y1="6" x2="6" y2="18"/>
                <line x1="6" y1="6" x2="18" y2="18"/>
              </svg>
            </button>
          </div>
          <p className="admin-users-reset-password-info">
            Set a new password for <strong>{resetPasswordUsername}</strong>
          </p>
          <form onSubmit={handleResetPassword} className="admin-users-reset-password-form">
            <div className="admin-users-form-group">
              <label htmlFor="new-password">New Password</label>
              <input
                id="new-password"
                type="password"
                value={newPassword}
                onChange={(e) => setNewPassword(e.target.value)}
                placeholder="Enter new password"
                autoFocus
                disabled={isResetting}
              />
            </div>
            <div className="admin-users-form-actions">
              <button
                type="button"
                className="admin-users-cancel-button"
                onClick={() => {
                  setResetPasswordUsername(null);
                  setNewPassword('');
                }}
                disabled={isResetting}
              >
                Cancel
              </button>
              <button
                type="submit"
                className="admin-users-submit-button"
                disabled={isResetting || !newPassword.trim()}
              >
                {isResetting ? (
                  <>
                    <span className="admin-users-button-spinner"></span>
                    Resetting...
                  </>
                ) : (
                  'Reset Password'
                )}
              </button>
            </div>
          </form>
        </div>
      )}

      <div className="admin-users-list-container">
        {loading ? (
          <div className="admin-users-list-loading">
            <div className="admin-users-spinner"></div>
            <span>Loading users...</span>
          </div>
        ) : users.length === 0 ? (
          <div className="admin-users-empty">
            <div className="admin-users-empty-icon">
              <svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
                <path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
                <circle cx="9" cy="7" r="4"/>
                <path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
                <path d="M16 3.13a4 4 0 0 1 0 7.75"/>
              </svg>
            </div>
            <h3>No Users</h3>
            <p>Create a user to get started</p>
          </div>
        ) : (
          <div className="admin-users-list">
            <div className="admin-users-list-header">
              <span className="admin-users-col-user">User</span>
              <span className="admin-users-col-status">Status</span>
              <span className="admin-users-col-created">Created</span>
              <span className="admin-users-col-login">Last Login</span>
              <span className="admin-users-col-actions">Actions</span>
            </div>
            {users.map((u) => (
              <div key={u.id} className={`admin-users-list-item ${!u.is_active ? 'admin-users-inactive' : ''}`}>
                <div className="admin-users-col-user">
                  <div className="admin-users-item-avatar">
                    {u.username.charAt(0).toUpperCase()}
                  </div>
                  <div className="admin-users-item-info">
                    <div className="admin-users-item-username">
                      {u.username}
                      {u.is_admin && <span className="admin-users-admin-badge">Admin</span>}
                    </div>
                    {u.email && (
                      <div className="admin-users-item-email">{u.email}</div>
                    )}
                  </div>
                </div>
                <div className="admin-users-col-status">
                  <span className={`admin-users-status-badge ${u.is_active ? 'active' : 'inactive'}`}>
                    {u.is_active ? 'Active' : 'Disabled'}
                  </span>
                </div>
                <div className="admin-users-col-created">
                  {formatDate(u.created_at)}
                </div>
                <div className="admin-users-col-login">
                  {formatDate(u.last_login)}
                </div>
                <div className="admin-users-col-actions">
                  <div className="admin-users-actions-menu">
                    <button
                      className="admin-users-action-button"
                      onClick={() => handleToggleAdmin(u)}
                      disabled={togglingUser === u.username || u.username === user.username}
                      title={u.is_admin ? 'Remove admin' : 'Make admin'}
                    >
                      {togglingUser === u.username ? (
                        <span className="admin-users-action-spinner"></span>
                      ) : (
                        <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                          <path d="M12 22s8-4 8-10V5l-8-3-8 3v7c0 6 8 10 8 10z"/>
                        </svg>
                      )}
                      {u.is_admin ? 'Revoke' : 'Admin'}
                    </button>
                    <button
                      className="admin-users-action-button"
                      onClick={() => handleToggleActive(u)}
                      disabled={togglingUser === u.username || u.username === user.username}
                      title={u.is_active ? 'Disable user' : 'Enable user'}
                    >
                      {togglingUser === u.username ? (
                        <span className="admin-users-action-spinner"></span>
                      ) : u.is_active ? (
                        <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                          <circle cx="12" cy="12" r="10"/>
                          <line x1="4.93" y1="4.93" x2="19.07" y2="19.07"/>
                        </svg>
                      ) : (
                        <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                          <path d="M22 11.08V12a10 10 0 1 1-5.93-9.14"/>
                          <polyline points="22 4 12 14.01 9 11.01"/>
                        </svg>
                      )}
                      {u.is_active ? 'Disable' : 'Enable'}
                    </button>
                    <button
                      className="admin-users-action-button"
                      onClick={() => setResetPasswordUsername(u.username)}
                      disabled={togglingUser === u.username}
                      title="Reset password"
                    >
                      <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                        <rect x="3" y="11" width="18" height="11" rx="2" ry="2"/>
                        <path d="M7 11V7a5 5 0 0 1 10 0v4"/>
                      </svg>
                      Reset
                    </button>
                  </div>
                </div>
              </div>
            ))}
          </div>
        )}
      </div>
    </div>
  );
}

export default AdminUsersPage;
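[Review note] The helpers this page imports from '../api' (listUsers, createUser, updateUser, resetUserPassword) and the AdminUser type from '../types' are defined outside this diff. A minimal sketch of the shapes the call sites above imply, offered only as review context: the endpoint paths, the request() helper, and the exact AdminUser field list are assumptions inferred from usage, not code from this MR.

// Hypothetical excerpt, signatures inferred from AdminUsersPage.tsx call sites.
// AdminUser would actually live in frontend/src/types.ts per the import above.
export interface AdminUser {
  id: number;                 // used as the React list key
  username: string;
  email: string | null;
  is_admin: boolean;
  is_active: boolean;
  created_at: string;         // ISO timestamp, rendered via formatDate()
  last_login: string | null;  // null renders as 'Never'
}

// Shared fetch wrapper (assumed); surfaces server errors as Error so the
// `err instanceof Error ? err.message : ...` pattern above works.
async function request<T>(path: string, init?: RequestInit): Promise<T> {
  const res = await fetch(path, {
    credentials: 'include',
    headers: { 'Content-Type': 'application/json' },
    ...init,
  });
  if (!res.ok) {
    throw new Error((await res.text()) || `Request failed: ${res.status}`);
  }
  // Tolerate empty bodies (e.g. a 204 from the password-reset endpoint)
  const text = await res.text();
  return (text ? JSON.parse(text) : undefined) as T;
}

export const listUsers = () => request<AdminUser[]>('/api/admin/users');

export const createUser = (body: {
  username: string;
  password: string;
  email?: string;
  is_admin: boolean;
}) => request<AdminUser>('/api/admin/users', { method: 'POST', body: JSON.stringify(body) });

export const updateUser = (
  username: string,
  patch: Partial<Pick<AdminUser, 'is_admin' | 'is_active'>>,
) => request<AdminUser>(`/api/admin/users/${username}`, { method: 'PATCH', body: JSON.stringify(patch) });

export const resetUserPassword = (username: string, newPassword: string) =>
  request<void>(`/api/admin/users/${username}/password`, {
    method: 'POST',
    body: JSON.stringify({ new_password: newPassword }),
  });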
frontend/src/pages/ChangePasswordPage.tsx (new file, 156 lines)
@@ -0,0 +1,156 @@
import { useState } from 'react';
import { useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { changePassword } from '../api';
import './LoginPage.css';

function ChangePasswordPage() {
  const [currentPassword, setCurrentPassword] = useState('');
  const [newPassword, setNewPassword] = useState('');
  const [confirmPassword, setConfirmPassword] = useState('');
  const [isSubmitting, setIsSubmitting] = useState(false);
  const [error, setError] = useState<string | null>(null);

  const { user, refreshUser } = useAuth();
  const navigate = useNavigate();

  async function handleSubmit(e: React.FormEvent) {
    e.preventDefault();

    if (!currentPassword || !newPassword || !confirmPassword) {
      setError('Please fill in all fields');
      return;
    }

    if (newPassword !== confirmPassword) {
      setError('New passwords do not match');
      return;
    }

    if (newPassword.length < 8) {
      setError('New password must be at least 8 characters');
      return;
    }

    if (newPassword === currentPassword) {
      setError('New password must be different from current password');
      return;
    }

    setIsSubmitting(true);
    setError(null);

    try {
      await changePassword(currentPassword, newPassword);
      // Refresh user to clear must_change_password flag
      await refreshUser();
      navigate('/', { replace: true });
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to change password');
    } finally {
      setIsSubmitting(false);
    }
  }

  return (
    <div className="login-page">
      <div className="login-container">
        <div className="login-card">
          <div className="login-header">
            <div className="login-logo">
              <svg width="48" height="48" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
                <path d="M6 14 Q6 8 3 8 Q6 4 6 4 Q6 4 9 8 Q6 8 6 14" fill="currentColor" opacity="0.6"/>
                <rect x="5.25" y="13" width="1.5" height="4" fill="currentColor" opacity="0.6"/>
                <path d="M12 12 Q12 5 8 5 Q12 1 12 1 Q12 1 16 5 Q12 5 12 12" fill="currentColor"/>
                <rect x="11.25" y="11" width="1.5" height="5" fill="currentColor"/>
                <path d="M18 14 Q18 8 15 8 Q18 4 18 4 Q18 4 21 8 Q18 8 18 14" fill="currentColor" opacity="0.6"/>
                <rect x="17.25" y="13" width="1.5" height="4" fill="currentColor" opacity="0.6"/>
                <ellipse cx="12" cy="19" rx="9" ry="1.5" fill="currentColor" opacity="0.3"/>
              </svg>
            </div>
            <h1>Change Password</h1>
            {user?.must_change_password && (
              <p className="login-subtitle login-warning">
                You must change your password before continuing
              </p>
            )}
          </div>

          {error && (
            <div className="login-error">
              <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <circle cx="12" cy="12" r="10"/>
                <line x1="12" y1="8" x2="12" y2="12"/>
                <line x1="12" y1="16" x2="12.01" y2="16"/>
              </svg>
              <span>{error}</span>
            </div>
          )}

          <form onSubmit={handleSubmit} className="login-form">
            <div className="login-form-group">
              <label htmlFor="currentPassword">Current Password</label>
              <input
                id="currentPassword"
                type="password"
                value={currentPassword}
                onChange={(e) => setCurrentPassword(e.target.value)}
                placeholder="Enter current password"
                autoComplete="current-password"
                autoFocus
                disabled={isSubmitting}
              />
            </div>

            <div className="login-form-group">
              <label htmlFor="newPassword">New Password</label>
              <input
                id="newPassword"
                type="password"
                value={newPassword}
                onChange={(e) => setNewPassword(e.target.value)}
                placeholder="Enter new password (min 8 characters)"
                autoComplete="new-password"
                disabled={isSubmitting}
              />
            </div>

            <div className="login-form-group">
              <label htmlFor="confirmPassword">Confirm New Password</label>
              <input
                id="confirmPassword"
                type="password"
                value={confirmPassword}
                onChange={(e) => setConfirmPassword(e.target.value)}
                placeholder="Confirm new password"
                autoComplete="new-password"
                disabled={isSubmitting}
              />
            </div>

            <button
              type="submit"
              className="login-submit"
              disabled={isSubmitting}
            >
              {isSubmitting ? (
                <>
                  <span className="login-spinner"></span>
                  Changing password...
                </>
              ) : (
                'Change Password'
              )}
            </button>
          </form>
        </div>

        <div className="login-footer">
          <p>Artifact storage and management system</p>
        </div>
      </div>
    </div>
  );
}

export default ChangePasswordPage;
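[Review note] All three new pages lean on useAuth() from '../contexts/AuthContext', which is not part of this diff. Sketched below is the context shape the call sites imply; field and method names come straight from usage, but the User fields beyond username/is_admin/must_change_password, and the login() signature (it is destructured but never called in the hunks shown), are assumptions.

// Hypothetical type-level excerpt of frontend/src/contexts/AuthContext.tsx,
// inferred from how AdminUsersPage, ChangePasswordPage, and LoginPage use it.
export interface AuthUser {
  username: string;
  is_admin: boolean;
  must_change_password?: boolean;   // checked by ChangePasswordPage
}

export interface AuthContextValue {
  user: AuthUser | null;            // null while logged out
  loading: boolean;                 // true until the initial session check resolves
  login: (username: string, password: string) => Promise<void>;  // signature assumed
  refreshUser: () => Promise<void>; // re-fetches the session, e.g. after a password change
}

export declare function useAuth(): AuthContextValue;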
frontend/src/pages/Home.css (modified)
@@ -1,6 +1,6 @@
 /* Page Layout */
 .home {
-  max-width: 1000px;
+  max-width: 1200px;
   margin: 0 auto;
 }
 
@@ -358,6 +358,12 @@
   gap: 4px;
 }
 
+.page-header__actions {
+  display: flex;
+  align-items: center;
+  gap: 12px;
+}
+
 /* Package card styles */
 .package-card__header {
   display: flex;
@@ -474,3 +480,16 @@
   margin-top: 4px;
   font-size: 0.9375rem;
 }
+
+/* Lock icon for private projects */
+.lock-icon {
+  color: var(--warning);
+  flex-shrink: 0;
+}
+
+/* Project badges container */
+.project-badges {
+  display: flex;
+  gap: 6px;
+  flex-wrap: wrap;
+}
frontend/src/pages/Home.tsx (modified)
@@ -1,19 +1,24 @@
 import { useState, useEffect, useCallback } from 'react';
-import { Link, useSearchParams } from 'react-router-dom';
+import { Link, useSearchParams, useNavigate } from 'react-router-dom';
 import { Project, PaginatedResponse } from '../types';
 import { listProjects, createProject } from '../api';
 import { Badge } from '../components/Badge';
-import { SortDropdown, SortOption } from '../components/SortDropdown';
+import { DataTable } from '../components/DataTable';
 import { FilterDropdown, FilterOption } from '../components/FilterDropdown';
 import { FilterChip, FilterChipGroup } from '../components/FilterChip';
 import { Pagination } from '../components/Pagination';
+import { useAuth } from '../contexts/AuthContext';
 import './Home.css';
 
-const SORT_OPTIONS: SortOption[] = [
-  { value: 'name', label: 'Name' },
-  { value: 'created_at', label: 'Created' },
-  { value: 'updated_at', label: 'Updated' },
-];
+// Lock icon SVG component
+function LockIcon() {
+  return (
+    <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className="lock-icon">
+      <rect x="3" y="11" width="18" height="11" rx="2" ry="2" />
+      <path d="M7 11V7a5 5 0 0 1 10 0v4" />
+    </svg>
+  );
+}
 
 const VISIBILITY_OPTIONS: FilterOption[] = [
   { value: '', label: 'All Projects' },
@@ -23,6 +28,8 @@ const VISIBILITY_OPTIONS: FilterOption[] = [
 
 function Home() {
   const [searchParams, setSearchParams] = useSearchParams();
+  const navigate = useNavigate();
+  const { user } = useAuth();
 
   const [projectsData, setProjectsData] = useState<PaginatedResponse<Project> | null>(null);
   const [loading, setLoading] = useState(true);
@@ -89,8 +96,10 @@ function Home() {
     }
   }
 
-  const handleSortChange = (newSort: string, newOrder: 'asc' | 'desc') => {
-    updateParams({ sort: newSort, order: newOrder, page: '1' });
+  const handleSortChange = (columnKey: string) => {
+    // Toggle order if clicking the same column, otherwise default to asc
+    const newOrder = columnKey === sort ? (order === 'asc' ? 'desc' : 'asc') : 'asc';
+    updateParams({ sort: columnKey, order: newOrder, page: '1' });
   };
 
   const handleVisibilityChange = (value: string) => {
@@ -117,9 +126,15 @@ function Home() {
     <div className="home">
       <div className="page-header">
         <h1>Projects</h1>
-        <button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
-          {showForm ? 'Cancel' : '+ New Project'}
-        </button>
+        {user ? (
+          <button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
+            {showForm ? 'Cancel' : '+ New Project'}
+          </button>
+        ) : (
+          <Link to="/login" className="btn btn-secondary">
+            Login to create projects
+          </Link>
+        )}
       </div>
 
       {error && <div className="error-message">{error}</div>}
@@ -171,7 +186,6 @@ function Home() {
           value={visibility}
           onChange={handleVisibilityChange}
         />
-        <SortDropdown options={SORT_OPTIONS} value={sort} order={order} onChange={handleSortChange} />
       </div>
 
       {hasActiveFilters && (
@@ -186,49 +200,106 @@ function Home() {
         </FilterChipGroup>
       )}
 
-      {projects.length === 0 ? (
-        <div className="empty-state">
-          {hasActiveFilters ? (
-            <p>No projects match your filters. Try adjusting your search.</p>
-          ) : (
-            <p>No projects yet. Create your first project to get started!</p>
-          )}
-        </div>
-      ) : (
-        <>
-          <div className="project-grid">
-            {projects.map((project) => (
-              <Link to={`/project/${project.name}`} key={project.id} className="project-card card">
-                <h3>{project.name}</h3>
-                {project.description && <p>{project.description}</p>}
-                <div className="project-meta">
-                  <Badge variant={project.is_public ? 'public' : 'private'}>
-                    {project.is_public ? 'Public' : 'Private'}
-                  </Badge>
-                  <div className="project-meta__dates">
-                    <span className="date">Created {new Date(project.created_at).toLocaleDateString()}</span>
-                    {project.updated_at !== project.created_at && (
-                      <span className="date">Updated {new Date(project.updated_at).toLocaleDateString()}</span>
-                    )}
-                  </div>
-                </div>
-                <div className="project-meta__owner">
-                  <span className="owner">by {project.created_by}</span>
-                </div>
-              </Link>
-            ))}
-          </div>
+      <div className="data-table--responsive">
+        <DataTable
+          data={projects}
+          keyExtractor={(project) => project.id}
+          onRowClick={(project) => navigate(`/project/${project.name}`)}
+          onSort={handleSortChange}
+          sortKey={sort}
+          sortOrder={order}
+          emptyMessage={
+            hasActiveFilters
+              ? 'No projects match your filters. Try adjusting your search.'
+              : 'No projects yet. Create your first project to get started!'
+          }
+          columns={[
+            {
+              key: 'name',
+              header: 'Name',
+              sortable: true,
+              render: (project) => (
+                <span className="cell-name">
+                  {!project.is_public && <LockIcon />}
+                  {project.name}
+                </span>
+              ),
+            },
+            {
+              key: 'description',
+              header: 'Description',
+              className: 'cell-description',
+              render: (project) => project.description || '—',
+            },
+            {
+              key: 'visibility',
+              header: 'Visibility',
+              render: (project) => (
+                <Badge variant={project.is_public ? 'public' : 'private'}>
+                  {project.is_public ? 'Public' : 'Private'}
+                </Badge>
+              ),
+            },
+            {
+              key: 'created_by',
+              header: 'Owner',
+              className: 'cell-owner',
+              render: (project) => project.created_by,
+            },
+            ...(user
+              ? [
+                  {
+                    key: 'access_level',
+                    header: 'Access',
+                    render: (project: Project) =>
+                      project.access_level ? (
+                        <Badge
+                          variant={
+                            project.is_owner
+                              ? 'success'
+                              : project.access_level === 'admin'
+                                ? 'success'
+                                : project.access_level === 'write'
+                                  ? 'info'
+                                  : 'default'
+                          }
+                        >
+                          {project.is_owner
+                            ? 'Owner'
+                            : project.access_level.charAt(0).toUpperCase() + project.access_level.slice(1)}
+                        </Badge>
+                      ) : (
+                        '—'
+                      ),
+                  },
+                ]
+              : []),
+            {
+              key: 'created_at',
+              header: 'Created',
+              sortable: true,
+              className: 'cell-date',
+              render: (project) => new Date(project.created_at).toLocaleDateString(),
+            },
+            {
+              key: 'updated_at',
+              header: 'Updated',
+              sortable: true,
+              className: 'cell-date',
+              render: (project) => new Date(project.updated_at).toLocaleDateString(),
+            },
+          ]}
+        />
+      </div>
 
       {pagination && pagination.total_pages > 1 && (
         <Pagination
           page={pagination.page}
           totalPages={pagination.total_pages}
           total={pagination.total}
           limit={pagination.limit}
           onPageChange={handlePageChange}
         />
-      )}
-      </>
       )}
     </div>
   );
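[Review note] The DataTable component the new Home.tsx renders is defined in frontend/src/components/DataTable.tsx, outside the hunks shown here. The generic prop surface its call site implies, sketched for review: prop names are taken directly from usage above, while optionality and the column shape are assumptions.

// Hypothetical signature of the DataTable component, inferred from the call site.
import { ReactNode } from 'react';

export interface DataTableColumn<T> {
  key: string;              // also the sort key passed back through onSort
  header: string;
  sortable?: boolean;       // only sortable columns get a clickable header
  className?: string;
  render: (row: T) => ReactNode;
}

export interface DataTableProps<T> {
  data: T[];
  columns: DataTableColumn<T>[];
  keyExtractor: (row: T) => string | number;
  onRowClick?: (row: T) => void;
  onSort?: (columnKey: string) => void;   // Home.tsx toggles asc/desc itself
  sortKey?: string;
  sortOrder?: 'asc' | 'desc';
  emptyMessage?: string;    // replaces the old empty-state markup
}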
frontend/src/pages/LoginPage.css (new file, 292 lines)
@@ -0,0 +1,292 @@
/* Login Page - Full viewport centered layout */
.login-page {
  min-height: 100vh;
  display: flex;
  align-items: center;
  justify-content: center;
  background: var(--bg-primary);
  padding: 24px;
  position: relative;
  overflow: hidden;
}

/* Subtle background pattern */
.login-page::before {
  content: '';
  position: absolute;
  inset: 0;
  background:
    radial-gradient(circle at 20% 50%, rgba(16, 185, 129, 0.08) 0%, transparent 50%),
    radial-gradient(circle at 80% 50%, rgba(16, 185, 129, 0.05) 0%, transparent 50%);
  pointer-events: none;
}

.login-container {
  width: 100%;
  max-width: 400px;
  position: relative;
  z-index: 1;
}

/* Card styling */
.login-card {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-xl);
  padding: 40px;
  box-shadow: var(--shadow-lg);
}

/* Header section */
.login-header {
  text-align: center;
  margin-bottom: 32px;
}

.login-logo {
  display: inline-flex;
  align-items: center;
  justify-content: center;
  width: 80px;
  height: 80px;
  background: var(--accent-gradient);
  border-radius: var(--radius-lg);
  color: white;
  margin-bottom: 24px;
  box-shadow: var(--shadow-glow);
}

.login-header h1 {
  font-size: 1.5rem;
  font-weight: 600;
  color: var(--text-primary);
  margin-bottom: 8px;
  letter-spacing: -0.02em;
}

.login-subtitle {
  color: var(--text-tertiary);
  font-size: 0.875rem;
}

.login-subtitle.login-warning {
  color: var(--warning);
  font-weight: 500;
}

/* Error message */
.login-error {
  display: flex;
  align-items: center;
  gap: 10px;
  background: var(--error-bg);
  border: 1px solid rgba(239, 68, 68, 0.2);
  color: var(--error);
  padding: 12px 16px;
  border-radius: var(--radius-md);
  margin-bottom: 24px;
  font-size: 0.875rem;
}

.login-error svg {
  flex-shrink: 0;
}

/* Form styling */
.login-form {
  display: flex;
  flex-direction: column;
  gap: 20px;
}

.login-form-group {
  display: flex;
  flex-direction: column;
  gap: 8px;
}

.login-form-group label {
  font-size: 0.875rem;
  font-weight: 500;
  color: var(--text-secondary);
}

.login-form-group input {
  width: 100%;
  padding: 14px 16px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.9375rem;
  color: var(--text-primary);
  transition: all var(--transition-fast);
}

.login-form-group input::placeholder {
  color: var(--text-muted);
}

.login-form-group input:hover:not(:disabled) {
  border-color: var(--border-secondary);
  background: var(--bg-elevated);
}

.login-form-group input:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
  background: var(--bg-elevated);
}

.login-form-group input:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

/* Submit button */
.login-submit {
  display: flex;
  align-items: center;
  justify-content: center;
  gap: 10px;
  width: 100%;
  padding: 14px 20px;
  background: var(--accent-gradient);
  border: none;
  border-radius: var(--radius-md);
  font-size: 0.9375rem;
  font-weight: 500;
  color: white;
  cursor: pointer;
  transition: all var(--transition-fast);
  margin-top: 8px;
  box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
}

.login-submit:hover:not(:disabled) {
  transform: translateY(-1px);
  box-shadow: var(--shadow-md), 0 0 30px rgba(16, 185, 129, 0.3);
}

.login-submit:active:not(:disabled) {
  transform: translateY(0);
}

.login-submit:disabled {
  opacity: 0.7;
  cursor: not-allowed;
  transform: none;
}

/* Loading spinner */
.login-spinner {
  width: 16px;
  height: 16px;
  border: 2px solid rgba(255, 255, 255, 0.3);
  border-top-color: white;
  border-radius: 50%;
  animation: spin 0.6s linear infinite;
}

@keyframes spin {
  to {
    transform: rotate(360deg);
  }
}

/* Loading state */
.login-loading {
  text-align: center;
  padding: 64px 32px;
  color: var(--text-tertiary);
  font-size: 0.9375rem;
}

/* Footer */
.login-footer {
  text-align: center;
  margin-top: 24px;
  padding-top: 24px;
}

.login-footer p {
  color: var(--text-muted);
  font-size: 0.8125rem;
}

/* SSO Divider */
.login-divider {
  display: flex;
  align-items: center;
  gap: 16px;
  margin: 24px 0;
}

.login-divider::before,
.login-divider::after {
  content: '';
  flex: 1;
  height: 1px;
  background: var(--border-primary);
}

.login-divider span {
  font-size: 0.8125rem;
  color: var(--text-muted);
  text-transform: lowercase;
}

/* SSO Button */
.login-sso-button {
  display: flex;
  align-items: center;
  justify-content: center;
  gap: 10px;
  width: 100%;
  padding: 14px 20px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.9375rem;
  font-weight: 500;
  color: var(--text-primary);
  text-decoration: none;
  cursor: pointer;
  transition: all var(--transition-fast);
}

.login-sso-button:hover {
  background: var(--bg-hover);
  border-color: var(--border-secondary);
  transform: translateY(-1px);
  box-shadow: var(--shadow-sm);
}

.login-sso-button:active {
  transform: translateY(0);
}

.login-sso-button svg {
  color: var(--accent-primary);
}

/* Responsive adjustments */
@media (max-width: 480px) {
  .login-card {
    padding: 32px 24px;
  }

  .login-logo {
    width: 64px;
    height: 64px;
  }

  .login-logo svg {
    width: 36px;
    height: 36px;
  }

  .login-header h1 {
    font-size: 1.25rem;
  }
}
186	frontend/src/pages/LoginPage.tsx	Normal file
@@ -0,0 +1,186 @@
import { useState, useEffect } from 'react';
import { useNavigate, useLocation, useSearchParams } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { getOIDCStatus, getOIDCLoginUrl } from '../api';
import { OIDCStatus } from '../types';
import './LoginPage.css';

function LoginPage() {
  const [username, setUsername] = useState('');
  const [password, setPassword] = useState('');
  const [isSubmitting, setIsSubmitting] = useState(false);
  const [error, setError] = useState<string | null>(null);
  const [oidcStatus, setOidcStatus] = useState<OIDCStatus | null>(null);
  const [searchParams] = useSearchParams();

  const { user, login, loading: authLoading, refreshUser } = useAuth();
  const navigate = useNavigate();
  const location = useLocation();

  // Get the return URL from location state, default to home
  const from = (location.state as { from?: string })?.from || '/';

  // Load OIDC status on mount
  useEffect(() => {
    getOIDCStatus()
      .then(setOidcStatus)
      .catch(() => setOidcStatus({ enabled: false }));
  }, []);

  // Handle SSO callback - check for oidc_success or oidc_error params
  useEffect(() => {
    const oidcSuccess = searchParams.get('oidc_success');
    const oidcError = searchParams.get('oidc_error');

    if (oidcSuccess === 'true') {
      refreshUser().then(() => {
        navigate(from, { replace: true });
      });
    } else if (oidcError) {
      setError(decodeURIComponent(oidcError));
    }
  }, [searchParams, refreshUser, navigate, from]);

  // Redirect if already logged in
  useEffect(() => {
    if (user && !authLoading) {
      navigate(from, { replace: true });
    }
  }, [user, authLoading, navigate, from]);

  async function handleSubmit(e: React.FormEvent) {
    e.preventDefault();

    if (!username.trim() || !password) {
      setError('Please enter both username and password');
      return;
    }

    setIsSubmitting(true);
    setError(null);

    try {
      await login(username, password);
      navigate(from, { replace: true });
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Login failed. Please try again.');
    } finally {
      setIsSubmitting(false);
    }
  }

  // Show loading while checking auth state
  if (authLoading) {
    return (
      <div className="login-page">
        <div className="login-container">
          <div className="login-loading">Checking session...</div>
        </div>
      </div>
    );
  }

  return (
    <div className="login-page">
      <div className="login-container">
        <div className="login-card">
          <div className="login-header">
            <div className="login-logo">
              <svg width="48" height="48" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
                <path d="M6 14 Q6 8 3 8 Q6 4 6 4 Q6 4 9 8 Q6 8 6 14" fill="currentColor" opacity="0.6"/>
                <rect x="5.25" y="13" width="1.5" height="4" fill="currentColor" opacity="0.6"/>
                <path d="M12 12 Q12 5 8 5 Q12 1 12 1 Q12 1 16 5 Q12 5 12 12" fill="currentColor"/>
                <rect x="11.25" y="11" width="1.5" height="5" fill="currentColor"/>
                <path d="M18 14 Q18 8 15 8 Q18 4 18 4 Q18 4 21 8 Q18 8 18 14" fill="currentColor" opacity="0.6"/>
                <rect x="17.25" y="13" width="1.5" height="4" fill="currentColor" opacity="0.6"/>
                <ellipse cx="12" cy="19" rx="9" ry="1.5" fill="currentColor" opacity="0.3"/>
              </svg>
            </div>
            <h1>Sign in to Orchard</h1>
            <p className="login-subtitle">Content-Addressable Storage</p>
          </div>

          {error && (
            <div className="login-error">
              <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <circle cx="12" cy="12" r="10"/>
                <line x1="12" y1="8" x2="12" y2="12"/>
                <line x1="12" y1="16" x2="12.01" y2="16"/>
              </svg>
              <span>{error}</span>
            </div>
          )}

          <form onSubmit={handleSubmit} className="login-form">
            <div className="login-form-group">
              <label htmlFor="username">Username</label>
              <input
                id="username"
                type="text"
                value={username}
                onChange={(e) => setUsername(e.target.value)}
                placeholder="Enter your username"
                autoComplete="username"
                autoFocus
                disabled={isSubmitting}
              />
            </div>

            <div className="login-form-group">
              <label htmlFor="password">Password</label>
              <input
                id="password"
                type="password"
                value={password}
                onChange={(e) => setPassword(e.target.value)}
                placeholder="Enter your password"
                autoComplete="current-password"
                disabled={isSubmitting}
              />
            </div>

            <button
              type="submit"
              className="login-submit"
              disabled={isSubmitting}
            >
              {isSubmitting ? (
                <>
                  <span className="login-spinner"></span>
                  Signing in...
                </>
              ) : (
                'Sign in'
              )}
            </button>
          </form>

          {oidcStatus?.enabled && (
            <>
              <div className="login-divider">
                <span>or</span>
              </div>
              <a
                href={getOIDCLoginUrl(from !== '/' ? from : undefined)}
                className="login-sso-button"
              >
                <svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                  <path d="M15 3h4a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2h-4"/>
                  <polyline points="10 17 15 12 10 7"/>
                  <line x1="15" y1="12" x2="3" y2="12"/>
                </svg>
                Sign in with SSO
              </a>
            </>
          )}
        </div>

        <div className="login-footer">
          <p>Artifact storage and management system</p>
        </div>
      </div>
    </div>
  );
}

export default LoginPage;
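Reviewer note: the component above leans on two helpers imported from ../api that this diff does not define. A minimal sketch consistent with the call sites (getOIDCStatus resolving an { enabled } flag, getOIDCLoginUrl returning an href that carries an optional return path) might look like the following; the endpoint paths are assumptions, not confirmed by this MR.

// Sketch only: the real helpers live in frontend/src/api and are not part of this diff.
// OIDCStatus mirrors the fallback `setOidcStatus({ enabled: false })` above.
export interface OIDCStatus {
  enabled: boolean;
}

// Assumed route; only the helper name and return shape are confirmed by the diff.
export async function getOIDCStatus(): Promise<OIDCStatus> {
  const res = await fetch('/api/v1/auth/oidc/status');
  if (!res.ok) throw new Error(`OIDC status check failed: ${res.status}`);
  return res.json();
}

// Builds the SSO entry URL; `redirectTo` matches the call
// `getOIDCLoginUrl(from !== '/' ? from : undefined)` in LoginPage.
export function getOIDCLoginUrl(redirectTo?: string): string {
  const base = '/api/v1/auth/oidc/login'; // assumed path
  return redirectTo ? `${base}?redirect=${encodeURIComponent(redirectTo)}` : base;
}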
frontend/src/pages/PackagePage.css
@@ -127,6 +127,64 @@ h2 {
   font-size: 0.75rem;
 }
+
+/* Action buttons in table */
+.action-buttons {
+  display: flex;
+  gap: 8px;
+}
+
+/* Download by Artifact ID Section */
+.download-by-id-section {
+  margin-top: 32px;
+  background: var(--bg-secondary);
+}
+
+.download-by-id-section h3 {
+  margin-bottom: 12px;
+  color: var(--text-primary);
+  font-size: 1rem;
+  font-weight: 600;
+}
+
+.download-by-id-form {
+  display: flex;
+  gap: 12px;
+  align-items: center;
+}
+
+.artifact-id-input {
+  flex: 1;
+  padding: 10px 16px;
+  background: var(--bg-tertiary);
+  border: 1px solid var(--border-primary);
+  border-radius: var(--radius-md);
+  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
+  font-size: 0.8125rem;
+  color: var(--text-primary);
+}
+
+.artifact-id-input::placeholder {
+  color: var(--text-muted);
+}
+
+.artifact-id-input:focus {
+  outline: none;
+  border-color: var(--accent-primary);
+}
+
+.btn-disabled {
+  opacity: 0.5;
+  cursor: not-allowed;
+  pointer-events: none;
+}
+
+.validation-hint {
+  margin-top: 8px;
+  margin-bottom: 0;
+  font-size: 0.75rem;
+  color: var(--warning-color, #f59e0b);
+}
+
 /* Usage Section */
 .usage-section {
   margin-top: 32px;
@@ -272,6 +330,86 @@ tr:hover .copy-btn {
   color: var(--text-muted);
 }
+
+/* Version badge */
+.version-badge {
+  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
+  font-size: 0.8125rem;
+  color: var(--accent-primary);
+  background: rgba(16, 185, 129, 0.1);
+  padding: 2px 8px;
+  border-radius: var(--radius-sm);
+}
+
+/* Create Tag Section */
+.create-tag-section {
+  margin-top: 32px;
+  background: var(--bg-secondary);
+}
+
+.create-tag-section h3 {
+  margin-bottom: 4px;
+  color: var(--text-primary);
+  font-size: 1rem;
+  font-weight: 600;
+}
+
+.section-description {
+  color: var(--text-muted);
+  font-size: 0.875rem;
+  margin-bottom: 16px;
+}
+
+.create-tag-form .form-row {
+  display: flex;
+  gap: 12px;
+  align-items: flex-end;
+  flex-wrap: wrap;
+}
+
+.create-tag-form .form-group {
+  flex: 1;
+  min-width: 150px;
+}
+
+.create-tag-form .form-group--wide {
+  flex: 2;
+  min-width: 300px;
+}
+
+.create-tag-form .form-group label {
+  display: block;
+  margin-bottom: 6px;
+  font-size: 0.75rem;
+  font-weight: 500;
+  color: var(--text-secondary);
+  text-transform: uppercase;
+  letter-spacing: 0.05em;
+}
+
+.create-tag-form .form-group input {
+  width: 100%;
+  padding: 10px 14px;
+  background: var(--bg-tertiary);
+  border: 1px solid var(--border-primary);
+  border-radius: var(--radius-md);
+  color: var(--text-primary);
+  font-size: 0.875rem;
+}
+
+.create-tag-form .form-group input:focus {
+  outline: none;
+  border-color: var(--accent-primary);
+}
+
+.create-tag-form .form-group input:disabled {
+  opacity: 0.6;
+  cursor: not-allowed;
+}
+
+.create-tag-form button {
+  flex-shrink: 0;
+}
+
 /* Created cell */
 .created-cell {
   display: flex;
@@ -292,6 +430,340 @@ tr:hover .copy-btn {
   white-space: nowrap;
 }
+
+/* Dependencies Section */
+.dependencies-section {
+  margin-top: 32px;
+  background: var(--bg-secondary);
+}
+
+.dependencies-header {
+  display: flex;
+  align-items: center;
+  justify-content: space-between;
+  margin-bottom: 12px;
+}
+
+.dependencies-header h3 {
+  margin: 0;
+  color: var(--text-primary);
+  font-size: 1rem;
+  font-weight: 600;
+}
+
+.dependencies-controls {
+  display: flex;
+  align-items: center;
+  gap: 8px;
+}
+
+.dependencies-controls .btn {
+  display: inline-flex;
+  align-items: center;
+}
+
+.dependencies-tag-select {
+  margin-bottom: 16px;
+}
+
+.tag-selector {
+  padding: 8px 12px;
+  background: var(--bg-tertiary);
+  border: 1px solid var(--border-primary);
+  border-radius: var(--radius-md);
+  color: var(--text-primary);
+  font-size: 0.875rem;
+  cursor: pointer;
+  min-width: 200px;
+}
+
+.tag-selector:focus {
+  outline: none;
+  border-color: var(--accent-primary);
+}
+
+.deps-loading {
+  color: var(--text-muted);
+  font-size: 0.875rem;
+  padding: 16px 0;
+}
+
+.deps-error {
+  color: var(--error-color, #ef4444);
+  font-size: 0.875rem;
+  padding: 12px 16px;
+  background: rgba(239, 68, 68, 0.1);
+  border-radius: var(--radius-md);
+}
+
+.deps-empty {
+  color: var(--text-muted);
+  font-size: 0.875rem;
+  padding: 16px 0;
+}
+
+.deps-summary {
+  color: var(--text-secondary);
+  font-size: 0.875rem;
+  margin-bottom: 12px;
+}
+
+.deps-summary strong {
+  color: var(--accent-primary);
+}
+
+.deps-items {
+  list-style: none;
+  margin: 0;
+  padding: 0;
+  display: flex;
+  flex-direction: column;
+  gap: 8px;
+}
+
+.dep-item {
+  display: flex;
+  align-items: center;
+  gap: 12px;
+  padding: 12px 16px;
+  background: var(--bg-tertiary);
+  border-radius: var(--radius-md);
+  border: 1px solid var(--border-primary);
+}
+
+.dep-link {
+  color: var(--accent-primary);
+  font-weight: 500;
+  text-decoration: none;
+  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
+  font-size: 0.875rem;
+}
+
+.dep-link:hover {
+  text-decoration: underline;
+}
+
+.dep-constraint {
+  color: var(--text-muted);
+  font-size: 0.8125rem;
+  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
+}
+
+.dep-status {
+  margin-left: auto;
+  font-size: 0.875rem;
+  font-weight: 600;
+}
+
+.dep-status--ok {
+  color: var(--success-color, #10b981);
+}
+
+.dep-status--missing {
+  color: var(--warning-color, #f59e0b);
+}
+
+/* Tag name link in table */
+.tag-name-link {
+  color: var(--accent-primary);
+  transition: opacity var(--transition-fast);
+}
+
+.tag-name-link:hover {
+  opacity: 0.8;
+}
+
+.tag-name-link.selected {
+  text-decoration: underline;
+}
+
+/* Used By (Reverse Dependencies) Section */
+.used-by-section {
+  margin-top: 32px;
+  background: var(--bg-secondary);
+}
+
+.used-by-section h3 {
+  margin-bottom: 16px;
+  color: var(--text-primary);
+  font-size: 1rem;
+  font-weight: 600;
+}
+
+.reverse-dep-item {
+  display: flex;
+  align-items: center;
+  gap: 12px;
+  flex-wrap: wrap;
+}
+
+.dep-version {
+  color: var(--accent-primary);
+  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
+  font-size: 0.8125rem;
+  background: rgba(16, 185, 129, 0.1);
+  padding: 2px 8px;
+  border-radius: var(--radius-sm);
+}
+
+.dep-requires {
+  color: var(--text-muted);
+  font-size: 0.8125rem;
+  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
+  margin-left: auto;
+}
+
+.reverse-deps-pagination {
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  gap: 16px;
+  margin-top: 16px;
+  padding-top: 16px;
+  border-top: 1px solid var(--border-primary);
+}
+
+.pagination-info {
+  color: var(--text-secondary);
+  font-size: 0.875rem;
+}
+
+/* Ensure File Modal */
+.modal-overlay {
+  position: fixed;
+  top: 0;
+  left: 0;
+  right: 0;
+  bottom: 0;
+  background: rgba(0, 0, 0, 0.7);
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  z-index: 1000;
+  padding: 20px;
+}
+
+.ensure-file-modal {
+  background: var(--bg-secondary);
+  border: 1px solid var(--border-primary);
+  border-radius: var(--radius-lg);
+  max-width: 700px;
+  width: 100%;
+  max-height: 80vh;
+  display: flex;
+  flex-direction: column;
+  box-shadow: 0 20px 50px rgba(0, 0, 0, 0.5);
+}
+
+.ensure-file-header {
+  display: flex;
+  align-items: center;
+  justify-content: space-between;
+  padding: 16px 20px;
+  border-bottom: 1px solid var(--border-primary);
+}
+
+.ensure-file-header h3 {
+  margin: 0;
+  color: var(--text-primary);
+  font-size: 1rem;
+  font-weight: 600;
+}
+
+.ensure-file-actions {
+  display: flex;
+  align-items: center;
+  gap: 8px;
+}
+
+.ensure-file-actions .copy-btn {
+  opacity: 1;
+  width: 32px;
+  height: 32px;
+}
+
+.modal-close {
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  width: 32px;
+  height: 32px;
+  padding: 0;
+  background: transparent;
+  border: none;
+  border-radius: var(--radius-sm);
+  color: var(--text-muted);
+  cursor: pointer;
+  transition: all var(--transition-fast);
+}
+
+.modal-close:hover {
+  background: var(--bg-hover);
+  color: var(--text-primary);
+}
+
+.ensure-file-content {
+  flex: 1;
+  overflow: auto;
+  padding: 20px;
+}
+
+.ensure-file-loading {
+  color: var(--text-muted);
+  text-align: center;
+  padding: 40px 20px;
+}
+
+.ensure-file-error {
+  color: var(--error-color, #ef4444);
+  padding: 16px;
+  background: rgba(239, 68, 68, 0.1);
+  border-radius: var(--radius-md);
+}
+
+.ensure-file-empty {
+  color: var(--text-muted);
+  text-align: center;
+  padding: 40px 20px;
+  font-style: italic;
+}
+
+.ensure-file-yaml {
+  margin: 0;
+  padding: 16px;
+  background: #0d0d0f;
+  border: 1px solid var(--border-primary);
+  border-radius: var(--radius-md);
+  overflow-x: auto;
+}
+
+.ensure-file-yaml code {
+  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
+  font-size: 0.8125rem;
+  color: #e2e8f0;
+  white-space: pre;
+}
+
+.ensure-file-footer {
+  padding: 16px 20px;
+  border-top: 1px solid var(--border-primary);
+  background: var(--bg-tertiary);
+  border-radius: 0 0 var(--radius-lg) var(--radius-lg);
+}
+
+.ensure-file-hint {
+  margin: 0;
+  color: var(--text-muted);
+  font-size: 0.8125rem;
+}
+
+.ensure-file-hint code {
+  background: rgba(0, 0, 0, 0.2);
+  padding: 2px 6px;
+  border-radius: var(--radius-sm);
+  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
+  color: var(--accent-primary);
+}
+
 /* Responsive adjustments */
 @media (max-width: 768px) {
   .upload-form {
@@ -307,4 +779,18 @@ tr:hover .copy-btn {
     flex-wrap: wrap;
     gap: 12px;
   }
+
+  .dependencies-header {
+    flex-direction: column;
+    align-items: flex-start;
+    gap: 12px;
+  }
+
+  .tag-selector {
+    width: 100%;
+  }
+
+  .ensure-file-modal {
+    max-height: 90vh;
+  }
 }
frontend/src/pages/PackagePage.tsx
@@ -1,22 +1,19 @@
-import { useState, useEffect, useRef, useCallback } from 'react';
+import { useState, useEffect, useCallback } from 'react';
-import { useParams, useSearchParams, useNavigate } from 'react-router-dom';
+import { useParams, useSearchParams, useNavigate, useLocation, Link } from 'react-router-dom';
-import { TagDetail, Package, PaginatedResponse } from '../types';
+import { TagDetail, Package, PaginatedResponse, AccessLevel, Dependency, DependentInfo } from '../types';
-import { listTags, uploadArtifact, getDownloadUrl, getPackage } from '../api';
+import { listTags, getDownloadUrl, getPackage, getMyProjectAccess, createTag, getArtifactDependencies, getReverseDependencies, getEnsureFile, UnauthorizedError, ForbiddenError } from '../api';
 import { Breadcrumb } from '../components/Breadcrumb';
 import { Badge } from '../components/Badge';
 import { SearchInput } from '../components/SearchInput';
-import { SortDropdown, SortOption } from '../components/SortDropdown';
 import { FilterChip, FilterChipGroup } from '../components/FilterChip';
 import { DataTable } from '../components/DataTable';
 import { Pagination } from '../components/Pagination';
+import { DragDropUpload, UploadResult } from '../components/DragDropUpload';
+import { useAuth } from '../contexts/AuthContext';
+import DependencyGraph from '../components/DependencyGraph';
 import './Home.css';
 import './PackagePage.css';

-const SORT_OPTIONS: SortOption[] = [
-  { value: 'name', label: 'Name' },
-  { value: 'created_at', label: 'Created' },
-];
-
 function formatBytes(bytes: number): string {
   if (bytes === 0) return '0 B';
   const k = 1024;
@@ -55,16 +52,49 @@ function CopyButton({ text }: { text: string }) {
 function PackagePage() {
   const { projectName, packageName } = useParams<{ projectName: string; packageName: string }>();
   const navigate = useNavigate();
+  const location = useLocation();
   const [searchParams, setSearchParams] = useSearchParams();
+  const { user } = useAuth();

   const [pkg, setPkg] = useState<Package | null>(null);
   const [tagsData, setTagsData] = useState<PaginatedResponse<TagDetail> | null>(null);
   const [loading, setLoading] = useState(true);
   const [error, setError] = useState<string | null>(null);
-  const [uploading, setUploading] = useState(false);
-  const [uploadResult, setUploadResult] = useState<string | null>(null);
-  const [tag, setTag] = useState('');
-  const fileInputRef = useRef<HTMLInputElement>(null);
+  const [accessDenied, setAccessDenied] = useState(false);
+  const [uploadTag, setUploadTag] = useState('');
+  const [uploadSuccess, setUploadSuccess] = useState<string | null>(null);
+  const [artifactIdInput, setArtifactIdInput] = useState('');
+  const [accessLevel, setAccessLevel] = useState<AccessLevel | null>(null);
+  const [createTagName, setCreateTagName] = useState('');
+  const [createTagArtifactId, setCreateTagArtifactId] = useState('');
+  const [createTagLoading, setCreateTagLoading] = useState(false);
+
+  // Dependencies state
+  const [selectedTag, setSelectedTag] = useState<TagDetail | null>(null);
+  const [dependencies, setDependencies] = useState<Dependency[]>([]);
+  const [depsLoading, setDepsLoading] = useState(false);
+  const [depsError, setDepsError] = useState<string | null>(null);
+
+  // Reverse dependencies state
+  const [reverseDeps, setReverseDeps] = useState<DependentInfo[]>([]);
+  const [reverseDepsLoading, setReverseDepsLoading] = useState(false);
+  const [reverseDepsError, setReverseDepsError] = useState<string | null>(null);
+  const [reverseDepsPage, setReverseDepsPage] = useState(1);
+  const [reverseDepsTotal, setReverseDepsTotal] = useState(0);
+  const [reverseDepsHasMore, setReverseDepsHasMore] = useState(false);
+
+  // Dependency graph modal state
+  const [showGraph, setShowGraph] = useState(false);
+
+  // Ensure file modal state
+  const [showEnsureFile, setShowEnsureFile] = useState(false);
+  const [ensureFileContent, setEnsureFileContent] = useState<string | null>(null);
+  const [ensureFileLoading, setEnsureFileLoading] = useState(false);
+  const [ensureFileError, setEnsureFileError] = useState<string | null>(null);
+  const [ensureFileTagName, setEnsureFileTagName] = useState<string | null>(null);
+
+  // Derived permissions
+  const canWrite = accessLevel === 'write' || accessLevel === 'admin';

   // Get params from URL
   const page = parseInt(searchParams.get('page') || '1', 10);
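Reviewer note: the AccessLevel type and the canWrite derivation above, together with the accessResult.access_level read in the next hunk, pin down most of the access-check contract. A minimal sketch of what ../api and ../types presumably provide, with the route path and the 'read' member as assumptions, could be:

// Sketch only: inferred from `canWrite = accessLevel === 'write' || accessLevel === 'admin'`
// and `accessResult.access_level` in loadData; the real definitions are not in this diff.
export type AccessLevel = 'read' | 'write' | 'admin'; // 'read' is an assumed member

export interface ProjectAccessResponse {
  access_level: AccessLevel;
}

export async function getMyProjectAccess(projectName: string): Promise<ProjectAccessResponse> {
  // Assumed route; only the function name and the access_level field are confirmed.
  const res = await fetch(`/api/v1/project/${encodeURIComponent(projectName)}/access`);
  if (!res.ok) throw new Error(`Failed to fetch access level: ${res.status}`);
  return res.json();
}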
@@ -92,24 +122,129 @@ function PackagePage() {

     try {
       setLoading(true);
+      setAccessDenied(false);
-      const [pkgData, tagsResult] = await Promise.all([
+      const [pkgData, tagsResult, accessResult] = await Promise.all([
         getPackage(projectName, packageName),
         listTags(projectName, packageName, { page, search, sort, order }),
+        getMyProjectAccess(projectName),
       ]);
       setPkg(pkgData);
       setTagsData(tagsResult);
+      setAccessLevel(accessResult.access_level);
       setError(null);
     } catch (err) {
+      if (err instanceof UnauthorizedError) {
+        navigate('/login', { state: { from: location.pathname } });
+        return;
+      }
+      if (err instanceof ForbiddenError) {
+        setAccessDenied(true);
+        setError('You do not have access to this package');
+        setLoading(false);
+        return;
+      }
       setError(err instanceof Error ? err.message : 'Failed to load data');
     } finally {
       setLoading(false);
     }
-  }, [projectName, packageName, page, search, sort, order]);
+  }, [projectName, packageName, page, search, sort, order, navigate, location.pathname]);

   useEffect(() => {
     loadData();
   }, [loadData]);
+
+  // Auto-select tag when tags are loaded (prefer version from URL, then first tag)
+  // Re-run when package changes to pick up new tags
+  useEffect(() => {
+    if (tagsData?.items && tagsData.items.length > 0) {
+      const versionParam = searchParams.get('version');
+      if (versionParam) {
+        // Find tag matching the version parameter
+        const matchingTag = tagsData.items.find(t => t.version === versionParam);
+        if (matchingTag) {
+          setSelectedTag(matchingTag);
+          setDependencies([]);
+          return;
+        }
+      }
+      // Fall back to first tag
+      setSelectedTag(tagsData.items[0]);
+      setDependencies([]);
+    }
+  }, [tagsData, searchParams, projectName, packageName]);
+
+  // Fetch dependencies when selected tag changes
+  const fetchDependencies = useCallback(async (artifactId: string) => {
+    setDepsLoading(true);
+    setDepsError(null);
+    try {
+      const result = await getArtifactDependencies(artifactId);
+      setDependencies(result.dependencies);
+    } catch (err) {
+      setDepsError(err instanceof Error ? err.message : 'Failed to load dependencies');
+      setDependencies([]);
+    } finally {
+      setDepsLoading(false);
+    }
+  }, []);
+
+  useEffect(() => {
+    if (selectedTag) {
+      fetchDependencies(selectedTag.artifact_id);
+    }
+  }, [selectedTag, fetchDependencies]);
+
+  // Fetch reverse dependencies
+  const fetchReverseDeps = useCallback(async (pageNum: number = 1) => {
+    if (!projectName || !packageName) return;
+
+    setReverseDepsLoading(true);
+    setReverseDepsError(null);
+    try {
+      const result = await getReverseDependencies(projectName, packageName, { page: pageNum, limit: 10 });
+      setReverseDeps(result.dependents);
+      setReverseDepsTotal(result.pagination.total);
+      setReverseDepsHasMore(result.pagination.has_more);
+      setReverseDepsPage(pageNum);
+    } catch (err) {
+      setReverseDepsError(err instanceof Error ? err.message : 'Failed to load reverse dependencies');
+      setReverseDeps([]);
+    } finally {
+      setReverseDepsLoading(false);
+    }
+  }, [projectName, packageName]);
+
+  useEffect(() => {
+    if (projectName && packageName && !loading) {
+      fetchReverseDeps(1);
+    }
+  }, [projectName, packageName, loading, fetchReverseDeps]);
+
+  // Fetch ensure file for a specific tag
+  const fetchEnsureFileForTag = useCallback(async (tagName: string) => {
+    if (!projectName || !packageName) return;
+
+    setEnsureFileTagName(tagName);
+    setEnsureFileLoading(true);
+    setEnsureFileError(null);
+    try {
+      const content = await getEnsureFile(projectName, packageName, tagName);
+      setEnsureFileContent(content);
+      setShowEnsureFile(true);
+    } catch (err) {
+      setEnsureFileError(err instanceof Error ? err.message : 'Failed to load ensure file');
+      setShowEnsureFile(true);
+    } finally {
+      setEnsureFileLoading(false);
+    }
+  }, [projectName, packageName]);
+
+  // Fetch ensure file for selected tag
+  const fetchEnsureFile = useCallback(async () => {
+    if (!selectedTag) return;
+    fetchEnsureFileForTag(selectedTag.name);
+  }, [selectedTag, fetchEnsureFileForTag]);
+
   // Keyboard navigation - go back with backspace
   useEffect(() => {
     const handleKeyDown = (e: KeyboardEvent) => {
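Reviewer note: loadData branches on `err instanceof UnauthorizedError` / `ForbiddenError`, so ../api must throw typed errors from its fetch layer. That layer is not in this diff; a minimal sketch of the pattern, with the wrapper name apiFetch being hypothetical, would be:

// Sketch only: the error class names come from the imports above; everything
// else is an assumption about how frontend/src/api maps HTTP status to errors.
export class UnauthorizedError extends Error {}
export class ForbiddenError extends Error {}

export async function apiFetch(input: string, init?: RequestInit): Promise<Response> {
  const res = await fetch(input, init);
  if (res.status === 401) throw new UnauthorizedError('Not signed in');
  if (res.status === 403) throw new ForbiddenError('Insufficient permissions');
  return res;
}

// Callers can then distinguish "redirect to /login" (401) from
// "render Access Denied" (403), exactly as loadData does above.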
@@ -122,37 +257,54 @@ function PackagePage() {
     return () => window.removeEventListener('keydown', handleKeyDown);
   }, [navigate, projectName]);

-  async function handleUpload(e: React.FormEvent) {
-    e.preventDefault();
-    const file = fileInputRef.current?.files?.[0];
-    if (!file) {
-      setError('Please select a file');
-      return;
-    }
-
-    try {
-      setUploading(true);
-      setError(null);
-      const result = await uploadArtifact(projectName!, packageName!, file, tag || undefined);
-      setUploadResult(`Uploaded successfully! Artifact ID: ${result.artifact_id}`);
-      setTag('');
-      if (fileInputRef.current) {
-        fileInputRef.current.value = '';
-      }
-      loadData();
-    } catch (err) {
-      setError(err instanceof Error ? err.message : 'Upload failed');
-    } finally {
-      setUploading(false);
-    }
-  }
+  const handleUploadComplete = useCallback((results: UploadResult[]) => {
+    const count = results.length;
+    const message = count === 1
+      ? `Uploaded successfully! Artifact ID: ${results[0].artifact_id}`
+      : `${count} files uploaded successfully!`;
+    setUploadSuccess(message);
+    setUploadTag('');
+    loadData();
+
+    // Auto-dismiss success message after 5 seconds
+    setTimeout(() => setUploadSuccess(null), 5000);
+  }, [loadData]);
+
+  const handleUploadError = useCallback((errorMsg: string) => {
+    setError(errorMsg);
+  }, []);
+
+  const handleCreateTag = async (e: React.FormEvent) => {
+    e.preventDefault();
+    if (!createTagName.trim() || createTagArtifactId.length !== 64) return;
+
+    setCreateTagLoading(true);
+    setError(null);
+
+    try {
+      await createTag(projectName!, packageName!, {
+        name: createTagName.trim(),
+        artifact_id: createTagArtifactId,
+      });
+      setUploadSuccess(`Tag "${createTagName}" created successfully!`);
+      setCreateTagName('');
+      setCreateTagArtifactId('');
+      loadData();
+      setTimeout(() => setUploadSuccess(null), 5000);
+    } catch (err) {
+      setError(err instanceof Error ? err.message : 'Failed to create tag');
+    } finally {
+      setCreateTagLoading(false);
+    }
+  };

   const handleSearchChange = (value: string) => {
     updateParams({ search: value, page: '1' });
   };

-  const handleSortChange = (newSort: string, newOrder: 'asc' | 'desc') => {
-    updateParams({ sort: newSort, order: newOrder, page: '1' });
+  const handleSortChange = (columnKey: string) => {
+    const newOrder = columnKey === sort ? (order === 'asc' ? 'desc' : 'asc') : 'asc';
+    updateParams({ sort: columnKey, order: newOrder, page: '1' });
   };

   const handlePageChange = (newPage: number) => {
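Reviewer note: handleCreateTag fixes createTag's call signature — (project, package, { name, artifact_id }) — but the client function itself is outside this diff. A sketch consistent with that call site, where the HTTP method and route are assumptions, might be:

// Sketch only: the argument shape is taken from the handleCreateTag call above;
// the POST path is a guess, not confirmed by this MR.
interface CreateTagRequest {
  name: string;
  artifact_id: string; // SHA256 hex digest, exactly 64 characters
}

export async function createTag(
  project: string,
  pkg: string,
  body: CreateTagRequest,
): Promise<void> {
  const res = await fetch(`/api/v1/project/${project}/${pkg}/tags`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  });
  if (!res.ok) throw new Error(`Failed to create tag: ${res.status}`);
}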
@@ -167,12 +319,31 @@ function PackagePage() {
   const tags = tagsData?.items || [];
   const pagination = tagsData?.pagination;

+  const handleTagSelect = (tag: TagDetail) => {
+    setSelectedTag(tag);
+  };
+
   const columns = [
     {
       key: 'name',
       header: 'Tag',
       sortable: true,
-      render: (t: TagDetail) => <strong>{t.name}</strong>,
+      render: (t: TagDetail) => (
+        <strong
+          className={`tag-name-link ${selectedTag?.id === t.id ? 'selected' : ''}`}
+          onClick={() => handleTagSelect(t)}
+          style={{ cursor: 'pointer' }}
+        >
+          {t.name}
+        </strong>
+      ),
+    },
+    {
+      key: 'version',
+      header: 'Version',
+      render: (t: TagDetail) => (
+        <span className="version-badge">{t.version || '-'}</span>
+      ),
     },
     {
       key: 'artifact_id',
@@ -185,19 +356,19 @@ function PackagePage() {
       ),
     },
     {
-      key: 'size',
+      key: 'artifact_size',
       header: 'Size',
       render: (t: TagDetail) => <span>{formatBytes(t.artifact_size)}</span>,
     },
     {
-      key: 'content_type',
+      key: 'artifact_content_type',
       header: 'Type',
       render: (t: TagDetail) => (
         <span className="content-type">{t.artifact_content_type || '-'}</span>
       ),
     },
     {
-      key: 'original_name',
+      key: 'artifact_original_name',
       header: 'Filename',
       className: 'cell-truncate',
       render: (t: TagDetail) => (
@@ -219,13 +390,22 @@ function PackagePage() {
       key: 'actions',
       header: 'Actions',
       render: (t: TagDetail) => (
-        <a
-          href={getDownloadUrl(projectName!, packageName!, t.name)}
-          className="btn btn-secondary btn-small"
-          download
-        >
-          Download
-        </a>
+        <div className="action-buttons">
+          <button
+            className="btn btn-secondary btn-small"
+            onClick={() => fetchEnsureFileForTag(t.name)}
+            title="View orchard.ensure file"
+          >
+            Ensure
+          </button>
+          <a
+            href={getDownloadUrl(projectName!, packageName!, t.name)}
+            className="btn btn-secondary btn-small"
+            download
+          >
+            Download
+          </a>
+        </div>
       ),
     },
   ];
@@ -234,6 +414,28 @@ function PackagePage() {
     return <div className="loading">Loading...</div>;
   }

+  if (accessDenied) {
+    return (
+      <div className="home">
+        <Breadcrumb
+          items={[
+            { label: 'Projects', href: '/' },
+            { label: projectName!, href: `/project/${projectName}` },
+          ]}
+        />
+        <div className="error-message" style={{ textAlign: 'center', padding: '48px 24px' }}>
+          <h2>Access Denied</h2>
+          <p>You do not have permission to view this package.</p>
+          {!user && (
+            <p style={{ marginTop: '16px' }}>
+              <a href="/login" className="btn btn-primary">Sign in</a>
+            </p>
+          )}
+        </div>
+      </div>
+    );
+  }
+
   return (
     <div className="home">
       <Breadcrumb
@@ -292,30 +494,43 @@ function PackagePage() {
       </div>

       {error && <div className="error-message">{error}</div>}
-      {uploadResult && <div className="success-message">{uploadResult}</div>}
+      {uploadSuccess && <div className="success-message">{uploadSuccess}</div>}

-      <div className="upload-section card">
-        <h3>Upload Artifact</h3>
-        <form onSubmit={handleUpload} className="upload-form">
-          <div className="form-group">
-            <label htmlFor="file">File</label>
-            <input id="file" type="file" ref={fileInputRef} required />
-          </div>
-          <div className="form-group">
-            <label htmlFor="tag">Tag (optional)</label>
-            <input
-              id="tag"
-              type="text"
-              value={tag}
-              onChange={(e) => setTag(e.target.value)}
-              placeholder="v1.0.0, latest, stable..."
-            />
-          </div>
-          <button type="submit" className="btn btn-primary" disabled={uploading}>
-            {uploading ? 'Uploading...' : 'Upload'}
-          </button>
-        </form>
-      </div>
+      {user && (
+        <div className="upload-section card">
+          <h3>Upload Artifact</h3>
+          {canWrite ? (
+            <div className="upload-form">
+              <div className="form-group">
+                <label htmlFor="upload-tag">Tag (optional)</label>
+                <input
+                  id="upload-tag"
+                  type="text"
+                  value={uploadTag}
+                  onChange={(e) => setUploadTag(e.target.value)}
+                  placeholder="v1.0.0, latest, stable..."
+                />
+              </div>
+              <DragDropUpload
+                projectName={projectName!}
+                packageName={packageName!}
+                tag={uploadTag || undefined}
+                onUploadComplete={handleUploadComplete}
+                onUploadError={handleUploadError}
+              />
+            </div>
+          ) : (
+            <DragDropUpload
+              projectName={projectName!}
+              packageName={packageName!}
+              disabled={true}
+              disabledReason="You have read-only access to this project and cannot upload artifacts."
+              onUploadComplete={handleUploadComplete}
+              onUploadError={handleUploadError}
+            />
+          )}
+        </div>
+      )}

       <div className="section-header">
         <h2>Tags / Versions</h2>
@@ -328,7 +543,6 @@ function PackagePage() {
           placeholder="Filter tags..."
           className="list-controls__search"
         />
-        <SortDropdown options={SORT_OPTIONS} value={sort} order={order} onChange={handleSortChange} />
       </div>

       {hasActiveFilters && (
@@ -337,25 +551,21 @@ function PackagePage() {
         </FilterChipGroup>
       )}

-      <DataTable
-        data={tags}
-        columns={columns}
-        keyExtractor={(t) => t.id}
-        emptyMessage={
-          hasActiveFilters
-            ? 'No tags match your filters. Try adjusting your search.'
-            : 'No tags yet. Upload an artifact with a tag to create one!'
-        }
-        onSort={(key) => {
-          if (key === sort) {
-            handleSortChange(key, order === 'asc' ? 'desc' : 'asc');
-          } else {
-            handleSortChange(key, 'asc');
-          }
-        }}
-        sortKey={sort}
-        sortOrder={order}
-      />
+      <div className="data-table--responsive">
+        <DataTable
+          data={tags}
+          columns={columns}
+          keyExtractor={(t) => t.id}
+          emptyMessage={
+            hasActiveFilters
+              ? 'No tags match your filters. Try adjusting your search.'
+              : 'No tags yet. Upload an artifact with a tag to create one!'
+          }
+          onSort={handleSortChange}
+          sortKey={sort}
+          sortOrder={order}
+        />
+      </div>

       {pagination && pagination.total_pages > 1 && (
         <Pagination
@@ -367,6 +577,238 @@ function PackagePage() {
         />
       )}

+      {/* Dependencies Section */}
+      {tags.length > 0 && (
+        <div className="dependencies-section card">
+          <div className="dependencies-header">
+            <h3>Dependencies</h3>
+            <div className="dependencies-controls">
+              {selectedTag && (
+                <>
+                  <button
+                    className="btn btn-secondary btn-small"
+                    onClick={fetchEnsureFile}
+                    disabled={ensureFileLoading}
+                    title="View orchard.ensure file"
+                  >
+                    <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" style={{ marginRight: '6px' }}>
+                      <path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path>
+                      <polyline points="14 2 14 8 20 8"></polyline>
+                      <line x1="16" y1="13" x2="8" y2="13"></line>
+                      <line x1="16" y1="17" x2="8" y2="17"></line>
+                      <polyline points="10 9 9 9 8 9"></polyline>
+                    </svg>
+                    {ensureFileLoading ? 'Loading...' : 'View Ensure File'}
+                  </button>
+                  <button
+                    className="btn btn-secondary btn-small"
+                    onClick={() => setShowGraph(true)}
+                    title="View full dependency tree"
+                  >
+                    <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" style={{ marginRight: '6px' }}>
+                      <circle cx="12" cy="12" r="3"></circle>
+                      <circle cx="4" cy="4" r="2"></circle>
+                      <circle cx="20" cy="4" r="2"></circle>
+                      <circle cx="4" cy="20" r="2"></circle>
+                      <circle cx="20" cy="20" r="2"></circle>
+                      <line x1="9.5" y1="9.5" x2="5.5" y2="5.5"></line>
+                      <line x1="14.5" y1="9.5" x2="18.5" y2="5.5"></line>
+                      <line x1="9.5" y1="14.5" x2="5.5" y2="18.5"></line>
+                      <line x1="14.5" y1="14.5" x2="18.5" y2="18.5"></line>
+                    </svg>
+                    View Graph
+                  </button>
+                </>
+              )}
+            </div>
+          </div>
+          <div className="dependencies-tag-select">
+            {selectedTag && (
+              <select
+                className="tag-selector"
+                value={selectedTag.id}
+                onChange={(e) => {
+                  const tag = tags.find(t => t.id === e.target.value);
+                  if (tag) setSelectedTag(tag);
+                }}
+              >
+                {tags.map(t => (
+                  <option key={t.id} value={t.id}>
+                    {t.name}{t.version ? ` (${t.version})` : ''}
+                  </option>
+                ))}
+              </select>
+            )}
+          </div>
+
+          {depsLoading ? (
+            <div className="deps-loading">Loading dependencies...</div>
+          ) : depsError ? (
+            <div className="deps-error">{depsError}</div>
+          ) : dependencies.length === 0 ? (
+            <div className="deps-empty">
+              {selectedTag ? (
+                <span><strong>{selectedTag.name}</strong> has no dependencies</span>
+              ) : (
+                <span>No dependencies</span>
+              )}
+            </div>
+          ) : (
+            <div className="deps-list">
+              <div className="deps-summary">
+                <strong>{selectedTag?.name}</strong> has {dependencies.length} {dependencies.length === 1 ? 'dependency' : 'dependencies'}:
+              </div>
+              <ul className="deps-items">
+                {dependencies.map((dep) => (
+                  <li key={dep.id} className="dep-item">
+                    <Link
+                      to={`/project/${dep.project}/${dep.package}`}
+                      className="dep-link"
+                    >
+                      {dep.project}/{dep.package}
+                    </Link>
+                    <span className="dep-constraint">
+                      @ {dep.version || dep.tag}
+                    </span>
+                    <span className="dep-status dep-status--ok" title="Package exists">
+                      ✓
+                    </span>
+                  </li>
+                ))}
+              </ul>
+            </div>
+          )}
+        </div>
+      )}
+
+      {/* Used By (Reverse Dependencies) Section */}
+      <div className="used-by-section card">
+        <h3>Used By</h3>
+
+        {reverseDepsLoading ? (
+          <div className="deps-loading">Loading reverse dependencies...</div>
+        ) : reverseDepsError ? (
+          <div className="deps-error">{reverseDepsError}</div>
+        ) : reverseDeps.length === 0 ? (
+          <div className="deps-empty">No packages depend on this package</div>
+        ) : (
+          <div className="reverse-deps-list">
+            <div className="deps-summary">
+              {reverseDepsTotal} {reverseDepsTotal === 1 ? 'package depends' : 'packages depend'} on this:
+            </div>
+            <ul className="deps-items">
+              {reverseDeps.map((dep) => (
+                <li key={dep.artifact_id} className="dep-item reverse-dep-item">
+                  <Link
+                    to={`/project/${dep.project}/${dep.package}${dep.version ? `?version=${dep.version}` : ''}`}
+                    className="dep-link"
+                  >
+                    {dep.project}/{dep.package}
+                    {dep.version && (
+                      <span className="dep-version">v{dep.version}</span>
+                    )}
+                  </Link>
+                  <span className="dep-requires">
+                    requires @ {dep.constraint_value}
+                  </span>
+                </li>
+              ))}
+            </ul>
+            {(reverseDepsHasMore || reverseDepsPage > 1) && (
+              <div className="reverse-deps-pagination">
+                <button
+                  className="btn btn-secondary btn-small"
+                  onClick={() => fetchReverseDeps(reverseDepsPage - 1)}
+                  disabled={reverseDepsPage <= 1 || reverseDepsLoading}
+                >
+                  Previous
+                </button>
+                <span className="pagination-info">Page {reverseDepsPage}</span>
+                <button
+                  className="btn btn-secondary btn-small"
+                  onClick={() => fetchReverseDeps(reverseDepsPage + 1)}
+                  disabled={!reverseDepsHasMore || reverseDepsLoading}
+                >
+                  Next
+                </button>
+              </div>
+            )}
+          </div>
+        )}
+      </div>
+
+      <div className="download-by-id-section card">
+        <h3>Download by Artifact ID</h3>
+        <div className="download-by-id-form">
+          <input
+            type="text"
+            value={artifactIdInput}
+            onChange={(e) => setArtifactIdInput(e.target.value.toLowerCase().replace(/[^a-f0-9]/g, '').slice(0, 64))}
+            placeholder="Enter SHA256 artifact ID (64 hex characters)"
+            className="artifact-id-input"
+          />
+          <a
+            href={artifactIdInput.length === 64 ? getDownloadUrl(projectName!, packageName!, `artifact:${artifactIdInput}`) : '#'}
+            className={`btn btn-primary ${artifactIdInput.length !== 64 ? 'btn-disabled' : ''}`}
+            download
+            onClick={(e) => {
+              if (artifactIdInput.length !== 64) {
+                e.preventDefault();
+              }
+            }}
+          >
+            Download
+          </a>
+        </div>
+        {artifactIdInput.length > 0 && artifactIdInput.length !== 64 && (
+          <p className="validation-hint">Artifact ID must be exactly 64 hex characters ({artifactIdInput.length}/64)</p>
+        )}
+      </div>
+
+      {user && canWrite && (
+        <div className="create-tag-section card">
+          <h3>Create / Update Tag</h3>
+          <p className="section-description">Point a tag at any existing artifact by its ID</p>
+          <form onSubmit={handleCreateTag} className="create-tag-form">
+            <div className="form-row">
+              <div className="form-group">
+                <label htmlFor="create-tag-name">Tag Name</label>
+                <input
+                  id="create-tag-name"
+                  type="text"
+                  value={createTagName}
+                  onChange={(e) => setCreateTagName(e.target.value)}
+                  placeholder="latest, stable, v1.0.0..."
+                  disabled={createTagLoading}
+                />
+              </div>
+              <div className="form-group form-group--wide">
+                <label htmlFor="create-tag-artifact">Artifact ID</label>
+                <input
+                  id="create-tag-artifact"
+                  type="text"
+                  value={createTagArtifactId}
+                  onChange={(e) => setCreateTagArtifactId(e.target.value.toLowerCase().replace(/[^a-f0-9]/g, '').slice(0, 64))}
+                  placeholder="SHA256 hash (64 hex characters)"
+                  className="artifact-id-input"
+                  disabled={createTagLoading}
+                />
+              </div>
+              <button
+                type="submit"
+                className="btn btn-primary"
+                disabled={createTagLoading || !createTagName.trim() || createTagArtifactId.length !== 64}
+              >
+                {createTagLoading ? 'Creating...' : 'Create Tag'}
+              </button>
+            </div>
+            {createTagArtifactId.length > 0 && createTagArtifactId.length !== 64 && (
+              <p className="validation-hint">Artifact ID must be exactly 64 hex characters ({createTagArtifactId.length}/64)</p>
+            )}
+          </form>
+        </div>
+      )}
+
       <div className="usage-section card">
         <h3>Usage</h3>
         <p>Download artifacts using:</p>
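Reviewer note: the Download-by-ID input and the Create Tag artifact field repeat the same inline sanitization chain. Pulled out as a pure helper (a refactor suggestion, not part of the diff), the behavior is:

// Mirrors the inline onChange logic above: lowercase, strip every non-hex
// character, and cap at 64 characters so the value can only ever be a valid
// SHA256 digest or a prefix of one.
function sanitizeArtifactId(raw: string): string {
  return raw.toLowerCase().replace(/[^a-f0-9]/g, '').slice(0, 64);
}

// Example: a pasted value with a prefix and whitespace is reduced to its hex
// characters only.
sanitizeArtifactId('SHA256:ABC 123'); // => 'a256abc123'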
@@ -378,6 +820,58 @@ function PackagePage() {
         <code>curl -O {window.location.origin}/api/v1/project/{projectName}/{packageName}/+/v1.0.0</code>
         </pre>
       </div>
+
+      {/* Dependency Graph Modal */}
+      {showGraph && selectedTag && (
+        <DependencyGraph
+          projectName={projectName!}
+          packageName={packageName!}
+          tagName={selectedTag.name}
+          onClose={() => setShowGraph(false)}
+        />
+      )}
+
+      {/* Ensure File Modal */}
+      {showEnsureFile && (
+        <div className="modal-overlay" onClick={() => setShowEnsureFile(false)}>
+          <div className="ensure-file-modal" onClick={(e) => e.stopPropagation()}>
+            <div className="ensure-file-header">
+              <h3>orchard.ensure for {ensureFileTagName}</h3>
+              <div className="ensure-file-actions">
+                {ensureFileContent && (
+                  <CopyButton text={ensureFileContent} />
+                )}
+                <button
+                  className="modal-close"
+                  onClick={() => setShowEnsureFile(false)}
+                  title="Close"
+                >
+                  <svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
+                    <line x1="18" y1="6" x2="6" y2="18"></line>
+                    <line x1="6" y1="6" x2="18" y2="18"></line>
+                  </svg>
+                </button>
+              </div>
+            </div>
+            <div className="ensure-file-content">
+              {ensureFileLoading ? (
+                <div className="ensure-file-loading">Loading...</div>
+              ) : ensureFileError ? (
+                <div className="ensure-file-error">{ensureFileError}</div>
+              ) : ensureFileContent ? (
+                <pre className="ensure-file-yaml"><code>{ensureFileContent}</code></pre>
+              ) : (
+                <div className="ensure-file-empty">No dependencies defined for this artifact.</div>
+              )}
+            </div>
+            <div className="ensure-file-footer">
+              <p className="ensure-file-hint">
+                Save this as <code>orchard.ensure</code> in your project root to declare dependencies.
+              </p>
+            </div>
+          </div>
+        </div>
+      )}
     </div>
   );
 }
|||||||
@@ -1,21 +1,16 @@
|
|||||||
import { useState, useEffect, useCallback } from 'react';
|
import { useState, useEffect, useCallback } from 'react';
|
||||||
import { useParams, Link, useSearchParams, useNavigate } from 'react-router-dom';
|
import { useParams, useSearchParams, useNavigate, useLocation } from 'react-router-dom';
|
||||||
import { Project, Package, PaginatedResponse } from '../types';
|
import { Project, Package, PaginatedResponse, AccessLevel } from '../types';
|
||||||
import { getProject, listPackages, createPackage } from '../api';
|
import { getProject, listPackages, createPackage, getMyProjectAccess, UnauthorizedError, ForbiddenError } from '../api';
|
||||||
import { Breadcrumb } from '../components/Breadcrumb';
|
import { Breadcrumb } from '../components/Breadcrumb';
|
||||||
import { Badge } from '../components/Badge';
|
import { Badge } from '../components/Badge';
|
||||||
|
import { DataTable } from '../components/DataTable';
|
||||||
import { SearchInput } from '../components/SearchInput';
|
import { SearchInput } from '../components/SearchInput';
|
||||||
import { SortDropdown, SortOption } from '../components/SortDropdown';
|
|
||||||
import { FilterChip, FilterChipGroup } from '../components/FilterChip';
|
import { FilterChip, FilterChipGroup } from '../components/FilterChip';
|
||||||
import { Pagination } from '../components/Pagination';
|
import { Pagination } from '../components/Pagination';
|
||||||
|
import { useAuth } from '../contexts/AuthContext';
|
||||||
import './Home.css';
|
import './Home.css';
|
||||||
|
|
||||||
const SORT_OPTIONS: SortOption[] = [
|
|
||||||
{ value: 'name', label: 'Name' },
|
|
||||||
{ value: 'created_at', label: 'Created' },
|
|
||||||
{ value: 'updated_at', label: 'Updated' },
|
|
||||||
];
|
|
||||||
|
|
||||||
const FORMAT_OPTIONS = ['generic', 'npm', 'pypi', 'docker', 'deb', 'rpm', 'maven', 'nuget', 'helm'];
|
const FORMAT_OPTIONS = ['generic', 'npm', 'pypi', 'docker', 'deb', 'rpm', 'maven', 'nuget', 'helm'];
|
||||||
|
|
||||||
function formatBytes(bytes: number): string {
|
function formatBytes(bytes: number): string {
|
||||||
@@ -29,15 +24,24 @@ function formatBytes(bytes: number): string {
|
|||||||
function ProjectPage() {
|
function ProjectPage() {
|
||||||
const { projectName } = useParams<{ projectName: string }>();
|
const { projectName } = useParams<{ projectName: string }>();
|
||||||
const navigate = useNavigate();
|
const navigate = useNavigate();
|
||||||
|
const location = useLocation();
|
||||||
const [searchParams, setSearchParams] = useSearchParams();
|
const [searchParams, setSearchParams] = useSearchParams();
|
||||||
|
const { user } = useAuth();
|
||||||
|
|
||||||
const [project, setProject] = useState<Project | null>(null);
|
const [project, setProject] = useState<Project | null>(null);
|
||||||
const [packagesData, setPackagesData] = useState<PaginatedResponse<Package> | null>(null);
|
const [packagesData, setPackagesData] = useState<PaginatedResponse<Package> | null>(null);
|
||||||
const [loading, setLoading] = useState(true);
|
const [loading, setLoading] = useState(true);
|
||||||
const [error, setError] = useState<string | null>(null);
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
const [accessDenied, setAccessDenied] = useState(false);
|
||||||
const [showForm, setShowForm] = useState(false);
|
const [showForm, setShowForm] = useState(false);
|
||||||
const [newPackage, setNewPackage] = useState({ name: '', description: '', format: 'generic', platform: 'any' });
|
const [newPackage, setNewPackage] = useState({ name: '', description: '', format: 'generic', platform: 'any' });
|
||||||
const [creating, setCreating] = useState(false);
|
const [creating, setCreating] = useState(false);
|
||||||
|
const [accessLevel, setAccessLevel] = useState<AccessLevel | null>(null);
|
||||||
|
const [isOwner, setIsOwner] = useState(false);
|
||||||
|
|
||||||
|
// Derived permissions
|
||||||
|
const canWrite = accessLevel === 'write' || accessLevel === 'admin';
|
||||||
|
const canAdmin = accessLevel === 'admin';
|
||||||
|
|
||||||
// Get params from URL
|
// Get params from URL
|
||||||
const page = parseInt(searchParams.get('page') || '1', 10);
|
const page = parseInt(searchParams.get('page') || '1', 10);
|
||||||
@@ -66,19 +70,33 @@ function ProjectPage() {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
setLoading(true);
|
setLoading(true);
|
||||||
const [projectData, packagesResult] = await Promise.all([
|
setAccessDenied(false);
|
||||||
|
const [projectData, packagesResult, accessResult] = await Promise.all([
|
||||||
getProject(projectName),
|
getProject(projectName),
|
||||||
listPackages(projectName, { page, search, sort, order, format: format || undefined }),
|
listPackages(projectName, { page, search, sort, order, format: format || undefined }),
|
||||||
|
getMyProjectAccess(projectName),
|
||||||
]);
|
]);
|
||||||
setProject(projectData);
|
setProject(projectData);
|
||||||
setPackagesData(packagesResult);
|
setPackagesData(packagesResult);
|
||||||
|
setAccessLevel(accessResult.access_level);
|
||||||
|
setIsOwner(accessResult.is_owner);
|
||||||
setError(null);
|
setError(null);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
|
if (err instanceof UnauthorizedError) {
|
||||||
|
navigate('/login', { state: { from: location.pathname } });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (err instanceof ForbiddenError) {
|
||||||
|
setAccessDenied(true);
|
||||||
|
setError('You do not have access to this project');
|
||||||
|
setLoading(false);
|
||||||
|
return;
|
||||||
|
}
|
||||||
setError(err instanceof Error ? err.message : 'Failed to load data');
|
setError(err instanceof Error ? err.message : 'Failed to load data');
|
||||||
} finally {
|
} finally {
|
||||||
setLoading(false);
|
setLoading(false);
|
||||||
}
|
}
|
||||||
}, [projectName, page, search, sort, order, format]);
|
}, [projectName, page, search, sort, order, format, navigate, location.pathname]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
loadData();
|
loadData();
|
||||||
@@ -115,8 +133,9 @@ function ProjectPage() {
|
|||||||
updateParams({ search: value, page: '1' });
|
updateParams({ search: value, page: '1' });
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleSortChange = (newSort: string, newOrder: 'asc' | 'desc') => {
|
const handleSortChange = (columnKey: string) => {
|
||||||
updateParams({ sort: newSort, order: newOrder, page: '1' });
|
const newOrder = columnKey === sort ? (order === 'asc' ? 'desc' : 'asc') : 'asc';
|
||||||
|
updateParams({ sort: columnKey, order: newOrder, page: '1' });
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleFormatChange = (value: string) => {
|
const handleFormatChange = (value: string) => {
|
||||||
@@ -139,6 +158,23 @@ function ProjectPage() {
|
|||||||
return <div className="loading">Loading...</div>;
|
return <div className="loading">Loading...</div>;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (accessDenied) {
|
||||||
|
return (
|
||||||
|
<div className="home">
|
||||||
|
<Breadcrumb items={[{ label: 'Projects', href: '/' }]} />
|
||||||
|
<div className="error-message" style={{ textAlign: 'center', padding: '48px 24px' }}>
|
||||||
|
<h2>Access Denied</h2>
|
||||||
|
<p>You do not have permission to view this project.</p>
|
||||||
|
{!user && (
|
||||||
|
<p style={{ marginTop: '16px' }}>
|
||||||
|
<a href="/login" className="btn btn-primary">Sign in</a>
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
if (!project) {
|
if (!project) {
|
||||||
return <div className="error-message">Project not found</div>;
|
return <div className="error-message">Project not found</div>;
|
||||||
}
|
}
|
||||||
@@ -159,6 +195,11 @@ function ProjectPage() {
|
|||||||
<Badge variant={project.is_public ? 'public' : 'private'}>
|
<Badge variant={project.is_public ? 'public' : 'private'}>
|
||||||
{project.is_public ? 'Public' : 'Private'}
|
{project.is_public ? 'Public' : 'Private'}
|
||||||
</Badge>
|
</Badge>
|
||||||
|
{accessLevel && (
|
||||||
|
<Badge variant={accessLevel === 'admin' ? 'success' : accessLevel === 'write' ? 'info' : 'default'}>
|
||||||
|
{isOwner ? 'Owner' : accessLevel.charAt(0).toUpperCase() + accessLevel.slice(1)}
|
||||||
|
</Badge>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
{project.description && <p className="description">{project.description}</p>}
|
{project.description && <p className="description">{project.description}</p>}
|
||||||
<div className="page-header__meta">
|
<div className="page-header__meta">
|
||||||
@@ -169,14 +210,35 @@ function ProjectPage() {
|
|||||||
<span className="meta-item">by {project.created_by}</span>
|
<span className="meta-item">by {project.created_by}</span>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
|
<div className="page-header__actions">
|
||||||
{showForm ? 'Cancel' : '+ New Package'}
|
{canAdmin && (
|
||||||
</button>
|
<button
|
||||||
|
className="btn btn-secondary"
|
||||||
|
onClick={() => navigate(`/project/${projectName}/settings`)}
|
||||||
|
title="Project Settings"
|
||||||
|
>
|
||||||
|
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round">
|
||||||
|
<circle cx="12" cy="12" r="3" />
|
||||||
|
<path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z" />
|
||||||
|
</svg>
|
||||||
|
Settings
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
{canWrite ? (
|
||||||
|
<button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
|
||||||
|
{showForm ? 'Cancel' : '+ New Package'}
|
||||||
|
</button>
|
||||||
|
) : user ? (
|
||||||
|
<span className="text-muted" title="You have read-only access to this project">
|
||||||
|
Read-only access
|
||||||
|
</span>
|
||||||
|
) : null}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{error && <div className="error-message">{error}</div>}
|
{error && <div className="error-message">{error}</div>}
|
||||||
|
|
||||||
{showForm && (
|
{showForm && canWrite && (
|
||||||
<form className="form card" onSubmit={handleCreatePackage}>
|
<form className="form card" onSubmit={handleCreatePackage}>
|
||||||
<h3>Create New Package</h3>
|
<h3>Create New Package</h3>
|
||||||
<div className="form-row">
|
<div className="form-row">
|
||||||
@@ -241,7 +303,6 @@ function ProjectPage() {
|
|||||||
</option>
|
</option>
|
||||||
))}
|
))}
|
||||||
</select>
|
</select>
|
||||||
<SortDropdown options={SORT_OPTIONS} value={sort} order={order} onChange={handleSortChange} />
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{hasActiveFilters && (
|
{hasActiveFilters && (
|
||||||
@@ -251,70 +312,78 @@ function ProjectPage() {
|
|||||||
</FilterChipGroup>
|
</FilterChipGroup>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{packages.length === 0 ? (
|
<div className="data-table--responsive">
|
||||||
<div className="empty-state">
|
<DataTable
|
||||||
{hasActiveFilters ? (
|
data={packages}
|
||||||
<p>No packages match your filters. Try adjusting your search.</p>
|
keyExtractor={(pkg) => pkg.id}
|
||||||
) : (
|
onRowClick={(pkg) => navigate(`/project/${projectName}/${pkg.name}`)}
|
||||||
<p>No packages yet. Create your first package to start uploading artifacts!</p>
|
onSort={handleSortChange}
|
||||||
)}
|
sortKey={sort}
|
||||||
</div>
|
sortOrder={order}
|
||||||
) : (
|
emptyMessage={
|
||||||
<>
|
hasActiveFilters
|
||||||
<div className="project-grid">
|
? 'No packages match your filters. Try adjusting your search.'
|
||||||
{packages.map((pkg) => (
|
: 'No packages yet. Create your first package to start uploading artifacts!'
|
||||||
<Link to={`/project/${projectName}/${pkg.name}`} key={pkg.id} className="project-card card">
|
}
|
||||||
<div className="package-card__header">
|
columns={[
|
||||||
<h3>{pkg.name}</h3>
|
{
|
||||||
<Badge variant="default">{pkg.format}</Badge>
|
key: 'name',
|
||||||
</div>
|
header: 'Name',
|
||||||
{pkg.description && <p>{pkg.description}</p>}
|
sortable: true,
|
||||||
|
render: (pkg) => <span className="cell-name">{pkg.name}</span>,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'description',
|
||||||
|
header: 'Description',
|
||||||
|
className: 'cell-description',
|
||||||
|
render: (pkg) => pkg.description || '—',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'format',
|
||||||
|
header: 'Format',
|
||||||
|
render: (pkg) => <Badge variant="default">{pkg.format}</Badge>,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'tag_count',
|
||||||
|
header: 'Tags',
|
||||||
|
render: (pkg) => pkg.tag_count ?? '—',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'artifact_count',
|
||||||
|
header: 'Artifacts',
|
||||||
|
render: (pkg) => pkg.artifact_count ?? '—',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'total_size',
|
||||||
|
header: 'Size',
|
||||||
|
render: (pkg) =>
|
||||||
|
pkg.total_size !== undefined && pkg.total_size > 0 ? formatBytes(pkg.total_size) : '—',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'latest_tag',
|
||||||
|
header: 'Latest',
|
||||||
|
render: (pkg) =>
|
||||||
|
pkg.latest_tag ? <strong style={{ color: 'var(--accent-primary)' }}>{pkg.latest_tag}</strong> : '—',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'created_at',
|
||||||
|
header: 'Created',
|
||||||
|
sortable: true,
|
||||||
|
className: 'cell-date',
|
||||||
|
render: (pkg) => new Date(pkg.created_at).toLocaleDateString(),
|
||||||
|
},
|
||||||
|
]}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
{(pkg.tag_count !== undefined || pkg.artifact_count !== undefined) && (
|
{pagination && pagination.total_pages > 1 && (
|
||||||
<div className="package-stats">
|
<Pagination
|
||||||
{pkg.tag_count !== undefined && (
|
page={pagination.page}
|
||||||
<div className="package-stats__item">
|
totalPages={pagination.total_pages}
|
||||||
<span className="package-stats__value">{pkg.tag_count}</span>
|
total={pagination.total}
|
||||||
<span className="package-stats__label">Tags</span>
|
limit={pagination.limit}
|
||||||
</div>
|
onPageChange={handlePageChange}
|
||||||
)}
|
/>
|
||||||
{pkg.artifact_count !== undefined && (
|
|
||||||
<div className="package-stats__item">
|
|
||||||
<span className="package-stats__value">{pkg.artifact_count}</span>
|
|
||||||
<span className="package-stats__label">Artifacts</span>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{pkg.total_size !== undefined && pkg.total_size > 0 && (
|
|
||||||
<div className="package-stats__item">
|
|
||||||
<span className="package-stats__value">{formatBytes(pkg.total_size)}</span>
|
|
||||||
<span className="package-stats__label">Size</span>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<div className="project-meta">
|
|
||||||
{pkg.latest_tag && (
|
|
||||||
<span className="latest-tag">
|
|
||||||
Latest: <strong>{pkg.latest_tag}</strong>
|
|
||||||
</span>
|
|
||||||
)}
|
|
||||||
<span className="date">Created {new Date(pkg.created_at).toLocaleDateString()}</span>
|
|
||||||
</div>
|
|
||||||
</Link>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{pagination && pagination.total_pages > 1 && (
|
|
||||||
<Pagination
|
|
||||||
page={pagination.page}
|
|
||||||
totalPages={pagination.total_pages}
|
|
||||||
total={pagination.total}
|
|
||||||
limit={pagination.limit}
|
|
||||||
onPageChange={handlePageChange}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
</>
|
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
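The `UnauthorizedError`, `ForbiddenError`, and `getMyProjectAccess` helpers imported above come from `frontend/src/api.ts`, whose diff is not shown in this excerpt. A minimal sketch of what they plausibly look like, inferred only from how this page uses them; the endpoint path is an assumption, and the response shape is implied by `accessResult.access_level` and `accessResult.is_owner`:

    // Sketch only: the real api.ts is not part of this compare view.
    export class UnauthorizedError extends Error {}  // 401: not authenticated
    export class ForbiddenError extends Error {}     // 403: authenticated, no access

    export interface MyProjectAccess {
      access_level: 'read' | 'write' | 'admin' | null;
      is_owner: boolean;
    }

    export async function getMyProjectAccess(projectName: string): Promise<MyProjectAccess> {
      // The URL below is hypothetical; only the thrown error classes and the
      // returned fields are evidenced by the ProjectPage diff above.
      const res = await fetch(`/api/v1/project/${projectName}/access/me`);
      if (res.status === 401) throw new UnauthorizedError('Not authenticated');
      if (res.status === 403) throw new ForbiddenError('No access to this project');
      if (!res.ok) throw new Error(`Request failed: ${res.status}`);
      return res.json();
    }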
frontend/src/pages/ProjectSettingsPage.css (new file, 476 lines)
@@ -0,0 +1,476 @@
+.project-settings-page {
+  max-width: 900px;
+  margin: 0 auto;
+}
+
+.project-settings-header {
+  margin-bottom: 32px;
+}
+
+.project-settings-header h1 {
+  font-size: 1.75rem;
+  font-weight: 600;
+  color: var(--text-primary);
+  margin-bottom: 8px;
+  letter-spacing: -0.02em;
+}
+
+.project-settings-subtitle {
+  color: var(--text-tertiary);
+  font-size: 0.9375rem;
+}
+
+.project-settings-loading {
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  gap: 12px;
+  padding: 64px 24px;
+  color: var(--text-tertiary);
+  font-size: 0.9375rem;
+}
+
+.project-settings-spinner {
+  width: 20px;
+  height: 20px;
+  border: 2px solid var(--border-secondary);
+  border-top-color: var(--accent-primary);
+  border-radius: 50%;
+  animation: project-settings-spin 0.6s linear infinite;
+}
+
+@keyframes project-settings-spin {
+  to {
+    transform: rotate(360deg);
+  }
+}
+
+.project-settings-error {
+  display: flex;
+  align-items: center;
+  gap: 10px;
+  background: var(--error-bg);
+  border: 1px solid rgba(239, 68, 68, 0.2);
+  color: var(--error);
+  padding: 12px 16px;
+  border-radius: var(--radius-md);
+  margin-bottom: 24px;
+  font-size: 0.875rem;
+}
+
+.project-settings-success {
+  display: flex;
+  align-items: center;
+  gap: 10px;
+  background: var(--success-bg);
+  border: 1px solid rgba(34, 197, 94, 0.2);
+  color: var(--success);
+  padding: 12px 16px;
+  border-radius: var(--radius-md);
+  margin-bottom: 24px;
+  font-size: 0.875rem;
+  animation: project-settings-fade-in 0.2s ease;
+}
+
+@keyframes project-settings-fade-in {
+  from {
+    opacity: 0;
+    transform: translateY(-8px);
+  }
+  to {
+    opacity: 1;
+    transform: translateY(0);
+  }
+}
+
+.project-settings-section {
+  background: var(--bg-secondary);
+  border: 1px solid var(--border-primary);
+  border-radius: var(--radius-lg);
+  padding: 24px;
+  margin-bottom: 24px;
+}
+
+.project-settings-section h2 {
+  font-size: 1.125rem;
+  font-weight: 600;
+  color: var(--text-primary);
+  margin-bottom: 20px;
+  padding-bottom: 16px;
+  border-bottom: 1px solid var(--border-primary);
+}
+
+.project-settings-form {
+  display: flex;
+  flex-direction: column;
+  gap: 16px;
+}
+
+.project-settings-form-group {
+  display: flex;
+  flex-direction: column;
+  gap: 6px;
+}
+
+.project-settings-form-group label {
+  font-size: 0.8125rem;
+  font-weight: 500;
+  color: var(--text-secondary);
+}
+
+.project-settings-form-group textarea,
+.project-settings-form-group input[type="text"] {
+  padding: 12px 14px;
+  background: var(--bg-tertiary);
+  border: 1px solid var(--border-primary);
+  border-radius: var(--radius-md);
+  font-size: 0.875rem;
+  color: var(--text-primary);
+  transition: all var(--transition-fast);
+  font-family: inherit;
+  resize: vertical;
+}
+
+.project-settings-form-group textarea {
+  min-height: 100px;
+}
+
+.project-settings-form-group textarea::placeholder,
+.project-settings-form-group input::placeholder {
+  color: var(--text-muted);
+}
+
+.project-settings-form-group textarea:hover:not(:disabled),
+.project-settings-form-group input:hover:not(:disabled) {
+  border-color: var(--border-secondary);
+  background: var(--bg-elevated);
+}
+
+.project-settings-form-group textarea:focus,
+.project-settings-form-group input:focus {
+  outline: none;
+  border-color: var(--accent-primary);
+  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
+  background: var(--bg-elevated);
+}
+
+.project-settings-form-group textarea:disabled,
+.project-settings-form-group input:disabled {
+  opacity: 0.6;
+  cursor: not-allowed;
+}
+
+.project-settings-checkbox-group {
+  flex-direction: row;
+  align-items: center;
+}
+
+.project-settings-checkbox-label {
+  display: flex;
+  align-items: center;
+  gap: 10px;
+  cursor: pointer;
+  font-size: 0.875rem;
+  font-weight: 400;
+  color: var(--text-secondary);
+  user-select: none;
+}
+
+.project-settings-checkbox-label input[type="checkbox"] {
+  position: absolute;
+  opacity: 0;
+  width: 0;
+  height: 0;
+}
+
+.project-settings-checkbox-custom {
+  width: 18px;
+  height: 18px;
+  background: var(--bg-tertiary);
+  border: 1px solid var(--border-secondary);
+  border-radius: var(--radius-sm);
+  transition: all var(--transition-fast);
+  position: relative;
+  flex-shrink: 0;
+}
+
+.project-settings-checkbox-label input[type="checkbox"]:checked + .project-settings-checkbox-custom {
+  background: var(--accent-primary);
+  border-color: var(--accent-primary);
+}
+
+.project-settings-checkbox-label input[type="checkbox"]:checked + .project-settings-checkbox-custom::after {
+  content: '';
+  position: absolute;
+  left: 5px;
+  top: 2px;
+  width: 5px;
+  height: 9px;
+  border: solid white;
+  border-width: 0 2px 2px 0;
+  transform: rotate(45deg);
+}
+
+.project-settings-checkbox-label input[type="checkbox"]:focus + .project-settings-checkbox-custom {
+  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
+}
+
+.project-settings-checkbox-label:hover .project-settings-checkbox-custom {
+  border-color: var(--accent-primary);
+}
+
+.project-settings-form-actions {
+  display: flex;
+  justify-content: flex-end;
+  gap: 12px;
+  margin-top: 8px;
+}
+
+.project-settings-save-button {
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  gap: 8px;
+  padding: 10px 18px;
+  background: var(--accent-gradient);
+  border: none;
+  border-radius: var(--radius-md);
+  font-size: 0.875rem;
+  font-weight: 500;
+  color: white;
+  cursor: pointer;
+  transition: all var(--transition-fast);
+  min-width: 120px;
+}
+
+.project-settings-save-button:hover:not(:disabled) {
+  transform: translateY(-1px);
+  box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
+}
+
+.project-settings-save-button:disabled {
+  opacity: 0.5;
+  cursor: not-allowed;
+  transform: none;
+}
+
+.project-settings-button-spinner {
+  width: 14px;
+  height: 14px;
+  border: 2px solid rgba(255, 255, 255, 0.3);
+  border-top-color: white;
+  border-radius: 50%;
+  animation: project-settings-spin 0.6s linear infinite;
+}
+
+/* Danger Zone */
+.project-settings-danger-zone {
+  background: var(--bg-secondary);
+  border: 1px solid rgba(239, 68, 68, 0.3);
+  border-radius: var(--radius-lg);
+  padding: 24px;
+  margin-bottom: 24px;
+}
+
+.project-settings-danger-zone h2 {
+  font-size: 1.125rem;
+  font-weight: 600;
+  color: var(--error);
+  margin-bottom: 20px;
+  padding-bottom: 16px;
+  border-bottom: 1px solid rgba(239, 68, 68, 0.2);
+}
+
+.project-settings-danger-item {
+  display: flex;
+  justify-content: space-between;
+  align-items: flex-start;
+  gap: 24px;
+}
+
+.project-settings-danger-info h3 {
+  font-size: 0.9375rem;
+  font-weight: 600;
+  color: var(--text-primary);
+  margin-bottom: 4px;
+}
+
+.project-settings-danger-info p {
+  color: var(--text-tertiary);
+  font-size: 0.8125rem;
+  max-width: 400px;
+}
+
+.project-settings-delete-button {
+  padding: 10px 18px;
+  background: transparent;
+  border: 1px solid rgba(239, 68, 68, 0.3);
+  border-radius: var(--radius-md);
+  font-size: 0.875rem;
+  font-weight: 500;
+  color: var(--error);
+  cursor: pointer;
+  transition: all var(--transition-fast);
+  flex-shrink: 0;
+}
+
+.project-settings-delete-button:hover:not(:disabled) {
+  background: var(--error-bg);
+  border-color: rgba(239, 68, 68, 0.5);
+}
+
+.project-settings-delete-button:disabled {
+  opacity: 0.5;
+  cursor: not-allowed;
+}
+
+/* Delete Confirmation */
+.project-settings-delete-confirm {
+  margin-top: 20px;
+  padding-top: 20px;
+  border-top: 1px solid rgba(239, 68, 68, 0.2);
+  animation: project-settings-fade-in 0.2s ease;
+}
+
+.project-settings-delete-confirm p {
+  color: var(--text-secondary);
+  font-size: 0.875rem;
+  margin-bottom: 12px;
+}
+
+.project-settings-delete-confirm strong {
+  color: var(--text-primary);
+  font-family: 'JetBrains Mono', 'Fira Code', 'SF Mono', Monaco, monospace;
+  background: var(--bg-tertiary);
+  padding: 2px 6px;
+  border-radius: var(--radius-sm);
+}
+
+.project-settings-delete-confirm-input {
+  width: 100%;
+  padding: 12px 14px;
+  background: var(--bg-tertiary);
+  border: 1px solid var(--border-primary);
+  border-radius: var(--radius-md);
+  font-size: 0.875rem;
+  color: var(--text-primary);
+  transition: all var(--transition-fast);
+  margin-bottom: 16px;
+}
+
+.project-settings-delete-confirm-input:focus {
+  outline: none;
+  border-color: var(--error);
+  box-shadow: 0 0 0 3px rgba(239, 68, 68, 0.15);
+}
+
+.project-settings-delete-confirm-input::placeholder {
+  color: var(--text-muted);
+}
+
+.project-settings-delete-confirm-actions {
+  display: flex;
+  gap: 12px;
+}
+
+.project-settings-confirm-delete-button {
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  gap: 8px;
+  padding: 10px 18px;
+  background: var(--error);
+  border: none;
+  border-radius: var(--radius-md);
+  font-size: 0.875rem;
+  font-weight: 500;
+  color: white;
+  cursor: pointer;
+  transition: all var(--transition-fast);
+  min-width: 120px;
+}
+
+.project-settings-confirm-delete-button:hover:not(:disabled) {
+  opacity: 0.9;
+}
+
+.project-settings-confirm-delete-button:disabled {
+  opacity: 0.5;
+  cursor: not-allowed;
+}
+
+.project-settings-cancel-button {
+  padding: 10px 18px;
+  background: transparent;
+  border: 1px solid var(--border-primary);
+  border-radius: var(--radius-md);
+  font-size: 0.875rem;
+  font-weight: 500;
+  color: var(--text-secondary);
+  cursor: pointer;
+  transition: all var(--transition-fast);
+}
+
+.project-settings-cancel-button:hover:not(:disabled) {
+  background: var(--bg-hover);
+  border-color: var(--border-secondary);
+  color: var(--text-primary);
+}
+
+.project-settings-cancel-button:disabled {
+  opacity: 0.5;
+  cursor: not-allowed;
+}
+
+.project-settings-delete-spinner {
+  width: 14px;
+  height: 14px;
+  border: 2px solid rgba(255, 255, 255, 0.3);
+  border-top-color: white;
+  border-radius: 50%;
+  animation: project-settings-spin 0.6s linear infinite;
+}
+
+/* Access denied */
+.project-settings-access-denied {
+  display: flex;
+  flex-direction: column;
+  align-items: center;
+  justify-content: center;
+  padding: 80px 24px;
+  text-align: center;
+}
+
+.project-settings-access-denied h2 {
+  font-size: 1.5rem;
+  font-weight: 600;
+  color: var(--text-primary);
+  margin-bottom: 12px;
+}
+
+.project-settings-access-denied p {
+  color: var(--text-tertiary);
+  font-size: 0.9375rem;
+  max-width: 400px;
+}
+
+/* Responsive */
+@media (max-width: 768px) {
+  .project-settings-danger-item {
+    flex-direction: column;
+    gap: 16px;
+  }
+
+  .project-settings-delete-button {
+    align-self: flex-start;
+  }
+
+  .project-settings-delete-confirm-actions {
+    flex-direction: column;
+  }
+
+  .project-settings-confirm-delete-button,
+  .project-settings-cancel-button {
+    width: 100%;
+  }
+}
frontend/src/pages/ProjectSettingsPage.tsx (new file, 308 lines)
@@ -0,0 +1,308 @@
+import { useState, useEffect, useCallback } from 'react';
+import { useParams, useNavigate } from 'react-router-dom';
+import { Project } from '../types';
+import {
+  getProject,
+  updateProject,
+  deleteProject,
+  getMyProjectAccess,
+  UnauthorizedError,
+  ForbiddenError,
+} from '../api';
+import { Breadcrumb } from '../components/Breadcrumb';
+import { AccessManagement } from '../components/AccessManagement';
+import { useAuth } from '../contexts/AuthContext';
+import './ProjectSettingsPage.css';
+
+function ProjectSettingsPage() {
+  const { projectName } = useParams<{ projectName: string }>();
+  const navigate = useNavigate();
+  const { user } = useAuth();
+
+  const [project, setProject] = useState<Project | null>(null);
+  const [loading, setLoading] = useState(true);
+  const [error, setError] = useState<string | null>(null);
+  const [success, setSuccess] = useState<string | null>(null);
+  const [accessDenied, setAccessDenied] = useState(false);
+  const [canAdmin, setCanAdmin] = useState(false);
+
+  // General settings form state
+  const [description, setDescription] = useState('');
+  const [isPublic, setIsPublic] = useState(false);
+  const [saving, setSaving] = useState(false);
+
+  // Delete confirmation state
+  const [showDeleteConfirm, setShowDeleteConfirm] = useState(false);
+  const [deleteConfirmText, setDeleteConfirmText] = useState('');
+  const [deleting, setDeleting] = useState(false);
+
+  const loadData = useCallback(async () => {
+    if (!projectName) return;
+
+    try {
+      setLoading(true);
+      setAccessDenied(false);
+      const [projectData, accessResult] = await Promise.all([
+        getProject(projectName),
+        getMyProjectAccess(projectName),
+      ]);
+      setProject(projectData);
+      setDescription(projectData.description || '');
+      setIsPublic(projectData.is_public);
+
+      const hasAdminAccess = accessResult.access_level === 'admin';
+      setCanAdmin(hasAdminAccess);
+
+      if (!hasAdminAccess) {
+        setAccessDenied(true);
+      }
+
+      setError(null);
+    } catch (err) {
+      if (err instanceof UnauthorizedError) {
+        navigate('/login', { state: { from: `/project/${projectName}/settings` } });
+        return;
+      }
+      if (err instanceof ForbiddenError) {
+        setAccessDenied(true);
+        setLoading(false);
+        return;
+      }
+      setError(err instanceof Error ? err.message : 'Failed to load project');
+    } finally {
+      setLoading(false);
+    }
+  }, [projectName, navigate]);
+
+  useEffect(() => {
+    loadData();
+  }, [loadData]);
+
+  const handleSaveSettings = async (e: React.FormEvent) => {
+    e.preventDefault();
+    if (!projectName) return;
+
+    try {
+      setSaving(true);
+      setError(null);
+      const updatedProject = await updateProject(projectName, {
+        description: description || undefined,
+        is_public: isPublic,
+      });
+      setProject(updatedProject);
+      setSuccess('Settings saved successfully');
+      setTimeout(() => setSuccess(null), 3000);
+    } catch (err) {
+      setError(err instanceof Error ? err.message : 'Failed to save settings');
+    } finally {
+      setSaving(false);
+    }
+  };
+
+  const handleDeleteProject = async () => {
+    if (!projectName || deleteConfirmText !== projectName) return;
+
+    try {
+      setDeleting(true);
+      setError(null);
+      await deleteProject(projectName);
+      navigate('/');
+    } catch (err) {
+      setError(err instanceof Error ? err.message : 'Failed to delete project');
+      setDeleting(false);
+    }
+  };
+
+  const handleCancelDelete = () => {
+    setShowDeleteConfirm(false);
+    setDeleteConfirmText('');
+  };
+
+  if (loading) {
+    return (
+      <div className="project-settings-page">
+        <Breadcrumb
+          items={[
+            { label: 'Projects', href: '/' },
+            { label: projectName || '', href: `/project/${projectName}` },
+            { label: 'Settings' },
+          ]}
+        />
+        <div className="project-settings-loading">
+          <div className="project-settings-spinner" />
+          <span>Loading...</span>
+        </div>
+      </div>
+    );
+  }
+
+  if (accessDenied || !canAdmin) {
+    return (
+      <div className="project-settings-page">
+        <Breadcrumb
+          items={[
+            { label: 'Projects', href: '/' },
+            { label: projectName || '', href: `/project/${projectName}` },
+            { label: 'Settings' },
+          ]}
+        />
+        <div className="project-settings-access-denied">
+          <h2>Access Denied</h2>
+          <p>You must be a project admin to access settings.</p>
+          {!user && (
+            <p style={{ marginTop: '16px' }}>
+              <a href="/login" className="btn btn-primary">
+                Sign in
+              </a>
+            </p>
+          )}
+        </div>
+      </div>
+    );
+  }
+
+  if (!project) {
+    return (
+      <div className="project-settings-page">
+        <Breadcrumb
+          items={[
+            { label: 'Projects', href: '/' },
+            { label: projectName || '' },
+          ]}
+        />
+        <div className="project-settings-error">Project not found</div>
+      </div>
+    );
+  }
+
+  return (
+    <div className="project-settings-page">
+      <Breadcrumb
+        items={[
+          { label: 'Projects', href: '/' },
+          { label: project.name, href: `/project/${project.name}` },
+          { label: 'Settings' },
+        ]}
+      />
+
+      <div className="project-settings-header">
+        <h1>Project Settings</h1>
+        <p className="project-settings-subtitle">Manage settings for {project.name}</p>
+      </div>
+
+      {error && <div className="project-settings-error">{error}</div>}
+      {success && <div className="project-settings-success">{success}</div>}
+
+      {/* General Settings Section */}
+      <div className="project-settings-section">
+        <h2>General</h2>
+        <form className="project-settings-form" onSubmit={handleSaveSettings}>
+          <div className="project-settings-form-group">
+            <label htmlFor="description">Description</label>
+            <textarea
+              id="description"
+              value={description}
+              onChange={(e) => setDescription(e.target.value)}
+              placeholder="Describe your project..."
+              disabled={saving}
+            />
+          </div>
+
+          <div className="project-settings-form-group project-settings-checkbox-group">
+            <label className="project-settings-checkbox-label">
+              <input
+                type="checkbox"
+                checked={isPublic}
+                onChange={(e) => setIsPublic(e.target.checked)}
+                disabled={saving}
+              />
+              <span className="project-settings-checkbox-custom" />
+              <span>Public project (visible to everyone)</span>
+            </label>
+          </div>
+
+          <div className="project-settings-form-actions">
+            <button type="submit" className="project-settings-save-button" disabled={saving}>
+              {saving ? (
+                <>
+                  <span className="project-settings-button-spinner" />
+                  Saving...
+                </>
+              ) : (
+                'Save Changes'
+              )}
+            </button>
+          </div>
+        </form>
+      </div>
+
+      {/* Access Management Section */}
+      <AccessManagement projectName={projectName!} />
+
+      {/* Danger Zone Section */}
+      <div className="project-settings-danger-zone">
+        <h2>Danger Zone</h2>
+        <div className="project-settings-danger-item">
+          <div className="project-settings-danger-info">
+            <h3>Delete this project</h3>
+            <p>
+              Once you delete a project, there is no going back. This will permanently delete the
+              project, all packages, artifacts, and tags.
+            </p>
+          </div>
+          {!showDeleteConfirm && (
+            <button
+              className="project-settings-delete-button"
+              onClick={() => setShowDeleteConfirm(true)}
+              disabled={deleting}
+            >
+              Delete Project
+            </button>
+          )}
+        </div>
+
+        {showDeleteConfirm && (
+          <div className="project-settings-delete-confirm">
+            <p>
+              Type <strong>{project.name}</strong> to confirm deletion:
+            </p>
+            <input
+              type="text"
+              className="project-settings-delete-confirm-input"
+              value={deleteConfirmText}
+              onChange={(e) => setDeleteConfirmText(e.target.value)}
+              placeholder={project.name}
+              disabled={deleting}
+              autoFocus
+            />
+            <div className="project-settings-delete-confirm-actions">
+              <button
+                className="project-settings-confirm-delete-button"
+                onClick={handleDeleteProject}
+                disabled={deleting || deleteConfirmText !== project.name}
+              >
+                {deleting ? (
+                  <>
+                    <span className="project-settings-delete-spinner" />
+                    Deleting...
+                  </>
+                ) : (
+                  'Yes, delete this project'
+                )}
+              </button>
+              <button
+                className="project-settings-cancel-button"
+                onClick={handleCancelDelete}
+                disabled={deleting}
+              >
+                Cancel
+              </button>
+            </div>
+          </div>
+        )}
+      </div>
+    </div>
+  );
+}
+
+export default ProjectSettingsPage;
frontend/src/test/setup.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
+import '@testing-library/jest-dom';
+
+class MockDataTransfer implements DataTransfer {
+  dropEffect: DataTransfer['dropEffect'] = 'none';
+  effectAllowed: DataTransfer['effectAllowed'] = 'all';
+  files: FileList = Object.assign([], { item: (i: number) => this.files[i] || null });
+  items: DataTransferItemList = Object.assign([], {
+    add: () => null,
+    remove: () => {},
+    clear: () => {},
+    item: () => null,
+  }) as unknown as DataTransferItemList;
+  types: readonly string[] = [];
+
+  clearData(): void {}
+  getData(): string { return ''; }
+  setData(): void {}
+  setDragImage(): void {}
+}
+
+Object.defineProperty(globalThis, 'DataTransfer', {
+  value: MockDataTransfer,
+});
+
+Object.defineProperty(window, 'matchMedia', {
+  writable: true,
+  value: (query: string) => ({
+    matches: false,
+    media: query,
+    onchange: null,
+    addListener: () => {},
+    removeListener: () => {},
+    addEventListener: () => {},
+    removeEventListener: () => {},
+    dispatchEvent: () => false,
+  }),
+});
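The shims above exist because the test DOM environment does not implement `DataTransfer` or `window.matchMedia`, so drag-and-drop and media-query code would otherwise throw. A minimal spec exercising exactly what this setup file installs (the spec file itself is illustrative, not part of this diff):

    // Illustrative spec: runs against the shims installed by src/test/setup.ts.
    import { describe, it, expect } from 'vitest';

    describe('test environment shims', () => {
      it('provides a constructible DataTransfer for drag-and-drop tests', () => {
        const dt = new DataTransfer(); // supplied by MockDataTransfer above
        expect(dt.getData()).toBe('');
        expect(dt.types).toHaveLength(0);
      });

      it('stubs window.matchMedia for components that read media queries', () => {
        const mq = window.matchMedia('(max-width: 768px)');
        expect(mq.matches).toBe(false);
        expect(mq.media).toBe('(max-width: 768px)');
      });
    });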
frontend/src/types.ts
@@ -1,3 +1,6 @@
+// Access Control types (moved to top for use in Project interface)
+export type AccessLevel = 'read' | 'write' | 'admin';
+
 export interface Project {
   id: string;
   name: string;
@@ -6,6 +9,9 @@ export interface Project {
   created_at: string;
   updated_at: string;
   created_by: string;
+  // Access level info (populated when listing projects)
+  access_level?: AccessLevel | null;
+  is_owner?: boolean;
 }
 
 export interface TagSummary {
@@ -57,6 +63,22 @@ export interface TagDetail extends Tag {
   artifact_original_name: string | null;
   artifact_created_at: string;
   artifact_format_metadata: Record<string, unknown> | null;
+  version: string | null;
+}
+
+export interface PackageVersion {
+  id: string;
+  package_id: string;
+  artifact_id: string;
+  version: string;
+  version_source: string | null;
+  created_at: string;
+  created_by: string;
+  // Enriched fields from joins
+  size?: number;
+  content_type?: string | null;
+  original_name?: string | null;
+  tags?: string[];
 }
 
 export interface ArtifactTagInfo {
@@ -116,6 +138,8 @@ export interface UploadResponse {
   project: string;
   package: string;
   tag: string | null;
+  version: string | null;
+  version_source: string | null;
 }
 
 // Global search types
@@ -225,3 +249,201 @@ export interface CrossProjectStats {
   bytes_saved_cross_project: number;
   duplicates: CrossProjectDuplicate[];
 }
+
+// Auth types
+export interface User {
+  id: string;
+  username: string;
+  display_name: string | null;
+  is_admin: boolean;
+  must_change_password?: boolean;
+}
+
+export interface LoginCredentials {
+  username: string;
+  password: string;
+}
+
+// API Key types
+export interface APIKey {
+  id: string;
+  name: string;
+  description: string | null;
+  scopes: string[];
+  created_at: string;
+  expires_at: string | null;
+  last_used: string | null;
+}
+
+export interface APIKeyCreate {
+  name: string;
+  description?: string;
+}
+
+export interface APIKeyCreateResponse {
+  id: string;
+  name: string;
+  description: string | null;
+  scopes: string[];
+  key: string;
+  created_at: string;
+  expires_at: string | null;
+}
+
+// Admin User Management types
+export interface AdminUser {
+  id: string;
+  username: string;
+  email: string | null;
+  display_name: string | null;
+  is_admin: boolean;
+  is_active: boolean;
+  created_at: string;
+  last_login: string | null;
+}
+
+export interface UserCreate {
+  username: string;
+  password: string;
+  email?: string;
+  is_admin?: boolean;
+}
+
+export interface UserUpdate {
+  email?: string;
+  is_admin?: boolean;
+  is_active?: boolean;
+}
+
+// Access Permission types
+export interface AccessPermission {
+  id: string;
+  project_id: string;
+  user_id: string;
+  level: AccessLevel;
+  created_at: string;
+  expires_at: string | null;
+}
+
+export interface AccessPermissionCreate {
+  username: string;
+  level: AccessLevel;
+  expires_at?: string;
+}
+
+export interface AccessPermissionUpdate {
+  level?: AccessLevel;
+  expires_at?: string | null;
+}
+
+// Extended Project with user's access level
+export interface ProjectWithAccess extends Project {
+  user_access_level?: AccessLevel;
+}
+
+// Current user with permissions context
+export interface CurrentUser extends User {
+  permissions?: {
+    [projectId: string]: AccessLevel;
+  };
+}
+
+// OIDC types
+export interface OIDCConfig {
+  enabled: boolean;
+  issuer_url: string;
+  client_id: string;
+  has_client_secret: boolean;
+  scopes: string[];
+  auto_create_users: boolean;
+  admin_group: string;
+}
+
+export interface OIDCConfigUpdate {
+  enabled?: boolean;
+  issuer_url?: string;
+  client_id?: string;
+  client_secret?: string;
+  scopes?: string[];
+  auto_create_users?: boolean;
+  admin_group?: string;
+}
+
+export interface OIDCStatus {
+  enabled: boolean;
+  issuer_url?: string;
+}
+
+// Dependency types
+export interface Dependency {
+  id: string;
+  artifact_id: string;
+  project: string;
+  package: string;
+  version: string | null;
+  tag: string | null;
+  created_at: string;
+}
+
+export interface ArtifactDependenciesResponse {
+  artifact_id: string;
+  dependencies: Dependency[];
+}
+
+export interface DependentInfo {
+  artifact_id: string;
+  project: string;
+  package: string;
+  version: string | null;
+  constraint_type: 'version' | 'tag';
+  constraint_value: string;
+}
+
+export interface ReverseDependenciesResponse {
+  project: string;
+  package: string;
+  dependents: DependentInfo[];
+  pagination: {
+    page: number;
+    limit: number;
+    total: number;
+    total_pages: number;
+    has_more: boolean;
+  };
+}
+
+// Dependency Resolution types
+export interface ResolvedArtifact {
+  artifact_id: string;
+  project: string;
+  package: string;
+  version: string | null;
+  tag: string | null;
+  size: number;
+  download_url: string;
+}
+
+export interface DependencyResolutionResponse {
+  requested: {
+    project: string;
+    package: string;
+    ref: string;
+  };
+  resolved: ResolvedArtifact[];
+  total_size: number;
+  artifact_count: number;
+}
+
+export interface DependencyResolutionError {
+  error: 'circular_dependency' | 'dependency_conflict' | 'not_found';
+  message: string;
+  cycle?: string[];
+  conflicts?: Array<{
+    project: string;
+    package: string;
+    requirements: Array<{
+      version: string;
+      required_by: Array<{ path: string }>;
+    }>;
+  }>;
+}
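The resolution types at the end of the file describe a success/error union coming back from the server. A hedged sketch of how a caller might narrow it; the endpoint URL below is an assumption, only the response shapes come from this types.ts diff:

    import { DependencyResolutionResponse, DependencyResolutionError } from './types';

    // Sketch: the resolve endpoint path is hypothetical.
    async function resolveDependencies(
      project: string,
      pkg: string,
      ref: string,
    ): Promise<DependencyResolutionResponse> {
      const res = await fetch(`/api/v1/project/${project}/${pkg}/resolve/${ref}`);
      const body = await res.json();
      if (!res.ok) {
        // Error payloads carry a discriminant plus optional cycle/conflict detail.
        const err = body as DependencyResolutionError;
        if (err.error === 'circular_dependency' && err.cycle) {
          throw new Error(`Dependency cycle: ${err.cycle.join(' -> ')}`);
        }
        throw new Error(err.message);
      }
      return body as DependencyResolutionResponse;
    }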
frontend/vite.config.ts
@@ -1,3 +1,4 @@
+/// <reference types="vitest" />
 import { defineConfig } from 'vite'
 import react from '@vitejs/plugin-react'
 
@@ -9,5 +10,16 @@ export default defineConfig({
       '/health': 'http://localhost:8080',
       '/project': 'http://localhost:8080',
     }
+  },
+  test: {
+    globals: true,
+    environment: 'jsdom',
+    setupFiles: './src/test/setup.ts',
+    css: true,
+    coverage: {
+      provider: 'v8',
+      reporter: ['text', 'cobertura', 'html'],
+      reportsDirectory: './coverage',
+    },
   }
 })
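With `globals: true` and `environment: 'jsdom'`, spec files can use `describe`/`it`/`expect` without imports and can touch the DOM, while the cobertura reporter produces CI-readable coverage. A minimal spec that would run under this config; the file name and the inline helper are illustrative only (ProjectPage.tsx keeps its own private `formatBytes`, whose body is not shown in this diff):

    // e.g. src/pages/formatBytes.test.ts (illustrative)
    function formatBytesLocal(bytes: number): string {
      // Re-implemented inline for the example; not the project's actual helper.
      const units = ['B', 'KB', 'MB', 'GB'];
      let i = 0;
      let n = bytes;
      while (n >= 1024 && i < units.length - 1) {
        n /= 1024;
        i++;
      }
      return `${n.toFixed(1)} ${units[i]}`;
    }

    describe('formatBytesLocal', () => {
      it('scales into larger units', () => {
        expect(formatBytesLocal(2048)).toBe('2.0 KB');
      });

      it('can touch the DOM because environment is jsdom', () => {
        document.body.innerHTML = '<span id="size"></span>';
        document.getElementById('size')!.textContent = formatBytesLocal(1);
        expect(document.getElementById('size')!.textContent).toBe('1.0 B');
      });
    });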
Chart.yaml
@@ -17,13 +17,13 @@ maintainers:
 dependencies:
   - name: postgresql
     version: "15.5.x"
-    repository: https://charts.bitnami.com/bitnami
+    repository: oci://deps.global.bsf.tools/registry-1.docker.io-helmoci/bitnamicharts
     condition: postgresql.enabled
   - name: minio
     version: "14.x.x"
-    repository: https://charts.bitnami.com/bitnami
+    repository: oci://deps.global.bsf.tools/registry-1.docker.io-helmoci/bitnamicharts
     condition: minio.enabled
   - name: redis
     version: "19.x.x"
-    repository: https://charts.bitnami.com/bitnami
+    repository: oci://deps.global.bsf.tools/registry-1.docker.io-helmoci/bitnamicharts
     condition: redis.enabled
|
|||||||
@@ -7,6 +7,7 @@ Expand the name of the chart.
|
|||||||
|
|
||||||
{{/*
|
{{/*
|
||||||
Create a default fully qualified app name.
|
Create a default fully qualified app name.
|
||||||
|
Appends "-server" to distinguish from subcharts (minio, postgresql, redis).
|
||||||
*/}}
|
*/}}
|
||||||
{{- define "orchard.fullname" -}}
|
{{- define "orchard.fullname" -}}
|
||||||
{{- if .Values.fullnameOverride }}
|
{{- if .Values.fullnameOverride }}
|
||||||
@@ -14,9 +15,9 @@ Create a default fully qualified app name.
|
|||||||
{{- else }}
|
{{- else }}
|
||||||
{{- $name := default .Chart.Name .Values.nameOverride }}
|
{{- $name := default .Chart.Name .Values.nameOverride }}
|
||||||
{{- if contains $name .Release.Name }}
|
{{- if contains $name .Release.Name }}
|
||||||
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
|
{{- printf "%s-server" .Release.Name | trunc 63 | trimSuffix "-" }}
|
||||||
{{- else }}
|
{{- else }}
|
||||||
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
|
{{- printf "%s-%s-server" .Release.Name $name | trunc 63 | trimSuffix "-" }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
@@ -76,6 +77,8 @@ PostgreSQL secret name
|
|||||||
{{- define "orchard.postgresql.secretName" -}}
|
{{- define "orchard.postgresql.secretName" -}}
|
||||||
{{- if .Values.orchard.database.existingSecret }}
|
{{- if .Values.orchard.database.existingSecret }}
|
||||||
{{- .Values.orchard.database.existingSecret }}
|
{{- .Values.orchard.database.existingSecret }}
|
||||||
|
{{- else if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
|
||||||
|
{{- printf "%s-db-credentials" (include "orchard.fullname" .) }}
|
||||||
{{- else if .Values.postgresql.enabled }}
|
{{- else if .Values.postgresql.enabled }}
|
||||||
{{- printf "%s-postgresql" .Release.Name }}
|
{{- printf "%s-postgresql" .Release.Name }}
|
||||||
{{- else }}
|
{{- else }}
|
||||||
@@ -89,6 +92,8 @@ PostgreSQL password key in secret
 {{- define "orchard.postgresql.passwordKey" -}}
 {{- if .Values.orchard.database.existingSecret -}}
 {{- .Values.orchard.database.existingSecretPasswordKey -}}
+{{- else if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled -}}
+password
 {{- else if .Values.postgresql.enabled -}}
 password
 {{- else -}}
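Both helpers now resolve in the same precedence order: an explicit existingSecret wins, then the Secrets Manager-synced secret, then the postgresql subchart secret. A render against the stage values should show the middle branch winning — a sketch assuming chart dependencies are built locally:

    # the DB user env should reference <fullname>-db-credentials, not <release>-postgresql
    helm template orchard helm/orchard -f helm/orchard/values-stage.yaml \
      | grep -B2 -A4 'ORCHARD_DATABASE_USER'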
@@ -37,12 +37,26 @@ spec:
           image: "{{ .Values.initContainer.image.repository }}:{{ .Values.initContainer.image.tag }}"
           imagePullPolicy: {{ .Values.initContainer.image.pullPolicy }}
           command: ['sh', '-c', 'until nc -z {{ include "orchard.postgresql.host" . }} 5432; do echo waiting for database; sleep 2; done;']
+          resources:
+            limits:
+              cpu: 50m
+              memory: 32Mi
+            requests:
+              cpu: 10m
+              memory: 32Mi
       {{- end }}
       {{- if .Values.minio.enabled }}
         - name: wait-for-minio
           image: "{{ .Values.initContainer.image.repository }}:{{ .Values.initContainer.image.tag }}"
           imagePullPolicy: {{ .Values.initContainer.image.pullPolicy }}
           command: ['sh', '-c', 'until nc -z {{ .Release.Name }}-minio 9000; do echo waiting for minio; sleep 2; done;']
+          resources:
+            limits:
+              cpu: 50m
+              memory: 32Mi
+            requests:
+              cpu: 10m
+              memory: 32Mi
       {{- end }}
       containers:
         - name: {{ .Chart.Name }}
@@ -55,6 +69,8 @@ spec:
               containerPort: {{ .Values.orchard.server.port }}
               protocol: TCP
           env:
+            - name: ORCHARD_ENV
+              value: {{ .Values.orchard.env | default "development" | quote }}
             - name: ORCHARD_SERVER_HOST
               value: {{ .Values.orchard.server.host | quote }}
             - name: ORCHARD_SERVER_PORT
@@ -63,8 +79,16 @@ spec:
               value: {{ include "orchard.postgresql.host" . | quote }}
             - name: ORCHARD_DATABASE_PORT
               value: {{ .Values.orchard.database.port | quote }}
+            {{- if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
+            - name: ORCHARD_DATABASE_USER
+              valueFrom:
+                secretKeyRef:
+                  name: {{ include "orchard.postgresql.secretName" . }}
+                  key: username
+            {{- else }}
             - name: ORCHARD_DATABASE_USER
               value: {{ .Values.orchard.database.user | default .Values.postgresql.auth.username | quote }}
+            {{- end }}
             - name: ORCHARD_DATABASE_DBNAME
               value: {{ .Values.orchard.database.dbname | default .Values.postgresql.auth.database | quote }}
             - name: ORCHARD_DATABASE_SSLMODE
@@ -82,6 +106,7 @@ spec:
               value: {{ .Values.orchard.s3.bucket | quote }}
             - name: ORCHARD_S3_USE_PATH_STYLE
               value: {{ .Values.orchard.s3.usePathStyle | quote }}
+            {{- if or .Values.minio.enabled .Values.orchard.s3.existingSecret .Values.orchard.s3.accessKeyId }}
             - name: ORCHARD_S3_ACCESS_KEY_ID
               valueFrom:
                 secretKeyRef:
@@ -92,16 +117,38 @@ spec:
                 secretKeyRef:
                   name: {{ include "orchard.minio.secretName" . }}
                   key: {{ if .Values.minio.enabled }}root-password{{ else }}{{ .Values.orchard.s3.existingSecretSecretKeyKey }}{{ end }}
+            {{- end }}
             - name: ORCHARD_DOWNLOAD_MODE
               value: {{ .Values.orchard.download.mode | quote }}
             - name: ORCHARD_PRESIGNED_URL_EXPIRY
               value: {{ .Values.orchard.download.presignedUrlExpiry | quote }}
+            {{- if .Values.orchard.rateLimit }}
+            {{- if .Values.orchard.rateLimit.login }}
+            - name: ORCHARD_LOGIN_RATE_LIMIT
+              value: {{ .Values.orchard.rateLimit.login | quote }}
+            {{- end }}
+            {{- end }}
+          {{- if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
+          volumeMounts:
+            - name: db-secrets
+              mountPath: /mnt/secrets-store
+              readOnly: true
+          {{- end }}
           livenessProbe:
             {{- toYaml .Values.livenessProbe | nindent 12 }}
           readinessProbe:
             {{- toYaml .Values.readinessProbe | nindent 12 }}
           resources:
             {{- toYaml .Values.resources | nindent 12 }}
+      {{- if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
+      volumes:
+        - name: db-secrets
+          csi:
+            driver: secrets-store.csi.k8s.io
+            readOnly: true
+            volumeAttributes:
+              secretProviderClass: {{ include "orchard.fullname" . }}-db-secret
+      {{- end }}
       {{- with .Values.nodeSelector }}
       nodeSelector:
         {{- toYaml . | nindent 8 }}
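With secretsManager enabled, the pod gains a CSI volume whose mount forces the Secrets Store driver to fetch the RDS credentials, and the env vars then read from the synced Secret. A render check — a sketch assuming the Secrets Store CSI driver and its AWS provider are installed in the target cluster:

    # confirm the CSI volume and its SecretProviderClass reference render as expected
    helm template orchard helm/orchard -f helm/orchard/values-stage.yaml \
      | grep -A6 'secrets-store.csi.k8s.io'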
helm/orchard/templates/secret-provider-class.yaml (new file, +27)
@@ -0,0 +1,27 @@
+{{- if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
+apiVersion: secrets-store.csi.x-k8s.io/v1
+kind: SecretProviderClass
+metadata:
+  name: {{ include "orchard.fullname" . }}-db-secret
+  labels:
+    {{- include "orchard.labels" . | nindent 4 }}
+spec:
+  provider: aws
+  parameters:
+    objects: |
+      - objectName: "{{ .Values.orchard.database.secretsManager.secretArn }}"
+        objectType: "secretsmanager"
+        jmesPath:
+          - path: username
+            objectAlias: db-username
+          - path: password
+            objectAlias: db-password
+  secretObjects:
+    - secretName: {{ include "orchard.fullname" . }}-db-credentials
+      type: Opaque
+      data:
+        - objectName: db-username
+          key: username
+        - objectName: db-password
+          key: password
+{{- end }}
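Note that the Secrets Store driver only materializes the secretObjects Secret once a pod actually mounts the CSI volume, which is why the deployment adds the /mnt/secrets-store mount even though the app itself reads env vars. A post-deploy verification — a sketch; the namespace and the orchard-server fullname are assumptions based on a release named orchard:

    # after the first pod starts, the synced secret should exist with both keys
    kubectl -n orchard get secret orchard-server-db-credentials \
      -o jsonpath='{.data}' | head -c 200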
helm/orchard/values-dev.yaml (new file, +220)
@@ -0,0 +1,220 @@
+# Values for feature branch deployments (ephemeral dev environments)
+# Hostnames are overridden by CI pipeline via --set flags
+replicaCount: 1
+
+image:
+  repository: registry.global.bsf.tools/esv/bsf/bsf-integration/orchard/orchard-mvp
+  pullPolicy: Always
+  tag: "latest" # Overridden by CI
+
+imagePullSecrets:
+  - name: orchard-pull-secret
+
+initContainer:
+  image:
+    repository: containers.global.bsf.tools/busybox
+    tag: "1.36"
+    pullPolicy: IfNotPresent
+
+serviceAccount:
+  create: true
+  automount: true
+  annotations: {}
+  name: "" # Auto-generated based on release name
+
+podAnnotations: {}
+podLabels: {}
+
+podSecurityContext: {}
+
+securityContext:
+  readOnlyRootFilesystem: false
+  runAsNonRoot: true
+  runAsUser: 1000
+
+service:
+  type: ClusterIP
+  port: 8080
+
+# Ingress - hostnames overridden by CI pipeline
+ingress:
+  enabled: true
+  className: "nginx"
+  annotations:
+    cert-manager.io/cluster-issuer: "letsencrypt"
+    nginx.ingress.kubernetes.io/proxy-body-size: "0" # Disable body size limit for uploads
+  hosts:
+    - host: orchard-dev.common.global.bsf.tools # Overridden by CI
+      paths:
+        - path: /
+          pathType: Prefix
+  tls:
+    - secretName: orchard-tls # Overridden by CI
+      hosts:
+        - orchard-dev.common.global.bsf.tools # Overridden by CI
+
+# Lighter resources for ephemeral environments
+# Note: memory requests must equal limits per cluster policy
+resources:
+  limits:
+    cpu: 250m
+    memory: 256Mi
+  requests:
+    cpu: 100m
+    memory: 256Mi
+
+livenessProbe:
+  httpGet:
+    path: /health
+    port: http
+  initialDelaySeconds: 10
+  periodSeconds: 10
+
+readinessProbe:
+  httpGet:
+    path: /health
+    port: http
+  initialDelaySeconds: 5
+  periodSeconds: 5
+
+autoscaling:
+  enabled: false
+
+nodeSelector: {}
+tolerations: []
+affinity: {}
+
+orchard:
+  env: "development" # Allows seed data for testing
+  server:
+    host: "0.0.0.0"
+    port: 8080
+
+  database:
+    host: ""
+    port: 5432
+    user: orchard
+    password: ""
+    dbname: orchard
+    sslmode: disable
+    existingSecret: ""
+    existingSecretPasswordKey: "password"
+
+  s3:
+    endpoint: ""
+    region: us-east-1
+    bucket: orchard-artifacts
+    accessKeyId: ""
+    secretAccessKey: ""
+    usePathStyle: true
+    existingSecret: ""
+    existingSecretAccessKeyKey: "access-key-id"
+    existingSecretSecretKeyKey: "secret-access-key"
+
+  download:
+    mode: "presigned"
+    presignedUrlExpiry: 3600
+
+  # Relaxed rate limits for dev/feature environments (allows integration tests to run)
+  rateLimit:
+    login: "1000/minute" # Default is 5/minute, relaxed for CI integration tests
+
+# PostgreSQL - ephemeral, no persistence
+postgresql:
+  enabled: true
+  image:
+    registry: containers.global.bsf.tools
+    repository: bitnami/postgresql
+    tag: "15"
+    pullPolicy: IfNotPresent
+  auth:
+    username: orchard
+    password: orchard-password
+    database: orchard
+  primary:
+    persistence:
+      enabled: false
+    # Resources with memory requests = limits per cluster policy
+    resourcesPreset: "none"
+    resources:
+      limits:
+        cpu: 250m
+        memory: 256Mi
+      requests:
+        cpu: 100m
+        memory: 256Mi
+  # Volume permissions init container
+  volumePermissions:
+    resourcesPreset: "none"
+    resources:
+      limits:
+        cpu: 50m
+        memory: 64Mi
+      requests:
+        cpu: 10m
+        memory: 64Mi
+
+# MinIO - ephemeral, no persistence
+minio:
+  enabled: true
+  image:
+    registry: containers.global.bsf.tools
+    repository: bitnami/minio
+    tag: "latest"
+    pullPolicy: IfNotPresent
+  auth:
+    rootUser: minioadmin
+    rootPassword: minioadmin
+  defaultBuckets: "orchard-artifacts"
+  persistence:
+    enabled: false
+  # Resources with memory requests = limits per cluster policy
+  resourcesPreset: "none" # Disable preset to use explicit resources
+  resources:
+    limits:
+      cpu: 250m
+      memory: 256Mi
+    requests:
+      cpu: 100m
+      memory: 256Mi
+  # Init container resources
+  defaultInitContainers:
+    volumePermissions:
+      resourcesPreset: "none"
+      resources:
+        limits:
+          cpu: 50m
+          memory: 64Mi
+        requests:
+          cpu: 10m
+          memory: 64Mi
+  # Provisioning job resources
+  provisioning:
+    resources:
+      limits:
+        cpu: 100m
+        memory: 128Mi
+      requests:
+        cpu: 50m
+        memory: 128Mi
+
+# MinIO ingress - hostname overridden by CI
+minioIngress:
+  enabled: true
+  className: "nginx"
+  annotations:
+    cert-manager.io/cluster-issuer: "letsencrypt"
+    nginx.ingress.kubernetes.io/proxy-body-size: "0"
+  host: "minio-dev.common.global.bsf.tools" # Overridden by CI
+  tls:
+    enabled: true
+    secretName: minio-tls # Overridden by CI
+
+redis:
+  enabled: false
+
+waitForDatabase: true
+
+global:
+  security:
+    allowInsecureImages: true
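Per the comments, hostname and image tag are the only per-branch knobs; everything else in values-dev.yaml stays fixed. A CI-shaped deploy might look roughly like the sketch below — the exact --set paths the pipeline uses are not shown in this diff, and the slug/namespace handling here is a placeholder:

    SLUG="${CI_COMMIT_REF_SLUG:-feature-x}"   # placeholder branch slug
    helm upgrade --install "orchard-${SLUG}" helm/orchard \
      -f helm/orchard/values-dev.yaml \
      --set image.tag="${CI_COMMIT_SHORT_SHA:-latest}" \
      --set "ingress.hosts[0].host=orchard-${SLUG}.common.global.bsf.tools" \
      --set "ingress.tls[0].hosts[0]=orchard-${SLUG}.common.global.bsf.tools" \
      --set "minioIngress.host=minio-${SLUG}.common.global.bsf.tools"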
@@ -1,58 +0,0 @@
-# Values for using external PostgreSQL and S3 storage
-# Use this when you have existing infrastructure
-
-replicaCount: 2
-
-image:
-  pullPolicy: Always
-
-# Disable subcharts - use external services
-postgresql:
-  enabled: false
-
-minio:
-  enabled: false
-
-redis:
-  enabled: false
-
-orchard:
-  database:
-    host: "your-postgres-host.example.com"
-    port: 5432
-    user: orchard
-    dbname: orchard
-    sslmode: require
-    # Option 1: Use existing secret
-    existingSecret: "my-postgres-secret"
-    existingSecretPasswordKey: "password"
-    # Option 2: Set password directly (not recommended)
-    # password: "your-password"
-
-  s3:
-    endpoint: "https://s3.amazonaws.com"
-    region: us-east-1
-    bucket: orchard-artifacts
-    usePathStyle: false
-    # Option 1: Use existing secret
-    existingSecret: "my-s3-secret"
-    existingSecretAccessKeyKey: "access-key-id"
-    existingSecretSecretKeyKey: "secret-access-key"
-    # Option 2: Set credentials directly (not recommended)
-    # accessKeyId: "your-access-key"
-    # secretAccessKey: "your-secret-key"
-
-ingress:
-  enabled: true
-  className: nginx
-  annotations:
-    cert-manager.io/cluster-issuer: letsencrypt-prod
-  hosts:
-    - host: orchard.example.com
-      paths:
-        - path: /
-          pathType: Prefix
-  tls:
-    - secretName: orchard-tls
-      hosts:
-        - orchard.example.com
helm/orchard/values-prod.yaml (new file, +136)
@@ -0,0 +1,136 @@
+# Production values for orchard
+# TODO: Replace subcharts with managed services (RDS, S3) when ready
+replicaCount: 1
+
+image:
+  repository: registry.global.bsf.tools/esv/bsf/bsf-integration/orchard/orchard-mvp
+  pullPolicy: Always
+  tag: "latest" # Overridden by CI
+
+imagePullSecrets:
+  - name: orchard-pull-secret
+
+initContainer:
+  image:
+    repository: containers.global.bsf.tools/busybox
+    tag: "1.36"
+    pullPolicy: IfNotPresent
+
+serviceAccount:
+  create: true
+  automount: true
+  annotations:
+    eks.amazonaws.com/role-arn: arn:aws-us-gov:iam::052673043337:role/service-orchard-prd
+  name: "orchard"
+
+podAnnotations: {}
+podLabels: {}
+
+podSecurityContext: {}
+
+securityContext:
+  readOnlyRootFilesystem: false
+  runAsNonRoot: true
+  runAsUser: 1000
+
+service:
+  type: ClusterIP
+  port: 8080
+
+ingress:
+  enabled: true
+  className: "nginx"
+  annotations:
+    cert-manager.io/cluster-issuer: "letsencrypt"
+    nginx.ingress.kubernetes.io/proxy-body-size: "0" # Disable body size limit for uploads
+  hosts:
+    - host: orchard.common.global.bsf.tools
+      paths:
+        - path: /
+          pathType: Prefix
+  tls:
+    - secretName: orchard-prod-tls
+      hosts:
+        - orchard.common.global.bsf.tools
+
+# Production resources - same as stage for MVP, increase as needed
+resources:
+  limits:
+    cpu: 500m
+    memory: 512Mi
+  requests:
+    cpu: 500m
+    memory: 512Mi
+
+livenessProbe:
+  httpGet:
+    path: /health
+    port: http
+  initialDelaySeconds: 10
+  periodSeconds: 10
+
+readinessProbe:
+  httpGet:
+    path: /health
+    port: http
+  initialDelaySeconds: 5
+  periodSeconds: 5
+
+autoscaling:
+  enabled: false
+  minReplicas: 1
+  maxReplicas: 10
+  targetCPUUtilizationPercentage: 80
+  targetMemoryUtilizationPercentage: 80
+
+nodeSelector: {}
+tolerations: []
+affinity: {}
+
+orchard:
+  env: "production" # Disables seed data
+  server:
+    host: "0.0.0.0"
+    port: 8080
+
+  # Database configuration - uses AWS Secrets Manager via CSI driver
+  database:
+    host: "orchard-prd.cluster-cvw3jzjkozoc.us-gov-west-1.rds.amazonaws.com"
+    port: 5432
+    dbname: orchard_prod
+    sslmode: require
+    secretsManager:
+      enabled: true
+      secretArn: "arn:aws-us-gov:secretsmanager:us-gov-west-1:052673043337:secret:rds!cluster-0afc8af5-f644-4284-92fb-2ed545490f92-3v9uXV"
+
+  # S3 configuration - uses IRSA for credentials
+  s3:
+    endpoint: "" # Empty = use AWS default
+    region: us-gov-west-1
+    bucket: orchard-artifacts-prod
+    usePathStyle: false # Real S3 uses virtual-hosted style
+
+  download:
+    mode: "presigned"
+    presignedUrlExpiry: 3600
+
+# PostgreSQL subchart - disabled in prod, using RDS
+postgresql:
+  enabled: false
+
+# MinIO subchart - disabled in prod, using real S3
+minio:
+  enabled: false
+
+# MinIO ingress - disabled in prod, using real S3
+minioIngress:
+  enabled: false
+
+redis:
+  enabled: false
+
+waitForDatabase: true
+
+global:
+  security:
+    allowInsecureImages: true
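Prod keeps replicaCount at 1 with autoscaling off for the MVP, and pins identity to IRSA plus Secrets Manager, so the only deploy-time input left is the image tag. A tag-pipeline deploy could be as small as the sketch below (release name and namespace assumed; CI_COMMIT_TAG is the standard GitLab variable):

    helm upgrade --install orchard helm/orchard \
      -f helm/orchard/values-prod.yaml \
      --set image.tag="${CI_COMMIT_TAG}"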
@@ -1,80 +0,0 @@
-# Production values for orchard
-replicaCount: 3
-
-image:
-  pullPolicy: Always
-
-resources:
-  limits:
-    cpu: 1000m
-    memory: 1Gi
-  requests:
-    cpu: 250m
-    memory: 256Mi
-
-autoscaling:
-  enabled: true
-  minReplicas: 3
-  maxReplicas: 20
-  targetCPUUtilizationPercentage: 70
-  targetMemoryUtilizationPercentage: 80
-
-ingress:
-  enabled: true
-  className: nginx
-  annotations:
-    cert-manager.io/cluster-issuer: letsencrypt-prod
-    nginx.ingress.kubernetes.io/proxy-body-size: "500m"
-  hosts:
-    - host: orchard.example.com
-      paths:
-        - path: /
-          pathType: Prefix
-  tls:
-    - secretName: orchard-tls
-      hosts:
-        - orchard.example.com
-
-orchard:
-  database:
-    sslmode: require
-
-postgresql:
-  enabled: true
-  auth:
-    password: "" # Set via --set or external secret
-  primary:
-    persistence:
-      enabled: true
-      size: 100Gi
-    resources:
-      limits:
-        cpu: 2000m
-        memory: 4Gi
-      requests:
-        cpu: 500m
-        memory: 1Gi
-
-minio:
-  enabled: true
-  auth:
-    rootPassword: "" # Set via --set or external secret
-  persistence:
-    enabled: true
-    size: 500Gi
-  resources:
-    limits:
-      cpu: 2000m
-      memory: 4Gi
-    requests:
-      cpu: 500m
-      memory: 1Gi
-
-redis:
-  enabled: true
-  auth:
-    password: "" # Set via --set or external secret
-  master:
-    persistence:
-      enabled: true
-      size: 10Gi
helm/orchard/values-stage.yaml (new file, +158)
@@ -0,0 +1,158 @@
+# Default values for orchard
+replicaCount: 1
+
+image:
+  repository: registry.global.bsf.tools/esv/bsf/bsf-integration/orchard/orchard-mvp
+  pullPolicy: Always
+  tag: "latest" # Defaults to chart appVersion
+
+imagePullSecrets:
+  - name: orchard-pull-secret
+
+# Init container image (used for wait-for-db, wait-for-minio)
+initContainer:
+  image:
+    repository: containers.global.bsf.tools/busybox
+    tag: "1.36"
+    pullPolicy: IfNotPresent
+
+serviceAccount:
+  create: true
+  automount: true
+  annotations:
+    eks.amazonaws.com/role-arn: arn:aws-us-gov:iam::052673043337:role/service-orchard-stage
+  name: "orchard"
+
+podAnnotations: {}
+podLabels: {}
+
+podSecurityContext: {}
+
+securityContext:
+  readOnlyRootFilesystem: false # Python needs to write __pycache__
+  runAsNonRoot: true
+  runAsUser: 1000
+
+service:
+  type: ClusterIP
+  port: 8080
+
+ingress:
+  enabled: true
+  className: "nginx"
+  annotations:
+    cert-manager.io/cluster-issuer: "letsencrypt"
+    nginx.ingress.kubernetes.io/proxy-body-size: "0" # Disable body size limit for uploads
+  hosts:
+    - host: orchard-stage.common.global.bsf.tools
+      paths:
+        - path: /
+          pathType: Prefix
+  tls:
+    - secretName: orchard-tls
+      hosts:
+        - orchard-stage.common.global.bsf.tools
+
+resources:
+  limits:
+    cpu: 500m
+    memory: 512Mi
+  requests:
+    cpu: 500m
+    memory: 512Mi
+
+livenessProbe:
+  httpGet:
+    path: /health
+    port: http
+  initialDelaySeconds: 10
+  periodSeconds: 10
+
+readinessProbe:
+  httpGet:
+    path: /health
+    port: http
+  initialDelaySeconds: 5
+  periodSeconds: 5
+
+autoscaling:
+  enabled: false
+  minReplicas: 1
+  maxReplicas: 10
+  targetCPUUtilizationPercentage: 80
+  targetMemoryUtilizationPercentage: 80
+
+nodeSelector: {}
+
+tolerations: []
+
+affinity: {}
+
+# Orchard server configuration
+orchard:
+  env: "development" # Allows seed data for testing
+  server:
+    host: "0.0.0.0"
+    port: 8080
+
+  # Database configuration - uses AWS Secrets Manager via CSI driver
+  database:
+    host: "orchard-stage.cluster-cvw3jzjkozoc.us-gov-west-1.rds.amazonaws.com"
+    port: 5432
+    dbname: postgres
+    sslmode: require
+    secretsManager:
+      enabled: true
+      secretArn: "arn:aws-us-gov:secretsmanager:us-gov-west-1:052673043337:secret:rds!cluster-a573672b-1a38-4665-a654-1b7df37b5297-IaeFQL"
+
+  # S3 configuration - uses IRSA for credentials
+  s3:
+    endpoint: "" # Empty = use AWS default
+    region: us-gov-west-1
+    bucket: orchard-artifacts-stage
+    usePathStyle: false # Real S3 uses virtual-hosted style
+
+  # Download configuration
+  download:
+    mode: "presigned" # presigned, redirect, or proxy
+    presignedUrlExpiry: 3600 # Presigned URL expiry in seconds
+
+  # Relaxed rate limits for stage (allows CI integration tests to run)
+  rateLimit:
+    login: "1000/minute" # Default is 5/minute, relaxed for CI integration tests
+
+# PostgreSQL subchart - disabled in stage, using RDS
+postgresql:
+  enabled: false
+
+# MinIO subchart - disabled in stage, using real S3
+minio:
+  enabled: false
+
+# MinIO ingress - disabled in stage, using real S3
+minioIngress:
+  enabled: false
+
+# Redis subchart configuration (for future caching)
+redis:
+  enabled: false
+  image:
+    registry: containers.global.bsf.tools
+    repository: bitnami/redis
+    tag: "7.2"
+    pullPolicy: IfNotPresent
+  auth:
+    enabled: true
+    password: redis-password
+  architecture: standalone
+  master:
+    persistence:
+      enabled: true
+      size: 1Gi
+
+# Wait for database before starting (SQLAlchemy creates tables on startup)
+waitForDatabase: true
+
+global:
+  security:
+    allowInsecureImages: true
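The probes and the ingress host above combine into a one-line post-deploy smoke test — a sketch assuming /health returns 200 when the app is up, as the probe definitions imply:

    # cheap smoke test against the stage ingress; non-2xx exits non-zero
    curl -fsS https://orchard-stage.common.global.bsf.tools/health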
kics.config (new file, +51)
@@ -0,0 +1,51 @@
+# KICS Configuration File
+# https://docs.kics.io/latest/configuration-file/
+
+# Exclude specific queries that are acceptable for this project
+exclude-queries:
+  # Shared Volumes Between Containers (INFO)
+  # Reason: Database services (postgres, minio, redis) require persistent volumes
+  # for data storage. This is expected and necessary behavior.
+  - 8c978947-0ff6-485c-b0c2-0bfca6026466
+
+  # Passwords And Secrets - Generic Password (HIGH)
+  # Reason: These are LOCAL DEVELOPMENT configs only. Production deployments
+  # use Kubernetes secrets injected at runtime. The passwords in docker-compose
+  # and helm values files are placeholder/dev values, not real secrets.
+  - a88baa34-e2ad-44ea-ad6f-8cac87bc7c71
+
+  # Healthcheck Not Set (MEDIUM)
+  # Reason: minio-init is an init container that runs once and exits.
+  # Healthchecks are not applicable to containers that are designed to exit.
+  - 698ed579-b239-4f8f-a388-baa4bcb13ef8
+
+  # Apt Get Install Pin Version Not Defined (MEDIUM)
+  # Reason: We intentionally don't pin curl version to get security updates.
+  # This is documented with hadolint ignore comment in Dockerfile.
+  - 965a08d7-ef86-4f14-8792-4a3b2098937e
+
+  # Container Capabilities Unrestricted (MEDIUM)
+  # Reason: LOCAL DEVELOPMENT ONLY. Stock postgres, redis, minio images require
+  # certain capabilities (SETUID, SETGID, CHOWN) to switch users at startup.
+  # cap_drop: ALL breaks these containers. Production Kubernetes deployments
+  # use securityContext with appropriate settings.
+  - ce76b7d0-9e77-464d-b86f-c5c48e03e22d
+
+  # No New Privileges Not Set (HIGH)
+  # Reason: LOCAL DEVELOPMENT ONLY. Stock postgres, redis, minio images need
+  # to escalate privileges during initialization (e.g., postgres switches from
+  # root to postgres user). no-new-privileges:true prevents this and causes
+  # containers to crash. Production Kubernetes deployments handle this via
+  # securityContext.
+  - 27fcc7d6-c49b-46e0-98f1-6c082a6a2750
+
+  # Security Opt Not Set (MEDIUM)
+  # Reason: LOCAL DEVELOPMENT ONLY. Related to above - security_opt is not set
+  # on database services because no-new-privileges breaks them.
+  - 610e266e-6c12-4bca-9925-1ed0cd29742b
+
+  # Container Traffic Not Bound To Host Interface (MEDIUM)
+  # Reason: LOCAL DEVELOPMENT ONLY. The orchard-server port is bound to 0.0.0.0
+  # to allow testing from other machines on the local network. This is only in
+  # docker-compose.local.yml, not production deployments.
+  - 451d79dc-0588-476a-ad03-3c7f0320abb3
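Checking these exclusions before pushing avoids round-trips through the pipeline. A local run with the same config — a sketch; the image tag is not pinned here and the CI job may pass additional flags:

    # scan the repo locally with the project's KICS config
    docker run --rm -v "$PWD:/src" checkmarx/kics:latest \
      scan -p /src --config /src/kics.config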
migrations/006_auth_tables.sql (new file, +86)
@@ -0,0 +1,86 @@
+-- Authentication Tables Migration
+-- Adds users table and updates api_keys with foreign key
+
+-- Users table
+CREATE TABLE IF NOT EXISTS users (
+    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+    username VARCHAR(255) UNIQUE NOT NULL,
+    password_hash VARCHAR(255),
+    email VARCHAR(255),
+    is_admin BOOLEAN DEFAULT FALSE,
+    is_active BOOLEAN DEFAULT TRUE,
+    must_change_password BOOLEAN DEFAULT FALSE,
+    oidc_subject VARCHAR(255),
+    oidc_issuer VARCHAR(512),
+    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
+    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
+    last_login TIMESTAMP WITH TIME ZONE
+);
+
+CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);
+CREATE INDEX IF NOT EXISTS idx_users_email ON users(email) WHERE email IS NOT NULL;
+CREATE INDEX IF NOT EXISTS idx_users_oidc_subject ON users(oidc_subject) WHERE oidc_subject IS NOT NULL;
+CREATE INDEX IF NOT EXISTS idx_users_is_active ON users(is_active) WHERE is_active = TRUE;
+
+-- Sessions table for web login
+CREATE TABLE IF NOT EXISTS sessions (
+    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+    token_hash VARCHAR(64) NOT NULL UNIQUE,
+    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
+    expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
+    last_accessed TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
+    user_agent VARCHAR(512),
+    ip_address VARCHAR(45)
+);
+
+CREATE INDEX IF NOT EXISTS idx_sessions_user_id ON sessions(user_id);
+CREATE INDEX IF NOT EXISTS idx_sessions_token_hash ON sessions(token_hash);
+CREATE INDEX IF NOT EXISTS idx_sessions_expires_at ON sessions(expires_at);
+
+-- Auth settings for OIDC configuration (future use)
+CREATE TABLE IF NOT EXISTS auth_settings (
+    key VARCHAR(255) PRIMARY KEY,
+    value TEXT NOT NULL,
+    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
+);
+
+-- Add user_id foreign key to api_keys table
+-- First add the column (nullable initially)
+ALTER TABLE api_keys ADD COLUMN IF NOT EXISTS owner_id UUID REFERENCES users(id) ON DELETE CASCADE;
+
+-- Add scopes column for API key permissions
+ALTER TABLE api_keys ADD COLUMN IF NOT EXISTS scopes TEXT[] DEFAULT ARRAY['read', 'write'];
+
+-- Add description column
+ALTER TABLE api_keys ADD COLUMN IF NOT EXISTS description TEXT;
+
+-- Create index for owner_id
+CREATE INDEX IF NOT EXISTS idx_api_keys_owner_id ON api_keys(owner_id) WHERE owner_id IS NOT NULL;
+
+-- Trigger to update users.updated_at
+CREATE TRIGGER users_updated_at_trigger
+    BEFORE UPDATE ON users
+    FOR EACH ROW
+    EXECUTE FUNCTION update_updated_at_column();
+
+-- Trigger to update sessions.last_accessed on access
+CREATE OR REPLACE FUNCTION update_session_last_accessed()
+RETURNS TRIGGER AS $$
+BEGIN
+    NEW.last_accessed = NOW();
+    RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Function to clean up expired sessions (can be called periodically)
+CREATE OR REPLACE FUNCTION cleanup_expired_sessions()
+RETURNS INTEGER AS $$
+DECLARE
+    deleted_count INTEGER;
+BEGIN
+    DELETE FROM sessions WHERE expires_at < NOW();
+    GET DIAGNOSTICS deleted_count = ROW_COUNT;
+    RETURN deleted_count;
+END;
+$$ LANGUAGE plpgsql;
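Two things worth noting here: the migration assumes update_updated_at_column() already exists from an earlier migration, and update_session_last_accessed() is defined but no trigger is attached to it in this file. cleanup_expired_sessions() also has no scheduler; it is meant to be invoked periodically. A manual smoke test, a sketch assuming a DATABASE_URL connection string:

    # apply the migration, then exercise the cleanup helper
    psql "$DATABASE_URL" -f migrations/006_auth_tables.sql
    psql "$DATABASE_URL" -c 'SELECT cleanup_expired_sessions();'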
migrations/007_package_versions.sql (new file, +67)
@@ -0,0 +1,67 @@
+-- Migration: Add package_versions table for separate version tracking
+-- This separates immutable versions from mutable tags
+
+-- Create package_versions table
+CREATE TABLE IF NOT EXISTS package_versions (
+    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+    package_id UUID NOT NULL REFERENCES packages(id) ON DELETE CASCADE,
+    artifact_id VARCHAR(64) NOT NULL REFERENCES artifacts(id),
+    version VARCHAR(255) NOT NULL,
+    version_source VARCHAR(50), -- 'explicit', 'filename', 'metadata', 'migrated_from_tag'
+    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
+    created_by VARCHAR(255) NOT NULL,
+    UNIQUE(package_id, version),
+    UNIQUE(package_id, artifact_id)
+);
+
+-- Indexes for common queries
+CREATE INDEX IF NOT EXISTS idx_package_versions_package_id ON package_versions(package_id);
+CREATE INDEX IF NOT EXISTS idx_package_versions_artifact_id ON package_versions(artifact_id);
+CREATE INDEX IF NOT EXISTS idx_package_versions_package_version ON package_versions(package_id, version);
+
+-- Trigger functions for ref_count management (same pattern as tags)
+CREATE OR REPLACE FUNCTION increment_version_ref_count()
+RETURNS TRIGGER AS $$
+BEGIN
+    UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
+    RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+CREATE OR REPLACE FUNCTION decrement_version_ref_count()
+RETURNS TRIGGER AS $$
+BEGIN
+    UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
+    RETURN OLD;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Create triggers
+DROP TRIGGER IF EXISTS package_versions_ref_count_insert ON package_versions;
+CREATE TRIGGER package_versions_ref_count_insert
+    AFTER INSERT ON package_versions
+    FOR EACH ROW
+    EXECUTE FUNCTION increment_version_ref_count();
+
+DROP TRIGGER IF EXISTS package_versions_ref_count_delete ON package_versions;
+CREATE TRIGGER package_versions_ref_count_delete
+    AFTER DELETE ON package_versions
+    FOR EACH ROW
+    EXECUTE FUNCTION decrement_version_ref_count();
+
+-- Data migration: populate from existing semver-pattern tags
+-- This extracts versions from tags that look like version numbers
+-- Tags like "v1.0.0", "1.2.3", "2.0.0-beta" will be migrated
+-- Tags like "latest", "stable", "dev" will NOT be migrated
+INSERT INTO package_versions (package_id, artifact_id, version, version_source, created_by, created_at)
+SELECT
+    t.package_id,
+    t.artifact_id,
+    -- Strip leading 'v' if present
+    CASE WHEN t.name LIKE 'v%' THEN substring(t.name from 2) ELSE t.name END,
+    'migrated_from_tag',
+    t.created_by,
+    t.created_at
+FROM tags t
+WHERE t.name ~ '^v?[0-9]+\.[0-9]+(\.[0-9]+)?([-.][a-zA-Z0-9]+)?$'
+ON CONFLICT (package_id, version) DO NOTHING;
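The backfill regex is the gatekeeper here: v1.0.0, 1.2.3, and 2.0.0-beta migrate; latest, stable, and dev do not. It is worth previewing against real data before applying, since any tag the regex unexpectedly matches becomes an immutable version row. A dry-run sketch, assuming a DATABASE_URL connection string:

    # preview which tags the backfill will treat as versions
    psql "$DATABASE_URL" -c \
      "SELECT name FROM tags WHERE name ~ '^v?[0-9]+\.[0-9]+(\.[0-9]+)?([-.][a-zA-Z0-9]+)?$';"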
migrations/008_artifact_dependencies.sql (new file, +48)
@@ -0,0 +1,48 @@
+-- Migration 008: Artifact Dependencies
+-- Adds support for declaring dependencies between artifacts
+-- Part of Package Dependency Management feature (#76)
+
+-- Create artifact_dependencies table
+CREATE TABLE IF NOT EXISTS artifact_dependencies (
+    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+    artifact_id VARCHAR(64) NOT NULL REFERENCES artifacts(id) ON DELETE CASCADE,
+    dependency_project VARCHAR(255) NOT NULL,
+    dependency_package VARCHAR(255) NOT NULL,
+    version_constraint VARCHAR(255),
+    tag_constraint VARCHAR(255),
+    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+
+    -- Exactly one of version_constraint or tag_constraint must be set
+    CONSTRAINT check_constraint_type CHECK (
+        (version_constraint IS NOT NULL AND tag_constraint IS NULL) OR
+        (version_constraint IS NULL AND tag_constraint IS NOT NULL)
+    ),
+
+    -- Each artifact can only have one dependency on a specific project/package
+    CONSTRAINT unique_artifact_dependency UNIQUE (artifact_id, dependency_project, dependency_package)
+);
+
+-- Index for fast lookups by artifact_id (get all deps for an artifact)
+CREATE INDEX IF NOT EXISTS idx_artifact_dependencies_artifact_id
+    ON artifact_dependencies(artifact_id);
+
+-- Index for reverse dependency lookups (find what depends on a package)
+CREATE INDEX IF NOT EXISTS idx_artifact_dependencies_target
+    ON artifact_dependencies(dependency_project, dependency_package);
+
+-- Index for finding dependencies with specific version constraints
+CREATE INDEX IF NOT EXISTS idx_artifact_dependencies_version
+    ON artifact_dependencies(dependency_project, dependency_package, version_constraint)
+    WHERE version_constraint IS NOT NULL;
+
+-- Index for finding dependencies with specific tag constraints
+CREATE INDEX IF NOT EXISTS idx_artifact_dependencies_tag
+    ON artifact_dependencies(dependency_project, dependency_package, tag_constraint)
+    WHERE tag_constraint IS NOT NULL;
+
+COMMENT ON TABLE artifact_dependencies IS 'Stores dependencies declared by artifacts on other packages';
+COMMENT ON COLUMN artifact_dependencies.artifact_id IS 'The artifact that declares this dependency';
+COMMENT ON COLUMN artifact_dependencies.dependency_project IS 'Project name of the dependency';
+COMMENT ON COLUMN artifact_dependencies.dependency_package IS 'Package name of the dependency';
+COMMENT ON COLUMN artifact_dependencies.version_constraint IS 'Exact version required (mutually exclusive with tag_constraint)';
+COMMENT ON COLUMN artifact_dependencies.tag_constraint IS 'Tag name required (mutually exclusive with version_constraint)';
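check_constraint_type enforces an exclusive-or between version and tag pins. A quick negative test — a sketch with dummy ids; the CHECK is evaluated on the tuple itself, so it should fire before the artifact FK is consulted:

    # setting both constraints must be rejected
    psql "$DATABASE_URL" -c \
      "INSERT INTO artifact_dependencies
         (artifact_id, dependency_project, dependency_package, version_constraint, tag_constraint)
       VALUES ('abc123', 'proj', 'pkg', '1.0.0', 'latest');"
    # expect: new row ... violates check constraint "check_constraint_type"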