Compare commits
35 commits: f7ffc1c877 ... fix/teams-
| Author | SHA1 | Date |
|---|---|---|
| | 832e4b27a8 | |
| | 0a69910e8b | |
| | 576791d19e | |
| | a5796f5437 | |
| | 284945ba33 | |
| | fe07638485 | |
| | 7120cf64f1 | |
| | 718e6e7193 | |
| | abba90ebac | |
| | 6c8b922818 | |
| | 99d28cf9c6 | |
| | b5579f1643 | |
| | fafa03e4ce | |
| | d4b2da3232 | |
| | 7b04bbdf05 | |
| | 3a807870a3 | |
| | f966fde7df | |
| | 133d9cbfd6 | |
| | 276b4f2743 | |
| | 67ac6bb3f8 | |
| | b0bb3ed569 | |
| | 1ac75e1017 | |
| | 693613f111 | |
| | 9da4ae8c0d | |
| | 7ffdc64364 | |
| | 6abc0c88b0 | |
| | e96dc5cde8 | |
| | cba5bac383 | |
| | 535280a783 | |
| | c9026e1950 | |
| | fedbd95cf4 | |
| | 255e25d66d | |
| | 427d2fec70 | |
| | 199821b34d | |
| | 584acd1e90 | |
.env.example (new file, 7 lines)
@@ -0,0 +1,7 @@
# Orchard Local Development Environment
# Copy this file to .env and customize as needed
# Note: .env is gitignored and will not be committed

# Admin account password (required for local development)
# This sets the initial admin password when the database is first created
ORCHARD_ADMIN_PASSWORD=changeme123
.gitlab-ci.yml (325 lines changed)
@@ -8,6 +8,18 @@ variables:
  PROSPER_VERSION: v0.64.1
  # Use internal PyPI proxy instead of public internet
  PIP_INDEX_URL: https://deps.global.bsf.tools/artifactory/api/pypi/pypi.org/simple
  # Environment URLs (used by deploy and test jobs)
  STAGE_URL: https://orchard-stage.common.global.bsf.tools
  PROD_URL: https://orchard.common.global.bsf.tools
  # Stage environment AWS resources (used by reset job)
  STAGE_RDS_HOST: orchard-stage.cluster-cvw3jzjkozoc.us-gov-west-1.rds.amazonaws.com
  STAGE_RDS_DBNAME: postgres
  STAGE_SECRET_ARN: "arn:aws-us-gov:secretsmanager:us-gov-west-1:052673043337:secret:rds!cluster-a573672b-1a38-4665-a654-1b7df37b5297-IaeFQL"
  STAGE_AUTH_SECRET_ARN: "arn:aws-us-gov:secretsmanager:us-gov-west-1:052673043337:secret:orchard-stage-creds-SMqvQx"
  STAGE_S3_BUCKET: orchard-artifacts-stage
  AWS_REGION: us-gov-west-1
  # Shared pip cache directory
  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.pip-cache"

# Prevent duplicate pipelines for MRs
workflow:
@@ -25,15 +37,113 @@ stages:
  - analyze
  - deploy

# Override Prosper template jobs to exclude tag pipelines
# Tags only run deploy_prod and smoke_test_prod (image already built on main)
build_image:
  rules:
    - if: '$CI_COMMIT_TAG'
      when: never
    - when: on_success

test_image:
  rules:
    - if: '$CI_COMMIT_TAG'
      when: never
    - when: on_success

hadolint:
  rules:
    - if: '$CI_COMMIT_TAG'
      when: never
    - when: on_success

kics:
  variables:
    KICS_CONFIG: kics.config
  rules:
    - if: '$CI_COMMIT_TAG'
      when: never
    - when: on_success

# Post-deployment integration tests template
secrets:
  rules:
    - if: '$CI_COMMIT_TAG'
      when: never
    - when: on_success

app_deps_scan:
  rules:
    - if: '$CI_COMMIT_TAG'
      when: never
    - when: on_success

cve_scan:
  rules:
    - if: '$CI_COMMIT_TAG'
      when: never
    - when: on_success

app_sbom_analysis:
  rules:
    - if: '$CI_COMMIT_TAG'
      when: never
    - when: on_success

cve_sbom_analysis:
  rules:
    - if: '$CI_COMMIT_TAG'
      when: never
    - when: on_success

# Override release job to wait for stage integration tests before creating tag
# This ensures the tag (which triggers prod deploy) is only created after stage passes
release:
  needs: [integration_test_stage, changelog]

# Full integration test suite template (for feature/stage deployments)
# Runs the complete pytest integration test suite against the deployed environment
.integration_test_template: &integration_test_template
  stage: deploy  # Runs in deploy stage, but after deployment due to 'needs'
  image: deps.global.bsf.tools/docker/python:3.12-slim
-  timeout: 10m
+  timeout: 20m  # Full suite takes longer than smoke tests
  interruptible: true  # Cancel if new pipeline starts
  retry: 1  # Retry once on failure (network flakiness)
  cache:
    key: pip-$CI_COMMIT_REF_SLUG
    paths:
      - .pip-cache/
    policy: pull-push
  before_script:
    - pip install --index-url "$PIP_INDEX_URL" -r backend/requirements.txt
    - pip install --index-url "$PIP_INDEX_URL" pytest pytest-asyncio httpx
  script:
    - cd backend
    # Debug: Print environment variables for test configuration
    - echo "ORCHARD_TEST_URL=$ORCHARD_TEST_URL"
    - echo "ORCHARD_TEST_PASSWORD is set to '${ORCHARD_TEST_PASSWORD:-NOT SET}'"
    # Run full integration test suite, excluding:
    # - large/slow tests
    # - requires_direct_s3 tests (can't access MinIO from outside K8s cluster)
    # ORCHARD_TEST_URL tells the tests which server to connect to
    # Note: Auth tests work because dev/stage deployments have relaxed rate limits
    - |
      python -m pytest tests/integration/ -v \
        --junitxml=integration-report.xml \
        -m "not large and not slow and not requires_direct_s3" \
        --tb=short
  artifacts:
    when: always
    expire_in: 1 week
    paths:
      - backend/integration-report.xml
    reports:
      junit: backend/integration-report.xml

# Lightweight smoke test template (for production - no test data creation)
.smoke_test_template: &smoke_test_template
  stage: deploy
  image: deps.global.bsf.tools/docker/python:3.12-slim
  timeout: 5m
  before_script:
    - pip install --index-url "$PIP_INDEX_URL" httpx
  script:
@@ -43,12 +153,12 @@ kics:
      import os
      import sys

-     BASE_URL = os.environ.get("BASE_URL")
+     BASE_URL = os.environ.get("ORCHARD_TEST_URL")
      if not BASE_URL:
-         print("ERROR: BASE_URL not set")
+         print("ERROR: ORCHARD_TEST_URL not set")
          sys.exit(1)

-     print(f"Running integration tests against {BASE_URL}")
+     print(f"Running smoke tests against {BASE_URL}")
      client = httpx.Client(base_url=BASE_URL, timeout=30.0)

      errors = []
@@ -86,38 +196,131 @@ kics:
          print(f" FAIL: {e}")
          sys.exit(1)
      else:
-         print("SUCCESS: All integration tests passed!")
+         print("SUCCESS: All smoke tests passed!")
          sys.exit(0)
      PYTEST_SCRIPT

- # Integration tests for stage deployment
- integration_test_stage:
-   <<: *integration_test_template
-   needs: [deploy_stage]
-   variables:
-     BASE_URL: https://orchard-stage.common.global.bsf.tools

# Reset stage template - runs from CI runner, uses CI variable for auth
# Calls the /api/v1/admin/factory-reset endpoint which handles DB and S3 cleanup
.reset_stage_template: &reset_stage_template
  stage: deploy
  image: deps.global.bsf.tools/docker/python:3.12-slim
  timeout: 5m
  retry: 1
  before_script:
    - pip install --index-url "$PIP_INDEX_URL" httpx
  script:
    - |
      python - <<'RESET_SCRIPT'
      import httpx
      import sys
      import os
      import time

      BASE_URL = os.environ.get("STAGE_URL", "")
      ADMIN_USER = "admin"
      ADMIN_PASS = os.environ.get("STAGE_ADMIN_PASSWORD", "")
      MAX_RETRIES = 3
      RETRY_DELAY = 5

      if not BASE_URL:
          print("ERROR: STAGE_URL not set")
          sys.exit(1)

      if not ADMIN_PASS:
          print("ERROR: STAGE_ADMIN_PASSWORD not set")
          sys.exit(1)

      print(f"=== Resetting stage environment at {BASE_URL} ===")

      def do_reset():
          with httpx.Client(base_url=BASE_URL, timeout=120.0) as client:
              print("Logging in as admin...")
              login_response = client.post(
                  "/api/v1/auth/login",
                  json={"username": ADMIN_USER, "password": ADMIN_PASS},
              )
              if login_response.status_code != 200:
                  raise Exception(f"Login failed: {login_response.status_code} - {login_response.text}")
              print("Login successful")

              print("Calling factory reset endpoint...")
              reset_response = client.post(
                  "/api/v1/admin/factory-reset",
                  headers={"X-Confirm-Reset": "yes-delete-all-data"},
              )

              if reset_response.status_code == 200:
                  result = reset_response.json()
                  print("Factory reset successful!")
                  print(f" Database tables dropped: {result['results']['database_tables_dropped']}")
                  print(f" S3 objects deleted: {result['results']['s3_objects_deleted']}")
                  print(f" Database reinitialized: {result['results']['database_reinitialized']}")
                  print(f" Seeded: {result['results']['seeded']}")
                  return True
              else:
                  raise Exception(f"Factory reset failed: {reset_response.status_code} - {reset_response.text}")

      for attempt in range(1, MAX_RETRIES + 1):
          try:
              print(f"Attempt {attempt}/{MAX_RETRIES}")
              if do_reset():
                  sys.exit(0)
          except Exception as e:
              print(f"Attempt {attempt} failed: {e}")
              if attempt < MAX_RETRIES:
                  print(f"Retrying in {RETRY_DELAY} seconds...")
                  time.sleep(RETRY_DELAY)
              else:
                  print("All retry attempts failed")
                  sys.exit(1)
      RESET_SCRIPT
  rules:
    - if: '$CI_COMMIT_BRANCH == "main"'
      when: on_success

- # Integration tests for feature deployment
# Reset stage BEFORE integration tests (ensure known state)
reset_stage_pre:
  <<: *reset_stage_template
  needs: [deploy_stage]

# Integration tests for stage deployment
# Uses CI variable STAGE_ADMIN_PASSWORD (set in GitLab CI/CD settings)
integration_test_stage:
  <<: *integration_test_template
  needs: [reset_stage_pre]
  variables:
    ORCHARD_TEST_URL: $STAGE_URL
    ORCHARD_TEST_PASSWORD: $STAGE_ADMIN_PASSWORD
  rules:
    - if: '$CI_COMMIT_BRANCH == "main"'
      when: on_success

# Reset stage AFTER integration tests (clean slate for next run)
reset_stage:
  <<: *reset_stage_template
  needs: [integration_test_stage]
  allow_failure: true  # Don't fail pipeline if reset has issues

# Integration tests for feature deployment (full suite)
# Uses DEV_ADMIN_PASSWORD CI variable (same as deploy_feature)
integration_test_feature:
  <<: *integration_test_template
  needs: [deploy_feature]
  variables:
-   BASE_URL: https://orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools
+   ORCHARD_TEST_URL: https://orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools
+   ORCHARD_TEST_PASSWORD: $DEV_ADMIN_PASSWORD
  rules:
    - if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
      when: on_success

- # Run Python backend tests
- python_tests:
+ # Run Python backend unit tests
+ python_unit_tests:
  stage: test
  needs: []  # Run in parallel with build
  image: deps.global.bsf.tools/docker/python:3.12-slim
  timeout: 15m
  variables:
    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.pip-cache"
  interruptible: true  # Cancel if new pipeline starts
  cache:
    key: pip-$CI_COMMIT_REF_SLUG
    paths:
@@ -128,7 +331,7 @@ python_tests:
    - pip install --index-url "$PIP_INDEX_URL" pytest pytest-asyncio pytest-cov httpx
  script:
    - cd backend
-   # Only run unit tests - integration tests require Docker Compose services
+   # Run unit tests (integration tests run post-deployment against live environment)
    - python -m pytest tests/unit/ -v --cov=app --cov-report=term --cov-report=xml:coverage.xml --cov-report=html:coverage_html --junitxml=pytest-report.xml
  artifacts:
    when: always
@@ -143,6 +346,10 @@ python_tests:
      coverage_format: cobertura
      path: backend/coverage.xml
  coverage: '/TOTAL.*\s+(\d+%)/'
+ rules:
+   - if: '$CI_COMMIT_TAG'
+     when: never
+   - when: on_success

# Run frontend tests
frontend_tests:
@@ -150,6 +357,7 @@ frontend_tests:
  needs: []  # Run in parallel with build
  image: deps.global.bsf.tools/docker/node:20-alpine
  timeout: 15m
+ interruptible: true  # Cancel if new pipeline starts
  cache:
    key: npm-$CI_COMMIT_REF_SLUG
    paths:
@@ -171,11 +379,15 @@ frontend_tests:
      coverage_format: cobertura
      path: frontend/coverage/cobertura-coverage.xml
  coverage: '/All files[^|]*\|[^|]*\s+([\d\.]+)/'
+ rules:
+   - if: '$CI_COMMIT_TAG'
+     when: never
+   - when: on_success

# Shared deploy configuration
.deploy_template: &deploy_template
  stage: deploy
- needs: [build_image, test_image, kics, hadolint, python_tests, frontend_tests, secrets, app_deps_scan, cve_scan, cve_sbom_analysis, app_sbom_analysis]
+ needs: [build_image, test_image, kics, hadolint, python_unit_tests, frontend_tests, secrets, app_deps_scan, cve_scan, cve_sbom_analysis, app_sbom_analysis]
  image: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12

.helm_setup: &helm_setup
@@ -184,47 +396,21 @@ frontend_tests:
  # OCI-based charts from internal registry - no repo add needed
  - helm dependency update

# Simplified deployment verification - just health check
# Full API/frontend checks are done by integration tests post-deployment
.verify_deployment: &verify_deployment |
  echo "=== Waiting for health endpoint (certs may take a few minutes) ==="
  for i in $(seq 1 30); do
    if curl -sf --max-time 10 "$BASE_URL/health" > /dev/null 2>&1; then
      echo "Health check passed!"
-     break
+     echo "Deployment URL: $BASE_URL"
+     exit 0
    fi
    echo "Attempt $i/30 - waiting 10s..."
    sleep 10
  done

- # Verify health endpoint
- echo ""
- echo "=== Health Check ==="
- curl -sf "$BASE_URL/health" || { echo "Health check failed"; exit 1; }
- echo ""

- # Verify API is responding
- echo ""
- echo "=== API Check (GET /api/v1/projects) ==="
- HTTP_CODE=$(curl -sf -o /dev/null -w "%{http_code}" "$BASE_URL/api/v1/projects")
- if [ "$HTTP_CODE" = "200" ]; then
-   echo "API responding: HTTP $HTTP_CODE"
- else
-   echo "API check failed: HTTP $HTTP_CODE"
-   exit 1
- fi

- # Verify frontend is served
- echo ""
- echo "=== Frontend Check ==="
- if curl -sf "$BASE_URL/" | grep -q "</html>"; then
-   echo "Frontend is being served"
- else
-   echo "Frontend check failed"
-   exit 1
- fi

- echo ""
- echo "=== All checks passed! ==="
- echo "Deployment URL: $BASE_URL"
+ echo "Health check failed after 30 attempts"
+ exit 1

# Deploy to stage (main branch)
deploy_stage:
@@ -232,7 +418,7 @@ deploy_stage:
  variables:
    NAMESPACE: orch-stage-namespace
    VALUES_FILE: helm/orchard/values-stage.yaml
-   BASE_URL: https://orchard-stage.common.global.bsf.tools
+   BASE_URL: $STAGE_URL
  before_script:
    - kubectl config use-context esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
    - *helm_setup
@@ -244,6 +430,7 @@ deploy_stage:
      --namespace $NAMESPACE \
      -f $VALUES_FILE \
      --set image.tag=git.linux-amd64-$CI_COMMIT_SHA \
+     --set orchard.auth.adminPassword=$STAGE_ADMIN_PASSWORD \
      --wait \
      --atomic \
      --timeout 10m
@@ -251,7 +438,7 @@ deploy_stage:
  - *verify_deployment
  environment:
    name: stage
-   url: https://orchard-stage.common.global.bsf.tools
+   url: $STAGE_URL
    kubernetes:
      agent: esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
  rules:
@@ -275,6 +462,7 @@ deploy_feature:
      --namespace $NAMESPACE \
      -f $VALUES_FILE \
      --set image.tag=git.linux-amd64-$CI_COMMIT_SHA \
+     --set orchard.auth.adminPassword=$DEV_ADMIN_PASSWORD \
      --set ingress.hosts[0].host=orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools \
      --set ingress.tls[0].hosts[0]=orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools \
      --set ingress.tls[0].secretName=orchard-$CI_COMMIT_REF_SLUG-tls \
@@ -297,10 +485,12 @@ deploy_feature:
  - if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
    when: on_success

- # Cleanup feature branch deployment
+ # Cleanup feature branch deployment (standalone - doesn't need deploy dependencies)
cleanup_feature:
- <<: *deploy_template
  stage: deploy
+ needs: []
  image: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12
+ timeout: 5m
  variables:
    NAMESPACE: orch-dev-namespace
    GIT_STRATEGY: none  # No source needed, branch may be deleted
@@ -319,17 +509,16 @@ cleanup_feature:
      when: manual
      allow_failure: true

- # Deploy to production (version tags only, manual approval required)
+ # Deploy to production (version tags only)
deploy_prod:
  stage: deploy
- # For tag pipelines, most jobs don't run (trusting main was tested)
- # We only need build_image to have the image available
- needs: [build_image]
+ # For tag pipelines, no other jobs run - image was already built when commit was on main
+ needs: []
  image: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12
  variables:
-   NAMESPACE: orch-prod-namespace
+   NAMESPACE: orch-namespace
    VALUES_FILE: helm/orchard/values-prod.yaml
-   BASE_URL: https://orchard.common.global.bsf.tools
+   BASE_URL: $PROD_URL
  before_script:
    - kubectl config use-context esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-prod
    - *helm_setup
@@ -348,21 +537,21 @@ deploy_prod:
  - *verify_deployment
  environment:
    name: production
-   url: https://orchard.common.global.bsf.tools
+   url: $PROD_URL
    kubernetes:
      agent: esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-prod
  rules:
    # Only run on semantic version tags (v1.0.0, v1.2.3, etc.)
    - if: '$CI_COMMIT_TAG =~ /^v\d+\.\d+\.\d+$/'
-     when: manual  # Require manual approval for prod
+     when: on_success
      allow_failure: false

- # Integration tests for production deployment
- integration_test_prod:
-   <<: *integration_test_template
+ # Smoke tests for production deployment (read-only, no test data creation)
+ smoke_test_prod:
+   <<: *smoke_test_template
  needs: [deploy_prod]
  variables:
-   BASE_URL: https://orchard.common.global.bsf.tools
+   ORCHARD_TEST_URL: $PROD_URL
  rules:
    - if: '$CI_COMMIT_TAG =~ /^v\d+\.\d+\.\d+$/'
      when: on_success
.gitleaks.toml (new file, 8 lines)
@@ -0,0 +1,8 @@
# Gitleaks configuration
# https://github.com/gitleaks/gitleaks#configuration

[allowlist]
# Test files that contain variable names matching secret patterns (e.g., s3_key)
paths = [
    '''backend/tests/.*\.py''',
]
.gitleaksignore
@@ -4,6 +4,7 @@
# False positive: s3_key is an attribute name in test assertions, not a secret
+ # These are historical commits - files have since been deleted or updated with inline comments
7e68baed0886a3c928644cd01aa3b39f92d4f976:backend/tests/test_duplicate_detection.py:generic-api-key:154
81458b3bcb5ace97109ba4c16f4afa6e55b1b8bd:backend/tests/test_duplicate_detection.py:generic-api-key:154
2f1891cf0126ec0e7d4c789d872a2cb2dd3a1745:backend/tests/unit/test_storage.py:generic-api-key:381
10d36947948de796f0bacea3827f4531529c405d:backend/tests/unit/test_storage.py:generic-api-key:381
bccbc71c13570d14b8b26a11335c45f102fe3072:backend/tests/unit/test_storage.py:generic-api-key:381
@@ -11,3 +12,8 @@ bccbc71c13570d14b8b26a11335c45f102fe3072:backend/tests/unit/test_storage.py:gene
90bb2a3a393d2361dc3136ee8d761debb0726d8a:backend/tests/unit/test_storage.py:generic-api-key:381
37666e41a72d2a4f34447c0d1a8728e1d7271d24:backend/tests/unit/test_storage.py:generic-api-key:381
0cc4f253621a9601c5193f6ae1e7ae33f0e7fc9b:backend/tests/unit/test_storage.py:generic-api-key:381
35fda65d381acc5ab59bc592ee3013f75906c197:backend/tests/unit/test_storage.py:generic-api-key:381
08dce6cbb836b687002751fed4159bfc2da61f8b:backend/tests/unit/test_storage.py:generic-api-key:381
617bcbe89cff9a009d77e4f1f1864efed1820e63:backend/tests/unit/test_storage.py:generic-api-key:381
1cbd33544388e0fe6db752fa8886fab33cf9ce7c:backend/tests/unit/test_storage.py:generic-api-key:381
7cfad28f678f5a5b8b927d694a17b9ba446b7138:backend/tests/unit/test_storage.py:generic-api-key:381
CHANGELOG.md (185 lines changed)
@@ -7,6 +7,160 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]
### Added
- Added team-based multi-tenancy for organizing projects and collaboration (#88-#104)
  - Teams serve as organizational containers for projects
  - Users can belong to multiple teams with different roles (owner, admin, member)
  - Projects can optionally belong to a team
- Added database schema for teams (#88):
  - `teams` table with id, name, slug, description, settings, timestamps
  - `team_memberships` table mapping users to teams with roles
  - `team_id` column on projects table for team association
  - Migrations `009_teams.sql` and `009b_migrate_projects.sql`
- Added Team and TeamMembership ORM models with relationships (#89)
- Added TeamAuthorizationService for team-level access control (#90):
  - Team owner/admin gets admin access to all team projects
  - Team member gets read access to team projects (upgradeable by explicit permission)
  - Role hierarchy: owner > admin > member
- Added Team API endpoints (#92, #93, #94, #95), with a usage sketch after this list:
  - `GET /api/v1/teams` - List teams user belongs to (paginated)
  - `POST /api/v1/teams` - Create team (creator becomes owner)
  - `GET /api/v1/teams/{slug}` - Get team details
  - `PUT /api/v1/teams/{slug}` - Update team (requires admin)
  - `DELETE /api/v1/teams/{slug}` - Delete team (requires owner)
  - `GET /api/v1/teams/{slug}/members` - List team members
  - `POST /api/v1/teams/{slug}/members` - Add member (requires admin)
  - `PUT /api/v1/teams/{slug}/members/{username}` - Update member role
  - `DELETE /api/v1/teams/{slug}/members/{username}` - Remove member
  - `GET /api/v1/teams/{slug}/projects` - List team projects (paginated)
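A minimal client-side sketch of the endpoints above, in the same httpx style the CI scripts in this changeset use. The base URL, token, and request payload fields (`name`, `slug`, `username`, `role`) are illustrative assumptions, not confirmed by this diff:

```python
# Hypothetical walkthrough of the team endpoints; BASE_URL and TOKEN are placeholders.
import httpx

BASE_URL = "http://localhost:8000"  # assumption: local dev server
TOKEN = "<session-token>"           # assumption: obtained via /api/v1/auth/login

client = httpx.Client(base_url=BASE_URL, headers={"Authorization": f"Bearer {TOKEN}"})

# Create a team; per the changelog, the creator becomes its owner.
team = client.post("/api/v1/teams", json={"name": "Platform", "slug": "platform"}).json()

# Add a member (requires team admin); field names are assumptions.
client.post("/api/v1/teams/platform/members", json={"username": "alice", "role": "member"})

# List the team's projects (paginated).
projects = client.get("/api/v1/teams/platform/projects").json()
```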
- Updated project creation to support optional team assignment (#95)
- Updated project responses to include team info (team_id, team_slug, team_name)
- Added frontend team management (#97-#104):
  - TeamContext provider for managing current team selection
  - TeamSelector dropdown component (persists selection in localStorage)
  - Teams list page at `/teams`
  - Team dashboard page at `/teams/{slug}` with inline project creation
  - Team settings page at `/teams/{slug}/settings`
  - Team members page at `/teams/{slug}/members`
  - Teams navigation link in header (authenticated users only)
- Updated seed data to create a "Demo Team" and assign all seed projects to it
- Added TypeScript types and API client functions for teams
- Access management now shows team-based permissions alongside explicit permissions
  - Team-based access displayed as read-only with "Source" column indicating origin
  - Team members with access show team slug and role
- Added integration tests for team CRUD, membership, and project operations
- Redesigned teams portal with modern card-based layout
  - Card grid view with team avatar, name, slug, role badge, and stats
  - Stats bar showing total teams, owned teams, and total projects
  - Search functionality for filtering teams (appears when >3 teams)
  - Empty states for no teams and no search results
- Added user autocomplete component for team member invitations
  - `GET /api/v1/users/search` endpoint for username prefix search
  - Dropdown shows matching users as you type
  - Keyboard navigation support (arrow keys, enter, escape)
  - Debounced search to reduce API calls
- Added unit tests for TeamAuthorizationService
- Added `ORCHARD_ADMIN_PASSWORD` environment variable to configure initial admin password (#87)
  - When set, admin user is created with the specified password (no password change required)
  - When not set, defaults to `changeme123` and requires password change on first login
- Added Helm chart support for admin password via multiple sources (#87):
  - `orchard.auth.adminPassword` - plain value (creates K8s secret)
  - `orchard.auth.existingSecret` - reference existing K8s secret
  - `orchard.auth.secretsManager` - AWS Secrets Manager integration
- Added `.env.example` template for local development (#87)
- Added `.env` file support in docker-compose.local.yml (#87)
- Added Project Settings page accessible to project admins (#65)
  - General settings section for editing description and visibility
  - Access Management section (moved from project page)
  - Danger Zone section with inline delete confirmation requiring project name
  - Settings button (gear icon) on project page header for admins
- Added artifact dependency management system (#76, #77, #78, #79, #80, #81)
  - `artifact_dependencies` table with version/tag constraints and check constraints
  - `ArtifactDependency` SQLAlchemy model with indexes for fast lookups
  - Ensure file parsing (`orchard.ensure` YAML format) during artifact upload
  - Circular dependency detection at upload time (rejected with 400)
  - Dependency conflict detection at resolution time (409 with conflict details)
- Added dependency API endpoints (#78, #79):
  - `GET /api/v1/artifact/{artifact_id}/dependencies` - Get dependencies by artifact ID
  - `GET /api/v1/project/{project}/{package}/+/{ref}/dependencies` - Get dependencies by ref
  - `GET /api/v1/project/{project}/{package}/reverse-dependencies` - Get reverse dependencies (paginated)
  - `GET /api/v1/project/{project}/{package}/+/{ref}/resolve` - Resolve full dependency tree
- Added dependency resolution with topological sorting (#79); see the sketch after this list
  - Returns flat list of all artifacts needed in dependency order
  - Includes download URLs, sizes, and version info for each artifact
- Added frontend dependency visualization (#84, #85, #86):
  - Dependencies section on package page showing direct dependencies for selected tag
  - Tag/version selector to switch between artifacts
  - "Used By" section showing reverse dependencies with pagination
  - Interactive dependency graph modal with:
    - Tree visualization with collapsible nodes
    - Zoom (mouse wheel + buttons) and pan (click-drag)
    - Click to navigate to package
    - Hover tooltip with package details
  - Error display for circular dependencies and conflicts
- Added migration `008_artifact_dependencies.sql` for dependency schema
- Added `dependencies.py` module with parsing, validation, and resolution logic
- Added comprehensive integration tests for all dependency features
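A minimal sketch of calling the resolve endpoint listed above, assuming a reachable deployment; the project/package/ref names and the response field name are placeholders:

```python
# Hypothetical call to the dependency-resolution endpoint; names are placeholders.
import httpx

client = httpx.Client(base_url="http://localhost:8000")  # assumption: local dev server

# Resolve the full dependency tree for one artifact ref; per the changelog the
# response carries a flat, topologically ordered list of required artifacts.
resp = client.get("/api/v1/project/demo/tool/+/v1.2.3/resolve")
resp.raise_for_status()  # a 409 here would signal a dependency conflict
for artifact in resp.json()["artifacts"]:  # field name 'artifacts' is an assumption
    print(artifact)
```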
### Changed
- Added pre-test stage reset to ensure known environment state before integration tests (#54)
- Upload endpoint now accepts optional `ensure` file parameter for declaring dependencies
- Updated upload API documentation with ensure file format and examples
- Converted teams list and team projects to use DataTable component for consistent styling
- Centered team members and team settings page content
- Added orchard logo icon and dot separator to footer

### Fixed
- Fixed dark theme styling for team pages - modals, forms, and dropdowns now use correct theme variables
- Fixed UserAutocomplete and TeamSelector dropdown backgrounds for dark theme

## [0.5.1] - 2026-01-23
### Changed
- Simplified tag pipeline to only run deploy and smoke tests (image already built on main) (#54)

### Fixed
- Fixed production CI deployment namespace to use correct `orch-namespace` (#54)
- Added gitleaks config to allowlist test files from secret scanning (#54)

## [0.5.0] - 2026-01-23
### Added
- Added factory reset endpoint `POST /api/v1/admin/factory-reset` for test environment cleanup (#54)
  - Requires admin authentication and `X-Confirm-Reset: yes-delete-all-data` header
  - Drops all database tables, clears S3 bucket, reinitializes schema, re-seeds default data
  - CI pipeline automatically calls this after integration tests on stage
- Added `delete_all()` method to storage backend for bulk S3 object deletion (#54)
- Added AWS Secrets Manager CSI driver support for database credentials (#54)
- Added SecretProviderClass template for Secrets Manager integration (#54)
- Added IRSA service account annotations for prod and stage environments (#54)
- Added comprehensive upload/download tests for size boundaries (1B to 1GB) (#38)
- Added concurrent upload/download tests (2, 5, 10 parallel operations) (#38)
- Added data integrity tests (binary, text, unicode, compressed content) (#38)
- Added chunk boundary tests for edge cases (#38)
- Added `@pytest.mark.large` and `@pytest.mark.concurrent` test markers (#38)
- Added `generate_content()` and `generate_content_with_hash()` test helpers (#38)
- Added `sized_content` fixture for generating test content of specific sizes (#38)
- Added upload API tests: upload without tag, artifact creation verification, S3 object creation (#38)
- Added download API tests: tag: prefix resolution, 404 for nonexistent project/package/artifact (#38)
- Added download header tests: Content-Type, Content-Length, Content-Disposition, ETag, X-Checksum-SHA256 (#38)
- Added error handling tests: timeout behavior, checksum validation, resource cleanup, graceful error responses (#38)
- Added version API tests: version creation, auto-detection, listing, download by version prefix (#38)
- Added integrity verification tests: round-trip hash verification, client-side verification workflow, size variants (1KB-10MB) (#40)
- Added consistency check endpoint tests with response format validation (#40)
- Added corruption detection tests: bit flip, truncation, appended content, size mismatch, missing S3 objects (#40)
- Added Digest header tests (RFC 3230) and verification mode tests (#40)
- Added integrity verification documentation (`docs/integrity-verification.md`) (#40)
- Added conditional request support for downloads (If-None-Match, If-Modified-Since) returning 304 Not Modified (#42); a client-side sketch follows this list
- Added caching headers to downloads: Cache-Control (immutable), Last-Modified (#42)
- Added 416 Range Not Satisfiable response for invalid range requests (#42)
- Added download completion logging with bytes transferred and throughput (#42)
- Added client disconnect handling during streaming downloads (#42)
- Added streaming download tests: range requests, conditional requests, caching headers, download resume (#42)
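A minimal client-side sketch of the conditional-request behavior above; the base URL and download path are placeholders:

```python
# Hypothetical conditional re-download: send the ETag from a prior response
# back via If-None-Match and expect 304 if the artifact is unchanged.
import httpx

client = httpx.Client(base_url="http://localhost:8000")  # assumption: local dev server
url = "/api/v1/project/demo/tool/+/v1.2.3"               # placeholder download path

first = client.get(url)
etag = first.headers["ETag"]

second = client.get(url, headers={"If-None-Match": etag})
assert second.status_code == 304  # 304 Not Modified, body is not re-sent
```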
- Added upload duration and throughput metrics (`duration_ms`, `throughput_mbps`) to upload response (#43)
- Added upload progress logging for large files (hash computation and multipart upload phases) (#43)
- Added client disconnect handling during uploads with proper cleanup (#43)
- Added upload progress tracking endpoint `GET /upload/{upload_id}/progress` for resumable uploads (#43)
- Added large file upload tests (10MB, 100MB, 1GB) with multipart upload verification (#43)
- Added upload cancellation and timeout handling tests (#43)
- Added comprehensive API documentation for upload endpoints with curl, Python, and JavaScript examples (#43)
- Added `package_versions` table for immutable version tracking separate from mutable tags (#56)
  - Versions are set at upload time via explicit `version` parameter or auto-detected from filename/metadata
  - Version detection priority: explicit parameter > package metadata > filename pattern (see the sketch below)
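A minimal sketch of the detection priority described in the last bullet; the helper name and parameters are hypothetical, not from this diff:

```python
# Hypothetical illustration of version-detection priority:
# explicit parameter > package metadata > filename pattern.
from typing import Optional

def detect_version(explicit: Optional[str],
                   metadata_version: Optional[str],
                   filename_version: Optional[str]) -> Optional[str]:
    # The first non-empty source wins, in priority order.
    return explicit or metadata_version or filename_version

assert detect_version("2.0.0", "1.9.0", "1.8.0") == "2.0.0"  # explicit wins
assert detect_version(None, "1.9.0", "1.8.0") == "1.9.0"     # then metadata
assert detect_version(None, None, "1.8.0") == "1.8.0"        # then filename
```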
@@ -31,6 +185,19 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Added internal proxy configuration for npm, pip, helm, and apt (#51)

### Changed
- Configured stage and prod to use AWS RDS instead of PostgreSQL subchart (#54)
- Configured stage and prod to use AWS S3 instead of MinIO subchart (#54)
- Changed prod deployment from manual to automatic on version tags (#54)
- Updated S3 client to support IRSA credentials when no explicit keys provided (#54)
- Changed prod image pullPolicy to Always (#54)
- Added proxy-body-size annotation to prod ingress for large uploads (#54)
- CI integration tests now run full pytest suite (~350 tests) against deployed environment instead of 3 smoke tests
- CI production deployment uses lightweight smoke tests only (no test data creation in prod)
- CI pipeline improvements: shared pip cache, `interruptible` flag on test jobs, retry on integration tests
- Simplified deploy verification to health check only (full checks done by integration tests)
- Extracted environment URLs to global variables for maintainability
- Made `cleanup_feature` job standalone (no longer inherits deploy template dependencies)
- Renamed `integration_test_prod` to `smoke_test_prod` for clarity
- Updated download ref resolution to check versions before tags (version → tag → artifact ID) (#56)
- Deploy jobs now require all security scans to pass before deployment (added test_image, app_deps_scan, cve_scan, cve_sbom_analysis, app_sbom_analysis to dependencies) (#63)
- Increased deploy job timeout from 5m to 10m (#63)
@@ -44,6 +211,21 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Improved pod naming: Orchard pods now named `orchard-{env}-server-*` for clarity (#51)

### Fixed
- Fixed factory reset not creating default admin user after reset (#60)
  - Admin user was only created at server startup, not after factory reset
  - CI reset job would fail to login because admin user didn't exist
- Improved reset_stage CI job reliability (#60)
  - Added application-level retry logic (3 attempts with 5s delay)
  - Added job-level retry for transient failures
  - Fixed httpx client to use proper context manager
  - Increased timeout to 120s for reset operations
- Fixed CI integration test rate limiting: added configurable `ORCHARD_LOGIN_RATE_LIMIT` env var, relaxed to 1000/minute for dev/stage
- Fixed duplicate `TestSecurityEdgeCases` class definition in test_auth_api.py
- Fixed integration tests auth: session-scoped client, configurable credentials via env vars, fail-fast on auth errors
- Fixed 413 Request Entity Too Large errors on uploads by adding `proxy-body-size: "0"` nginx annotation to Orchard ingress
- Fixed CI tests that require direct S3 access: added `@pytest.mark.requires_direct_s3` marker and excluded from CI
- Fixed ref_count triggers not being created: added auto-migration for tags ref_count trigger functions
- Fixed Content-Disposition header encoding for non-ASCII filenames using RFC 5987 (#38)
- Fixed deploy jobs running even when tests or security scans fail (changed rules from `when: always` to `when: on_success`) (#63)
- Fixed python_tests job not using internal PyPI proxy (#63)
- Fixed `cleanup_feature` job failing when branch is deleted (`GIT_STRATEGY: none`) (#51)
@@ -53,9 +235,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Fixed deploy jobs running when secrets scan fails (added `secrets` to deploy dependencies)
- Fixed dev environment memory requests to equal limits per cluster Kyverno policy
- Fixed init containers missing resource limits (Kyverno policy compliance)
- Fixed Python SyntaxWarning for invalid escape sequence in database migration regex pattern

### Removed
- Removed unused `store_streaming()` method from storage.py (#51)
- Disabled PostgreSQL subchart for stage and prod environments (#54)
- Disabled MinIO subchart for stage and prod environments (#54)

## [0.4.0] - 2026-01-12
### Added
@@ -360,21 +360,36 @@ def create_default_admin(db: Session) -> Optional[User]:
    """Create the default admin user if no users exist.

    Returns the created user, or None if users already exist.

+   The admin password can be set via ORCHARD_ADMIN_PASSWORD environment variable.
+   If not set, defaults to 'changeme123' and requires password change on first login.
    """
    # Check if any users exist
    user_count = db.query(User).count()
    if user_count > 0:
        return None

+   settings = get_settings()
+
+   # Use configured password or default
+   password = settings.admin_password if settings.admin_password else "changeme123"
+   # Only require password change if using the default password
+   must_change = not settings.admin_password

    # Create default admin
    auth_service = AuthService(db)
    admin = auth_service.create_user(
        username="admin",
-       password="changeme123",
+       password=password,
        is_admin=True,
-       must_change_password=True,
+       must_change_password=must_change,
    )

+   if settings.admin_password:
+       logger.info("Created default admin user with configured password")
+   else:
+       logger.info("Created default admin user with default password (changeme123)")

    return admin
@@ -643,32 +658,51 @@ class AuthorizationService:
        self, project_id: str, user: Optional[User]
    ) -> Optional[str]:
        """Get the user's access level for a project.

        Returns the highest access level the user has, or None if no access.
        Checks in order:
        1. System admin - gets admin access to all projects
        2. Project owner (created_by) - gets admin access
-       3. Explicit permission in access_permissions table
+       3. Team-based access (owner/admin gets admin, member gets read)
+       4. Explicit permission in access_permissions table
+       5. Public access
        """
-       from .models import Project, AccessPermission
+       from .models import Project, AccessPermission, TeamMembership

        # Get the project
        project = self.db.query(Project).filter(Project.id == project_id).first()
        if not project:
            return None

        # Anonymous users only get access to public projects
        if not user:
            return "read" if project.is_public else None

        # System admins get admin access everywhere
        if user.is_admin:
            return "admin"

        # Project owner gets admin access
        if project.created_by == user.username:
            return "admin"

+       # Check team-based access if project belongs to a team
+       if project.team_id:
+           membership = (
+               self.db.query(TeamMembership)
+               .filter(
+                   TeamMembership.team_id == project.team_id,
+                   TeamMembership.user_id == user.id,
+               )
+               .first()
+           )
+           if membership:
+               # Team owner/admin gets admin on all team projects
+               if membership.role in ("owner", "admin"):
+                   return "admin"
+               # Team member gets read access (upgradeable by explicit permission)
+               # Continue checking explicit permissions for potential upgrade

        # Check explicit permissions
        permission = (
            self.db.query(AccessPermission)
@@ -678,13 +712,27 @@ class AuthorizationService:
            )
            .first()
        )

        if permission:
            # Check expiration
            if permission.expires_at and permission.expires_at < datetime.now(timezone.utc):
-               return "read" if project.is_public else None
-           return permission.level
+               pass  # Permission expired, fall through
+           else:
+               return permission.level

+       # Team member gets read access if no explicit permission
+       if project.team_id:
+           membership = (
+               self.db.query(TeamMembership)
+               .filter(
+                   TeamMembership.team_id == project.team_id,
+                   TeamMembership.user_id == user.id,
+               )
+               .first()
+           )
+           if membership:
+               return "read"

        # Fall back to public access
        return "read" if project.is_public else None
@@ -869,6 +917,226 @@ def check_project_access(
    return project


# --- Team Authorization ---

# Team roles in order of increasing privilege
TEAM_ROLES = ["member", "admin", "owner"]


def get_team_role_rank(role: str) -> int:
    """Get numeric rank for team role comparison."""
    try:
        return TEAM_ROLES.index(role)
    except ValueError:
        return -1


def has_sufficient_team_role(user_role: str, required_role: str) -> bool:
    """Check if user_role is sufficient for required_role.

    Role hierarchy: owner > admin > member
    """
    return get_team_role_rank(user_role) >= get_team_role_rank(required_role)
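The rank comparison behaves as follows; a quick illustration derived from the two functions above, not code from the diff:

```python
# Illustration of the role hierarchy: owner > admin > member.
assert has_sufficient_team_role("owner", "admin") is True    # owner outranks admin
assert has_sufficient_team_role("member", "admin") is False  # member does not
assert has_sufficient_team_role("admin", "admin") is True    # equal rank suffices
assert has_sufficient_team_role("guest", "member") is False  # unknown role ranks -1
```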
class TeamAuthorizationService:
    """Service for checking team-level authorization."""

    def __init__(self, db: Session):
        self.db = db

    def get_user_team_role(
        self, team_id: str, user: Optional[User]
    ) -> Optional[str]:
        """Get the user's role in a team.

        Returns the role ('owner', 'admin', 'member') or None if not a member.
        System admins who are not team members are treated as team admins.
        """
        from .models import Team, TeamMembership

        if not user:
            return None

        # Check actual membership first
        membership = (
            self.db.query(TeamMembership)
            .filter(
                TeamMembership.team_id == team_id,
                TeamMembership.user_id == user.id,
            )
            .first()
        )

        if membership:
            return membership.role

        # System admins who are not members get admin access
        if user.is_admin:
            return "admin"

        return None

    def check_team_access(
        self,
        team_id: str,
        user: Optional[User],
        required_role: str = "member",
    ) -> bool:
        """Check if user has required role in team.

        Args:
            team_id: Team ID to check
            user: User to check (None means no access)
            required_role: Minimum required role ('member', 'admin', 'owner')

        Returns:
            True if user has sufficient role, False otherwise
        """
        user_role = self.get_user_team_role(team_id, user)
        if not user_role:
            return False
        return has_sufficient_team_role(user_role, required_role)

    def can_create_project(self, team_id: str, user: Optional[User]) -> bool:
        """Check if user can create projects in team (requires admin+)."""
        return self.check_team_access(team_id, user, "admin")

    def can_manage_members(self, team_id: str, user: Optional[User]) -> bool:
        """Check if user can manage team members (requires admin+)."""
        return self.check_team_access(team_id, user, "admin")

    def can_delete_team(self, team_id: str, user: Optional[User]) -> bool:
        """Check if user can delete the team (requires owner)."""
        return self.check_team_access(team_id, user, "owner")

    def get_team_by_slug(self, slug: str) -> Optional["Team"]:
        """Get a team by its slug."""
        from .models import Team

        return self.db.query(Team).filter(Team.slug == slug).first()

    def get_user_teams(self, user: User) -> list:
        """Get all teams a user is a member of."""
        from .models import Team, TeamMembership

        return (
            self.db.query(Team)
            .join(TeamMembership)
            .filter(TeamMembership.user_id == user.id)
            .order_by(Team.name)
            .all()
        )


def get_team_authorization_service(db: Session = Depends(get_db)) -> TeamAuthorizationService:
    """Get a TeamAuthorizationService instance."""
    return TeamAuthorizationService(db)


class TeamAccessChecker:
    """Dependency for checking team access in route handlers."""

    def __init__(self, required_role: str = "member"):
        self.required_role = required_role

    def __call__(
        self,
        slug: str,
        db: Session = Depends(get_db),
        current_user: Optional[User] = Depends(get_current_user_optional),
    ) -> User:
        """Check if user has required role in team.

        Raises 404 if team not found, 401 if not authenticated, 403 if insufficient role.
        Returns the current user.
        """
        from .models import Team

        # Find team by slug
        team = db.query(Team).filter(Team.slug == slug).first()
        if not team:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Team '{slug}' not found",
            )

        if not current_user:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Authentication required",
                headers={"WWW-Authenticate": "Bearer"},
            )

        auth_service = TeamAuthorizationService(db)

        if not auth_service.check_team_access(str(team.id), current_user, self.required_role):
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail=f"Insufficient team permissions. Required role: {self.required_role}",
            )

        return current_user


# Pre-configured team access checkers
require_team_member = TeamAccessChecker("member")
require_team_admin = TeamAccessChecker("admin")
require_team_owner = TeamAccessChecker("owner")
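A minimal sketch of how these pre-configured checkers would plug into a route; the router object and route body are illustrative, not from this diff:

```python
# Hypothetical route using require_team_owner; router setup is an assumption.
from fastapi import APIRouter, Depends

router = APIRouter(prefix="/api/v1/teams")

@router.delete("/{slug}")
def delete_team(slug: str, current_user: User = Depends(require_team_owner)):
    # require_team_owner has already raised 404 for a bad slug, 401 for missing
    # auth, and 403 for an insufficient role before this body runs.
    ...
```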
def check_team_access(
    db: Session,
    team_slug: str,
    user: Optional[User],
    required_role: str = "member",
) -> "Team":
    """Check if user has required role in team.

    This is a helper function for use in route handlers.

    Args:
        db: Database session
        team_slug: Slug of the team
        user: Current user (can be None for no access)
        required_role: Required team role (member, admin, owner)

    Returns:
        The Team object if access is granted

    Raises:
        HTTPException 404: Team not found
        HTTPException 401: Authentication required
        HTTPException 403: Insufficient permissions
    """
    from .models import Team

    # Find team by slug
    team = db.query(Team).filter(Team.slug == team_slug).first()
    if not team:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Team '{team_slug}' not found",
        )

    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Authentication required",
            headers={"WWW-Authenticate": "Bearer"},
        )

    auth_service = TeamAuthorizationService(db)

    if not auth_service.check_team_access(str(team.id), user, required_role):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=f"Insufficient team permissions. Required role: {required_role}",
        )

    return team


# --- OIDC Configuration Service ---
@@ -53,6 +53,9 @@ class Settings(BaseSettings):
    log_level: str = "INFO"  # DEBUG, INFO, WARNING, ERROR, CRITICAL
    log_format: str = "auto"  # "json", "standard", or "auto" (json in production)

+   # Initial admin user settings
+   admin_password: str = ""  # Initial admin password (if empty, uses 'changeme123')

    # JWT Authentication settings (optional, for external identity providers)
    jwt_enabled: bool = False  # Enable JWT token validation
    jwt_secret: str = ""  # Secret key for HS256, or leave empty for RS256 with JWKS
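The `admin_password` field pairs with the `ORCHARD_ADMIN_PASSWORD` variable from `.env.example`. A minimal sketch of that mapping, assuming the settings class uses a pydantic `ORCHARD_` env prefix (an inference from the names, not shown in this diff):

```python
# Hypothetical reduction of the Settings class to show the env mapping.
# The ORCHARD_ env prefix is an assumption inferred from ORCHARD_ADMIN_PASSWORD.
import os
from pydantic_settings import BaseSettings, SettingsConfigDict

class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_prefix="ORCHARD_")
    admin_password: str = ""  # empty means: default to 'changeme123' at startup

os.environ["ORCHARD_ADMIN_PASSWORD"] = "s3cret"  # e.g. loaded from .env
assert Settings().admin_password == "s3cret"
```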
@@ -170,6 +170,62 @@ def _run_migrations():
            END IF;
        END $$;
        """,
        # Create ref_count trigger functions for tags (ensures triggers exist even if initial migration wasn't run)
        """
        CREATE OR REPLACE FUNCTION increment_artifact_ref_count()
        RETURNS TRIGGER AS $$
        BEGIN
            UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
            RETURN NEW;
        END;
        $$ LANGUAGE plpgsql;
        """,
        """
        CREATE OR REPLACE FUNCTION decrement_artifact_ref_count()
        RETURNS TRIGGER AS $$
        BEGIN
            UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
            RETURN OLD;
        END;
        $$ LANGUAGE plpgsql;
        """,
        """
        CREATE OR REPLACE FUNCTION update_artifact_ref_count()
        RETURNS TRIGGER AS $$
        BEGIN
            IF OLD.artifact_id != NEW.artifact_id THEN
                UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
                UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
            END IF;
            RETURN NEW;
        END;
        $$ LANGUAGE plpgsql;
        """,
        # Create triggers for tags ref_count management
        """
        DO $$
        BEGIN
            -- Drop and recreate triggers to ensure they're current
            DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
            CREATE TRIGGER tags_ref_count_insert_trigger
                AFTER INSERT ON tags
                FOR EACH ROW
                EXECUTE FUNCTION increment_artifact_ref_count();

            DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
            CREATE TRIGGER tags_ref_count_delete_trigger
                AFTER DELETE ON tags
                FOR EACH ROW
                EXECUTE FUNCTION decrement_artifact_ref_count();

            DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
            CREATE TRIGGER tags_ref_count_update_trigger
                AFTER UPDATE ON tags
                FOR EACH ROW
                WHEN (OLD.artifact_id IS DISTINCT FROM NEW.artifact_id)
                EXECUTE FUNCTION update_artifact_ref_count();
        END $$;
        """,
        # Create ref_count trigger functions for package_versions
        """
        CREATE OR REPLACE FUNCTION increment_version_ref_count()
@@ -210,7 +266,7 @@ def _run_migrations():
        END $$;
        """,
        # Migrate existing semver tags to package_versions
-       """
+       r"""
        DO $$
        BEGIN
            IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
@@ -229,6 +285,60 @@ def _run_migrations():
            END IF;
        END $$;
        """,
        # Teams and multi-tenancy migration (009_teams.sql)
        """
        CREATE TABLE IF NOT EXISTS teams (
            id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
            name VARCHAR(255) NOT NULL,
            slug VARCHAR(255) NOT NULL UNIQUE,
            description TEXT,
            created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
            updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
            created_by VARCHAR(255) NOT NULL,
            settings JSONB DEFAULT '{}'
        );
        """,
        """
        CREATE TABLE IF NOT EXISTS team_memberships (
            id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
            team_id UUID NOT NULL REFERENCES teams(id) ON DELETE CASCADE,
            user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
            role VARCHAR(50) NOT NULL DEFAULT 'member',
            created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
            invited_by VARCHAR(255),
            CONSTRAINT team_memberships_unique UNIQUE (team_id, user_id),
            CONSTRAINT team_memberships_role_check CHECK (role IN ('owner', 'admin', 'member'))
        );
        """,
        """
        DO $$
        BEGIN
            IF NOT EXISTS (
                SELECT 1 FROM information_schema.columns
                WHERE table_name = 'projects' AND column_name = 'team_id'
            ) THEN
                ALTER TABLE projects ADD COLUMN team_id UUID REFERENCES teams(id) ON DELETE SET NULL;
                CREATE INDEX IF NOT EXISTS idx_projects_team_id ON projects(team_id);
            END IF;
        END $$;
        """,
        """
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_teams_slug') THEN
                CREATE INDEX idx_teams_slug ON teams(slug);
            END IF;
            IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_teams_created_by') THEN
                CREATE INDEX idx_teams_created_by ON teams(created_by);
            END IF;
            IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_team_memberships_team_id') THEN
                CREATE INDEX idx_team_memberships_team_id ON team_memberships(team_id);
            END IF;
            IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_team_memberships_user_id') THEN
                CREATE INDEX idx_team_memberships_user_id ON team_memberships(user_id);
            END IF;
        END $$;
        """,
    ]

    with engine.connect() as conn:
@@ -237,6 +347,7 @@ def _run_migrations():
                conn.execute(text(migration))
                conn.commit()
            except Exception as e:
+               conn.rollback()
                logger.warning(f"Migration failed (may already be applied): {e}")
backend/app/dependencies.py (new file, 723 lines)
@@ -0,0 +1,723 @@
|
||||
"""
|
||||
Dependency management module for artifact dependencies.
|
||||
|
||||
Handles:
|
||||
- Parsing orchard.ensure files
|
||||
- Storing dependencies in the database
|
||||
- Querying dependencies and reverse dependencies
|
||||
- Dependency resolution with topological sorting
|
||||
- Circular dependency detection
|
||||
- Conflict detection
|
||||
"""
|
||||
|
||||
import yaml
|
||||
from typing import List, Dict, Any, Optional, Set, Tuple
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import and_
|
||||
|
||||
from .models import (
|
||||
Project,
|
||||
Package,
|
||||
Artifact,
|
||||
Tag,
|
||||
ArtifactDependency,
|
||||
PackageVersion,
|
||||
)
|
||||
from .schemas import (
|
||||
EnsureFileContent,
|
||||
EnsureFileDependency,
|
||||
DependencyResponse,
|
||||
ArtifactDependenciesResponse,
|
||||
DependentInfo,
|
||||
ReverseDependenciesResponse,
|
||||
ResolvedArtifact,
|
||||
DependencyResolutionResponse,
|
||||
DependencyConflict,
|
||||
PaginationMeta,
|
||||
)
|
||||
|
||||
|
||||
class DependencyError(Exception):
|
||||
"""Base exception for dependency errors."""
|
||||
pass
|
||||
|
||||
|
||||
class CircularDependencyError(DependencyError):
|
||||
"""Raised when a circular dependency is detected."""
|
||||
def __init__(self, cycle: List[str]):
|
||||
self.cycle = cycle
|
||||
super().__init__(f"Circular dependency detected: {' -> '.join(cycle)}")
|
||||
|
||||
|
||||
class DependencyConflictError(DependencyError):
|
||||
"""Raised when conflicting dependency versions are detected."""
|
||||
def __init__(self, conflicts: List[DependencyConflict]):
|
||||
self.conflicts = conflicts
|
||||
super().__init__(f"Dependency conflicts detected: {len(conflicts)} conflict(s)")
|
||||
|
||||
|
||||
class DependencyNotFoundError(DependencyError):
|
||||
"""Raised when a dependency cannot be resolved."""
|
||||
def __init__(self, project: str, package: str, constraint: str):
|
||||
self.project = project
|
||||
self.package = package
|
||||
self.constraint = constraint
|
||||
super().__init__(f"Dependency not found: {project}/{package}@{constraint}")
|
||||
|
||||
|
||||
class InvalidEnsureFileError(DependencyError):
|
||||
"""Raised when the ensure file is invalid."""
|
||||
pass
|
||||
|
||||
|
||||
class DependencyDepthExceededError(DependencyError):
|
||||
"""Raised when dependency resolution exceeds max depth."""
|
||||
def __init__(self, max_depth: int):
|
||||
self.max_depth = max_depth
|
||||
super().__init__(f"Dependency resolution exceeded maximum depth of {max_depth}")
|
||||
|
||||
|
||||
# Safety limits to prevent DoS attacks
|
||||
MAX_DEPENDENCY_DEPTH = 50 # Maximum levels of nested dependencies
|
||||
MAX_DEPENDENCIES_PER_ARTIFACT = 200 # Maximum direct dependencies per artifact
|
||||
|
||||
|
||||
def parse_ensure_file(content: bytes) -> EnsureFileContent:
|
||||
"""
|
||||
Parse an orchard.ensure file.
|
||||
|
||||
Args:
|
||||
content: Raw bytes of the ensure file
|
||||
|
||||
Returns:
|
||||
Parsed EnsureFileContent
|
||||
|
||||
Raises:
|
||||
InvalidEnsureFileError: If the file is invalid YAML or has wrong structure
|
||||
"""
|
||||
try:
|
||||
data = yaml.safe_load(content.decode('utf-8'))
|
||||
except yaml.YAMLError as e:
|
||||
raise InvalidEnsureFileError(f"Invalid YAML: {e}")
|
||||
except UnicodeDecodeError as e:
|
||||
raise InvalidEnsureFileError(f"Invalid encoding: {e}")
|
||||
|
||||
if data is None:
|
||||
return EnsureFileContent(dependencies=[])
|
||||
|
||||
if not isinstance(data, dict):
|
||||
raise InvalidEnsureFileError("Ensure file must be a YAML dictionary")
|
||||
|
||||
dependencies = []
|
||||
deps_data = data.get('dependencies', [])
|
||||
|
||||
if not isinstance(deps_data, list):
|
||||
raise InvalidEnsureFileError("'dependencies' must be a list")
|
||||
|
||||
# Safety limit: prevent DoS through excessive dependencies
|
||||
if len(deps_data) > MAX_DEPENDENCIES_PER_ARTIFACT:
|
||||
raise InvalidEnsureFileError(
|
||||
f"Too many dependencies: {len(deps_data)} exceeds maximum of {MAX_DEPENDENCIES_PER_ARTIFACT}"
|
||||
)
|
||||
|
||||
for i, dep in enumerate(deps_data):
|
||||
if not isinstance(dep, dict):
|
||||
raise InvalidEnsureFileError(f"Dependency {i} must be a dictionary")
|
||||
|
||||
project = dep.get('project')
|
||||
package = dep.get('package')
|
||||
version = dep.get('version')
|
||||
tag = dep.get('tag')
|
||||
|
||||
if not project:
|
||||
raise InvalidEnsureFileError(f"Dependency {i} missing 'project'")
|
||||
if not package:
|
||||
raise InvalidEnsureFileError(f"Dependency {i} missing 'package'")
|
||||
if not version and not tag:
|
||||
raise InvalidEnsureFileError(
|
||||
f"Dependency {i} must have either 'version' or 'tag'"
|
||||
)
|
||||
if version and tag:
|
||||
raise InvalidEnsureFileError(
|
||||
f"Dependency {i} cannot have both 'version' and 'tag'"
|
||||
)
|
||||
|
||||
dependencies.append(EnsureFileDependency(
|
||||
project=project,
|
||||
package=package,
|
||||
version=version,
|
||||
tag=tag,
|
||||
))
|
||||
|
||||
return EnsureFileContent(dependencies=dependencies)
|
||||
|
||||
|
||||
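# A minimal usage sketch. Wrapped in a function so the module stays
# import-safe; project/package names are illustrative only.
def _example_parse_ensure_file() -> EnsureFileContent:
    """Sketch: parse a small ensure file with one version pin and one tag pin."""
    raw = (
        b"dependencies:\n"
        b"  - project: backend-services\n"
        b"    package: common-utils\n"
        b"    version: '2.0.0'\n"
        b"  - project: frontend-libs\n"
        b"    package: design-tokens\n"
        b"    tag: latest\n"
    )
    return parse_ensure_file(raw)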
def validate_dependencies(
    db: Session,
    dependencies: List[EnsureFileDependency],
) -> List[str]:
    """
    Validate that all dependency projects exist.

    Args:
        db: Database session
        dependencies: List of dependencies to validate

    Returns:
        List of error messages (empty if all valid)
    """
    errors = []

    for dep in dependencies:
        project = db.query(Project).filter(Project.name == dep.project).first()
        if not project:
            errors.append(f"Project '{dep.project}' not found")

    return errors


def store_dependencies(
    db: Session,
    artifact_id: str,
    dependencies: List[EnsureFileDependency],
) -> List[ArtifactDependency]:
    """
    Store dependencies for an artifact.

    Args:
        db: Database session
        artifact_id: The artifact ID that has these dependencies
        dependencies: List of dependencies to store

    Returns:
        List of created ArtifactDependency objects
    """
    created = []

    for dep in dependencies:
        artifact_dep = ArtifactDependency(
            artifact_id=artifact_id,
            dependency_project=dep.project,
            dependency_package=dep.package,
            version_constraint=dep.version,
            tag_constraint=dep.tag,
        )
        db.add(artifact_dep)
        created.append(artifact_dep)

    return created
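# Upload-path sketch tying the helpers above together. The db session,
# artifact id, and raw bytes come from the caller; illustrative only.
def _example_attach_dependencies(db: Session, artifact_id: str, raw: bytes) -> None:
    parsed = parse_ensure_file(raw)
    errors = validate_dependencies(db, parsed.dependencies)
    if errors:
        raise InvalidEnsureFileError("; ".join(errors))
    store_dependencies(db, artifact_id, parsed.dependencies)
    db.commit()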
def get_artifact_dependencies(
    db: Session,
    artifact_id: str,
) -> List[DependencyResponse]:
    """
    Get all dependencies for an artifact.

    Args:
        db: Database session
        artifact_id: The artifact ID

    Returns:
        List of DependencyResponse objects
    """
    deps = db.query(ArtifactDependency).filter(
        ArtifactDependency.artifact_id == artifact_id
    ).all()

    return [DependencyResponse.from_orm_model(dep) for dep in deps]


def get_reverse_dependencies(
    db: Session,
    project_name: str,
    package_name: str,
    page: int = 1,
    limit: int = 50,
) -> ReverseDependenciesResponse:
    """
    Get all artifacts that depend on a given package.

    Args:
        db: Database session
        project_name: Target project name
        package_name: Target package name
        page: Page number (1-indexed)
        limit: Results per page

    Returns:
        ReverseDependenciesResponse with dependents and pagination
    """
    # Query dependencies that point to this project/package
    query = db.query(ArtifactDependency).filter(
        ArtifactDependency.dependency_project == project_name,
        ArtifactDependency.dependency_package == package_name,
    )

    total = query.count()
    offset = (page - 1) * limit
    deps = query.offset(offset).limit(limit).all()

    dependents = []
    for dep in deps:
        # Get artifact info to find the project/package/version
        artifact = db.query(Artifact).filter(Artifact.id == dep.artifact_id).first()
        if not artifact:
            continue

        # Find which package this artifact belongs to via tags or versions
        tag = db.query(Tag).filter(Tag.artifact_id == dep.artifact_id).first()
        if tag:
            pkg = db.query(Package).filter(Package.id == tag.package_id).first()
            if pkg:
                proj = db.query(Project).filter(Project.id == pkg.project_id).first()
                if proj:
                    # Get version if available
                    version_record = db.query(PackageVersion).filter(
                        PackageVersion.artifact_id == dep.artifact_id,
                        PackageVersion.package_id == pkg.id,
                    ).first()

                    dependents.append(DependentInfo(
                        artifact_id=dep.artifact_id,
                        project=proj.name,
                        package=pkg.name,
                        version=version_record.version if version_record else None,
                        constraint_type="version" if dep.version_constraint else "tag",
                        constraint_value=dep.version_constraint or dep.tag_constraint,
                    ))

    total_pages = (total + limit - 1) // limit

    return ReverseDependenciesResponse(
        project=project_name,
        package=package_name,
        dependents=dependents,
        pagination=PaginationMeta(
            page=page,
            limit=limit,
            total=total,
            total_pages=total_pages,
            has_more=page < total_pages,
        ),
    )
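# Query sketch: print the first page of dependents for a package.
# Project/package names are illustrative only.
def _example_list_dependents(db: Session) -> None:
    resp = get_reverse_dependencies(db, "frontend-libs", "design-tokens", page=1, limit=50)
    for d in resp.dependents:
        print(f"{d.project}/{d.package}@{d.version} ({d.constraint_type}={d.constraint_value})")
    print("more pages:", resp.pagination.has_more)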
def _resolve_dependency_to_artifact(
    db: Session,
    project_name: str,
    package_name: str,
    version: Optional[str],
    tag: Optional[str],
) -> Optional[Tuple[str, str, int]]:
    """
    Resolve a dependency constraint to an artifact ID.

    Args:
        db: Database session
        project_name: Project name
        package_name: Package name
        version: Version constraint (exact)
        tag: Tag constraint

    Returns:
        Tuple of (artifact_id, resolved_version_or_tag, size) or None if not found
    """
    # Get project and package
    project = db.query(Project).filter(Project.name == project_name).first()
    if not project:
        return None

    package = db.query(Package).filter(
        Package.project_id == project.id,
        Package.name == package_name,
    ).first()
    if not package:
        return None

    if version:
        # Look up by version
        pkg_version = db.query(PackageVersion).filter(
            PackageVersion.package_id == package.id,
            PackageVersion.version == version,
        ).first()
        if pkg_version:
            artifact = db.query(Artifact).filter(
                Artifact.id == pkg_version.artifact_id
            ).first()
            if artifact:
                return (artifact.id, version, artifact.size)

        # Also check if there's a tag with this exact name
        tag_record = db.query(Tag).filter(
            Tag.package_id == package.id,
            Tag.name == version,
        ).first()
        if tag_record:
            artifact = db.query(Artifact).filter(
                Artifact.id == tag_record.artifact_id
            ).first()
            if artifact:
                return (artifact.id, version, artifact.size)

    if tag:
        # Look up by tag
        tag_record = db.query(Tag).filter(
            Tag.package_id == package.id,
            Tag.name == tag,
        ).first()
        if tag_record:
            artifact = db.query(Artifact).filter(
                Artifact.id == tag_record.artifact_id
            ).first()
            if artifact:
                return (artifact.id, tag, artifact.size)

    return None


def _detect_package_cycle(
    db: Session,
    project_name: str,
    package_name: str,
    target_project: str,
    target_package: str,
    visiting: Set[str],
    visited: Set[str],
    path: List[str],
) -> Optional[List[str]]:
    """
    Detect cycles at the package level using DFS.

    Args:
        db: Database session
        project_name: Current project being visited
        package_name: Current package being visited
        target_project: The project we're checking for cycles back to
        target_package: The package we're checking for cycles back to
        visiting: Set of package keys currently in the recursion stack
        visited: Set of fully processed package keys
        path: Current path for cycle reporting

    Returns:
        Cycle path if detected, None otherwise
    """
    pkg_key = f"{project_name}/{package_name}"

    # Check if we've reached the target package (cycle detected)
    if project_name == target_project and package_name == target_package:
        return path + [pkg_key]

    if pkg_key in visiting:
        # Unexpected internal cycle
        return None

    if pkg_key in visited:
        return None

    visiting.add(pkg_key)
    path.append(pkg_key)

    # Get the package and find any artifacts with dependencies
    project = db.query(Project).filter(Project.name == project_name).first()
    if project:
        package = db.query(Package).filter(
            Package.project_id == project.id,
            Package.name == package_name,
        ).first()
        if package:
            # Find all artifacts in this package via tags
            tags = db.query(Tag).filter(Tag.package_id == package.id).all()
            artifact_ids = {t.artifact_id for t in tags}

            # Get dependencies from all artifacts in this package
            for artifact_id in artifact_ids:
                deps = db.query(ArtifactDependency).filter(
                    ArtifactDependency.artifact_id == artifact_id
                ).all()

                for dep in deps:
                    cycle = _detect_package_cycle(
                        db,
                        dep.dependency_project,
                        dep.dependency_package,
                        target_project,
                        target_package,
                        visiting,
                        visited,
                        path,
                    )
                    if cycle:
                        return cycle

    path.pop()
    visiting.remove(pkg_key)
    visited.add(pkg_key)

    return None
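# Toy rendition of the walk above with an in-memory edge map instead of
# database lookups (illustrative only), showing how a cycle is reported.
def _example_cycle_walk() -> Optional[List[str]]:
    edges = {
        "acme/a": ["acme/b"],
        "acme/b": ["acme/c"],
        "acme/c": ["acme/a"],  # closes the loop back to acme/a
    }

    def walk(node: str, target: str, visiting: Set[str], visited: Set[str], path: List[str]):
        if node == target:
            return path + [node]
        if node in visiting or node in visited:
            return None
        visiting.add(node)
        path.append(node)
        for nxt in edges.get(node, []):
            cycle = walk(nxt, target, visiting, visited, path)
            if cycle:
                return cycle
        path.pop()
        visiting.remove(node)
        visited.add(node)
        return None

    # Starting from acme/b, the walk reaches acme/a again:
    return walk("acme/b", "acme/a", set(), set(), ["acme/a"])  # ['acme/a', 'acme/b', 'acme/c', 'acme/a']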
def check_circular_dependencies(
    db: Session,
    artifact_id: str,
    new_dependencies: List[EnsureFileDependency],
    project_name: Optional[str] = None,
    package_name: Optional[str] = None,
) -> Optional[List[str]]:
    """
    Check if adding the new dependencies would create a circular dependency.

    Args:
        db: Database session
        artifact_id: The artifact that will have these dependencies
        new_dependencies: Dependencies to be added
        project_name: Project name (optional, will try to look up from tag if not provided)
        package_name: Package name (optional, will try to look up from tag if not provided)

    Returns:
        Cycle path if detected, None otherwise
    """
    # First, get the package info for this artifact to build path labels
    if project_name and package_name:
        current_path = f"{project_name}/{package_name}"
    else:
        # Try to look up from tag
        artifact = db.query(Artifact).filter(Artifact.id == artifact_id).first()
        if not artifact:
            return None

        # Find package for this artifact
        tag = db.query(Tag).filter(Tag.artifact_id == artifact_id).first()
        if not tag:
            return None

        package = db.query(Package).filter(Package.id == tag.package_id).first()
        if not package:
            return None

        project = db.query(Project).filter(Project.id == package.project_id).first()
        if not project:
            return None

        current_path = f"{project.name}/{package.name}"

    # Extract target project and package from current_path
    if "/" in current_path:
        target_project, target_package = current_path.split("/", 1)
    else:
        return None

    # For each new dependency, check if it would create a cycle back to our package
    for dep in new_dependencies:
        # Check if this dependency (transitively) depends on us at the package level
        visiting: Set[str] = set()
        visited: Set[str] = set()
        path: List[str] = [current_path]

        # Check from the dependency's package
        cycle = _detect_package_cycle(
            db,
            dep.project,
            dep.package,
            target_project,
            target_package,
            visiting,
            visited,
            path,
        )
        if cycle:
            return cycle

    return None
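# Guard sketch for the upload path; a router would translate the raised
# exception into an HTTP 409. Names are illustrative only.
def _example_reject_cycles(db: Session, artifact_id: str, parsed: EnsureFileContent) -> None:
    cycle = check_circular_dependencies(
        db, artifact_id, parsed.dependencies,
        project_name="backend-services", package_name="auth-lib",
    )
    if cycle:
        raise CircularDependencyError(cycle)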
def resolve_dependencies(
    db: Session,
    project_name: str,
    package_name: str,
    ref: str,
    base_url: str,
) -> DependencyResolutionResponse:
    """
    Resolve all dependencies for an artifact recursively.

    Args:
        db: Database session
        project_name: Project name
        package_name: Package name
        ref: Tag or version reference
        base_url: Base URL for download URLs

    Returns:
        DependencyResolutionResponse with all resolved artifacts

    Raises:
        DependencyNotFoundError: If a dependency cannot be resolved
        CircularDependencyError: If circular dependencies are detected
        DependencyConflictError: If conflicting versions are required
    """
    # Resolve the initial artifact
    project = db.query(Project).filter(Project.name == project_name).first()
    if not project:
        raise DependencyNotFoundError(project_name, package_name, ref)

    package = db.query(Package).filter(
        Package.project_id == project.id,
        Package.name == package_name,
    ).first()
    if not package:
        raise DependencyNotFoundError(project_name, package_name, ref)

    # Try to find artifact by tag or version
    resolved = _resolve_dependency_to_artifact(
        db, project_name, package_name, ref, ref
    )
    if not resolved:
        raise DependencyNotFoundError(project_name, package_name, ref)

    root_artifact_id, root_version, root_size = resolved

    # Track resolved artifacts and their versions
    resolved_artifacts: Dict[str, ResolvedArtifact] = {}
    # Track version requirements for conflict detection
    version_requirements: Dict[str, List[Dict[str, Any]]] = {}  # pkg_key -> [{"version": ..., "required_by": ...}]
    # Track visiting/visited for cycle detection
    visiting: Set[str] = set()
    visited: Set[str] = set()
    # Resolution order (topological)
    resolution_order: List[str] = []

    def _resolve_recursive(
        artifact_id: str,
        proj_name: str,
        pkg_name: str,
        version_or_tag: str,
        size: int,
        required_by: Optional[str],
        depth: int = 0,
    ):
        """Recursively resolve dependencies with cycle/conflict detection."""
        # Safety limit: prevent DoS through deeply nested dependencies
        if depth > MAX_DEPENDENCY_DEPTH:
            raise DependencyDepthExceededError(MAX_DEPENDENCY_DEPTH)

        pkg_key = f"{proj_name}/{pkg_name}"

        # Cycle detection (at artifact level)
        if artifact_id in visiting:
            # Build cycle path
            raise CircularDependencyError([pkg_key, pkg_key])

        # Conflict detection - check if we've seen this package before with a different version
        if pkg_key in version_requirements:
            existing_versions = {r["version"] for r in version_requirements[pkg_key]}
            if version_or_tag not in existing_versions:
                # Conflict detected - same package, different version
                requirements = version_requirements[pkg_key] + [
                    {"version": version_or_tag, "required_by": required_by}
                ]
                raise DependencyConflictError([
                    DependencyConflict(
                        project=proj_name,
                        package=pkg_name,
                        requirements=[
                            {
                                "version": r["version"],
                                "required_by": [{"path": r["required_by"]}] if r["required_by"] else []
                            }
                            for r in requirements
                        ],
                    )
                ])

        # Same version already resolved - skip
        if artifact_id in visited:
            return

        visiting.add(artifact_id)

        # Track version requirement
        if pkg_key not in version_requirements:
            version_requirements[pkg_key] = []
        version_requirements[pkg_key].append({
            "version": version_or_tag,
            "required_by": required_by,
        })

        # Get dependencies
        deps = db.query(ArtifactDependency).filter(
            ArtifactDependency.artifact_id == artifact_id
        ).all()

        # Resolve each dependency first (depth-first)
        for dep in deps:
            resolved_dep = _resolve_dependency_to_artifact(
                db,
                dep.dependency_project,
                dep.dependency_package,
                dep.version_constraint,
                dep.tag_constraint,
            )

            if not resolved_dep:
                constraint = dep.version_constraint or dep.tag_constraint
                raise DependencyNotFoundError(
                    dep.dependency_project,
                    dep.dependency_package,
                    constraint,
                )

            dep_artifact_id, dep_version, dep_size = resolved_dep
            _resolve_recursive(
                dep_artifact_id,
                dep.dependency_project,
                dep.dependency_package,
                dep_version,
                dep_size,
                pkg_key,
                depth + 1,
            )

        visiting.remove(artifact_id)
        visited.add(artifact_id)

        # Add to resolution order (dependencies before dependents)
        resolution_order.append(artifact_id)

        # Store resolved artifact info
        resolved_artifacts[artifact_id] = ResolvedArtifact(
            artifact_id=artifact_id,
            project=proj_name,
            package=pkg_name,
            version=version_or_tag,
            size=size,
            download_url=f"{base_url}/api/v1/project/{proj_name}/{pkg_name}/+/{version_or_tag}",
        )

    # Start resolution from root
    _resolve_recursive(
        root_artifact_id,
        project_name,
        package_name,
        root_version,
        root_size,
        None,
    )

    # Build response in topological order
    resolved_list = [resolved_artifacts[aid] for aid in resolution_order]
    total_size = sum(r.size for r in resolved_list)

    return DependencyResolutionResponse(
        requested={
            "project": project_name,
            "package": package_name,
            "ref": ref,
        },
        resolved=resolved_list,
        total_size=total_size,
        artifact_count=len(resolved_list),
    )
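Taken together, the module resolves a full transitive closure in one call, in topological order. A usage sketch (the db session, names, and host are illustrative, not fixed values):

result = resolve_dependencies(
    db,
    "backend-services",
    "auth-lib",
    ref="1.0.0",
    base_url="https://orchard.example.com",  # hypothetical host
)
print(result.artifact_count, "artifacts,", result.total_size, "bytes")
for item in result.resolved:  # dependencies are listed before their dependents
    print(item.project, item.package, item.version, item.download_url)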
@@ -32,6 +32,7 @@ class Project(Base):
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
    )
    created_by = Column(String(255), nullable=False)
    team_id = Column(UUID(as_uuid=True), ForeignKey("teams.id", ondelete="SET NULL"))

    packages = relationship(
        "Package", back_populates="project", cascade="all, delete-orphan"
@@ -39,10 +40,12 @@ class Project(Base):
    permissions = relationship(
        "AccessPermission", back_populates="project", cascade="all, delete-orphan"
    )
    team = relationship("Team", back_populates="projects")

    __table_args__ = (
        Index("idx_projects_name", "name"),
        Index("idx_projects_created_by", "created_by"),
        Index("idx_projects_team_id", "team_id"),
    )


@@ -117,6 +120,9 @@ class Artifact(Base):
    tags = relationship("Tag", back_populates="artifact")
    uploads = relationship("Upload", back_populates="artifact")
    versions = relationship("PackageVersion", back_populates="artifact")
    dependencies = relationship(
        "ArtifactDependency", back_populates="artifact", cascade="all, delete-orphan"
    )

    @property
    def sha256(self) -> str:
@@ -366,6 +372,9 @@ class User(Base):
    sessions = relationship(
        "Session", back_populates="user", cascade="all, delete-orphan"
    )
    team_memberships = relationship(
        "TeamMembership", back_populates="user", cascade="all, delete-orphan"
    )

    __table_args__ = (
        Index("idx_users_username", "username"),
@@ -507,3 +516,124 @@ class PackageHistory(Base):
        Index("idx_package_history_changed_at", "changed_at"),
        Index("idx_package_history_package_changed_at", "package_id", "changed_at"),
    )


class ArtifactDependency(Base):
    """Dependency declared by an artifact on another package.

    Each artifact can declare dependencies on other packages, specifying either
    an exact version or a tag. This enables recursive dependency resolution.
    """

    __tablename__ = "artifact_dependencies"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    artifact_id = Column(
        String(64),
        ForeignKey("artifacts.id", ondelete="CASCADE"),
        nullable=False,
    )
    dependency_project = Column(String(255), nullable=False)
    dependency_package = Column(String(255), nullable=False)
    version_constraint = Column(String(255), nullable=True)
    tag_constraint = Column(String(255), nullable=True)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)

    # Relationship to the artifact that declares this dependency
    artifact = relationship("Artifact", back_populates="dependencies")

    __table_args__ = (
        # Exactly one of version_constraint or tag_constraint must be set
        CheckConstraint(
            "(version_constraint IS NOT NULL AND tag_constraint IS NULL) OR "
            "(version_constraint IS NULL AND tag_constraint IS NOT NULL)",
            name="check_constraint_type",
        ),
        # Each artifact can only depend on a specific project/package once
        Index(
            "idx_artifact_dependencies_artifact_id",
            "artifact_id",
        ),
        Index(
            "idx_artifact_dependencies_target",
            "dependency_project",
            "dependency_package",
        ),
        Index(
            "idx_artifact_dependencies_unique",
            "artifact_id",
            "dependency_project",
            "dependency_package",
            unique=True,
        ),
    )


class Team(Base):
    """Team for organizing projects and users."""

    __tablename__ = "teams"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = Column(String(255), nullable=False)
    slug = Column(String(255), unique=True, nullable=False)
    description = Column(Text)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    updated_at = Column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
    )
    created_by = Column(String(255), nullable=False)
    settings = Column(JSON, default=dict)

    # Relationships
    memberships = relationship(
        "TeamMembership", back_populates="team", cascade="all, delete-orphan"
    )
    projects = relationship("Project", back_populates="team")

    __table_args__ = (
        Index("idx_teams_slug", "slug"),
        Index("idx_teams_created_by", "created_by"),
        Index("idx_teams_created_at", "created_at"),
        CheckConstraint(
            "slug ~ '^[a-z0-9][a-z0-9-]*[a-z0-9]$' OR slug ~ '^[a-z0-9]$'",
            name="check_team_slug_format",
        ),
    )


class TeamMembership(Base):
    """Maps users to teams with their roles."""

    __tablename__ = "team_memberships"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    team_id = Column(
        UUID(as_uuid=True),
        ForeignKey("teams.id", ondelete="CASCADE"),
        nullable=False,
    )
    user_id = Column(
        UUID(as_uuid=True),
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=False,
    )
    role = Column(String(20), nullable=False, default="member")
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    invited_by = Column(String(255))

    # Relationships
    team = relationship("Team", back_populates="memberships")
    user = relationship("User", back_populates="team_memberships")

    __table_args__ = (
        Index("idx_team_memberships_team_id", "team_id"),
        Index("idx_team_memberships_user_id", "user_id"),
        Index("idx_team_memberships_role", "role"),
        Index("idx_team_memberships_team_role", "team_id", "role"),
        Index("idx_team_memberships_unique", "team_id", "user_id", unique=True),
        CheckConstraint(
            "role IN ('owner', 'admin', 'member')",
            name="check_team_role",
        ),
    )
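A short sketch of the new team tables in use (session handling elided; the user object and names are illustrative):

team = Team(name="Platform", slug="platform", created_by="admin")
db.add(team)
db.flush()  # populate team.id before creating the membership
db.add(TeamMembership(team_id=team.id, user_id=user.id, role="owner", invited_by="admin"))
db.commit()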
File diff suppressed because it is too large
@@ -25,6 +25,7 @@ class ProjectCreate(BaseModel):
    name: str
    description: Optional[str] = None
    is_public: bool = True
    team_id: Optional[UUID] = None


class ProjectResponse(BaseModel):
@@ -35,6 +36,9 @@ class ProjectResponse(BaseModel):
    created_at: datetime
    updated_at: datetime
    created_by: str
    team_id: Optional[UUID] = None
    team_slug: Optional[str] = None
    team_name: Optional[str] = None

    class Config:
        from_attributes = True
@@ -412,6 +416,9 @@ class UploadResponse(BaseModel):
    content_type: Optional[str] = None
    original_name: Optional[str] = None
    created_at: Optional[datetime] = None
    # Upload metrics (Issue #43)
    duration_ms: Optional[int] = None  # Upload duration in milliseconds
    throughput_mbps: Optional[float] = None  # Upload throughput in MB/s


# Resumable upload schemas
@@ -478,6 +485,21 @@ class ResumableUploadStatusResponse(BaseModel):
    total_uploaded_bytes: int


class UploadProgressResponse(BaseModel):
    """Progress information for an in-flight upload"""

    upload_id: str
    status: str  # 'in_progress', 'completed', 'failed', 'not_found'
    bytes_uploaded: int = 0
    bytes_total: Optional[int] = None
    percent_complete: Optional[float] = None
    parts_uploaded: int = 0
    parts_total: Optional[int] = None
    started_at: Optional[datetime] = None
    elapsed_seconds: Optional[float] = None
    throughput_mbps: Optional[float] = None


# Consumer schemas
class ConsumerResponse(BaseModel):
    id: UUID
@@ -889,6 +911,9 @@ class AccessPermissionResponse(BaseModel):
    level: str
    created_at: datetime
    expires_at: Optional[datetime]
    source: Optional[str] = "explicit"  # "explicit" or "team"
    team_slug: Optional[str] = None  # Team slug if source is "team"
    team_role: Optional[str] = None  # Team role if source is "team"

    class Config:
        from_attributes = True
@@ -898,3 +923,276 @@ class ProjectWithAccessResponse(ProjectResponse):
    """Project response with user's access level"""
    user_access_level: Optional[str] = None


# Artifact Dependency schemas
class DependencyCreate(BaseModel):
    """Schema for creating a dependency"""
    project: str
    package: str
    version: Optional[str] = None
    tag: Optional[str] = None

    @field_validator('version', 'tag')
    @classmethod
    def validate_constraint(cls, v, info):
        return v

    def model_post_init(self, __context):
        """Validate that exactly one of version or tag is set"""
        if self.version is None and self.tag is None:
            raise ValueError("Either 'version' or 'tag' must be specified")
        if self.version is not None and self.tag is not None:
            raise ValueError("Cannot specify both 'version' and 'tag'")


class DependencyResponse(BaseModel):
    """Schema for dependency response"""
    id: UUID
    artifact_id: str
    project: str
    package: str
    version: Optional[str] = None
    tag: Optional[str] = None
    created_at: datetime

    class Config:
        from_attributes = True

    @classmethod
    def from_orm_model(cls, dep) -> "DependencyResponse":
        """Create from ORM model with field mapping"""
        return cls(
            id=dep.id,
            artifact_id=dep.artifact_id,
            project=dep.dependency_project,
            package=dep.dependency_package,
            version=dep.version_constraint,
            tag=dep.tag_constraint,
            created_at=dep.created_at,
        )


class ArtifactDependenciesResponse(BaseModel):
    """Response containing all dependencies for an artifact"""
    artifact_id: str
    dependencies: List[DependencyResponse]


class DependentInfo(BaseModel):
    """Information about an artifact that depends on a package"""
    artifact_id: str
    project: str
    package: str
    version: Optional[str] = None
    constraint_type: str  # 'version' or 'tag'
    constraint_value: str


class ReverseDependenciesResponse(BaseModel):
    """Response containing packages that depend on a given package"""
    project: str
    package: str
    dependents: List[DependentInfo]
    pagination: PaginationMeta


class EnsureFileDependency(BaseModel):
    """Dependency entry from orchard.ensure file"""
    project: str
    package: str
    version: Optional[str] = None
    tag: Optional[str] = None

    @field_validator('version', 'tag')
    @classmethod
    def validate_constraint(cls, v, info):
        return v

    def model_post_init(self, __context):
        """Validate that exactly one of version or tag is set"""
        if self.version is None and self.tag is None:
            raise ValueError("Either 'version' or 'tag' must be specified")
        if self.version is not None and self.tag is not None:
            raise ValueError("Cannot specify both 'version' and 'tag'")


class EnsureFileContent(BaseModel):
    """Parsed content of orchard.ensure file"""
    dependencies: List[EnsureFileDependency] = []


class ResolvedArtifact(BaseModel):
    """A resolved artifact in the dependency tree"""
    artifact_id: str
    project: str
    package: str
    version: Optional[str] = None
    tag: Optional[str] = None
    size: int
    download_url: str


class DependencyResolutionResponse(BaseModel):
    """Response from dependency resolution endpoint"""
    requested: Dict[str, str]  # project, package, ref
    resolved: List[ResolvedArtifact]
    total_size: int
    artifact_count: int


class DependencyConflict(BaseModel):
    """Details about a dependency conflict"""
    project: str
    package: str
    requirements: List[Dict[str, Any]]  # version/tag and required_by info


class DependencyConflictError(BaseModel):
    """Error response for dependency conflicts"""
    error: str = "dependency_conflict"
    message: str
    conflicts: List[DependencyConflict]


class CircularDependencyError(BaseModel):
    """Error response for circular dependencies"""
    error: str = "circular_dependency"
    message: str
    cycle: List[str]  # List of "project/package" strings showing the cycle


# Team schemas
TEAM_ROLES = ["owner", "admin", "member"]
RESERVED_TEAM_SLUGS = {"new", "api", "admin", "settings", "members", "projects", "search"}


class TeamCreate(BaseModel):
    """Create a new team"""
    name: str
    slug: str
    description: Optional[str] = None

    @field_validator('name')
    @classmethod
    def validate_name(cls, v: str) -> str:
        """Validate team name."""
        if not v or not v.strip():
            raise ValueError("Name cannot be empty")
        if len(v) > 255:
            raise ValueError("Name must be 255 characters or less")
        return v.strip()

    @field_validator('slug')
    @classmethod
    def validate_slug(cls, v: str) -> str:
        """Validate team slug format (lowercase alphanumeric with hyphens)."""
        import re
        if not v:
            raise ValueError("Slug cannot be empty")
        if len(v) < 2:
            raise ValueError("Slug must be at least 2 characters")
        if len(v) > 255:
            raise ValueError("Slug must be 255 characters or less")
        if not re.match(r'^[a-z0-9][a-z0-9-]*[a-z0-9]$', v) and not re.match(r'^[a-z0-9]$', v):
            raise ValueError(
                "Slug must be lowercase alphanumeric with hyphens, "
                "starting and ending with alphanumeric characters"
            )
        if '--' in v:
            raise ValueError("Slug cannot contain consecutive hyphens")
        if v in RESERVED_TEAM_SLUGS:
            raise ValueError(f"Slug '{v}' is reserved and cannot be used")
        return v

    @field_validator('description')
    @classmethod
    def validate_description(cls, v: Optional[str]) -> Optional[str]:
        """Validate team description."""
        if v is not None and len(v) > 2000:
            raise ValueError("Description must be 2000 characters or less")
        return v
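# Validation sketch (illustrative): pydantic surfaces the ValueErrors above
# as a ValidationError.
def _example_team_create_validation() -> None:
    from pydantic import ValidationError
    TeamCreate(name="Platform Team", slug="platform-team")  # accepted
    try:
        TeamCreate(name="Platform Team", slug="Platform_Team")  # rejected: bad slug format
    except ValidationError as exc:
        print(exc.errors()[0]["msg"])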
class TeamUpdate(BaseModel):
    """Update team details"""
    name: Optional[str] = None
    description: Optional[str] = None

    @field_validator('name')
    @classmethod
    def validate_name(cls, v: Optional[str]) -> Optional[str]:
        """Validate team name."""
        if v is not None:
            if not v.strip():
                raise ValueError("Name cannot be empty")
            if len(v) > 255:
                raise ValueError("Name must be 255 characters or less")
            return v.strip()
        return v

    @field_validator('description')
    @classmethod
    def validate_description(cls, v: Optional[str]) -> Optional[str]:
        """Validate team description."""
        if v is not None and len(v) > 2000:
            raise ValueError("Description must be 2000 characters or less")
        return v


class TeamResponse(BaseModel):
    """Team response with basic info"""
    id: UUID
    name: str
    slug: str
    description: Optional[str]
    created_at: datetime
    updated_at: datetime
    member_count: int = 0
    project_count: int = 0

    class Config:
        from_attributes = True


class TeamDetailResponse(TeamResponse):
    """Team response with user's role"""
    user_role: Optional[str] = None  # 'owner', 'admin', 'member', or None


class TeamMemberCreate(BaseModel):
    """Add a member to a team"""
    username: str
    role: str = "member"

    @field_validator('role')
    @classmethod
    def validate_role(cls, v: str) -> str:
        if v not in TEAM_ROLES:
            raise ValueError(f"Role must be one of: {', '.join(TEAM_ROLES)}")
        return v


class TeamMemberUpdate(BaseModel):
    """Update a team member's role"""
    role: str

    @field_validator('role')
    @classmethod
    def validate_role(cls, v: str) -> str:
        if v not in TEAM_ROLES:
            raise ValueError(f"Role must be one of: {', '.join(TEAM_ROLES)}")
        return v


class TeamMemberResponse(BaseModel):
    """Team member response"""
    id: UUID
    user_id: UUID
    username: str
    email: Optional[str]
    role: str
    created_at: datetime

    class Config:
        from_attributes = True
@@ -5,8 +5,9 @@ import hashlib
import logging
from sqlalchemy.orm import Session

from .models import Project, Package, Artifact, Tag, Upload, PackageVersion
from .models import Project, Package, Artifact, Tag, Upload, PackageVersion, ArtifactDependency, Team, TeamMembership, User
from .storage import get_storage
from .auth import hash_password

logger = logging.getLogger(__name__)

@@ -123,6 +124,17 @@ TEST_ARTIFACTS = [
    },
]

# Dependencies to create (source artifact -> dependency)
# Format: (source_project, source_package, source_version, dep_project, dep_package, version_constraint, tag_constraint)
TEST_DEPENDENCIES = [
    # ui-components v1.1.0 depends on design-tokens v1.0.0
    ("frontend-libs", "ui-components", "1.1.0", "frontend-libs", "design-tokens", "1.0.0", None),
    # auth-lib v1.0.0 depends on common-utils v2.0.0
    ("backend-services", "auth-lib", "1.0.0", "backend-services", "common-utils", "2.0.0", None),
    # auth-lib v1.0.0 also depends on design-tokens (latest tag)
    ("backend-services", "auth-lib", "1.0.0", "frontend-libs", "design-tokens", None, "latest"),
]


def is_database_empty(db: Session) -> bool:
    """Check if the database has any projects."""
@@ -138,6 +150,80 @@ def seed_database(db: Session) -> None:
    logger.info("Seeding database with test data...")
    storage = get_storage()

    # Find or use admin user for team ownership
    admin_user = db.query(User).filter(User.username == "admin").first()
    team_owner_username = admin_user.username if admin_user else "seed-user"

    # Create a demo team
    demo_team = Team(
        name="Demo Team",
        slug="demo-team",
        description="A demonstration team with sample projects",
        created_by=team_owner_username,
    )
    db.add(demo_team)
    db.flush()

    # Add admin user as team owner if they exist
    if admin_user:
        membership = TeamMembership(
            team_id=demo_team.id,
            user_id=admin_user.id,
            role="owner",
            invited_by=team_owner_username,
        )
        db.add(membership)
        db.flush()

    logger.info(f"Created team: {demo_team.name} ({demo_team.slug})")

    # Create test users with various roles
    test_users = [
        {"username": "alice", "email": "alice@example.com", "role": "admin"},
        {"username": "bob", "email": "bob@example.com", "role": "admin"},
        {"username": "charlie", "email": "charlie@example.com", "role": "member"},
        {"username": "diana", "email": "diana@example.com", "role": "member"},
        {"username": "eve", "email": "eve@example.com", "role": "member"},
        {"username": "frank", "email": None, "role": "member"},
    ]

    for user_data in test_users:
        # Check if user already exists
        existing_user = db.query(User).filter(User.username == user_data["username"]).first()
        if existing_user:
            test_user = existing_user
        else:
            # Create the user with password same as username
            test_user = User(
                username=user_data["username"],
                email=user_data["email"],
                password_hash=hash_password(user_data["username"]),
                is_admin=False,
                is_active=True,
                must_change_password=False,
            )
            db.add(test_user)
            db.flush()
            logger.info(f"Created test user: {user_data['username']}")

        # Add to demo team with specified role
        existing_membership = db.query(TeamMembership).filter(
            TeamMembership.team_id == demo_team.id,
            TeamMembership.user_id == test_user.id,
        ).first()

        if not existing_membership:
            membership = TeamMembership(
                team_id=demo_team.id,
                user_id=test_user.id,
                role=user_data["role"],
                invited_by=team_owner_username,
            )
            db.add(membership)
            logger.info(f"Added {user_data['username']} to {demo_team.slug} as {user_data['role']}")

    db.flush()

    # Create projects and packages
    project_map = {}
    package_map = {}
@@ -147,7 +233,8 @@ def seed_database(db: Session) -> None:
            name=project_data["name"],
            description=project_data["description"],
            is_public=project_data["is_public"],
            created_by="seed-user",
            created_by=team_owner_username,
            team_id=demo_team.id,  # Assign to demo team
        )
        db.add(project)
        db.flush()  # Get the ID
@@ -163,7 +250,7 @@ def seed_database(db: Session) -> None:
            db.flush()
            package_map[(project_data["name"], package_data["name"])] = package

    logger.info(f"Created {len(project_map)} projects and {len(package_map)} packages")
    logger.info(f"Created {len(project_map)} projects and {len(package_map)} packages (assigned to {demo_team.slug})")

    # Create artifacts, tags, and versions
    artifact_count = 0
@@ -201,7 +288,7 @@ def seed_database(db: Session) -> None:
            size=size,
            content_type=artifact_data["content_type"],
            original_name=artifact_data["filename"],
            created_by="seed-user",
            created_by=team_owner_username,
            s3_key=s3_key,
            ref_count=ref_count,
        )
@@ -224,7 +311,7 @@ def seed_database(db: Session) -> None:
                artifact_id=sha256_hash,
                version=artifact_data["version"],
                version_source="explicit",
                created_by="seed-user",
                created_by=team_owner_username,
            )
            db.add(version)
            version_count += 1
@@ -235,11 +322,45 @@ def seed_database(db: Session) -> None:
                package_id=package.id,
                name=tag_name,
                artifact_id=sha256_hash,
                created_by="seed-user",
                created_by=team_owner_username,
            )
            db.add(tag)
            tag_count += 1

    db.flush()

    # Create dependencies
    dependency_count = 0
    for dep_data in TEST_DEPENDENCIES:
        src_project, src_package, src_version, dep_project, dep_package, version_constraint, tag_constraint = dep_data

        # Find the source artifact by looking up its version
        src_pkg = package_map.get((src_project, src_package))
        if not src_pkg:
            logger.warning(f"Source package not found: {src_project}/{src_package}")
            continue

        # Find the artifact for this version
        src_version_record = db.query(PackageVersion).filter(
            PackageVersion.package_id == src_pkg.id,
            PackageVersion.version == src_version,
        ).first()

        if not src_version_record:
            logger.warning(f"Source version not found: {src_project}/{src_package}@{src_version}")
            continue

        # Create the dependency
        dependency = ArtifactDependency(
            artifact_id=src_version_record.artifact_id,
            dependency_project=dep_project,
            dependency_package=dep_package,
            version_constraint=version_constraint,
            tag_constraint=tag_constraint,
        )
        db.add(dependency)
        dependency_count += 1

    db.commit()
    logger.info(f"Created {artifact_count} artifacts, {tag_count} tags, and {version_count} versions")
    logger.info(f"Created {artifact_count} artifacts, {tag_count} tags, {version_count} versions, and {dependency_count} dependencies")
    logger.info("Database seeding complete")
@@ -242,15 +242,19 @@ class S3Storage:
            },
        )

        self.client = boto3.client(
            "s3",
            endpoint_url=settings.s3_endpoint if settings.s3_endpoint else None,
            region_name=settings.s3_region,
            aws_access_key_id=settings.s3_access_key_id,
            aws_secret_access_key=settings.s3_secret_access_key,
            config=config,
            verify=settings.s3_verify_ssl,  # SSL/TLS verification
        )
        # Build client kwargs - only include credentials if explicitly provided
        # This allows IRSA/IAM role credentials to be used when no explicit creds are set
        client_kwargs = {
            "endpoint_url": settings.s3_endpoint if settings.s3_endpoint else None,
            "region_name": settings.s3_region,
            "config": config,
            "verify": settings.s3_verify_ssl,
        }
        if settings.s3_access_key_id and settings.s3_secret_access_key:
            client_kwargs["aws_access_key_id"] = settings.s3_access_key_id
            client_kwargs["aws_secret_access_key"] = settings.s3_secret_access_key

        self.client = boto3.client("s3", **client_kwargs)
        self.bucket = settings.s3_bucket
        # Store active multipart uploads for resumable support
        self._active_uploads: Dict[str, Dict[str, Any]] = {}
@@ -378,10 +382,16 @@ class S3Storage:
        """
        # First pass: compute all hashes by streaming through file
        try:
            import time
            sha256_hasher = hashlib.sha256()
            md5_hasher = hashlib.md5()
            sha1_hasher = hashlib.sha1()
            size = 0
            hash_start_time = time.time()
            last_log_time = hash_start_time
            log_interval_seconds = 5  # Log progress every 5 seconds

            logger.info(f"Computing hashes for large file: expected_size={content_length}")

            # Read file in chunks to compute hashes
            while True:
@@ -393,6 +403,18 @@ class S3Storage:
                sha1_hasher.update(chunk)
                size += len(chunk)

                # Log hash computation progress periodically
                current_time = time.time()
                if current_time - last_log_time >= log_interval_seconds:
                    elapsed = current_time - hash_start_time
                    percent = (size / content_length) * 100 if content_length > 0 else 0
                    throughput = (size / (1024 * 1024)) / elapsed if elapsed > 0 else 0
                    logger.info(
                        f"Hash computation progress: bytes={size}/{content_length} ({percent:.1f}%) "
                        f"throughput={throughput:.2f}MB/s"
                    )
                    last_log_time = current_time

                # Enforce file size limit during streaming (protection against spoofing)
                if size > settings.max_file_size:
                    raise FileSizeExceededError(
@@ -405,6 +427,14 @@ class S3Storage:
            sha256_hash = sha256_hasher.hexdigest()
            md5_hash = md5_hasher.hexdigest()
            sha1_hash = sha1_hasher.hexdigest()

            # Log hash computation completion
            hash_elapsed = time.time() - hash_start_time
            hash_throughput = (size / (1024 * 1024)) / hash_elapsed if hash_elapsed > 0 else 0
            logger.info(
                f"Hash computation completed: hash={sha256_hash[:16]}... "
                f"size={size} duration={hash_elapsed:.2f}s throughput={hash_throughput:.2f}MB/s"
            )
        except (HashComputationError, FileSizeExceededError):
            raise
        except Exception as e:
@@ -458,8 +488,19 @@ class S3Storage:
        upload_id = mpu["UploadId"]

        try:
            import time
            parts = []
            part_number = 1
            bytes_uploaded = 0
            upload_start_time = time.time()
            last_log_time = upload_start_time
            log_interval_seconds = 5  # Log progress every 5 seconds

            total_parts = (content_length + MULTIPART_CHUNK_SIZE - 1) // MULTIPART_CHUNK_SIZE
            logger.info(
                f"Starting multipart upload: hash={sha256_hash[:16]}... "
                f"size={content_length} parts={total_parts}"
            )

            while True:
                chunk = file.read(MULTIPART_CHUNK_SIZE)
@@ -479,8 +520,32 @@ class S3Storage:
                        "ETag": response["ETag"],
                    }
                )
                bytes_uploaded += len(chunk)

                # Log progress periodically
                current_time = time.time()
                if current_time - last_log_time >= log_interval_seconds:
                    elapsed = current_time - upload_start_time
                    percent = (bytes_uploaded / content_length) * 100
                    throughput = (bytes_uploaded / (1024 * 1024)) / elapsed if elapsed > 0 else 0
                    logger.info(
                        f"Upload progress: hash={sha256_hash[:16]}... "
                        f"part={part_number}/{total_parts} "
                        f"bytes={bytes_uploaded}/{content_length} ({percent:.1f}%) "
                        f"throughput={throughput:.2f}MB/s"
                    )
                    last_log_time = current_time

                part_number += 1

            # Log completion
            total_elapsed = time.time() - upload_start_time
            final_throughput = (content_length / (1024 * 1024)) / total_elapsed if total_elapsed > 0 else 0
            logger.info(
                f"Multipart upload completed: hash={sha256_hash[:16]}... "
                f"size={content_length} duration={total_elapsed:.2f}s throughput={final_throughput:.2f}MB/s"
            )

            # Complete multipart upload
            complete_response = self.client.complete_multipart_upload(
                Bucket=self.bucket,
@@ -502,12 +567,28 @@ class S3Storage:

        except Exception as e:
            # Abort multipart upload on failure
            logger.error(f"Multipart upload failed: {e}")
            self.client.abort_multipart_upload(
                Bucket=self.bucket,
                Key=s3_key,
                UploadId=upload_id,
            error_str = str(e).lower()
            is_client_disconnect = (
                isinstance(e, (ConnectionResetError, BrokenPipeError)) or
                "connection" in error_str or "broken pipe" in error_str or "reset" in error_str
            )
            if is_client_disconnect:
                logger.warning(
                    f"Multipart upload aborted (client disconnect): hash={sha256_hash[:16]}... "
                    f"parts_uploaded={len(parts)} bytes_uploaded={bytes_uploaded}"
                )
            else:
                logger.error(f"Multipart upload failed: hash={sha256_hash[:16]}... error={e}")

            try:
                self.client.abort_multipart_upload(
                    Bucket=self.bucket,
                    Key=s3_key,
                    UploadId=upload_id,
                )
                logger.info(f"Multipart upload aborted and cleaned up: upload_id={upload_id[:16]}...")
            except Exception as abort_error:
                logger.error(f"Failed to abort multipart upload: {abort_error}")
            raise

    def initiate_resumable_upload(self, expected_hash: str) -> Dict[str, Any]:
@@ -529,12 +610,17 @@ class S3Storage:
        mpu = self.client.create_multipart_upload(Bucket=self.bucket, Key=s3_key)
        upload_id = mpu["UploadId"]

        import time
        session = {
            "upload_id": upload_id,
            "s3_key": s3_key,
            "already_exists": False,
            "parts": [],
            "expected_hash": expected_hash,
            "started_at": time.time(),
            "bytes_uploaded": 0,
            "expected_size": None,  # Set when init provides size
            "status": "in_progress",
        }
        self._active_uploads[upload_id] = session
        return session
@@ -561,10 +647,57 @@ class S3Storage:
        part_info = {
            "PartNumber": part_number,
            "ETag": response["ETag"],
            "size": len(data),
        }
        session["parts"].append(part_info)
        session["bytes_uploaded"] = session.get("bytes_uploaded", 0) + len(data)
        return part_info

    def get_upload_progress(self, upload_id: str) -> Optional[Dict[str, Any]]:
        """
        Get progress information for a resumable upload.
        Returns None if upload not found.
        """
        import time
        session = self._active_uploads.get(upload_id)
        if not session:
            return None

        bytes_uploaded = session.get("bytes_uploaded", 0)
        expected_size = session.get("expected_size")
        started_at = session.get("started_at")

        progress = {
            "upload_id": upload_id,
            "status": session.get("status", "in_progress"),
            "bytes_uploaded": bytes_uploaded,
            "bytes_total": expected_size,
            "parts_uploaded": len(session.get("parts", [])),
            "parts_total": None,
            "started_at": started_at,
            "elapsed_seconds": None,
            "percent_complete": None,
            "throughput_mbps": None,
        }

        if expected_size and expected_size > 0:
            progress["percent_complete"] = round((bytes_uploaded / expected_size) * 100, 2)
            progress["parts_total"] = (expected_size + MULTIPART_CHUNK_SIZE - 1) // MULTIPART_CHUNK_SIZE

        if started_at:
            elapsed = time.time() - started_at
            progress["elapsed_seconds"] = round(elapsed, 2)
            if elapsed > 0 and bytes_uploaded > 0:
                progress["throughput_mbps"] = round((bytes_uploaded / (1024 * 1024)) / elapsed, 2)

        return progress
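    # Usage sketch (illustrative): an API handler can poll this to populate
    # an UploadProgressResponse.
    #
    #     progress = storage.get_upload_progress(upload_id)
    #     if progress is None:
    #         ...  # report 'not_found'
    #     elif progress["percent_complete"] is not None:
    #         logger.info(f"{progress['percent_complete']}% at "
    #                     f"{progress['throughput_mbps']} MB/s")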
    def set_upload_expected_size(self, upload_id: str, size: int):
        """Set the expected size for an upload (for progress tracking)."""
        session = self._active_uploads.get(upload_id)
        if session:
            session["expected_size"] = size

    def complete_resumable_upload(self, upload_id: str) -> Tuple[str, str]:
        """
        Complete a resumable upload.
@@ -702,6 +835,36 @@ class S3Storage:
        except ClientError:
            return False

    def delete_all(self) -> int:
        """
        Delete all objects in the bucket.

        Returns:
            Number of objects deleted
        """
        deleted_count = 0
        try:
            paginator = self.client.get_paginator("list_objects_v2")
            for page in paginator.paginate(Bucket=self.bucket):
                objects = page.get("Contents", [])
                if not objects:
                    continue

                # Delete objects in batches of 1000 (S3 limit)
                delete_keys = [{"Key": obj["Key"]} for obj in objects]
                if delete_keys:
                    self.client.delete_objects(
                        Bucket=self.bucket, Delete={"Objects": delete_keys}
                    )
                    deleted_count += len(delete_keys)
                    logger.info(f"Deleted {len(delete_keys)} objects from S3")

            logger.info(f"Total objects deleted from S3: {deleted_count}")
            return deleted_count
        except ClientError as e:
            logger.error(f"Failed to delete all S3 objects: {e}")
            raise

    def generate_presigned_url(
        self,
        s3_key: str,
@@ -12,6 +12,8 @@ markers =
    unit: Unit tests (no external dependencies)
    integration: Integration tests (require database/storage)
    slow: Slow tests (skip with -m "not slow")
    large: Large file tests (100MB+, skip with -m "not large")
    concurrent: Concurrent operation tests

# Coverage configuration
[coverage:run]
@@ -9,6 +9,37 @@ This module provides:

import os
import pytest


# =============================================================================
# Pytest Markers
# =============================================================================


def pytest_configure(config):
    """Register custom pytest markers."""
    config.addinivalue_line(
        "markers",
        "auth_intensive: marks tests that make many login requests (excluded from CI integration tests due to rate limiting)",
    )
    config.addinivalue_line(
        "markers",
        "integration: marks tests as integration tests",
    )
    config.addinivalue_line(
        "markers",
        "large: marks tests that handle large files (slow)",
    )
    config.addinivalue_line(
        "markers",
        "slow: marks tests as slow running",
    )
    config.addinivalue_line(
        "markers",
        "requires_direct_s3: marks tests that require direct S3/MinIO access (skipped in CI where S3 is not directly accessible)",
    )
import io
from typing import Generator
from unittest.mock import MagicMock

@@ -25,6 +56,26 @@ os.environ.setdefault("ORCHARD_S3_BUCKET", "test-bucket")
os.environ.setdefault("ORCHARD_S3_ACCESS_KEY_ID", "test")
os.environ.setdefault("ORCHARD_S3_SECRET_ACCESS_KEY", "test")

# =============================================================================
# Admin Credentials Helper
# =============================================================================


def get_admin_password() -> str:
    """Get the admin password for test authentication.

    Returns the password from the ORCHARD_TEST_PASSWORD environment variable,
    or 'changeme123' as the default for local development.
    """
    return os.environ.get("ORCHARD_TEST_PASSWORD", "changeme123")


def get_admin_username() -> str:
    """Get the admin username for test authentication."""
    return os.environ.get("ORCHARD_TEST_USERNAME", "admin")
# Re-export factory functions for backward compatibility
from tests.factories import (
    create_test_file,
@@ -32,6 +83,8 @@ from tests.factories import (
    compute_md5,
    compute_sha1,
    upload_test_file,
    generate_content,
    generate_content_with_hash,
    TEST_CONTENT_HELLO,
    TEST_HASH_HELLO,
    TEST_MD5_HELLO,
@@ -179,29 +232,64 @@ def test_app():
# =============================================================================


-@pytest.fixture
+@pytest.fixture(scope="session")
def integration_client():
    """
    Create an authenticated test client for integration tests.

-    Uses the real database and MinIO from docker-compose.local.yml.
-    Authenticates as admin for write operations.
+    Uses the real database and MinIO from docker-compose.local.yml or deployed environment.
+    Authenticates as admin for write operations. Session-scoped to reuse login across tests.
+
+    Environment variables:
+        ORCHARD_TEST_URL: Base URL of the Orchard server (default: http://localhost:8080)
+        ORCHARD_TEST_USERNAME: Admin username for authentication (default: admin)
+        ORCHARD_TEST_PASSWORD: Admin password for authentication (default: changeme123)
    """
-    from httpx import Client
+    import httpx

-    # Connect to the running orchard-server container
+    # Connect to the running orchard-server container or deployed environment
    base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
+    username = os.environ.get("ORCHARD_TEST_USERNAME", "admin")
+    password = os.environ.get("ORCHARD_TEST_PASSWORD", "changeme123")

-    with Client(base_url=base_url, timeout=30.0) as client:
+    with httpx.Client(base_url=base_url, timeout=30.0) as client:
        # Login as admin to enable write operations
        login_response = client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": username, "password": password},
        )
-        # If login fails, tests will fail - that's expected if auth is broken
        if login_response.status_code != 200:
-            # Try to continue without auth for backward compatibility
-            pass
+            pytest.fail(
+                f"Authentication failed against {base_url}: {login_response.status_code} - {login_response.text}. "
+                f"Set ORCHARD_TEST_USERNAME and ORCHARD_TEST_PASSWORD environment variables if using non-default credentials."
+            )
+
+        # Verify cookie was set
+        if not client.cookies:
+            pytest.fail(
+                f"Login succeeded but no session cookie was set. Response headers: {login_response.headers}"
+            )

        yield client
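A minimal sketch of a test leaning on this fixture; the test name is hypothetical, and it assumes the session cookie from the fixture's admin login is still valid:

import pytest

@pytest.mark.integration
def test_me_shows_admin(integration_client):
    # The fixture already logged in, so the session cookie is present.
    response = integration_client.get("/api/v1/auth/me")
    assert response.status_code == 200
    assert response.json()["is_admin"] is True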
@pytest.fixture
def auth_client():
    """
    Create a function-scoped test client for authentication tests.

    Unlike integration_client (session-scoped), this creates a fresh client
    for each test. Use this for tests that manipulate authentication state
    (login, logout, cookie clearing) to avoid polluting other tests.

    Environment variables:
        ORCHARD_TEST_URL: Base URL of the Orchard server (default: http://localhost:8080)
    """
    import httpx

    base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

    with httpx.Client(base_url=base_url, timeout=30.0) as client:
        yield client
@@ -271,3 +359,41 @@ def test_content():
    content = f"test-content-{uuid.uuid4().hex}".encode()
    sha256 = compute_sha256(content)
    return (content, sha256)


@pytest.fixture
def sized_content():
    """
    Factory fixture for generating content of specific sizes.

    Usage:
        def test_example(sized_content):
            content, hash = sized_content(1024)  # 1KB
            content, hash = sized_content(1024 * 1024)  # 1MB
    """
    def _generate(size: int, seed: int = None):
        return generate_content_with_hash(size, seed)
    return _generate
# =============================================================================
# Size Constants for Tests
# =============================================================================

# Common file sizes for boundary testing
SIZE_1B = 1
SIZE_1KB = 1024
SIZE_10KB = 10 * 1024
SIZE_100KB = 100 * 1024
SIZE_1MB = 1024 * 1024
SIZE_5MB = 5 * 1024 * 1024
SIZE_10MB = 10 * 1024 * 1024
SIZE_50MB = 50 * 1024 * 1024
SIZE_100MB = 100 * 1024 * 1024
SIZE_250MB = 250 * 1024 * 1024
SIZE_500MB = 500 * 1024 * 1024
SIZE_1GB = 1024 * 1024 * 1024

# Chunk size boundaries (based on typical S3 multipart chunk sizes)
CHUNK_SIZE = 64 * 1024  # 64KB typical chunk
MULTIPART_THRESHOLD = 100 * 1024 * 1024  # 100MB multipart threshold
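These constants pair naturally with the sized_content fixture for boundary sweeps. A sketch (the test name is hypothetical; it assumes the test_package fixture and upload_test_file from tests.factories, and that artifact IDs are SHA-256 digests, as the concurrency tests below assert):

import pytest

@pytest.mark.integration
@pytest.mark.parametrize("size", [SIZE_1B, SIZE_1KB, SIZE_1MB, SIZE_5MB])
def test_upload_roundtrip_sizes(integration_client, test_package, sized_content, size):
    project, package = test_package
    content, sha256 = sized_content(size, seed=42)  # seeded, so reproducible
    result = upload_test_file(integration_client, project, package, content, tag=f"size-{size}")
    assert result["artifact_id"] == sha256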
@@ -130,6 +130,41 @@ def upload_test_file(
    return response.json()


def generate_content(size: int, seed: Optional[int] = None) -> bytes:
    """
    Generate deterministic or random content of a specified size.

    Args:
        size: Size of content in bytes
        seed: Optional seed for reproducible content (None for random)

    Returns:
        Bytes of the specified size
    """
    if size == 0:
        return b""
    if seed is not None:
        import random
        rng = random.Random(seed)
        return bytes(rng.randint(0, 255) for _ in range(size))
    return os.urandom(size)


def generate_content_with_hash(size: int, seed: Optional[int] = None) -> tuple[bytes, str]:
    """
    Generate content of specified size and compute its SHA256 hash.

    Args:
        size: Size of content in bytes
        seed: Optional seed for reproducible content

    Returns:
        Tuple of (content_bytes, sha256_hash)
    """
    content = generate_content(size, seed)
    return content, compute_sha256(content)
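Seeded generation makes content reproducible across processes, which the concurrency tests added in this changeset rely on. A quick sketch:

# Same seed, same bytes, same digest — handy for cross-process assertions.
a, digest_a = generate_content_with_hash(1024, seed=7)
b, digest_b = generate_content_with_hash(1024, seed=7)
assert a == b and digest_a == digest_b

# No seed falls back to os.urandom, so two calls (almost surely) differ.
c, digest_c = generate_content_with_hash(1024)
assert digest_c != digest_a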
# =============================================================================
# Project/Package Factories
# =============================================================================

@@ -1,39 +1,50 @@
-"""Integration tests for authentication API endpoints."""
+"""Integration tests for authentication API endpoints.
+
+Note: These tests are marked as auth_intensive because they make many login
+requests. Dev/stage deployments have relaxed rate limits (1000/minute) to
+allow these tests to run. Production uses strict rate limits (5/minute).
+"""

import pytest
from uuid import uuid4

from tests.conftest import get_admin_password, get_admin_username


# Mark all tests in this module as auth_intensive (informational, not excluded from CI)
pytestmark = pytest.mark.auth_intensive

class TestAuthLogin:
    """Tests for login endpoint."""

    @pytest.mark.integration
-    def test_login_success(self, integration_client):
+    def test_login_success(self, auth_client):
        """Test successful login with default admin credentials."""
-        response = integration_client.post(
+        response = auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
        assert response.status_code == 200
        data = response.json()
-        assert data["username"] == "admin"
+        assert data["username"] == get_admin_username()
        assert data["is_admin"] is True
        assert "orchard_session" in response.cookies

    @pytest.mark.integration
-    def test_login_invalid_password(self, integration_client):
+    def test_login_invalid_password(self, auth_client):
        """Test login with wrong password."""
-        response = integration_client.post(
+        response = auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "wrongpassword"},
+            json={"username": get_admin_username(), "password": "wrongpassword"},
        )
        assert response.status_code == 401
        assert "Invalid username or password" in response.json()["detail"]

    @pytest.mark.integration
-    def test_login_nonexistent_user(self, integration_client):
+    def test_login_nonexistent_user(self, auth_client):
        """Test login with non-existent user."""
-        response = integration_client.post(
+        response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": "nonexistent", "password": "password"},
        )

@@ -44,24 +55,24 @@ class TestAuthLogout:
    """Tests for logout endpoint."""

    @pytest.mark.integration
-    def test_logout_success(self, integration_client):
+    def test_logout_success(self, auth_client):
        """Test successful logout."""
        # First login
-        login_response = integration_client.post(
+        login_response = auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
        assert login_response.status_code == 200

        # Then logout
-        logout_response = integration_client.post("/api/v1/auth/logout")
+        logout_response = auth_client.post("/api/v1/auth/logout")
        assert logout_response.status_code == 200
        assert "Logged out successfully" in logout_response.json()["message"]

    @pytest.mark.integration
-    def test_logout_without_session(self, integration_client):
+    def test_logout_without_session(self, auth_client):
        """Test logout without being logged in."""
-        response = integration_client.post("/api/v1/auth/logout")
+        response = auth_client.post("/api/v1/auth/logout")
        # Should succeed even without session
        assert response.status_code == 200

@@ -70,84 +81,105 @@ class TestAuthMe:
    """Tests for get current user endpoint."""

    @pytest.mark.integration
-    def test_get_me_authenticated(self, integration_client):
+    def test_get_me_authenticated(self, auth_client):
        """Test getting current user when authenticated."""
        # Login first
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )

-        response = integration_client.get("/api/v1/auth/me")
+        response = auth_client.get("/api/v1/auth/me")
        assert response.status_code == 200
        data = response.json()
-        assert data["username"] == "admin"
+        assert data["username"] == get_admin_username()
        assert data["is_admin"] is True
        assert "id" in data
        assert "created_at" in data

    @pytest.mark.integration
-    def test_get_me_unauthenticated(self, integration_client):
+    def test_get_me_unauthenticated(self, auth_client):
        """Test getting current user without authentication."""
        # Clear any existing cookies
-        integration_client.cookies.clear()
+        auth_client.cookies.clear()

-        response = integration_client.get("/api/v1/auth/me")
+        response = auth_client.get("/api/v1/auth/me")
        assert response.status_code == 401
        assert "Not authenticated" in response.json()["detail"]


class TestAuthChangePassword:
-    """Tests for change password endpoint."""
+    """Tests for change password endpoint.
+
+    Note: These tests use dedicated test users instead of admin to avoid
+    invalidating the integration_client session (which uses admin).
+    """

    @pytest.mark.integration
-    def test_change_password_success(self, integration_client):
+    def test_change_password_success(self, auth_client):
        """Test successful password change."""
-        # Login first
-        integration_client.post(
+        # Login as admin to create a test user
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
+        test_username = f"pwchange_{uuid4().hex[:8]}"
+        auth_client.post(
+            "/api/v1/admin/users",
+            json={"username": test_username, "password": "oldpassword123"},
+        )
+
+        # Login as test user
+        auth_client.cookies.clear()
+        auth_client.post(
+            "/api/v1/auth/login",
+            json={"username": test_username, "password": "oldpassword123"},
+        )

        # Change password
-        response = integration_client.post(
+        response = auth_client.post(
            "/api/v1/auth/change-password",
-            json={"current_password": "changeme123", "new_password": "newpassword123"},
+            json={"current_password": "oldpassword123", "new_password": "newpassword123"},
        )
        assert response.status_code == 200

        # Verify old password no longer works
-        integration_client.cookies.clear()
-        response = integration_client.post(
+        auth_client.cookies.clear()
+        response = auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": test_username, "password": "oldpassword123"},
        )
        assert response.status_code == 401

        # Verify new password works
-        response = integration_client.post(
+        response = auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "newpassword123"},
+            json={"username": test_username, "password": "newpassword123"},
        )
        assert response.status_code == 200

-        # Reset password back to original for other tests
-        reset_response = integration_client.post(
-            "/api/v1/auth/change-password",
-            json={"current_password": "newpassword123", "new_password": "changeme123"},
-        )
-        assert reset_response.status_code == 200, "Failed to reset admin password back to default"

    @pytest.mark.integration
-    def test_change_password_wrong_current(self, integration_client):
+    def test_change_password_wrong_current(self, auth_client):
        """Test password change with wrong current password."""
-        # Login first
-        integration_client.post(
+        # Login as admin to create a test user
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
+        test_username = f"pwwrong_{uuid4().hex[:8]}"
+        auth_client.post(
+            "/api/v1/admin/users",
+            json={"username": test_username, "password": "password123"},
+        )

-        response = integration_client.post(
+        # Login as test user
+        auth_client.cookies.clear()
+        auth_client.post(
+            "/api/v1/auth/login",
+            json={"username": test_username, "password": "password123"},
+        )
+
+        response = auth_client.post(
            "/api/v1/auth/change-password",
            json={"current_password": "wrongpassword", "new_password": "newpassword"},
        )

@@ -159,16 +191,16 @@ class TestAPIKeys:
    """Tests for API key management endpoints."""

    @pytest.mark.integration
-    def test_create_and_list_api_key(self, integration_client):
+    def test_create_and_list_api_key(self, auth_client):
        """Test creating and listing API keys."""
        # Login first
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )

        # Create API key
-        create_response = integration_client.post(
+        create_response = auth_client.post(
            "/api/v1/auth/keys",
            json={"name": "test-key", "description": "Test API key"},
        )

@@ -182,23 +214,23 @@ class TestAPIKeys:
        api_key = data["key"]

        # List API keys
-        list_response = integration_client.get("/api/v1/auth/keys")
+        list_response = auth_client.get("/api/v1/auth/keys")
        assert list_response.status_code == 200
        keys = list_response.json()
        assert any(k["id"] == key_id for k in keys)

        # Clean up - delete the key
-        integration_client.delete(f"/api/v1/auth/keys/{key_id}")
+        auth_client.delete(f"/api/v1/auth/keys/{key_id}")

    @pytest.mark.integration
-    def test_use_api_key_for_auth(self, integration_client):
+    def test_use_api_key_for_auth(self, auth_client):
        """Test using API key for authentication."""
        # Login and create API key
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
-        create_response = integration_client.post(
+        create_response = auth_client.post(
            "/api/v1/auth/keys",
            json={"name": "auth-test-key"},
        )

@@ -206,30 +238,30 @@ class TestAPIKeys:
        key_id = create_response.json()["id"]

        # Clear cookies and use API key
-        integration_client.cookies.clear()
-        response = integration_client.get(
+        auth_client.cookies.clear()
+        response = auth_client.get(
            "/api/v1/auth/me",
            headers={"Authorization": f"Bearer {api_key}"},
        )
        assert response.status_code == 200
-        assert response.json()["username"] == "admin"
+        assert response.json()["username"] == get_admin_username()

        # Clean up
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
-        integration_client.delete(f"/api/v1/auth/keys/{key_id}")
+        auth_client.delete(f"/api/v1/auth/keys/{key_id}")

    @pytest.mark.integration
-    def test_delete_api_key(self, integration_client):
+    def test_delete_api_key(self, auth_client):
        """Test revoking an API key."""
        # Login and create API key
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
-        create_response = integration_client.post(
+        create_response = auth_client.post(
            "/api/v1/auth/keys",
            json={"name": "delete-test-key"},
        )

@@ -237,12 +269,12 @@ class TestAPIKeys:
        api_key = create_response.json()["key"]

        # Delete the key
-        delete_response = integration_client.delete(f"/api/v1/auth/keys/{key_id}")
+        delete_response = auth_client.delete(f"/api/v1/auth/keys/{key_id}")
        assert delete_response.status_code == 200

        # Verify key no longer works
-        integration_client.cookies.clear()
-        response = integration_client.get(
+        auth_client.cookies.clear()
+        response = auth_client.get(
            "/api/v1/auth/me",
            headers={"Authorization": f"Bearer {api_key}"},
        )

@@ -253,32 +285,32 @@ class TestAdminUserManagement:
    """Tests for admin user management endpoints."""

    @pytest.mark.integration
-    def test_list_users(self, integration_client):
+    def test_list_users(self, auth_client):
        """Test listing users as admin."""
        # Login as admin
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )

-        response = integration_client.get("/api/v1/admin/users")
+        response = auth_client.get("/api/v1/admin/users")
        assert response.status_code == 200
        users = response.json()
        assert len(users) >= 1
-        assert any(u["username"] == "admin" for u in users)
+        assert any(u["username"] == get_admin_username() for u in users)

    @pytest.mark.integration
-    def test_create_user(self, integration_client):
+    def test_create_user(self, auth_client):
        """Test creating a new user as admin."""
        # Login as admin
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )

        # Create new user
        test_username = f"testuser_{uuid4().hex[:8]}"
-        response = integration_client.post(
+        response = auth_client.post(
            "/api/v1/admin/users",
            json={
                "username": test_username,

@@ -293,31 +325,31 @@ class TestAdminUserManagement:
        assert data["is_admin"] is False

        # Verify new user can login
-        integration_client.cookies.clear()
-        login_response = integration_client.post(
+        auth_client.cookies.clear()
+        login_response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "testpassword"},
        )
        assert login_response.status_code == 200

    @pytest.mark.integration
-    def test_update_user(self, integration_client):
+    def test_update_user(self, auth_client):
        """Test updating a user as admin."""
        # Login as admin
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )

        # Create a test user
        test_username = f"updateuser_{uuid4().hex[:8]}"
-        integration_client.post(
+        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password"},
        )

        # Update the user
-        response = integration_client.put(
+        response = auth_client.put(
            f"/api/v1/admin/users/{test_username}",
            json={"email": "updated@example.com", "is_admin": True},
        )

@@ -327,59 +359,59 @@ class TestAdminUserManagement:
        assert data["is_admin"] is True

    @pytest.mark.integration
-    def test_reset_user_password(self, integration_client):
+    def test_reset_user_password(self, auth_client):
        """Test resetting a user's password as admin."""
        # Login as admin
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )

        # Create a test user
        test_username = f"resetuser_{uuid4().hex[:8]}"
-        integration_client.post(
+        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "oldpassword"},
        )

        # Reset password
-        response = integration_client.post(
+        response = auth_client.post(
            f"/api/v1/admin/users/{test_username}/reset-password",
            json={"new_password": "newpassword"},
        )
        assert response.status_code == 200

        # Verify new password works
-        integration_client.cookies.clear()
-        login_response = integration_client.post(
+        auth_client.cookies.clear()
+        login_response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "newpassword"},
        )
        assert login_response.status_code == 200

    @pytest.mark.integration
-    def test_non_admin_cannot_access_admin_endpoints(self, integration_client):
+    def test_non_admin_cannot_access_admin_endpoints(self, auth_client):
        """Test that non-admin users cannot access admin endpoints."""
        # Login as admin and create non-admin user
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
        test_username = f"nonadmin_{uuid4().hex[:8]}"
-        integration_client.post(
+        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password", "is_admin": False},
        )

        # Login as non-admin
-        integration_client.cookies.clear()
-        integration_client.post(
+        auth_client.cookies.clear()
+        auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "password"},
        )

        # Try to access admin endpoints
-        response = integration_client.get("/api/v1/admin/users")
+        response = auth_client.get("/api/v1/admin/users")
        assert response.status_code == 403
        assert "Admin privileges required" in response.json()["detail"]

@@ -388,28 +420,28 @@ class TestSecurityEdgeCases:
    """Tests for security edge cases and validation."""

    @pytest.mark.integration
-    def test_login_inactive_user(self, integration_client):
+    def test_login_inactive_user(self, auth_client):
        """Test that inactive users cannot login."""
        # Login as admin and create a user
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
        test_username = f"inactive_{uuid4().hex[:8]}"
-        integration_client.post(
+        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password123"},
        )

        # Deactivate the user
-        integration_client.put(
+        auth_client.put(
            f"/api/v1/admin/users/{test_username}",
            json={"is_active": False},
        )

        # Try to login as inactive user
-        integration_client.cookies.clear()
-        response = integration_client.post(
+        auth_client.cookies.clear()
+        response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "password123"},
        )

@@ -417,14 +449,14 @@ class TestSecurityEdgeCases:
        assert "Invalid username or password" in response.json()["detail"]

    @pytest.mark.integration
-    def test_password_too_short_on_create(self, integration_client):
+    def test_password_too_short_on_create(self, auth_client):
        """Test that short passwords are rejected when creating users."""
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )

-        response = integration_client.post(
+        response = auth_client.post(
            "/api/v1/admin/users",
            json={"username": f"shortpw_{uuid4().hex[:8]}", "password": "short"},
        )

@@ -432,36 +464,49 @@ class TestSecurityEdgeCases:
        assert "at least 8 characters" in response.json()["detail"]

    @pytest.mark.integration
-    def test_password_too_short_on_change(self, integration_client):
+    def test_password_too_short_on_change(self, auth_client):
        """Test that short passwords are rejected when changing password."""
-        integration_client.post(
+        # Create test user
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
+        test_username = f"shortchange_{uuid4().hex[:8]}"
+        auth_client.post(
+            "/api/v1/admin/users",
+            json={"username": test_username, "password": "password123"},
+        )

-        response = integration_client.post(
+        # Login as test user
+        auth_client.cookies.clear()
+        auth_client.post(
+            "/api/v1/auth/login",
+            json={"username": test_username, "password": "password123"},
+        )
+
+        response = auth_client.post(
            "/api/v1/auth/change-password",
-            json={"current_password": "changeme123", "new_password": "short"},
+            json={"current_password": "password123", "new_password": "short"},
        )
        assert response.status_code == 400
        assert "at least 8 characters" in response.json()["detail"]

    @pytest.mark.integration
-    def test_password_too_short_on_reset(self, integration_client):
+    def test_password_too_short_on_reset(self, auth_client):
        """Test that short passwords are rejected when resetting password."""
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )

        # Create a test user first
        test_username = f"resetshort_{uuid4().hex[:8]}"
-        integration_client.post(
+        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password123"},
        )

-        response = integration_client.post(
+        response = auth_client.post(
            f"/api/v1/admin/users/{test_username}/reset-password",
            json={"new_password": "short"},
        )

@@ -469,23 +514,23 @@ class TestSecurityEdgeCases:
        assert "at least 8 characters" in response.json()["detail"]

    @pytest.mark.integration
-    def test_duplicate_username_rejected(self, integration_client):
+    def test_duplicate_username_rejected(self, auth_client):
        """Test that duplicate usernames are rejected."""
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )

        test_username = f"duplicate_{uuid4().hex[:8]}"
        # Create user first time
-        response1 = integration_client.post(
+        response1 = auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password123"},
        )
        assert response1.status_code == 200

        # Try to create same username again
-        response2 = integration_client.post(
+        response2 = auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password456"},
        )

@@ -493,14 +538,14 @@ class TestSecurityEdgeCases:
        assert "already exists" in response2.json()["detail"]

    @pytest.mark.integration
-    def test_cannot_delete_other_users_api_key(self, integration_client):
+    def test_cannot_delete_other_users_api_key(self, auth_client):
        """Test that users cannot delete API keys owned by other users."""
        # Login as admin and create an API key
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
-        create_response = integration_client.post(
+        create_response = auth_client.post(
            "/api/v1/auth/keys",
            json={"name": "admin-key"},
        )

@@ -508,253 +553,65 @@ class TestSecurityEdgeCases:

        # Create a non-admin user
        test_username = f"nonadmin_{uuid4().hex[:8]}"
-        integration_client.post(
+        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password123"},
        )

        # Login as non-admin
-        integration_client.cookies.clear()
-        integration_client.post(
+        auth_client.cookies.clear()
+        auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "password123"},
        )

        # Try to delete admin's API key
-        response = integration_client.delete(f"/api/v1/auth/keys/{admin_key_id}")
+        response = auth_client.delete(f"/api/v1/auth/keys/{admin_key_id}")
        assert response.status_code == 403
        assert "Cannot delete another user's API key" in response.json()["detail"]

        # Cleanup: login as admin and delete the key
-        integration_client.cookies.clear()
-        integration_client.post(
+        auth_client.cookies.clear()
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
-        integration_client.delete(f"/api/v1/auth/keys/{admin_key_id}")
+        auth_client.delete(f"/api/v1/auth/keys/{admin_key_id}")

    @pytest.mark.integration
-    def test_sessions_invalidated_on_password_change(self, integration_client):
+    def test_sessions_invalidated_on_password_change(self, auth_client):
        """Test that all sessions are invalidated when password is changed."""
        # Create a test user
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
+            json={"username": get_admin_username(), "password": get_admin_password()},
        )
        test_username = f"sessiontest_{uuid4().hex[:8]}"
-        integration_client.post(
+        auth_client.post(
            "/api/v1/admin/users",
            json={"username": test_username, "password": "password123"},
        )

        # Login as test user
-        integration_client.cookies.clear()
-        login_response = integration_client.post(
+        auth_client.cookies.clear()
+        login_response = auth_client.post(
            "/api/v1/auth/login",
            json={"username": test_username, "password": "password123"},
        )
        assert login_response.status_code == 200

        # Verify session works
-        me_response = integration_client.get("/api/v1/auth/me")
+        me_response = auth_client.get("/api/v1/auth/me")
        assert me_response.status_code == 200

        # Change password
-        integration_client.post(
+        auth_client.post(
            "/api/v1/auth/change-password",
            json={"current_password": "password123", "new_password": "newpassword123"},
        )

        # Old session should be invalidated - try to access /me
        # (note: the change-password call itself may have cleared the session cookie)
-        me_response2 = integration_client.get("/api/v1/auth/me")
+        me_response2 = auth_client.get("/api/v1/auth/me")
        # This should fail because all sessions were invalidated
        assert me_response2.status_code == 401

-class TestSecurityEdgeCases:
-    """Tests for security edge cases and validation."""
-
-    @pytest.mark.integration
-    def test_login_inactive_user(self, integration_client):
-        """Test that inactive users cannot login."""
-        # Login as admin and create a user
-        integration_client.post(
-            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
-        )
-        test_username = f"inactive_{uuid4().hex[:8]}"
-        integration_client.post(
-            "/api/v1/admin/users",
-            json={"username": test_username, "password": "password123"},
-        )
-
-        # Deactivate the user
-        integration_client.put(
-            f"/api/v1/admin/users/{test_username}",
-            json={"is_active": False},
-        )
-
-        # Try to login as inactive user
-        integration_client.cookies.clear()
-        response = integration_client.post(
-            "/api/v1/auth/login",
-            json={"username": test_username, "password": "password123"},
-        )
-        assert response.status_code == 401
-        assert "Invalid username or password" in response.json()["detail"]
-
-    @pytest.mark.integration
-    def test_password_too_short_on_create(self, integration_client):
-        """Test that short passwords are rejected when creating users."""
-        integration_client.post(
-            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
-        )
-
-        response = integration_client.post(
-            "/api/v1/admin/users",
-            json={"username": f"shortpw_{uuid4().hex[:8]}", "password": "short"},
-        )
-        assert response.status_code == 400
-        assert "at least 8 characters" in response.json()["detail"]
-
-    @pytest.mark.integration
-    def test_password_too_short_on_change(self, integration_client):
-        """Test that short passwords are rejected when changing password."""
-        integration_client.post(
-            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
-        )
-
-        response = integration_client.post(
-            "/api/v1/auth/change-password",
-            json={"current_password": "changeme123", "new_password": "short"},
-        )
-        assert response.status_code == 400
-        assert "at least 8 characters" in response.json()["detail"]
-
-    @pytest.mark.integration
-    def test_password_too_short_on_reset(self, integration_client):
-        """Test that short passwords are rejected when resetting password."""
-        integration_client.post(
-            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
-        )
-
-        # Create a test user first
-        test_username = f"resetshort_{uuid4().hex[:8]}"
-        integration_client.post(
-            "/api/v1/admin/users",
-            json={"username": test_username, "password": "password123"},
-        )
-
-        response = integration_client.post(
-            f"/api/v1/admin/users/{test_username}/reset-password",
-            json={"new_password": "short"},
-        )
-        assert response.status_code == 400
-        assert "at least 8 characters" in response.json()["detail"]
-
-    @pytest.mark.integration
-    def test_duplicate_username_rejected(self, integration_client):
-        """Test that duplicate usernames are rejected."""
-        integration_client.post(
-            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
-        )
-
-        test_username = f"duplicate_{uuid4().hex[:8]}"
-        # Create user first time
-        response1 = integration_client.post(
-            "/api/v1/admin/users",
-            json={"username": test_username, "password": "password123"},
-        )
-        assert response1.status_code == 200
-
-        # Try to create same username again
-        response2 = integration_client.post(
-            "/api/v1/admin/users",
-            json={"username": test_username, "password": "password456"},
-        )
-        assert response2.status_code == 409
-        assert "already exists" in response2.json()["detail"]
-
-    @pytest.mark.integration
-    def test_cannot_delete_other_users_api_key(self, integration_client):
-        """Test that users cannot delete API keys owned by other users."""
-        # Login as admin and create an API key
-        integration_client.post(
-            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
-        )
-        create_response = integration_client.post(
-            "/api/v1/auth/keys",
-            json={"name": "admin-key"},
-        )
-        admin_key_id = create_response.json()["id"]
-
-        # Create a non-admin user
-        test_username = f"nonadmin_{uuid4().hex[:8]}"
-        integration_client.post(
-            "/api/v1/admin/users",
-            json={"username": test_username, "password": "password123"},
-        )
-
-        # Login as non-admin
-        integration_client.cookies.clear()
-        integration_client.post(
-            "/api/v1/auth/login",
-            json={"username": test_username, "password": "password123"},
-        )
-
-        # Try to delete admin's API key
-        response = integration_client.delete(f"/api/v1/auth/keys/{admin_key_id}")
-        assert response.status_code == 403
-        assert "Cannot delete another user's API key" in response.json()["detail"]
-
-        # Cleanup: login as admin and delete the key
-        integration_client.cookies.clear()
-        integration_client.post(
-            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
-        )
-        integration_client.delete(f"/api/v1/auth/keys/{admin_key_id}")
-
-    @pytest.mark.integration
-    def test_sessions_invalidated_on_password_change(self, integration_client):
-        """Test that all sessions are invalidated when password is changed."""
-        # Create a test user
-        integration_client.post(
-            "/api/v1/auth/login",
-            json={"username": "admin", "password": "changeme123"},
-        )
-        test_username = f"sessiontest_{uuid4().hex[:8]}"
-        integration_client.post(
-            "/api/v1/admin/users",
-            json={"username": test_username, "password": "password123"},
-        )
-
-        # Login as test user
-        integration_client.cookies.clear()
-        login_response = integration_client.post(
-            "/api/v1/auth/login",
-            json={"username": test_username, "password": "password123"},
-        )
-        assert login_response.status_code == 200
-
-        # Verify session works
-        me_response = integration_client.get("/api/v1/auth/me")
-        assert me_response.status_code == 200
-
-        # Change password
-        integration_client.post(
-            "/api/v1/auth/change-password",
-            json={"current_password": "password123", "new_password": "newpassword123"},
-        )
-
-        # Old session should be invalidated - try to access /me
-        # (note: the change-password call itself may have cleared the session cookie)
-        me_response2 = integration_client.get("/api/v1/auth/me")
-        # This should fail because all sessions were invalidated
-        assert me_response2.status_code == 401
737
backend/tests/integration/test_concurrent_operations.py
Normal file
@@ -0,0 +1,737 @@
"""
|
||||
Integration tests for concurrent upload and download operations.
|
||||
|
||||
Tests cover:
|
||||
- Concurrent uploads of different files
|
||||
- Concurrent uploads of same file (deduplication race)
|
||||
- Concurrent downloads of same artifact
|
||||
- Concurrent downloads of different artifacts
|
||||
- Mixed concurrent uploads and downloads
|
||||
- Data corruption prevention under concurrency
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import io
|
||||
import os
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from tests.factories import (
|
||||
compute_sha256,
|
||||
upload_test_file,
|
||||
generate_content_with_hash,
|
||||
)
|
||||
|
||||
|
||||
def get_api_key(integration_client):
|
||||
"""Create an API key for concurrent test workers."""
|
||||
import uuid
|
||||
response = integration_client.post(
|
||||
"/api/v1/auth/keys",
|
||||
json={"name": f"concurrent-test-{uuid.uuid4().hex[:8]}"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
return response.json()["key"]
|
||||
return None
|
||||
|
||||
|
||||
class TestConcurrentUploads:
    """Tests for concurrent upload operations."""

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_2_concurrent_uploads_different_files(self, integration_client, test_package):
        """Test 2 concurrent uploads of different files."""
        project, package = test_package
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        files_data = [
            generate_content_with_hash(1024, seed=i) for i in range(2)
        ]

        results = []
        errors = []

        def upload_worker(idx, content, expected_hash):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    files = {
                        "file": (f"file-{idx}.bin", io.BytesIO(content), "application/octet-stream")
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"concurrent-{idx}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        result = response.json()
                        results.append((idx, result, expected_hash))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}: {response.text}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=2) as executor:
            futures = [
                executor.submit(upload_worker, i, content, hash)
                for i, (content, hash) in enumerate(files_data)
            ]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == 2

        # Verify each upload returned correct artifact_id
        for idx, result, expected_hash in results:
            assert result["artifact_id"] == expected_hash

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_5_concurrent_uploads_different_files(self, integration_client, test_package):
        """Test 5 concurrent uploads of different files."""
        project, package = test_package
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        num_files = 5
        files_data = [
            generate_content_with_hash(2048, seed=100 + i) for i in range(num_files)
        ]

        results = []
        errors = []

        def upload_worker(idx, content, expected_hash):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    files = {
                        "file": (f"file-{idx}.bin", io.BytesIO(content), "application/octet-stream")
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"concurrent5-{idx}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        result = response.json()
                        results.append((idx, result, expected_hash))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_files) as executor:
            futures = [
                executor.submit(upload_worker, i, content, hash)
                for i, (content, hash) in enumerate(files_data)
            ]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_files

        # Verify all uploads have unique artifact_ids
        artifact_ids = set(r[1]["artifact_id"] for r in results)
        assert len(artifact_ids) == num_files

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_10_concurrent_uploads_different_files(self, integration_client, test_package):
        """Test 10 concurrent uploads of different files."""
        project, package = test_package
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        num_files = 10
        files_data = [
            generate_content_with_hash(1024, seed=200 + i) for i in range(num_files)
        ]

        results = []
        errors = []

        def upload_worker(idx, content, expected_hash):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    files = {
                        "file": (f"file-{idx}.bin", io.BytesIO(content), "application/octet-stream")
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"concurrent10-{idx}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        result = response.json()
                        results.append((idx, result, expected_hash))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_files) as executor:
            futures = [
                executor.submit(upload_worker, i, content, hash)
                for i, (content, hash) in enumerate(files_data)
            ]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_files

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_concurrent_uploads_same_file_deduplication(self, integration_client, test_package):
        """Test concurrent uploads of same file handle deduplication correctly."""
        project, package = test_package
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        content, expected_hash = generate_content_with_hash(4096, seed=999)
        num_concurrent = 5

        results = []
        errors = []

        def upload_worker(idx):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    files = {
                        "file": (f"same-{idx}.bin", io.BytesIO(content), "application/octet-stream")
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"dedup-{idx}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        results.append(response.json())
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_concurrent) as executor:
            futures = [executor.submit(upload_worker, i) for i in range(num_concurrent)]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_concurrent

        # All should have same artifact_id
        artifact_ids = set(r["artifact_id"] for r in results)
        assert len(artifact_ids) == 1
        assert expected_hash in artifact_ids

        # Verify final ref_count equals number of uploads
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.status_code == 200
        assert response.json()["ref_count"] == num_concurrent
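The deduplication race resolves cleanly because artifact IDs are content-addressed: every worker uploading identical bytes arrives at the same SHA-256, so the server can only ever create one artifact and bump its ref_count. A sketch of that invariant (assuming SHA-256 hex digests as artifact IDs, which is what these tests assert):

import hashlib

def predict_artifact_id(content: bytes) -> str:
    """Artifact IDs here are content hashes, computable client-side before uploading."""
    return hashlib.sha256(content).hexdigest()

# Five workers uploading identical bytes can only ever map to one artifact:
payload = b"identical payload"
ids = {predict_artifact_id(payload) for _ in range(5)}
assert len(ids) == 1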
    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_concurrent_uploads_to_different_packages(self, integration_client, test_project, unique_test_id):
        """Test concurrent uploads to different packages."""
        project = test_project
        api_key = get_api_key(integration_client)
        assert api_key, "Failed to create API key"

        num_packages = 3
        package_names = []

        # Create multiple packages
        for i in range(num_packages):
            pkg_name = f"pkg-{unique_test_id}-{i}"
            response = integration_client.post(
                f"/api/v1/project/{project}/packages",
                json={"name": pkg_name, "description": f"Package {i}"},
            )
            assert response.status_code == 200
            package_names.append(pkg_name)

        files_data = [
            generate_content_with_hash(1024, seed=300 + i) for i in range(num_packages)
        ]

        results = []
        errors = []

        def upload_worker(idx, package, content, expected_hash):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    files = {
                        "file": (f"file-{idx}.bin", io.BytesIO(content), "application/octet-stream")
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": "latest"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        result = response.json()
                        results.append((package, result, expected_hash))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_packages) as executor:
            futures = [
                executor.submit(upload_worker, i, package_names[i], content, hash)
                for i, (content, hash) in enumerate(files_data)
            ]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_packages


class TestConcurrentDownloads:
    """Tests for concurrent download operations."""

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_2_concurrent_downloads_same_artifact(self, integration_client, test_package):
        """Test 2 concurrent downloads of same artifact."""
        project, package = test_package
        content, expected_hash = generate_content_with_hash(2048, seed=400)

        # Upload first
        upload_test_file(integration_client, project, package, content, tag="download-test")

        results = []
        errors = []

        def download_worker(idx):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    response = client.get(
                        f"/api/v1/project/{project}/{package}/+/download-test",
                        params={"mode": "proxy"},
                    )
                    if response.status_code == 200:
                        results.append((idx, response.content))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=2) as executor:
            futures = [executor.submit(download_worker, i) for i in range(2)]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == 2

        # All downloads should match original
        for idx, downloaded in results:
            assert downloaded == content

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_5_concurrent_downloads_same_artifact(self, integration_client, test_package):
        """Test 5 concurrent downloads of same artifact."""
        project, package = test_package
        content, expected_hash = generate_content_with_hash(4096, seed=500)

        upload_test_file(integration_client, project, package, content, tag="download5-test")

        num_downloads = 5
        results = []
        errors = []

        def download_worker(idx):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    response = client.get(
                        f"/api/v1/project/{project}/{package}/+/download5-test",
                        params={"mode": "proxy"},
                    )
                    if response.status_code == 200:
                        results.append((idx, response.content))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_downloads) as executor:
            futures = [executor.submit(download_worker, i) for i in range(num_downloads)]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_downloads

        for idx, downloaded in results:
            assert downloaded == content

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_10_concurrent_downloads_same_artifact(self, integration_client, test_package):
        """Test 10 concurrent downloads of same artifact."""
        project, package = test_package
        content, expected_hash = generate_content_with_hash(8192, seed=600)

        upload_test_file(integration_client, project, package, content, tag="download10-test")

        num_downloads = 10
        results = []
        errors = []

        def download_worker(idx):
            try:
                from httpx import Client
                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")

                with Client(base_url=base_url, timeout=60.0) as client:
                    response = client.get(
                        f"/api/v1/project/{project}/{package}/+/download10-test",
                        params={"mode": "proxy"},
                    )
                    if response.status_code == 200:
                        results.append((idx, response.content))
                    else:
                        errors.append(f"Worker {idx}: Status {response.status_code}")
            except Exception as e:
                errors.append(f"Worker {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_downloads) as executor:
            futures = [executor.submit(download_worker, i) for i in range(num_downloads)]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Errors: {errors}"
        assert len(results) == num_downloads

        for idx, downloaded in results:
            assert downloaded == content

    @pytest.mark.integration
    @pytest.mark.concurrent
    def test_concurrent_downloads_different_artifacts(self, integration_client, test_package):
|
||||
"""Test concurrent downloads of different artifacts."""
|
||||
project, package = test_package
|
||||
|
||||
# Upload multiple files
|
||||
num_files = 5
|
||||
uploads = []
|
||||
for i in range(num_files):
|
||||
content, expected_hash = generate_content_with_hash(1024, seed=700 + i)
|
||||
upload_test_file(
|
||||
integration_client, project, package, content,
|
||||
tag=f"multi-download-{i}"
|
||||
)
|
||||
uploads.append((f"multi-download-{i}", content))
|
||||
|
||||
results = []
|
||||
errors = []
|
||||
|
||||
def download_worker(tag, expected_content):
|
||||
try:
|
||||
from httpx import Client
|
||||
base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
|
||||
|
||||
with Client(base_url=base_url, timeout=60.0) as client:
|
||||
response = client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/{tag}",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
results.append((tag, response.content, expected_content))
|
||||
else:
|
||||
errors.append(f"Tag {tag}: Status {response.status_code}")
|
||||
except Exception as e:
|
||||
errors.append(f"Tag {tag}: {str(e)}")
|
||||
|
||||
with ThreadPoolExecutor(max_workers=num_files) as executor:
|
||||
futures = [
|
||||
executor.submit(download_worker, tag, content)
|
||||
for tag, content in uploads
|
||||
]
|
||||
for future in as_completed(futures):
|
||||
pass
|
||||
|
||||
assert len(errors) == 0, f"Errors: {errors}"
|
||||
assert len(results) == num_files
|
||||
|
||||
for tag, downloaded, expected in results:
|
||||
assert downloaded == expected, f"Content mismatch for {tag}"
|
||||
|
||||
|
||||
class TestMixedConcurrentOperations:
|
||||
"""Tests for mixed concurrent upload and download operations."""
|
||||
|
||||
@pytest.mark.integration
|
||||
@pytest.mark.concurrent
|
||||
def test_upload_while_download_in_progress(self, integration_client, test_package):
|
||||
"""Test uploading while a download is in progress."""
|
||||
project, package = test_package
|
||||
api_key = get_api_key(integration_client)
|
||||
assert api_key, "Failed to create API key"
|
||||
|
||||
# Upload initial content
|
||||
content1, hash1 = generate_content_with_hash(10240, seed=800) # 10KB
|
||||
upload_test_file(integration_client, project, package, content1, tag="initial")
|
||||
|
||||
# New content for upload during download
|
||||
content2, hash2 = generate_content_with_hash(10240, seed=801)
|
||||
|
||||
results = {"downloads": [], "uploads": []}
|
||||
errors = []
|
||||
|
||||
def download_worker():
|
||||
try:
|
||||
from httpx import Client
|
||||
base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
|
||||
|
||||
with Client(base_url=base_url, timeout=60.0) as client:
|
||||
response = client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/initial",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
results["downloads"].append(response.content)
|
||||
else:
|
||||
errors.append(f"Download: Status {response.status_code}")
|
||||
except Exception as e:
|
||||
errors.append(f"Download: {str(e)}")
|
||||
|
||||
def upload_worker():
|
||||
try:
|
||||
from httpx import Client
|
||||
base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
|
||||
|
||||
with Client(base_url=base_url, timeout=60.0) as client:
|
||||
files = {
|
||||
"file": ("new.bin", io.BytesIO(content2), "application/octet-stream")
|
||||
}
|
||||
response = client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": "during-download"},
|
||||
headers={"Authorization": f"Bearer {api_key}"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
results["uploads"].append(response.json())
|
||||
else:
|
||||
errors.append(f"Upload: Status {response.status_code}")
|
||||
except Exception as e:
|
||||
errors.append(f"Upload: {str(e)}")
|
||||
|
||||
with ThreadPoolExecutor(max_workers=2) as executor:
|
||||
futures = [
|
||||
executor.submit(download_worker),
|
||||
executor.submit(upload_worker),
|
||||
]
|
||||
for future in as_completed(futures):
|
||||
pass
|
||||
|
||||
assert len(errors) == 0, f"Errors: {errors}"
|
||||
assert len(results["downloads"]) == 1
|
||||
assert len(results["uploads"]) == 1
|
||||
|
||||
# Verify download got correct content
|
||||
assert results["downloads"][0] == content1
|
||||
|
||||
# Verify upload succeeded
|
||||
assert results["uploads"][0]["artifact_id"] == hash2
|
||||
|
||||
@pytest.mark.integration
|
||||
@pytest.mark.concurrent
|
||||
def test_multiple_uploads_and_downloads_simultaneously(self, integration_client, test_package):
|
||||
"""Test multiple uploads and downloads running simultaneously."""
|
||||
project, package = test_package
|
||||
api_key = get_api_key(integration_client)
|
||||
assert api_key, "Failed to create API key"
|
||||
|
||||
# Pre-upload some files for downloading
|
||||
existing_files = []
|
||||
for i in range(3):
|
||||
content, hash = generate_content_with_hash(2048, seed=900 + i)
|
||||
upload_test_file(integration_client, project, package, content, tag=f"existing-{i}")
|
||||
existing_files.append((f"existing-{i}", content))
|
||||
|
||||
# New files for uploading
|
||||
new_files = [
|
||||
generate_content_with_hash(2048, seed=910 + i) for i in range(3)
|
||||
]
|
||||
|
||||
results = {"downloads": [], "uploads": []}
|
||||
errors = []
|
||||
|
||||
def download_worker(tag, expected):
|
||||
try:
|
||||
from httpx import Client
|
||||
base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
|
||||
|
||||
with Client(base_url=base_url, timeout=60.0) as client:
|
||||
response = client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/{tag}",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
results["downloads"].append((tag, response.content, expected))
|
||||
else:
|
||||
errors.append(f"Download {tag}: Status {response.status_code}")
|
||||
except Exception as e:
|
||||
errors.append(f"Download {tag}: {str(e)}")
|
||||
|
||||
def upload_worker(idx, content, expected_hash):
|
||||
try:
|
||||
from httpx import Client
|
||||
base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
|
||||
|
||||
with Client(base_url=base_url, timeout=60.0) as client:
|
||||
files = {
|
||||
"file": (f"new-{idx}.bin", io.BytesIO(content), "application/octet-stream")
|
||||
}
|
||||
response = client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": f"new-{idx}"},
|
||||
headers={"Authorization": f"Bearer {api_key}"},
|
||||
)
|
||||
if response.status_code == 200:
|
||||
results["uploads"].append((idx, response.json(), expected_hash))
|
||||
else:
|
||||
errors.append(f"Upload {idx}: Status {response.status_code}")
|
||||
except Exception as e:
|
||||
errors.append(f"Upload {idx}: {str(e)}")
|
||||
|
||||
with ThreadPoolExecutor(max_workers=6) as executor:
|
||||
futures = []
|
||||
|
||||
# Submit downloads
|
||||
for tag, content in existing_files:
|
||||
futures.append(executor.submit(download_worker, tag, content))
|
||||
|
||||
# Submit uploads
|
||||
for i, (content, hash) in enumerate(new_files):
|
||||
futures.append(executor.submit(upload_worker, i, content, hash))
|
||||
|
||||
for future in as_completed(futures):
|
||||
pass
|
||||
|
||||
assert len(errors) == 0, f"Errors: {errors}"
|
||||
assert len(results["downloads"]) == 3
|
||||
assert len(results["uploads"]) == 3
|
||||
|
||||
# Verify downloads
|
||||
for tag, downloaded, expected in results["downloads"]:
|
||||
assert downloaded == expected, f"Download mismatch for {tag}"
|
||||
|
||||
# Verify uploads
|
||||
for idx, result, expected_hash in results["uploads"]:
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
@pytest.mark.integration
|
||||
@pytest.mark.concurrent
|
||||
def test_no_data_corruption_under_concurrency(self, integration_client, test_package):
|
||||
"""Test that no data corruption occurs under concurrent operations."""
|
||||
project, package = test_package
|
||||
api_key = get_api_key(integration_client)
|
||||
assert api_key, "Failed to create API key"
|
||||
|
||||
# Create content with recognizable patterns
|
||||
num_files = 5
|
||||
files_data = []
|
||||
for i in range(num_files):
|
||||
# Each file has unique repeating pattern for easy corruption detection
|
||||
pattern = bytes([i] * 256)
|
||||
content = pattern * 40 # 10KB each
|
||||
hash = compute_sha256(content)
|
||||
files_data.append((content, hash))
|
||||
|
||||
results = []
|
||||
errors = []
|
||||
|
||||
def upload_and_verify(idx, content, expected_hash):
|
||||
try:
|
||||
from httpx import Client
|
||||
base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
|
||||
|
||||
with Client(base_url=base_url, timeout=60.0) as client:
|
||||
# Upload
|
||||
files = {
|
||||
"file": (f"pattern-{idx}.bin", io.BytesIO(content), "application/octet-stream")
|
||||
}
|
||||
upload_resp = client.post(
|
||||
f"/api/v1/project/{project}/{package}/upload",
|
||||
files=files,
|
||||
data={"tag": f"pattern-{idx}"},
|
||||
headers={"Authorization": f"Bearer {api_key}"},
|
||||
)
|
||||
if upload_resp.status_code != 200:
|
||||
errors.append(f"Upload {idx}: Status {upload_resp.status_code}")
|
||||
return
|
||||
|
||||
upload_result = upload_resp.json()
|
||||
if upload_result["artifact_id"] != expected_hash:
|
||||
errors.append(f"Upload {idx}: Hash mismatch")
|
||||
return
|
||||
|
||||
# Immediately download and verify
|
||||
download_resp = client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/pattern-{idx}",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
if download_resp.status_code != 200:
|
||||
errors.append(f"Download {idx}: Status {download_resp.status_code}")
|
||||
return
|
||||
|
||||
if download_resp.content != content:
|
||||
errors.append(f"Worker {idx}: DATA CORRUPTION DETECTED")
|
||||
return
|
||||
|
||||
# Verify the downloaded content hash
|
||||
downloaded_hash = compute_sha256(download_resp.content)
|
||||
if downloaded_hash != expected_hash:
|
||||
errors.append(f"Worker {idx}: Hash verification failed")
|
||||
return
|
||||
|
||||
results.append(idx)
|
||||
|
||||
except Exception as e:
|
||||
errors.append(f"Worker {idx}: {str(e)}")
|
||||
|
||||
with ThreadPoolExecutor(max_workers=num_files) as executor:
|
||||
futures = [
|
||||
executor.submit(upload_and_verify, i, content, hash)
|
||||
for i, (content, hash) in enumerate(files_data)
|
||||
]
|
||||
for future in as_completed(futures):
|
||||
pass
|
||||
|
||||
assert len(errors) == 0, f"Errors: {errors}"
|
||||
assert len(results) == num_files
|
||||
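

# Every worker above follows the same shape: open a fresh httpx.Client, make
# one request, and record either a result or an error. A shared helper along
# these lines could collapse that boilerplate; this is a sketch only, and
# run_workers is a hypothetical name, not a helper this suite defines:
def run_workers(worker, args_list, max_workers=None):
    """Run worker(*args) for each tuple in args_list, re-raising failures."""
    with ThreadPoolExecutor(max_workers=max_workers or len(args_list)) as executor:
        futures = [executor.submit(worker, *args) for args in args_list]
        for future in as_completed(futures):
            future.result()  # propagates the first worker exception, if any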
322
backend/tests/integration/test_error_handling.py
Normal file
@@ -0,0 +1,322 @@
"""
Integration tests for error handling in upload and download operations.

Tests cover:
- Timeout handling
- Invalid request handling
- Resource cleanup on failures
- Graceful error responses
"""

import pytest
import io
import time
from tests.factories import (
    compute_sha256,
    upload_test_file,
    generate_content_with_hash,
)


class TestUploadErrorHandling:
    """Tests for upload error handling."""

    @pytest.mark.integration
    def test_upload_to_nonexistent_project_returns_404(
        self, integration_client, unique_test_id
    ):
        """Test upload to nonexistent project returns 404."""
        content = b"test content for nonexistent project"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/nonexistent-project-{unique_test_id}/nonexistent-pkg/upload",
            files=files,
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_upload_to_nonexistent_package_returns_404(
        self, integration_client, test_project, unique_test_id
    ):
        """Test upload to nonexistent package returns 404."""
        content = b"test content for nonexistent package"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
            files=files,
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_upload_empty_file_rejected(self, integration_client, test_package):
        """Test empty file upload is rejected."""
        project, package = test_package

        files = {"file": ("empty.bin", io.BytesIO(b""), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code in [400, 422]

    @pytest.mark.integration
    def test_upload_missing_file_returns_422(self, integration_client, test_package):
        """Test upload without file field returns 422."""
        project, package = test_package

        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            data={"tag": "no-file-provided"},
        )
        assert response.status_code == 422

    @pytest.mark.integration
    def test_upload_invalid_checksum_format_returns_400(
        self, integration_client, test_package
    ):
        """Test upload with invalid checksum format returns 400."""
        project, package = test_package
        content = b"checksum format test"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": "invalid-hash-format"},
        )
        assert response.status_code == 400

    @pytest.mark.integration
    def test_upload_checksum_mismatch_returns_422(
        self, integration_client, test_package
    ):
        """Test upload with mismatched checksum returns 422."""
        project, package = test_package
        content = b"checksum mismatch test"
        wrong_hash = "0" * 64  # Valid format but wrong hash

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": wrong_hash},
        )
        assert response.status_code == 422

    @pytest.mark.integration
    def test_upload_with_correct_checksum_succeeds(
        self, integration_client, test_package
    ):
        """Test upload with correct checksum succeeds."""
        project, package = test_package
        content = b"correct checksum test"
        correct_hash = compute_sha256(content)

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": correct_hash},
        )
        assert response.status_code == 200
        assert response.json()["artifact_id"] == correct_hash
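

# For reference, the X-Checksum-SHA256 value is simply the SHA-256 hex digest
# of the uploaded bytes, the same value tests.factories.compute_sha256 returns.
# A client outside this suite needs nothing but hashlib to produce it:
#
#     import hashlib
#     headers = {"X-Checksum-SHA256": hashlib.sha256(content).hexdigest()}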


class TestDownloadErrorHandling:
    """Tests for download error handling."""

    @pytest.mark.integration
    def test_download_nonexistent_tag_returns_404(
        self, integration_client, test_package
    ):
        """Test download of nonexistent tag returns 404."""
        project, package = test_package

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/nonexistent-tag-xyz"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_nonexistent_artifact_returns_404(
        self, integration_client, test_package
    ):
        """Test download of nonexistent artifact ID returns 404."""
        project, package = test_package
        fake_hash = "a" * 64

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/artifact:{fake_hash}"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_invalid_artifact_id_format(
        self, integration_client, test_package
    ):
        """Test download with invalid artifact ID format."""
        project, package = test_package

        # Too short
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/artifact:abc123"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_from_nonexistent_project_returns_404(
        self, integration_client, unique_test_id
    ):
        """Test download from nonexistent project returns 404."""
        response = integration_client.get(
            f"/api/v1/project/nonexistent-{unique_test_id}/pkg/+/tag"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_from_nonexistent_package_returns_404(
        self, integration_client, test_project, unique_test_id
    ):
        """Test download from nonexistent package returns 404."""
        response = integration_client.get(
            f"/api/v1/project/{test_project}/nonexistent-{unique_test_id}/+/tag"
        )
        assert response.status_code == 404


class TestTimeoutBehavior:
    """Tests for timeout behavior (integration level)."""

    @pytest.mark.integration
    @pytest.mark.slow
    def test_large_upload_completes_within_reasonable_time(
        self, integration_client, test_package, sized_content
    ):
        """Test that a 10MB upload completes within reasonable time."""
        project, package = test_package
        content, expected_hash = sized_content(10 * 1024 * 1024, seed=999)  # 10MB

        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content, tag="timeout-test"
        )
        elapsed = time.time() - start_time

        assert result["artifact_id"] == expected_hash
        # Should complete within 60 seconds for 10MB on local docker
        assert elapsed < 60, f"Upload took too long: {elapsed:.2f}s"

    @pytest.mark.integration
    @pytest.mark.slow
    def test_large_download_completes_within_reasonable_time(
        self, integration_client, test_package, sized_content
    ):
        """Test that a 10MB download completes within reasonable time."""
        project, package = test_package
        content, expected_hash = sized_content(10 * 1024 * 1024, seed=998)  # 10MB

        # First upload
        upload_test_file(
            integration_client, project, package, content, tag="download-timeout-test"
        )

        # Then download and time it
        start_time = time.time()
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/download-timeout-test",
            params={"mode": "proxy"},
        )
        elapsed = time.time() - start_time

        assert response.status_code == 200
        assert len(response.content) == len(content)
        # Should complete within 60 seconds for 10MB on local docker
        assert elapsed < 60, f"Download took too long: {elapsed:.2f}s"


class TestResourceCleanup:
    """Tests for proper resource cleanup on failures.

    Note: More comprehensive cleanup tests are in test_upload_download_api.py
    (TestUploadFailureCleanup class) including S3 object cleanup verification.
    """

    @pytest.mark.integration
    def test_checksum_mismatch_no_orphaned_artifact(
        self, integration_client, test_package, unique_test_id
    ):
        """Test checksum mismatch doesn't leave orphaned artifact."""
        project, package = test_package
        # Use unique content to ensure artifact doesn't exist from prior tests
        content = f"checksum mismatch orphan test {unique_test_id}".encode()
        wrong_hash = "0" * 64
        actual_hash = compute_sha256(content)

        # Verify artifact doesn't exist before test
        pre_check = integration_client.get(f"/api/v1/artifact/{actual_hash}")
        assert pre_check.status_code == 404, "Artifact should not exist before test"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": wrong_hash},
        )
        assert response.status_code == 422

        # Verify no artifact was created with either hash
        response1 = integration_client.get(f"/api/v1/artifact/{wrong_hash}")
        response2 = integration_client.get(f"/api/v1/artifact/{actual_hash}")
        assert response1.status_code == 404
        assert response2.status_code == 404


class TestGracefulErrorResponses:
    """Tests for graceful and informative error responses."""

    @pytest.mark.integration
    def test_404_response_has_detail_message(
        self, integration_client, test_package
    ):
        """Test 404 responses include a detail message."""
        project, package = test_package

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/nonexistent-tag"
        )
        assert response.status_code == 404
        data = response.json()
        assert "detail" in data
        assert len(data["detail"]) > 0

    @pytest.mark.integration
    def test_422_response_has_detail_message(self, integration_client, test_package):
        """Test 422 responses include a detail message."""
        project, package = test_package

        # Upload with mismatched checksum
        content = b"detail message test"
        wrong_hash = "0" * 64

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": wrong_hash},
        )
        assert response.status_code == 422
        data = response.json()
        assert "detail" in data

    @pytest.mark.integration
    def test_error_response_is_json(self, integration_client, unique_test_id):
        """Test error responses are valid JSON."""
        response = integration_client.get(
            f"/api/v1/project/nonexistent-{unique_test_id}/pkg/+/tag"
        )
        assert response.status_code == 404
        # Should not raise exception - valid JSON
        data = response.json()
        assert isinstance(data, dict)
768
backend/tests/integration/test_integrity_verification.py
Normal file
@@ -0,0 +1,768 @@
"""
Integration tests for artifact integrity verification.

Tests cover:
- Round-trip verification (upload -> download -> verify hash)
- Consistency check endpoint
- Header-based verification
- Integrity verification across file sizes
- Client-side verification workflow
"""

import pytest
import io
import hashlib
from tests.factories import (
    compute_sha256,
    upload_test_file,
    generate_content_with_hash,
    s3_object_exists,
    get_s3_client,
    get_s3_bucket,
)
from tests.conftest import (
    SIZE_1KB,
    SIZE_10KB,
    SIZE_100KB,
    SIZE_1MB,
    SIZE_10MB,
)


class TestRoundTripVerification:
    """Tests for complete round-trip integrity verification."""

    @pytest.mark.integration
    def test_upload_download_hash_matches(self, integration_client, test_package):
        """Test that upload -> download round trip preserves content integrity."""
        project, package = test_package
        content = b"Round trip integrity test content"
        expected_hash = compute_sha256(content)

        # Upload and capture returned hash
        result = upload_test_file(
            integration_client, project, package, content, tag="roundtrip"
        )
        uploaded_hash = result["artifact_id"]

        # Verify upload returned correct hash
        assert uploaded_hash == expected_hash

        # Download artifact
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/roundtrip",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        # Compute hash of downloaded content
        downloaded_hash = compute_sha256(response.content)

        # All three hashes should match
        assert downloaded_hash == expected_hash
        assert downloaded_hash == uploaded_hash

    @pytest.mark.integration
    def test_upload_response_contains_hash(self, integration_client, test_package):
        """Test upload response contains artifact_id which is the SHA256 hash."""
        project, package = test_package
        content = b"Upload response hash test"
        expected_hash = compute_sha256(content)

        result = upload_test_file(integration_client, project, package, content)

        assert "artifact_id" in result
        assert result["artifact_id"] == expected_hash
        assert len(result["artifact_id"]) == 64
        assert all(c in "0123456789abcdef" for c in result["artifact_id"])

    @pytest.mark.integration
    def test_download_header_matches_artifact_id(self, integration_client, test_package):
        """Test X-Checksum-SHA256 header matches artifact ID."""
        project, package = test_package
        content = b"Header verification test"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="header-check"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/header-check",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.headers.get("X-Checksum-SHA256") == expected_hash

    @pytest.mark.integration
    def test_etag_matches_artifact_id(self, integration_client, test_package):
        """Test ETag header matches artifact ID."""
        project, package = test_package
        content = b"ETag verification test"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="etag-check"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/etag-check",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        etag = response.headers.get("ETag", "").strip('"')
        assert etag == expected_hash

    @pytest.mark.integration
    def test_artifact_endpoint_returns_correct_hash(self, integration_client, test_package):
        """Test artifact endpoint returns correct hash/ID."""
        project, package = test_package
        content = b"Artifact endpoint hash test"
        expected_hash = compute_sha256(content)

        upload_test_file(integration_client, project, package, content)

        # Query artifact directly
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.status_code == 200
        data = response.json()
        assert data["id"] == expected_hash
        assert data.get("sha256") == expected_hash


class TestClientSideVerificationWorkflow:
    """Tests for client-side verification workflow."""

    @pytest.mark.integration
    def test_client_can_verify_before_upload(self, integration_client, test_package):
        """Test client can compute hash before upload and verify response matches."""
        project, package = test_package
        content = b"Client pre-upload verification test"

        # Client computes hash locally before upload
        client_hash = compute_sha256(content)

        # Upload
        result = upload_test_file(integration_client, project, package, content)

        # Client verifies server returned the same hash
        assert result["artifact_id"] == client_hash

    @pytest.mark.integration
    def test_client_can_provide_checksum_header(self, integration_client, test_package):
        """Test client can provide X-Checksum-SHA256 header for verification."""
        project, package = test_package
        content = b"Client checksum header test"
        client_hash = compute_sha256(content)

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": client_hash},
        )
        assert response.status_code == 200
        assert response.json()["artifact_id"] == client_hash

    @pytest.mark.integration
    def test_checksum_mismatch_rejected(self, integration_client, test_package):
        """Test upload with wrong client checksum is rejected."""
        project, package = test_package
        content = b"Checksum mismatch test"
        wrong_hash = "0" * 64

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": wrong_hash},
        )
        assert response.status_code == 422

    @pytest.mark.integration
    def test_client_can_verify_after_download(self, integration_client, test_package):
        """Test client can verify downloaded content matches header hash."""
        project, package = test_package
        content = b"Client post-download verification"

        upload_test_file(
            integration_client, project, package, content, tag="verify-after"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/verify-after",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        # Client gets hash from header
        header_hash = response.headers.get("X-Checksum-SHA256")

        # Client computes hash of downloaded content
        downloaded_hash = compute_sha256(response.content)

        # Client verifies they match
        assert downloaded_hash == header_hash
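

# Taken together, the client-side loop these tests exercise is small: download,
# read the expected hash from the header, hash the bytes, compare. A standalone
# sketch with httpx and hashlib (URL and variable names illustrative):
#
#     import hashlib
#     import httpx
#
#     resp = httpx.get(f"{base_url}/api/v1/project/{project}/{package}/+/{tag}",
#                      params={"mode": "proxy"})
#     expected = resp.headers["X-Checksum-SHA256"]
#     actual = hashlib.sha256(resp.content).hexdigest()
#     assert actual == expected  # a mismatch means corruption in transit or at rest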


class TestIntegritySizeVariants:
    """Tests for integrity verification across different file sizes."""

    @pytest.mark.integration
    def test_integrity_1kb(self, integration_client, test_package, sized_content):
        """Test integrity verification for 1KB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1KB, seed=100)

        result = upload_test_file(
            integration_client, project, package, content, tag="int-1kb"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/int-1kb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert compute_sha256(response.content) == expected_hash
        assert response.headers.get("X-Checksum-SHA256") == expected_hash

    @pytest.mark.integration
    def test_integrity_100kb(self, integration_client, test_package, sized_content):
        """Test integrity verification for 100KB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100KB, seed=101)

        result = upload_test_file(
            integration_client, project, package, content, tag="int-100kb"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/int-100kb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert compute_sha256(response.content) == expected_hash
        assert response.headers.get("X-Checksum-SHA256") == expected_hash

    @pytest.mark.integration
    def test_integrity_1mb(self, integration_client, test_package, sized_content):
        """Test integrity verification for 1MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1MB, seed=102)

        result = upload_test_file(
            integration_client, project, package, content, tag="int-1mb"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/int-1mb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert compute_sha256(response.content) == expected_hash
        assert response.headers.get("X-Checksum-SHA256") == expected_hash

    @pytest.mark.integration
    @pytest.mark.slow
    def test_integrity_10mb(self, integration_client, test_package, sized_content):
        """Test integrity verification for 10MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_10MB, seed=103)

        result = upload_test_file(
            integration_client, project, package, content, tag="int-10mb"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/int-10mb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert compute_sha256(response.content) == expected_hash
        assert response.headers.get("X-Checksum-SHA256") == expected_hash


class TestConsistencyCheck:
    """Tests for the admin consistency check endpoint."""

    @pytest.mark.integration
    def test_consistency_check_returns_200(self, integration_client):
        """Test consistency check endpoint returns 200."""
        response = integration_client.get("/api/v1/admin/consistency-check")
        assert response.status_code == 200

    @pytest.mark.integration
    def test_consistency_check_response_format(self, integration_client):
        """Test consistency check returns expected response format."""
        response = integration_client.get("/api/v1/admin/consistency-check")
        assert response.status_code == 200
        data = response.json()

        # Check expected fields
        assert "total_artifacts_checked" in data
        assert "orphaned_s3_objects" in data
        assert "missing_s3_objects" in data
        assert "size_mismatches" in data
        assert "healthy" in data
        assert "orphaned_s3_keys" in data
        assert "missing_s3_keys" in data
        assert "size_mismatch_artifacts" in data
        # Verify types
        assert isinstance(data["total_artifacts_checked"], int)
        assert isinstance(data["orphaned_s3_objects"], int)
        assert isinstance(data["missing_s3_objects"], int)
        assert isinstance(data["size_mismatches"], int)
        assert isinstance(data["healthy"], bool)
        assert isinstance(data["orphaned_s3_keys"], list)
        assert isinstance(data["missing_s3_keys"], list)
        assert isinstance(data["size_mismatch_artifacts"], list)
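
    # A healthy response therefore looks something like this (values
    # illustrative):
    #
    #     {
    #         "total_artifacts_checked": 42,
    #         "orphaned_s3_objects": 0,
    #         "missing_s3_objects": 0,
    #         "size_mismatches": 0,
    #         "healthy": true,
    #         "orphaned_s3_keys": [],
    #         "missing_s3_keys": [],
    #         "size_mismatch_artifacts": []
    #     }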

    @pytest.mark.integration
    def test_consistency_check_after_upload(self, integration_client, test_package):
        """Test consistency check passes after valid upload."""
        project, package = test_package
        content = b"Consistency check test content"

        # Upload artifact
        upload_test_file(integration_client, project, package, content)

        # Run consistency check
        response = integration_client.get("/api/v1/admin/consistency-check")
        assert response.status_code == 200
        data = response.json()

        # Verify check ran and no issues
        assert data["total_artifacts_checked"] >= 1
        assert data["healthy"] is True

    @pytest.mark.integration
    def test_consistency_check_limit_parameter(self, integration_client):
        """Test consistency check respects limit parameter."""
        response = integration_client.get(
            "/api/v1/admin/consistency-check",
            params={"limit": 10}
        )
        assert response.status_code == 200
        data = response.json()

        # Lists should not exceed limit
        assert len(data["orphaned_s3_keys"]) <= 10
        assert len(data["missing_s3_keys"]) <= 10
        assert len(data["size_mismatch_artifacts"]) <= 10


class TestDigestHeader:
    """Tests for RFC 3230 Digest header."""

    @pytest.mark.integration
    def test_download_includes_digest_header(self, integration_client, test_package):
        """Test download includes Digest header in RFC 3230 format."""
        project, package = test_package
        content = b"Digest header test"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="digest-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/digest-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "Digest" in response.headers

        # Verify Digest format (sha-256=base64hash)
        digest = response.headers["Digest"]
        assert digest.startswith("sha-256=")

    @pytest.mark.integration
    def test_digest_header_base64_valid(self, integration_client, test_package):
        """Test Digest header contains valid base64 encoding."""
        import base64

        project, package = test_package
        content = b"Digest base64 test"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="digest-b64"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/digest-b64",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        digest = response.headers["Digest"]
        base64_part = digest.split("=", 1)[1]

        # Should be valid base64
        try:
            decoded = base64.b64decode(base64_part)
            assert len(decoded) == 32  # SHA256 is 32 bytes
        except Exception as e:
            pytest.fail(f"Invalid base64 in Digest header: {e}")
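

# Note the encoding difference from X-Checksum-SHA256: RFC 3230 puts base64 of
# the raw 32-byte digest after "sha-256=", not a hex string. A client could
# reproduce the value for comparison like this (sketch; names illustrative):
#
#     import base64, hashlib
#     digest_value = "sha-256=" + base64.b64encode(
#         hashlib.sha256(content).digest()
#     ).decode("ascii")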


class TestVerificationModes:
    """Tests for download verification modes."""

    @pytest.mark.integration
    def test_pre_verification_mode(self, integration_client, test_package):
        """Test pre-verification mode verifies before streaming."""
        project, package = test_package
        content = b"Pre-verification mode test"

        upload_test_file(
            integration_client, project, package, content, tag="pre-verify"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/pre-verify",
            params={"mode": "proxy", "verify": "true", "verify_mode": "pre"},
        )
        assert response.status_code == 200
        assert response.content == content

        # X-Verified header should be true
        assert response.headers.get("X-Verified") == "true"

    @pytest.mark.integration
    def test_stream_verification_mode(self, integration_client, test_package):
        """Test streaming verification mode."""
        project, package = test_package
        content = b"Stream verification mode test"

        upload_test_file(
            integration_client, project, package, content, tag="stream-verify"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/stream-verify",
            params={"mode": "proxy", "verify": "true", "verify_mode": "stream"},
        )
        assert response.status_code == 200
        assert response.content == content


class TestArtifactIntegrityEndpoint:
    """Tests for artifact-specific integrity operations."""

    @pytest.mark.integration
    def test_artifact_size_matches(self, integration_client, test_package):
        """Test artifact endpoint returns correct size."""
        project, package = test_package
        content = b"Artifact size test content"
        expected_size = len(content)

        result = upload_test_file(integration_client, project, package, content)
        artifact_id = result["artifact_id"]

        response = integration_client.get(f"/api/v1/artifact/{artifact_id}")
        assert response.status_code == 200
        data = response.json()
        assert data["size"] == expected_size

    @pytest.mark.integration
    def test_content_length_header_matches_size(self, integration_client, test_package):
        """Test Content-Length header matches artifact size."""
        project, package = test_package
        content = b"Content-Length header test"
        expected_size = len(content)

        upload_test_file(
            integration_client, project, package, content, tag="content-len"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/content-len",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert int(response.headers.get("Content-Length", 0)) == expected_size
        assert len(response.content) == expected_size
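

# The corruption tests below rebuild each artifact's S3 key by hand from its
# SHA-256: objects are content-addressed under
# fruits/<first 2 hex chars>/<next 2 chars>/<full hash>. A helper that makes
# the convention explicit (illustrative only; the tests inline the same
# f-string):
def artifact_s3_key(sha256_hex: str) -> str:
    """Content-addressed S3 key for an artifact, fanned out by hash prefix."""
    return f"fruits/{sha256_hex[:2]}/{sha256_hex[2:4]}/{sha256_hex}"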
|
||||
@pytest.mark.requires_direct_s3
|
||||
class TestCorruptionDetection:
|
||||
"""Tests for detecting corrupted S3 objects.
|
||||
|
||||
These tests directly manipulate S3 objects to simulate corruption
|
||||
and verify that the system can detect hash mismatches.
|
||||
|
||||
Note: These tests require direct S3/MinIO access and are skipped in CI
|
||||
where S3 is not directly accessible from the test runner.
|
||||
"""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_detection_of_corrupted_content(self, integration_client, test_package):
|
||||
"""Test that corrupted S3 content is detected via hash mismatch.
|
||||
|
||||
Uploads content, then directly modifies the S3 object, then
|
||||
verifies that the downloaded content hash doesn't match.
|
||||
"""
|
||||
project, package = test_package
|
||||
content = b"Original content for corruption test"
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
# Upload original content
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="corrupt-test"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
# Get the S3 object and corrupt it
|
||||
s3_client = get_s3_client()
|
||||
bucket = get_s3_bucket()
|
||||
s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"
|
||||
|
||||
# Replace with corrupted content
|
||||
corrupted_content = b"Corrupted content - different from original!"
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=corrupted_content)
|
||||
|
||||
# Download via proxy (bypasses hash verification)
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/corrupt-test",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Verify the downloaded content doesn't match original hash
|
||||
downloaded_hash = compute_sha256(response.content)
|
||||
assert downloaded_hash != expected_hash, "Corruption was not detected - hashes match"
|
||||
assert response.content == corrupted_content
|
||||
|
||||
# The X-Checksum-SHA256 header should still show the original hash (from DB)
|
||||
# but the actual content hash is different
|
||||
header_hash = response.headers.get("X-Checksum-SHA256")
|
||||
assert header_hash == expected_hash # Header shows expected hash
|
||||
assert downloaded_hash != header_hash # But content is corrupted
|
||||
|
||||
# Restore original content for cleanup
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_detection_of_single_bit_flip(self, integration_client, test_package):
|
||||
"""Test detection of a single bit flip in S3 object content."""
|
||||
project, package = test_package
|
||||
content = b"Content for single bit flip detection test"
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="bitflip-test"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
# Get S3 object and flip a single bit
|
||||
s3_client = get_s3_client()
|
||||
bucket = get_s3_bucket()
|
||||
s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"
|
||||
|
||||
# Flip the first bit of the first byte
|
||||
corrupted_content = bytearray(content)
|
||||
corrupted_content[0] ^= 0x01
|
||||
corrupted_content = bytes(corrupted_content)
|
||||
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=corrupted_content)
|
||||
|
||||
# Download and verify hash mismatch
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/bitflip-test",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
downloaded_hash = compute_sha256(response.content)
|
||||
assert downloaded_hash != expected_hash, "Single bit flip not detected"
|
||||
|
||||
# Restore original
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_detection_of_truncated_content(self, integration_client, test_package):
|
||||
"""Test detection of truncated S3 object."""
|
||||
project, package = test_package
|
||||
content = b"This is content that will be truncated for testing purposes"
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="truncate-test"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
# Get S3 object and truncate it
|
||||
s3_client = get_s3_client()
|
||||
bucket = get_s3_bucket()
|
||||
s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"
|
||||
|
||||
# Truncate to half the original size
|
||||
truncated_content = content[: len(content) // 2]
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=truncated_content)
|
||||
|
||||
# Download and verify hash mismatch
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/truncate-test",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
downloaded_hash = compute_sha256(response.content)
|
||||
assert downloaded_hash != expected_hash, "Truncation not detected"
|
||||
assert len(response.content) < len(content), "Content was not truncated"
|
||||
|
||||
# Restore original
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_detection_of_appended_content(self, integration_client, test_package):
|
||||
"""Test detection of content with extra bytes appended."""
|
||||
project, package = test_package
|
||||
content = b"Original content"
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="append-test"
|
||||
)
|
||||
assert result["artifact_id"] == expected_hash
|
||||
|
||||
# Get S3 object and append extra bytes
|
||||
s3_client = get_s3_client()
|
||||
bucket = get_s3_bucket()
|
||||
s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"
|
||||
|
||||
appended_content = content + b" - extra bytes appended"
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=appended_content)
|
||||
|
||||
# Download and verify hash mismatch
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/append-test",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
downloaded_hash = compute_sha256(response.content)
|
||||
assert downloaded_hash != expected_hash, "Appended content not detected"
|
||||
assert len(response.content) > len(content), "Content was not extended"
|
||||
|
||||
# Restore original
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_client_detects_hash_mismatch_post_download(
|
||||
self, integration_client, test_package
|
||||
):
|
||||
"""Test that a client can detect hash mismatch after downloading corrupted content.
|
||||
|
||||
This simulates the full client verification workflow:
|
||||
1. Download content
|
||||
2. Get expected hash from header
|
||||
3. Compute actual hash of content
|
||||
4. Verify they match (or detect corruption)
|
||||
"""
|
||||
project, package = test_package
|
||||
content = b"Content for client-side corruption detection"
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="client-detect"
|
||||
)
|
||||
|
||||
# Corrupt the S3 object
|
||||
s3_client = get_s3_client()
|
||||
bucket = get_s3_bucket()
|
||||
s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"
|
||||
corrupted = b"This is completely different content"
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=corrupted)
|
||||
|
||||
# Simulate client download and verification
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/client-detect",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Client gets expected hash from header
|
||||
header_hash = response.headers.get("X-Checksum-SHA256")
|
||||
|
||||
# Client computes hash of downloaded content
|
||||
actual_hash = compute_sha256(response.content)
|
||||
|
||||
# Client detects the mismatch
|
||||
corruption_detected = actual_hash != header_hash
|
||||
assert corruption_detected, "Client should detect hash mismatch"
|
||||
|
||||
# Restore original
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_consistency_check_detects_size_mismatch(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test that consistency check detects size mismatches.
|
||||
|
||||
Uploads content, modifies S3 object size, then runs consistency check.
|
||||
"""
|
||||
project, package = test_package
|
||||
content = b"Content for size mismatch consistency check test " + unique_test_id.encode()
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="size-mismatch"
|
||||
)
|
||||
|
||||
# Modify S3 object to have different size
|
||||
s3_client = get_s3_client()
|
||||
bucket = get_s3_bucket()
|
||||
s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"
|
||||
different_size_content = content + b"extra extra extra"
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=different_size_content)
|
||||
|
||||
# Run consistency check
|
||||
response = integration_client.get("/api/v1/admin/consistency-check")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Should detect the size mismatch
|
||||
assert data["size_mismatches"] >= 1 or len(data["size_mismatch_artifacts"]) >= 1
|
||||
|
||||
# Restore original
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_consistency_check_detects_missing_s3_object(
|
||||
self, integration_client, test_package, unique_test_id
|
||||
):
|
||||
"""Test that consistency check detects missing S3 objects.
|
||||
|
||||
Uploads content, deletes S3 object, then runs consistency check.
|
||||
"""
|
||||
project, package = test_package
|
||||
content = b"Content for missing S3 object test " + unique_test_id.encode()
|
||||
expected_hash = compute_sha256(content)
|
||||
|
||||
result = upload_test_file(
|
||||
integration_client, project, package, content, tag="missing-s3"
|
||||
)
|
||||
|
||||
# Delete the S3 object
|
||||
s3_client = get_s3_client()
|
||||
bucket = get_s3_bucket()
|
||||
s3_key = f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"
|
||||
s3_client.delete_object(Bucket=bucket, Key=s3_key)
|
||||
|
||||
# Run consistency check
|
||||
response = integration_client.get("/api/v1/admin/consistency-check")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Should detect the missing S3 object
|
||||
assert data["missing_s3_objects"] >= 1 or len(data["missing_s3_keys"]) >= 1
|
||||
|
||||
# Restore the object for cleanup
|
||||
s3_client.put_object(Bucket=bucket, Key=s3_key, Body=content)
|
||||
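The client-detect test above captures the recommended consumer flow: download in proxy mode, hash locally, and compare against the X-Checksum-SHA256 header. A minimal standalone sketch of that flow, assuming only httpx and the header shown in these tests (the download_verified helper and base_url parameter are illustrative names, not part of this diff):

import hashlib

import httpx


def download_verified(base_url: str, project: str, package: str, tag: str) -> bytes:
    # Proxy-mode download, same endpoint shape as the tests above
    resp = httpx.get(
        f"{base_url}/api/v1/project/{project}/{package}/+/{tag}",
        params={"mode": "proxy"},
    )
    resp.raise_for_status()
    expected = resp.headers.get("X-Checksum-SHA256")
    actual = hashlib.sha256(resp.content).hexdigest()
    # Fail loudly on corruption, exactly as the test asserts a client should
    if expected and actual != expected:
        raise ValueError(f"checksum mismatch: expected {expected}, got {actual}")
    return resp.content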
552
backend/tests/integration/test_large_uploads.py
Normal file
@@ -0,0 +1,552 @@
"""
Integration tests for large file upload functionality.

Tests cover:
- Large file uploads (100MB, 1GB)
- Multipart upload behavior
- Upload metrics (duration, throughput)
- Memory efficiency during uploads
- Upload progress tracking

Note: Large tests are marked with @pytest.mark.slow and will be skipped
by default. Run with `pytest --run-slow` to include them.
"""

import os
import pytest
import io
import time
from tests.factories import (
    compute_sha256,
    upload_test_file,
    s3_object_exists,
)
from tests.conftest import (
    SIZE_1KB,
    SIZE_100KB,
    SIZE_1MB,
    SIZE_10MB,
    SIZE_100MB,
    SIZE_1GB,
)


class TestUploadMetrics:
    """Tests for upload duration and throughput metrics."""

    @pytest.mark.integration
    def test_upload_response_includes_duration_ms(self, integration_client, test_package):
        """Test upload response includes duration_ms field."""
        project, package = test_package
        content = b"duration test content"

        result = upload_test_file(
            integration_client, project, package, content, tag="duration-test"
        )

        assert "duration_ms" in result
        assert result["duration_ms"] is not None
        assert result["duration_ms"] >= 0

    @pytest.mark.integration
    def test_upload_response_includes_throughput(self, integration_client, test_package):
        """Test upload response includes throughput_mbps field."""
        project, package = test_package
        content = b"throughput test content"

        result = upload_test_file(
            integration_client, project, package, content, tag="throughput-test"
        )

        assert "throughput_mbps" in result
        # For small files throughput may be very high or None
        # Just verify the field exists

    @pytest.mark.integration
    def test_upload_duration_reasonable(
        self, integration_client, test_package, sized_content
    ):
        """Test upload duration is reasonable for file size."""
        project, package = test_package
        content, _ = sized_content(SIZE_1MB, seed=100)

        start = time.time()
        result = upload_test_file(
            integration_client, project, package, content, tag="duration-check"
        )
        actual_duration = (time.time() - start) * 1000  # ms

        # Reported duration should be close to actual
        assert result["duration_ms"] is not None
        # Allow some variance (network overhead)
        assert result["duration_ms"] <= actual_duration + 1000  # Within 1s


class TestLargeFileUploads:
    """Tests for large file uploads using multipart."""

    @pytest.mark.integration
    def test_upload_10mb_file(self, integration_client, test_package, sized_content):
        """Test uploading a 10MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_10MB, seed=200)

        result = upload_test_file(
            integration_client, project, package, content, tag="large-10mb"
        )

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_10MB
        assert result["duration_ms"] is not None
        assert result["throughput_mbps"] is not None

    @pytest.mark.integration
    @pytest.mark.slow
    @pytest.mark.requires_direct_s3
    def test_upload_100mb_file(self, integration_client, test_package, sized_content):
        """Test uploading a 100MB file (triggers multipart upload)."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100MB, seed=300)

        result = upload_test_file(
            integration_client, project, package, content, tag="large-100mb"
        )

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_100MB
        # Verify S3 object exists
        assert s3_object_exists(expected_hash)

    @pytest.mark.integration
    @pytest.mark.slow
    @pytest.mark.large
    def test_upload_1gb_file(self, integration_client, test_package, sized_content):
        """Test uploading a 1GB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1GB, seed=400)

        result = upload_test_file(
            integration_client, project, package, content, tag="large-1gb"
        )

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_1GB
        # Should have measurable throughput
        assert result["throughput_mbps"] is not None
        assert result["throughput_mbps"] > 0

    @pytest.mark.integration
    def test_large_file_deduplication(
        self, integration_client, test_package, sized_content, unique_test_id
    ):
        """Test deduplication works for large files."""
        project, package = test_package
        # Use unique_test_id to ensure unique content per test run
        seed = hash(unique_test_id) % 10000
        content, expected_hash = sized_content(SIZE_10MB, seed=seed)

        # First upload
        result1 = upload_test_file(
            integration_client, project, package, content, tag=f"dedup-{unique_test_id}-1"
        )
        # Note: may be True if previous test uploaded same content
        first_dedupe = result1["deduplicated"]

        # Second upload of same content
        result2 = upload_test_file(
            integration_client, project, package, content, tag=f"dedup-{unique_test_id}-2"
        )
        assert result2["artifact_id"] == expected_hash
        # Second upload MUST be deduplicated
        assert result2["deduplicated"] is True


class TestUploadProgress:
    """Tests for upload progress tracking endpoint."""

    @pytest.mark.integration
    def test_progress_endpoint_returns_not_found_for_invalid_id(
        self, integration_client, test_package
    ):
        """Test progress endpoint returns not_found status for invalid upload ID."""
        project, package = test_package

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/upload/invalid-upload-id/progress"
        )

        assert response.status_code == 200
        data = response.json()
        assert data["status"] == "not_found"
        assert data["upload_id"] == "invalid-upload-id"

    @pytest.mark.integration
    def test_progress_endpoint_requires_valid_project(
        self, integration_client, unique_test_id
    ):
        """Test progress endpoint validates project exists."""
        response = integration_client.get(
            f"/api/v1/project/nonexistent-{unique_test_id}/pkg/upload/upload-id/progress"
        )

        assert response.status_code == 404

    @pytest.mark.integration
    def test_progress_endpoint_requires_valid_package(
        self, integration_client, test_project, unique_test_id
    ):
        """Test progress endpoint validates package exists."""
        response = integration_client.get(
            f"/api/v1/project/{test_project}/nonexistent-{unique_test_id}/upload/upload-id/progress"
        )

        assert response.status_code == 404


class TestResumableUploadProgress:
    """Tests for progress tracking during resumable uploads."""

    @pytest.mark.integration
    def test_resumable_upload_init_and_progress(
        self, integration_client, test_package, sized_content
    ):
        """Test initializing resumable upload and checking progress."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100KB, seed=600)

        # Get API key for auth
        api_key_response = integration_client.post(
            "/api/v1/auth/keys",
            json={"name": "progress-test-key"},
        )
        assert api_key_response.status_code == 200
        api_key = api_key_response.json()["key"]

        # Initialize resumable upload
        init_response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload/init",
            json={
                "expected_hash": expected_hash,
                "filename": "progress-test.bin",
                "size": SIZE_100KB,
            },
            headers={"Authorization": f"Bearer {api_key}"},
        )
        assert init_response.status_code == 200
        upload_id = init_response.json().get("upload_id")

        if upload_id:
            # Check initial progress
            progress_response = integration_client.get(
                f"/api/v1/project/{project}/{package}/upload/{upload_id}/progress",
                headers={"Authorization": f"Bearer {api_key}"},
            )
            assert progress_response.status_code == 200
            progress = progress_response.json()
            assert progress["status"] == "in_progress"
            assert progress["bytes_uploaded"] == 0
            assert progress["bytes_total"] == SIZE_100KB

            # Abort to clean up
            integration_client.delete(
                f"/api/v1/project/{project}/{package}/upload/{upload_id}",
                headers={"Authorization": f"Bearer {api_key}"},
            )


class TestUploadSizeLimits:
    """Tests for upload size limit enforcement."""

    @pytest.mark.integration
    def test_empty_file_rejected(self, integration_client, test_package):
        """Test empty files are rejected."""
        project, package = test_package

        files = {"file": ("empty.txt", io.BytesIO(b""), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )

        assert response.status_code in [400, 422]

    @pytest.mark.integration
    def test_minimum_size_accepted(self, integration_client, test_package):
        """Test 1-byte file is accepted."""
        project, package = test_package
        content = b"X"

        result = upload_test_file(
            integration_client, project, package, content, tag="min-size"
        )

        assert result["size"] == 1

    @pytest.mark.integration
    def test_content_length_header_used_in_response(self, integration_client, test_package):
        """Test that upload response size matches Content-Length."""
        project, package = test_package
        content = b"content length verification test"

        result = upload_test_file(
            integration_client, project, package, content, tag="content-length-test"
        )

        # Size in response should match actual content length
        assert result["size"] == len(content)


class TestUploadErrorHandling:
    """Tests for upload error handling."""

    @pytest.mark.integration
    def test_upload_to_nonexistent_project_returns_404(
        self, integration_client, unique_test_id
    ):
        """Test upload to nonexistent project returns 404."""
        content = b"test content"
        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}

        response = integration_client.post(
            f"/api/v1/project/nonexistent-{unique_test_id}/pkg/upload",
            files=files,
        )

        assert response.status_code == 404

    @pytest.mark.integration
    def test_upload_to_nonexistent_package_returns_404(
        self, integration_client, test_project, unique_test_id
    ):
        """Test upload to nonexistent package returns 404."""
        content = b"test content"
        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}

        response = integration_client.post(
            f"/api/v1/project/{test_project}/nonexistent-{unique_test_id}/upload",
            files=files,
        )

        assert response.status_code == 404

    @pytest.mark.integration
    def test_upload_without_file_returns_422(self, integration_client, test_package):
        """Test upload without file field returns 422."""
        project, package = test_package

        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            data={"tag": "no-file"},
        )

        assert response.status_code == 422

    @pytest.mark.integration
    def test_upload_with_invalid_checksum_rejected(
        self, integration_client, test_package
    ):
        """Test upload with invalid checksum header format is rejected."""
        project, package = test_package
        content = b"checksum test"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": "invalid-checksum"},
        )

        assert response.status_code == 400

    @pytest.mark.integration
    def test_upload_with_mismatched_checksum_rejected(
        self, integration_client, test_package
    ):
        """Test upload with wrong checksum is rejected."""
        project, package = test_package
        content = b"mismatch test"
        wrong_hash = "0" * 64

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": wrong_hash},
        )

        assert response.status_code == 422
        assert "verification failed" in response.json().get("detail", "").lower()


class TestResumableUploadCancellation:
    """Tests for resumable upload cancellation."""

    @pytest.mark.integration
    def test_abort_resumable_upload(self, integration_client, test_package, sized_content):
        """Test aborting a resumable upload cleans up properly."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100KB, seed=700)

        # Get API key for auth
        api_key_response = integration_client.post(
            "/api/v1/auth/keys",
            json={"name": "abort-test-key"},
        )
        assert api_key_response.status_code == 200
        api_key = api_key_response.json()["key"]

        # Initialize resumable upload
        init_response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload/init",
            json={
                "expected_hash": expected_hash,
                "filename": "abort-test.bin",
                "size": SIZE_100KB,
            },
            headers={"Authorization": f"Bearer {api_key}"},
        )
        assert init_response.status_code == 200
        upload_id = init_response.json().get("upload_id")

        if upload_id:
            # Abort the upload (without uploading any parts)
            abort_response = integration_client.delete(
                f"/api/v1/project/{project}/{package}/upload/{upload_id}",
                headers={"Authorization": f"Bearer {api_key}"},
            )
            assert abort_response.status_code in [200, 204]

            # Verify progress shows not_found after abort
            progress_response = integration_client.get(
                f"/api/v1/project/{project}/{package}/upload/{upload_id}/progress",
                headers={"Authorization": f"Bearer {api_key}"},
            )
            assert progress_response.status_code == 200
            assert progress_response.json()["status"] == "not_found"

    @pytest.mark.integration
    def test_abort_nonexistent_upload(self, integration_client, test_package):
        """Test aborting nonexistent upload returns appropriate error."""
        project, package = test_package

        # Get API key for auth
        api_key_response = integration_client.post(
            "/api/v1/auth/keys",
            json={"name": "abort-nonexistent-key"},
        )
        assert api_key_response.status_code == 200
        api_key = api_key_response.json()["key"]

        response = integration_client.delete(
            f"/api/v1/project/{project}/{package}/upload/nonexistent-upload-id",
            headers={"Authorization": f"Bearer {api_key}"},
        )

        # Should return 404 or 200 (idempotent delete)
        assert response.status_code in [200, 204, 404]


class TestUploadTimeout:
    """Tests for upload timeout handling."""

    @pytest.mark.integration
    def test_upload_with_short_timeout_succeeds_for_small_file(
        self, integration_client, test_package
    ):
        """Test small file upload succeeds with reasonable timeout."""
        project, package = test_package
        content = b"small timeout test"

        # httpx client should handle this quickly
        result = upload_test_file(
            integration_client, project, package, content, tag="timeout-small"
        )

        assert result["artifact_id"] is not None

    @pytest.mark.integration
    def test_upload_response_duration_under_timeout(
        self, integration_client, test_package, sized_content
    ):
        """Test upload completes within reasonable time."""
        project, package = test_package
        content, _ = sized_content(SIZE_1MB, seed=800)

        start = time.time()
        result = upload_test_file(
            integration_client, project, package, content, tag="timeout-check"
        )
        duration = time.time() - start

        # 1MB should upload in well under 60 seconds on local
        assert duration < 60
        assert result["artifact_id"] is not None


class TestConcurrentUploads:
    """Tests for concurrent upload handling."""

    @pytest.mark.integration
    def test_concurrent_different_files(
        self, integration_client, test_package, sized_content
    ):
        """Test concurrent uploads of different files succeed."""
        from concurrent.futures import ThreadPoolExecutor, as_completed

        project, package = test_package

        # Get API key for auth
        api_key_response = integration_client.post(
            "/api/v1/auth/keys",
            json={"name": "concurrent-diff-key"},
        )
        assert api_key_response.status_code == 200
        api_key = api_key_response.json()["key"]

        num_uploads = 3
        results = []
        errors = []

        def upload_unique_file(idx):
            try:
                from httpx import Client

                content, expected_hash = sized_content(SIZE_100KB, seed=900 + idx)

                base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
                with Client(base_url=base_url, timeout=30.0) as client:
                    files = {
                        "file": (
                            f"concurrent-{idx}.bin",
                            io.BytesIO(content),
                            "application/octet-stream",
                        )
                    }
                    response = client.post(
                        f"/api/v1/project/{project}/{package}/upload",
                        files=files,
                        data={"tag": f"concurrent-diff-{idx}"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
                        results.append((idx, response.json(), expected_hash))
                    else:
                        errors.append(f"Upload {idx}: {response.status_code} - {response.text}")
            except Exception as e:
                errors.append(f"Upload {idx}: {str(e)}")

        with ThreadPoolExecutor(max_workers=num_uploads) as executor:
            futures = [executor.submit(upload_unique_file, i) for i in range(num_uploads)]
            for future in as_completed(futures):
                pass

        assert len(errors) == 0, f"Concurrent upload errors: {errors}"
        assert len(results) == num_uploads

        # Each upload should have unique artifact ID
        artifact_ids = set(r[1]["artifact_id"] for r in results)
        assert len(artifact_ids) == num_uploads

        # Each should match expected hash
        for idx, result, expected_hash in results:
            assert result["artifact_id"] == expected_hash
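The module docstring above references a `pytest --run-slow` flag, but the conftest.py wiring behind it is not part of this diff. A minimal sketch of the standard pytest pattern the docstring implies, assuming the slow marker used in these tests (the hook names are pytest's own; the option string matches the docstring):

import pytest


def pytest_addoption(parser):
    # Registers the --run-slow flag the module docstring refers to
    parser.addoption(
        "--run-slow",
        action="store_true",
        default=False,
        help="also run tests marked @pytest.mark.slow",
    )


def pytest_collection_modifyitems(config, items):
    # Without --run-slow, attach a skip marker to every slow test
    if config.getoption("--run-slow"):
        return
    skip_slow = pytest.mark.skip(reason="needs --run-slow")
    for item in items:
        if "slow" in item.keywords:
            item.add_marker(skip_slow)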
583
backend/tests/integration/test_size_boundary.py
Normal file
@@ -0,0 +1,583 @@
"""
Integration tests for upload/download with various file sizes.

Tests cover:
- Small files (0B - 100KB)
- Medium files (1MB - 50MB)
- Large files (100MB - 1GB) - marked as slow/large
- Exact chunk boundaries
- Data integrity verification across all sizes
"""

import pytest
import io
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from tests.factories import (
    compute_sha256,
    upload_test_file,
    generate_content,
    generate_content_with_hash,
)
from tests.conftest import (
    SIZE_1B,
    SIZE_1KB,
    SIZE_10KB,
    SIZE_100KB,
    SIZE_1MB,
    SIZE_5MB,
    SIZE_10MB,
    SIZE_50MB,
    SIZE_100MB,
    SIZE_250MB,
    SIZE_500MB,
    SIZE_1GB,
    CHUNK_SIZE,
    MULTIPART_THRESHOLD,
)


class TestSmallFileSizes:
    """Tests for small file uploads/downloads (0B - 100KB)."""

    @pytest.mark.integration
    def test_upload_download_1_byte(self, integration_client, test_package, sized_content):
        """Test upload/download of 1 byte file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1B, seed=1)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="1byte.bin", tag="1byte"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_1B

        # Download and verify
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/1byte",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content
        assert len(response.content) == SIZE_1B

    @pytest.mark.integration
    def test_upload_download_1kb(self, integration_client, test_package, sized_content):
        """Test upload/download of 1KB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1KB, seed=2)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="1kb.bin", tag="1kb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_1KB

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/1kb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_upload_download_10kb(self, integration_client, test_package, sized_content):
        """Test upload/download of 10KB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_10KB, seed=3)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="10kb.bin", tag="10kb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_10KB

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/10kb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_upload_download_100kb(self, integration_client, test_package, sized_content):
        """Test upload/download of 100KB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100KB, seed=4)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="100kb.bin", tag="100kb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_100KB

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/100kb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content


class TestMediumFileSizes:
    """Tests for medium file uploads/downloads (1MB - 50MB)."""

    @pytest.mark.integration
    def test_upload_download_1mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 1MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1MB, seed=10)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="1mb.bin", tag="1mb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_1MB

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/1mb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert len(response.content) == SIZE_1MB
        assert compute_sha256(response.content) == expected_hash

    @pytest.mark.integration
    def test_upload_download_5mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 5MB file (multipart threshold boundary area)."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_5MB, seed=11)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="5mb.bin", tag="5mb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_5MB

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/5mb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert len(response.content) == SIZE_5MB
        assert compute_sha256(response.content) == expected_hash

    @pytest.mark.integration
    @pytest.mark.slow
    def test_upload_download_10mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 10MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_10MB, seed=12)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="10mb.bin", tag="10mb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_10MB

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/10mb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert len(response.content) == SIZE_10MB
        assert compute_sha256(response.content) == expected_hash

    @pytest.mark.integration
    @pytest.mark.slow
    def test_upload_download_50mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 50MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_50MB, seed=13)

        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
            filename="50mb.bin", tag="50mb"
        )
        upload_time = time.time() - start_time

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_50MB

        start_time = time.time()
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/50mb",
            params={"mode": "proxy"},
        )
        download_time = time.time() - start_time

        assert response.status_code == 200
        assert len(response.content) == SIZE_50MB
        assert compute_sha256(response.content) == expected_hash

        # Log timing for performance tracking
        print(f"\n50MB upload: {upload_time:.2f}s, download: {download_time:.2f}s")


class TestLargeFileSizes:
    """Tests for large file uploads/downloads (100MB - 1GB).

    These tests are marked as slow and large, skipped by default.
    Run with: pytest -m "large" to include these tests.
    """

    @pytest.mark.integration
    @pytest.mark.slow
    @pytest.mark.large
    def test_upload_download_100mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 100MB file (multipart threshold)."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100MB, seed=100)

        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
            filename="100mb.bin", tag="100mb"
        )
        upload_time = time.time() - start_time

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_100MB

        start_time = time.time()
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/100mb",
            params={"mode": "proxy"},
        )
        download_time = time.time() - start_time

        assert response.status_code == 200
        assert len(response.content) == SIZE_100MB
        assert compute_sha256(response.content) == expected_hash

        print(f"\n100MB upload: {upload_time:.2f}s, download: {download_time:.2f}s")

    @pytest.mark.integration
    @pytest.mark.slow
    @pytest.mark.large
    def test_upload_download_250mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 250MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_250MB, seed=250)

        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
            filename="250mb.bin", tag="250mb"
        )
        upload_time = time.time() - start_time

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_250MB

        start_time = time.time()
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/250mb",
            params={"mode": "proxy"},
        )
        download_time = time.time() - start_time

        assert response.status_code == 200
        assert len(response.content) == SIZE_250MB
        assert compute_sha256(response.content) == expected_hash

        print(f"\n250MB upload: {upload_time:.2f}s, download: {download_time:.2f}s")

    @pytest.mark.integration
    @pytest.mark.slow
    @pytest.mark.large
    def test_upload_download_500mb(self, integration_client, test_package, sized_content):
        """Test upload/download of 500MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_500MB, seed=500)

        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
            filename="500mb.bin", tag="500mb"
        )
        upload_time = time.time() - start_time

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_500MB

        start_time = time.time()
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/500mb",
            params={"mode": "proxy"},
        )
        download_time = time.time() - start_time

        assert response.status_code == 200
        assert len(response.content) == SIZE_500MB
        assert compute_sha256(response.content) == expected_hash

        print(f"\n500MB upload: {upload_time:.2f}s, download: {download_time:.2f}s")

    @pytest.mark.integration
    @pytest.mark.slow
    @pytest.mark.large
    def test_upload_download_1gb(self, integration_client, test_package, sized_content):
        """Test upload/download of 1GB file.

        This test may take several minutes depending on network/disk speed.
        """
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1GB, seed=1024)

        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
            filename="1gb.bin", tag="1gb"
        )
        upload_time = time.time() - start_time

        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_1GB

        start_time = time.time()
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/1gb",
            params={"mode": "proxy"},
        )
        download_time = time.time() - start_time

        assert response.status_code == 200
        assert len(response.content) == SIZE_1GB
        assert compute_sha256(response.content) == expected_hash

        print(f"\n1GB upload: {upload_time:.2f}s, download: {download_time:.2f}s")


class TestChunkBoundaries:
    """Tests for exact chunk size boundaries."""

    @pytest.mark.integration
    def test_upload_download_at_chunk_size(self, integration_client, test_package, sized_content):
        """Test upload/download at exact chunk size (64KB)."""
        project, package = test_package
        content, expected_hash = sized_content(CHUNK_SIZE, seed=64)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="chunk.bin", tag="chunk-exact"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == CHUNK_SIZE

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/chunk-exact",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_upload_download_chunk_size_plus_1(self, integration_client, test_package, sized_content):
        """Test upload/download at chunk size + 1 byte."""
        project, package = test_package
        size = CHUNK_SIZE + 1
        content, expected_hash = sized_content(size, seed=65)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="chunk_plus.bin", tag="chunk-plus"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == size

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/chunk-plus",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_upload_download_chunk_size_minus_1(self, integration_client, test_package, sized_content):
        """Test upload/download at chunk size - 1 byte."""
        project, package = test_package
        size = CHUNK_SIZE - 1
        content, expected_hash = sized_content(size, seed=63)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="chunk_minus.bin", tag="chunk-minus"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == size

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/chunk-minus",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_upload_download_multiple_chunks(self, integration_client, test_package, sized_content):
        """Test upload/download spanning multiple chunks."""
        project, package = test_package
        size = CHUNK_SIZE * 3 + 1000  # 3 full chunks + partial
        content, expected_hash = sized_content(size, seed=300)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="multi_chunk.bin", tag="multi-chunk"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == size

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/multi-chunk",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content


class TestDataIntegrity:
    """Tests for data integrity with various content types."""

    @pytest.mark.integration
    def test_binary_content_integrity(self, integration_client, test_package):
        """Test binary content (all byte values 0-255) integrity."""
        project, package = test_package
        # Content with all 256 possible byte values
        content = bytes(range(256)) * 100  # 25.6KB
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="binary.bin", tag="binary"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/binary",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_text_content_integrity(self, integration_client, test_package):
        """Test UTF-8 text content integrity."""
        project, package = test_package
        content = "Hello, World! 你好世界 🌍 مرحبا العالم".encode("utf-8")
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="text.txt", tag="text"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/text",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content
        assert response.content.decode("utf-8") == "Hello, World! 你好世界 🌍 مرحبا العالم"

    @pytest.mark.integration
    def test_null_bytes_content_integrity(self, integration_client, test_package):
        """Test content with null bytes."""
        project, package = test_package
        content = b"before\x00null\x00bytes\x00after"
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="nulls.bin", tag="nulls"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/nulls",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content
        assert b"\x00" in response.content

    @pytest.mark.integration
    def test_unicode_filename_integrity(self, integration_client, test_package):
        """Test file with unicode filename."""
        project, package = test_package
        content = b"unicode filename test"
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="文件名.txt", tag="unicode-name"
        )
        assert result["artifact_id"] == expected_hash
        assert result["original_name"] == "文件名.txt"

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/unicode-name",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_compressed_content_integrity(self, integration_client, test_package):
        """Test gzip-compressed content integrity."""
        import gzip

        project, package = test_package
        original = b"This is some text that will be compressed " * 100
        content = gzip.compress(original)
        expected_hash = compute_sha256(content)

        result = upload_test_file(
            integration_client, project, package, content,
            filename="data.gz", tag="compressed"
        )
        assert result["artifact_id"] == expected_hash

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/compressed",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content
        # Verify we can decompress
        assert gzip.decompress(response.content) == original

    @pytest.mark.integration
    def test_hash_verification_matches(self, integration_client, test_package, sized_content):
        """Test that computed hash matches artifact_id for various sizes."""
        project, package = test_package

        sizes = [SIZE_1B, SIZE_1KB, SIZE_10KB, SIZE_100KB, SIZE_1MB]

        for i, size in enumerate(sizes):
            content, expected_hash = sized_content(size, seed=1000 + i)

            result = upload_test_file(
                integration_client, project, package, content,
                filename=f"hash_test_{size}.bin", tag=f"hash-{size}"
            )

            # Verify artifact_id matches expected hash
            assert result["artifact_id"] == expected_hash

            # Download and verify hash of downloaded content
            response = integration_client.get(
                f"/api/v1/project/{project}/{package}/+/hash-{size}",
                params={"mode": "proxy"},
            )
            downloaded_hash = compute_sha256(response.content)
            assert downloaded_hash == expected_hash
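Every test above calls the sized_content fixture as sized_content(size, seed) and unpacks (content, expected_hash); its definition lives in tests/conftest.py, which this diff does not show. A plausible sketch consistent with that usage (the implementation details are an assumption; only the call signature is taken from the tests):

import hashlib
import random

import pytest


@pytest.fixture
def sized_content():
    def _make(size: int, seed: int = 0) -> tuple[bytes, str]:
        # Deterministic bytes: the same (size, seed) must reproduce the same
        # content and hash across runs, or the deduplication tests above break.
        # random.Random.randbytes requires Python 3.9+.
        content = random.Random(seed).randbytes(size)
        return content, hashlib.sha256(content).hexdigest()

    return _make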
535
backend/tests/integration/test_streaming_download.py
Normal file
@@ -0,0 +1,535 @@
|
||||
"""
|
||||
Integration tests for streaming download functionality.
|
||||
|
||||
Tests cover:
|
||||
- HTTP Range requests (partial downloads, resume)
|
||||
- Conditional requests (If-None-Match, If-Modified-Since)
|
||||
- Caching headers (Cache-Control, Last-Modified, Accept-Ranges)
|
||||
- Large file streaming
|
||||
- Download modes (proxy, redirect, presigned)
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import io
|
||||
import time
|
||||
from email.utils import formatdate
|
||||
from tests.factories import (
|
||||
compute_sha256,
|
||||
upload_test_file,
|
||||
)
|
||||
from tests.conftest import (
|
||||
SIZE_1KB,
|
||||
SIZE_100KB,
|
||||
SIZE_1MB,
|
||||
)
|
||||
|
||||
|
||||
class TestRangeRequests:
|
||||
"""Tests for HTTP Range request support (partial downloads)."""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_range_request_first_bytes(self, integration_client, test_package):
|
||||
"""Test range request for first N bytes."""
|
||||
project, package = test_package
|
||||
content = b"0123456789" * 100 # 1000 bytes
|
||||
upload_test_file(integration_client, project, package, content, tag="range-test")
|
||||
|
||||
# Request first 10 bytes
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/range-test",
|
||||
params={"mode": "proxy"},
|
||||
headers={"Range": "bytes=0-9"},
|
||||
)
|
||||
assert response.status_code == 206 # Partial Content
|
||||
assert response.content == b"0123456789"
|
||||
assert "Content-Range" in response.headers
|
||||
assert response.headers["Content-Range"].startswith("bytes 0-9/")
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_range_request_middle_bytes(self, integration_client, test_package):
|
||||
"""Test range request for bytes in the middle."""
|
||||
project, package = test_package
|
||||
content = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
|
||||
upload_test_file(integration_client, project, package, content, tag="range-mid")
|
||||
|
||||
# Request bytes 10-19 (KLMNOPQRST)
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/range-mid",
|
||||
params={"mode": "proxy"},
|
||||
headers={"Range": "bytes=10-19"},
|
||||
)
|
||||
assert response.status_code == 206
|
||||
assert response.content == b"KLMNOPQRST"
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_range_request_suffix_bytes(self, integration_client, test_package):
|
||||
"""Test range request for last N bytes (suffix range)."""
|
||||
project, package = test_package
|
||||
content = b"0123456789ABCDEF" # 16 bytes
|
||||
upload_test_file(integration_client, project, package, content, tag="range-suffix")
|
||||
|
||||
# Request last 4 bytes
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/range-suffix",
|
||||
params={"mode": "proxy"},
|
||||
headers={"Range": "bytes=-4"},
|
||||
)
|
||||
assert response.status_code == 206
|
||||
assert response.content == b"CDEF"
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_range_request_open_ended(self, integration_client, test_package):
|
||||
"""Test range request from offset to end."""
|
||||
project, package = test_package
|
||||
content = b"0123456789"
|
||||
upload_test_file(integration_client, project, package, content, tag="range-open")
|
||||
|
||||
# Request from byte 5 to end
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/range-open",
|
||||
params={"mode": "proxy"},
|
||||
headers={"Range": "bytes=5-"},
|
||||
)
|
||||
assert response.status_code == 206
|
||||
assert response.content == b"56789"
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_range_request_includes_accept_ranges_header(
|
||||
self, integration_client, test_package
|
||||
):
|
||||
"""Test that range requests include Accept-Ranges header."""
|
||||
project, package = test_package
|
||||
content = b"test content"
|
||||
upload_test_file(integration_client, project, package, content, tag="accept-ranges")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/accept-ranges",
|
||||
params={"mode": "proxy"},
|
||||
headers={"Range": "bytes=0-4"},
|
||||
)
|
||||
assert response.status_code == 206
|
||||
assert response.headers.get("Accept-Ranges") == "bytes"
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_full_download_advertises_accept_ranges(
|
||||
self, integration_client, test_package
|
||||
):
|
||||
"""Test that full downloads advertise range support."""
|
||||
project, package = test_package
|
||||
content = b"test content"
|
||||
upload_test_file(integration_client, project, package, content, tag="full-accept")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/full-accept",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.headers.get("Accept-Ranges") == "bytes"
|
||||
|
||||
|
||||
class TestConditionalRequests:
|
||||
"""Tests for conditional request handling (304 Not Modified)."""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_if_none_match_returns_304(self, integration_client, test_package):
|
||||
"""Test If-None-Match with matching ETag returns 304."""
|
||||
project, package = test_package
|
||||
content = b"conditional request test content"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="cond-etag")
|
||||
|
||||
# Request with matching ETag
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/cond-etag",
|
||||
params={"mode": "proxy"},
|
||||
headers={"If-None-Match": f'"{expected_hash}"'},
|
||||
)
|
||||
assert response.status_code == 304
|
||||
assert response.content == b"" # No body for 304
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_if_none_match_without_quotes(self, integration_client, test_package):
|
||||
"""Test If-None-Match works with or without quotes."""
|
||||
project, package = test_package
|
||||
content = b"etag no quotes test"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="cond-noquote")
|
||||
|
||||
# Request with ETag without quotes
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/cond-noquote",
|
||||
params={"mode": "proxy"},
|
||||
headers={"If-None-Match": expected_hash},
|
||||
)
|
||||
assert response.status_code == 304
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_if_none_match_mismatch_returns_200(self, integration_client, test_package):
|
||||
"""Test If-None-Match with non-matching ETag returns 200."""
|
||||
project, package = test_package
|
||||
content = b"etag mismatch test"
|
||||
upload_test_file(integration_client, project, package, content, tag="cond-mismatch")
|
||||
|
||||
# Request with different ETag
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/cond-mismatch",
|
||||
params={"mode": "proxy"},
|
||||
headers={"If-None-Match": '"different-etag-value"'},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.content == content
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_if_modified_since_returns_304(self, integration_client, test_package):
|
||||
"""Test If-Modified-Since with future date returns 304."""
|
||||
project, package = test_package
|
||||
content = b"modified since test"
|
||||
upload_test_file(integration_client, project, package, content, tag="cond-modified")
|
||||
|
||||
# Request with future date (artifact was definitely created before this)
|
||||
future_date = formatdate(time.time() + 86400, usegmt=True) # Tomorrow
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/cond-modified",
|
||||
params={"mode": "proxy"},
|
||||
headers={"If-Modified-Since": future_date},
|
||||
)
|
||||
assert response.status_code == 304
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_if_modified_since_old_date_returns_200(
|
||||
self, integration_client, test_package
|
||||
):
|
||||
"""Test If-Modified-Since with old date returns 200."""
|
||||
project, package = test_package
|
||||
content = b"old date test"
|
||||
upload_test_file(integration_client, project, package, content, tag="cond-old")
|
||||
|
||||
# Request with old date (2020-01-01)
|
||||
old_date = "Wed, 01 Jan 2020 00:00:00 GMT"
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/cond-old",
|
||||
params={"mode": "proxy"},
|
||||
headers={"If-Modified-Since": old_date},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.content == content
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_304_includes_etag(self, integration_client, test_package):
|
||||
"""Test 304 response includes ETag header."""
|
||||
project, package = test_package
|
||||
content = b"304 etag test"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="304-etag")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/304-etag",
|
||||
params={"mode": "proxy"},
|
||||
headers={"If-None-Match": f'"{expected_hash}"'},
|
||||
)
|
||||
assert response.status_code == 304
|
||||
assert response.headers.get("ETag") == f'"{expected_hash}"'
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_304_includes_cache_control(self, integration_client, test_package):
|
||||
"""Test 304 response includes Cache-Control header."""
|
||||
project, package = test_package
|
||||
content = b"304 cache test"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="304-cache")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/304-cache",
|
||||
params={"mode": "proxy"},
|
||||
headers={"If-None-Match": f'"{expected_hash}"'},
|
||||
)
|
||||
assert response.status_code == 304
|
||||
assert "immutable" in response.headers.get("Cache-Control", "")
|
||||
|
||||
|
||||
class TestCachingHeaders:
|
||||
"""Tests for caching headers on download responses."""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_download_includes_cache_control(self, integration_client, test_package):
|
||||
"""Test download response includes Cache-Control header."""
|
||||
project, package = test_package
|
||||
content = b"cache control test"
|
||||
upload_test_file(integration_client, project, package, content, tag="cache-ctl")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/cache-ctl",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
cache_control = response.headers.get("Cache-Control", "")
|
||||
assert "public" in cache_control
|
||||
assert "immutable" in cache_control
|
||||
assert "max-age" in cache_control
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_download_includes_last_modified(self, integration_client, test_package):
|
||||
"""Test download response includes Last-Modified header."""
|
||||
project, package = test_package
|
||||
content = b"last modified test"
|
||||
upload_test_file(integration_client, project, package, content, tag="last-mod")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/last-mod",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert "Last-Modified" in response.headers
|
||||
# Should be in RFC 7231 format
|
||||
last_modified = response.headers["Last-Modified"]
|
||||
assert "GMT" in last_modified
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_download_includes_etag(self, integration_client, test_package):
|
||||
"""Test download response includes ETag header."""
|
||||
project, package = test_package
|
||||
content = b"etag header test"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="etag-hdr")
|
||||
|
||||
response = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/etag-hdr",
|
||||
params={"mode": "proxy"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.headers.get("ETag") == f'"{expected_hash}"'
|
||||
|
||||
|
||||
class TestDownloadResume:
|
||||
"""Tests for download resume functionality using range requests."""
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_resume_download_after_partial(self, integration_client, test_package):
|
||||
"""Test resuming download from where it left off."""
|
||||
project, package = test_package
|
||||
content = b"ABCDEFGHIJ" * 100 # 1000 bytes
|
||||
upload_test_file(integration_client, project, package, content, tag="resume-test")
|
||||
|
||||
# Simulate partial download (first 500 bytes)
|
||||
response1 = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/resume-test",
|
||||
params={"mode": "proxy"},
|
||||
headers={"Range": "bytes=0-499"},
|
||||
)
|
||||
assert response1.status_code == 206
|
||||
first_half = response1.content
|
||||
assert len(first_half) == 500
|
||||
|
||||
# Resume from byte 500
|
||||
response2 = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/resume-test",
|
||||
params={"mode": "proxy"},
|
||||
headers={"Range": "bytes=500-"},
|
||||
)
|
||||
assert response2.status_code == 206
|
||||
second_half = response2.content
|
||||
assert len(second_half) == 500
|
||||
|
||||
# Combine and verify
|
||||
combined = first_half + second_half
|
||||
assert combined == content
|
||||
|
||||
@pytest.mark.integration
|
||||
def test_resume_with_etag_verification(self, integration_client, test_package):
|
||||
"""Test that resumed download can verify content hasn't changed."""
|
||||
project, package = test_package
|
||||
content = b"resume etag verification test content"
|
||||
expected_hash = compute_sha256(content)
|
||||
upload_test_file(integration_client, project, package, content, tag="resume-etag")
|
||||
|
||||
# Get ETag from first request
|
||||
response1 = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/resume-etag",
|
||||
params={"mode": "proxy"},
|
||||
headers={"Range": "bytes=0-9"},
|
||||
)
|
||||
assert response1.status_code == 206
|
||||
etag = response1.headers.get("ETag")
|
||||
assert etag == f'"{expected_hash}"'
|
||||
|
||||
# Resume with If-Match to ensure content hasn't changed
|
||||
# (Note: If-Match would fail and return 412 if content changed)
|
||||
response2 = integration_client.get(
|
||||
f"/api/v1/project/{project}/{package}/+/resume-etag",
|
||||
params={"mode": "proxy"},
|
||||
headers={"Range": "bytes=10-"},
|
||||
)
|
||||
assert response2.status_code == 206
|
||||
# ETag should be the same
|
||||
assert response2.headers.get("ETag") == etag
|
||||
|
||||
|
||||
class TestLargeFileStreaming:
    """Tests for streaming large files."""

    @pytest.mark.integration
    def test_stream_1mb_file(self, integration_client, test_package, sized_content):
        """Test streaming a 1MB file."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1MB, seed=500)

        upload_test_file(integration_client, project, package, content, tag="stream-1mb")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/stream-1mb",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert len(response.content) == SIZE_1MB
        assert compute_sha256(response.content) == expected_hash

    @pytest.mark.integration
    def test_stream_large_file_has_correct_headers(
        self, integration_client, test_package, sized_content
    ):
        """Test that large file streaming has correct headers."""
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100KB, seed=501)

        upload_test_file(integration_client, project, package, content, tag="stream-hdr")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/stream-hdr",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert int(response.headers.get("Content-Length", 0)) == SIZE_100KB
        assert response.headers.get("X-Checksum-SHA256") == expected_hash
        assert response.headers.get("Accept-Ranges") == "bytes"

    @pytest.mark.integration
    def test_range_request_on_large_file(
        self, integration_client, test_package, sized_content
    ):
        """Test range request on a larger file."""
        project, package = test_package
        content, _ = sized_content(SIZE_100KB, seed=502)

        upload_test_file(integration_client, project, package, content, tag="range-large")

        # Request a slice from the middle
        start = 50000
        end = 50999
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/range-large",
            params={"mode": "proxy"},
            headers={"Range": f"bytes={start}-{end}"},
        )
        assert response.status_code == 206
        assert len(response.content) == 1000
        assert response.content == content[start : end + 1]


class TestDownloadModes:
    """Tests for different download modes."""

    @pytest.mark.integration
    def test_proxy_mode_streams_content(self, integration_client, test_package):
        """Test proxy mode streams content through backend."""
        project, package = test_package
        content = b"proxy mode test content"
        upload_test_file(integration_client, project, package, content, tag="mode-proxy")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/mode-proxy",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_presigned_mode_returns_url(self, integration_client, test_package):
        """Test presigned mode returns JSON with URL."""
        project, package = test_package
        content = b"presigned mode test"
        upload_test_file(integration_client, project, package, content, tag="mode-presign")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/mode-presign",
            params={"mode": "presigned"},
        )
        assert response.status_code == 200
        data = response.json()
        assert "url" in data
        assert "expires_at" in data
        assert data["url"].startswith("http")

    @pytest.mark.integration
    def test_redirect_mode_returns_302(self, integration_client, test_package):
        """Test redirect mode returns 302 to presigned URL."""
        project, package = test_package
        content = b"redirect mode test"
        upload_test_file(integration_client, project, package, content, tag="mode-redir")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/mode-redir",
            params={"mode": "redirect"},
            follow_redirects=False,
        )
        assert response.status_code == 302
        assert "Location" in response.headers
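The three modes trade server load against client complexity: `proxy` streams bytes through the backend, while `presigned` and `redirect` hand the client a time-limited storage URL. A short sketch of consuming presigned mode, assuming only the JSON shape asserted above (the helper name is illustrative):

```python
import httpx

def download_via_presigned(client: httpx.Client, project: str, package: str, ref: str) -> bytes:
    """Resolve a presigned URL from Orchard, then fetch the bytes directly."""
    meta = client.get(
        f"/api/v1/project/{project}/{package}/+/{ref}",
        params={"mode": "presigned"},
    )
    meta.raise_for_status()
    url = meta.json()["url"]  # valid until meta.json()["expires_at"]
    # The presigned URL is fetched from storage directly, without Orchard auth
    return httpx.get(url).content
```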
class TestIntegrityDuringStreaming:
    """Tests for data integrity during streaming downloads."""

    @pytest.mark.integration
    def test_checksum_header_matches_content(self, integration_client, test_package):
        """Test X-Checksum-SHA256 header matches actual downloaded content."""
        project, package = test_package
        content = b"integrity check content"
        expected_hash = compute_sha256(content)
        upload_test_file(integration_client, project, package, content, tag="integrity")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/integrity",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        header_hash = response.headers.get("X-Checksum-SHA256")
        actual_hash = compute_sha256(response.content)

        assert header_hash == expected_hash
        assert actual_hash == expected_hash
        assert header_hash == actual_hash

    @pytest.mark.integration
    def test_etag_matches_content_hash(self, integration_client, test_package):
        """Test ETag header matches content hash."""
        project, package = test_package
        content = b"etag integrity test"
        expected_hash = compute_sha256(content)
        upload_test_file(integration_client, project, package, content, tag="etag-int")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/etag-int",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200

        etag = response.headers.get("ETag", "").strip('"')
        actual_hash = compute_sha256(response.content)

        assert etag == expected_hash
        assert actual_hash == expected_hash

    @pytest.mark.integration
    def test_digest_header_present(self, integration_client, test_package):
        """Test Digest header is present in RFC 3230 format."""
        project, package = test_package
        content = b"digest header test"
        upload_test_file(integration_client, project, package, content, tag="digest")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/digest",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "Digest" in response.headers
        assert response.headers["Digest"].startswith("sha-256=")
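Unlike the hex `X-Checksum-SHA256` header, RFC 3230 encodes the digest value in base64, so verifying the `Digest` header means comparing raw digest bytes. A hedged sketch (the test above only pins the `sha-256=` prefix; the base64 encoding is assumed from the RFC):

```python
import base64
import hashlib

def digest_header_matches(digest_header: str, content: bytes) -> bool:
    """Check an RFC 3230 'sha-256=...' Digest header against downloaded bytes."""
    algo, _, encoded = digest_header.partition("=")
    if algo != "sha-256":
        return False
    # partition() splits on the first '=', so base64 padding in `encoded` survives
    return base64.b64decode(encoded) == hashlib.sha256(content).digest()
```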
316
backend/tests/integration/test_teams_api.py
Normal file
@@ -0,0 +1,316 @@
"""
Integration tests for Teams API endpoints.
"""

import pytest


@pytest.mark.integration
class TestTeamsCRUD:
    """Tests for team creation, listing, updating, and deletion."""

    def test_create_team(self, integration_client, unique_test_id):
        """Test creating a new team."""
        team_name = f"Test Team {unique_test_id}"
        team_slug = f"test-team-{unique_test_id}"

        response = integration_client.post(
            "/api/v1/teams",
            json={
                "name": team_name,
                "slug": team_slug,
                "description": "A test team",
            },
        )
        assert response.status_code == 201, f"Failed to create team: {response.text}"

        data = response.json()
        assert data["name"] == team_name
        assert data["slug"] == team_slug
        assert data["description"] == "A test team"
        assert data["user_role"] == "owner"
        assert data["member_count"] == 1
        assert data["project_count"] == 0

        # Cleanup
        integration_client.delete(f"/api/v1/teams/{team_slug}")

    def test_create_team_duplicate_slug(self, integration_client, unique_test_id):
        """Test that duplicate team slugs are rejected."""
        team_slug = f"dup-team-{unique_test_id}"

        # Create first team
        response = integration_client.post(
            "/api/v1/teams",
            json={"name": "First Team", "slug": team_slug},
        )
        assert response.status_code == 201

        # Try to create second team with same slug
        response = integration_client.post(
            "/api/v1/teams",
            json={"name": "Second Team", "slug": team_slug},
        )
        assert response.status_code == 400
        assert "already exists" in response.json()["detail"].lower()

        # Cleanup
        integration_client.delete(f"/api/v1/teams/{team_slug}")

    def test_create_team_invalid_slug(self, integration_client):
        """Test that invalid team slugs are rejected."""
        invalid_slugs = [
            "UPPERCASE",
            "with spaces",
            "-starts-with-hyphen",
            "ends-with-hyphen-",
            "has--double--hyphen",
        ]

        for invalid_slug in invalid_slugs:
            response = integration_client.post(
                "/api/v1/teams",
                json={"name": "Test", "slug": invalid_slug},
            )
            assert response.status_code == 422, f"Slug '{invalid_slug}' should be invalid"

    def test_list_teams(self, integration_client, unique_test_id):
        """Test listing teams the user belongs to."""
        # Create a team
        team_slug = f"list-team-{unique_test_id}"
        integration_client.post(
            "/api/v1/teams",
            json={"name": "List Test Team", "slug": team_slug},
        )

        # List teams
        response = integration_client.get("/api/v1/teams")
        assert response.status_code == 200

        data = response.json()
        assert "items" in data
        assert "pagination" in data

        # Find our team
        team = next((t for t in data["items"] if t["slug"] == team_slug), None)
        assert team is not None
        assert team["name"] == "List Test Team"

        # Cleanup
        integration_client.delete(f"/api/v1/teams/{team_slug}")

    def test_get_team(self, integration_client, unique_test_id):
        """Test getting team details."""
        team_slug = f"get-team-{unique_test_id}"
        integration_client.post(
            "/api/v1/teams",
            json={"name": "Get Test Team", "slug": team_slug, "description": "Test"},
        )

        response = integration_client.get(f"/api/v1/teams/{team_slug}")
        assert response.status_code == 200

        data = response.json()
        assert data["slug"] == team_slug
        assert data["name"] == "Get Test Team"
        assert data["user_role"] == "owner"

        # Cleanup
        integration_client.delete(f"/api/v1/teams/{team_slug}")

    def test_get_nonexistent_team(self, integration_client):
        """Test getting a team that doesn't exist."""
        response = integration_client.get("/api/v1/teams/nonexistent-team-12345")
        assert response.status_code == 404

    def test_update_team(self, integration_client, unique_test_id):
        """Test updating team details."""
        team_slug = f"update-team-{unique_test_id}"
        integration_client.post(
            "/api/v1/teams",
            json={"name": "Original Name", "slug": team_slug},
        )

        response = integration_client.put(
            f"/api/v1/teams/{team_slug}",
            json={"name": "Updated Name", "description": "New description"},
        )
        assert response.status_code == 200

        data = response.json()
        assert data["name"] == "Updated Name"
        assert data["description"] == "New description"
        assert data["slug"] == team_slug  # Slug should not change

        # Cleanup
        integration_client.delete(f"/api/v1/teams/{team_slug}")

    def test_delete_team(self, integration_client, unique_test_id):
        """Test deleting a team."""
        team_slug = f"delete-team-{unique_test_id}"
        integration_client.post(
            "/api/v1/teams",
            json={"name": "Delete Test Team", "slug": team_slug},
        )

        response = integration_client.delete(f"/api/v1/teams/{team_slug}")
        assert response.status_code == 204

        # Verify team is gone
        response = integration_client.get(f"/api/v1/teams/{team_slug}")
        assert response.status_code == 404


@pytest.mark.integration
class TestTeamMembers:
    """Tests for team membership management."""

    @pytest.fixture
    def test_team(self, integration_client, unique_test_id):
        """Create a test team for member tests."""
        team_slug = f"member-team-{unique_test_id}"
        response = integration_client.post(
            "/api/v1/teams",
            json={"name": "Member Test Team", "slug": team_slug},
        )
        assert response.status_code == 201

        yield team_slug

        # Cleanup
        try:
            integration_client.delete(f"/api/v1/teams/{team_slug}")
        except Exception:
            pass

    def test_list_members(self, integration_client, test_team):
        """Test listing team members."""
        response = integration_client.get(f"/api/v1/teams/{test_team}/members")
        assert response.status_code == 200

        members = response.json()
        assert len(members) == 1
        assert members[0]["role"] == "owner"

    def test_owner_is_first_member(self, integration_client, test_team):
        """Test that the team creator is automatically the owner."""
        response = integration_client.get(f"/api/v1/teams/{test_team}/members")
        members = response.json()

        assert len(members) >= 1
        owner = next((m for m in members if m["role"] == "owner"), None)
        assert owner is not None


@pytest.mark.integration
class TestTeamProjects:
    """Tests for team project management."""

    @pytest.fixture
    def test_team(self, integration_client, unique_test_id):
        """Create a test team for project tests."""
        team_slug = f"proj-team-{unique_test_id}"
        response = integration_client.post(
            "/api/v1/teams",
            json={"name": "Project Test Team", "slug": team_slug},
        )
        assert response.status_code == 201

        data = response.json()
        yield {"slug": team_slug, "id": data["id"]}

        # Cleanup
        try:
            integration_client.delete(f"/api/v1/teams/{team_slug}")
        except Exception:
            pass

    def test_list_team_projects_empty(self, integration_client, test_team):
        """Test listing projects in an empty team."""
        response = integration_client.get(f"/api/v1/teams/{test_team['slug']}/projects")
        assert response.status_code == 200

        data = response.json()
        assert data["items"] == []
        assert data["pagination"]["total"] == 0

    def test_create_project_in_team(self, integration_client, test_team, unique_test_id):
        """Test creating a project within a team."""
        project_name = f"team-project-{unique_test_id}"

        response = integration_client.post(
            "/api/v1/projects",
            json={
                "name": project_name,
                "description": "A team project",
                "team_id": test_team["id"],
            },
        )
        assert response.status_code == 200, f"Failed to create project: {response.text}"

        data = response.json()
        assert data["team_id"] == test_team["id"]
        assert data["team_slug"] == test_team["slug"]

        # Verify project appears in team projects list
        response = integration_client.get(f"/api/v1/teams/{test_team['slug']}/projects")
        assert response.status_code == 200
        projects = response.json()["items"]
        assert any(p["name"] == project_name for p in projects)

        # Cleanup
        integration_client.delete(f"/api/v1/projects/{project_name}")

    def test_project_team_info_in_response(self, integration_client, test_team, unique_test_id):
        """Test that project responses include team info."""
        project_name = f"team-info-project-{unique_test_id}"

        # Create project in team
        integration_client.post(
            "/api/v1/projects",
            json={"name": project_name, "team_id": test_team["id"]},
        )

        # Get project and verify team info
        response = integration_client.get(f"/api/v1/projects/{project_name}")
        assert response.status_code == 200

        data = response.json()
        assert data["team_id"] == test_team["id"]
        assert data["team_slug"] == test_team["slug"]
        assert data["team_name"] == "Project Test Team"

        # Cleanup
        integration_client.delete(f"/api/v1/projects/{project_name}")


@pytest.mark.integration
class TestTeamAuthorization:
    """Tests for team-based authorization."""

    def test_cannot_delete_team_with_projects(self, integration_client, unique_test_id):
        """Test that teams with projects cannot be deleted."""
        team_slug = f"nodelete-team-{unique_test_id}"
        project_name = f"nodelete-project-{unique_test_id}"

        # Create team
        response = integration_client.post(
            "/api/v1/teams",
            json={"name": "No Delete Team", "slug": team_slug},
        )
        team_id = response.json()["id"]

        # Create project in team
        integration_client.post(
            "/api/v1/projects",
            json={"name": project_name, "team_id": team_id},
        )

        # Try to delete team - should fail
        response = integration_client.delete(f"/api/v1/teams/{team_slug}")
        assert response.status_code == 400
        assert "project" in response.json()["detail"].lower()

        # Cleanup - delete project first, then team
        integration_client.delete(f"/api/v1/projects/{project_name}")
        integration_client.delete(f"/api/v1/teams/{team_slug}")
@@ -10,6 +10,7 @@ Tests cover:
- S3 storage verification
"""

import os
import pytest
import io
import threading
@@ -25,6 +26,19 @@ from tests.factories import (
class TestUploadBasics:
    """Tests for basic upload functionality."""

    @pytest.mark.integration
    def test_upload_returns_200(self, integration_client, test_package):
        """Test upload with valid file returns 200."""
        project, package = test_package
        content = b"valid file upload test"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code == 200

    @pytest.mark.integration
    def test_upload_returns_artifact_id(self, integration_client, test_package):
        """Test upload returns the artifact ID (SHA256 hash)."""
@@ -101,6 +115,83 @@ class TestUploadBasics:
        assert "created_at" in result
        assert result["created_at"] is not None

    @pytest.mark.integration
    def test_upload_without_tag_succeeds(self, integration_client, test_package):
        """Test upload without tag succeeds (no tag created)."""
        project, package = test_package
        content = b"upload without tag test"
        expected_hash = compute_sha256(content)

        files = {"file": ("no_tag.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            # No tag parameter
        )
        assert response.status_code == 200
        result = response.json()
        assert result["artifact_id"] == expected_hash

        # Verify no tag was created - list tags and check
        tags_response = integration_client.get(
            f"/api/v1/project/{project}/{package}/tags"
        )
        assert tags_response.status_code == 200
        tags = tags_response.json()
        # Filter for tags pointing to this artifact
        artifact_tags = [t for t in tags.get("items", tags) if t.get("artifact_id") == expected_hash]
        assert len(artifact_tags) == 0, "Tag should not be created when not specified"

    @pytest.mark.integration
    def test_upload_creates_artifact_in_database(self, integration_client, test_package):
        """Test upload creates artifact record in database."""
        project, package = test_package
        content = b"database artifact test"
        expected_hash = compute_sha256(content)

        upload_test_file(integration_client, project, package, content)

        # Verify artifact exists via API
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.status_code == 200
        artifact = response.json()
        assert artifact["id"] == expected_hash
        assert artifact["size"] == len(content)

    @pytest.mark.integration
    @pytest.mark.requires_direct_s3
    def test_upload_creates_object_in_s3(self, integration_client, test_package):
        """Test upload creates object in S3 storage."""
        project, package = test_package
        content = b"s3 object creation test"
        expected_hash = compute_sha256(content)

        upload_test_file(integration_client, project, package, content)

        # Verify S3 object exists
        assert s3_object_exists(expected_hash), "S3 object should exist after upload"

    @pytest.mark.integration
    def test_upload_with_tag_creates_tag_record(self, integration_client, test_package):
        """Test upload with tag creates tag record."""
        project, package = test_package
        content = b"tag creation test"
        expected_hash = compute_sha256(content)
        tag_name = "my-tag-v1"

        upload_test_file(
            integration_client, project, package, content, tag=tag_name
        )

        # Verify tag exists
        tags_response = integration_client.get(
            f"/api/v1/project/{project}/{package}/tags"
        )
        assert tags_response.status_code == 200
        tags = tags_response.json()
        tag_names = [t["name"] for t in tags.get("items", tags)]
        assert tag_name in tag_names


class TestDuplicateUploads:
    """Tests for duplicate upload deduplication behavior."""
@@ -248,6 +339,23 @@ class TestDownload:
        assert response.status_code == 200
        assert response.content == original_content

    @pytest.mark.integration
    def test_download_by_tag_prefix(self, integration_client, test_package):
        """Test downloading artifact using tag: prefix."""
        project, package = test_package
        original_content = b"download by tag prefix test"

        upload_test_file(
            integration_client, project, package, original_content, tag="prefix-tag"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/tag:prefix-tag",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == original_content

    @pytest.mark.integration
    def test_download_nonexistent_tag(self, integration_client, test_package):
        """Test downloading nonexistent tag returns 404."""
@@ -258,6 +366,33 @@ class TestDownload:
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_nonexistent_artifact(self, integration_client, test_package):
        """Test downloading nonexistent artifact ID returns 404."""
        project, package = test_package
        fake_hash = "0" * 64

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/artifact:{fake_hash}"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_from_nonexistent_project(self, integration_client, unique_test_id):
        """Test downloading from nonexistent project returns 404."""
        response = integration_client.get(
            f"/api/v1/project/nonexistent-project-{unique_test_id}/somepackage/+/sometag"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_from_nonexistent_package(self, integration_client, test_project, unique_test_id):
        """Test downloading from nonexistent package returns 404."""
        response = integration_client.get(
            f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/+/sometag"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_content_matches_original(self, integration_client, test_package):
        """Test downloaded content matches original exactly."""
@@ -275,6 +410,111 @@ class TestDownload:
        assert response.content == original_content


class TestDownloadHeaders:
    """Tests for download response headers."""

    @pytest.mark.integration
    def test_download_content_type_header(self, integration_client, test_package):
        """Test download returns correct Content-Type header."""
        project, package = test_package
        content = b"content type header test"

        upload_test_file(
            integration_client, project, package, content,
            filename="test.txt", tag="content-type-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/content-type-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        # Content-Type should be set (either text/plain or application/octet-stream)
        assert "content-type" in response.headers

    @pytest.mark.integration
    def test_download_content_length_header(self, integration_client, test_package):
        """Test download returns correct Content-Length header."""
        project, package = test_package
        content = b"content length header test - exactly 41 bytes!"
        expected_length = len(content)

        upload_test_file(
            integration_client, project, package, content, tag="content-length-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/content-length-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "content-length" in response.headers
        assert int(response.headers["content-length"]) == expected_length

    @pytest.mark.integration
    def test_download_content_disposition_header(self, integration_client, test_package):
        """Test download returns correct Content-Disposition header."""
        project, package = test_package
        content = b"content disposition test"
        filename = "my-test-file.bin"

        upload_test_file(
            integration_client, project, package, content,
            filename=filename, tag="disposition-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/disposition-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "content-disposition" in response.headers
        disposition = response.headers["content-disposition"]
        assert "attachment" in disposition
        assert filename in disposition

    @pytest.mark.integration
    def test_download_checksum_headers(self, integration_client, test_package):
        """Test download returns checksum headers."""
        project, package = test_package
        content = b"checksum header test content"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="checksum-headers"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/checksum-headers",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        # Check for checksum headers
        assert "x-checksum-sha256" in response.headers
        assert response.headers["x-checksum-sha256"] == expected_hash

    @pytest.mark.integration
    def test_download_etag_header(self, integration_client, test_package):
        """Test download returns ETag header (artifact ID)."""
        project, package = test_package
        content = b"etag header test"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="etag-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/etag-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "etag" in response.headers
        # ETag should contain the artifact ID (hash)
        etag = response.headers["etag"].strip('"')
        assert etag == expected_hash


class TestConcurrentUploads:
    """Tests for concurrent upload handling."""

@@ -301,7 +541,7 @@ class TestConcurrentUploads:
        try:
            from httpx import Client

            base_url = "http://localhost:8080"
            base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
            with Client(base_url=base_url, timeout=30.0) as client:
                files = {
                    "file": (
@@ -397,6 +637,7 @@ class TestUploadFailureCleanup:
    """Tests for cleanup when uploads fail."""

    @pytest.mark.integration
    @pytest.mark.requires_direct_s3
    def test_upload_failure_invalid_project_no_orphaned_s3(
        self, integration_client, unique_test_id
    ):
@@ -419,6 +660,7 @@ class TestUploadFailureCleanup:
        )

    @pytest.mark.integration
    @pytest.mark.requires_direct_s3
    def test_upload_failure_invalid_package_no_orphaned_s3(
        self, integration_client, test_project, unique_test_id
    ):
@@ -466,6 +708,7 @@ class TestS3StorageVerification:
    """Tests to verify S3 storage behavior."""

    @pytest.mark.integration
    @pytest.mark.requires_direct_s3
    def test_s3_single_object_after_duplicates(
        self, integration_client, test_package, unique_test_id
    ):
@@ -521,6 +764,7 @@ class TestSecurityPathTraversal:
    """

    @pytest.mark.integration
    @pytest.mark.requires_direct_s3
    def test_path_traversal_in_filename_stored_safely(
        self, integration_client, test_package
    ):
347
backend/tests/integration/test_version_api.py
Normal file
@@ -0,0 +1,347 @@
"""
Integration tests for package version API endpoints.

Tests cover:
- Version creation via upload
- Version auto-detection from filename
- Version listing and retrieval
- Download by version prefix
- Version deletion
"""

import pytest
import io
from tests.factories import (
    compute_sha256,
    upload_test_file,
)


class TestVersionCreation:
    """Tests for creating versions via upload."""

    @pytest.mark.integration
    def test_upload_with_explicit_version(self, integration_client, test_package):
        """Test upload with explicit version parameter creates version record."""
        project, package = test_package
        content = b"version creation test"
        expected_hash = compute_sha256(content)

        files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"version": "1.0.0"},
        )
        assert response.status_code == 200
        result = response.json()
        assert result["artifact_id"] == expected_hash
        assert result.get("version") == "1.0.0"
        assert result.get("version_source") == "explicit"

    @pytest.mark.integration
    def test_upload_with_version_and_tag(self, integration_client, test_package):
        """Test upload with both version and tag creates both records."""
        project, package = test_package
        content = b"version and tag test"

        files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"version": "2.0.0", "tag": "latest"},
        )
        assert response.status_code == 200
        result = response.json()
        assert result.get("version") == "2.0.0"

        # Verify tag was also created
        tags_response = integration_client.get(
            f"/api/v1/project/{project}/{package}/tags"
        )
        assert tags_response.status_code == 200
        tags = tags_response.json()
        tag_names = [t["name"] for t in tags.get("items", tags)]
        assert "latest" in tag_names

    @pytest.mark.integration
    def test_duplicate_version_same_content_succeeds(self, integration_client, test_package):
        """Test uploading same version with same content succeeds (deduplication)."""
        project, package = test_package
        content = b"version dedup test"

        # First upload with version
        files1 = {"file": ("app1.tar.gz", io.BytesIO(content), "application/octet-stream")}
        response1 = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files1,
            data={"version": "3.0.0"},
        )
        assert response1.status_code == 200

        # Second upload with same version and same content succeeds
        files2 = {"file": ("app2.tar.gz", io.BytesIO(content), "application/octet-stream")}
        response2 = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files2,
            data={"version": "3.0.0"},
        )
        # This succeeds because it's the same artifact (deduplication)
        assert response2.status_code == 200


class TestVersionAutoDetection:
    """Tests for automatic version detection from filename."""

    @pytest.mark.integration
    def test_version_detected_from_filename_tarball(self, integration_client, test_package):
        """Test version is auto-detected from tarball filename or metadata."""
        project, package = test_package
        content = b"auto detect version tarball"

        files = {"file": ("myapp-1.2.3.tar.gz", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code == 200
        result = response.json()
        assert result.get("version") == "1.2.3"
        # Version source can be 'filename' or 'metadata' depending on detection order
        assert result.get("version_source") in ["filename", "metadata"]

    @pytest.mark.integration
    def test_version_detected_from_filename_zip(self, integration_client, test_package):
        """Test version is auto-detected from zip filename."""
        project, package = test_package
        content = b"auto detect version zip"

        files = {"file": ("package-2.0.0.zip", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code == 200
        result = response.json()
        assert result.get("version") == "2.0.0"
        assert result.get("version_source") == "filename"

    @pytest.mark.integration
    def test_explicit_version_overrides_filename(self, integration_client, test_package):
        """Test explicit version parameter overrides filename detection."""
        project, package = test_package
        content = b"explicit override test"

        files = {"file": ("myapp-1.0.0.tar.gz", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"version": "9.9.9"},
        )
        assert response.status_code == 200
        result = response.json()
        assert result.get("version") == "9.9.9"
        assert result.get("version_source") == "explicit"

    @pytest.mark.integration
    def test_no_version_detected_from_plain_filename(self, integration_client, test_package):
        """Test no version is created for filenames without version pattern."""
        project, package = test_package
        content = b"no version in filename"

        files = {"file": ("plain-file.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code == 200
        result = response.json()
        # Version should be None or not present
        assert result.get("version") is None
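The behavior exercised above is consistent with a simple filename heuristic: strip a known archive extension, then take a trailing dotted-numeric suffix as the version. A hedged sketch of such a rule (illustrative only; Orchard's actual detector may differ, for example by also reading archive metadata):

```python
import re
from typing import Optional

# Matches names like "myapp-1.2.3.tar.gz" or "package-2.0.0.zip"
_VERSION_RE = re.compile(r"-(\d+(?:\.\d+)*)\.(?:tar\.gz|tgz|zip)$")

def detect_version_from_filename(filename: str) -> Optional[str]:
    """Return a version string like '1.2.3' if the filename carries one."""
    match = _VERSION_RE.search(filename)
    return match.group(1) if match else None

assert detect_version_from_filename("myapp-1.2.3.tar.gz") == "1.2.3"
assert detect_version_from_filename("package-2.0.0.zip") == "2.0.0"
assert detect_version_from_filename("plain-file.bin") is None
```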
class TestVersionListing:
    """Tests for listing and retrieving versions."""

    @pytest.mark.integration
    def test_list_versions(self, integration_client, test_package):
        """Test listing all versions for a package."""
        project, package = test_package

        # Create multiple versions
        for ver in ["1.0.0", "1.1.0", "2.0.0"]:
            content = f"version {ver} content".encode()
            files = {"file": (f"app-{ver}.tar.gz", io.BytesIO(content), "application/octet-stream")}
            response = integration_client.post(
                f"/api/v1/project/{project}/{package}/upload",
                files=files,
                data={"version": ver},
            )
            assert response.status_code == 200

        # List versions
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/versions"
        )
        assert response.status_code == 200
        data = response.json()
        versions = [v["version"] for v in data.get("items", data)]
        assert "1.0.0" in versions
        assert "1.1.0" in versions
        assert "2.0.0" in versions

    @pytest.mark.integration
    def test_get_specific_version(self, integration_client, test_package):
        """Test getting details for a specific version."""
        project, package = test_package
        content = b"specific version test"
        expected_hash = compute_sha256(content)

        # Create version
        files = {"file": ("app-4.0.0.tar.gz", io.BytesIO(content), "application/octet-stream")}
        integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"version": "4.0.0"},
        )

        # Get version details
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/versions/4.0.0"
        )
        assert response.status_code == 200
        data = response.json()
        assert data["version"] == "4.0.0"
        assert data["artifact_id"] == expected_hash

    @pytest.mark.integration
    def test_get_nonexistent_version_returns_404(self, integration_client, test_package):
        """Test getting nonexistent version returns 404."""
        project, package = test_package

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/versions/99.99.99"
        )
        assert response.status_code == 404


class TestDownloadByVersion:
    """Tests for downloading artifacts by version."""

    @pytest.mark.integration
    def test_download_by_version_prefix(self, integration_client, test_package):
        """Test downloading artifact using version: prefix."""
        project, package = test_package
        content = b"download by version test"
        expected_hash = compute_sha256(content)

        # Upload with version
        files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")}
        integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"version": "5.0.0"},
        )

        # Download by version prefix
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/version:5.0.0",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == content

    @pytest.mark.integration
    def test_download_nonexistent_version_returns_404(self, integration_client, test_package):
        """Test downloading nonexistent version returns 404."""
        project, package = test_package

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/version:99.0.0"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_version_resolution_priority(self, integration_client, test_package):
        """Test that version: prefix explicitly resolves to version, not tag."""
        project, package = test_package
        version_content = b"this is the version content"
        tag_content = b"this is the tag content"

        # Create a version 6.0.0
        files1 = {"file": ("app-v.tar.gz", io.BytesIO(version_content), "application/octet-stream")}
        integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files1,
            data={"version": "6.0.0"},
        )

        # Create a tag named "6.0.0" pointing to different content
        files2 = {"file": ("app-t.tar.gz", io.BytesIO(tag_content), "application/octet-stream")}
        integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files2,
            data={"tag": "6.0.0"},
        )

        # Download with version: prefix should get version content
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/version:6.0.0",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == version_content

        # Download with tag: prefix should get tag content
        response2 = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/tag:6.0.0",
            params={"mode": "proxy"},
        )
        assert response2.status_code == 200
        assert response2.content == tag_content


class TestVersionDeletion:
    """Tests for deleting versions."""

    @pytest.mark.integration
    def test_delete_version(self, integration_client, test_package):
        """Test deleting a version."""
        project, package = test_package
        content = b"delete version test"

        # Create version
        files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")}
        integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            data={"version": "7.0.0"},
        )

        # Verify version exists
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/versions/7.0.0"
        )
        assert response.status_code == 200

        # Delete version - returns 204 No Content on success
        delete_response = integration_client.delete(
            f"/api/v1/project/{project}/{package}/versions/7.0.0"
        )
        assert delete_response.status_code == 204

        # Verify version no longer exists
        response2 = integration_client.get(
            f"/api/v1/project/{project}/{package}/versions/7.0.0"
        )
        assert response2.status_code == 404

    @pytest.mark.integration
    def test_delete_nonexistent_version_returns_404(self, integration_client, test_package):
        """Test deleting nonexistent version returns 404."""
        project, package = test_package

        response = integration_client.delete(
            f"/api/v1/project/{project}/{package}/versions/99.0.0"
        )
        assert response.status_code == 404
1080
backend/tests/test_dependencies.py
Normal file
File diff suppressed because it is too large
95
backend/tests/unit/test_auth.py
Normal file
@@ -0,0 +1,95 @@
"""Unit tests for authentication module."""

import pytest
from unittest.mock import patch, MagicMock


class TestCreateDefaultAdmin:
    """Tests for the create_default_admin function."""

    def test_create_default_admin_with_env_password(self):
        """Test that ORCHARD_ADMIN_PASSWORD env var sets admin password."""
        from app.auth import create_default_admin, verify_password

        # Create mock settings with custom password
        mock_settings = MagicMock()
        mock_settings.admin_password = "my-custom-password-123"

        # Mock database session
        mock_db = MagicMock()
        mock_db.query.return_value.count.return_value = 0  # No existing users

        # Track the user that gets created
        created_user = None

        def capture_user(user):
            nonlocal created_user
            created_user = user

        mock_db.add.side_effect = capture_user

        with patch("app.auth.get_settings", return_value=mock_settings):
            admin = create_default_admin(mock_db)

        # Verify the user was created
        assert mock_db.add.called
        assert created_user is not None
        assert created_user.username == "admin"
        assert created_user.is_admin is True
        # Password should NOT require change when set via env var
        assert created_user.must_change_password is False
        # Verify password was hashed correctly
        assert verify_password("my-custom-password-123", created_user.password_hash)

    def test_create_default_admin_with_default_password(self):
        """Test that default password 'changeme123' is used when env var not set."""
        from app.auth import create_default_admin, verify_password

        # Create mock settings with empty password (default)
        mock_settings = MagicMock()
        mock_settings.admin_password = ""

        # Mock database session
        mock_db = MagicMock()
        mock_db.query.return_value.count.return_value = 0  # No existing users

        # Track the user that gets created
        created_user = None

        def capture_user(user):
            nonlocal created_user
            created_user = user

        mock_db.add.side_effect = capture_user

        with patch("app.auth.get_settings", return_value=mock_settings):
            admin = create_default_admin(mock_db)

        # Verify the user was created
        assert mock_db.add.called
        assert created_user is not None
        assert created_user.username == "admin"
        assert created_user.is_admin is True
        # Password SHOULD require change when using default
        assert created_user.must_change_password is True
        # Verify default password was used
        assert verify_password("changeme123", created_user.password_hash)

    def test_create_default_admin_skips_when_users_exist(self):
        """Test that no admin is created when users already exist."""
        from app.auth import create_default_admin

        # Create mock settings
        mock_settings = MagicMock()
        mock_settings.admin_password = "some-password"

        # Mock database session with existing users
        mock_db = MagicMock()
        mock_db.query.return_value.count.return_value = 1  # Users exist

        with patch("app.auth.get_settings", return_value=mock_settings):
            result = create_default_admin(mock_db)

        # Should return None and not create any user
        assert result is None
        assert not mock_db.add.called
213
backend/tests/unit/test_team_auth.py
Normal file
@@ -0,0 +1,213 @@
"""
Unit tests for TeamAuthorizationService.
"""

import pytest
from unittest.mock import MagicMock, patch
import uuid


class TestTeamRoleHierarchy:
    """Tests for team role hierarchy functions."""

    def test_get_team_role_rank(self):
        """Test role ranking."""
        from app.auth import get_team_role_rank

        assert get_team_role_rank("member") == 0
        assert get_team_role_rank("admin") == 1
        assert get_team_role_rank("owner") == 2
        assert get_team_role_rank("invalid") == -1

    def test_has_sufficient_team_role(self):
        """Test role sufficiency checks."""
        from app.auth import has_sufficient_team_role

        # Same role should be sufficient
        assert has_sufficient_team_role("member", "member") is True
        assert has_sufficient_team_role("admin", "admin") is True
        assert has_sufficient_team_role("owner", "owner") is True

        # Higher role should be sufficient for lower requirements
        assert has_sufficient_team_role("admin", "member") is True
        assert has_sufficient_team_role("owner", "member") is True
        assert has_sufficient_team_role("owner", "admin") is True

        # Lower role should NOT be sufficient for higher requirements
        assert has_sufficient_team_role("member", "admin") is False
        assert has_sufficient_team_role("member", "owner") is False
        assert has_sufficient_team_role("admin", "owner") is False
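These assertions pin the hierarchy down completely: member < admin < owner, with unknown roles ranked below everything. A minimal sketch consistent with the tests (illustrative only; the real functions live in `app.auth` and may be implemented differently):

```python
_TEAM_ROLE_RANKS = {"member": 0, "admin": 1, "owner": 2}

def get_team_role_rank(role: str) -> int:
    """Rank a team role; unknown roles rank below all valid ones."""
    return _TEAM_ROLE_RANKS.get(role, -1)

def has_sufficient_team_role(actual: str, required: str) -> bool:
    """True when `actual` is at least as privileged as `required`."""
    return get_team_role_rank(actual) >= get_team_role_rank(required)
```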
class TestTeamAuthorizationService:
    """Tests for TeamAuthorizationService class."""

    @pytest.fixture
    def mock_db(self):
        """Create a mock database session."""
        return MagicMock()

    @pytest.fixture
    def mock_user(self):
        """Create a mock user."""
        user = MagicMock()
        user.id = uuid.uuid4()
        user.username = "testuser"
        user.is_admin = False
        return user

    @pytest.fixture
    def mock_admin_user(self):
        """Create a mock admin user."""
        user = MagicMock()
        user.id = uuid.uuid4()
        user.username = "adminuser"
        user.is_admin = True
        return user

    def test_get_user_team_role_no_user(self, mock_db):
        """Test that None is returned for anonymous users."""
        from app.auth import TeamAuthorizationService

        service = TeamAuthorizationService(mock_db)
        result = service.get_user_team_role("team-id", None)
        assert result is None

    def test_get_user_team_role_admin_user(self, mock_db, mock_admin_user):
        """Test that system admins who are not members get admin role."""
        from app.auth import TeamAuthorizationService

        # Mock no membership found
        mock_db.query.return_value.filter.return_value.first.return_value = None

        service = TeamAuthorizationService(mock_db)
        result = service.get_user_team_role("team-id", mock_admin_user)
        assert result == "admin"

    def test_get_user_team_role_member(self, mock_db, mock_user):
        """Test getting role for a team member."""
        from app.auth import TeamAuthorizationService

        # Mock the membership query
        mock_membership = MagicMock()
        mock_membership.role = "member"
        mock_db.query.return_value.filter.return_value.first.return_value = mock_membership

        service = TeamAuthorizationService(mock_db)
        result = service.get_user_team_role("team-id", mock_user)
        assert result == "member"

    def test_get_user_team_role_not_member(self, mock_db, mock_user):
        """Test getting role for a non-member."""
        from app.auth import TeamAuthorizationService

        # Mock no membership found
        mock_db.query.return_value.filter.return_value.first.return_value = None

        service = TeamAuthorizationService(mock_db)
        result = service.get_user_team_role("team-id", mock_user)
        assert result is None

    def test_check_team_access_member(self, mock_db, mock_user):
        """Test access check for member requiring member role."""
        from app.auth import TeamAuthorizationService

        # Mock the membership query
        mock_membership = MagicMock()
        mock_membership.role = "member"
        mock_db.query.return_value.filter.return_value.first.return_value = mock_membership

        service = TeamAuthorizationService(mock_db)

        # Member should have member access
        assert service.check_team_access("team-id", mock_user, "member") is True
        # Member should not have admin access
        assert service.check_team_access("team-id", mock_user, "admin") is False
        # Member should not have owner access
        assert service.check_team_access("team-id", mock_user, "owner") is False

    def test_check_team_access_admin(self, mock_db, mock_user):
        """Test access check for admin role."""
        from app.auth import TeamAuthorizationService

        # Mock admin membership
        mock_membership = MagicMock()
        mock_membership.role = "admin"
        mock_db.query.return_value.filter.return_value.first.return_value = mock_membership

        service = TeamAuthorizationService(mock_db)

        assert service.check_team_access("team-id", mock_user, "member") is True
        assert service.check_team_access("team-id", mock_user, "admin") is True
        assert service.check_team_access("team-id", mock_user, "owner") is False

    def test_check_team_access_owner(self, mock_db, mock_user):
        """Test access check for owner role."""
        from app.auth import TeamAuthorizationService

        # Mock owner membership
        mock_membership = MagicMock()
        mock_membership.role = "owner"
        mock_db.query.return_value.filter.return_value.first.return_value = mock_membership

        service = TeamAuthorizationService(mock_db)

        assert service.check_team_access("team-id", mock_user, "member") is True
        assert service.check_team_access("team-id", mock_user, "admin") is True
        assert service.check_team_access("team-id", mock_user, "owner") is True

    def test_can_create_project(self, mock_db, mock_user):
        """Test can_create_project requires admin role."""
        from app.auth import TeamAuthorizationService

        service = TeamAuthorizationService(mock_db)

        # Member cannot create projects
        mock_membership = MagicMock()
        mock_membership.role = "member"
        mock_db.query.return_value.filter.return_value.first.return_value = mock_membership
        assert service.can_create_project("team-id", mock_user) is False

        # Admin can create projects
        mock_membership.role = "admin"
        assert service.can_create_project("team-id", mock_user) is True

        # Owner can create projects
        mock_membership.role = "owner"
        assert service.can_create_project("team-id", mock_user) is True

    def test_can_manage_members(self, mock_db, mock_user):
        """Test can_manage_members requires admin role."""
        from app.auth import TeamAuthorizationService

        service = TeamAuthorizationService(mock_db)

        # Member cannot manage members
        mock_membership = MagicMock()
        mock_membership.role = "member"
        mock_db.query.return_value.filter.return_value.first.return_value = mock_membership
        assert service.can_manage_members("team-id", mock_user) is False

        # Admin can manage members
        mock_membership.role = "admin"
        assert service.can_manage_members("team-id", mock_user) is True

    def test_can_delete_team(self, mock_db, mock_user):
        """Test can_delete_team requires owner role."""
        from app.auth import TeamAuthorizationService

        service = TeamAuthorizationService(mock_db)

        # Member cannot delete team
        mock_membership = MagicMock()
        mock_membership.role = "member"
        mock_db.query.return_value.filter.return_value.first.return_value = mock_membership
        assert service.can_delete_team("team-id", mock_user) is False

        # Admin cannot delete team
        mock_membership.role = "admin"
        assert service.can_delete_team("team-id", mock_user) is False

        # Only owner can delete team
        mock_membership.role = "owner"
        assert service.can_delete_team("team-id", mock_user) is True
@@ -26,6 +26,8 @@ services:
      - ORCHARD_REDIS_PORT=6379
      # Higher rate limit for local development/testing
      - ORCHARD_LOGIN_RATE_LIMIT=1000/minute
      # Admin password - set in .env file or environment (see .env.example)
      - ORCHARD_ADMIN_PASSWORD=${ORCHARD_ADMIN_PASSWORD:-}
    depends_on:
      postgres:
        condition: service_healthy

294
docs/integrity-verification.md
Normal file
@@ -0,0 +1,294 @@
# Integrity Verification

Orchard uses content-addressable storage with SHA256 hashing to ensure artifact integrity. This document describes how integrity verification works and how to use it.

## How It Works

### Content-Addressable Storage

Orchard stores artifacts using their SHA256 hash as the unique identifier. This provides several benefits:

1. **Automatic deduplication**: Identical content is stored only once
2. **Built-in integrity**: The artifact ID *is* the content hash
3. **Tamper detection**: Any modification changes the hash, making corruption detectable

When you upload a file:

1. Orchard computes the SHA256 hash of the content
2. The hash becomes the artifact ID (64-character hex string)
3. The file is stored in S3 at `fruits/{hash[0:2]}/{hash[2:4]}/{hash}`
4. The hash and metadata are recorded in the database

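The key derivation can be sketched in a few lines of Python (the helper name `s3_key_for` is illustrative, not part of Orchard's API):

```python
import hashlib

def s3_key_for(content: bytes) -> str:
    """Derive the content-addressed S3 key: fruits/{hash[0:2]}/{hash[2:4]}/{hash}."""
    digest = hashlib.sha256(content).hexdigest()  # 64-char lowercase hex artifact ID
    return f"fruits/{digest[0:2]}/{digest[2:4]}/{digest}"

# Identical content always yields the same key, which is what makes
# deduplication automatic:
assert s3_key_for(b"same bytes") == s3_key_for(b"same bytes")
```

The two-level prefix (`{hash[0:2]}/{hash[2:4]}`) simply spreads objects across key prefixes; the full hash remains the identifier.
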
### Hash Format

- Algorithm: SHA256
- Format: 64-character lowercase hexadecimal string
- Example: `dffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f`

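If you need to validate that a string is a well-formed artifact ID before using it, a simple format check suffices (a sketch, not an Orchard API):

```python
import re

ARTIFACT_ID_RE = re.compile(r"^[0-9a-f]{64}$")

def is_valid_artifact_id(value: str) -> bool:
    """True if value looks like a SHA256 artifact ID (64 lowercase hex chars)."""
    return bool(ARTIFACT_ID_RE.fullmatch(value))

assert is_valid_artifact_id("dffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f")
assert not is_valid_artifact_id("DFFD6021")  # uppercase and short forms are rejected
```
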
## Client-Side Verification

### Before Upload

Compute the hash locally before uploading to verify the server received your content correctly:

```python
import hashlib

import requests

def compute_sha256(content: bytes) -> str:
    return hashlib.sha256(content).hexdigest()

# Compute hash before upload
with open("myfile.tar.gz", "rb") as f:
    content = f.read()
local_hash = compute_sha256(content)

# Upload the file
response = requests.post(
    f"{base_url}/api/v1/project/{project}/{package}/upload",
    files={"file": ("myfile.tar.gz", content)},
)
result = response.json()

# Verify the server computed the same hash
assert result["artifact_id"] == local_hash, "Hash mismatch!"
```

### Providing Expected Hash on Upload

You can provide the expected hash in the upload request. The server will reject the upload if the computed hash doesn't match:

```python
response = requests.post(
    f"{base_url}/api/v1/project/{project}/{package}/upload",
    files={"file": ("myfile.tar.gz", content)},
    headers={"X-Checksum-SHA256": local_hash},
)

# The server returns 422 if the hash doesn't match
if response.status_code == 422:
    print("Checksum mismatch - upload rejected")
```

### After Download

Verify downloaded content matches the expected hash using response headers:

```python
response = requests.get(
    f"{base_url}/api/v1/project/{project}/{package}/+/{tag}",
    params={"mode": "proxy"},
)

# Get expected hash from header
expected_hash = response.headers.get("X-Checksum-SHA256")

# Compute hash of downloaded content
actual_hash = compute_sha256(response.content)

# Verify
if actual_hash != expected_hash:
    raise Exception(f"Integrity check failed! Expected {expected_hash}, got {actual_hash}")
```

### Response Headers for Verification

Download responses include multiple headers for verification:

| Header | Format | Description |
|--------|--------|-------------|
| `X-Checksum-SHA256` | Hex string | SHA256 hash (64 chars) |
| `ETag` | `"<hash>"` | SHA256 hash in quotes |
| `Digest` | `sha-256=<base64>` | RFC 3230 format (base64-encoded) |
| `Content-Length` | Integer | File size in bytes |

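Note that `Digest` carries the base64 of the raw digest bytes, while `X-Checksum-SHA256` and `ETag` carry the hex form. A minimal sketch of checking whichever headers the server sent:

```python
import base64
import hashlib

def verify_headers(content: bytes, headers: dict) -> None:
    """Raise if any digest header disagrees with the downloaded bytes."""
    digest = hashlib.sha256(content)
    expected = {
        "X-Checksum-SHA256": digest.hexdigest(),
        "ETag": f'"{digest.hexdigest()}"',
        "Digest": f"sha-256={base64.b64encode(digest.digest()).decode()}",
    }
    for name, want in expected.items():
        got = headers.get(name)
        # Only compare headers the server actually sent
        if got is not None and got != want:
            raise ValueError(f"{name} mismatch: expected {want}, got {got}")

# Usage after a download:
#   verify_headers(response.content, response.headers)
```
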
### Server-Side Verification on Download

Request server-side verification during download:

```bash
# Pre-verification: Server verifies before streaming (returns 500 if corrupt)
curl "${base_url}/api/v1/project/${project}/${package}/+/${tag}?mode=proxy&verify=true&verify_mode=pre"

# Stream verification: Server verifies while streaming (logs error if corrupt)
curl "${base_url}/api/v1/project/${project}/${package}/+/${tag}?mode=proxy&verify=true&verify_mode=stream"
```

The `X-Verified` header indicates whether server-side verification was performed:

- `X-Verified: true` - Content was verified by the server

## Server-Side Consistency Check

### Consistency Check Endpoint

Administrators can run a consistency check to verify all stored artifacts:

```bash
curl "${base_url}/api/v1/admin/consistency-check"
```

Response:

```json
{
  "total_artifacts_checked": 1234,
  "healthy": true,
  "orphaned_s3_objects": 0,
  "missing_s3_objects": 0,
  "size_mismatches": 0,
  "orphaned_s3_keys": [],
  "missing_s3_keys": [],
  "size_mismatch_artifacts": []
}
```

### What the Check Verifies

1. **Missing S3 objects**: Database records with no corresponding S3 object
2. **Orphaned S3 objects**: S3 objects with no database record
3. **Size mismatches**: S3 object size doesn't match the database record

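Conceptually the check is set arithmetic between the database and the bucket. A simplified sketch (the real implementation is not shown here; assume both inputs map S3 key to size in bytes):

```python
def consistency_report(db_artifacts: dict[str, int], s3_objects: dict[str, int]) -> dict:
    """Compare database records against S3 objects and summarize discrepancies."""
    missing = sorted(set(db_artifacts) - set(s3_objects))   # in DB, not in S3
    orphaned = sorted(set(s3_objects) - set(db_artifacts))  # in S3, not in DB
    mismatched = sorted(
        key for key in set(db_artifacts) & set(s3_objects)
        if db_artifacts[key] != s3_objects[key]              # sizes disagree
    )
    return {
        "healthy": not (missing or orphaned or mismatched),
        "missing_s3_keys": missing,
        "orphaned_s3_keys": orphaned,
        "size_mismatch_artifacts": mismatched,
    }
```
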
### Running Consistency Checks

**Manual check:**

```bash
# Check all artifacts
curl "${base_url}/api/v1/admin/consistency-check"

# Limit results (for large deployments)
curl "${base_url}/api/v1/admin/consistency-check?limit=100"
```

**Scheduled checks (recommended):**

Set up a cron job or Kubernetes CronJob to run periodic checks:

```yaml
# Kubernetes CronJob example
apiVersion: batch/v1
kind: CronJob
metadata:
  name: orchard-consistency-check
spec:
  schedule: "0 2 * * *"  # Daily at 2 AM
  jobTemplate:
    spec:
      template:
        spec:
          containers:
            - name: check
              image: curlimages/curl
              command:
                - /bin/sh
                - -c
                - |
                  response=$(curl -s "${ORCHARD_URL}/api/v1/admin/consistency-check")
                  # curlimages/curl does not ship jq, so check the JSON with grep
                  # (assumes the response formats the field as '"healthy": true')
                  if ! echo "$response" | grep -q '"healthy": true'; then
                    echo "ALERT: Consistency check failed!"
                    echo "$response"
                    exit 1
                  fi
                  echo "Consistency check passed"
          restartPolicy: OnFailure
```

## Recovery Procedures

### Corrupted Artifact (Size Mismatch)

If the consistency check reports size mismatches:

1. **Identify affected artifacts:**
   ```bash
   curl "${base_url}/api/v1/admin/consistency-check" | jq '.size_mismatch_artifacts'
   ```

2. **Check whether the artifact can be re-uploaded:**
   - If the original content is available, delete the corrupted artifact and re-upload
   - The same content will produce the same artifact ID

3. **If the original content is lost:**
   - The artifact data is corrupted and cannot be recovered
   - Delete the artifact record and notify affected users
   - Consider restoring from backup if available

### Missing S3 Object

If database records exist but S3 objects are missing:

1. **Identify affected artifacts:**
   ```bash
   curl "${base_url}/api/v1/admin/consistency-check" | jq '.missing_s3_keys'
   ```

2. **Check the S3 bucket:**
   - Verify the S3 bucket exists and is accessible
   - Check S3 access logs for deletion events
   - Check if objects were moved or lifecycle-deleted

3. **Recovery options:**
   - Restore from S3 versioning (if enabled)
   - Restore from backup
   - Re-upload the original content (if available)
   - Delete orphaned database records

### Orphaned S3 Objects

If S3 objects exist without database records:

1. **Identify orphaned objects:**
   ```bash
   curl "${base_url}/api/v1/admin/consistency-check" | jq '.orphaned_s3_keys'
   ```

2. **Investigate the cause:**
   - Upload interrupted before the database commit?
   - Database record deleted but S3 cleanup failed?

3. **Resolution:**
   - If the content is needed, create the database record manually
   - If the content is not needed, delete the S3 object to reclaim storage

### Preventive Measures

1. **Enable S3 versioning** to recover from accidental deletions
2. **Regular backups** of both the database and the S3 bucket
3. **Scheduled consistency checks** to detect issues early
4. **Monitoring and alerting** on consistency check failures
5. **Audit logging** to track all artifact operations

## Verification in CI/CD

### Verifying Artifacts in Pipelines

```bash
#!/bin/bash
# Download and verify an artifact in a CI pipeline

ARTIFACT_URL="${ORCHARD_URL}/api/v1/project/${PROJECT}/${PACKAGE}/+/${TAG}"

# Download, capturing the response headers for verification
response=$(curl -s -D - "${ARTIFACT_URL}?mode=proxy" -o artifact.tar.gz)
expected_hash=$(echo "$response" | grep -i "X-Checksum-SHA256" | cut -d: -f2 | tr -d ' \r')

# Compute the actual hash
actual_hash=$(sha256sum artifact.tar.gz | cut -d' ' -f1)

# Verify
if [ "$actual_hash" != "$expected_hash" ]; then
  echo "ERROR: Integrity check failed!"
  echo "Expected: $expected_hash"
  echo "Actual:   $actual_hash"
  exit 1
fi

echo "Integrity verified: $actual_hash"
```

### Using Server-Side Verification

For critical deployments, use server-side pre-verification:

```bash
# Server verifies before streaming - returns 500 if corrupt
curl -f "${ARTIFACT_URL}?mode=proxy&verify=true&verify_mode=pre" -o artifact.tar.gz
```

This ensures the artifact is verified before any bytes are streamed to your pipeline.

@@ -1,5 +1,6 @@
import { Routes, Route, Navigate, useLocation } from 'react-router-dom';
import { AuthProvider, useAuth } from './contexts/AuthContext';
import { TeamProvider } from './contexts/TeamContext';
import Layout from './components/Layout';
import Home from './pages/Home';
import ProjectPage from './pages/ProjectPage';
@@ -10,6 +11,11 @@ import ChangePasswordPage from './pages/ChangePasswordPage';
import APIKeysPage from './pages/APIKeysPage';
import AdminUsersPage from './pages/AdminUsersPage';
import AdminOIDCPage from './pages/AdminOIDCPage';
import ProjectSettingsPage from './pages/ProjectSettingsPage';
import TeamsPage from './pages/TeamsPage';
import TeamDashboardPage from './pages/TeamDashboardPage';
import TeamSettingsPage from './pages/TeamSettingsPage';
import TeamMembersPage from './pages/TeamMembersPage';

// Component that checks if user must change password
function RequirePasswordChange({ children }: { children: React.ReactNode }) {
@@ -44,7 +50,12 @@ function AppRoutes() {
        <Route path="/settings/api-keys" element={<APIKeysPage />} />
        <Route path="/admin/users" element={<AdminUsersPage />} />
        <Route path="/admin/oidc" element={<AdminOIDCPage />} />
        <Route path="/teams" element={<TeamsPage />} />
        <Route path="/teams/:slug" element={<TeamDashboardPage />} />
        <Route path="/teams/:slug/settings" element={<TeamSettingsPage />} />
        <Route path="/teams/:slug/members" element={<TeamMembersPage />} />
        <Route path="/project/:projectName" element={<ProjectPage />} />
        <Route path="/project/:projectName/settings" element={<ProjectSettingsPage />} />
        <Route path="/project/:projectName/:packageName" element={<PackagePage />} />
      </Routes>
    </Layout>
@@ -58,7 +69,9 @@ function AppRoutes() {
function App() {
  return (
    <AuthProvider>
      <AppRoutes />
      <TeamProvider>
        <AppRoutes />
      </TeamProvider>
    </AuthProvider>
  );
}

@@ -33,6 +33,15 @@ import {
  OIDCConfigUpdate,
  OIDCStatus,
  PackageVersion,
  ArtifactDependenciesResponse,
  ReverseDependenciesResponse,
  DependencyResolutionResponse,
  TeamDetail,
  TeamMember,
  TeamCreate,
  TeamUpdate,
  TeamMemberCreate,
  TeamMemberUpdate,
} from './types';

const API_BASE = '/api/v1';
@@ -157,7 +166,7 @@ export async function listProjectsSimple(params: ListParams = {}): Promise<Proje
  return data.items;
}

export async function createProject(data: { name: string; description?: string; is_public?: boolean }): Promise<Project> {
export async function createProject(data: { name: string; description?: string; is_public?: boolean; team_id?: string }): Promise<Project> {
  const response = await fetch(`${API_BASE}/projects`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
@@ -171,6 +180,30 @@ export async function getProject(name: string): Promise<Project> {
  return handleResponse<Project>(response);
}

export async function updateProject(
  projectName: string,
  data: { description?: string; is_public?: boolean }
): Promise<Project> {
  const response = await fetch(`${API_BASE}/projects/${projectName}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(data),
    credentials: 'include',
  });
  return handleResponse<Project>(response);
}

export async function deleteProject(projectName: string): Promise<void> {
  const response = await fetch(`${API_BASE}/projects/${projectName}`, {
    method: 'DELETE',
    credentials: 'include',
  });
  if (!response.ok) {
    const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
    throw new Error(error.detail || `HTTP ${response.status}`);
  }
}

// Package API
export async function listPackages(projectName: string, params: PackageListParams = {}): Promise<PaginatedResponse<Package>> {
  const query = buildQueryString(params as Record<string, unknown>);
@@ -488,3 +521,164 @@ export async function deleteVersion(
    throw new Error(error.detail || `HTTP ${response.status}`);
  }
}

// Dependency API
export async function getArtifactDependencies(artifactId: string): Promise<ArtifactDependenciesResponse> {
  const response = await fetch(`${API_BASE}/artifact/${artifactId}/dependencies`);
  return handleResponse<ArtifactDependenciesResponse>(response);
}

export async function getDependenciesByRef(
  projectName: string,
  packageName: string,
  ref: string
): Promise<ArtifactDependenciesResponse> {
  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/+/${ref}/dependencies`);
  return handleResponse<ArtifactDependenciesResponse>(response);
}

export async function getReverseDependencies(
  projectName: string,
  packageName: string,
  params: { page?: number; limit?: number } = {}
): Promise<ReverseDependenciesResponse> {
  const query = buildQueryString(params as Record<string, unknown>);
  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/reverse-dependencies${query}`);
  return handleResponse<ReverseDependenciesResponse>(response);
}

export async function resolveDependencies(
  projectName: string,
  packageName: string,
  ref: string
): Promise<DependencyResolutionResponse> {
  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/+/${ref}/resolve`);
  return handleResponse<DependencyResolutionResponse>(response);
}

export async function getEnsureFile(
  projectName: string,
  packageName: string,
  ref: string
): Promise<string> {
  const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/+/${ref}/ensure`);
  if (!response.ok) {
    const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
    throw new ApiError(error.detail || `HTTP ${response.status}`, response.status);
  }
  return response.text();
}

// Team API
export async function listTeams(params: ListParams = {}): Promise<PaginatedResponse<TeamDetail>> {
  const query = buildQueryString(params as Record<string, unknown>);
  const response = await fetch(`${API_BASE}/teams${query}`, {
    credentials: 'include',
  });
  return handleResponse<PaginatedResponse<TeamDetail>>(response);
}

export async function createTeam(data: TeamCreate): Promise<TeamDetail> {
  const response = await fetch(`${API_BASE}/teams`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(data),
    credentials: 'include',
  });
  return handleResponse<TeamDetail>(response);
}

export async function getTeam(slug: string): Promise<TeamDetail> {
  const response = await fetch(`${API_BASE}/teams/${slug}`, {
    credentials: 'include',
  });
  return handleResponse<TeamDetail>(response);
}

export async function updateTeam(slug: string, data: TeamUpdate): Promise<TeamDetail> {
  const response = await fetch(`${API_BASE}/teams/${slug}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(data),
    credentials: 'include',
  });
  return handleResponse<TeamDetail>(response);
}

export async function deleteTeam(slug: string): Promise<void> {
  const response = await fetch(`${API_BASE}/teams/${slug}`, {
    method: 'DELETE',
    credentials: 'include',
  });
  if (!response.ok) {
    const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
    throw new ApiError(error.detail || `HTTP ${response.status}`, response.status);
  }
}

export async function listTeamMembers(slug: string): Promise<TeamMember[]> {
  const response = await fetch(`${API_BASE}/teams/${slug}/members`, {
    credentials: 'include',
  });
  return handleResponse<TeamMember[]>(response);
}

export async function addTeamMember(slug: string, data: TeamMemberCreate): Promise<TeamMember> {
  const response = await fetch(`${API_BASE}/teams/${slug}/members`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(data),
    credentials: 'include',
  });
  return handleResponse<TeamMember>(response);
}

export async function updateTeamMember(
  slug: string,
  username: string,
  data: TeamMemberUpdate
): Promise<TeamMember> {
  const response = await fetch(`${API_BASE}/teams/${slug}/members/${username}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(data),
    credentials: 'include',
  });
  return handleResponse<TeamMember>(response);
}

export async function removeTeamMember(slug: string, username: string): Promise<void> {
  const response = await fetch(`${API_BASE}/teams/${slug}/members/${username}`, {
    method: 'DELETE',
    credentials: 'include',
  });
  if (!response.ok) {
    const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
    throw new ApiError(error.detail || `HTTP ${response.status}`, response.status);
  }
}

export async function listTeamProjects(
  slug: string,
  params: ProjectListParams = {}
): Promise<PaginatedResponse<Project>> {
  const query = buildQueryString(params as Record<string, unknown>);
  const response = await fetch(`${API_BASE}/teams/${slug}/projects${query}`, {
    credentials: 'include',
  });
  return handleResponse<PaginatedResponse<Project>>(response);
}

// User search (for autocomplete)
export interface UserSearchResult {
  id: string;
  username: string;
  is_admin: boolean;
}

export async function searchUsers(query: string, limit: number = 10): Promise<UserSearchResult[]> {
  const response = await fetch(`${API_BASE}/users/search?q=${encodeURIComponent(query)}&limit=${limit}`, {
    credentials: 'include',
  });
  return handleResponse<UserSearchResult[]>(response);
}

@@ -114,3 +114,32 @@
  font-size: 0.875rem;
  color: var(--text-primary);
}

/* Access source styling */
.access-source {
  display: inline-block;
  padding: 0.2rem 0.4rem;
  border-radius: 4px;
  font-size: 0.75rem;
  font-weight: 500;
}

.access-source--explicit {
  background: var(--bg-tertiary);
  color: var(--text-secondary);
}

.access-source--team {
  background: var(--color-info-bg, #e3f2fd);
  color: var(--color-info, #1976d2);
}

/* Team access row styling */
.team-access-row {
  background: var(--bg-secondary, #fafafa);
}

.team-access-row td.actions .text-muted {
  font-size: 0.8125rem;
  font-style: italic;
}

@@ -208,85 +208,104 @@ export function AccessManagement({ projectName }: AccessManagementProps) {
          <tr>
            <th>User</th>
            <th>Access Level</th>
            <th>Source</th>
            <th>Granted</th>
            <th>Expires</th>
            <th>Actions</th>
          </tr>
        </thead>
        <tbody>
          {permissions.map((p) => (
            <tr key={p.id}>
              <td>{p.user_id}</td>
              <td>
                {editingUser === p.user_id ? (
                  <select
                    value={editLevel}
                    onChange={(e) => setEditLevel(e.target.value as AccessLevel)}
                    disabled={submitting}
                  >
                    <option value="read">Read</option>
                    <option value="write">Write</option>
                    <option value="admin">Admin</option>
                  </select>
                ) : (
                  <span className={`access-badge access-badge--${p.level}`}>
                    {p.level}
                  </span>
                )}
              </td>
              <td>{new Date(p.created_at).toLocaleDateString()}</td>
              <td>
                {editingUser === p.user_id ? (
                  <input
                    type="date"
                    value={editExpiresAt}
                    onChange={(e) => setEditExpiresAt(e.target.value)}
                    disabled={submitting}
                    min={new Date().toISOString().split('T')[0]}
                  />
                ) : (
                  formatExpiration(p.expires_at)
                )}
              </td>
              <td className="actions">
                {editingUser === p.user_id ? (
                  <>
                    <button
                      className="btn btn-sm btn-primary"
                      onClick={() => handleUpdate(p.user_id)}
          {permissions.map((p) => {
            const isTeamBased = p.source === 'team';
            return (
              <tr key={p.id} className={isTeamBased ? 'team-access-row' : ''}>
                <td>{p.user_id}</td>
                <td>
                  {editingUser === p.user_id && !isTeamBased ? (
                    <select
                      value={editLevel}
                      onChange={(e) => setEditLevel(e.target.value as AccessLevel)}
                      disabled={submitting}
                    >
                      Save
                    </button>
                    <button
                      className="btn btn-sm"
                      onClick={cancelEdit}
                      <option value="read">Read</option>
                      <option value="write">Write</option>
                      <option value="admin">Admin</option>
                    </select>
                  ) : (
                    <span className={`access-badge access-badge--${p.level}`}>
                      {p.level}
                    </span>
                  )}
                </td>
                <td>
                  {isTeamBased ? (
                    <span className="access-source access-source--team" title={`Team role: ${p.team_role}`}>
                      Team: {p.team_slug}
                    </span>
                  ) : (
                    <span className="access-source access-source--explicit">
                      Explicit
                    </span>
                  )}
                </td>
                <td>{new Date(p.created_at).toLocaleDateString()}</td>
                <td>
                  {editingUser === p.user_id && !isTeamBased ? (
                    <input
                      type="date"
                      value={editExpiresAt}
                      onChange={(e) => setEditExpiresAt(e.target.value)}
                      disabled={submitting}
                    >
                    Cancel
                    </button>
                  </>
                ) : (
                  <>
                    <button
                      className="btn btn-sm"
                      onClick={() => startEdit(p)}
                      disabled={submitting}
                    >
                      Edit
                    </button>
                    <button
                      className="btn btn-sm btn-danger"
                      onClick={() => handleRevoke(p.user_id)}
                      disabled={submitting}
                    >
                      Revoke
                    </button>
                  </>
                )}
              </td>
            </tr>
          ))}
                      min={new Date().toISOString().split('T')[0]}
                    />
                  ) : (
                    formatExpiration(p.expires_at)
                  )}
                </td>
                <td className="actions">
                  {isTeamBased ? (
                    <span className="text-muted" title="Manage access via team settings">
                      Via team
                    </span>
                  ) : editingUser === p.user_id ? (
                    <>
                      <button
                        className="btn btn-sm btn-primary"
                        onClick={() => handleUpdate(p.user_id)}
                        disabled={submitting}
                      >
                        Save
                      </button>
                      <button
                        className="btn btn-sm"
                        onClick={cancelEdit}
                        disabled={submitting}
                      >
                        Cancel
                      </button>
                    </>
                  ) : (
                    <>
                      <button
                        className="btn btn-sm"
                        onClick={() => startEdit(p)}
                        disabled={submitting}
                      >
                        Edit
                      </button>
                      <button
                        className="btn btn-sm btn-danger"
                        onClick={() => handleRevoke(p.user_id)}
                        disabled={submitting}
                      >
                        Revoke
                      </button>
                    </>
                  )}
                </td>
              </tr>
            );
          })}
        </tbody>
      </table>
    )}

338
frontend/src/components/DependencyGraph.css
Normal file
@@ -0,0 +1,338 @@
/* Dependency Graph Modal */
.dependency-graph-modal {
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background: rgba(0, 0, 0, 0.8);
  display: flex;
  align-items: center;
  justify-content: center;
  z-index: 1000;
  padding: 24px;
}

.dependency-graph-content {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  width: 100%;
  max-width: 1200px;
  height: 80vh;
  display: flex;
  flex-direction: column;
  overflow: hidden;
}

.dependency-graph-header {
  display: flex;
  align-items: center;
  gap: 16px;
  padding: 16px 20px;
  border-bottom: 1px solid var(--border-primary);
  background: var(--bg-tertiary);
}

.dependency-graph-header h2 {
  margin: 0;
  font-size: 1.125rem;
  font-weight: 600;
  color: var(--text-primary);
}

.dependency-graph-info {
  display: flex;
  align-items: center;
  gap: 12px;
  flex: 1;
  font-size: 0.875rem;
  color: var(--text-secondary);
}

.graph-stats {
  color: var(--text-muted);
  font-size: 0.8125rem;
}

.close-btn {
  background: transparent;
  border: none;
  color: var(--text-secondary);
  cursor: pointer;
  padding: 4px;
  border-radius: var(--radius-sm);
  display: flex;
  align-items: center;
  justify-content: center;
}

.close-btn:hover {
  background: var(--bg-hover);
  color: var(--text-primary);
}

.dependency-graph-toolbar {
  display: flex;
  align-items: center;
  gap: 8px;
  padding: 12px 20px;
  border-bottom: 1px solid var(--border-primary);
  background: var(--bg-secondary);
}

.zoom-level {
  margin-left: auto;
  font-size: 0.8125rem;
  color: var(--text-muted);
  font-family: 'JetBrains Mono', monospace;
}

.dependency-graph-container {
  flex: 1;
  overflow: hidden;
  position: relative;
  background:
    linear-gradient(90deg, var(--border-primary) 1px, transparent 1px),
    linear-gradient(var(--border-primary) 1px, transparent 1px);
  background-size: 20px 20px;
  background-position: center center;
}

.graph-canvas {
  padding: 40px;
  min-width: 100%;
  min-height: 100%;
  transform-origin: center center;
  transition: transform 0.1s ease-out;
}

/* Graph Nodes */
.graph-node-container {
  display: flex;
  flex-direction: column;
  align-items: flex-start;
}

.graph-node {
  background: var(--bg-tertiary);
  border: 2px solid var(--border-primary);
  border-radius: var(--radius-md);
  padding: 12px 16px;
  min-width: 200px;
  cursor: pointer;
  transition: all var(--transition-fast);
  position: relative;
}

.graph-node:hover {
  border-color: var(--accent-primary);
  box-shadow: 0 4px 12px rgba(16, 185, 129, 0.2);
}

.graph-node--root {
  background: linear-gradient(135deg, rgba(16, 185, 129, 0.15) 0%, rgba(5, 150, 105, 0.15) 100%);
  border-color: var(--accent-primary);
}

.graph-node--hovered {
  transform: scale(1.02);
}

.graph-node__header {
  display: flex;
  align-items: center;
  gap: 8px;
  margin-bottom: 4px;
}

.graph-node__name {
  font-weight: 600;
  color: var(--accent-primary);
  font-family: 'JetBrains Mono', monospace;
  font-size: 0.875rem;
}

.graph-node__toggle {
  background: var(--bg-hover);
  border: 1px solid var(--border-primary);
  border-radius: 4px;
  width: 20px;
  height: 20px;
  display: flex;
  align-items: center;
  justify-content: center;
  cursor: pointer;
  font-size: 0.875rem;
  color: var(--text-secondary);
  font-weight: 600;
  margin-left: auto;
}

.graph-node__toggle:hover {
  background: var(--bg-tertiary);
  color: var(--text-primary);
}

.graph-node__details {
  display: flex;
  align-items: center;
  gap: 12px;
  font-size: 0.75rem;
  color: var(--text-muted);
}

.graph-node__version {
  font-family: 'JetBrains Mono', monospace;
  color: var(--text-secondary);
}

.graph-node__size {
  color: var(--text-muted);
}

/* Graph Children / Tree Structure */
.graph-children {
  display: flex;
  padding-left: 24px;
  margin-top: 8px;
  position: relative;
}

.graph-connector {
  position: absolute;
  left: 12px;
  top: 0;
  bottom: 50%;
  width: 12px;
  border-left: 2px solid var(--border-primary);
  border-bottom: 2px solid var(--border-primary);
  border-bottom-left-radius: 8px;
}

.graph-children-list {
  display: flex;
  flex-direction: column;
  gap: 8px;
  position: relative;
}

.graph-children-list::before {
  content: '';
  position: absolute;
  left: -12px;
  top: 20px;
  bottom: 20px;
  border-left: 2px solid var(--border-primary);
}

.graph-children-list > .graph-node-container {
  position: relative;
}

.graph-children-list > .graph-node-container::before {
  content: '';
  position: absolute;
  left: -12px;
  top: 20px;
  width: 12px;
  border-top: 2px solid var(--border-primary);
}

/* Loading, Error, Empty States */
.graph-loading,
.graph-error,
.graph-empty {
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  height: 100%;
  gap: 16px;
  color: var(--text-muted);
}

.graph-loading .spinner {
  width: 32px;
  height: 32px;
  border: 3px solid var(--border-primary);
  border-top-color: var(--accent-primary);
  border-radius: 50%;
  animation: spin 1s linear infinite;
}

@keyframes spin {
  to { transform: rotate(360deg); }
}

.graph-error {
  color: var(--error-color, #ef4444);
}

.graph-error svg {
  opacity: 0.6;
}

.graph-error p {
  max-width: 400px;
  text-align: center;
  line-height: 1.5;
}

/* Tooltip */
.graph-tooltip {
  position: fixed;
  bottom: 24px;
  left: 50%;
  transform: translateX(-50%);
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  padding: 12px 16px;
  font-size: 0.8125rem;
  box-shadow: 0 8px 24px rgba(0, 0, 0, 0.4);
  z-index: 1001;
}

.graph-tooltip strong {
  display: block;
  color: var(--accent-primary);
  font-family: 'JetBrains Mono', monospace;
  margin-bottom: 4px;
}

.graph-tooltip div {
  color: var(--text-secondary);
  margin-top: 2px;
}

.tooltip-hint {
  margin-top: 8px;
  padding-top: 8px;
  border-top: 1px solid var(--border-primary);
  color: var(--text-muted);
  font-size: 0.75rem;
}

/* Responsive */
@media (max-width: 768px) {
  .dependency-graph-modal {
    padding: 0;
  }

  .dependency-graph-content {
    height: 100vh;
    border-radius: 0;
    max-width: none;
  }

  .dependency-graph-header {
    flex-wrap: wrap;
  }

  .dependency-graph-info {
    flex-basis: 100%;
    order: 3;
    margin-top: 8px;
  }
}
323
frontend/src/components/DependencyGraph.tsx
Normal file
@@ -0,0 +1,323 @@
import { useState, useEffect, useCallback, useRef } from 'react';
import { useNavigate } from 'react-router-dom';
import { ResolvedArtifact, DependencyResolutionResponse, Dependency } from '../types';
import { resolveDependencies, getArtifactDependencies } from '../api';
import './DependencyGraph.css';

interface DependencyGraphProps {
  projectName: string;
  packageName: string;
  tagName: string;
  onClose: () => void;
}

interface GraphNode {
  id: string;
  project: string;
  package: string;
  version: string | null;
  size: number;
  depth: number;
  children: GraphNode[];
  isRoot?: boolean;
}

function formatBytes(bytes: number): string {
  if (bytes === 0) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
}

function DependencyGraph({ projectName, packageName, tagName, onClose }: DependencyGraphProps) {
  const navigate = useNavigate();
  const containerRef = useRef<HTMLDivElement>(null);

  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const [resolution, setResolution] = useState<DependencyResolutionResponse | null>(null);
  const [graphRoot, setGraphRoot] = useState<GraphNode | null>(null);
  const [hoveredNode, setHoveredNode] = useState<GraphNode | null>(null);
  const [zoom, setZoom] = useState(1);
  const [pan, setPan] = useState({ x: 0, y: 0 });
  const [isDragging, setIsDragging] = useState(false);
  const [dragStart, setDragStart] = useState({ x: 0, y: 0 });
  const [collapsedNodes, setCollapsedNodes] = useState<Set<string>>(new Set());

  // Build graph structure from resolution data
  const buildGraph = useCallback(async (resolutionData: DependencyResolutionResponse) => {
    const artifactMap = new Map<string, ResolvedArtifact>();
    resolutionData.resolved.forEach(artifact => {
      artifactMap.set(artifact.artifact_id, artifact);
    });

    // Fetch dependencies for each artifact to build the tree
    const depsMap = new Map<string, Dependency[]>();

    for (const artifact of resolutionData.resolved) {
      try {
        const deps = await getArtifactDependencies(artifact.artifact_id);
        depsMap.set(artifact.artifact_id, deps.dependencies);
      } catch {
        depsMap.set(artifact.artifact_id, []);
      }
    }

    // Find the root artifact (the requested one)
    const rootArtifact = resolutionData.resolved.find(
      a => a.project === resolutionData.requested.project &&
        a.package === resolutionData.requested.package
    );

    if (!rootArtifact) {
      return null;
    }

    // Build tree recursively
    const visited = new Set<string>();

    const buildNode = (artifact: ResolvedArtifact, depth: number): GraphNode => {
      const nodeId = `${artifact.project}/${artifact.package}`;
      visited.add(artifact.artifact_id);

      const deps = depsMap.get(artifact.artifact_id) || [];
      const children: GraphNode[] = [];

      for (const dep of deps) {
        // Find the resolved artifact for this dependency
        const childArtifact = resolutionData.resolved.find(
          a => a.project === dep.project && a.package === dep.package
        );

        if (childArtifact && !visited.has(childArtifact.artifact_id)) {
          children.push(buildNode(childArtifact, depth + 1));
        }
      }

      return {
        id: nodeId,
        project: artifact.project,
        package: artifact.package,
        version: artifact.version || artifact.tag,
        size: artifact.size,
        depth,
        children,
        isRoot: depth === 0,
      };
    };

    return buildNode(rootArtifact, 0);
  }, []);

  useEffect(() => {
    async function loadData() {
      setLoading(true);
      setError(null);

      try {
        const result = await resolveDependencies(projectName, packageName, tagName);
        setResolution(result);

        const graph = await buildGraph(result);
        setGraphRoot(graph);
      } catch (err) {
        if (err instanceof Error) {
          // Check if it's a resolution error
          try {
            const errorData = JSON.parse(err.message);
            if (errorData.error === 'circular_dependency') {
              setError(`Circular dependency detected: ${errorData.cycle?.join(' → ')}`);
            } else if (errorData.error === 'dependency_conflict') {
              setError(`Dependency conflict: ${errorData.message}`);
            } else {
              setError(err.message);
            }
          } catch {
            setError(err.message);
          }
        } else {
          setError('Failed to load dependency graph');
        }
      } finally {
        setLoading(false);
      }
    }

    loadData();
  }, [projectName, packageName, tagName, buildGraph]);

  const handleNodeClick = (node: GraphNode) => {
    navigate(`/project/${node.project}/${node.package}`);
    onClose();
  };

  const handleNodeToggle = (node: GraphNode, e: React.MouseEvent) => {
    e.stopPropagation();
    setCollapsedNodes(prev => {
      const next = new Set(prev);
      if (next.has(node.id)) {
        next.delete(node.id);
      } else {
        next.add(node.id);
      }
      return next;
    });
  };

  const handleWheel = (e: React.WheelEvent) => {
    e.preventDefault();
    const delta = e.deltaY > 0 ? -0.1 : 0.1;
    setZoom(z => Math.max(0.25, Math.min(2, z + delta)));
  };

  const handleMouseDown = (e: React.MouseEvent) => {
    if (e.target === containerRef.current || (e.target as HTMLElement).classList.contains('graph-canvas')) {
      setIsDragging(true);
      setDragStart({ x: e.clientX - pan.x, y: e.clientY - pan.y });
    }
  };

  const handleMouseMove = (e: React.MouseEvent) => {
    if (isDragging) {
      setPan({ x: e.clientX - dragStart.x, y: e.clientY - dragStart.y });
    }
  };

  const handleMouseUp = () => {
    setIsDragging(false);
  };

  const resetView = () => {
    setZoom(1);
    setPan({ x: 0, y: 0 });
  };

  const renderNode = (node: GraphNode, index: number = 0): JSX.Element => {
    const isCollapsed = collapsedNodes.has(node.id);
    const hasChildren = node.children.length > 0;

    return (
      <div key={`${node.id}-${index}`} className="graph-node-container">
        <div
          className={`graph-node ${node.isRoot ? 'graph-node--root' : ''} ${hoveredNode?.id === node.id ? 'graph-node--hovered' : ''}`}
          onClick={() => handleNodeClick(node)}
          onMouseEnter={() => setHoveredNode(node)}
          onMouseLeave={() => setHoveredNode(null)}
        >
          <div className="graph-node__header">
            <span className="graph-node__name">{node.project}/{node.package}</span>
            {hasChildren && (
              <button
                className="graph-node__toggle"
                onClick={(e) => handleNodeToggle(node, e)}
                title={isCollapsed ? 'Expand' : 'Collapse'}
              >
                {isCollapsed ? '+' : '-'}
              </button>
            )}
          </div>
          <div className="graph-node__details">
            {node.version && <span className="graph-node__version">@ {node.version}</span>}
            <span className="graph-node__size">{formatBytes(node.size)}</span>
          </div>
        </div>

        {hasChildren && !isCollapsed && (
          <div className="graph-children">
            <div className="graph-connector"></div>
            <div className="graph-children-list">
              {node.children.map((child, i) => renderNode(child, i))}
            </div>
          </div>
        )}
      </div>
    );
  };

  return (
    <div className="dependency-graph-modal" onClick={onClose}>
      <div className="dependency-graph-content" onClick={e => e.stopPropagation()}>
        <div className="dependency-graph-header">
          <h2>Dependency Graph</h2>
          <div className="dependency-graph-info">
            <span>{projectName}/{packageName} @ {tagName}</span>
            {resolution && (
              <span className="graph-stats">
                {resolution.artifact_count} packages • {formatBytes(resolution.total_size)} total
              </span>
            )}
          </div>
          <button className="close-btn" onClick={onClose} title="Close">
            <svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
              <line x1="18" y1="6" x2="6" y2="18"></line>
              <line x1="6" y1="6" x2="18" y2="18"></line>
            </svg>
          </button>
        </div>

        <div className="dependency-graph-toolbar">
          <button className="btn btn-secondary btn-small" onClick={() => setZoom(z => Math.min(2, z + 0.25))}>
            Zoom In
          </button>
          <button className="btn btn-secondary btn-small" onClick={() => setZoom(z => Math.max(0.25, z - 0.25))}>
            Zoom Out
          </button>
          <button className="btn btn-secondary btn-small" onClick={resetView}>
            Reset View
          </button>
          <span className="zoom-level">{Math.round(zoom * 100)}%</span>
        </div>

        <div
          ref={containerRef}
          className="dependency-graph-container"
          onWheel={handleWheel}
          onMouseDown={handleMouseDown}
          onMouseMove={handleMouseMove}
          onMouseUp={handleMouseUp}
          onMouseLeave={handleMouseUp}
        >
          {loading ? (
            <div className="graph-loading">
              <div className="spinner"></div>
              <span>Resolving dependencies...</span>
            </div>
          ) : error ? (
            <div className="graph-error">
              <svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <circle cx="12" cy="12" r="10"></circle>
                <line x1="12" y1="8" x2="12" y2="12"></line>
                <line x1="12" y1="16" x2="12.01" y2="16"></line>
              </svg>
              <p>{error}</p>
            </div>
          ) : graphRoot ? (
            <div
              className="graph-canvas"
              style={{
                transform: `translate(${pan.x}px, ${pan.y}px) scale(${zoom})`,
                cursor: isDragging ? 'grabbing' : 'grab',
              }}
            >
              {renderNode(graphRoot)}
            </div>
          ) : (
            <div className="graph-empty">No dependencies to display</div>
          )}
        </div>

        {hoveredNode && (
          <div className="graph-tooltip">
            <strong>{hoveredNode.project}/{hoveredNode.package}</strong>
            {hoveredNode.version && <div>Version: {hoveredNode.version}</div>}
            <div>Size: {formatBytes(hoveredNode.size)}</div>
            <div className="tooltip-hint">Click to navigate</div>
          </div>
        )}
      </div>
    </div>
  );
}

export default DependencyGraph;
@@ -284,7 +284,11 @@
.footer-brand {
  display: flex;
  align-items: center;
  gap: 12px;
  gap: 8px;
}

.footer-icon {
  color: var(--accent-primary);
}

.footer-logo {
@@ -292,6 +296,10 @@
  color: var(--text-primary);
}

.footer-separator {
  color: var(--text-muted);
}

.footer-tagline {
  color: var(--text-secondary);
  font-size: 0.875rem;

@@ -2,6 +2,8 @@ import { ReactNode, useState, useRef, useEffect } from 'react';
import { Link, NavLink, useLocation, useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { GlobalSearch } from './GlobalSearch';
import { listTeams } from '../api';
import { TeamDetail } from '../types';
import './Layout.css';

interface LayoutProps {
@@ -13,8 +15,22 @@ function Layout({ children }: LayoutProps) {
  const navigate = useNavigate();
  const { user, loading, logout } = useAuth();
  const [showUserMenu, setShowUserMenu] = useState(false);
  const [userTeams, setUserTeams] = useState<TeamDetail[]>([]);
  const menuRef = useRef<HTMLDivElement>(null);

  // Fetch user's teams
  useEffect(() => {
    if (user) {
      listTeams({ limit: 10 }).then(data => {
        setUserTeams(data.items);
      }).catch(() => {
        setUserTeams([]);
      });
    } else {
      setUserTeams([]);
    }
  }, [user]);

  // Close menu when clicking outside
  useEffect(() => {
    function handleClickOutside(event: MouseEvent) {
@@ -77,6 +93,20 @@ function Layout({ children }: LayoutProps) {
            </svg>
            Dashboard
          </Link>
          {user && userTeams.length > 0 && (
            <Link
              to={userTeams.length === 1 ? `/teams/${userTeams[0].slug}` : '/teams'}
              className={location.pathname.startsWith('/teams') ? 'active' : ''}
            >
              <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
                <circle cx="9" cy="7" r="4"/>
                <path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
                <path d="M16 3.13a4 4 0 0 1 0 7.75"/>
              </svg>
              {userTeams.length === 1 ? 'Team' : 'Teams'}
            </Link>
          )}
          <a href="/docs" className="nav-link-muted">
            <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
              <path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"/>
@@ -188,12 +218,21 @@ function Layout({ children }: LayoutProps) {
      <footer className="footer">
        <div className="container footer-content">
          <div className="footer-brand">
            <svg className="footer-icon" width="18" height="18" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
              <path d="M6 14 Q6 8 3 8 Q6 4 6 4 Q6 4 9 8 Q6 8 6 14" fill="currentColor" opacity="0.6"/>
              <rect x="5.25" y="13" width="1.5" height="4" fill="currentColor" opacity="0.6"/>
              <path d="M12 12 Q12 5 8 5 Q12 1 12 1 Q12 1 16 5 Q12 5 12 12" fill="currentColor"/>
              <rect x="11.25" y="11" width="1.5" height="5" fill="currentColor"/>
              <path d="M18 14 Q18 8 15 8 Q18 4 18 4 Q18 4 21 8 Q18 8 18 14" fill="currentColor" opacity="0.6"/>
              <rect x="17.25" y="13" width="1.5" height="4" fill="currentColor" opacity="0.6"/>
              <ellipse cx="12" cy="19" rx="9" ry="1.5" fill="currentColor" opacity="0.3"/>
            </svg>
            <span className="footer-logo">Orchard</span>
            <span className="footer-separator">·</span>
            <span className="footer-tagline">Content-Addressable Storage</span>
          </div>
          <div className="footer-links">
            <a href="/docs">Documentation</a>
            <a href="/api/v1">API</a>
          </div>
        </div>
      </footer>

163
frontend/src/components/TeamSelector.css
Normal file
@@ -0,0 +1,163 @@
.team-selector {
  position: relative;
}

.team-selector-trigger {
  display: flex;
  align-items: center;
  gap: 0.5rem;
  padding: 0.375rem 0.75rem;
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  color: var(--text-primary);
  font-size: 0.875rem;
  cursor: pointer;
  transition: all 0.15s ease;
  min-width: 160px;
}

.team-selector-trigger:hover:not(:disabled) {
  background: var(--bg-tertiary);
  border-color: var(--border-secondary);
}

.team-selector-trigger:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

.team-selector-name {
  flex: 1;
  text-align: left;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

.team-selector-chevron {
  transition: transform 0.15s ease;
  flex-shrink: 0;
}

.team-selector-chevron.open {
  transform: rotate(180deg);
}

.team-selector-dropdown {
  position: absolute;
  top: 100%;
  left: 0;
  right: 0;
  min-width: 240px;
  margin-top: 0.25rem;
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  box-shadow: var(--shadow-lg);
  z-index: 100;
  overflow: hidden;
}

.team-selector-empty {
  padding: 1rem;
  text-align: center;
  color: var(--text-muted);
}

.team-selector-empty p {
  margin: 0 0 0.75rem;
  font-size: 0.875rem;
}

.team-selector-create-link {
  color: var(--accent-primary);
  font-size: 0.875rem;
  text-decoration: none;
}

.team-selector-create-link:hover {
  text-decoration: underline;
}

.team-selector-list {
  list-style: none;
  margin: 0;
  padding: 0.25rem 0;
  max-height: 280px;
  overflow-y: auto;
}

.team-selector-item {
  display: flex;
  align-items: center;
  gap: 0.5rem;
  width: 100%;
  padding: 0.5rem 0.75rem;
  background: none;
  border: none;
  color: var(--text-primary);
  font-size: 0.875rem;
  cursor: pointer;
  text-align: left;
  transition: background 0.1s ease;
}

.team-selector-item:hover {
  background: var(--bg-hover);
}

.team-selector-item.selected {
  background: rgba(16, 185, 129, 0.1);
}

.team-selector-item-info {
  flex: 1;
  min-width: 0;
}

.team-selector-item-name {
  display: block;
  font-weight: 500;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

.team-selector-item-meta {
  display: block;
  font-size: 0.75rem;
  color: var(--text-muted);
}

.team-selector-item-role {
  font-size: 0.75rem;
  text-transform: capitalize;
  flex-shrink: 0;
}

.team-selector-footer {
  display: flex;
  justify-content: space-between;
  padding: 0.5rem 0.75rem;
  border-top: 1px solid var(--border-primary);
  background: var(--bg-tertiary);
}

.team-selector-link {
  font-size: 0.8125rem;
  color: var(--text-muted);
  text-decoration: none;
}

.team-selector-link:hover {
  color: var(--text-primary);
}

.team-selector-link-primary {
  color: var(--accent-primary);
}

.team-selector-link-primary:hover {
  color: var(--accent-primary-hover);
}
141
frontend/src/components/TeamSelector.tsx
Normal file
@@ -0,0 +1,141 @@
import { useState, useRef, useEffect } from 'react';
|
||||
import { Link } from 'react-router-dom';
|
||||
import { useTeam } from '../contexts/TeamContext';
|
||||
import { useAuth } from '../contexts/AuthContext';
|
||||
import { TeamDetail } from '../types';
|
||||
import './TeamSelector.css';
|
||||
|
||||
export function TeamSelector() {
|
||||
const { user } = useAuth();
|
||||
const { teams, currentTeam, loading, setCurrentTeam } = useTeam();
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const dropdownRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
// Close dropdown when clicking outside
|
||||
useEffect(() => {
|
||||
function handleClickOutside(event: MouseEvent) {
|
||||
if (dropdownRef.current && !dropdownRef.current.contains(event.target as Node)) {
|
||||
setIsOpen(false);
|
||||
}
|
||||
}
|
||||
document.addEventListener('mousedown', handleClickOutside);
|
||||
return () => document.removeEventListener('mousedown', handleClickOutside);
|
||||
}, []);
|
||||
|
||||
// Don't show if not authenticated
|
||||
if (!user) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const handleTeamSelect = (team: TeamDetail) => {
|
||||
setCurrentTeam(team);
|
||||
setIsOpen(false);
|
||||
};
|
||||
|
||||
const roleColors: Record<string, string> = {
|
||||
owner: 'var(--color-success)',
|
||||
admin: 'var(--color-primary)',
|
||||
member: 'var(--color-text-muted)',
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="team-selector" ref={dropdownRef}>
|
||||
<button
|
||||
className="team-selector-trigger"
|
||||
onClick={() => setIsOpen(!isOpen)}
|
||||
disabled={loading}
|
||||
aria-expanded={isOpen}
|
||||
aria-haspopup="listbox"
|
||||
>
|
||||
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
||||
<path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
|
||||
<circle cx="9" cy="7" r="4"/>
|
||||
<path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
|
||||
<path d="M16 3.13a4 4 0 0 1 0 7.75"/>
|
||||
</svg>
|
||||
<span className="team-selector-name">
|
||||
{loading ? 'Loading...' : currentTeam?.name || 'Select Team'}
|
||||
</span>
|
||||
<svg
|
||||
className={`team-selector-chevron ${isOpen ? 'open' : ''}`}
|
||||
width="12"
|
||||
height="12"
|
||||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
strokeWidth="2"
|
||||
>
|
||||
<polyline points="6 9 12 15 18 9"/>
|
||||
</svg>
|
||||
</button>
|
||||
|
||||
{isOpen && (
|
||||
<div className="team-selector-dropdown" role="listbox">
|
||||
{teams.length === 0 ? (
|
||||
<div className="team-selector-empty">
|
||||
<p>You're not a member of any teams yet.</p>
|
||||
<Link
|
||||
to="/teams/new"
|
||||
className="team-selector-create-link"
|
||||
onClick={() => setIsOpen(false)}
|
||||
>
|
||||
Create your first team
|
||||
</Link>
|
||||
</div>
|
||||
) : (
|
||||
<>
|
||||
<ul className="team-selector-list">
|
||||
{teams.map(team => (
|
||||
<li key={team.id}>
|
||||
<button
|
||||
className={`team-selector-item ${currentTeam?.id === team.id ? 'selected' : ''}`}
|
||||
onClick={() => handleTeamSelect(team)}
|
||||
role="option"
|
||||
aria-selected={currentTeam?.id === team.id}
|
||||
>
|
||||
<div className="team-selector-item-info">
|
||||
<span className="team-selector-item-name">{team.name}</span>
|
||||
<span className="team-selector-item-meta">
|
||||
{team.project_count} project{team.project_count !== 1 ? 's' : ''}
|
||||
</span>
|
||||
</div>
|
||||
{team.user_role && (
|
||||
<span
|
||||
className="team-selector-item-role"
|
||||
style={{ color: roleColors[team.user_role] || roleColors.member }}
|
||||
>
|
||||
{team.user_role}
|
||||
</span>
|
||||
)}
|
||||
{currentTeam?.id === team.id && (
|
||||
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
||||
<polyline points="20 6 9 17 4 12"/>
|
||||
</svg>
|
||||
)}
|
||||
</button>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
<div className="team-selector-footer">
|
||||
<Link
|
||||
to="/teams"
|
||||
className="team-selector-link"
|
||||
onClick={() => setIsOpen(false)}
|
||||
>
|
||||
View all teams
|
||||
</Link>
|
||||
<Link
|
||||
to="/teams/new"
|
||||
className="team-selector-link team-selector-link-primary"
|
||||
onClick={() => setIsOpen(false)}
|
||||
>
|
||||
+ New Team
|
||||
</Link>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
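
Editor's note: the component above is self-contained apart from its two context hooks, so mounting it is a one-liner anywhere both providers are in scope. A minimal sketch; the Header component and its markup are illustrative assumptions, not part of this diff:

// Hypothetical app header; assumes AuthProvider and TeamProvider wrap the app.
import { TeamSelector } from './components/TeamSelector';

export function Header() {
  return (
    <header className="app-header">
      {/* Renders nothing when logged out; shows the current team otherwise */}
      <TeamSelector />
    </header>
  );
}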

105
frontend/src/components/UserAutocomplete.css
Normal file
@@ -0,0 +1,105 @@
.user-autocomplete {
  position: relative;
  width: 100%;
}

.user-autocomplete__input-wrapper {
  position: relative;
}

.user-autocomplete__input {
  width: 100%;
  padding: 0.625rem 2.5rem 0.625rem 0.75rem;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  background: var(--bg-tertiary);
  color: var(--text-primary);
  font-size: 0.875rem;
}

.user-autocomplete__input:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.2);
}

.user-autocomplete__spinner {
  position: absolute;
  right: 0.75rem;
  top: 50%;
  transform: translateY(-50%);
  width: 16px;
  height: 16px;
  border: 2px solid var(--border-primary);
  border-top-color: var(--accent-primary);
  border-radius: 50%;
  animation: spin 0.6s linear infinite;
}

@keyframes spin {
  to { transform: translateY(-50%) rotate(360deg); }
}

.user-autocomplete__dropdown {
  position: absolute;
  top: 100%;
  left: 0;
  right: 0;
  margin-top: 4px;
  padding: 0.25rem;
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  box-shadow: var(--shadow-lg);
  z-index: 100;
  max-height: 240px;
  overflow-y: auto;
  list-style: none;
}

.user-autocomplete__option {
  display: flex;
  align-items: center;
  gap: 0.75rem;
  padding: 0.5rem 0.75rem;
  border-radius: var(--radius-sm);
  cursor: pointer;
  transition: background 0.1s;
}

.user-autocomplete__option:hover,
.user-autocomplete__option.selected {
  background: var(--bg-hover);
}

.user-autocomplete__avatar {
  width: 32px;
  height: 32px;
  border-radius: 50%;
  background: var(--accent-primary);
  color: white;
  display: flex;
  align-items: center;
  justify-content: center;
  font-weight: 600;
  font-size: 0.875rem;
  flex-shrink: 0;
}

.user-autocomplete__user-info {
  display: flex;
  flex-direction: column;
  min-width: 0;
}

.user-autocomplete__username {
  font-weight: 500;
  color: var(--text-primary);
}

.user-autocomplete__admin-badge {
  font-size: 0.6875rem;
  color: var(--text-muted);
  text-transform: uppercase;
  letter-spacing: 0.025em;
}

171
frontend/src/components/UserAutocomplete.tsx
Normal file
@@ -0,0 +1,171 @@
import { useState, useEffect, useRef, useCallback } from 'react';
import { searchUsers, UserSearchResult } from '../api';
import './UserAutocomplete.css';

interface UserAutocompleteProps {
  value: string;
  onChange: (username: string) => void;
  placeholder?: string;
  disabled?: boolean;
  autoFocus?: boolean;
}

export function UserAutocomplete({
  value,
  onChange,
  placeholder = 'Search users...',
  disabled = false,
  autoFocus = false,
}: UserAutocompleteProps) {
  const [query, setQuery] = useState(value);
  const [results, setResults] = useState<UserSearchResult[]>([]);
  const [loading, setLoading] = useState(false);
  const [isOpen, setIsOpen] = useState(false);
  const [selectedIndex, setSelectedIndex] = useState(-1);
  const containerRef = useRef<HTMLDivElement>(null);
  const inputRef = useRef<HTMLInputElement>(null);
  const debounceRef = useRef<ReturnType<typeof setTimeout>>();

  // Search for users with debounce
  const doSearch = useCallback(async (searchQuery: string) => {
    if (searchQuery.length < 1) {
      setResults([]);
      setIsOpen(false);
      return;
    }

    setLoading(true);
    try {
      const users = await searchUsers(searchQuery);
      setResults(users);
      setIsOpen(users.length > 0);
      setSelectedIndex(-1);
    } catch {
      setResults([]);
      setIsOpen(false);
    } finally {
      setLoading(false);
    }
  }, []);

  // Handle input change with debounce
  const handleInputChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    const newValue = e.target.value;
    setQuery(newValue);
    onChange(newValue); // Update parent immediately for form validation

    // Debounce the search
    if (debounceRef.current) {
      clearTimeout(debounceRef.current);
    }
    debounceRef.current = setTimeout(() => {
      doSearch(newValue);
    }, 200);
  };

  // Handle selecting a user
  const handleSelect = (user: UserSearchResult) => {
    setQuery(user.username);
    onChange(user.username);
    setIsOpen(false);
    setResults([]);
    inputRef.current?.focus();
  };

  // Handle keyboard navigation
  const handleKeyDown = (e: React.KeyboardEvent) => {
    if (!isOpen) return;

    switch (e.key) {
      case 'ArrowDown':
        e.preventDefault();
        setSelectedIndex(prev => (prev < results.length - 1 ? prev + 1 : prev));
        break;
      case 'ArrowUp':
        e.preventDefault();
        setSelectedIndex(prev => (prev > 0 ? prev - 1 : -1));
        break;
      case 'Enter':
        e.preventDefault();
        if (selectedIndex >= 0 && results[selectedIndex]) {
          handleSelect(results[selectedIndex]);
        }
        break;
      case 'Escape':
        setIsOpen(false);
        break;
    }
  };

  // Close dropdown when clicking outside
  useEffect(() => {
    const handleClickOutside = (e: MouseEvent) => {
      if (containerRef.current && !containerRef.current.contains(e.target as Node)) {
        setIsOpen(false);
      }
    };

    document.addEventListener('mousedown', handleClickOutside);
    return () => document.removeEventListener('mousedown', handleClickOutside);
  }, []);

  // Sync external value changes
  useEffect(() => {
    setQuery(value);
  }, [value]);

  // Cleanup debounce on unmount
  useEffect(() => {
    return () => {
      if (debounceRef.current) {
        clearTimeout(debounceRef.current);
      }
    };
  }, []);

  return (
    <div className="user-autocomplete" ref={containerRef}>
      <div className="user-autocomplete__input-wrapper">
        <input
          ref={inputRef}
          type="text"
          value={query}
          onChange={handleInputChange}
          onKeyDown={handleKeyDown}
          onFocus={() => query.length >= 1 && results.length > 0 && setIsOpen(true)}
          placeholder={placeholder}
          disabled={disabled}
          autoFocus={autoFocus}
          autoComplete="off"
          className="user-autocomplete__input"
        />
        {loading && (
          <div className="user-autocomplete__spinner" />
        )}
      </div>

      {isOpen && results.length > 0 && (
        <ul className="user-autocomplete__dropdown">
          {results.map((user, index) => (
            <li
              key={user.id}
              className={`user-autocomplete__option ${index === selectedIndex ? 'selected' : ''}`}
              onClick={() => handleSelect(user)}
              onMouseEnter={() => setSelectedIndex(index)}
            >
              <div className="user-autocomplete__avatar">
                {user.username.charAt(0).toUpperCase()}
              </div>
              <div className="user-autocomplete__user-info">
                <span className="user-autocomplete__username">{user.username}</span>
                {user.is_admin && (
                  <span className="user-autocomplete__admin-badge">Admin</span>
                )}
              </div>
            </li>
          ))}
        </ul>
      )}
    </div>
  );
}
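
Editor's note: because UserAutocomplete propagates every keystroke through onChange (not just completed selections), a parent form can validate it like any controlled input while the 200 ms debounce keeps searchUsers calls sparse. A minimal usage sketch; AddMemberForm and its onInvite callback are illustrative assumptions, not part of this diff:

import { useState } from 'react';
import { UserAutocomplete } from './components/UserAutocomplete';

// Hypothetical invite form wrapping the autocomplete as a controlled field.
function AddMemberForm({ onInvite }: { onInvite: (username: string) => void }) {
  const [username, setUsername] = useState('');

  return (
    <form onSubmit={(e) => { e.preventDefault(); onInvite(username); }}>
      <UserAutocomplete value={username} onChange={setUsername} autoFocus />
      {/* onChange fires on every keystroke, so this disables/enables live */}
      <button type="submit" disabled={!username.trim()}>Invite</button>
    </form>
  );
}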

110
frontend/src/contexts/TeamContext.tsx
Normal file
@@ -0,0 +1,110 @@
import { createContext, useContext, useState, useEffect, useCallback, ReactNode } from 'react';
import { TeamDetail } from '../types';
import { listTeams } from '../api';
import { useAuth } from './AuthContext';

const SELECTED_TEAM_KEY = 'orchard_selected_team';

interface TeamContextType {
  teams: TeamDetail[];
  currentTeam: TeamDetail | null;
  loading: boolean;
  error: string | null;
  setCurrentTeam: (team: TeamDetail | null) => void;
  refreshTeams: () => Promise<void>;
  clearError: () => void;
}

const TeamContext = createContext<TeamContextType | undefined>(undefined);

interface TeamProviderProps {
  children: ReactNode;
}

export function TeamProvider({ children }: TeamProviderProps) {
  const { user } = useAuth();
  const [teams, setTeams] = useState<TeamDetail[]>([]);
  const [currentTeam, setCurrentTeamState] = useState<TeamDetail | null>(null);
  const [loading, setLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);

  const loadTeams = useCallback(async () => {
    if (!user) {
      setTeams([]);
      setCurrentTeamState(null);
      return;
    }

    setLoading(true);
    setError(null);
    try {
      const response = await listTeams({ limit: 100 });
      setTeams(response.items);

      // Try to restore previously selected team
      const savedSlug = localStorage.getItem(SELECTED_TEAM_KEY);
      if (savedSlug) {
        const savedTeam = response.items.find(t => t.slug === savedSlug);
        if (savedTeam) {
          setCurrentTeamState(savedTeam);
          return;
        }
      }

      // Auto-select first team if none selected
      if (response.items.length > 0 && !currentTeam) {
        setCurrentTeamState(response.items[0]);
        localStorage.setItem(SELECTED_TEAM_KEY, response.items[0].slug);
      }
    } catch (err) {
      const message = err instanceof Error ? err.message : 'Failed to load teams';
      setError(message);
    } finally {
      setLoading(false);
    }
  }, [user, currentTeam]);

  // Load teams when user changes
  useEffect(() => {
    loadTeams();
  }, [user]); // eslint-disable-line react-hooks/exhaustive-deps

  const setCurrentTeam = useCallback((team: TeamDetail | null) => {
    setCurrentTeamState(team);
    if (team) {
      localStorage.setItem(SELECTED_TEAM_KEY, team.slug);
    } else {
      localStorage.removeItem(SELECTED_TEAM_KEY);
    }
  }, []);

  const refreshTeams = useCallback(async () => {
    await loadTeams();
  }, [loadTeams]);

  const clearError = useCallback(() => {
    setError(null);
  }, []);

  return (
    <TeamContext.Provider value={{
      teams,
      currentTeam,
      loading,
      error,
      setCurrentTeam,
      refreshTeams,
      clearError,
    }}>
      {children}
    </TeamContext.Provider>
  );
}

export function useTeam() {
  const context = useContext(TeamContext);
  if (context === undefined) {
    throw new Error('useTeam must be used within a TeamProvider');
  }
  return context;
}
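
Editor's note: this file implies an ordering constraint worth making explicit. TeamProvider calls useAuth, so it must be nested inside AuthProvider, and useTeam throws outside TeamProvider. A minimal wiring sketch under those assumptions; the surrounding App structure is illustrative, not part of this diff:

import { AuthProvider } from './contexts/AuthContext';
import { TeamProvider } from './contexts/TeamContext';

// TeamProvider depends on useAuth, so AuthProvider must be the outer wrapper.
export function App() {
  return (
    <AuthProvider>
      <TeamProvider>
        {/* routes / layout go here */}
      </TeamProvider>
    </AuthProvider>
  );
}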
@@ -358,6 +358,12 @@
  gap: 4px;
}

.page-header__actions {
  display: flex;
  align-items: center;
  gap: 12px;
}

/* Package card styles */
.package-card__header {
  display: flex;
@@ -179,16 +179,18 @@ function Home() {
        </form>
      )}

      <div className="list-controls">
        <FilterDropdown
          label="Visibility"
          options={VISIBILITY_OPTIONS}
          value={visibility}
          onChange={handleVisibilityChange}
        />
      </div>
      {user && (
        <div className="list-controls">
          <FilterDropdown
            label="Visibility"
            options={VISIBILITY_OPTIONS}
            value={visibility}
            onChange={handleVisibilityChange}
          />
        </div>
      )}

      {hasActiveFilters && (
      {user && hasActiveFilters && (
        <FilterChipGroup onClearAll={clearFilters}>
          {visibility && (
            <FilterChip
@@ -127,6 +127,12 @@ h2 {
  font-size: 0.75rem;
}

/* Action buttons in table */
.action-buttons {
  display: flex;
  gap: 8px;
}

/* Download by Artifact ID Section */
.download-by-id-section {
  margin-top: 32px;
@@ -424,6 +430,340 @@ tr:hover .copy-btn {
  white-space: nowrap;
}

/* Dependencies Section */
.dependencies-section {
  margin-top: 32px;
  background: var(--bg-secondary);
}

.dependencies-header {
  display: flex;
  align-items: center;
  justify-content: space-between;
  margin-bottom: 12px;
}

.dependencies-header h3 {
  margin: 0;
  color: var(--text-primary);
  font-size: 1rem;
  font-weight: 600;
}

.dependencies-controls {
  display: flex;
  align-items: center;
  gap: 8px;
}

.dependencies-controls .btn {
  display: inline-flex;
  align-items: center;
}

.dependencies-tag-select {
  margin-bottom: 16px;
}

.tag-selector {
  padding: 8px 12px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  color: var(--text-primary);
  font-size: 0.875rem;
  cursor: pointer;
  min-width: 200px;
}

.tag-selector:focus {
  outline: none;
  border-color: var(--accent-primary);
}

.deps-loading {
  color: var(--text-muted);
  font-size: 0.875rem;
  padding: 16px 0;
}

.deps-error {
  color: var(--error-color, #ef4444);
  font-size: 0.875rem;
  padding: 12px 16px;
  background: rgba(239, 68, 68, 0.1);
  border-radius: var(--radius-md);
}

.deps-empty {
  color: var(--text-muted);
  font-size: 0.875rem;
  padding: 16px 0;
}

.deps-summary {
  color: var(--text-secondary);
  font-size: 0.875rem;
  margin-bottom: 12px;
}

.deps-summary strong {
  color: var(--accent-primary);
}

.deps-items {
  list-style: none;
  margin: 0;
  padding: 0;
  display: flex;
  flex-direction: column;
  gap: 8px;
}

.dep-item {
  display: flex;
  align-items: center;
  gap: 12px;
  padding: 12px 16px;
  background: var(--bg-tertiary);
  border-radius: var(--radius-md);
  border: 1px solid var(--border-primary);
}

.dep-link {
  color: var(--accent-primary);
  font-weight: 500;
  text-decoration: none;
  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
  font-size: 0.875rem;
}

.dep-link:hover {
  text-decoration: underline;
}

.dep-constraint {
  color: var(--text-muted);
  font-size: 0.8125rem;
  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
}

.dep-status {
  margin-left: auto;
  font-size: 0.875rem;
  font-weight: 600;
}

.dep-status--ok {
  color: var(--success-color, #10b981);
}

.dep-status--missing {
  color: var(--warning-color, #f59e0b);
}

/* Tag name link in table */
.tag-name-link {
  color: var(--accent-primary);
  transition: opacity var(--transition-fast);
}

.tag-name-link:hover {
  opacity: 0.8;
}

.tag-name-link.selected {
  text-decoration: underline;
}

/* Used By (Reverse Dependencies) Section */
.used-by-section {
  margin-top: 32px;
  background: var(--bg-secondary);
}

.used-by-section h3 {
  margin-bottom: 16px;
  color: var(--text-primary);
  font-size: 1rem;
  font-weight: 600;
}

.reverse-dep-item {
  display: flex;
  align-items: center;
  gap: 12px;
  flex-wrap: wrap;
}

.dep-version {
  color: var(--accent-primary);
  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
  font-size: 0.8125rem;
  background: rgba(16, 185, 129, 0.1);
  padding: 2px 8px;
  border-radius: var(--radius-sm);
}

.dep-requires {
  color: var(--text-muted);
  font-size: 0.8125rem;
  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
  margin-left: auto;
}

.reverse-deps-pagination {
  display: flex;
  align-items: center;
  justify-content: center;
  gap: 16px;
  margin-top: 16px;
  padding-top: 16px;
  border-top: 1px solid var(--border-primary);
}

.pagination-info {
  color: var(--text-secondary);
  font-size: 0.875rem;
}

/* Ensure File Modal */
.modal-overlay {
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background: rgba(0, 0, 0, 0.7);
  display: flex;
  align-items: center;
  justify-content: center;
  z-index: 1000;
  padding: 20px;
}

.ensure-file-modal {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  max-width: 700px;
  width: 100%;
  max-height: 80vh;
  display: flex;
  flex-direction: column;
  box-shadow: 0 20px 50px rgba(0, 0, 0, 0.5);
}

.ensure-file-header {
  display: flex;
  align-items: center;
  justify-content: space-between;
  padding: 16px 20px;
  border-bottom: 1px solid var(--border-primary);
}

.ensure-file-header h3 {
  margin: 0;
  color: var(--text-primary);
  font-size: 1rem;
  font-weight: 600;
}

.ensure-file-actions {
  display: flex;
  align-items: center;
  gap: 8px;
}

.ensure-file-actions .copy-btn {
  opacity: 1;
  width: 32px;
  height: 32px;
}

.modal-close {
  display: flex;
  align-items: center;
  justify-content: center;
  width: 32px;
  height: 32px;
  padding: 0;
  background: transparent;
  border: none;
  border-radius: var(--radius-sm);
  color: var(--text-muted);
  cursor: pointer;
  transition: all var(--transition-fast);
}

.modal-close:hover {
  background: var(--bg-hover);
  color: var(--text-primary);
}

.ensure-file-content {
  flex: 1;
  overflow: auto;
  padding: 20px;
}

.ensure-file-loading {
  color: var(--text-muted);
  text-align: center;
  padding: 40px 20px;
}

.ensure-file-error {
  color: var(--error-color, #ef4444);
  padding: 16px;
  background: rgba(239, 68, 68, 0.1);
  border-radius: var(--radius-md);
}

.ensure-file-empty {
  color: var(--text-muted);
  text-align: center;
  padding: 40px 20px;
  font-style: italic;
}

.ensure-file-yaml {
  margin: 0;
  padding: 16px;
  background: #0d0d0f;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  overflow-x: auto;
}

.ensure-file-yaml code {
  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
  font-size: 0.8125rem;
  color: #e2e8f0;
  white-space: pre;
}

.ensure-file-footer {
  padding: 16px 20px;
  border-top: 1px solid var(--border-primary);
  background: var(--bg-tertiary);
  border-radius: 0 0 var(--radius-lg) var(--radius-lg);
}

.ensure-file-hint {
  margin: 0;
  color: var(--text-muted);
  font-size: 0.8125rem;
}

.ensure-file-hint code {
  background: rgba(0, 0, 0, 0.2);
  padding: 2px 6px;
  border-radius: var(--radius-sm);
  font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
  color: var(--accent-primary);
}

/* Responsive adjustments */
@media (max-width: 768px) {
  .upload-form {
@@ -439,4 +779,18 @@ tr:hover .copy-btn {
    flex-wrap: wrap;
    gap: 12px;
  }

  .dependencies-header {
    flex-direction: column;
    align-items: flex-start;
    gap: 12px;
  }

  .tag-selector {
    width: 100%;
  }

  .ensure-file-modal {
    max-height: 90vh;
  }
}
@@ -1,7 +1,7 @@
import { useState, useEffect, useCallback } from 'react';
import { useParams, useSearchParams, useNavigate, useLocation } from 'react-router-dom';
import { TagDetail, Package, PaginatedResponse, AccessLevel } from '../types';
import { listTags, getDownloadUrl, getPackage, getMyProjectAccess, createTag, UnauthorizedError, ForbiddenError } from '../api';
import { useParams, useSearchParams, useNavigate, useLocation, Link } from 'react-router-dom';
import { TagDetail, Package, PaginatedResponse, AccessLevel, Dependency, DependentInfo } from '../types';
import { listTags, getDownloadUrl, getPackage, getMyProjectAccess, createTag, getArtifactDependencies, getReverseDependencies, getEnsureFile, UnauthorizedError, ForbiddenError } from '../api';
import { Breadcrumb } from '../components/Breadcrumb';
import { Badge } from '../components/Badge';
import { SearchInput } from '../components/SearchInput';
@@ -10,6 +10,7 @@ import { DataTable } from '../components/DataTable';
import { Pagination } from '../components/Pagination';
import { DragDropUpload, UploadResult } from '../components/DragDropUpload';
import { useAuth } from '../contexts/AuthContext';
import DependencyGraph from '../components/DependencyGraph';
import './Home.css';
import './PackagePage.css';

@@ -68,6 +69,30 @@ function PackagePage() {
  const [createTagArtifactId, setCreateTagArtifactId] = useState('');
  const [createTagLoading, setCreateTagLoading] = useState(false);

  // Dependencies state
  const [selectedTag, setSelectedTag] = useState<TagDetail | null>(null);
  const [dependencies, setDependencies] = useState<Dependency[]>([]);
  const [depsLoading, setDepsLoading] = useState(false);
  const [depsError, setDepsError] = useState<string | null>(null);

  // Reverse dependencies state
  const [reverseDeps, setReverseDeps] = useState<DependentInfo[]>([]);
  const [reverseDepsLoading, setReverseDepsLoading] = useState(false);
  const [reverseDepsError, setReverseDepsError] = useState<string | null>(null);
  const [reverseDepsPage, setReverseDepsPage] = useState(1);
  const [reverseDepsTotal, setReverseDepsTotal] = useState(0);
  const [reverseDepsHasMore, setReverseDepsHasMore] = useState(false);

  // Dependency graph modal state
  const [showGraph, setShowGraph] = useState(false);

  // Ensure file modal state
  const [showEnsureFile, setShowEnsureFile] = useState(false);
  const [ensureFileContent, setEnsureFileContent] = useState<string | null>(null);
  const [ensureFileLoading, setEnsureFileLoading] = useState(false);
  const [ensureFileError, setEnsureFileError] = useState<string | null>(null);
  const [ensureFileTagName, setEnsureFileTagName] = useState<string | null>(null);

  // Derived permissions
  const canWrite = accessLevel === 'write' || accessLevel === 'admin';

@@ -128,6 +153,98 @@ function PackagePage() {
    loadData();
  }, [loadData]);

  // Auto-select tag when tags are loaded (prefer version from URL, then first tag)
  // Re-run when package changes to pick up new tags
  useEffect(() => {
    if (tagsData?.items && tagsData.items.length > 0) {
      const versionParam = searchParams.get('version');
      if (versionParam) {
        // Find tag matching the version parameter
        const matchingTag = tagsData.items.find(t => t.version === versionParam);
        if (matchingTag) {
          setSelectedTag(matchingTag);
          setDependencies([]);
          return;
        }
      }
      // Fall back to first tag
      setSelectedTag(tagsData.items[0]);
      setDependencies([]);
    }
  }, [tagsData, searchParams, projectName, packageName]);

  // Fetch dependencies when selected tag changes
  const fetchDependencies = useCallback(async (artifactId: string) => {
    setDepsLoading(true);
    setDepsError(null);
    try {
      const result = await getArtifactDependencies(artifactId);
      setDependencies(result.dependencies);
    } catch (err) {
      setDepsError(err instanceof Error ? err.message : 'Failed to load dependencies');
      setDependencies([]);
    } finally {
      setDepsLoading(false);
    }
  }, []);

  useEffect(() => {
    if (selectedTag) {
      fetchDependencies(selectedTag.artifact_id);
    }
  }, [selectedTag, fetchDependencies]);

  // Fetch reverse dependencies
  const fetchReverseDeps = useCallback(async (pageNum: number = 1) => {
    if (!projectName || !packageName) return;

    setReverseDepsLoading(true);
    setReverseDepsError(null);
    try {
      const result = await getReverseDependencies(projectName, packageName, { page: pageNum, limit: 10 });
      setReverseDeps(result.dependents);
      setReverseDepsTotal(result.pagination.total);
      setReverseDepsHasMore(result.pagination.has_more);
      setReverseDepsPage(pageNum);
    } catch (err) {
      setReverseDepsError(err instanceof Error ? err.message : 'Failed to load reverse dependencies');
      setReverseDeps([]);
    } finally {
      setReverseDepsLoading(false);
    }
  }, [projectName, packageName]);

  useEffect(() => {
    if (projectName && packageName && !loading) {
      fetchReverseDeps(1);
    }
  }, [projectName, packageName, loading, fetchReverseDeps]);

  // Fetch ensure file for a specific tag
  const fetchEnsureFileForTag = useCallback(async (tagName: string) => {
    if (!projectName || !packageName) return;

    setEnsureFileTagName(tagName);
    setEnsureFileLoading(true);
    setEnsureFileError(null);
    try {
      const content = await getEnsureFile(projectName, packageName, tagName);
      setEnsureFileContent(content);
      setShowEnsureFile(true);
    } catch (err) {
      setEnsureFileError(err instanceof Error ? err.message : 'Failed to load ensure file');
      setShowEnsureFile(true);
    } finally {
      setEnsureFileLoading(false);
    }
  }, [projectName, packageName]);

  // Fetch ensure file for selected tag
  const fetchEnsureFile = useCallback(async () => {
    if (!selectedTag) return;
    fetchEnsureFileForTag(selectedTag.name);
  }, [selectedTag, fetchEnsureFileForTag]);

  // Keyboard navigation - go back with backspace
  useEffect(() => {
    const handleKeyDown = (e: KeyboardEvent) => {
@@ -202,12 +319,24 @@ function PackagePage() {
  const tags = tagsData?.items || [];
  const pagination = tagsData?.pagination;

  const handleTagSelect = (tag: TagDetail) => {
    setSelectedTag(tag);
  };

  const columns = [
    {
      key: 'name',
      header: 'Tag',
      sortable: true,
      render: (t: TagDetail) => <strong>{t.name}</strong>,
      render: (t: TagDetail) => (
        <strong
          className={`tag-name-link ${selectedTag?.id === t.id ? 'selected' : ''}`}
          onClick={() => handleTagSelect(t)}
          style={{ cursor: 'pointer' }}
        >
          {t.name}
        </strong>
      ),
    },
    {
      key: 'version',
@@ -261,13 +390,22 @@ function PackagePage() {
      key: 'actions',
      header: 'Actions',
      render: (t: TagDetail) => (
        <a
          href={getDownloadUrl(projectName!, packageName!, t.name)}
          className="btn btn-secondary btn-small"
          download
        >
          Download
        </a>
        <div className="action-buttons">
          <button
            className="btn btn-secondary btn-small"
            onClick={() => fetchEnsureFileForTag(t.name)}
            title="View orchard.ensure file"
          >
            Ensure
          </button>
          <a
            href={getDownloadUrl(projectName!, packageName!, t.name)}
            className="btn btn-secondary btn-small"
            download
          >
            Download
          </a>
        </div>
      ),
    },
  ];
@@ -439,6 +577,166 @@ function PackagePage() {
        />
      )}

      {/* Dependencies Section */}
      {tags.length > 0 && (
        <div className="dependencies-section card">
          <div className="dependencies-header">
            <h3>Dependencies</h3>
            <div className="dependencies-controls">
              {selectedTag && (
                <>
                  <button
                    className="btn btn-secondary btn-small"
                    onClick={fetchEnsureFile}
                    disabled={ensureFileLoading}
                    title="View orchard.ensure file"
                  >
                    <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" style={{ marginRight: '6px' }}>
                      <path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path>
                      <polyline points="14 2 14 8 20 8"></polyline>
                      <line x1="16" y1="13" x2="8" y2="13"></line>
                      <line x1="16" y1="17" x2="8" y2="17"></line>
                      <polyline points="10 9 9 9 8 9"></polyline>
                    </svg>
                    {ensureFileLoading ? 'Loading...' : 'View Ensure File'}
                  </button>
                  <button
                    className="btn btn-secondary btn-small"
                    onClick={() => setShowGraph(true)}
                    title="View full dependency tree"
                  >
                    <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" style={{ marginRight: '6px' }}>
                      <circle cx="12" cy="12" r="3"></circle>
                      <circle cx="4" cy="4" r="2"></circle>
                      <circle cx="20" cy="4" r="2"></circle>
                      <circle cx="4" cy="20" r="2"></circle>
                      <circle cx="20" cy="20" r="2"></circle>
                      <line x1="9.5" y1="9.5" x2="5.5" y2="5.5"></line>
                      <line x1="14.5" y1="9.5" x2="18.5" y2="5.5"></line>
                      <line x1="9.5" y1="14.5" x2="5.5" y2="18.5"></line>
                      <line x1="14.5" y1="14.5" x2="18.5" y2="18.5"></line>
                    </svg>
                    View Graph
                  </button>
                </>
              )}
            </div>
          </div>
          <div className="dependencies-tag-select">
            {selectedTag && (
              <select
                className="tag-selector"
                value={selectedTag.id}
                onChange={(e) => {
                  const tag = tags.find(t => t.id === e.target.value);
                  if (tag) setSelectedTag(tag);
                }}
              >
                {tags.map(t => (
                  <option key={t.id} value={t.id}>
                    {t.name}{t.version ? ` (${t.version})` : ''}
                  </option>
                ))}
              </select>
            )}
          </div>

          {depsLoading ? (
            <div className="deps-loading">Loading dependencies...</div>
          ) : depsError ? (
            <div className="deps-error">{depsError}</div>
          ) : dependencies.length === 0 ? (
            <div className="deps-empty">
              {selectedTag ? (
                <span><strong>{selectedTag.name}</strong> has no dependencies</span>
              ) : (
                <span>No dependencies</span>
              )}
            </div>
          ) : (
            <div className="deps-list">
              <div className="deps-summary">
                <strong>{selectedTag?.name}</strong> has {dependencies.length} {dependencies.length === 1 ? 'dependency' : 'dependencies'}:
              </div>
              <ul className="deps-items">
                {dependencies.map((dep) => (
                  <li key={dep.id} className="dep-item">
                    <Link
                      to={`/project/${dep.project}/${dep.package}`}
                      className="dep-link"
                    >
                      {dep.project}/{dep.package}
                    </Link>
                    <span className="dep-constraint">
                      @ {dep.version || dep.tag}
                    </span>
                    <span className="dep-status dep-status--ok" title="Package exists">
                      ✓
                    </span>
                  </li>
                ))}
              </ul>
            </div>
          )}
        </div>
      )}

      {/* Used By (Reverse Dependencies) Section */}
      <div className="used-by-section card">
        <h3>Used By</h3>

        {reverseDepsLoading ? (
          <div className="deps-loading">Loading reverse dependencies...</div>
        ) : reverseDepsError ? (
          <div className="deps-error">{reverseDepsError}</div>
        ) : reverseDeps.length === 0 ? (
          <div className="deps-empty">No packages depend on this package</div>
        ) : (
          <div className="reverse-deps-list">
            <div className="deps-summary">
              {reverseDepsTotal} {reverseDepsTotal === 1 ? 'package depends' : 'packages depend'} on this:
            </div>
            <ul className="deps-items">
              {reverseDeps.map((dep) => (
                <li key={dep.artifact_id} className="dep-item reverse-dep-item">
                  <Link
                    to={`/project/${dep.project}/${dep.package}${dep.version ? `?version=${dep.version}` : ''}`}
                    className="dep-link"
                  >
                    {dep.project}/{dep.package}
                    {dep.version && (
                      <span className="dep-version">v{dep.version}</span>
                    )}
                  </Link>
                  <span className="dep-requires">
                    requires @ {dep.constraint_value}
                  </span>
                </li>
              ))}
            </ul>
            {(reverseDepsHasMore || reverseDepsPage > 1) && (
              <div className="reverse-deps-pagination">
                <button
                  className="btn btn-secondary btn-small"
                  onClick={() => fetchReverseDeps(reverseDepsPage - 1)}
                  disabled={reverseDepsPage <= 1 || reverseDepsLoading}
                >
                  Previous
                </button>
                <span className="pagination-info">Page {reverseDepsPage}</span>
                <button
                  className="btn btn-secondary btn-small"
                  onClick={() => fetchReverseDeps(reverseDepsPage + 1)}
                  disabled={!reverseDepsHasMore || reverseDepsLoading}
                >
                  Next
                </button>
              </div>
            )}
          </div>
        )}
      </div>

      <div className="download-by-id-section card">
        <h3>Download by Artifact ID</h3>
        <div className="download-by-id-form">
@@ -522,6 +820,58 @@ function PackagePage() {
          <code>curl -O {window.location.origin}/api/v1/project/{projectName}/{packageName}/+/v1.0.0</code>
        </pre>
      </div>

      {/* Dependency Graph Modal */}
      {showGraph && selectedTag && (
        <DependencyGraph
          projectName={projectName!}
          packageName={packageName!}
          tagName={selectedTag.name}
          onClose={() => setShowGraph(false)}
        />
      )}

      {/* Ensure File Modal */}
      {showEnsureFile && (
        <div className="modal-overlay" onClick={() => setShowEnsureFile(false)}>
          <div className="ensure-file-modal" onClick={(e) => e.stopPropagation()}>
            <div className="ensure-file-header">
              <h3>orchard.ensure for {ensureFileTagName}</h3>
              <div className="ensure-file-actions">
                {ensureFileContent && (
                  <CopyButton text={ensureFileContent} />
                )}
                <button
                  className="modal-close"
                  onClick={() => setShowEnsureFile(false)}
                  title="Close"
                >
                  <svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                    <line x1="18" y1="6" x2="6" y2="18"></line>
                    <line x1="6" y1="6" x2="18" y2="18"></line>
                  </svg>
                </button>
              </div>
            </div>
            <div className="ensure-file-content">
              {ensureFileLoading ? (
                <div className="ensure-file-loading">Loading...</div>
              ) : ensureFileError ? (
                <div className="ensure-file-error">{ensureFileError}</div>
              ) : ensureFileContent ? (
                <pre className="ensure-file-yaml"><code>{ensureFileContent}</code></pre>
              ) : (
                <div className="ensure-file-empty">No dependencies defined for this artifact.</div>
              )}
            </div>
            <div className="ensure-file-footer">
              <p className="ensure-file-hint">
                Save this as <code>orchard.ensure</code> in your project root to declare dependencies.
              </p>
            </div>
          </div>
        </div>
      )}
    </div>
  );
}
@@ -8,7 +8,6 @@ import { DataTable } from '../components/DataTable';
import { SearchInput } from '../components/SearchInput';
import { FilterChip, FilterChipGroup } from '../components/FilterChip';
import { Pagination } from '../components/Pagination';
import { AccessManagement } from '../components/AccessManagement';
import { useAuth } from '../contexts/AuthContext';
import './Home.css';

@@ -211,15 +210,30 @@ function ProjectPage() {
            <span className="meta-item">by {project.created_by}</span>
          </div>
        </div>
        {canWrite ? (
          <button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
            {showForm ? 'Cancel' : '+ New Package'}
          </button>
        ) : user ? (
          <span className="text-muted" title="You have read-only access to this project">
            Read-only access
          </span>
        ) : null}
        <div className="page-header__actions">
          {canAdmin && !project.team_id && (
            <button
              className="btn btn-secondary"
              onClick={() => navigate(`/project/${projectName}/settings`)}
              title="Project Settings"
            >
              <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round">
                <circle cx="12" cy="12" r="3" />
                <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z" />
              </svg>
              Settings
            </button>
          )}
          {canWrite ? (
            <button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
              {showForm ? 'Cancel' : '+ New Package'}
            </button>
          ) : user ? (
            <span className="text-muted" title="You have read-only access to this project">
              Read-only access
            </span>
          ) : null}
        </div>
      </div>

      {error && <div className="error-message">{error}</div>}
@@ -371,10 +385,6 @@ function ProjectPage() {
          onPageChange={handlePageChange}
        />
      )}

      {canAdmin && projectName && (
        <AccessManagement projectName={projectName} />
      )}
    </div>
  );
}

476
frontend/src/pages/ProjectSettingsPage.css
Normal file
@@ -0,0 +1,476 @@
.project-settings-page {
  max-width: 900px;
  margin: 0 auto;
}

.project-settings-header {
  margin-bottom: 32px;
}

.project-settings-header h1 {
  font-size: 1.75rem;
  font-weight: 600;
  color: var(--text-primary);
  margin-bottom: 8px;
  letter-spacing: -0.02em;
}

.project-settings-subtitle {
  color: var(--text-tertiary);
  font-size: 0.9375rem;
}

.project-settings-loading {
  display: flex;
  align-items: center;
  justify-content: center;
  gap: 12px;
  padding: 64px 24px;
  color: var(--text-tertiary);
  font-size: 0.9375rem;
}

.project-settings-spinner {
  width: 20px;
  height: 20px;
  border: 2px solid var(--border-secondary);
  border-top-color: var(--accent-primary);
  border-radius: 50%;
  animation: project-settings-spin 0.6s linear infinite;
}

@keyframes project-settings-spin {
  to {
    transform: rotate(360deg);
  }
}

.project-settings-error {
  display: flex;
  align-items: center;
  gap: 10px;
  background: var(--error-bg);
  border: 1px solid rgba(239, 68, 68, 0.2);
  color: var(--error);
  padding: 12px 16px;
  border-radius: var(--radius-md);
  margin-bottom: 24px;
  font-size: 0.875rem;
}

.project-settings-success {
  display: flex;
  align-items: center;
  gap: 10px;
  background: var(--success-bg);
  border: 1px solid rgba(34, 197, 94, 0.2);
  color: var(--success);
  padding: 12px 16px;
  border-radius: var(--radius-md);
  margin-bottom: 24px;
  font-size: 0.875rem;
  animation: project-settings-fade-in 0.2s ease;
}

@keyframes project-settings-fade-in {
  from {
    opacity: 0;
    transform: translateY(-8px);
  }
  to {
    opacity: 1;
    transform: translateY(0);
  }
}

.project-settings-section {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  padding: 24px;
  margin-bottom: 24px;
}

.project-settings-section h2 {
  font-size: 1.125rem;
  font-weight: 600;
  color: var(--text-primary);
  margin-bottom: 20px;
  padding-bottom: 16px;
  border-bottom: 1px solid var(--border-primary);
}

.project-settings-form {
  display: flex;
  flex-direction: column;
  gap: 16px;
}

.project-settings-form-group {
  display: flex;
  flex-direction: column;
  gap: 6px;
}

.project-settings-form-group label {
  font-size: 0.8125rem;
  font-weight: 500;
  color: var(--text-secondary);
}

.project-settings-form-group textarea,
.project-settings-form-group input[type="text"] {
  padding: 12px 14px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  color: var(--text-primary);
  transition: all var(--transition-fast);
  font-family: inherit;
  resize: vertical;
}

.project-settings-form-group textarea {
  min-height: 100px;
}

.project-settings-form-group textarea::placeholder,
.project-settings-form-group input::placeholder {
  color: var(--text-muted);
}

.project-settings-form-group textarea:hover:not(:disabled),
.project-settings-form-group input:hover:not(:disabled) {
  border-color: var(--border-secondary);
  background: var(--bg-elevated);
}

.project-settings-form-group textarea:focus,
.project-settings-form-group input:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
  background: var(--bg-elevated);
}

.project-settings-form-group textarea:disabled,
.project-settings-form-group input:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

.project-settings-checkbox-group {
  flex-direction: row;
  align-items: center;
}

.project-settings-checkbox-label {
  display: flex;
  align-items: center;
  gap: 10px;
  cursor: pointer;
  font-size: 0.875rem;
  font-weight: 400;
  color: var(--text-secondary);
  user-select: none;
}

.project-settings-checkbox-label input[type="checkbox"] {
  position: absolute;
  opacity: 0;
  width: 0;
  height: 0;
}

.project-settings-checkbox-custom {
  width: 18px;
  height: 18px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-secondary);
  border-radius: var(--radius-sm);
  transition: all var(--transition-fast);
  position: relative;
  flex-shrink: 0;
}

.project-settings-checkbox-label input[type="checkbox"]:checked + .project-settings-checkbox-custom {
  background: var(--accent-primary);
  border-color: var(--accent-primary);
}

.project-settings-checkbox-label input[type="checkbox"]:checked + .project-settings-checkbox-custom::after {
  content: '';
  position: absolute;
  left: 5px;
  top: 2px;
  width: 5px;
  height: 9px;
  border: solid white;
  border-width: 0 2px 2px 0;
  transform: rotate(45deg);
}

.project-settings-checkbox-label input[type="checkbox"]:focus + .project-settings-checkbox-custom {
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
}

.project-settings-checkbox-label:hover .project-settings-checkbox-custom {
  border-color: var(--accent-primary);
}

.project-settings-form-actions {
  display: flex;
  justify-content: flex-end;
  gap: 12px;
  margin-top: 8px;
}

.project-settings-save-button {
  display: flex;
  align-items: center;
  justify-content: center;
  gap: 8px;
  padding: 10px 18px;
  background: var(--accent-gradient);
  border: none;
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  color: white;
  cursor: pointer;
  transition: all var(--transition-fast);
  min-width: 120px;
}

.project-settings-save-button:hover:not(:disabled) {
  transform: translateY(-1px);
  box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
}

.project-settings-save-button:disabled {
  opacity: 0.5;
  cursor: not-allowed;
  transform: none;
}

.project-settings-button-spinner {
  width: 14px;
  height: 14px;
  border: 2px solid rgba(255, 255, 255, 0.3);
  border-top-color: white;
  border-radius: 50%;
  animation: project-settings-spin 0.6s linear infinite;
}

/* Danger Zone */
.project-settings-danger-zone {
  background: var(--bg-secondary);
  border: 1px solid rgba(239, 68, 68, 0.3);
  border-radius: var(--radius-lg);
  padding: 24px;
  margin-bottom: 24px;
}

.project-settings-danger-zone h2 {
  font-size: 1.125rem;
  font-weight: 600;
  color: var(--error);
  margin-bottom: 20px;
  padding-bottom: 16px;
  border-bottom: 1px solid rgba(239, 68, 68, 0.2);
}

.project-settings-danger-item {
  display: flex;
  justify-content: space-between;
  align-items: flex-start;
  gap: 24px;
}

.project-settings-danger-info h3 {
  font-size: 0.9375rem;
  font-weight: 600;
  color: var(--text-primary);
  margin-bottom: 4px;
}

.project-settings-danger-info p {
  color: var(--text-tertiary);
  font-size: 0.8125rem;
  max-width: 400px;
}

.project-settings-delete-button {
  padding: 10px 18px;
  background: transparent;
  border: 1px solid rgba(239, 68, 68, 0.3);
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  color: var(--error);
  cursor: pointer;
  transition: all var(--transition-fast);
  flex-shrink: 0;
}

.project-settings-delete-button:hover:not(:disabled) {
  background: var(--error-bg);
  border-color: rgba(239, 68, 68, 0.5);
}

.project-settings-delete-button:disabled {
  opacity: 0.5;
  cursor: not-allowed;
}

/* Delete Confirmation */
.project-settings-delete-confirm {
  margin-top: 20px;
  padding-top: 20px;
  border-top: 1px solid rgba(239, 68, 68, 0.2);
  animation: project-settings-fade-in 0.2s ease;
}

.project-settings-delete-confirm p {
  color: var(--text-secondary);
  font-size: 0.875rem;
  margin-bottom: 12px;
}

.project-settings-delete-confirm strong {
  color: var(--text-primary);
  font-family: 'JetBrains Mono', 'Fira Code', 'SF Mono', Monaco, monospace;
  background: var(--bg-tertiary);
  padding: 2px 6px;
  border-radius: var(--radius-sm);
}

.project-settings-delete-confirm-input {
  width: 100%;
  padding: 12px 14px;
  background: var(--bg-tertiary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  color: var(--text-primary);
  transition: all var(--transition-fast);
  margin-bottom: 16px;
}

.project-settings-delete-confirm-input:focus {
  outline: none;
  border-color: var(--error);
  box-shadow: 0 0 0 3px rgba(239, 68, 68, 0.15);
}

.project-settings-delete-confirm-input::placeholder {
  color: var(--text-muted);
}

.project-settings-delete-confirm-actions {
  display: flex;
  gap: 12px;
}

.project-settings-confirm-delete-button {
  display: flex;
  align-items: center;
  justify-content: center;
  gap: 8px;
  padding: 10px 18px;
  background: var(--error);
  border: none;
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  color: white;
  cursor: pointer;
  transition: all var(--transition-fast);
  min-width: 120px;
}

.project-settings-confirm-delete-button:hover:not(:disabled) {
  opacity: 0.9;
}

.project-settings-confirm-delete-button:disabled {
  opacity: 0.5;
  cursor: not-allowed;
}

.project-settings-cancel-button {
  padding: 10px 18px;
  background: transparent;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  color: var(--text-secondary);
  cursor: pointer;
  transition: all var(--transition-fast);
}

.project-settings-cancel-button:hover:not(:disabled) {
  background: var(--bg-hover);
  border-color: var(--border-secondary);
  color: var(--text-primary);
}

.project-settings-cancel-button:disabled {
  opacity: 0.5;
  cursor: not-allowed;
}

.project-settings-delete-spinner {
  width: 14px;
  height: 14px;
  border: 2px solid rgba(255, 255, 255, 0.3);
  border-top-color: white;
  border-radius: 50%;
  animation: project-settings-spin 0.6s linear infinite;
}

/* Access denied */
.project-settings-access-denied {
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  padding: 80px 24px;
  text-align: center;
}

.project-settings-access-denied h2 {
  font-size: 1.5rem;
  font-weight: 600;
  color: var(--text-primary);
  margin-bottom: 12px;
}

.project-settings-access-denied p {
  color: var(--text-tertiary);
  font-size: 0.9375rem;
  max-width: 400px;
}

/* Responsive */
@media (max-width: 768px) {
  .project-settings-danger-item {
    flex-direction: column;
    gap: 16px;
  }

  .project-settings-delete-button {
    align-self: flex-start;
  }

  .project-settings-delete-confirm-actions {
    flex-direction: column;
  }

  .project-settings-confirm-delete-button,
  .project-settings-cancel-button {
    width: 100%;
  }
}
304 frontend/src/pages/ProjectSettingsPage.tsx Normal file
@@ -0,0 +1,304 @@
import { useState, useEffect, useCallback } from 'react';
import { useParams, useNavigate } from 'react-router-dom';
import { Project } from '../types';
import {
  getProject,
  updateProject,
  deleteProject,
  getMyProjectAccess,
  UnauthorizedError,
  ForbiddenError,
} from '../api';
import { Breadcrumb } from '../components/Breadcrumb';
import { useAuth } from '../contexts/AuthContext';
import './ProjectSettingsPage.css';

function ProjectSettingsPage() {
  const { projectName } = useParams<{ projectName: string }>();
  const navigate = useNavigate();
  const { user } = useAuth();

  const [project, setProject] = useState<Project | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const [success, setSuccess] = useState<string | null>(null);
  const [accessDenied, setAccessDenied] = useState(false);
  const [canAdmin, setCanAdmin] = useState(false);

  // General settings form state
  const [description, setDescription] = useState('');
  const [isPublic, setIsPublic] = useState(false);
  const [saving, setSaving] = useState(false);

  // Delete confirmation state
  const [showDeleteConfirm, setShowDeleteConfirm] = useState(false);
  const [deleteConfirmText, setDeleteConfirmText] = useState('');
  const [deleting, setDeleting] = useState(false);

  const loadData = useCallback(async () => {
    if (!projectName) return;

    try {
      setLoading(true);
      setAccessDenied(false);
      const [projectData, accessResult] = await Promise.all([
        getProject(projectName),
        getMyProjectAccess(projectName),
      ]);
      setProject(projectData);
      setDescription(projectData.description || '');
      setIsPublic(projectData.is_public);

      const hasAdminAccess = accessResult.access_level === 'admin';
      setCanAdmin(hasAdminAccess);

      if (!hasAdminAccess) {
        setAccessDenied(true);
      }

      setError(null);
    } catch (err) {
      if (err instanceof UnauthorizedError) {
        navigate('/login', { state: { from: `/project/${projectName}/settings` } });
        return;
      }
      if (err instanceof ForbiddenError) {
        setAccessDenied(true);
        setLoading(false);
        return;
      }
      setError(err instanceof Error ? err.message : 'Failed to load project');
    } finally {
      setLoading(false);
    }
  }, [projectName, navigate]);

  useEffect(() => {
    loadData();
  }, [loadData]);

  const handleSaveSettings = async (e: React.FormEvent) => {
    e.preventDefault();
    if (!projectName) return;

    try {
      setSaving(true);
      setError(null);
      const updatedProject = await updateProject(projectName, {
        description: description || undefined,
        is_public: isPublic,
      });
      setProject(updatedProject);
      setSuccess('Settings saved successfully');
      setTimeout(() => setSuccess(null), 3000);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to save settings');
    } finally {
      setSaving(false);
    }
  };

  const handleDeleteProject = async () => {
    if (!projectName || deleteConfirmText !== projectName) return;

    try {
      setDeleting(true);
      setError(null);
      await deleteProject(projectName);
      navigate('/');
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to delete project');
      setDeleting(false);
    }
  };

  const handleCancelDelete = () => {
    setShowDeleteConfirm(false);
    setDeleteConfirmText('');
  };

  if (loading) {
    return (
      <div className="project-settings-page">
        <Breadcrumb
          items={[
            { label: 'Projects', href: '/' },
            { label: projectName || '', href: `/project/${projectName}` },
            { label: 'Settings' },
          ]}
        />
        <div className="project-settings-loading">
          <div className="project-settings-spinner" />
          <span>Loading...</span>
        </div>
      </div>
    );
  }

  if (accessDenied || !canAdmin) {
    return (
      <div className="project-settings-page">
        <Breadcrumb
          items={[
            { label: 'Projects', href: '/' },
            { label: projectName || '', href: `/project/${projectName}` },
            { label: 'Settings' },
          ]}
        />
        <div className="project-settings-access-denied">
          <h2>Access Denied</h2>
          <p>You must be a project admin to access settings.</p>
          {!user && (
            <p style={{ marginTop: '16px' }}>
              <a href="/login" className="btn btn-primary">
                Sign in
              </a>
            </p>
          )}
        </div>
      </div>
    );
  }

  if (!project) {
    return (
      <div className="project-settings-page">
        <Breadcrumb
          items={[
            { label: 'Projects', href: '/' },
            { label: projectName || '' },
          ]}
        />
        <div className="project-settings-error">Project not found</div>
      </div>
    );
  }

  return (
    <div className="project-settings-page">
      <Breadcrumb
        items={[
          { label: 'Projects', href: '/' },
          { label: project.name, href: `/project/${project.name}` },
          { label: 'Settings' },
        ]}
      />

      <div className="project-settings-header">
        <h1>Project Settings</h1>
        <p className="project-settings-subtitle">Manage settings for {project.name}</p>
      </div>

      {error && <div className="project-settings-error">{error}</div>}
      {success && <div className="project-settings-success">{success}</div>}

      {/* General Settings Section */}
      <div className="project-settings-section">
        <h2>General</h2>
        <form className="project-settings-form" onSubmit={handleSaveSettings}>
          <div className="project-settings-form-group">
            <label htmlFor="description">Description</label>
            <textarea
              id="description"
              value={description}
              onChange={(e) => setDescription(e.target.value)}
              placeholder="Describe your project..."
              disabled={saving}
            />
          </div>

          <div className="project-settings-form-group project-settings-checkbox-group">
            <label className="project-settings-checkbox-label">
              <input
                type="checkbox"
                checked={isPublic}
                onChange={(e) => setIsPublic(e.target.checked)}
                disabled={saving}
              />
              <span className="project-settings-checkbox-custom" />
              <span>Public project (visible to everyone)</span>
            </label>
          </div>

          <div className="project-settings-form-actions">
            <button type="submit" className="project-settings-save-button" disabled={saving}>
              {saving ? (
                <>
                  <span className="project-settings-button-spinner" />
                  Saving...
                </>
              ) : (
                'Save Changes'
              )}
            </button>
          </div>
        </form>
      </div>

      {/* Danger Zone Section */}
      <div className="project-settings-danger-zone">
        <h2>Danger Zone</h2>
        <div className="project-settings-danger-item">
          <div className="project-settings-danger-info">
            <h3>Delete this project</h3>
            <p>
              Once you delete a project, there is no going back. This will permanently delete the
              project, all packages, artifacts, and tags.
            </p>
          </div>
          {!showDeleteConfirm && (
            <button
              className="project-settings-delete-button"
              onClick={() => setShowDeleteConfirm(true)}
              disabled={deleting}
            >
              Delete Project
            </button>
          )}
        </div>

        {showDeleteConfirm && (
          <div className="project-settings-delete-confirm">
            <p>
              Type <strong>{project.name}</strong> to confirm deletion:
            </p>
            <input
              type="text"
              className="project-settings-delete-confirm-input"
              value={deleteConfirmText}
              onChange={(e) => setDeleteConfirmText(e.target.value)}
              placeholder={project.name}
              disabled={deleting}
              autoFocus
            />
            <div className="project-settings-delete-confirm-actions">
              <button
                className="project-settings-confirm-delete-button"
                onClick={handleDeleteProject}
                disabled={deleting || deleteConfirmText !== project.name}
              >
                {deleting ? (
                  <>
                    <span className="project-settings-delete-spinner" />
                    Deleting...
                  </>
                ) : (
                  'Yes, delete this project'
                )}
              </button>
              <button
                className="project-settings-cancel-button"
                onClick={handleCancelDelete}
                disabled={deleting}
              >
                Cancel
              </button>
            </div>
          </div>
        )}
      </div>
    </div>
  );
}

export default ProjectSettingsPage;
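Note: the new pages only reference each other through Link and navigate() targets; the router wiring itself is not part of this commit range. A minimal sketch of the routes these pages appear to assume (only the paths are taken from the code above; the component and file layout here are hypothetical):

// Hypothetical route registration — a sketch, not part of the diff.
// Paths come from the navigate()/Link targets in the new pages.
import { Routes, Route } from 'react-router-dom';
import ProjectSettingsPage from './pages/ProjectSettingsPage';
import TeamDashboardPage from './pages/TeamDashboardPage';
import TeamMembersPage from './pages/TeamMembersPage';
import TeamSettingsPage from './pages/TeamSettingsPage';

function AppRoutes() {
  return (
    <Routes>
      <Route path="/project/:projectName/settings" element={<ProjectSettingsPage />} />
      <Route path="/teams/:slug" element={<TeamDashboardPage />} />
      <Route path="/teams/:slug/members" element={<TeamMembersPage />} />
      <Route path="/teams/:slug/settings" element={<TeamSettingsPage />} />
    </Routes>
  );
}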
270 frontend/src/pages/TeamDashboardPage.css Normal file
@@ -0,0 +1,270 @@
.team-dashboard {
  padding: 1.5rem 0;
}

.team-header {
  display: flex;
  justify-content: space-between;
  align-items: flex-start;
  gap: 1.5rem;
  margin-bottom: 2rem;
}

.team-header-left {
  flex: 1;
}

.team-header-title {
  display: flex;
  align-items: center;
  gap: 0.75rem;
  margin-bottom: 0.5rem;
}

.team-header h1 {
  margin: 0;
  font-size: 1.5rem;
  font-weight: 600;
}

.team-slug {
  font-size: 0.875rem;
  color: var(--text-muted);
}

.team-description {
  margin: 0 0 0.5rem;
  color: var(--text-secondary);
  font-size: 0.9375rem;
  max-width: 600px;
}

.team-header-actions {
  display: flex;
  gap: 0.5rem;
  flex-shrink: 0;
}

.team-section {
  margin-top: 2rem;
}

.section-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 1rem;
}

.section-header h2 {
  margin: 0;
  font-size: 1.25rem;
}

/* Table utility classes */
.text-muted {
  color: var(--text-muted);
}

.btn-ghost {
  background: transparent;
  color: var(--text-muted);
  border: none;
  padding: 0.375rem;
  cursor: pointer;
  border-radius: var(--radius-sm);
}

.btn-ghost:hover {
  background: var(--bg-tertiary);
  color: var(--text-primary);
}

.section-footer {
  margin-top: 1rem;
  text-align: center;
}

.view-all-link {
  font-size: 0.875rem;
  color: var(--accent-primary);
  text-decoration: none;
}

.view-all-link:hover {
  text-decoration: underline;
}

/* States */
.loading-state,
.error-state {
  text-align: center;
  padding: 4rem 2rem;
}

.error-state h2 {
  margin: 0 0 0.5rem;
}

.error-state p {
  margin: 0 0 1.5rem;
  color: var(--text-muted);
}

.empty-state {
  text-align: center;
  padding: 2rem;
  background: var(--bg-secondary);
  border: 1px dashed var(--border-primary);
  border-radius: var(--radius-md);
  color: var(--text-muted);
}

.empty-state p {
  margin: 0;
}

.empty-hint {
  margin-top: 0.5rem !important;
  font-size: 0.875rem;
}

/* Buttons */
.btn {
  display: inline-flex;
  align-items: center;
  gap: 0.5rem;
  padding: 0.5rem 1rem;
  border: none;
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  cursor: pointer;
  text-decoration: none;
  transition: all 0.15s ease;
}

.btn-sm {
  padding: 0.375rem 0.75rem;
  font-size: 0.8125rem;
}

.btn-primary {
  background: var(--accent-primary);
  color: white;
}

.btn-primary:hover {
  background: var(--accent-primary-hover);
}

.btn-secondary {
  background: var(--bg-tertiary);
  color: var(--text-primary);
  border: 1px solid var(--border-primary);
}

.btn-secondary:hover {
  background: var(--bg-hover);
}

/* Modal */
.modal-overlay {
  position: fixed;
  inset: 0;
  background: rgba(0, 0, 0, 0.7);
  display: flex;
  align-items: center;
  justify-content: center;
  z-index: 1000;
  padding: 1rem;
}

.modal-content {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  padding: 1.5rem;
  width: 100%;
  max-width: 480px;
  max-height: 90vh;
  box-shadow: var(--shadow-lg);
  overflow-y: auto;
}

.modal-content h2 {
  margin: 0 0 1.5rem;
  font-size: 1.25rem;
  color: var(--text-primary);
}

/* Form */
.form-group {
  margin-bottom: 1rem;
}

.form-group label {
  display: block;
  margin-bottom: 0.5rem;
  font-weight: 500;
  font-size: 0.875rem;
  color: var(--text-primary);
}

.form-group input[type="text"],
.form-group textarea {
  width: 100%;
  padding: 0.625rem 0.75rem;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  background: var(--bg-tertiary);
  color: var(--text-primary);
  font-size: 0.875rem;
}

.form-group input:focus,
.form-group textarea:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.2);
}

.form-group textarea {
  resize: vertical;
  min-height: 80px;
}

.checkbox-group label {
  display: flex;
  align-items: center;
  gap: 0.5rem;
  cursor: pointer;
}

.checkbox-group input[type="checkbox"] {
  width: 1rem;
  height: 1rem;
}

.form-hint {
  display: block;
  font-size: 0.8125rem;
  color: var(--text-muted);
  margin-top: 0.375rem;
}

.form-actions {
  display: flex;
  justify-content: flex-end;
  gap: 0.75rem;
  margin-top: 1.5rem;
}

.btn:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

.empty-state .btn {
  margin-top: 1rem;
}
279 frontend/src/pages/TeamDashboardPage.tsx Normal file
@@ -0,0 +1,279 @@
import { useState, useEffect, useCallback } from 'react';
import { Link, useParams, useNavigate } from 'react-router-dom';
import { TeamDetail, Project, PaginatedResponse } from '../types';
import { getTeam, listTeamProjects, createProject } from '../api';
import { useAuth } from '../contexts/AuthContext';
import { Badge } from '../components/Badge';
import { Breadcrumb } from '../components/Breadcrumb';
import { DataTable } from '../components/DataTable';
import './TeamDashboardPage.css';

function TeamDashboardPage() {
  const { slug } = useParams<{ slug: string }>();
  const navigate = useNavigate();
  const { user } = useAuth();
  const [team, setTeam] = useState<TeamDetail | null>(null);
  const [projects, setProjects] = useState<PaginatedResponse<Project> | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const [showProjectForm, setShowProjectForm] = useState(false);
  const [newProject, setNewProject] = useState({ name: '', description: '', is_public: true });
  const [creating, setCreating] = useState(false);

  const loadTeamData = useCallback(async () => {
    if (!slug) return;
    try {
      setLoading(true);
      const [teamData, projectsData] = await Promise.all([
        getTeam(slug),
        listTeamProjects(slug, { limit: 10 }),
      ]);
      setTeam(teamData);
      setProjects(projectsData);
      setError(null);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load team');
    } finally {
      setLoading(false);
    }
  }, [slug]);

  useEffect(() => {
    loadTeamData();
  }, [loadTeamData]);

  async function handleCreateProject(e: React.FormEvent) {
    e.preventDefault();
    if (!team) return;
    try {
      setCreating(true);
      const project = await createProject({ ...newProject, team_id: team.id });
      setNewProject({ name: '', description: '', is_public: true });
      setShowProjectForm(false);
      navigate(`/project/${project.name}`);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to create project');
    } finally {
      setCreating(false);
    }
  }

  if (loading) {
    return (
      <div className="team-dashboard">
        <div className="loading-state">Loading team...</div>
      </div>
    );
  }

  if (error || !team) {
    return (
      <div className="team-dashboard">
        <div className="error-state">
          <h2>Error loading team</h2>
          <p>{error || 'Team not found'}</p>
          <Link to="/teams" className="btn btn-primary">Back to Teams</Link>
        </div>
      </div>
    );
  }

  const isAdminOrOwner = team.user_role === 'owner' || team.user_role === 'admin' || user?.is_admin;

  const roleVariants: Record<string, 'success' | 'info' | 'default'> = {
    owner: 'success',
    admin: 'info',
    member: 'default',
  };

  return (
    <div className="team-dashboard">
      <Breadcrumb
        items={[
          { label: 'Teams', href: '/teams' },
          { label: team.name },
        ]}
      />

      <div className="team-header">
        <div className="team-header-left">
          <div className="team-header-title">
            <h1>{team.name}</h1>
            {team.user_role && (
              <Badge variant={roleVariants[team.user_role] || 'default'}>
                {team.user_role}
              </Badge>
            )}
            <span className="team-slug">@{team.slug}</span>
          </div>
          {team.description && (
            <p className="team-description">{team.description}</p>
          )}
        </div>
        {isAdminOrOwner && (
          <div className="team-header-actions">
            <Link to={`/teams/${slug}/members`} className="btn btn-secondary">
              <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
                <circle cx="9" cy="7" r="4"/>
                <path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
                <path d="M16 3.13a4 4 0 0 1 0 7.75"/>
              </svg>
              Members
            </Link>
            <Link to={`/teams/${slug}/settings`} className="btn btn-secondary">
              <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <circle cx="12" cy="12" r="3"/>
                <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"/>
              </svg>
              Settings
            </Link>
          </div>
        )}
      </div>

      {showProjectForm && (
        <div className="modal-overlay" onClick={() => setShowProjectForm(false)}>
          <div className="modal-content" onClick={e => e.stopPropagation()}>
            <h2>Create New Project</h2>
            <form onSubmit={handleCreateProject}>
              <div className="form-group">
                <label htmlFor="project-name">Project Name</label>
                <input
                  id="project-name"
                  type="text"
                  value={newProject.name}
                  onChange={e => setNewProject({ ...newProject, name: e.target.value })}
                  placeholder="my-project"
                  required
                  autoFocus
                />
              </div>
              <div className="form-group">
                <label htmlFor="project-description">Description (optional)</label>
                <textarea
                  id="project-description"
                  value={newProject.description}
                  onChange={e => setNewProject({ ...newProject, description: e.target.value })}
                  placeholder="What is this project for?"
                  rows={3}
                />
              </div>
              <div className="form-group checkbox-group">
                <label>
                  <input
                    type="checkbox"
                    checked={newProject.is_public}
                    onChange={e => setNewProject({ ...newProject, is_public: e.target.checked })}
                  />
                  Public project
                </label>
                <span className="form-hint">Public projects are visible to everyone</span>
              </div>
              <div className="form-actions">
                <button type="button" className="btn btn-secondary" onClick={() => setShowProjectForm(false)}>
                  Cancel
                </button>
                <button type="submit" className="btn btn-primary" disabled={creating}>
                  {creating ? 'Creating...' : 'Create Project'}
                </button>
              </div>
            </form>
          </div>
        </div>
      )}

      <div className="team-section">
        <div className="section-header">
          <h2>Projects</h2>
          {isAdminOrOwner && (
            <button className="btn btn-primary btn-sm" onClick={() => setShowProjectForm(true)}>
              + New Project
            </button>
          )}
        </div>

        {projects?.items.length === 0 ? (
          <div className="empty-state">
            <p>No projects in this team yet.</p>
            {isAdminOrOwner && (
              <button className="btn btn-primary" onClick={() => setShowProjectForm(true)}>
                Create Project
              </button>
            )}
          </div>
        ) : (
          <DataTable
            data={projects?.items || []}
            keyExtractor={(project) => project.id}
            onRowClick={(project) => navigate(`/project/${project.name}`)}
            columns={[
              {
                key: 'name',
                header: 'Name',
                render: (project) => (
                  <Link
                    to={`/project/${project.name}`}
                    className="cell-name"
                    onClick={(e) => e.stopPropagation()}
                  >
                    {project.name}
                  </Link>
                ),
              },
              {
                key: 'description',
                header: 'Description',
                className: 'cell-description',
                render: (project) => project.description || <span className="text-muted">—</span>,
              },
              {
                key: 'visibility',
                header: 'Visibility',
                render: (project) => (
                  <Badge variant={project.is_public ? 'public' : 'private'}>
                    {project.is_public ? 'Public' : 'Private'}
                  </Badge>
                ),
              },
              {
                key: 'created_by',
                header: 'Created By',
                render: (project) => <span className="text-muted">{project.created_by}</span>,
              },
              ...(isAdminOrOwner ? [{
                key: 'actions',
                header: '',
                render: (project: Project) => (
                  <button
                    className="btn btn-sm btn-ghost"
                    onClick={(e) => {
                      e.stopPropagation();
                      navigate(`/project/${project.name}/settings`);
                    }}
                    title="Settings"
                  >
                    <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                      <circle cx="12" cy="12" r="3"/>
                      <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"/>
                    </svg>
                  </button>
                ),
              }] : []),
            ]}
          />
        )}

        {projects && projects.pagination.total > 10 && (
          <div className="section-footer">
            <Link to={`/teams/${slug}/projects`} className="view-all-link">
              View all {projects.pagination.total} projects
            </Link>
          </div>
        )}
      </div>
    </div>
  );
}

export default TeamDashboardPage;
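Note: the admin-only actions column above is appended with a conditional array spread rather than a nullable entry. A minimal sketch of the pattern with a simplified column type (the real DataTable column props are not shown in this diff):

// Sketch only — Column here is a simplified stand-in for DataTable's column prop.
import type { ReactNode } from 'react';

interface Column<T> {
  key: string;
  header: string;
  render: (row: T) => ReactNode;
}

// Spreading a conditional singleton array keeps the result a flat Column<T>[]
// with no null holes when the caller lacks permission.
function withOptionalColumn<T>(base: Column<T>[], extra?: Column<T>): Column<T>[] {
  return [...base, ...(extra ? [extra] : [])];
}

This keeps the columns prop type stable, which is why both TeamDashboardPage and TeamMembersPage use `...(cond ? [{...}] : [])` inline.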
247 frontend/src/pages/TeamMembersPage.css Normal file
@@ -0,0 +1,247 @@
.team-members {
  padding: 1.5rem 0;
  max-width: 800px;
  margin: 0 auto;
}

.page-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 1.5rem;
  gap: 1rem;
}

.page-header h1 {
  margin: 0;
  font-size: 1.75rem;
}

/* Member cell in table */
.member-cell {
  display: flex;
  align-items: center;
  gap: 0.75rem;
}

.member-avatar {
  width: 40px;
  height: 40px;
  border-radius: 50%;
  background: var(--accent-primary);
  color: white;
  display: flex;
  align-items: center;
  justify-content: center;
  font-weight: 600;
  font-size: 1rem;
  flex-shrink: 0;
}

.member-details {
  display: flex;
  flex-direction: column;
  min-width: 0;
}

.member-username {
  font-weight: 500;
  display: flex;
  align-items: center;
  gap: 0.5rem;
}

.you-badge {
  font-size: 0.75rem;
  font-weight: normal;
  color: var(--text-muted);
}

.member-email {
  font-size: 0.8125rem;
  color: var(--text-muted);
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

.text-muted {
  color: var(--text-muted);
}

.role-select {
  padding: 0.375rem 0.75rem;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  background: var(--bg-tertiary);
  color: var(--text-primary);
  cursor: pointer;
}

.role-select:focus {
  outline: none;
  border-color: var(--accent-primary);
}

/* Messages */
.error-message {
  display: flex;
  align-items: center;
  justify-content: space-between;
  padding: 0.75rem 1rem;
  margin-bottom: 1rem;
  background: var(--error-bg);
  border: 1px solid var(--error);
  border-radius: var(--radius-md);
  color: var(--error);
  font-size: 0.875rem;
}

.error-dismiss {
  background: none;
  border: none;
  font-size: 1.25rem;
  cursor: pointer;
  color: inherit;
  padding: 0;
  line-height: 1;
}

/* States */
.loading-state,
.error-state {
  text-align: center;
  padding: 4rem 2rem;
}

.error-state h2 {
  margin: 0 0 0.5rem;
}

.error-state p {
  margin: 0 0 1.5rem;
  color: var(--text-muted);
}

/* Modal */
.modal-overlay {
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background: rgba(0, 0, 0, 0.7);
  display: flex;
  align-items: center;
  justify-content: center;
  z-index: 1000;
  padding: 1rem;
}

.modal-content {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  padding: 1.5rem;
  width: 100%;
  max-width: 400px;
  box-shadow: var(--shadow-lg);
}

.modal-content h2 {
  margin: 0 0 1.5rem;
  font-size: 1.25rem;
  color: var(--text-primary);
}

/* Form */
.form-group {
  margin-bottom: 1rem;
}

.form-group label {
  display: block;
  margin-bottom: 0.375rem;
  font-weight: 500;
  font-size: 0.875rem;
  color: var(--text-primary);
}

.form-group input,
.form-group select {
  width: 100%;
  padding: 0.5rem 0.75rem;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.9375rem;
  background: var(--bg-tertiary);
  color: var(--text-primary);
}

.form-group input:focus,
.form-group select:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 2px rgba(16, 185, 129, 0.2);
}

.form-actions {
  display: flex;
  justify-content: flex-end;
  gap: 0.75rem;
  margin-top: 1.5rem;
}

/* Buttons */
.btn {
  display: inline-flex;
  align-items: center;
  gap: 0.5rem;
  padding: 0.5rem 1rem;
  border: none;
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  cursor: pointer;
  text-decoration: none;
  transition: all 0.15s ease;
}

.btn:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

.btn-primary {
  background: var(--accent-primary);
  color: white;
}

.btn-primary:hover:not(:disabled) {
  background: var(--accent-primary-hover);
}

.btn-secondary {
  background: var(--bg-tertiary);
  color: var(--text-primary);
  border: 1px solid var(--border-primary);
}

.btn-secondary:hover:not(:disabled) {
  background: var(--bg-hover);
}

.btn-icon {
  padding: 0.375rem;
}

.btn-danger-ghost {
  background: transparent;
  color: var(--text-muted);
}

.btn-danger-ghost:hover:not(:disabled) {
  background: var(--error-bg);
  color: var(--error);
}
311 frontend/src/pages/TeamMembersPage.tsx Normal file
@@ -0,0 +1,311 @@
import { useState, useEffect, useCallback } from 'react';
import { useParams, Link } from 'react-router-dom';
import { TeamDetail, TeamMember, TeamMemberCreate, TeamRole } from '../types';
import {
  getTeam,
  listTeamMembers,
  addTeamMember,
  updateTeamMember,
  removeTeamMember,
} from '../api';
import { useAuth } from '../contexts/AuthContext';
import { Badge } from '../components/Badge';
import { Breadcrumb } from '../components/Breadcrumb';
import { DataTable } from '../components/DataTable';
import { UserAutocomplete } from '../components/UserAutocomplete';
import './TeamMembersPage.css';

function TeamMembersPage() {
  const { slug } = useParams<{ slug: string }>();
  const { user } = useAuth();
  const [team, setTeam] = useState<TeamDetail | null>(null);
  const [members, setMembers] = useState<TeamMember[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const [showAddForm, setShowAddForm] = useState(false);
  const [adding, setAdding] = useState(false);
  const [newMember, setNewMember] = useState<TeamMemberCreate>({ username: '', role: 'member' });
  const [editingMember, setEditingMember] = useState<string | null>(null);
  const [removingMember, setRemovingMember] = useState<string | null>(null);

  const loadData = useCallback(async () => {
    if (!slug) return;
    try {
      setLoading(true);
      const [teamData, membersData] = await Promise.all([
        getTeam(slug),
        listTeamMembers(slug),
      ]);
      setTeam(teamData);
      setMembers(membersData);
      setError(null);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load team');
    } finally {
      setLoading(false);
    }
  }, [slug]);

  useEffect(() => {
    loadData();
  }, [loadData]);

  async function handleAddMember(e: React.FormEvent) {
    e.preventDefault();
    if (!slug) return;
    try {
      setAdding(true);
      setError(null);
      await addTeamMember(slug, newMember);
      setNewMember({ username: '', role: 'member' });
      setShowAddForm(false);
      loadData();
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to add member');
    } finally {
      setAdding(false);
    }
  }

  async function handleRoleChange(username: string, newRole: TeamRole) {
    if (!slug) return;
    try {
      setEditingMember(username);
      setError(null);
      await updateTeamMember(slug, username, { role: newRole });
      loadData();
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to update member');
    } finally {
      setEditingMember(null);
    }
  }

  async function handleRemoveMember(username: string) {
    if (!slug) return;
    if (!confirm(`Remove ${username} from the team?`)) return;
    try {
      setRemovingMember(username);
      setError(null);
      await removeTeamMember(slug, username);
      loadData();
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to remove member');
    } finally {
      setRemovingMember(null);
    }
  }

  if (loading) {
    return (
      <div className="team-members">
        <div className="loading-state">Loading team members...</div>
      </div>
    );
  }

  if (error && !team) {
    return (
      <div className="team-members">
        <div className="error-state">
          <h2>Error loading team</h2>
          <p>{error}</p>
          <Link to="/teams" className="btn btn-primary">Back to Teams</Link>
        </div>
      </div>
    );
  }

  if (!team) return null;

  const isOwner = team.user_role === 'owner' || user?.is_admin;
  const isAdmin = team.user_role === 'admin' || isOwner;

  const roleVariants: Record<string, 'success' | 'info' | 'default'> = {
    owner: 'success',
    admin: 'info',
    member: 'default',
  };

  const roles: TeamRole[] = ['owner', 'admin', 'member'];

  return (
    <div className="team-members">
      <Breadcrumb
        items={[
          { label: 'Teams', href: '/teams' },
          { label: team.name, href: `/teams/${slug}` },
          { label: 'Members' },
        ]}
      />

      <div className="page-header">
        <h1>Team Members</h1>
        {isAdmin && (
          <button className="btn btn-primary" onClick={() => setShowAddForm(true)}>
            <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
              <path d="M16 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
              <circle cx="8.5" cy="7" r="4"/>
              <line x1="20" y1="8" x2="20" y2="14"/>
              <line x1="23" y1="11" x2="17" y2="11"/>
            </svg>
            Invite Member
          </button>
        )}
      </div>

      {error && (
        <div className="error-message">
          {error}
          <button onClick={() => setError(null)} className="error-dismiss">×</button>
        </div>
      )}

      {showAddForm && (
        <div className="modal-overlay" onClick={() => setShowAddForm(false)}>
          <div className="modal-content" onClick={e => e.stopPropagation()}>
            <h2>Invite Member</h2>
            <form onSubmit={handleAddMember}>
              <div className="form-group">
                <label htmlFor="username">Username</label>
                <UserAutocomplete
                  value={newMember.username}
                  onChange={(username) => setNewMember({ ...newMember, username })}
                  placeholder="Search for a user..."
                  autoFocus
                />
              </div>
              <div className="form-group">
                <label htmlFor="role">Role</label>
                <select
                  id="role"
                  value={newMember.role}
                  onChange={e => setNewMember({ ...newMember, role: e.target.value as TeamRole })}
                >
                  <option value="member">Member - Can view team projects</option>
                  <option value="admin">Admin - Can manage team settings and members</option>
                  {isOwner && (
                    <option value="owner">Owner - Full control, can delete team</option>
                  )}
                </select>
              </div>
              <div className="form-actions">
                <button type="button" className="btn btn-secondary" onClick={() => setShowAddForm(false)}>
                  Cancel
                </button>
                <button type="submit" className="btn btn-primary" disabled={adding}>
                  {adding ? 'Adding...' : 'Add Member'}
                </button>
              </div>
            </form>
          </div>
        </div>
      )}

      <DataTable
        data={members}
        keyExtractor={(member) => member.id}
        emptyMessage="No members in this team yet."
        columns={[
          {
            key: 'member',
            header: 'Member',
            render: (member) => {
              const isCurrentUser = user?.username === member.username;
              return (
                <div className="member-cell">
                  <div className="member-avatar">
                    {member.username.charAt(0).toUpperCase()}
                  </div>
                  <div className="member-details">
                    <span className="member-username">
                      {member.username}
                      {isCurrentUser && <span className="you-badge">(you)</span>}
                    </span>
                    {member.email && (
                      <span className="member-email">{member.email}</span>
                    )}
                  </div>
                </div>
              );
            },
          },
          {
            key: 'role',
            header: 'Role',
            render: (member) => {
              const isCurrentUser = user?.username === member.username;
              const canModify = isAdmin && !isCurrentUser && (isOwner || member.role !== 'owner');

              if (canModify) {
                return (
                  <select
                    value={member.role}
                    onChange={e => handleRoleChange(member.username, e.target.value as TeamRole)}
                    disabled={editingMember === member.username}
                    className="role-select"
                    onClick={e => e.stopPropagation()}
                  >
                    {roles.map(role => (
                      <option
                        key={role}
                        value={role}
                        disabled={role === 'owner' && !isOwner}
                      >
                        {role.charAt(0).toUpperCase() + role.slice(1)}
                      </option>
                    ))}
                  </select>
                );
              }
              return (
                <Badge variant={roleVariants[member.role] || 'default'}>
                  {member.role}
                </Badge>
              );
            },
          },
          {
            key: 'joined',
            header: 'Joined',
            render: (member) => (
              <span className="text-muted">
                {new Date(member.created_at).toLocaleDateString()}
              </span>
            ),
          },
          ...(isAdmin ? [{
            key: 'actions',
            header: '',
            render: (member: TeamMember) => {
              const isCurrentUser = user?.username === member.username;
              const canModify = isAdmin && !isCurrentUser && (isOwner || member.role !== 'owner');

              if (!canModify) return null;

              return (
                <button
                  className="btn btn-icon btn-danger-ghost"
                  onClick={(e) => {
                    e.stopPropagation();
                    handleRemoveMember(member.username);
                  }}
                  disabled={removingMember === member.username}
                  title="Remove member"
                >
                  <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                    <path d="M3 6h18"/>
                    <path d="M19 6v14a2 2 0 0 1-2 2H7a2 2 0 0 1-2-2V6"/>
                    <path d="M8 6V4a2 2 0 0 1 2-2h4a2 2 0 0 1 2 2v2"/>
                  </svg>
                </button>
              );
            },
          }] : []),
        ]}
      />
    </div>
  );
}

export default TeamMembersPage;
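Note: the same permission rule is evaluated in both the Role column and the actions column above. A sketch of that rule extracted as a pure helper, with types simplified from this diff (the real TeamMember/TeamRole definitions live in ../types):

// Sketch only — MemberLike is a narrowed stand-in for TeamMember.
type TeamRole = 'owner' | 'admin' | 'member';

interface MemberLike {
  username: string;
  role: TeamRole;
}

// An admin may modify anyone except themselves and, unless they are also an
// owner, any member who holds the owner role.
function canModifyMember(
  member: MemberLike,
  currentUsername: string | undefined,
  isAdmin: boolean,
  isOwner: boolean,
): boolean {
  const isCurrentUser = currentUsername === member.username;
  return isAdmin && !isCurrentUser && (isOwner || member.role !== 'owner');
}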
239 frontend/src/pages/TeamSettingsPage.css Normal file
@@ -0,0 +1,239 @@
.team-settings {
  padding: 1.5rem 0;
  max-width: 640px;
  margin: 0 auto;
}

.team-settings h1 {
  margin: 0 0 1.5rem;
  font-size: 1.75rem;
}

.settings-form {
  margin-bottom: 2rem;
}

.form-section {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  padding: 1.5rem;
  margin-bottom: 1.5rem;
}

.form-section h2 {
  margin: 0 0 1rem;
  font-size: 1.125rem;
  color: var(--text-primary);
}

.form-group {
  margin-bottom: 1rem;
}

.form-group label {
  display: block;
  margin-bottom: 0.375rem;
  font-weight: 500;
  font-size: 0.875rem;
  color: var(--text-primary);
}

.form-group input,
.form-group textarea {
  width: 100%;
  padding: 0.5rem 0.75rem;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.9375rem;
  background: var(--bg-tertiary);
  color: var(--text-primary);
}

.form-group input:focus,
.form-group textarea:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 2px rgba(16, 185, 129, 0.2);
}

.input-disabled {
  background: var(--bg-elevated) !important;
  color: var(--text-muted) !important;
  cursor: not-allowed;
}

.form-hint {
  display: block;
  margin-top: 0.25rem;
  font-size: 0.8125rem;
  color: var(--text-muted);
}

/* Danger zone */
.danger-zone {
  border-color: var(--error);
  background: var(--error-bg);
}

.danger-zone h2 {
  color: var(--error);
}

.danger-warning {
  margin: 0 0 1rem;
  font-size: 0.875rem;
  color: var(--text-secondary);
}

/* Messages */
.error-message {
  display: flex;
  align-items: center;
  justify-content: space-between;
  padding: 0.75rem 1rem;
  margin-bottom: 1rem;
  background: var(--error-bg);
  border: 1px solid var(--error);
  border-radius: var(--radius-md);
  color: var(--error);
  font-size: 0.875rem;
}

.error-dismiss {
  background: none;
  border: none;
  font-size: 1.25rem;
  cursor: pointer;
  color: inherit;
  padding: 0;
  line-height: 1;
}

.success-message {
  padding: 0.75rem 1rem;
  margin-bottom: 1rem;
  background: var(--success-bg);
  border: 1px solid var(--success);
  border-radius: var(--radius-md);
  color: var(--success);
  font-size: 0.875rem;
}

/* States */
.loading-state,
.error-state {
  text-align: center;
  padding: 4rem 2rem;
}

.error-state h2 {
  margin: 0 0 0.5rem;
}

.error-state p {
  margin: 0 0 1.5rem;
  color: var(--text-muted);
}

/* Modal */
.modal-overlay {
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background: rgba(0, 0, 0, 0.7);
  display: flex;
  align-items: center;
  justify-content: center;
  z-index: 1000;
  padding: 1rem;
}

.modal-content {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  padding: 1.5rem;
  width: 100%;
  max-width: 400px;
  box-shadow: var(--shadow-lg);
}

.modal-content h2 {
  margin: 0 0 1rem;
  font-size: 1.25rem;
  color: var(--error);
}

.modal-content p {
  margin: 0 0 1rem;
  font-size: 0.9375rem;
  color: var(--text-secondary);
}

.delete-confirm-input {
  width: 100%;
  padding: 0.5rem 0.75rem;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.9375rem;
  margin-bottom: 1rem;
  background: var(--bg-tertiary);
  color: var(--text-primary);
}

.form-actions {
  display: flex;
  justify-content: flex-end;
  gap: 0.75rem;
}

/* Buttons */
.btn {
  display: inline-flex;
  align-items: center;
  gap: 0.5rem;
  padding: 0.5rem 1rem;
  border: none;
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  cursor: pointer;
  text-decoration: none;
  transition: all 0.15s ease;
}

.btn:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

.btn-primary {
  background: var(--accent-primary);
  color: white;
}

.btn-primary:hover:not(:disabled) {
  background: var(--accent-primary-hover);
}

.btn-secondary {
  background: var(--bg-tertiary);
  color: var(--text-primary);
  border: 1px solid var(--border-primary);
}

.btn-secondary:hover:not(:disabled) {
  background: var(--bg-hover);
}

.btn-danger {
  background: var(--error);
  color: white;
}

.btn-danger:hover:not(:disabled) {
  background: #b91c1c;
}
251 frontend/src/pages/TeamSettingsPage.tsx Normal file
@@ -0,0 +1,251 @@
import { useState, useEffect, useCallback } from 'react';
import { useParams, useNavigate, Link } from 'react-router-dom';
import { TeamDetail, TeamUpdate } from '../types';
import { getTeam, updateTeam, deleteTeam } from '../api';
import { useAuth } from '../contexts/AuthContext';
import { Breadcrumb } from '../components/Breadcrumb';
import './TeamSettingsPage.css';

function TeamSettingsPage() {
  const { slug } = useParams<{ slug: string }>();
  const navigate = useNavigate();
  const { user } = useAuth();
  const [team, setTeam] = useState<TeamDetail | null>(null);
  const [loading, setLoading] = useState(true);
  const [saving, setSaving] = useState(false);
  const [deleting, setDeleting] = useState(false);
  const [error, setError] = useState<string | null>(null);
  const [successMessage, setSuccessMessage] = useState<string | null>(null);
  const [showDeleteConfirm, setShowDeleteConfirm] = useState(false);
  const [deleteConfirmText, setDeleteConfirmText] = useState('');

  const [formData, setFormData] = useState<TeamUpdate>({
    name: '',
    description: '',
  });

  const loadTeam = useCallback(async () => {
    if (!slug) return;
    try {
      setLoading(true);
      const teamData = await getTeam(slug);
      setTeam(teamData);
      setFormData({
        name: teamData.name,
        description: teamData.description || '',
      });
      setError(null);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load team');
    } finally {
      setLoading(false);
    }
  }, [slug]);

  useEffect(() => {
    loadTeam();
  }, [loadTeam]);

  async function handleSubmit(e: React.FormEvent) {
    e.preventDefault();
    if (!slug || !team) return;

    try {
      setSaving(true);
      setError(null);
      const updatedTeam = await updateTeam(slug, formData);
      setTeam(updatedTeam);
      setSuccessMessage('Settings saved successfully');
      setTimeout(() => setSuccessMessage(null), 3000);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to save settings');
    } finally {
      setSaving(false);
    }
  }

  async function handleDelete() {
    if (!slug || !team) return;
    if (deleteConfirmText !== team.slug) return;

    try {
      setDeleting(true);
      await deleteTeam(slug);
      navigate('/teams');
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to delete team');
      setShowDeleteConfirm(false);
    } finally {
      setDeleting(false);
    }
  }

  if (loading) {
    return (
      <div className="team-settings">
        <div className="loading-state">Loading team settings...</div>
      </div>
    );
  }

  if (error && !team) {
    return (
      <div className="team-settings">
        <div className="error-state">
          <h2>Error loading team</h2>
          <p>{error}</p>
          <Link to="/teams" className="btn btn-primary">Back to Teams</Link>
        </div>
      </div>
    );
  }

  if (!team) return null;

  const isOwner = team.user_role === 'owner' || user?.is_admin;
  const isAdmin = team.user_role === 'admin' || isOwner;

  if (!isAdmin) {
    return (
      <div className="team-settings">
        <div className="error-state">
          <h2>Access Denied</h2>
          <p>You need admin privileges to access team settings.</p>
          <Link to={`/teams/${slug}`} className="btn btn-primary">Back to Team</Link>
        </div>
      </div>
    );
  }

  return (
    <div className="team-settings">
      <Breadcrumb
        items={[
          { label: 'Teams', href: '/teams' },
          { label: team.name, href: `/teams/${slug}` },
          { label: 'Settings' },
        ]}
      />

      <h1>Team Settings</h1>

      {error && (
        <div className="error-message">
          {error}
          <button onClick={() => setError(null)} className="error-dismiss">×</button>
        </div>
      )}

      {successMessage && (
        <div className="success-message">
          {successMessage}
        </div>
      )}

      <form onSubmit={handleSubmit} className="settings-form">
        <div className="form-section">
          <h2>General</h2>

          <div className="form-group">
            <label htmlFor="team-name">Team Name</label>
            <input
              id="team-name"
              type="text"
              value={formData.name}
              onChange={e => setFormData({ ...formData, name: e.target.value })}
              required
            />
          </div>

          <div className="form-group">
            <label htmlFor="team-slug">Slug</label>
            <input
              id="team-slug"
              type="text"
              value={team.slug}
              disabled
              className="input-disabled"
            />
            <span className="form-hint">Team slug cannot be changed</span>
          </div>

          <div className="form-group">
            <label htmlFor="team-description">Description</label>
            <textarea
              id="team-description"
              value={formData.description}
              onChange={e => setFormData({ ...formData, description: e.target.value })}
              rows={3}
              placeholder="What is this team for?"
            />
          </div>

          <button type="submit" className="btn btn-primary" disabled={saving}>
            {saving ? 'Saving...' : 'Save Changes'}
          </button>
        </div>
      </form>

      {isOwner && (
        <div className="form-section danger-zone">
          <h2>Danger Zone</h2>
          <p className="danger-warning">
            Deleting a team is permanent and cannot be undone.
            You must move or delete all projects in this team first.
          </p>
          <button
            type="button"
            className="btn btn-danger"
            onClick={() => setShowDeleteConfirm(true)}
          >
            Delete Team
          </button>
        </div>
      )}

      {showDeleteConfirm && (
        <div className="modal-overlay" onClick={() => setShowDeleteConfirm(false)}>
          <div className="modal-content" onClick={e => e.stopPropagation()}>
            <h2>Delete Team</h2>
            <p>
              This will permanently delete the team <strong>{team.name}</strong>.
              This action cannot be undone.
            </p>
            <p>
              To confirm, type <strong>{team.slug}</strong> below:
            </p>
            <input
              type="text"
              value={deleteConfirmText}
              onChange={e => setDeleteConfirmText(e.target.value)}
              placeholder={team.slug}
              className="delete-confirm-input"
            />
            <div className="form-actions">
              <button
                type="button"
                className="btn btn-secondary"
                onClick={() => {
                  setShowDeleteConfirm(false);
                  setDeleteConfirmText('');
                }}
              >
                Cancel
              </button>
              <button
                type="button"
                className="btn btn-danger"
                disabled={deleteConfirmText !== team.slug || deleting}
                onClick={handleDelete}
              >
                {deleting ? 'Deleting...' : 'Delete Team'}
              </button>
            </div>
          </div>
        </div>
      )}
    </div>
  );
}

export default TeamSettingsPage;
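Note: ProjectSettingsPage and TeamSettingsPage implement the same type-to-confirm delete flow with duplicated local state. A hypothetical hook distilling that pattern (a sketch, not part of the diff; the hook name and shape are assumptions):

// Hypothetical hook — sketch only. Both delete modals above could share this.
import { useState } from 'react';

function useDeleteConfirmation(expected: string) {
  const [text, setText] = useState('');
  const [open, setOpen] = useState(false);

  return {
    open,
    text,
    setText,
    show: () => setOpen(true),
    cancel: () => {
      setOpen(false);
      setText('');
    },
    // The destructive button stays disabled until the typed value matches
    // the project name (or team slug) exactly.
    confirmed: text === expected,
  };
}

The pages gate the delete button on `confirmed && !deleting`, which is why the confirm input uses the exact identifier as its placeholder.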
376 frontend/src/pages/TeamsPage.css Normal file
@@ -0,0 +1,376 @@
.teams-page {
  padding: 1.5rem 0;
  max-width: 1200px;
  margin: 0 auto;
}

/* Header */
.teams-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 1.5rem;
  gap: 1rem;
}

.teams-header h1 {
  margin: 0;
  font-size: 1.5rem;
  font-weight: 600;
}

/* Search */
.teams-search {
  position: relative;
  margin-bottom: 1.5rem;
}

.teams-search__icon {
  position: absolute;
  left: 0.875rem;
  top: 50%;
  transform: translateY(-50%);
  color: var(--text-muted);
  pointer-events: none;
}

.teams-search__input {
  width: 100%;
  padding: 0.625rem 2.5rem 0.625rem 2.75rem;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  background: var(--bg-primary);
  color: var(--text-primary);
  font-size: 0.875rem;
}

.teams-search__input:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.2);
}

.teams-search__input::placeholder {
  color: var(--text-muted);
}

.teams-search__clear {
  position: absolute;
  right: 0.5rem;
  top: 50%;
  transform: translateY(-50%);
  background: none;
  border: none;
  padding: 0.375rem;
  cursor: pointer;
  color: var(--text-muted);
  display: flex;
  align-items: center;
  justify-content: center;
  border-radius: var(--radius-sm);
}

.teams-search__clear:hover {
  color: var(--text-primary);
  background: var(--bg-secondary);
}

/* Error */
.teams-error {
  display: flex;
  align-items: center;
  justify-content: space-between;
  padding: 0.75rem 1rem;
  margin-bottom: 1rem;
  background: var(--error-bg);
  border: 1px solid var(--error);
  border-radius: var(--radius-md);
  color: var(--error);
  font-size: 0.875rem;
}

.teams-error__dismiss {
  background: none;
  border: none;
  font-size: 1.25rem;
  cursor: pointer;
  color: inherit;
  padding: 0;
  line-height: 1;
}

/* Loading */
.teams-loading {
  display: flex;
  flex-direction: column;
  align-items: center;
  gap: 1rem;
  padding: 4rem 2rem;
  color: var(--text-muted);
}

.teams-loading__spinner {
  width: 32px;
  height: 32px;
  border: 3px solid var(--border-primary);
  border-top-color: var(--accent-primary);
  border-radius: 50%;
  animation: teams-spin 0.8s linear infinite;
}

@keyframes teams-spin {
  to { transform: rotate(360deg); }
}

/* Empty State */
.teams-empty-state {
  text-align: center;
  padding: 4rem 2rem;
  background: var(--bg-secondary);
  border-radius: var(--radius-lg);
  border: 1px solid var(--border-primary);
}

.teams-empty-icon {
  color: var(--text-muted);
  margin-bottom: 1rem;
}

.teams-empty-state h2 {
  margin: 0 0 0.5rem;
  font-size: 1.25rem;
}

.teams-empty-state p {
  margin: 0 0 1.5rem;
  color: var(--text-muted);
}

/* Table cell styles */

.team-name-cell {
  display: flex;
  flex-direction: column;
  gap: 0.125rem;
}

.team-name-link {
  font-weight: 500;
  color: var(--text-primary);
  text-decoration: none;
}

.team-name-link:hover {
  color: var(--accent-primary);
}

.team-slug {
  font-size: 0.8125rem;
  color: var(--text-muted);
}

.team-description-cell {
  color: var(--text-secondary);
  max-width: 300px;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

.text-muted {
  color: var(--text-muted);
}

/* Modal */
.modal-overlay {
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background: rgba(0, 0, 0, 0.7);
  display: flex;
  align-items: center;
  justify-content: center;
  z-index: 1000;
  padding: 1rem;
}

.modal-content {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  width: 100%;
  max-width: 480px;
  box-shadow: var(--shadow-lg);
  overflow: hidden;
}

.modal-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  padding: 1.25rem 1.5rem;
  border-bottom: 1px solid var(--border-primary);
}

.modal-header h2 {
  margin: 0;
  font-size: 1.125rem;
  font-weight: 600;
  color: var(--text-primary);
}

.modal-close {
  background: none;
  border: none;
  padding: 0.25rem;
  cursor: pointer;
  color: var(--text-muted);
  display: flex;
  border-radius: var(--radius-sm);
}

.modal-close:hover {
  color: var(--text-primary);
  background: var(--bg-hover);
}

.modal-content form {
  padding: 1.5rem;
}

/* Form */
.form-group {
  margin-bottom: 1rem;
}

.form-group label {
  display: block;
  margin-bottom: 0.375rem;
  font-weight: 500;
  font-size: 0.875rem;
  color: var(--text-primary);
}

.form-group .optional {
  font-weight: 400;
  color: var(--text-muted);
}

.form-group input,
.form-group textarea {
  width: 100%;
  padding: 0.625rem 0.75rem;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  background: var(--bg-tertiary);
  color: var(--text-primary);
}

.form-group input:focus,
.form-group textarea:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.2);
}

.input-with-prefix {
  display: flex;
  align-items: stretch;
}

.input-prefix {
  display: flex;
  align-items: center;
  padding: 0 0.75rem;
  background: var(--bg-elevated);
  border: 1px solid var(--border-primary);
  border-right: none;
  border-radius: var(--radius-md) 0 0 var(--radius-md);
  color: var(--text-muted);
  font-size: 0.875rem;
}

.input-with-prefix input {
  border-radius: 0 var(--radius-md) var(--radius-md) 0;
}

.form-hint {
  display: block;
  margin-top: 0.25rem;
  font-size: 0.75rem;
  color: var(--text-muted);
}

.form-actions {
  display: flex;
  justify-content: flex-end;
  gap: 0.75rem;
  margin-top: 1.5rem;
  padding-top: 1rem;
  border-top: 1px solid var(--border-primary);
}

/* Buttons */
.btn {
  display: inline-flex;
  align-items: center;
  gap: 0.5rem;
  padding: 0.5rem 1rem;
  border: none;
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  cursor: pointer;
  transition: all 0.15s ease;
}

.btn:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

.btn-primary {
  background: var(--accent-primary);
  color: white;
}

.btn-primary:hover:not(:disabled) {
  background: var(--accent-primary-hover);
}

.btn-secondary {
  background: var(--bg-tertiary);
  color: var(--text-primary);
  border: 1px solid var(--border-primary);
}

.btn-secondary:hover:not(:disabled) {
  background: var(--bg-hover);
}

/* Responsive */
@media (max-width: 640px) {
  .teams-header {
    flex-direction: column;
    align-items: stretch;
  }

  .teams-header .btn {
    justify-content: center;
  }

  .teams-stats {
    justify-content: space-around;
  }

  .teams-table-container {
    overflow-x: auto;
  }

  .teams-table {
    min-width: 600px;
  }
}
310 frontend/src/pages/TeamsPage.tsx Normal file
@@ -0,0 +1,310 @@
import { useState, useEffect, useCallback } from 'react';
import { Link, useNavigate } from 'react-router-dom';
import { TeamDetail, TeamCreate, PaginatedResponse } from '../types';
import { listTeams, createTeam } from '../api';
import { useAuth } from '../contexts/AuthContext';
import { Badge } from '../components/Badge';
import { DataTable } from '../components/DataTable';
import './TeamsPage.css';

function TeamsPage() {
  const navigate = useNavigate();
  const { user } = useAuth();
  const [teamsData, setTeamsData] = useState<PaginatedResponse<TeamDetail> | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const [showForm, setShowForm] = useState(false);
  const [newTeam, setNewTeam] = useState<TeamCreate>({ name: '', slug: '', description: '' });
  const [creating, setCreating] = useState(false);
  const [slugManuallySet, setSlugManuallySet] = useState(false);
  const [searchQuery, setSearchQuery] = useState('');

  const loadTeams = useCallback(async () => {
    try {
      setLoading(true);
      const data = await listTeams({ limit: 100 });
      setTeamsData(data);
      setError(null);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load teams');
    } finally {
      setLoading(false);
    }
  }, []);

  useEffect(() => {
    loadTeams();
  }, [loadTeams]);

  // Auto-generate slug from name
  const handleNameChange = (name: string) => {
    setNewTeam(prev => ({
      ...prev,
      name,
      slug: slugManuallySet ? prev.slug : name.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-|-$/g, ''),
    }));
  };

  const handleSlugChange = (slug: string) => {
    setSlugManuallySet(true);
    setNewTeam(prev => ({ ...prev, slug }));
  };

  async function handleCreateTeam(e: React.FormEvent) {
    e.preventDefault();
    try {
      setCreating(true);
      const team = await createTeam(newTeam);
      setNewTeam({ name: '', slug: '', description: '' });
      setSlugManuallySet(false);
      setShowForm(false);
      navigate(`/teams/${team.slug}`);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to create team');
    } finally {
      setCreating(false);
    }
  }

  const closeModal = () => {
    setShowForm(false);
    setNewTeam({ name: '', slug: '', description: '' });
    setSlugManuallySet(false);
  };

  // Filter teams by search
  const filteredTeams = teamsData?.items.filter(team =>
    team.name.toLowerCase().includes(searchQuery.toLowerCase()) ||
    team.slug.toLowerCase().includes(searchQuery.toLowerCase()) ||
    (team.description?.toLowerCase().includes(searchQuery.toLowerCase()))
  ) || [];

  const totalTeams = teamsData?.items.length || 0;

  const roleConfig: Record<string, { variant: 'success' | 'info' | 'default'; label: string }> = {
    owner: { variant: 'success', label: 'Owner' },
    admin: { variant: 'info', label: 'Admin' },
    member: { variant: 'default', label: 'Member' },
  };

  if (!user) {
    return (
      <div className="teams-page">
        <div className="teams-empty-state">
          <div className="teams-empty-icon">
            <svg width="64" height="64" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
              <path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
              <circle cx="9" cy="7" r="4"/>
              <path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
              <path d="M16 3.13a4 4 0 0 1 0 7.75"/>
            </svg>
          </div>
          <h2>Sign in to view your teams</h2>
          <p>Teams help you organize projects and collaborate with others.</p>
          <Link to="/login" className="btn btn-primary">Sign In</Link>
        </div>
      </div>
    );
  }

  return (
    <div className="teams-page">
      {/* Header */}
      <div className="teams-header">
        <h1>Teams</h1>
        <button className="btn btn-primary" onClick={() => setShowForm(true)}>
          <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <line x1="12" y1="5" x2="12" y2="19" />
            <line x1="5" y1="12" x2="19" y2="12" />
          </svg>
          Create Team
        </button>
      </div>

      {/* Search */}
      {!loading && totalTeams > 3 && (
        <div className="teams-search">
          <svg className="teams-search__icon" width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <circle cx="11" cy="11" r="8"/>
            <line x1="21" y1="21" x2="16.65" y2="16.65"/>
          </svg>
          <input
            type="text"
            placeholder="Search teams..."
            value={searchQuery}
            onChange={(e) => setSearchQuery(e.target.value)}
            className="teams-search__input"
          />
          {searchQuery && (
            <button className="teams-search__clear" onClick={() => setSearchQuery('')}>
              <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <line x1="18" y1="6" x2="6" y2="18"/>
                <line x1="6" y1="6" x2="18" y2="18"/>
              </svg>
            </button>
          )}
        </div>
      )}

      {error && (
        <div className="teams-error">
          {error}
          <button onClick={() => setError(null)} className="teams-error__dismiss">×</button>
        </div>
      )}

      {/* Create Team Modal */}
      {showForm && (
        <div className="modal-overlay" onClick={closeModal}>
          <div className="modal-content" onClick={e => e.stopPropagation()}>
            <div className="modal-header">
              <h2>Create New Team</h2>
              <button className="modal-close" onClick={closeModal}>
                <svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                  <line x1="18" y1="6" x2="6" y2="18"/>
                  <line x1="6" y1="6" x2="18" y2="18"/>
                </svg>
              </button>
            </div>
            <form onSubmit={handleCreateTeam}>
              <div className="form-group">
                <label htmlFor="team-name">Team Name</label>
                <input
                  id="team-name"
                  type="text"
                  value={newTeam.name}
                  onChange={e => handleNameChange(e.target.value)}
                  placeholder="Engineering"
                  required
                  autoFocus
                />
              </div>
              <div className="form-group">
                <label htmlFor="team-slug">URL Slug</label>
                <div className="input-with-prefix">
                  <span className="input-prefix">@</span>
                  <input
                    id="team-slug"
                    type="text"
                    value={newTeam.slug}
                    onChange={e => handleSlugChange(e.target.value)}
                    placeholder="engineering"
                    pattern="^[a-z0-9][a-z0-9-]*[a-z0-9]$|^[a-z0-9]$"
                    title="Lowercase letters, numbers, and hyphens only"
                    required
                  />
                </div>
                <span className="form-hint">Used in URLs. Lowercase letters, numbers, and hyphens.</span>
              </div>
              <div className="form-group">
                <label htmlFor="team-description">Description <span className="optional">(optional)</span></label>
                <textarea
                  id="team-description"
                  value={newTeam.description}
                  onChange={e => setNewTeam({ ...newTeam, description: e.target.value })}
                  placeholder="What is this team for?"
                  rows={3}
                />
              </div>
              <div className="form-actions">
                <button type="button" className="btn btn-secondary" onClick={closeModal}>
                  Cancel
                </button>
                <button type="submit" className="btn btn-primary" disabled={creating}>
                  {creating ? 'Creating...' : 'Create Team'}
                </button>
              </div>
            </form>
          </div>
        </div>
      )}

      {/* Content */}
      {loading ? (
        <div className="teams-loading">
          <div className="teams-loading__spinner" />
          <span>Loading teams...</span>
        </div>
      ) : filteredTeams.length === 0 ? (
        <div className="teams-empty-state">
          <div className="teams-empty-icon">
            <svg width="64" height="64" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
              <path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
              <circle cx="9" cy="7" r="4"/>
              <path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
              <path d="M16 3.13a4 4 0 0 1 0 7.75"/>
            </svg>
          </div>
          {searchQuery ? (
            <>
              <h2>No teams found</h2>
              <p>No teams match "{searchQuery}"</p>
              <button className="btn btn-secondary" onClick={() => setSearchQuery('')}>
                Clear search
              </button>
            </>
          ) : (
            <>
              <h2>No teams yet</h2>
              <p>Create your first team to start organizing your projects.</p>
              <button className="btn btn-primary" onClick={() => setShowForm(true)}>
                Create Team
              </button>
            </>
          )}
        </div>
      ) : (
        <DataTable
          data={filteredTeams}
          keyExtractor={(team) => team.id}
          onRowClick={(team) => navigate(`/teams/${team.slug}`)}
          columns={[
            {
              key: 'name',
              header: 'Name',
              render: (team) => (
                <div className="team-name-cell">
                  <Link
                    to={`/teams/${team.slug}`}
                    className="cell-name"
                    onClick={(e) => e.stopPropagation()}
                  >
                    {team.name}
                  </Link>
                  <span className="team-slug">@{team.slug}</span>
                </div>
              ),
            },
            {
              key: 'description',
              header: 'Description',
              className: 'cell-description',
              render: (team) => team.description || <span className="text-muted">—</span>,
            },
            {
              key: 'role',
              header: 'Role',
              render: (team) => team.user_role ? (
                <Badge variant={roleConfig[team.user_role]?.variant || 'default'}>
                  {roleConfig[team.user_role]?.label || team.user_role}
                </Badge>
              ) : null,
            },
            {
              key: 'members',
              header: 'Members',
              render: (team) => <span className="text-muted">{team.member_count}</span>,
            },
            {
              key: 'projects',
              header: 'Projects',
              render: (team) => <span className="text-muted">{team.project_count}</span>,
            },
          ]}
        />
      )}
    </div>
  );
}

export default TeamsPage;
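Aside: the auto-slug logic in handleNameChange collapses every run of characters outside [a-z0-9] into a single hyphen, then trims hyphens from both ends. A quick worked sketch of those same two replace() calls — the toSlug wrapper name is ours, for illustration only:

// Same expressions as handleNameChange above, extracted for illustration.
const toSlug = (name: string): string =>
  name.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-|-$/g, '');

toSlug('Platform Engineering'); // 'platform-engineering'
toSlug('  QA / Release!  ');    // 'qa-release' — punctuation runs collapse to one hyphen
toSlug('Team #1');              // 'team-1'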
@@ -12,6 +12,10 @@ export interface Project {
  // Access level info (populated when listing projects)
  access_level?: AccessLevel | null;
  is_owner?: boolean;
  // Team info
  team_id?: string | null;
  team_slug?: string | null;
  team_name?: string | null;
}

export interface TagSummary {
@@ -316,6 +320,8 @@ export interface UserUpdate {
}

// Access Permission types
export type AccessSource = 'explicit' | 'team';

export interface AccessPermission {
  id: string;
  project_id: string;
@@ -323,6 +329,9 @@ export interface AccessPermission {
  level: AccessLevel;
  created_at: string;
  expires_at: string | null;
  source?: AccessSource; // "explicit" or "team"
  team_slug?: string; // Team slug if source is "team"
  team_role?: string; // Team role if source is "team"
}

export interface AccessPermissionCreate {
@@ -373,3 +382,124 @@ export interface OIDCStatus {
  enabled: boolean;
  issuer_url?: string;
}

// Dependency types
export interface Dependency {
  id: string;
  artifact_id: string;
  project: string;
  package: string;
  version: string | null;
  tag: string | null;
  created_at: string;
}

export interface ArtifactDependenciesResponse {
  artifact_id: string;
  dependencies: Dependency[];
}

export interface DependentInfo {
  artifact_id: string;
  project: string;
  package: string;
  version: string | null;
  constraint_type: 'version' | 'tag';
  constraint_value: string;
}

export interface ReverseDependenciesResponse {
  project: string;
  package: string;
  dependents: DependentInfo[];
  pagination: {
    page: number;
    limit: number;
    total: number;
    total_pages: number;
    has_more: boolean;
  };
}

// Dependency Resolution types
export interface ResolvedArtifact {
  artifact_id: string;
  project: string;
  package: string;
  version: string | null;
  tag: string | null;
  size: number;
  download_url: string;
}

export interface DependencyResolutionResponse {
  requested: {
    project: string;
    package: string;
    ref: string;
  };
  resolved: ResolvedArtifact[];
  total_size: number;
  artifact_count: number;
}

export interface DependencyResolutionError {
  error: 'circular_dependency' | 'dependency_conflict' | 'not_found';
  message: string;
  cycle?: string[];
  conflicts?: Array<{
    project: string;
    package: string;
    requirements: Array<{
      version: string;
      required_by: Array<{ path: string }>;
    }>;
  }>;
}
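Aside: a resolution request can come back as either of the two shapes above, and only the error payload carries the `error` field, so an `'error' in result` check narrows the union. A minimal sketch assuming the two interfaces above; the describeResolution helper is hypothetical:

type ResolutionResult = DependencyResolutionResponse | DependencyResolutionError;

function describeResolution(result: ResolutionResult): string {
  if ('error' in result) {
    // Error payload: the discriminator tells us which optional fields to expect.
    if (result.error === 'circular_dependency' && result.cycle) {
      return `Cycle detected: ${result.cycle.join(' -> ')}`;
    }
    return `Resolution failed (${result.error}): ${result.message}`;
  }
  // Success payload: summarize the resolved closure.
  return `${result.artifact_count} artifact(s), ${result.total_size} bytes total`;
}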

// Team types
export type TeamRole = 'owner' | 'admin' | 'member';

export interface Team {
  id: string;
  name: string;
  slug: string;
  description: string | null;
  created_at: string;
  updated_at: string;
  member_count: number;
  project_count: number;
}

export interface TeamDetail extends Team {
  user_role: TeamRole | null;
}

export interface TeamMember {
  id: string;
  user_id: string;
  username: string;
  email: string | null;
  role: TeamRole;
  created_at: string;
}

export interface TeamCreate {
  name: string;
  slug: string;
  description?: string;
}

export interface TeamUpdate {
  name?: string;
  description?: string;
}

export interface TeamMemberCreate {
  username: string;
  role: TeamRole;
}

export interface TeamMemberUpdate {
  role: TeamRole;
}
@@ -77,6 +77,8 @@ PostgreSQL secret name
{{- define "orchard.postgresql.secretName" -}}
{{- if .Values.orchard.database.existingSecret }}
{{- .Values.orchard.database.existingSecret }}
{{- else if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
{{- printf "%s-db-credentials" (include "orchard.fullname" .) }}
{{- else if .Values.postgresql.enabled }}
{{- printf "%s-postgresql" .Release.Name }}
{{- else }}
@@ -90,6 +92,8 @@ PostgreSQL password key in secret
{{- define "orchard.postgresql.passwordKey" -}}
{{- if .Values.orchard.database.existingSecret -}}
{{- .Values.orchard.database.existingSecretPasswordKey -}}
{{- else if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled -}}
password
{{- else if .Values.postgresql.enabled -}}
password
{{- else -}}
@@ -137,3 +141,16 @@ MinIO secret name
{{- printf "%s-s3-secret" (include "orchard.fullname" .) }}
{{- end }}
{{- end }}

{{/*
Auth secret name (for admin password)
*/}}
{{- define "orchard.auth.secretName" -}}
{{- if and .Values.orchard.auth .Values.orchard.auth.existingSecret }}
{{- .Values.orchard.auth.existingSecret }}
{{- else if and .Values.orchard.auth .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled }}
{{- printf "%s-auth-credentials" (include "orchard.fullname" .) }}
{{- else }}
{{- printf "%s-auth-secret" (include "orchard.fullname" .) }}
{{- end }}
{{- end }}
@@ -69,6 +69,8 @@ spec:
              containerPort: {{ .Values.orchard.server.port }}
              protocol: TCP
          env:
            - name: ORCHARD_ENV
              value: {{ .Values.orchard.env | default "development" | quote }}
            - name: ORCHARD_SERVER_HOST
              value: {{ .Values.orchard.server.host | quote }}
            - name: ORCHARD_SERVER_PORT
@@ -77,8 +79,16 @@ spec:
              value: {{ include "orchard.postgresql.host" . | quote }}
            - name: ORCHARD_DATABASE_PORT
              value: {{ .Values.orchard.database.port | quote }}
            {{- if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
            - name: ORCHARD_DATABASE_USER
              valueFrom:
                secretKeyRef:
                  name: {{ include "orchard.postgresql.secretName" . }}
                  key: username
            {{- else }}
            - name: ORCHARD_DATABASE_USER
              value: {{ .Values.orchard.database.user | default .Values.postgresql.auth.username | quote }}
            {{- end }}
            - name: ORCHARD_DATABASE_DBNAME
              value: {{ .Values.orchard.database.dbname | default .Values.postgresql.auth.database | quote }}
            - name: ORCHARD_DATABASE_SSLMODE
@@ -96,6 +106,7 @@ spec:
              value: {{ .Values.orchard.s3.bucket | quote }}
            - name: ORCHARD_S3_USE_PATH_STYLE
              value: {{ .Values.orchard.s3.usePathStyle | quote }}
            {{- if or .Values.minio.enabled .Values.orchard.s3.existingSecret .Values.orchard.s3.accessKeyId }}
            - name: ORCHARD_S3_ACCESS_KEY_ID
              valueFrom:
                secretKeyRef:
@@ -106,16 +117,76 @@ spec:
                secretKeyRef:
                  name: {{ include "orchard.minio.secretName" . }}
                  key: {{ if .Values.minio.enabled }}root-password{{ else }}{{ .Values.orchard.s3.existingSecretSecretKeyKey }}{{ end }}
            {{- end }}
            - name: ORCHARD_DOWNLOAD_MODE
              value: {{ .Values.orchard.download.mode | quote }}
            - name: ORCHARD_PRESIGNED_URL_EXPIRY
              value: {{ .Values.orchard.download.presignedUrlExpiry | quote }}
            {{- if .Values.orchard.rateLimit }}
            {{- if .Values.orchard.rateLimit.login }}
            - name: ORCHARD_LOGIN_RATE_LIMIT
              value: {{ .Values.orchard.rateLimit.login | quote }}
            {{- end }}
            {{- end }}
            {{- if .Values.orchard.database.poolSize }}
            - name: ORCHARD_DATABASE_POOL_SIZE
              value: {{ .Values.orchard.database.poolSize | quote }}
            {{- end }}
            {{- if .Values.orchard.database.maxOverflow }}
            - name: ORCHARD_DATABASE_MAX_OVERFLOW
              value: {{ .Values.orchard.database.maxOverflow | quote }}
            {{- end }}
            {{- if .Values.orchard.database.poolTimeout }}
            - name: ORCHARD_DATABASE_POOL_TIMEOUT
              value: {{ .Values.orchard.database.poolTimeout | quote }}
            {{- end }}
            {{- if .Values.orchard.auth }}
            {{- if or .Values.orchard.auth.secretsManager .Values.orchard.auth.existingSecret .Values.orchard.auth.adminPassword }}
            - name: ORCHARD_ADMIN_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: {{ include "orchard.auth.secretName" . }}
                  key: admin-password
            {{- end }}
            {{- end }}
          {{- if or (and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled) (and .Values.orchard.auth .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled) }}
          volumeMounts:
            {{- if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
            - name: db-secrets
              mountPath: /mnt/secrets-store/db
              readOnly: true
            {{- end }}
            {{- if and .Values.orchard.auth .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled }}
            - name: auth-secrets
              mountPath: /mnt/secrets-store/auth
              readOnly: true
            {{- end }}
          {{- end }}
          livenessProbe:
            {{- toYaml .Values.livenessProbe | nindent 12 }}
          readinessProbe:
            {{- toYaml .Values.readinessProbe | nindent 12 }}
          resources:
            {{- toYaml .Values.resources | nindent 12 }}
      {{- if or (and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled) (and .Values.orchard.auth .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled) }}
      volumes:
        {{- if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
        - name: db-secrets
          csi:
            driver: secrets-store.csi.k8s.io
            readOnly: true
            volumeAttributes:
              secretProviderClass: {{ include "orchard.fullname" . }}-db-secret
        {{- end }}
        {{- if and .Values.orchard.auth .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled }}
        - name: auth-secrets
          csi:
            driver: secrets-store.csi.k8s.io
            readOnly: true
            volumeAttributes:
              secretProviderClass: {{ include "orchard.fullname" . }}-auth-secret
        {{- end }}
      {{- end }}
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
51 helm/orchard/templates/secret-provider-class.yaml Normal file
@@ -0,0 +1,51 @@
{{- if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
apiVersion: secrets-store.csi.x-k8s.io/v1
kind: SecretProviderClass
metadata:
  name: {{ include "orchard.fullname" . }}-db-secret
  labels:
    {{- include "orchard.labels" . | nindent 4 }}
spec:
  provider: aws
  parameters:
    objects: |
      - objectName: "{{ .Values.orchard.database.secretsManager.secretArn }}"
        objectType: "secretsmanager"
        jmesPath:
          - path: username
            objectAlias: db-username
          - path: password
            objectAlias: db-password
  secretObjects:
    - secretName: {{ include "orchard.fullname" . }}-db-credentials
      type: Opaque
      data:
        - objectName: db-username
          key: username
        - objectName: db-password
          key: password
{{- end }}
---
{{- if and .Values.orchard.auth .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled }}
apiVersion: secrets-store.csi.x-k8s.io/v1
kind: SecretProviderClass
metadata:
  name: {{ include "orchard.fullname" . }}-auth-secret
  labels:
    {{- include "orchard.labels" . | nindent 4 }}
spec:
  provider: aws
  parameters:
    objects: |
      - objectName: "{{ .Values.orchard.auth.secretsManager.secretArn }}"
        objectType: "secretsmanager"
        jmesPath:
          - path: admin_password
            objectAlias: admin-password
  secretObjects:
    - secretName: {{ include "orchard.fullname" . }}-auth-credentials
      type: Opaque
      data:
        - objectName: admin-password
          key: admin-password
{{- end }}
@@ -22,3 +22,15 @@ data:
  access-key-id: {{ .Values.orchard.s3.accessKeyId | b64enc | quote }}
  secret-access-key: {{ .Values.orchard.s3.secretAccessKey | b64enc | quote }}
{{- end }}
---
{{- if and .Values.orchard.auth .Values.orchard.auth.adminPassword (not .Values.orchard.auth.existingSecret) (not (and .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled)) }}
apiVersion: v1
kind: Secret
metadata:
  name: {{ include "orchard.fullname" . }}-auth-secret
  labels:
    {{- include "orchard.labels" . | nindent 4 }}
type: Opaque
data:
  admin-password: {{ .Values.orchard.auth.adminPassword | b64enc | quote }}
{{- end }}
@@ -42,6 +42,7 @@ ingress:
   className: "nginx"
   annotations:
     cert-manager.io/cluster-issuer: "letsencrypt"
+    nginx.ingress.kubernetes.io/proxy-body-size: "0" # Disable body size limit for uploads
   hosts:
     - host: orchard-dev.common.global.bsf.tools # Overridden by CI
       paths:
@@ -52,15 +53,16 @@ ingress:
     hosts:
       - orchard-dev.common.global.bsf.tools # Overridden by CI
 
-# Lighter resources for ephemeral environments
+# Resources for dev/feature environments
+# Bumped to handle concurrent integration tests
+# Note: memory requests must equal limits per cluster policy
 resources:
   limits:
-    cpu: 250m
-    memory: 256Mi
+    cpu: 500m
+    memory: 512Mi
   requests:
-    cpu: 100m
-    memory: 256Mi
+    cpu: 200m
+    memory: 512Mi
 
 livenessProbe:
   httpGet:
@@ -84,10 +86,15 @@ tolerations: []
 affinity: {}
 
 orchard:
+  env: "development" # Allows seed data for testing
   server:
     host: "0.0.0.0"
     port: 8080
 
+  # Authentication settings
+  # Admin password is set via CI variable (DEV_ADMIN_PASSWORD) passed as --set flag
+  # This keeps the password out of version control
+
   database:
     host: ""
     port: 5432
@@ -97,6 +104,10 @@ orchard:
     sslmode: disable
     existingSecret: ""
     existingSecretPasswordKey: "password"
+    # Increased pool settings for concurrent integration tests
+    poolSize: 10
+    maxOverflow: 20
+    poolTimeout: 60
 
   s3:
     endpoint: ""
@@ -113,6 +124,10 @@ orchard:
     mode: "presigned"
     presignedUrlExpiry: 3600
 
+  # Relaxed rate limits for dev/feature environments (allows integration tests to run)
+  rateLimit:
+    login: "1000/minute" # Default is 5/minute, relaxed for CI integration tests
+
 # PostgreSQL - ephemeral, no persistence
 postgresql:
   enabled: true
@@ -128,15 +143,16 @@ postgresql:
   primary:
     persistence:
       enabled: false
-    # Resources with memory requests = limits per cluster policy
+    # Bumped resources for concurrent integration tests
+    # Note: memory requests must equal limits per cluster policy
     resourcesPreset: "none"
     resources:
       limits:
-        cpu: 250m
-        memory: 256Mi
+        cpu: 500m
+        memory: 512Mi
       requests:
-        cpu: 100m
-        memory: 256Mi
+        cpu: 200m
+        memory: 512Mi
   # Volume permissions init container
   volumePermissions:
     resourcesPreset: "none"
@@ -162,15 +178,16 @@ minio:
   defaultBuckets: "orchard-artifacts"
   persistence:
     enabled: false
-  # Resources with memory requests = limits per cluster policy
+  # Bumped resources for concurrent integration tests
+  # Note: memory requests must equal limits per cluster policy
   resourcesPreset: "none" # Disable preset to use explicit resources
   resources:
     limits:
-      cpu: 250m
-      memory: 256Mi
+      cpu: 500m
+      memory: 512Mi
     requests:
-      cpu: 100m
-      memory: 256Mi
+      cpu: 200m
+      memory: 512Mi
   # Init container resources
   defaultInitContainers:
     volumePermissions:
@@ -4,7 +4,7 @@ replicaCount: 1
 
 image:
   repository: registry.global.bsf.tools/esv/bsf/bsf-integration/orchard/orchard-mvp
-  pullPolicy: IfNotPresent # Don't always pull in prod
+  pullPolicy: Always
   tag: "latest" # Overridden by CI
 
 imagePullSecrets:
@@ -19,7 +19,8 @@ initContainer:
 serviceAccount:
   create: true
   automount: true
-  annotations: {}
+  annotations:
+    eks.amazonaws.com/role-arn: arn:aws-us-gov:iam::052673043337:role/service-orchard-prd
   name: "orchard"
 
 podAnnotations: {}
@@ -41,6 +42,7 @@ ingress:
   className: "nginx"
   annotations:
     cert-manager.io/cluster-issuer: "letsencrypt"
+    nginx.ingress.kubernetes.io/proxy-body-size: "0" # Disable body size limit for uploads
   hosts:
     - host: orchard.common.global.bsf.tools
       paths:
@@ -86,126 +88,50 @@ tolerations: []
 affinity: {}
 
 orchard:
+  env: "production" # Disables seed data
   server:
     host: "0.0.0.0"
     port: 8080
 
-  # Database configuration (used when postgresql.enabled is false)
-  # TODO: Configure for managed PostgreSQL when ready
-  database:
-    host: ""
-    port: 5432
-    user: orchard
-    password: ""
-    dbname: orchard
-    sslmode: disable
-    existingSecret: ""
-    existingSecretPasswordKey: "password"
+  # Authentication settings
+  auth:
+    # Admin password from AWS Secrets Manager
+    secretsManager:
+      enabled: true
+      secretArn: "arn:aws-us-gov:secretsmanager:us-gov-west-1:052673043337:secret:orch-prod-creds-0nhqkY"
 
-  # S3 configuration (used when minio.enabled is false)
-  # TODO: Configure for real S3 when ready
+  # Database configuration - uses AWS Secrets Manager via CSI driver
+  database:
+    host: "orchard-prd.cluster-cvw3jzjkozoc.us-gov-west-1.rds.amazonaws.com"
+    port: 5432
+    dbname: orchard_prod
+    sslmode: require
+    secretsManager:
+      enabled: true
+      secretArn: "arn:aws-us-gov:secretsmanager:us-gov-west-1:052673043337:secret:rds!cluster-0afc8af5-f644-4284-92fb-2ed545490f92-3v9uXV"
 
+  # S3 configuration - uses IRSA for credentials
   s3:
-    endpoint: ""
-    region: us-east-1
-    bucket: orchard-artifacts
-    accessKeyId: ""
-    secretAccessKey: ""
-    usePathStyle: true
-    existingSecret: ""
-    existingSecretAccessKeyKey: "access-key-id"
-    existingSecretSecretKeyKey: "secret-access-key"
+    endpoint: "" # Empty = use AWS default
+    region: us-gov-west-1
+    bucket: orchard-artifacts-prod
+    usePathStyle: false # Real S3 uses virtual-hosted style
 
   download:
     mode: "presigned"
     presignedUrlExpiry: 3600
 
-# PostgreSQL subchart - MVP uses subchart, switch to managed later
+# PostgreSQL subchart - disabled in prod, using RDS
 postgresql:
-  enabled: true
-  image:
-    registry: containers.global.bsf.tools
-    repository: bitnami/postgresql
-    tag: "15"
-    pullPolicy: IfNotPresent
-  auth:
-    username: orchard
-    password: orchard-prod-password # TODO: Use existingSecret
-    database: orchard
-  primary:
-    persistence:
-      enabled: true # Enable persistence for prod
-      size: 20Gi
-    resourcesPreset: "none"
-    resources:
-      limits:
-        cpu: 500m
-        memory: 512Mi
-      requests:
-        cpu: 250m
-        memory: 512Mi
-    volumePermissions:
-      resourcesPreset: "none"
-      resources:
-        limits:
-          cpu: 100m
-          memory: 128Mi
-        requests:
-          cpu: 50m
-          memory: 128Mi
+  enabled: false
 
-# MinIO subchart - MVP uses subchart, switch to real S3 later
+# MinIO subchart - disabled in prod, using real S3
 minio:
-  enabled: true
-  image:
-    registry: containers.global.bsf.tools
-    repository: bitnami/minio
-    tag: "latest"
-    pullPolicy: IfNotPresent
-  auth:
-    rootUser: minioadmin
-    rootPassword: minioadmin-prod # TODO: Use existingSecret
-  defaultBuckets: "orchard-artifacts"
-  persistence:
-    enabled: true # Enable persistence for prod
-    size: 100Gi
-  resourcesPreset: "none"
-  resources:
-    limits:
-      cpu: 500m
-      memory: 512Mi
-    requests:
-      cpu: 250m
-      memory: 512Mi
-  defaultInitContainers:
-    volumePermissions:
-      resourcesPreset: "none"
-      resources:
-        limits:
-          cpu: 100m
-          memory: 128Mi
-        requests:
-          cpu: 50m
-          memory: 128Mi
-  provisioning:
-    resources:
-      limits:
-        cpu: 200m
-        memory: 256Mi
-      requests:
-        cpu: 100m
-        memory: 256Mi
+  enabled: false
 
-# MinIO external ingress for presigned URL access
+# MinIO ingress - disabled in prod, using real S3
 minioIngress:
-  enabled: true
-  className: "nginx"
-  annotations:
-    cert-manager.io/cluster-issuer: "letsencrypt"
-    nginx.ingress.kubernetes.io/proxy-body-size: "0"
-  host: "minio-orchard.common.global.bsf.tools"
-  tls:
-    enabled: true
-    secretName: minio-prod-tls
+  enabled: false
 
 redis:
   enabled: false
@@ -19,7 +19,8 @@ initContainer:
 serviceAccount:
   create: true
   automount: true
-  annotations: {}
+  annotations:
+    eks.amazonaws.com/role-arn: arn:aws-us-gov:iam::052673043337:role/service-orchard-stage
   name: "orchard"
 
 podAnnotations: {}
@@ -41,6 +42,7 @@ ingress:
   className: "nginx"
   annotations:
     cert-manager.io/cluster-issuer: "letsencrypt"
+    nginx.ingress.kubernetes.io/proxy-body-size: "0" # Disable body size limit for uploads
   hosts:
     - host: orchard-stage.common.global.bsf.tools
       paths:
@@ -88,130 +90,52 @@ affinity: {}
 
 # Orchard server configuration
 orchard:
+  env: "development" # Allows seed data for testing
   server:
     host: "0.0.0.0"
     port: 8080
 
-  # Database configuration (used when postgresql.enabled is false)
-  database:
-    host: ""
-    port: 5432
-    user: orchard
-    password: ""
-    dbname: orchard
-    sslmode: disable
-    existingSecret: ""
-    existingSecretPasswordKey: "password"
+  # Authentication settings
+  # Admin password is set via CI variable (STAGE_ADMIN_PASSWORD) passed as --set flag
+  # This keeps the password out of version control
 
-  # S3 configuration (used when minio.enabled is false)
+  # Database configuration - uses AWS Secrets Manager via CSI driver
+  database:
+    host: "orchard-stage.cluster-cvw3jzjkozoc.us-gov-west-1.rds.amazonaws.com"
+    port: 5432
+    dbname: postgres
+    sslmode: require
+    secretsManager:
+      enabled: true
+      secretArn: "arn:aws-us-gov:secretsmanager:us-gov-west-1:052673043337:secret:rds!cluster-a573672b-1a38-4665-a654-1b7df37b5297-IaeFQL"
 
+  # S3 configuration - uses IRSA for credentials
   s3:
-    endpoint: ""
-    region: us-east-1
-    bucket: orchard-artifacts
-    accessKeyId: ""
-    secretAccessKey: ""
-    usePathStyle: true
-    existingSecret: ""
-    existingSecretAccessKeyKey: "access-key-id"
-    existingSecretSecretKeyKey: "secret-access-key"
+    endpoint: "" # Empty = use AWS default
+    region: us-gov-west-1
+    bucket: orchard-artifacts-stage
+    usePathStyle: false # Real S3 uses virtual-hosted style
 
   # Download configuration
   download:
     mode: "presigned" # presigned, redirect, or proxy
     presignedUrlExpiry: 3600 # Presigned URL expiry in seconds
 
-# PostgreSQL subchart configuration
+  # Relaxed rate limits for stage (allows CI integration tests to run)
+  rateLimit:
+    login: "1000/minute" # Default is 5/minute, relaxed for CI integration tests
+
+# PostgreSQL subchart - disabled in stage, using RDS
 postgresql:
-  enabled: true
-  image:
-    registry: containers.global.bsf.tools
-    repository: bitnami/postgresql
-    tag: "15"
-    pullPolicy: IfNotPresent
-  auth:
-    username: orchard
-    password: orchard-password
-    database: orchard
-  primary:
-    persistence:
-      enabled: false
-      size: 10Gi
-    # Resources with memory requests = limits per cluster policy
-    resourcesPreset: "none"
-    resources:
-      limits:
-        cpu: 500m
-        memory: 512Mi
-      requests:
-        cpu: 250m
-        memory: 512Mi
-    # Volume permissions init container
-    volumePermissions:
-      resourcesPreset: "none"
-      resources:
-        limits:
-          cpu: 100m
-          memory: 128Mi
-        requests:
-          cpu: 50m
-          memory: 128Mi
+  enabled: false
 
-# MinIO subchart configuration
+# MinIO subchart - disabled in stage, using real S3
 minio:
-  enabled: true
-  image:
-    registry: containers.global.bsf.tools
-    repository: bitnami/minio
-    tag: "latest"
-    pullPolicy: IfNotPresent
-  auth:
-    rootUser: minioadmin
-    rootPassword: minioadmin
-  defaultBuckets: "orchard-artifacts"
-  persistence:
-    enabled: false
-    size: 50Gi
-  # Resources with memory requests = limits per cluster policy
-  resourcesPreset: "none" # Disable preset to use explicit resources
-  resources:
-    limits:
-      cpu: 500m
-      memory: 512Mi
-    requests:
-      cpu: 250m
-      memory: 512Mi
-  # Init container resources
-  defaultInitContainers:
-    volumePermissions:
-      resourcesPreset: "none"
-      resources:
-        limits:
-          cpu: 100m
-          memory: 128Mi
-        requests:
-          cpu: 50m
-          memory: 128Mi
-  # Provisioning job resources
-  provisioning:
-    resources:
-      limits:
-        cpu: 200m
-        memory: 256Mi
-      requests:
-        cpu: 100m
-        memory: 256Mi
+  enabled: false
 
-# MinIO external ingress for presigned URL access (separate from subchart ingress)
+# MinIO ingress - disabled in stage, using real S3
 minioIngress:
-  enabled: true
-  className: "nginx"
-  annotations:
-    cert-manager.io/cluster-issuer: "letsencrypt"
-    nginx.ingress.kubernetes.io/proxy-body-size: "0" # Disable body size limit for uploads
-  host: "minio-orch-stage.common.global.bsf.tools"
-  tls:
-    enabled: true
-    secretName: minio-tls
+  enabled: false
 
 # Redis subchart configuration (for future caching)
 redis:
@@ -120,6 +120,17 @@ orchard:
    mode: "presigned" # presigned, redirect, or proxy
    presignedUrlExpiry: 3600 # Presigned URL expiry in seconds

  # Authentication settings
  auth:
    # Option 1: Plain admin password (creates K8s secret)
    adminPassword: ""
    # Option 2: Use existing K8s secret (must have 'admin-password' key)
    existingSecret: ""
    # Option 3: AWS Secrets Manager
    # secretsManager:
    #   enabled: false
    #   secretArn: "" # Secret must have 'admin_password' field

# PostgreSQL subchart configuration
postgresql:
  enabled: true
48 migrations/008_artifact_dependencies.sql Normal file
@@ -0,0 +1,48 @@
-- Migration 008: Artifact Dependencies
-- Adds support for declaring dependencies between artifacts
-- Part of Package Dependency Management feature (#76)

-- Create artifact_dependencies table
CREATE TABLE IF NOT EXISTS artifact_dependencies (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    artifact_id VARCHAR(64) NOT NULL REFERENCES artifacts(id) ON DELETE CASCADE,
    dependency_project VARCHAR(255) NOT NULL,
    dependency_package VARCHAR(255) NOT NULL,
    version_constraint VARCHAR(255),
    tag_constraint VARCHAR(255),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,

    -- Exactly one of version_constraint or tag_constraint must be set
    CONSTRAINT check_constraint_type CHECK (
        (version_constraint IS NOT NULL AND tag_constraint IS NULL) OR
        (version_constraint IS NULL AND tag_constraint IS NOT NULL)
    ),

    -- Each artifact can only have one dependency on a specific project/package
    CONSTRAINT unique_artifact_dependency UNIQUE (artifact_id, dependency_project, dependency_package)
);

-- Index for fast lookups by artifact_id (get all deps for an artifact)
CREATE INDEX IF NOT EXISTS idx_artifact_dependencies_artifact_id
    ON artifact_dependencies(artifact_id);

-- Index for reverse dependency lookups (find what depends on a package)
CREATE INDEX IF NOT EXISTS idx_artifact_dependencies_target
    ON artifact_dependencies(dependency_project, dependency_package);

-- Index for finding dependencies with specific version constraints
CREATE INDEX IF NOT EXISTS idx_artifact_dependencies_version
    ON artifact_dependencies(dependency_project, dependency_package, version_constraint)
    WHERE version_constraint IS NOT NULL;

-- Index for finding dependencies with specific tag constraints
CREATE INDEX IF NOT EXISTS idx_artifact_dependencies_tag
    ON artifact_dependencies(dependency_project, dependency_package, tag_constraint)
    WHERE tag_constraint IS NOT NULL;

COMMENT ON TABLE artifact_dependencies IS 'Stores dependencies declared by artifacts on other packages';
COMMENT ON COLUMN artifact_dependencies.artifact_id IS 'The artifact that declares this dependency';
COMMENT ON COLUMN artifact_dependencies.dependency_project IS 'Project name of the dependency';
COMMENT ON COLUMN artifact_dependencies.dependency_package IS 'Package name of the dependency';
COMMENT ON COLUMN artifact_dependencies.version_constraint IS 'Exact version required (mutually exclusive with tag_constraint)';
COMMENT ON COLUMN artifact_dependencies.tag_constraint IS 'Tag name required (mutually exclusive with version_constraint)';
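Aside: the check_constraint_type XOR constraint guarantees that each dependency row is version-pinned or tag-pinned, never both, which is what lets the frontend expose a single constraint_type/constraint_value pair on DependentInfo (defined earlier in this diff). A hypothetical mapper, ours for illustration only, showing the correspondence:

interface DependencyRow {
  version_constraint: string | null;
  tag_constraint: string | null;
}

function toConstraint(row: DependencyRow): { constraint_type: 'version' | 'tag'; constraint_value: string } {
  if (row.version_constraint !== null) {
    return { constraint_type: 'version', constraint_value: row.version_constraint };
  }
  if (row.tag_constraint !== null) {
    return { constraint_type: 'tag', constraint_value: row.tag_constraint };
  }
  // Unreachable while check_constraint_type holds in the database.
  throw new Error('exactly one of version_constraint or tag_constraint must be set');
}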
62 migrations/009_teams.sql Normal file
@@ -0,0 +1,62 @@
-- Migration 009: Teams and Multi-Tenancy
-- Adds support for team-based multi-tenancy
-- Part of Multi-Tenancy with Teams feature

-- Create teams table
CREATE TABLE IF NOT EXISTS teams (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name VARCHAR(255) NOT NULL,
    slug VARCHAR(255) NOT NULL UNIQUE,
    description TEXT,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    created_by VARCHAR(255) NOT NULL,
    settings JSONB DEFAULT '{}'::jsonb,

    -- Slug must be lowercase alphanumeric with hyphens
    CONSTRAINT check_team_slug_format CHECK (slug ~ '^[a-z0-9][a-z0-9-]*[a-z0-9]$' OR slug ~ '^[a-z0-9]$')
);

-- Create team_memberships table
CREATE TABLE IF NOT EXISTS team_memberships (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    team_id UUID NOT NULL REFERENCES teams(id) ON DELETE CASCADE,
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    role VARCHAR(20) NOT NULL DEFAULT 'member',
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    invited_by VARCHAR(255),

    -- Each user can only be a member of a team once
    CONSTRAINT unique_team_membership UNIQUE (team_id, user_id),

    -- Role must be one of: owner, admin, member
    CONSTRAINT check_team_role CHECK (role IN ('owner', 'admin', 'member'))
);

-- Add team_id column to projects table (nullable for migration compatibility)
ALTER TABLE projects ADD COLUMN IF NOT EXISTS team_id UUID REFERENCES teams(id) ON DELETE SET NULL;

-- Indexes for teams table
CREATE INDEX IF NOT EXISTS idx_teams_slug ON teams(slug);
CREATE INDEX IF NOT EXISTS idx_teams_created_by ON teams(created_by);
CREATE INDEX IF NOT EXISTS idx_teams_created_at ON teams(created_at);

-- Indexes for team_memberships table
CREATE INDEX IF NOT EXISTS idx_team_memberships_team_id ON team_memberships(team_id);
CREATE INDEX IF NOT EXISTS idx_team_memberships_user_id ON team_memberships(user_id);
CREATE INDEX IF NOT EXISTS idx_team_memberships_role ON team_memberships(role);
CREATE INDEX IF NOT EXISTS idx_team_memberships_team_role ON team_memberships(team_id, role);

-- Index for projects team_id
CREATE INDEX IF NOT EXISTS idx_projects_team_id ON projects(team_id);

-- Comments
COMMENT ON TABLE teams IS 'Teams serve as organizational containers for projects';
COMMENT ON COLUMN teams.slug IS 'URL-friendly unique identifier (lowercase alphanumeric with hyphens)';
COMMENT ON COLUMN teams.settings IS 'JSON object for team-specific settings';

COMMENT ON TABLE team_memberships IS 'Maps users to teams with their roles';
COMMENT ON COLUMN team_memberships.role IS 'User role in the team: owner, admin, or member';
COMMENT ON COLUMN team_memberships.invited_by IS 'Username of the user who invited this member';

COMMENT ON COLUMN projects.team_id IS 'Optional team that owns this project';
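Aside: the check_team_slug_format constraint above is the same slug rule the create-team form enforces via its input pattern in TeamsPage.tsx. A compact, equivalent single regex (our formulation, shown only to make the rule concrete):

// Equivalent to '^[a-z0-9][a-z0-9-]*[a-z0-9]$' OR '^[a-z0-9]$':
// starts and ends with a lowercase alphanumeric; hyphens allowed in between.
const TEAM_SLUG = /^[a-z0-9]([a-z0-9-]*[a-z0-9])?$/;

TEAM_SLUG.test('engineering');  // true
TEAM_SLUG.test('a');            // true — a single character is allowed
TEAM_SLUG.test('-engineering'); // false — cannot start or end with a hyphen
TEAM_SLUG.test('Engineering');  // false — uppercase rejected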
99 migrations/009b_migrate_projects.sql Normal file
@@ -0,0 +1,99 @@
-- Migration 009b: Migrate Existing Projects to Personal Teams
-- Creates personal teams for existing users and assigns their projects to those teams.
-- This migration is idempotent and can be run multiple times safely.

-- Create personal teams for users who own projects but don't have a personal team yet
INSERT INTO teams (name, slug, description, created_by, settings)
SELECT DISTINCT
    u.username || '''s Team' AS name,
    LOWER(u.username) || '-personal' AS slug,
    'Personal team for ' || u.username AS description,
    u.username AS created_by,
    '{"personal": true}'::jsonb AS settings
FROM users u
JOIN projects p ON p.created_by = u.username
WHERE NOT EXISTS (
    SELECT 1 FROM teams t
    WHERE t.slug = LOWER(u.username) || '-personal'
)
AND p.team_id IS NULL
ON CONFLICT (slug) DO NOTHING;

-- Add users as owners of their personal teams
INSERT INTO team_memberships (team_id, user_id, role, invited_by)
SELECT
    t.id AS team_id,
    u.id AS user_id,
    'owner' AS role,
    u.username AS invited_by
FROM teams t
JOIN users u ON t.created_by = u.username
WHERE t.slug LIKE '%-personal'
AND NOT EXISTS (
    SELECT 1 FROM team_memberships tm
    WHERE tm.team_id = t.id
    AND tm.user_id = u.id
)
ON CONFLICT DO NOTHING;

-- Assign projects without a team to their creator's personal team
UPDATE projects p
SET team_id = t.id
FROM teams t
WHERE t.slug = LOWER(p.created_by) || '-personal'
AND p.team_id IS NULL;

-- Handle orphaned projects (created_by doesn't match any user)
-- Create a special orphaned projects team if there are any
DO $$
DECLARE
    orphan_count INTEGER;
    orphan_team_id UUID;
BEGIN
    -- Count orphaned projects
    SELECT COUNT(*) INTO orphan_count
    FROM projects p
    WHERE p.team_id IS NULL
    AND NOT EXISTS (
        SELECT 1 FROM users u WHERE u.username = p.created_by
    );

    IF orphan_count > 0 THEN
        -- Create or get the orphaned projects team
        INSERT INTO teams (name, slug, description, created_by, settings)
        VALUES (
            'Orphaned Projects',
            'orphaned-projects',
            'Projects whose original creators no longer exist',
            'system',
            '{"system": true}'::jsonb
        )
        ON CONFLICT (slug) DO UPDATE SET name = teams.name
        RETURNING id INTO orphan_team_id;

        -- Assign orphaned projects to this team
        UPDATE projects
        SET team_id = orphan_team_id
        WHERE team_id IS NULL
        AND NOT EXISTS (
            SELECT 1 FROM users u WHERE u.username = projects.created_by
        );

        RAISE NOTICE 'Migrated % orphaned project(s) to orphaned-projects team', orphan_count;
    END IF;
END $$;

-- Log migration results
DO $$
DECLARE
    teams_created INTEGER;
    memberships_created INTEGER;
    projects_migrated INTEGER;
BEGIN
    SELECT COUNT(*) INTO teams_created FROM teams WHERE slug LIKE '%-personal';
    SELECT COUNT(*) INTO memberships_created FROM team_memberships;
    SELECT COUNT(*) INTO projects_migrated FROM projects WHERE team_id IS NOT NULL;

    RAISE NOTICE 'Migration complete: % personal teams, % memberships, % projects with teams',
        teams_created, memberships_created, projects_migrated;
END $$;