Compare commits

3 Commits: feature/tr... ... fix/ci-pro...

| Author | SHA1 | Date |
|---|---|---|
| | 0e01e17dfe | |
| | ca46ab1ea0 | |
| | a01c45cb64 | |
@@ -1,7 +0,0 @@
-# Orchard Local Development Environment
-# Copy this file to .env and customize as needed
-# Note: .env is gitignored and will not be committed
-
-# Admin account password (required for local development)
-# This sets the initial admin password when the database is first created
-ORCHARD_ADMIN_PASSWORD=changeme123
.gitlab-ci.yml (210 changed lines)
@@ -11,6 +11,12 @@ variables:
 # Environment URLs (used by deploy and test jobs)
 STAGE_URL: https://orchard-stage.common.global.bsf.tools
 PROD_URL: https://orchard.common.global.bsf.tools
+# Stage environment AWS resources (used by reset job)
+STAGE_RDS_HOST: orchard-stage.cluster-cvw3jzjkozoc.us-gov-west-1.rds.amazonaws.com
+STAGE_RDS_DBNAME: postgres
+STAGE_SECRET_ARN: "arn:aws-us-gov:secretsmanager:us-gov-west-1:052673043337:secret:rds!cluster-a573672b-1a38-4665-a654-1b7df37b5297-IaeFQL"
+STAGE_S3_BUCKET: orchard-artifacts-stage
+AWS_REGION: us-gov-west-1
 # Shared pip cache directory
 PIP_CACHE_DIR: "$CI_PROJECT_DIR/.pip-cache"

@@ -88,18 +94,10 @@ cve_sbom_analysis:
 when: never
 - when: on_success

-# Disable prosper_setup for tag pipelines since no build/analysis jobs run
-# (image is already built when commit was on main, and deploy uses helm directly)
-prosper_setup:
-rules:
-- if: '$CI_COMMIT_TAG'
-when: never
-- when: on_success
-
-# Override release job to wait for stage deployment and smoke tests before creating tag
+# Override release job to wait for stage integration tests before creating tag
 # This ensures the tag (which triggers prod deploy) is only created after stage passes
 release:
-needs: [smoke_test_stage, changelog]
+needs: [integration_test_stage, changelog]

 # Full integration test suite template (for feature/stage deployments)
 # Runs the complete pytest integration test suite against the deployed environment
@@ -119,9 +117,6 @@ release:
 - pip install --index-url "$PIP_INDEX_URL" pytest pytest-asyncio httpx
 script:
 - cd backend
-# Debug: Print environment variables for test configuration
-- echo "ORCHARD_TEST_URL=$ORCHARD_TEST_URL"
-- echo "ORCHARD_TEST_PASSWORD is set to '${ORCHARD_TEST_PASSWORD:-NOT SET}'"
 # Run full integration test suite, excluding:
 # - large/slow tests
 # - requires_direct_s3 tests (can't access MinIO from outside K8s cluster)
@@ -201,14 +196,102 @@ release:
 sys.exit(0)
 PYTEST_SCRIPT

+# Integration tests for stage deployment (full suite)
+integration_test_stage:
+<<: *integration_test_template
+needs: [deploy_stage]
+variables:
+ORCHARD_TEST_URL: $STAGE_URL
+rules:
+- if: '$CI_COMMIT_BRANCH == "main"'
+when: on_success
+
+# Reset stage environment after integration tests (clean slate for next run)
+# Calls the /api/v1/admin/factory-reset endpoint which handles DB and S3 cleanup
+reset_stage:
+stage: deploy
+needs: [integration_test_stage]
+image: deps.global.bsf.tools/docker/python:3.12-slim
+timeout: 5m
+retry: 1 # Retry once on transient failures
+before_script:
+- pip install --index-url "$PIP_INDEX_URL" httpx
+script:
+- |
+python - <<'RESET_SCRIPT'
+import httpx
+import sys
+import os
+import time
+
+BASE_URL = os.environ.get("STAGE_URL", "")
+ADMIN_USER = "admin"
+ADMIN_PASS = "changeme123" # Default admin password
+MAX_RETRIES = 3
+RETRY_DELAY = 5 # seconds
+
+if not BASE_URL:
+print("ERROR: STAGE_URL environment variable not set")
+sys.exit(1)
+
+print(f"=== Resetting stage environment at {BASE_URL} ===")
+
+def do_reset():
+with httpx.Client(base_url=BASE_URL, timeout=120.0) as client:
+# Login as admin
+print("Logging in as admin...")
+login_response = client.post(
+"/api/v1/auth/login",
+json={"username": ADMIN_USER, "password": ADMIN_PASS},
+)
+if login_response.status_code != 200:
+raise Exception(f"Login failed: {login_response.status_code} - {login_response.text}")
+print("Login successful")
+
+# Call factory reset endpoint
+print("Calling factory reset endpoint...")
+reset_response = client.post(
+"/api/v1/admin/factory-reset",
+headers={"X-Confirm-Reset": "yes-delete-all-data"},
+)
+
+if reset_response.status_code == 200:
+result = reset_response.json()
+print("Factory reset successful!")
+print(f" Database tables dropped: {result['results']['database_tables_dropped']}")
+print(f" S3 objects deleted: {result['results']['s3_objects_deleted']}")
+print(f" Database reinitialized: {result['results']['database_reinitialized']}")
+print(f" Seeded: {result['results']['seeded']}")
+return True
+else:
+raise Exception(f"Factory reset failed: {reset_response.status_code} - {reset_response.text}")
+
+# Retry loop
+for attempt in range(1, MAX_RETRIES + 1):
+try:
+print(f"Attempt {attempt}/{MAX_RETRIES}")
+if do_reset():
+sys.exit(0)
+except Exception as e:
+print(f"Attempt {attempt} failed: {e}")
+if attempt < MAX_RETRIES:
+print(f"Retrying in {RETRY_DELAY} seconds...")
+time.sleep(RETRY_DELAY)
+else:
+print("All retry attempts failed")
+sys.exit(1)
+RESET_SCRIPT
+rules:
+- if: '$CI_COMMIT_BRANCH == "main"'
+when: on_success
+allow_failure: true # Don't fail pipeline if reset has issues
+
 # Integration tests for feature deployment (full suite)
-# Uses DEV_ADMIN_PASSWORD CI variable (same as deploy_feature)
 integration_test_feature:
 <<: *integration_test_template
 needs: [deploy_feature]
 variables:
 ORCHARD_TEST_URL: https://orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools
-ORCHARD_TEST_PASSWORD: $DEV_ADMIN_PASSWORD
 rules:
 - if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
 when: on_success
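The reset_stage job above drives a flow an operator could also run by hand against a non-production instance: log in as admin, then POST to /api/v1/admin/factory-reset with the confirmation header. A minimal sketch, assuming a local Orchard server on localhost:8000 and the default admin credentials (both assumptions; the routes and header come from the job script itself):

# Manual reset sketch (assumes a local instance; do not point this at a real environment).
import httpx

client = httpx.Client(base_url="http://localhost:8000", timeout=120.0)  # assumed local URL

# Same login call the CI job performs; the client keeps the resulting session.
login = client.post("/api/v1/auth/login", json={"username": "admin", "password": "changeme123"})
login.raise_for_status()

# Same confirmation header the CI job sends before the destructive reset.
reset = client.post("/api/v1/admin/factory-reset", headers={"X-Confirm-Reset": "yes-delete-all-data"})
print(reset.status_code, reset.json() if reset.status_code == 200 else reset.text)

The X-Confirm-Reset value mirrors the one the job sends; presumably the endpoint refuses requests that omit it.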
@@ -311,88 +394,9 @@ frontend_tests:
 echo "Health check failed after 30 attempts"
 exit 1

-# Ephemeral test deployment in stage namespace (main branch only)
-# Runs integration tests before promoting to long-running stage
-deploy_test:
-<<: *deploy_template
-variables:
-NAMESPACE: orch-stage-namespace
-VALUES_FILE: helm/orchard/values-dev.yaml
-BASE_URL: https://orchard-test.common.global.bsf.tools
-before_script:
-- kubectl config use-context esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
-- *helm_setup
-script:
-- echo "Deploying ephemeral test environment"
-- cd $CI_PROJECT_DIR
-- |
-helm upgrade --install orchard-test ./helm/orchard \
---namespace $NAMESPACE \
--f $VALUES_FILE \
---set image.tag=git.linux-amd64-$CI_COMMIT_SHA \
---set orchard.auth.adminPassword=$STAGE_ADMIN_PASSWORD \
---set ingress.hosts[0].host=orchard-test.common.global.bsf.tools \
---set ingress.tls[0].hosts[0]=orchard-test.common.global.bsf.tools \
---set ingress.tls[0].secretName=orchard-test-tls \
---set minioIngress.host=minio-test.common.global.bsf.tools \
---set minioIngress.tls.secretName=minio-test-tls \
---wait \
---atomic \
---timeout 10m
-- kubectl rollout status deployment/orchard-test-server -n $NAMESPACE --timeout=10m
-- *verify_deployment
-environment:
-name: test
-url: https://orchard-test.common.global.bsf.tools
-on_stop: cleanup_test
-kubernetes:
-agent: esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
-rules:
-- if: '$CI_COMMIT_BRANCH == "main"'
-when: on_success
-
-# Integration tests for ephemeral test deployment (main branch)
-# Runs against orchard-test before promoting to long-running stage
-integration_test_main:
-<<: *integration_test_template
-needs: [deploy_test]
-variables:
-ORCHARD_TEST_URL: https://orchard-test.common.global.bsf.tools
-ORCHARD_TEST_PASSWORD: $STAGE_ADMIN_PASSWORD
-rules:
-- if: '$CI_COMMIT_BRANCH == "main"'
-when: on_success
-
-# Cleanup ephemeral test deployment after integration tests
-cleanup_test:
-stage: deploy
-needs: [integration_test_main]
-image: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12
-timeout: 5m
-variables:
-NAMESPACE: orch-stage-namespace
-GIT_STRATEGY: none
-before_script:
-- kubectl config use-context esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
-script:
-- echo "Cleaning up ephemeral test deployment orchard-test"
-- helm uninstall orchard-test --namespace $NAMESPACE || true
-environment:
-name: test
-action: stop
-kubernetes:
-agent: esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
-rules:
-- if: '$CI_COMMIT_BRANCH == "main"'
-when: on_success
-allow_failure: true
-
-# Deploy to long-running stage (main branch, after ephemeral tests pass)
+# Deploy to stage (main branch)
 deploy_stage:
-stage: deploy
-# Wait for ephemeral test to pass before promoting to long-running stage
-needs: [cleanup_test]
-image: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12
+<<: *deploy_template
 variables:
 NAMESPACE: orch-stage-namespace
 VALUES_FILE: helm/orchard/values-stage.yaml
@@ -401,14 +405,13 @@ deploy_stage:
 - kubectl config use-context esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
 - *helm_setup
 script:
-- echo "Deploying to long-running stage environment"
+- echo "Deploying to stage environment"
 - cd $CI_PROJECT_DIR
 - |
 helm upgrade --install orchard-stage ./helm/orchard \
 --namespace $NAMESPACE \
 -f $VALUES_FILE \
 --set image.tag=git.linux-amd64-$CI_COMMIT_SHA \
---set orchard.auth.adminPassword=$STAGE_ADMIN_PASSWORD \
 --wait \
 --atomic \
 --timeout 10m
@@ -423,16 +426,6 @@ deploy_stage:
 - if: '$CI_COMMIT_BRANCH == "main"'
 when: on_success

-# Smoke test for long-running stage (after promotion)
-smoke_test_stage:
-<<: *smoke_test_template
-needs: [deploy_stage]
-variables:
-ORCHARD_TEST_URL: $STAGE_URL
-rules:
-- if: '$CI_COMMIT_BRANCH == "main"'
-when: on_success
-
 # Deploy feature branch to dev namespace
 deploy_feature:
 <<: *deploy_template
@@ -450,7 +443,6 @@ deploy_feature:
 --namespace $NAMESPACE \
 -f $VALUES_FILE \
 --set image.tag=git.linux-amd64-$CI_COMMIT_SHA \
---set orchard.auth.adminPassword=$DEV_ADMIN_PASSWORD \
 --set ingress.hosts[0].host=orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools \
 --set ingress.tls[0].hosts[0]=orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools \
 --set ingress.tls[0].secretName=orchard-$CI_COMMIT_REF_SLUG-tls \
@@ -1,8 +0,0 @@
-# Gitleaks configuration
-# https://github.com/gitleaks/gitleaks#configuration
-
-[allowlist]
-# Test files that contain variable names matching secret patterns (e.g., s3_key)
-paths = [
-'''backend/tests/.*\.py''',
-]
CHANGELOG.md (216 changed lines)
@@ -6,227 +6,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

 ## [Unreleased]
-### Added
-- Added transparent PyPI proxy implementing PEP 503 Simple API (#108)
-- `GET /pypi/simple/` - package index (proxied from upstream)
-- `GET /pypi/simple/{package}/` - version list with rewritten download links
-- `GET /pypi/simple/{package}/{filename}` - download with automatic caching
-- Allows `pip install --index-url https://orchard.../pypi/simple/ <package>`
-- Artifacts cached on first access through configured upstream sources
-- Added `POST /api/v1/cache/resolve` endpoint to cache packages by coordinates instead of URL (#108)
-
-### Changed
-- Upstream sources table text is now centered under column headers (#108)
-- ENV badge now appears inline with source name instead of separate column (#108)
-- Test and Edit buttons now have more prominent button styling (#108)
-- Reduced footer padding for cleaner layout (#108)
-
-### Fixed
-- Fixed purge_seed_data crash when deleting access permissions - was comparing UUID to VARCHAR column (#107)
-
-### Changed
-- Upstream source connectivity test no longer follows redirects, fixing "Exceeded maximum allowed redirects" error with Artifactory proxies (#107)
-- Test runs automatically after saving a new or updated upstream source (#107)
-- Test status now shows as colored dots (green=success, red=error) instead of text badges (#107)
-- Clicking red dot shows error details in a modal (#107)
-- Source name column no longer wraps text for better table layout (#107)
-- Renamed "Cache Management" page to "Upstream Sources" (#107)
-- Moved Delete button from table row to edit modal for cleaner table layout (#107)
-
-### Removed
-- Removed `is_public` field from upstream sources - all sources are now treated as internal/private (#107)
-- Removed `allow_public_internet` (air-gap mode) setting from cache settings - not needed for enterprise proxy use case (#107)
-- Removed seeding of public registry URLs (npm-public, pypi-public, maven-central, docker-hub) (#107)
-- Removed "Public" badge and checkbox from upstream sources UI (#107)
-- Removed "Allow Public Internet" toggle from cache settings UI (#107)
-- Removed "Global Settings" section from cache management UI - auto-create system projects is always enabled (#107)
-- Removed unused CacheSettings frontend types and API functions (#107)
-
-### Added
-- Added `ORCHARD_PURGE_SEED_DATA` environment variable support to stage helm values to remove seed data from long-running deployments (#107)
-- Added frontend system projects visual distinction (#105)
-- "Cache" badge for system projects in project list
-- "System Cache" badge on project detail page
-- Added `is_system` field to Project type
-- Added frontend admin page for upstream sources and cache settings (#75)
-- New `/admin/cache` page accessible from user menu (admin only)
-- Upstream sources table with create/edit/delete/test connectivity
-- Cache settings section with air-gap mode and auto-create system projects toggles
-- Visual indicators for env-defined sources (locked, cannot be modified)
-- Environment variable override badges when settings are overridden
-- API client functions for all cache admin operations
-- Added environment variable overrides for cache configuration (#74)
-- `ORCHARD_CACHE_ALLOW_PUBLIC_INTERNET` - Override allow_public_internet (air-gap mode)
-- `ORCHARD_CACHE_AUTO_CREATE_SYSTEM_PROJECTS` - Override auto_create_system_projects
-- `ORCHARD_UPSTREAM__{NAME}__*` - Define upstream sources via env vars
-- Env-defined sources appear in API with `source: "env"` marker
-- Env-defined sources cannot be modified/deleted via API (400 error)
-- Cache settings response includes `*_env_override` fields when overridden
-- 7 unit tests for env var parsing and configuration
-- Added Global Cache Settings Admin API (#73)
-- `GET /api/v1/admin/cache-settings` - Retrieve current cache settings
-- `PUT /api/v1/admin/cache-settings` - Update cache settings (partial updates)
-- Admin-only access with audit logging
-- Controls `allow_public_internet` (air-gap mode) and `auto_create_system_projects`
-- 7 integration tests for settings management
-- Added Upstream Sources Admin API for managing cache sources (#72)
-- `GET /api/v1/admin/upstream-sources` - List sources with filtering
-- `POST /api/v1/admin/upstream-sources` - Create source with auth configuration
-- `GET /api/v1/admin/upstream-sources/{id}` - Get source details
-- `PUT /api/v1/admin/upstream-sources/{id}` - Update source (partial updates)
-- `DELETE /api/v1/admin/upstream-sources/{id}` - Delete source
-- `POST /api/v1/admin/upstream-sources/{id}/test` - Test connectivity
-- Admin-only access with audit logging
-- Credentials never exposed (only has_password/has_headers flags)
-- 13 integration tests for all CRUD operations
-- Added system project restrictions and management (#71)
-- System projects (`_npm`, `_pypi`, etc.) cannot be deleted (returns 403)
-- System projects cannot be made private (must remain public)
-- `GET /api/v1/system-projects` endpoint to list all system cache projects
-- 5 integration tests for system project restrictions
-- Added Cache API endpoint for fetching and storing artifacts from upstream URLs (#70)
-- `POST /api/v1/cache` endpoint to cache artifacts from upstream registries
-- URL parsing helpers to extract package name/version from npm, PyPI, Maven URLs
-- Automatic system project creation (`_npm`, `_pypi`, `_maven`, etc.)
-- URL-to-artifact provenance tracking via `cached_urls` table
-- Optional user project cross-referencing for custom organization
-- Cache hit returns existing artifact without re-fetching
-- Air-gap mode enforcement (blocks public URLs when disabled)
-- Hash verification for downloaded artifacts
-- 21 unit tests for URL parsing and cache endpoint
-- Added HTTP client for fetching artifacts from upstream sources (#69)
-- `UpstreamClient` class in `backend/app/upstream.py` with streaming downloads
-- SHA256 hash computation while streaming (doesn't load large files into memory)
-- Auth support: none, basic auth, bearer token, API key (custom headers)
-- URL-to-source matching by URL prefix with priority ordering
-- Configuration options: timeouts, retries with exponential backoff, redirect limits, max file size
-- Air-gap mode enforcement via `allow_public_internet` setting
-- Response header capture for provenance tracking
-- Proper error handling with custom exception types
-- Connection test method for upstream source validation
-- 33 unit tests for client functionality
-- Added upstream artifact caching schema for hermetic builds (#68)
-- `upstream_sources` table for configuring upstream registries (npm, PyPI, Maven, etc.)
-- `cache_settings` table for global settings including air-gap mode
-- `cached_urls` table for URL-to-artifact provenance tracking
-- `is_system` column on projects for system cache projects (_npm, _pypi, etc.)
-- Support for multiple auth types: none, basic auth, bearer token, API key
-- Fernet encryption for credentials using `ORCHARD_CACHE_ENCRYPTION_KEY`
-- Default upstream sources seeded (npm-public, pypi-public, maven-central, docker-hub) - disabled by default
-- Migration `010_upstream_caching.sql`
-- Added team-based multi-tenancy for organizing projects and collaboration (#88-#104)
-- Teams serve as organizational containers for projects
-- Users can belong to multiple teams with different roles (owner, admin, member)
-- Projects can optionally belong to a team
-- Added database schema for teams (#88):
-- `teams` table with id, name, slug, description, settings, timestamps
-- `team_memberships` table mapping users to teams with roles
-- `team_id` column on projects table for team association
-- Migrations `009_teams.sql` and `009b_migrate_projects.sql`
-- Added Team and TeamMembership ORM models with relationships (#89)
-- Added TeamAuthorizationService for team-level access control (#90):
-- Team owner/admin gets admin access to all team projects
-- Team member gets read access to team projects (upgradeable by explicit permission)
-- Role hierarchy: owner > admin > member
-- Added Team API endpoints (#92, #93, #94, #95):
-- `GET /api/v1/teams` - List teams user belongs to (paginated)
-- `POST /api/v1/teams` - Create team (creator becomes owner)
-- `GET /api/v1/teams/{slug}` - Get team details
-- `PUT /api/v1/teams/{slug}` - Update team (requires admin)
-- `DELETE /api/v1/teams/{slug}` - Delete team (requires owner)
-- `GET /api/v1/teams/{slug}/members` - List team members
-- `POST /api/v1/teams/{slug}/members` - Add member (requires admin)
-- `PUT /api/v1/teams/{slug}/members/{username}` - Update member role
-- `DELETE /api/v1/teams/{slug}/members/{username}` - Remove member
-- `GET /api/v1/teams/{slug}/projects` - List team projects (paginated)
-- Updated project creation to support optional team assignment (#95)
-- Updated project responses to include team info (team_id, team_slug, team_name)
-- Added frontend team management (#97-#104):
-- TeamContext provider for managing current team selection
-- TeamSelector dropdown component (persists selection in localStorage)
-- Teams list page at `/teams`
-- Team dashboard page at `/teams/{slug}` with inline project creation
-- Team settings page at `/teams/{slug}/settings`
-- Team members page at `/teams/{slug}/members`
-- Teams navigation link in header (authenticated users only)
-- Updated seed data to create a "Demo Team" and assign all seed projects to it
-- Added TypeScript types and API client functions for teams
-- Access management now shows team-based permissions alongside explicit permissions
-- Team-based access displayed as read-only with "Source" column indicating origin
-- Team members with access show team slug and role
-- Added integration tests for team CRUD, membership, and project operations
-- Redesigned teams portal with modern card-based layout
-- Card grid view with team avatar, name, slug, role badge, and stats
-- Stats bar showing total teams, owned teams, and total projects
-- Search functionality for filtering teams (appears when >3 teams)
-- Empty states for no teams and no search results
-- Added user autocomplete component for team member invitations
-- `GET /api/v1/users/search` endpoint for username prefix search
-- Dropdown shows matching users as you type
-- Keyboard navigation support (arrow keys, enter, escape)
-- Debounced search to reduce API calls
-- Added unit tests for TeamAuthorizationService
-- Added `ORCHARD_ADMIN_PASSWORD` environment variable to configure initial admin password (#87)
-- When set, admin user is created with the specified password (no password change required)
-- When not set, defaults to `changeme123` and requires password change on first login
-- Added Helm chart support for admin password via multiple sources (#87):
-- `orchard.auth.adminPassword` - plain value (creates K8s secret)
-- `orchard.auth.existingSecret` - reference existing K8s secret
-- `orchard.auth.secretsManager` - AWS Secrets Manager integration
-- Added `.env.example` template for local development (#87)
-- Added `.env` file support in docker-compose.local.yml (#87)
-- Added Project Settings page accessible to project admins (#65)
-- General settings section for editing description and visibility
-- Access Management section (moved from project page)
-- Danger Zone section with inline delete confirmation requiring project name
-- Settings button (gear icon) on project page header for admins
-- Added artifact dependency management system (#76, #77, #78, #79, #80, #81)
-- `artifact_dependencies` table with version/tag constraints and check constraints
-- `ArtifactDependency` SQLAlchemy model with indexes for fast lookups
-- Ensure file parsing (`orchard.ensure` YAML format) during artifact upload
-- Circular dependency detection at upload time (rejected with 400)
-- Dependency conflict detection at resolution time (409 with conflict details)
-- Added dependency API endpoints (#78, #79):
-- `GET /api/v1/artifact/{artifact_id}/dependencies` - Get dependencies by artifact ID
-- `GET /api/v1/project/{project}/{package}/+/{ref}/dependencies` - Get dependencies by ref
-- `GET /api/v1/project/{project}/{package}/reverse-dependencies` - Get reverse dependencies (paginated)
-- `GET /api/v1/project/{project}/{package}/+/{ref}/resolve` - Resolve full dependency tree
-- Added dependency resolution with topological sorting (#79)
-- Returns flat list of all artifacts needed in dependency order
-- Includes download URLs, sizes, and version info for each artifact
-- Added frontend dependency visualization (#84, #85, #86):
-- Dependencies section on package page showing direct dependencies for selected tag
-- Tag/version selector to switch between artifacts
-- "Used By" section showing reverse dependencies with pagination
-- Interactive dependency graph modal with:
-- Tree visualization with collapsible nodes
-- Zoom (mouse wheel + buttons) and pan (click-drag)
-- Click to navigate to package
-- Hover tooltip with package details
-- Error display for circular dependencies and conflicts
-- Added migration `008_artifact_dependencies.sql` for dependency schema
-- Added `dependencies.py` module with parsing, validation, and resolution logic
-- Added comprehensive integration tests for all dependency features
-
-### Changed
-- Added pre-test stage reset to ensure known environment state before integration tests (#54)
-- Upload endpoint now accepts optional `ensure` file parameter for declaring dependencies
-- Updated upload API documentation with ensure file format and examples
-- Converted teams list and team projects to use DataTable component for consistent styling
-- Centered team members and team settings page content
-- Added orchard logo icon and dot separator to footer
-
-### Fixed
-- Fixed dark theme styling for team pages - modals, forms, and dropdowns now use correct theme variables
-- Fixed UserAutocomplete and TeamSelector dropdown backgrounds for dark theme
-
-## [0.5.1] - 2026-01-23
 ### Changed
 - Simplified tag pipeline to only run deploy and smoke tests (image already built on main) (#54)

 ### Fixed
 - Fixed production CI deployment namespace to use correct `orch-namespace` (#54)
-- Added gitleaks config to allowlist test files from secret scanning (#54)

 ## [0.5.0] - 2026-01-23
 ### Added
@@ -360,36 +360,21 @@ def create_default_admin(db: Session) -> Optional[User]:
 """Create the default admin user if no users exist.

 Returns the created user, or None if users already exist.
-
-The admin password can be set via ORCHARD_ADMIN_PASSWORD environment variable.
-If not set, defaults to 'changeme123' and requires password change on first login.
 """
 # Check if any users exist
 user_count = db.query(User).count()
 if user_count > 0:
 return None

-settings = get_settings()
-
-# Use configured password or default
-password = settings.admin_password if settings.admin_password else "changeme123"
-# Only require password change if using the default password
-must_change = not settings.admin_password
-
 # Create default admin
 auth_service = AuthService(db)
 admin = auth_service.create_user(
 username="admin",
-password=password,
+password="changeme123",
 is_admin=True,
-must_change_password=must_change,
+must_change_password=True,
 )
-
-if settings.admin_password:
-logger.info("Created default admin user with configured password")
-else:
-logger.info("Created default admin user with default password (changeme123)")

 return admin


@@ -658,51 +643,32 @@ class AuthorizationService:
 self, project_id: str, user: Optional[User]
 ) -> Optional[str]:
 """Get the user's access level for a project.

 Returns the highest access level the user has, or None if no access.
 Checks in order:
 1. System admin - gets admin access to all projects
 2. Project owner (created_by) - gets admin access
-3. Team-based access (owner/admin gets admin, member gets read)
-4. Explicit permission in access_permissions table
-5. Public access
+3. Explicit permission in access_permissions table
 """
-from .models import Project, AccessPermission, TeamMembership
+from .models import Project, AccessPermission

 # Get the project
 project = self.db.query(Project).filter(Project.id == project_id).first()
 if not project:
 return None

 # Anonymous users only get access to public projects
 if not user:
 return "read" if project.is_public else None

 # System admins get admin access everywhere
 if user.is_admin:
 return "admin"

 # Project owner gets admin access
 if project.created_by == user.username:
 return "admin"

-# Check team-based access if project belongs to a team
-if project.team_id:
-membership = (
-self.db.query(TeamMembership)
-.filter(
-TeamMembership.team_id == project.team_id,
-TeamMembership.user_id == user.id,
-)
-.first()
-)
-if membership:
-# Team owner/admin gets admin on all team projects
-if membership.role in ("owner", "admin"):
-return "admin"
-# Team member gets read access (upgradeable by explicit permission)
-# Continue checking explicit permissions for potential upgrade
-
 # Check explicit permissions
 permission = (
 self.db.query(AccessPermission)
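The docstring above spells out the precedence get_access_level now follows: system admin, then project owner, then explicit permission, then the public fallback. A compact, self-contained illustration of that ordering (a hypothetical helper for exposition, not the project's actual implementation):

# Hypothetical sketch of the documented precedence:
# system admin -> project owner -> explicit permission -> public fallback.
from typing import Optional

def access_level(is_admin: bool, is_owner: bool,
                 explicit: Optional[str], is_public: bool) -> Optional[str]:
    if is_admin:
        return "admin"          # 1. system admins get admin everywhere
    if is_owner:
        return "admin"          # 2. project owner (created_by) gets admin
    if explicit is not None:
        return explicit         # 3. explicit permission in access_permissions
    return "read" if is_public else None  # fallback: public projects are readable

assert access_level(False, False, None, True) == "read"
assert access_level(False, False, "write", False) == "write"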
@@ -712,27 +678,13 @@ class AuthorizationService:
 )
 .first()
 )

 if permission:
 # Check expiration
 if permission.expires_at and permission.expires_at < datetime.now(timezone.utc):
-pass # Permission expired, fall through
-else:
-return permission.level
-
-# Team member gets read access if no explicit permission
-if project.team_id:
-membership = (
-self.db.query(TeamMembership)
-.filter(
-TeamMembership.team_id == project.team_id,
-TeamMembership.user_id == user.id,
-)
-.first()
-)
-if membership:
-return "read"
+return "read" if project.is_public else None
+return permission.level

 # Fall back to public access
 return "read" if project.is_public else None

@@ -917,226 +869,6 @@ def check_project_access(
 return project


-# --- Team Authorization ---
-
-# Team roles in order of increasing privilege
-TEAM_ROLES = ["member", "admin", "owner"]
-
-
-def get_team_role_rank(role: str) -> int:
-"""Get numeric rank for team role comparison."""
-try:
-return TEAM_ROLES.index(role)
-except ValueError:
-return -1
-
-
-def has_sufficient_team_role(user_role: str, required_role: str) -> bool:
-"""Check if user_role is sufficient for required_role.
-
-Role hierarchy: owner > admin > member
-"""
-return get_team_role_rank(user_role) >= get_team_role_rank(required_role)
-
-
-class TeamAuthorizationService:
-"""Service for checking team-level authorization."""
-
-def __init__(self, db: Session):
-self.db = db
-
-def get_user_team_role(
-self, team_id: str, user: Optional[User]
-) -> Optional[str]:
-"""Get the user's role in a team.
-
-Returns the role ('owner', 'admin', 'member') or None if not a member.
-System admins who are not team members are treated as team admins.
-"""
-from .models import Team, TeamMembership
-
-if not user:
-return None
-
-# Check actual membership first
-membership = (
-self.db.query(TeamMembership)
-.filter(
-TeamMembership.team_id == team_id,
-TeamMembership.user_id == user.id,
-)
-.first()
-)
-
-if membership:
-return membership.role
-
-# System admins who are not members get admin access
-if user.is_admin:
-return "admin"
-
-return None
-
-def check_team_access(
-self,
-team_id: str,
-user: Optional[User],
-required_role: str = "member",
-) -> bool:
-"""Check if user has required role in team.
-
-Args:
-team_id: Team ID to check
-user: User to check (None means no access)
-required_role: Minimum required role ('member', 'admin', 'owner')
-
-Returns:
-True if user has sufficient role, False otherwise
-"""
-user_role = self.get_user_team_role(team_id, user)
-if not user_role:
-return False
-return has_sufficient_team_role(user_role, required_role)
-
-def can_create_project(self, team_id: str, user: Optional[User]) -> bool:
-"""Check if user can create projects in team (requires admin+)."""
-return self.check_team_access(team_id, user, "admin")
-
-def can_manage_members(self, team_id: str, user: Optional[User]) -> bool:
-"""Check if user can manage team members (requires admin+)."""
-return self.check_team_access(team_id, user, "admin")
-
-def can_delete_team(self, team_id: str, user: Optional[User]) -> bool:
-"""Check if user can delete the team (requires owner)."""
-return self.check_team_access(team_id, user, "owner")
-
-def get_team_by_slug(self, slug: str) -> Optional["Team"]:
-"""Get a team by its slug."""
-from .models import Team
-
-return self.db.query(Team).filter(Team.slug == slug).first()
-
-def get_user_teams(self, user: User) -> list:
-"""Get all teams a user is a member of."""
-from .models import Team, TeamMembership
-
-return (
-self.db.query(Team)
-.join(TeamMembership)
-.filter(TeamMembership.user_id == user.id)
-.order_by(Team.name)
-.all()
-)
-
-
-def get_team_authorization_service(db: Session = Depends(get_db)) -> TeamAuthorizationService:
-"""Get a TeamAuthorizationService instance."""
-return TeamAuthorizationService(db)
-
-
-class TeamAccessChecker:
-"""Dependency for checking team access in route handlers."""
-
-def __init__(self, required_role: str = "member"):
-self.required_role = required_role
-
-def __call__(
-self,
-slug: str,
-db: Session = Depends(get_db),
-current_user: Optional[User] = Depends(get_current_user_optional),
-) -> User:
-"""Check if user has required role in team.
-
-Raises 404 if team not found, 401 if not authenticated, 403 if insufficient role.
-Returns the current user.
-"""
-from .models import Team
-
-# Find team by slug
-team = db.query(Team).filter(Team.slug == slug).first()
-if not team:
-raise HTTPException(
-status_code=status.HTTP_404_NOT_FOUND,
-detail=f"Team '{slug}' not found",
-)
-
-if not current_user:
-raise HTTPException(
-status_code=status.HTTP_401_UNAUTHORIZED,
-detail="Authentication required",
-headers={"WWW-Authenticate": "Bearer"},
-)
-
-auth_service = TeamAuthorizationService(db)
-
-if not auth_service.check_team_access(str(team.id), current_user, self.required_role):
-raise HTTPException(
-status_code=status.HTTP_403_FORBIDDEN,
-detail=f"Insufficient team permissions. Required role: {self.required_role}",
-)
-
-return current_user
-
-
-# Pre-configured team access checkers
-require_team_member = TeamAccessChecker("member")
-require_team_admin = TeamAccessChecker("admin")
-require_team_owner = TeamAccessChecker("owner")
-
-
-def check_team_access(
-db: Session,
-team_slug: str,
-user: Optional[User],
-required_role: str = "member",
-) -> "Team":
-"""Check if user has required role in team.
-
-This is a helper function for use in route handlers.
-
-Args:
-db: Database session
-team_slug: Slug of the team
-user: Current user (can be None for no access)
-required_role: Required team role (member, admin, owner)
-
-Returns:
-The Team object if access is granted
-
-Raises:
-HTTPException 404: Team not found
-HTTPException 401: Authentication required
-HTTPException 403: Insufficient permissions
-"""
-from .models import Team
-
-# Find team by slug
-team = db.query(Team).filter(Team.slug == team_slug).first()
-if not team:
-raise HTTPException(
-status_code=status.HTTP_404_NOT_FOUND,
-detail=f"Team '{team_slug}' not found",
-)
-
-if not user:
-raise HTTPException(
-status_code=status.HTTP_401_UNAUTHORIZED,
-detail="Authentication required",
-headers={"WWW-Authenticate": "Bearer"},
-)
-
-auth_service = TeamAuthorizationService(db)
-
-if not auth_service.check_team_access(str(team.id), user, required_role):
-raise HTTPException(
-status_code=status.HTTP_403_FORBIDDEN,
-detail=f"Insufficient team permissions. Required role: {required_role}",
-)
-
-return team
-
-
 # --- OIDC Configuration Service ---


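The block removed above ranks team roles by their index in TEAM_ROLES, so owner outranks admin, which outranks member, and unknown roles never qualify. Restated standalone for illustration only (not tied to the application's models or session handling):

# Standalone restatement of the removed rank-based role check (illustrative only).
TEAM_ROLES = ["member", "admin", "owner"]  # increasing privilege

def role_rank(role: str) -> int:
    try:
        return TEAM_ROLES.index(role)
    except ValueError:
        return -1  # unknown roles never satisfy a requirement

def has_sufficient_role(user_role: str, required_role: str) -> bool:
    return role_rank(user_role) >= role_rank(required_role)

assert has_sufficient_role("owner", "admin")       # owner can perform admin-level actions
assert not has_sufficient_role("member", "admin")  # member cannot manage members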
@@ -1,316 +0,0 @@
-"""
-Cache service for upstream artifact caching.
-
-Provides URL parsing, system project management, and caching logic
-for the upstream caching feature.
-"""
-
-import logging
-import re
-from dataclasses import dataclass
-from typing import Optional
-from urllib.parse import urlparse, unquote
-
-logger = logging.getLogger(__name__)
-
-
-# System project names for each source type
-SYSTEM_PROJECT_NAMES = {
-"npm": "_npm",
-"pypi": "_pypi",
-"maven": "_maven",
-"docker": "_docker",
-"helm": "_helm",
-"nuget": "_nuget",
-"deb": "_deb",
-"rpm": "_rpm",
-"generic": "_generic",
-}
-
-# System project descriptions
-SYSTEM_PROJECT_DESCRIPTIONS = {
-"npm": "System cache for npm packages",
-"pypi": "System cache for PyPI packages",
-"maven": "System cache for Maven packages",
-"docker": "System cache for Docker images",
-"helm": "System cache for Helm charts",
-"nuget": "System cache for NuGet packages",
-"deb": "System cache for Debian packages",
-"rpm": "System cache for RPM packages",
-"generic": "System cache for generic artifacts",
-}
-
-
-@dataclass
-class ParsedUrl:
-"""Parsed URL information for caching."""
-
-package_name: str
-version: Optional[str] = None
-filename: Optional[str] = None
-
-
-def parse_npm_url(url: str) -> Optional[ParsedUrl]:
-"""
-Parse npm registry URL to extract package name and version.
-
-Formats:
-- https://registry.npmjs.org/{package}/-/{package}-{version}.tgz
-- https://registry.npmjs.org/@{scope}/{package}/-/{package}-{version}.tgz
-
-Examples:
-- https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz
-- https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz
-"""
-parsed = urlparse(url)
-path = unquote(parsed.path)
-
-# Pattern for scoped packages: /@scope/package/-/package-version.tgz
-scoped_pattern = r"^/@([^/]+)/([^/]+)/-/\2-(.+)\.tgz$"
-match = re.match(scoped_pattern, path)
-if match:
-scope, name, version = match.groups()
-return ParsedUrl(
-package_name=f"@{scope}/{name}",
-version=version,
-filename=f"{name}-{version}.tgz",
-)
-
-# Pattern for unscoped packages: /package/-/package-version.tgz
-unscoped_pattern = r"^/([^/@]+)/-/\1-(.+)\.tgz$"
-match = re.match(unscoped_pattern, path)
-if match:
-name, version = match.groups()
-return ParsedUrl(
-package_name=name,
-version=version,
-filename=f"{name}-{version}.tgz",
-)
-
-return None
-
-
-def parse_pypi_url(url: str) -> Optional[ParsedUrl]:
-"""
-Parse PyPI URL to extract package name and version.
-
-Formats:
-- https://files.pythonhosted.org/packages/.../package-version.tar.gz
-- https://files.pythonhosted.org/packages/.../package-version-py3-none-any.whl
-- https://pypi.org/packages/.../package-version.tar.gz
-
-Examples:
-- https://files.pythonhosted.org/packages/ab/cd/requests-2.28.0.tar.gz
-- https://files.pythonhosted.org/packages/ab/cd/requests-2.28.0-py3-none-any.whl
-"""
-parsed = urlparse(url)
-path = unquote(parsed.path)
-
-# Get the filename from the path
-filename = path.split("/")[-1]
-if not filename:
-return None
-
-# Handle wheel files: package-version-py3-none-any.whl
-wheel_pattern = r"^([a-zA-Z0-9_-]+)-(\d+[^-]*)-.*\.whl$"
-match = re.match(wheel_pattern, filename)
-if match:
-name, version = match.groups()
-# Normalize package name (PyPI uses underscores internally)
-name = name.replace("_", "-").lower()
-return ParsedUrl(
-package_name=name,
-version=version,
-filename=filename,
-)
-
-# Handle source distributions: package-version.tar.gz or package-version.zip
-sdist_pattern = r"^([a-zA-Z0-9_-]+)-(\d+(?:\.\d+)*(?:[a-zA-Z0-9_.+-]*)?)(?:\.tar\.gz|\.zip|\.tar\.bz2)$"
-match = re.match(sdist_pattern, filename)
-if match:
-name, version = match.groups()
-name = name.replace("_", "-").lower()
-return ParsedUrl(
-package_name=name,
-version=version,
-filename=filename,
-)
-
-return None
-
-
-def parse_maven_url(url: str) -> Optional[ParsedUrl]:
-"""
-Parse Maven repository URL to extract artifact info.
-
-Format:
-- https://repo1.maven.org/maven2/{group}/{artifact}/{version}/{artifact}-{version}.jar
-
-Examples:
-- https://repo1.maven.org/maven2/org/apache/commons/commons-lang3/3.12.0/commons-lang3-3.12.0.jar
-- https://repo1.maven.org/maven2/com/google/guava/guava/31.1-jre/guava-31.1-jre.jar
-"""
-parsed = urlparse(url)
-path = unquote(parsed.path)
-
-# Find /maven2/ or similar repository path
-maven2_idx = path.find("/maven2/")
-if maven2_idx >= 0:
-path = path[maven2_idx + 8:] # Remove /maven2/
-elif path.startswith("/"):
-path = path[1:]
-
-parts = path.split("/")
-if len(parts) < 4:
-return None
-
-# Last part is filename, before that is version, before that is artifact
-filename = parts[-1]
-version = parts[-2]
-artifact = parts[-3]
-group = ".".join(parts[:-3])
-
-# Verify filename matches expected pattern
-if not filename.startswith(f"{artifact}-{version}"):
-return None
-
-return ParsedUrl(
-package_name=f"{group}:{artifact}",
-version=version,
-filename=filename,
-)
-
-
-def parse_docker_url(url: str) -> Optional[ParsedUrl]:
-"""
-Parse Docker registry URL to extract image info.
-
-Note: Docker registries are more complex (manifests, blobs, etc.)
-This handles basic blob/manifest URLs.
-
-Examples:
-- https://registry-1.docker.io/v2/library/nginx/blobs/sha256:abc123
-- https://registry-1.docker.io/v2/myuser/myimage/manifests/latest
-"""
-parsed = urlparse(url)
-path = unquote(parsed.path)
-
-# Pattern: /v2/{namespace}/{image}/blobs/{digest} or /manifests/{tag}
-pattern = r"^/v2/([^/]+(?:/[^/]+)?)/([^/]+)/(blobs|manifests)/(.+)$"
-match = re.match(pattern, path)
-if match:
-namespace, image, artifact_type, reference = match.groups()
-if namespace == "library":
-package_name = image
-else:
-package_name = f"{namespace}/{image}"
-
-# For manifests, the reference is the tag
-version = reference if artifact_type == "manifests" else None
-
-return ParsedUrl(
-package_name=package_name,
-version=version,
-filename=f"{image}-{reference}" if version else reference,
-)
-
-return None
-
-
-def parse_generic_url(url: str) -> ParsedUrl:
-"""
-Parse a generic URL to extract filename.
-
-Attempts to extract meaningful package name and version from filename.
-
-Examples:
-- https://example.com/downloads/myapp-1.2.3.tar.gz
-- https://github.com/user/repo/releases/download/v1.0/release.zip
-"""
-parsed = urlparse(url)
-path = unquote(parsed.path)
-filename = path.split("/")[-1] or "artifact"
-
-# List of known compound and simple extensions
-known_extensions = [
-".tar.gz", ".tar.bz2", ".tar.xz",
-".zip", ".tgz", ".gz", ".jar", ".war", ".deb", ".rpm"
-]
-
-# Strip extension from filename first
-base_name = filename
-matched_ext = None
-for ext in known_extensions:
-if filename.endswith(ext):
-base_name = filename[:-len(ext)]
-matched_ext = ext
-break
-
-if matched_ext is None:
-# Unknown extension, return filename as package name
-return ParsedUrl(
-package_name=filename,
-version=None,
-filename=filename,
-)
-
-# Try to extract version from base_name
-# Pattern: name-version or name_version
-# Version starts with digit(s) and can include dots, dashes, and alphanumeric suffixes
-version_pattern = r"^(.+?)[-_](v?\d+(?:\.\d+)*(?:[-_][a-zA-Z0-9]+)?)$"
-match = re.match(version_pattern, base_name)
-if match:
-name, version = match.groups()
-return ParsedUrl(
-package_name=name,
-version=version,
-filename=filename,
-)
-
-# No version found, use base_name as package name
-return ParsedUrl(
-package_name=base_name,
-version=None,
-filename=filename,
-)
-
-
-def parse_url(url: str, source_type: str) -> ParsedUrl:
-"""
-Parse URL to extract package name and version based on source type.
-
-Args:
-url: The URL to parse.
-source_type: The source type (npm, pypi, maven, docker, etc.)
-
-Returns:
-ParsedUrl with extracted information.
-"""
-parsed = None
-
-if source_type == "npm":
-parsed = parse_npm_url(url)
-elif source_type == "pypi":
-parsed = parse_pypi_url(url)
-elif source_type == "maven":
-parsed = parse_maven_url(url)
-elif source_type == "docker":
-parsed = parse_docker_url(url)
-
-# Fall back to generic parsing if type-specific parsing fails
-if parsed is None:
-parsed = parse_generic_url(url)
-
-return parsed
-
-
-def get_system_project_name(source_type: str) -> str:
-"""Get the system project name for a source type."""
-return SYSTEM_PROJECT_NAMES.get(source_type, "_generic")
-
-
-def get_system_project_description(source_type: str) -> str:
-"""Get the system project description for a source type."""
-return SYSTEM_PROJECT_DESCRIPTIONS.get(
-source_type, "System cache for artifacts"
-)
@@ -1,8 +1,5 @@
|
|||||||
from pydantic_settings import BaseSettings
|
from pydantic_settings import BaseSettings
|
||||||
from functools import lru_cache
|
from functools import lru_cache
|
||||||
from typing import Optional
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
|
|
||||||
|
|
||||||
class Settings(BaseSettings):
|
class Settings(BaseSettings):
|
||||||
@@ -56,14 +53,6 @@ class Settings(BaseSettings):
|
|||||||
log_level: str = "INFO" # DEBUG, INFO, WARNING, ERROR, CRITICAL
|
log_level: str = "INFO" # DEBUG, INFO, WARNING, ERROR, CRITICAL
|
||||||
log_format: str = "auto" # "json", "standard", or "auto" (json in production)
|
log_format: str = "auto" # "json", "standard", or "auto" (json in production)
|
||||||
|
|
||||||
# Initial admin user settings
|
|
||||||
admin_password: str = "" # Initial admin password (if empty, uses 'changeme123')
|
|
||||||
|
|
||||||
# Cache settings
|
|
||||||
cache_encryption_key: str = "" # Fernet key for encrypting upstream credentials (auto-generated if empty)
|
|
||||||
# Global cache settings override (None = use DB value, True/False = override DB)
|
|
||||||
cache_auto_create_system_projects: Optional[bool] = None # Override auto_create_system_projects
|
|
||||||
|
|
||||||
# JWT Authentication settings (optional, for external identity providers)
|
# JWT Authentication settings (optional, for external identity providers)
|
||||||
jwt_enabled: bool = False # Enable JWT token validation
|
jwt_enabled: bool = False # Enable JWT token validation
|
||||||
jwt_secret: str = "" # Secret key for HS256, or leave empty for RS256 with JWKS
|
jwt_secret: str = "" # Secret key for HS256, or leave empty for RS256 with JWKS
|
||||||
@@ -96,110 +85,3 @@ class Settings(BaseSettings):
|
|||||||
@lru_cache()
|
@lru_cache()
|
||||||
def get_settings() -> Settings:
|
def get_settings() -> Settings:
|
||||||
return Settings()
|
return Settings()
|
||||||
|
|
||||||
|
|
||||||
class EnvUpstreamSource:
|
|
||||||
"""Represents an upstream source defined via environment variables."""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
name: str,
|
|
||||||
url: str,
|
|
||||||
source_type: str = "generic",
|
|
||||||
enabled: bool = True,
|
|
||||||
auth_type: str = "none",
|
|
||||||
username: Optional[str] = None,
|
|
||||||
password: Optional[str] = None,
|
|
||||||
priority: int = 100,
|
|
||||||
):
|
|
||||||
self.name = name
|
|
||||||
self.url = url
|
|
||||||
self.source_type = source_type
|
|
||||||
self.enabled = enabled
|
|
||||||
self.auth_type = auth_type
|
|
||||||
self.username = username
|
|
||||||
self.password = password
|
|
||||||
self.priority = priority
|
|
||||||
self.source = "env" # Mark as env-defined
|
|
||||||
|
|
||||||
|
|
||||||
def parse_upstream_sources_from_env() -> list[EnvUpstreamSource]:
|
|
||||||
"""
|
|
||||||
Parse upstream sources from environment variables.
|
|
||||||
|
|
||||||
Uses double underscore (__) as separator to allow source names with single underscores.
|
|
||||||
Pattern: ORCHARD_UPSTREAM__{NAME}__FIELD
|
|
||||||
|
|
||||||
Example:
|
|
||||||
ORCHARD_UPSTREAM__NPM_PRIVATE__URL=https://npm.corp.com
|
|
||||||
ORCHARD_UPSTREAM__NPM_PRIVATE__TYPE=npm
|
|
||||||
ORCHARD_UPSTREAM__NPM_PRIVATE__ENABLED=true
|
|
||||||
ORCHARD_UPSTREAM__NPM_PRIVATE__AUTH_TYPE=basic
|
|
||||||
ORCHARD_UPSTREAM__NPM_PRIVATE__USERNAME=reader
|
|
||||||
ORCHARD_UPSTREAM__NPM_PRIVATE__PASSWORD=secret
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
List of EnvUpstreamSource objects parsed from environment variables.
|
|
||||||
"""
|
|
||||||
# Pattern: ORCHARD_UPSTREAM__{NAME}__{FIELD}
|
|
||||||
pattern = re.compile(r"^ORCHARD_UPSTREAM__([A-Z0-9_]+)__([A-Z_]+)$", re.IGNORECASE)
|
|
||||||
|
|
||||||
# Collect all env vars matching the pattern, grouped by source name
|
|
||||||
sources_data: dict[str, dict[str, str]] = {}
|
|
||||||
|
|
||||||
for key, value in os.environ.items():
|
|
||||||
match = pattern.match(key)
|
|
||||||
if match:
|
|
||||||
source_name = match.group(1).lower() # Normalize to lowercase
|
|
||||||
field = match.group(2).upper()
|
|
||||||
if source_name not in sources_data:
|
|
||||||
sources_data[source_name] = {}
|
|
||||||
sources_data[source_name][field] = value
|
|
||||||
|
|
||||||
# Build source objects from collected data
|
|
||||||
sources: list[EnvUpstreamSource] = []
|
|
||||||
|
|
||||||
for name, data in sources_data.items():
|
|
||||||
# URL is required
|
|
||||||
url = data.get("URL")
|
|
||||||
if not url:
|
|
||||||
continue # Skip sources without URL
|
|
||||||
|
|
||||||
# Parse boolean fields
|
|
||||||
def parse_bool(val: Optional[str], default: bool) -> bool:
|
|
||||||
if val is None:
|
|
||||||
return default
|
|
||||||
return val.lower() in ("true", "1", "yes", "on")
|
|
||||||
|
|
||||||
# Parse integer fields
|
|
||||||
def parse_int(val: Optional[str], default: int) -> int:
|
|
||||||
if val is None:
|
|
||||||
return default
|
|
||||||
try:
|
|
||||||
return int(val)
|
|
||||||
except ValueError:
|
|
||||||
return default
|
|
||||||
|
|
||||||
source = EnvUpstreamSource(
|
|
||||||
name=name.replace("_", "-"), # Convert underscores to hyphens for readability
|
|
||||||
url=url,
|
|
||||||
source_type=data.get("TYPE", "generic").lower(),
|
|
||||||
enabled=parse_bool(data.get("ENABLED"), True),
|
|
||||||
auth_type=data.get("AUTH_TYPE", "none").lower(),
|
|
||||||
username=data.get("USERNAME"),
|
|
||||||
password=data.get("PASSWORD"),
|
|
||||||
priority=parse_int(data.get("PRIORITY"), 100),
|
|
||||||
)
|
|
||||||
sources.append(source)
|
|
||||||
|
|
||||||
return sources
|
|
||||||
|
|
||||||
|
|
||||||
@lru_cache()
|
|
||||||
def get_env_upstream_sources() -> tuple[EnvUpstreamSource, ...]:
|
|
||||||
"""
|
|
||||||
Get cached list of upstream sources from environment variables.
|
|
||||||
|
|
||||||
Returns a tuple for hashability (required by lru_cache).
|
|
||||||
"""
|
|
||||||
return tuple(parse_upstream_sources_from_env())
|
|
||||||
|
|||||||
@@ -1,34 +1,17 @@
|
|||||||
from sqlalchemy import create_engine, text, event
|
from sqlalchemy import create_engine, text, event
|
||||||
from sqlalchemy.orm import sessionmaker, Session
|
from sqlalchemy.orm import sessionmaker, Session
|
||||||
from sqlalchemy.pool import QueuePool
|
from sqlalchemy.pool import QueuePool
|
||||||
from typing import Generator, NamedTuple
|
from typing import Generator
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
import logging
|
import logging
|
||||||
import time
|
import time
|
||||||
import hashlib
|
|
||||||
|
|
||||||
from .config import get_settings
|
from .config import get_settings
|
||||||
from .models import Base
|
from .models import Base
|
||||||
from .purge_seed_data import should_purge_seed_data, purge_seed_data
|
|
||||||
|
|
||||||
settings = get_settings()
|
settings = get_settings()
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Migration(NamedTuple):
|
|
||||||
"""A database migration with a unique name and SQL to execute."""
|
|
||||||
name: str
|
|
||||||
sql: str
|
|
||||||
|
|
||||||
|
|
||||||
# PostgreSQL error codes that indicate "already exists" - safe to skip
|
|
||||||
SAFE_PG_ERROR_CODES = {
|
|
||||||
"42P07", # duplicate_table
|
|
||||||
"42701", # duplicate_column
|
|
||||||
"42710", # duplicate_object (index, constraint, etc.)
|
|
||||||
"42P16", # invalid_table_definition (e.g., column already exists)
|
|
||||||
}
|
|
||||||
|
|
||||||
# Build connect_args with query timeout if configured
|
# Build connect_args with query timeout if configured
|
||||||
connect_args = {}
|
connect_args = {}
|
||||||
if settings.database_query_timeout > 0:
|
if settings.database_query_timeout > 0:
|
||||||
@@ -81,533 +64,236 @@ def init_db():
|
|||||||
# Run migrations for schema updates
|
# Run migrations for schema updates
|
||||||
_run_migrations()
|
_run_migrations()
|
||||||
|
|
||||||
# Purge seed data if requested (for transitioning to production-like environment)
|
|
||||||
if should_purge_seed_data():
|
|
||||||
db = SessionLocal()
|
|
||||||
try:
|
|
||||||
purge_seed_data(db)
|
|
||||||
finally:
|
|
||||||
db.close()
|
|
||||||
|
|
||||||
|
|
||||||
def _ensure_migrations_table(conn) -> None:
|
|
||||||
"""Create the migrations tracking table if it doesn't exist."""
|
|
||||||
conn.execute(text("""
|
|
||||||
CREATE TABLE IF NOT EXISTS _schema_migrations (
|
|
||||||
name VARCHAR(255) PRIMARY KEY,
|
|
||||||
checksum VARCHAR(64) NOT NULL,
|
|
||||||
applied_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
|
||||||
);
|
|
||||||
"""))
|
|
||||||
conn.commit()
|
|
||||||
|
|
||||||
|
|
||||||
def _get_applied_migrations(conn) -> dict[str, str]:
|
|
||||||
"""Get all applied migrations and their checksums."""
|
|
||||||
result = conn.execute(text(
|
|
||||||
"SELECT name, checksum FROM _schema_migrations"
|
|
||||||
))
|
|
||||||
return {row[0]: row[1] for row in result}
|
|
||||||
|
|
||||||
|
|
||||||
def _compute_checksum(sql: str) -> str:
|
|
||||||
"""Compute a checksum for migration SQL to detect changes."""
|
|
||||||
return hashlib.sha256(sql.strip().encode()).hexdigest()[:16]
|
|
||||||
|
|
||||||
|
|
||||||
def _is_safe_error(exception: Exception) -> bool:
|
|
||||||
"""Check if the error indicates the migration was already applied."""
|
|
||||||
# Check for psycopg2 errors with pgcode attribute
|
|
||||||
original = getattr(exception, "orig", None)
|
|
||||||
if original is not None:
|
|
||||||
pgcode = getattr(original, "pgcode", None)
|
|
||||||
if pgcode in SAFE_PG_ERROR_CODES:
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Fallback: check error message for common "already exists" patterns
|
|
||||||
error_str = str(exception).lower()
|
|
||||||
safe_patterns = [
|
|
||||||
"already exists",
|
|
||||||
"duplicate key",
|
|
||||||
"relation .* already exists",
|
|
||||||
"column .* already exists",
|
|
||||||
]
|
|
||||||
return any(pattern in error_str for pattern in safe_patterns)
|
|
||||||
|
|
||||||
|
|
||||||
def _record_migration(conn, name: str, checksum: str) -> None:
|
|
||||||
"""Record a migration as applied."""
|
|
||||||
conn.execute(text(
|
|
||||||
"INSERT INTO _schema_migrations (name, checksum) VALUES (:name, :checksum)"
|
|
||||||
), {"name": name, "checksum": checksum})
|
|
||||||
conn.commit()
|
|
||||||
|
|
||||||
|
|
||||||
def _run_migrations():
|
def _run_migrations():
|
||||||
"""Run manual migrations for schema updates with tracking and error detection."""
|
"""Run manual migrations for schema updates"""
|
||||||
migrations = [
|
migrations = [
|
||||||
Migration(
|
# Add format_metadata column to artifacts table
|
||||||
name="001_add_format_metadata",
|
"""
|
||||||
sql="""
|
DO $$
|
||||||
DO $$
|
BEGIN
|
||||||
BEGIN
|
IF NOT EXISTS (
|
||||||
IF NOT EXISTS (
|
SELECT 1 FROM information_schema.columns
|
||||||
SELECT 1 FROM information_schema.columns
|
WHERE table_name = 'artifacts' AND column_name = 'format_metadata'
|
||||||
WHERE table_name = 'artifacts' AND column_name = 'format_metadata'
|
) THEN
|
||||||
) THEN
|
ALTER TABLE artifacts ADD COLUMN format_metadata JSONB DEFAULT '{}';
|
||||||
ALTER TABLE artifacts ADD COLUMN format_metadata JSONB DEFAULT '{}';
|
END IF;
|
||||||
END IF;
|
END $$;
|
||||||
END $$;
|
""",
|
||||||
""",
|
# Add format column to packages table
|
||||||
),
|
"""
|
||||||
Migration(
|
DO $$
|
||||||
name="002_add_package_format",
|
BEGIN
|
||||||
sql="""
|
IF NOT EXISTS (
|
||||||
DO $$
|
SELECT 1 FROM information_schema.columns
|
||||||
BEGIN
|
WHERE table_name = 'packages' AND column_name = 'format'
|
||||||
IF NOT EXISTS (
|
) THEN
|
||||||
SELECT 1 FROM information_schema.columns
|
ALTER TABLE packages ADD COLUMN format VARCHAR(50) DEFAULT 'generic' NOT NULL;
|
||||||
WHERE table_name = 'packages' AND column_name = 'format'
|
CREATE INDEX IF NOT EXISTS idx_packages_format ON packages(format);
|
||||||
) THEN
|
END IF;
|
||||||
ALTER TABLE packages ADD COLUMN format VARCHAR(50) DEFAULT 'generic' NOT NULL;
|
END $$;
|
||||||
CREATE INDEX IF NOT EXISTS idx_packages_format ON packages(format);
|
""",
|
||||||
END IF;
|
# Add platform column to packages table
|
||||||
END $$;
|
"""
|
||||||
""",
|
DO $$
|
||||||
),
|
BEGIN
|
||||||
Migration(
|
IF NOT EXISTS (
|
||||||
name="003_add_package_platform",
|
SELECT 1 FROM information_schema.columns
|
||||||
sql="""
|
WHERE table_name = 'packages' AND column_name = 'platform'
|
||||||
DO $$
|
) THEN
|
||||||
BEGIN
|
ALTER TABLE packages ADD COLUMN platform VARCHAR(50) DEFAULT 'any' NOT NULL;
|
||||||
IF NOT EXISTS (
|
CREATE INDEX IF NOT EXISTS idx_packages_platform ON packages(platform);
|
||||||
SELECT 1 FROM information_schema.columns
|
END IF;
|
||||||
WHERE table_name = 'packages' AND column_name = 'platform'
|
END $$;
|
||||||
) THEN
|
""",
|
||||||
ALTER TABLE packages ADD COLUMN platform VARCHAR(50) DEFAULT 'any' NOT NULL;
|
# Add ref_count index and constraints for artifacts
|
||||||
CREATE INDEX IF NOT EXISTS idx_packages_platform ON packages(platform);
|
"""
|
||||||
END IF;
|
DO $$
|
||||||
END $$;
|
BEGIN
|
||||||
""",
|
-- Add ref_count index
|
||||||
),
|
IF NOT EXISTS (
|
||||||
Migration(
|
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_artifacts_ref_count'
|
||||||
name="004_add_ref_count_index_constraint",
|
) THEN
|
||||||
sql="""
|
CREATE INDEX idx_artifacts_ref_count ON artifacts(ref_count);
|
||||||
DO $$
|
END IF;
|
||||||
BEGIN
|
|
||||||
IF NOT EXISTS (
|
|
||||||
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_artifacts_ref_count'
|
|
||||||
) THEN
|
|
||||||
CREATE INDEX idx_artifacts_ref_count ON artifacts(ref_count);
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
IF NOT EXISTS (
|
-- Add ref_count >= 0 constraint
|
||||||
SELECT 1 FROM pg_constraint WHERE conname = 'check_ref_count_non_negative'
|
IF NOT EXISTS (
|
||||||
) THEN
|
SELECT 1 FROM pg_constraint WHERE conname = 'check_ref_count_non_negative'
|
||||||
ALTER TABLE artifacts ADD CONSTRAINT check_ref_count_non_negative CHECK (ref_count >= 0);
|
) THEN
|
||||||
END IF;
|
ALTER TABLE artifacts ADD CONSTRAINT check_ref_count_non_negative CHECK (ref_count >= 0);
|
||||||
END $$;
|
END IF;
|
||||||
""",
|
END $$;
|
||||||
),
|
""",
|
||||||
Migration(
|
# Add composite indexes for packages and tags
|
||||||
name="005_add_composite_indexes",
|
"""
|
||||||
sql="""
|
DO $$
|
||||||
DO $$
|
BEGIN
|
||||||
BEGIN
|
-- Composite index for package lookup by project and name
|
||||||
IF NOT EXISTS (
|
IF NOT EXISTS (
|
||||||
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_packages_project_name'
|
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_packages_project_name'
|
||||||
) THEN
|
) THEN
|
||||||
CREATE UNIQUE INDEX idx_packages_project_name ON packages(project_id, name);
|
CREATE UNIQUE INDEX idx_packages_project_name ON packages(project_id, name);
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
IF NOT EXISTS (
|
-- Composite index for tag lookup by package and name
|
||||||
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tags_package_name'
|
IF NOT EXISTS (
|
||||||
) THEN
|
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tags_package_name'
|
||||||
CREATE UNIQUE INDEX idx_tags_package_name ON tags(package_id, name);
|
) THEN
|
||||||
END IF;
|
CREATE UNIQUE INDEX idx_tags_package_name ON tags(package_id, name);
|
||||||
|
END IF;
|
||||||
|
|
||||||
IF NOT EXISTS (
|
-- Composite index for recent tags queries
|
||||||
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tags_package_created_at'
|
IF NOT EXISTS (
|
||||||
) THEN
|
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tags_package_created_at'
|
||||||
CREATE INDEX idx_tags_package_created_at ON tags(package_id, created_at);
|
) THEN
|
||||||
|
CREATE INDEX idx_tags_package_created_at ON tags(package_id, created_at);
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
""",
|
||||||
|
# Add package_versions indexes and triggers (007_package_versions.sql)
|
||||||
|
"""
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
-- Create indexes for package_versions if table exists
|
||||||
|
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
|
||||||
|
-- Indexes for common queries
|
||||||
|
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_package_versions_package_id') THEN
|
||||||
|
CREATE INDEX idx_package_versions_package_id ON package_versions(package_id);
|
||||||
END IF;
|
END IF;
|
||||||
END $$;
|
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_package_versions_artifact_id') THEN
|
||||||
""",
|
CREATE INDEX idx_package_versions_artifact_id ON package_versions(artifact_id);
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="006_add_package_versions_indexes",
|
|
||||||
sql="""
|
|
||||||
DO $$
|
|
||||||
BEGIN
|
|
||||||
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
|
|
||||||
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_package_versions_package_id') THEN
|
|
||||||
CREATE INDEX idx_package_versions_package_id ON package_versions(package_id);
|
|
||||||
END IF;
|
|
||||||
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_package_versions_artifact_id') THEN
|
|
||||||
CREATE INDEX idx_package_versions_artifact_id ON package_versions(artifact_id);
|
|
||||||
END IF;
|
|
||||||
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_package_versions_package_version') THEN
|
|
||||||
CREATE INDEX idx_package_versions_package_version ON package_versions(package_id, version);
|
|
||||||
END IF;
|
|
||||||
END IF;
|
END IF;
|
||||||
END $$;
|
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_package_versions_package_version') THEN
|
||||||
""",
|
CREATE INDEX idx_package_versions_package_version ON package_versions(package_id, version);
|
||||||
),
|
END IF;
|
||||||
Migration(
|
END IF;
|
||||||
name="007_create_ref_count_trigger_functions",
|
END $$;
|
||||||
sql="""
|
""",
|
||||||
CREATE OR REPLACE FUNCTION increment_artifact_ref_count()
|
# Create ref_count trigger functions for tags (ensures triggers exist even if initial migration wasn't run)
|
||||||
RETURNS TRIGGER AS $$
|
"""
|
||||||
BEGIN
|
CREATE OR REPLACE FUNCTION increment_artifact_ref_count()
|
||||||
UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
|
RETURNS TRIGGER AS $$
|
||||||
RETURN NEW;
|
BEGIN
|
||||||
END;
|
UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
|
||||||
$$ LANGUAGE plpgsql;
|
RETURN NEW;
|
||||||
|
END;
|
||||||
CREATE OR REPLACE FUNCTION decrement_artifact_ref_count()
|
$$ LANGUAGE plpgsql;
|
||||||
RETURNS TRIGGER AS $$
|
""",
|
||||||
BEGIN
|
"""
|
||||||
|
CREATE OR REPLACE FUNCTION decrement_artifact_ref_count()
|
||||||
|
RETURNS TRIGGER AS $$
|
||||||
|
BEGIN
|
||||||
|
UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
|
||||||
|
RETURN OLD;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
|
""",
|
||||||
|
"""
|
||||||
|
CREATE OR REPLACE FUNCTION update_artifact_ref_count()
|
||||||
|
RETURNS TRIGGER AS $$
|
||||||
|
BEGIN
|
||||||
|
IF OLD.artifact_id != NEW.artifact_id THEN
|
||||||
UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
|
UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
|
||||||
RETURN OLD;
|
|
||||||
END;
|
|
||||||
$$ LANGUAGE plpgsql;
|
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION update_artifact_ref_count()
|
|
||||||
RETURNS TRIGGER AS $$
|
|
||||||
BEGIN
|
|
||||||
IF OLD.artifact_id != NEW.artifact_id THEN
|
|
||||||
UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
|
|
||||||
UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
|
|
||||||
END IF;
|
|
||||||
RETURN NEW;
|
|
||||||
END;
|
|
||||||
$$ LANGUAGE plpgsql;
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="008_create_tags_ref_count_triggers",
|
|
||||||
sql="""
|
|
||||||
DO $$
|
|
||||||
BEGIN
|
|
||||||
DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
|
|
||||||
CREATE TRIGGER tags_ref_count_insert_trigger
|
|
||||||
AFTER INSERT ON tags
|
|
||||||
FOR EACH ROW
|
|
||||||
EXECUTE FUNCTION increment_artifact_ref_count();
|
|
||||||
|
|
||||||
DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
|
|
||||||
CREATE TRIGGER tags_ref_count_delete_trigger
|
|
||||||
AFTER DELETE ON tags
|
|
||||||
FOR EACH ROW
|
|
||||||
EXECUTE FUNCTION decrement_artifact_ref_count();
|
|
||||||
|
|
||||||
DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
|
|
||||||
CREATE TRIGGER tags_ref_count_update_trigger
|
|
||||||
AFTER UPDATE ON tags
|
|
||||||
FOR EACH ROW
|
|
||||||
WHEN (OLD.artifact_id IS DISTINCT FROM NEW.artifact_id)
|
|
||||||
EXECUTE FUNCTION update_artifact_ref_count();
|
|
||||||
END $$;
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="009_create_version_ref_count_functions",
|
|
||||||
sql="""
|
|
||||||
CREATE OR REPLACE FUNCTION increment_version_ref_count()
|
|
||||||
RETURNS TRIGGER AS $$
|
|
||||||
BEGIN
|
|
||||||
UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
|
UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
|
||||||
RETURN NEW;
|
END IF;
|
||||||
END;
|
RETURN NEW;
|
||||||
$$ LANGUAGE plpgsql;
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
|
""",
|
||||||
|
# Create triggers for tags ref_count management
|
||||||
|
"""
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
-- Drop and recreate triggers to ensure they're current
|
||||||
|
DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
|
||||||
|
CREATE TRIGGER tags_ref_count_insert_trigger
|
||||||
|
AFTER INSERT ON tags
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION increment_artifact_ref_count();
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION decrement_version_ref_count()
|
DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
|
||||||
RETURNS TRIGGER AS $$
|
CREATE TRIGGER tags_ref_count_delete_trigger
|
||||||
BEGIN
|
AFTER DELETE ON tags
|
||||||
UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
|
FOR EACH ROW
|
||||||
RETURN OLD;
|
EXECUTE FUNCTION decrement_artifact_ref_count();
|
||||||
END;
|
|
||||||
$$ LANGUAGE plpgsql;
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="010_create_package_versions_triggers",
|
|
||||||
sql="""
|
|
||||||
DO $$
|
|
||||||
BEGIN
|
|
||||||
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
|
|
||||||
DROP TRIGGER IF EXISTS package_versions_ref_count_insert ON package_versions;
|
|
||||||
CREATE TRIGGER package_versions_ref_count_insert
|
|
||||||
AFTER INSERT ON package_versions
|
|
||||||
FOR EACH ROW
|
|
||||||
EXECUTE FUNCTION increment_version_ref_count();
|
|
||||||
|
|
||||||
DROP TRIGGER IF EXISTS package_versions_ref_count_delete ON package_versions;
|
DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
|
||||||
CREATE TRIGGER package_versions_ref_count_delete
|
CREATE TRIGGER tags_ref_count_update_trigger
|
||||||
AFTER DELETE ON package_versions
|
AFTER UPDATE ON tags
|
||||||
FOR EACH ROW
|
FOR EACH ROW
|
||||||
EXECUTE FUNCTION decrement_version_ref_count();
|
WHEN (OLD.artifact_id IS DISTINCT FROM NEW.artifact_id)
|
||||||
END IF;
|
EXECUTE FUNCTION update_artifact_ref_count();
|
||||||
END $$;
|
END $$;
|
||||||
""",
|
""",
|
||||||
),
|
# Create ref_count trigger functions for package_versions
|
||||||
Migration(
|
"""
|
||||||
name="011_migrate_semver_tags_to_versions",
|
CREATE OR REPLACE FUNCTION increment_version_ref_count()
|
||||||
sql=r"""
|
RETURNS TRIGGER AS $$
|
||||||
DO $$
|
BEGIN
|
||||||
BEGIN
|
UPDATE artifacts SET ref_count = ref_count + 1 WHERE id = NEW.artifact_id;
|
||||||
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
|
RETURN NEW;
|
||||||
INSERT INTO package_versions (id, package_id, artifact_id, version, version_source, created_by, created_at)
|
END;
|
||||||
SELECT
|
$$ LANGUAGE plpgsql;
|
||||||
gen_random_uuid(),
|
""",
|
||||||
t.package_id,
|
"""
|
||||||
t.artifact_id,
|
CREATE OR REPLACE FUNCTION decrement_version_ref_count()
|
||||||
CASE WHEN t.name LIKE 'v%' THEN substring(t.name from 2) ELSE t.name END,
|
RETURNS TRIGGER AS $$
|
||||||
'migrated_from_tag',
|
BEGIN
|
||||||
t.created_by,
|
UPDATE artifacts SET ref_count = ref_count - 1 WHERE id = OLD.artifact_id;
|
||||||
t.created_at
|
RETURN OLD;
|
||||||
FROM tags t
|
END;
|
||||||
WHERE t.name ~ '^v?[0-9]+\.[0-9]+(\.[0-9]+)?([-.][a-zA-Z0-9]+)?$'
|
$$ LANGUAGE plpgsql;
|
||||||
ON CONFLICT (package_id, version) DO NOTHING;
|
""",
|
||||||
END IF;
|
# Create triggers for package_versions ref_count
|
||||||
END $$;
|
"""
|
||||||
""",
|
DO $$
|
||||||
),
|
BEGIN
|
||||||
Migration(
|
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
|
||||||
name="012_create_teams_table",
|
-- Drop and recreate triggers to ensure they're current
|
||||||
sql="""
|
DROP TRIGGER IF EXISTS package_versions_ref_count_insert ON package_versions;
|
||||||
CREATE TABLE IF NOT EXISTS teams (
|
CREATE TRIGGER package_versions_ref_count_insert
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
AFTER INSERT ON package_versions
|
||||||
name VARCHAR(255) NOT NULL,
|
FOR EACH ROW
|
||||||
slug VARCHAR(255) NOT NULL UNIQUE,
|
EXECUTE FUNCTION increment_version_ref_count();
|
||||||
description TEXT,
|
|
||||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
|
||||||
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
|
||||||
created_by VARCHAR(255) NOT NULL,
|
|
||||||
settings JSONB DEFAULT '{}'
|
|
||||||
);
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="013_create_team_memberships_table",
|
|
||||||
sql="""
|
|
||||||
CREATE TABLE IF NOT EXISTS team_memberships (
|
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
|
||||||
team_id UUID NOT NULL REFERENCES teams(id) ON DELETE CASCADE,
|
|
||||||
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
|
||||||
role VARCHAR(50) NOT NULL DEFAULT 'member',
|
|
||||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
|
||||||
invited_by VARCHAR(255),
|
|
||||||
CONSTRAINT team_memberships_unique UNIQUE (team_id, user_id),
|
|
||||||
CONSTRAINT team_memberships_role_check CHECK (role IN ('owner', 'admin', 'member'))
|
|
||||||
);
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="014_add_team_id_to_projects",
|
|
||||||
sql="""
|
|
||||||
DO $$
|
|
||||||
BEGIN
|
|
||||||
IF NOT EXISTS (
|
|
||||||
SELECT 1 FROM information_schema.columns
|
|
||||||
WHERE table_name = 'projects' AND column_name = 'team_id'
|
|
||||||
) THEN
|
|
||||||
ALTER TABLE projects ADD COLUMN team_id UUID REFERENCES teams(id) ON DELETE SET NULL;
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_projects_team_id ON projects(team_id);
|
|
||||||
END IF;
|
|
||||||
END $$;
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="015_add_teams_indexes",
|
|
||||||
sql="""
|
|
||||||
DO $$
|
|
||||||
BEGIN
|
|
||||||
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_teams_slug') THEN
|
|
||||||
CREATE INDEX idx_teams_slug ON teams(slug);
|
|
||||||
END IF;
|
|
||||||
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_teams_created_by') THEN
|
|
||||||
CREATE INDEX idx_teams_created_by ON teams(created_by);
|
|
||||||
END IF;
|
|
||||||
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_team_memberships_team_id') THEN
|
|
||||||
CREATE INDEX idx_team_memberships_team_id ON team_memberships(team_id);
|
|
||||||
END IF;
|
|
||||||
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_team_memberships_user_id') THEN
|
|
||||||
CREATE INDEX idx_team_memberships_user_id ON team_memberships(user_id);
|
|
||||||
END IF;
|
|
||||||
END $$;
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="016_add_is_system_to_projects",
|
|
||||||
sql="""
|
|
||||||
DO $$
|
|
||||||
BEGIN
|
|
||||||
IF NOT EXISTS (
|
|
||||||
SELECT 1 FROM information_schema.columns
|
|
||||||
WHERE table_name = 'projects' AND column_name = 'is_system'
|
|
||||||
) THEN
|
|
||||||
ALTER TABLE projects ADD COLUMN is_system BOOLEAN NOT NULL DEFAULT FALSE;
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_projects_is_system ON projects(is_system);
|
|
||||||
END IF;
|
|
||||||
END $$;
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="017_create_upstream_sources",
|
|
||||||
sql="""
|
|
||||||
CREATE TABLE IF NOT EXISTS upstream_sources (
|
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
|
||||||
name VARCHAR(255) NOT NULL UNIQUE,
|
|
||||||
source_type VARCHAR(50) NOT NULL DEFAULT 'generic',
|
|
||||||
url VARCHAR(2048) NOT NULL,
|
|
||||||
enabled BOOLEAN NOT NULL DEFAULT FALSE,
|
|
||||||
auth_type VARCHAR(20) NOT NULL DEFAULT 'none',
|
|
||||||
username VARCHAR(255),
|
|
||||||
password_encrypted BYTEA,
|
|
||||||
headers_encrypted BYTEA,
|
|
||||||
priority INTEGER NOT NULL DEFAULT 100,
|
|
||||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
|
||||||
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
|
||||||
CONSTRAINT check_source_type CHECK (
|
|
||||||
source_type IN ('npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic')
|
|
||||||
),
|
|
||||||
CONSTRAINT check_auth_type CHECK (
|
|
||||||
auth_type IN ('none', 'basic', 'bearer', 'api_key')
|
|
||||||
),
|
|
||||||
CONSTRAINT check_priority_positive CHECK (priority > 0)
|
|
||||||
);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_upstream_sources_enabled ON upstream_sources(enabled);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_upstream_sources_source_type ON upstream_sources(source_type);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_upstream_sources_priority ON upstream_sources(priority);
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="018_create_cache_settings",
|
|
||||||
sql="""
|
|
||||||
CREATE TABLE IF NOT EXISTS cache_settings (
|
|
||||||
id INTEGER PRIMARY KEY DEFAULT 1,
|
|
||||||
auto_create_system_projects BOOLEAN NOT NULL DEFAULT TRUE,
|
|
||||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
|
||||||
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
|
||||||
CONSTRAINT check_cache_settings_singleton CHECK (id = 1)
|
|
||||||
);
|
|
||||||
INSERT INTO cache_settings (id, auto_create_system_projects)
|
|
||||||
VALUES (1, TRUE)
|
|
||||||
ON CONFLICT (id) DO NOTHING;
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="019_create_cached_urls",
|
|
||||||
sql="""
|
|
||||||
CREATE TABLE IF NOT EXISTS cached_urls (
|
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
|
||||||
url VARCHAR(4096) NOT NULL,
|
|
||||||
url_hash VARCHAR(64) NOT NULL UNIQUE,
|
|
||||||
artifact_id VARCHAR(64) NOT NULL REFERENCES artifacts(id),
|
|
||||||
source_id UUID REFERENCES upstream_sources(id) ON DELETE SET NULL,
|
|
||||||
fetched_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
|
|
||||||
response_headers JSONB DEFAULT '{}',
|
|
||||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
|
||||||
);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_cached_urls_url_hash ON cached_urls(url_hash);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_cached_urls_artifact_id ON cached_urls(artifact_id);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_cached_urls_source_id ON cached_urls(source_id);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_cached_urls_fetched_at ON cached_urls(fetched_at);
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="020_seed_default_upstream_sources",
|
|
||||||
sql="""
|
|
||||||
-- Originally seeded public sources, but these are no longer used.
|
|
||||||
-- Migration 023 deletes any previously seeded sources.
|
|
||||||
-- This migration is now a no-op for fresh installs.
|
|
||||||
SELECT 1;
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
Migration(
|
|
||||||
name="021_remove_is_public_from_upstream_sources",
|
|
||||||
sql="""
|
|
||||||
DO $$
|
|
||||||
BEGIN
|
|
||||||
-- Drop the index if it exists
|
|
||||||
DROP INDEX IF EXISTS idx_upstream_sources_is_public;
|
|
||||||
|
|
||||||
-- Drop the column if it exists
|
DROP TRIGGER IF EXISTS package_versions_ref_count_delete ON package_versions;
|
||||||
IF EXISTS (
|
CREATE TRIGGER package_versions_ref_count_delete
|
||||||
SELECT 1 FROM information_schema.columns
|
AFTER DELETE ON package_versions
|
||||||
WHERE table_name = 'upstream_sources' AND column_name = 'is_public'
|
FOR EACH ROW
|
||||||
) THEN
|
EXECUTE FUNCTION decrement_version_ref_count();
|
||||||
ALTER TABLE upstream_sources DROP COLUMN is_public;
|
END IF;
|
||||||
END IF;
|
END $$;
|
||||||
END $$;
|
""",
|
||||||
""",
|
# Migrate existing semver tags to package_versions
|
||||||
),
|
r"""
|
||||||
Migration(
|
DO $$
|
||||||
name="022_remove_allow_public_internet_from_cache_settings",
|
BEGIN
|
||||||
sql="""
|
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
|
||||||
DO $$
|
-- Migrate tags that look like versions (v1.0.0, 1.2.3, 2.0.0-beta, etc.)
|
||||||
BEGIN
|
INSERT INTO package_versions (package_id, artifact_id, version, version_source, created_by, created_at)
|
||||||
IF EXISTS (
|
SELECT
|
||||||
SELECT 1 FROM information_schema.columns
|
t.package_id,
|
||||||
WHERE table_name = 'cache_settings' AND column_name = 'allow_public_internet'
|
t.artifact_id,
|
||||||
) THEN
|
CASE WHEN t.name LIKE 'v%' THEN substring(t.name from 2) ELSE t.name END,
|
||||||
ALTER TABLE cache_settings DROP COLUMN allow_public_internet;
|
'migrated_from_tag',
|
||||||
END IF;
|
t.created_by,
|
||||||
END $$;
|
t.created_at
|
||||||
""",
|
FROM tags t
|
||||||
),
|
WHERE t.name ~ '^v?[0-9]+\.[0-9]+(\.[0-9]+)?([-.][a-zA-Z0-9]+)?$'
|
||||||
Migration(
|
ON CONFLICT (package_id, version) DO NOTHING;
|
||||||
name="023_delete_seeded_public_sources",
|
END IF;
|
||||||
sql="""
|
END $$;
|
||||||
-- Delete the seeded public sources that were added by migration 020
|
""",
|
||||||
DELETE FROM upstream_sources
|
|
||||||
WHERE name IN ('npm-public', 'pypi-public', 'maven-central', 'docker-hub');
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
with engine.connect() as conn:
|
with engine.connect() as conn:
|
||||||
# Ensure migrations tracking table exists
|
|
||||||
_ensure_migrations_table(conn)
|
|
||||||
|
|
||||||
# Get already-applied migrations
|
|
||||||
applied = _get_applied_migrations(conn)
|
|
||||||
|
|
||||||
for migration in migrations:
|
for migration in migrations:
|
||||||
checksum = _compute_checksum(migration.sql)
|
|
||||||
|
|
||||||
# Check if migration was already applied
|
|
||||||
if migration.name in applied:
|
|
||||||
stored_checksum = applied[migration.name]
|
|
||||||
if stored_checksum != checksum:
|
|
||||||
logger.warning(
|
|
||||||
f"Migration '{migration.name}' has changed since it was applied! "
|
|
||||||
f"Stored checksum: {stored_checksum}, current: {checksum}"
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Run the migration
|
|
||||||
try:
|
try:
|
||||||
logger.info(f"Running migration: {migration.name}")
|
conn.execute(text(migration))
|
||||||
conn.execute(text(migration.sql))
|
|
||||||
conn.commit()
|
conn.commit()
|
||||||
_record_migration(conn, migration.name, checksum)
|
|
||||||
logger.info(f"Migration '{migration.name}' applied successfully")
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
conn.rollback()
|
logger.warning(f"Migration failed (may already be applied): {e}")
|
||||||
if _is_safe_error(e):
|
|
||||||
# Migration was already applied (schema already exists)
|
|
||||||
logger.info(
|
|
||||||
f"Migration '{migration.name}' already applied (schema exists), recording as complete"
|
|
||||||
)
|
|
||||||
_record_migration(conn, migration.name, checksum)
|
|
||||||
else:
|
|
||||||
# Real error - fail hard
|
|
||||||
logger.error(f"Migration '{migration.name}' failed: {e}")
|
|
||||||
raise RuntimeError(
|
|
||||||
f"Migration '{migration.name}' failed with error: {e}"
|
|
||||||
) from e
|
|
||||||
|
|
||||||
|
|
||||||
def get_db() -> Generator[Session, None, None]:
|
def get_db() -> Generator[Session, None, None]:
|
||||||
|
|||||||
@@ -1,723 +0,0 @@
|
|||||||
"""
|
|
||||||
Dependency management module for artifact dependencies.
|
|
||||||
|
|
||||||
Handles:
|
|
||||||
- Parsing orchard.ensure files
|
|
||||||
- Storing dependencies in the database
|
|
||||||
- Querying dependencies and reverse dependencies
|
|
||||||
- Dependency resolution with topological sorting
|
|
||||||
- Circular dependency detection
|
|
||||||
- Conflict detection
|
|
||||||
"""
|
|
||||||
|
|
||||||
import yaml
|
|
||||||
from typing import List, Dict, Any, Optional, Set, Tuple
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
from sqlalchemy import and_
|
|
||||||
|
|
||||||
from .models import (
|
|
||||||
Project,
|
|
||||||
Package,
|
|
||||||
Artifact,
|
|
||||||
Tag,
|
|
||||||
ArtifactDependency,
|
|
||||||
PackageVersion,
|
|
||||||
)
|
|
||||||
from .schemas import (
|
|
||||||
EnsureFileContent,
|
|
||||||
EnsureFileDependency,
|
|
||||||
DependencyResponse,
|
|
||||||
ArtifactDependenciesResponse,
|
|
||||||
DependentInfo,
|
|
||||||
ReverseDependenciesResponse,
|
|
||||||
ResolvedArtifact,
|
|
||||||
DependencyResolutionResponse,
|
|
||||||
DependencyConflict,
|
|
||||||
PaginationMeta,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class DependencyError(Exception):
|
|
||||||
"""Base exception for dependency errors."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class CircularDependencyError(DependencyError):
|
|
||||||
"""Raised when a circular dependency is detected."""
|
|
||||||
def __init__(self, cycle: List[str]):
|
|
||||||
self.cycle = cycle
|
|
||||||
super().__init__(f"Circular dependency detected: {' -> '.join(cycle)}")
|
|
||||||
|
|
||||||
|
|
||||||
class DependencyConflictError(DependencyError):
|
|
||||||
"""Raised when conflicting dependency versions are detected."""
|
|
||||||
def __init__(self, conflicts: List[DependencyConflict]):
|
|
||||||
self.conflicts = conflicts
|
|
||||||
super().__init__(f"Dependency conflicts detected: {len(conflicts)} conflict(s)")
|
|
||||||
|
|
||||||
|
|
||||||
class DependencyNotFoundError(DependencyError):
|
|
||||||
"""Raised when a dependency cannot be resolved."""
|
|
||||||
def __init__(self, project: str, package: str, constraint: str):
|
|
||||||
self.project = project
|
|
||||||
self.package = package
|
|
||||||
self.constraint = constraint
|
|
||||||
super().__init__(f"Dependency not found: {project}/{package}@{constraint}")
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidEnsureFileError(DependencyError):
|
|
||||||
"""Raised when the ensure file is invalid."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class DependencyDepthExceededError(DependencyError):
|
|
||||||
"""Raised when dependency resolution exceeds max depth."""
|
|
||||||
def __init__(self, max_depth: int):
|
|
||||||
self.max_depth = max_depth
|
|
||||||
super().__init__(f"Dependency resolution exceeded maximum depth of {max_depth}")
|
|
||||||
|
|
||||||
|
|
||||||
# Safety limits to prevent DoS attacks
|
|
||||||
MAX_DEPENDENCY_DEPTH = 50 # Maximum levels of nested dependencies
|
|
||||||
MAX_DEPENDENCIES_PER_ARTIFACT = 200 # Maximum direct dependencies per artifact
|
|
||||||
|
|
||||||
|
|
||||||
def parse_ensure_file(content: bytes) -> EnsureFileContent:
|
|
||||||
"""
|
|
||||||
Parse an orchard.ensure file.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
content: Raw bytes of the ensure file
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Parsed EnsureFileContent
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
InvalidEnsureFileError: If the file is invalid YAML or has wrong structure
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
data = yaml.safe_load(content.decode('utf-8'))
|
|
||||||
except yaml.YAMLError as e:
|
|
||||||
raise InvalidEnsureFileError(f"Invalid YAML: {e}")
|
|
||||||
except UnicodeDecodeError as e:
|
|
||||||
raise InvalidEnsureFileError(f"Invalid encoding: {e}")
|
|
||||||
|
|
||||||
if data is None:
|
|
||||||
return EnsureFileContent(dependencies=[])
|
|
||||||
|
|
||||||
if not isinstance(data, dict):
|
|
||||||
raise InvalidEnsureFileError("Ensure file must be a YAML dictionary")
|
|
||||||
|
|
||||||
dependencies = []
|
|
||||||
deps_data = data.get('dependencies', [])
|
|
||||||
|
|
||||||
if not isinstance(deps_data, list):
|
|
||||||
raise InvalidEnsureFileError("'dependencies' must be a list")
|
|
||||||
|
|
||||||
# Safety limit: prevent DoS through excessive dependencies
|
|
||||||
if len(deps_data) > MAX_DEPENDENCIES_PER_ARTIFACT:
|
|
||||||
raise InvalidEnsureFileError(
|
|
||||||
f"Too many dependencies: {len(deps_data)} exceeds maximum of {MAX_DEPENDENCIES_PER_ARTIFACT}"
|
|
||||||
)
|
|
||||||
|
|
||||||
for i, dep in enumerate(deps_data):
|
|
||||||
if not isinstance(dep, dict):
|
|
||||||
raise InvalidEnsureFileError(f"Dependency {i} must be a dictionary")
|
|
||||||
|
|
||||||
project = dep.get('project')
|
|
||||||
package = dep.get('package')
|
|
||||||
version = dep.get('version')
|
|
||||||
tag = dep.get('tag')
|
|
||||||
|
|
||||||
if not project:
|
|
||||||
raise InvalidEnsureFileError(f"Dependency {i} missing 'project'")
|
|
||||||
if not package:
|
|
||||||
raise InvalidEnsureFileError(f"Dependency {i} missing 'package'")
|
|
||||||
if not version and not tag:
|
|
||||||
raise InvalidEnsureFileError(
|
|
||||||
f"Dependency {i} must have either 'version' or 'tag'"
|
|
||||||
)
|
|
||||||
if version and tag:
|
|
||||||
raise InvalidEnsureFileError(
|
|
||||||
f"Dependency {i} cannot have both 'version' and 'tag'"
|
|
||||||
)
|
|
||||||
|
|
||||||
dependencies.append(EnsureFileDependency(
|
|
||||||
project=project,
|
|
||||||
package=package,
|
|
||||||
version=version,
|
|
||||||
tag=tag,
|
|
||||||
))
|
|
||||||
|
|
||||||
return EnsureFileContent(dependencies=dependencies)
|
|
||||||
|
|
||||||
|
|
||||||
def validate_dependencies(
|
|
||||||
db: Session,
|
|
||||||
dependencies: List[EnsureFileDependency],
|
|
||||||
) -> List[str]:
|
|
||||||
"""
|
|
||||||
Validate that all dependency projects exist.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
db: Database session
|
|
||||||
dependencies: List of dependencies to validate
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
List of error messages (empty if all valid)
|
|
||||||
"""
|
|
||||||
errors = []
|
|
||||||
|
|
||||||
for dep in dependencies:
|
|
||||||
project = db.query(Project).filter(Project.name == dep.project).first()
|
|
||||||
if not project:
|
|
||||||
errors.append(f"Project '{dep.project}' not found")
|
|
||||||
|
|
||||||
return errors
|
|
||||||
|
|
||||||
|
|
||||||
def store_dependencies(
|
|
||||||
db: Session,
|
|
||||||
artifact_id: str,
|
|
||||||
dependencies: List[EnsureFileDependency],
|
|
||||||
) -> List[ArtifactDependency]:
|
|
||||||
"""
|
|
||||||
Store dependencies for an artifact.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
db: Database session
|
|
||||||
artifact_id: The artifact ID that has these dependencies
|
|
||||||
dependencies: List of dependencies to store
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
List of created ArtifactDependency objects
|
|
||||||
"""
|
|
||||||
created = []
|
|
||||||
|
|
||||||
for dep in dependencies:
|
|
||||||
artifact_dep = ArtifactDependency(
|
|
||||||
artifact_id=artifact_id,
|
|
||||||
dependency_project=dep.project,
|
|
||||||
dependency_package=dep.package,
|
|
||||||
version_constraint=dep.version,
|
|
||||||
tag_constraint=dep.tag,
|
|
||||||
)
|
|
||||||
db.add(artifact_dep)
|
|
||||||
created.append(artifact_dep)
|
|
||||||
|
|
||||||
return created
|
|
||||||
|
|
||||||
|
|
||||||
def get_artifact_dependencies(
|
|
||||||
db: Session,
|
|
||||||
artifact_id: str,
|
|
||||||
) -> List[DependencyResponse]:
|
|
||||||
"""
|
|
||||||
Get all dependencies for an artifact.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
db: Database session
|
|
||||||
artifact_id: The artifact ID
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
List of DependencyResponse objects
|
|
||||||
"""
|
|
||||||
deps = db.query(ArtifactDependency).filter(
|
|
||||||
ArtifactDependency.artifact_id == artifact_id
|
|
||||||
).all()
|
|
||||||
|
|
||||||
return [DependencyResponse.from_orm_model(dep) for dep in deps]
|
|
||||||
|
|
||||||
|
|
||||||
def get_reverse_dependencies(
|
|
||||||
db: Session,
|
|
||||||
project_name: str,
|
|
||||||
package_name: str,
|
|
||||||
page: int = 1,
|
|
||||||
limit: int = 50,
|
|
||||||
) -> ReverseDependenciesResponse:
|
|
||||||
"""
|
|
||||||
Get all artifacts that depend on a given package.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
db: Database session
|
|
||||||
project_name: Target project name
|
|
||||||
package_name: Target package name
|
|
||||||
page: Page number (1-indexed)
|
|
||||||
limit: Results per page
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
ReverseDependenciesResponse with dependents and pagination
|
|
||||||
"""
|
|
||||||
# Query dependencies that point to this project/package
|
|
||||||
query = db.query(ArtifactDependency).filter(
|
|
||||||
ArtifactDependency.dependency_project == project_name,
|
|
||||||
ArtifactDependency.dependency_package == package_name,
|
|
||||||
)
|
|
||||||
|
|
||||||
total = query.count()
|
|
||||||
offset = (page - 1) * limit
|
|
||||||
deps = query.offset(offset).limit(limit).all()
|
|
||||||
|
|
||||||
dependents = []
|
|
||||||
for dep in deps:
|
|
||||||
# Get artifact info to find the project/package/version
|
|
||||||
artifact = db.query(Artifact).filter(Artifact.id == dep.artifact_id).first()
|
|
||||||
if not artifact:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Find which package this artifact belongs to via tags or versions
|
|
||||||
tag = db.query(Tag).filter(Tag.artifact_id == dep.artifact_id).first()
|
|
||||||
if tag:
|
|
||||||
pkg = db.query(Package).filter(Package.id == tag.package_id).first()
|
|
||||||
if pkg:
|
|
||||||
proj = db.query(Project).filter(Project.id == pkg.project_id).first()
|
|
||||||
if proj:
|
|
||||||
# Get version if available
|
|
||||||
version_record = db.query(PackageVersion).filter(
|
|
||||||
PackageVersion.artifact_id == dep.artifact_id,
|
|
||||||
PackageVersion.package_id == pkg.id,
|
|
||||||
).first()
|
|
||||||
|
|
||||||
dependents.append(DependentInfo(
|
|
||||||
artifact_id=dep.artifact_id,
|
|
||||||
project=proj.name,
|
|
||||||
package=pkg.name,
|
|
||||||
version=version_record.version if version_record else None,
|
|
||||||
constraint_type="version" if dep.version_constraint else "tag",
|
|
||||||
constraint_value=dep.version_constraint or dep.tag_constraint,
|
|
||||||
))
|
|
||||||
|
|
||||||
total_pages = (total + limit - 1) // limit
|
|
||||||
|
|
||||||
return ReverseDependenciesResponse(
|
|
||||||
project=project_name,
|
|
||||||
package=package_name,
|
|
||||||
dependents=dependents,
|
|
||||||
pagination=PaginationMeta(
|
|
||||||
page=page,
|
|
||||||
limit=limit,
|
|
||||||
total=total,
|
|
||||||
total_pages=total_pages,
|
|
||||||
has_more=page < total_pages,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _resolve_dependency_to_artifact(
|
|
||||||
db: Session,
|
|
||||||
project_name: str,
|
|
||||||
package_name: str,
|
|
||||||
version: Optional[str],
|
|
||||||
tag: Optional[str],
|
|
||||||
) -> Optional[Tuple[str, str, int]]:
|
|
||||||
"""
|
|
||||||
Resolve a dependency constraint to an artifact ID.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
db: Database session
|
|
||||||
project_name: Project name
|
|
||||||
package_name: Package name
|
|
||||||
version: Version constraint (exact)
|
|
||||||
tag: Tag constraint
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Tuple of (artifact_id, resolved_version_or_tag, size) or None if not found
|
|
||||||
"""
|
|
||||||
# Get project and package
|
|
||||||
project = db.query(Project).filter(Project.name == project_name).first()
|
|
||||||
if not project:
|
|
||||||
return None
|
|
||||||
|
|
||||||
package = db.query(Package).filter(
|
|
||||||
Package.project_id == project.id,
|
|
||||||
Package.name == package_name,
|
|
||||||
).first()
|
|
||||||
if not package:
|
|
||||||
return None
|
|
||||||
|
|
||||||
if version:
|
|
||||||
# Look up by version
|
|
||||||
pkg_version = db.query(PackageVersion).filter(
|
|
||||||
PackageVersion.package_id == package.id,
|
|
||||||
PackageVersion.version == version,
|
|
||||||
).first()
|
|
||||||
if pkg_version:
|
|
||||||
artifact = db.query(Artifact).filter(
|
|
||||||
Artifact.id == pkg_version.artifact_id
|
|
||||||
).first()
|
|
||||||
if artifact:
|
|
||||||
return (artifact.id, version, artifact.size)
|
|
||||||
|
|
||||||
# Also check if there's a tag with this exact name
|
|
||||||
tag_record = db.query(Tag).filter(
|
|
||||||
Tag.package_id == package.id,
|
|
||||||
Tag.name == version,
|
|
||||||
).first()
|
|
||||||
if tag_record:
|
|
||||||
artifact = db.query(Artifact).filter(
|
|
||||||
Artifact.id == tag_record.artifact_id
|
|
||||||
).first()
|
|
||||||
if artifact:
|
|
||||||
return (artifact.id, version, artifact.size)
|
|
||||||
|
|
||||||
if tag:
|
|
||||||
# Look up by tag
|
|
||||||
tag_record = db.query(Tag).filter(
|
|
||||||
Tag.package_id == package.id,
|
|
||||||
Tag.name == tag,
|
|
||||||
).first()
|
|
||||||
if tag_record:
|
|
||||||
artifact = db.query(Artifact).filter(
|
|
||||||
Artifact.id == tag_record.artifact_id
|
|
||||||
).first()
|
|
||||||
if artifact:
|
|
||||||
return (artifact.id, tag, artifact.size)
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def _detect_package_cycle(
|
|
||||||
db: Session,
|
|
||||||
project_name: str,
|
|
||||||
package_name: str,
|
|
||||||
target_project: str,
|
|
||||||
target_package: str,
|
|
||||||
visiting: Set[str],
|
|
||||||
visited: Set[str],
|
|
||||||
path: List[str],
|
|
||||||
) -> Optional[List[str]]:
|
|
||||||
"""
|
|
||||||
Detect cycles at the package level using DFS.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
db: Database session
|
|
||||||
project_name: Current project being visited
|
|
||||||
package_name: Current package being visited
|
|
||||||
target_project: The project we're checking for cycles back to
|
|
||||||
target_package: The package we're checking for cycles back to
|
|
||||||
visiting: Set of package keys currently in the recursion stack
|
|
||||||
visited: Set of fully processed package keys
|
|
||||||
path: Current path for cycle reporting
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Cycle path if detected, None otherwise
|
|
||||||
"""
|
|
||||||
pkg_key = f"{project_name}/{package_name}"
|
|
||||||
|
|
||||||
# Check if we've reached the target package (cycle detected)
|
|
||||||
if project_name == target_project and package_name == target_package:
|
|
||||||
return path + [pkg_key]
|
|
||||||
|
|
||||||
if pkg_key in visiting:
|
|
||||||
# Unexpected internal cycle
|
|
||||||
return None
|
|
||||||
|
|
||||||
if pkg_key in visited:
|
|
||||||
return None
|
|
||||||
|
|
||||||
visiting.add(pkg_key)
|
|
||||||
path.append(pkg_key)
|
|
||||||
|
|
||||||
# Get the package and find any artifacts with dependencies
|
|
||||||
project = db.query(Project).filter(Project.name == project_name).first()
|
|
||||||
if project:
|
|
||||||
package = db.query(Package).filter(
|
|
||||||
Package.project_id == project.id,
|
|
||||||
Package.name == package_name,
|
|
||||||
).first()
|
|
||||||
if package:
|
|
||||||
# Find all artifacts in this package via tags
|
|
||||||
tags = db.query(Tag).filter(Tag.package_id == package.id).all()
|
|
||||||
artifact_ids = {t.artifact_id for t in tags}
|
|
||||||
|
|
||||||
# Get dependencies from all artifacts in this package
|
|
||||||
for artifact_id in artifact_ids:
|
|
||||||
deps = db.query(ArtifactDependency).filter(
|
|
||||||
ArtifactDependency.artifact_id == artifact_id
|
|
||||||
).all()
|
|
||||||
|
|
||||||
for dep in deps:
|
|
||||||
cycle = _detect_package_cycle(
|
|
||||||
db,
|
|
||||||
dep.dependency_project,
|
|
||||||
dep.dependency_package,
|
|
||||||
target_project,
|
|
||||||
target_package,
|
|
||||||
visiting,
|
|
||||||
visited,
|
|
||||||
path,
|
|
||||||
)
|
|
||||||
if cycle:
|
|
||||||
return cycle
|
|
||||||
|
|
||||||
path.pop()
|
|
||||||
visiting.remove(pkg_key)
|
|
||||||
visited.add(pkg_key)
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def check_circular_dependencies(
|
|
||||||
    db: Session,
    artifact_id: str,
    new_dependencies: List[EnsureFileDependency],
    project_name: Optional[str] = None,
    package_name: Optional[str] = None,
) -> Optional[List[str]]:
    """
    Check if adding the new dependencies would create a circular dependency.

    Args:
        db: Database session
        artifact_id: The artifact that will have these dependencies
        new_dependencies: Dependencies to be added
        project_name: Project name (optional, will try to look up from tag if not provided)
        package_name: Package name (optional, will try to look up from tag if not provided)

    Returns:
        Cycle path if detected, None otherwise
    """
    # First, get the package info for this artifact to build path labels
    if project_name and package_name:
        current_path = f"{project_name}/{package_name}"
    else:
        # Try to look up from tag
        artifact = db.query(Artifact).filter(Artifact.id == artifact_id).first()
        if not artifact:
            return None

        # Find package for this artifact
        tag = db.query(Tag).filter(Tag.artifact_id == artifact_id).first()
        if not tag:
            return None

        package = db.query(Package).filter(Package.id == tag.package_id).first()
        if not package:
            return None

        project = db.query(Project).filter(Project.id == package.project_id).first()
        if not project:
            return None

        current_path = f"{project.name}/{package.name}"

    # Extract target project and package from current_path
    if "/" in current_path:
        target_project, target_package = current_path.split("/", 1)
    else:
        return None

    # For each new dependency, check if it would create a cycle back to our package
    for dep in new_dependencies:
        # Check if this dependency (transitively) depends on us at the package level
        visiting: Set[str] = set()
        visited: Set[str] = set()
        path: List[str] = [current_path]

        # Check from the dependency's package
        cycle = _detect_package_cycle(
            db,
            dep.project,
            dep.package,
            target_project,
            target_package,
            visiting,
            visited,
            path,
        )
        if cycle:
            return cycle

    return None


def resolve_dependencies(
    db: Session,
    project_name: str,
    package_name: str,
    ref: str,
    base_url: str,
) -> DependencyResolutionResponse:
    """
    Resolve all dependencies for an artifact recursively.

    Args:
        db: Database session
        project_name: Project name
        package_name: Package name
        ref: Tag or version reference
        base_url: Base URL for download URLs

    Returns:
        DependencyResolutionResponse with all resolved artifacts

    Raises:
        DependencyNotFoundError: If a dependency cannot be resolved
        CircularDependencyError: If circular dependencies are detected
        DependencyConflictError: If conflicting versions are required
    """
    # Resolve the initial artifact
    project = db.query(Project).filter(Project.name == project_name).first()
    if not project:
        raise DependencyNotFoundError(project_name, package_name, ref)

    package = db.query(Package).filter(
        Package.project_id == project.id,
        Package.name == package_name,
    ).first()
    if not package:
        raise DependencyNotFoundError(project_name, package_name, ref)

    # Try to find artifact by tag or version
    resolved = _resolve_dependency_to_artifact(
        db, project_name, package_name, ref, ref
    )
    if not resolved:
        raise DependencyNotFoundError(project_name, package_name, ref)

    root_artifact_id, root_version, root_size = resolved

    # Track resolved artifacts and their versions
    resolved_artifacts: Dict[str, ResolvedArtifact] = {}
    # Track version requirements for conflict detection
    version_requirements: Dict[str, List[Dict[str, Any]]] = {}  # pkg_key -> [(version, required_by)]
    # Track visiting/visited for cycle detection
    visiting: Set[str] = set()
    visited: Set[str] = set()
    # Resolution order (topological)
    resolution_order: List[str] = []

    def _resolve_recursive(
        artifact_id: str,
        proj_name: str,
        pkg_name: str,
        version_or_tag: str,
        size: int,
        required_by: Optional[str],
        depth: int = 0,
    ):
        """Recursively resolve dependencies with cycle/conflict detection."""
        # Safety limit: prevent DoS through deeply nested dependencies
        if depth > MAX_DEPENDENCY_DEPTH:
            raise DependencyDepthExceededError(MAX_DEPENDENCY_DEPTH)

        pkg_key = f"{proj_name}/{pkg_name}"

        # Cycle detection (at artifact level)
        if artifact_id in visiting:
            # Build cycle path
            raise CircularDependencyError([pkg_key, pkg_key])

        # Conflict detection - check if we've seen this package before with a different version
        if pkg_key in version_requirements:
            existing_versions = {r["version"] for r in version_requirements[pkg_key]}
            if version_or_tag not in existing_versions:
                # Conflict detected - same package, different version
                requirements = version_requirements[pkg_key] + [
                    {"version": version_or_tag, "required_by": required_by}
                ]
                raise DependencyConflictError([
                    DependencyConflict(
                        project=proj_name,
                        package=pkg_name,
                        requirements=[
                            {
                                "version": r["version"],
                                "required_by": [{"path": r["required_by"]}] if r["required_by"] else []
                            }
                            for r in requirements
                        ],
                    )
                ])
            # Same version already resolved - skip
            if artifact_id in visited:
                return

        if artifact_id in visited:
            return

        visiting.add(artifact_id)

        # Track version requirement
        if pkg_key not in version_requirements:
            version_requirements[pkg_key] = []
        version_requirements[pkg_key].append({
            "version": version_or_tag,
            "required_by": required_by,
        })

        # Get dependencies
        deps = db.query(ArtifactDependency).filter(
            ArtifactDependency.artifact_id == artifact_id
        ).all()

        # Resolve each dependency first (depth-first)
        for dep in deps:
            resolved_dep = _resolve_dependency_to_artifact(
                db,
                dep.dependency_project,
                dep.dependency_package,
                dep.version_constraint,
                dep.tag_constraint,
            )

            if not resolved_dep:
                constraint = dep.version_constraint or dep.tag_constraint
                raise DependencyNotFoundError(
                    dep.dependency_project,
                    dep.dependency_package,
                    constraint,
                )

            dep_artifact_id, dep_version, dep_size = resolved_dep
            _resolve_recursive(
                dep_artifact_id,
                dep.dependency_project,
                dep.dependency_package,
                dep_version,
                dep_size,
                pkg_key,
                depth + 1,
            )

        visiting.remove(artifact_id)
        visited.add(artifact_id)

        # Add to resolution order (dependencies before dependents)
        resolution_order.append(artifact_id)

        # Store resolved artifact info
        resolved_artifacts[artifact_id] = ResolvedArtifact(
            artifact_id=artifact_id,
            project=proj_name,
            package=pkg_name,
            version=version_or_tag,
            size=size,
            download_url=f"{base_url}/api/v1/project/{proj_name}/{pkg_name}/+/{version_or_tag}",
        )

    # Start resolution from root
    _resolve_recursive(
        root_artifact_id,
        project_name,
        package_name,
        root_version,
        root_size,
        None,
    )

    # Build response in topological order
    resolved_list = [resolved_artifacts[aid] for aid in resolution_order]
    total_size = sum(r.size for r in resolved_list)

    return DependencyResolutionResponse(
        requested={
            "project": project_name,
            "package": package_name,
            "ref": ref,
        },
        resolved=resolved_list,
        total_size=total_size,
        artifact_count=len(resolved_list),
    )
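For orientation, a minimal sketch of how this resolver might be driven from service code follows; the module path and the SessionLocal factory are assumptions based on imports seen elsewhere in this change, not something this diff confirms.

# Hypothetical usage sketch; module path and session handling are assumed.
from .database import SessionLocal
from .dependencies import (          # assumed module name for the code above
    resolve_dependencies,
    DependencyNotFoundError,
    CircularDependencyError,
    DependencyConflictError,
)

def resolve_for_request(project: str, package: str, ref: str, base_url: str):
    db = SessionLocal()
    try:
        # Walks the graph depth-first; result.resolved is in topological order
        # (dependencies before dependents) and includes download URLs.
        return resolve_dependencies(db, project, package, ref, base_url)
    except (DependencyNotFoundError, CircularDependencyError, DependencyConflictError):
        # The caller maps these to not-found / conflict API errors.
        raise
    finally:
        db.close()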
@@ -1,160 +0,0 @@
"""
Encryption utilities for sensitive data storage.

Uses Fernet symmetric encryption for credentials like upstream passwords.
The encryption key is sourced from ORCHARD_CACHE_ENCRYPTION_KEY environment variable.
If not set, a random key is generated on startup (with a warning).
"""

import base64
import logging
import os
import secrets
from functools import lru_cache
from typing import Optional

from cryptography.fernet import Fernet, InvalidToken

logger = logging.getLogger(__name__)

# Module-level storage for auto-generated key (only used if env var not set)
_generated_key: Optional[bytes] = None


def _get_key_from_env() -> Optional[bytes]:
    """Get encryption key from environment variable."""
    key_str = os.environ.get("ORCHARD_CACHE_ENCRYPTION_KEY", "")
    if not key_str:
        return None

    # Support both raw base64 and url-safe base64 formats
    try:
        # Try to decode as-is (Fernet keys are url-safe base64)
        key_bytes = key_str.encode("utf-8")
        # Validate it's a valid Fernet key by trying to create a Fernet instance
        Fernet(key_bytes)
        return key_bytes
    except Exception:
        pass

    # Try base64 decoding if it's a raw 32-byte key encoded as base64
    try:
        decoded = base64.urlsafe_b64decode(key_str)
        if len(decoded) == 32:
            # Re-encode as url-safe base64 for Fernet
            key_bytes = base64.urlsafe_b64encode(decoded)
            Fernet(key_bytes)
            return key_bytes
    except Exception:
        pass

    logger.error(
        "ORCHARD_CACHE_ENCRYPTION_KEY is set but invalid. "
        "Must be a valid Fernet key (32 bytes, url-safe base64 encoded). "
        "Generate one with: python -c \"from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())\""
    )
    return None


def get_encryption_key() -> bytes:
    """
    Get the Fernet encryption key.

    Returns the key from ORCHARD_CACHE_ENCRYPTION_KEY if set and valid,
    otherwise generates a random key (with a warning logged).

    The generated key is cached for the lifetime of the process.
    """
    global _generated_key

    # Try to get from environment
    env_key = _get_key_from_env()
    if env_key:
        return env_key

    # Generate a new key if needed
    if _generated_key is None:
        _generated_key = Fernet.generate_key()
        logger.warning(
            "ORCHARD_CACHE_ENCRYPTION_KEY not set - using auto-generated key. "
            "Encrypted credentials will be lost on restart! "
            "Set ORCHARD_CACHE_ENCRYPTION_KEY for persistent encryption. "
            "Generate a key with: python -c \"from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())\""
        )

    return _generated_key


@lru_cache(maxsize=1)
def _get_fernet() -> Fernet:
    """Get a cached Fernet instance."""
    return Fernet(get_encryption_key())


def encrypt_value(plaintext: str) -> bytes:
    """
    Encrypt a string value using Fernet.

    Args:
        plaintext: The string to encrypt

    Returns:
        Encrypted bytes (includes Fernet token with timestamp)
    """
    if not plaintext:
        raise ValueError("Cannot encrypt empty value")

    fernet = _get_fernet()
    return fernet.encrypt(plaintext.encode("utf-8"))


def decrypt_value(ciphertext: bytes) -> str:
    """
    Decrypt a Fernet-encrypted value.

    Args:
        ciphertext: The encrypted bytes

    Returns:
        Decrypted string

    Raises:
        InvalidToken: If decryption fails (wrong key or corrupted data)
    """
    if not ciphertext:
        raise ValueError("Cannot decrypt empty value")

    fernet = _get_fernet()
    return fernet.decrypt(ciphertext).decode("utf-8")


def can_decrypt(ciphertext: bytes) -> bool:
    """
    Check if a value can be decrypted with the current key.

    Useful for checking if credentials are still valid after key rotation.

    Args:
        ciphertext: The encrypted bytes

    Returns:
        True if decryption succeeds, False otherwise
    """
    if not ciphertext:
        return False

    try:
        decrypt_value(ciphertext)
        return True
    except (InvalidToken, ValueError):
        return False


def generate_key() -> str:
    """
    Generate a new Fernet encryption key.

    Returns:
        A valid Fernet key as a string (url-safe base64 encoded)
    """
    return Fernet.generate_key().decode("utf-8")
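A short round-trip with these helpers, as a sketch only; it assumes ORCHARD_CACHE_ENCRYPTION_KEY is exported before the first call, since the Fernet instance is cached for the process lifetime.

# Illustrative round-trip; the key below is generated on the spot.
import os
from cryptography.fernet import Fernet
from .encryption import encrypt_value, decrypt_value, can_decrypt

os.environ.setdefault("ORCHARD_CACHE_ENCRYPTION_KEY", Fernet.generate_key().decode())

token = encrypt_value("upstream-password")      # bytes suitable for a LargeBinary column
assert decrypt_value(token) == "upstream-password"
assert can_decrypt(token)                        # becomes False after a key rotation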
@@ -11,7 +11,6 @@ from slowapi.errors import RateLimitExceeded
from .config import get_settings
from .database import init_db, SessionLocal
from .routes import router
from .pypi_proxy import router as pypi_router
from .seed import seed_database
from .auth import create_default_admin
from .rate_limit import limiter
@@ -66,7 +65,6 @@ app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)

# Include API routes
app.include_router(router)
app.include_router(pypi_router)

# Serve static files (React build) if the directory exists
static_dir = os.path.join(os.path.dirname(__file__), "..", "..", "frontend", "dist")
@@ -12,7 +12,6 @@ from sqlalchemy import (
    Index,
    JSON,
    ARRAY,
    LargeBinary,
)
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship, declarative_base
@@ -28,13 +27,11 @@ class Project(Base):
    name = Column(String(255), unique=True, nullable=False)
    description = Column(Text)
    is_public = Column(Boolean, default=True)
    is_system = Column(Boolean, default=False, nullable=False)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    updated_at = Column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
    )
    created_by = Column(String(255), nullable=False)
    team_id = Column(UUID(as_uuid=True), ForeignKey("teams.id", ondelete="SET NULL"))

    packages = relationship(
        "Package", back_populates="project", cascade="all, delete-orphan"
@@ -42,13 +39,10 @@ class Project(Base):
    permissions = relationship(
        "AccessPermission", back_populates="project", cascade="all, delete-orphan"
    )
    team = relationship("Team", back_populates="projects")

    __table_args__ = (
        Index("idx_projects_name", "name"),
        Index("idx_projects_created_by", "created_by"),
        Index("idx_projects_team_id", "team_id"),
        Index("idx_projects_is_system", "is_system"),
    )


@@ -123,9 +117,6 @@ class Artifact(Base):
    tags = relationship("Tag", back_populates="artifact")
    uploads = relationship("Upload", back_populates="artifact")
    versions = relationship("PackageVersion", back_populates="artifact")
    dependencies = relationship(
        "ArtifactDependency", back_populates="artifact", cascade="all, delete-orphan"
    )

    @property
    def sha256(self) -> str:
@@ -375,9 +366,6 @@ class User(Base):
    sessions = relationship(
        "Session", back_populates="user", cascade="all, delete-orphan"
    )
    team_memberships = relationship(
        "TeamMembership", back_populates="user", cascade="all, delete-orphan"
    )

    __table_args__ = (
        Index("idx_users_username", "username"),
@@ -519,287 +507,3 @@ class PackageHistory(Base):
        Index("idx_package_history_changed_at", "changed_at"),
        Index("idx_package_history_package_changed_at", "package_id", "changed_at"),
    )


class ArtifactDependency(Base):
    """Dependency declared by an artifact on another package.

    Each artifact can declare dependencies on other packages, specifying either
    an exact version or a tag. This enables recursive dependency resolution.
    """

    __tablename__ = "artifact_dependencies"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    artifact_id = Column(
        String(64),
        ForeignKey("artifacts.id", ondelete="CASCADE"),
        nullable=False,
    )
    dependency_project = Column(String(255), nullable=False)
    dependency_package = Column(String(255), nullable=False)
    version_constraint = Column(String(255), nullable=True)
    tag_constraint = Column(String(255), nullable=True)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)

    # Relationship to the artifact that declares this dependency
    artifact = relationship("Artifact", back_populates="dependencies")

    __table_args__ = (
        # Exactly one of version_constraint or tag_constraint must be set
        CheckConstraint(
            "(version_constraint IS NOT NULL AND tag_constraint IS NULL) OR "
            "(version_constraint IS NULL AND tag_constraint IS NOT NULL)",
            name="check_constraint_type",
        ),
        # Each artifact can only depend on a specific project/package once
        Index(
            "idx_artifact_dependencies_artifact_id",
            "artifact_id",
        ),
        Index(
            "idx_artifact_dependencies_target",
            "dependency_project",
            "dependency_package",
        ),
        Index(
            "idx_artifact_dependencies_unique",
            "artifact_id",
            "dependency_project",
            "dependency_package",
            unique=True,
        ),
    )


class Team(Base):
    """Team for organizing projects and users."""

    __tablename__ = "teams"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = Column(String(255), nullable=False)
    slug = Column(String(255), unique=True, nullable=False)
    description = Column(Text)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    updated_at = Column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
    )
    created_by = Column(String(255), nullable=False)
    settings = Column(JSON, default=dict)

    # Relationships
    memberships = relationship(
        "TeamMembership", back_populates="team", cascade="all, delete-orphan"
    )
    projects = relationship("Project", back_populates="team")

    __table_args__ = (
        Index("idx_teams_slug", "slug"),
        Index("idx_teams_created_by", "created_by"),
        Index("idx_teams_created_at", "created_at"),
        CheckConstraint(
            "slug ~ '^[a-z0-9][a-z0-9-]*[a-z0-9]$' OR slug ~ '^[a-z0-9]$'",
            name="check_team_slug_format",
        ),
    )


class TeamMembership(Base):
    """Maps users to teams with their roles."""

    __tablename__ = "team_memberships"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    team_id = Column(
        UUID(as_uuid=True),
        ForeignKey("teams.id", ondelete="CASCADE"),
        nullable=False,
    )
    user_id = Column(
        UUID(as_uuid=True),
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=False,
    )
    role = Column(String(20), nullable=False, default="member")
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    invited_by = Column(String(255))

    # Relationships
    team = relationship("Team", back_populates="memberships")
    user = relationship("User", back_populates="team_memberships")

    __table_args__ = (
        Index("idx_team_memberships_team_id", "team_id"),
        Index("idx_team_memberships_user_id", "user_id"),
        Index("idx_team_memberships_role", "role"),
        Index("idx_team_memberships_team_role", "team_id", "role"),
        Index("idx_team_memberships_unique", "team_id", "user_id", unique=True),
        CheckConstraint(
            "role IN ('owner', 'admin', 'member')",
            name="check_team_role",
        ),
    )


# =============================================================================
# Upstream Caching Models
# =============================================================================

# Valid source types for upstream registries
SOURCE_TYPES = ["npm", "pypi", "maven", "docker", "helm", "nuget", "deb", "rpm", "generic"]

# Valid authentication types
AUTH_TYPES = ["none", "basic", "bearer", "api_key"]


class UpstreamSource(Base):
    """Configuration for an upstream artifact registry.

    Stores connection details and authentication for upstream registries
    like npm, PyPI, Maven Central, or private Artifactory instances.
    """

    __tablename__ = "upstream_sources"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = Column(String(255), unique=True, nullable=False)
    source_type = Column(String(50), default="generic", nullable=False)
    url = Column(String(2048), nullable=False)
    enabled = Column(Boolean, default=False, nullable=False)
    auth_type = Column(String(20), default="none", nullable=False)
    username = Column(String(255))
    password_encrypted = Column(LargeBinary)
    headers_encrypted = Column(LargeBinary)
    priority = Column(Integer, default=100, nullable=False)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    updated_at = Column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
    )

    # Relationships
    cached_urls = relationship("CachedUrl", back_populates="source")

    __table_args__ = (
        Index("idx_upstream_sources_enabled", "enabled"),
        Index("idx_upstream_sources_source_type", "source_type"),
        Index("idx_upstream_sources_priority", "priority"),
        CheckConstraint(
            "source_type IN ('npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic')",
            name="check_source_type",
        ),
        CheckConstraint(
            "auth_type IN ('none', 'basic', 'bearer', 'api_key')",
            name="check_auth_type",
        ),
        CheckConstraint("priority > 0", name="check_priority_positive"),
    )

    def set_password(self, password: str) -> None:
        """Encrypt and store a password/token."""
        from .encryption import encrypt_value

        if password:
            self.password_encrypted = encrypt_value(password)
        else:
            self.password_encrypted = None

    def get_password(self) -> str | None:
        """Decrypt and return the stored password/token."""
        from .encryption import decrypt_value

        if self.password_encrypted:
            try:
                return decrypt_value(self.password_encrypted)
            except Exception:
                return None
        return None

    def has_password(self) -> bool:
        """Check if a password/token is stored."""
        return self.password_encrypted is not None

    def set_headers(self, headers: dict) -> None:
        """Encrypt and store custom headers as JSON."""
        from .encryption import encrypt_value
        import json

        if headers:
            self.headers_encrypted = encrypt_value(json.dumps(headers))
        else:
            self.headers_encrypted = None

    def get_headers(self) -> dict | None:
        """Decrypt and return custom headers."""
        from .encryption import decrypt_value
        import json

        if self.headers_encrypted:
            try:
                return json.loads(decrypt_value(self.headers_encrypted))
            except Exception:
                return None
        return None


class CacheSettings(Base):
    """Global cache settings (singleton table).

    Controls behavior of the upstream caching system.
    """

    __tablename__ = "cache_settings"

    id = Column(Integer, primary_key=True, default=1)
    auto_create_system_projects = Column(Boolean, default=True, nullable=False)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
    updated_at = Column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
    )

    __table_args__ = (
        CheckConstraint("id = 1", name="check_cache_settings_singleton"),
    )


class CachedUrl(Base):
    """Tracks URL to artifact mappings for provenance.

    Records which URLs have been cached and maps them to their stored artifacts.
    Enables "is this URL already cached?" lookups and audit trails.
    """

    __tablename__ = "cached_urls"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    url = Column(String(4096), nullable=False)
    url_hash = Column(String(64), unique=True, nullable=False)
    artifact_id = Column(
        String(64), ForeignKey("artifacts.id"), nullable=False
    )
    source_id = Column(
        UUID(as_uuid=True),
        ForeignKey("upstream_sources.id", ondelete="SET NULL"),
    )
    fetched_at = Column(DateTime(timezone=True), default=datetime.utcnow, nullable=False)
    response_headers = Column(JSON, default=dict)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)

    # Relationships
    artifact = relationship("Artifact")
    source = relationship("UpstreamSource", back_populates="cached_urls")

    __table_args__ = (
        Index("idx_cached_urls_url_hash", "url_hash"),
        Index("idx_cached_urls_artifact_id", "artifact_id"),
        Index("idx_cached_urls_source_id", "source_id"),
        Index("idx_cached_urls_fetched_at", "fetched_at"),
    )

    @staticmethod
    def compute_url_hash(url: str) -> str:
        """Compute SHA256 hash of a URL for fast lookups."""
        import hashlib
        return hashlib.sha256(url.encode("utf-8")).hexdigest()
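As an illustration of how the encrypted-credential helpers on UpstreamSource fit together, here is a minimal sketch; the session setup, URL, and credentials are placeholders, not values from this change.

# Hypothetical configuration of a PyPI upstream with basic auth.
from .database import SessionLocal
from .models import UpstreamSource

db = SessionLocal()
source = UpstreamSource(
    name="internal-pypi",
    source_type="pypi",
    url="https://pypi.example.internal",   # placeholder URL
    enabled=True,
    auth_type="basic",
    username="svc-orchard",
    priority=10,
)
source.set_password("example-token")        # stored encrypted in password_encrypted
db.add(source)
db.commit()

assert source.has_password()
plaintext = source.get_password()            # decrypted with the configured Fernet key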
@@ -1,212 +0,0 @@
"""
Purge seed/demo data from the database.

This is used when transitioning an environment from dev/test to production-like.
Triggered by setting ORCHARD_PURGE_SEED_DATA=true environment variable.
"""
import logging
import os
from sqlalchemy.orm import Session

from .models import (
    Project,
    Package,
    Artifact,
    Tag,
    Upload,
    PackageVersion,
    ArtifactDependency,
    Team,
    TeamMembership,
    User,
    AccessPermission,
)
from .storage import get_storage

logger = logging.getLogger(__name__)

# Seed data identifiers (from seed.py)
SEED_PROJECT_NAMES = [
    "frontend-libs",
    "backend-services",
    "mobile-apps",
    "internal-tools",
]

SEED_TEAM_SLUG = "demo-team"

SEED_USERNAMES = [
    "alice",
    "bob",
    "charlie",
    "diana",
    "eve",
    "frank",
]


def should_purge_seed_data() -> bool:
    """Check if seed data should be purged based on environment variable."""
    return os.environ.get("ORCHARD_PURGE_SEED_DATA", "").lower() == "true"


def purge_seed_data(db: Session) -> dict:
    """
    Purge all seed/demo data from the database.

    Returns a dict with counts of deleted items.
    """
    logger.warning("PURGING SEED DATA - This will delete demo projects, users, and teams")

    results = {
        "dependencies_deleted": 0,
        "tags_deleted": 0,
        "versions_deleted": 0,
        "uploads_deleted": 0,
        "artifacts_deleted": 0,
        "packages_deleted": 0,
        "projects_deleted": 0,
        "permissions_deleted": 0,
        "team_memberships_deleted": 0,
        "users_deleted": 0,
        "teams_deleted": 0,
        "s3_objects_deleted": 0,
    }

    storage = get_storage()

    # Find seed projects
    seed_projects = db.query(Project).filter(Project.name.in_(SEED_PROJECT_NAMES)).all()
    seed_project_ids = [p.id for p in seed_projects]

    if not seed_projects:
        logger.info("No seed projects found, nothing to purge")
        return results

    logger.info(f"Found {len(seed_projects)} seed projects to purge")

    # Find packages in seed projects
    seed_packages = db.query(Package).filter(Package.project_id.in_(seed_project_ids)).all()
    seed_package_ids = [p.id for p in seed_packages]

    # Find artifacts in seed packages (via uploads)
    seed_uploads = db.query(Upload).filter(Upload.package_id.in_(seed_package_ids)).all()
    seed_artifact_ids = list(set(u.artifact_id for u in seed_uploads))

    # Delete in order (respecting foreign keys)

    # 1. Delete artifact dependencies
    if seed_artifact_ids:
        count = db.query(ArtifactDependency).filter(
            ArtifactDependency.artifact_id.in_(seed_artifact_ids)
        ).delete(synchronize_session=False)
        results["dependencies_deleted"] = count
        logger.info(f"Deleted {count} artifact dependencies")

    # 2. Delete tags
    if seed_package_ids:
        count = db.query(Tag).filter(Tag.package_id.in_(seed_package_ids)).delete(
            synchronize_session=False
        )
        results["tags_deleted"] = count
        logger.info(f"Deleted {count} tags")

    # 3. Delete package versions
    if seed_package_ids:
        count = db.query(PackageVersion).filter(
            PackageVersion.package_id.in_(seed_package_ids)
        ).delete(synchronize_session=False)
        results["versions_deleted"] = count
        logger.info(f"Deleted {count} package versions")

    # 4. Delete uploads
    if seed_package_ids:
        count = db.query(Upload).filter(Upload.package_id.in_(seed_package_ids)).delete(
            synchronize_session=False
        )
        results["uploads_deleted"] = count
        logger.info(f"Deleted {count} uploads")

    # 5. Delete S3 objects for seed artifacts
    if seed_artifact_ids:
        seed_artifacts = db.query(Artifact).filter(Artifact.id.in_(seed_artifact_ids)).all()
        for artifact in seed_artifacts:
            if artifact.s3_key:
                try:
                    storage.client.delete_object(Bucket=storage.bucket, Key=artifact.s3_key)
                    results["s3_objects_deleted"] += 1
                except Exception as e:
                    logger.warning(f"Failed to delete S3 object {artifact.s3_key}: {e}")
        logger.info(f"Deleted {results['s3_objects_deleted']} S3 objects")

    # 6. Delete artifacts (only those with ref_count that would be 0 after our deletions)
    # Since we deleted all tags/versions pointing to these artifacts, we can delete them
    if seed_artifact_ids:
        count = db.query(Artifact).filter(Artifact.id.in_(seed_artifact_ids)).delete(
            synchronize_session=False
        )
        results["artifacts_deleted"] = count
        logger.info(f"Deleted {count} artifacts")

    # 7. Delete packages
    if seed_package_ids:
        count = db.query(Package).filter(Package.id.in_(seed_package_ids)).delete(
            synchronize_session=False
        )
        results["packages_deleted"] = count
        logger.info(f"Deleted {count} packages")

    # 8. Delete access permissions for seed projects
    if seed_project_ids:
        count = db.query(AccessPermission).filter(
            AccessPermission.project_id.in_(seed_project_ids)
        ).delete(synchronize_session=False)
        results["permissions_deleted"] = count
        logger.info(f"Deleted {count} access permissions")

    # 9. Delete seed projects
    count = db.query(Project).filter(Project.name.in_(SEED_PROJECT_NAMES)).delete(
        synchronize_session=False
    )
    results["projects_deleted"] = count
    logger.info(f"Deleted {count} projects")

    # 10. Find and delete seed team
    seed_team = db.query(Team).filter(Team.slug == SEED_TEAM_SLUG).first()
    if seed_team:
        # Delete team memberships first
        count = db.query(TeamMembership).filter(
            TeamMembership.team_id == seed_team.id
        ).delete(synchronize_session=False)
        results["team_memberships_deleted"] = count
        logger.info(f"Deleted {count} team memberships")

        # Delete the team
        db.delete(seed_team)
        results["teams_deleted"] = 1
        logger.info(f"Deleted team: {SEED_TEAM_SLUG}")

    # 11. Delete seed users (but NOT admin)
    seed_users = db.query(User).filter(User.username.in_(SEED_USERNAMES)).all()
    for user in seed_users:
        # Delete any remaining team memberships for this user
        db.query(TeamMembership).filter(TeamMembership.user_id == user.id).delete(
            synchronize_session=False
        )
        # Delete any access permissions for this user
        # Note: AccessPermission.user_id is VARCHAR (username), not UUID
        db.query(AccessPermission).filter(AccessPermission.user_id == user.username).delete(
            synchronize_session=False
        )
        db.delete(user)
        results["users_deleted"] += 1

    if results["users_deleted"] > 0:
        logger.info(f"Deleted {results['users_deleted']} seed users")

    db.commit()

    logger.warning("SEED DATA PURGE COMPLETE")
    logger.info(f"Purge results: {results}")

    return results
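A sketch of how the purge might be wired into application startup; the hook point and the module name are assumptions, not shown in this diff.

# Hypothetical startup hook.
from .database import SessionLocal
from .purge_seed import should_purge_seed_data, purge_seed_data   # assumed module name

def maybe_purge_on_startup() -> None:
    if not should_purge_seed_data():        # only acts when ORCHARD_PURGE_SEED_DATA=true
        return
    db = SessionLocal()
    try:
        results = purge_seed_data(db)       # commits internally, returns deletion counts
        print(results)
    finally:
        db.close()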
@@ -1,534 +0,0 @@
"""
Transparent PyPI proxy implementing PEP 503 (Simple API).

Provides endpoints that allow pip to use Orchard as a PyPI index URL.
Artifacts are cached on first access through configured upstream sources.
"""

import hashlib
import logging
import re
from typing import Optional
from urllib.parse import urljoin, urlparse, quote, unquote

import httpx
from fastapi import APIRouter, Depends, HTTPException, Request, Response
from fastapi.responses import StreamingResponse, HTMLResponse
from sqlalchemy.orm import Session

from .database import get_db
from .models import UpstreamSource, CachedUrl, Artifact, Project, Package, Tag
from .storage import S3Storage, get_storage
from .upstream import (
    UpstreamClient,
    UpstreamClientConfig,
    UpstreamHTTPError,
    UpstreamConnectionError,
    UpstreamTimeoutError,
)
from .config import get_env_upstream_sources

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/pypi", tags=["pypi-proxy"])

# Timeout configuration for proxy requests
PROXY_CONNECT_TIMEOUT = 30.0
PROXY_READ_TIMEOUT = 60.0


def _get_pypi_upstream_sources(db: Session) -> list[UpstreamSource]:
    """Get all enabled upstream sources configured for PyPI."""
    # Get database sources
    db_sources = (
        db.query(UpstreamSource)
        .filter(
            UpstreamSource.source_type == "pypi",
            UpstreamSource.enabled == True,
        )
        .order_by(UpstreamSource.priority)
        .all()
    )

    # Get env sources
    env_sources = [
        s for s in get_env_upstream_sources()
        if s.source_type == "pypi" and s.enabled
    ]

    # Combine and sort by priority
    all_sources = list(db_sources) + list(env_sources)
    return sorted(all_sources, key=lambda s: s.priority)


def _build_auth_headers(source) -> dict:
    """Build authentication headers for an upstream source."""
    headers = {}

    if hasattr(source, 'auth_type'):
        if source.auth_type == "bearer":
            password = source.get_password() if hasattr(source, 'get_password') else getattr(source, 'password', None)
            if password:
                headers["Authorization"] = f"Bearer {password}"
        elif source.auth_type == "api_key":
            custom_headers = source.get_headers() if hasattr(source, 'get_headers') else {}
            if custom_headers:
                headers.update(custom_headers)

    return headers


def _get_basic_auth(source) -> Optional[tuple[str, str]]:
    """Get basic auth credentials if applicable."""
    if hasattr(source, 'auth_type') and source.auth_type == "basic":
        username = getattr(source, 'username', None)
        if username:
            password = source.get_password() if hasattr(source, 'get_password') else getattr(source, 'password', '')
            return (username, password or '')
    return None


def _rewrite_package_links(html: str, base_url: str, package_name: str) -> str:
    """
    Rewrite download links in a PyPI simple page to go through our proxy.

    Args:
        html: The HTML content from upstream
        base_url: Our server's base URL
        package_name: The package name for the URL path

    Returns:
        HTML with rewritten download links
    """
    # Pattern to match href attributes in anchor tags
    # PyPI simple pages have links like:
    # <a href="https://files.pythonhosted.org/packages/.../file.tar.gz#sha256=...">file.tar.gz</a>

    def replace_href(match):
        original_url = match.group(1)
        # Extract the filename from the URL
        parsed = urlparse(original_url)
        path_parts = parsed.path.split('/')
        filename = path_parts[-1] if path_parts else ''

        # Keep the hash fragment if present
        fragment = f"#{parsed.fragment}" if parsed.fragment else ""

        # Encode the original URL for safe transmission
        encoded_url = quote(original_url.split('#')[0], safe='')

        # Build new URL pointing to our proxy
        new_url = f"{base_url}/pypi/simple/{package_name}/{filename}?upstream={encoded_url}{fragment}"

        return f'href="{new_url}"'

    # Match href="..." patterns
    rewritten = re.sub(r'href="([^"]+)"', replace_href, html)

    return rewritten

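A small before/after illustration of the link rewrite above; all values are made up.

# Illustrative only: what the rewrite produces for one anchor tag.
html_in = '<a href="https://files.pythonhosted.org/packages/ab/cd/requests-2.31.0.tar.gz#sha256=deadbeef">requests-2.31.0.tar.gz</a>'
html_out = _rewrite_package_links(html_in, "https://orchard.example", "requests")
# The href now points back at the proxy and carries the upstream URL as a query parameter:
#   https://orchard.example/pypi/simple/requests/requests-2.31.0.tar.gz?upstream=https%3A%2F%2Ffiles.pythonhosted.org%2Fpackages%2Fab%2Fcd%2Frequests-2.31.0.tar.gz#sha256=deadbeef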
@router.get("/simple/")
async def pypi_simple_index(
    request: Request,
    db: Session = Depends(get_db),
):
    """
    PyPI Simple API index - lists all packages.

    Proxies to the first available upstream PyPI source.
    """
    sources = _get_pypi_upstream_sources(db)

    if not sources:
        raise HTTPException(
            status_code=503,
            detail="No PyPI upstream sources configured"
        )

    # Try each source in priority order
    last_error = None
    for source in sources:
        try:
            headers = {"User-Agent": "Orchard-PyPI-Proxy/1.0"}
            headers.update(_build_auth_headers(source))
            auth = _get_basic_auth(source)

            simple_url = source.url.rstrip('/') + '/simple/'

            timeout = httpx.Timeout(PROXY_READ_TIMEOUT, connect=PROXY_CONNECT_TIMEOUT)

            with httpx.Client(timeout=timeout, follow_redirects=False) as client:
                response = client.get(
                    simple_url,
                    headers=headers,
                    auth=auth,
                )

                # Handle redirects manually to avoid loops
                if response.status_code in (301, 302, 303, 307, 308):
                    redirect_url = response.headers.get('location')
                    if redirect_url:
                        # Follow the redirect once
                        response = client.get(
                            redirect_url,
                            headers=headers,
                            auth=auth,
                            follow_redirects=False,
                        )

                if response.status_code == 200:
                    # Return the index as-is (links are to package pages, not files)
                    # We could rewrite these too, but for now just proxy
                    content = response.text

                    # Rewrite package links to go through our proxy
                    base_url = str(request.base_url).rstrip('/')
                    content = re.sub(
                        r'href="([^"]+)/"',
                        lambda m: f'href="{base_url}/pypi/simple/{m.group(1)}/"',
                        content
                    )

                    return HTMLResponse(content=content)

                last_error = f"HTTP {response.status_code}"

        except httpx.ConnectError as e:
            last_error = f"Connection failed: {e}"
            logger.warning(f"PyPI proxy: failed to connect to {source.url}: {e}")
        except httpx.TimeoutException as e:
            last_error = f"Timeout: {e}"
            logger.warning(f"PyPI proxy: timeout connecting to {source.url}: {e}")
        except Exception as e:
            last_error = str(e)
            logger.warning(f"PyPI proxy: error fetching from {source.url}: {e}")

    raise HTTPException(
        status_code=502,
        detail=f"Failed to fetch package index from upstream: {last_error}"
    )


@router.get("/simple/{package_name}/")
async def pypi_package_versions(
    request: Request,
    package_name: str,
    db: Session = Depends(get_db),
):
    """
    PyPI Simple API package page - lists all versions/files for a package.

    Proxies to upstream and rewrites download links to go through our cache.
    """
    sources = _get_pypi_upstream_sources(db)

    if not sources:
        raise HTTPException(
            status_code=503,
            detail="No PyPI upstream sources configured"
        )

    base_url = str(request.base_url).rstrip('/')

    # Normalize package name (PEP 503)
    normalized_name = re.sub(r'[-_.]+', '-', package_name).lower()

    # Try each source in priority order
    last_error = None
    for source in sources:
        try:
            headers = {"User-Agent": "Orchard-PyPI-Proxy/1.0"}
            headers.update(_build_auth_headers(source))
            auth = _get_basic_auth(source)

            package_url = source.url.rstrip('/') + f'/simple/{normalized_name}/'

            timeout = httpx.Timeout(PROXY_READ_TIMEOUT, connect=PROXY_CONNECT_TIMEOUT)

            with httpx.Client(timeout=timeout, follow_redirects=False) as client:
                response = client.get(
                    package_url,
                    headers=headers,
                    auth=auth,
                )

                # Handle redirects manually
                redirect_count = 0
                while response.status_code in (301, 302, 303, 307, 308) and redirect_count < 5:
                    redirect_url = response.headers.get('location')
                    if not redirect_url:
                        break

                    # Make redirect URL absolute if needed
                    if not redirect_url.startswith('http'):
                        redirect_url = urljoin(package_url, redirect_url)

                    response = client.get(
                        redirect_url,
                        headers=headers,
                        auth=auth,
                        follow_redirects=False,
                    )
                    redirect_count += 1

                if response.status_code == 200:
                    content = response.text

                    # Rewrite download links to go through our proxy
                    content = _rewrite_package_links(content, base_url, normalized_name)

                    return HTMLResponse(content=content)

                if response.status_code == 404:
                    # Package not found in this source, try next
                    last_error = f"Package not found in {source.name}"
                    continue

                last_error = f"HTTP {response.status_code}"

        except httpx.ConnectError as e:
            last_error = f"Connection failed: {e}"
            logger.warning(f"PyPI proxy: failed to connect to {source.url}: {e}")
        except httpx.TimeoutException as e:
            last_error = f"Timeout: {e}"
            logger.warning(f"PyPI proxy: timeout connecting to {source.url}: {e}")
        except Exception as e:
            last_error = str(e)
            logger.warning(f"PyPI proxy: error fetching {package_name} from {source.url}: {e}")

    raise HTTPException(
        status_code=404,
        detail=f"Package '{package_name}' not found: {last_error}"
    )


@router.get("/simple/{package_name}/{filename}")
async def pypi_download_file(
    request: Request,
    package_name: str,
    filename: str,
    upstream: Optional[str] = None,
    db: Session = Depends(get_db),
    storage: S3Storage = Depends(get_storage),
):
    """
    Download a package file, caching it in Orchard.

    Args:
        package_name: The package name
        filename: The filename to download
        upstream: URL-encoded upstream URL to fetch from
    """
    if not upstream:
        raise HTTPException(
            status_code=400,
            detail="Missing 'upstream' query parameter with source URL"
        )

    # Decode the upstream URL
    upstream_url = unquote(upstream)

    # Check if we already have this URL cached
    url_hash = hashlib.sha256(upstream_url.encode()).hexdigest()
    cached_url = db.query(CachedUrl).filter(CachedUrl.url_hash == url_hash).first()

    if cached_url:
        # Serve from cache
        artifact = db.query(Artifact).filter(Artifact.id == cached_url.artifact_id).first()
        if artifact:
            logger.info(f"PyPI proxy: serving cached {filename} (artifact {artifact.id[:12]})")

            # Stream from S3
            try:
                content_stream = storage.get_artifact_stream(artifact.id)

                return StreamingResponse(
                    content_stream,
                    media_type=artifact.content_type or "application/octet-stream",
                    headers={
                        "Content-Disposition": f'attachment; filename="{filename}"',
                        "Content-Length": str(artifact.size),
                        "X-Checksum-SHA256": artifact.id,
                        "X-Cache": "HIT",
                    }
                )
            except Exception as e:
                logger.error(f"PyPI proxy: error streaming cached artifact: {e}")
                # Fall through to fetch from upstream

    # Not cached - fetch from upstream
    sources = _get_pypi_upstream_sources(db)

    # Find a source that matches the upstream URL
    matched_source = None
    for source in sources:
        source_url = getattr(source, 'url', '')
        # Check if the upstream URL could come from this source
        # (This is a loose check - the URL might be from files.pythonhosted.org)
        if urlparse(upstream_url).netloc in source_url or True:  # Allow any source for now
            matched_source = source
            break

    if not matched_source and sources:
        matched_source = sources[0]  # Use first source for auth if available

    try:
        headers = {"User-Agent": "Orchard-PyPI-Proxy/1.0"}
        if matched_source:
            headers.update(_build_auth_headers(matched_source))
        auth = _get_basic_auth(matched_source) if matched_source else None

        timeout = httpx.Timeout(300.0, connect=PROXY_CONNECT_TIMEOUT)  # 5 minutes for large files

        # Fetch the file
        logger.info(f"PyPI proxy: fetching {filename} from {upstream_url}")

        with httpx.Client(timeout=timeout, follow_redirects=False) as client:
            response = client.get(
                upstream_url,
                headers=headers,
                auth=auth,
            )

            # Handle redirects manually
            redirect_count = 0
            while response.status_code in (301, 302, 303, 307, 308) and redirect_count < 5:
                redirect_url = response.headers.get('location')
                if not redirect_url:
                    break

                if not redirect_url.startswith('http'):
                    redirect_url = urljoin(upstream_url, redirect_url)

                logger.info(f"PyPI proxy: following redirect to {redirect_url}")

                # Don't send auth to different hosts
                redirect_headers = {"User-Agent": "Orchard-PyPI-Proxy/1.0"}
                redirect_auth = None
                if urlparse(redirect_url).netloc == urlparse(upstream_url).netloc:
                    redirect_headers.update(headers)
                    redirect_auth = auth

                response = client.get(
                    redirect_url,
                    headers=redirect_headers,
                    auth=redirect_auth,
                    follow_redirects=False,
                )
                redirect_count += 1

            if response.status_code != 200:
                raise HTTPException(
                    status_code=response.status_code,
                    detail=f"Upstream returned {response.status_code}"
                )

            content = response.content
            content_type = response.headers.get('content-type', 'application/octet-stream')

            # Compute hash
            sha256 = hashlib.sha256(content).hexdigest()
            size = len(content)

            logger.info(f"PyPI proxy: downloaded {filename}, {size} bytes, sha256={sha256[:12]}")

            # Store in S3
            from io import BytesIO
            artifact = storage.store_artifact(
                file_obj=BytesIO(content),
                filename=filename,
                content_type=content_type,
            )

            # Check if artifact already exists
            existing = db.query(Artifact).filter(Artifact.id == sha256).first()
            if existing:
                # Increment ref count
                existing.ref_count += 1
                db.flush()
            else:
                # Create artifact record
                new_artifact = Artifact(
                    id=sha256,
                    filename=filename,
                    content_type=content_type,
                    size=size,
                    ref_count=1,
                )
                db.add(new_artifact)
                db.flush()

            # Create/get system project and package
            system_project = db.query(Project).filter(Project.name == "_pypi").first()
            if not system_project:
                system_project = Project(
                    name="_pypi",
                    description="System project for cached PyPI packages",
                    visibility="private",
                )
                db.add(system_project)
                db.flush()

            # Normalize package name
            normalized_name = re.sub(r'[-_.]+', '-', package_name).lower()

            package = db.query(Package).filter(
                Package.project_id == system_project.id,
                Package.name == normalized_name,
            ).first()
            if not package:
                package = Package(
                    project_id=system_project.id,
                    name=normalized_name,
                    description=f"PyPI package: {normalized_name}",
                )
                db.add(package)
                db.flush()

            # Create tag with filename
            existing_tag = db.query(Tag).filter(
                Tag.package_id == package.id,
                Tag.name == filename,
            ).first()
            if not existing_tag:
                tag = Tag(
                    package_id=package.id,
                    name=filename,
                    artifact_id=sha256,
                )
                db.add(tag)

            # Cache the URL mapping
            existing_cached = db.query(CachedUrl).filter(CachedUrl.url_hash == url_hash).first()
            if not existing_cached:
                cached_url_record = CachedUrl(
                    url_hash=url_hash,
                    url=upstream_url,
                    artifact_id=sha256,
                )
                db.add(cached_url_record)

            db.commit()

            # Return the file
            return Response(
                content=content,
                media_type=content_type,
                headers={
                    "Content-Disposition": f'attachment; filename="{filename}"',
                    "Content-Length": str(size),
                    "X-Checksum-SHA256": sha256,
                    "X-Cache": "MISS",
                }
            )

    except httpx.ConnectError as e:
        raise HTTPException(status_code=502, detail=f"Connection failed: {e}")
    except httpx.TimeoutException as e:
        raise HTTPException(status_code=504, detail=f"Timeout: {e}")
    except HTTPException:
        raise
    except Exception as e:
        logger.exception(f"PyPI proxy: error downloading {filename}")
        raise HTTPException(status_code=500, detail=str(e))
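With these routes mounted under /pypi, a client can point pip at the proxy directly, e.g. pip install --index-url https://<orchard-host>/pypi/simple/ <package>; the hostname is illustrative. On the first install the file is fetched from upstream and stored (X-Cache: MISS), and subsequent installs are served from Orchard's cache (X-Cache: HIT).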
File diff suppressed because it is too large
@@ -25,7 +25,6 @@ class ProjectCreate(BaseModel):
    name: str
    description: Optional[str] = None
    is_public: bool = True
    team_id: Optional[UUID] = None


class ProjectResponse(BaseModel):
@@ -36,9 +35,6 @@ class ProjectResponse(BaseModel):
    created_at: datetime
    updated_at: datetime
    created_by: str
    team_id: Optional[UUID] = None
    team_slug: Optional[str] = None
    team_name: Optional[str] = None

    class Config:
        from_attributes = True
@@ -911,9 +907,6 @@ class AccessPermissionResponse(BaseModel):
    level: str
    created_at: datetime
    expires_at: Optional[datetime]
    source: Optional[str] = "explicit"  # "explicit" or "team"
    team_slug: Optional[str] = None  # Team slug if source is "team"
    team_role: Optional[str] = None  # Team role if source is "team"

    class Config:
        from_attributes = True
@@ -923,550 +916,3 @@ class ProjectWithAccessResponse(ProjectResponse):
    """Project response with user's access level"""
    user_access_level: Optional[str] = None


# Artifact Dependency schemas
class DependencyCreate(BaseModel):
    """Schema for creating a dependency"""
    project: str
    package: str
    version: Optional[str] = None
    tag: Optional[str] = None

    @field_validator('version', 'tag')
    @classmethod
    def validate_constraint(cls, v, info):
        return v

    def model_post_init(self, __context):
        """Validate that exactly one of version or tag is set"""
        if self.version is None and self.tag is None:
            raise ValueError("Either 'version' or 'tag' must be specified")
        if self.version is not None and self.tag is not None:
            raise ValueError("Cannot specify both 'version' and 'tag'")


class DependencyResponse(BaseModel):
    """Schema for dependency response"""
    id: UUID
    artifact_id: str
    project: str
    package: str
    version: Optional[str] = None
    tag: Optional[str] = None
    created_at: datetime

    class Config:
        from_attributes = True

    @classmethod
    def from_orm_model(cls, dep) -> "DependencyResponse":
        """Create from ORM model with field mapping"""
        return cls(
            id=dep.id,
            artifact_id=dep.artifact_id,
            project=dep.dependency_project,
            package=dep.dependency_package,
            version=dep.version_constraint,
            tag=dep.tag_constraint,
            created_at=dep.created_at,
        )


class ArtifactDependenciesResponse(BaseModel):
    """Response containing all dependencies for an artifact"""
    artifact_id: str
    dependencies: List[DependencyResponse]


class DependentInfo(BaseModel):
    """Information about an artifact that depends on a package"""
    artifact_id: str
    project: str
    package: str
    version: Optional[str] = None
    constraint_type: str  # 'version' or 'tag'
|
|
||||||
constraint_value: str
|
|
||||||
|
|
||||||
|
|
||||||
class ReverseDependenciesResponse(BaseModel):
|
|
||||||
"""Response containing packages that depend on a given package"""
|
|
||||||
project: str
|
|
||||||
package: str
|
|
||||||
dependents: List[DependentInfo]
|
|
||||||
pagination: PaginationMeta
|
|
||||||
|
|
||||||
|
|
||||||
class EnsureFileDependency(BaseModel):
|
|
||||||
"""Dependency entry from orchard.ensure file"""
|
|
||||||
project: str
|
|
||||||
package: str
|
|
||||||
version: Optional[str] = None
|
|
||||||
tag: Optional[str] = None
|
|
||||||
|
|
||||||
@field_validator('version', 'tag')
|
|
||||||
@classmethod
|
|
||||||
def validate_constraint(cls, v, info):
|
|
||||||
return v
|
|
||||||
|
|
||||||
def model_post_init(self, __context):
|
|
||||||
"""Validate that exactly one of version or tag is set"""
|
|
||||||
if self.version is None and self.tag is None:
|
|
||||||
raise ValueError("Either 'version' or 'tag' must be specified")
|
|
||||||
if self.version is not None and self.tag is not None:
|
|
||||||
raise ValueError("Cannot specify both 'version' and 'tag'")
|
|
||||||
|
|
||||||
|
|
||||||
class EnsureFileContent(BaseModel):
|
|
||||||
"""Parsed content of orchard.ensure file"""
|
|
||||||
dependencies: List[EnsureFileDependency] = []
|
|
||||||
|
|
||||||
|
|
||||||
class ResolvedArtifact(BaseModel):
|
|
||||||
"""A resolved artifact in the dependency tree"""
|
|
||||||
artifact_id: str
|
|
||||||
project: str
|
|
||||||
package: str
|
|
||||||
version: Optional[str] = None
|
|
||||||
tag: Optional[str] = None
|
|
||||||
size: int
|
|
||||||
download_url: str
|
|
||||||
|
|
||||||
|
|
||||||
class DependencyResolutionResponse(BaseModel):
|
|
||||||
"""Response from dependency resolution endpoint"""
|
|
||||||
requested: Dict[str, str] # project, package, ref
|
|
||||||
resolved: List[ResolvedArtifact]
|
|
||||||
total_size: int
|
|
||||||
artifact_count: int
|
|
||||||
|
|
||||||
|
|
||||||
class DependencyConflict(BaseModel):
|
|
||||||
"""Details about a dependency conflict"""
|
|
||||||
project: str
|
|
||||||
package: str
|
|
||||||
requirements: List[Dict[str, Any]] # version/tag and required_by info
|
|
||||||
|
|
||||||
|
|
||||||
class DependencyConflictError(BaseModel):
|
|
||||||
"""Error response for dependency conflicts"""
|
|
||||||
error: str = "dependency_conflict"
|
|
||||||
message: str
|
|
||||||
conflicts: List[DependencyConflict]
|
|
||||||
|
|
||||||
|
|
||||||
class CircularDependencyError(BaseModel):
|
|
||||||
"""Error response for circular dependencies"""
|
|
||||||
error: str = "circular_dependency"
|
|
||||||
message: str
|
|
||||||
cycle: List[str] # List of "project/package" strings showing the cycle
|
|
||||||
|
|
||||||
|
|
||||||
# Team schemas
|
|
||||||
TEAM_ROLES = ["owner", "admin", "member"]
|
|
||||||
RESERVED_TEAM_SLUGS = {"new", "api", "admin", "settings", "members", "projects", "search"}
|
|
||||||
|
|
||||||
|
|
||||||
class TeamCreate(BaseModel):
|
|
||||||
"""Create a new team"""
|
|
||||||
name: str
|
|
||||||
slug: str
|
|
||||||
description: Optional[str] = None
|
|
||||||
|
|
||||||
@field_validator('name')
|
|
||||||
@classmethod
|
|
||||||
def validate_name(cls, v: str) -> str:
|
|
||||||
"""Validate team name."""
|
|
||||||
if not v or not v.strip():
|
|
||||||
raise ValueError("Name cannot be empty")
|
|
||||||
if len(v) > 255:
|
|
||||||
raise ValueError("Name must be 255 characters or less")
|
|
||||||
return v.strip()
|
|
||||||
|
|
||||||
@field_validator('slug')
|
|
||||||
@classmethod
|
|
||||||
def validate_slug(cls, v: str) -> str:
|
|
||||||
"""Validate team slug format (lowercase alphanumeric with hyphens)."""
|
|
||||||
import re
|
|
||||||
if not v:
|
|
||||||
raise ValueError("Slug cannot be empty")
|
|
||||||
if len(v) < 2:
|
|
||||||
raise ValueError("Slug must be at least 2 characters")
|
|
||||||
if len(v) > 255:
|
|
||||||
raise ValueError("Slug must be 255 characters or less")
|
|
||||||
if not re.match(r'^[a-z0-9][a-z0-9-]*[a-z0-9]$', v) and not re.match(r'^[a-z0-9]$', v):
|
|
||||||
raise ValueError(
|
|
||||||
"Slug must be lowercase alphanumeric with hyphens, "
|
|
||||||
"starting and ending with alphanumeric characters"
|
|
||||||
)
|
|
||||||
if '--' in v:
|
|
||||||
raise ValueError("Slug cannot contain consecutive hyphens")
|
|
||||||
if v in RESERVED_TEAM_SLUGS:
|
|
||||||
raise ValueError(f"Slug '{v}' is reserved and cannot be used")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('description')
|
|
||||||
@classmethod
|
|
||||||
def validate_description(cls, v: Optional[str]) -> Optional[str]:
|
|
||||||
"""Validate team description."""
|
|
||||||
if v is not None and len(v) > 2000:
|
|
||||||
raise ValueError("Description must be 2000 characters or less")
|
|
||||||
return v
|
|
||||||
|
|
||||||
|
|
||||||
class TeamUpdate(BaseModel):
|
|
||||||
"""Update team details"""
|
|
||||||
name: Optional[str] = None
|
|
||||||
description: Optional[str] = None
|
|
||||||
|
|
||||||
@field_validator('name')
|
|
||||||
@classmethod
|
|
||||||
def validate_name(cls, v: Optional[str]) -> Optional[str]:
|
|
||||||
"""Validate team name."""
|
|
||||||
if v is not None:
|
|
||||||
if not v.strip():
|
|
||||||
raise ValueError("Name cannot be empty")
|
|
||||||
if len(v) > 255:
|
|
||||||
raise ValueError("Name must be 255 characters or less")
|
|
||||||
return v.strip()
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('description')
|
|
||||||
@classmethod
|
|
||||||
def validate_description(cls, v: Optional[str]) -> Optional[str]:
|
|
||||||
"""Validate team description."""
|
|
||||||
if v is not None and len(v) > 2000:
|
|
||||||
raise ValueError("Description must be 2000 characters or less")
|
|
||||||
return v
|
|
||||||
|
|
||||||
|
|
||||||
class TeamResponse(BaseModel):
|
|
||||||
"""Team response with basic info"""
|
|
||||||
id: UUID
|
|
||||||
name: str
|
|
||||||
slug: str
|
|
||||||
description: Optional[str]
|
|
||||||
created_at: datetime
|
|
||||||
updated_at: datetime
|
|
||||||
member_count: int = 0
|
|
||||||
project_count: int = 0
|
|
||||||
|
|
||||||
class Config:
|
|
||||||
from_attributes = True
|
|
||||||
|
|
||||||
|
|
||||||
class TeamDetailResponse(TeamResponse):
|
|
||||||
"""Team response with user's role"""
|
|
||||||
user_role: Optional[str] = None # 'owner', 'admin', 'member', or None
|
|
||||||
|
|
||||||
|
|
||||||
class TeamMemberCreate(BaseModel):
|
|
||||||
"""Add a member to a team"""
|
|
||||||
username: str
|
|
||||||
role: str = "member"
|
|
||||||
|
|
||||||
@field_validator('role')
|
|
||||||
@classmethod
|
|
||||||
def validate_role(cls, v: str) -> str:
|
|
||||||
if v not in TEAM_ROLES:
|
|
||||||
raise ValueError(f"Role must be one of: {', '.join(TEAM_ROLES)}")
|
|
||||||
return v
|
|
||||||
|
|
||||||
|
|
||||||
class TeamMemberUpdate(BaseModel):
|
|
||||||
"""Update a team member's role"""
|
|
||||||
role: str
|
|
||||||
|
|
||||||
@field_validator('role')
|
|
||||||
@classmethod
|
|
||||||
def validate_role(cls, v: str) -> str:
|
|
||||||
if v not in TEAM_ROLES:
|
|
||||||
raise ValueError(f"Role must be one of: {', '.join(TEAM_ROLES)}")
|
|
||||||
return v
|
|
||||||
|
|
||||||
|
|
||||||
class TeamMemberResponse(BaseModel):
|
|
||||||
"""Team member response"""
|
|
||||||
id: UUID
|
|
||||||
user_id: UUID
|
|
||||||
username: str
|
|
||||||
email: Optional[str]
|
|
||||||
role: str
|
|
||||||
created_at: datetime
|
|
||||||
|
|
||||||
class Config:
|
|
||||||
from_attributes = True
|
|
||||||
|
|
||||||
|
|
||||||
# =============================================================================
|
|
||||||
# Upstream Caching Schemas
|
|
||||||
# =============================================================================
|
|
||||||
|
|
||||||
# Valid source types
|
|
||||||
SOURCE_TYPES = ["npm", "pypi", "maven", "docker", "helm", "nuget", "deb", "rpm", "generic"]
|
|
||||||
|
|
||||||
# Valid auth types
|
|
||||||
AUTH_TYPES = ["none", "basic", "bearer", "api_key"]
|
|
||||||
|
|
||||||
|
|
||||||
class UpstreamSourceCreate(BaseModel):
|
|
||||||
"""Create a new upstream source"""
|
|
||||||
name: str
|
|
||||||
source_type: str = "generic"
|
|
||||||
url: str
|
|
||||||
enabled: bool = False
|
|
||||||
auth_type: str = "none"
|
|
||||||
username: Optional[str] = None
|
|
||||||
password: Optional[str] = None # Write-only
|
|
||||||
headers: Optional[dict] = None # Write-only, custom headers
|
|
||||||
priority: int = 100
|
|
||||||
|
|
||||||
@field_validator('name')
|
|
||||||
@classmethod
|
|
||||||
def validate_name(cls, v: str) -> str:
|
|
||||||
v = v.strip()
|
|
||||||
if not v:
|
|
||||||
raise ValueError("name cannot be empty")
|
|
||||||
if len(v) > 255:
|
|
||||||
raise ValueError("name must be 255 characters or less")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('source_type')
|
|
||||||
@classmethod
|
|
||||||
def validate_source_type(cls, v: str) -> str:
|
|
||||||
if v not in SOURCE_TYPES:
|
|
||||||
raise ValueError(f"source_type must be one of: {', '.join(SOURCE_TYPES)}")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('url')
|
|
||||||
@classmethod
|
|
||||||
def validate_url(cls, v: str) -> str:
|
|
||||||
v = v.strip()
|
|
||||||
if not v:
|
|
||||||
raise ValueError("url cannot be empty")
|
|
||||||
if not (v.startswith('http://') or v.startswith('https://')):
|
|
||||||
raise ValueError("url must start with http:// or https://")
|
|
||||||
if len(v) > 2048:
|
|
||||||
raise ValueError("url must be 2048 characters or less")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('auth_type')
|
|
||||||
@classmethod
|
|
||||||
def validate_auth_type(cls, v: str) -> str:
|
|
||||||
if v not in AUTH_TYPES:
|
|
||||||
raise ValueError(f"auth_type must be one of: {', '.join(AUTH_TYPES)}")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('priority')
|
|
||||||
@classmethod
|
|
||||||
def validate_priority(cls, v: int) -> int:
|
|
||||||
if v <= 0:
|
|
||||||
raise ValueError("priority must be greater than 0")
|
|
||||||
return v
|
|
||||||
|
|
||||||
|
|
||||||
class UpstreamSourceUpdate(BaseModel):
|
|
||||||
"""Update an upstream source (partial)"""
|
|
||||||
name: Optional[str] = None
|
|
||||||
source_type: Optional[str] = None
|
|
||||||
url: Optional[str] = None
|
|
||||||
enabled: Optional[bool] = None
|
|
||||||
auth_type: Optional[str] = None
|
|
||||||
username: Optional[str] = None
|
|
||||||
password: Optional[str] = None # Write-only, None = keep existing, empty string = clear
|
|
||||||
headers: Optional[dict] = None # Write-only
|
|
||||||
priority: Optional[int] = None
|
|
||||||
|
|
||||||
@field_validator('name')
|
|
||||||
@classmethod
|
|
||||||
def validate_name(cls, v: Optional[str]) -> Optional[str]:
|
|
||||||
if v is not None:
|
|
||||||
v = v.strip()
|
|
||||||
if not v:
|
|
||||||
raise ValueError("name cannot be empty")
|
|
||||||
if len(v) > 255:
|
|
||||||
raise ValueError("name must be 255 characters or less")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('source_type')
|
|
||||||
@classmethod
|
|
||||||
def validate_source_type(cls, v: Optional[str]) -> Optional[str]:
|
|
||||||
if v is not None and v not in SOURCE_TYPES:
|
|
||||||
raise ValueError(f"source_type must be one of: {', '.join(SOURCE_TYPES)}")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('url')
|
|
||||||
@classmethod
|
|
||||||
def validate_url(cls, v: Optional[str]) -> Optional[str]:
|
|
||||||
if v is not None:
|
|
||||||
v = v.strip()
|
|
||||||
if not v:
|
|
||||||
raise ValueError("url cannot be empty")
|
|
||||||
if not (v.startswith('http://') or v.startswith('https://')):
|
|
||||||
raise ValueError("url must start with http:// or https://")
|
|
||||||
if len(v) > 2048:
|
|
||||||
raise ValueError("url must be 2048 characters or less")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('auth_type')
|
|
||||||
@classmethod
|
|
||||||
def validate_auth_type(cls, v: Optional[str]) -> Optional[str]:
|
|
||||||
if v is not None and v not in AUTH_TYPES:
|
|
||||||
raise ValueError(f"auth_type must be one of: {', '.join(AUTH_TYPES)}")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('priority')
|
|
||||||
@classmethod
|
|
||||||
def validate_priority(cls, v: Optional[int]) -> Optional[int]:
|
|
||||||
if v is not None and v <= 0:
|
|
||||||
raise ValueError("priority must be greater than 0")
|
|
||||||
return v
|
|
||||||
|
|
||||||
|
|
||||||
class UpstreamSourceResponse(BaseModel):
|
|
||||||
"""Upstream source response (credentials never included)"""
|
|
||||||
id: UUID
|
|
||||||
name: str
|
|
||||||
source_type: str
|
|
||||||
url: str
|
|
||||||
enabled: bool
|
|
||||||
auth_type: str
|
|
||||||
username: Optional[str]
|
|
||||||
has_password: bool # True if password is set
|
|
||||||
has_headers: bool # True if custom headers are set
|
|
||||||
priority: int
|
|
||||||
source: str = "database" # "database" or "env" (env = defined via environment variables)
|
|
||||||
created_at: Optional[datetime] = None # May be None for legacy/env data
|
|
||||||
updated_at: Optional[datetime] = None # May be None for legacy/env data
|
|
||||||
|
|
||||||
class Config:
|
|
||||||
from_attributes = True
|
|
||||||
|
|
||||||
|
|
||||||
class CacheSettingsResponse(BaseModel):
|
|
||||||
"""Global cache settings response"""
|
|
||||||
auto_create_system_projects: bool
|
|
||||||
auto_create_system_projects_env_override: Optional[bool] = None # Set if overridden by env var
|
|
||||||
created_at: Optional[datetime] = None # May be None for legacy data
|
|
||||||
updated_at: Optional[datetime] = None # May be None for legacy data
|
|
||||||
|
|
||||||
class Config:
|
|
||||||
from_attributes = True
|
|
||||||
|
|
||||||
|
|
||||||
class CacheSettingsUpdate(BaseModel):
|
|
||||||
"""Update cache settings (partial)"""
|
|
||||||
auto_create_system_projects: Optional[bool] = None
|
|
||||||
|
|
||||||
|
|
||||||
class CachedUrlResponse(BaseModel):
|
|
||||||
"""Cached URL response"""
|
|
||||||
id: UUID
|
|
||||||
url: str
|
|
||||||
url_hash: str
|
|
||||||
artifact_id: str
|
|
||||||
source_id: Optional[UUID]
|
|
||||||
source_name: Optional[str] = None # Populated from join
|
|
||||||
fetched_at: datetime
|
|
||||||
created_at: datetime
|
|
||||||
|
|
||||||
class Config:
|
|
||||||
from_attributes = True
|
|
||||||
|
|
||||||
|
|
||||||
class CacheRequest(BaseModel):
|
|
||||||
"""Request to cache an artifact from an upstream URL"""
|
|
||||||
url: str
|
|
||||||
source_type: str
|
|
||||||
package_name: Optional[str] = None # Auto-derived from URL if not provided
|
|
||||||
tag: Optional[str] = None # Auto-derived from URL if not provided
|
|
||||||
user_project: Optional[str] = None # Cross-reference to user project
|
|
||||||
user_package: Optional[str] = None
|
|
||||||
user_tag: Optional[str] = None
|
|
||||||
expected_hash: Optional[str] = None # Verify downloaded content
|
|
||||||
|
|
||||||
@field_validator('url')
|
|
||||||
@classmethod
|
|
||||||
def validate_url(cls, v: str) -> str:
|
|
||||||
v = v.strip()
|
|
||||||
if not v:
|
|
||||||
raise ValueError("url cannot be empty")
|
|
||||||
if not (v.startswith('http://') or v.startswith('https://')):
|
|
||||||
raise ValueError("url must start with http:// or https://")
|
|
||||||
if len(v) > 4096:
|
|
||||||
raise ValueError("url must be 4096 characters or less")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('source_type')
|
|
||||||
@classmethod
|
|
||||||
def validate_source_type(cls, v: str) -> str:
|
|
||||||
if v not in SOURCE_TYPES:
|
|
||||||
raise ValueError(f"source_type must be one of: {', '.join(SOURCE_TYPES)}")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('expected_hash')
|
|
||||||
@classmethod
|
|
||||||
def validate_expected_hash(cls, v: Optional[str]) -> Optional[str]:
|
|
||||||
if v is not None:
|
|
||||||
v = v.strip().lower()
|
|
||||||
# Remove sha256: prefix if present
|
|
||||||
if v.startswith('sha256:'):
|
|
||||||
v = v[7:]
|
|
||||||
# Validate hex format
|
|
||||||
if len(v) != 64 or not all(c in '0123456789abcdef' for c in v):
|
|
||||||
raise ValueError("expected_hash must be a 64-character hex string (SHA256)")
|
|
||||||
return v
|
|
||||||
|
|
||||||
|
|
||||||
class CacheResponse(BaseModel):
|
|
||||||
"""Response from caching an artifact"""
|
|
||||||
artifact_id: str
|
|
||||||
sha256: str
|
|
||||||
size: int
|
|
||||||
content_type: Optional[str]
|
|
||||||
already_cached: bool
|
|
||||||
source_url: str
|
|
||||||
source_name: Optional[str]
|
|
||||||
system_project: str
|
|
||||||
system_package: str
|
|
||||||
system_tag: Optional[str]
|
|
||||||
user_reference: Optional[str] = None # e.g., "my-app/npm-deps:lodash-4.17.21"
|
|
||||||
|
|
||||||
|
|
||||||
class CacheResolveRequest(BaseModel):
|
|
||||||
"""Request to cache an artifact by package coordinates (no URL required).
|
|
||||||
|
|
||||||
The server will construct the appropriate URL based on source_type and
|
|
||||||
configured upstream sources.
|
|
||||||
"""
|
|
||||||
source_type: str
|
|
||||||
package: str
|
|
||||||
version: str
|
|
||||||
user_project: Optional[str] = None
|
|
||||||
user_package: Optional[str] = None
|
|
||||||
user_tag: Optional[str] = None
|
|
||||||
|
|
||||||
@field_validator('source_type')
|
|
||||||
@classmethod
|
|
||||||
def validate_source_type(cls, v: str) -> str:
|
|
||||||
if v not in SOURCE_TYPES:
|
|
||||||
raise ValueError(f"source_type must be one of: {', '.join(SOURCE_TYPES)}")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('package')
|
|
||||||
@classmethod
|
|
||||||
def validate_package(cls, v: str) -> str:
|
|
||||||
v = v.strip()
|
|
||||||
if not v:
|
|
||||||
raise ValueError("package cannot be empty")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@field_validator('version')
|
|
||||||
@classmethod
|
|
||||||
def validate_version(cls, v: str) -> str:
|
|
||||||
v = v.strip()
|
|
||||||
if not v:
|
|
||||||
raise ValueError("version cannot be empty")
|
|
||||||
return v
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -5,9 +5,8 @@ import hashlib
|
|||||||
import logging
|
import logging
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
from .models import Project, Package, Artifact, Tag, Upload, PackageVersion, ArtifactDependency, Team, TeamMembership, User
|
from .models import Project, Package, Artifact, Tag, Upload, PackageVersion
|
||||||
from .storage import get_storage
|
from .storage import get_storage
|
||||||
from .auth import hash_password
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -124,17 +123,6 @@ TEST_ARTIFACTS = [
|
|||||||
},
|
},
|
||||||
]
|
]
|
||||||
|
|
||||||
# Dependencies to create (source artifact -> dependency)
|
|
||||||
# Format: (source_project, source_package, source_version, dep_project, dep_package, version_constraint, tag_constraint)
|
|
||||||
TEST_DEPENDENCIES = [
|
|
||||||
# ui-components v1.1.0 depends on design-tokens v1.0.0
|
|
||||||
("frontend-libs", "ui-components", "1.1.0", "frontend-libs", "design-tokens", "1.0.0", None),
|
|
||||||
# auth-lib v1.0.0 depends on common-utils v2.0.0
|
|
||||||
("backend-services", "auth-lib", "1.0.0", "backend-services", "common-utils", "2.0.0", None),
|
|
||||||
# auth-lib v1.0.0 also depends on design-tokens (stable tag)
|
|
||||||
("backend-services", "auth-lib", "1.0.0", "frontend-libs", "design-tokens", None, "latest"),
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def is_database_empty(db: Session) -> bool:
|
def is_database_empty(db: Session) -> bool:
|
||||||
"""Check if the database has any projects."""
|
"""Check if the database has any projects."""
|
||||||
@@ -150,80 +138,6 @@ def seed_database(db: Session) -> None:
|
|||||||
logger.info("Seeding database with test data...")
|
logger.info("Seeding database with test data...")
|
||||||
storage = get_storage()
|
storage = get_storage()
|
||||||
|
|
||||||
# Find or use admin user for team ownership
|
|
||||||
admin_user = db.query(User).filter(User.username == "admin").first()
|
|
||||||
team_owner_username = admin_user.username if admin_user else "seed-user"
|
|
||||||
|
|
||||||
# Create a demo team
|
|
||||||
demo_team = Team(
|
|
||||||
name="Demo Team",
|
|
||||||
slug="demo-team",
|
|
||||||
description="A demonstration team with sample projects",
|
|
||||||
created_by=team_owner_username,
|
|
||||||
)
|
|
||||||
db.add(demo_team)
|
|
||||||
db.flush()
|
|
||||||
|
|
||||||
# Add admin user as team owner if they exist
|
|
||||||
if admin_user:
|
|
||||||
membership = TeamMembership(
|
|
||||||
team_id=demo_team.id,
|
|
||||||
user_id=admin_user.id,
|
|
||||||
role="owner",
|
|
||||||
invited_by=team_owner_username,
|
|
||||||
)
|
|
||||||
db.add(membership)
|
|
||||||
db.flush()
|
|
||||||
|
|
||||||
logger.info(f"Created team: {demo_team.name} ({demo_team.slug})")
|
|
||||||
|
|
||||||
# Create test users with various roles
|
|
||||||
test_users = [
|
|
||||||
{"username": "alice", "email": "alice@example.com", "role": "admin"},
|
|
||||||
{"username": "bob", "email": "bob@example.com", "role": "admin"},
|
|
||||||
{"username": "charlie", "email": "charlie@example.com", "role": "member"},
|
|
||||||
{"username": "diana", "email": "diana@example.com", "role": "member"},
|
|
||||||
{"username": "eve", "email": "eve@example.com", "role": "member"},
|
|
||||||
{"username": "frank", "email": None, "role": "member"},
|
|
||||||
]
|
|
||||||
|
|
||||||
for user_data in test_users:
|
|
||||||
# Check if user already exists
|
|
||||||
existing_user = db.query(User).filter(User.username == user_data["username"]).first()
|
|
||||||
if existing_user:
|
|
||||||
test_user = existing_user
|
|
||||||
else:
|
|
||||||
# Create the user with password same as username
|
|
||||||
test_user = User(
|
|
||||||
username=user_data["username"],
|
|
||||||
email=user_data["email"],
|
|
||||||
password_hash=hash_password(user_data["username"]),
|
|
||||||
is_admin=False,
|
|
||||||
is_active=True,
|
|
||||||
must_change_password=False,
|
|
||||||
)
|
|
||||||
db.add(test_user)
|
|
||||||
db.flush()
|
|
||||||
logger.info(f"Created test user: {user_data['username']}")
|
|
||||||
|
|
||||||
# Add to demo team with specified role
|
|
||||||
existing_membership = db.query(TeamMembership).filter(
|
|
||||||
TeamMembership.team_id == demo_team.id,
|
|
||||||
TeamMembership.user_id == test_user.id,
|
|
||||||
).first()
|
|
||||||
|
|
||||||
if not existing_membership:
|
|
||||||
membership = TeamMembership(
|
|
||||||
team_id=demo_team.id,
|
|
||||||
user_id=test_user.id,
|
|
||||||
role=user_data["role"],
|
|
||||||
invited_by=team_owner_username,
|
|
||||||
)
|
|
||||||
db.add(membership)
|
|
||||||
logger.info(f"Added {user_data['username']} to {demo_team.slug} as {user_data['role']}")
|
|
||||||
|
|
||||||
db.flush()
|
|
||||||
|
|
||||||
# Create projects and packages
|
# Create projects and packages
|
||||||
project_map = {}
|
project_map = {}
|
||||||
package_map = {}
|
package_map = {}
|
||||||
@@ -233,8 +147,7 @@ def seed_database(db: Session) -> None:
|
|||||||
name=project_data["name"],
|
name=project_data["name"],
|
||||||
description=project_data["description"],
|
description=project_data["description"],
|
||||||
is_public=project_data["is_public"],
|
is_public=project_data["is_public"],
|
||||||
created_by=team_owner_username,
|
created_by="seed-user",
|
||||||
team_id=demo_team.id, # Assign to demo team
|
|
||||||
)
|
)
|
||||||
db.add(project)
|
db.add(project)
|
||||||
db.flush() # Get the ID
|
db.flush() # Get the ID
|
||||||
@@ -250,7 +163,7 @@ def seed_database(db: Session) -> None:
|
|||||||
db.flush()
|
db.flush()
|
||||||
package_map[(project_data["name"], package_data["name"])] = package
|
package_map[(project_data["name"], package_data["name"])] = package
|
||||||
|
|
||||||
logger.info(f"Created {len(project_map)} projects and {len(package_map)} packages (assigned to {demo_team.slug})")
|
logger.info(f"Created {len(project_map)} projects and {len(package_map)} packages")
|
||||||
|
|
||||||
# Create artifacts, tags, and versions
|
# Create artifacts, tags, and versions
|
||||||
artifact_count = 0
|
artifact_count = 0
|
||||||
@@ -288,7 +201,7 @@ def seed_database(db: Session) -> None:
|
|||||||
size=size,
|
size=size,
|
||||||
content_type=artifact_data["content_type"],
|
content_type=artifact_data["content_type"],
|
||||||
original_name=artifact_data["filename"],
|
original_name=artifact_data["filename"],
|
||||||
created_by=team_owner_username,
|
created_by="seed-user",
|
||||||
s3_key=s3_key,
|
s3_key=s3_key,
|
||||||
ref_count=ref_count,
|
ref_count=ref_count,
|
||||||
)
|
)
|
||||||
@@ -311,7 +224,7 @@ def seed_database(db: Session) -> None:
|
|||||||
artifact_id=sha256_hash,
|
artifact_id=sha256_hash,
|
||||||
version=artifact_data["version"],
|
version=artifact_data["version"],
|
||||||
version_source="explicit",
|
version_source="explicit",
|
||||||
created_by=team_owner_username,
|
created_by="seed-user",
|
||||||
)
|
)
|
||||||
db.add(version)
|
db.add(version)
|
||||||
version_count += 1
|
version_count += 1
|
||||||
@@ -322,45 +235,11 @@ def seed_database(db: Session) -> None:
|
|||||||
package_id=package.id,
|
package_id=package.id,
|
||||||
name=tag_name,
|
name=tag_name,
|
||||||
artifact_id=sha256_hash,
|
artifact_id=sha256_hash,
|
||||||
created_by=team_owner_username,
|
created_by="seed-user",
|
||||||
)
|
)
|
||||||
db.add(tag)
|
db.add(tag)
|
||||||
tag_count += 1
|
tag_count += 1
|
||||||
|
|
||||||
db.flush()
|
|
||||||
|
|
||||||
# Create dependencies
|
|
||||||
dependency_count = 0
|
|
||||||
for dep_data in TEST_DEPENDENCIES:
|
|
||||||
src_project, src_package, src_version, dep_project, dep_package, version_constraint, tag_constraint = dep_data
|
|
||||||
|
|
||||||
# Find the source artifact by looking up its version
|
|
||||||
src_pkg = package_map.get((src_project, src_package))
|
|
||||||
if not src_pkg:
|
|
||||||
logger.warning(f"Source package not found: {src_project}/{src_package}")
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Find the artifact for this version
|
|
||||||
src_version_record = db.query(PackageVersion).filter(
|
|
||||||
PackageVersion.package_id == src_pkg.id,
|
|
||||||
PackageVersion.version == src_version,
|
|
||||||
).first()
|
|
||||||
|
|
||||||
if not src_version_record:
|
|
||||||
logger.warning(f"Source version not found: {src_project}/{src_package}@{src_version}")
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Create the dependency
|
|
||||||
dependency = ArtifactDependency(
|
|
||||||
artifact_id=src_version_record.artifact_id,
|
|
||||||
dependency_project=dep_project,
|
|
||||||
dependency_package=dep_package,
|
|
||||||
version_constraint=version_constraint,
|
|
||||||
tag_constraint=tag_constraint,
|
|
||||||
)
|
|
||||||
db.add(dependency)
|
|
||||||
dependency_count += 1
|
|
||||||
|
|
||||||
db.commit()
|
db.commit()
|
||||||
logger.info(f"Created {artifact_count} artifacts, {tag_count} tags, {version_count} versions, and {dependency_count} dependencies")
|
logger.info(f"Created {artifact_count} artifacts, {tag_count} tags, and {version_count} versions")
|
||||||
logger.info("Database seeding complete")
|
logger.info("Database seeding complete")
|
||||||
|
|||||||
@@ -1,565 +0,0 @@
|
|||||||
"""
|
|
||||||
HTTP client for fetching artifacts from upstream sources.
|
|
||||||
|
|
||||||
Provides streaming downloads with SHA256 computation, authentication support,
|
|
||||||
and automatic source matching based on URL prefixes.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import hashlib
|
|
||||||
import logging
|
|
||||||
import tempfile
|
|
||||||
import time
|
|
||||||
from dataclasses import dataclass, field
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import BinaryIO, Optional, TYPE_CHECKING
|
|
||||||
from urllib.parse import urlparse
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from .models import CacheSettings, UpstreamSource
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class UpstreamError(Exception):
|
|
||||||
"""Base exception for upstream client errors."""
|
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class UpstreamConnectionError(UpstreamError):
|
|
||||||
"""Connection to upstream failed (network error, DNS, etc.)."""
|
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class UpstreamTimeoutError(UpstreamError):
|
|
||||||
"""Request to upstream timed out."""
|
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class UpstreamHTTPError(UpstreamError):
|
|
||||||
"""Upstream returned an HTTP error response."""
|
|
||||||
|
|
||||||
def __init__(self, message: str, status_code: int, response_headers: dict = None):
|
|
||||||
super().__init__(message)
|
|
||||||
self.status_code = status_code
|
|
||||||
self.response_headers = response_headers or {}
|
|
||||||
|
|
||||||
|
|
||||||
class UpstreamSSLError(UpstreamError):
|
|
||||||
"""SSL/TLS error when connecting to upstream."""
|
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class FileSizeExceededError(UpstreamError):
|
|
||||||
"""File size exceeds the maximum allowed."""
|
|
||||||
|
|
||||||
def __init__(self, message: str, content_length: int, max_size: int):
|
|
||||||
super().__init__(message)
|
|
||||||
self.content_length = content_length
|
|
||||||
self.max_size = max_size
|
|
||||||
|
|
||||||
|
|
||||||
class SourceNotFoundError(UpstreamError):
|
|
||||||
"""No matching upstream source found for URL."""
|
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class SourceDisabledError(UpstreamError):
|
|
||||||
"""The matching upstream source is disabled."""
|
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class FetchResult:
|
|
||||||
"""Result of fetching an artifact from upstream."""
|
|
||||||
|
|
||||||
content: BinaryIO # File-like object with content
|
|
||||||
sha256: str # SHA256 hash of content
|
|
||||||
size: int # Size in bytes
|
|
||||||
content_type: Optional[str] # Content-Type header
|
|
||||||
response_headers: dict # All response headers for provenance
|
|
||||||
source_name: Optional[str] = None # Name of matched upstream source
|
|
||||||
temp_path: Optional[Path] = None # Path to temp file (for cleanup)
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
"""Close and clean up resources."""
|
|
||||||
if self.content:
|
|
||||||
try:
|
|
||||||
self.content.close()
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
if self.temp_path and self.temp_path.exists():
|
|
||||||
try:
|
|
||||||
self.temp_path.unlink()
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class UpstreamClientConfig:
|
|
||||||
"""Configuration for the upstream client."""
|
|
||||||
|
|
||||||
connect_timeout: float = 30.0 # Connection timeout in seconds
|
|
||||||
read_timeout: float = 300.0 # Read timeout in seconds (5 minutes for large files)
|
|
||||||
max_retries: int = 3 # Maximum number of retry attempts
|
|
||||||
retry_backoff_base: float = 1.0 # Base delay for exponential backoff
|
|
||||||
retry_backoff_max: float = 30.0 # Maximum delay between retries
|
|
||||||
follow_redirects: bool = True # Whether to follow redirects
|
|
||||||
max_redirects: int = 5 # Maximum number of redirects to follow
|
|
||||||
max_file_size: Optional[int] = None # Maximum file size (None = unlimited)
|
|
||||||
verify_ssl: bool = True # Verify SSL certificates
|
|
||||||
user_agent: str = "Orchard-UpstreamClient/1.0"
|
|
||||||
|
|
||||||
|
|
||||||
class UpstreamClient:
|
|
||||||
"""
|
|
||||||
HTTP client for fetching artifacts from upstream sources.
|
|
||||||
|
|
||||||
Supports streaming downloads, multiple authentication methods,
|
|
||||||
automatic source matching, and air-gap mode enforcement.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
sources: list[UpstreamSource] = None,
|
|
||||||
cache_settings: CacheSettings = None,
|
|
||||||
config: UpstreamClientConfig = None,
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Initialize the upstream client.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
sources: List of upstream sources for URL matching and auth.
|
|
||||||
Should be sorted by priority (lowest first).
|
|
||||||
cache_settings: Global cache settings including air-gap mode.
|
|
||||||
config: Client configuration options.
|
|
||||||
"""
|
|
||||||
self.sources = sources or []
|
|
||||||
self.cache_settings = cache_settings
|
|
||||||
self.config = config or UpstreamClientConfig()
|
|
||||||
|
|
||||||
# Sort sources by priority (lower = higher priority)
|
|
||||||
self.sources = sorted(self.sources, key=lambda s: s.priority)
|
|
||||||
|
|
||||||
def _match_source(self, url: str) -> Optional[UpstreamSource]:
|
|
||||||
"""
|
|
||||||
Find the upstream source that matches the given URL.
|
|
||||||
|
|
||||||
Matches by URL prefix, returns the highest priority match.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
url: The URL to match.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The matching UpstreamSource or None if no match.
|
|
||||||
"""
|
|
||||||
for source in self.sources:
|
|
||||||
# Check if URL starts with source URL (prefix match)
|
|
||||||
if url.startswith(source.url.rstrip("/")):
|
|
||||||
return source
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
def _build_auth_headers(self, source: UpstreamSource) -> dict:
|
|
||||||
"""
|
|
||||||
Build authentication headers for the given source.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
source: The upstream source with auth configuration.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Dictionary of headers to add to the request.
|
|
||||||
"""
|
|
||||||
headers = {}
|
|
||||||
|
|
||||||
if source.auth_type == "none":
|
|
||||||
pass
|
|
||||||
elif source.auth_type == "basic":
|
|
||||||
# httpx handles basic auth via auth parameter, but we can also
|
|
||||||
# do it manually if needed. We'll use the auth parameter instead.
|
|
||||||
pass
|
|
||||||
elif source.auth_type == "bearer":
|
|
||||||
password = source.get_password()
|
|
||||||
if password:
|
|
||||||
headers["Authorization"] = f"Bearer {password}"
|
|
||||||
elif source.auth_type == "api_key":
|
|
||||||
# API key auth uses custom headers
|
|
||||||
custom_headers = source.get_headers()
|
|
||||||
if custom_headers:
|
|
||||||
headers.update(custom_headers)
|
|
||||||
|
|
||||||
return headers
|
|
||||||
|
|
||||||
def _get_basic_auth(self, source: UpstreamSource) -> Optional[tuple[str, str]]:
|
|
||||||
"""
|
|
||||||
Get basic auth credentials if applicable.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
source: The upstream source.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Tuple of (username, password) or None.
|
|
||||||
"""
|
|
||||||
if source.auth_type == "basic" and source.username:
|
|
||||||
password = source.get_password() or ""
|
|
||||||
return (source.username, password)
|
|
||||||
return None
|
|
||||||
|
|
||||||
def _should_retry(self, error: Exception, attempt: int) -> bool:
|
|
||||||
"""
|
|
||||||
Determine if a request should be retried.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
error: The exception that occurred.
|
|
||||||
attempt: Current attempt number (0-indexed).
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
True if the request should be retried.
|
|
||||||
"""
|
|
||||||
if attempt >= self.config.max_retries - 1:
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Retry on connection errors and timeouts
|
|
||||||
if isinstance(error, (httpx.ConnectError, httpx.ConnectTimeout)):
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Retry on read timeouts
|
|
||||||
if isinstance(error, httpx.ReadTimeout):
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Retry on certain HTTP errors (502, 503, 504)
|
|
||||||
if isinstance(error, httpx.HTTPStatusError):
|
|
||||||
return error.response.status_code in (502, 503, 504)
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
def _calculate_backoff(self, attempt: int) -> float:
|
|
||||||
"""
|
|
||||||
Calculate backoff delay for retry.
|
|
||||||
|
|
||||||
Uses exponential backoff with jitter.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
attempt: Current attempt number (0-indexed).
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Delay in seconds.
|
|
||||||
"""
|
|
||||||
import random
|
|
||||||
|
|
||||||
delay = self.config.retry_backoff_base * (2**attempt)
|
|
||||||
# Add jitter (±25%)
|
|
||||||
delay *= 0.75 + random.random() * 0.5
|
|
||||||
return min(delay, self.config.retry_backoff_max)
|
|
||||||
|
|
||||||
def fetch(self, url: str, expected_hash: Optional[str] = None) -> FetchResult:
|
|
||||||
"""
|
|
||||||
Fetch an artifact from the given URL.
|
|
||||||
|
|
||||||
Streams the response to a temp file while computing the SHA256 hash.
|
|
||||||
Handles authentication, retries, and error cases.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
url: The URL to fetch.
|
|
||||||
expected_hash: Optional expected SHA256 hash for verification.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
FetchResult with content, hash, size, and headers.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
SourceDisabledError: If the matching source is disabled.
|
|
||||||
UpstreamConnectionError: On connection failures.
|
|
||||||
UpstreamTimeoutError: On timeout.
|
|
||||||
UpstreamHTTPError: On HTTP error responses.
|
|
||||||
UpstreamSSLError: On SSL/TLS errors.
|
|
||||||
FileSizeExceededError: If Content-Length exceeds max_file_size.
|
|
||||||
"""
|
|
||||||
start_time = time.time()
|
|
||||||
|
|
||||||
# Match URL to source
|
|
||||||
source = self._match_source(url)
|
|
||||||
|
|
||||||
# Check if source is enabled (if we have a match)
|
|
||||||
if source is not None and not source.enabled:
|
|
||||||
raise SourceDisabledError(
|
|
||||||
f"Upstream source '{source.name}' is disabled"
|
|
||||||
)
|
|
||||||
|
|
||||||
source_name = source.name if source else None
|
|
||||||
logger.info(
|
|
||||||
f"Fetching URL: {url} (source: {source_name or 'none'})"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Build request parameters
|
|
||||||
headers = {"User-Agent": self.config.user_agent}
|
|
||||||
auth = None
|
|
||||||
|
|
||||||
if source:
|
|
||||||
headers.update(self._build_auth_headers(source))
|
|
||||||
auth = self._get_basic_auth(source)
|
|
||||||
|
|
||||||
timeout = httpx.Timeout(
|
|
||||||
connect=self.config.connect_timeout,
|
|
||||||
read=self.config.read_timeout,
|
|
||||||
write=30.0,
|
|
||||||
pool=10.0,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Attempt fetch with retries
|
|
||||||
last_error = None
|
|
||||||
for attempt in range(self.config.max_retries):
|
|
||||||
try:
|
|
||||||
return self._do_fetch(
|
|
||||||
url=url,
|
|
||||||
headers=headers,
|
|
||||||
auth=auth,
|
|
||||||
timeout=timeout,
|
|
||||||
source_name=source_name,
|
|
||||||
start_time=start_time,
|
|
||||||
expected_hash=expected_hash,
|
|
||||||
)
|
|
||||||
except (
|
|
||||||
httpx.ConnectError,
|
|
||||||
httpx.ConnectTimeout,
|
|
||||||
httpx.ReadTimeout,
|
|
||||||
httpx.HTTPStatusError,
|
|
||||||
) as e:
|
|
||||||
last_error = e
|
|
||||||
if self._should_retry(e, attempt):
|
|
||||||
delay = self._calculate_backoff(attempt)
|
|
||||||
logger.warning(
|
|
||||||
f"Fetch failed (attempt {attempt + 1}/{self.config.max_retries}), "
|
|
||||||
f"retrying in {delay:.1f}s: {e}"
|
|
||||||
)
|
|
||||||
time.sleep(delay)
|
|
||||||
else:
|
|
||||||
break
|
|
||||||
|
|
||||||
# Convert final error to our exception types
|
|
||||||
self._raise_upstream_error(last_error, url)
|
|
||||||
|
|
||||||
def _do_fetch(
|
|
||||||
self,
|
|
||||||
url: str,
|
|
||||||
headers: dict,
|
|
||||||
auth: Optional[tuple[str, str]],
|
|
||||||
timeout: httpx.Timeout,
|
|
||||||
source_name: Optional[str],
|
|
||||||
start_time: float,
|
|
||||||
expected_hash: Optional[str] = None,
|
|
||||||
) -> FetchResult:
|
|
||||||
"""
|
|
||||||
Perform the actual fetch operation.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
url: URL to fetch.
|
|
||||||
headers: Request headers.
|
|
||||||
auth: Basic auth credentials or None.
|
|
||||||
timeout: Request timeout configuration.
|
|
||||||
source_name: Name of matched source for logging.
|
|
||||||
start_time: Request start time for timing.
|
|
||||||
expected_hash: Optional expected hash for verification.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
FetchResult with content and metadata.
|
|
||||||
"""
|
|
||||||
with httpx.Client(
|
|
||||||
timeout=timeout,
|
|
||||||
follow_redirects=self.config.follow_redirects,
|
|
||||||
max_redirects=self.config.max_redirects,
|
|
||||||
verify=self.config.verify_ssl,
|
|
||||||
) as client:
|
|
||||||
with client.stream("GET", url, headers=headers, auth=auth) as response:
|
|
||||||
# Check for HTTP errors
|
|
||||||
response.raise_for_status()
|
|
||||||
|
|
||||||
# Check Content-Length against max size
|
|
||||||
content_length = response.headers.get("content-length")
|
|
||||||
if content_length:
|
|
||||||
content_length = int(content_length)
|
|
||||||
if (
|
|
||||||
self.config.max_file_size
|
|
||||||
and content_length > self.config.max_file_size
|
|
||||||
):
|
|
||||||
raise FileSizeExceededError(
|
|
||||||
f"File size {content_length} exceeds maximum {self.config.max_file_size}",
|
|
||||||
content_length,
|
|
||||||
self.config.max_file_size,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Stream to temp file while computing hash
|
|
||||||
hasher = hashlib.sha256()
|
|
||||||
size = 0
|
|
||||||
|
|
||||||
# Create temp file
|
|
||||||
temp_file = tempfile.NamedTemporaryFile(
|
|
||||||
delete=False, prefix="orchard_upstream_"
|
|
||||||
)
|
|
||||||
temp_path = Path(temp_file.name)
|
|
||||||
|
|
||||||
try:
|
|
||||||
for chunk in response.iter_bytes(chunk_size=65536):
|
|
||||||
temp_file.write(chunk)
|
|
||||||
hasher.update(chunk)
|
|
||||||
size += len(chunk)
|
|
||||||
|
|
||||||
# Check size while streaming if max_file_size is set
|
|
||||||
if self.config.max_file_size and size > self.config.max_file_size:
|
|
||||||
temp_file.close()
|
|
||||||
temp_path.unlink()
|
|
||||||
raise FileSizeExceededError(
|
|
||||||
f"Downloaded size {size} exceeds maximum {self.config.max_file_size}",
|
|
||||||
size,
|
|
||||||
self.config.max_file_size,
|
|
||||||
)
|
|
||||||
|
|
||||||
temp_file.close()
|
|
||||||
|
|
||||||
sha256 = hasher.hexdigest()
|
|
||||||
|
|
||||||
# Verify hash if expected
|
|
||||||
if expected_hash and sha256 != expected_hash.lower():
|
|
||||||
temp_path.unlink()
|
|
||||||
raise UpstreamError(
|
|
||||||
f"Hash mismatch: expected {expected_hash}, got {sha256}"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Capture response headers
|
|
||||||
response_headers = dict(response.headers)
|
|
||||||
|
|
||||||
# Get content type
|
|
||||||
content_type = response.headers.get("content-type")
|
|
||||||
|
|
||||||
elapsed = time.time() - start_time
|
|
||||||
logger.info(
|
|
||||||
f"Fetched {url}: {size} bytes, sha256={sha256[:12]}..., "
|
|
||||||
f"source={source_name}, time={elapsed:.2f}s"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Return file handle positioned at start
|
|
||||||
content = open(temp_path, "rb")
|
|
||||||
|
|
||||||
return FetchResult(
|
|
||||||
content=content,
|
|
||||||
sha256=sha256,
|
|
||||||
size=size,
|
|
||||||
content_type=content_type,
|
|
||||||
response_headers=response_headers,
|
|
||||||
source_name=source_name,
|
|
||||||
temp_path=temp_path,
|
|
||||||
)
|
|
||||||
|
|
||||||
except Exception:
|
|
||||||
# Clean up on error
|
|
||||||
try:
|
|
||||||
temp_file.close()
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
if temp_path.exists():
|
|
||||||
temp_path.unlink()
|
|
||||||
raise
|
|
||||||
|
|
||||||
def _raise_upstream_error(self, error: Exception, url: str):
|
|
||||||
"""
|
|
||||||
Convert httpx exception to appropriate UpstreamError.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
error: The httpx exception.
|
|
||||||
url: The URL that was being fetched.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
Appropriate UpstreamError subclass.
|
|
||||||
"""
|
|
||||||
if error is None:
|
|
||||||
raise UpstreamError(f"Unknown error fetching {url}")
|
|
||||||
|
|
||||||
if isinstance(error, httpx.ConnectError):
|
|
||||||
raise UpstreamConnectionError(
|
|
||||||
f"Failed to connect to upstream: {error}"
|
|
||||||
) from error
|
|
||||||
|
|
||||||
if isinstance(error, (httpx.ConnectTimeout, httpx.ReadTimeout)):
|
|
||||||
raise UpstreamTimeoutError(
|
|
||||||
f"Request timed out: {error}"
|
|
||||||
) from error
|
|
||||||
|
|
||||||
if isinstance(error, httpx.HTTPStatusError):
|
|
||||||
raise UpstreamHTTPError(
|
|
||||||
f"HTTP {error.response.status_code}: {error}",
|
|
||||||
error.response.status_code,
|
|
||||||
dict(error.response.headers),
|
|
||||||
) from error
|
|
||||||
|
|
||||||
# Check for SSL errors in the error chain
|
|
||||||
if "ssl" in str(error).lower() or "certificate" in str(error).lower():
|
|
||||||
raise UpstreamSSLError(f"SSL/TLS error: {error}") from error
|
|
||||||
|
|
||||||
raise UpstreamError(f"Error fetching {url}: {error}") from error
|
|
||||||
|
|
||||||
def test_connection(self, source: UpstreamSource) -> tuple[bool, Optional[str], Optional[int]]:
|
|
||||||
"""
|
|
||||||
Test connectivity to an upstream source.
|
|
||||||
|
|
||||||
Performs a HEAD request to the source URL to verify connectivity
|
|
||||||
and authentication. Does not follow redirects - a 3xx response
|
|
||||||
is considered successful since it proves the server is reachable.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
source: The upstream source to test.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Tuple of (success, error_message, status_code).
|
|
||||||
"""
|
|
||||||
headers = {"User-Agent": self.config.user_agent}
|
|
||||||
headers.update(self._build_auth_headers(source))
|
|
||||||
auth = self._get_basic_auth(source)
|
|
||||||
|
|
||||||
timeout = httpx.Timeout(
|
|
||||||
connect=self.config.connect_timeout,
|
|
||||||
read=30.0,
|
|
||||||
write=30.0,
|
|
||||||
pool=10.0,
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
with httpx.Client(
|
|
||||||
timeout=timeout,
|
|
||||||
verify=self.config.verify_ssl,
|
|
||||||
) as client:
|
|
||||||
response = client.head(
|
|
||||||
source.url,
|
|
||||||
headers=headers,
|
|
||||||
auth=auth,
|
|
||||||
follow_redirects=False,
|
|
||||||
)
|
|
||||||
# Consider 2xx and 3xx as success, also 405 (Method Not Allowed)
|
|
||||||
# since some servers don't support HEAD
|
|
||||||
if response.status_code < 400 or response.status_code == 405:
|
|
||||||
return (True, None, response.status_code)
|
|
||||||
else:
|
|
||||||
return (
|
|
||||||
False,
|
|
||||||
f"HTTP {response.status_code}",
|
|
||||||
response.status_code,
|
|
||||||
)
|
|
||||||
except httpx.ConnectError as e:
|
|
||||||
return (False, f"Connection failed: {e}", None)
|
|
||||||
except httpx.ConnectTimeout as e:
|
|
||||||
return (False, f"Connection timed out: {e}", None)
|
|
||||||
except httpx.ReadTimeout as e:
|
|
||||||
return (False, f"Read timed out: {e}", None)
|
|
||||||
except httpx.TooManyRedirects as e:
|
|
||||||
return (False, f"Too many redirects: {e}", None)
|
|
||||||
except Exception as e:
|
|
||||||
return (False, f"Error: {e}", None)
|
|
||||||
@@ -11,10 +11,10 @@ python-jose[cryptography]==3.3.0
|
|||||||
passlib[bcrypt]==1.7.4
|
passlib[bcrypt]==1.7.4
|
||||||
bcrypt==4.0.1
|
bcrypt==4.0.1
|
||||||
slowapi==0.1.9
|
slowapi==0.1.9
|
||||||
httpx>=0.25.0
|
|
||||||
|
|
||||||
# Test dependencies
|
# Test dependencies
|
||||||
pytest>=7.4.0
|
pytest>=7.4.0
|
||||||
pytest-asyncio>=0.21.0
|
pytest-asyncio>=0.21.0
|
||||||
pytest-cov>=4.1.0
|
pytest-cov>=4.1.0
|
||||||
|
httpx>=0.25.0
|
||||||
moto[s3]>=4.2.0
|
moto[s3]>=4.2.0
|
||||||
|
|||||||
@@ -56,26 +56,6 @@ os.environ.setdefault("ORCHARD_S3_BUCKET", "test-bucket")
|
|||||||
os.environ.setdefault("ORCHARD_S3_ACCESS_KEY_ID", "test")
|
os.environ.setdefault("ORCHARD_S3_ACCESS_KEY_ID", "test")
|
||||||
os.environ.setdefault("ORCHARD_S3_SECRET_ACCESS_KEY", "test")
|
os.environ.setdefault("ORCHARD_S3_SECRET_ACCESS_KEY", "test")
|
||||||
|
|
||||||
|
|
||||||
# =============================================================================
|
|
||||||
# Admin Credentials Helper
|
|
||||||
# =============================================================================
|
|
||||||
|
|
||||||
|
|
||||||
def get_admin_password() -> str:
|
|
||||||
"""Get the admin password for test authentication.
|
|
||||||
|
|
||||||
Returns the password from ORCHARD_TEST_PASSWORD environment variable,
|
|
||||||
or 'changeme123' as the default for local development.
|
|
||||||
"""
|
|
||||||
return os.environ.get("ORCHARD_TEST_PASSWORD", "changeme123")
|
|
||||||
|
|
||||||
|
|
||||||
def get_admin_username() -> str:
|
|
||||||
"""Get the admin username for test authentication."""
|
|
||||||
return os.environ.get("ORCHARD_TEST_USERNAME", "admin")
|
|
||||||
|
|
||||||
|
|
||||||
# Re-export factory functions for backward compatibility
|
# Re-export factory functions for backward compatibility
|
||||||
from tests.factories import (
|
from tests.factories import (
|
||||||
create_test_file,
|
create_test_file,
|
||||||
|
|||||||
@@ -8,8 +8,6 @@ allow these tests to run. Production uses strict rate limits (5/minute).
|
|||||||
import pytest
|
import pytest
|
||||||
from uuid import uuid4
|
from uuid import uuid4
|
||||||
|
|
||||||
from tests.conftest import get_admin_password, get_admin_username
|
|
||||||
|
|
||||||
|
|
||||||
# Mark all tests in this module as auth_intensive (informational, not excluded from CI)
|
# Mark all tests in this module as auth_intensive (informational, not excluded from CI)
pytestmark = pytest.mark.auth_intensive

@@ -23,11 +21,11 @@ class TestAuthLogin:
        """Test successful login with default admin credentials."""
        response = auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        assert response.status_code == 200
        data = response.json()
-       assert data["username"] == get_admin_username()
+       assert data["username"] == "admin"
        assert data["is_admin"] is True
        assert "orchard_session" in response.cookies

@@ -36,7 +34,7 @@ class TestAuthLogin:
        """Test login with wrong password."""
        response = auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": "wrongpassword"},
+           json={"username": "admin", "password": "wrongpassword"},
        )
        assert response.status_code == 401
        assert "Invalid username or password" in response.json()["detail"]
@@ -60,7 +58,7 @@ class TestAuthLogout:
        # First login
        login_response = auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        assert login_response.status_code == 200

@@ -86,13 +84,13 @@ class TestAuthMe:
        # Login first
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )

        response = auth_client.get("/api/v1/auth/me")
        assert response.status_code == 200
        data = response.json()
-       assert data["username"] == get_admin_username()
+       assert data["username"] == "admin"
        assert data["is_admin"] is True
        assert "id" in data
        assert "created_at" in data
@@ -121,7 +119,7 @@ class TestAuthChangePassword:
        # Login as admin to create a test user
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        test_username = f"pwchange_{uuid4().hex[:8]}"
        auth_client.post(
@@ -164,7 +162,7 @@ class TestAuthChangePassword:
        # Login as admin to create a test user
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        test_username = f"pwwrong_{uuid4().hex[:8]}"
        auth_client.post(
@@ -196,7 +194,7 @@ class TestAPIKeys:
        # Login first
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )

        # Create API key
@@ -228,7 +226,7 @@ class TestAPIKeys:
        # Login and create API key
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        create_response = auth_client.post(
            "/api/v1/auth/keys",
@@ -244,12 +242,12 @@ class TestAPIKeys:
            headers={"Authorization": f"Bearer {api_key}"},
        )
        assert response.status_code == 200
-       assert response.json()["username"] == get_admin_username()
+       assert response.json()["username"] == "admin"

        # Clean up
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        auth_client.delete(f"/api/v1/auth/keys/{key_id}")

@@ -259,7 +257,7 @@ class TestAPIKeys:
        # Login and create API key
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        create_response = auth_client.post(
            "/api/v1/auth/keys",
@@ -290,14 +288,14 @@ class TestAdminUserManagement:
        # Login as admin
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )

        response = auth_client.get("/api/v1/admin/users")
        assert response.status_code == 200
        users = response.json()
        assert len(users) >= 1
-       assert any(u["username"] == get_admin_username() for u in users)
+       assert any(u["username"] == "admin" for u in users)

    @pytest.mark.integration
    def test_create_user(self, auth_client):
@@ -305,7 +303,7 @@ class TestAdminUserManagement:
        # Login as admin
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )

        # Create new user
@@ -338,7 +336,7 @@ class TestAdminUserManagement:
        # Login as admin
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )

        # Create a test user
@@ -364,7 +362,7 @@ class TestAdminUserManagement:
        # Login as admin
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )

        # Create a test user
@@ -395,7 +393,7 @@ class TestAdminUserManagement:
        # Login as admin and create non-admin user
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        test_username = f"nonadmin_{uuid4().hex[:8]}"
        auth_client.post(
@@ -425,7 +423,7 @@ class TestSecurityEdgeCases:
        # Login as admin and create a user
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        test_username = f"inactive_{uuid4().hex[:8]}"
        auth_client.post(
@@ -453,7 +451,7 @@ class TestSecurityEdgeCases:
        """Test that short passwords are rejected when creating users."""
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )

        response = auth_client.post(
@@ -469,7 +467,7 @@ class TestSecurityEdgeCases:
        # Create test user
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        test_username = f"shortchange_{uuid4().hex[:8]}"
        auth_client.post(
@@ -496,7 +494,7 @@ class TestSecurityEdgeCases:
        """Test that short passwords are rejected when resetting password."""
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )

        # Create a test user first
@@ -518,7 +516,7 @@ class TestSecurityEdgeCases:
        """Test that duplicate usernames are rejected."""
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )

        test_username = f"duplicate_{uuid4().hex[:8]}"
@@ -543,7 +541,7 @@ class TestSecurityEdgeCases:
        # Login as admin and create an API key
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        create_response = auth_client.post(
            "/api/v1/auth/keys",
@@ -574,7 +572,7 @@ class TestSecurityEdgeCases:
        auth_client.cookies.clear()
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        auth_client.delete(f"/api/v1/auth/keys/{admin_key_id}")

@@ -584,7 +582,7 @@ class TestSecurityEdgeCases:
        # Create a test user
        auth_client.post(
            "/api/v1/auth/login",
-           json={"username": get_admin_username(), "password": get_admin_password()},
+           json={"username": "admin", "password": "changeme123"},
        )
        test_username = f"sessiontest_{uuid4().hex[:8]}"
        auth_client.post(
@@ -1,93 +0,0 @@
"""Integration tests for PyPI transparent proxy."""

import os

import pytest

import httpx


def get_base_url():
    """Get the base URL for the Orchard server from environment."""
    return os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")


class TestPyPIProxyEndpoints:
    """Tests for PyPI proxy endpoints.

    These endpoints are public (no auth required) since pip needs to use them.
    """

    @pytest.mark.integration
    def test_pypi_simple_index_no_sources(self):
        """Test that /pypi/simple/ returns 503 when no sources configured."""
        with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
            response = client.get("/pypi/simple/")
            # Should return 503 when no PyPI upstream sources are configured
            assert response.status_code == 503
            assert "No PyPI upstream sources configured" in response.json()["detail"]

    @pytest.mark.integration
    def test_pypi_package_no_sources(self):
        """Test that /pypi/simple/{package}/ returns 503 when no sources configured."""
        with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
            response = client.get("/pypi/simple/requests/")
            assert response.status_code == 503
            assert "No PyPI upstream sources configured" in response.json()["detail"]

    @pytest.mark.integration
    def test_pypi_download_missing_upstream_param(self):
        """Test that /pypi/simple/{package}/{filename} requires upstream param."""
        with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
            response = client.get("/pypi/simple/requests/requests-2.31.0.tar.gz")
            assert response.status_code == 400
            assert "upstream" in response.json()["detail"].lower()


class TestPyPILinkRewriting:
    """Tests for URL rewriting in PyPI proxy responses."""

    def test_rewrite_package_links(self):
        """Test that download links are rewritten to go through proxy."""
        from app.pypi_proxy import _rewrite_package_links

        html = '''
        <html>
        <body>
        <a href="https://files.pythonhosted.org/packages/ab/cd/requests-2.31.0.tar.gz#sha256=abc123">requests-2.31.0.tar.gz</a>
        <a href="https://files.pythonhosted.org/packages/ef/gh/requests-2.31.0-py3-none-any.whl#sha256=def456">requests-2.31.0-py3-none-any.whl</a>
        </body>
        </html>
        '''

        result = _rewrite_package_links(html, "http://localhost:8080", "requests")

        # Links should be rewritten to go through our proxy
        assert "/pypi/simple/requests/requests-2.31.0.tar.gz?upstream=" in result
        assert "/pypi/simple/requests/requests-2.31.0-py3-none-any.whl?upstream=" in result
        # Original URLs should be encoded in upstream param
        assert "files.pythonhosted.org" in result
        # Hash fragments should be preserved
        assert "#sha256=abc123" in result
        assert "#sha256=def456" in result


class TestPyPIPackageNormalization:
    """Tests for PyPI package name normalization."""

    @pytest.mark.integration
    def test_package_name_normalized(self):
        """Test that package names are normalized per PEP 503."""
        # These should all be treated the same:
        # requests, Requests, requests_, requests-
        # The endpoint normalizes to lowercase with hyphens

        with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
            # Without upstream sources, we get 503, but the normalization
            # happens before the source lookup
            response = client.get("/pypi/simple/Requests/")
            assert response.status_code == 503  # No sources, but path was valid

            response = client.get("/pypi/simple/some_package/")
            assert response.status_code == 503

            response = client.get("/pypi/simple/some-package/")
            assert response.status_code == 503
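The normalization those tests describe is the PEP 503 rule: runs of `-`, `_`, and `.` are equivalent and collapse to a single hyphen, and the name is lowercased. A minimal sketch of that rule (the helper name is illustrative, not necessarily what `app.pypi_proxy` calls it):

```python
import re

def normalize_package_name(name: str) -> str:
    """Normalize a PyPI package name per PEP 503."""
    # Runs of -, _ and . are interchangeable and collapse to one hyphen.
    return re.sub(r"[-_.]+", "-", name).lower()

assert normalize_package_name("Requests") == "requests"
assert normalize_package_name("some_package") == "some-package"
assert normalize_package_name("Zope.Interface") == "zope-interface"
```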
@@ -1,316 +0,0 @@
|
|||||||
"""
|
|
||||||
Integration tests for Teams API endpoints.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.integration
|
|
||||||
class TestTeamsCRUD:
|
|
||||||
"""Tests for team creation, listing, updating, and deletion."""
|
|
||||||
|
|
||||||
def test_create_team(self, integration_client, unique_test_id):
|
|
||||||
"""Test creating a new team."""
|
|
||||||
team_name = f"Test Team {unique_test_id}"
|
|
||||||
team_slug = f"test-team-{unique_test_id}"
|
|
||||||
|
|
||||||
response = integration_client.post(
|
|
||||||
"/api/v1/teams",
|
|
||||||
json={
|
|
||||||
"name": team_name,
|
|
||||||
"slug": team_slug,
|
|
||||||
"description": "A test team",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
assert response.status_code == 201, f"Failed to create team: {response.text}"
|
|
||||||
|
|
||||||
data = response.json()
|
|
||||||
assert data["name"] == team_name
|
|
||||||
assert data["slug"] == team_slug
|
|
||||||
assert data["description"] == "A test team"
|
|
||||||
assert data["user_role"] == "owner"
|
|
||||||
assert data["member_count"] == 1
|
|
||||||
assert data["project_count"] == 0
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
integration_client.delete(f"/api/v1/teams/{team_slug}")
|
|
||||||
|
|
||||||
def test_create_team_duplicate_slug(self, integration_client, unique_test_id):
|
|
||||||
"""Test that duplicate team slugs are rejected."""
|
|
||||||
team_slug = f"dup-team-{unique_test_id}"
|
|
||||||
|
|
||||||
# Create first team
|
|
||||||
response = integration_client.post(
|
|
||||||
"/api/v1/teams",
|
|
||||||
json={"name": "First Team", "slug": team_slug},
|
|
||||||
)
|
|
||||||
assert response.status_code == 201
|
|
||||||
|
|
||||||
# Try to create second team with same slug
|
|
||||||
response = integration_client.post(
|
|
||||||
"/api/v1/teams",
|
|
||||||
json={"name": "Second Team", "slug": team_slug},
|
|
||||||
)
|
|
||||||
assert response.status_code == 400
|
|
||||||
assert "already exists" in response.json()["detail"].lower()
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
integration_client.delete(f"/api/v1/teams/{team_slug}")
|
|
||||||
|
|
||||||
def test_create_team_invalid_slug(self, integration_client):
|
|
||||||
"""Test that invalid team slugs are rejected."""
|
|
||||||
invalid_slugs = [
|
|
||||||
"UPPERCASE",
|
|
||||||
"with spaces",
|
|
||||||
"-starts-with-hyphen",
|
|
||||||
"ends-with-hyphen-",
|
|
||||||
"has--double--hyphen",
|
|
||||||
]
|
|
||||||
|
|
||||||
for invalid_slug in invalid_slugs:
|
|
||||||
response = integration_client.post(
|
|
||||||
"/api/v1/teams",
|
|
||||||
json={"name": "Test", "slug": invalid_slug},
|
|
||||||
)
|
|
||||||
assert response.status_code == 422, f"Slug '{invalid_slug}' should be invalid"
|
|
||||||
|
|
||||||
def test_list_teams(self, integration_client, unique_test_id):
|
|
||||||
"""Test listing teams the user belongs to."""
|
|
||||||
# Create a team
|
|
||||||
team_slug = f"list-team-{unique_test_id}"
|
|
||||||
integration_client.post(
|
|
||||||
"/api/v1/teams",
|
|
||||||
json={"name": "List Test Team", "slug": team_slug},
|
|
||||||
)
|
|
||||||
|
|
||||||
# List teams
|
|
||||||
response = integration_client.get("/api/v1/teams")
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
data = response.json()
|
|
||||||
assert "items" in data
|
|
||||||
assert "pagination" in data
|
|
||||||
|
|
||||||
# Find our team
|
|
||||||
team = next((t for t in data["items"] if t["slug"] == team_slug), None)
|
|
||||||
assert team is not None
|
|
||||||
assert team["name"] == "List Test Team"
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
integration_client.delete(f"/api/v1/teams/{team_slug}")
|
|
||||||
|
|
||||||
def test_get_team(self, integration_client, unique_test_id):
|
|
||||||
"""Test getting team details."""
|
|
||||||
team_slug = f"get-team-{unique_test_id}"
|
|
||||||
integration_client.post(
|
|
||||||
"/api/v1/teams",
|
|
||||||
json={"name": "Get Test Team", "slug": team_slug, "description": "Test"},
|
|
||||||
)
|
|
||||||
|
|
||||||
response = integration_client.get(f"/api/v1/teams/{team_slug}")
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
data = response.json()
|
|
||||||
assert data["slug"] == team_slug
|
|
||||||
assert data["name"] == "Get Test Team"
|
|
||||||
assert data["user_role"] == "owner"
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
integration_client.delete(f"/api/v1/teams/{team_slug}")
|
|
||||||
|
|
||||||
def test_get_nonexistent_team(self, integration_client):
|
|
||||||
"""Test getting a team that doesn't exist."""
|
|
||||||
response = integration_client.get("/api/v1/teams/nonexistent-team-12345")
|
|
||||||
assert response.status_code == 404
|
|
||||||
|
|
||||||
def test_update_team(self, integration_client, unique_test_id):
|
|
||||||
"""Test updating team details."""
|
|
||||||
team_slug = f"update-team-{unique_test_id}"
|
|
||||||
integration_client.post(
|
|
||||||
"/api/v1/teams",
|
|
||||||
json={"name": "Original Name", "slug": team_slug},
|
|
||||||
)
|
|
||||||
|
|
||||||
response = integration_client.put(
|
|
||||||
f"/api/v1/teams/{team_slug}",
|
|
||||||
json={"name": "Updated Name", "description": "New description"},
|
|
||||||
)
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
data = response.json()
|
|
||||||
assert data["name"] == "Updated Name"
|
|
||||||
assert data["description"] == "New description"
|
|
||||||
assert data["slug"] == team_slug # Slug should not change
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
integration_client.delete(f"/api/v1/teams/{team_slug}")
|
|
||||||
|
|
||||||
def test_delete_team(self, integration_client, unique_test_id):
|
|
||||||
"""Test deleting a team."""
|
|
||||||
team_slug = f"delete-team-{unique_test_id}"
|
|
||||||
integration_client.post(
|
|
||||||
"/api/v1/teams",
|
|
||||||
json={"name": "Delete Test Team", "slug": team_slug},
|
|
||||||
)
|
|
||||||
|
|
||||||
response = integration_client.delete(f"/api/v1/teams/{team_slug}")
|
|
||||||
assert response.status_code == 204
|
|
||||||
|
|
||||||
# Verify team is gone
|
|
||||||
response = integration_client.get(f"/api/v1/teams/{team_slug}")
|
|
||||||
assert response.status_code == 404
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.integration
|
|
||||||
class TestTeamMembers:
|
|
||||||
"""Tests for team membership management."""
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def test_team(self, integration_client, unique_test_id):
|
|
||||||
"""Create a test team for member tests."""
|
|
||||||
team_slug = f"member-team-{unique_test_id}"
|
|
||||||
response = integration_client.post(
|
|
||||||
"/api/v1/teams",
|
|
||||||
json={"name": "Member Test Team", "slug": team_slug},
|
|
||||||
)
|
|
||||||
assert response.status_code == 201
|
|
||||||
|
|
||||||
yield team_slug
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
try:
|
|
||||||
integration_client.delete(f"/api/v1/teams/{team_slug}")
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def test_list_members(self, integration_client, test_team):
|
|
||||||
"""Test listing team members."""
|
|
||||||
response = integration_client.get(f"/api/v1/teams/{test_team}/members")
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
members = response.json()
|
|
||||||
assert len(members) == 1
|
|
||||||
assert members[0]["role"] == "owner"
|
|
||||||
|
|
||||||
def test_owner_is_first_member(self, integration_client, test_team):
|
|
||||||
"""Test that the team creator is automatically the owner."""
|
|
||||||
response = integration_client.get(f"/api/v1/teams/{test_team}/members")
|
|
||||||
members = response.json()
|
|
||||||
|
|
||||||
assert len(members) >= 1
|
|
||||||
owner = next((m for m in members if m["role"] == "owner"), None)
|
|
||||||
assert owner is not None
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.integration
|
|
||||||
class TestTeamProjects:
|
|
||||||
"""Tests for team project management."""
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def test_team(self, integration_client, unique_test_id):
|
|
||||||
"""Create a test team for project tests."""
|
|
||||||
team_slug = f"proj-team-{unique_test_id}"
|
|
||||||
response = integration_client.post(
|
|
||||||
"/api/v1/teams",
|
|
||||||
json={"name": "Project Test Team", "slug": team_slug},
|
|
||||||
)
|
|
||||||
assert response.status_code == 201
|
|
||||||
|
|
||||||
data = response.json()
|
|
||||||
yield {"slug": team_slug, "id": data["id"]}
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
try:
|
|
||||||
integration_client.delete(f"/api/v1/teams/{team_slug}")
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def test_list_team_projects_empty(self, integration_client, test_team):
|
|
||||||
"""Test listing projects in an empty team."""
|
|
||||||
response = integration_client.get(f"/api/v1/teams/{test_team['slug']}/projects")
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
data = response.json()
|
|
||||||
assert data["items"] == []
|
|
||||||
assert data["pagination"]["total"] == 0
|
|
||||||
|
|
||||||
def test_create_project_in_team(self, integration_client, test_team, unique_test_id):
|
|
||||||
"""Test creating a project within a team."""
|
|
||||||
project_name = f"team-project-{unique_test_id}"
|
|
||||||
|
|
||||||
response = integration_client.post(
|
|
||||||
"/api/v1/projects",
|
|
||||||
json={
|
|
||||||
"name": project_name,
|
|
||||||
"description": "A team project",
|
|
||||||
"team_id": test_team["id"],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
assert response.status_code == 200, f"Failed to create project: {response.text}"
|
|
||||||
|
|
||||||
data = response.json()
|
|
||||||
assert data["team_id"] == test_team["id"]
|
|
||||||
assert data["team_slug"] == test_team["slug"]
|
|
||||||
|
|
||||||
# Verify project appears in team projects list
|
|
||||||
response = integration_client.get(f"/api/v1/teams/{test_team['slug']}/projects")
|
|
||||||
assert response.status_code == 200
|
|
||||||
projects = response.json()["items"]
|
|
||||||
assert any(p["name"] == project_name for p in projects)
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
integration_client.delete(f"/api/v1/projects/{project_name}")
|
|
||||||
|
|
||||||
def test_project_team_info_in_response(self, integration_client, test_team, unique_test_id):
|
|
||||||
"""Test that project responses include team info."""
|
|
||||||
project_name = f"team-info-project-{unique_test_id}"
|
|
||||||
|
|
||||||
# Create project in team
|
|
||||||
integration_client.post(
|
|
||||||
"/api/v1/projects",
|
|
||||||
json={"name": project_name, "team_id": test_team["id"]},
|
|
||||||
)
|
|
||||||
|
|
||||||
# Get project and verify team info
|
|
||||||
response = integration_client.get(f"/api/v1/projects/{project_name}")
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
data = response.json()
|
|
||||||
assert data["team_id"] == test_team["id"]
|
|
||||||
assert data["team_slug"] == test_team["slug"]
|
|
||||||
assert data["team_name"] == "Project Test Team"
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
integration_client.delete(f"/api/v1/projects/{project_name}")
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.integration
|
|
||||||
class TestTeamAuthorization:
|
|
||||||
"""Tests for team-based authorization."""
|
|
||||||
|
|
||||||
def test_cannot_delete_team_with_projects(self, integration_client, unique_test_id):
|
|
||||||
"""Test that teams with projects cannot be deleted."""
|
|
||||||
team_slug = f"nodelete-team-{unique_test_id}"
|
|
||||||
project_name = f"nodelete-project-{unique_test_id}"
|
|
||||||
|
|
||||||
# Create team
|
|
||||||
response = integration_client.post(
|
|
||||||
"/api/v1/teams",
|
|
||||||
json={"name": "No Delete Team", "slug": team_slug},
|
|
||||||
)
|
|
||||||
team_id = response.json()["id"]
|
|
||||||
|
|
||||||
# Create project in team
|
|
||||||
integration_client.post(
|
|
||||||
"/api/v1/projects",
|
|
||||||
json={"name": project_name, "team_id": team_id},
|
|
||||||
)
|
|
||||||
|
|
||||||
# Try to delete team - should fail
|
|
||||||
response = integration_client.delete(f"/api/v1/teams/{team_slug}")
|
|
||||||
assert response.status_code == 400
|
|
||||||
assert "project" in response.json()["detail"].lower()
|
|
||||||
|
|
||||||
# Cleanup - delete project first, then team
|
|
||||||
integration_client.delete(f"/api/v1/projects/{project_name}")
|
|
||||||
integration_client.delete(f"/api/v1/teams/{team_slug}")
|
|
||||||
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,95 +0,0 @@
"""Unit tests for authentication module."""

import pytest
from unittest.mock import patch, MagicMock


class TestCreateDefaultAdmin:
    """Tests for the create_default_admin function."""

    def test_create_default_admin_with_env_password(self):
        """Test that ORCHARD_ADMIN_PASSWORD env var sets admin password."""
        from app.auth import create_default_admin, verify_password

        # Create mock settings with custom password
        mock_settings = MagicMock()
        mock_settings.admin_password = "my-custom-password-123"

        # Mock database session
        mock_db = MagicMock()
        mock_db.query.return_value.count.return_value = 0  # No existing users

        # Track the user that gets created
        created_user = None

        def capture_user(user):
            nonlocal created_user
            created_user = user

        mock_db.add.side_effect = capture_user

        with patch("app.auth.get_settings", return_value=mock_settings):
            admin = create_default_admin(mock_db)

        # Verify the user was created
        assert mock_db.add.called
        assert created_user is not None
        assert created_user.username == "admin"
        assert created_user.is_admin is True
        # Password should NOT require change when set via env var
        assert created_user.must_change_password is False
        # Verify password was hashed correctly
        assert verify_password("my-custom-password-123", created_user.password_hash)

    def test_create_default_admin_with_default_password(self):
        """Test that default password 'changeme123' is used when env var not set."""
        from app.auth import create_default_admin, verify_password

        # Create mock settings with empty password (default)
        mock_settings = MagicMock()
        mock_settings.admin_password = ""

        # Mock database session
        mock_db = MagicMock()
        mock_db.query.return_value.count.return_value = 0  # No existing users

        # Track the user that gets created
        created_user = None

        def capture_user(user):
            nonlocal created_user
            created_user = user

        mock_db.add.side_effect = capture_user

        with patch("app.auth.get_settings", return_value=mock_settings):
            admin = create_default_admin(mock_db)

        # Verify the user was created
        assert mock_db.add.called
        assert created_user is not None
        assert created_user.username == "admin"
        assert created_user.is_admin is True
        # Password SHOULD require change when using default
        assert created_user.must_change_password is True
        # Verify default password was used
        assert verify_password("changeme123", created_user.password_hash)

    def test_create_default_admin_skips_when_users_exist(self):
        """Test that no admin is created when users already exist."""
        from app.auth import create_default_admin

        # Create mock settings
        mock_settings = MagicMock()
        mock_settings.admin_password = "some-password"

        # Mock database session with existing users
        mock_db = MagicMock()
        mock_db.query.return_value.count.return_value = 1  # Users exist

        with patch("app.auth.get_settings", return_value=mock_settings):
            result = create_default_admin(mock_db)

        # Should return None and not create any user
        assert result is None
        assert not mock_db.add.called
@@ -1,213 +0,0 @@
|
|||||||
"""
|
|
||||||
Unit tests for TeamAuthorizationService.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from unittest.mock import MagicMock, patch
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
|
|
||||||
class TestTeamRoleHierarchy:
|
|
||||||
"""Tests for team role hierarchy functions."""
|
|
||||||
|
|
||||||
def test_get_team_role_rank(self):
|
|
||||||
"""Test role ranking."""
|
|
||||||
from app.auth import get_team_role_rank
|
|
||||||
|
|
||||||
assert get_team_role_rank("member") == 0
|
|
||||||
assert get_team_role_rank("admin") == 1
|
|
||||||
assert get_team_role_rank("owner") == 2
|
|
||||||
assert get_team_role_rank("invalid") == -1
|
|
||||||
|
|
||||||
def test_has_sufficient_team_role(self):
|
|
||||||
"""Test role sufficiency checks."""
|
|
||||||
from app.auth import has_sufficient_team_role
|
|
||||||
|
|
||||||
# Same role should be sufficient
|
|
||||||
assert has_sufficient_team_role("member", "member") is True
|
|
||||||
assert has_sufficient_team_role("admin", "admin") is True
|
|
||||||
assert has_sufficient_team_role("owner", "owner") is True
|
|
||||||
|
|
||||||
# Higher role should be sufficient for lower requirements
|
|
||||||
assert has_sufficient_team_role("admin", "member") is True
|
|
||||||
assert has_sufficient_team_role("owner", "member") is True
|
|
||||||
assert has_sufficient_team_role("owner", "admin") is True
|
|
||||||
|
|
||||||
# Lower role should NOT be sufficient for higher requirements
|
|
||||||
assert has_sufficient_team_role("member", "admin") is False
|
|
||||||
assert has_sufficient_team_role("member", "owner") is False
|
|
||||||
assert has_sufficient_team_role("admin", "owner") is False
|
|
||||||
|
|
||||||
|
|
||||||
class TestTeamAuthorizationService:
|
|
||||||
"""Tests for TeamAuthorizationService class."""
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def mock_db(self):
|
|
||||||
"""Create a mock database session."""
|
|
||||||
return MagicMock()
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def mock_user(self):
|
|
||||||
"""Create a mock user."""
|
|
||||||
user = MagicMock()
|
|
||||||
user.id = uuid.uuid4()
|
|
||||||
user.username = "testuser"
|
|
||||||
user.is_admin = False
|
|
||||||
return user
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def mock_admin_user(self):
|
|
||||||
"""Create a mock admin user."""
|
|
||||||
user = MagicMock()
|
|
||||||
user.id = uuid.uuid4()
|
|
||||||
user.username = "adminuser"
|
|
||||||
user.is_admin = True
|
|
||||||
return user
|
|
||||||
|
|
||||||
def test_get_user_team_role_no_user(self, mock_db):
|
|
||||||
"""Test that None is returned for anonymous users."""
|
|
||||||
from app.auth import TeamAuthorizationService
|
|
||||||
|
|
||||||
service = TeamAuthorizationService(mock_db)
|
|
||||||
result = service.get_user_team_role("team-id", None)
|
|
||||||
assert result is None
|
|
||||||
|
|
||||||
def test_get_user_team_role_admin_user(self, mock_db, mock_admin_user):
|
|
||||||
"""Test that system admins who are not members get admin role."""
|
|
||||||
from app.auth import TeamAuthorizationService
|
|
||||||
|
|
||||||
# Mock no membership found
|
|
||||||
mock_db.query.return_value.filter.return_value.first.return_value = None
|
|
||||||
|
|
||||||
service = TeamAuthorizationService(mock_db)
|
|
||||||
result = service.get_user_team_role("team-id", mock_admin_user)
|
|
||||||
assert result == "admin"
|
|
||||||
|
|
||||||
def test_get_user_team_role_member(self, mock_db, mock_user):
|
|
||||||
"""Test getting role for a team member."""
|
|
||||||
from app.auth import TeamAuthorizationService
|
|
||||||
|
|
||||||
# Mock the membership query
|
|
||||||
mock_membership = MagicMock()
|
|
||||||
mock_membership.role = "member"
|
|
||||||
mock_db.query.return_value.filter.return_value.first.return_value = mock_membership
|
|
||||||
|
|
||||||
service = TeamAuthorizationService(mock_db)
|
|
||||||
result = service.get_user_team_role("team-id", mock_user)
|
|
||||||
assert result == "member"
|
|
||||||
|
|
||||||
def test_get_user_team_role_not_member(self, mock_db, mock_user):
|
|
||||||
"""Test getting role for a non-member."""
|
|
||||||
from app.auth import TeamAuthorizationService
|
|
||||||
|
|
||||||
# Mock no membership found
|
|
||||||
mock_db.query.return_value.filter.return_value.first.return_value = None
|
|
||||||
|
|
||||||
service = TeamAuthorizationService(mock_db)
|
|
||||||
result = service.get_user_team_role("team-id", mock_user)
|
|
||||||
assert result is None
|
|
||||||
|
|
||||||
def test_check_team_access_member(self, mock_db, mock_user):
|
|
||||||
"""Test access check for member requiring member role."""
|
|
||||||
from app.auth import TeamAuthorizationService
|
|
||||||
|
|
||||||
# Mock the membership query
|
|
||||||
mock_membership = MagicMock()
|
|
||||||
mock_membership.role = "member"
|
|
||||||
mock_db.query.return_value.filter.return_value.first.return_value = mock_membership
|
|
||||||
|
|
||||||
service = TeamAuthorizationService(mock_db)
|
|
||||||
|
|
||||||
# Member should have member access
|
|
||||||
assert service.check_team_access("team-id", mock_user, "member") is True
|
|
||||||
# Member should not have admin access
|
|
||||||
assert service.check_team_access("team-id", mock_user, "admin") is False
|
|
||||||
# Member should not have owner access
|
|
||||||
assert service.check_team_access("team-id", mock_user, "owner") is False
|
|
||||||
|
|
||||||
def test_check_team_access_admin(self, mock_db, mock_user):
|
|
||||||
"""Test access check for admin role."""
|
|
||||||
from app.auth import TeamAuthorizationService
|
|
||||||
|
|
||||||
# Mock admin membership
|
|
||||||
mock_membership = MagicMock()
|
|
||||||
mock_membership.role = "admin"
|
|
||||||
mock_db.query.return_value.filter.return_value.first.return_value = mock_membership
|
|
||||||
|
|
||||||
service = TeamAuthorizationService(mock_db)
|
|
||||||
|
|
||||||
assert service.check_team_access("team-id", mock_user, "member") is True
|
|
||||||
assert service.check_team_access("team-id", mock_user, "admin") is True
|
|
||||||
assert service.check_team_access("team-id", mock_user, "owner") is False
|
|
||||||
|
|
||||||
def test_check_team_access_owner(self, mock_db, mock_user):
|
|
||||||
"""Test access check for owner role."""
|
|
||||||
from app.auth import TeamAuthorizationService
|
|
||||||
|
|
||||||
# Mock owner membership
|
|
||||||
mock_membership = MagicMock()
|
|
||||||
mock_membership.role = "owner"
|
|
||||||
mock_db.query.return_value.filter.return_value.first.return_value = mock_membership
|
|
||||||
|
|
||||||
service = TeamAuthorizationService(mock_db)
|
|
||||||
|
|
||||||
assert service.check_team_access("team-id", mock_user, "member") is True
|
|
||||||
assert service.check_team_access("team-id", mock_user, "admin") is True
|
|
||||||
assert service.check_team_access("team-id", mock_user, "owner") is True
|
|
||||||
|
|
||||||
def test_can_create_project(self, mock_db, mock_user):
|
|
||||||
"""Test can_create_project requires admin role."""
|
|
||||||
from app.auth import TeamAuthorizationService
|
|
||||||
|
|
||||||
service = TeamAuthorizationService(mock_db)
|
|
||||||
|
|
||||||
# Member cannot create projects
|
|
||||||
mock_membership = MagicMock()
|
|
||||||
mock_membership.role = "member"
|
|
||||||
mock_db.query.return_value.filter.return_value.first.return_value = mock_membership
|
|
||||||
assert service.can_create_project("team-id", mock_user) is False
|
|
||||||
|
|
||||||
# Admin can create projects
|
|
||||||
mock_membership.role = "admin"
|
|
||||||
assert service.can_create_project("team-id", mock_user) is True
|
|
||||||
|
|
||||||
# Owner can create projects
|
|
||||||
mock_membership.role = "owner"
|
|
||||||
assert service.can_create_project("team-id", mock_user) is True
|
|
||||||
|
|
||||||
def test_can_manage_members(self, mock_db, mock_user):
|
|
||||||
"""Test can_manage_members requires admin role."""
|
|
||||||
from app.auth import TeamAuthorizationService
|
|
||||||
|
|
||||||
service = TeamAuthorizationService(mock_db)
|
|
||||||
|
|
||||||
# Member cannot manage members
|
|
||||||
mock_membership = MagicMock()
|
|
||||||
mock_membership.role = "member"
|
|
||||||
mock_db.query.return_value.filter.return_value.first.return_value = mock_membership
|
|
||||||
assert service.can_manage_members("team-id", mock_user) is False
|
|
||||||
|
|
||||||
# Admin can manage members
|
|
||||||
mock_membership.role = "admin"
|
|
||||||
assert service.can_manage_members("team-id", mock_user) is True
|
|
||||||
|
|
||||||
def test_can_delete_team(self, mock_db, mock_user):
|
|
||||||
"""Test can_delete_team requires owner role."""
|
|
||||||
from app.auth import TeamAuthorizationService
|
|
||||||
|
|
||||||
service = TeamAuthorizationService(mock_db)
|
|
||||||
|
|
||||||
# Member cannot delete team
|
|
||||||
mock_membership = MagicMock()
|
|
||||||
mock_membership.role = "member"
|
|
||||||
mock_db.query.return_value.filter.return_value.first.return_value = mock_membership
|
|
||||||
assert service.can_delete_team("team-id", mock_user) is False
|
|
||||||
|
|
||||||
# Admin cannot delete team
|
|
||||||
mock_membership.role = "admin"
|
|
||||||
assert service.can_delete_team("team-id", mock_user) is False
|
|
||||||
|
|
||||||
# Only owner can delete team
|
|
||||||
mock_membership.role = "owner"
|
|
||||||
assert service.can_delete_team("team-id", mock_user) is True
|
|
||||||
@@ -26,8 +26,6 @@ services:
      - ORCHARD_REDIS_PORT=6379
      # Higher rate limit for local development/testing
      - ORCHARD_LOGIN_RATE_LIMIT=1000/minute
-     # Admin password - set in .env file or environment (see .env.example)
-     - ORCHARD_ADMIN_PASSWORD=${ORCHARD_ADMIN_PASSWORD:-}
    depends_on:
      postgres:
        condition: service_healthy
@@ -1,672 +0,0 @@
# Epic: Upstream Artifact Caching for Hermetic Builds

## Overview

Orchard will act as a permanent, content-addressable cache for upstream artifacts (npm, PyPI, Maven, Docker, etc.). Once an artifact is cached, it is stored forever by SHA256 hash - enabling reproducible builds years later regardless of whether the upstream source still exists.

## Problem Statement

Build reproducibility is critical for enterprise environments:
- Packages get deleted, yanked, or modified upstream
- Registries go down or change URLs
- Version constraints resolve differently over time
- Air-gapped environments cannot access public internet

Teams need to guarantee that a build from 5 years ago produces the exact same output today.

## Solution

Orchard becomes "the cache that never forgets":

1. **Fetch once, store forever** - When a build needs `lodash@4.17.21`, Orchard fetches it from npm, stores it by SHA256 hash, and never deletes it
2. **Content-addressable** - Same hash = same bytes, guaranteed (see the sketch below)
3. **Format-agnostic** - Orchard doesn't need to understand npm/PyPI/Maven protocols; the client provides the URL, Orchard fetches and stores
4. **Air-gap support** - Disable public internet entirely, only allow configured private upstreams

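The content-addressable storage idea reduces to a one-line key derivation: the artifact's bytes determine its storage key, so re-caching identical content is a no-op. A minimal sketch, assuming an S3-style key layout of the author's choosing (the `artifacts/sha256/...` prefix is illustrative, not the real bucket layout):

```python
import hashlib

def artifact_key(content: bytes) -> str:
    """Derive the storage key for an artifact from its bytes alone."""
    digest = hashlib.sha256(content).hexdigest()
    # Same bytes always yield the same key, so a second fetch of the
    # identical artifact maps to the object that already exists.
    return f"artifacts/sha256/{digest}"
```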
## User Workflow

```
1. Build tool resolves dependencies    npm install / pip install / mvn resolve
            ↓
2. Generate lockfile with URLs         package-lock.json / requirements.txt
            ↓
3. Cache all URLs in Orchard           orchard cache --file urls.txt
            ↓
4. Pin by SHA256 hash                  lodash = "sha256:abc123..."
            ↓
5. Future builds fetch by hash         Always get exact same bytes
```

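Step 3 could also be scripted directly against the cache endpoint proposed in Issue #70 below, for builds that do not use the CLI. The endpoint path and response fields are taken from that issue; everything else (host, token handling, file format) is an illustrative sketch, not a finished client:

```python
import httpx

ORCHARD_URL = "https://orchard.example.internal"  # illustrative host

def cache_urls(urls_file: str, api_key: str) -> None:
    """Ask Orchard to fetch and permanently store every URL in a lockfile dump."""
    with httpx.Client(base_url=ORCHARD_URL, timeout=300.0) as client:
        for line in open(urls_file).read().splitlines():
            url = line.strip()
            if not url:
                continue
            resp = client.post(
                "/api/v1/cache",
                json={"url": url, "source_type": "npm"},
                headers={"Authorization": f"Bearer {api_key}"},
            )
            resp.raise_for_status()
            body = resp.json()
            print(f"{body['sha256']}  {url}  already_cached={body['already_cached']}")
```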
## Key Features

- **Multiple upstream sources** - Configure npm, PyPI, Maven Central, private Artifactory, etc.
- **Per-source authentication** - Basic auth, bearer tokens, API keys
- **System cache projects** - `_npm`, `_pypi`, `_maven` organize cached packages by format
- **Cross-referencing** - Link cached artifacts to user projects for visibility
- **URL tracking** - Know which URLs map to which hashes, audit provenance
- **Air-gap mode** - Global kill switch for all public internet access
- **Environment variable config** - 12-factor friendly for containerized deployments

## Architecture

```
┌─────────────────────────────────────────────────────────────────┐
│ Orchard Server                                                   │
├─────────────────────────────────────────────────────────────────┤
│ POST /api/v1/cache                                               │
│   ├── Check if URL already cached (url_hash lookup)              │
│   ├── Match URL to upstream source (get auth)                    │
│   ├── Fetch via UpstreamClient (stream + compute SHA256)         │
│   ├── Store artifact in S3 (content-addressable)                 │
│   ├── Create tag in system project (_npm/lodash:4.17.21)         │
│   ├── Optionally create tag in user project                      │
│   └── Record in cached_urls table (provenance)                   │
├─────────────────────────────────────────────────────────────────┤
│ Tables                                                           │
│   ├── upstream_sources (npm-public, pypi-public, artifactory)    │
│   ├── cache_settings (allow_public_internet, etc.)               │
│   ├── cached_urls (url → artifact_id mapping)                    │
│   └── projects.is_system (for _npm, _pypi, etc.)                 │
└─────────────────────────────────────────────────────────────────┘
```

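The `url_hash` lookup in the diagram is a hash of the URL string itself (not of the downloaded content), used as an indexed key into `cached_urls`. A sketch of how that lookup could work; the query shape and helper names are illustrative, the actual model lives in `backend/app/models.py`:

```python
import hashlib

def url_hash(url: str) -> str:
    """Hash the URL (not the artifact bytes) for the cached_urls index."""
    return hashlib.sha256(url.encode("utf-8")).hexdigest()

def find_cached_artifact(db, url: str):
    """Return the artifact_id previously recorded for this URL, if any."""
    from app.models import CachedUrl  # model name per Issue #68
    row = db.query(CachedUrl).filter(CachedUrl.url_hash == url_hash(url)).first()
    return row.artifact_id if row else None
```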
## Issues Summary

| Issue | Title | Status | Dependencies |
|-------|-------|--------|--------------|
| #68 | Schema: Upstream Sources & Cache Tracking | ✅ Complete | None |
| #69 | HTTP Client: Generic URL Fetcher | Pending | None |
| #70 | Cache API Endpoint | Pending | #68, #69 |
| #71 | System Projects (Cache Namespaces) | Pending | #68, #70 |
| #72 | Upstream Sources Admin API | Pending | #68 |
| #73 | Global Cache Settings API | Pending | #68 |
| #74 | Environment Variable Overrides | Pending | #68, #72, #73 |
| #75 | Frontend: Upstream Sources Management | Pending | #72, #73 |
| #105 | Frontend: System Projects Integration | Pending | #71 |
| #77 | CLI: Cache Command | Pending | #70 |

## Implementation Phases

**Phase 1 - Core (MVP):**
- #68 Schema ✅
- #69 HTTP Client
- #70 Cache API
- #71 System Projects

**Phase 2 - Admin:**
- #72 Upstream Sources API
- #73 Cache Settings API
- #74 Environment Variables

**Phase 3 - Frontend:**
- #75 Upstream Sources UI
- #105 System Projects UI

**Phase 4 - CLI:**
- #77 Cache Command

---

# Issue #68: Schema - Upstream Sources & Cache Tracking

**Status: ✅ Complete**

## Description

Create database schema for flexible multi-source upstream configuration and URL-to-artifact tracking. This replaces the previous singleton proxy_config design with a more flexible model supporting multiple upstream sources, air-gap mode, and provenance tracking.

## Acceptance Criteria

- [x] `upstream_sources` table:
  - id (UUID, primary key)
  - name (VARCHAR(255), unique, e.g., "npm-public", "artifactory-private")
  - source_type (VARCHAR(50), enum: npm, pypi, maven, docker, helm, nuget, deb, rpm, generic)
  - url (VARCHAR(2048), base URL of upstream)
  - enabled (BOOLEAN, default false)
  - is_public (BOOLEAN, true if this is a public internet source)
  - auth_type (VARCHAR(20), enum: none, basic, bearer, api_key)
  - username (VARCHAR(255), nullable)
  - password_encrypted (BYTEA, nullable, Fernet encrypted)
  - headers_encrypted (BYTEA, nullable, for custom headers like API keys)
  - priority (INTEGER, default 100, lower = checked first)
  - created_at, updated_at timestamps
- [x] `cache_settings` table (singleton, id always 1):
  - id (INTEGER, primary key, check id = 1)
  - allow_public_internet (BOOLEAN, default true, air-gap kill switch)
  - auto_create_system_projects (BOOLEAN, default true)
  - created_at, updated_at timestamps
- [x] `cached_urls` table:
  - id (UUID, primary key)
  - url (VARCHAR(4096), original URL fetched)
  - url_hash (VARCHAR(64), SHA256 of URL for fast lookup, indexed)
  - artifact_id (VARCHAR(64), FK to artifacts)
  - source_id (UUID, FK to upstream_sources, nullable for manual imports)
  - fetched_at (TIMESTAMP WITH TIME ZONE)
  - response_headers (JSONB, original upstream headers for provenance)
  - created_at timestamp
- [x] Add `is_system` BOOLEAN column to projects table (default false)
- [x] Migration SQL file in migrations/
- [x] Runtime migration in database.py
- [x] SQLAlchemy models for all new tables
- [x] Pydantic schemas for API input/output (passwords write-only)
- [x] Encryption helpers for password/headers fields
- [x] Seed default upstream sources (disabled by default):
  - npm-public: https://registry.npmjs.org
  - pypi-public: https://pypi.org/simple
  - maven-central: https://repo1.maven.org/maven2
  - docker-hub: https://registry-1.docker.io
- [x] Unit tests for models and schemas

## Files Modified

- `migrations/010_upstream_caching.sql`
- `backend/app/database.py` (migrations 016-020)
- `backend/app/models.py` (UpstreamSource, CacheSettings, CachedUrl, Project.is_system)
- `backend/app/schemas.py` (all caching schemas)
- `backend/app/encryption.py` (renamed env var)
- `backend/app/config.py` (renamed setting)
- `backend/tests/test_upstream_caching.py` (37 tests)
- `frontend/src/components/Layout.tsx` (footer tagline)
- `CHANGELOG.md`

---

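The actual models ship in `backend/app/models.py`; the following is only a trimmed sketch of what the `upstream_sources` column list above implies in SQLAlchemy terms. The `Base`, exact defaults, and constraint details are assumptions, not the committed code:

```python
import uuid
from sqlalchemy import Boolean, Column, DateTime, Integer, LargeBinary, String, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import declarative_base

Base = declarative_base()  # the project defines its own Base

class UpstreamSource(Base):
    __tablename__ = "upstream_sources"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = Column(String(255), unique=True, nullable=False)   # e.g. "npm-public"
    source_type = Column(String(50), nullable=False)          # npm, pypi, maven, ...
    url = Column(String(2048), nullable=False)
    enabled = Column(Boolean, default=False, nullable=False)
    is_public = Column(Boolean, default=False, nullable=False)
    auth_type = Column(String(20), default="none", nullable=False)
    username = Column(String(255), nullable=True)
    password_encrypted = Column(LargeBinary, nullable=True)   # Fernet encrypted
    headers_encrypted = Column(LargeBinary, nullable=True)
    priority = Column(Integer, default=100, nullable=False)   # lower = checked first
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
```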
# Issue #69: HTTP Client - Generic URL Fetcher

**Status: Pending**

## Description

Create a reusable HTTP client for fetching artifacts from upstream sources. Supports multiple auth methods, streaming for large files, and computes SHA256 while downloading.

## Acceptance Criteria

- [ ] `UpstreamClient` class in `backend/app/upstream.py`
- [ ] `fetch(url)` method that:
  - Streams response body (doesn't load large files into memory)
  - Computes SHA256 hash while streaming
  - Returns file content, hash, size, and response headers
- [ ] Auth support based on upstream source configuration:
  - None (anonymous)
  - Basic auth (username/password)
  - Bearer token (Authorization: Bearer {token})
  - API key (custom header name/value)
- [ ] URL-to-source matching:
  - Match URL to configured upstream source by URL prefix
  - Apply auth from matched source
  - Respect source priority for multiple matches
- [ ] Configuration options:
  - Timeout (connect and read, default 30s/300s)
  - Max retries (default 3)
  - Follow redirects (default true, max 5)
  - Max file size (reject if Content-Length exceeds limit)
- [ ] Respect `allow_public_internet` setting:
  - If false, reject URLs matching `is_public=true` sources
  - If false, reject URLs not matching any configured source
- [ ] Capture response headers for provenance tracking
- [ ] Proper error handling:
  - Connection errors (retry with backoff)
  - HTTP errors (4xx, 5xx)
  - Timeout errors
  - SSL/TLS errors
- [ ] Logging for debugging (URL, source matched, status, timing)
- [ ] Unit tests with mocked HTTP responses
- [ ] Integration tests against httpbin.org or similar (optional, marked)

## Technical Notes

- Use `httpx` for async HTTP support (already in requirements)
- Stream to temp file to avoid memory issues with large artifacts
- Consider checksum verification if upstream provides it (e.g., npm provides shasum)

---

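The stream-and-hash requirement above is the core of the fetcher; auth, retries, and the air-gap check wrap around it. A minimal synchronous sketch of just that piece, using the `httpx` streaming API (the function name and return shape are illustrative, not the planned `UpstreamClient` interface):

```python
import hashlib
import tempfile
import httpx

def fetch_and_hash(url: str, timeout: float = 300.0):
    """Stream a URL to a temp file while computing its SHA256 on the fly."""
    sha = hashlib.sha256()
    size = 0
    tmp = tempfile.NamedTemporaryFile(delete=False)
    with httpx.Client(follow_redirects=True, timeout=timeout) as client:
        with client.stream("GET", url) as resp:
            resp.raise_for_status()
            for chunk in resp.iter_bytes():
                sha.update(chunk)      # hash while downloading
                size += len(chunk)
                tmp.write(chunk)       # never hold the whole file in memory
    tmp.close()
    return tmp.name, sha.hexdigest(), size, dict(resp.headers)
```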
# Issue #70: Cache API Endpoint

**Status: Pending**

## Description

API endpoint to cache an artifact from an upstream URL. This is the core endpoint that fetches from upstream, stores in Orchard, and creates appropriate tags.

## Acceptance Criteria

- [ ] `POST /api/v1/cache` endpoint
- [ ] Request body:
  ```json
  {
    "url": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
    "source_type": "npm",
    "package_name": "lodash",
    "tag": "4.17.21",
    "user_project": "my-app",
    "user_package": "npm-deps",
    "user_tag": "lodash-4.17.21",
    "expected_hash": "sha256:abc123..."
  }
  ```
  - `url` (required): URL to fetch
  - `source_type` (required): Determines system project (_npm, _pypi, etc.)
  - `package_name` (optional): Package name in system project, derived from URL if not provided
  - `tag` (optional): Tag name in system project, derived from URL if not provided
  - `user_project`, `user_package`, `user_tag` (optional): Cross-reference in user's project
  - `expected_hash` (optional): Verify downloaded content matches
- [ ] Response:
  ```json
  {
    "artifact_id": "abc123...",
    "sha256": "abc123...",
    "size": 12345,
    "content_type": "application/gzip",
    "already_cached": false,
    "source_url": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
    "source_name": "npm-public",
    "system_project": "_npm",
    "system_package": "lodash",
    "system_tag": "4.17.21",
    "user_reference": "my-app/npm-deps:lodash-4.17.21"
  }
  ```
- [ ] Behavior:
  - Check if URL already cached (by url_hash in cached_urls)
  - If cached: return existing artifact, optionally create user tag
  - If not cached: fetch via UpstreamClient, store artifact, create tags
  - Create/get system project if needed (e.g., `_npm`)
  - Create package in system project (e.g., `_npm/lodash`)
  - Create tag in system project (e.g., `_npm/lodash:4.17.21`)
  - If user reference provided, create tag in user's project
  - Record in cached_urls table with provenance
- [ ] Error handling:
  - 400: Invalid request (bad URL format, missing required fields)
  - 403: Air-gap mode enabled and URL is from public source
  - 404: Upstream returned 404
  - 409: Hash mismatch (if expected_hash provided)
  - 502: Upstream fetch failed (connection error, timeout)
  - 503: Upstream source disabled
- [ ] Authentication required (any authenticated user can cache)
- [ ] Audit logging for cache operations
- [ ] Integration tests covering success and error cases

## Technical Notes

- URL parsing for package_name/tag derivation is format-specific:
  - npm: `/{package}/-/{package}-{version}.tgz` → package=lodash, tag=4.17.21
  - pypi: `/packages/.../requests-2.28.0.tar.gz` → package=requests, tag=2.28.0
  - maven: `/{group}/{artifact}/{version}/{artifact}-{version}.jar`
- Deduplication: if same SHA256 already exists, just create new tag pointing to it

---

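For the npm case in the technical notes, the package name and version can be split on the `/-/` separator in the tarball path, which also keeps scoped packages (`@scope/name`) intact. A sketch under those assumptions; the helper name is illustrative and the other formats would need their own parsers:

```python
from urllib.parse import urlparse

def parse_npm_tarball_url(url: str):
    """Derive (package_name, version) from an npm tarball URL.

    e.g. .../lodash/-/lodash-4.17.21.tgz    -> ("lodash", "4.17.21")
         .../@types/node/-/node-20.1.0.tgz  -> ("@types/node", "20.1.0")
    """
    path = urlparse(url).path
    prefix, _, filename = path.rpartition("/-/")
    if not prefix or not filename.endswith(".tgz"):
        raise ValueError(f"not an npm tarball URL: {url}")
    package = prefix.lstrip("/")             # may include an @scope/ prefix
    basename = filename[: -len(".tgz")]      # e.g. "lodash-4.17.21"
    short_name = package.split("/")[-1]      # the filename uses the unscoped name
    version = basename[len(short_name) + 1:]
    return package, version
```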
# Issue #71: System Projects (Cache Namespaces)

**Status: Pending**

## Description

Implement auto-created system projects for organizing cached artifacts by format type. These are special projects that provide a browsable namespace for all cached upstream packages.

## Acceptance Criteria

- [ ] System project names: `_npm`, `_pypi`, `_maven`, `_docker`, `_helm`, `_nuget`, `_deb`, `_rpm`, `_generic`
- [ ] Auto-creation:
  - Created automatically on first cache request for that format
  - Created by cache endpoint, not at startup
  - Uses system user as creator (`created_by = "system"`)
- [ ] System project properties:
  - `is_system = true`
  - `is_public = true` (readable by all authenticated users)
  - `description` = "System cache for {format} packages"
- [ ] Restrictions:
  - Cannot be deleted (return 403 with message)
  - Cannot be renamed
  - Cannot change `is_public` to false
  - Only admins can modify description
- [ ] Helper function: `get_or_create_system_project(source_type)` in routes.py or new cache.py module (see the sketch after this list)
- [ ] Update project deletion endpoint to check `is_system` flag
- [ ] Update project update endpoint to enforce restrictions
- [ ] Query helper: list all system projects for UI dropdown
- [ ] Unit tests for restrictions
- [ ] Integration tests for auto-creation and restrictions
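One possible shape for the helper, sketched under the assumption of an async SQLAlchemy session and a `Project` model carrying the flags listed above; everything except the helper's name and the property values is illustrative.

```python
from sqlalchemy import select

SYSTEM_PROJECT_PREFIX = "_"

async def get_or_create_system_project(db, source_type: str):
    """Return the system project for a format (e.g. 'npm' -> '_npm'), creating it once."""
    name = f"{SYSTEM_PROJECT_PREFIX}{source_type}"
    result = await db.execute(select(Project).where(Project.name == name))
    project = result.scalar_one_or_none()
    if project is not None:
        return project

    project = Project(
        name=name,
        is_system=True,
        is_public=True,                       # readable by all authenticated users
        description=f"System cache for {source_type} packages",
        created_by="system",
    )
    db.add(project)
    await db.flush()                          # assign an ID without committing yet
    return project
```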

## Technical Notes

- System projects are identified by `is_system=true`, not just naming convention
- The `_` prefix is a convention for display purposes
- Packages within system projects follow upstream naming (e.g., `_npm/lodash`, `_npm/@types/node`)

---

# Issue #72: Upstream Sources Admin API

**Status: Pending**

## Description

CRUD API endpoints for managing upstream sources configuration. Admin-only access.

## Acceptance Criteria

- [ ] `GET /api/v1/admin/upstream-sources` - List all upstream sources
  - Returns array of sources with id, name, source_type, url, enabled, is_public, auth_type, priority, has_credentials, created_at, updated_at
  - Supports `?enabled=true/false` filter
  - Supports `?source_type=npm,pypi` filter
  - Passwords/tokens never returned
- [ ] `POST /api/v1/admin/upstream-sources` - Create upstream source
  - Request: name, source_type, url, enabled, is_public, auth_type, username, password, headers, priority
  - Validates unique name
  - Validates URL format
  - Encrypts password/headers before storage
  - Returns created source (without secrets)
- [ ] `GET /api/v1/admin/upstream-sources/{id}` - Get source details
  - Returns source with `has_credentials` boolean, not actual credentials
- [ ] `PUT /api/v1/admin/upstream-sources/{id}` - Update source
  - Partial update supported
  - If password provided, re-encrypt; if omitted, keep existing
  - Special value `password: null` clears credentials
- [ ] `DELETE /api/v1/admin/upstream-sources/{id}` - Delete source
  - Returns 400 if source has cached_urls referencing it (optional: cascade or reassign)
- [ ] `POST /api/v1/admin/upstream-sources/{id}/test` - Test connectivity
  - Attempts HEAD request to source URL
  - Returns success/failure with status code and timing
  - Does not cache anything
- [ ] All endpoints require admin role
- [ ] Audit logging for all mutations
- [ ] Pydantic schemas: UpstreamSourceCreate, UpstreamSourceUpdate, UpstreamSourceResponse (see the sketch after this list)
- [ ] Integration tests for all endpoints
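A possible Pydantic shape for the three schemas. The field names follow the endpoint descriptions above; exact types, defaults, and how omitted-vs-null passwords are distinguished are assumptions to be settled during implementation.

```python
from datetime import datetime
from typing import Optional

from pydantic import BaseModel, HttpUrl

class UpstreamSourceCreate(BaseModel):
    name: str
    source_type: str                  # npm, pypi, maven, docker, helm, generic, ...
    url: HttpUrl
    enabled: bool = True
    is_public: bool = True
    auth_type: str = "none"           # none, basic, bearer, api_key
    username: Optional[str] = None
    password: Optional[str] = None    # encrypted before storage, never echoed back
    headers: Optional[dict[str, str]] = None
    priority: int = 0

class UpstreamSourceUpdate(BaseModel):
    # Partial update: distinguish "omitted" (keep existing) from explicit null
    # (clear credentials), e.g. via model_fields_set / exclude_unset.
    name: Optional[str] = None
    url: Optional[HttpUrl] = None
    enabled: Optional[bool] = None
    is_public: Optional[bool] = None
    auth_type: Optional[str] = None
    username: Optional[str] = None
    password: Optional[str] = None
    priority: Optional[int] = None

class UpstreamSourceResponse(BaseModel):
    id: str
    name: str
    source_type: str
    url: str
    enabled: bool
    is_public: bool
    auth_type: str
    priority: int
    has_credentials: bool             # boolean flag instead of the secret itself
    created_at: datetime
    updated_at: datetime
```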

## Technical Notes

- Test endpoint should respect auth configuration to verify credentials work (a sketch follows these notes)
- Consider adding `last_used_at` and `last_error` fields for observability (future enhancement)
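The connectivity test could look roughly like this. Using `httpx` as the client and a `build_auth_headers` helper are assumptions rather than settled choices; the point is the HEAD request, timing, and never caching anything.

```python
import time

import httpx

async def test_upstream_source(source) -> dict:
    """Probe a source with a HEAD request using its configured auth; return status and latency."""
    headers = build_auth_headers(source)       # hypothetical: basic/bearer/api_key
    started = time.monotonic()
    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.head(str(source.url), headers=headers)
        return {
            "success": response.status_code < 400,
            "status_code": response.status_code,
            "elapsed_ms": round((time.monotonic() - started) * 1000, 1),
        }
    except httpx.HTTPError as exc:
        return {
            "success": False,
            "status_code": None,
            "error": str(exc),
            "elapsed_ms": round((time.monotonic() - started) * 1000, 1),
        }
```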

---

# Issue #73: Global Cache Settings API

**Status: Pending**

## Description

API endpoints for managing global cache settings including air-gap mode.

## Acceptance Criteria

- [ ] `GET /api/v1/admin/cache-settings` - Get current settings
  - Returns: allow_public_internet, auto_create_system_projects, created_at, updated_at
- [ ] `PUT /api/v1/admin/cache-settings` - Update settings
  - Partial update supported
  - Returns updated settings
- [ ] Settings fields:
  - `allow_public_internet` (boolean): When false, blocks all requests to sources marked `is_public=true`
  - `auto_create_system_projects` (boolean): When false, system projects must be created manually
- [ ] Admin-only access
- [ ] Audit logging for changes (especially air-gap mode changes)
- [ ] Pydantic schemas: CacheSettingsResponse, CacheSettingsUpdate
- [ ] Initialize singleton row on first access if not exists (see the sketch after this list)
- [ ] Integration tests
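A minimal get-or-create for the settings singleton, assuming a `CacheSettings` model keyed by a fixed id; the model and session API are placeholders.

```python
from sqlalchemy import select

SETTINGS_ID = 1  # single-row table

async def get_cache_settings(db):
    """Return the global cache settings row, creating it with defaults on first access."""
    result = await db.execute(select(CacheSettings).where(CacheSettings.id == SETTINGS_ID))
    settings = result.scalar_one_or_none()
    if settings is None:
        settings = CacheSettings(
            id=SETTINGS_ID,
            allow_public_internet=True,
            auto_create_system_projects=True,
        )
        db.add(settings)
        await db.flush()
    return settings
```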

## Technical Notes

- Air-gap mode change should be logged prominently (security-relevant)
- Consider requiring confirmation header for disabling air-gap mode (similar to factory reset)

---

# Issue #74: Environment Variable Overrides

**Status: Pending**

## Description

Allow cache and upstream configuration via environment variables for containerized deployments. Environment variables override database settings following 12-factor app principles.

## Acceptance Criteria

- [ ] Global settings overrides:
  - `ORCHARD_CACHE_ALLOW_PUBLIC_INTERNET=true/false`
  - `ORCHARD_CACHE_AUTO_CREATE_SYSTEM_PROJECTS=true/false`
  - `ORCHARD_CACHE_ENCRYPTION_KEY` (Fernet key for credential encryption)
- [ ] Upstream source definition via env vars (see the parsing sketch after this list):
  - `ORCHARD_UPSTREAM__{NAME}__URL` (double underscore as separator)
  - `ORCHARD_UPSTREAM__{NAME}__TYPE` (npm, pypi, maven, etc.)
  - `ORCHARD_UPSTREAM__{NAME}__ENABLED` (true/false)
  - `ORCHARD_UPSTREAM__{NAME}__IS_PUBLIC` (true/false)
  - `ORCHARD_UPSTREAM__{NAME}__AUTH_TYPE` (none, basic, bearer, api_key)
  - `ORCHARD_UPSTREAM__{NAME}__USERNAME`
  - `ORCHARD_UPSTREAM__{NAME}__PASSWORD`
  - `ORCHARD_UPSTREAM__{NAME}__PRIORITY`
  - Example: `ORCHARD_UPSTREAM__NPM_PRIVATE__URL=https://npm.corp.com`
- [ ] Env var sources:
  - Loaded at startup
  - Merged with database sources
  - Env var sources have `source = "env"` marker
  - Cannot be modified via API (return 400)
  - Cannot be deleted via API (return 400)
- [ ] Update Settings class in config.py
- [ ] Update get/list endpoints to include env-defined sources
- [ ] Document all env vars in CLAUDE.md
- [ ] Unit tests for env var parsing
- [ ] Integration tests with env vars set
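Parsing could group variables by source name on the last double underscore, roughly as below. The grouping logic is the point; the returned dict shape is illustrative and would need to line up with the real upstream source model.

```python
import os

PREFIX = "ORCHARD_UPSTREAM__"

def parse_env_upstream_sources(environ=os.environ) -> dict[str, dict]:
    """Group ORCHARD_UPSTREAM__{NAME}__{FIELD} variables into per-source dicts."""
    sources: dict[str, dict] = {}
    for key, value in environ.items():
        if not key.startswith(PREFIX):
            continue
        # Split on the *last* '__' so names like NPM_PRIVATE survive intact.
        name, sep, field = key[len(PREFIX):].rpartition("__")
        if not sep or not name:
            continue  # malformed key; a startup warning could go here
        entry = sources.setdefault(name.lower(), {"source": "env"})
        entry[field.lower()] = value
    return sources

# Example:
#   ORCHARD_UPSTREAM__NPM_PRIVATE__URL=https://npm.corp.com
#   ORCHARD_UPSTREAM__NPM_PRIVATE__TYPE=npm
# -> {"npm_private": {"source": "env", "url": "https://npm.corp.com", "type": "npm"}}
```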

## Technical Notes

- Double underscore (`__`) separator allows source names with single underscores
- Env-defined sources should appear in API responses but marked as read-only
- Consider startup validation that warns about invalid env var combinations

---

# Issue #75: Frontend - Upstream Sources Management

**Status: Pending**

## Description

Admin UI for managing upstream sources and cache settings.

## Acceptance Criteria

- [ ] New admin page: `/admin/cache` or `/admin/upstream-sources`
- [ ] Upstream sources section:
  - Table listing all sources with: name, type, URL, enabled toggle, public badge, priority, actions
  - Visual distinction for env-defined sources (locked icon, no edit/delete)
  - Create button opens modal/form
  - Edit button for DB-defined sources
  - Delete with confirmation modal
  - Test connection button with status indicator
- [ ] Create/edit form fields:
  - Name (text, required)
  - Source type (dropdown)
  - URL (text, required)
  - Priority (number)
  - Is public (checkbox)
  - Enabled (checkbox)
  - Auth type (dropdown: none, basic, bearer, api_key)
  - Conditional auth fields based on type:
    - Basic: username, password
    - Bearer: token
    - API key: header name, header value
  - Password fields masked, "unchanged" placeholder on edit
- [ ] Cache settings section:
  - Air-gap mode toggle with warning
  - Auto-create system projects toggle
  - "Air-gap mode" shows prominent warning banner when enabled
- [ ] Link from main admin navigation
- [ ] Loading and error states
- [ ] Success/error toast notifications

## Technical Notes

- Use existing admin page patterns from user management
- Air-gap toggle should require confirmation (modal with warning text)

---

# Issue #105: Frontend - System Projects Integration

**Status: Pending**

## Description

Integrate system projects into the frontend UI with appropriate visual treatment and navigation.

## Acceptance Criteria

- [ ] Home page project dropdown:
  - System projects shown in separate "Cached Packages" section
  - Visual distinction (icon, different background, or badge)
  - Format icon for each type (npm, pypi, maven, etc.)
- [ ] Project list/grid:
  - System projects can be filtered: "Show system projects" toggle
  - Or separate tab: "Projects" | "Package Cache"
- [ ] System project page:
  - "System Cache" badge in header
  - Description explains this is auto-managed cache
  - Settings/delete buttons hidden or disabled
  - Shows format type prominently
- [ ] Package page within system project:
  - Shows "Cached from" with source URL (linked)
  - Shows "First cached" timestamp
  - Shows which upstream source provided it
- [ ] Artifact page:
  - If artifact came from cache, show provenance:
    - Original URL
    - Upstream source name
    - Fetch timestamp
- [ ] Search includes system projects (with filter option)

## Technical Notes

- Use React context or query params for system project filtering
- Consider dedicated route: `/cache/npm/lodash` as alias for `/_npm/lodash`

---

# Issue #77: CLI - Cache Command

**Status: Pending**

## Description

Add a new `orchard cache` command to the existing CLI for caching artifacts from upstream URLs. This integrates with the new cache API endpoint and can optionally update `orchard.ensure` with cached artifacts.

## Acceptance Criteria

- [ ] New command: `orchard cache <url>` in `orchard/commands/cache.py`
- [ ] Basic usage:
  ```bash
  # Cache a URL, print artifact info
  orchard cache https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz

  # Output:
  # Caching https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz...
  # Source type: npm
  # Package: lodash
  # Version: 4.17.21
  #
  # Successfully cached artifact
  # Artifact ID: abc123...
  # Size: 1.2 MB
  # System project: _npm
  # System package: lodash
  # System tag: 4.17.21
  ```
- [ ] Options:

  | Option | Description |
  |--------|-------------|
  | `--type, -t TYPE` | Source type: npm, pypi, maven, docker, helm, generic (auto-detected from URL if not provided) |
  | `--package, -p NAME` | Package name in system project (auto-derived from URL if not provided) |
  | `--tag TAG` | Tag name in system project (auto-derived from URL if not provided) |
  | `--project PROJECT` | Also create tag in this user project |
  | `--user-package PKG` | Package name in user project (required if --project specified) |
  | `--user-tag TAG` | Tag name in user project (default: same as system tag) |
  | `--expected-hash HASH` | Verify downloaded content matches this SHA256 |
  | `--add` | Add to orchard.ensure after caching |
  | `--add-path PATH` | Extraction path for --add (default: `<package>/`) |
  | `--file, -f FILE` | Path to orchard.ensure file |
  | `--verbose, -v` | Show detailed output |

- [ ] URL type auto-detection (see the parsing sketch after the Technical Notes):
  - `registry.npmjs.org` → npm
  - `pypi.org` or `files.pythonhosted.org` → pypi
  - `repo1.maven.org` or contains `/maven2/` → maven
  - `registry-1.docker.io` or `docker.io` → docker
  - Otherwise → generic
- [ ] Package/version extraction from URL patterns:
  - npm: `/{package}/-/{package}-{version}.tgz`
  - pypi: `/packages/.../requests-{version}.tar.gz`
  - maven: `/{group}/{artifact}/{version}/{artifact}-{version}.jar`
- [ ] Add `cache_artifact()` function to `orchard/api.py`
- [ ] Integration with `--add` flag:
  - Parse existing orchard.ensure
  - Add new dependency entry pointing to cached artifact
  - Use artifact_id (SHA256) for hermetic pinning
- [ ] Batch mode: `orchard cache --file urls.txt` (see the sketch after this list)
  - One URL per line
  - Lines starting with `#` are comments
  - Report success/failure for each
- [ ] Exit codes:
  - 0: Success (or already cached)
  - 1: Fetch failed
  - 2: Hash mismatch
  - 3: Air-gap mode blocked request
- [ ] Error handling consistent with existing CLI patterns
- [ ] Unit tests in `test/test_cache.py`
- [ ] Update README.md with cache command documentation
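The batch loop could be as simple as the following sketch; `cache_artifact()` is the function planned for `orchard/api.py` above, while the exception handling and output format here are placeholders for whatever the CLI's existing error conventions are.

```python
import sys

def cache_from_file(path: str) -> int:
    """Cache each URL listed in a file; skip blanks and # comments; return an exit code."""
    failures = 0
    with open(path, encoding="utf-8") as f:
        for line in f:
            url = line.strip()
            if not url or url.startswith("#"):   # blank line or comment
                continue
            try:
                result = cache_artifact(url)     # planned helper in orchard/api.py
                print(f"OK    {url} -> {result['artifact_id']}")
            except Exception as exc:             # placeholder for real error classes
                failures += 1
                print(f"FAIL  {url}: {exc}", file=sys.stderr)
    return 1 if failures else 0
```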

## Technical Notes

- Follow existing Click patterns from other commands
- Use `get_auth_headers()` from `orchard/auth.py`
- URL parsing can use `urllib.parse` (a sketch follows these notes)
- Consider adding URL pattern registry for extensibility
- The `--add` flag should integrate with existing ensure file parsing in `orchard/ensure.py`
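A rough sketch of the detection and derivation rules listed above, using `urllib.parse` plus a couple of illustrative regexes. These cover only the simple npm/pypi cases (no scoped packages or wheels); the real implementation would likely grow into the pattern registry mentioned above.

```python
import re
from urllib.parse import urlparse

def detect_source_type(url: str) -> str:
    parsed = urlparse(url)
    host = parsed.hostname or ""
    if host == "registry.npmjs.org":
        return "npm"
    if host in ("pypi.org", "files.pythonhosted.org"):
        return "pypi"
    if host == "repo1.maven.org" or "/maven2/" in parsed.path:
        return "maven"
    if host in ("registry-1.docker.io", "docker.io"):
        return "docker"
    return "generic"

_NPM_RE = re.compile(r"/(?P<package>[^/]+)/-/(?P=package)-(?P<version>[^/]+)\.tgz$")
_PYPI_RE = re.compile(r"/packages/.+/(?P<package>[A-Za-z0-9_.-]+)-(?P<version>[0-9][^-]*)\.tar\.gz$")

def derive_package_and_tag(url: str, source_type: str):
    """Best-effort name/version extraction; the CLI falls back to --package/--tag."""
    path = urlparse(url).path
    pattern = {"npm": _NPM_RE, "pypi": _PYPI_RE}.get(source_type)
    if pattern:
        match = pattern.search(path)
        if match:
            return match.group("package"), match.group("version")
    return None, None

# detect_source_type("https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz") -> "npm"
# derive_package_and_tag(".../lodash/-/lodash-4.17.21.tgz", "npm") -> ("lodash", "4.17.21")
```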

## Example Workflows

```bash
# Simple: cache a single URL
orchard cache https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz

# Cache and add to orchard.ensure for current project
orchard cache https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz \
  --add --add-path libs/lodash/

# Cache with explicit metadata
orchard cache https://internal.corp/files/custom-lib.tar.gz \
  --type generic \
  --package custom-lib \
  --tag v1.0.0

# Cache and cross-reference to user project
orchard cache https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz \
  --project my-app \
  --user-package npm-deps \
  --user-tag lodash-4.17.21

# Batch cache from file
orchard cache --file deps-urls.txt

# Verify hash while caching
orchard cache https://example.com/file.tar.gz \
  --expected-hash sha256:abc123...
```

---

## Out of Scope (Future Enhancements)

- Automatic transitive dependency resolution (client's responsibility)
- Lockfile parsing (`package-lock.json`, `requirements.txt`) - stretch goal for CLI
- Cache eviction policies (we cache forever by design)
- Mirroring/sync between Orchard instances
- Format-specific metadata extraction (npm package.json parsing, etc.)

## Success Criteria

- [ ] Can cache any URL and retrieve by SHA256 hash
- [ ] Cached artifacts persist indefinitely
- [ ] Air-gap mode blocks all public internet access
- [ ] Multiple upstream sources with different auth
- [ ] System projects organize cached packages by format
- [ ] CLI can cache URLs and update orchard.ensure
- [ ] Admin UI for upstream source management
@@ -1,6 +1,5 @@
import { Routes, Route, Navigate, useLocation } from 'react-router-dom';
import { AuthProvider, useAuth } from './contexts/AuthContext';
-import { TeamProvider } from './contexts/TeamContext';
import Layout from './components/Layout';
import Home from './pages/Home';
import ProjectPage from './pages/ProjectPage';
@@ -11,12 +10,6 @@ import ChangePasswordPage from './pages/ChangePasswordPage';
import APIKeysPage from './pages/APIKeysPage';
import AdminUsersPage from './pages/AdminUsersPage';
import AdminOIDCPage from './pages/AdminOIDCPage';
-import AdminCachePage from './pages/AdminCachePage';
-import ProjectSettingsPage from './pages/ProjectSettingsPage';
-import TeamsPage from './pages/TeamsPage';
-import TeamDashboardPage from './pages/TeamDashboardPage';
-import TeamSettingsPage from './pages/TeamSettingsPage';
-import TeamMembersPage from './pages/TeamMembersPage';

// Component that checks if user must change password
function RequirePasswordChange({ children }: { children: React.ReactNode }) {
@@ -51,13 +44,7 @@ function AppRoutes() {
<Route path="/settings/api-keys" element={<APIKeysPage />} />
<Route path="/admin/users" element={<AdminUsersPage />} />
<Route path="/admin/oidc" element={<AdminOIDCPage />} />
-<Route path="/admin/cache" element={<AdminCachePage />} />
-<Route path="/teams" element={<TeamsPage />} />
-<Route path="/teams/:slug" element={<TeamDashboardPage />} />
-<Route path="/teams/:slug/settings" element={<TeamSettingsPage />} />
-<Route path="/teams/:slug/members" element={<TeamMembersPage />} />
<Route path="/project/:projectName" element={<ProjectPage />} />
-<Route path="/project/:projectName/settings" element={<ProjectSettingsPage />} />
<Route path="/project/:projectName/:packageName" element={<PackagePage />} />
</Routes>
</Layout>
@@ -71,9 +58,7 @@
function App() {
return (
<AuthProvider>
-<TeamProvider>
-<AppRoutes />
-</TeamProvider>
+<AppRoutes />
</AuthProvider>
);
}
@@ -33,19 +33,6 @@ import {
OIDCConfigUpdate,
OIDCStatus,
PackageVersion,
-ArtifactDependenciesResponse,
-ReverseDependenciesResponse,
-DependencyResolutionResponse,
-TeamDetail,
-TeamMember,
-TeamCreate,
-TeamUpdate,
-TeamMemberCreate,
-TeamMemberUpdate,
-UpstreamSource,
-UpstreamSourceCreate,
-UpstreamSourceUpdate,
-UpstreamSourceTestResult,
} from './types';

const API_BASE = '/api/v1';
@@ -170,7 +157,7 @@ export async function listProjectsSimple(params: ListParams = {}): Promise<Proje
return data.items;
}

-export async function createProject(data: { name: string; description?: string; is_public?: boolean; team_id?: string }): Promise<Project> {
+export async function createProject(data: { name: string; description?: string; is_public?: boolean }): Promise<Project> {
const response = await fetch(`${API_BASE}/projects`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
@@ -184,30 +171,6 @@ export async function getProject(name: string): Promise<Project> {
return handleResponse<Project>(response);
}

-export async function updateProject(
-projectName: string,
-data: { description?: string; is_public?: boolean }
-): Promise<Project> {
-const response = await fetch(`${API_BASE}/projects/${projectName}`, {
-method: 'PUT',
-headers: { 'Content-Type': 'application/json' },
-body: JSON.stringify(data),
-credentials: 'include',
-});
-return handleResponse<Project>(response);
-}
-
-export async function deleteProject(projectName: string): Promise<void> {
-const response = await fetch(`${API_BASE}/projects/${projectName}`, {
-method: 'DELETE',
-credentials: 'include',
-});
-if (!response.ok) {
-const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
-throw new Error(error.detail || `HTTP ${response.status}`);
-}
-}
-
// Package API
export async function listPackages(projectName: string, params: PackageListParams = {}): Promise<PaginatedResponse<Package>> {
const query = buildQueryString(params as Record<string, unknown>);
@@ -525,224 +488,3 @@ export async function deleteVersion(
|
|||||||
throw new Error(error.detail || `HTTP ${response.status}`);
|
throw new Error(error.detail || `HTTP ${response.status}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Dependency API
|
|
||||||
export async function getArtifactDependencies(artifactId: string): Promise<ArtifactDependenciesResponse> {
|
|
||||||
const response = await fetch(`${API_BASE}/artifact/${artifactId}/dependencies`);
|
|
||||||
return handleResponse<ArtifactDependenciesResponse>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getDependenciesByRef(
|
|
||||||
projectName: string,
|
|
||||||
packageName: string,
|
|
||||||
ref: string
|
|
||||||
): Promise<ArtifactDependenciesResponse> {
|
|
||||||
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/+/${ref}/dependencies`);
|
|
||||||
return handleResponse<ArtifactDependenciesResponse>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getReverseDependencies(
|
|
||||||
projectName: string,
|
|
||||||
packageName: string,
|
|
||||||
params: { page?: number; limit?: number } = {}
|
|
||||||
): Promise<ReverseDependenciesResponse> {
|
|
||||||
const query = buildQueryString(params as Record<string, unknown>);
|
|
||||||
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/reverse-dependencies${query}`);
|
|
||||||
return handleResponse<ReverseDependenciesResponse>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function resolveDependencies(
|
|
||||||
projectName: string,
|
|
||||||
packageName: string,
|
|
||||||
ref: string
|
|
||||||
): Promise<DependencyResolutionResponse> {
|
|
||||||
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/+/${ref}/resolve`);
|
|
||||||
return handleResponse<DependencyResolutionResponse>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getEnsureFile(
|
|
||||||
projectName: string,
|
|
||||||
packageName: string,
|
|
||||||
ref: string
|
|
||||||
): Promise<string> {
|
|
||||||
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/+/${ref}/ensure`);
|
|
||||||
if (!response.ok) {
|
|
||||||
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
|
|
||||||
throw new ApiError(error.detail || `HTTP ${response.status}`, response.status);
|
|
||||||
}
|
|
||||||
return response.text();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Team API
|
|
||||||
export async function listTeams(params: ListParams = {}): Promise<PaginatedResponse<TeamDetail>> {
|
|
||||||
const query = buildQueryString(params as Record<string, unknown>);
|
|
||||||
const response = await fetch(`${API_BASE}/teams${query}`, {
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<PaginatedResponse<TeamDetail>>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function createTeam(data: TeamCreate): Promise<TeamDetail> {
|
|
||||||
const response = await fetch(`${API_BASE}/teams`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify(data),
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<TeamDetail>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getTeam(slug: string): Promise<TeamDetail> {
|
|
||||||
const response = await fetch(`${API_BASE}/teams/${slug}`, {
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<TeamDetail>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function updateTeam(slug: string, data: TeamUpdate): Promise<TeamDetail> {
|
|
||||||
const response = await fetch(`${API_BASE}/teams/${slug}`, {
|
|
||||||
method: 'PUT',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify(data),
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<TeamDetail>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function deleteTeam(slug: string): Promise<void> {
|
|
||||||
const response = await fetch(`${API_BASE}/teams/${slug}`, {
|
|
||||||
method: 'DELETE',
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
if (!response.ok) {
|
|
||||||
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
|
|
||||||
throw new ApiError(error.detail || `HTTP ${response.status}`, response.status);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function listTeamMembers(slug: string): Promise<TeamMember[]> {
|
|
||||||
const response = await fetch(`${API_BASE}/teams/${slug}/members`, {
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<TeamMember[]>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function addTeamMember(slug: string, data: TeamMemberCreate): Promise<TeamMember> {
|
|
||||||
const response = await fetch(`${API_BASE}/teams/${slug}/members`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify(data),
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<TeamMember>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function updateTeamMember(
|
|
||||||
slug: string,
|
|
||||||
username: string,
|
|
||||||
data: TeamMemberUpdate
|
|
||||||
): Promise<TeamMember> {
|
|
||||||
const response = await fetch(`${API_BASE}/teams/${slug}/members/${username}`, {
|
|
||||||
method: 'PUT',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify(data),
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<TeamMember>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function removeTeamMember(slug: string, username: string): Promise<void> {
|
|
||||||
const response = await fetch(`${API_BASE}/teams/${slug}/members/${username}`, {
|
|
||||||
method: 'DELETE',
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
if (!response.ok) {
|
|
||||||
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
|
|
||||||
throw new ApiError(error.detail || `HTTP ${response.status}`, response.status);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function listTeamProjects(
|
|
||||||
slug: string,
|
|
||||||
params: ProjectListParams = {}
|
|
||||||
): Promise<PaginatedResponse<Project>> {
|
|
||||||
const query = buildQueryString(params as Record<string, unknown>);
|
|
||||||
const response = await fetch(`${API_BASE}/teams/${slug}/projects${query}`, {
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<PaginatedResponse<Project>>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
// User search (for autocomplete)
|
|
||||||
export interface UserSearchResult {
|
|
||||||
id: string;
|
|
||||||
username: string;
|
|
||||||
is_admin: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function searchUsers(query: string, limit: number = 10): Promise<UserSearchResult[]> {
|
|
||||||
const response = await fetch(`${API_BASE}/users/search?q=${encodeURIComponent(query)}&limit=${limit}`, {
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<UserSearchResult[]>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Upstream Sources Admin API
|
|
||||||
export interface UpstreamSourceListParams {
|
|
||||||
enabled?: boolean;
|
|
||||||
source_type?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function listUpstreamSources(params: UpstreamSourceListParams = {}): Promise<UpstreamSource[]> {
|
|
||||||
const query = buildQueryString(params as Record<string, unknown>);
|
|
||||||
const response = await fetch(`${API_BASE}/admin/upstream-sources${query}`, {
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<UpstreamSource[]>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function createUpstreamSource(data: UpstreamSourceCreate): Promise<UpstreamSource> {
|
|
||||||
const response = await fetch(`${API_BASE}/admin/upstream-sources`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify(data),
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<UpstreamSource>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getUpstreamSource(id: string): Promise<UpstreamSource> {
|
|
||||||
const response = await fetch(`${API_BASE}/admin/upstream-sources/${id}`, {
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<UpstreamSource>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function updateUpstreamSource(id: string, data: UpstreamSourceUpdate): Promise<UpstreamSource> {
|
|
||||||
const response = await fetch(`${API_BASE}/admin/upstream-sources/${id}`, {
|
|
||||||
method: 'PUT',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify(data),
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<UpstreamSource>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function deleteUpstreamSource(id: string): Promise<void> {
|
|
||||||
const response = await fetch(`${API_BASE}/admin/upstream-sources/${id}`, {
|
|
||||||
method: 'DELETE',
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
if (!response.ok) {
|
|
||||||
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
|
|
||||||
throw new ApiError(error.detail || `HTTP ${response.status}`, response.status);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function testUpstreamSource(id: string): Promise<UpstreamSourceTestResult> {
|
|
||||||
const response = await fetch(`${API_BASE}/admin/upstream-sources/${id}/test`, {
|
|
||||||
method: 'POST',
|
|
||||||
credentials: 'include',
|
|
||||||
});
|
|
||||||
return handleResponse<UpstreamSourceTestResult>(response);
|
|
||||||
}
|
|
||||||
@@ -114,32 +114,3 @@
font-size: 0.875rem;
color: var(--text-primary);
}
-
-/* Access source styling */
-.access-source {
-display: inline-block;
-padding: 0.2rem 0.4rem;
-border-radius: 4px;
-font-size: 0.75rem;
-font-weight: 500;
-}
-
-.access-source--explicit {
-background: var(--bg-tertiary);
-color: var(--text-secondary);
-}
-
-.access-source--team {
-background: var(--color-info-bg, #e3f2fd);
-color: var(--color-info, #1976d2);
-}
-
-/* Team access row styling */
-.team-access-row {
-background: var(--bg-secondary, #fafafa);
-}
-
-.team-access-row td.actions .text-muted {
-font-size: 0.8125rem;
-font-style: italic;
-}
@@ -208,104 +208,85 @@ export function AccessManagement({ projectName }: AccessManagementProps) {
<tr>
<th>User</th>
<th>Access Level</th>
-<th>Source</th>
<th>Granted</th>
<th>Expires</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
-{permissions.map((p) => {
-const isTeamBased = p.source === 'team';
-return (
-<tr key={p.id} className={isTeamBased ? 'team-access-row' : ''}>
-<td>{p.user_id}</td>
-<td>
-{editingUser === p.user_id && !isTeamBased ? (
-<select
-value={editLevel}
-onChange={(e) => setEditLevel(e.target.value as AccessLevel)}
+{permissions.map((p) => (
+<tr key={p.id}>
+<td>{p.user_id}</td>
+<td>
+{editingUser === p.user_id ? (
+<select
+value={editLevel}
+onChange={(e) => setEditLevel(e.target.value as AccessLevel)}
+disabled={submitting}
+>
+<option value="read">Read</option>
+<option value="write">Write</option>
+<option value="admin">Admin</option>
+</select>
+) : (
+<span className={`access-badge access-badge--${p.level}`}>
+{p.level}
+</span>
+)}
+</td>
+<td>{new Date(p.created_at).toLocaleDateString()}</td>
+<td>
+{editingUser === p.user_id ? (
+<input
+type="date"
+value={editExpiresAt}
+onChange={(e) => setEditExpiresAt(e.target.value)}
+disabled={submitting}
+min={new Date().toISOString().split('T')[0]}
+/>
+) : (
+formatExpiration(p.expires_at)
+)}
+</td>
+<td className="actions">
+{editingUser === p.user_id ? (
+<>
+<button
+className="btn btn-sm btn-primary"
+onClick={() => handleUpdate(p.user_id)}
disabled={submitting}
>
-<option value="read">Read</option>
-<option value="write">Write</option>
-<option value="admin">Admin</option>
-</select>
-) : (
-<span className={`access-badge access-badge--${p.level}`}>
-{p.level}
-</span>
-)}
-</td>
-<td>
-{isTeamBased ? (
-<span className="access-source access-source--team" title={`Team role: ${p.team_role}`}>
-Team: {p.team_slug}
-</span>
-) : (
-<span className="access-source access-source--explicit">
-Explicit
-</span>
-)}
-</td>
-<td>{new Date(p.created_at).toLocaleDateString()}</td>
-<td>
-{editingUser === p.user_id && !isTeamBased ? (
-<input
-type="date"
-value={editExpiresAt}
-onChange={(e) => setEditExpiresAt(e.target.value)}
+Save
+</button>
+<button
+className="btn btn-sm"
+onClick={cancelEdit}
disabled={submitting}
-min={new Date().toISOString().split('T')[0]}
-/>
-) : (
-formatExpiration(p.expires_at)
-)}
-</td>
-<td className="actions">
-{isTeamBased ? (
-<span className="text-muted" title="Manage access via team settings">
-Via team
-</span>
-) : editingUser === p.user_id ? (
-<>
+>
+Cancel
+</button>
+</>
+) : (
+<>
+<button
+className="btn btn-sm"
+onClick={() => startEdit(p)}
+disabled={submitting}
+>
+Edit
+</button>
<button
-className="btn btn-sm btn-primary"
-onClick={() => handleUpdate(p.user_id)}
+className="btn btn-sm btn-danger"
+onClick={() => handleRevoke(p.user_id)}
disabled={submitting}
>
-Save
+Revoke
</button>
-<button
-className="btn btn-sm"
-onClick={cancelEdit}
-disabled={submitting}
->
-Cancel
-</button>
-</>
-) : (
-<>
-<button
-className="btn btn-sm"
-onClick={() => startEdit(p)}
-disabled={submitting}
->
-Edit
-</button>
-<button
-className="btn btn-sm btn-danger"
-onClick={() => handleRevoke(p.user_id)}
-disabled={submitting}
->
-Revoke
-</button>
-</>
-)}
-</td>
-</tr>
-);
-})}
+</>
+)}
+</td>
+</tr>
+))}
</tbody>
</table>
)}
@@ -1,338 +0,0 @@
|
|||||||
/* Dependency Graph Modal */
|
|
||||||
.dependency-graph-modal {
|
|
||||||
position: fixed;
|
|
||||||
top: 0;
|
|
||||||
left: 0;
|
|
||||||
right: 0;
|
|
||||||
bottom: 0;
|
|
||||||
background: rgba(0, 0, 0, 0.8);
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
z-index: 1000;
|
|
||||||
padding: 24px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dependency-graph-content {
|
|
||||||
background: var(--bg-secondary);
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: var(--radius-lg);
|
|
||||||
width: 100%;
|
|
||||||
max-width: 1200px;
|
|
||||||
height: 80vh;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
overflow: hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dependency-graph-header {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 16px;
|
|
||||||
padding: 16px 20px;
|
|
||||||
border-bottom: 1px solid var(--border-primary);
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.dependency-graph-header h2 {
|
|
||||||
margin: 0;
|
|
||||||
font-size: 1.125rem;
|
|
||||||
font-weight: 600;
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.dependency-graph-info {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 12px;
|
|
||||||
flex: 1;
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--text-secondary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-stats {
|
|
||||||
color: var(--text-muted);
|
|
||||||
font-size: 0.8125rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.close-btn {
|
|
||||||
background: transparent;
|
|
||||||
border: none;
|
|
||||||
color: var(--text-secondary);
|
|
||||||
cursor: pointer;
|
|
||||||
padding: 4px;
|
|
||||||
border-radius: var(--radius-sm);
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.close-btn:hover {
|
|
||||||
background: var(--bg-hover);
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.dependency-graph-toolbar {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 8px;
|
|
||||||
padding: 12px 20px;
|
|
||||||
border-bottom: 1px solid var(--border-primary);
|
|
||||||
background: var(--bg-secondary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.zoom-level {
|
|
||||||
margin-left: auto;
|
|
||||||
font-size: 0.8125rem;
|
|
||||||
color: var(--text-muted);
|
|
||||||
font-family: 'JetBrains Mono', monospace;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dependency-graph-container {
|
|
||||||
flex: 1;
|
|
||||||
overflow: hidden;
|
|
||||||
position: relative;
|
|
||||||
background:
|
|
||||||
linear-gradient(90deg, var(--border-primary) 1px, transparent 1px),
|
|
||||||
linear-gradient(var(--border-primary) 1px, transparent 1px);
|
|
||||||
background-size: 20px 20px;
|
|
||||||
background-position: center center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-canvas {
|
|
||||||
padding: 40px;
|
|
||||||
min-width: 100%;
|
|
||||||
min-height: 100%;
|
|
||||||
transform-origin: center center;
|
|
||||||
transition: transform 0.1s ease-out;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Graph Nodes */
|
|
||||||
.graph-node-container {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
align-items: flex-start;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node {
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
border: 2px solid var(--border-primary);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
padding: 12px 16px;
|
|
||||||
min-width: 200px;
|
|
||||||
cursor: pointer;
|
|
||||||
transition: all var(--transition-fast);
|
|
||||||
position: relative;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node:hover {
|
|
||||||
border-color: var(--accent-primary);
|
|
||||||
box-shadow: 0 4px 12px rgba(16, 185, 129, 0.2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node--root {
|
|
||||||
background: linear-gradient(135deg, rgba(16, 185, 129, 0.15) 0%, rgba(5, 150, 105, 0.15) 100%);
|
|
||||||
border-color: var(--accent-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node--hovered {
|
|
||||||
transform: scale(1.02);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node__header {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 8px;
|
|
||||||
margin-bottom: 4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node__name {
|
|
||||||
font-weight: 600;
|
|
||||||
color: var(--accent-primary);
|
|
||||||
font-family: 'JetBrains Mono', monospace;
|
|
||||||
font-size: 0.875rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node__toggle {
|
|
||||||
background: var(--bg-hover);
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: 4px;
|
|
||||||
width: 20px;
|
|
||||||
height: 20px;
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
cursor: pointer;
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--text-secondary);
|
|
||||||
font-weight: 600;
|
|
||||||
margin-left: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node__toggle:hover {
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node__details {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 12px;
|
|
||||||
font-size: 0.75rem;
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node__version {
|
|
||||||
font-family: 'JetBrains Mono', monospace;
|
|
||||||
color: var(--text-secondary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node__size {
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Graph Children / Tree Structure */
|
|
||||||
.graph-children {
|
|
||||||
display: flex;
|
|
||||||
padding-left: 24px;
|
|
||||||
margin-top: 8px;
|
|
||||||
position: relative;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-connector {
|
|
||||||
position: absolute;
|
|
||||||
left: 12px;
|
|
||||||
top: 0;
|
|
||||||
bottom: 50%;
|
|
||||||
width: 12px;
|
|
||||||
border-left: 2px solid var(--border-primary);
|
|
||||||
border-bottom: 2px solid var(--border-primary);
|
|
||||||
border-bottom-left-radius: 8px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-children-list {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 8px;
|
|
||||||
position: relative;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-children-list::before {
|
|
||||||
content: '';
|
|
||||||
position: absolute;
|
|
||||||
left: -12px;
|
|
||||||
top: 20px;
|
|
||||||
bottom: 20px;
|
|
||||||
border-left: 2px solid var(--border-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-children-list > .graph-node-container {
|
|
||||||
position: relative;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-children-list > .graph-node-container::before {
|
|
||||||
content: '';
|
|
||||||
position: absolute;
|
|
||||||
left: -12px;
|
|
||||||
top: 20px;
|
|
||||||
width: 12px;
|
|
||||||
border-top: 2px solid var(--border-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Loading, Error, Empty States */
|
|
||||||
.graph-loading,
|
|
||||||
.graph-error,
|
|
||||||
.graph-empty {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
height: 100%;
|
|
||||||
gap: 16px;
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-loading .spinner {
|
|
||||||
width: 32px;
|
|
||||||
height: 32px;
|
|
||||||
border: 3px solid var(--border-primary);
|
|
||||||
border-top-color: var(--accent-primary);
|
|
||||||
border-radius: 50%;
|
|
||||||
animation: spin 1s linear infinite;
|
|
||||||
}
|
|
||||||
|
|
||||||
@keyframes spin {
|
|
||||||
to { transform: rotate(360deg); }
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-error {
|
|
||||||
color: var(--error-color, #ef4444);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-error svg {
|
|
||||||
opacity: 0.6;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-error p {
|
|
||||||
max-width: 400px;
|
|
||||||
text-align: center;
|
|
||||||
line-height: 1.5;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Tooltip */
|
|
||||||
.graph-tooltip {
|
|
||||||
position: fixed;
|
|
||||||
bottom: 24px;
|
|
||||||
left: 50%;
|
|
||||||
transform: translateX(-50%);
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
padding: 12px 16px;
|
|
||||||
font-size: 0.8125rem;
|
|
||||||
box-shadow: 0 8px 24px rgba(0, 0, 0, 0.4);
|
|
||||||
z-index: 1001;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-tooltip strong {
|
|
||||||
display: block;
|
|
||||||
color: var(--accent-primary);
|
|
||||||
font-family: 'JetBrains Mono', monospace;
|
|
||||||
margin-bottom: 4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-tooltip div {
|
|
||||||
color: var(--text-secondary);
|
|
||||||
margin-top: 2px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.tooltip-hint {
|
|
||||||
margin-top: 8px;
|
|
||||||
padding-top: 8px;
|
|
||||||
border-top: 1px solid var(--border-primary);
|
|
||||||
color: var(--text-muted);
|
|
||||||
font-size: 0.75rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Responsive */
|
|
||||||
@media (max-width: 768px) {
|
|
||||||
.dependency-graph-modal {
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dependency-graph-content {
|
|
||||||
height: 100vh;
|
|
||||||
border-radius: 0;
|
|
||||||
max-width: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dependency-graph-header {
|
|
||||||
flex-wrap: wrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dependency-graph-info {
|
|
||||||
flex-basis: 100%;
|
|
||||||
order: 3;
|
|
||||||
margin-top: 8px;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,323 +0,0 @@
|
|||||||
import { useState, useEffect, useCallback, useRef } from 'react';
|
|
||||||
import { useNavigate } from 'react-router-dom';
|
|
||||||
import { ResolvedArtifact, DependencyResolutionResponse, Dependency } from '../types';
|
|
||||||
import { resolveDependencies, getArtifactDependencies } from '../api';
|
|
||||||
import './DependencyGraph.css';
|
|
||||||
|
|
||||||
interface DependencyGraphProps {
|
|
||||||
projectName: string;
|
|
||||||
packageName: string;
|
|
||||||
tagName: string;
|
|
||||||
onClose: () => void;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface GraphNode {
|
|
||||||
id: string;
|
|
||||||
project: string;
|
|
||||||
package: string;
|
|
||||||
version: string | null;
|
|
||||||
size: number;
|
|
||||||
depth: number;
|
|
||||||
children: GraphNode[];
|
|
||||||
isRoot?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
function formatBytes(bytes: number): string {
|
|
||||||
if (bytes === 0) return '0 B';
|
|
||||||
const k = 1024;
|
|
||||||
const sizes = ['B', 'KB', 'MB', 'GB'];
|
|
||||||
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
|
||||||
return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
|
|
||||||
}
|
|
||||||
|
|
||||||
function DependencyGraph({ projectName, packageName, tagName, onClose }: DependencyGraphProps) {
|
|
||||||
const navigate = useNavigate();
|
|
||||||
const containerRef = useRef<HTMLDivElement>(null);
|
|
||||||
|
|
||||||
const [loading, setLoading] = useState(true);
|
|
||||||
const [error, setError] = useState<string | null>(null);
|
|
||||||
const [resolution, setResolution] = useState<DependencyResolutionResponse | null>(null);
|
|
||||||
const [graphRoot, setGraphRoot] = useState<GraphNode | null>(null);
|
|
||||||
const [hoveredNode, setHoveredNode] = useState<GraphNode | null>(null);
|
|
||||||
const [zoom, setZoom] = useState(1);
|
|
||||||
const [pan, setPan] = useState({ x: 0, y: 0 });
|
|
||||||
const [isDragging, setIsDragging] = useState(false);
|
|
||||||
const [dragStart, setDragStart] = useState({ x: 0, y: 0 });
|
|
||||||
const [collapsedNodes, setCollapsedNodes] = useState<Set<string>>(new Set());
|
|
||||||
|
|
||||||
// Build graph structure from resolution data
|
|
||||||
const buildGraph = useCallback(async (resolutionData: DependencyResolutionResponse) => {
|
|
||||||
const artifactMap = new Map<string, ResolvedArtifact>();
|
|
||||||
resolutionData.resolved.forEach(artifact => {
|
|
||||||
artifactMap.set(artifact.artifact_id, artifact);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Fetch dependencies for each artifact to build the tree
|
|
||||||
const depsMap = new Map<string, Dependency[]>();
|
|
||||||
|
|
||||||
for (const artifact of resolutionData.resolved) {
|
|
||||||
try {
|
|
||||||
const deps = await getArtifactDependencies(artifact.artifact_id);
|
|
||||||
depsMap.set(artifact.artifact_id, deps.dependencies);
|
|
||||||
} catch {
|
|
||||||
depsMap.set(artifact.artifact_id, []);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find the root artifact (the requested one)
|
|
||||||
const rootArtifact = resolutionData.resolved.find(
|
|
||||||
a => a.project === resolutionData.requested.project &&
|
|
||||||
a.package === resolutionData.requested.package
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!rootArtifact) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build tree recursively
|
|
||||||
const visited = new Set<string>();
|
|
||||||
|
|
||||||
const buildNode = (artifact: ResolvedArtifact, depth: number): GraphNode => {
|
|
||||||
const nodeId = `${artifact.project}/${artifact.package}`;
|
|
||||||
visited.add(artifact.artifact_id);
|
|
||||||
|
|
||||||
const deps = depsMap.get(artifact.artifact_id) || [];
|
|
||||||
const children: GraphNode[] = [];
|
|
||||||
|
|
||||||
for (const dep of deps) {
|
|
||||||
// Find the resolved artifact for this dependency
|
|
||||||
const childArtifact = resolutionData.resolved.find(
|
|
||||||
a => a.project === dep.project && a.package === dep.package
|
|
||||||
);
|
|
||||||
|
|
||||||
if (childArtifact && !visited.has(childArtifact.artifact_id)) {
|
|
||||||
children.push(buildNode(childArtifact, depth + 1));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
id: nodeId,
|
|
||||||
project: artifact.project,
|
|
||||||
package: artifact.package,
|
|
||||||
version: artifact.version || artifact.tag,
|
|
||||||
size: artifact.size,
|
|
||||||
depth,
|
|
||||||
children,
|
|
||||||
isRoot: depth === 0,
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
return buildNode(rootArtifact, 0);
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
async function loadData() {
|
|
||||||
setLoading(true);
|
|
||||||
setError(null);
|
|
||||||
|
|
||||||
try {
|
|
||||||
      const result = await resolveDependencies(projectName, packageName, tagName);
      setResolution(result);

      const graph = await buildGraph(result);
      setGraphRoot(graph);
    } catch (err) {
      if (err instanceof Error) {
        // Check if it's a resolution error
        try {
          const errorData = JSON.parse(err.message);
          if (errorData.error === 'circular_dependency') {
            setError(`Circular dependency detected: ${errorData.cycle?.join(' → ')}`);
          } else if (errorData.error === 'dependency_conflict') {
            setError(`Dependency conflict: ${errorData.message}`);
          } else {
            setError(err.message);
          }
        } catch {
          setError(err.message);
        }
      } else {
        setError('Failed to load dependency graph');
      }
    } finally {
      setLoading(false);
    }
  }

    loadData();
  }, [projectName, packageName, tagName, buildGraph]);

  const handleNodeClick = (node: GraphNode) => {
    navigate(`/project/${node.project}/${node.package}`);
    onClose();
  };

  const handleNodeToggle = (node: GraphNode, e: React.MouseEvent) => {
    e.stopPropagation();
    setCollapsedNodes(prev => {
      const next = new Set(prev);
      if (next.has(node.id)) {
        next.delete(node.id);
      } else {
        next.add(node.id);
      }
      return next;
    });
  };

  const handleWheel = (e: React.WheelEvent) => {
    e.preventDefault();
    const delta = e.deltaY > 0 ? -0.1 : 0.1;
    setZoom(z => Math.max(0.25, Math.min(2, z + delta)));
  };

  const handleMouseDown = (e: React.MouseEvent) => {
    if (e.target === containerRef.current || (e.target as HTMLElement).classList.contains('graph-canvas')) {
      setIsDragging(true);
      setDragStart({ x: e.clientX - pan.x, y: e.clientY - pan.y });
    }
  };

  const handleMouseMove = (e: React.MouseEvent) => {
    if (isDragging) {
      setPan({ x: e.clientX - dragStart.x, y: e.clientY - dragStart.y });
    }
  };

  const handleMouseUp = () => {
    setIsDragging(false);
  };

  const resetView = () => {
    setZoom(1);
    setPan({ x: 0, y: 0 });
  };

  const renderNode = (node: GraphNode, index: number = 0): JSX.Element => {
    const isCollapsed = collapsedNodes.has(node.id);
    const hasChildren = node.children.length > 0;

    return (
      <div key={`${node.id}-${index}`} className="graph-node-container">
        <div
          className={`graph-node ${node.isRoot ? 'graph-node--root' : ''} ${hoveredNode?.id === node.id ? 'graph-node--hovered' : ''}`}
          onClick={() => handleNodeClick(node)}
          onMouseEnter={() => setHoveredNode(node)}
          onMouseLeave={() => setHoveredNode(null)}
        >
          <div className="graph-node__header">
            <span className="graph-node__name">{node.project}/{node.package}</span>
            {hasChildren && (
              <button
                className="graph-node__toggle"
                onClick={(e) => handleNodeToggle(node, e)}
                title={isCollapsed ? 'Expand' : 'Collapse'}
              >
                {isCollapsed ? '+' : '-'}
              </button>
            )}
          </div>
          <div className="graph-node__details">
            {node.version && <span className="graph-node__version">@ {node.version}</span>}
            <span className="graph-node__size">{formatBytes(node.size)}</span>
          </div>
        </div>

        {hasChildren && !isCollapsed && (
          <div className="graph-children">
            <div className="graph-connector"></div>
            <div className="graph-children-list">
              {node.children.map((child, i) => renderNode(child, i))}
            </div>
          </div>
        )}
      </div>
    );
  };

  return (
    <div className="dependency-graph-modal" onClick={onClose}>
      <div className="dependency-graph-content" onClick={e => e.stopPropagation()}>
        <div className="dependency-graph-header">
          <h2>Dependency Graph</h2>
          <div className="dependency-graph-info">
            <span>{projectName}/{packageName} @ {tagName}</span>
            {resolution && (
              <span className="graph-stats">
                {resolution.artifact_count} packages • {formatBytes(resolution.total_size)} total
              </span>
            )}
          </div>
          <button className="close-btn" onClick={onClose} title="Close">
            <svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
              <line x1="18" y1="6" x2="6" y2="18"></line>
              <line x1="6" y1="6" x2="18" y2="18"></line>
            </svg>
          </button>
        </div>

        <div className="dependency-graph-toolbar">
          <button className="btn btn-secondary btn-small" onClick={() => setZoom(z => Math.min(2, z + 0.25))}>
            Zoom In
          </button>
          <button className="btn btn-secondary btn-small" onClick={() => setZoom(z => Math.max(0.25, z - 0.25))}>
            Zoom Out
          </button>
          <button className="btn btn-secondary btn-small" onClick={resetView}>
            Reset View
          </button>
          <span className="zoom-level">{Math.round(zoom * 100)}%</span>
        </div>

        <div
          ref={containerRef}
          className="dependency-graph-container"
          onWheel={handleWheel}
          onMouseDown={handleMouseDown}
          onMouseMove={handleMouseMove}
          onMouseUp={handleMouseUp}
          onMouseLeave={handleMouseUp}
        >
          {loading ? (
            <div className="graph-loading">
              <div className="spinner"></div>
              <span>Resolving dependencies...</span>
            </div>
          ) : error ? (
            <div className="graph-error">
              <svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <circle cx="12" cy="12" r="10"></circle>
                <line x1="12" y1="8" x2="12" y2="12"></line>
                <line x1="12" y1="16" x2="12.01" y2="16"></line>
              </svg>
              <p>{error}</p>
            </div>
          ) : graphRoot ? (
            <div
              className="graph-canvas"
              style={{
                transform: `translate(${pan.x}px, ${pan.y}px) scale(${zoom})`,
                cursor: isDragging ? 'grabbing' : 'grab',
              }}
            >
              {renderNode(graphRoot)}
            </div>
          ) : (
            <div className="graph-empty">No dependencies to display</div>
          )}
        </div>

        {hoveredNode && (
          <div className="graph-tooltip">
            <strong>{hoveredNode.project}/{hoveredNode.package}</strong>
            {hoveredNode.version && <div>Version: {hoveredNode.version}</div>}
            <div>Size: {formatBytes(hoveredNode.size)}</div>
            <div className="tooltip-hint">Click to navigate</div>
          </div>
        )}
      </div>
    </div>
  );
}

export default DependencyGraph;
@@ -272,7 +272,7 @@
.footer {
  background: var(--bg-secondary);
  border-top: 1px solid var(--border-primary);
  padding: 12px 0;
  padding: 24px 0;
}

.footer-content {
@@ -284,11 +284,7 @@
.footer-brand {
  display: flex;
  align-items: center;
  gap: 8px;
  gap: 12px;
}

.footer-icon {
  color: var(--accent-primary);
}

.footer-logo {
@@ -296,10 +292,6 @@
  color: var(--text-primary);
}

.footer-separator {
  color: var(--text-muted);
}

.footer-tagline {
  color: var(--text-secondary);
  font-size: 0.875rem;
@@ -2,8 +2,6 @@ import { ReactNode, useState, useRef, useEffect } from 'react';
import { Link, NavLink, useLocation, useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { GlobalSearch } from './GlobalSearch';
import { listTeams } from '../api';
import { TeamDetail } from '../types';
import './Layout.css';

interface LayoutProps {
@@ -15,22 +13,8 @@ function Layout({ children }: LayoutProps) {
  const navigate = useNavigate();
  const { user, loading, logout } = useAuth();
  const [showUserMenu, setShowUserMenu] = useState(false);
  const [userTeams, setUserTeams] = useState<TeamDetail[]>([]);
  const menuRef = useRef<HTMLDivElement>(null);

  // Fetch user's teams
  useEffect(() => {
    if (user) {
      listTeams({ limit: 10 }).then(data => {
        setUserTeams(data.items);
      }).catch(() => {
        setUserTeams([]);
      });
    } else {
      setUserTeams([]);
    }
  }, [user]);

  // Close menu when clicking outside
  useEffect(() => {
    function handleClickOutside(event: MouseEvent) {
@@ -93,20 +77,6 @@ function Layout({ children }: LayoutProps) {
            </svg>
            Dashboard
          </Link>
          {user && userTeams.length > 0 && (
            <Link
              to={userTeams.length === 1 ? `/teams/${userTeams[0].slug}` : '/teams'}
              className={location.pathname.startsWith('/teams') ? 'active' : ''}
            >
              <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
                <circle cx="9" cy="7" r="4"/>
                <path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
                <path d="M16 3.13a4 4 0 0 1 0 7.75"/>
              </svg>
              {userTeams.length === 1 ? 'Team' : 'Teams'}
            </Link>
          )}
          <a href="/docs" className="nav-link-muted">
            <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
              <path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"/>
@@ -183,18 +153,6 @@ function Layout({ children }: LayoutProps) {
              </svg>
              SSO Configuration
            </NavLink>
            <NavLink
              to="/admin/cache"
              className="user-menu-item"
              onClick={() => setShowUserMenu(false)}
            >
              <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z"/>
                <polyline points="3.27 6.96 12 12.01 20.73 6.96"/>
                <line x1="12" y1="22.08" x2="12" y2="12"/>
              </svg>
              Cache Management
            </NavLink>
          </>
        )}
        <div className="user-menu-divider"></div>
@@ -230,21 +188,12 @@ function Layout({ children }: LayoutProps) {
      <footer className="footer">
        <div className="container footer-content">
          <div className="footer-brand">
            <svg className="footer-icon" width="18" height="18" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
              <path d="M6 14 Q6 8 3 8 Q6 4 6 4 Q6 4 9 8 Q6 8 6 14" fill="currentColor" opacity="0.6"/>
              <rect x="5.25" y="13" width="1.5" height="4" fill="currentColor" opacity="0.6"/>
              <path d="M12 12 Q12 5 8 5 Q12 1 12 1 Q12 1 16 5 Q12 5 12 12" fill="currentColor"/>
              <rect x="11.25" y="11" width="1.5" height="5" fill="currentColor"/>
              <path d="M18 14 Q18 8 15 8 Q18 4 18 4 Q18 4 21 8 Q18 8 18 14" fill="currentColor" opacity="0.6"/>
              <rect x="17.25" y="13" width="1.5" height="4" fill="currentColor" opacity="0.6"/>
              <ellipse cx="12" cy="19" rx="9" ry="1.5" fill="currentColor" opacity="0.3"/>
            </svg>
            <span className="footer-logo">Orchard</span>
            <span className="footer-separator">·</span>
            <span className="footer-tagline">Content-Addressable Storage</span>
            <span className="footer-tagline">The cache that never forgets</span>
          </div>
          <div className="footer-links">
            <a href="/docs">Documentation</a>
            <a href="/api/v1">API</a>
          </div>
        </div>
      </footer>
@@ -1,163 +0,0 @@
.team-selector {
  position: relative;
}

.team-selector-trigger {
  display: flex;
  align-items: center;
  gap: 0.5rem;
  padding: 0.375rem 0.75rem;
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  color: var(--text-primary);
  font-size: 0.875rem;
  cursor: pointer;
  transition: all 0.15s ease;
  min-width: 160px;
}

.team-selector-trigger:hover:not(:disabled) {
  background: var(--bg-tertiary);
  border-color: var(--border-secondary);
}

.team-selector-trigger:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

.team-selector-name {
  flex: 1;
  text-align: left;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

.team-selector-chevron {
  transition: transform 0.15s ease;
  flex-shrink: 0;
}

.team-selector-chevron.open {
  transform: rotate(180deg);
}

.team-selector-dropdown {
  position: absolute;
  top: 100%;
  left: 0;
  right: 0;
  min-width: 240px;
  margin-top: 0.25rem;
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  box-shadow: var(--shadow-lg);
  z-index: 100;
  overflow: hidden;
}

.team-selector-empty {
  padding: 1rem;
  text-align: center;
  color: var(--text-muted);
}

.team-selector-empty p {
  margin: 0 0 0.75rem;
  font-size: 0.875rem;
}

.team-selector-create-link {
  color: var(--accent-primary);
  font-size: 0.875rem;
  text-decoration: none;
}

.team-selector-create-link:hover {
  text-decoration: underline;
}

.team-selector-list {
  list-style: none;
  margin: 0;
  padding: 0.25rem 0;
  max-height: 280px;
  overflow-y: auto;
}

.team-selector-item {
  display: flex;
  align-items: center;
  gap: 0.5rem;
  width: 100%;
  padding: 0.5rem 0.75rem;
  background: none;
  border: none;
  color: var(--text-primary);
  font-size: 0.875rem;
  cursor: pointer;
  text-align: left;
  transition: background 0.1s ease;
}

.team-selector-item:hover {
  background: var(--bg-hover);
}

.team-selector-item.selected {
  background: rgba(16, 185, 129, 0.1);
}

.team-selector-item-info {
  flex: 1;
  min-width: 0;
}

.team-selector-item-name {
  display: block;
  font-weight: 500;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

.team-selector-item-meta {
  display: block;
  font-size: 0.75rem;
  color: var(--text-muted);
}

.team-selector-item-role {
  font-size: 0.75rem;
  text-transform: capitalize;
  flex-shrink: 0;
}

.team-selector-footer {
  display: flex;
  justify-content: space-between;
  padding: 0.5rem 0.75rem;
  border-top: 1px solid var(--border-primary);
  background: var(--bg-tertiary);
}

.team-selector-link {
  font-size: 0.8125rem;
  color: var(--text-muted);
  text-decoration: none;
}

.team-selector-link:hover {
  color: var(--text-primary);
}

.team-selector-link-primary {
  color: var(--accent-primary);
}

.team-selector-link-primary:hover {
  color: var(--accent-primary-hover);
}
@@ -1,141 +0,0 @@
import { useState, useRef, useEffect } from 'react';
import { Link } from 'react-router-dom';
import { useTeam } from '../contexts/TeamContext';
import { useAuth } from '../contexts/AuthContext';
import { TeamDetail } from '../types';
import './TeamSelector.css';

export function TeamSelector() {
  const { user } = useAuth();
  const { teams, currentTeam, loading, setCurrentTeam } = useTeam();
  const [isOpen, setIsOpen] = useState(false);
  const dropdownRef = useRef<HTMLDivElement>(null);

  // Close dropdown when clicking outside
  useEffect(() => {
    function handleClickOutside(event: MouseEvent) {
      if (dropdownRef.current && !dropdownRef.current.contains(event.target as Node)) {
        setIsOpen(false);
      }
    }
    document.addEventListener('mousedown', handleClickOutside);
    return () => document.removeEventListener('mousedown', handleClickOutside);
  }, []);

  // Don't show if not authenticated
  if (!user) {
    return null;
  }

  const handleTeamSelect = (team: TeamDetail) => {
    setCurrentTeam(team);
    setIsOpen(false);
  };

  const roleColors: Record<string, string> = {
    owner: 'var(--color-success)',
    admin: 'var(--color-primary)',
    member: 'var(--color-text-muted)',
  };

  return (
    <div className="team-selector" ref={dropdownRef}>
      <button
        className="team-selector-trigger"
        onClick={() => setIsOpen(!isOpen)}
        disabled={loading}
        aria-expanded={isOpen}
        aria-haspopup="listbox"
      >
        <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
          <path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
          <circle cx="9" cy="7" r="4"/>
          <path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
          <path d="M16 3.13a4 4 0 0 1 0 7.75"/>
        </svg>
        <span className="team-selector-name">
          {loading ? 'Loading...' : currentTeam?.name || 'Select Team'}
        </span>
        <svg
          className={`team-selector-chevron ${isOpen ? 'open' : ''}`}
          width="12"
          height="12"
          viewBox="0 0 24 24"
          fill="none"
          stroke="currentColor"
          strokeWidth="2"
        >
          <polyline points="6 9 12 15 18 9"/>
        </svg>
      </button>

      {isOpen && (
        <div className="team-selector-dropdown" role="listbox">
          {teams.length === 0 ? (
            <div className="team-selector-empty">
              <p>You're not a member of any teams yet.</p>
              <Link
                to="/teams/new"
                className="team-selector-create-link"
                onClick={() => setIsOpen(false)}
              >
                Create your first team
              </Link>
            </div>
          ) : (
            <>
              <ul className="team-selector-list">
                {teams.map(team => (
                  <li key={team.id}>
                    <button
                      className={`team-selector-item ${currentTeam?.id === team.id ? 'selected' : ''}`}
                      onClick={() => handleTeamSelect(team)}
                      role="option"
                      aria-selected={currentTeam?.id === team.id}
                    >
                      <div className="team-selector-item-info">
                        <span className="team-selector-item-name">{team.name}</span>
                        <span className="team-selector-item-meta">
                          {team.project_count} project{team.project_count !== 1 ? 's' : ''}
                        </span>
                      </div>
                      {team.user_role && (
                        <span
                          className="team-selector-item-role"
                          style={{ color: roleColors[team.user_role] || roleColors.member }}
                        >
                          {team.user_role}
                        </span>
                      )}
                      {currentTeam?.id === team.id && (
                        <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                          <polyline points="20 6 9 17 4 12"/>
                        </svg>
                      )}
                    </button>
                  </li>
                ))}
              </ul>
              <div className="team-selector-footer">
                <Link
                  to="/teams"
                  className="team-selector-link"
                  onClick={() => setIsOpen(false)}
                >
                  View all teams
                </Link>
                <Link
                  to="/teams/new"
                  className="team-selector-link team-selector-link-primary"
                  onClick={() => setIsOpen(false)}
                >
                  + New Team
                </Link>
              </div>
            </>
          )}
        </div>
      )}
    </div>
  );
}
@@ -1,105 +0,0 @@
.user-autocomplete {
  position: relative;
  width: 100%;
}

.user-autocomplete__input-wrapper {
  position: relative;
}

.user-autocomplete__input {
  width: 100%;
  padding: 0.625rem 2.5rem 0.625rem 0.75rem;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  background: var(--bg-tertiary);
  color: var(--text-primary);
  font-size: 0.875rem;
}

.user-autocomplete__input:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.2);
}

.user-autocomplete__spinner {
  position: absolute;
  right: 0.75rem;
  top: 50%;
  transform: translateY(-50%);
  width: 16px;
  height: 16px;
  border: 2px solid var(--border-primary);
  border-top-color: var(--accent-primary);
  border-radius: 50%;
  animation: spin 0.6s linear infinite;
}

@keyframes spin {
  to { transform: translateY(-50%) rotate(360deg); }
}

.user-autocomplete__dropdown {
  position: absolute;
  top: 100%;
  left: 0;
  right: 0;
  margin-top: 4px;
  padding: 0.25rem;
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  box-shadow: var(--shadow-lg);
  z-index: 100;
  max-height: 240px;
  overflow-y: auto;
  list-style: none;
}

.user-autocomplete__option {
  display: flex;
  align-items: center;
  gap: 0.75rem;
  padding: 0.5rem 0.75rem;
  border-radius: var(--radius-sm);
  cursor: pointer;
  transition: background 0.1s;
}

.user-autocomplete__option:hover,
.user-autocomplete__option.selected {
  background: var(--bg-hover);
}

.user-autocomplete__avatar {
  width: 32px;
  height: 32px;
  border-radius: 50%;
  background: var(--accent-primary);
  color: white;
  display: flex;
  align-items: center;
  justify-content: center;
  font-weight: 600;
  font-size: 0.875rem;
  flex-shrink: 0;
}

.user-autocomplete__user-info {
  display: flex;
  flex-direction: column;
  min-width: 0;
}

.user-autocomplete__username {
  font-weight: 500;
  color: var(--text-primary);
}

.user-autocomplete__admin-badge {
  font-size: 0.6875rem;
  color: var(--text-muted);
  text-transform: uppercase;
  letter-spacing: 0.025em;
}
@@ -1,171 +0,0 @@
import { useState, useEffect, useRef, useCallback } from 'react';
import { searchUsers, UserSearchResult } from '../api';
import './UserAutocomplete.css';

interface UserAutocompleteProps {
  value: string;
  onChange: (username: string) => void;
  placeholder?: string;
  disabled?: boolean;
  autoFocus?: boolean;
}

export function UserAutocomplete({
  value,
  onChange,
  placeholder = 'Search users...',
  disabled = false,
  autoFocus = false,
}: UserAutocompleteProps) {
  const [query, setQuery] = useState(value);
  const [results, setResults] = useState<UserSearchResult[]>([]);
  const [loading, setLoading] = useState(false);
  const [isOpen, setIsOpen] = useState(false);
  const [selectedIndex, setSelectedIndex] = useState(-1);
  const containerRef = useRef<HTMLDivElement>(null);
  const inputRef = useRef<HTMLInputElement>(null);
  const debounceRef = useRef<ReturnType<typeof setTimeout>>();

  // Search for users with debounce
  const doSearch = useCallback(async (searchQuery: string) => {
    if (searchQuery.length < 1) {
      setResults([]);
      setIsOpen(false);
      return;
    }

    setLoading(true);
    try {
      const users = await searchUsers(searchQuery);
      setResults(users);
      setIsOpen(users.length > 0);
      setSelectedIndex(-1);
    } catch {
      setResults([]);
      setIsOpen(false);
    } finally {
      setLoading(false);
    }
  }, []);

  // Handle input change with debounce
  const handleInputChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    const newValue = e.target.value;
    setQuery(newValue);
    onChange(newValue); // Update parent immediately for form validation

    // Debounce the search
    if (debounceRef.current) {
      clearTimeout(debounceRef.current);
    }
    debounceRef.current = setTimeout(() => {
      doSearch(newValue);
    }, 200);
  };

  // Handle selecting a user
  const handleSelect = (user: UserSearchResult) => {
    setQuery(user.username);
    onChange(user.username);
    setIsOpen(false);
    setResults([]);
    inputRef.current?.focus();
  };

  // Handle keyboard navigation
  const handleKeyDown = (e: React.KeyboardEvent) => {
    if (!isOpen) return;

    switch (e.key) {
      case 'ArrowDown':
        e.preventDefault();
        setSelectedIndex(prev => (prev < results.length - 1 ? prev + 1 : prev));
        break;
      case 'ArrowUp':
        e.preventDefault();
        setSelectedIndex(prev => (prev > 0 ? prev - 1 : -1));
        break;
      case 'Enter':
        e.preventDefault();
        if (selectedIndex >= 0 && results[selectedIndex]) {
          handleSelect(results[selectedIndex]);
        }
        break;
      case 'Escape':
        setIsOpen(false);
        break;
    }
  };

  // Close dropdown when clicking outside
  useEffect(() => {
    const handleClickOutside = (e: MouseEvent) => {
      if (containerRef.current && !containerRef.current.contains(e.target as Node)) {
        setIsOpen(false);
      }
    };

    document.addEventListener('mousedown', handleClickOutside);
    return () => document.removeEventListener('mousedown', handleClickOutside);
  }, []);

  // Sync external value changes
  useEffect(() => {
    setQuery(value);
  }, [value]);

  // Cleanup debounce on unmount
  useEffect(() => {
    return () => {
      if (debounceRef.current) {
        clearTimeout(debounceRef.current);
      }
    };
  }, []);

  return (
    <div className="user-autocomplete" ref={containerRef}>
      <div className="user-autocomplete__input-wrapper">
        <input
          ref={inputRef}
          type="text"
          value={query}
          onChange={handleInputChange}
          onKeyDown={handleKeyDown}
          onFocus={() => query.length >= 1 && results.length > 0 && setIsOpen(true)}
          placeholder={placeholder}
          disabled={disabled}
          autoFocus={autoFocus}
          autoComplete="off"
          className="user-autocomplete__input"
        />
        {loading && (
          <div className="user-autocomplete__spinner" />
        )}
      </div>

      {isOpen && results.length > 0 && (
        <ul className="user-autocomplete__dropdown">
          {results.map((user, index) => (
            <li
              key={user.id}
              className={`user-autocomplete__option ${index === selectedIndex ? 'selected' : ''}`}
              onClick={() => handleSelect(user)}
              onMouseEnter={() => setSelectedIndex(index)}
            >
              <div className="user-autocomplete__avatar">
                {user.username.charAt(0).toUpperCase()}
              </div>
              <div className="user-autocomplete__user-info">
                <span className="user-autocomplete__username">{user.username}</span>
                {user.is_admin && (
                  <span className="user-autocomplete__admin-badge">Admin</span>
                )}
              </div>
            </li>
          ))}
        </ul>
      )}
    </div>
  );
}
@@ -1,110 +0,0 @@
import { createContext, useContext, useState, useEffect, useCallback, ReactNode } from 'react';
import { TeamDetail } from '../types';
import { listTeams } from '../api';
import { useAuth } from './AuthContext';

const SELECTED_TEAM_KEY = 'orchard_selected_team';

interface TeamContextType {
  teams: TeamDetail[];
  currentTeam: TeamDetail | null;
  loading: boolean;
  error: string | null;
  setCurrentTeam: (team: TeamDetail | null) => void;
  refreshTeams: () => Promise<void>;
  clearError: () => void;
}

const TeamContext = createContext<TeamContextType | undefined>(undefined);

interface TeamProviderProps {
  children: ReactNode;
}

export function TeamProvider({ children }: TeamProviderProps) {
  const { user } = useAuth();
  const [teams, setTeams] = useState<TeamDetail[]>([]);
  const [currentTeam, setCurrentTeamState] = useState<TeamDetail | null>(null);
  const [loading, setLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);

  const loadTeams = useCallback(async () => {
    if (!user) {
      setTeams([]);
      setCurrentTeamState(null);
      return;
    }

    setLoading(true);
    setError(null);
    try {
      const response = await listTeams({ limit: 100 });
      setTeams(response.items);

      // Try to restore previously selected team
      const savedSlug = localStorage.getItem(SELECTED_TEAM_KEY);
      if (savedSlug) {
        const savedTeam = response.items.find(t => t.slug === savedSlug);
        if (savedTeam) {
          setCurrentTeamState(savedTeam);
          return;
        }
      }

      // Auto-select first team if none selected
      if (response.items.length > 0 && !currentTeam) {
        setCurrentTeamState(response.items[0]);
        localStorage.setItem(SELECTED_TEAM_KEY, response.items[0].slug);
      }
    } catch (err) {
      const message = err instanceof Error ? err.message : 'Failed to load teams';
      setError(message);
    } finally {
      setLoading(false);
    }
  }, [user, currentTeam]);

  // Load teams when user changes
  useEffect(() => {
    loadTeams();
  }, [user]); // eslint-disable-line react-hooks/exhaustive-deps

  const setCurrentTeam = useCallback((team: TeamDetail | null) => {
    setCurrentTeamState(team);
    if (team) {
      localStorage.setItem(SELECTED_TEAM_KEY, team.slug);
    } else {
      localStorage.removeItem(SELECTED_TEAM_KEY);
    }
  }, []);

  const refreshTeams = useCallback(async () => {
    await loadTeams();
  }, [loadTeams]);

  const clearError = useCallback(() => {
    setError(null);
  }, []);

  return (
    <TeamContext.Provider value={{
      teams,
      currentTeam,
      loading,
      error,
      setCurrentTeam,
      refreshTeams,
      clearError,
    }}>
      {children}
    </TeamContext.Provider>
  );
}

export function useTeam() {
  const context = useContext(TeamContext);
  if (context === undefined) {
    throw new Error('useTeam must be used within a TeamProvider');
  }
  return context;
}
@@ -1,371 +0,0 @@
.admin-cache-page {
  padding: 2rem;
  max-width: 1400px;
  margin: 0 auto;
}

.admin-cache-page h1 {
  margin-bottom: 2rem;
  color: var(--text-primary);
}

.admin-cache-page h2 {
  margin-bottom: 1rem;
  color: var(--text-primary);
  font-size: 1.25rem;
}

/* Success/Error Messages */
.success-message {
  padding: 0.75rem 1rem;
  background-color: #d4edda;
  border: 1px solid #c3e6cb;
  border-radius: 4px;
  color: #155724;
  margin-bottom: 1rem;
}

.error-message {
  padding: 0.75rem 1rem;
  background-color: #f8d7da;
  border: 1px solid #f5c6cb;
  border-radius: 4px;
  color: #721c24;
  margin-bottom: 1rem;
}

/* Sources Section */
.sources-section {
  background: var(--bg-secondary);
  border: 1px solid var(--border-color);
  border-radius: 8px;
  padding: 1.5rem;
}

.section-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 1rem;
}

.section-header h2 {
  margin: 0;
}

/* Sources Table */
.sources-table {
  width: 100%;
  border-collapse: collapse;
  background: var(--bg-primary);
  border-radius: 4px;
  overflow: hidden;
}

.sources-table th,
.sources-table td {
  padding: 0.75rem 1rem;
  text-align: center;
  border-bottom: 1px solid var(--border-color);
}

.sources-table th {
  background: var(--bg-tertiary);
  font-weight: 600;
  color: var(--text-secondary);
  font-size: 0.85rem;
  text-transform: uppercase;
}

.sources-table tr:last-child td {
  border-bottom: none;
}

.sources-table tr.disabled-row {
  opacity: 0.6;
}

.source-name {
  font-weight: 500;
  color: var(--text-primary);
  white-space: nowrap;
}

/* Name column should be left-aligned */
.sources-table td:first-child {
  text-align: left;
}

.url-cell {
  font-family: monospace;
  font-size: 0.9rem;
  max-width: 300px;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
  text-align: left;
}

/* Badges */
.env-badge,
.status-badge {
  display: inline-block;
  padding: 0.2rem 0.5rem;
  border-radius: 4px;
  font-size: 0.75rem;
  font-weight: 500;
  margin-left: 0.5rem;
}

.env-badge {
  background-color: #fff3e0;
  color: #e65100;
}

.status-badge.enabled {
  background-color: #e8f5e9;
  color: #2e7d32;
}

.status-badge.disabled {
  background-color: #ffebee;
  color: #c62828;
}

/* Actions */
.actions-cell {
  white-space: nowrap;
}

.actions-cell .btn {
  margin-right: 0.5rem;
}

.actions-cell .btn:last-child {
  margin-right: 0;
}

.test-cell {
  text-align: center;
  width: 2rem;
}

.test-dot {
  font-size: 1rem;
  cursor: default;
}

.test-dot.success {
  color: #2e7d32;
}

.test-dot.failure {
  color: #c62828;
  cursor: pointer;
}

.test-dot.failure:hover {
  color: #b71c1c;
}

.test-dot.testing {
  color: #1976d2;
  animation: pulse 1s infinite;
}

@keyframes pulse {
  0%, 100% { opacity: 1; }
  50% { opacity: 0.4; }
}

/* Error Modal */
.error-modal-content {
  background: var(--bg-primary);
  border-radius: 8px;
  padding: 2rem;
  width: 100%;
  max-width: 500px;
}

.error-modal-content h3 {
  margin-top: 0;
  color: #c62828;
}

.error-modal-content .error-details {
  background: var(--bg-tertiary);
  padding: 1rem;
  border-radius: 4px;
  font-family: monospace;
  font-size: 0.9rem;
  word-break: break-word;
  white-space: pre-wrap;
}

.error-modal-content .modal-actions {
  display: flex;
  justify-content: flex-end;
  margin-top: 1.5rem;
}

/* Buttons */
.btn {
  padding: 0.5rem 1rem;
  border: 1px solid var(--border-color);
  border-radius: 4px;
  background: var(--bg-primary);
  color: var(--text-primary);
  cursor: pointer;
  font-size: 0.875rem;
}

.btn:hover {
  background: var(--bg-tertiary);
}

.btn:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

.btn-primary {
  background-color: var(--color-primary);
  border-color: var(--color-primary);
  color: white;
}

.btn-primary:hover {
  background-color: var(--color-primary-hover);
}

.btn-danger {
  background-color: #dc3545;
  border-color: #dc3545;
  color: white;
}

.btn-danger:hover {
  background-color: #c82333;
}

.btn-sm {
  padding: 0.25rem 0.75rem;
  font-size: 0.8rem;
}

.btn-secondary {
  background-color: var(--bg-tertiary);
  border-color: var(--border-color);
  color: var(--text-primary);
  font-weight: 500;
}

.btn-secondary:hover {
  background-color: var(--bg-secondary);
  border-color: var(--text-secondary);
}

.empty-message {
  color: var(--text-secondary);
  font-style: italic;
  padding: 2rem;
  text-align: center;
}

/* Modal */
.modal-overlay {
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background: rgba(0, 0, 0, 0.5);
  display: flex;
  align-items: center;
  justify-content: center;
  z-index: 1000;
}

.modal-content {
  background: var(--bg-primary);
  border-radius: 8px;
  padding: 2rem;
  width: 100%;
  max-width: 600px;
  max-height: 90vh;
  overflow-y: auto;
}

.modal-content h2 {
  margin-top: 0;
}

/* Form */
.form-group {
  margin-bottom: 1rem;
}

.form-group label {
  display: block;
  margin-bottom: 0.5rem;
  font-weight: 500;
  color: var(--text-primary);
}

.form-group input,
.form-group select {
  width: 100%;
  padding: 0.5rem;
  border: 1px solid var(--border-color);
  border-radius: 4px;
  background: var(--bg-primary);
  color: var(--text-primary);
  font-size: 1rem;
}

.form-group input:focus,
.form-group select:focus {
  outline: none;
  border-color: var(--color-primary);
}

.form-row {
  display: flex;
  gap: 1rem;
}

.form-row .form-group {
  flex: 1;
}

.checkbox-group label {
  display: flex;
  align-items: center;
  gap: 0.5rem;
  cursor: pointer;
}

.checkbox-group input[type="checkbox"] {
  width: auto;
}

.help-text {
  display: block;
  font-size: 0.8rem;
  color: var(--text-secondary);
  margin-top: 0.25rem;
}

.form-actions {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-top: 1.5rem;
  padding-top: 1rem;
  border-top: 1px solid var(--border-color);
}

.form-actions-right {
  display: flex;
  gap: 0.5rem;
}
@@ -1,503 +0,0 @@
import { useState, useEffect } from 'react';
import { useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import {
  listUpstreamSources,
  createUpstreamSource,
  updateUpstreamSource,
  deleteUpstreamSource,
  testUpstreamSource,
} from '../api';
import { UpstreamSource, SourceType, AuthType } from '../types';
import './AdminCachePage.css';

const SOURCE_TYPES: SourceType[] = ['npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic'];
const AUTH_TYPES: AuthType[] = ['none', 'basic', 'bearer', 'api_key'];

function AdminCachePage() {
  const { user, loading: authLoading } = useAuth();
  const navigate = useNavigate();

  // Upstream sources state
  const [sources, setSources] = useState<UpstreamSource[]>([]);
  const [loadingSources, setLoadingSources] = useState(true);
  const [sourcesError, setSourcesError] = useState<string | null>(null);

  // Create/Edit form state
  const [showForm, setShowForm] = useState(false);
  const [editingSource, setEditingSource] = useState<UpstreamSource | null>(null);
  const [formData, setFormData] = useState({
    name: '',
    source_type: 'generic' as SourceType,
    url: '',
    enabled: true,
    auth_type: 'none' as AuthType,
    username: '',
    password: '',
    priority: 100,
  });
  const [formError, setFormError] = useState<string | null>(null);
  const [isSaving, setIsSaving] = useState(false);

  // Test result state
  const [testingId, setTestingId] = useState<string | null>(null);
  const [testResults, setTestResults] = useState<Record<string, { success: boolean; message: string }>>({});

  // Delete confirmation state
  const [deletingId, setDeletingId] = useState<string | null>(null);

  // Success message
  const [successMessage, setSuccessMessage] = useState<string | null>(null);

  // Error modal state
  const [showErrorModal, setShowErrorModal] = useState(false);
  const [selectedError, setSelectedError] = useState<{ sourceName: string; error: string } | null>(null);

  useEffect(() => {
    if (!authLoading && !user) {
      navigate('/login', { state: { from: '/admin/cache' } });
    }
  }, [user, authLoading, navigate]);

  useEffect(() => {
    if (user && user.is_admin) {
      loadSources();
    }
  }, [user]);

  useEffect(() => {
    if (successMessage) {
      const timer = setTimeout(() => setSuccessMessage(null), 3000);
      return () => clearTimeout(timer);
    }
  }, [successMessage]);

  async function loadSources() {
    setLoadingSources(true);
    setSourcesError(null);
    try {
      const data = await listUpstreamSources();
      setSources(data);
    } catch (err) {
      setSourcesError(err instanceof Error ? err.message : 'Failed to load sources');
    } finally {
      setLoadingSources(false);
    }
  }

  function openCreateForm() {
    setEditingSource(null);
    setFormData({
      name: '',
      source_type: 'generic',
      url: '',
      enabled: true,
      auth_type: 'none',
      username: '',
      password: '',
      priority: 100,
    });
    setFormError(null);
    setShowForm(true);
  }

  function openEditForm(source: UpstreamSource) {
    setEditingSource(source);
    setFormData({
      name: source.name,
      source_type: source.source_type,
      url: source.url,
      enabled: source.enabled,
      auth_type: source.auth_type,
      username: source.username || '',
      password: '',
      priority: source.priority,
    });
    setFormError(null);
    setShowForm(true);
  }

  async function handleFormSubmit(e: React.FormEvent) {
    e.preventDefault();
    if (!formData.name.trim()) {
      setFormError('Name is required');
      return;
    }
    if (!formData.url.trim()) {
      setFormError('URL is required');
      return;
    }

    setIsSaving(true);
    setFormError(null);

    try {
      let savedSourceId: string | null = null;

      if (editingSource) {
        // Update existing source
        await updateUpstreamSource(editingSource.id, {
          name: formData.name.trim(),
          source_type: formData.source_type,
          url: formData.url.trim(),
          enabled: formData.enabled,
          auth_type: formData.auth_type,
          username: formData.username.trim() || undefined,
          password: formData.password || undefined,
          priority: formData.priority,
        });
        savedSourceId = editingSource.id;
        setSuccessMessage('Source updated successfully');
      } else {
        // Create new source
        const newSource = await createUpstreamSource({
          name: formData.name.trim(),
          source_type: formData.source_type,
          url: formData.url.trim(),
          enabled: formData.enabled,
          auth_type: formData.auth_type,
          username: formData.username.trim() || undefined,
          password: formData.password || undefined,
          priority: formData.priority,
        });
        savedSourceId = newSource.id;
        setSuccessMessage('Source created successfully');
      }
      setShowForm(false);
      await loadSources();

      // Auto-test the source after save
      if (savedSourceId) {
        testSourceById(savedSourceId);
      }
    } catch (err) {
      setFormError(err instanceof Error ? err.message : 'Failed to save source');
    } finally {
      setIsSaving(false);
    }
  }

  async function handleDelete(source: UpstreamSource) {
    if (!window.confirm(`Delete upstream source "${source.name}"? This cannot be undone.`)) {
      return;
    }

    setDeletingId(source.id);
    try {
      await deleteUpstreamSource(source.id);
      setSuccessMessage(`Source "${source.name}" deleted`);
      await loadSources();
    } catch (err) {
      setSourcesError(err instanceof Error ? err.message : 'Failed to delete source');
    } finally {
      setDeletingId(null);
    }
  }

  async function handleTest(source: UpstreamSource) {
    testSourceById(source.id);
  }

  async function testSourceById(sourceId: string) {
    setTestingId(sourceId);
    setTestResults((prev) => ({ ...prev, [sourceId]: { success: true, message: 'Testing...' } }));

    try {
      const result = await testUpstreamSource(sourceId);
      setTestResults((prev) => ({
        ...prev,
        [sourceId]: {
          success: result.success,
          message: result.success
            ? `OK (${result.elapsed_ms}ms)`
            : result.error || `HTTP ${result.status_code}`,
        },
      }));
    } catch (err) {
      setTestResults((prev) => ({
        ...prev,
        [sourceId]: {
          success: false,
          message: err instanceof Error ? err.message : 'Test failed',
        },
      }));
    } finally {
      setTestingId(null);
    }
  }

  function showError(sourceName: string, error: string) {
    setSelectedError({ sourceName, error });
    setShowErrorModal(true);
  }

  if (authLoading) {
    return <div className="admin-cache-page">Loading...</div>;
  }

  if (!user?.is_admin) {
    return (
      <div className="admin-cache-page">
        <div className="error-message">Access denied. Admin privileges required.</div>
      </div>
    );
  }

  return (
    <div className="admin-cache-page">
      <h1>Upstream Sources</h1>

      {successMessage && <div className="success-message">{successMessage}</div>}

      {/* Upstream Sources Section */}
      <section className="sources-section">
        <div className="section-header">
          <button className="btn btn-primary" onClick={openCreateForm}>
            Add Source
          </button>
        </div>

        {loadingSources ? (
          <p>Loading sources...</p>
        ) : sourcesError ? (
          <div className="error-message">{sourcesError}</div>
        ) : sources.length === 0 ? (
          <p className="empty-message">No upstream sources configured.</p>
        ) : (
          <table className="sources-table">
            <thead>
              <tr>
                <th>Name</th>
                <th>Type</th>
                <th>URL</th>
                <th>Priority</th>
                <th>Status</th>
                <th>Test</th>
                <th>Actions</th>
              </tr>
            </thead>
            <tbody>
              {sources.map((source) => (
                <tr key={source.id} className={source.enabled ? '' : 'disabled-row'}>
                  <td>
                    <span className="source-name">{source.name}</span>
                    {source.source === 'env' && (
                      <span className="env-badge" title="Defined via environment variable">ENV</span>
                    )}
                  </td>
                  <td>{source.source_type}</td>
                  <td className="url-cell" title={source.url}>{source.url}</td>
                  <td>{source.priority}</td>
                  <td>
                    <span className={`status-badge ${source.enabled ? 'enabled' : 'disabled'}`}>
                      {source.enabled ? 'Enabled' : 'Disabled'}
                    </span>
                  </td>
                  <td className="test-cell">
                    {testingId === source.id ? (
                      <span className="test-dot testing" title="Testing...">●</span>
                    ) : testResults[source.id] ? (
                      testResults[source.id].success ? (
                        <span className="test-dot success" title={testResults[source.id].message}>●</span>
                      ) : (
                        <span
                          className="test-dot failure"
                          title="Click to see error"
                          onClick={() => showError(source.name, testResults[source.id].message)}
                        >●</span>
                      )
                    ) : null}
                  </td>
                  <td className="actions-cell">
                    <button
                      className="btn btn-sm btn-secondary"
                      onClick={() => handleTest(source)}
                      disabled={testingId === source.id}
                    >
                      Test
                    </button>
                    {source.source !== 'env' && (
                      <button className="btn btn-sm btn-secondary" onClick={() => openEditForm(source)}>
                        Edit
                      </button>
                    )}
                  </td>
                </tr>
              ))}
            </tbody>
          </table>
        )}
      </section>

      {/* Create/Edit Modal */}
      {showForm && (
        <div className="modal-overlay" onClick={() => setShowForm(false)}>
          <div className="modal-content" onClick={(e) => e.stopPropagation()}>
            <h2>{editingSource ? 'Edit Upstream Source' : 'Add Upstream Source'}</h2>
            <form onSubmit={handleFormSubmit}>
              {formError && <div className="error-message">{formError}</div>}

              <div className="form-group">
                <label htmlFor="name">Name</label>
                <input
                  type="text"
                  id="name"
                  value={formData.name}
                  onChange={(e) => setFormData({ ...formData, name: e.target.value })}
                  placeholder="e.g., npm-private"
                  required
                />
              </div>

              <div className="form-row">
                <div className="form-group">
                  <label htmlFor="source_type">Type</label>
                  <select
                    id="source_type"
                    value={formData.source_type}
                    onChange={(e) => setFormData({ ...formData, source_type: e.target.value as SourceType })}
                  >
                    {SOURCE_TYPES.map((type) => (
                      <option key={type} value={type}>
                        {type}
                      </option>
                    ))}
                  </select>
                </div>

                <div className="form-group">
                  <label htmlFor="priority">Priority</label>
                  <input
                    type="number"
                    id="priority"
                    value={formData.priority}
                    onChange={(e) => setFormData({ ...formData, priority: parseInt(e.target.value) || 100 })}
                    min="1"
                  />
                  <span className="help-text">Lower = higher priority</span>
                </div>
              </div>

              <div className="form-group">
                <label htmlFor="url">URL</label>
                <input
                  type="url"
                  id="url"
                  value={formData.url}
                  onChange={(e) => setFormData({ ...formData, url: e.target.value })}
                  placeholder="https://registry.example.com"
                  required
                />
              </div>

              <div className="form-row">
                <div className="form-group checkbox-group">
                  <label>
                    <input
                      type="checkbox"
                      checked={formData.enabled}
                      onChange={(e) => setFormData({ ...formData, enabled: e.target.checked })}
                    />
                    Enabled
                  </label>
                </div>
              </div>

              <div className="form-group">
                <label htmlFor="auth_type">Authentication</label>
                <select
                  id="auth_type"
                  value={formData.auth_type}
                  onChange={(e) => setFormData({ ...formData, auth_type: e.target.value as AuthType })}
                >
                  {AUTH_TYPES.map((type) => (
                    <option key={type} value={type}>
                      {type === 'none' ? 'None' : type === 'api_key' ? 'API Key' : type.charAt(0).toUpperCase() + type.slice(1)}
                    </option>
                  ))}
                </select>
              </div>

              {formData.auth_type !== 'none' && (
                <div className="form-row">
                  {(formData.auth_type === 'basic' || formData.auth_type === 'api_key') && (
                    <div className="form-group">
                      <label htmlFor="username">{formData.auth_type === 'api_key' ? 'Header Name' : 'Username'}</label>
                      <input
                        type="text"
                        id="username"
                        value={formData.username}
                        onChange={(e) => setFormData({ ...formData, username: e.target.value })}
                        placeholder={formData.auth_type === 'api_key' ? 'X-API-Key' : 'username'}
                      />
                    </div>
                  )}
                  <div className="form-group">
                    <label htmlFor="password">
                      {formData.auth_type === 'bearer'
                        ? 'Token'
                        : formData.auth_type === 'api_key'
                          ? 'API Key Value'
                          : 'Password'}
                    </label>
                    <input
                      type="password"
                      id="password"
                      value={formData.password}
                      onChange={(e) => setFormData({ ...formData, password: e.target.value })}
                      placeholder={editingSource ? '(unchanged)' : ''}
                    />
                    {editingSource && (
                      <span className="help-text">Leave empty to keep existing {formData.auth_type === 'bearer' ? 'token' : 'credentials'}</span>
                    )}
                  </div>
                </div>
              )}

              <div className="form-actions">
                {editingSource && (
                  <button
                    type="button"
                    className="btn btn-danger"
                    onClick={() => {
                      handleDelete(editingSource);
                      setShowForm(false);
                    }}
                    disabled={deletingId === editingSource.id}
                  >
                    {deletingId === editingSource.id ? 'Deleting...' : 'Delete'}
                  </button>
                )}
                <div className="form-actions-right">
                  <button type="button" className="btn" onClick={() => setShowForm(false)}>
                    Cancel
                  </button>
                  <button type="submit" className="btn btn-primary" disabled={isSaving}>
                    {isSaving ? 'Saving...' : editingSource ? 'Update' : 'Create'}
                  </button>
                </div>
              </div>
            </form>
          </div>
        </div>
      )}

      {/* Error Details Modal */}
      {showErrorModal && selectedError && (
        <div className="modal-overlay" onClick={() => setShowErrorModal(false)}>
|
|
||||||
<div className="error-modal-content" onClick={(e) => e.stopPropagation()}>
|
|
||||||
<h3>Connection Error: {selectedError.sourceName}</h3>
|
|
||||||
<div className="error-details">{selectedError.error}</div>
|
|
||||||
<div className="modal-actions">
|
|
||||||
<button className="btn" onClick={() => setShowErrorModal(false)}>
|
|
||||||
Close
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export default AdminCachePage;
|
|
||||||
@@ -358,12 +358,6 @@
gap: 4px;
}

-.page-header__actions {
-display: flex;
-align-items: center;
-gap: 12px;
-}
-
/* Package card styles */
.package-card__header {
display: flex;
@@ -493,16 +487,3 @@
gap: 6px;
flex-wrap: wrap;
}
-
-/* Cell name styles */
-.cell-name {
-display: flex;
-align-items: center;
-gap: 8px;
-}
-
-/* System project badge */
-.system-badge {
-font-size: 0.7rem;
-padding: 2px 6px;
-}

@@ -179,18 +179,16 @@ function Home() {
</form>
)}

-{user && (
<div className="list-controls">
<FilterDropdown
label="Visibility"
options={VISIBILITY_OPTIONS}
value={visibility}
onChange={handleVisibilityChange}
/>
</div>
-)}

-{user && hasActiveFilters && (
+{hasActiveFilters && (
<FilterChipGroup onClearAll={clearFilters}>
{visibility && (
<FilterChip
@@ -224,9 +222,6 @@ function Home() {
<span className="cell-name">
{!project.is_public && <LockIcon />}
{project.name}
-{project.is_system && (
-<Badge variant="warning" className="system-badge">Cache</Badge>
-)}
</span>
),
},

@@ -127,12 +127,6 @@ h2 {
font-size: 0.75rem;
}

-/* Action buttons in table */
-.action-buttons {
-display: flex;
-gap: 8px;
-}
-
/* Download by Artifact ID Section */
.download-by-id-section {
margin-top: 32px;
@@ -430,340 +424,6 @@ tr:hover .copy-btn {
white-space: nowrap;
}
-
-/* Dependencies Section */
-.dependencies-section {
-margin-top: 32px;
-background: var(--bg-secondary);
-}
-
-.dependencies-header {
-display: flex;
-align-items: center;
-justify-content: space-between;
-margin-bottom: 12px;
-}
-
-.dependencies-header h3 {
-margin: 0;
-color: var(--text-primary);
-font-size: 1rem;
-font-weight: 600;
-}
-
-.dependencies-controls {
-display: flex;
-align-items: center;
-gap: 8px;
-}
-
-.dependencies-controls .btn {
-display: inline-flex;
-align-items: center;
-}
-
-.dependencies-tag-select {
-margin-bottom: 16px;
-}
-
-.tag-selector {
-padding: 8px 12px;
-background: var(--bg-tertiary);
-border: 1px solid var(--border-primary);
-border-radius: var(--radius-md);
-color: var(--text-primary);
-font-size: 0.875rem;
-cursor: pointer;
-min-width: 200px;
-}
-
-.tag-selector:focus {
-outline: none;
-border-color: var(--accent-primary);
-}
-
-.deps-loading {
-color: var(--text-muted);
-font-size: 0.875rem;
-padding: 16px 0;
-}
-
-.deps-error {
-color: var(--error-color, #ef4444);
-font-size: 0.875rem;
-padding: 12px 16px;
-background: rgba(239, 68, 68, 0.1);
-border-radius: var(--radius-md);
-}
-
-.deps-empty {
-color: var(--text-muted);
-font-size: 0.875rem;
-padding: 16px 0;
-}
-
-.deps-summary {
-color: var(--text-secondary);
-font-size: 0.875rem;
-margin-bottom: 12px;
-}
-
-.deps-summary strong {
-color: var(--accent-primary);
-}
-
-.deps-items {
-list-style: none;
-margin: 0;
-padding: 0;
-display: flex;
-flex-direction: column;
-gap: 8px;
-}
-
-.dep-item {
-display: flex;
-align-items: center;
-gap: 12px;
-padding: 12px 16px;
-background: var(--bg-tertiary);
-border-radius: var(--radius-md);
-border: 1px solid var(--border-primary);
-}
-
-.dep-link {
-color: var(--accent-primary);
-font-weight: 500;
-text-decoration: none;
-font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
-font-size: 0.875rem;
-}
-
-.dep-link:hover {
-text-decoration: underline;
-}
-
-.dep-constraint {
-color: var(--text-muted);
-font-size: 0.8125rem;
-font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
-}
-
-.dep-status {
-margin-left: auto;
-font-size: 0.875rem;
-font-weight: 600;
-}
-
-.dep-status--ok {
-color: var(--success-color, #10b981);
-}
-
-.dep-status--missing {
-color: var(--warning-color, #f59e0b);
-}
-
-/* Tag name link in table */
-.tag-name-link {
-color: var(--accent-primary);
-transition: opacity var(--transition-fast);
-}
-
-.tag-name-link:hover {
-opacity: 0.8;
-}
-
-.tag-name-link.selected {
-text-decoration: underline;
-}
-
-/* Used By (Reverse Dependencies) Section */
-.used-by-section {
-margin-top: 32px;
-background: var(--bg-secondary);
-}
-
-.used-by-section h3 {
-margin-bottom: 16px;
-color: var(--text-primary);
-font-size: 1rem;
-font-weight: 600;
-}
-
-.reverse-dep-item {
-display: flex;
-align-items: center;
-gap: 12px;
-flex-wrap: wrap;
-}
-
-.dep-version {
-color: var(--accent-primary);
-font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
-font-size: 0.8125rem;
-background: rgba(16, 185, 129, 0.1);
-padding: 2px 8px;
-border-radius: var(--radius-sm);
-}
-
-.dep-requires {
-color: var(--text-muted);
-font-size: 0.8125rem;
-font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
-margin-left: auto;
-}
-
-.reverse-deps-pagination {
-display: flex;
-align-items: center;
-justify-content: center;
-gap: 16px;
-margin-top: 16px;
-padding-top: 16px;
-border-top: 1px solid var(--border-primary);
-}
-
-.pagination-info {
-color: var(--text-secondary);
-font-size: 0.875rem;
-}
-
-/* Ensure File Modal */
-.modal-overlay {
-position: fixed;
-top: 0;
-left: 0;
-right: 0;
-bottom: 0;
-background: rgba(0, 0, 0, 0.7);
-display: flex;
-align-items: center;
-justify-content: center;
-z-index: 1000;
-padding: 20px;
-}
-
-.ensure-file-modal {
-background: var(--bg-secondary);
-border: 1px solid var(--border-primary);
-border-radius: var(--radius-lg);
-max-width: 700px;
-width: 100%;
-max-height: 80vh;
-display: flex;
-flex-direction: column;
-box-shadow: 0 20px 50px rgba(0, 0, 0, 0.5);
-}
-
-.ensure-file-header {
-display: flex;
-align-items: center;
-justify-content: space-between;
-padding: 16px 20px;
-border-bottom: 1px solid var(--border-primary);
-}
-
-.ensure-file-header h3 {
-margin: 0;
-color: var(--text-primary);
-font-size: 1rem;
-font-weight: 600;
-}
-
-.ensure-file-actions {
-display: flex;
-align-items: center;
-gap: 8px;
-}
-
-.ensure-file-actions .copy-btn {
-opacity: 1;
-width: 32px;
-height: 32px;
-}
-
-.modal-close {
-display: flex;
-align-items: center;
-justify-content: center;
-width: 32px;
-height: 32px;
-padding: 0;
-background: transparent;
-border: none;
-border-radius: var(--radius-sm);
-color: var(--text-muted);
-cursor: pointer;
-transition: all var(--transition-fast);
-}
-
-.modal-close:hover {
-background: var(--bg-hover);
-color: var(--text-primary);
-}
-
-.ensure-file-content {
-flex: 1;
-overflow: auto;
-padding: 20px;
-}
-
-.ensure-file-loading {
-color: var(--text-muted);
-text-align: center;
-padding: 40px 20px;
-}
-
-.ensure-file-error {
-color: var(--error-color, #ef4444);
-padding: 16px;
-background: rgba(239, 68, 68, 0.1);
-border-radius: var(--radius-md);
-}
-
-.ensure-file-empty {
-color: var(--text-muted);
-text-align: center;
-padding: 40px 20px;
-font-style: italic;
-}
-
-.ensure-file-yaml {
-margin: 0;
-padding: 16px;
-background: #0d0d0f;
-border: 1px solid var(--border-primary);
-border-radius: var(--radius-md);
-overflow-x: auto;
-}
-
-.ensure-file-yaml code {
-font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
-font-size: 0.8125rem;
-color: #e2e8f0;
-white-space: pre;
-}
-
-.ensure-file-footer {
-padding: 16px 20px;
-border-top: 1px solid var(--border-primary);
-background: var(--bg-tertiary);
-border-radius: 0 0 var(--radius-lg) var(--radius-lg);
-}
-
-.ensure-file-hint {
-margin: 0;
-color: var(--text-muted);
-font-size: 0.8125rem;
-}
-
-.ensure-file-hint code {
-background: rgba(0, 0, 0, 0.2);
-padding: 2px 6px;
-border-radius: var(--radius-sm);
-font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
-color: var(--accent-primary);
-}

/* Responsive adjustments */
@media (max-width: 768px) {
.upload-form {
@@ -779,18 +439,4 @@ tr:hover .copy-btn {
flex-wrap: wrap;
gap: 12px;
}
-
-.dependencies-header {
-flex-direction: column;
-align-items: flex-start;
-gap: 12px;
-}
-
-.tag-selector {
-width: 100%;
-}
-
-.ensure-file-modal {
-max-height: 90vh;
-}
}

@@ -1,7 +1,7 @@
import { useState, useEffect, useCallback } from 'react';
-import { useParams, useSearchParams, useNavigate, useLocation, Link } from 'react-router-dom';
+import { useParams, useSearchParams, useNavigate, useLocation } from 'react-router-dom';
-import { TagDetail, Package, PaginatedResponse, AccessLevel, Dependency, DependentInfo } from '../types';
+import { TagDetail, Package, PaginatedResponse, AccessLevel } from '../types';
-import { listTags, getDownloadUrl, getPackage, getMyProjectAccess, createTag, getArtifactDependencies, getReverseDependencies, getEnsureFile, UnauthorizedError, ForbiddenError } from '../api';
+import { listTags, getDownloadUrl, getPackage, getMyProjectAccess, createTag, UnauthorizedError, ForbiddenError } from '../api';
import { Breadcrumb } from '../components/Breadcrumb';
import { Badge } from '../components/Badge';
import { SearchInput } from '../components/SearchInput';
@@ -10,7 +10,6 @@ import { DataTable } from '../components/DataTable';
import { Pagination } from '../components/Pagination';
import { DragDropUpload, UploadResult } from '../components/DragDropUpload';
import { useAuth } from '../contexts/AuthContext';
-import DependencyGraph from '../components/DependencyGraph';
import './Home.css';
import './PackagePage.css';

@@ -69,30 +68,6 @@ function PackagePage() {
const [createTagArtifactId, setCreateTagArtifactId] = useState('');
const [createTagLoading, setCreateTagLoading] = useState(false);

-// Dependencies state
-const [selectedTag, setSelectedTag] = useState<TagDetail | null>(null);
-const [dependencies, setDependencies] = useState<Dependency[]>([]);
-const [depsLoading, setDepsLoading] = useState(false);
-const [depsError, setDepsError] = useState<string | null>(null);
-
-// Reverse dependencies state
-const [reverseDeps, setReverseDeps] = useState<DependentInfo[]>([]);
-const [reverseDepsLoading, setReverseDepsLoading] = useState(false);
-const [reverseDepsError, setReverseDepsError] = useState<string | null>(null);
-const [reverseDepsPage, setReverseDepsPage] = useState(1);
-const [reverseDepsTotal, setReverseDepsTotal] = useState(0);
-const [reverseDepsHasMore, setReverseDepsHasMore] = useState(false);
-
-// Dependency graph modal state
-const [showGraph, setShowGraph] = useState(false);
-
-// Ensure file modal state
-const [showEnsureFile, setShowEnsureFile] = useState(false);
-const [ensureFileContent, setEnsureFileContent] = useState<string | null>(null);
-const [ensureFileLoading, setEnsureFileLoading] = useState(false);
-const [ensureFileError, setEnsureFileError] = useState<string | null>(null);
-const [ensureFileTagName, setEnsureFileTagName] = useState<string | null>(null);
-
// Derived permissions
const canWrite = accessLevel === 'write' || accessLevel === 'admin';

@@ -153,98 +128,6 @@ function PackagePage() {
loadData();
}, [loadData]);

-// Auto-select tag when tags are loaded (prefer version from URL, then first tag)
-// Re-run when package changes to pick up new tags
-useEffect(() => {
-if (tagsData?.items && tagsData.items.length > 0) {
-const versionParam = searchParams.get('version');
-if (versionParam) {
-// Find tag matching the version parameter
-const matchingTag = tagsData.items.find(t => t.version === versionParam);
-if (matchingTag) {
-setSelectedTag(matchingTag);
-setDependencies([]);
-return;
-}
-}
-// Fall back to first tag
-setSelectedTag(tagsData.items[0]);
-setDependencies([]);
-}
-}, [tagsData, searchParams, projectName, packageName]);
-
-// Fetch dependencies when selected tag changes
-const fetchDependencies = useCallback(async (artifactId: string) => {
-setDepsLoading(true);
-setDepsError(null);
-try {
-const result = await getArtifactDependencies(artifactId);
-setDependencies(result.dependencies);
-} catch (err) {
-setDepsError(err instanceof Error ? err.message : 'Failed to load dependencies');
-setDependencies([]);
-} finally {
-setDepsLoading(false);
-}
-}, []);
-
-useEffect(() => {
-if (selectedTag) {
-fetchDependencies(selectedTag.artifact_id);
-}
-}, [selectedTag, fetchDependencies]);
-
-// Fetch reverse dependencies
-const fetchReverseDeps = useCallback(async (pageNum: number = 1) => {
-if (!projectName || !packageName) return;
-
-setReverseDepsLoading(true);
-setReverseDepsError(null);
-try {
-const result = await getReverseDependencies(projectName, packageName, { page: pageNum, limit: 10 });
-setReverseDeps(result.dependents);
-setReverseDepsTotal(result.pagination.total);
-setReverseDepsHasMore(result.pagination.has_more);
-setReverseDepsPage(pageNum);
-} catch (err) {
-setReverseDepsError(err instanceof Error ? err.message : 'Failed to load reverse dependencies');
-setReverseDeps([]);
-} finally {
-setReverseDepsLoading(false);
-}
-}, [projectName, packageName]);
-
-useEffect(() => {
-if (projectName && packageName && !loading) {
-fetchReverseDeps(1);
-}
-}, [projectName, packageName, loading, fetchReverseDeps]);
-
-// Fetch ensure file for a specific tag
-const fetchEnsureFileForTag = useCallback(async (tagName: string) => {
-if (!projectName || !packageName) return;
-
-setEnsureFileTagName(tagName);
-setEnsureFileLoading(true);
-setEnsureFileError(null);
-try {
-const content = await getEnsureFile(projectName, packageName, tagName);
-setEnsureFileContent(content);
-setShowEnsureFile(true);
-} catch (err) {
-setEnsureFileError(err instanceof Error ? err.message : 'Failed to load ensure file');
-setShowEnsureFile(true);
-} finally {
-setEnsureFileLoading(false);
-}
-}, [projectName, packageName]);
-
-// Fetch ensure file for selected tag
-const fetchEnsureFile = useCallback(async () => {
-if (!selectedTag) return;
-fetchEnsureFileForTag(selectedTag.name);
-}, [selectedTag, fetchEnsureFileForTag]);
-
// Keyboard navigation - go back with backspace
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
@@ -319,24 +202,12 @@ function PackagePage() {
const tags = tagsData?.items || [];
const pagination = tagsData?.pagination;

-const handleTagSelect = (tag: TagDetail) => {
-setSelectedTag(tag);
-};
-
const columns = [
{
key: 'name',
header: 'Tag',
sortable: true,
-render: (t: TagDetail) => (
-<strong
-className={`tag-name-link ${selectedTag?.id === t.id ? 'selected' : ''}`}
-onClick={() => handleTagSelect(t)}
-style={{ cursor: 'pointer' }}
->
-{t.name}
-</strong>
-),
+render: (t: TagDetail) => <strong>{t.name}</strong>,
},
{
key: 'version',
@@ -390,22 +261,13 @@ function PackagePage() {
key: 'actions',
header: 'Actions',
render: (t: TagDetail) => (
-<div className="action-buttons">
-<button
-className="btn btn-secondary btn-small"
-onClick={() => fetchEnsureFileForTag(t.name)}
-title="View orchard.ensure file"
->
-Ensure
-</button>
<a
href={getDownloadUrl(projectName!, packageName!, t.name)}
className="btn btn-secondary btn-small"
download
>
Download
</a>
-</div>
),
},
];
@@ -577,166 +439,6 @@ function PackagePage() {
/>
)}

-{/* Dependencies Section */}
-{tags.length > 0 && (
-<div className="dependencies-section card">
-<div className="dependencies-header">
-<h3>Dependencies</h3>
-<div className="dependencies-controls">
-{selectedTag && (
-<>
-<button
-className="btn btn-secondary btn-small"
-onClick={fetchEnsureFile}
-disabled={ensureFileLoading}
-title="View orchard.ensure file"
->
-<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" style={{ marginRight: '6px' }}>
-<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path>
-<polyline points="14 2 14 8 20 8"></polyline>
-<line x1="16" y1="13" x2="8" y2="13"></line>
-<line x1="16" y1="17" x2="8" y2="17"></line>
-<polyline points="10 9 9 9 8 9"></polyline>
-</svg>
-{ensureFileLoading ? 'Loading...' : 'View Ensure File'}
-</button>
-<button
-className="btn btn-secondary btn-small"
-onClick={() => setShowGraph(true)}
-title="View full dependency tree"
->
-<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" style={{ marginRight: '6px' }}>
-<circle cx="12" cy="12" r="3"></circle>
-<circle cx="4" cy="4" r="2"></circle>
-<circle cx="20" cy="4" r="2"></circle>
-<circle cx="4" cy="20" r="2"></circle>
-<circle cx="20" cy="20" r="2"></circle>
-<line x1="9.5" y1="9.5" x2="5.5" y2="5.5"></line>
-<line x1="14.5" y1="9.5" x2="18.5" y2="5.5"></line>
-<line x1="9.5" y1="14.5" x2="5.5" y2="18.5"></line>
-<line x1="14.5" y1="14.5" x2="18.5" y2="18.5"></line>
-</svg>
-View Graph
-</button>
-</>
-)}
-</div>
-</div>
-<div className="dependencies-tag-select">
-{selectedTag && (
-<select
-className="tag-selector"
-value={selectedTag.id}
-onChange={(e) => {
-const tag = tags.find(t => t.id === e.target.value);
-if (tag) setSelectedTag(tag);
-}}
->
-{tags.map(t => (
-<option key={t.id} value={t.id}>
-{t.name}{t.version ? ` (${t.version})` : ''}
-</option>
-))}
-</select>
-)}
-</div>
-
-{depsLoading ? (
-<div className="deps-loading">Loading dependencies...</div>
-) : depsError ? (
-<div className="deps-error">{depsError}</div>
-) : dependencies.length === 0 ? (
-<div className="deps-empty">
-{selectedTag ? (
-<span><strong>{selectedTag.name}</strong> has no dependencies</span>
-) : (
-<span>No dependencies</span>
-)}
-</div>
-) : (
-<div className="deps-list">
-<div className="deps-summary">
-<strong>{selectedTag?.name}</strong> has {dependencies.length} {dependencies.length === 1 ? 'dependency' : 'dependencies'}:
-</div>
-<ul className="deps-items">
-{dependencies.map((dep) => (
-<li key={dep.id} className="dep-item">
-<Link
-to={`/project/${dep.project}/${dep.package}`}
-className="dep-link"
->
-{dep.project}/{dep.package}
-</Link>
-<span className="dep-constraint">
-@ {dep.version || dep.tag}
-</span>
-<span className="dep-status dep-status--ok" title="Package exists">
-✓
-</span>
-</li>
-))}
-</ul>
-</div>
-)}
-</div>
-)}
-
-{/* Used By (Reverse Dependencies) Section */}
-<div className="used-by-section card">
-<h3>Used By</h3>
-
-{reverseDepsLoading ? (
-<div className="deps-loading">Loading reverse dependencies...</div>
-) : reverseDepsError ? (
-<div className="deps-error">{reverseDepsError}</div>
-) : reverseDeps.length === 0 ? (
-<div className="deps-empty">No packages depend on this package</div>
-) : (
-<div className="reverse-deps-list">
-<div className="deps-summary">
-{reverseDepsTotal} {reverseDepsTotal === 1 ? 'package depends' : 'packages depend'} on this:
-</div>
-<ul className="deps-items">
-{reverseDeps.map((dep) => (
-<li key={dep.artifact_id} className="dep-item reverse-dep-item">
-<Link
-to={`/project/${dep.project}/${dep.package}${dep.version ? `?version=${dep.version}` : ''}`}
-className="dep-link"
->
-{dep.project}/{dep.package}
-{dep.version && (
-<span className="dep-version">v{dep.version}</span>
-)}
-</Link>
-<span className="dep-requires">
-requires @ {dep.constraint_value}
-</span>
-</li>
-))}
-</ul>
-{(reverseDepsHasMore || reverseDepsPage > 1) && (
-<div className="reverse-deps-pagination">
-<button
-className="btn btn-secondary btn-small"
-onClick={() => fetchReverseDeps(reverseDepsPage - 1)}
-disabled={reverseDepsPage <= 1 || reverseDepsLoading}
->
-Previous
-</button>
-<span className="pagination-info">Page {reverseDepsPage}</span>
-<button
-className="btn btn-secondary btn-small"
-onClick={() => fetchReverseDeps(reverseDepsPage + 1)}
-disabled={!reverseDepsHasMore || reverseDepsLoading}
->
-Next
-</button>
-</div>
-)}
-</div>
-)}
-</div>
-
<div className="download-by-id-section card">
<h3>Download by Artifact ID</h3>
<div className="download-by-id-form">
@@ -820,58 +522,6 @@ function PackagePage() {
<code>curl -O {window.location.origin}/api/v1/project/{projectName}/{packageName}/+/v1.0.0</code>
</pre>
</div>

-{/* Dependency Graph Modal */}
-{showGraph && selectedTag && (
-<DependencyGraph
-projectName={projectName!}
-packageName={packageName!}
-tagName={selectedTag.name}
-onClose={() => setShowGraph(false)}
-/>
-)}
-
-{/* Ensure File Modal */}
-{showEnsureFile && (
-<div className="modal-overlay" onClick={() => setShowEnsureFile(false)}>
-<div className="ensure-file-modal" onClick={(e) => e.stopPropagation()}>
-<div className="ensure-file-header">
-<h3>orchard.ensure for {ensureFileTagName}</h3>
-<div className="ensure-file-actions">
-{ensureFileContent && (
-<CopyButton text={ensureFileContent} />
-)}
-<button
-className="modal-close"
-onClick={() => setShowEnsureFile(false)}
-title="Close"
->
-<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
-<line x1="18" y1="6" x2="6" y2="18"></line>
-<line x1="6" y1="6" x2="18" y2="18"></line>
-</svg>
-</button>
-</div>
-</div>
-<div className="ensure-file-content">
-{ensureFileLoading ? (
-<div className="ensure-file-loading">Loading...</div>
-) : ensureFileError ? (
-<div className="ensure-file-error">{ensureFileError}</div>
-) : ensureFileContent ? (
-<pre className="ensure-file-yaml"><code>{ensureFileContent}</code></pre>
-) : (
-<div className="ensure-file-empty">No dependencies defined for this artifact.</div>
-)}
-</div>
-<div className="ensure-file-footer">
-<p className="ensure-file-hint">
-Save this as <code>orchard.ensure</code> in your project root to declare dependencies.
-</p>
-</div>
-</div>
-</div>
-)}
</div>
);
}

@@ -8,6 +8,7 @@ import { DataTable } from '../components/DataTable';
import { SearchInput } from '../components/SearchInput';
import { FilterChip, FilterChipGroup } from '../components/FilterChip';
import { Pagination } from '../components/Pagination';
+import { AccessManagement } from '../components/AccessManagement';
import { useAuth } from '../contexts/AuthContext';
import './Home.css';

@@ -195,9 +196,6 @@ function ProjectPage() {
<Badge variant={project.is_public ? 'public' : 'private'}>
{project.is_public ? 'Public' : 'Private'}
</Badge>
-{project.is_system && (
-<Badge variant="warning">System Cache</Badge>
-)}
{accessLevel && (
<Badge variant={accessLevel === 'admin' ? 'success' : accessLevel === 'write' ? 'info' : 'default'}>
{isOwner ? 'Owner' : accessLevel.charAt(0).toUpperCase() + accessLevel.slice(1)}
@@ -213,30 +211,15 @@ function ProjectPage() {
<span className="meta-item">by {project.created_by}</span>
</div>
</div>
-<div className="page-header__actions">
-{canAdmin && !project.team_id && (
-<button
-className="btn btn-secondary"
-onClick={() => navigate(`/project/${projectName}/settings`)}
-title="Project Settings"
->
-<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round">
-<circle cx="12" cy="12" r="3" />
-<path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z" />
-</svg>
-Settings
-</button>
-)}
{canWrite ? (
<button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
{showForm ? 'Cancel' : '+ New Package'}
</button>
) : user ? (
<span className="text-muted" title="You have read-only access to this project">
Read-only access
</span>
) : null}
-</div>
</div>

{error && <div className="error-message">{error}</div>}
@@ -388,6 +371,10 @@ function ProjectPage() {
onPageChange={handlePageChange}
/>
)}
+
+{canAdmin && projectName && (
+<AccessManagement projectName={projectName} />
+)}
</div>
);
}

@@ -1,476 +0,0 @@
.project-settings-page {
max-width: 900px;
margin: 0 auto;
}

.project-settings-header {
margin-bottom: 32px;
}

.project-settings-header h1 {
font-size: 1.75rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 8px;
letter-spacing: -0.02em;
}

.project-settings-subtitle {
color: var(--text-tertiary);
font-size: 0.9375rem;
}

.project-settings-loading {
display: flex;
align-items: center;
justify-content: center;
gap: 12px;
padding: 64px 24px;
color: var(--text-tertiary);
font-size: 0.9375rem;
}

.project-settings-spinner {
width: 20px;
height: 20px;
border: 2px solid var(--border-secondary);
border-top-color: var(--accent-primary);
border-radius: 50%;
animation: project-settings-spin 0.6s linear infinite;
}

@keyframes project-settings-spin {
to {
transform: rotate(360deg);
}
}

.project-settings-error {
display: flex;
align-items: center;
gap: 10px;
background: var(--error-bg);
border: 1px solid rgba(239, 68, 68, 0.2);
color: var(--error);
padding: 12px 16px;
border-radius: var(--radius-md);
margin-bottom: 24px;
font-size: 0.875rem;
}

.project-settings-success {
display: flex;
align-items: center;
gap: 10px;
background: var(--success-bg);
border: 1px solid rgba(34, 197, 94, 0.2);
color: var(--success);
padding: 12px 16px;
border-radius: var(--radius-md);
margin-bottom: 24px;
font-size: 0.875rem;
animation: project-settings-fade-in 0.2s ease;
}

@keyframes project-settings-fade-in {
from {
opacity: 0;
transform: translateY(-8px);
}
to {
opacity: 1;
transform: translateY(0);
}
}

.project-settings-section {
background: var(--bg-secondary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-lg);
padding: 24px;
margin-bottom: 24px;
}

.project-settings-section h2 {
font-size: 1.125rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 20px;
padding-bottom: 16px;
border-bottom: 1px solid var(--border-primary);
}

.project-settings-form {
display: flex;
flex-direction: column;
gap: 16px;
}

.project-settings-form-group {
display: flex;
flex-direction: column;
gap: 6px;
}

.project-settings-form-group label {
font-size: 0.8125rem;
font-weight: 500;
color: var(--text-secondary);
}

.project-settings-form-group textarea,
.project-settings-form-group input[type="text"] {
padding: 12px 14px;
background: var(--bg-tertiary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
font-size: 0.875rem;
color: var(--text-primary);
transition: all var(--transition-fast);
font-family: inherit;
resize: vertical;
}

.project-settings-form-group textarea {
min-height: 100px;
}

.project-settings-form-group textarea::placeholder,
.project-settings-form-group input::placeholder {
color: var(--text-muted);
}

.project-settings-form-group textarea:hover:not(:disabled),
.project-settings-form-group input:hover:not(:disabled) {
border-color: var(--border-secondary);
background: var(--bg-elevated);
}

.project-settings-form-group textarea:focus,
.project-settings-form-group input:focus {
outline: none;
border-color: var(--accent-primary);
box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
background: var(--bg-elevated);
}

.project-settings-form-group textarea:disabled,
.project-settings-form-group input:disabled {
opacity: 0.6;
cursor: not-allowed;
}

.project-settings-checkbox-group {
flex-direction: row;
align-items: center;
}

.project-settings-checkbox-label {
display: flex;
align-items: center;
gap: 10px;
cursor: pointer;
font-size: 0.875rem;
font-weight: 400;
color: var(--text-secondary);
user-select: none;
}

.project-settings-checkbox-label input[type="checkbox"] {
position: absolute;
opacity: 0;
width: 0;
height: 0;
}

.project-settings-checkbox-custom {
width: 18px;
height: 18px;
background: var(--bg-tertiary);
border: 1px solid var(--border-secondary);
border-radius: var(--radius-sm);
transition: all var(--transition-fast);
position: relative;
flex-shrink: 0;
}

.project-settings-checkbox-label input[type="checkbox"]:checked + .project-settings-checkbox-custom {
background: var(--accent-primary);
border-color: var(--accent-primary);
}

.project-settings-checkbox-label input[type="checkbox"]:checked + .project-settings-checkbox-custom::after {
content: '';
position: absolute;
left: 5px;
top: 2px;
width: 5px;
height: 9px;
border: solid white;
border-width: 0 2px 2px 0;
transform: rotate(45deg);
}

.project-settings-checkbox-label input[type="checkbox"]:focus + .project-settings-checkbox-custom {
box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
}

.project-settings-checkbox-label:hover .project-settings-checkbox-custom {
border-color: var(--accent-primary);
}

.project-settings-form-actions {
display: flex;
justify-content: flex-end;
gap: 12px;
margin-top: 8px;
}

.project-settings-save-button {
display: flex;
align-items: center;
justify-content: center;
gap: 8px;
padding: 10px 18px;
background: var(--accent-gradient);
border: none;
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: white;
cursor: pointer;
transition: all var(--transition-fast);
min-width: 120px;
}

.project-settings-save-button:hover:not(:disabled) {
transform: translateY(-1px);
box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
}

.project-settings-save-button:disabled {
opacity: 0.5;
cursor: not-allowed;
transform: none;
}

.project-settings-button-spinner {
width: 14px;
height: 14px;
border: 2px solid rgba(255, 255, 255, 0.3);
border-top-color: white;
border-radius: 50%;
animation: project-settings-spin 0.6s linear infinite;
}

/* Danger Zone */
.project-settings-danger-zone {
background: var(--bg-secondary);
border: 1px solid rgba(239, 68, 68, 0.3);
border-radius: var(--radius-lg);
padding: 24px;
margin-bottom: 24px;
}

.project-settings-danger-zone h2 {
font-size: 1.125rem;
font-weight: 600;
color: var(--error);
margin-bottom: 20px;
padding-bottom: 16px;
border-bottom: 1px solid rgba(239, 68, 68, 0.2);
}

.project-settings-danger-item {
display: flex;
justify-content: space-between;
align-items: flex-start;
gap: 24px;
}

.project-settings-danger-info h3 {
font-size: 0.9375rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 4px;
}

.project-settings-danger-info p {
color: var(--text-tertiary);
font-size: 0.8125rem;
max-width: 400px;
}

.project-settings-delete-button {
padding: 10px 18px;
background: transparent;
border: 1px solid rgba(239, 68, 68, 0.3);
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: var(--error);
cursor: pointer;
transition: all var(--transition-fast);
flex-shrink: 0;
}

.project-settings-delete-button:hover:not(:disabled) {
background: var(--error-bg);
border-color: rgba(239, 68, 68, 0.5);
}

.project-settings-delete-button:disabled {
opacity: 0.5;
cursor: not-allowed;
}

/* Delete Confirmation */
.project-settings-delete-confirm {
margin-top: 20px;
padding-top: 20px;
border-top: 1px solid rgba(239, 68, 68, 0.2);
animation: project-settings-fade-in 0.2s ease;
}

.project-settings-delete-confirm p {
color: var(--text-secondary);
font-size: 0.875rem;
margin-bottom: 12px;
}

.project-settings-delete-confirm strong {
color: var(--text-primary);
font-family: 'JetBrains Mono', 'Fira Code', 'SF Mono', Monaco, monospace;
background: var(--bg-tertiary);
padding: 2px 6px;
border-radius: var(--radius-sm);
}

.project-settings-delete-confirm-input {
width: 100%;
padding: 12px 14px;
background: var(--bg-tertiary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
font-size: 0.875rem;
color: var(--text-primary);
transition: all var(--transition-fast);
margin-bottom: 16px;
}

.project-settings-delete-confirm-input:focus {
outline: none;
border-color: var(--error);
box-shadow: 0 0 0 3px rgba(239, 68, 68, 0.15);
}

.project-settings-delete-confirm-input::placeholder {
color: var(--text-muted);
}

.project-settings-delete-confirm-actions {
display: flex;
gap: 12px;
}

.project-settings-confirm-delete-button {
display: flex;
align-items: center;
justify-content: center;
gap: 8px;
padding: 10px 18px;
background: var(--error);
border: none;
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: white;
cursor: pointer;
transition: all var(--transition-fast);
min-width: 120px;
}

.project-settings-confirm-delete-button:hover:not(:disabled) {
opacity: 0.9;
}

.project-settings-confirm-delete-button:disabled {
opacity: 0.5;
cursor: not-allowed;
}

.project-settings-cancel-button {
padding: 10px 18px;
background: transparent;
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: var(--text-secondary);
cursor: pointer;
transition: all var(--transition-fast);
}

.project-settings-cancel-button:hover:not(:disabled) {
background: var(--bg-hover);
border-color: var(--border-secondary);
color: var(--text-primary);
}

.project-settings-cancel-button:disabled {
opacity: 0.5;
cursor: not-allowed;
}

.project-settings-delete-spinner {
width: 14px;
height: 14px;
border: 2px solid rgba(255, 255, 255, 0.3);
border-top-color: white;
border-radius: 50%;
animation: project-settings-spin 0.6s linear infinite;
}

/* Access denied */
.project-settings-access-denied {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
padding: 80px 24px;
text-align: center;
}

.project-settings-access-denied h2 {
font-size: 1.5rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 12px;
}

.project-settings-access-denied p {
color: var(--text-tertiary);
font-size: 0.9375rem;
max-width: 400px;
}

/* Responsive */
@media (max-width: 768px) {
.project-settings-danger-item {
flex-direction: column;
gap: 16px;
}

.project-settings-delete-button {
align-self: flex-start;
}

.project-settings-delete-confirm-actions {
flex-direction: column;
}

.project-settings-confirm-delete-button,
.project-settings-cancel-button {
width: 100%;
}
}

@@ -1,304 +0,0 @@
import { useState, useEffect, useCallback } from 'react';
import { useParams, useNavigate } from 'react-router-dom';
import { Project } from '../types';
import {
getProject,
updateProject,
deleteProject,
getMyProjectAccess,
UnauthorizedError,
ForbiddenError,
} from '../api';
import { Breadcrumb } from '../components/Breadcrumb';
import { useAuth } from '../contexts/AuthContext';
import './ProjectSettingsPage.css';

function ProjectSettingsPage() {
const { projectName } = useParams<{ projectName: string }>();
const navigate = useNavigate();
const { user } = useAuth();

const [project, setProject] = useState<Project | null>(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const [success, setSuccess] = useState<string | null>(null);
const [accessDenied, setAccessDenied] = useState(false);
const [canAdmin, setCanAdmin] = useState(false);

// General settings form state
const [description, setDescription] = useState('');
const [isPublic, setIsPublic] = useState(false);
const [saving, setSaving] = useState(false);

// Delete confirmation state
const [showDeleteConfirm, setShowDeleteConfirm] = useState(false);
const [deleteConfirmText, setDeleteConfirmText] = useState('');
const [deleting, setDeleting] = useState(false);

const loadData = useCallback(async () => {
if (!projectName) return;

try {
setLoading(true);
setAccessDenied(false);
const [projectData, accessResult] = await Promise.all([
getProject(projectName),
getMyProjectAccess(projectName),
]);
setProject(projectData);
setDescription(projectData.description || '');
setIsPublic(projectData.is_public);

const hasAdminAccess = accessResult.access_level === 'admin';
setCanAdmin(hasAdminAccess);

if (!hasAdminAccess) {
setAccessDenied(true);
}

setError(null);
} catch (err) {
if (err instanceof UnauthorizedError) {
navigate('/login', { state: { from: `/project/${projectName}/settings` } });
return;
}
if (err instanceof ForbiddenError) {
setAccessDenied(true);
setLoading(false);
return;
}
setError(err instanceof Error ? err.message : 'Failed to load project');
} finally {
setLoading(false);
}
}, [projectName, navigate]);

useEffect(() => {
loadData();
}, [loadData]);

const handleSaveSettings = async (e: React.FormEvent) => {
e.preventDefault();
if (!projectName) return;

try {
setSaving(true);
setError(null);
const updatedProject = await updateProject(projectName, {
description: description || undefined,
is_public: isPublic,
});
setProject(updatedProject);
setSuccess('Settings saved successfully');
setTimeout(() => setSuccess(null), 3000);
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to save settings');
} finally {
setSaving(false);
}
};

const handleDeleteProject = async () => {
if (!projectName || deleteConfirmText !== projectName) return;

try {
setDeleting(true);
setError(null);
await deleteProject(projectName);
navigate('/');
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to delete project');
setDeleting(false);
}
};

const handleCancelDelete = () => {
setShowDeleteConfirm(false);
setDeleteConfirmText('');
};

if (loading) {
return (
<div className="project-settings-page">
<Breadcrumb
items={[
{ label: 'Projects', href: '/' },
{ label: projectName || '', href: `/project/${projectName}` },
{ label: 'Settings' },
]}
/>
<div className="project-settings-loading">
<div className="project-settings-spinner" />
<span>Loading...</span>
</div>
</div>
);
}

if (accessDenied || !canAdmin) {
return (
<div className="project-settings-page">
<Breadcrumb
items={[
{ label: 'Projects', href: '/' },
{ label: projectName || '', href: `/project/${projectName}` },
{ label: 'Settings' },
]}
/>
<div className="project-settings-access-denied">
<h2>Access Denied</h2>
<p>You must be a project admin to access settings.</p>
{!user && (
<p style={{ marginTop: '16px' }}>
|
|
||||||
<a href="/login" className="btn btn-primary">
|
|
||||||
Sign in
|
|
||||||
</a>
|
|
||||||
</p>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!project) {
|
|
||||||
return (
|
|
||||||
<div className="project-settings-page">
|
|
||||||
<Breadcrumb
|
|
||||||
items={[
|
|
||||||
{ label: 'Projects', href: '/' },
|
|
||||||
{ label: projectName || '' },
|
|
||||||
]}
|
|
||||||
/>
|
|
||||||
<div className="project-settings-error">Project not found</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="project-settings-page">
|
|
||||||
<Breadcrumb
|
|
||||||
items={[
|
|
||||||
{ label: 'Projects', href: '/' },
|
|
||||||
{ label: project.name, href: `/project/${project.name}` },
|
|
||||||
{ label: 'Settings' },
|
|
||||||
]}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<div className="project-settings-header">
|
|
||||||
<h1>Project Settings</h1>
|
|
||||||
<p className="project-settings-subtitle">Manage settings for {project.name}</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{error && <div className="project-settings-error">{error}</div>}
|
|
||||||
{success && <div className="project-settings-success">{success}</div>}
|
|
||||||
|
|
||||||
{/* General Settings Section */}
|
|
||||||
<div className="project-settings-section">
|
|
||||||
<h2>General</h2>
|
|
||||||
<form className="project-settings-form" onSubmit={handleSaveSettings}>
|
|
||||||
<div className="project-settings-form-group">
|
|
||||||
<label htmlFor="description">Description</label>
|
|
||||||
<textarea
|
|
||||||
id="description"
|
|
||||||
value={description}
|
|
||||||
onChange={(e) => setDescription(e.target.value)}
|
|
||||||
placeholder="Describe your project..."
|
|
||||||
disabled={saving}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="project-settings-form-group project-settings-checkbox-group">
|
|
||||||
<label className="project-settings-checkbox-label">
|
|
||||||
<input
|
|
||||||
type="checkbox"
|
|
||||||
checked={isPublic}
|
|
||||||
onChange={(e) => setIsPublic(e.target.checked)}
|
|
||||||
disabled={saving}
|
|
||||||
/>
|
|
||||||
<span className="project-settings-checkbox-custom" />
|
|
||||||
<span>Public project (visible to everyone)</span>
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="project-settings-form-actions">
|
|
||||||
<button type="submit" className="project-settings-save-button" disabled={saving}>
|
|
||||||
{saving ? (
|
|
||||||
<>
|
|
||||||
<span className="project-settings-button-spinner" />
|
|
||||||
Saving...
|
|
||||||
</>
|
|
||||||
) : (
|
|
||||||
'Save Changes'
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Danger Zone Section */}
|
|
||||||
<div className="project-settings-danger-zone">
|
|
||||||
<h2>Danger Zone</h2>
|
|
||||||
<div className="project-settings-danger-item">
|
|
||||||
<div className="project-settings-danger-info">
|
|
||||||
<h3>Delete this project</h3>
|
|
||||||
<p>
|
|
||||||
Once you delete a project, there is no going back. This will permanently delete the
|
|
||||||
project, all packages, artifacts, and tags.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
{!showDeleteConfirm && (
|
|
||||||
<button
|
|
||||||
className="project-settings-delete-button"
|
|
||||||
onClick={() => setShowDeleteConfirm(true)}
|
|
||||||
disabled={deleting}
|
|
||||||
>
|
|
||||||
Delete Project
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{showDeleteConfirm && (
|
|
||||||
<div className="project-settings-delete-confirm">
|
|
||||||
<p>
|
|
||||||
Type <strong>{project.name}</strong> to confirm deletion:
|
|
||||||
</p>
|
|
||||||
<input
|
|
||||||
type="text"
|
|
||||||
className="project-settings-delete-confirm-input"
|
|
||||||
value={deleteConfirmText}
|
|
||||||
onChange={(e) => setDeleteConfirmText(e.target.value)}
|
|
||||||
placeholder={project.name}
|
|
||||||
disabled={deleting}
|
|
||||||
autoFocus
|
|
||||||
/>
|
|
||||||
<div className="project-settings-delete-confirm-actions">
|
|
||||||
<button
|
|
||||||
className="project-settings-confirm-delete-button"
|
|
||||||
onClick={handleDeleteProject}
|
|
||||||
disabled={deleting || deleteConfirmText !== project.name}
|
|
||||||
>
|
|
||||||
{deleting ? (
|
|
||||||
<>
|
|
||||||
<span className="project-settings-delete-spinner" />
|
|
||||||
Deleting...
|
|
||||||
</>
|
|
||||||
) : (
|
|
||||||
'Yes, delete this project'
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
<button
|
|
||||||
className="project-settings-cancel-button"
|
|
||||||
onClick={handleCancelDelete}
|
|
||||||
disabled={deleting}
|
|
||||||
>
|
|
||||||
Cancel
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export default ProjectSettingsPage;
|
|
||||||
@@ -1,270 +0,0 @@
|
|||||||
.team-dashboard {
|
|
||||||
padding: 1.5rem 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.team-header {
|
|
||||||
display: flex;
|
|
||||||
justify-content: space-between;
|
|
||||||
align-items: flex-start;
|
|
||||||
gap: 1.5rem;
|
|
||||||
margin-bottom: 2rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.team-header-left {
|
|
||||||
flex: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
.team-header-title {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 0.75rem;
|
|
||||||
margin-bottom: 0.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.team-header h1 {
|
|
||||||
margin: 0;
|
|
||||||
font-size: 1.5rem;
|
|
||||||
font-weight: 600;
|
|
||||||
}
|
|
||||||
|
|
||||||
.team-slug {
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.team-description {
|
|
||||||
margin: 0 0 0.5rem;
|
|
||||||
color: var(--text-secondary);
|
|
||||||
font-size: 0.9375rem;
|
|
||||||
max-width: 600px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.team-header-actions {
|
|
||||||
display: flex;
|
|
||||||
gap: 0.5rem;
|
|
||||||
flex-shrink: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.team-section {
|
|
||||||
margin-top: 2rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.section-header {
|
|
||||||
display: flex;
|
|
||||||
justify-content: space-between;
|
|
||||||
align-items: center;
|
|
||||||
margin-bottom: 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.section-header h2 {
|
|
||||||
margin: 0;
|
|
||||||
font-size: 1.25rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Table utility classes */
|
|
||||||
.text-muted {
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-ghost {
|
|
||||||
background: transparent;
|
|
||||||
color: var(--text-muted);
|
|
||||||
border: none;
|
|
||||||
padding: 0.375rem;
|
|
||||||
cursor: pointer;
|
|
||||||
border-radius: var(--radius-sm);
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-ghost:hover {
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.section-footer {
|
|
||||||
margin-top: 1rem;
|
|
||||||
text-align: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.view-all-link {
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--accent-primary);
|
|
||||||
text-decoration: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
.view-all-link:hover {
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* States */
|
|
||||||
.loading-state,
|
|
||||||
.error-state {
|
|
||||||
text-align: center;
|
|
||||||
padding: 4rem 2rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.error-state h2 {
|
|
||||||
margin: 0 0 0.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.error-state p {
|
|
||||||
margin: 0 0 1.5rem;
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.empty-state {
|
|
||||||
text-align: center;
|
|
||||||
padding: 2rem;
|
|
||||||
background: var(--bg-secondary);
|
|
||||||
border: 1px dashed var(--border-primary);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.empty-state p {
|
|
||||||
margin: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.empty-hint {
|
|
||||||
margin-top: 0.5rem !important;
|
|
||||||
font-size: 0.875rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Buttons */
|
|
||||||
.btn {
|
|
||||||
display: inline-flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 0.5rem;
|
|
||||||
padding: 0.5rem 1rem;
|
|
||||||
border: none;
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
font-size: 0.875rem;
|
|
||||||
font-weight: 500;
|
|
||||||
cursor: pointer;
|
|
||||||
text-decoration: none;
|
|
||||||
transition: all 0.15s ease;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-sm {
|
|
||||||
padding: 0.375rem 0.75rem;
|
|
||||||
font-size: 0.8125rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-primary {
|
|
||||||
background: var(--accent-primary);
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-primary:hover {
|
|
||||||
background: var(--accent-primary-hover);
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-secondary {
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
color: var(--text-primary);
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-secondary:hover {
|
|
||||||
background: var(--bg-hover);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Modal */
|
|
||||||
.modal-overlay {
|
|
||||||
position: fixed;
|
|
||||||
inset: 0;
|
|
||||||
background: rgba(0, 0, 0, 0.7);
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
z-index: 1000;
|
|
||||||
padding: 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-content {
|
|
||||||
background: var(--bg-secondary);
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: var(--radius-lg);
|
|
||||||
padding: 1.5rem;
|
|
||||||
width: 100%;
|
|
||||||
max-width: 480px;
|
|
||||||
max-height: 90vh;
|
|
||||||
box-shadow: var(--shadow-lg);
|
|
||||||
overflow-y: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-content h2 {
|
|
||||||
margin: 0 0 1.5rem;
|
|
||||||
font-size: 1.25rem;
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Form */
|
|
||||||
.form-group {
|
|
||||||
margin-bottom: 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-group label {
|
|
||||||
display: block;
|
|
||||||
margin-bottom: 0.5rem;
|
|
||||||
font-weight: 500;
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-group input[type="text"],
|
|
||||||
.form-group textarea {
|
|
||||||
width: 100%;
|
|
||||||
padding: 0.625rem 0.75rem;
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
color: var(--text-primary);
|
|
||||||
font-size: 0.875rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-group input:focus,
|
|
||||||
.form-group textarea:focus {
|
|
||||||
outline: none;
|
|
||||||
border-color: var(--accent-primary);
|
|
||||||
box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-group textarea {
|
|
||||||
resize: vertical;
|
|
||||||
min-height: 80px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.checkbox-group label {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 0.5rem;
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
|
|
||||||
.checkbox-group input[type="checkbox"] {
|
|
||||||
width: 1rem;
|
|
||||||
height: 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-hint {
|
|
||||||
display: block;
|
|
||||||
font-size: 0.8125rem;
|
|
||||||
color: var(--text-muted);
|
|
||||||
margin-top: 0.375rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-actions {
|
|
||||||
display: flex;
|
|
||||||
justify-content: flex-end;
|
|
||||||
gap: 0.75rem;
|
|
||||||
margin-top: 1.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn:disabled {
|
|
||||||
opacity: 0.6;
|
|
||||||
cursor: not-allowed;
|
|
||||||
}
|
|
||||||
|
|
||||||
.empty-state .btn {
|
|
||||||
margin-top: 1rem;
|
|
||||||
}
|
|
||||||
@@ -1,279 +0,0 @@
|
|||||||
import { useState, useEffect, useCallback } from 'react';
|
|
||||||
import { Link, useParams, useNavigate } from 'react-router-dom';
|
|
||||||
import { TeamDetail, Project, PaginatedResponse } from '../types';
|
|
||||||
import { getTeam, listTeamProjects, createProject } from '../api';
|
|
||||||
import { useAuth } from '../contexts/AuthContext';
|
|
||||||
import { Badge } from '../components/Badge';
|
|
||||||
import { Breadcrumb } from '../components/Breadcrumb';
|
|
||||||
import { DataTable } from '../components/DataTable';
|
|
||||||
import './TeamDashboardPage.css';
|
|
||||||
|
|
||||||
function TeamDashboardPage() {
|
|
||||||
const { slug } = useParams<{ slug: string }>();
|
|
||||||
const navigate = useNavigate();
|
|
||||||
const { user } = useAuth();
|
|
||||||
const [team, setTeam] = useState<TeamDetail | null>(null);
|
|
||||||
const [projects, setProjects] = useState<PaginatedResponse<Project> | null>(null);
|
|
||||||
const [loading, setLoading] = useState(true);
|
|
||||||
const [error, setError] = useState<string | null>(null);
|
|
||||||
const [showProjectForm, setShowProjectForm] = useState(false);
|
|
||||||
const [newProject, setNewProject] = useState({ name: '', description: '', is_public: true });
|
|
||||||
const [creating, setCreating] = useState(false);
|
|
||||||
|
|
||||||
const loadTeamData = useCallback(async () => {
|
|
||||||
if (!slug) return;
|
|
||||||
try {
|
|
||||||
setLoading(true);
|
|
||||||
const [teamData, projectsData] = await Promise.all([
|
|
||||||
getTeam(slug),
|
|
||||||
listTeamProjects(slug, { limit: 10 }),
|
|
||||||
]);
|
|
||||||
setTeam(teamData);
|
|
||||||
setProjects(projectsData);
|
|
||||||
setError(null);
|
|
||||||
} catch (err) {
|
|
||||||
setError(err instanceof Error ? err.message : 'Failed to load team');
|
|
||||||
} finally {
|
|
||||||
setLoading(false);
|
|
||||||
}
|
|
||||||
}, [slug]);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
loadTeamData();
|
|
||||||
}, [loadTeamData]);
|
|
||||||
|
|
||||||
async function handleCreateProject(e: React.FormEvent) {
|
|
||||||
e.preventDefault();
|
|
||||||
if (!team) return;
|
|
||||||
try {
|
|
||||||
setCreating(true);
|
|
||||||
const project = await createProject({ ...newProject, team_id: team.id });
|
|
||||||
setNewProject({ name: '', description: '', is_public: true });
|
|
||||||
setShowProjectForm(false);
|
|
||||||
navigate(`/project/${project.name}`);
|
|
||||||
} catch (err) {
|
|
||||||
setError(err instanceof Error ? err.message : 'Failed to create project');
|
|
||||||
} finally {
|
|
||||||
setCreating(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (loading) {
|
|
||||||
return (
|
|
||||||
<div className="team-dashboard">
|
|
||||||
<div className="loading-state">Loading team...</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (error || !team) {
|
|
||||||
return (
|
|
||||||
<div className="team-dashboard">
|
|
||||||
<div className="error-state">
|
|
||||||
<h2>Error loading team</h2>
|
|
||||||
<p>{error || 'Team not found'}</p>
|
|
||||||
<Link to="/teams" className="btn btn-primary">Back to Teams</Link>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const isAdminOrOwner = team.user_role === 'owner' || team.user_role === 'admin' || user?.is_admin;
|
|
||||||
|
|
||||||
const roleVariants: Record<string, 'success' | 'info' | 'default'> = {
|
|
||||||
owner: 'success',
|
|
||||||
admin: 'info',
|
|
||||||
member: 'default',
|
|
||||||
};
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="team-dashboard">
|
|
||||||
<Breadcrumb
|
|
||||||
items={[
|
|
||||||
{ label: 'Teams', href: '/teams' },
|
|
||||||
{ label: team.name },
|
|
||||||
]}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<div className="team-header">
|
|
||||||
<div className="team-header-left">
|
|
||||||
<div className="team-header-title">
|
|
||||||
<h1>{team.name}</h1>
|
|
||||||
{team.user_role && (
|
|
||||||
<Badge variant={roleVariants[team.user_role] || 'default'}>
|
|
||||||
{team.user_role}
|
|
||||||
</Badge>
|
|
||||||
)}
|
|
||||||
<span className="team-slug">@{team.slug}</span>
|
|
||||||
</div>
|
|
||||||
{team.description && (
|
|
||||||
<p className="team-description">{team.description}</p>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
{isAdminOrOwner && (
|
|
||||||
<div className="team-header-actions">
|
|
||||||
<Link to={`/teams/${slug}/members`} className="btn btn-secondary">
|
|
||||||
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
|
||||||
<path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
|
|
||||||
<circle cx="9" cy="7" r="4"/>
|
|
||||||
<path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
|
|
||||||
<path d="M16 3.13a4 4 0 0 1 0 7.75"/>
|
|
||||||
</svg>
|
|
||||||
Members
|
|
||||||
</Link>
|
|
||||||
<Link to={`/teams/${slug}/settings`} className="btn btn-secondary">
|
|
||||||
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
|
||||||
<circle cx="12" cy="12" r="3"/>
|
|
||||||
<path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"/>
|
|
||||||
</svg>
|
|
||||||
Settings
|
|
||||||
</Link>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{showProjectForm && (
|
|
||||||
<div className="modal-overlay" onClick={() => setShowProjectForm(false)}>
|
|
||||||
<div className="modal-content" onClick={e => e.stopPropagation()}>
|
|
||||||
<h2>Create New Project</h2>
|
|
||||||
<form onSubmit={handleCreateProject}>
|
|
||||||
<div className="form-group">
|
|
||||||
<label htmlFor="project-name">Project Name</label>
|
|
||||||
<input
|
|
||||||
id="project-name"
|
|
||||||
type="text"
|
|
||||||
value={newProject.name}
|
|
||||||
onChange={e => setNewProject({ ...newProject, name: e.target.value })}
|
|
||||||
placeholder="my-project"
|
|
||||||
required
|
|
||||||
autoFocus
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<div className="form-group">
|
|
||||||
<label htmlFor="project-description">Description (optional)</label>
|
|
||||||
<textarea
|
|
||||||
id="project-description"
|
|
||||||
value={newProject.description}
|
|
||||||
onChange={e => setNewProject({ ...newProject, description: e.target.value })}
|
|
||||||
placeholder="What is this project for?"
|
|
||||||
rows={3}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<div className="form-group checkbox-group">
|
|
||||||
<label>
|
|
||||||
<input
|
|
||||||
type="checkbox"
|
|
||||||
checked={newProject.is_public}
|
|
||||||
onChange={e => setNewProject({ ...newProject, is_public: e.target.checked })}
|
|
||||||
/>
|
|
||||||
Public project
|
|
||||||
</label>
|
|
||||||
<span className="form-hint">Public projects are visible to everyone</span>
|
|
||||||
</div>
|
|
||||||
<div className="form-actions">
|
|
||||||
<button type="button" className="btn btn-secondary" onClick={() => setShowProjectForm(false)}>
|
|
||||||
Cancel
|
|
||||||
</button>
|
|
||||||
<button type="submit" className="btn btn-primary" disabled={creating}>
|
|
||||||
{creating ? 'Creating...' : 'Create Project'}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<div className="team-section">
|
|
||||||
<div className="section-header">
|
|
||||||
<h2>Projects</h2>
|
|
||||||
{isAdminOrOwner && (
|
|
||||||
<button className="btn btn-primary btn-sm" onClick={() => setShowProjectForm(true)}>
|
|
||||||
+ New Project
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{projects?.items.length === 0 ? (
|
|
||||||
<div className="empty-state">
|
|
||||||
<p>No projects in this team yet.</p>
|
|
||||||
{isAdminOrOwner && (
|
|
||||||
<button className="btn btn-primary" onClick={() => setShowProjectForm(true)}>
|
|
||||||
Create Project
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
) : (
|
|
||||||
<DataTable
|
|
||||||
data={projects?.items || []}
|
|
||||||
keyExtractor={(project) => project.id}
|
|
||||||
onRowClick={(project) => navigate(`/project/${project.name}`)}
|
|
||||||
columns={[
|
|
||||||
{
|
|
||||||
key: 'name',
|
|
||||||
header: 'Name',
|
|
||||||
render: (project) => (
|
|
||||||
<Link
|
|
||||||
to={`/project/${project.name}`}
|
|
||||||
className="cell-name"
|
|
||||||
onClick={(e) => e.stopPropagation()}
|
|
||||||
>
|
|
||||||
{project.name}
|
|
||||||
</Link>
|
|
||||||
),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
key: 'description',
|
|
||||||
header: 'Description',
|
|
||||||
className: 'cell-description',
|
|
||||||
render: (project) => project.description || <span className="text-muted">—</span>,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
key: 'visibility',
|
|
||||||
header: 'Visibility',
|
|
||||||
render: (project) => (
|
|
||||||
<Badge variant={project.is_public ? 'public' : 'private'}>
|
|
||||||
{project.is_public ? 'Public' : 'Private'}
|
|
||||||
</Badge>
|
|
||||||
),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
key: 'created_by',
|
|
||||||
header: 'Created By',
|
|
||||||
render: (project) => <span className="text-muted">{project.created_by}</span>,
|
|
||||||
},
|
|
||||||
...(isAdminOrOwner ? [{
|
|
||||||
key: 'actions',
|
|
||||||
header: '',
|
|
||||||
render: (project: Project) => (
|
|
||||||
<button
|
|
||||||
className="btn btn-sm btn-ghost"
|
|
||||||
onClick={(e) => {
|
|
||||||
e.stopPropagation();
|
|
||||||
navigate(`/project/${project.name}/settings`);
|
|
||||||
}}
|
|
||||||
title="Settings"
|
|
||||||
>
|
|
||||||
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
|
||||||
<circle cx="12" cy="12" r="3"/>
|
|
||||||
<path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"/>
|
|
||||||
</svg>
|
|
||||||
</button>
|
|
||||||
),
|
|
||||||
}] : []),
|
|
||||||
]}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{projects && projects.pagination.total > 10 && (
|
|
||||||
<div className="section-footer">
|
|
||||||
<Link to={`/teams/${slug}/projects`} className="view-all-link">
|
|
||||||
View all {projects.pagination.total} projects
|
|
||||||
</Link>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export default TeamDashboardPage;
|
|
||||||
@@ -1,247 +0,0 @@
|
|||||||
.team-members {
|
|
||||||
padding: 1.5rem 0;
|
|
||||||
max-width: 800px;
|
|
||||||
margin: 0 auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
.page-header {
|
|
||||||
display: flex;
|
|
||||||
justify-content: space-between;
|
|
||||||
align-items: center;
|
|
||||||
margin-bottom: 1.5rem;
|
|
||||||
gap: 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.page-header h1 {
|
|
||||||
margin: 0;
|
|
||||||
font-size: 1.75rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Member cell in table */
|
|
||||||
.member-cell {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 0.75rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.member-avatar {
|
|
||||||
width: 40px;
|
|
||||||
height: 40px;
|
|
||||||
border-radius: 50%;
|
|
||||||
background: var(--accent-primary);
|
|
||||||
color: white;
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
font-weight: 600;
|
|
||||||
font-size: 1rem;
|
|
||||||
flex-shrink: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.member-details {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
min-width: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.member-username {
|
|
||||||
font-weight: 500;
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 0.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.you-badge {
|
|
||||||
font-size: 0.75rem;
|
|
||||||
font-weight: normal;
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.member-email {
|
|
||||||
font-size: 0.8125rem;
|
|
||||||
color: var(--text-muted);
|
|
||||||
overflow: hidden;
|
|
||||||
text-overflow: ellipsis;
|
|
||||||
white-space: nowrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
.text-muted {
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.role-select {
|
|
||||||
padding: 0.375rem 0.75rem;
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
font-size: 0.875rem;
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
color: var(--text-primary);
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
|
|
||||||
.role-select:focus {
|
|
||||||
outline: none;
|
|
||||||
border-color: var(--accent-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Messages */
|
|
||||||
.error-message {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: space-between;
|
|
||||||
padding: 0.75rem 1rem;
|
|
||||||
margin-bottom: 1rem;
|
|
||||||
background: var(--error-bg);
|
|
||||||
border: 1px solid var(--error);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
color: var(--error);
|
|
||||||
font-size: 0.875rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.error-dismiss {
|
|
||||||
background: none;
|
|
||||||
border: none;
|
|
||||||
font-size: 1.25rem;
|
|
||||||
cursor: pointer;
|
|
||||||
color: inherit;
|
|
||||||
padding: 0;
|
|
||||||
line-height: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* States */
|
|
||||||
.loading-state,
|
|
||||||
.error-state {
|
|
||||||
text-align: center;
|
|
||||||
padding: 4rem 2rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.error-state h2 {
|
|
||||||
margin: 0 0 0.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.error-state p {
|
|
||||||
margin: 0 0 1.5rem;
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Modal */
|
|
||||||
.modal-overlay {
|
|
||||||
position: fixed;
|
|
||||||
top: 0;
|
|
||||||
left: 0;
|
|
||||||
right: 0;
|
|
||||||
bottom: 0;
|
|
||||||
background: rgba(0, 0, 0, 0.7);
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
z-index: 1000;
|
|
||||||
padding: 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-content {
|
|
||||||
background: var(--bg-secondary);
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: var(--radius-lg);
|
|
||||||
padding: 1.5rem;
|
|
||||||
width: 100%;
|
|
||||||
max-width: 400px;
|
|
||||||
box-shadow: var(--shadow-lg);
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-content h2 {
|
|
||||||
margin: 0 0 1.5rem;
|
|
||||||
font-size: 1.25rem;
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Form */
|
|
||||||
.form-group {
|
|
||||||
margin-bottom: 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-group label {
|
|
||||||
display: block;
|
|
||||||
margin-bottom: 0.375rem;
|
|
||||||
font-weight: 500;
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-group input,
|
|
||||||
.form-group select {
|
|
||||||
width: 100%;
|
|
||||||
padding: 0.5rem 0.75rem;
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
font-size: 0.9375rem;
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-group input:focus,
|
|
||||||
.form-group select:focus {
|
|
||||||
outline: none;
|
|
||||||
border-color: var(--accent-primary);
|
|
||||||
box-shadow: 0 0 0 2px rgba(16, 185, 129, 0.2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-actions {
|
|
||||||
display: flex;
|
|
||||||
justify-content: flex-end;
|
|
||||||
gap: 0.75rem;
|
|
||||||
margin-top: 1.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Buttons */
|
|
||||||
.btn {
|
|
||||||
display: inline-flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 0.5rem;
|
|
||||||
padding: 0.5rem 1rem;
|
|
||||||
border: none;
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
font-size: 0.875rem;
|
|
||||||
font-weight: 500;
|
|
||||||
cursor: pointer;
|
|
||||||
text-decoration: none;
|
|
||||||
transition: all 0.15s ease;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn:disabled {
|
|
||||||
opacity: 0.6;
|
|
||||||
cursor: not-allowed;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-primary {
|
|
||||||
background: var(--accent-primary);
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-primary:hover:not(:disabled) {
|
|
||||||
background: var(--accent-primary-hover);
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-secondary {
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
color: var(--text-primary);
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-secondary:hover:not(:disabled) {
|
|
||||||
background: var(--bg-hover);
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-icon {
|
|
||||||
padding: 0.375rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-danger-ghost {
|
|
||||||
background: transparent;
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-danger-ghost:hover:not(:disabled) {
|
|
||||||
background: var(--error-bg);
|
|
||||||
color: var(--error);
|
|
||||||
}
|
|
||||||
@@ -1,311 +0,0 @@
|
|||||||
import { useState, useEffect, useCallback } from 'react';
|
|
||||||
import { useParams, Link } from 'react-router-dom';
|
|
||||||
import { TeamDetail, TeamMember, TeamMemberCreate, TeamRole } from '../types';
|
|
||||||
import {
|
|
||||||
getTeam,
|
|
||||||
listTeamMembers,
|
|
||||||
addTeamMember,
|
|
||||||
updateTeamMember,
|
|
||||||
removeTeamMember,
|
|
||||||
} from '../api';
|
|
||||||
import { useAuth } from '../contexts/AuthContext';
|
|
||||||
import { Badge } from '../components/Badge';
|
|
||||||
import { Breadcrumb } from '../components/Breadcrumb';
|
|
||||||
import { DataTable } from '../components/DataTable';
|
|
||||||
import { UserAutocomplete } from '../components/UserAutocomplete';
|
|
||||||
import './TeamMembersPage.css';
|
|
||||||
|
|
||||||
function TeamMembersPage() {
|
|
||||||
const { slug } = useParams<{ slug: string }>();
|
|
||||||
const { user } = useAuth();
|
|
||||||
const [team, setTeam] = useState<TeamDetail | null>(null);
|
|
||||||
const [members, setMembers] = useState<TeamMember[]>([]);
|
|
||||||
const [loading, setLoading] = useState(true);
|
|
||||||
const [error, setError] = useState<string | null>(null);
|
|
||||||
const [showAddForm, setShowAddForm] = useState(false);
|
|
||||||
const [adding, setAdding] = useState(false);
|
|
||||||
const [newMember, setNewMember] = useState<TeamMemberCreate>({ username: '', role: 'member' });
|
|
||||||
const [editingMember, setEditingMember] = useState<string | null>(null);
|
|
||||||
const [removingMember, setRemovingMember] = useState<string | null>(null);
|
|
||||||
|
|
||||||
const loadData = useCallback(async () => {
|
|
||||||
if (!slug) return;
|
|
||||||
try {
|
|
||||||
setLoading(true);
|
|
||||||
const [teamData, membersData] = await Promise.all([
|
|
||||||
getTeam(slug),
|
|
||||||
listTeamMembers(slug),
|
|
||||||
]);
|
|
||||||
setTeam(teamData);
|
|
||||||
setMembers(membersData);
|
|
||||||
setError(null);
|
|
||||||
} catch (err) {
|
|
||||||
setError(err instanceof Error ? err.message : 'Failed to load team');
|
|
||||||
} finally {
|
|
||||||
setLoading(false);
|
|
||||||
}
|
|
||||||
}, [slug]);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
loadData();
|
|
||||||
}, [loadData]);
|
|
||||||
|
|
||||||
async function handleAddMember(e: React.FormEvent) {
|
|
||||||
e.preventDefault();
|
|
||||||
if (!slug) return;
|
|
||||||
try {
|
|
||||||
setAdding(true);
|
|
||||||
setError(null);
|
|
||||||
await addTeamMember(slug, newMember);
|
|
||||||
setNewMember({ username: '', role: 'member' });
|
|
||||||
setShowAddForm(false);
|
|
||||||
loadData();
|
|
||||||
} catch (err) {
|
|
||||||
setError(err instanceof Error ? err.message : 'Failed to add member');
|
|
||||||
} finally {
|
|
||||||
setAdding(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function handleRoleChange(username: string, newRole: TeamRole) {
|
|
||||||
if (!slug) return;
|
|
||||||
try {
|
|
||||||
setEditingMember(username);
|
|
||||||
setError(null);
|
|
||||||
await updateTeamMember(slug, username, { role: newRole });
|
|
||||||
loadData();
|
|
||||||
} catch (err) {
|
|
||||||
setError(err instanceof Error ? err.message : 'Failed to update member');
|
|
||||||
} finally {
|
|
||||||
setEditingMember(null);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function handleRemoveMember(username: string) {
|
|
||||||
if (!slug) return;
|
|
||||||
if (!confirm(`Remove ${username} from the team?`)) return;
|
|
||||||
try {
|
|
||||||
setRemovingMember(username);
|
|
||||||
setError(null);
|
|
||||||
await removeTeamMember(slug, username);
|
|
||||||
loadData();
|
|
||||||
} catch (err) {
|
|
||||||
setError(err instanceof Error ? err.message : 'Failed to remove member');
|
|
||||||
} finally {
|
|
||||||
setRemovingMember(null);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (loading) {
|
|
||||||
return (
|
|
||||||
<div className="team-members">
|
|
||||||
<div className="loading-state">Loading team members...</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (error && !team) {
|
|
||||||
return (
|
|
||||||
<div className="team-members">
|
|
||||||
<div className="error-state">
|
|
||||||
<h2>Error loading team</h2>
|
|
||||||
<p>{error}</p>
|
|
||||||
<Link to="/teams" className="btn btn-primary">Back to Teams</Link>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!team) return null;
|
|
||||||
|
|
||||||
const isOwner = team.user_role === 'owner' || user?.is_admin;
|
|
||||||
const isAdmin = team.user_role === 'admin' || isOwner;
|
|
||||||
|
|
||||||
const roleVariants: Record<string, 'success' | 'info' | 'default'> = {
|
|
||||||
owner: 'success',
|
|
||||||
admin: 'info',
|
|
||||||
member: 'default',
|
|
||||||
};
|
|
||||||
|
|
||||||
const roles: TeamRole[] = ['owner', 'admin', 'member'];
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="team-members">
|
|
||||||
<Breadcrumb
|
|
||||||
items={[
|
|
||||||
{ label: 'Teams', href: '/teams' },
|
|
||||||
{ label: team.name, href: `/teams/${slug}` },
|
|
||||||
{ label: 'Members' },
|
|
||||||
]}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<div className="page-header">
|
|
||||||
<h1>Team Members</h1>
|
|
||||||
{isAdmin && (
|
|
||||||
<button className="btn btn-primary" onClick={() => setShowAddForm(true)}>
|
|
||||||
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
|
||||||
<path d="M16 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
|
|
||||||
<circle cx="8.5" cy="7" r="4"/>
|
|
||||||
<line x1="20" y1="8" x2="20" y2="14"/>
|
|
||||||
<line x1="23" y1="11" x2="17" y2="11"/>
|
|
||||||
</svg>
|
|
||||||
Invite Member
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{error && (
|
|
||||||
<div className="error-message">
|
|
||||||
{error}
|
|
||||||
<button onClick={() => setError(null)} className="error-dismiss">×</button>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{showAddForm && (
|
|
||||||
<div className="modal-overlay" onClick={() => setShowAddForm(false)}>
|
|
||||||
<div className="modal-content" onClick={e => e.stopPropagation()}>
|
|
||||||
<h2>Invite Member</h2>
|
|
||||||
<form onSubmit={handleAddMember}>
|
|
||||||
<div className="form-group">
|
|
||||||
<label htmlFor="username">Username</label>
|
|
||||||
<UserAutocomplete
|
|
||||||
value={newMember.username}
|
|
||||||
onChange={(username) => setNewMember({ ...newMember, username })}
|
|
||||||
placeholder="Search for a user..."
|
|
||||||
autoFocus
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<div className="form-group">
|
|
||||||
<label htmlFor="role">Role</label>
|
|
||||||
<select
|
|
||||||
id="role"
|
|
||||||
value={newMember.role}
|
|
||||||
onChange={e => setNewMember({ ...newMember, role: e.target.value as TeamRole })}
|
|
||||||
>
|
|
||||||
<option value="member">Member - Can view team projects</option>
|
|
||||||
<option value="admin">Admin - Can manage team settings and members</option>
|
|
||||||
{isOwner && (
|
|
||||||
<option value="owner">Owner - Full control, can delete team</option>
|
|
||||||
)}
|
|
||||||
</select>
|
|
||||||
</div>
|
|
||||||
<div className="form-actions">
|
|
||||||
<button type="button" className="btn btn-secondary" onClick={() => setShowAddForm(false)}>
|
|
||||||
Cancel
|
|
||||||
</button>
|
|
||||||
<button type="submit" className="btn btn-primary" disabled={adding}>
|
|
||||||
{adding ? 'Adding...' : 'Add Member'}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<DataTable
|
|
||||||
data={members}
|
|
||||||
keyExtractor={(member) => member.id}
|
|
||||||
emptyMessage="No members in this team yet."
|
|
||||||
columns={[
|
|
||||||
{
|
|
||||||
key: 'member',
|
|
||||||
header: 'Member',
|
|
||||||
render: (member) => {
|
|
||||||
const isCurrentUser = user?.username === member.username;
|
|
||||||
return (
|
|
||||||
<div className="member-cell">
|
|
||||||
<div className="member-avatar">
|
|
||||||
{member.username.charAt(0).toUpperCase()}
|
|
||||||
</div>
|
|
||||||
<div className="member-details">
|
|
||||||
<span className="member-username">
|
|
||||||
{member.username}
|
|
||||||
{isCurrentUser && <span className="you-badge">(you)</span>}
|
|
||||||
</span>
|
|
||||||
{member.email && (
|
|
||||||
<span className="member-email">{member.email}</span>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
key: 'role',
|
|
||||||
header: 'Role',
|
|
||||||
render: (member) => {
|
|
||||||
const isCurrentUser = user?.username === member.username;
|
|
||||||
const canModify = isAdmin && !isCurrentUser && (isOwner || member.role !== 'owner');
|
|
||||||
|
|
||||||
if (canModify) {
|
|
||||||
return (
|
|
||||||
<select
|
|
||||||
value={member.role}
|
|
||||||
onChange={e => handleRoleChange(member.username, e.target.value as TeamRole)}
|
|
||||||
disabled={editingMember === member.username}
|
|
||||||
className="role-select"
|
|
||||||
onClick={e => e.stopPropagation()}
|
|
||||||
>
|
|
||||||
{roles.map(role => (
|
|
||||||
<option
|
|
||||||
key={role}
|
|
||||||
value={role}
|
|
||||||
disabled={role === 'owner' && !isOwner}
|
|
||||||
>
|
|
||||||
{role.charAt(0).toUpperCase() + role.slice(1)}
|
|
||||||
</option>
|
|
||||||
))}
|
|
||||||
</select>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
return (
|
|
||||||
<Badge variant={roleVariants[member.role] || 'default'}>
|
|
||||||
{member.role}
|
|
||||||
</Badge>
|
|
||||||
);
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
key: 'joined',
|
|
||||||
header: 'Joined',
|
|
||||||
render: (member) => (
|
|
||||||
<span className="text-muted">
|
|
||||||
{new Date(member.created_at).toLocaleDateString()}
|
|
||||||
</span>
|
|
||||||
),
|
|
||||||
},
|
|
||||||
...(isAdmin ? [{
|
|
||||||
key: 'actions',
|
|
||||||
header: '',
|
|
||||||
render: (member: TeamMember) => {
|
|
||||||
const isCurrentUser = user?.username === member.username;
|
|
||||||
const canModify = isAdmin && !isCurrentUser && (isOwner || member.role !== 'owner');
|
|
||||||
|
|
||||||
if (!canModify) return null;
|
|
||||||
|
|
||||||
return (
|
|
||||||
<button
|
|
||||||
className="btn btn-icon btn-danger-ghost"
|
|
||||||
onClick={(e) => {
|
|
||||||
e.stopPropagation();
|
|
||||||
handleRemoveMember(member.username);
|
|
||||||
}}
|
|
||||||
disabled={removingMember === member.username}
|
|
||||||
title="Remove member"
|
|
||||||
>
|
|
||||||
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
|
||||||
<path d="M3 6h18"/>
|
|
||||||
<path d="M19 6v14a2 2 0 0 1-2 2H7a2 2 0 0 1-2-2V6"/>
|
|
||||||
<path d="M8 6V4a2 2 0 0 1 2-2h4a2 2 0 0 1 2 2v2"/>
|
|
||||||
</svg>
|
|
||||||
</button>
|
|
||||||
);
|
|
||||||
},
|
|
||||||
}] : []),
|
|
||||||
]}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export default TeamMembersPage;
|
|
||||||
@@ -1,239 +0,0 @@
|
|||||||
.team-settings {
|
|
||||||
padding: 1.5rem 0;
|
|
||||||
max-width: 640px;
|
|
||||||
margin: 0 auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
.team-settings h1 {
|
|
||||||
margin: 0 0 1.5rem;
|
|
||||||
font-size: 1.75rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.settings-form {
|
|
||||||
margin-bottom: 2rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-section {
|
|
||||||
background: var(--bg-secondary);
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: var(--radius-lg);
|
|
||||||
padding: 1.5rem;
|
|
||||||
margin-bottom: 1.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-section h2 {
|
|
||||||
margin: 0 0 1rem;
|
|
||||||
font-size: 1.125rem;
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-group {
|
|
||||||
margin-bottom: 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-group label {
|
|
||||||
display: block;
|
|
||||||
margin-bottom: 0.375rem;
|
|
||||||
font-weight: 500;
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-group input,
|
|
||||||
.form-group textarea {
|
|
||||||
width: 100%;
|
|
||||||
padding: 0.5rem 0.75rem;
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
font-size: 0.9375rem;
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-group input:focus,
|
|
||||||
.form-group textarea:focus {
|
|
||||||
outline: none;
|
|
||||||
border-color: var(--accent-primary);
|
|
||||||
box-shadow: 0 0 0 2px rgba(16, 185, 129, 0.2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.input-disabled {
|
|
||||||
background: var(--bg-elevated) !important;
|
|
||||||
color: var(--text-muted) !important;
|
|
||||||
cursor: not-allowed;
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-hint {
|
|
||||||
display: block;
|
|
||||||
margin-top: 0.25rem;
|
|
||||||
font-size: 0.8125rem;
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Danger zone */
|
|
||||||
.danger-zone {
|
|
||||||
border-color: var(--error);
|
|
||||||
background: var(--error-bg);
|
|
||||||
}
|
|
||||||
|
|
||||||
.danger-zone h2 {
|
|
||||||
color: var(--error);
|
|
||||||
}
|
|
||||||
|
|
||||||
.danger-warning {
|
|
||||||
margin: 0 0 1rem;
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--text-secondary);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Messages */
|
|
||||||
.error-message {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: space-between;
|
|
||||||
padding: 0.75rem 1rem;
|
|
||||||
margin-bottom: 1rem;
|
|
||||||
background: var(--error-bg);
|
|
||||||
border: 1px solid var(--error);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
color: var(--error);
|
|
||||||
font-size: 0.875rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.error-dismiss {
|
|
||||||
background: none;
|
|
||||||
border: none;
|
|
||||||
font-size: 1.25rem;
|
|
||||||
cursor: pointer;
|
|
||||||
color: inherit;
|
|
||||||
padding: 0;
|
|
||||||
line-height: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
.success-message {
|
|
||||||
padding: 0.75rem 1rem;
|
|
||||||
margin-bottom: 1rem;
|
|
||||||
background: var(--success-bg);
|
|
||||||
border: 1px solid var(--success);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
color: var(--success);
|
|
||||||
font-size: 0.875rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* States */
|
|
||||||
.loading-state,
|
|
||||||
.error-state {
|
|
||||||
text-align: center;
|
|
||||||
padding: 4rem 2rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.error-state h2 {
|
|
||||||
margin: 0 0 0.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.error-state p {
|
|
||||||
margin: 0 0 1.5rem;
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Modal */
|
|
||||||
.modal-overlay {
|
|
||||||
position: fixed;
|
|
||||||
top: 0;
|
|
||||||
left: 0;
|
|
||||||
right: 0;
|
|
||||||
bottom: 0;
|
|
||||||
background: rgba(0, 0, 0, 0.7);
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
z-index: 1000;
|
|
||||||
padding: 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-content {
|
|
||||||
background: var(--bg-secondary);
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: var(--radius-lg);
|
|
||||||
padding: 1.5rem;
|
|
||||||
width: 100%;
|
|
||||||
max-width: 400px;
|
|
||||||
box-shadow: var(--shadow-lg);
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-content h2 {
|
|
||||||
margin: 0 0 1rem;
|
|
||||||
font-size: 1.25rem;
|
|
||||||
color: var(--error);
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-content p {
|
|
||||||
margin: 0 0 1rem;
|
|
||||||
font-size: 0.9375rem;
|
|
||||||
color: var(--text-secondary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.delete-confirm-input {
|
|
||||||
width: 100%;
|
|
||||||
padding: 0.5rem 0.75rem;
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
font-size: 0.9375rem;
|
|
||||||
margin-bottom: 1rem;
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-actions {
|
|
||||||
display: flex;
|
|
||||||
justify-content: flex-end;
|
|
||||||
gap: 0.75rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Buttons */
|
|
||||||
.btn {
|
|
||||||
display: inline-flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 0.5rem;
|
|
||||||
padding: 0.5rem 1rem;
|
|
||||||
border: none;
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
font-size: 0.875rem;
|
|
||||||
font-weight: 500;
|
|
||||||
cursor: pointer;
|
|
||||||
text-decoration: none;
|
|
||||||
transition: all 0.15s ease;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn:disabled {
|
|
||||||
opacity: 0.6;
|
|
||||||
cursor: not-allowed;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-primary {
|
|
||||||
background: var(--accent-primary);
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-primary:hover:not(:disabled) {
|
|
||||||
background: var(--accent-primary-hover);
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-secondary {
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
color: var(--text-primary);
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-secondary:hover:not(:disabled) {
|
|
||||||
background: var(--bg-hover);
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-danger {
|
|
||||||
background: var(--error);
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-danger:hover:not(:disabled) {
|
|
||||||
background: #b91c1c;
|
|
||||||
}
|
|
||||||
@@ -1,251 +0,0 @@
|
|||||||
import { useState, useEffect, useCallback } from 'react';
|
|
||||||
import { useParams, useNavigate, Link } from 'react-router-dom';
|
|
||||||
import { TeamDetail, TeamUpdate } from '../types';
|
|
||||||
import { getTeam, updateTeam, deleteTeam } from '../api';
|
|
||||||
import { useAuth } from '../contexts/AuthContext';
|
|
||||||
import { Breadcrumb } from '../components/Breadcrumb';
|
|
||||||
import './TeamSettingsPage.css';
|
|
||||||
|
|
||||||
function TeamSettingsPage() {
|
|
||||||
const { slug } = useParams<{ slug: string }>();
|
|
||||||
const navigate = useNavigate();
|
|
||||||
const { user } = useAuth();
|
|
||||||
const [team, setTeam] = useState<TeamDetail | null>(null);
|
|
||||||
const [loading, setLoading] = useState(true);
|
|
||||||
const [saving, setSaving] = useState(false);
|
|
||||||
const [deleting, setDeleting] = useState(false);
|
|
||||||
const [error, setError] = useState<string | null>(null);
|
|
||||||
const [successMessage, setSuccessMessage] = useState<string | null>(null);
|
|
||||||
const [showDeleteConfirm, setShowDeleteConfirm] = useState(false);
|
|
||||||
const [deleteConfirmText, setDeleteConfirmText] = useState('');
|
|
||||||
|
|
||||||
const [formData, setFormData] = useState<TeamUpdate>({
|
|
||||||
name: '',
|
|
||||||
description: '',
|
|
||||||
});
|
|
||||||
|
|
||||||
const loadTeam = useCallback(async () => {
|
|
||||||
if (!slug) return;
|
|
||||||
try {
|
|
||||||
setLoading(true);
|
|
||||||
const teamData = await getTeam(slug);
|
|
||||||
setTeam(teamData);
|
|
||||||
setFormData({
|
|
||||||
name: teamData.name,
|
|
||||||
description: teamData.description || '',
|
|
||||||
});
|
|
||||||
setError(null);
|
|
||||||
} catch (err) {
|
|
||||||
setError(err instanceof Error ? err.message : 'Failed to load team');
|
|
||||||
} finally {
|
|
||||||
setLoading(false);
|
|
||||||
}
|
|
||||||
}, [slug]);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
loadTeam();
  }, [loadTeam]);

  async function handleSubmit(e: React.FormEvent) {
    e.preventDefault();
    if (!slug || !team) return;

    try {
      setSaving(true);
      setError(null);
      const updatedTeam = await updateTeam(slug, formData);
      setTeam(updatedTeam);
      setSuccessMessage('Settings saved successfully');
      setTimeout(() => setSuccessMessage(null), 3000);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to save settings');
    } finally {
      setSaving(false);
    }
  }

  async function handleDelete() {
    if (!slug || !team) return;
    if (deleteConfirmText !== team.slug) return;

    try {
      setDeleting(true);
      await deleteTeam(slug);
      navigate('/teams');
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to delete team');
      setShowDeleteConfirm(false);
    } finally {
      setDeleting(false);
    }
  }

  if (loading) {
    return (
      <div className="team-settings">
        <div className="loading-state">Loading team settings...</div>
      </div>
    );
  }

  if (error && !team) {
    return (
      <div className="team-settings">
        <div className="error-state">
          <h2>Error loading team</h2>
          <p>{error}</p>
          <Link to="/teams" className="btn btn-primary">Back to Teams</Link>
        </div>
      </div>
    );
  }

  if (!team) return null;

  const isOwner = team.user_role === 'owner' || user?.is_admin;
  const isAdmin = team.user_role === 'admin' || isOwner;

  if (!isAdmin) {
    return (
      <div className="team-settings">
        <div className="error-state">
          <h2>Access Denied</h2>
          <p>You need admin privileges to access team settings.</p>
          <Link to={`/teams/${slug}`} className="btn btn-primary">Back to Team</Link>
        </div>
      </div>
    );
  }

  return (
    <div className="team-settings">
      <Breadcrumb
        items={[
          { label: 'Teams', href: '/teams' },
          { label: team.name, href: `/teams/${slug}` },
          { label: 'Settings' },
        ]}
      />

      <h1>Team Settings</h1>

      {error && (
        <div className="error-message">
          {error}
          <button onClick={() => setError(null)} className="error-dismiss">×</button>
        </div>
      )}

      {successMessage && (
        <div className="success-message">
          {successMessage}
        </div>
      )}

      <form onSubmit={handleSubmit} className="settings-form">
        <div className="form-section">
          <h2>General</h2>

          <div className="form-group">
            <label htmlFor="team-name">Team Name</label>
            <input
              id="team-name"
              type="text"
              value={formData.name}
              onChange={e => setFormData({ ...formData, name: e.target.value })}
              required
            />
          </div>

          <div className="form-group">
            <label htmlFor="team-slug">Slug</label>
            <input
              id="team-slug"
              type="text"
              value={team.slug}
              disabled
              className="input-disabled"
            />
            <span className="form-hint">Team slug cannot be changed</span>
          </div>

          <div className="form-group">
            <label htmlFor="team-description">Description</label>
            <textarea
              id="team-description"
              value={formData.description}
              onChange={e => setFormData({ ...formData, description: e.target.value })}
              rows={3}
              placeholder="What is this team for?"
            />
          </div>

          <button type="submit" className="btn btn-primary" disabled={saving}>
            {saving ? 'Saving...' : 'Save Changes'}
          </button>
        </div>
      </form>

      {isOwner && (
        <div className="form-section danger-zone">
          <h2>Danger Zone</h2>
          <p className="danger-warning">
            Deleting a team is permanent and cannot be undone.
            You must move or delete all projects in this team first.
          </p>
          <button
            type="button"
            className="btn btn-danger"
            onClick={() => setShowDeleteConfirm(true)}
          >
            Delete Team
          </button>
        </div>
      )}

      {showDeleteConfirm && (
        <div className="modal-overlay" onClick={() => setShowDeleteConfirm(false)}>
          <div className="modal-content" onClick={e => e.stopPropagation()}>
            <h2>Delete Team</h2>
            <p>
              This will permanently delete the team <strong>{team.name}</strong>.
              This action cannot be undone.
            </p>
            <p>
              To confirm, type <strong>{team.slug}</strong> below:
            </p>
            <input
              type="text"
              value={deleteConfirmText}
              onChange={e => setDeleteConfirmText(e.target.value)}
              placeholder={team.slug}
              className="delete-confirm-input"
            />
            <div className="form-actions">
              <button
                type="button"
                className="btn btn-secondary"
                onClick={() => {
                  setShowDeleteConfirm(false);
                  setDeleteConfirmText('');
                }}
              >
                Cancel
              </button>
              <button
                type="button"
                className="btn btn-danger"
                disabled={deleteConfirmText !== team.slug || deleting}
                onClick={handleDelete}
              >
                {deleting ? 'Deleting...' : 'Delete Team'}
              </button>
            </div>
          </div>
        </div>
      )}
    </div>
  );
}

export default TeamSettingsPage;
@@ -1,376 +0,0 @@
.teams-page {
  padding: 1.5rem 0;
  max-width: 1200px;
  margin: 0 auto;
}

/* Header */
.teams-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 1.5rem;
  gap: 1rem;
}

.teams-header h1 {
  margin: 0;
  font-size: 1.5rem;
  font-weight: 600;
}

/* Search */
.teams-search {
  position: relative;
  margin-bottom: 1.5rem;
}

.teams-search__icon {
  position: absolute;
  left: 0.875rem;
  top: 50%;
  transform: translateY(-50%);
  color: var(--text-muted);
  pointer-events: none;
}

.teams-search__input {
  width: 100%;
  padding: 0.625rem 2.5rem 0.625rem 2.75rem;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  background: var(--bg-primary);
  color: var(--text-primary);
  font-size: 0.875rem;
}

.teams-search__input:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.2);
}

.teams-search__input::placeholder {
  color: var(--text-muted);
}

.teams-search__clear {
  position: absolute;
  right: 0.5rem;
  top: 50%;
  transform: translateY(-50%);
  background: none;
  border: none;
  padding: 0.375rem;
  cursor: pointer;
  color: var(--text-muted);
  display: flex;
  align-items: center;
  justify-content: center;
  border-radius: var(--radius-sm);
}

.teams-search__clear:hover {
  color: var(--text-primary);
  background: var(--bg-secondary);
}

/* Error */
.teams-error {
  display: flex;
  align-items: center;
  justify-content: space-between;
  padding: 0.75rem 1rem;
  margin-bottom: 1rem;
  background: var(--error-bg);
  border: 1px solid var(--error);
  border-radius: var(--radius-md);
  color: var(--error);
  font-size: 0.875rem;
}

.teams-error__dismiss {
  background: none;
  border: none;
  font-size: 1.25rem;
  cursor: pointer;
  color: inherit;
  padding: 0;
  line-height: 1;
}

/* Loading */
.teams-loading {
  display: flex;
  flex-direction: column;
  align-items: center;
  gap: 1rem;
  padding: 4rem 2rem;
  color: var(--text-muted);
}

.teams-loading__spinner {
  width: 32px;
  height: 32px;
  border: 3px solid var(--border-primary);
  border-top-color: var(--accent-primary);
  border-radius: 50%;
  animation: teams-spin 0.8s linear infinite;
}

@keyframes teams-spin {
  to { transform: rotate(360deg); }
}

/* Empty State */
.teams-empty-state {
  text-align: center;
  padding: 4rem 2rem;
  background: var(--bg-secondary);
  border-radius: var(--radius-lg);
  border: 1px solid var(--border-primary);
}

.teams-empty-icon {
  color: var(--text-muted);
  margin-bottom: 1rem;
}

.teams-empty-state h2 {
  margin: 0 0 0.5rem;
  font-size: 1.25rem;
}

.teams-empty-state p {
  margin: 0 0 1.5rem;
  color: var(--text-muted);
}

/* Table cell styles */

.team-name-cell {
  display: flex;
  flex-direction: column;
  gap: 0.125rem;
}

.team-name-link {
  font-weight: 500;
  color: var(--text-primary);
  text-decoration: none;
}

.team-name-link:hover {
  color: var(--accent-primary);
}

.team-slug {
  font-size: 0.8125rem;
  color: var(--text-muted);
}

.team-description-cell {
  color: var(--text-secondary);
  max-width: 300px;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

.text-muted {
  color: var(--text-muted);
}

/* Modal */
.modal-overlay {
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background: rgba(0, 0, 0, 0.7);
  display: flex;
  align-items: center;
  justify-content: center;
  z-index: 1000;
  padding: 1rem;
}

.modal-content {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-lg);
  width: 100%;
  max-width: 480px;
  box-shadow: var(--shadow-lg);
  overflow: hidden;
}

.modal-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  padding: 1.25rem 1.5rem;
  border-bottom: 1px solid var(--border-primary);
}

.modal-header h2 {
  margin: 0;
  font-size: 1.125rem;
  font-weight: 600;
  color: var(--text-primary);
}

.modal-close {
  background: none;
  border: none;
  padding: 0.25rem;
  cursor: pointer;
  color: var(--text-muted);
  display: flex;
  border-radius: var(--radius-sm);
}

.modal-close:hover {
  color: var(--text-primary);
  background: var(--bg-hover);
}

.modal-content form {
  padding: 1.5rem;
}

/* Form */
.form-group {
  margin-bottom: 1rem;
}

.form-group label {
  display: block;
  margin-bottom: 0.375rem;
  font-weight: 500;
  font-size: 0.875rem;
  color: var(--text-primary);
}

.form-group .optional {
  font-weight: 400;
  color: var(--text-muted);
}

.form-group input,
.form-group textarea {
  width: 100%;
  padding: 0.625rem 0.75rem;
  border: 1px solid var(--border-primary);
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  background: var(--bg-tertiary);
  color: var(--text-primary);
}

.form-group input:focus,
.form-group textarea:focus {
  outline: none;
  border-color: var(--accent-primary);
  box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.2);
}

.input-with-prefix {
  display: flex;
  align-items: stretch;
}

.input-prefix {
  display: flex;
  align-items: center;
  padding: 0 0.75rem;
  background: var(--bg-elevated);
  border: 1px solid var(--border-primary);
  border-right: none;
  border-radius: var(--radius-md) 0 0 var(--radius-md);
  color: var(--text-muted);
  font-size: 0.875rem;
}

.input-with-prefix input {
  border-radius: 0 var(--radius-md) var(--radius-md) 0;
}

.form-hint {
  display: block;
  margin-top: 0.25rem;
  font-size: 0.75rem;
  color: var(--text-muted);
}

.form-actions {
  display: flex;
  justify-content: flex-end;
  gap: 0.75rem;
  margin-top: 1.5rem;
  padding-top: 1rem;
  border-top: 1px solid var(--border-primary);
}

/* Buttons */
.btn {
  display: inline-flex;
  align-items: center;
  gap: 0.5rem;
  padding: 0.5rem 1rem;
  border: none;
  border-radius: var(--radius-md);
  font-size: 0.875rem;
  font-weight: 500;
  cursor: pointer;
  transition: all 0.15s ease;
}

.btn:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

.btn-primary {
  background: var(--accent-primary);
  color: white;
}

.btn-primary:hover:not(:disabled) {
  background: var(--accent-primary-hover);
}

.btn-secondary {
  background: var(--bg-tertiary);
  color: var(--text-primary);
  border: 1px solid var(--border-primary);
}

.btn-secondary:hover:not(:disabled) {
  background: var(--bg-hover);
}

/* Responsive */
@media (max-width: 640px) {
  .teams-header {
    flex-direction: column;
    align-items: stretch;
  }

  .teams-header .btn {
    justify-content: center;
  }

  .teams-stats {
    justify-content: space-around;
  }

  .teams-table-container {
    overflow-x: auto;
  }

  .teams-table {
    min-width: 600px;
  }
}
@@ -1,310 +0,0 @@
import { useState, useEffect, useCallback } from 'react';
import { Link, useNavigate } from 'react-router-dom';
import { TeamDetail, TeamCreate, PaginatedResponse } from '../types';
import { listTeams, createTeam } from '../api';
import { useAuth } from '../contexts/AuthContext';
import { Badge } from '../components/Badge';
import { DataTable } from '../components/DataTable';
import './TeamsPage.css';

function TeamsPage() {
  const navigate = useNavigate();
  const { user } = useAuth();
  const [teamsData, setTeamsData] = useState<PaginatedResponse<TeamDetail> | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const [showForm, setShowForm] = useState(false);
  const [newTeam, setNewTeam] = useState<TeamCreate>({ name: '', slug: '', description: '' });
  const [creating, setCreating] = useState(false);
  const [slugManuallySet, setSlugManuallySet] = useState(false);
  const [searchQuery, setSearchQuery] = useState('');

  const loadTeams = useCallback(async () => {
    try {
      setLoading(true);
      const data = await listTeams({ limit: 100 });
      setTeamsData(data);
      setError(null);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load teams');
    } finally {
      setLoading(false);
    }
  }, []);

  useEffect(() => {
    loadTeams();
  }, [loadTeams]);

  // Auto-generate slug from name
  const handleNameChange = (name: string) => {
    setNewTeam(prev => ({
      ...prev,
      name,
      slug: slugManuallySet ? prev.slug : name.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-|-$/g, ''),
    }));
  };

  const handleSlugChange = (slug: string) => {
    setSlugManuallySet(true);
    setNewTeam(prev => ({ ...prev, slug }));
  };

  async function handleCreateTeam(e: React.FormEvent) {
    e.preventDefault();
    try {
      setCreating(true);
      const team = await createTeam(newTeam);
      setNewTeam({ name: '', slug: '', description: '' });
      setSlugManuallySet(false);
      setShowForm(false);
      navigate(`/teams/${team.slug}`);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to create team');
    } finally {
      setCreating(false);
    }
  }

  const closeModal = () => {
    setShowForm(false);
    setNewTeam({ name: '', slug: '', description: '' });
    setSlugManuallySet(false);
  };

  // Filter teams by search
  const filteredTeams = teamsData?.items.filter(team =>
    team.name.toLowerCase().includes(searchQuery.toLowerCase()) ||
    team.slug.toLowerCase().includes(searchQuery.toLowerCase()) ||
    (team.description?.toLowerCase().includes(searchQuery.toLowerCase()))
  ) || [];

  const totalTeams = teamsData?.items.length || 0;

  const roleConfig: Record<string, { variant: 'success' | 'info' | 'default'; label: string }> = {
    owner: { variant: 'success', label: 'Owner' },
    admin: { variant: 'info', label: 'Admin' },
    member: { variant: 'default', label: 'Member' },
  };

  if (!user) {
    return (
      <div className="teams-page">
        <div className="teams-empty-state">
          <div className="teams-empty-icon">
            <svg width="64" height="64" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
              <path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
              <circle cx="9" cy="7" r="4"/>
              <path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
              <path d="M16 3.13a4 4 0 0 1 0 7.75"/>
            </svg>
          </div>
          <h2>Sign in to view your teams</h2>
          <p>Teams help you organize projects and collaborate with others.</p>
          <Link to="/login" className="btn btn-primary">Sign In</Link>
        </div>
      </div>
    );
  }

  return (
    <div className="teams-page">
      {/* Header */}
      <div className="teams-header">
        <h1>Teams</h1>
        <button className="btn btn-primary" onClick={() => setShowForm(true)}>
          <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <line x1="12" y1="5" x2="12" y2="19" />
            <line x1="5" y1="12" x2="19" y2="12" />
          </svg>
          Create Team
        </button>
      </div>

      {/* Search */}
      {!loading && totalTeams > 3 && (
        <div className="teams-search">
          <svg className="teams-search__icon" width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
            <circle cx="11" cy="11" r="8"/>
            <line x1="21" y1="21" x2="16.65" y2="16.65"/>
          </svg>
          <input
            type="text"
            placeholder="Search teams..."
            value={searchQuery}
            onChange={(e) => setSearchQuery(e.target.value)}
            className="teams-search__input"
          />
          {searchQuery && (
            <button className="teams-search__clear" onClick={() => setSearchQuery('')}>
              <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <line x1="18" y1="6" x2="6" y2="18"/>
                <line x1="6" y1="6" x2="18" y2="18"/>
              </svg>
            </button>
          )}
        </div>
      )}

      {error && (
        <div className="teams-error">
          {error}
          <button onClick={() => setError(null)} className="teams-error__dismiss">×</button>
        </div>
      )}

      {/* Create Team Modal */}
      {showForm && (
        <div className="modal-overlay" onClick={closeModal}>
          <div className="modal-content" onClick={e => e.stopPropagation()}>
            <div className="modal-header">
              <h2>Create New Team</h2>
              <button className="modal-close" onClick={closeModal}>
                <svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                  <line x1="18" y1="6" x2="6" y2="18"/>
                  <line x1="6" y1="6" x2="18" y2="18"/>
                </svg>
              </button>
            </div>
            <form onSubmit={handleCreateTeam}>
              <div className="form-group">
                <label htmlFor="team-name">Team Name</label>
                <input
                  id="team-name"
                  type="text"
                  value={newTeam.name}
                  onChange={e => handleNameChange(e.target.value)}
                  placeholder="Engineering"
                  required
                  autoFocus
                />
              </div>
              <div className="form-group">
                <label htmlFor="team-slug">URL Slug</label>
                <div className="input-with-prefix">
                  <span className="input-prefix">@</span>
                  <input
                    id="team-slug"
                    type="text"
                    value={newTeam.slug}
                    onChange={e => handleSlugChange(e.target.value)}
                    placeholder="engineering"
                    pattern="^[a-z0-9][a-z0-9-]*[a-z0-9]$|^[a-z0-9]$"
                    title="Lowercase letters, numbers, and hyphens only"
                    required
                  />
                </div>
                <span className="form-hint">Used in URLs. Lowercase letters, numbers, and hyphens.</span>
              </div>
              <div className="form-group">
                <label htmlFor="team-description">Description <span className="optional">(optional)</span></label>
                <textarea
                  id="team-description"
                  value={newTeam.description}
                  onChange={e => setNewTeam({ ...newTeam, description: e.target.value })}
                  placeholder="What is this team for?"
                  rows={3}
                />
              </div>
              <div className="form-actions">
                <button type="button" className="btn btn-secondary" onClick={closeModal}>
                  Cancel
                </button>
                <button type="submit" className="btn btn-primary" disabled={creating}>
                  {creating ? 'Creating...' : 'Create Team'}
                </button>
              </div>
            </form>
          </div>
        </div>
      )}

      {/* Content */}
      {loading ? (
        <div className="teams-loading">
          <div className="teams-loading__spinner" />
          <span>Loading teams...</span>
        </div>
      ) : filteredTeams.length === 0 ? (
        <div className="teams-empty-state">
          <div className="teams-empty-icon">
            <svg width="64" height="64" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
              <path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
              <circle cx="9" cy="7" r="4"/>
              <path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
              <path d="M16 3.13a4 4 0 0 1 0 7.75"/>
            </svg>
          </div>
          {searchQuery ? (
            <>
              <h2>No teams found</h2>
              <p>No teams match "{searchQuery}"</p>
              <button className="btn btn-secondary" onClick={() => setSearchQuery('')}>
                Clear search
              </button>
            </>
          ) : (
            <>
              <h2>No teams yet</h2>
              <p>Create your first team to start organizing your projects.</p>
              <button className="btn btn-primary" onClick={() => setShowForm(true)}>
                Create Team
              </button>
            </>
          )}
        </div>
      ) : (
        <DataTable
          data={filteredTeams}
          keyExtractor={(team) => team.id}
          onRowClick={(team) => navigate(`/teams/${team.slug}`)}
          columns={[
            {
              key: 'name',
              header: 'Name',
              render: (team) => (
                <div className="team-name-cell">
                  <Link
                    to={`/teams/${team.slug}`}
                    className="cell-name"
                    onClick={(e) => e.stopPropagation()}
                  >
                    {team.name}
                  </Link>
                  <span className="team-slug">@{team.slug}</span>
                </div>
              ),
            },
            {
              key: 'description',
              header: 'Description',
              className: 'cell-description',
              render: (team) => team.description || <span className="text-muted">—</span>,
            },
            {
              key: 'role',
              header: 'Role',
              render: (team) => team.user_role ? (
                <Badge variant={roleConfig[team.user_role]?.variant || 'default'}>
                  {roleConfig[team.user_role]?.label || team.user_role}
                </Badge>
              ) : null,
            },
            {
              key: 'members',
              header: 'Members',
              render: (team) => <span className="text-muted">{team.member_count}</span>,
            },
            {
              key: 'projects',
              header: 'Projects',
              render: (team) => <span className="text-muted">{team.project_count}</span>,
            },
          ]}
        />
      )}
    </div>
  );
}

export default TeamsPage;
@@ -6,17 +6,12 @@ export interface Project {
  name: string;
  description: string | null;
  is_public: boolean;
  is_system?: boolean; // True for system cache projects (_npm, _pypi, etc.)
  created_at: string;
  updated_at: string;
  created_by: string;
  // Access level info (populated when listing projects)
  access_level?: AccessLevel | null;
  is_owner?: boolean;
  // Team info
  team_id?: string | null;
  team_slug?: string | null;
  team_name?: string | null;
}

export interface TagSummary {
@@ -321,8 +316,6 @@ export interface UserUpdate {
}

// Access Permission types
export type AccessSource = 'explicit' | 'team';

export interface AccessPermission {
  id: string;
  project_id: string;
@@ -330,9 +323,6 @@ export interface AccessPermission {
  level: AccessLevel;
  created_at: string;
  expires_at: string | null;
  source?: AccessSource; // "explicit" or "team"
  team_slug?: string; // Team slug if source is "team"
  team_role?: string; // Team role if source is "team"
}

export interface AccessPermissionCreate {
@@ -383,177 +373,3 @@ export interface OIDCStatus {
  enabled: boolean;
  issuer_url?: string;
}

// Dependency types
export interface Dependency {
  id: string;
  artifact_id: string;
  project: string;
  package: string;
  version: string | null;
  tag: string | null;
  created_at: string;
}

export interface ArtifactDependenciesResponse {
  artifact_id: string;
  dependencies: Dependency[];
}

export interface DependentInfo {
  artifact_id: string;
  project: string;
  package: string;
  version: string | null;
  constraint_type: 'version' | 'tag';
  constraint_value: string;
}

export interface ReverseDependenciesResponse {
  project: string;
  package: string;
  dependents: DependentInfo[];
  pagination: {
    page: number;
    limit: number;
    total: number;
    total_pages: number;
    has_more: boolean;
  };
}

// Dependency Resolution types
export interface ResolvedArtifact {
  artifact_id: string;
  project: string;
  package: string;
  version: string | null;
  tag: string | null;
  size: number;
  download_url: string;
}

export interface DependencyResolutionResponse {
  requested: {
    project: string;
    package: string;
    ref: string;
  };
  resolved: ResolvedArtifact[];
  total_size: number;
  artifact_count: number;
}

export interface DependencyResolutionError {
  error: 'circular_dependency' | 'dependency_conflict' | 'not_found';
  message: string;
  cycle?: string[];
  conflicts?: Array<{
    project: string;
    package: string;
    requirements: Array<{
      version: string;
      required_by: Array<{ path: string }>;
    }>;
  }>;
}

// Team types
export type TeamRole = 'owner' | 'admin' | 'member';

export interface Team {
  id: string;
  name: string;
  slug: string;
  description: string | null;
  created_at: string;
  updated_at: string;
  member_count: number;
  project_count: number;
}

export interface TeamDetail extends Team {
  user_role: TeamRole | null;
}

export interface TeamMember {
  id: string;
  user_id: string;
  username: string;
  email: string | null;
  role: TeamRole;
  created_at: string;
}

export interface TeamCreate {
  name: string;
  slug: string;
  description?: string;
}

export interface TeamUpdate {
  name?: string;
  description?: string;
}

export interface TeamMemberCreate {
  username: string;
  role: TeamRole;
}

export interface TeamMemberUpdate {
  role: TeamRole;
}

// Upstream Source types
export type SourceType = 'npm' | 'pypi' | 'maven' | 'docker' | 'helm' | 'nuget' | 'deb' | 'rpm' | 'generic';
export type AuthType = 'none' | 'basic' | 'bearer' | 'api_key';

export interface UpstreamSource {
  id: string;
  name: string;
  source_type: SourceType;
  url: string;
  enabled: boolean;
  auth_type: AuthType;
  username: string | null;
  has_password: boolean;
  has_headers: boolean;
  priority: number;
  source: 'database' | 'env';
  created_at: string | null;
  updated_at: string | null;
}

export interface UpstreamSourceCreate {
  name: string;
  source_type: SourceType;
  url: string;
  enabled?: boolean;
  auth_type?: AuthType;
  username?: string;
  password?: string;
  headers?: Record<string, string>;
  priority?: number;
}

export interface UpstreamSourceUpdate {
  name?: string;
  source_type?: SourceType;
  url?: string;
  enabled?: boolean;
  auth_type?: AuthType;
  username?: string;
  password?: string;
  headers?: Record<string, string> | null;
  priority?: number;
}

export interface UpstreamSourceTestResult {
  success: boolean;
  status_code: number | null;
  elapsed_ms: number;
  error: string | null;
  source_id: string;
  source_name: string;
}
@@ -141,16 +141,3 @@ MinIO secret name
{{- printf "%s-s3-secret" (include "orchard.fullname" .) }}
{{- end }}
{{- end }}

{{/*
Auth secret name (for admin password)
*/}}
{{- define "orchard.auth.secretName" -}}
{{- if and .Values.orchard.auth .Values.orchard.auth.existingSecret }}
{{- .Values.orchard.auth.existingSecret }}
{{- else if and .Values.orchard.auth .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled }}
{{- printf "%s-auth-credentials" (include "orchard.fullname" .) }}
{{- else }}
{{- printf "%s-auth-secret" (include "orchard.fullname" .) }}
{{- end }}
{{- end }}
@@ -69,8 +69,6 @@ spec:
containerPort: {{ .Values.orchard.server.port }}
protocol: TCP
env:
- name: ORCHARD_ENV
value: {{ .Values.orchard.env | default "development" | quote }}
- name: ORCHARD_SERVER_HOST
value: {{ .Values.orchard.server.host | quote }}
- name: ORCHARD_SERVER_PORT
@@ -128,43 +126,11 @@ spec:
value: {{ .Values.orchard.rateLimit.login | quote }}
{{- end }}
{{- end }}
{{- if .Values.orchard.purgeSeedData }}
{{- if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
- name: ORCHARD_PURGE_SEED_DATA
value: "true"
{{- end }}
{{- if .Values.orchard.database.poolSize }}
- name: ORCHARD_DATABASE_POOL_SIZE
value: {{ .Values.orchard.database.poolSize | quote }}
{{- end }}
{{- if .Values.orchard.database.maxOverflow }}
- name: ORCHARD_DATABASE_MAX_OVERFLOW
value: {{ .Values.orchard.database.maxOverflow | quote }}
{{- end }}
{{- if .Values.orchard.database.poolTimeout }}
- name: ORCHARD_DATABASE_POOL_TIMEOUT
value: {{ .Values.orchard.database.poolTimeout | quote }}
{{- end }}
{{- if .Values.orchard.auth }}
{{- if or .Values.orchard.auth.secretsManager .Values.orchard.auth.existingSecret .Values.orchard.auth.adminPassword }}
- name: ORCHARD_ADMIN_PASSWORD
valueFrom:
secretKeyRef:
name: {{ include "orchard.auth.secretName" . }}
key: admin-password
{{- end }}
{{- end }}
{{- if or (and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled) (and .Values.orchard.auth .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled) }}
volumeMounts:
{{- if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
- name: db-secrets
mountPath: /mnt/secrets-store/db
mountPath: /mnt/secrets-store
readOnly: true
{{- end }}
{{- if and .Values.orchard.auth .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled }}
- name: auth-secrets
mountPath: /mnt/secrets-store/auth
readOnly: true
{{- end }}
{{- end }}
livenessProbe:
{{- toYaml .Values.livenessProbe | nindent 12 }}
@@ -172,24 +138,14 @@ spec:
{{- toYaml .Values.readinessProbe | nindent 12 }}
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{- if or (and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled) (and .Values.orchard.auth .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled) }}
{{- if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
volumes:
{{- if and .Values.orchard.database.secretsManager .Values.orchard.database.secretsManager.enabled }}
- name: db-secrets
csi:
driver: secrets-store.csi.k8s.io
readOnly: true
volumeAttributes:
secretProviderClass: {{ include "orchard.fullname" . }}-db-secret
{{- end }}
{{- if and .Values.orchard.auth .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled }}
- name: auth-secrets
csi:
driver: secrets-store.csi.k8s.io
readOnly: true
volumeAttributes:
secretProviderClass: {{ include "orchard.fullname" . }}-auth-secret
{{- end }}
{{- end }}
{{- with .Values.nodeSelector }}
nodeSelector:
@@ -25,27 +25,3 @@ spec:
- objectName: db-password
key: password
{{- end }}
---
{{- if and .Values.orchard.auth .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled }}
apiVersion: secrets-store.csi.x-k8s.io/v1
kind: SecretProviderClass
metadata:
name: {{ include "orchard.fullname" . }}-auth-secret
labels:
{{- include "orchard.labels" . | nindent 4 }}
spec:
provider: aws
parameters:
objects: |
- objectName: "{{ .Values.orchard.auth.secretsManager.secretArn }}"
objectType: "secretsmanager"
jmesPath:
- path: admin_password
objectAlias: admin-password
secretObjects:
- secretName: {{ include "orchard.fullname" . }}-auth-credentials
type: Opaque
data:
- objectName: admin-password
key: admin-password
{{- end }}
@@ -22,15 +22,3 @@ data:
access-key-id: {{ .Values.orchard.s3.accessKeyId | b64enc | quote }}
secret-access-key: {{ .Values.orchard.s3.secretAccessKey | b64enc | quote }}
{{- end }}
---
{{- if and .Values.orchard.auth .Values.orchard.auth.adminPassword (not .Values.orchard.auth.existingSecret) (not (and .Values.orchard.auth.secretsManager .Values.orchard.auth.secretsManager.enabled)) }}
apiVersion: v1
kind: Secret
metadata:
name: {{ include "orchard.fullname" . }}-auth-secret
labels:
{{- include "orchard.labels" . | nindent 4 }}
type: Opaque
data:
admin-password: {{ .Values.orchard.auth.adminPassword | b64enc | quote }}
{{- end }}
@@ -53,16 +53,15 @@ ingress:
hosts:
- orchard-dev.common.global.bsf.tools # Overridden by CI

# Resources for dev/feature environments
# Lighter resources for ephemeral environments
# Bumped to handle concurrent integration tests
# Note: memory requests must equal limits per cluster policy
resources:
limits:
cpu: 500m
cpu: 250m
memory: 512Mi
memory: 256Mi
requests:
cpu: 200m
cpu: 100m
memory: 512Mi
memory: 256Mi

livenessProbe:
httpGet:
@@ -86,15 +85,10 @@ tolerations: []
affinity: {}

orchard:
env: "development" # Allows seed data for testing
server:
host: "0.0.0.0"
port: 8080

# Authentication settings
# Admin password is set via CI variable (DEV_ADMIN_PASSWORD) passed as --set flag
# This keeps the password out of version control

database:
host: ""
port: 5432
@@ -104,10 +98,6 @@ orchard:
sslmode: disable
existingSecret: ""
existingSecretPasswordKey: "password"
# Increased pool settings for concurrent integration tests
poolSize: 10
maxOverflow: 20
poolTimeout: 60

s3:
endpoint: ""
@@ -143,16 +133,15 @@ postgresql:
primary:
persistence:
enabled: false
# Bumped resources for concurrent integration tests
# Resources with memory requests = limits per cluster policy
# Note: memory requests must equal limits per cluster policy
resourcesPreset: "none"
resources:
limits:
cpu: 500m
cpu: 250m
memory: 512Mi
memory: 256Mi
requests:
cpu: 200m
cpu: 100m
memory: 512Mi
memory: 256Mi
# Volume permissions init container
volumePermissions:
resourcesPreset: "none"
@@ -178,16 +167,15 @@ minio:
defaultBuckets: "orchard-artifacts"
persistence:
enabled: false
# Bumped resources for concurrent integration tests
# Resources with memory requests = limits per cluster policy
# Note: memory requests must equal limits per cluster policy
resourcesPreset: "none" # Disable preset to use explicit resources
resources:
limits:
cpu: 500m
cpu: 250m
memory: 512Mi
memory: 256Mi
requests:
cpu: 200m
cpu: 100m
memory: 512Mi
memory: 256Mi
# Init container resources
defaultInitContainers:
volumePermissions:
@@ -88,18 +88,10 @@ tolerations: []
affinity: {}

orchard:
env: "production" # Disables seed data
server:
host: "0.0.0.0"
port: 8080

# Authentication settings
auth:
# Admin password from AWS Secrets Manager
secretsManager:
enabled: true
secretArn: "arn:aws-us-gov:secretsmanager:us-gov-west-1:052673043337:secret:orch-prod-creds-0nhqkY"

# Database configuration - uses AWS Secrets Manager via CSI driver
database:
host: "orchard-prd.cluster-cvw3jzjkozoc.us-gov-west-1.rds.amazonaws.com"
@@ -90,16 +90,10 @@ affinity: {}

# Orchard server configuration
orchard:
env: "development" # Allows seed data for testing
purgeSeedData: true # Remove public seed data (npm-public, pypi-public, etc.)
server:
host: "0.0.0.0"
port: 8080

# Authentication settings
# Admin password is set via CI variable (STAGE_ADMIN_PASSWORD) passed as --set flag
# This keeps the password out of version control

# Database configuration - uses AWS Secrets Manager via CSI driver
database:
host: "orchard-stage.cluster-cvw3jzjkozoc.us-gov-west-1.rds.amazonaws.com"
@@ -120,17 +120,6 @@ orchard:
mode: "presigned" # presigned, redirect, or proxy
presignedUrlExpiry: 3600 # Presigned URL expiry in seconds

# Authentication settings
auth:
# Option 1: Plain admin password (creates K8s secret)
adminPassword: ""
# Option 2: Use existing K8s secret (must have 'admin-password' key)
existingSecret: ""
# Option 3: AWS Secrets Manager
# secretsManager:
# enabled: false
# secretArn: "" # Secret must have 'admin_password' field

# PostgreSQL subchart configuration
postgresql:
enabled: true
@@ -1,48 +0,0 @@
-- Migration 008: Artifact Dependencies
-- Adds support for declaring dependencies between artifacts
-- Part of Package Dependency Management feature (#76)

-- Create artifact_dependencies table
CREATE TABLE IF NOT EXISTS artifact_dependencies (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    artifact_id VARCHAR(64) NOT NULL REFERENCES artifacts(id) ON DELETE CASCADE,
    dependency_project VARCHAR(255) NOT NULL,
    dependency_package VARCHAR(255) NOT NULL,
    version_constraint VARCHAR(255),
    tag_constraint VARCHAR(255),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,

    -- Exactly one of version_constraint or tag_constraint must be set
    CONSTRAINT check_constraint_type CHECK (
        (version_constraint IS NOT NULL AND tag_constraint IS NULL) OR
        (version_constraint IS NULL AND tag_constraint IS NOT NULL)
    ),

    -- Each artifact can only have one dependency on a specific project/package
    CONSTRAINT unique_artifact_dependency UNIQUE (artifact_id, dependency_project, dependency_package)
);

-- Index for fast lookups by artifact_id (get all deps for an artifact)
CREATE INDEX IF NOT EXISTS idx_artifact_dependencies_artifact_id
    ON artifact_dependencies(artifact_id);

-- Index for reverse dependency lookups (find what depends on a package)
CREATE INDEX IF NOT EXISTS idx_artifact_dependencies_target
    ON artifact_dependencies(dependency_project, dependency_package);

-- Index for finding dependencies with specific version constraints
CREATE INDEX IF NOT EXISTS idx_artifact_dependencies_version
    ON artifact_dependencies(dependency_project, dependency_package, version_constraint)
    WHERE version_constraint IS NOT NULL;

-- Index for finding dependencies with specific tag constraints
CREATE INDEX IF NOT EXISTS idx_artifact_dependencies_tag
    ON artifact_dependencies(dependency_project, dependency_package, tag_constraint)
    WHERE tag_constraint IS NOT NULL;

COMMENT ON TABLE artifact_dependencies IS 'Stores dependencies declared by artifacts on other packages';
COMMENT ON COLUMN artifact_dependencies.artifact_id IS 'The artifact that declares this dependency';
COMMENT ON COLUMN artifact_dependencies.dependency_project IS 'Project name of the dependency';
COMMENT ON COLUMN artifact_dependencies.dependency_package IS 'Package name of the dependency';
COMMENT ON COLUMN artifact_dependencies.version_constraint IS 'Exact version required (mutually exclusive with tag_constraint)';
COMMENT ON COLUMN artifact_dependencies.tag_constraint IS 'Tag name required (mutually exclusive with version_constraint)';
@@ -1,62 +0,0 @@
-- Migration 009: Teams and Multi-Tenancy
-- Adds support for team-based multi-tenancy
-- Part of Multi-Tenancy with Teams feature

-- Create teams table
CREATE TABLE IF NOT EXISTS teams (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name VARCHAR(255) NOT NULL,
    slug VARCHAR(255) NOT NULL UNIQUE,
    description TEXT,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    created_by VARCHAR(255) NOT NULL,
    settings JSONB DEFAULT '{}'::jsonb,

    -- Slug must be lowercase alphanumeric with hyphens
    CONSTRAINT check_team_slug_format CHECK (slug ~ '^[a-z0-9][a-z0-9-]*[a-z0-9]$' OR slug ~ '^[a-z0-9]$')
);

-- Create team_memberships table
CREATE TABLE IF NOT EXISTS team_memberships (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    team_id UUID NOT NULL REFERENCES teams(id) ON DELETE CASCADE,
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    role VARCHAR(20) NOT NULL DEFAULT 'member',
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    invited_by VARCHAR(255),

    -- Each user can only be a member of a team once
    CONSTRAINT unique_team_membership UNIQUE (team_id, user_id),

    -- Role must be one of: owner, admin, member
    CONSTRAINT check_team_role CHECK (role IN ('owner', 'admin', 'member'))
);

-- Add team_id column to projects table (nullable for migration compatibility)
ALTER TABLE projects ADD COLUMN IF NOT EXISTS team_id UUID REFERENCES teams(id) ON DELETE SET NULL;

-- Indexes for teams table
CREATE INDEX IF NOT EXISTS idx_teams_slug ON teams(slug);
CREATE INDEX IF NOT EXISTS idx_teams_created_by ON teams(created_by);
CREATE INDEX IF NOT EXISTS idx_teams_created_at ON teams(created_at);

-- Indexes for team_memberships table
CREATE INDEX IF NOT EXISTS idx_team_memberships_team_id ON team_memberships(team_id);
CREATE INDEX IF NOT EXISTS idx_team_memberships_user_id ON team_memberships(user_id);
CREATE INDEX IF NOT EXISTS idx_team_memberships_role ON team_memberships(role);
CREATE INDEX IF NOT EXISTS idx_team_memberships_team_role ON team_memberships(team_id, role);

-- Index for projects team_id
CREATE INDEX IF NOT EXISTS idx_projects_team_id ON projects(team_id);

-- Comments
COMMENT ON TABLE teams IS 'Teams serve as organizational containers for projects';
COMMENT ON COLUMN teams.slug IS 'URL-friendly unique identifier (lowercase alphanumeric with hyphens)';
COMMENT ON COLUMN teams.settings IS 'JSON object for team-specific settings';

COMMENT ON TABLE team_memberships IS 'Maps users to teams with their roles';
COMMENT ON COLUMN team_memberships.role IS 'User role in the team: owner, admin, or member';
COMMENT ON COLUMN team_memberships.invited_by IS 'Username of the user who invited this member';

COMMENT ON COLUMN projects.team_id IS 'Optional team that owns this project';
@@ -1,99 +0,0 @@
-- Migration 009b: Migrate Existing Projects to Personal Teams
-- Creates personal teams for existing users and assigns their projects to those teams.
-- This migration is idempotent and can be run multiple times safely.

-- Create personal teams for users who own projects but don't have a personal team yet
INSERT INTO teams (name, slug, description, created_by, settings)
SELECT DISTINCT
    u.username || '''s Team' AS name,
    LOWER(u.username) || '-personal' AS slug,
    'Personal team for ' || u.username AS description,
    u.username AS created_by,
    '{"personal": true}'::jsonb AS settings
FROM users u
JOIN projects p ON p.created_by = u.username
WHERE NOT EXISTS (
    SELECT 1 FROM teams t
    WHERE t.slug = LOWER(u.username) || '-personal'
)
AND p.team_id IS NULL
ON CONFLICT (slug) DO NOTHING;

-- Add users as owners of their personal teams
INSERT INTO team_memberships (team_id, user_id, role, invited_by)
SELECT
    t.id AS team_id,
    u.id AS user_id,
    'owner' AS role,
    u.username AS invited_by
FROM teams t
JOIN users u ON t.created_by = u.username
WHERE t.slug LIKE '%-personal'
AND NOT EXISTS (
    SELECT 1 FROM team_memberships tm
    WHERE tm.team_id = t.id
    AND tm.user_id = u.id
)
ON CONFLICT DO NOTHING;

-- Assign projects without a team to their creator's personal team
UPDATE projects p
SET team_id = t.id
FROM teams t
WHERE t.slug = LOWER(p.created_by) || '-personal'
AND p.team_id IS NULL;

-- Handle orphaned projects (created_by doesn't match any user)
-- Create a special orphaned projects team if there are any
DO $$
DECLARE
    orphan_count INTEGER;
    orphan_team_id UUID;
BEGIN
    -- Count orphaned projects
    SELECT COUNT(*) INTO orphan_count
    FROM projects p
    WHERE p.team_id IS NULL
    AND NOT EXISTS (
        SELECT 1 FROM users u WHERE u.username = p.created_by
    );

    IF orphan_count > 0 THEN
        -- Create or get the orphaned projects team
        INSERT INTO teams (name, slug, description, created_by, settings)
        VALUES (
            'Orphaned Projects',
            'orphaned-projects',
            'Projects whose original creators no longer exist',
            'system',
            '{"system": true}'::jsonb
        )
        ON CONFLICT (slug) DO UPDATE SET name = teams.name
        RETURNING id INTO orphan_team_id;

        -- Assign orphaned projects to this team
        UPDATE projects
        SET team_id = orphan_team_id
        WHERE team_id IS NULL
        AND NOT EXISTS (
            SELECT 1 FROM users u WHERE u.username = projects.created_by
        );

        RAISE NOTICE 'Migrated % orphaned project(s) to orphaned-projects team', orphan_count;
    END IF;
END $$;

-- Log migration results
DO $$
DECLARE
    teams_created INTEGER;
    memberships_created INTEGER;
    projects_migrated INTEGER;
BEGIN
    SELECT COUNT(*) INTO teams_created FROM teams WHERE slug LIKE '%-personal';
    SELECT COUNT(*) INTO memberships_created FROM team_memberships;
    SELECT COUNT(*) INTO projects_migrated FROM projects WHERE team_id IS NOT NULL;

    RAISE NOTICE 'Migration complete: % personal teams, % memberships, % projects with teams',
        teams_created, memberships_created, projects_migrated;
END $$;
@@ -1,137 +0,0 @@
-- Migration 010: Upstream Artifact Caching
-- Adds support for caching artifacts from upstream registries (npm, PyPI, Maven, etc.)
-- Part of "The cache that never forgets" epic for hermetic builds

-- =============================================================================
-- upstream_sources: Configure upstream registries for artifact caching
-- =============================================================================
CREATE TABLE IF NOT EXISTS upstream_sources (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name VARCHAR(255) NOT NULL UNIQUE,
    source_type VARCHAR(50) NOT NULL DEFAULT 'generic',
    url VARCHAR(2048) NOT NULL,
    enabled BOOLEAN NOT NULL DEFAULT FALSE,
    is_public BOOLEAN NOT NULL DEFAULT TRUE,
    auth_type VARCHAR(20) NOT NULL DEFAULT 'none',
    username VARCHAR(255),
    password_encrypted BYTEA,
    headers_encrypted BYTEA,
    priority INTEGER NOT NULL DEFAULT 100,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,

    -- Source type must be one of the supported types
    CONSTRAINT check_source_type CHECK (
        source_type IN ('npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic')
    ),

    -- Auth type must be valid
    CONSTRAINT check_auth_type CHECK (
        auth_type IN ('none', 'basic', 'bearer', 'api_key')
    ),

    -- Priority must be positive
    CONSTRAINT check_priority_positive CHECK (priority > 0)
);

-- Indexes for upstream_sources
CREATE INDEX IF NOT EXISTS idx_upstream_sources_enabled ON upstream_sources(enabled);
CREATE INDEX IF NOT EXISTS idx_upstream_sources_source_type ON upstream_sources(source_type);
CREATE INDEX IF NOT EXISTS idx_upstream_sources_is_public ON upstream_sources(is_public);
CREATE INDEX IF NOT EXISTS idx_upstream_sources_priority ON upstream_sources(priority);

-- Comments for upstream_sources
COMMENT ON TABLE upstream_sources IS 'Configuration for upstream artifact registries (npm, PyPI, Maven, etc.)';
COMMENT ON COLUMN upstream_sources.name IS 'Unique human-readable name (e.g., npm-public, artifactory-private)';
COMMENT ON COLUMN upstream_sources.source_type IS 'Type of registry: npm, pypi, maven, docker, helm, nuget, deb, rpm, generic';
COMMENT ON COLUMN upstream_sources.url IS 'Base URL of the upstream registry';
COMMENT ON COLUMN upstream_sources.enabled IS 'Whether this source is active for caching';
COMMENT ON COLUMN upstream_sources.is_public IS 'True if this is a public internet source (for air-gap mode)';
COMMENT ON COLUMN upstream_sources.auth_type IS 'Authentication type: none, basic, bearer, api_key';
COMMENT ON COLUMN upstream_sources.username IS 'Username for basic auth';
COMMENT ON COLUMN upstream_sources.password_encrypted IS 'Fernet-encrypted password/token';
COMMENT ON COLUMN upstream_sources.headers_encrypted IS 'Fernet-encrypted custom headers (JSON)';
COMMENT ON COLUMN upstream_sources.priority IS 'Priority for source selection (lower = higher priority)';
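-- (Illustrative only, not part of the migration; commented out so it is never
-- executed.) A sketch of registering a private, authenticated mirror. The
-- name, URL, and username below are made-up examples; password_encrypted is
-- left unset because the application is expected to Fernet-encrypt
-- credentials before storage, so plain SQL should not write that column.
-- INSERT INTO upstream_sources (name, source_type, url, enabled, is_public, auth_type, username, priority)
-- VALUES ('artifactory-private', 'pypi', 'https://artifactory.example.com/api/pypi/pypi-remote/simple',
--         TRUE, FALSE, 'basic', 'svc-orchard', 10)
-- ON CONFLICT (name) DO NOTHING;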
-- =============================================================================
-- cache_settings: Global cache configuration (singleton table)
-- =============================================================================
CREATE TABLE IF NOT EXISTS cache_settings (
    id INTEGER PRIMARY KEY DEFAULT 1,
    allow_public_internet BOOLEAN NOT NULL DEFAULT TRUE,
    auto_create_system_projects BOOLEAN NOT NULL DEFAULT TRUE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,

    -- Singleton constraint
    CONSTRAINT check_cache_settings_singleton CHECK (id = 1)
);

-- Insert default row
INSERT INTO cache_settings (id, allow_public_internet, auto_create_system_projects)
VALUES (1, TRUE, TRUE)
ON CONFLICT (id) DO NOTHING;

-- Comments for cache_settings
COMMENT ON TABLE cache_settings IS 'Global cache settings (singleton table)';
COMMENT ON COLUMN cache_settings.allow_public_internet IS 'Air-gap mode: when false, blocks all public internet sources';
COMMENT ON COLUMN cache_settings.auto_create_system_projects IS 'Auto-create system projects (_npm, _pypi, etc.) on first cache';
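-- (Illustrative only, not part of the migration; commented out.) Air-gap mode
-- is a single flag on the singleton row; flipping it off blocks all public
-- upstream sources:
-- UPDATE cache_settings
-- SET allow_public_internet = FALSE,
--     updated_at = CURRENT_TIMESTAMP
-- WHERE id = 1;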
-- =============================================================================
-- cached_urls: Track URL to artifact mappings for provenance
-- =============================================================================
CREATE TABLE IF NOT EXISTS cached_urls (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    url VARCHAR(4096) NOT NULL,
    url_hash VARCHAR(64) NOT NULL,
    artifact_id VARCHAR(64) NOT NULL REFERENCES artifacts(id),
    source_id UUID REFERENCES upstream_sources(id) ON DELETE SET NULL,
    fetched_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
    response_headers JSONB DEFAULT '{}',
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,

    -- URL hash must be unique (same URL = same cached artifact)
    CONSTRAINT unique_url_hash UNIQUE (url_hash)
);

-- Indexes for cached_urls
CREATE INDEX IF NOT EXISTS idx_cached_urls_url_hash ON cached_urls(url_hash);
CREATE INDEX IF NOT EXISTS idx_cached_urls_artifact_id ON cached_urls(artifact_id);
CREATE INDEX IF NOT EXISTS idx_cached_urls_source_id ON cached_urls(source_id);
CREATE INDEX IF NOT EXISTS idx_cached_urls_fetched_at ON cached_urls(fetched_at);

-- Comments for cached_urls
COMMENT ON TABLE cached_urls IS 'Tracks which URLs have been cached and maps to artifacts';
COMMENT ON COLUMN cached_urls.url IS 'Original URL that was fetched';
COMMENT ON COLUMN cached_urls.url_hash IS 'SHA256 hash of URL for fast lookup';
COMMENT ON COLUMN cached_urls.artifact_id IS 'The cached artifact (by SHA256 content hash)';
COMMENT ON COLUMN cached_urls.source_id IS 'Which upstream source provided this (null if manual)';
COMMENT ON COLUMN cached_urls.fetched_at IS 'When the URL was fetched from upstream';
COMMENT ON COLUMN cached_urls.response_headers IS 'Original response headers from upstream (for debugging)';
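-- (Illustrative only, not part of the migration; commented out.) The cache
-- lookup path is keyed on url_hash; a sketch of the query the service
-- presumably runs, with the application supplying the hex-encoded SHA256 of
-- the requested URL:
-- SELECT artifact_id, fetched_at
-- FROM cached_urls
-- WHERE url_hash = '<sha256-hex-of-url>';  -- placeholder value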
-- =============================================================================
-- Add is_system column to projects table for system cache projects
-- =============================================================================
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'projects' AND column_name = 'is_system'
    ) THEN
        ALTER TABLE projects ADD COLUMN is_system BOOLEAN NOT NULL DEFAULT FALSE;
        CREATE INDEX IF NOT EXISTS idx_projects_is_system ON projects(is_system);
    END IF;
END $$;

COMMENT ON COLUMN projects.is_system IS 'True for system cache projects (_npm, _pypi, etc.)';

-- =============================================================================
-- Seed default upstream sources (disabled by default for safety)
-- =============================================================================
INSERT INTO upstream_sources (id, name, source_type, url, enabled, is_public, auth_type, priority)
VALUES
    (gen_random_uuid(), 'npm-public', 'npm', 'https://registry.npmjs.org', FALSE, TRUE, 'none', 100),
    (gen_random_uuid(), 'pypi-public', 'pypi', 'https://pypi.org/simple', FALSE, TRUE, 'none', 100),
    (gen_random_uuid(), 'maven-central', 'maven', 'https://repo1.maven.org/maven2', FALSE, TRUE, 'none', 100),
    (gen_random_uuid(), 'docker-hub', 'docker', 'https://registry-1.docker.io', FALSE, TRUE, 'none', 100)
ON CONFLICT (name) DO NOTHING;
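-- (Illustrative only, not part of the migration; commented out.) The seeded
-- sources ship disabled; enabling one is an explicit opt-in, for example:
-- UPDATE upstream_sources
-- SET enabled = TRUE,
--     updated_at = CURRENT_TIMESTAMP
-- WHERE name = 'pypi-public';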