Compare commits
137 Commits
fix/teams-
...
fix/pypi-p
| SHA1 |
|---|
| ec518519b2 |
| 968cb00477 |
| 262aff6e97 |
| 1389a03c69 |
| a45ec46e94 |
| 1202947620 |
| f5c9e438a0 |
| aff08ad393 |
| cdb3b5ecb3 |
| 659ecf6f73 |
| 15cd90b36d |
| 65bb073a6e |
| cbc2e5e11a |
| 9f233e0d4d |
| b27eb0a928 |
| 9a1d578525 |
| 08291a2f56 |
| f8ad957ff9 |
| 331745320d |
| a6fee37ea9 |
| b1056f2286 |
| 2a423d66c0 |
| cd9940da01 |
| bdfc525e71 |
| 8d04dd5449 |
| 743ce26e54 |
| 39ae40f1c6 |
| ca8f62f69b |
| b55c810100 |
| bef16d884b |
| a97d3e630f |
| 7b0d423bee |
| 8731b42d3e |
| a442778458 |
| 36c05230ff |
| dc9c217d8a |
| da3fd7a601 |
| 9a2b323fd8 |
| 6b3522aef2 |
| f37d3e3e9a |
| 308057784e |
| 86c95bea2b |
| cc5d67abd6 |
| eb287edbda |
| 86e971381a |
| cf2fe5151f |
| 2ae479146f |
| a0dad73db0 |
| b40c53d308 |
| f04149b410 |
| aa851ab445 |
| 9313942f53 |
| 9a795a301a |
| 9f13221012 |
| a99381aafb |
| d422ed5cd8 |
| b2a8c7cfcc |
| eb11efd001 |
| 02e69c65ee |
| 34d98f52cb |
| 29fa53d174 |
| 63de1ce672 |
| 0b85f37abd |
| 101152f87f |
| 3a09accfe6 |
| 88765b4f50 |
| 152af0a852 |
| 31edadf3ad |
| 2136e1f0c5 |
| ff25677b16 |
| 0a6dad9af0 |
| 36cf288526 |
| 7008d913bf |
| 46e8c7df70 |
| a3929bfb17 |
| db2805a36c |
| 7a6e270d63 |
| df4f9d168b |
| 1f98caa73c |
| a485852a6f |
| 5517048f05 |
| c7eca269f4 |
| 6a3a875a9c |
| a39b6f098f |
| e0562195df |
| db7d0bb7c4 |
| 4a287d46c8 |
| cbea91a528 |
| 80e2f3d157 |
| 522d23ec01 |
| c1060feb5f |
| e62e75bade |
| befa517485 |
| 7a2c0a54c6 |
| ead016208d |
| 4b76ca2046 |
| 94bbd87e6b |
| 2cf04a43ef |
| 9acef055b6 |
| 694f25ac9b |
| 06b2beb152 |
| 2b2dbae38b |
| cd56d00ebf |
| 558e1bc78f |
| 32218dbb1c |
| 006df9dff9 |
| 844e937071 |
| 77c7526023 |
| ec69d7619b |
| 8e3af8c4f5 |
| 24a0a71cf4 |
| ab50148a60 |
| acee458b3c |
| f18b8ed560 |
| 7e84dd3958 |
| a72c9d3f6e |
| a6618fe550 |
| 796176c251 |
| f58fb0079a |
| f57762334f |
| 599c8c1d5b |
| 11c5aee0f1 |
| 1b706fe858 |
| dcd405679a |
| 97498b2f86 |
| e8cf2462b7 |
| 038ad4ed1b |
| 858b45d434 |
| 95470b2bf6 |
| c512d85f9e |
| 82f67539bd |
| e93e7e7021 |
| 1d51c856b0 |
| c92895ffe9 |
| b147af43d2 |
| aed48bb4a2 |
| 0e67ebf94f |
.gitlab-ci.yml (284 changed lines)
@@ -11,13 +11,6 @@ variables:
   # Environment URLs (used by deploy and test jobs)
   STAGE_URL: https://orchard-stage.common.global.bsf.tools
   PROD_URL: https://orchard.common.global.bsf.tools
-  # Stage environment AWS resources (used by reset job)
-  STAGE_RDS_HOST: orchard-stage.cluster-cvw3jzjkozoc.us-gov-west-1.rds.amazonaws.com
-  STAGE_RDS_DBNAME: postgres
-  STAGE_SECRET_ARN: "arn:aws-us-gov:secretsmanager:us-gov-west-1:052673043337:secret:rds!cluster-a573672b-1a38-4665-a654-1b7df37b5297-IaeFQL"
-  STAGE_AUTH_SECRET_ARN: "arn:aws-us-gov:secretsmanager:us-gov-west-1:052673043337:secret:orchard-stage-creds-SMqvQx"
-  STAGE_S3_BUCKET: orchard-artifacts-stage
-  AWS_REGION: us-gov-west-1
   # Shared pip cache directory
   PIP_CACHE_DIR: "$CI_PROJECT_DIR/.pip-cache"

@@ -95,10 +88,18 @@ cve_sbom_analysis:
       when: never
     - when: on_success

-# Override release job to wait for stage integration tests before creating tag
+# Disable prosper_setup for tag pipelines since no build/analysis jobs run
+# (image is already built when commit was on main, and deploy uses helm directly)
+prosper_setup:
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+    - when: on_success
+
+# Override release job to wait for stage deployment and smoke tests before creating tag
 # This ensures the tag (which triggers prod deploy) is only created after stage passes
 release:
-  needs: [integration_test_stage, changelog]
+  needs: [smoke_test_stage, changelog]

 # Full integration test suite template (for feature/stage deployments)
 # Runs the complete pytest integration test suite against the deployed environment
@@ -200,108 +201,6 @@ release:
           sys.exit(0)
       PYTEST_SCRIPT

-# Reset stage template - runs from CI runner, uses CI variable for auth
-# Calls the /api/v1/admin/factory-reset endpoint which handles DB and S3 cleanup
-.reset_stage_template: &reset_stage_template
-  stage: deploy
-  image: deps.global.bsf.tools/docker/python:3.12-slim
-  timeout: 5m
-  retry: 1
-  before_script:
-    - pip install --index-url "$PIP_INDEX_URL" httpx
-  script:
-    - |
-      python - <<'RESET_SCRIPT'
-      import httpx
-      import sys
-      import os
-      import time
-
-      BASE_URL = os.environ.get("STAGE_URL", "")
-      ADMIN_USER = "admin"
-      ADMIN_PASS = os.environ.get("STAGE_ADMIN_PASSWORD", "")
-      MAX_RETRIES = 3
-      RETRY_DELAY = 5
-
-      if not BASE_URL:
-          print("ERROR: STAGE_URL not set")
-          sys.exit(1)
-
-      if not ADMIN_PASS:
-          print("ERROR: STAGE_ADMIN_PASSWORD not set")
-          sys.exit(1)
-
-      print(f"=== Resetting stage environment at {BASE_URL} ===")
-
-      def do_reset():
-          with httpx.Client(base_url=BASE_URL, timeout=120.0) as client:
-              print("Logging in as admin...")
-              login_response = client.post(
-                  "/api/v1/auth/login",
-                  json={"username": ADMIN_USER, "password": ADMIN_PASS},
-              )
-              if login_response.status_code != 200:
-                  raise Exception(f"Login failed: {login_response.status_code} - {login_response.text}")
-              print("Login successful")
-
-              print("Calling factory reset endpoint...")
-              reset_response = client.post(
-                  "/api/v1/admin/factory-reset",
-                  headers={"X-Confirm-Reset": "yes-delete-all-data"},
-              )
-
-              if reset_response.status_code == 200:
-                  result = reset_response.json()
-                  print("Factory reset successful!")
-                  print(f"  Database tables dropped: {result['results']['database_tables_dropped']}")
-                  print(f"  S3 objects deleted: {result['results']['s3_objects_deleted']}")
-                  print(f"  Database reinitialized: {result['results']['database_reinitialized']}")
-                  print(f"  Seeded: {result['results']['seeded']}")
-                  return True
-              else:
-                  raise Exception(f"Factory reset failed: {reset_response.status_code} - {reset_response.text}")
-
-      for attempt in range(1, MAX_RETRIES + 1):
-          try:
-              print(f"Attempt {attempt}/{MAX_RETRIES}")
-              if do_reset():
-                  sys.exit(0)
-          except Exception as e:
-              print(f"Attempt {attempt} failed: {e}")
-              if attempt < MAX_RETRIES:
-                  print(f"Retrying in {RETRY_DELAY} seconds...")
-                  time.sleep(RETRY_DELAY)
-              else:
-                  print("All retry attempts failed")
-                  sys.exit(1)
-      RESET_SCRIPT
-  rules:
-    - if: '$CI_COMMIT_BRANCH == "main"'
-      when: on_success
-
-# Reset stage BEFORE integration tests (ensure known state)
-reset_stage_pre:
-  <<: *reset_stage_template
-  needs: [deploy_stage]
-
-# Integration tests for stage deployment
-# Uses CI variable STAGE_ADMIN_PASSWORD (set in GitLab CI/CD settings)
-integration_test_stage:
-  <<: *integration_test_template
-  needs: [reset_stage_pre]
-  variables:
-    ORCHARD_TEST_URL: $STAGE_URL
-    ORCHARD_TEST_PASSWORD: $STAGE_ADMIN_PASSWORD
-  rules:
-    - if: '$CI_COMMIT_BRANCH == "main"'
-      when: on_success
-
-# Reset stage AFTER integration tests (clean slate for next run)
-reset_stage:
-  <<: *reset_stage_template
-  needs: [integration_test_stage]
-  allow_failure: true # Don't fail pipeline if reset has issues
-
 # Integration tests for feature deployment (full suite)
 # Uses DEV_ADMIN_PASSWORD CI variable (same as deploy_feature)
 integration_test_feature:
@@ -314,6 +213,74 @@ integration_test_feature:
     - if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
       when: on_success

+# Reset feature environment after integration tests
+# Calls factory-reset to clean up test data created during integration tests
+reset_feature:
+  stage: deploy
+  needs: [integration_test_feature]
+  image: deps.global.bsf.tools/docker/python:3.12-slim
+  timeout: 5m
+  before_script:
+    - pip install --index-url "$PIP_INDEX_URL" httpx
+  script:
+    # Debug: Check if variable is set at shell level
+    - echo "RESET_ADMIN_PASSWORD length at shell level:${#RESET_ADMIN_PASSWORD}"
+    - |
+      python - <<'RESET_SCRIPT'
+      import httpx
+      import os
+      import sys
+
+      BASE_URL = f"https://orchard-{os.environ['CI_COMMIT_REF_SLUG']}.common.global.bsf.tools"
+      PASSWORD_RAW = os.environ.get("RESET_ADMIN_PASSWORD")
+
+      if not PASSWORD_RAW:
+          print("ERROR: RESET_ADMIN_PASSWORD not set")
+          sys.exit(1)
+
+      # Debug: check for hidden characters
+      print(f"Raw password repr (first 3 chars): {repr(PASSWORD_RAW[:3])}")
+      print(f"Raw password repr (last 3 chars): {repr(PASSWORD_RAW[-3:])}")
+      print(f"Raw length: {len(PASSWORD_RAW)}")
+
+      # Strip any whitespace
+      PASSWORD = PASSWORD_RAW.strip()
+      print(f"Stripped length: {len(PASSWORD)}")
+
+      print(f"Resetting environment at {BASE_URL}")
+      client = httpx.Client(base_url=BASE_URL, timeout=60.0)
+
+      # Login as admin
+      login_resp = client.post("/api/v1/auth/login", json={
+          "username": "admin",
+          "password": PASSWORD
+      })
+      if login_resp.status_code != 200:
+          print(f"ERROR: Login failed: {login_resp.status_code}")
+          print(f"Response: {login_resp.text}")
+          sys.exit(1)
+
+      # Call factory reset
+      reset_resp = client.post(
+          "/api/v1/admin/factory-reset",
+          headers={"X-Confirm-Reset": "yes-delete-all-data"}
+      )
+      if reset_resp.status_code == 200:
+          print("SUCCESS: Factory reset completed")
+          print(reset_resp.json())
+      else:
+          print(f"ERROR: Factory reset failed: {reset_resp.status_code}")
+          print(reset_resp.text)
+          sys.exit(1)
+      RESET_SCRIPT
+  variables:
+    # Use same pattern as integration_test_feature - create new variable from CI variable
+    RESET_ADMIN_PASSWORD: $DEV_ADMIN_PASSWORD
+  rules:
+    - if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
+      when: on_success
+  allow_failure: true # Don't fail the pipeline if reset fails
+
 # Run Python backend unit tests
 python_unit_tests:
   stage: test
@@ -412,9 +379,88 @@ frontend_tests:
       echo "Health check failed after 30 attempts"
       exit 1

-# Deploy to stage (main branch)
-deploy_stage:
+# Ephemeral test deployment in stage namespace (main branch only)
+# Runs integration tests before promoting to long-running stage
+deploy_test:
   <<: *deploy_template
+  variables:
+    NAMESPACE: orch-stage-namespace
+    VALUES_FILE: helm/orchard/values-dev.yaml
+    BASE_URL: https://orchard-test.common.global.bsf.tools
+  before_script:
+    - kubectl config use-context esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
+    - *helm_setup
+  script:
+    - echo "Deploying ephemeral test environment"
+    - cd $CI_PROJECT_DIR
+    - |
+      helm upgrade --install orchard-test ./helm/orchard \
+        --namespace $NAMESPACE \
+        -f $VALUES_FILE \
+        --set image.tag=git.linux-amd64-$CI_COMMIT_SHA \
+        --set orchard.auth.adminPassword=$STAGE_ADMIN_PASSWORD \
+        --set ingress.hosts[0].host=orchard-test.common.global.bsf.tools \
+        --set ingress.tls[0].hosts[0]=orchard-test.common.global.bsf.tools \
+        --set ingress.tls[0].secretName=orchard-test-tls \
+        --set minioIngress.host=minio-test.common.global.bsf.tools \
+        --set minioIngress.tls.secretName=minio-test-tls \
+        --wait \
+        --atomic \
+        --timeout 10m
+    - kubectl rollout status deployment/orchard-test-server -n $NAMESPACE --timeout=10m
+    - *verify_deployment
+  environment:
+    name: test
+    url: https://orchard-test.common.global.bsf.tools
+    on_stop: cleanup_test
+    kubernetes:
+      agent: esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
+  rules:
+    - if: '$CI_COMMIT_BRANCH == "main"'
+      when: on_success
+
+# Integration tests for ephemeral test deployment (main branch)
+# Runs against orchard-test before promoting to long-running stage
+integration_test_main:
+  <<: *integration_test_template
+  needs: [deploy_test]
+  variables:
+    ORCHARD_TEST_URL: https://orchard-test.common.global.bsf.tools
+    ORCHARD_TEST_PASSWORD: $STAGE_ADMIN_PASSWORD
+  rules:
+    - if: '$CI_COMMIT_BRANCH == "main"'
+      when: on_success
+
+# Cleanup ephemeral test deployment after integration tests
+cleanup_test:
+  stage: deploy
+  needs: [integration_test_main]
+  image: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12
+  timeout: 5m
+  variables:
+    NAMESPACE: orch-stage-namespace
+    GIT_STRATEGY: none
+  before_script:
+    - kubectl config use-context esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
+  script:
+    - echo "Cleaning up ephemeral test deployment orchard-test"
+    - helm uninstall orchard-test --namespace $NAMESPACE || true
+  environment:
+    name: test
+    action: stop
+    kubernetes:
+      agent: esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
+  rules:
+    - if: '$CI_COMMIT_BRANCH == "main"'
+      when: on_success
+  allow_failure: true
+
+# Deploy to long-running stage (main branch, after ephemeral tests pass)
+deploy_stage:
+  stage: deploy
+  # Wait for ephemeral test to pass before promoting to long-running stage
+  needs: [cleanup_test]
+  image: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12
   variables:
     NAMESPACE: orch-stage-namespace
     VALUES_FILE: helm/orchard/values-stage.yaml
@@ -423,7 +469,7 @@ deploy_stage:
     - kubectl config use-context esv/bsf/bsf-integration/orchard/orchard-mvp:orchard-stage
     - *helm_setup
   script:
-    - echo "Deploying to stage environment"
+    - echo "Deploying to long-running stage environment"
     - cd $CI_PROJECT_DIR
     - |
      helm upgrade --install orchard-stage ./helm/orchard \
@@ -445,6 +491,16 @@ deploy_stage:
     - if: '$CI_COMMIT_BRANCH == "main"'
      when: on_success

+# Smoke test for long-running stage (after promotion)
+smoke_test_stage:
+  <<: *smoke_test_template
+  needs: [deploy_stage]
+  variables:
+    ORCHARD_TEST_URL: $STAGE_URL
+  rules:
+    - if: '$CI_COMMIT_BRANCH == "main"'
+      when: on_success
+
 # Deploy feature branch to dev namespace
 deploy_feature:
   <<: *deploy_template
CHANGELOG.md (139 changed lines)
@@ -7,6 +7,115 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

 ## [Unreleased]
 ### Added
+- Added S3 bucket provisioning terraform configuration (#59)
+  - Creates an S3 bucket to be used for anything Orchard
+  - Creates a log bucket for any logs tracking the S3 bucket
+- Added auto-fetch capability to dependency resolution endpoint
+  - `GET /api/v1/project/{project}/{package}/+/{ref}/resolve?auto_fetch=true` fetches missing dependencies from upstream registries
+  - PyPI registry client queries PyPI JSON API to resolve version constraints
+  - Fetched artifacts are cached and included in response `fetched` field
+  - Missing dependencies show `fetch_attempted` and `fetch_error` status
+  - Configurable max fetch depth via `ORCHARD_AUTO_FETCH_MAX_DEPTH` (default: 3)
+- Added `backend/app/registry_client.py` with extensible registry client abstraction
+  - `RegistryClient` ABC for implementing upstream registry clients
+  - `PyPIRegistryClient` implementation using PyPI JSON API
+  - `get_registry_client()` factory function for future npm/maven support
+- Added `fetch_and_cache_pypi_package()` reusable function for PyPI package fetching
+- Added HTTP connection pooling infrastructure for improved PyPI proxy performance
+  - `HttpClientManager` with configurable pool size, timeouts, and thread pool executor
+  - Eliminates per-request connection overhead (~100-500ms → ~5ms)
+- Added Redis caching layer with category-aware TTL for hermetic builds
+  - `CacheService` with graceful fallback when Redis unavailable
+  - Immutable data (artifact metadata, dependencies) cached forever
+  - Mutable data (package index, versions) uses configurable TTL
+- Added `ArtifactRepository` for batch database operations
+  - `batch_upsert_dependencies()` reduces N+1 queries to single INSERT
+  - `get_or_create_artifact()` uses atomic ON CONFLICT upsert
+- Added infrastructure status to health endpoint (`/health`)
+  - Reports HTTP pool size and worker threads
+  - Reports Redis cache connection status
+- Added new configuration settings for HTTP client, Redis, and cache TTL
+  - `ORCHARD_HTTP_MAX_CONNECTIONS`, `ORCHARD_HTTP_CONNECT_TIMEOUT`, etc.
+  - `ORCHARD_REDIS_HOST`, `ORCHARD_REDIS_PORT`, `ORCHARD_REDIS_ENABLED`
+  - `ORCHARD_CACHE_TTL_INDEX`, `ORCHARD_CACHE_TTL_VERSIONS`, etc.
+- Added transparent PyPI proxy implementing PEP 503 Simple API (#108)
+  - `GET /pypi/simple/` - package index (proxied from upstream)
+  - `GET /pypi/simple/{package}/` - version list with rewritten download links
+  - `GET /pypi/simple/{package}/{filename}` - download with automatic caching
+  - Allows `pip install --index-url https://orchard.../pypi/simple/ <package>`
+  - Artifacts cached on first access through configured upstream sources
+- Added `POST /api/v1/cache/resolve` endpoint to cache packages by coordinates instead of URL (#108)
+- Added `ORCHARD_PURGE_SEED_DATA` environment variable support to stage helm values to remove seed data from long-running deployments (#107)
+- Added frontend system projects visual distinction (#105)
+  - "Cache" badge for system projects in project list
+  - "System Cache" badge on project detail page
+  - Added `is_system` field to Project type
+- Added frontend admin page for upstream sources and cache settings (#75)
+  - New `/admin/cache` page accessible from user menu (admin only)
+  - Upstream sources table with create/edit/delete/test connectivity
+  - Cache settings section with air-gap mode and auto-create system projects toggles
+  - Visual indicators for env-defined sources (locked, cannot be modified)
+  - Environment variable override badges when settings are overridden
+  - API client functions for all cache admin operations
+- Added environment variable overrides for cache configuration (#74)
+  - `ORCHARD_CACHE_ALLOW_PUBLIC_INTERNET` - Override allow_public_internet (air-gap mode)
+  - `ORCHARD_CACHE_AUTO_CREATE_SYSTEM_PROJECTS` - Override auto_create_system_projects
+  - `ORCHARD_UPSTREAM__{NAME}__*` - Define upstream sources via env vars
+  - Env-defined sources appear in API with `source: "env"` marker
+  - Env-defined sources cannot be modified/deleted via API (400 error)
+  - Cache settings response includes `*_env_override` fields when overridden
+  - 7 unit tests for env var parsing and configuration
+- Added Global Cache Settings Admin API (#73)
+  - `GET /api/v1/admin/cache-settings` - Retrieve current cache settings
+  - `PUT /api/v1/admin/cache-settings` - Update cache settings (partial updates)
+  - Admin-only access with audit logging
+  - Controls `allow_public_internet` (air-gap mode) and `auto_create_system_projects`
+  - 7 integration tests for settings management
+- Added Upstream Sources Admin API for managing cache sources (#72)
+  - `GET /api/v1/admin/upstream-sources` - List sources with filtering
+  - `POST /api/v1/admin/upstream-sources` - Create source with auth configuration
+  - `GET /api/v1/admin/upstream-sources/{id}` - Get source details
+  - `PUT /api/v1/admin/upstream-sources/{id}` - Update source (partial updates)
+  - `DELETE /api/v1/admin/upstream-sources/{id}` - Delete source
+  - `POST /api/v1/admin/upstream-sources/{id}/test` - Test connectivity
+  - Admin-only access with audit logging
+  - Credentials never exposed (only has_password/has_headers flags)
+  - 13 integration tests for all CRUD operations
+- Added system project restrictions and management (#71)
+  - System projects (`_npm`, `_pypi`, etc.) cannot be deleted (returns 403)
+  - System projects cannot be made private (must remain public)
+  - `GET /api/v1/system-projects` endpoint to list all system cache projects
+  - 5 integration tests for system project restrictions
+- Added Cache API endpoint for fetching and storing artifacts from upstream URLs (#70)
+  - `POST /api/v1/cache` endpoint to cache artifacts from upstream registries
+  - URL parsing helpers to extract package name/version from npm, PyPI, Maven URLs
+  - Automatic system project creation (`_npm`, `_pypi`, `_maven`, etc.)
+  - URL-to-artifact provenance tracking via `cached_urls` table
+  - Optional user project cross-referencing for custom organization
+  - Cache hit returns existing artifact without re-fetching
+  - Air-gap mode enforcement (blocks public URLs when disabled)
+  - Hash verification for downloaded artifacts
+  - 21 unit tests for URL parsing and cache endpoint
+- Added HTTP client for fetching artifacts from upstream sources (#69)
+  - `UpstreamClient` class in `backend/app/upstream.py` with streaming downloads
+  - SHA256 hash computation while streaming (doesn't load large files into memory)
+  - Auth support: none, basic auth, bearer token, API key (custom headers)
+  - URL-to-source matching by URL prefix with priority ordering
+  - Configuration options: timeouts, retries with exponential backoff, redirect limits, max file size
+  - Air-gap mode enforcement via `allow_public_internet` setting
+  - Response header capture for provenance tracking
+  - Proper error handling with custom exception types
+  - Connection test method for upstream source validation
+  - 33 unit tests for client functionality
+- Added upstream artifact caching schema for hermetic builds (#68)
+  - `upstream_sources` table for configuring upstream registries (npm, PyPI, Maven, etc.)
+  - `cache_settings` table for global settings including air-gap mode
+  - `cached_urls` table for URL-to-artifact provenance tracking
+  - `is_system` column on projects for system cache projects (_npm, _pypi, etc.)
+  - Support for multiple auth types: none, basic auth, bearer token, API key
+  - Fernet encryption for credentials using `ORCHARD_CACHE_ENCRYPTION_KEY`
+  - Default upstream sources seeded (npm-public, pypi-public, maven-central, docker-hub) - disabled by default
+  - Migration `010_upstream_caching.sql`
 - Added team-based multi-tenancy for organizing projects and collaboration (#88-#104)
   - Teams serve as organizational containers for projects
   - Users can belong to multiple teams with different roles (owner, admin, member)
@@ -102,6 +211,24 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Added comprehensive integration tests for all dependency features

 ### Changed
+- Removed Usage section from Package page (curl command examples)
+- PyPI proxy now uses shared HTTP connection pool instead of per-request clients
+- PyPI proxy now caches upstream source configuration in Redis
+- Dependency storage now uses batch INSERT instead of individual queries
+- Increased default database pool size from 5 to 20 connections
+- Increased default database max overflow from 10 to 30 connections
+- Enabled Redis in Helm chart values for dev, stage, and prod environments
+- Upstream sources table text is now centered under column headers (#108)
+- ENV badge now appears inline with source name instead of separate column (#108)
+- Test and Edit buttons now have more prominent button styling (#108)
+- Reduced footer padding for cleaner layout (#108)
+- Upstream source connectivity test no longer follows redirects, fixing "Exceeded maximum allowed redirects" error with Artifactory proxies (#107)
+- Test runs automatically after saving a new or updated upstream source (#107)
+- Test status now shows as colored dots (green=success, red=error) instead of text badges (#107)
+- Clicking red dot shows error details in a modal (#107)
+- Source name column no longer wraps text for better table layout (#107)
+- Renamed "Cache Management" page to "Upstream Sources" (#107)
+- Moved Delete button from table row to edit modal for cleaner table layout (#107)
 - Added pre-test stage reset to ensure known environment state before integration tests (#54)
 - Upload endpoint now accepts optional `ensure` file parameter for declaring dependencies
 - Updated upload API documentation with ensure file format and examples
@@ -110,8 +237,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Added orchard logo icon and dot separator to footer

 ### Fixed
+- Fixed purge_seed_data crash when deleting access permissions - was comparing UUID to VARCHAR column (#107)
 - Fixed dark theme styling for team pages - modals, forms, and dropdowns now use correct theme variables
 - Fixed UserAutocomplete and TeamSelector dropdown backgrounds for dark theme
+- Fixed PyPI proxy filtering platform-specific dependencies (pyobjc on macOS, pywin32 on Windows)
+- Fixed bare version constraints being treated as wildcards (e.g., `certifi@2025.10.5` now fetches exact version)
+
+### Removed
+- Removed `is_public` field from upstream sources - all sources are now treated as internal/private (#107)
+- Removed `allow_public_internet` (air-gap mode) setting from cache settings - not needed for enterprise proxy use case (#107)
+- Removed seeding of public registry URLs (npm-public, pypi-public, maven-central, docker-hub) (#107)
+- Removed "Public" badge and checkbox from upstream sources UI (#107)
+- Removed "Allow Public Internet" toggle from cache settings UI (#107)
+- Removed "Global Settings" section from cache management UI - auto-create system projects is always enabled (#107)
+- Removed unused CacheSettings frontend types and API functions (#107)
+
 ## [0.5.1] - 2026-01-23
 ### Changed
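
The auto-fetch resolution endpoint listed under "Added" above can be exercised with a few lines of httpx. A minimal sketch, assuming a reachable deployment and omitting any authentication the server may require; the host, project, package, and ref values are placeholders, and the response fields follow the changelog's description:

import httpx

BASE_URL = "https://orchard.example.com"  # placeholder deployment URL

with httpx.Client(base_url=BASE_URL, timeout=60.0) as client:
    # Coordinates are placeholders; auth (if required) is omitted here.
    resp = client.get(
        "/api/v1/project/myproject/requests/+/2.28.0/resolve",
        params={"auto_fetch": "true"},
    )
    resp.raise_for_status()
    result = resp.json()
    # Per the changelog: upstream-fetched artifacts appear under "fetched";
    # unresolved dependencies carry "fetch_attempted" / "fetch_error" status.
    print(result.get("fetched"))
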
backend/app/cache.py (new file, 316 lines)
@@ -0,0 +1,316 @@
"""
Cache service for upstream artifact caching.

Provides URL parsing, system project management, and caching logic
for the upstream caching feature.
"""

import logging
import re
from dataclasses import dataclass
from typing import Optional
from urllib.parse import urlparse, unquote

logger = logging.getLogger(__name__)


# System project names for each source type
SYSTEM_PROJECT_NAMES = {
    "npm": "_npm",
    "pypi": "_pypi",
    "maven": "_maven",
    "docker": "_docker",
    "helm": "_helm",
    "nuget": "_nuget",
    "deb": "_deb",
    "rpm": "_rpm",
    "generic": "_generic",
}

# System project descriptions
SYSTEM_PROJECT_DESCRIPTIONS = {
    "npm": "System cache for npm packages",
    "pypi": "System cache for PyPI packages",
    "maven": "System cache for Maven packages",
    "docker": "System cache for Docker images",
    "helm": "System cache for Helm charts",
    "nuget": "System cache for NuGet packages",
    "deb": "System cache for Debian packages",
    "rpm": "System cache for RPM packages",
    "generic": "System cache for generic artifacts",
}


@dataclass
class ParsedUrl:
    """Parsed URL information for caching."""

    package_name: str
    version: Optional[str] = None
    filename: Optional[str] = None


def parse_npm_url(url: str) -> Optional[ParsedUrl]:
    """
    Parse npm registry URL to extract package name and version.

    Formats:
    - https://registry.npmjs.org/{package}/-/{package}-{version}.tgz
    - https://registry.npmjs.org/@{scope}/{package}/-/{package}-{version}.tgz

    Examples:
    - https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz
    - https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz
    """
    parsed = urlparse(url)
    path = unquote(parsed.path)

    # Pattern for scoped packages: /@scope/package/-/package-version.tgz
    scoped_pattern = r"^/@([^/]+)/([^/]+)/-/\2-(.+)\.tgz$"
    match = re.match(scoped_pattern, path)
    if match:
        scope, name, version = match.groups()
        return ParsedUrl(
            package_name=f"@{scope}/{name}",
            version=version,
            filename=f"{name}-{version}.tgz",
        )

    # Pattern for unscoped packages: /package/-/package-version.tgz
    unscoped_pattern = r"^/([^/@]+)/-/\1-(.+)\.tgz$"
    match = re.match(unscoped_pattern, path)
    if match:
        name, version = match.groups()
        return ParsedUrl(
            package_name=name,
            version=version,
            filename=f"{name}-{version}.tgz",
        )

    return None


def parse_pypi_url(url: str) -> Optional[ParsedUrl]:
    """
    Parse PyPI URL to extract package name and version.

    Formats:
    - https://files.pythonhosted.org/packages/.../package-version.tar.gz
    - https://files.pythonhosted.org/packages/.../package-version-py3-none-any.whl
    - https://pypi.org/packages/.../package-version.tar.gz

    Examples:
    - https://files.pythonhosted.org/packages/ab/cd/requests-2.28.0.tar.gz
    - https://files.pythonhosted.org/packages/ab/cd/requests-2.28.0-py3-none-any.whl
    """
    parsed = urlparse(url)
    path = unquote(parsed.path)

    # Get the filename from the path
    filename = path.split("/")[-1]
    if not filename:
        return None

    # Handle wheel files: package-version-py3-none-any.whl
    wheel_pattern = r"^([a-zA-Z0-9_-]+)-(\d+[^-]*)-.*\.whl$"
    match = re.match(wheel_pattern, filename)
    if match:
        name, version = match.groups()
        # Normalize package name (PyPI uses underscores internally)
        name = name.replace("_", "-").lower()
        return ParsedUrl(
            package_name=name,
            version=version,
            filename=filename,
        )

    # Handle source distributions: package-version.tar.gz or package-version.zip
    sdist_pattern = r"^([a-zA-Z0-9_-]+)-(\d+(?:\.\d+)*(?:[a-zA-Z0-9_.+-]*)?)(?:\.tar\.gz|\.zip|\.tar\.bz2)$"
    match = re.match(sdist_pattern, filename)
    if match:
        name, version = match.groups()
        name = name.replace("_", "-").lower()
        return ParsedUrl(
            package_name=name,
            version=version,
            filename=filename,
        )

    return None


def parse_maven_url(url: str) -> Optional[ParsedUrl]:
    """
    Parse Maven repository URL to extract artifact info.

    Format:
    - https://repo1.maven.org/maven2/{group}/{artifact}/{version}/{artifact}-{version}.jar

    Examples:
    - https://repo1.maven.org/maven2/org/apache/commons/commons-lang3/3.12.0/commons-lang3-3.12.0.jar
    - https://repo1.maven.org/maven2/com/google/guava/guava/31.1-jre/guava-31.1-jre.jar
    """
    parsed = urlparse(url)
    path = unquote(parsed.path)

    # Find /maven2/ or similar repository path
    maven2_idx = path.find("/maven2/")
    if maven2_idx >= 0:
        path = path[maven2_idx + 8:]  # Remove /maven2/
    elif path.startswith("/"):
        path = path[1:]

    parts = path.split("/")
    if len(parts) < 4:
        return None

    # Last part is filename, before that is version, before that is artifact
    filename = parts[-1]
    version = parts[-2]
    artifact = parts[-3]
    group = ".".join(parts[:-3])

    # Verify filename matches expected pattern
    if not filename.startswith(f"{artifact}-{version}"):
        return None

    return ParsedUrl(
        package_name=f"{group}:{artifact}",
        version=version,
        filename=filename,
    )


def parse_docker_url(url: str) -> Optional[ParsedUrl]:
    """
    Parse Docker registry URL to extract image info.

    Note: Docker registries are more complex (manifests, blobs, etc.)
    This handles basic blob/manifest URLs.

    Examples:
    - https://registry-1.docker.io/v2/library/nginx/blobs/sha256:abc123
    - https://registry-1.docker.io/v2/myuser/myimage/manifests/latest
    """
    parsed = urlparse(url)
    path = unquote(parsed.path)

    # Pattern: /v2/{namespace}/{image}/blobs/{digest} or /manifests/{tag}
    pattern = r"^/v2/([^/]+(?:/[^/]+)?)/([^/]+)/(blobs|manifests)/(.+)$"
    match = re.match(pattern, path)
    if match:
        namespace, image, artifact_type, reference = match.groups()
        if namespace == "library":
            package_name = image
        else:
            package_name = f"{namespace}/{image}"

        # For manifests, the reference is the tag
        version = reference if artifact_type == "manifests" else None

        return ParsedUrl(
            package_name=package_name,
            version=version,
            filename=f"{image}-{reference}" if version else reference,
        )

    return None


def parse_generic_url(url: str) -> ParsedUrl:
    """
    Parse a generic URL to extract filename.

    Attempts to extract meaningful package name and version from filename.

    Examples:
    - https://example.com/downloads/myapp-1.2.3.tar.gz
    - https://github.com/user/repo/releases/download/v1.0/release.zip
    """
    parsed = urlparse(url)
    path = unquote(parsed.path)
    filename = path.split("/")[-1] or "artifact"

    # List of known compound and simple extensions
    known_extensions = [
        ".tar.gz", ".tar.bz2", ".tar.xz",
        ".zip", ".tgz", ".gz", ".jar", ".war", ".deb", ".rpm"
    ]

    # Strip extension from filename first
    base_name = filename
    matched_ext = None
    for ext in known_extensions:
        if filename.endswith(ext):
            base_name = filename[:-len(ext)]
            matched_ext = ext
            break

    if matched_ext is None:
        # Unknown extension, return filename as package name
        return ParsedUrl(
            package_name=filename,
            version=None,
            filename=filename,
        )

    # Try to extract version from base_name
    # Pattern: name-version or name_version
    # Version starts with digit(s) and can include dots, dashes, and alphanumeric suffixes
    version_pattern = r"^(.+?)[-_](v?\d+(?:\.\d+)*(?:[-_][a-zA-Z0-9]+)?)$"
    match = re.match(version_pattern, base_name)
    if match:
        name, version = match.groups()
        return ParsedUrl(
            package_name=name,
            version=version,
            filename=filename,
        )

    # No version found, use base_name as package name
    return ParsedUrl(
        package_name=base_name,
        version=None,
        filename=filename,
    )


def parse_url(url: str, source_type: str) -> ParsedUrl:
    """
    Parse URL to extract package name and version based on source type.

    Args:
        url: The URL to parse.
        source_type: The source type (npm, pypi, maven, docker, etc.)

    Returns:
        ParsedUrl with extracted information.
    """
    parsed = None

    if source_type == "npm":
        parsed = parse_npm_url(url)
    elif source_type == "pypi":
        parsed = parse_pypi_url(url)
    elif source_type == "maven":
        parsed = parse_maven_url(url)
    elif source_type == "docker":
        parsed = parse_docker_url(url)

    # Fall back to generic parsing if type-specific parsing fails
    if parsed is None:
        parsed = parse_generic_url(url)

    return parsed


def get_system_project_name(source_type: str) -> str:
    """Get the system project name for a source type."""
    return SYSTEM_PROJECT_NAMES.get(source_type, "_generic")


def get_system_project_description(source_type: str) -> str:
    """Get the system project description for a source type."""
    return SYSTEM_PROJECT_DESCRIPTIONS.get(
        source_type, "System cache for artifacts"
    )
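
A quick illustration of how the URL parsers above behave on the URL shapes from their docstrings. A standalone sketch, assuming the module is importable as app.cache (the import path is not confirmed by this diff):

from app.cache import parse_url, get_system_project_name  # import path assumed

# npm tarball URL -> package name and version
npm = parse_url("https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", "npm")
print(npm.package_name, npm.version)      # lodash 4.17.21

# PyPI wheel URL -> normalized (lower-case, hyphenated) name
pypi = parse_url(
    "https://files.pythonhosted.org/packages/ab/cd/requests-2.28.0-py3-none-any.whl",
    "pypi",
)
print(pypi.package_name, pypi.version)    # requests 2.28.0

# Unknown or generic sources fall back to filename-based parsing
gen = parse_url("https://example.com/downloads/myapp-1.2.3.tar.gz", "generic")
print(gen.package_name, gen.version)      # myapp 1.2.3

# Each source type maps to a system cache project
print(get_system_project_name("pypi"))    # _pypi
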
backend/app/cache_service.py (new file, 262 lines)
@@ -0,0 +1,262 @@
"""
Redis-backed caching service with category-aware TTL and invalidation.

Provides:
- Immutable caching for artifact data (hermetic builds)
- TTL-based caching for discovery data
- Event-driven invalidation for config changes
- Graceful fallback when Redis unavailable
"""

import logging
from enum import Enum
from typing import Optional

from .config import Settings

logger = logging.getLogger(__name__)


class CacheCategory(Enum):
    """
    Cache categories with different TTL and invalidation rules.

    Immutable (cache forever):
    - ARTIFACT_METADATA: Artifact info by SHA256
    - ARTIFACT_DEPENDENCIES: Extracted deps by SHA256
    - DEPENDENCY_RESOLUTION: Resolution results by input hash

    Mutable (TTL + event invalidation):
    - UPSTREAM_SOURCES: Upstream config, invalidate on DB change
    - PACKAGE_INDEX: PyPI/npm index pages, TTL only
    - PACKAGE_VERSIONS: Version listings, TTL only
    """

    # Immutable - cache forever (hermetic builds)
    ARTIFACT_METADATA = "artifact"
    ARTIFACT_DEPENDENCIES = "deps"
    DEPENDENCY_RESOLUTION = "resolve"

    # Mutable - TTL + event invalidation
    UPSTREAM_SOURCES = "upstream"
    PACKAGE_INDEX = "index"
    PACKAGE_VERSIONS = "versions"


def get_category_ttl(category: CacheCategory, settings: Settings) -> Optional[int]:
    """
    Get TTL for a cache category.

    Returns:
        TTL in seconds, or None for no expiry (immutable).
    """
    ttl_map = {
        # Immutable - no TTL
        CacheCategory.ARTIFACT_METADATA: None,
        CacheCategory.ARTIFACT_DEPENDENCIES: None,
        CacheCategory.DEPENDENCY_RESOLUTION: None,
        # Mutable - configurable TTL
        CacheCategory.UPSTREAM_SOURCES: settings.cache_ttl_upstream,
        CacheCategory.PACKAGE_INDEX: settings.cache_ttl_index,
        CacheCategory.PACKAGE_VERSIONS: settings.cache_ttl_versions,
    }
    return ttl_map.get(category)


class CacheService:
    """
    Redis-backed caching with category-aware TTL.

    Key format: orchard:{category}:{protocol}:{identifier}
    Example: orchard:deps:pypi:abc123def456

    When Redis is disabled or unavailable, operations gracefully
    return None/no-op to allow the application to function without caching.
    """

    def __init__(self, settings: Settings):
        self._settings = settings
        self._enabled = settings.redis_enabled
        self._redis: Optional["redis.asyncio.Redis"] = None
        self._started = False

    async def startup(self) -> None:
        """Initialize Redis connection. Called by FastAPI lifespan."""
        if self._started:
            return

        if not self._enabled:
            logger.info("CacheService disabled (redis_enabled=False)")
            self._started = True
            return

        try:
            import redis.asyncio as redis

            logger.info(
                f"Connecting to Redis at {self._settings.redis_host}:"
                f"{self._settings.redis_port}/{self._settings.redis_db}"
            )

            self._redis = redis.Redis(
                host=self._settings.redis_host,
                port=self._settings.redis_port,
                db=self._settings.redis_db,
                password=self._settings.redis_password,
                decode_responses=False,  # We handle bytes
            )

            # Test connection
            await self._redis.ping()
            logger.info("CacheService connected to Redis")

        except ImportError:
            logger.warning("redis package not installed, caching disabled")
            self._enabled = False
        except Exception as e:
            logger.warning(f"Redis connection failed, caching disabled: {e}")
            self._enabled = False
            self._redis = None

        self._started = True

    async def shutdown(self) -> None:
        """Close Redis connection. Called by FastAPI lifespan."""
        if not self._started:
            return

        if self._redis:
            await self._redis.aclose()
            self._redis = None

        self._started = False
        logger.info("CacheService shutdown complete")

    @staticmethod
    def _make_key(category: CacheCategory, protocol: str, identifier: str) -> str:
        """Build namespaced cache key."""
        return f"orchard:{category.value}:{protocol}:{identifier}"

    async def get(
        self,
        category: CacheCategory,
        key: str,
        protocol: str = "default",
    ) -> Optional[bytes]:
        """
        Get cached value.

        Args:
            category: Cache category for TTL rules
            key: Unique identifier within category
            protocol: Protocol namespace (pypi, npm, etc.)

        Returns:
            Cached bytes or None if not found/disabled.
        """
        if not self._enabled or not self._redis:
            return None

        try:
            full_key = self._make_key(category, protocol, key)
            return await self._redis.get(full_key)
        except Exception as e:
            logger.warning(f"Cache get failed for {key}: {e}")
            return None

    async def set(
        self,
        category: CacheCategory,
        key: str,
        value: bytes,
        protocol: str = "default",
    ) -> None:
        """
        Set cached value with category-appropriate TTL.

        Args:
            category: Cache category for TTL rules
            key: Unique identifier within category
            value: Bytes to cache
            protocol: Protocol namespace (pypi, npm, etc.)
        """
        if not self._enabled or not self._redis:
            return

        try:
            full_key = self._make_key(category, protocol, key)
            ttl = get_category_ttl(category, self._settings)

            if ttl is None:
                await self._redis.set(full_key, value)
            else:
                await self._redis.setex(full_key, ttl, value)

        except Exception as e:
            logger.warning(f"Cache set failed for {key}: {e}")

    async def delete(
        self,
        category: CacheCategory,
        key: str,
        protocol: str = "default",
    ) -> None:
        """Delete a specific cache entry."""
        if not self._enabled or not self._redis:
            return

        try:
            full_key = self._make_key(category, protocol, key)
            await self._redis.delete(full_key)
        except Exception as e:
            logger.warning(f"Cache delete failed for {key}: {e}")

    async def invalidate_pattern(
        self,
        category: CacheCategory,
        pattern: str = "*",
        protocol: str = "default",
    ) -> int:
        """
        Invalidate all entries matching pattern.

        Args:
            category: Cache category
            pattern: Glob pattern for keys (default "*" = all in category)
            protocol: Protocol namespace

        Returns:
            Number of keys deleted.
        """
        if not self._enabled or not self._redis:
            return 0

        try:
            full_pattern = self._make_key(category, protocol, pattern)
            keys = []
            async for key in self._redis.scan_iter(match=full_pattern):
                keys.append(key)

            if keys:
                return await self._redis.delete(*keys)
            return 0

        except Exception as e:
            logger.warning(f"Cache invalidate failed for pattern {pattern}: {e}")
            return 0

    async def ping(self) -> bool:
        """Check if Redis is connected and responding."""
        if not self._enabled or not self._redis:
            return False

        try:
            await self._redis.ping()
            return True
        except Exception:
            return False

    @property
    def enabled(self) -> bool:
        """Check if caching is enabled."""
        return self._enabled
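
A minimal sketch of driving CacheService on its own; the import paths and the locally constructed Settings() are assumptions (in the application, startup and shutdown are called from the FastAPI lifespan, as the docstrings note):

import asyncio

from app.cache_service import CacheCategory, CacheService  # import path assumed
from app.config import Settings                            # import path assumed


async def main() -> None:
    service = CacheService(Settings())
    await service.startup()  # connects to Redis, or silently degrades to a no-op

    # Mutable category: stored with the configured TTL (cache_ttl_index)
    await service.set(
        CacheCategory.PACKAGE_INDEX, "requests", b"<html>...</html>", protocol="pypi"
    )
    page = await service.get(CacheCategory.PACKAGE_INDEX, "requests", protocol="pypi")
    print(service.enabled, page is not None)

    # Drop every cached pypi index page, e.g. after upstream config changes
    await service.invalidate_pattern(CacheCategory.PACKAGE_INDEX, protocol="pypi")

    await service.shutdown()


asyncio.run(main())
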
backend/app/config.py
@@ -1,5 +1,8 @@
 from pydantic_settings import BaseSettings
 from functools import lru_cache
+from typing import Optional
+import os
+import re


 class Settings(BaseSettings):
@@ -19,8 +22,8 @@ class Settings(BaseSettings):
     database_sslmode: str = "disable"

     # Database connection pool settings
-    database_pool_size: int = 5  # Number of connections to keep open
-    database_max_overflow: int = 10  # Max additional connections beyond pool_size
+    database_pool_size: int = 20  # Number of connections to keep open
+    database_max_overflow: int = 30  # Max additional connections beyond pool_size
     database_pool_timeout: int = 30  # Seconds to wait for a connection from pool
     database_pool_recycle: int = (
         1800  # Recycle connections after this many seconds (30 min)
|
|||||||
presigned_url_expiry: int = (
|
presigned_url_expiry: int = (
|
||||||
3600 # Presigned URL expiry in seconds (default: 1 hour)
|
3600 # Presigned URL expiry in seconds (default: 1 hour)
|
||||||
)
|
)
|
||||||
|
pypi_download_mode: str = "redirect" # "redirect" (to S3) or "proxy" (stream through Orchard)
|
||||||
|
|
||||||
|
# HTTP Client pool settings
|
||||||
|
http_max_connections: int = 100 # Max connections per pool
|
||||||
|
http_max_keepalive: int = 20 # Keep-alive connections
|
||||||
|
http_connect_timeout: float = 30.0 # Connection timeout seconds
|
||||||
|
http_read_timeout: float = 60.0 # Read timeout seconds
|
||||||
|
http_worker_threads: int = 32 # Thread pool for blocking ops
|
||||||
|
|
||||||
|
# Redis cache settings
|
||||||
|
redis_host: str = "localhost"
|
||||||
|
redis_port: int = 6379
|
||||||
|
redis_db: int = 0
|
||||||
|
redis_password: Optional[str] = None
|
||||||
|
redis_enabled: bool = True # Set False to disable caching
|
||||||
|
|
||||||
|
# Cache TTL settings (seconds, 0 = no expiry)
|
||||||
|
cache_ttl_index: int = 300 # Package index pages: 5 min
|
||||||
|
cache_ttl_versions: int = 300 # Version listings: 5 min
|
||||||
|
cache_ttl_upstream: int = 3600 # Upstream source config: 1 hour
|
||||||
|
|
||||||
# Logging settings
|
# Logging settings
|
||||||
log_level: str = "INFO" # DEBUG, INFO, WARNING, ERROR, CRITICAL
|
log_level: str = "INFO" # DEBUG, INFO, WARNING, ERROR, CRITICAL
|
||||||
@@ -56,6 +79,20 @@ class Settings(BaseSettings):
     # Initial admin user settings
     admin_password: str = ""  # Initial admin password (if empty, uses 'changeme123')

+    # Cache settings
+    cache_encryption_key: str = ""  # Fernet key for encrypting upstream credentials (auto-generated if empty)
+    # Global cache settings override (None = use DB value, True/False = override DB)
+    cache_auto_create_system_projects: Optional[bool] = None  # Override auto_create_system_projects
+
+    # PyPI Cache Worker settings
+    pypi_cache_workers: int = 5  # Number of concurrent cache workers
+    pypi_cache_max_depth: int = 10  # Maximum recursion depth for dependency caching
+    pypi_cache_max_attempts: int = 3  # Maximum retry attempts for failed cache tasks
+
+    # Auto-fetch configuration for dependency resolution
+    auto_fetch_dependencies: bool = False  # Server default for auto_fetch parameter
+    auto_fetch_timeout: int = 300  # Total timeout for auto-fetch resolution in seconds
+
     # JWT Authentication settings (optional, for external identity providers)
     jwt_enabled: bool = False  # Enable JWT token validation
     jwt_secret: str = ""  # Secret key for HS256, or leave empty for RS256 with JWKS
@@ -80,6 +117,24 @@ class Settings(BaseSettings):
|
|||||||
def is_production(self) -> bool:
|
def is_production(self) -> bool:
|
||||||
return self.env.lower() == "production"
|
return self.env.lower() == "production"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def PORT(self) -> int:
|
||||||
|
"""Alias for server_port for compatibility."""
|
||||||
|
return self.server_port
|
||||||
|
|
||||||
|
# Uppercase aliases for PyPI cache settings (for backward compatibility)
|
||||||
|
@property
|
||||||
|
def PYPI_CACHE_WORKERS(self) -> int:
|
||||||
|
return self.pypi_cache_workers
|
||||||
|
|
||||||
|
@property
|
||||||
|
def PYPI_CACHE_MAX_DEPTH(self) -> int:
|
||||||
|
return self.pypi_cache_max_depth
|
||||||
|
|
||||||
|
@property
|
||||||
|
def PYPI_CACHE_MAX_ATTEMPTS(self) -> int:
|
||||||
|
return self.pypi_cache_max_attempts
|
||||||
|
|
||||||
class Config:
|
class Config:
|
||||||
env_prefix = "ORCHARD_"
|
env_prefix = "ORCHARD_"
|
||||||
case_sensitive = False
|
case_sensitive = False
|
||||||
@@ -88,3 +143,110 @@ class Settings(BaseSettings):
|
|||||||
@lru_cache()
|
@lru_cache()
|
||||||
def get_settings() -> Settings:
|
def get_settings() -> Settings:
|
||||||
return Settings()
|
return Settings()
|
||||||
|
|
||||||
|
|
||||||
|
class EnvUpstreamSource:
|
||||||
|
"""Represents an upstream source defined via environment variables."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
|
url: str,
|
||||||
|
source_type: str = "generic",
|
||||||
|
enabled: bool = True,
|
||||||
|
auth_type: str = "none",
|
||||||
|
username: Optional[str] = None,
|
||||||
|
password: Optional[str] = None,
|
||||||
|
priority: int = 100,
|
||||||
|
):
|
||||||
|
self.name = name
|
||||||
|
self.url = url
|
||||||
|
self.source_type = source_type
|
||||||
|
self.enabled = enabled
|
||||||
|
self.auth_type = auth_type
|
||||||
|
self.username = username
|
||||||
|
self.password = password
|
||||||
|
self.priority = priority
|
||||||
|
self.source = "env" # Mark as env-defined
|
||||||
|
|
||||||
|
|
||||||
|
def parse_upstream_sources_from_env() -> list[EnvUpstreamSource]:
|
||||||
|
"""
|
||||||
|
Parse upstream sources from environment variables.
|
||||||
|
|
||||||
|
Uses double underscore (__) as separator to allow source names with single underscores.
|
||||||
|
Pattern: ORCHARD_UPSTREAM__{NAME}__FIELD
|
||||||
|
|
||||||
|
Example:
|
||||||
|
ORCHARD_UPSTREAM__NPM_PRIVATE__URL=https://npm.corp.com
|
||||||
|
ORCHARD_UPSTREAM__NPM_PRIVATE__TYPE=npm
|
||||||
|
ORCHARD_UPSTREAM__NPM_PRIVATE__ENABLED=true
|
||||||
|
ORCHARD_UPSTREAM__NPM_PRIVATE__AUTH_TYPE=basic
|
||||||
|
ORCHARD_UPSTREAM__NPM_PRIVATE__USERNAME=reader
|
||||||
|
ORCHARD_UPSTREAM__NPM_PRIVATE__PASSWORD=secret
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of EnvUpstreamSource objects parsed from environment variables.
|
||||||
|
"""
|
||||||
|
# Pattern: ORCHARD_UPSTREAM__{NAME}__{FIELD}
|
||||||
|
pattern = re.compile(r"^ORCHARD_UPSTREAM__([A-Z0-9_]+)__([A-Z_]+)$", re.IGNORECASE)
|
||||||
|
|
||||||
|
# Collect all env vars matching the pattern, grouped by source name
|
||||||
|
sources_data: dict[str, dict[str, str]] = {}
|
||||||
|
|
||||||
|
for key, value in os.environ.items():
|
||||||
|
match = pattern.match(key)
|
||||||
|
if match:
|
||||||
|
source_name = match.group(1).lower() # Normalize to lowercase
|
||||||
|
field = match.group(2).upper()
|
||||||
|
if source_name not in sources_data:
|
||||||
|
sources_data[source_name] = {}
|
||||||
|
sources_data[source_name][field] = value
|
||||||
|
|
||||||
|
# Build source objects from collected data
|
||||||
|
sources: list[EnvUpstreamSource] = []
|
||||||
|
|
||||||
|
for name, data in sources_data.items():
|
||||||
|
# URL is required
|
||||||
|
url = data.get("URL")
|
||||||
|
if not url:
|
||||||
|
continue # Skip sources without URL
|
||||||
|
|
||||||
|
# Parse boolean fields
|
||||||
|
def parse_bool(val: Optional[str], default: bool) -> bool:
|
||||||
|
if val is None:
|
||||||
|
return default
|
||||||
|
return val.lower() in ("true", "1", "yes", "on")
|
||||||
|
|
||||||
|
# Parse integer fields
|
||||||
|
def parse_int(val: Optional[str], default: int) -> int:
|
||||||
|
if val is None:
|
||||||
|
return default
|
||||||
|
try:
|
||||||
|
return int(val)
|
||||||
|
except ValueError:
|
||||||
|
return default
|
||||||
|
|
||||||
|
source = EnvUpstreamSource(
|
||||||
|
name=name.replace("_", "-"), # Convert underscores to hyphens for readability
|
||||||
|
url=url,
|
||||||
|
source_type=data.get("TYPE", "generic").lower(),
|
||||||
|
enabled=parse_bool(data.get("ENABLED"), True),
|
||||||
|
auth_type=data.get("AUTH_TYPE", "none").lower(),
|
||||||
|
username=data.get("USERNAME"),
|
||||||
|
password=data.get("PASSWORD"),
|
||||||
|
priority=parse_int(data.get("PRIORITY"), 100),
|
||||||
|
)
|
||||||
|
sources.append(source)
|
||||||
|
|
||||||
|
return sources
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache()
|
||||||
|
def get_env_upstream_sources() -> tuple[EnvUpstreamSource, ...]:
|
||||||
|
"""
|
||||||
|
Get cached list of upstream sources from environment variables.
|
||||||
|
|
||||||
|
Returns a tuple for hashability (required by lru_cache).
|
||||||
|
"""
|
||||||
|
return tuple(parse_upstream_sources_from_env())
|
||||||
|
|||||||
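All of the settings above are plain pydantic fields, so with env_prefix "ORCHARD_" and case_sensitive False each one can be overridden from the environment, and upstream sources can be declared the same way. A minimal sketch of how a deployment might drive this (the hostname and the npm-private source are invented for illustration, and the app.config import path is assumed):

    import os

    # Pool sizing and Redis come straight from ORCHARD_-prefixed variables.
    os.environ["ORCHARD_DATABASE_POOL_SIZE"] = "20"
    os.environ["ORCHARD_REDIS_HOST"] = "redis.internal"          # assumed hostname
    # An upstream source declared with the double-underscore pattern documented above.
    os.environ["ORCHARD_UPSTREAM__NPM_PRIVATE__URL"] = "https://npm.corp.com"
    os.environ["ORCHARD_UPSTREAM__NPM_PRIVATE__TYPE"] = "npm"

    from app.config import get_settings, parse_upstream_sources_from_env

    settings = get_settings()
    print(settings.database_pool_size)   # 20, overriding the class default

    sources = parse_upstream_sources_from_env()
    print([(s.name, s.source_type, s.url) for s in sources])
    # [('npm-private', 'npm', 'https://npm.corp.com')]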
@@ -1,17 +1,34 @@
 from sqlalchemy import create_engine, text, event
 from sqlalchemy.orm import sessionmaker, Session
 from sqlalchemy.pool import QueuePool
-from typing import Generator
+from typing import Generator, NamedTuple
 from contextlib import contextmanager
 import logging
 import time
+import hashlib

 from .config import get_settings
 from .models import Base
+from .purge_seed_data import should_purge_seed_data, purge_seed_data

 settings = get_settings()
 logger = logging.getLogger(__name__)
+
+
+class Migration(NamedTuple):
+    """A database migration with a unique name and SQL to execute."""
+    name: str
+    sql: str
+
+
+# PostgreSQL error codes that indicate "already exists" - safe to skip
+SAFE_PG_ERROR_CODES = {
+    "42P07",  # duplicate_table
+    "42701",  # duplicate_column
+    "42710",  # duplicate_object (index, constraint, etc.)
+    "42P16",  # invalid_table_definition (e.g., column already exists)
+}

 # Build connect_args with query timeout if configured
 connect_args = {}
 if settings.database_query_timeout > 0:
@@ -64,12 +81,74 @@ def init_db():
     # Run migrations for schema updates
     _run_migrations()
+
+    # Purge seed data if requested (for transitioning to production-like environment)
+    if should_purge_seed_data():
+        db = SessionLocal()
+        try:
+            purge_seed_data(db)
+        finally:
+            db.close()
+
+
+def _ensure_migrations_table(conn) -> None:
+    """Create the migrations tracking table if it doesn't exist."""
+    conn.execute(text("""
+        CREATE TABLE IF NOT EXISTS _schema_migrations (
+            name VARCHAR(255) PRIMARY KEY,
+            checksum VARCHAR(64) NOT NULL,
+            applied_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
+        );
+    """))
+    conn.commit()
+
+
+def _get_applied_migrations(conn) -> dict[str, str]:
+    """Get all applied migrations and their checksums."""
+    result = conn.execute(text(
+        "SELECT name, checksum FROM _schema_migrations"
+    ))
+    return {row[0]: row[1] for row in result}
+
+
+def _compute_checksum(sql: str) -> str:
+    """Compute a checksum for migration SQL to detect changes."""
+    return hashlib.sha256(sql.strip().encode()).hexdigest()[:16]
+
+
+def _is_safe_error(exception: Exception) -> bool:
+    """Check if the error indicates the migration was already applied."""
+    # Check for psycopg2 errors with pgcode attribute
+    original = getattr(exception, "orig", None)
+    if original is not None:
+        pgcode = getattr(original, "pgcode", None)
+        if pgcode in SAFE_PG_ERROR_CODES:
+            return True
+
+    # Fallback: check error message for common "already exists" patterns
+    error_str = str(exception).lower()
+    safe_patterns = [
+        "already exists",
+        "duplicate key",
+        "relation .* already exists",
+        "column .* already exists",
+    ]
+    return any(pattern in error_str for pattern in safe_patterns)
+
+
+def _record_migration(conn, name: str, checksum: str) -> None:
+    """Record a migration as applied."""
+    conn.execute(text(
+        "INSERT INTO _schema_migrations (name, checksum) VALUES (:name, :checksum)"
+    ), {"name": name, "checksum": checksum})
+    conn.commit()
+
+
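Together these helpers give the migration runner a small ledger: every Migration is hashed, looked up in _schema_migrations, and recorded once it has run (or once its objects are found to already exist). An illustrative sketch of the bookkeeping for a single entry, using a placeholder name and SQL:

    # Illustrative only: how the tracking pieces compose for one migration.
    m = Migration(name="000_example_noop", sql="SELECT 1")   # placeholder entry

    checksum = _compute_checksum(m.sql)   # first 16 hex chars of sha256(sql.strip())

    with engine.connect() as conn:
        _ensure_migrations_table(conn)             # CREATE TABLE IF NOT EXISTS _schema_migrations ...
        applied = _get_applied_migrations(conn)    # e.g. {"000_example_noop": "ab12cd34ef56ab78"}
        if m.name not in applied:
            conn.execute(text(m.sql))
            conn.commit()
            _record_migration(conn, m.name, checksum)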
def _run_migrations():
|
def _run_migrations():
|
||||||
"""Run manual migrations for schema updates"""
|
"""Run manual migrations for schema updates with tracking and error detection."""
|
||||||
migrations = [
|
migrations = [
|
||||||
# Add format_metadata column to artifacts table
|
Migration(
|
||||||
"""
|
name="001_add_format_metadata",
|
||||||
|
sql="""
|
||||||
DO $$
|
DO $$
|
||||||
BEGIN
|
BEGIN
|
||||||
IF NOT EXISTS (
|
IF NOT EXISTS (
|
||||||
@@ -80,8 +159,10 @@ def _run_migrations():
|
|||||||
END IF;
|
END IF;
|
||||||
END $$;
|
END $$;
|
||||||
""",
|
""",
|
||||||
# Add format column to packages table
|
),
|
||||||
"""
|
Migration(
|
||||||
|
name="002_add_package_format",
|
||||||
|
sql="""
|
||||||
DO $$
|
DO $$
|
||||||
BEGIN
|
BEGIN
|
||||||
IF NOT EXISTS (
|
IF NOT EXISTS (
|
||||||
@@ -93,8 +174,10 @@ def _run_migrations():
|
|||||||
END IF;
|
END IF;
|
||||||
END $$;
|
END $$;
|
||||||
""",
|
""",
|
||||||
# Add platform column to packages table
|
),
|
||||||
"""
|
Migration(
|
||||||
|
name="003_add_package_platform",
|
||||||
|
sql="""
|
||||||
DO $$
|
DO $$
|
||||||
BEGIN
|
BEGIN
|
||||||
IF NOT EXISTS (
|
IF NOT EXISTS (
|
||||||
@@ -106,18 +189,18 @@ def _run_migrations():
|
|||||||
END IF;
|
END IF;
|
||||||
END $$;
|
END $$;
|
||||||
""",
|
""",
|
||||||
# Add ref_count index and constraints for artifacts
|
),
|
||||||
"""
|
Migration(
|
||||||
|
name="004_add_ref_count_index_constraint",
|
||||||
|
sql="""
|
||||||
DO $$
|
DO $$
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Add ref_count index
|
|
||||||
IF NOT EXISTS (
|
IF NOT EXISTS (
|
||||||
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_artifacts_ref_count'
|
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_artifacts_ref_count'
|
||||||
) THEN
|
) THEN
|
||||||
CREATE INDEX idx_artifacts_ref_count ON artifacts(ref_count);
|
CREATE INDEX idx_artifacts_ref_count ON artifacts(ref_count);
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
-- Add ref_count >= 0 constraint
|
|
||||||
IF NOT EXISTS (
|
IF NOT EXISTS (
|
||||||
SELECT 1 FROM pg_constraint WHERE conname = 'check_ref_count_non_negative'
|
SELECT 1 FROM pg_constraint WHERE conname = 'check_ref_count_non_negative'
|
||||||
) THEN
|
) THEN
|
||||||
@@ -125,39 +208,28 @@ def _run_migrations():
|
|||||||
END IF;
|
END IF;
|
||||||
END $$;
|
END $$;
|
||||||
""",
|
""",
|
||||||
# Add composite indexes for packages and tags
|
),
|
||||||
"""
|
Migration(
|
||||||
|
name="005_add_composite_indexes",
|
||||||
|
sql="""
|
||||||
DO $$
|
DO $$
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Composite index for package lookup by project and name
|
|
||||||
IF NOT EXISTS (
|
IF NOT EXISTS (
|
||||||
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_packages_project_name'
|
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_packages_project_name'
|
||||||
) THEN
|
) THEN
|
||||||
CREATE UNIQUE INDEX idx_packages_project_name ON packages(project_id, name);
|
CREATE UNIQUE INDEX idx_packages_project_name ON packages(project_id, name);
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
-- Composite index for tag lookup by package and name
|
-- Tag indexes removed: tags table no longer exists (removed in tag system removal)
|
||||||
IF NOT EXISTS (
|
|
||||||
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tags_package_name'
|
|
||||||
) THEN
|
|
||||||
CREATE UNIQUE INDEX idx_tags_package_name ON tags(package_id, name);
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
-- Composite index for recent tags queries
|
|
||||||
IF NOT EXISTS (
|
|
||||||
SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tags_package_created_at'
|
|
||||||
) THEN
|
|
||||||
CREATE INDEX idx_tags_package_created_at ON tags(package_id, created_at);
|
|
||||||
END IF;
|
|
||||||
END $$;
|
END $$;
|
||||||
""",
|
""",
|
||||||
# Add package_versions indexes and triggers (007_package_versions.sql)
|
),
|
||||||
"""
|
Migration(
|
||||||
|
name="006_add_package_versions_indexes",
|
||||||
|
sql="""
|
||||||
DO $$
|
DO $$
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Create indexes for package_versions if table exists
|
|
||||||
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
|
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
|
||||||
-- Indexes for common queries
|
|
||||||
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_package_versions_package_id') THEN
|
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_package_versions_package_id') THEN
|
||||||
CREATE INDEX idx_package_versions_package_id ON package_versions(package_id);
|
CREATE INDEX idx_package_versions_package_id ON package_versions(package_id);
|
||||||
END IF;
|
END IF;
|
||||||
@@ -170,8 +242,10 @@ def _run_migrations():
|
|||||||
END IF;
|
END IF;
|
||||||
END $$;
|
END $$;
|
||||||
""",
|
""",
|
||||||
# Create ref_count trigger functions for tags (ensures triggers exist even if initial migration wasn't run)
|
),
|
||||||
"""
|
Migration(
|
||||||
|
name="007_create_ref_count_trigger_functions",
|
||||||
|
sql="""
|
||||||
CREATE OR REPLACE FUNCTION increment_artifact_ref_count()
|
CREATE OR REPLACE FUNCTION increment_artifact_ref_count()
|
||||||
RETURNS TRIGGER AS $$
|
RETURNS TRIGGER AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
@@ -179,8 +253,7 @@ def _run_migrations():
|
|||||||
RETURN NEW;
|
RETURN NEW;
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE plpgsql;
|
$$ LANGUAGE plpgsql;
|
||||||
""",
|
|
||||||
"""
|
|
||||||
CREATE OR REPLACE FUNCTION decrement_artifact_ref_count()
|
CREATE OR REPLACE FUNCTION decrement_artifact_ref_count()
|
||||||
RETURNS TRIGGER AS $$
|
RETURNS TRIGGER AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
@@ -188,8 +261,7 @@ def _run_migrations():
|
|||||||
RETURN OLD;
|
RETURN OLD;
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE plpgsql;
|
$$ LANGUAGE plpgsql;
|
||||||
""",
|
|
||||||
"""
|
|
||||||
CREATE OR REPLACE FUNCTION update_artifact_ref_count()
|
CREATE OR REPLACE FUNCTION update_artifact_ref_count()
|
||||||
RETURNS TRIGGER AS $$
|
RETURNS TRIGGER AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
@@ -201,33 +273,17 @@ def _run_migrations():
|
|||||||
END;
|
END;
|
||||||
$$ LANGUAGE plpgsql;
|
$$ LANGUAGE plpgsql;
|
||||||
""",
|
""",
|
||||||
# Create triggers for tags ref_count management
|
),
|
||||||
"""
|
Migration(
|
||||||
DO $$
|
name="008_create_tags_ref_count_triggers",
|
||||||
BEGIN
|
sql="""
|
||||||
-- Drop and recreate triggers to ensure they're current
|
-- Tags table removed: triggers no longer needed (tag system removed)
|
||||||
DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
|
DO $$ BEGIN NULL; END $$;
|
||||||
CREATE TRIGGER tags_ref_count_insert_trigger
|
|
||||||
AFTER INSERT ON tags
|
|
||||||
FOR EACH ROW
|
|
||||||
EXECUTE FUNCTION increment_artifact_ref_count();
|
|
||||||
|
|
||||||
DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
|
|
||||||
CREATE TRIGGER tags_ref_count_delete_trigger
|
|
||||||
AFTER DELETE ON tags
|
|
||||||
FOR EACH ROW
|
|
||||||
EXECUTE FUNCTION decrement_artifact_ref_count();
|
|
||||||
|
|
||||||
DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
|
|
||||||
CREATE TRIGGER tags_ref_count_update_trigger
|
|
||||||
AFTER UPDATE ON tags
|
|
||||||
FOR EACH ROW
|
|
||||||
WHEN (OLD.artifact_id IS DISTINCT FROM NEW.artifact_id)
|
|
||||||
EXECUTE FUNCTION update_artifact_ref_count();
|
|
||||||
END $$;
|
|
||||||
""",
|
""",
|
||||||
# Create ref_count trigger functions for package_versions
|
),
|
||||||
"""
|
Migration(
|
||||||
|
name="009_create_version_ref_count_functions",
|
||||||
|
sql="""
|
||||||
CREATE OR REPLACE FUNCTION increment_version_ref_count()
|
CREATE OR REPLACE FUNCTION increment_version_ref_count()
|
||||||
RETURNS TRIGGER AS $$
|
RETURNS TRIGGER AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
@@ -235,8 +291,7 @@ def _run_migrations():
|
|||||||
RETURN NEW;
|
RETURN NEW;
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE plpgsql;
|
$$ LANGUAGE plpgsql;
|
||||||
""",
|
|
||||||
"""
|
|
||||||
CREATE OR REPLACE FUNCTION decrement_version_ref_count()
|
CREATE OR REPLACE FUNCTION decrement_version_ref_count()
|
||||||
RETURNS TRIGGER AS $$
|
RETURNS TRIGGER AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
@@ -245,12 +300,13 @@ def _run_migrations():
|
|||||||
END;
|
END;
|
||||||
$$ LANGUAGE plpgsql;
|
$$ LANGUAGE plpgsql;
|
||||||
""",
|
""",
|
||||||
# Create triggers for package_versions ref_count
|
),
|
||||||
"""
|
Migration(
|
||||||
|
name="010_create_package_versions_triggers",
|
||||||
|
sql="""
|
||||||
DO $$
|
DO $$
|
||||||
BEGIN
|
BEGIN
|
||||||
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
|
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
|
||||||
-- Drop and recreate triggers to ensure they're current
|
|
||||||
DROP TRIGGER IF EXISTS package_versions_ref_count_insert ON package_versions;
|
DROP TRIGGER IF EXISTS package_versions_ref_count_insert ON package_versions;
|
||||||
CREATE TRIGGER package_versions_ref_count_insert
|
CREATE TRIGGER package_versions_ref_count_insert
|
||||||
AFTER INSERT ON package_versions
|
AFTER INSERT ON package_versions
|
||||||
@@ -265,14 +321,18 @@ def _run_migrations():
|
|||||||
END IF;
|
END IF;
|
||||||
END $$;
|
END $$;
|
||||||
""",
|
""",
|
||||||
# Migrate existing semver tags to package_versions
|
),
|
||||||
r"""
|
Migration(
|
||||||
|
name="011_migrate_semver_tags_to_versions",
|
||||||
|
sql=r"""
|
||||||
|
-- Migrate semver tags to versions (only if both tables exist - for existing databases)
|
||||||
DO $$
|
DO $$
|
||||||
BEGIN
|
BEGIN
|
||||||
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions') THEN
|
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'package_versions')
|
||||||
-- Migrate tags that look like versions (v1.0.0, 1.2.3, 2.0.0-beta, etc.)
|
AND EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'tags') THEN
|
||||||
INSERT INTO package_versions (package_id, artifact_id, version, version_source, created_by, created_at)
|
INSERT INTO package_versions (id, package_id, artifact_id, version, version_source, created_by, created_at)
|
||||||
SELECT
|
SELECT
|
||||||
|
gen_random_uuid(),
|
||||||
t.package_id,
|
t.package_id,
|
||||||
t.artifact_id,
|
t.artifact_id,
|
||||||
CASE WHEN t.name LIKE 'v%' THEN substring(t.name from 2) ELSE t.name END,
|
CASE WHEN t.name LIKE 'v%' THEN substring(t.name from 2) ELSE t.name END,
|
||||||
@@ -285,15 +345,298 @@ def _run_migrations():
|
|||||||
END IF;
|
END IF;
|
||||||
END $$;
|
END $$;
|
||||||
""",
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="012_create_teams_table",
|
||||||
|
sql="""
|
||||||
|
CREATE TABLE IF NOT EXISTS teams (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
slug VARCHAR(255) NOT NULL UNIQUE,
|
||||||
|
description TEXT,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||||
|
created_by VARCHAR(255) NOT NULL,
|
||||||
|
settings JSONB DEFAULT '{}'
|
||||||
|
);
|
||||||
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="013_create_team_memberships_table",
|
||||||
|
sql="""
|
||||||
|
CREATE TABLE IF NOT EXISTS team_memberships (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
team_id UUID NOT NULL REFERENCES teams(id) ON DELETE CASCADE,
|
||||||
|
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
role VARCHAR(50) NOT NULL DEFAULT 'member',
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||||
|
invited_by VARCHAR(255),
|
||||||
|
CONSTRAINT team_memberships_unique UNIQUE (team_id, user_id),
|
||||||
|
CONSTRAINT team_memberships_role_check CHECK (role IN ('owner', 'admin', 'member'))
|
||||||
|
);
|
||||||
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="014_add_team_id_to_projects",
|
||||||
|
sql="""
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_name = 'projects' AND column_name = 'team_id'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE projects ADD COLUMN team_id UUID REFERENCES teams(id) ON DELETE SET NULL;
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_projects_team_id ON projects(team_id);
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="015_add_teams_indexes",
|
||||||
|
sql="""
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_teams_slug') THEN
|
||||||
|
CREATE INDEX idx_teams_slug ON teams(slug);
|
||||||
|
END IF;
|
||||||
|
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_teams_created_by') THEN
|
||||||
|
CREATE INDEX idx_teams_created_by ON teams(created_by);
|
||||||
|
END IF;
|
||||||
|
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_team_memberships_team_id') THEN
|
||||||
|
CREATE INDEX idx_team_memberships_team_id ON team_memberships(team_id);
|
||||||
|
END IF;
|
||||||
|
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_team_memberships_user_id') THEN
|
||||||
|
CREATE INDEX idx_team_memberships_user_id ON team_memberships(user_id);
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="016_add_is_system_to_projects",
|
||||||
|
sql="""
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_name = 'projects' AND column_name = 'is_system'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE projects ADD COLUMN is_system BOOLEAN NOT NULL DEFAULT FALSE;
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_projects_is_system ON projects(is_system);
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="017_create_upstream_sources",
|
||||||
|
sql="""
|
||||||
|
CREATE TABLE IF NOT EXISTS upstream_sources (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
name VARCHAR(255) NOT NULL UNIQUE,
|
||||||
|
source_type VARCHAR(50) NOT NULL DEFAULT 'generic',
|
||||||
|
url VARCHAR(2048) NOT NULL,
|
||||||
|
enabled BOOLEAN NOT NULL DEFAULT FALSE,
|
||||||
|
auth_type VARCHAR(20) NOT NULL DEFAULT 'none',
|
||||||
|
username VARCHAR(255),
|
||||||
|
password_encrypted BYTEA,
|
||||||
|
headers_encrypted BYTEA,
|
||||||
|
priority INTEGER NOT NULL DEFAULT 100,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||||
|
CONSTRAINT check_source_type CHECK (
|
||||||
|
source_type IN ('npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic')
|
||||||
|
),
|
||||||
|
CONSTRAINT check_auth_type CHECK (
|
||||||
|
auth_type IN ('none', 'basic', 'bearer', 'api_key')
|
||||||
|
),
|
||||||
|
CONSTRAINT check_priority_positive CHECK (priority > 0)
|
||||||
|
);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_upstream_sources_enabled ON upstream_sources(enabled);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_upstream_sources_source_type ON upstream_sources(source_type);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_upstream_sources_priority ON upstream_sources(priority);
|
||||||
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="018_create_cache_settings",
|
||||||
|
sql="""
|
||||||
|
CREATE TABLE IF NOT EXISTS cache_settings (
|
||||||
|
id INTEGER PRIMARY KEY DEFAULT 1,
|
||||||
|
auto_create_system_projects BOOLEAN NOT NULL DEFAULT TRUE,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||||
|
CONSTRAINT check_cache_settings_singleton CHECK (id = 1)
|
||||||
|
);
|
||||||
|
INSERT INTO cache_settings (id, auto_create_system_projects)
|
||||||
|
VALUES (1, TRUE)
|
||||||
|
ON CONFLICT (id) DO NOTHING;
|
||||||
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="019_create_cached_urls",
|
||||||
|
sql="""
|
||||||
|
CREATE TABLE IF NOT EXISTS cached_urls (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
url VARCHAR(4096) NOT NULL,
|
||||||
|
url_hash VARCHAR(64) NOT NULL UNIQUE,
|
||||||
|
artifact_id VARCHAR(64) NOT NULL REFERENCES artifacts(id),
|
||||||
|
source_id UUID REFERENCES upstream_sources(id) ON DELETE SET NULL,
|
||||||
|
fetched_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
|
||||||
|
response_headers JSONB DEFAULT '{}',
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||||
|
);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_cached_urls_url_hash ON cached_urls(url_hash);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_cached_urls_artifact_id ON cached_urls(artifact_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_cached_urls_source_id ON cached_urls(source_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_cached_urls_fetched_at ON cached_urls(fetched_at);
|
||||||
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="020_seed_default_upstream_sources",
|
||||||
|
sql="""
|
||||||
|
-- Originally seeded public sources, but these are no longer used.
|
||||||
|
-- Migration 023 deletes any previously seeded sources.
|
||||||
|
-- This migration is now a no-op for fresh installs.
|
||||||
|
SELECT 1;
|
||||||
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="021_remove_is_public_from_upstream_sources",
|
||||||
|
sql="""
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
-- Drop the index if it exists
|
||||||
|
DROP INDEX IF EXISTS idx_upstream_sources_is_public;
|
||||||
|
|
||||||
|
-- Drop the column if it exists
|
||||||
|
IF EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_name = 'upstream_sources' AND column_name = 'is_public'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE upstream_sources DROP COLUMN is_public;
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="022_remove_allow_public_internet_from_cache_settings",
|
||||||
|
sql="""
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_name = 'cache_settings' AND column_name = 'allow_public_internet'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE cache_settings DROP COLUMN allow_public_internet;
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="023_delete_seeded_public_sources",
|
||||||
|
sql="""
|
||||||
|
-- Delete the seeded public sources that were added by migration 020
|
||||||
|
DELETE FROM upstream_sources
|
||||||
|
WHERE name IN ('npm-public', 'pypi-public', 'maven-central', 'docker-hub');
|
||||||
|
""",
|
||||||
|
),
|
||||||
|
Migration(
|
||||||
|
name="024_remove_tags",
|
||||||
|
sql="""
|
||||||
|
-- Remove tag system, keeping only versions for artifact references
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
-- Drop triggers on tags table (if they exist)
|
||||||
|
DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
|
||||||
|
DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
|
||||||
|
DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
|
||||||
|
DROP TRIGGER IF EXISTS tags_updated_at_trigger ON tags;
|
||||||
|
DROP TRIGGER IF EXISTS tag_changes_trigger ON tags;
|
||||||
|
|
||||||
|
-- Drop the tag change tracking function
|
||||||
|
DROP FUNCTION IF EXISTS track_tag_changes();
|
||||||
|
|
||||||
|
-- Remove tag_constraint from artifact_dependencies
|
||||||
|
IF EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.table_constraints
|
||||||
|
WHERE constraint_name = 'check_constraint_type'
|
||||||
|
AND table_name = 'artifact_dependencies'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE artifact_dependencies DROP CONSTRAINT check_constraint_type;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- Remove the tag_constraint column if it exists
|
||||||
|
IF EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_name = 'artifact_dependencies' AND column_name = 'tag_constraint'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE artifact_dependencies DROP COLUMN tag_constraint;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- Make version_constraint NOT NULL
|
||||||
|
UPDATE artifact_dependencies SET version_constraint = '*' WHERE version_constraint IS NULL;
|
||||||
|
ALTER TABLE artifact_dependencies ALTER COLUMN version_constraint SET NOT NULL;
|
||||||
|
|
||||||
|
-- Drop tag_history table first (depends on tags)
|
||||||
|
DROP TABLE IF EXISTS tag_history;
|
||||||
|
|
||||||
|
-- Drop tags table
|
||||||
|
DROP TABLE IF EXISTS tags;
|
||||||
|
|
||||||
|
-- Rename uploads.tag_name to version if it exists and version doesn't
|
||||||
|
IF EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_name = 'uploads' AND column_name = 'tag_name'
|
||||||
|
) AND NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_name = 'uploads' AND column_name = 'version'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE uploads RENAME COLUMN tag_name TO version;
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
""",
|
||||||
|
),
|
||||||
]
|
]
|
||||||

     with engine.connect() as conn:
+        # Ensure migrations tracking table exists
+        _ensure_migrations_table(conn)
+
+        # Get already-applied migrations
+        applied = _get_applied_migrations(conn)
+
         for migration in migrations:
+            checksum = _compute_checksum(migration.sql)
+
+            # Check if migration was already applied
+            if migration.name in applied:
+                stored_checksum = applied[migration.name]
+                if stored_checksum != checksum:
+                    logger.warning(
+                        f"Migration '{migration.name}' has changed since it was applied! "
+                        f"Stored checksum: {stored_checksum}, current: {checksum}"
+                    )
+                continue
+
+            # Run the migration
             try:
-                conn.execute(text(migration))
+                logger.info(f"Running migration: {migration.name}")
+                conn.execute(text(migration.sql))
                 conn.commit()
+                _record_migration(conn, migration.name, checksum)
+                logger.info(f"Migration '{migration.name}' applied successfully")
             except Exception as e:
-                logger.warning(f"Migration failed (may already be applied): {e}")
+                conn.rollback()
+                if _is_safe_error(e):
+                    # Migration was already applied (schema already exists)
+                    logger.info(
+                        f"Migration '{migration.name}' already applied (schema exists), recording as complete"
+                    )
+                    _record_migration(conn, migration.name, checksum)
+                else:
+                    # Real error - fail hard
+                    logger.error(f"Migration '{migration.name}' failed: {e}")
+                    raise RuntimeError(
+                        f"Migration '{migration.name}' failed with error: {e}"
+                    ) from e


 def get_db() -> Generator[Session, None, None]:
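With the ledger in place, extending the schema means appending one more Migration to the list above; on the next start it is hashed, executed once, and recorded, and an "already exists" error backfills the ledger instead of failing the boot. A hypothetical next entry, following the pattern of the entries above (the name and column are invented):

    Migration(
        name="025_add_download_count_to_packages",   # invented example
        sql="""
        DO $$
        BEGIN
            IF NOT EXISTS (
                SELECT 1 FROM information_schema.columns
                WHERE table_name = 'packages' AND column_name = 'download_count'
            ) THEN
                ALTER TABLE packages ADD COLUMN download_count BIGINT NOT NULL DEFAULT 0;
            END IF;
        END $$;
        """,
    ),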
backend/app/db_utils.py (new file, 175 lines)
@@ -0,0 +1,175 @@
|
"""
|
||||||
|
Database utilities for optimized artifact operations.
|
||||||
|
|
||||||
|
Provides batch operations to eliminate N+1 queries.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from sqlalchemy.dialects.postgresql import insert as pg_insert
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from .models import Artifact, ArtifactDependency, CachedUrl
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ArtifactRepository:
|
||||||
|
"""
|
||||||
|
Optimized database operations for artifact storage.
|
||||||
|
|
||||||
|
Key optimizations:
|
||||||
|
- Atomic upserts using ON CONFLICT
|
||||||
|
- Batch inserts for dependencies
|
||||||
|
- Joined queries to avoid N+1
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, db: Session):
|
||||||
|
self.db = db
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _format_dependency_values(
|
||||||
|
artifact_id: str,
|
||||||
|
dependencies: list[tuple[str, str, str]],
|
||||||
|
) -> list[dict]:
|
||||||
|
"""
|
||||||
|
Format dependencies for batch insert.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
artifact_id: SHA256 of the artifact
|
||||||
|
dependencies: List of (project, package, version_constraint)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of dicts ready for bulk insert.
|
||||||
|
"""
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"artifact_id": artifact_id,
|
||||||
|
"dependency_project": proj,
|
||||||
|
"dependency_package": pkg,
|
||||||
|
"version_constraint": ver,
|
||||||
|
}
|
||||||
|
for proj, pkg, ver in dependencies
|
||||||
|
]
|
||||||
|
|
||||||
|
def get_or_create_artifact(
|
||||||
|
self,
|
||||||
|
sha256: str,
|
||||||
|
size: int,
|
||||||
|
filename: str,
|
||||||
|
content_type: Optional[str] = None,
|
||||||
|
created_by: str = "system",
|
||||||
|
s3_key: Optional[str] = None,
|
||||||
|
) -> tuple[Artifact, bool]:
|
||||||
|
"""
|
||||||
|
Get existing artifact or create new one atomically.
|
||||||
|
|
||||||
|
Uses INSERT ... ON CONFLICT DO UPDATE to handle races.
|
||||||
|
If artifact exists, increments ref_count.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
sha256: Content hash (primary key)
|
||||||
|
size: File size in bytes
|
||||||
|
filename: Original filename
|
||||||
|
content_type: MIME type
|
||||||
|
created_by: User who created the artifact
|
||||||
|
s3_key: S3 storage key (defaults to standard path)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
(artifact, created) tuple where created is True for new artifacts.
|
||||||
|
"""
|
||||||
|
if s3_key is None:
|
||||||
|
s3_key = f"fruits/{sha256[:2]}/{sha256[2:4]}/{sha256}"
|
||||||
|
|
||||||
|
stmt = pg_insert(Artifact).values(
|
||||||
|
id=sha256,
|
||||||
|
size=size,
|
||||||
|
original_name=filename,
|
||||||
|
content_type=content_type,
|
||||||
|
ref_count=1,
|
||||||
|
created_by=created_by,
|
||||||
|
s3_key=s3_key,
|
||||||
|
).on_conflict_do_update(
|
||||||
|
index_elements=['id'],
|
||||||
|
set_={'ref_count': Artifact.ref_count + 1}
|
||||||
|
).returning(Artifact)
|
||||||
|
|
||||||
|
result = self.db.execute(stmt)
|
||||||
|
artifact = result.scalar_one()
|
||||||
|
|
||||||
|
# Check if this was an insert or update by comparing ref_count
|
||||||
|
# ref_count=1 means new, >1 means existing
|
||||||
|
created = artifact.ref_count == 1
|
||||||
|
|
||||||
|
return artifact, created
|
||||||
|
|
||||||
|
def batch_upsert_dependencies(
|
||||||
|
self,
|
||||||
|
artifact_id: str,
|
||||||
|
dependencies: list[tuple[str, str, str]],
|
||||||
|
) -> int:
|
||||||
|
"""
|
||||||
|
Insert dependencies in a single batch operation.
|
||||||
|
|
||||||
|
Uses ON CONFLICT DO NOTHING to skip duplicates.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
artifact_id: SHA256 of the artifact
|
||||||
|
dependencies: List of (project, package, version_constraint)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Number of dependencies inserted.
|
||||||
|
"""
|
||||||
|
if not dependencies:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
values = self._format_dependency_values(artifact_id, dependencies)
|
||||||
|
|
||||||
|
stmt = pg_insert(ArtifactDependency).values(values)
|
||||||
|
stmt = stmt.on_conflict_do_nothing(
|
||||||
|
index_elements=['artifact_id', 'dependency_project', 'dependency_package']
|
||||||
|
)
|
||||||
|
|
||||||
|
result = self.db.execute(stmt)
|
||||||
|
return result.rowcount
|
||||||
|
|
||||||
|
def get_cached_url_with_artifact(
|
||||||
|
self,
|
||||||
|
url_hash: str,
|
||||||
|
) -> Optional[tuple[CachedUrl, Artifact]]:
|
||||||
|
"""
|
||||||
|
Get cached URL and its artifact in a single query.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url_hash: SHA256 of the URL
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
(CachedUrl, Artifact) tuple or None if not found.
|
||||||
|
"""
|
||||||
|
result = (
|
||||||
|
self.db.query(CachedUrl, Artifact)
|
||||||
|
.join(Artifact, CachedUrl.artifact_id == Artifact.id)
|
||||||
|
.filter(CachedUrl.url_hash == url_hash)
|
||||||
|
.first()
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def get_artifact_dependencies(
|
||||||
|
self,
|
||||||
|
artifact_id: str,
|
||||||
|
) -> list[ArtifactDependency]:
|
||||||
|
"""
|
||||||
|
Get all dependencies for an artifact in a single query.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
artifact_id: SHA256 of the artifact
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of ArtifactDependency objects.
|
||||||
|
"""
|
||||||
|
return (
|
||||||
|
self.db.query(ArtifactDependency)
|
||||||
|
.filter(ArtifactDependency.artifact_id == artifact_id)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
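A short usage sketch for the repository above, from a caller that has already computed the content hash; the session handling mirrors SessionLocal from database.py, and the hash, filename, and dependency tuple are placeholders (import paths assumed):

    from app.database import SessionLocal
    from app.db_utils import ArtifactRepository

    db = SessionLocal()
    try:
        repo = ArtifactRepository(db)
        artifact, created = repo.get_or_create_artifact(
            sha256="e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
            size=1024,
            filename="example-1.0.0.tar.gz",
            content_type="application/gzip",
            created_by="ci-bot",
        )
        # Dependencies are (project, package, version_constraint) tuples.
        repo.batch_upsert_dependencies(artifact.id, [("pypi", "requests", ">=2.31")])
        db.commit()
    finally:
        db.close()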
File diff suppressed because it is too large

backend/app/encryption.py (new file, 160 lines)
@@ -0,0 +1,160 @@
|
"""
|
||||||
|
Encryption utilities for sensitive data storage.
|
||||||
|
|
||||||
|
Uses Fernet symmetric encryption for credentials like upstream passwords.
|
||||||
|
The encryption key is sourced from ORCHARD_CACHE_ENCRYPTION_KEY environment variable.
|
||||||
|
If not set, a random key is generated on startup (with a warning).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import secrets
|
||||||
|
from functools import lru_cache
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from cryptography.fernet import Fernet, InvalidToken
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Module-level storage for auto-generated key (only used if env var not set)
|
||||||
|
_generated_key: Optional[bytes] = None
|
||||||
|
|
||||||
|
|
||||||
|
def _get_key_from_env() -> Optional[bytes]:
|
||||||
|
"""Get encryption key from environment variable."""
|
||||||
|
key_str = os.environ.get("ORCHARD_CACHE_ENCRYPTION_KEY", "")
|
||||||
|
if not key_str:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Support both raw base64 and url-safe base64 formats
|
||||||
|
try:
|
||||||
|
# Try to decode as-is (Fernet keys are url-safe base64)
|
||||||
|
key_bytes = key_str.encode("utf-8")
|
||||||
|
# Validate it's a valid Fernet key by trying to create a Fernet instance
|
||||||
|
Fernet(key_bytes)
|
||||||
|
return key_bytes
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Try base64 decoding if it's a raw 32-byte key encoded as base64
|
||||||
|
try:
|
||||||
|
decoded = base64.urlsafe_b64decode(key_str)
|
||||||
|
if len(decoded) == 32:
|
||||||
|
# Re-encode as url-safe base64 for Fernet
|
||||||
|
key_bytes = base64.urlsafe_b64encode(decoded)
|
||||||
|
Fernet(key_bytes)
|
||||||
|
return key_bytes
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
logger.error(
|
||||||
|
"ORCHARD_CACHE_ENCRYPTION_KEY is set but invalid. "
|
||||||
|
"Must be a valid Fernet key (32 bytes, url-safe base64 encoded). "
|
||||||
|
"Generate one with: python -c \"from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())\""
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_encryption_key() -> bytes:
|
||||||
|
"""
|
||||||
|
Get the Fernet encryption key.
|
||||||
|
|
||||||
|
Returns the key from ORCHARD_CACHE_ENCRYPTION_KEY if set and valid,
|
||||||
|
otherwise generates a random key (with a warning logged).
|
||||||
|
|
||||||
|
The generated key is cached for the lifetime of the process.
|
||||||
|
"""
|
||||||
|
global _generated_key
|
||||||
|
|
||||||
|
# Try to get from environment
|
||||||
|
env_key = _get_key_from_env()
|
||||||
|
if env_key:
|
||||||
|
return env_key
|
||||||
|
|
||||||
|
# Generate a new key if needed
|
||||||
|
if _generated_key is None:
|
||||||
|
_generated_key = Fernet.generate_key()
|
||||||
|
logger.warning(
|
||||||
|
"ORCHARD_CACHE_ENCRYPTION_KEY not set - using auto-generated key. "
|
||||||
|
"Encrypted credentials will be lost on restart! "
|
||||||
|
"Set ORCHARD_CACHE_ENCRYPTION_KEY for persistent encryption. "
|
||||||
|
"Generate a key with: python -c \"from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())\""
|
||||||
|
)
|
||||||
|
|
||||||
|
return _generated_key
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache(maxsize=1)
|
||||||
|
def _get_fernet() -> Fernet:
|
||||||
|
"""Get a cached Fernet instance."""
|
||||||
|
return Fernet(get_encryption_key())
|
||||||
|
|
||||||
|
|
||||||
|
def encrypt_value(plaintext: str) -> bytes:
|
||||||
|
"""
|
||||||
|
Encrypt a string value using Fernet.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
plaintext: The string to encrypt
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Encrypted bytes (includes Fernet token with timestamp)
|
||||||
|
"""
|
||||||
|
if not plaintext:
|
||||||
|
raise ValueError("Cannot encrypt empty value")
|
||||||
|
|
||||||
|
fernet = _get_fernet()
|
||||||
|
return fernet.encrypt(plaintext.encode("utf-8"))
|
||||||
|
|
||||||
|
|
||||||
|
def decrypt_value(ciphertext: bytes) -> str:
|
||||||
|
"""
|
||||||
|
Decrypt a Fernet-encrypted value.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
ciphertext: The encrypted bytes
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Decrypted string
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
InvalidToken: If decryption fails (wrong key or corrupted data)
|
||||||
|
"""
|
||||||
|
if not ciphertext:
|
||||||
|
raise ValueError("Cannot decrypt empty value")
|
||||||
|
|
||||||
|
fernet = _get_fernet()
|
||||||
|
return fernet.decrypt(ciphertext).decode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def can_decrypt(ciphertext: bytes) -> bool:
|
||||||
|
"""
|
||||||
|
Check if a value can be decrypted with the current key.
|
||||||
|
|
||||||
|
Useful for checking if credentials are still valid after key rotation.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
ciphertext: The encrypted bytes
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if decryption succeeds, False otherwise
|
||||||
|
"""
|
||||||
|
if not ciphertext:
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
decrypt_value(ciphertext)
|
||||||
|
return True
|
||||||
|
except (InvalidToken, ValueError):
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def generate_key() -> str:
|
||||||
|
"""
|
||||||
|
Generate a new Fernet encryption key.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A valid Fernet key as a string (url-safe base64 encoded)
|
||||||
|
"""
|
||||||
|
return Fernet.generate_key().decode("utf-8")
|
||||||
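The module is a thin wrapper around Fernet, so a round trip looks like this; export ORCHARD_CACHE_ENCRYPTION_KEY (for example a value from generate_key()) so ciphertext survives restarts. Import path is assumed:

    import os
    from app.encryption import generate_key, encrypt_value, decrypt_value, can_decrypt

    # In production this key comes from your secret store, not generated at runtime.
    os.environ["ORCHARD_CACHE_ENCRYPTION_KEY"] = generate_key()

    token = encrypt_value("upstream-password")    # bytes, a Fernet token with timestamp
    assert decrypt_value(token) == "upstream-password"
    assert can_decrypt(token) is True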
backend/app/http_client.py (new file, 179 lines)
@@ -0,0 +1,179 @@
|
"""
|
||||||
|
HTTP client manager with connection pooling and lifecycle management.
|
||||||
|
|
||||||
|
Provides:
|
||||||
|
- Shared connection pools for upstream requests
|
||||||
|
- Per-upstream client isolation when needed
|
||||||
|
- Thread pool for blocking I/O operations
|
||||||
|
- FastAPI lifespan integration
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
|
from typing import Any, Callable, Optional
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
from .config import Settings
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class HttpClientManager:
|
||||||
|
"""
|
||||||
|
Manages httpx.AsyncClient pools with FastAPI lifespan integration.
|
||||||
|
|
||||||
|
Features:
|
||||||
|
- Default shared pool for general requests
|
||||||
|
- Per-upstream pools for sources needing specific config/auth
|
||||||
|
- Dedicated thread pool for blocking operations
|
||||||
|
- Graceful shutdown
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, settings: Settings):
|
||||||
|
self.max_connections = settings.http_max_connections
|
||||||
|
self.max_keepalive = settings.http_max_keepalive
|
||||||
|
self.connect_timeout = settings.http_connect_timeout
|
||||||
|
self.read_timeout = settings.http_read_timeout
|
||||||
|
self.worker_threads = settings.http_worker_threads
|
||||||
|
|
||||||
|
self._default_client: Optional[httpx.AsyncClient] = None
|
||||||
|
self._upstream_clients: dict[str, httpx.AsyncClient] = {}
|
||||||
|
self._executor: Optional[ThreadPoolExecutor] = None
|
||||||
|
self._started = False
|
||||||
|
|
||||||
|
async def startup(self) -> None:
|
||||||
|
"""Initialize clients and thread pool. Called by FastAPI lifespan."""
|
||||||
|
if self._started:
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Starting HttpClientManager: max_connections={self.max_connections}, "
|
||||||
|
f"worker_threads={self.worker_threads}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create connection limits
|
||||||
|
limits = httpx.Limits(
|
||||||
|
max_connections=self.max_connections,
|
||||||
|
max_keepalive_connections=self.max_keepalive,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create timeout config
|
||||||
|
timeout = httpx.Timeout(
|
||||||
|
connect=self.connect_timeout,
|
||||||
|
read=self.read_timeout,
|
||||||
|
write=self.read_timeout,
|
||||||
|
pool=self.connect_timeout,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create default client
|
||||||
|
self._default_client = httpx.AsyncClient(
|
||||||
|
limits=limits,
|
||||||
|
timeout=timeout,
|
||||||
|
follow_redirects=False, # Handle redirects manually for auth
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create thread pool for blocking operations
|
||||||
|
self._executor = ThreadPoolExecutor(
|
||||||
|
max_workers=self.worker_threads,
|
||||||
|
thread_name_prefix="orchard-blocking-",
|
||||||
|
)
|
||||||
|
|
||||||
|
self._started = True
|
||||||
|
logger.info("HttpClientManager started")
|
||||||
|
|
||||||
|
async def shutdown(self) -> None:
|
||||||
|
"""Close all clients and thread pool. Called by FastAPI lifespan."""
|
||||||
|
if not self._started:
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info("Shutting down HttpClientManager")
|
||||||
|
|
||||||
|
# Close default client
|
||||||
|
if self._default_client:
|
||||||
|
await self._default_client.aclose()
|
||||||
|
self._default_client = None
|
||||||
|
|
||||||
|
# Close upstream-specific clients
|
||||||
|
for name, client in self._upstream_clients.items():
|
||||||
|
logger.debug(f"Closing upstream client: {name}")
|
||||||
|
await client.aclose()
|
||||||
|
self._upstream_clients.clear()
|
||||||
|
|
||||||
|
# Shutdown thread pool
|
||||||
|
if self._executor:
|
||||||
|
self._executor.shutdown(wait=True)
|
||||||
|
self._executor = None
|
||||||
|
|
||||||
|
self._started = False
|
||||||
|
logger.info("HttpClientManager shutdown complete")
|
||||||
|
|
||||||
|
def get_client(self, upstream_name: Optional[str] = None) -> httpx.AsyncClient:
|
||||||
|
"""
|
||||||
|
Get HTTP client for making requests.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
upstream_name: Optional upstream source name for dedicated pool.
|
||||||
|
If None, returns the default shared client.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
httpx.AsyncClient configured for the request.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
RuntimeError: If manager not started.
|
||||||
|
"""
|
||||||
|
if not self._started or not self._default_client:
|
||||||
|
raise RuntimeError("HttpClientManager not started. Call startup() first.")
|
||||||
|
|
||||||
|
if upstream_name and upstream_name in self._upstream_clients:
|
||||||
|
return self._upstream_clients[upstream_name]
|
||||||
|
|
||||||
|
return self._default_client
|
||||||
|
|
||||||
|
async def run_blocking(self, func: Callable[..., Any], *args: Any) -> Any:
|
||||||
|
"""
|
||||||
|
Run a blocking function in the thread pool.
|
||||||
|
|
||||||
|
Use this for:
|
||||||
|
- File I/O operations
|
||||||
|
- Archive extraction (zipfile, tarfile)
|
||||||
|
- Hash computation on large data
|
||||||
|
|
||||||
|
Args:
|
||||||
|
func: Synchronous function to execute
|
||||||
|
*args: Arguments to pass to the function
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The function's return value.
|
||||||
|
"""
|
||||||
|
if not self._executor:
|
||||||
|
raise RuntimeError("HttpClientManager not started. Call startup() first.")
|
||||||
|
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
return await loop.run_in_executor(self._executor, func, *args)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def active_connections(self) -> int:
|
||||||
|
"""Get approximate number of active connections (for health checks)."""
|
||||||
|
if not self._default_client:
|
||||||
|
return 0
|
||||||
|
# httpx doesn't expose this directly, return pool size as approximation
|
||||||
|
return self.max_connections
|
||||||
|
|
||||||
|
@property
|
||||||
|
def pool_size(self) -> int:
|
||||||
|
"""Get configured pool size."""
|
||||||
|
return self.max_connections
|
||||||
|
|
||||||
|
@property
|
||||||
|
def executor_active(self) -> int:
|
||||||
|
"""Get number of active thread pool workers."""
|
||||||
|
if not self._executor:
|
||||||
|
return 0
|
||||||
|
return len(self._executor._threads)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def executor_max(self) -> int:
|
||||||
|
"""Get max thread pool workers."""
|
||||||
|
return self.worker_threads
|
||||||
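Beyond the lifespan wiring in main.py below, callers use the manager in two ways: grab the shared pooled client for upstream requests, and push blocking work onto its thread pool instead of the event loop. A sketch (the fetch-and-hash helper is illustrative, not code from this change):

    import hashlib

    async def fetch_and_hash(manager: "HttpClientManager", url: str) -> str:
        client = manager.get_client()          # shared pooled httpx.AsyncClient
        resp = await client.get(url)
        resp.raise_for_status()
        # Hash the body on the manager's thread pool rather than blocking the loop.
        return await manager.run_blocking(
            lambda: hashlib.sha256(resp.content).hexdigest()
        )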
@@ -11,9 +11,12 @@ from slowapi.errors import RateLimitExceeded
 from .config import get_settings
 from .database import init_db, SessionLocal
 from .routes import router
+from .pypi_proxy import router as pypi_router
 from .seed import seed_database
 from .auth import create_default_admin
 from .rate_limit import limiter
+from .http_client import HttpClientManager
+from .cache_service import CacheService

 settings = get_settings()
 logging.basicConfig(level=logging.INFO)
@@ -37,6 +40,17 @@ async def lifespan(app: FastAPI):
         finally:
             db.close()

+    # Initialize infrastructure services
+    logger.info("Initializing infrastructure services...")
+
+    app.state.http_client = HttpClientManager(settings)
+    await app.state.http_client.startup()
+
+    app.state.cache = CacheService(settings)
+    await app.state.cache.startup()
+
+    logger.info("Infrastructure services ready")
+
     # Seed test data in development mode
     if settings.is_development:
         logger.info(f"Running in {settings.env} mode - checking for seed data")
@@ -49,7 +63,12 @@ async def lifespan(app: FastAPI):
         logger.info(f"Running in {settings.env} mode - skipping seed data")

     yield
-    # Shutdown: cleanup if needed
+
+    # Shutdown infrastructure services
+    logger.info("Shutting down infrastructure services...")
+    await app.state.http_client.shutdown()
+    await app.state.cache.shutdown()
+    logger.info("Shutdown complete")


 app = FastAPI(
@@ -65,6 +84,7 @@ app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)

 # Include API routes
 app.include_router(router)
+app.include_router(pypi_router)

 # Serve static files (React build) if the directory exists
 static_dir = os.path.join(os.path.dirname(__file__), "..", "..", "frontend", "dist")
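Because the lifespan parks both managers on app.state, request handlers can reach them without module-level globals. A hedged sketch of what a handler in routes.py or pypi_proxy.py might do (the endpoint path is invented, and CacheService internals are not shown in this diff):

    from fastapi import APIRouter, Request

    router = APIRouter()

    @router.get("/healthz/pools")               # invented path for illustration
    async def pool_status(request: Request):
        http_client = request.app.state.http_client
        cache = request.app.state.cache
        return {
            "http_pool_size": http_client.pool_size,
            "executor_max": http_client.executor_max,
            "cache_service": type(cache).__name__,
        }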
@@ -12,6 +12,7 @@ from sqlalchemy import (
     Index,
     JSON,
     ARRAY,
+    LargeBinary,
 )
 from sqlalchemy.dialects.postgresql import UUID
 from sqlalchemy.orm import relationship, declarative_base
@@ -27,6 +28,7 @@ class Project(Base):
     name = Column(String(255), unique=True, nullable=False)
     description = Column(Text)
     is_public = Column(Boolean, default=True)
+    is_system = Column(Boolean, default=False, nullable=False)
     created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
     updated_at = Column(
         DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
@@ -46,6 +48,7 @@ class Project(Base):
         Index("idx_projects_name", "name"),
         Index("idx_projects_created_by", "created_by"),
         Index("idx_projects_team_id", "team_id"),
+        Index("idx_projects_is_system", "is_system"),
     )


@@ -68,7 +71,6 @@ class Package(Base):
     )

     project = relationship("Project", back_populates="packages")
-    tags = relationship("Tag", back_populates="package", cascade="all, delete-orphan")
     uploads = relationship(
         "Upload", back_populates="package", cascade="all, delete-orphan"
     )
@@ -117,7 +119,6 @@ class Artifact(Base):
     ref_count = Column(Integer, default=1)
     s3_key = Column(String(1024), nullable=False)

-    tags = relationship("Tag", back_populates="artifact")
     uploads = relationship("Upload", back_populates="artifact")
     versions = relationship("PackageVersion", back_populates="artifact")
     dependencies = relationship(
@@ -148,65 +149,6 @@ class Artifact(Base):
     )


-class Tag(Base):
-    __tablename__ = "tags"
-
-    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
-    package_id = Column(
-        UUID(as_uuid=True),
-        ForeignKey("packages.id", ondelete="CASCADE"),
-        nullable=False,
-    )
-    name = Column(String(255), nullable=False)
-    artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
-    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
-    updated_at = Column(
-        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
-    )
-    created_by = Column(String(255), nullable=False)
-
-    package = relationship("Package", back_populates="tags")
-    artifact = relationship("Artifact", back_populates="tags")
-    history = relationship(
-        "TagHistory", back_populates="tag", cascade="all, delete-orphan"
-    )
-
-    __table_args__ = (
-        Index("idx_tags_package_id", "package_id"),
-        Index("idx_tags_artifact_id", "artifact_id"),
-        Index(
-            "idx_tags_package_name", "package_id", "name", unique=True
-        ),  # Composite unique index
-        Index(
-            "idx_tags_package_created_at", "package_id", "created_at"
-        ),  # For recent tags queries
-    )
-
-
-class TagHistory(Base):
-    __tablename__ = "tag_history"
-
-    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
-    tag_id = Column(
-        UUID(as_uuid=True), ForeignKey("tags.id", ondelete="CASCADE"), nullable=False
-    )
-    old_artifact_id = Column(String(64), ForeignKey("artifacts.id"))
-    new_artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
-    change_type = Column(String(20), nullable=False, default="update")
-    changed_at = Column(DateTime(timezone=True), default=datetime.utcnow)
-    changed_by = Column(String(255), nullable=False)
-
-    tag = relationship("Tag", back_populates="history")
-
-    __table_args__ = (
-        Index("idx_tag_history_tag_id", "tag_id"),
-        Index("idx_tag_history_changed_at", "changed_at"),
-        CheckConstraint(
-            "change_type IN ('create', 'update', 'delete')", name="check_change_type"
-        ),
-    )
-
-
 class PackageVersion(Base):
     """Immutable version record for a package-artifact relationship.
@@ -246,7 +188,7 @@ class Upload(Base):
     artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
     package_id = Column(UUID(as_uuid=True), ForeignKey("packages.id"), nullable=False)
     original_name = Column(String(1024))
-    tag_name = Column(String(255))  # Tag assigned during upload
+    version = Column(String(255))  # Version assigned during upload
     user_agent = Column(String(512))  # Client identification
     duration_ms = Column(Integer)  # Upload timing in milliseconds
     deduplicated = Column(Boolean, default=False)  # Whether artifact was deduplicated
@@ -521,8 +463,8 @@ class PackageHistory(Base):
 class ArtifactDependency(Base):
     """Dependency declared by an artifact on another package.

-    Each artifact can declare dependencies on other packages, specifying either
-    an exact version or a tag. This enables recursive dependency resolution.
+    Each artifact can declare dependencies on other packages, specifying a version.
+    This enables recursive dependency resolution.
     """

     __tablename__ = "artifact_dependencies"
@@ -535,20 +477,13 @@ class ArtifactDependency(Base):
     )
     dependency_project = Column(String(255), nullable=False)
     dependency_package = Column(String(255), nullable=False)
-    version_constraint = Column(String(255), nullable=True)
-    tag_constraint = Column(String(255), nullable=True)
+    version_constraint = Column(String(255), nullable=False)
     created_at = Column(DateTime(timezone=True), default=datetime.utcnow)

     # Relationship to the artifact that declares this dependency
     artifact = relationship("Artifact", back_populates="dependencies")

     __table_args__ = (
-        # Exactly one of version_constraint or tag_constraint must be set
-        CheckConstraint(
-            "(version_constraint IS NOT NULL AND tag_constraint IS NULL) OR "
-            "(version_constraint IS NULL AND tag_constraint IS NOT NULL)",
-            name="check_constraint_type",
-        ),
         # Each artifact can only depend on a specific project/package once
         Index(
             "idx_artifact_dependencies_artifact_id",
@@ -637,3 +572,166 @@ class TeamMembership(Base):
             name="check_team_role",
         ),
     )
+
+
+# =============================================================================
+# Upstream Caching Models
+# =============================================================================
+
+# Valid source types for upstream registries
+SOURCE_TYPES = ["npm", "pypi", "maven", "docker", "helm", "nuget", "deb", "rpm", "generic"]
+
+# Valid authentication types
+AUTH_TYPES = ["none", "basic", "bearer", "api_key"]
+
+
+class UpstreamSource(Base):
+    """Configuration for an upstream artifact registry.
+
+    Stores connection details and authentication for upstream registries
+    like npm, PyPI, Maven Central, or private Artifactory instances.
+    """
+
+    __tablename__ = "upstream_sources"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    name = Column(String(255), unique=True, nullable=False)
+    source_type = Column(String(50), default="generic", nullable=False)
+    url = Column(String(2048), nullable=False)
+    enabled = Column(Boolean, default=False, nullable=False)
+    auth_type = Column(String(20), default="none", nullable=False)
+    username = Column(String(255))
+    password_encrypted = Column(LargeBinary)
+    headers_encrypted = Column(LargeBinary)
+    priority = Column(Integer, default=100, nullable=False)
+    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
+    updated_at = Column(
+        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
+    )
+
+    # Relationships
+    cached_urls = relationship("CachedUrl", back_populates="source")
+
+    __table_args__ = (
+        Index("idx_upstream_sources_enabled", "enabled"),
+        Index("idx_upstream_sources_source_type", "source_type"),
+        Index("idx_upstream_sources_priority", "priority"),
+        CheckConstraint(
+            "source_type IN ('npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic')",
+            name="check_source_type",
+        ),
+        CheckConstraint(
+            "auth_type IN ('none', 'basic', 'bearer', 'api_key')",
+            name="check_auth_type",
+        ),
+        CheckConstraint("priority > 0", name="check_priority_positive"),
+    )
+
+    def set_password(self, password: str) -> None:
+        """Encrypt and store a password/token."""
+        from .encryption import encrypt_value
+
+        if password:
+            self.password_encrypted = encrypt_value(password)
+        else:
+            self.password_encrypted = None
+
+    def get_password(self) -> str | None:
+        """Decrypt and return the stored password/token."""
+        from .encryption import decrypt_value
+
+        if self.password_encrypted:
+            try:
+                return decrypt_value(self.password_encrypted)
+            except Exception:
+                return None
+        return None
+
+    def has_password(self) -> bool:
+        """Check if a password/token is stored."""
+        return self.password_encrypted is not None
+
+    def set_headers(self, headers: dict) -> None:
+        """Encrypt and store custom headers as JSON."""
+        from .encryption import encrypt_value
+        import json
+
+        if headers:
+            self.headers_encrypted = encrypt_value(json.dumps(headers))
+        else:
+            self.headers_encrypted = None
+
+    def get_headers(self) -> dict | None:
+        """Decrypt and return custom headers."""
+        from .encryption import decrypt_value
+        import json
+
+        if self.headers_encrypted:
+            try:
+                return json.loads(decrypt_value(self.headers_encrypted))
+            except Exception:
+                return None
+        return None
+
+
+class CacheSettings(Base):
+    """Global cache settings (singleton table).
+
+    Controls behavior of the upstream caching system.
+    """
+
+    __tablename__ = "cache_settings"
+
+    id = Column(Integer, primary_key=True, default=1)
+    auto_create_system_projects = Column(Boolean, default=True, nullable=False)
+    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
+    updated_at = Column(
+        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
+    )
+
+    __table_args__ = (
+        CheckConstraint("id = 1", name="check_cache_settings_singleton"),
+    )
+
+
+class CachedUrl(Base):
+    """Tracks URL to artifact mappings for provenance.
+
+    Records which URLs have been cached and maps them to their stored artifacts.
+    Enables "is this URL already cached?" lookups and audit trails.
+    """
+
+    __tablename__ = "cached_urls"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    url = Column(String(4096), nullable=False)
+    url_hash = Column(String(64), unique=True, nullable=False)
+    artifact_id = Column(
+        String(64), ForeignKey("artifacts.id"), nullable=False
+    )
+    source_id = Column(
+        UUID(as_uuid=True),
+        ForeignKey("upstream_sources.id", ondelete="SET NULL"),
+    )
+    fetched_at = Column(DateTime(timezone=True), default=datetime.utcnow, nullable=False)
+    response_headers = Column(JSON, default=dict)
+    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
+
+    # Relationships
+    artifact = relationship("Artifact")
+    source = relationship("UpstreamSource", back_populates="cached_urls")
+
+    __table_args__ = (
+        Index("idx_cached_urls_url_hash", "url_hash"),
+        Index("idx_cached_urls_artifact_id", "artifact_id"),
+        Index("idx_cached_urls_source_id", "source_id"),
+        Index("idx_cached_urls_fetched_at", "fetched_at"),
+    )
+
+    @staticmethod
+    def compute_url_hash(url: str) -> str:
+        """Compute SHA256 hash of a URL for fast lookups."""
+        import hashlib
+        return hashlib.sha256(url.encode("utf-8")).hexdigest()
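The set_password/get_headers helpers above import encrypt_value and decrypt_value from an encryption module that is not included in this excerpt. A minimal sketch of what such a module could look like, assuming Fernet symmetric encryption keyed from an environment variable (the variable name is a guess, not taken from the diff):

# Hypothetical sketch of backend/app/encryption.py -- not part of this diff.
# Assumes a Fernet key supplied via an environment variable.
import os

from cryptography.fernet import Fernet, InvalidToken


def _fernet() -> Fernet:
    # ORCHARD_ENCRYPTION_KEY is a placeholder name, not confirmed by the diff.
    key = os.environ["ORCHARD_ENCRYPTION_KEY"]
    return Fernet(key)


def encrypt_value(value: str) -> bytes:
    """Encrypt a string and return ciphertext bytes for a LargeBinary column."""
    return _fernet().encrypt(value.encode("utf-8"))


def decrypt_value(ciphertext: bytes) -> str:
    """Decrypt ciphertext bytes back to the original string."""
    try:
        return _fernet().decrypt(ciphertext).decode("utf-8")
    except InvalidToken as exc:
        raise ValueError("Could not decrypt value") from exc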
202 backend/app/purge_seed_data.py Normal file
@@ -0,0 +1,202 @@
"""
Purge seed/demo data from the database.

This is used when transitioning an environment from dev/test to production-like.
Triggered by setting ORCHARD_PURGE_SEED_DATA=true environment variable.
"""
import logging
import os
from sqlalchemy.orm import Session

from .models import (
    Project,
    Package,
    Artifact,
    Upload,
    PackageVersion,
    ArtifactDependency,
    Team,
    TeamMembership,
    User,
    AccessPermission,
)
from .storage import get_storage

logger = logging.getLogger(__name__)

# Seed data identifiers (from seed.py)
SEED_PROJECT_NAMES = [
    "frontend-libs",
    "backend-services",
    "mobile-apps",
    "internal-tools",
]

SEED_TEAM_SLUG = "demo-team"

SEED_USERNAMES = [
    "alice",
    "bob",
    "charlie",
    "diana",
    "eve",
    "frank",
]


def should_purge_seed_data() -> bool:
    """Check if seed data should be purged based on environment variable."""
    return os.environ.get("ORCHARD_PURGE_SEED_DATA", "").lower() == "true"


def purge_seed_data(db: Session) -> dict:
    """
    Purge all seed/demo data from the database.

    Returns a dict with counts of deleted items.
    """
    logger.warning("PURGING SEED DATA - This will delete demo projects, users, and teams")

    results = {
        "dependencies_deleted": 0,
        "versions_deleted": 0,
        "uploads_deleted": 0,
        "artifacts_deleted": 0,
        "packages_deleted": 0,
        "projects_deleted": 0,
        "permissions_deleted": 0,
        "team_memberships_deleted": 0,
        "users_deleted": 0,
        "teams_deleted": 0,
        "s3_objects_deleted": 0,
    }

    storage = get_storage()

    # Find seed projects
    seed_projects = db.query(Project).filter(Project.name.in_(SEED_PROJECT_NAMES)).all()
    seed_project_ids = [p.id for p in seed_projects]

    if not seed_projects:
        logger.info("No seed projects found, nothing to purge")
        return results

    logger.info(f"Found {len(seed_projects)} seed projects to purge")

    # Find packages in seed projects
    seed_packages = db.query(Package).filter(Package.project_id.in_(seed_project_ids)).all()
    seed_package_ids = [p.id for p in seed_packages]

    # Find artifacts in seed packages (via uploads)
    seed_uploads = db.query(Upload).filter(Upload.package_id.in_(seed_package_ids)).all()
    seed_artifact_ids = list(set(u.artifact_id for u in seed_uploads))

    # Delete in order (respecting foreign keys)

    # 1. Delete artifact dependencies
    if seed_artifact_ids:
        count = db.query(ArtifactDependency).filter(
            ArtifactDependency.artifact_id.in_(seed_artifact_ids)
        ).delete(synchronize_session=False)
        results["dependencies_deleted"] = count
        logger.info(f"Deleted {count} artifact dependencies")

    # 2. Delete package versions
    if seed_package_ids:
        count = db.query(PackageVersion).filter(
            PackageVersion.package_id.in_(seed_package_ids)
        ).delete(synchronize_session=False)
        results["versions_deleted"] = count
        logger.info(f"Deleted {count} package versions")

    # 3. Delete uploads
    if seed_package_ids:
        count = db.query(Upload).filter(Upload.package_id.in_(seed_package_ids)).delete(
            synchronize_session=False
        )
        results["uploads_deleted"] = count
        logger.info(f"Deleted {count} uploads")

    # 4. Delete S3 objects for seed artifacts
    if seed_artifact_ids:
        seed_artifacts = db.query(Artifact).filter(Artifact.id.in_(seed_artifact_ids)).all()
        for artifact in seed_artifacts:
            if artifact.s3_key:
                try:
                    storage.client.delete_object(Bucket=storage.bucket, Key=artifact.s3_key)
                    results["s3_objects_deleted"] += 1
                except Exception as e:
                    logger.warning(f"Failed to delete S3 object {artifact.s3_key}: {e}")
        logger.info(f"Deleted {results['s3_objects_deleted']} S3 objects")

    # 5. Delete artifacts (only those with ref_count that would be 0 after our deletions)
    # Since we deleted all versions pointing to these artifacts, we can delete them
    if seed_artifact_ids:
        count = db.query(Artifact).filter(Artifact.id.in_(seed_artifact_ids)).delete(
            synchronize_session=False
        )
        results["artifacts_deleted"] = count
        logger.info(f"Deleted {count} artifacts")

    # 6. Delete packages
    if seed_package_ids:
        count = db.query(Package).filter(Package.id.in_(seed_package_ids)).delete(
            synchronize_session=False
        )
        results["packages_deleted"] = count
        logger.info(f"Deleted {count} packages")

    # 7. Delete access permissions for seed projects
    if seed_project_ids:
        count = db.query(AccessPermission).filter(
            AccessPermission.project_id.in_(seed_project_ids)
        ).delete(synchronize_session=False)
        results["permissions_deleted"] = count
        logger.info(f"Deleted {count} access permissions")

    # 8. Delete seed projects
    count = db.query(Project).filter(Project.name.in_(SEED_PROJECT_NAMES)).delete(
        synchronize_session=False
    )
    results["projects_deleted"] = count
    logger.info(f"Deleted {count} projects")

    # 9. Find and delete seed team
    seed_team = db.query(Team).filter(Team.slug == SEED_TEAM_SLUG).first()
    if seed_team:
        # Delete team memberships first
        count = db.query(TeamMembership).filter(
            TeamMembership.team_id == seed_team.id
        ).delete(synchronize_session=False)
        results["team_memberships_deleted"] = count
        logger.info(f"Deleted {count} team memberships")

        # Delete the team
        db.delete(seed_team)
        results["teams_deleted"] = 1
        logger.info(f"Deleted team: {SEED_TEAM_SLUG}")

    # 10. Delete seed users (but NOT admin)
    seed_users = db.query(User).filter(User.username.in_(SEED_USERNAMES)).all()
    for user in seed_users:
        # Delete any remaining team memberships for this user
        db.query(TeamMembership).filter(TeamMembership.user_id == user.id).delete(
            synchronize_session=False
        )
        # Delete any access permissions for this user
        # Note: AccessPermission.user_id is VARCHAR (username), not UUID
        db.query(AccessPermission).filter(AccessPermission.user_id == user.username).delete(
            synchronize_session=False
        )
        db.delete(user)
        results["users_deleted"] += 1

    if results["users_deleted"] > 0:
        logger.info(f"Deleted {results['users_deleted']} seed users")

    db.commit()

    logger.warning("SEED DATA PURGE COMPLETE")
    logger.info(f"Purge results: {results}")

    return results
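The call site for purge_seed_data is not shown in this diff. A plausible startup hook, assuming the app exposes a SessionLocal session factory (the module paths here are assumptions):

# Hypothetical startup hook -- the real call site is not part of this excerpt.
from app.database import SessionLocal
from app.purge_seed_data import purge_seed_data, should_purge_seed_data


def maybe_purge_seed_data() -> None:
    # Only runs when ORCHARD_PURGE_SEED_DATA=true is set in the environment.
    if not should_purge_seed_data():
        return
    db = SessionLocal()
    try:
        results = purge_seed_data(db)
        print(f"Seed data purge finished: {results}")
    finally:
        db.close()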
1176 backend/app/pypi_proxy.py Normal file
File diff suppressed because it is too large
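The suppressed pypi_proxy.py defines fetch_and_cache_pypi_package, which registry_client.py (below) imports. Its implementation is not visible here; the following is only a hedged sketch of its shape, inferred from that call site, with a placeholder body:

# Hedged sketch of the helper's signature, inferred only from how
# PyPIRegistryClient.fetch_package calls it. The body is a placeholder,
# not the suppressed implementation.
from typing import Optional

import httpx
from sqlalchemy.orm import Session


async def fetch_and_cache_pypi_package(
    db: Session,
    storage,
    http_client: httpx.AsyncClient,
    package_name: str,
    filename: str,
    download_url: str,
    expected_sha256: Optional[str] = None,
) -> Optional[dict]:
    """Download one distribution file, store it, and record an Artifact.

    The caller in registry_client.py expects a dict with at least
    'artifact_id', 'size', and 'already_cached', or None on failure.
    """
    raise NotImplementedError("see backend/app/pypi_proxy.py in the full diff")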
426 backend/app/registry_client.py Normal file
@@ -0,0 +1,426 @@
"""
Registry client abstraction for upstream package registries.

Provides a pluggable interface for fetching packages from upstream registries
(PyPI, npm, Maven, etc.) during dependency resolution with auto-fetch enabled.
"""

import hashlib
import logging
import os
import re
import tempfile
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import List, Optional, TYPE_CHECKING
from urllib.parse import urljoin, urlparse

import httpx
from packaging.specifiers import SpecifierSet, InvalidSpecifier
from packaging.version import Version, InvalidVersion
from sqlalchemy.orm import Session

if TYPE_CHECKING:
    from .storage import S3Storage
    from .http_client import HttpClientManager

logger = logging.getLogger(__name__)


@dataclass
class VersionInfo:
    """Information about a package version from an upstream registry."""

    version: str
    download_url: str
    filename: str
    sha256: Optional[str] = None
    size: Optional[int] = None
    content_type: Optional[str] = None


@dataclass
class FetchResult:
    """Result of fetching a package from upstream."""

    artifact_id: str  # SHA256 hash
    size: int
    version: str
    filename: str
    already_cached: bool = False


class RegistryClient(ABC):
    """Abstract base class for upstream registry clients."""

    @property
    @abstractmethod
    def source_type(self) -> str:
        """Return the source type this client handles (e.g., 'pypi', 'npm')."""
        pass

    @abstractmethod
    async def get_available_versions(self, package_name: str) -> List[str]:
        """
        Get all available versions of a package from upstream.

        Args:
            package_name: The normalized package name

        Returns:
            List of version strings, sorted from oldest to newest
        """
        pass

    @abstractmethod
    async def resolve_constraint(
        self, package_name: str, constraint: str
    ) -> Optional[VersionInfo]:
        """
        Find the best version matching a constraint.

        Args:
            package_name: The normalized package name
            constraint: Version constraint (e.g., '>=1.9', '<2.0,>=1.5', '*')

        Returns:
            VersionInfo with download URL, or None if no matching version found
        """
        pass

    @abstractmethod
    async def fetch_package(
        self,
        package_name: str,
        version_info: VersionInfo,
        db: Session,
        storage: "S3Storage",
    ) -> Optional[FetchResult]:
        """
        Fetch and cache a package from upstream.

        Args:
            package_name: The normalized package name
            version_info: Version details including download URL
            db: Database session for creating records
            storage: S3 storage for caching the artifact

        Returns:
            FetchResult with artifact_id, or None if fetch failed
        """
        pass


class PyPIRegistryClient(RegistryClient):
    """PyPI registry client using the JSON API."""

    # Timeout configuration for PyPI requests
    CONNECT_TIMEOUT = 30.0
    READ_TIMEOUT = 60.0
    DOWNLOAD_TIMEOUT = 300.0  # Longer timeout for file downloads

    def __init__(
        self,
        http_client: httpx.AsyncClient,
        upstream_sources: List,
        pypi_api_url: str = "https://pypi.org/pypi",
    ):
        """
        Initialize PyPI registry client.

        Args:
            http_client: Shared async HTTP client
            upstream_sources: List of configured upstream sources for auth
            pypi_api_url: Base URL for PyPI JSON API
        """
        self.client = http_client
        self.sources = upstream_sources
        self.api_url = pypi_api_url

    @property
    def source_type(self) -> str:
        return "pypi"

    def _normalize_package_name(self, name: str) -> str:
        """Normalize a PyPI package name per PEP 503."""
        return re.sub(r"[-_.]+", "-", name).lower()

    def _get_auth_headers(self) -> dict:
        """Get authentication headers from configured sources."""
        headers = {"User-Agent": "Orchard-Registry-Client/1.0"}
        if self.sources:
            source = self.sources[0]
            if hasattr(source, "auth_type"):
                if source.auth_type == "bearer":
                    password = (
                        source.get_password()
                        if hasattr(source, "get_password")
                        else getattr(source, "password", None)
                    )
                    if password:
                        headers["Authorization"] = f"Bearer {password}"
                elif source.auth_type == "api_key":
                    custom_headers = (
                        source.get_headers()
                        if hasattr(source, "get_headers")
                        else {}
                    )
                    if custom_headers:
                        headers.update(custom_headers)
        return headers

    def _get_basic_auth(self) -> Optional[tuple]:
        """Get basic auth credentials if configured."""
        if self.sources:
            source = self.sources[0]
            if hasattr(source, "auth_type") and source.auth_type == "basic":
                username = getattr(source, "username", None)
                if username:
                    password = (
                        source.get_password()
                        if hasattr(source, "get_password")
                        else getattr(source, "password", "")
                    )
                    return (username, password or "")
        return None

    async def get_available_versions(self, package_name: str) -> List[str]:
        """Get all available versions from PyPI JSON API."""
        normalized = self._normalize_package_name(package_name)
        url = f"{self.api_url}/{normalized}/json"

        headers = self._get_auth_headers()
        auth = self._get_basic_auth()
        timeout = httpx.Timeout(self.READ_TIMEOUT, connect=self.CONNECT_TIMEOUT)

        try:
            response = await self.client.get(
                url, headers=headers, auth=auth, timeout=timeout
            )

            if response.status_code == 404:
                logger.debug(f"Package {normalized} not found on PyPI")
                return []

            if response.status_code != 200:
                logger.warning(
                    f"PyPI API returned {response.status_code} for {normalized}"
                )
                return []

            data = response.json()
            releases = data.get("releases", {})

            # Filter to valid versions and sort
            versions = []
            for v in releases.keys():
                try:
                    Version(v)
                    versions.append(v)
                except InvalidVersion:
                    continue

            versions.sort(key=lambda x: Version(x))
            return versions

        except httpx.RequestError as e:
            logger.warning(f"Failed to query PyPI for {normalized}: {e}")
            return []
        except Exception as e:
            logger.warning(f"Error parsing PyPI response for {normalized}: {e}")
            return []

    async def resolve_constraint(
        self, package_name: str, constraint: str
    ) -> Optional[VersionInfo]:
        """Find best version matching constraint from PyPI."""
        normalized = self._normalize_package_name(package_name)
        url = f"{self.api_url}/{normalized}/json"

        headers = self._get_auth_headers()
        auth = self._get_basic_auth()
        timeout = httpx.Timeout(self.READ_TIMEOUT, connect=self.CONNECT_TIMEOUT)

        try:
            response = await self.client.get(
                url, headers=headers, auth=auth, timeout=timeout
            )

            if response.status_code == 404:
                logger.debug(f"Package {normalized} not found on PyPI")
                return None

            if response.status_code != 200:
                logger.warning(
                    f"PyPI API returned {response.status_code} for {normalized}"
                )
                return None

            data = response.json()
            releases = data.get("releases", {})

            # Handle wildcard - return latest version
            if constraint == "*":
                latest_version = data.get("info", {}).get("version")
                if latest_version and latest_version in releases:
                    return self._get_version_info(
                        normalized, latest_version, releases[latest_version]
                    )
                return None

            # Parse constraint
            # If constraint looks like a bare version (no operator), treat as exact match
            # e.g., "2025.10.5" -> "==2025.10.5"
            effective_constraint = constraint
            if constraint and constraint[0].isdigit():
                effective_constraint = f"=={constraint}"
                logger.debug(
                    f"Bare version '{constraint}' for {normalized}, "
                    f"treating as exact match '{effective_constraint}'"
                )

            try:
                specifier = SpecifierSet(effective_constraint)
            except InvalidSpecifier:
                # Invalid constraint - treat as wildcard
                logger.warning(
                    f"Invalid version constraint '{constraint}' for {normalized}, "
                    "treating as wildcard"
                )
                latest_version = data.get("info", {}).get("version")
                if latest_version and latest_version in releases:
                    return self._get_version_info(
                        normalized, latest_version, releases[latest_version]
                    )
                return None

            # Find matching versions
            matching = []
            for v_str, files in releases.items():
                if not files:  # Skip versions with no files
                    continue
                try:
                    v = Version(v_str)
                    if v in specifier:
                        matching.append((v_str, v, files))
                except InvalidVersion:
                    continue

            if not matching:
                logger.debug(
                    f"No versions of {normalized} match constraint '{constraint}'"
                )
                return None

            # Sort by version and return highest match
            matching.sort(key=lambda x: x[1], reverse=True)
            best_version, _, best_files = matching[0]

            return self._get_version_info(normalized, best_version, best_files)

        except httpx.RequestError as e:
            logger.warning(f"Failed to query PyPI for {normalized}: {e}")
            return None
        except Exception as e:
            logger.warning(f"Error resolving {normalized}@{constraint}: {e}")
            return None

    def _get_version_info(
        self, package_name: str, version: str, files: List[dict]
    ) -> Optional[VersionInfo]:
        """Extract download info from PyPI release files."""
        if not files:
            return None

        # Prefer wheel over sdist
        wheel_file = None
        sdist_file = None

        for f in files:
            filename = f.get("filename", "")
            if filename.endswith(".whl"):
                # Prefer platform-agnostic wheels
                if "py3-none-any" in filename or wheel_file is None:
                    wheel_file = f
            elif filename.endswith(".tar.gz") and sdist_file is None:
                sdist_file = f

        selected = wheel_file or sdist_file
        if not selected:
            # Fall back to first available file
            selected = files[0]

        return VersionInfo(
            version=version,
            download_url=selected.get("url", ""),
            filename=selected.get("filename", ""),
            sha256=selected.get("digests", {}).get("sha256"),
            size=selected.get("size"),
            content_type="application/zip"
            if selected.get("filename", "").endswith(".whl")
            else "application/gzip",
        )

    async def fetch_package(
        self,
        package_name: str,
        version_info: VersionInfo,
        db: Session,
        storage: "S3Storage",
    ) -> Optional[FetchResult]:
        """Fetch and cache a PyPI package."""
        # Import here to avoid circular imports
        from .pypi_proxy import fetch_and_cache_pypi_package

        normalized = self._normalize_package_name(package_name)

        logger.info(
            f"Fetching {normalized}=={version_info.version} from upstream PyPI"
        )

        result = await fetch_and_cache_pypi_package(
            db=db,
            storage=storage,
            http_client=self.client,
            package_name=normalized,
            filename=version_info.filename,
            download_url=version_info.download_url,
            expected_sha256=version_info.sha256,
        )

        if result is None:
            return None

        return FetchResult(
            artifact_id=result["artifact_id"],
            size=result["size"],
            version=version_info.version,
            filename=version_info.filename,
            already_cached=result.get("already_cached", False),
        )


def get_registry_client(
    source_type: str,
    http_client: httpx.AsyncClient,
    upstream_sources: List,
) -> Optional[RegistryClient]:
    """
    Factory function to get a registry client for a source type.

    Args:
        source_type: The registry type ('pypi', 'npm', etc.)
        http_client: Shared async HTTP client
        upstream_sources: List of configured upstream sources

    Returns:
        RegistryClient for the source type, or None if not supported
    """
    if source_type == "pypi":
        # Filter to PyPI sources
        pypi_sources = [s for s in upstream_sources if getattr(s, "source_type", "") == "pypi"]
        return PyPIRegistryClient(http_client, pypi_sources)

    # Future: Add npm, maven, etc.
    logger.debug(f"No registry client available for source type: {source_type}")
    return None
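A short usage sketch of the factory and client above, assuming the module is importable as app.registry_client and that no upstream sources are configured:

# Usage sketch only; the real caller is the dependency-resolution code,
# which is not shown in this excerpt.
import asyncio

import httpx

from app.registry_client import get_registry_client


async def demo() -> None:
    async with httpx.AsyncClient() as http_client:
        client = get_registry_client("pypi", http_client, upstream_sources=[])
        info = await client.resolve_constraint("requests", ">=2.31")
        if info is None:
            print("no matching version")
            return
        print(f"would fetch {info.filename} from {info.download_url}")
        # client.fetch_package(...) would then stream the file into storage and
        # record an Artifact row; it needs a real db session and S3Storage.


asyncio.run(demo())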
@@ -9,7 +9,6 @@ from .base import BaseRepository
 from .project import ProjectRepository
 from .package import PackageRepository
 from .artifact import ArtifactRepository
-from .tag import TagRepository
 from .upload import UploadRepository

 __all__ = [
@@ -17,6 +16,5 @@ __all__ = [
     "ProjectRepository",
     "PackageRepository",
     "ArtifactRepository",
-    "TagRepository",
     "UploadRepository",
 ]
@@ -8,7 +8,7 @@ from sqlalchemy import func, or_
 from uuid import UUID

 from .base import BaseRepository
-from ..models import Artifact, Tag, Upload, Package, Project
+from ..models import Artifact, PackageVersion, Upload, Package, Project


 class ArtifactRepository(BaseRepository[Artifact]):
@@ -77,14 +77,14 @@ class ArtifactRepository(BaseRepository[Artifact]):
             .all()
         )

-    def get_artifacts_without_tags(self, limit: int = 100) -> List[Artifact]:
-        """Get artifacts that have no tags pointing to them."""
-        # Subquery to find artifact IDs that have tags
-        tagged_artifacts = self.db.query(Tag.artifact_id).distinct().subquery()
+    def get_artifacts_without_versions(self, limit: int = 100) -> List[Artifact]:
+        """Get artifacts that have no versions pointing to them."""
+        # Subquery to find artifact IDs that have versions
+        versioned_artifacts = self.db.query(PackageVersion.artifact_id).distinct().subquery()

         return (
             self.db.query(Artifact)
-            .filter(~Artifact.id.in_(tagged_artifacts))
+            .filter(~Artifact.id.in_(versioned_artifacts))
             .limit(limit)
             .all()
         )
@@ -115,34 +115,34 @@ class ArtifactRepository(BaseRepository[Artifact]):

         return artifacts, total

-    def get_referencing_tags(self, artifact_id: str) -> List[Tuple[Tag, Package, Project]]:
-        """Get all tags referencing this artifact with package and project info."""
+    def get_referencing_versions(self, artifact_id: str) -> List[Tuple[PackageVersion, Package, Project]]:
+        """Get all versions referencing this artifact with package and project info."""
         return (
-            self.db.query(Tag, Package, Project)
-            .join(Package, Tag.package_id == Package.id)
+            self.db.query(PackageVersion, Package, Project)
+            .join(Package, PackageVersion.package_id == Package.id)
             .join(Project, Package.project_id == Project.id)
-            .filter(Tag.artifact_id == artifact_id)
+            .filter(PackageVersion.artifact_id == artifact_id)
             .all()
         )

-    def search(self, query_str: str, limit: int = 10) -> List[Tuple[Tag, Artifact, str, str]]:
+    def search(self, query_str: str, limit: int = 10) -> List[Tuple[PackageVersion, Artifact, str, str]]:
         """
-        Search artifacts by tag name or original filename.
-        Returns (tag, artifact, package_name, project_name) tuples.
+        Search artifacts by version or original filename.
+        Returns (version, artifact, package_name, project_name) tuples.
         """
         search_lower = query_str.lower()
         return (
-            self.db.query(Tag, Artifact, Package.name, Project.name)
-            .join(Artifact, Tag.artifact_id == Artifact.id)
-            .join(Package, Tag.package_id == Package.id)
+            self.db.query(PackageVersion, Artifact, Package.name, Project.name)
+            .join(Artifact, PackageVersion.artifact_id == Artifact.id)
+            .join(Package, PackageVersion.package_id == Package.id)
             .join(Project, Package.project_id == Project.id)
             .filter(
                 or_(
-                    func.lower(Tag.name).contains(search_lower),
+                    func.lower(PackageVersion.version).contains(search_lower),
                     func.lower(Artifact.original_name).contains(search_lower)
                 )
             )
-            .order_by(Tag.name)
+            .order_by(PackageVersion.version)
             .limit(limit)
             .all()
         )
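The renamed get_artifacts_without_versions helper points at an orphan-cleanup pass that is not part of this diff. A hedged sketch of such a sweep, assuming a SessionLocal session factory, a repository constructor that takes the session, and the get_storage helper seen in purge_seed_data.py:

# Hypothetical cleanup sweep built on ArtifactRepository -- not part of this
# diff. SessionLocal and the ArtifactRepository(db) constructor are assumptions.
from app.database import SessionLocal
from app.repositories.artifact import ArtifactRepository
from app.storage import get_storage


def sweep_orphan_artifacts(batch_size: int = 100) -> int:
    """Delete artifacts that no PackageVersion references any more."""
    db = SessionLocal()
    storage = get_storage()
    removed = 0
    try:
        repo = ArtifactRepository(db)
        for artifact in repo.get_artifacts_without_versions(limit=batch_size):
            if artifact.s3_key:
                storage.client.delete_object(Bucket=storage.bucket, Key=artifact.s3_key)
            db.delete(artifact)
            removed += 1
        db.commit()
    finally:
        db.close()
    return removed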
@@ -8,7 +8,7 @@ from sqlalchemy import func, or_, asc, desc
 from uuid import UUID

 from .base import BaseRepository
-from ..models import Package, Project, Tag, Upload, Artifact
+from ..models import Package, Project, PackageVersion, Upload, Artifact


 class PackageRepository(BaseRepository[Package]):
@@ -136,10 +136,10 @@ class PackageRepository(BaseRepository[Package]):
         return self.update(package, **updates)

     def get_stats(self, package_id: UUID) -> dict:
-        """Get package statistics (tag count, artifact count, total size)."""
-        tag_count = (
-            self.db.query(func.count(Tag.id))
-            .filter(Tag.package_id == package_id)
+        """Get package statistics (version count, artifact count, total size)."""
+        version_count = (
+            self.db.query(func.count(PackageVersion.id))
+            .filter(PackageVersion.package_id == package_id)
             .scalar() or 0
         )

@@ -154,7 +154,7 @@ class PackageRepository(BaseRepository[Package]):
         )

         return {
-            "tag_count": tag_count,
+            "version_count": version_count,
             "artifact_count": artifact_stats[0] if artifact_stats else 0,
             "total_size": artifact_stats[1] if artifact_stats else 0,
         }
@@ -1,168 +0,0 @@
-"""
-Tag repository for data access operations.
-"""
-
-from typing import Optional, List, Tuple
-from sqlalchemy.orm import Session
-from sqlalchemy import func, or_, asc, desc
-from uuid import UUID
-
-from .base import BaseRepository
-from ..models import Tag, TagHistory, Artifact, Package, Project
-
-
-class TagRepository(BaseRepository[Tag]):
-    """Repository for Tag entity operations."""
-
-    model = Tag
-
-    def get_by_name(self, package_id: UUID, name: str) -> Optional[Tag]:
-        """Get tag by name within a package."""
-        return (
-            self.db.query(Tag)
-            .filter(Tag.package_id == package_id, Tag.name == name)
-            .first()
-        )
-
-    def get_with_artifact(self, package_id: UUID, name: str) -> Optional[Tuple[Tag, Artifact]]:
-        """Get tag with its artifact."""
-        return (
-            self.db.query(Tag, Artifact)
-            .join(Artifact, Tag.artifact_id == Artifact.id)
-            .filter(Tag.package_id == package_id, Tag.name == name)
-            .first()
-        )
-
-    def exists_by_name(self, package_id: UUID, name: str) -> bool:
-        """Check if tag with name exists in package."""
-        return self.db.query(
-            self.db.query(Tag)
-            .filter(Tag.package_id == package_id, Tag.name == name)
-            .exists()
-        ).scalar()
-
-    def list_by_package(
-        self,
-        package_id: UUID,
-        page: int = 1,
-        limit: int = 20,
-        search: Optional[str] = None,
-        sort: str = "name",
-        order: str = "asc",
-    ) -> Tuple[List[Tuple[Tag, Artifact]], int]:
-        """
-        List tags in a package with artifact metadata.
-
-        Returns tuple of ((tag, artifact) tuples, total_count).
-        """
-        query = (
-            self.db.query(Tag, Artifact)
-            .join(Artifact, Tag.artifact_id == Artifact.id)
-            .filter(Tag.package_id == package_id)
-        )
-
-        # Apply search filter (tag name or artifact original filename)
-        if search:
-            search_lower = search.lower()
-            query = query.filter(
-                or_(
-                    func.lower(Tag.name).contains(search_lower),
-                    func.lower(Artifact.original_name).contains(search_lower)
-                )
-            )
-
-        # Get total count
-        total = query.count()
-
-        # Apply sorting
-        sort_columns = {
-            "name": Tag.name,
-            "created_at": Tag.created_at,
-        }
-        sort_column = sort_columns.get(sort, Tag.name)
-        if order == "desc":
-            query = query.order_by(desc(sort_column))
-        else:
-            query = query.order_by(asc(sort_column))
-
-        # Apply pagination
-        offset = (page - 1) * limit
-        results = query.offset(offset).limit(limit).all()
-
-        return results, total
-
-    def create_tag(
-        self,
-        package_id: UUID,
-        name: str,
-        artifact_id: str,
-        created_by: str,
-    ) -> Tag:
-        """Create a new tag."""
-        return self.create(
-            package_id=package_id,
-            name=name,
-            artifact_id=artifact_id,
-            created_by=created_by,
-        )
-
-    def update_artifact(
-        self,
-        tag: Tag,
-        new_artifact_id: str,
-        changed_by: str,
-        record_history: bool = True,
-    ) -> Tag:
-        """
-        Update tag to point to a different artifact.
-        Optionally records change in tag history.
-        """
-        old_artifact_id = tag.artifact_id
-
-        if record_history and old_artifact_id != new_artifact_id:
-            history = TagHistory(
-                tag_id=tag.id,
-                old_artifact_id=old_artifact_id,
-                new_artifact_id=new_artifact_id,
-                changed_by=changed_by,
-            )
-            self.db.add(history)
-
-        tag.artifact_id = new_artifact_id
-        tag.created_by = changed_by
-        self.db.flush()
-        return tag
-
-    def get_history(self, tag_id: UUID) -> List[TagHistory]:
-        """Get tag change history."""
-        return (
-            self.db.query(TagHistory)
-            .filter(TagHistory.tag_id == tag_id)
-            .order_by(TagHistory.changed_at.desc())
-            .all()
-        )
-
-    def get_latest_in_package(self, package_id: UUID) -> Optional[Tag]:
-        """Get the most recently created/updated tag in a package."""
-        return (
-            self.db.query(Tag)
-            .filter(Tag.package_id == package_id)
-            .order_by(Tag.created_at.desc())
-            .first()
-        )
-
-    def get_by_artifact(self, artifact_id: str) -> List[Tag]:
-        """Get all tags pointing to an artifact."""
-        return (
-            self.db.query(Tag)
-            .filter(Tag.artifact_id == artifact_id)
-            .all()
-        )
-
-    def count_by_artifact(self, artifact_id: str) -> int:
-        """Count tags pointing to an artifact."""
-        return (
-            self.db.query(func.count(Tag.id))
-            .filter(Tag.artifact_id == artifact_id)
-            .scalar() or 0
-        )
File diff suppressed because it is too large
@@ -33,6 +33,7 @@ class ProjectResponse(BaseModel):
     name: str
     description: Optional[str]
     is_public: bool
+    is_system: bool = False
     created_at: datetime
     updated_at: datetime
     created_by: str
@@ -113,14 +114,6 @@ class PackageUpdate(BaseModel):
     platform: Optional[str] = None


-class TagSummary(BaseModel):
-    """Lightweight tag info for embedding in package responses"""
-
-    name: str
-    artifact_id: str
-    created_at: datetime
-
-
 class PackageDetailResponse(BaseModel):
     """Package with aggregated metadata"""

@@ -133,13 +126,9 @@ class PackageDetailResponse(BaseModel):
     created_at: datetime
     updated_at: datetime
     # Aggregated fields
-    tag_count: int = 0
     artifact_count: int = 0
     total_size: int = 0
-    latest_tag: Optional[str] = None
     latest_upload_at: Optional[datetime] = None
-    # Recent tags (limit 5)
-    recent_tags: List[TagSummary] = []

     class Config:
         from_attributes = True
@@ -164,79 +153,6 @@ class ArtifactResponse(BaseModel):
         from_attributes = True


-# Tag schemas
-class TagCreate(BaseModel):
-    name: str
-    artifact_id: str
-
-
-class TagResponse(BaseModel):
-    id: UUID
-    package_id: UUID
-    name: str
-    artifact_id: str
-    created_at: datetime
-    created_by: str
-    version: Optional[str] = None  # Version of the artifact this tag points to
-
-    class Config:
-        from_attributes = True
-
-
-class TagDetailResponse(BaseModel):
-    """Tag with embedded artifact metadata"""
-
-    id: UUID
-    package_id: UUID
-    name: str
-    artifact_id: str
-    created_at: datetime
-    created_by: str
-    version: Optional[str] = None  # Version of the artifact this tag points to
-    # Artifact metadata
-    artifact_size: int
-    artifact_content_type: Optional[str]
-    artifact_original_name: Optional[str]
-    artifact_created_at: datetime
-    artifact_format_metadata: Optional[Dict[str, Any]] = None
-
-    class Config:
-        from_attributes = True
-
-
-class TagHistoryResponse(BaseModel):
-    """History entry for tag changes"""
-
-    id: UUID
-    tag_id: UUID
-    old_artifact_id: Optional[str]
-    new_artifact_id: str
-    changed_at: datetime
-    changed_by: str
-
-    class Config:
-        from_attributes = True
-
-
-class TagHistoryDetailResponse(BaseModel):
-    """Tag history with artifact metadata for each version"""
-
-    id: UUID
-    tag_id: UUID
-    tag_name: str
-    old_artifact_id: Optional[str]
-    new_artifact_id: str
-    changed_at: datetime
-    changed_by: str
-    # Artifact metadata for new artifact
-    artifact_size: int
-    artifact_original_name: Optional[str]
-    artifact_content_type: Optional[str]
-
-    class Config:
-        from_attributes = True
-
-
 # Audit log schemas
 class AuditLogResponse(BaseModel):
     """Audit log entry response"""
@@ -263,7 +179,7 @@ class UploadHistoryResponse(BaseModel):
|
|||||||
package_name: str
|
package_name: str
|
||||||
project_name: str
|
project_name: str
|
||||||
original_name: Optional[str]
|
original_name: Optional[str]
|
||||||
tag_name: Optional[str]
|
version: Optional[str]
|
||||||
uploaded_at: datetime
|
uploaded_at: datetime
|
||||||
uploaded_by: str
|
uploaded_by: str
|
||||||
source_ip: Optional[str]
|
source_ip: Optional[str]
|
||||||
@@ -294,10 +210,10 @@ class ArtifactProvenanceResponse(BaseModel):
|
|||||||
# Usage statistics
|
# Usage statistics
|
||||||
upload_count: int
|
upload_count: int
|
||||||
# References
|
# References
|
||||||
packages: List[Dict[str, Any]] # List of {project_name, package_name, tag_names}
|
packages: List[Dict[str, Any]] # List of {project_name, package_name, versions}
|
||||||
tags: List[
|
versions: List[
|
||||||
Dict[str, Any]
|
Dict[str, Any]
|
||||||
] # List of {project_name, package_name, tag_name, created_at}
|
] # List of {project_name, package_name, version, created_at}
|
||||||
# Upload history
|
# Upload history
|
||||||
uploads: List[Dict[str, Any]] # List of upload events
|
uploads: List[Dict[str, Any]] # List of upload events
|
||||||
|
|
||||||
@@ -305,18 +221,8 @@ class ArtifactProvenanceResponse(BaseModel):
|
|||||||
from_attributes = True
|
from_attributes = True
|
||||||
|
|
||||||
|
|
||||||
class ArtifactTagInfo(BaseModel):
|
|
||||||
"""Tag info for embedding in artifact responses"""
|
|
||||||
|
|
||||||
id: UUID
|
|
||||||
name: str
|
|
||||||
package_id: UUID
|
|
||||||
package_name: str
|
|
||||||
project_name: str
|
|
||||||
|
|
||||||
|
|
||||||
class ArtifactDetailResponse(BaseModel):
|
class ArtifactDetailResponse(BaseModel):
|
||||||
"""Artifact with list of tags/packages referencing it"""
|
"""Artifact with metadata"""
|
||||||
|
|
||||||
id: str
|
id: str
|
||||||
sha256: str # Explicit SHA256 field (same as id)
|
sha256: str # Explicit SHA256 field (same as id)
|
||||||
@@ -330,14 +236,14 @@ class ArtifactDetailResponse(BaseModel):
|
|||||||
created_by: str
|
created_by: str
|
||||||
ref_count: int
|
ref_count: int
|
||||||
format_metadata: Optional[Dict[str, Any]] = None
|
format_metadata: Optional[Dict[str, Any]] = None
|
||||||
tags: List[ArtifactTagInfo] = []
|
versions: List[Dict[str, Any]] = [] # List of {version, package_name, project_name}
|
||||||
|
|
||||||
class Config:
|
class Config:
|
||||||
from_attributes = True
|
from_attributes = True
|
||||||
|
|
||||||
|
|
||||||
class PackageArtifactResponse(BaseModel):
|
class PackageArtifactResponse(BaseModel):
|
||||||
"""Artifact with tags for package artifact listing"""
|
"""Artifact for package artifact listing"""
|
||||||
|
|
||||||
id: str
|
id: str
|
||||||
sha256: str # Explicit SHA256 field (same as id)
|
sha256: str # Explicit SHA256 field (same as id)
|
||||||
@@ -350,7 +256,7 @@ class PackageArtifactResponse(BaseModel):
|
|||||||
created_at: datetime
|
created_at: datetime
|
||||||
created_by: str
|
created_by: str
|
||||||
format_metadata: Optional[Dict[str, Any]] = None
|
format_metadata: Optional[Dict[str, Any]] = None
|
||||||
tags: List[str] = [] # Tag names pointing to this artifact
|
version: Optional[str] = None # Version from PackageVersion if exists
|
||||||
|
|
||||||
class Config:
|
class Config:
|
||||||
from_attributes = True
|
from_attributes = True
|
||||||
@@ -368,28 +274,9 @@ class GlobalArtifactResponse(BaseModel):
    created_by: str
    format_metadata: Optional[Dict[str, Any]] = None
    ref_count: int = 0
-    # Context from tags/packages
+    # Context from versions/packages
    projects: List[str] = []  # List of project names containing this artifact
    packages: List[str] = []  # List of "project/package" paths
-    tags: List[str] = []  # List of "project/package:tag" references
-
-    class Config:
-        from_attributes = True
-
-
-class GlobalTagResponse(BaseModel):
-    """Tag with project/package context for global listing"""
-
-    id: UUID
-    name: str
-    artifact_id: str
-    created_at: datetime
-    created_by: str
-    project_name: str
-    package_name: str
-    artifact_size: Optional[int] = None
-    artifact_content_type: Optional[str] = None
-    version: Optional[str] = None  # Version of the artifact this tag points to
-
    class Config:
        from_attributes = True
@@ -402,7 +289,6 @@ class UploadResponse(BaseModel):
    size: int
    project: str
    package: str
-    tag: Optional[str]
    version: Optional[str] = None  # Version assigned to this artifact
    version_source: Optional[str] = None  # How version was determined: 'explicit', 'filename', 'metadata'
    checksum_md5: Optional[str] = None
@@ -429,7 +315,6 @@ class ResumableUploadInitRequest(BaseModel):
    filename: str
    content_type: Optional[str] = None
    size: int
-    tag: Optional[str] = None
    version: Optional[str] = None  # Explicit version (auto-detected if not provided)

    @field_validator("expected_hash")
@@ -464,7 +349,7 @@ class ResumableUploadPartResponse(BaseModel):
class ResumableUploadCompleteRequest(BaseModel):
    """Request to complete a resumable upload"""

-    tag: Optional[str] = None
+    pass


class ResumableUploadCompleteResponse(BaseModel):
@@ -474,7 +359,6 @@ class ResumableUploadCompleteResponse(BaseModel):
    size: int
    project: str
    package: str
-    tag: Optional[str]


class ResumableUploadStatusResponse(BaseModel):
@@ -527,7 +411,6 @@ class PackageVersionResponse(BaseModel):
    size: Optional[int] = None
    content_type: Optional[str] = None
    original_name: Optional[str] = None
-    tags: List[str] = []  # Tag names pointing to this artifact

    class Config:
        from_attributes = True
@@ -569,11 +452,10 @@ class SearchResultPackage(BaseModel):


class SearchResultArtifact(BaseModel):
-    """Artifact/tag result for global search"""
+    """Artifact result for global search"""

-    tag_id: UUID
-    tag_name: str
    artifact_id: str
+    version: Optional[str]
    package_id: UUID
    package_name: str
    project_name: str
@@ -611,6 +493,8 @@ class HealthResponse(BaseModel):
    version: str = "1.0.0"
    storage_healthy: Optional[bool] = None
    database_healthy: Optional[bool] = None
+    http_pool: Optional[Dict[str, Any]] = None
+    cache: Optional[Dict[str, Any]] = None


# Garbage collection schemas
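The two new optional fields let the health endpoint surface connection-pool and cache details when they are available. A hedged sketch of what an enriched payload might look like; the keys inside `http_pool` and `cache` are illustrative assumptions, not taken from this diff:

```python
# Hypothetical /health response body with the new optional sections populated.
health_body = {
    "version": "1.0.0",
    "storage_healthy": True,
    "database_healthy": True,
    "http_pool": {"active": 2, "idle": 8},       # assumed keys
    "cache": {"backend": "redis", "hit_rate": 0.93},  # assumed keys
}
```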
@@ -686,7 +570,7 @@ class ProjectStatsResponse(BaseModel):
    project_id: str
    project_name: str
    package_count: int
-    tag_count: int
+    version_count: int
    artifact_count: int
    total_size_bytes: int
    upload_count: int
@@ -701,7 +585,7 @@ class PackageStatsResponse(BaseModel):
    package_id: str
    package_name: str
    project_name: str
-    tag_count: int
+    version_count: int
    artifact_count: int
    total_size_bytes: int
    upload_count: int
@@ -718,9 +602,9 @@ class ArtifactStatsResponse(BaseModel):
    size: int
    ref_count: int
    storage_savings: int  # (ref_count - 1) * size
-    tags: List[Dict[str, Any]]  # Tags referencing this artifact
    projects: List[str]  # Projects using this artifact
    packages: List[str]  # Packages using this artifact
+    versions: List[Dict[str, Any]] = []  # List of {version, package_name, project_name}
    first_uploaded: Optional[datetime] = None
    last_referenced: Optional[datetime] = None

@@ -929,20 +813,7 @@ class DependencyCreate(BaseModel):
    """Schema for creating a dependency"""
    project: str
    package: str
-    version: Optional[str] = None
-    tag: Optional[str] = None
-
-    @field_validator('version', 'tag')
-    @classmethod
-    def validate_constraint(cls, v, info):
-        return v
-
-    def model_post_init(self, __context):
-        """Validate that exactly one of version or tag is set"""
-        if self.version is None and self.tag is None:
-            raise ValueError("Either 'version' or 'tag' must be specified")
-        if self.version is not None and self.tag is not None:
-            raise ValueError("Cannot specify both 'version' and 'tag'")
+    version: str


class DependencyResponse(BaseModel):
@@ -951,8 +822,7 @@ class DependencyResponse(BaseModel):
    artifact_id: str
    project: str
    package: str
-    version: Optional[str] = None
-    tag: Optional[str] = None
+    version: str
    created_at: datetime

    class Config:
@@ -967,7 +837,6 @@ class DependencyResponse(BaseModel):
            project=dep.dependency_project,
            package=dep.dependency_package,
            version=dep.version_constraint,
-            tag=dep.tag_constraint,
            created_at=dep.created_at,
        )

@@ -984,7 +853,6 @@ class DependentInfo(BaseModel):
    project: str
    package: str
    version: Optional[str] = None
-    constraint_type: str  # 'version' or 'tag'
    constraint_value: str


@@ -1000,20 +868,7 @@ class EnsureFileDependency(BaseModel):
    """Dependency entry from orchard.ensure file"""
    project: str
    package: str
-    version: Optional[str] = None
-    tag: Optional[str] = None
-
-    @field_validator('version', 'tag')
-    @classmethod
-    def validate_constraint(cls, v, info):
-        return v
-
-    def model_post_init(self, __context):
-        """Validate that exactly one of version or tag is set"""
-        if self.version is None and self.tag is None:
-            raise ValueError("Either 'version' or 'tag' must be specified")
-        if self.version is not None and self.tag is not None:
-            raise ValueError("Cannot specify both 'version' and 'tag'")
+    version: str


class EnsureFileContent(BaseModel):
@@ -1027,15 +882,26 @@ class ResolvedArtifact(BaseModel):
    project: str
    package: str
    version: Optional[str] = None
-    tag: Optional[str] = None
    size: int
    download_url: str


+class MissingDependency(BaseModel):
+    """A dependency that could not be resolved (not cached on server)"""
+    project: str
+    package: str
+    constraint: Optional[str] = None
+    required_by: Optional[str] = None
+    fetch_attempted: bool = False  # True if auto-fetch was attempted
+    fetch_error: Optional[str] = None  # Error message if fetch failed
+
+
class DependencyResolutionResponse(BaseModel):
    """Response from dependency resolution endpoint"""
    requested: Dict[str, str]  # project, package, ref
    resolved: List[ResolvedArtifact]
+    missing: List[MissingDependency] = []
+    fetched: List[ResolvedArtifact] = []  # Artifacts fetched from upstream during resolution
    total_size: int
    artifact_count: int

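With `missing` and `fetched` added to the resolution response, a client can tell a fully satisfied pull apart from a partial one. A minimal sketch of how a consumer might report the outcome, using only the fields defined in the schemas above (the surrounding client wiring is assumed):

```python
def report_resolution(resp: DependencyResolutionResponse) -> None:
    # Artifacts the server already had, plus any it pulled from upstream during this call.
    for art in resp.resolved + resp.fetched:
        print(f"ready: {art.project}/{art.package} {art.version or ''} ({art.size} bytes)")

    # Dependencies the server could not satisfy, with the auto-fetch outcome if one was attempted.
    for miss in resp.missing:
        detail = miss.fetch_error or ("fetch not attempted" if not miss.fetch_attempted else "fetch failed")
        print(f"missing: {miss.project}/{miss.package} {miss.constraint or ''} - {detail}")
```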
@@ -1044,7 +910,7 @@ class DependencyConflict(BaseModel):
    """Details about a dependency conflict"""
    project: str
    package: str
-    requirements: List[Dict[str, Any]]  # version/tag and required_by info
+    requirements: List[Dict[str, Any]]  # version and required_by info


class DependencyConflictError(BaseModel):
@@ -1196,3 +1062,277 @@ class TeamMemberResponse(BaseModel):
    class Config:
        from_attributes = True


+# =============================================================================
+# Upstream Caching Schemas
+# =============================================================================
+
+# Valid source types
+SOURCE_TYPES = ["npm", "pypi", "maven", "docker", "helm", "nuget", "deb", "rpm", "generic"]
+
+# Valid auth types
+AUTH_TYPES = ["none", "basic", "bearer", "api_key"]
+
+
+class UpstreamSourceCreate(BaseModel):
+    """Create a new upstream source"""
+    name: str
+    source_type: str = "generic"
+    url: str
+    enabled: bool = False
+    auth_type: str = "none"
+    username: Optional[str] = None
+    password: Optional[str] = None  # Write-only
+    headers: Optional[dict] = None  # Write-only, custom headers
+    priority: int = 100
+
+    @field_validator('name')
+    @classmethod
+    def validate_name(cls, v: str) -> str:
+        v = v.strip()
+        if not v:
+            raise ValueError("name cannot be empty")
+        if len(v) > 255:
+            raise ValueError("name must be 255 characters or less")
+        return v
+
+    @field_validator('source_type')
+    @classmethod
+    def validate_source_type(cls, v: str) -> str:
+        if v not in SOURCE_TYPES:
+            raise ValueError(f"source_type must be one of: {', '.join(SOURCE_TYPES)}")
+        return v
+
+    @field_validator('url')
+    @classmethod
+    def validate_url(cls, v: str) -> str:
+        v = v.strip()
+        if not v:
+            raise ValueError("url cannot be empty")
+        if not (v.startswith('http://') or v.startswith('https://')):
+            raise ValueError("url must start with http:// or https://")
+        if len(v) > 2048:
+            raise ValueError("url must be 2048 characters or less")
+        return v
+
+    @field_validator('auth_type')
+    @classmethod
+    def validate_auth_type(cls, v: str) -> str:
+        if v not in AUTH_TYPES:
+            raise ValueError(f"auth_type must be one of: {', '.join(AUTH_TYPES)}")
+        return v
+
+    @field_validator('priority')
+    @classmethod
+    def validate_priority(cls, v: int) -> int:
+        if v <= 0:
+            raise ValueError("priority must be greater than 0")
+        return v
+
+
+class UpstreamSourceUpdate(BaseModel):
+    """Update an upstream source (partial)"""
+    name: Optional[str] = None
+    source_type: Optional[str] = None
+    url: Optional[str] = None
+    enabled: Optional[bool] = None
+    auth_type: Optional[str] = None
+    username: Optional[str] = None
+    password: Optional[str] = None  # Write-only, None = keep existing, empty string = clear
+    headers: Optional[dict] = None  # Write-only
+    priority: Optional[int] = None
+
+    @field_validator('name')
+    @classmethod
+    def validate_name(cls, v: Optional[str]) -> Optional[str]:
+        if v is not None:
+            v = v.strip()
+            if not v:
+                raise ValueError("name cannot be empty")
+            if len(v) > 255:
+                raise ValueError("name must be 255 characters or less")
+        return v
+
+    @field_validator('source_type')
+    @classmethod
+    def validate_source_type(cls, v: Optional[str]) -> Optional[str]:
+        if v is not None and v not in SOURCE_TYPES:
+            raise ValueError(f"source_type must be one of: {', '.join(SOURCE_TYPES)}")
+        return v
+
+    @field_validator('url')
+    @classmethod
+    def validate_url(cls, v: Optional[str]) -> Optional[str]:
+        if v is not None:
+            v = v.strip()
+            if not v:
+                raise ValueError("url cannot be empty")
+            if not (v.startswith('http://') or v.startswith('https://')):
+                raise ValueError("url must start with http:// or https://")
+            if len(v) > 2048:
+                raise ValueError("url must be 2048 characters or less")
+        return v
+
+    @field_validator('auth_type')
+    @classmethod
+    def validate_auth_type(cls, v: Optional[str]) -> Optional[str]:
+        if v is not None and v not in AUTH_TYPES:
+            raise ValueError(f"auth_type must be one of: {', '.join(AUTH_TYPES)}")
+        return v
+
+    @field_validator('priority')
+    @classmethod
+    def validate_priority(cls, v: Optional[int]) -> Optional[int]:
+        if v is not None and v <= 0:
+            raise ValueError("priority must be greater than 0")
+        return v
+
+
+class UpstreamSourceResponse(BaseModel):
+    """Upstream source response (credentials never included)"""
+    id: UUID
+    name: str
+    source_type: str
+    url: str
+    enabled: bool
+    auth_type: str
+    username: Optional[str]
+    has_password: bool  # True if password is set
+    has_headers: bool  # True if custom headers are set
+    priority: int
+    source: str = "database"  # "database" or "env" (env = defined via environment variables)
+    created_at: Optional[datetime] = None  # May be None for legacy/env data
+    updated_at: Optional[datetime] = None  # May be None for legacy/env data
+
+    class Config:
+        from_attributes = True
+
+
+class CacheSettingsResponse(BaseModel):
+    """Global cache settings response"""
+    auto_create_system_projects: bool
+    auto_create_system_projects_env_override: Optional[bool] = None  # Set if overridden by env var
+    created_at: Optional[datetime] = None  # May be None for legacy data
+    updated_at: Optional[datetime] = None  # May be None for legacy data
+
+    class Config:
+        from_attributes = True
+
+
+class CacheSettingsUpdate(BaseModel):
+    """Update cache settings (partial)"""
+    auto_create_system_projects: Optional[bool] = None
+
+
+class CachedUrlResponse(BaseModel):
+    """Cached URL response"""
+    id: UUID
+    url: str
+    url_hash: str
+    artifact_id: str
+    source_id: Optional[UUID]
+    source_name: Optional[str] = None  # Populated from join
+    fetched_at: datetime
+    created_at: datetime
+
+    class Config:
+        from_attributes = True
+
+
+class CacheRequest(BaseModel):
+    """Request to cache an artifact from an upstream URL"""
+    url: str
+    source_type: str
+    package_name: Optional[str] = None  # Auto-derived from URL if not provided
+    version: Optional[str] = None  # Auto-derived from URL if not provided
+    user_project: Optional[str] = None  # Cross-reference to user project
+    user_package: Optional[str] = None
+    user_version: Optional[str] = None
+    expected_hash: Optional[str] = None  # Verify downloaded content
+
+    @field_validator('url')
+    @classmethod
+    def validate_url(cls, v: str) -> str:
+        v = v.strip()
+        if not v:
+            raise ValueError("url cannot be empty")
+        if not (v.startswith('http://') or v.startswith('https://')):
+            raise ValueError("url must start with http:// or https://")
+        if len(v) > 4096:
+            raise ValueError("url must be 4096 characters or less")
+        return v
+
+    @field_validator('source_type')
+    @classmethod
+    def validate_source_type(cls, v: str) -> str:
+        if v not in SOURCE_TYPES:
+            raise ValueError(f"source_type must be one of: {', '.join(SOURCE_TYPES)}")
+        return v
+
+    @field_validator('expected_hash')
+    @classmethod
+    def validate_expected_hash(cls, v: Optional[str]) -> Optional[str]:
+        if v is not None:
+            v = v.strip().lower()
+            # Remove sha256: prefix if present
+            if v.startswith('sha256:'):
+                v = v[7:]
+            # Validate hex format
+            if len(v) != 64 or not all(c in '0123456789abcdef' for c in v):
+                raise ValueError("expected_hash must be a 64-character hex string (SHA256)")
+        return v
+
+
+class CacheResponse(BaseModel):
+    """Response from caching an artifact"""
+    artifact_id: str
+    sha256: str
+    size: int
+    content_type: Optional[str]
+    already_cached: bool
+    source_url: str
+    source_name: Optional[str]
+    system_project: str
+    system_package: str
+    system_version: Optional[str]
+    user_reference: Optional[str] = None  # e.g., "my-app/npm-deps/+/4.17.21"
+
+
+class CacheResolveRequest(BaseModel):
+    """Request to cache an artifact by package coordinates (no URL required).
+
+    The server will construct the appropriate URL based on source_type and
+    configured upstream sources.
+    """
+    source_type: str
+    package: str
+    version: str
+    user_project: Optional[str] = None
+    user_package: Optional[str] = None
+    user_version: Optional[str] = None
+
+    @field_validator('source_type')
+    @classmethod
+    def validate_source_type(cls, v: str) -> str:
+        if v not in SOURCE_TYPES:
+            raise ValueError(f"source_type must be one of: {', '.join(SOURCE_TYPES)}")
+        return v
+
+    @field_validator('package')
+    @classmethod
+    def validate_package(cls, v: str) -> str:
+        v = v.strip()
+        if not v:
+            raise ValueError("package cannot be empty")
+        return v
+
+    @field_validator('version')
+    @classmethod
+    def validate_version(cls, v: str) -> str:
+        v = v.strip()
+        if not v:
+            raise ValueError("version cannot be empty")
+        return v
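A short sketch of how these new request models validate input. The concrete values are made up; only the field names and validators come from the schemas above:

```python
from pydantic import ValidationError

# Coordinates-only request: the server resolves the upstream URL itself.
req = CacheResolveRequest(source_type="pypi", package="requests", version="2.31.0")

# An unknown source_type is rejected by the shared validator.
try:
    CacheRequest(url="https://pypi.org/simple/requests/", source_type="cargo")
except ValidationError as exc:
    # Expected to mention the allowed SOURCE_TYPES list.
    print(exc.errors()[0]["msg"])
```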
@@ -5,7 +5,7 @@ import hashlib
import logging
from sqlalchemy.orm import Session

-from .models import Project, Package, Artifact, Tag, Upload, PackageVersion, ArtifactDependency, Team, TeamMembership, User
+from .models import Project, Package, Artifact, Upload, PackageVersion, ArtifactDependency, Team, TeamMembership, User
from .storage import get_storage
from .auth import hash_password

@@ -125,14 +125,14 @@ TEST_ARTIFACTS = [
]

# Dependencies to create (source artifact -> dependency)
-# Format: (source_project, source_package, source_version, dep_project, dep_package, version_constraint, tag_constraint)
+# Format: (source_project, source_package, source_version, dep_project, dep_package, version_constraint)
TEST_DEPENDENCIES = [
    # ui-components v1.1.0 depends on design-tokens v1.0.0
-    ("frontend-libs", "ui-components", "1.1.0", "frontend-libs", "design-tokens", "1.0.0", None),
+    ("frontend-libs", "ui-components", "1.1.0", "frontend-libs", "design-tokens", "1.0.0"),
    # auth-lib v1.0.0 depends on common-utils v2.0.0
-    ("backend-services", "auth-lib", "1.0.0", "backend-services", "common-utils", "2.0.0", None),
+    ("backend-services", "auth-lib", "1.0.0", "backend-services", "common-utils", "2.0.0"),
-    # auth-lib v1.0.0 also depends on design-tokens (stable tag)
-    ("backend-services", "auth-lib", "1.0.0", "frontend-libs", "design-tokens", None, "latest"),
+    # auth-lib v1.0.0 also depends on design-tokens v1.0.0
+    ("backend-services", "auth-lib", "1.0.0", "frontend-libs", "design-tokens", "1.0.0"),
]

@@ -252,9 +252,8 @@ def seed_database(db: Session) -> None:

    logger.info(f"Created {len(project_map)} projects and {len(package_map)} packages (assigned to {demo_team.slug})")

-    # Create artifacts, tags, and versions
+    # Create artifacts and versions
    artifact_count = 0
-    tag_count = 0
    version_count = 0

    for artifact_data in TEST_ARTIFACTS:
@@ -316,23 +315,12 @@ def seed_database(db: Session) -> None:
        db.add(version)
        version_count += 1

-        # Create tags
-        for tag_name in artifact_data["tags"]:
-            tag = Tag(
-                package_id=package.id,
-                name=tag_name,
-                artifact_id=sha256_hash,
-                created_by=team_owner_username,
-            )
-            db.add(tag)
-            tag_count += 1
-
    db.flush()

    # Create dependencies
    dependency_count = 0
    for dep_data in TEST_DEPENDENCIES:
-        src_project, src_package, src_version, dep_project, dep_package, version_constraint, tag_constraint = dep_data
+        src_project, src_package, src_version, dep_project, dep_package, version_constraint = dep_data

        # Find the source artifact by looking up its version
        src_pkg = package_map.get((src_project, src_package))
@@ -356,11 +344,10 @@ def seed_database(db: Session) -> None:
            dependency_project=dep_project,
            dependency_package=dep_package,
            version_constraint=version_constraint,
-            tag_constraint=tag_constraint,
        )
        db.add(dependency)
        dependency_count += 1

    db.commit()
-    logger.info(f"Created {artifact_count} artifacts, {tag_count} tags, {version_count} versions, and {dependency_count} dependencies")
+    logger.info(f"Created {artifact_count} artifacts, {version_count} versions, and {dependency_count} dependencies")
    logger.info("Database seeding complete")
@@ -6,9 +6,8 @@ from typing import List, Optional, Tuple
from sqlalchemy.orm import Session
import logging

-from ..models import Artifact, Tag
+from ..models import Artifact, PackageVersion
from ..repositories.artifact import ArtifactRepository
-from ..repositories.tag import TagRepository
from ..storage import S3Storage

logger = logging.getLogger(__name__)
@@ -21,8 +20,8 @@ class ArtifactCleanupService:
    Reference counting rules:
    - ref_count starts at 1 when artifact is first uploaded
    - ref_count increments when the same artifact is uploaded again (deduplication)
-    - ref_count decrements when a tag is deleted or updated to point elsewhere
-    - ref_count decrements when a package is deleted (for each tag pointing to artifact)
+    - ref_count decrements when a version is deleted or updated to point elsewhere
+    - ref_count decrements when a package is deleted (for each version pointing to artifact)
    - When ref_count reaches 0, artifact is a candidate for deletion from S3
    """

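A hedged walk-through of those rules with the renamed hooks, assuming one artifact uploaded twice and then referenced by a single version; the session/storage wiring and the shortened hashes are placeholders:

```python
cleanup = ArtifactCleanupService(db, storage)  # db session and S3 storage assumed available

# Upload #1 creates the artifact with ref_count == 1; a deduplicated re-upload
# of the same bytes bumps it to 2 (handled in the upload path, not shown here).

# Deleting a version that pointed at the artifact drops the count back by one:
artifact = cleanup.on_version_deleted("e3b0c44298fc...")  # hash truncated for the example

# Repointing a version decrements the old artifact and increments the new one:
old_art, new_art = cleanup.on_version_updated("e3b0c44298fc...", "9f86d081884c...")

# Once ref_count reaches 0, the artifact becomes a garbage-collection candidate in S3.
```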
@@ -30,12 +29,11 @@ class ArtifactCleanupService:
        self.db = db
        self.storage = storage
        self.artifact_repo = ArtifactRepository(db)
-        self.tag_repo = TagRepository(db)

-    def on_tag_deleted(self, artifact_id: str) -> Artifact:
+    def on_version_deleted(self, artifact_id: str) -> Artifact:
        """
-        Called when a tag is deleted.
-        Decrements ref_count for the artifact the tag was pointing to.
+        Called when a version is deleted.
+        Decrements ref_count for the artifact the version was pointing to.
        """
        artifact = self.artifact_repo.get_by_sha256(artifact_id)
        if artifact:
@@ -45,11 +43,11 @@ class ArtifactCleanupService:
        )
        return artifact

-    def on_tag_updated(
+    def on_version_updated(
        self, old_artifact_id: str, new_artifact_id: str
    ) -> Tuple[Optional[Artifact], Optional[Artifact]]:
        """
-        Called when a tag is updated to point to a different artifact.
+        Called when a version is updated to point to a different artifact.
        Decrements ref_count for old artifact, increments for new (if different).

        Returns (old_artifact, new_artifact) tuple.
@@ -79,21 +77,21 @@ class ArtifactCleanupService:
    def on_package_deleted(self, package_id) -> List[str]:
        """
        Called when a package is deleted.
-        Decrements ref_count for all artifacts that had tags in the package.
+        Decrements ref_count for all artifacts that had versions in the package.

        Returns list of artifact IDs that were affected.
        """
-        # Get all tags in the package before deletion
-        tags = self.db.query(Tag).filter(Tag.package_id == package_id).all()
+        # Get all versions in the package before deletion
+        versions = self.db.query(PackageVersion).filter(PackageVersion.package_id == package_id).all()

        affected_artifacts = []
-        for tag in tags:
-            artifact = self.artifact_repo.get_by_sha256(tag.artifact_id)
+        for version in versions:
+            artifact = self.artifact_repo.get_by_sha256(version.artifact_id)
            if artifact:
                self.artifact_repo.decrement_ref_count(artifact)
-                affected_artifacts.append(tag.artifact_id)
+                affected_artifacts.append(version.artifact_id)
                logger.info(
-                    f"Decremented ref_count for artifact {tag.artifact_id} (package delete)"
+                    f"Decremented ref_count for artifact {version.artifact_id} (package delete)"
                )

        return affected_artifacts
@@ -152,7 +150,7 @@ class ArtifactCleanupService:

    def verify_ref_counts(self, fix: bool = False) -> List[dict]:
        """
-        Verify that ref_counts match actual tag references.
+        Verify that ref_counts match actual version references.

        Args:
            fix: If True, fix any mismatched ref_counts
@@ -162,28 +160,28 @@ class ArtifactCleanupService:
        """
        from sqlalchemy import func

-        # Get actual tag counts per artifact
-        tag_counts = (
-            self.db.query(Tag.artifact_id, func.count(Tag.id).label("tag_count"))
-            .group_by(Tag.artifact_id)
+        # Get actual version counts per artifact
+        version_counts = (
+            self.db.query(PackageVersion.artifact_id, func.count(PackageVersion.id).label("version_count"))
+            .group_by(PackageVersion.artifact_id)
            .all()
        )
-        tag_count_map = {artifact_id: count for artifact_id, count in tag_counts}
+        version_count_map = {artifact_id: count for artifact_id, count in version_counts}

        # Check all artifacts
        artifacts = self.db.query(Artifact).all()
        mismatches = []

        for artifact in artifacts:
-            actual_count = tag_count_map.get(artifact.id, 0)
+            actual_count = version_count_map.get(artifact.id, 0)
            # ref_count should be at least 1 (initial upload) + additional uploads
-            # But tags are the primary reference, so we check against tag count
+            # But versions are the primary reference, so we check against version count

            if artifact.ref_count < actual_count:
                mismatch = {
                    "artifact_id": artifact.id,
                    "stored_ref_count": artifact.ref_count,
-                    "actual_tag_count": actual_count,
+                    "actual_version_count": actual_count,
                }
                mismatches.append(mismatch)

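A small sketch of how the audit might be invoked from a maintenance task; the keys below match the mismatch dicts built above, while the session/storage setup is assumed:

```python
cleanup = ArtifactCleanupService(db, storage)

# Report-only pass: one dict per artifact whose stored ref_count is lower
# than the number of PackageVersion rows pointing at it.
for m in cleanup.verify_ref_counts(fix=False):
    print(m["artifact_id"], m["stored_ref_count"], m["actual_version_count"])

# A second pass with fix=True corrects the mismatched counters in place.
cleanup.verify_ref_counts(fix=True)
```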
565    backend/app/upstream.py    Normal file
@@ -0,0 +1,565 @@
"""
HTTP client for fetching artifacts from upstream sources.

Provides streaming downloads with SHA256 computation, authentication support,
and automatic source matching based on URL prefixes.
"""

from __future__ import annotations

import hashlib
import logging
import tempfile
import time
from dataclasses import dataclass, field
from pathlib import Path
from typing import BinaryIO, Optional, TYPE_CHECKING
from urllib.parse import urlparse

import httpx

if TYPE_CHECKING:
    from .models import CacheSettings, UpstreamSource

logger = logging.getLogger(__name__)


class UpstreamError(Exception):
    """Base exception for upstream client errors."""

    pass


class UpstreamConnectionError(UpstreamError):
    """Connection to upstream failed (network error, DNS, etc.)."""

    pass


class UpstreamTimeoutError(UpstreamError):
    """Request to upstream timed out."""

    pass


class UpstreamHTTPError(UpstreamError):
    """Upstream returned an HTTP error response."""

    def __init__(self, message: str, status_code: int, response_headers: dict = None):
        super().__init__(message)
        self.status_code = status_code
        self.response_headers = response_headers or {}


class UpstreamSSLError(UpstreamError):
    """SSL/TLS error when connecting to upstream."""

    pass


class FileSizeExceededError(UpstreamError):
    """File size exceeds the maximum allowed."""

    def __init__(self, message: str, content_length: int, max_size: int):
        super().__init__(message)
        self.content_length = content_length
        self.max_size = max_size


class SourceNotFoundError(UpstreamError):
    """No matching upstream source found for URL."""

    pass


class SourceDisabledError(UpstreamError):
    """The matching upstream source is disabled."""

    pass


@dataclass
class FetchResult:
    """Result of fetching an artifact from upstream."""

    content: BinaryIO  # File-like object with content
    sha256: str  # SHA256 hash of content
    size: int  # Size in bytes
    content_type: Optional[str]  # Content-Type header
    response_headers: dict  # All response headers for provenance
    source_name: Optional[str] = None  # Name of matched upstream source
    temp_path: Optional[Path] = None  # Path to temp file (for cleanup)

    def close(self):
        """Close and clean up resources."""
        if self.content:
            try:
                self.content.close()
            except Exception:
                pass
        if self.temp_path and self.temp_path.exists():
            try:
                self.temp_path.unlink()
            except Exception:
                pass


@dataclass
class UpstreamClientConfig:
    """Configuration for the upstream client."""

    connect_timeout: float = 30.0  # Connection timeout in seconds
    read_timeout: float = 300.0  # Read timeout in seconds (5 minutes for large files)
    max_retries: int = 3  # Maximum number of retry attempts
    retry_backoff_base: float = 1.0  # Base delay for exponential backoff
    retry_backoff_max: float = 30.0  # Maximum delay between retries
    follow_redirects: bool = True  # Whether to follow redirects
    max_redirects: int = 5  # Maximum number of redirects to follow
    max_file_size: Optional[int] = None  # Maximum file size (None = unlimited)
    verify_ssl: bool = True  # Verify SSL certificates
    user_agent: str = "Orchard-UpstreamClient/1.0"


class UpstreamClient:
    """
    HTTP client for fetching artifacts from upstream sources.

    Supports streaming downloads, multiple authentication methods,
    automatic source matching, and air-gap mode enforcement.
    """

    def __init__(
        self,
        sources: list[UpstreamSource] = None,
        cache_settings: CacheSettings = None,
        config: UpstreamClientConfig = None,
    ):
        """
        Initialize the upstream client.

        Args:
            sources: List of upstream sources for URL matching and auth.
                Should be sorted by priority (lowest first).
            cache_settings: Global cache settings including air-gap mode.
            config: Client configuration options.
        """
        self.sources = sources or []
        self.cache_settings = cache_settings
        self.config = config or UpstreamClientConfig()

        # Sort sources by priority (lower = higher priority)
        self.sources = sorted(self.sources, key=lambda s: s.priority)

    def _match_source(self, url: str) -> Optional[UpstreamSource]:
        """
        Find the upstream source that matches the given URL.

        Matches by URL prefix, returns the highest priority match.

        Args:
            url: The URL to match.

        Returns:
            The matching UpstreamSource or None if no match.
        """
        for source in self.sources:
            # Check if URL starts with source URL (prefix match)
            if url.startswith(source.url.rstrip("/")):
                return source

        return None

    def _build_auth_headers(self, source: UpstreamSource) -> dict:
        """
        Build authentication headers for the given source.

        Args:
            source: The upstream source with auth configuration.

        Returns:
            Dictionary of headers to add to the request.
        """
        headers = {}

        if source.auth_type == "none":
            pass
        elif source.auth_type == "basic":
            # httpx handles basic auth via auth parameter, but we can also
            # do it manually if needed. We'll use the auth parameter instead.
            pass
        elif source.auth_type == "bearer":
            password = source.get_password()
            if password:
                headers["Authorization"] = f"Bearer {password}"
        elif source.auth_type == "api_key":
            # API key auth uses custom headers
            custom_headers = source.get_headers()
            if custom_headers:
                headers.update(custom_headers)

        return headers

    def _get_basic_auth(self, source: UpstreamSource) -> Optional[tuple[str, str]]:
        """
        Get basic auth credentials if applicable.

        Args:
            source: The upstream source.

        Returns:
            Tuple of (username, password) or None.
        """
        if source.auth_type == "basic" and source.username:
            password = source.get_password() or ""
            return (source.username, password)
        return None

    def _should_retry(self, error: Exception, attempt: int) -> bool:
        """
        Determine if a request should be retried.

        Args:
            error: The exception that occurred.
            attempt: Current attempt number (0-indexed).

        Returns:
            True if the request should be retried.
        """
        if attempt >= self.config.max_retries - 1:
            return False

        # Retry on connection errors and timeouts
        if isinstance(error, (httpx.ConnectError, httpx.ConnectTimeout)):
            return True

        # Retry on read timeouts
        if isinstance(error, httpx.ReadTimeout):
            return True

        # Retry on certain HTTP errors (502, 503, 504)
        if isinstance(error, httpx.HTTPStatusError):
            return error.response.status_code in (502, 503, 504)

        return False

    def _calculate_backoff(self, attempt: int) -> float:
        """
        Calculate backoff delay for retry.

        Uses exponential backoff with jitter.

        Args:
            attempt: Current attempt number (0-indexed).

        Returns:
            Delay in seconds.
        """
        import random

        delay = self.config.retry_backoff_base * (2**attempt)
        # Add jitter (±25%)
        delay *= 0.75 + random.random() * 0.5
        return min(delay, self.config.retry_backoff_max)

    def fetch(self, url: str, expected_hash: Optional[str] = None) -> FetchResult:
        """
        Fetch an artifact from the given URL.

        Streams the response to a temp file while computing the SHA256 hash.
        Handles authentication, retries, and error cases.

        Args:
            url: The URL to fetch.
            expected_hash: Optional expected SHA256 hash for verification.

        Returns:
            FetchResult with content, hash, size, and headers.

        Raises:
            SourceDisabledError: If the matching source is disabled.
            UpstreamConnectionError: On connection failures.
            UpstreamTimeoutError: On timeout.
            UpstreamHTTPError: On HTTP error responses.
            UpstreamSSLError: On SSL/TLS errors.
            FileSizeExceededError: If Content-Length exceeds max_file_size.
        """
        start_time = time.time()

        # Match URL to source
        source = self._match_source(url)

        # Check if source is enabled (if we have a match)
        if source is not None and not source.enabled:
            raise SourceDisabledError(
                f"Upstream source '{source.name}' is disabled"
            )

        source_name = source.name if source else None
        logger.info(
            f"Fetching URL: {url} (source: {source_name or 'none'})"
        )

        # Build request parameters
        headers = {"User-Agent": self.config.user_agent}
        auth = None

        if source:
            headers.update(self._build_auth_headers(source))
            auth = self._get_basic_auth(source)

        timeout = httpx.Timeout(
            connect=self.config.connect_timeout,
            read=self.config.read_timeout,
            write=30.0,
            pool=10.0,
        )

        # Attempt fetch with retries
        last_error = None
        for attempt in range(self.config.max_retries):
            try:
                return self._do_fetch(
                    url=url,
                    headers=headers,
                    auth=auth,
                    timeout=timeout,
                    source_name=source_name,
                    start_time=start_time,
                    expected_hash=expected_hash,
                )
            except (
                httpx.ConnectError,
                httpx.ConnectTimeout,
                httpx.ReadTimeout,
                httpx.HTTPStatusError,
            ) as e:
                last_error = e
                if self._should_retry(e, attempt):
                    delay = self._calculate_backoff(attempt)
                    logger.warning(
                        f"Fetch failed (attempt {attempt + 1}/{self.config.max_retries}), "
                        f"retrying in {delay:.1f}s: {e}"
                    )
                    time.sleep(delay)
                else:
                    break

        # Convert final error to our exception types
        self._raise_upstream_error(last_error, url)

    def _do_fetch(
        self,
        url: str,
        headers: dict,
        auth: Optional[tuple[str, str]],
        timeout: httpx.Timeout,
        source_name: Optional[str],
        start_time: float,
        expected_hash: Optional[str] = None,
    ) -> FetchResult:
        """
        Perform the actual fetch operation.

        Args:
            url: URL to fetch.
            headers: Request headers.
            auth: Basic auth credentials or None.
            timeout: Request timeout configuration.
            source_name: Name of matched source for logging.
            start_time: Request start time for timing.
            expected_hash: Optional expected hash for verification.

        Returns:
            FetchResult with content and metadata.
        """
        with httpx.Client(
            timeout=timeout,
            follow_redirects=self.config.follow_redirects,
            max_redirects=self.config.max_redirects,
            verify=self.config.verify_ssl,
        ) as client:
            with client.stream("GET", url, headers=headers, auth=auth) as response:
                # Check for HTTP errors
                response.raise_for_status()

                # Check Content-Length against max size
                content_length = response.headers.get("content-length")
                if content_length:
                    content_length = int(content_length)
                    if (
                        self.config.max_file_size
                        and content_length > self.config.max_file_size
                    ):
                        raise FileSizeExceededError(
                            f"File size {content_length} exceeds maximum {self.config.max_file_size}",
                            content_length,
                            self.config.max_file_size,
                        )

                # Stream to temp file while computing hash
                hasher = hashlib.sha256()
                size = 0

                # Create temp file
                temp_file = tempfile.NamedTemporaryFile(
                    delete=False, prefix="orchard_upstream_"
                )
                temp_path = Path(temp_file.name)

                try:
                    for chunk in response.iter_bytes(chunk_size=65536):
                        temp_file.write(chunk)
                        hasher.update(chunk)
                        size += len(chunk)

                        # Check size while streaming if max_file_size is set
                        if self.config.max_file_size and size > self.config.max_file_size:
                            temp_file.close()
                            temp_path.unlink()
                            raise FileSizeExceededError(
                                f"Downloaded size {size} exceeds maximum {self.config.max_file_size}",
                                size,
                                self.config.max_file_size,
                            )

                    temp_file.close()

                    sha256 = hasher.hexdigest()

                    # Verify hash if expected
                    if expected_hash and sha256 != expected_hash.lower():
                        temp_path.unlink()
                        raise UpstreamError(
                            f"Hash mismatch: expected {expected_hash}, got {sha256}"
                        )

                    # Capture response headers
                    response_headers = dict(response.headers)

                    # Get content type
                    content_type = response.headers.get("content-type")

                    elapsed = time.time() - start_time
                    logger.info(
                        f"Fetched {url}: {size} bytes, sha256={sha256[:12]}..., "
                        f"source={source_name}, time={elapsed:.2f}s"
                    )

                    # Return file handle positioned at start
                    content = open(temp_path, "rb")

                    return FetchResult(
                        content=content,
                        sha256=sha256,
                        size=size,
                        content_type=content_type,
                        response_headers=response_headers,
                        source_name=source_name,
                        temp_path=temp_path,
                    )

                except Exception:
                    # Clean up on error
                    try:
                        temp_file.close()
                    except Exception:
                        pass
                    if temp_path.exists():
                        temp_path.unlink()
                    raise

    def _raise_upstream_error(self, error: Exception, url: str):
        """
        Convert httpx exception to appropriate UpstreamError.

        Args:
            error: The httpx exception.
            url: The URL that was being fetched.

        Raises:
            Appropriate UpstreamError subclass.
        """
        if error is None:
            raise UpstreamError(f"Unknown error fetching {url}")

        if isinstance(error, httpx.ConnectError):
            raise UpstreamConnectionError(
                f"Failed to connect to upstream: {error}"
            ) from error

        if isinstance(error, (httpx.ConnectTimeout, httpx.ReadTimeout)):
            raise UpstreamTimeoutError(
                f"Request timed out: {error}"
            ) from error

        if isinstance(error, httpx.HTTPStatusError):
            raise UpstreamHTTPError(
                f"HTTP {error.response.status_code}: {error}",
                error.response.status_code,
                dict(error.response.headers),
            ) from error

        # Check for SSL errors in the error chain
        if "ssl" in str(error).lower() or "certificate" in str(error).lower():
            raise UpstreamSSLError(f"SSL/TLS error: {error}") from error

        raise UpstreamError(f"Error fetching {url}: {error}") from error

    def test_connection(self, source: UpstreamSource) -> tuple[bool, Optional[str], Optional[int]]:
        """
        Test connectivity to an upstream source.

        Performs a HEAD request to the source URL to verify connectivity
        and authentication. Does not follow redirects - a 3xx response
        is considered successful since it proves the server is reachable.

        Args:
            source: The upstream source to test.

        Returns:
            Tuple of (success, error_message, status_code).
        """
        headers = {"User-Agent": self.config.user_agent}
        headers.update(self._build_auth_headers(source))
        auth = self._get_basic_auth(source)

        timeout = httpx.Timeout(
            connect=self.config.connect_timeout,
            read=30.0,
            write=30.0,
            pool=10.0,
        )

        try:
            with httpx.Client(
                timeout=timeout,
                verify=self.config.verify_ssl,
            ) as client:
                response = client.head(
                    source.url,
                    headers=headers,
                    auth=auth,
                    follow_redirects=False,
                )
                # Consider 2xx and 3xx as success, also 405 (Method Not Allowed)
                # since some servers don't support HEAD
                if response.status_code < 400 or response.status_code == 405:
                    return (True, None, response.status_code)
                else:
                    return (
                        False,
                        f"HTTP {response.status_code}",
                        response.status_code,
                    )
        except httpx.ConnectError as e:
            return (False, f"Connection failed: {e}", None)
        except httpx.ConnectTimeout as e:
            return (False, f"Connection timed out: {e}", None)
        except httpx.ReadTimeout as e:
            return (False, f"Read timed out: {e}", None)
        except httpx.TooManyRedirects as e:
            return (False, f"Too many redirects: {e}", None)
        except Exception as e:
            return (False, f"Error: {e}", None)
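A hedged end-to-end sketch of the new client: configure limits, fetch one URL, and always release the temp file. The source list and the download URL are placeholders; only classes and methods defined in the file above are used:

```python
config = UpstreamClientConfig(max_file_size=512 * 1024 * 1024, read_timeout=120.0)
client = UpstreamClient(sources=enabled_sources, config=config)  # enabled_sources assumed loaded from the DB

result = None
try:
    result = client.fetch(
        "https://files.pythonhosted.org/packages/.../requests-2.31.0-py3-none-any.whl",  # placeholder URL
        expected_hash=None,  # pass a sha256 here to fail fast on tampered content
    )
    print(result.sha256, result.size, result.content_type)
    # result.content is an open handle on a temp file; hand it to storage here.
except UpstreamHTTPError as e:
    print(f"upstream returned HTTP {e.status_code}")
except UpstreamError as e:
    print(f"fetch failed: {e}")
finally:
    if result is not None:
        result.close()  # closes the handle and unlinks the temp file
```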
@@ -11,10 +11,11 @@ python-jose[cryptography]==3.3.0
passlib[bcrypt]==1.7.4
bcrypt==4.0.1
slowapi==0.1.9
+httpx>=0.25.0
+redis>=5.0.0

# Test dependencies
pytest>=7.4.0
pytest-asyncio>=0.21.0
pytest-cov>=4.1.0
-httpx>=0.25.0
moto[s3]>=4.2.0
1    backend/scripts/__init__.py    Normal file
@@ -0,0 +1 @@
# Scripts package
262 backend/scripts/backfill_pypi_dependencies.py Normal file
@@ -0,0 +1,262 @@
#!/usr/bin/env python3
"""
Backfill script to extract dependencies from cached PyPI packages.

This script scans all artifacts in the _pypi project and extracts
Requires-Dist metadata from wheel and sdist files that don't already
have dependencies recorded.

Usage:
    # From within the container:
    python -m scripts.backfill_pypi_dependencies

    # Or with docker exec:
    docker exec orchard_orchard-server_1 python -m scripts.backfill_pypi_dependencies

    # Dry run (preview only):
    docker exec orchard_orchard-server_1 python -m scripts.backfill_pypi_dependencies --dry-run
"""

import argparse
import logging
import re
import sys
import tarfile
import zipfile
from io import BytesIO
from typing import List, Optional, Tuple

# Add parent directory to path for imports
sys.path.insert(0, "/app")

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from backend.app.config import get_settings
from backend.app.models import (
    Artifact,
    ArtifactDependency,
    Package,
    Project,
    Tag,
)
from backend.app.storage import get_storage

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)


def parse_requires_dist(requires_dist: str) -> Tuple[Optional[str], Optional[str]]:
    """Parse a Requires-Dist line into (package_name, version_constraint)."""
    # Remove any environment markers (after semicolon)
    if ";" in requires_dist:
        requires_dist = requires_dist.split(";")[0].strip()

    # Match patterns like "package (>=1.0)" or "package>=1.0" or "package"
    match = re.match(
        r"^([a-zA-Z0-9][-a-zA-Z0-9._]*)\s*(?:\(([^)]+)\)|([<>=!~][^\s;]+))?",
        requires_dist.strip(),
    )

    if not match:
        return None, None

    package_name = match.group(1)
    version_constraint = match.group(2) or match.group(3)

    # Normalize package name (PEP 503)
    normalized_name = re.sub(r"[-_.]+", "-", package_name).lower()

    if version_constraint:
        version_constraint = version_constraint.strip()

    return normalized_name, version_constraint


def extract_requires_from_metadata(metadata_content: str) -> List[Tuple[str, Optional[str]]]:
    """Extract all Requires-Dist entries from METADATA/PKG-INFO content."""
    dependencies = []

    for line in metadata_content.split("\n"):
        if line.startswith("Requires-Dist:"):
            value = line[len("Requires-Dist:"):].strip()
            pkg_name, version = parse_requires_dist(value)
            if pkg_name:
                dependencies.append((pkg_name, version))

    return dependencies


def extract_metadata_from_wheel(content: bytes) -> Optional[str]:
    """Extract METADATA file content from a wheel (zip) file."""
    try:
        with zipfile.ZipFile(BytesIO(content)) as zf:
            for name in zf.namelist():
                if name.endswith(".dist-info/METADATA"):
                    return zf.read(name).decode("utf-8", errors="replace")
    except Exception as e:
        logger.warning(f"Failed to extract metadata from wheel: {e}")
    return None


def extract_metadata_from_sdist(content: bytes) -> Optional[str]:
    """Extract PKG-INFO file content from a source distribution (.tar.gz)."""
    try:
        with tarfile.open(fileobj=BytesIO(content), mode="r:gz") as tf:
            for member in tf.getmembers():
                if member.name.endswith("/PKG-INFO") and member.name.count("/") == 1:
                    f = tf.extractfile(member)
                    if f:
                        return f.read().decode("utf-8", errors="replace")
    except Exception as e:
        logger.warning(f"Failed to extract metadata from sdist: {e}")
    return None


def extract_dependencies(content: bytes, filename: str) -> List[Tuple[str, Optional[str]]]:
    """Extract dependencies from a PyPI package file."""
    metadata = None

    if filename.endswith(".whl"):
        metadata = extract_metadata_from_wheel(content)
    elif filename.endswith(".tar.gz"):
        metadata = extract_metadata_from_sdist(content)

    if metadata:
        return extract_requires_from_metadata(metadata)

    return []


def backfill_dependencies(dry_run: bool = False):
    """Main backfill function."""
    settings = get_settings()

    # Create database connection
    engine = create_engine(settings.database_url)
    Session = sessionmaker(bind=engine)
    db = Session()

    # Create storage client
    storage = get_storage()

    try:
        # Find the _pypi project
        pypi_project = db.query(Project).filter(Project.name == "_pypi").first()
        if not pypi_project:
            logger.info("No _pypi project found. Nothing to backfill.")
            return

        # Get all packages in _pypi
        packages = db.query(Package).filter(Package.project_id == pypi_project.id).all()
        logger.info(f"Found {len(packages)} packages in _pypi project")

        total_artifacts = 0
        artifacts_with_deps = 0
        artifacts_processed = 0
        dependencies_added = 0

        for package in packages:
            # Get all tags (each tag points to an artifact)
            tags = db.query(Tag).filter(Tag.package_id == package.id).all()

            for tag in tags:
                total_artifacts += 1
                filename = tag.name

                # Skip non-package files (like .metadata files)
                if not (filename.endswith(".whl") or filename.endswith(".tar.gz")):
                    continue

                # Check if this artifact already has dependencies
                existing_deps = db.query(ArtifactDependency).filter(
                    ArtifactDependency.artifact_id == tag.artifact_id
                ).count()

                if existing_deps > 0:
                    artifacts_with_deps += 1
                    continue

                # Get the artifact
                artifact = db.query(Artifact).filter(Artifact.id == tag.artifact_id).first()
                if not artifact:
                    logger.warning(f"Artifact {tag.artifact_id} not found for tag {filename}")
                    continue

                logger.info(f"Processing {package.name}/{filename}...")

                if dry_run:
                    logger.info(f"  [DRY RUN] Would extract dependencies from {filename}")
                    artifacts_processed += 1
                    continue

                # Download the artifact from S3
                try:
                    content = storage.get(artifact.s3_key)
                except Exception as e:
                    logger.error(f"  Failed to download {filename}: {e}")
                    continue

                # Extract dependencies
                deps = extract_dependencies(content, filename)

                if deps:
                    logger.info(f"  Found {len(deps)} dependencies")
                    for dep_name, dep_version in deps:
                        # Check if already exists (race condition protection)
                        existing = db.query(ArtifactDependency).filter(
                            ArtifactDependency.artifact_id == tag.artifact_id,
                            ArtifactDependency.dependency_project == "_pypi",
                            ArtifactDependency.dependency_package == dep_name,
                        ).first()

                        if not existing:
                            dep = ArtifactDependency(
                                artifact_id=tag.artifact_id,
                                dependency_project="_pypi",
                                dependency_package=dep_name,
                                version_constraint=dep_version if dep_version else "*",
                            )
                            db.add(dep)
                            dependencies_added += 1
                            logger.info(f"    + {dep_name} {dep_version or '*'}")

                    db.commit()
                else:
                    logger.info(f"  No dependencies found")

                artifacts_processed += 1

        logger.info("")
        logger.info("=" * 50)
        logger.info("Backfill complete!")
        logger.info(f"  Total artifacts: {total_artifacts}")
        logger.info(f"  Already had deps: {artifacts_with_deps}")
        logger.info(f"  Processed: {artifacts_processed}")
        logger.info(f"  Dependencies added: {dependencies_added}")
        if dry_run:
            logger.info("  (DRY RUN - no changes made)")

    finally:
        db.close()


def main():
    parser = argparse.ArgumentParser(
        description="Backfill dependencies for cached PyPI packages"
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Preview what would be done without making changes",
    )
    args = parser.parse_args()

    backfill_dependencies(dry_run=args.dry_run)


if __name__ == "__main__":
    main()
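Example (not part of the diff): to make the parsing behaviour concrete, here are illustrative inputs for parse_requires_dist above. The inputs are invented; the expected outputs follow from the regex and the PEP 503 normalization in the script.

# Illustrative only - sample Requires-Dist values and the tuples the parser yields.
parse_requires_dist("requests (>=2.28)")                                 # ("requests", ">=2.28")
parse_requires_dist("charset_normalizer<4,>=2; python_version >= '3'")
#   -> ("charset-normalizer", "<4,>=2")   (environment marker stripped, name normalized)
parse_requires_dist("idna")                                              # ("idna", None)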
@@ -96,7 +96,6 @@ def upload_test_file(
     package: str,
     content: bytes,
     filename: str = "test.bin",
-    tag: Optional[str] = None,
     version: Optional[str] = None,
 ) -> dict:
     """
@@ -108,7 +107,6 @@ def upload_test_file(
         package: Package name
         content: File content as bytes
         filename: Original filename
-        tag: Optional tag to assign
         version: Optional version to assign

     Returns:
@@ -116,8 +114,6 @@ def upload_test_file(
     """
     files = {"file": (filename, io.BytesIO(content), "application/octet-stream")}
     data = {}
-    if tag:
-        data["tag"] = tag
     if version:
         data["version"] = version

@@ -25,7 +25,7 @@ class TestArtifactRetrieval:
         expected_hash = compute_sha256(content)

         upload_test_file(
-            integration_client, project_name, package_name, content, tag="v1"
+            integration_client, project_name, package_name, content, version="v1"
         )

         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -46,27 +46,27 @@ class TestArtifactRetrieval:
         assert response.status_code == 404

     @pytest.mark.integration
-    def test_artifact_includes_tags(self, integration_client, test_package):
-        """Test artifact response includes tags pointing to it."""
+    def test_artifact_includes_versions(self, integration_client, test_package):
+        """Test artifact response includes versions pointing to it."""
         project_name, package_name = test_package
-        content = b"artifact with tags test"
+        content = b"artifact with versions test"
         expected_hash = compute_sha256(content)

         upload_test_file(
-            integration_client, project_name, package_name, content, tag="tagged-v1"
+            integration_client, project_name, package_name, content, version="1.0.0"
         )

         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
         assert response.status_code == 200

         data = response.json()
-        assert "tags" in data
-        assert len(data["tags"]) >= 1
+        assert "versions" in data
+        assert len(data["versions"]) >= 1

-        tag = data["tags"][0]
-        assert "name" in tag
-        assert "package_name" in tag
-        assert "project_name" in tag
+        version = data["versions"][0]
+        assert "version" in version
+        assert "package_name" in version
+        assert "project_name" in version


 class TestArtifactStats:
@@ -82,7 +82,7 @@ class TestArtifactStats:
         expected_hash = compute_sha256(content)

         upload_test_file(
-            integration_client, project, package, content, tag=f"art-{unique_test_id}"
+            integration_client, project, package, content, version=f"art-{unique_test_id}"
         )

         response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
@@ -94,7 +94,7 @@ class TestArtifactStats:
         assert "size" in data
         assert "ref_count" in data
         assert "storage_savings" in data
-        assert "tags" in data
+        assert "versions" in data
         assert "projects" in data
         assert "packages" in data

@@ -136,8 +136,8 @@ class TestArtifactStats:
         )

         # Upload same content to both projects
-        upload_test_file(integration_client, proj1, "pkg", content, tag="v1")
-        upload_test_file(integration_client, proj2, "pkg", content, tag="v1")
+        upload_test_file(integration_client, proj1, "pkg", content, version="v1")
+        upload_test_file(integration_client, proj2, "pkg", content, version="v1")

         # Check artifact stats
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
@@ -203,7 +203,7 @@ class TestArtifactProvenance:
         assert "first_uploaded_by" in data
         assert "upload_count" in data
         assert "packages" in data
-        assert "tags" in data
+        assert "versions" in data
         assert "uploads" in data

     @pytest.mark.integration
@@ -214,17 +214,17 @@ class TestArtifactProvenance:
         assert response.status_code == 404

     @pytest.mark.integration
-    def test_artifact_history_with_tag(self, integration_client, test_package):
-        """Test artifact history includes tag information when tagged."""
+    def test_artifact_history_with_version(self, integration_client, test_package):
+        """Test artifact history includes version information when versioned."""
         project_name, package_name = test_package

         upload_result = upload_test_file(
             integration_client,
             project_name,
             package_name,
-            b"tagged provenance test",
-            "tagged.txt",
-            tag="v1.0.0",
+            b"versioned provenance test",
+            "versioned.txt",
+            version="v1.0.0",
         )
         artifact_id = upload_result["artifact_id"]

@@ -232,12 +232,12 @@ class TestArtifactProvenance:
         assert response.status_code == 200

         data = response.json()
-        assert len(data["tags"]) >= 1
+        assert len(data["versions"]) >= 1

-        tag = data["tags"][0]
-        assert "project_name" in tag
-        assert "package_name" in tag
-        assert "tag_name" in tag
+        version = data["versions"][0]
+        assert "project_name" in version
+        assert "package_name" in version
+        assert "version" in version


 class TestArtifactUploads:
@@ -306,24 +306,24 @@ class TestOrphanedArtifacts:
         assert len(response.json()) <= 5

     @pytest.mark.integration
-    def test_artifact_becomes_orphaned_when_tag_deleted(
+    def test_artifact_becomes_orphaned_when_version_deleted(
         self, integration_client, test_package, unique_test_id
     ):
-        """Test artifact appears in orphaned list after tag is deleted."""
+        """Test artifact appears in orphaned list after version is deleted."""
         project, package = test_package
         content = f"orphan test {unique_test_id}".encode()
         expected_hash = compute_sha256(content)

-        # Upload with tag
-        upload_test_file(integration_client, project, package, content, tag="temp-tag")
+        # Upload with version
+        upload_test_file(integration_client, project, package, content, version="1.0.0-temp")

         # Verify not in orphaned list
         response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
         orphaned_ids = [a["id"] for a in response.json()]
         assert expected_hash not in orphaned_ids

-        # Delete the tag
-        integration_client.delete(f"/api/v1/project/{project}/{package}/tags/temp-tag")
+        # Delete the version
+        integration_client.delete(f"/api/v1/project/{project}/{package}/versions/1.0.0-temp")

         # Verify now in orphaned list
         response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
@@ -356,9 +356,9 @@ class TestGarbageCollection:
         content = f"dry run test {unique_test_id}".encode()
         expected_hash = compute_sha256(content)

-        # Upload and delete tag to create orphan
-        upload_test_file(integration_client, project, package, content, tag="dry-run")
-        integration_client.delete(f"/api/v1/project/{project}/{package}/tags/dry-run")
+        # Upload and delete version to create orphan
+        upload_test_file(integration_client, project, package, content, version="1.0.0-dryrun")
+        integration_client.delete(f"/api/v1/project/{project}/{package}/versions/1.0.0-dryrun")

         # Verify artifact exists
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -385,7 +385,7 @@ class TestGarbageCollection:
         expected_hash = compute_sha256(content)

         # Upload with tag (ref_count=1)
-        upload_test_file(integration_client, project, package, content, tag="keep-this")
+        upload_test_file(integration_client, project, package, content, version="keep-this")

         # Verify artifact exists with ref_count=1
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -534,50 +534,6 @@ class TestGlobalArtifacts:
         assert response.status_code == 400


-class TestGlobalTags:
-    """Tests for global tags endpoint."""
-
-    @pytest.mark.integration
-    def test_global_tags_returns_200(self, integration_client):
-        """Test global tags endpoint returns 200."""
-        response = integration_client.get("/api/v1/tags")
-        assert response.status_code == 200
-
-        data = response.json()
-        assert "items" in data
-        assert "pagination" in data
-
-    @pytest.mark.integration
-    def test_global_tags_pagination(self, integration_client):
-        """Test global tags endpoint respects pagination."""
-        response = integration_client.get("/api/v1/tags?limit=5&page=1")
-        assert response.status_code == 200
-
-        data = response.json()
-        assert len(data["items"]) <= 5
-        assert data["pagination"]["limit"] == 5
-
-    @pytest.mark.integration
-    def test_global_tags_has_project_context(self, integration_client):
-        """Test global tags response includes project/package context."""
-        response = integration_client.get("/api/v1/tags?limit=1")
-        assert response.status_code == 200
-
-        data = response.json()
-        if len(data["items"]) > 0:
-            item = data["items"][0]
-            assert "project_name" in item
-            assert "package_name" in item
-            assert "artifact_id" in item
-
-    @pytest.mark.integration
-    def test_global_tags_search_with_wildcard(self, integration_client):
-        """Test global tags search supports wildcards."""
-        response = integration_client.get("/api/v1/tags?search=v*")
-        assert response.status_code == 200
-        # Just verify it doesn't error; results may vary
-
-
 class TestAuditLogs:
     """Tests for global audit logs endpoint."""

@@ -63,7 +63,7 @@ class TestConcurrentUploads:
                 response = client.post(
                     f"/api/v1/project/{project}/{package}/upload",
                     files=files,
-                    data={"tag": f"concurrent-{idx}"},
+                    data={"version": f"concurrent-{idx}"},
                     headers={"Authorization": f"Bearer {api_key}"},
                 )
                 if response.status_code == 200:
@@ -117,7 +117,7 @@ class TestConcurrentUploads:
                 response = client.post(
                     f"/api/v1/project/{project}/{package}/upload",
                     files=files,
-                    data={"tag": f"concurrent5-{idx}"},
+                    data={"version": f"concurrent5-{idx}"},
                     headers={"Authorization": f"Bearer {api_key}"},
                 )
                 if response.status_code == 200:
@@ -171,7 +171,7 @@ class TestConcurrentUploads:
                 response = client.post(
                     f"/api/v1/project/{project}/{package}/upload",
                     files=files,
-                    data={"tag": f"concurrent10-{idx}"},
+                    data={"version": f"concurrent10-{idx}"},
                     headers={"Authorization": f"Bearer {api_key}"},
                 )
                 if response.status_code == 200:
@@ -195,19 +195,38 @@ class TestConcurrentUploads:

     @pytest.mark.integration
     @pytest.mark.concurrent
-    def test_concurrent_uploads_same_file_deduplication(self, integration_client, test_package):
-        """Test concurrent uploads of same file handle deduplication correctly."""
-        project, package = test_package
+    def test_concurrent_uploads_same_file_deduplication(
+        self, integration_client, test_project, unique_test_id
+    ):
+        """Test concurrent uploads of same file handle deduplication correctly.
+
+        Same content uploaded to different packages should result in:
+        - Same artifact_id (content-addressable)
+        - ref_count = number of packages (one version per package)
+        """
+        project = test_project
         api_key = get_api_key(integration_client)
         assert api_key, "Failed to create API key"

-        content, expected_hash = generate_content_with_hash(4096, seed=999)
         num_concurrent = 5
+        package_names = []
+
+        # Create multiple packages for concurrent uploads
+        for i in range(num_concurrent):
+            pkg_name = f"dedup-pkg-{unique_test_id}-{i}"
+            response = integration_client.post(
+                f"/api/v1/project/{project}/packages",
+                json={"name": pkg_name, "description": f"Dedup test package {i}"},
+            )
+            assert response.status_code == 200
+            package_names.append(pkg_name)
+
+        content, expected_hash = generate_content_with_hash(4096, seed=999)

         results = []
         errors = []

-        def upload_worker(idx):
+        def upload_worker(idx, package):
             try:
                 from httpx import Client
                 base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
@@ -219,7 +238,7 @@ class TestConcurrentUploads:
                 response = client.post(
                     f"/api/v1/project/{project}/{package}/upload",
                     files=files,
-                    data={"tag": f"dedup-{idx}"},
+                    data={"version": "1.0.0"},
                     headers={"Authorization": f"Bearer {api_key}"},
                 )
                 if response.status_code == 200:
@@ -230,7 +249,10 @@ class TestConcurrentUploads:
                 errors.append(f"Worker {idx}: {str(e)}")

         with ThreadPoolExecutor(max_workers=num_concurrent) as executor:
-            futures = [executor.submit(upload_worker, i) for i in range(num_concurrent)]
+            futures = [
+                executor.submit(upload_worker, i, package_names[i])
+                for i in range(num_concurrent)
+            ]
             for future in as_completed(futures):
                 pass

@@ -242,7 +264,7 @@ class TestConcurrentUploads:
         assert len(artifact_ids) == 1
         assert expected_hash in artifact_ids

-        # Verify final ref_count equals number of uploads
+        # Verify final ref_count equals number of packages
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
         assert response.status_code == 200
         assert response.json()["ref_count"] == num_concurrent
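Note (not part of the diff): the deduplication assertions above rely on artifact IDs being content-addressable; the tests compute the expected ID from the raw bytes via compute_sha256. A minimal sketch of that idea, stated as an assumption about the helper rather than the repo's actual implementation:

import hashlib

def compute_sha256(content: bytes) -> str:
    # Content-addressable ID: identical bytes always yield the same artifact_id,
    # which is why concurrent uploads of the same file deduplicate to one artifact.
    return hashlib.sha256(content).hexdigest()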
@@ -287,7 +309,7 @@ class TestConcurrentUploads:
                 response = client.post(
                     f"/api/v1/project/{project}/{package}/upload",
                     files=files,
-                    data={"tag": "latest"},
+                    data={"version": "latest"},
                     headers={"Authorization": f"Bearer {api_key}"},
                 )
                 if response.status_code == 200:
@@ -321,7 +343,7 @@ class TestConcurrentDownloads:
         content, expected_hash = generate_content_with_hash(2048, seed=400)

         # Upload first
-        upload_test_file(integration_client, project, package, content, tag="download-test")
+        upload_test_file(integration_client, project, package, content, version="download-test")

         results = []
         errors = []
@@ -362,7 +384,7 @@ class TestConcurrentDownloads:
         project, package = test_package
         content, expected_hash = generate_content_with_hash(4096, seed=500)

-        upload_test_file(integration_client, project, package, content, tag="download5-test")
+        upload_test_file(integration_client, project, package, content, version="download5-test")

         num_downloads = 5
         results = []
@@ -403,7 +425,7 @@ class TestConcurrentDownloads:
         project, package = test_package
         content, expected_hash = generate_content_with_hash(8192, seed=600)

-        upload_test_file(integration_client, project, package, content, tag="download10-test")
+        upload_test_file(integration_client, project, package, content, version="download10-test")

         num_downloads = 10
         results = []
@@ -450,7 +472,7 @@ class TestConcurrentDownloads:
             content, expected_hash = generate_content_with_hash(1024, seed=700 + i)
             upload_test_file(
                 integration_client, project, package, content,
-                tag=f"multi-download-{i}"
+                version=f"multi-download-{i}"
             )
             uploads.append((f"multi-download-{i}", content))

@@ -502,7 +524,7 @@ class TestMixedConcurrentOperations:

         # Upload initial content
         content1, hash1 = generate_content_with_hash(10240, seed=800)  # 10KB
-        upload_test_file(integration_client, project, package, content1, tag="initial")
+        upload_test_file(integration_client, project, package, content1, version="initial")

         # New content for upload during download
         content2, hash2 = generate_content_with_hash(10240, seed=801)
@@ -539,7 +561,7 @@ class TestMixedConcurrentOperations:
                 response = client.post(
                     f"/api/v1/project/{project}/{package}/upload",
                     files=files,
-                    data={"tag": "during-download"},
+                    data={"version": "during-download"},
                     headers={"Authorization": f"Bearer {api_key}"},
                 )
                 if response.status_code == 200:
@@ -579,7 +601,7 @@ class TestMixedConcurrentOperations:
         existing_files = []
         for i in range(3):
             content, hash = generate_content_with_hash(2048, seed=900 + i)
-            upload_test_file(integration_client, project, package, content, tag=f"existing-{i}")
+            upload_test_file(integration_client, project, package, content, version=f"existing-{i}")
             existing_files.append((f"existing-{i}", content))

         # New files for uploading
@@ -619,7 +641,7 @@ class TestMixedConcurrentOperations:
                 response = client.post(
                     f"/api/v1/project/{project}/{package}/upload",
                     files=files,
-                    data={"tag": f"new-{idx}"},
+                    data={"version": f"new-{idx}"},
                     headers={"Authorization": f"Bearer {api_key}"},
                 )
                 if response.status_code == 200:
@@ -689,7 +711,7 @@ class TestMixedConcurrentOperations:
                 upload_resp = client.post(
                     f"/api/v1/project/{project}/{package}/upload",
                     files=files,
-                    data={"tag": f"pattern-{idx}"},
+                    data={"version": f"pattern-{idx}"},
                     headers={"Authorization": f"Bearer {api_key}"},
                 )
                 if upload_resp.status_code != 200:
|
|||||||
|
|
||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project}/{package}/upload",
|
f"/api/v1/project/{project}/{package}/upload",
|
||||||
data={"tag": "no-file-provided"},
|
data={"version": "no-file-provided"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 422
|
assert response.status_code == 422
|
||||||
|
|
||||||
@@ -200,7 +200,7 @@ class TestTimeoutBehavior:
|
|||||||
|
|
||||||
start_time = time.time()
|
start_time = time.time()
|
||||||
result = upload_test_file(
|
result = upload_test_file(
|
||||||
integration_client, project, package, content, tag="timeout-test"
|
integration_client, project, package, content, version="timeout-test"
|
||||||
)
|
)
|
||||||
elapsed = time.time() - start_time
|
elapsed = time.time() - start_time
|
||||||
|
|
||||||
@@ -219,7 +219,7 @@ class TestTimeoutBehavior:
|
|||||||
|
|
||||||
# First upload
|
# First upload
|
||||||
upload_test_file(
|
upload_test_file(
|
||||||
integration_client, project, package, content, tag="download-timeout-test"
|
integration_client, project, package, content, version="download-timeout-test"
|
||||||
)
|
)
|
||||||
|
|
||||||
# Then download and time it
|
# Then download and time it
|
||||||
|
|||||||
@@ -41,7 +41,7 @@ class TestRoundTripVerification:

         # Upload and capture returned hash
         result = upload_test_file(
-            integration_client, project, package, content, tag="roundtrip"
+            integration_client, project, package, content, version="roundtrip"
         )
         uploaded_hash = result["artifact_id"]

@@ -84,7 +84,7 @@ class TestRoundTripVerification:
         expected_hash = compute_sha256(content)

         upload_test_file(
-            integration_client, project, package, content, tag="header-check"
+            integration_client, project, package, content, version="header-check"
         )

         response = integration_client.get(
@@ -102,7 +102,7 @@ class TestRoundTripVerification:
         expected_hash = compute_sha256(content)

         upload_test_file(
-            integration_client, project, package, content, tag="etag-check"
+            integration_client, project, package, content, version="etag-check"
         )

         response = integration_client.get(
@@ -186,7 +186,7 @@ class TestClientSideVerificationWorkflow:
         content = b"Client post-download verification"

         upload_test_file(
-            integration_client, project, package, content, tag="verify-after"
+            integration_client, project, package, content, version="verify-after"
         )

         response = integration_client.get(
@@ -215,7 +215,7 @@ class TestIntegritySizeVariants:
         content, expected_hash = sized_content(SIZE_1KB, seed=100)

         result = upload_test_file(
-            integration_client, project, package, content, tag="int-1kb"
+            integration_client, project, package, content, version="int-1kb"
         )
         assert result["artifact_id"] == expected_hash

@@ -234,7 +234,7 @@ class TestIntegritySizeVariants:
         content, expected_hash = sized_content(SIZE_100KB, seed=101)

         result = upload_test_file(
-            integration_client, project, package, content, tag="int-100kb"
+            integration_client, project, package, content, version="int-100kb"
         )
         assert result["artifact_id"] == expected_hash

@@ -253,7 +253,7 @@ class TestIntegritySizeVariants:
         content, expected_hash = sized_content(SIZE_1MB, seed=102)

         result = upload_test_file(
-            integration_client, project, package, content, tag="int-1mb"
+            integration_client, project, package, content, version="int-1mb"
         )
         assert result["artifact_id"] == expected_hash

@@ -273,7 +273,7 @@ class TestIntegritySizeVariants:
         content, expected_hash = sized_content(SIZE_10MB, seed=103)

         result = upload_test_file(
-            integration_client, project, package, content, tag="int-10mb"
+            integration_client, project, package, content, version="int-10mb"
         )
         assert result["artifact_id"] == expected_hash

@@ -323,7 +323,13 @@ class TestConsistencyCheck:

     @pytest.mark.integration
     def test_consistency_check_after_upload(self, integration_client, test_package):
-        """Test consistency check passes after valid upload."""
+        """Test consistency check runs successfully after a valid upload.
+
+        Note: We don't assert healthy=True because other tests (especially
+        corruption detection tests) may leave orphaned S3 objects behind.
+        This test validates the consistency check endpoint works and the
+        uploaded artifact is included in the check count.
+        """
         project, package = test_package
         content = b"Consistency check test content"

@@ -335,9 +341,10 @@ class TestConsistencyCheck:
         assert response.status_code == 200
         data = response.json()

-        # Verify check ran and no issues
+        # Verify check ran - at least 1 artifact was checked
         assert data["total_artifacts_checked"] >= 1
-        assert data["healthy"] is True
+        # Verify no missing S3 objects (uploaded artifact should exist)
+        assert data["missing_s3_objects"] == 0

     @pytest.mark.integration
     def test_consistency_check_limit_parameter(self, integration_client):
@@ -366,7 +373,7 @@ class TestDigestHeader:
         expected_hash = compute_sha256(content)

         upload_test_file(
-            integration_client, project, package, content, tag="digest-test"
+            integration_client, project, package, content, version="digest-test"
         )

         response = integration_client.get(
@@ -390,7 +397,7 @@ class TestDigestHeader:
         expected_hash = compute_sha256(content)

         upload_test_file(
-            integration_client, project, package, content, tag="digest-b64"
+            integration_client, project, package, content, version="digest-b64"
         )

         response = integration_client.get(
@@ -420,7 +427,7 @@ class TestVerificationModes:
         content = b"Pre-verification mode test"

         upload_test_file(
-            integration_client, project, package, content, tag="pre-verify"
+            integration_client, project, package, content, version="pre-verify"
         )

         response = integration_client.get(
@@ -440,7 +447,7 @@ class TestVerificationModes:
         content = b"Stream verification mode test"

         upload_test_file(
-            integration_client, project, package, content, tag="stream-verify"
+            integration_client, project, package, content, version="stream-verify"
         )

         response = integration_client.get(
@@ -477,7 +484,7 @@ class TestArtifactIntegrityEndpoint:
         expected_size = len(content)

         upload_test_file(
-            integration_client, project, package, content, tag="content-len"
+            integration_client, project, package, content, version="content-len"
         )

         response = integration_client.get(
@@ -513,7 +520,7 @@ class TestCorruptionDetection:

         # Upload original content
         result = upload_test_file(
-            integration_client, project, package, content, tag="corrupt-test"
+            integration_client, project, package, content, version="corrupt-test"
         )
         assert result["artifact_id"] == expected_hash

@@ -555,7 +562,7 @@ class TestCorruptionDetection:
         expected_hash = compute_sha256(content)

         result = upload_test_file(
-            integration_client, project, package, content, tag="bitflip-test"
+            integration_client, project, package, content, version="bitflip-test"
         )
         assert result["artifact_id"] == expected_hash

@@ -592,7 +599,7 @@ class TestCorruptionDetection:
         expected_hash = compute_sha256(content)

         result = upload_test_file(
-            integration_client, project, package, content, tag="truncate-test"
+            integration_client, project, package, content, version="truncate-test"
         )
         assert result["artifact_id"] == expected_hash

@@ -627,7 +634,7 @@ class TestCorruptionDetection:
         expected_hash = compute_sha256(content)

         result = upload_test_file(
-            integration_client, project, package, content, tag="append-test"
+            integration_client, project, package, content, version="append-test"
         )
         assert result["artifact_id"] == expected_hash

@@ -670,7 +677,7 @@ class TestCorruptionDetection:
         expected_hash = compute_sha256(content)

         result = upload_test_file(
-            integration_client, project, package, content, tag="client-detect"
+            integration_client, project, package, content, version="client-detect"
         )

         # Corrupt the S3 object
@@ -713,7 +720,7 @@ class TestCorruptionDetection:
         expected_hash = compute_sha256(content)

         result = upload_test_file(
-            integration_client, project, package, content, tag="size-mismatch"
+            integration_client, project, package, content, version="size-mismatch"
         )

         # Modify S3 object to have different size
@@ -747,7 +754,7 @@ class TestCorruptionDetection:
         expected_hash = compute_sha256(content)

         result = upload_test_file(
-            integration_client, project, package, content, tag="missing-s3"
+            integration_client, project, package, content, version="missing-s3"
         )

         # Delete the S3 object
@@ -41,7 +41,7 @@ class TestUploadMetrics:
         content = b"duration test content"

         result = upload_test_file(
-            integration_client, project, package, content, tag="duration-test"
+            integration_client, project, package, content, version="duration-test"
         )

         assert "duration_ms" in result
@@ -55,7 +55,7 @@ class TestUploadMetrics:
         content = b"throughput test content"

         result = upload_test_file(
-            integration_client, project, package, content, tag="throughput-test"
+            integration_client, project, package, content, version="throughput-test"
         )

         assert "throughput_mbps" in result
@@ -72,7 +72,7 @@ class TestUploadMetrics:

         start = time.time()
         result = upload_test_file(
-            integration_client, project, package, content, tag="duration-check"
+            integration_client, project, package, content, version="duration-check"
         )
         actual_duration = (time.time() - start) * 1000  # ms

@@ -92,7 +92,7 @@ class TestLargeFileUploads:
         content, expected_hash = sized_content(SIZE_10MB, seed=200)

         result = upload_test_file(
-            integration_client, project, package, content, tag="large-10mb"
+            integration_client, project, package, content, version="large-10mb"
         )

         assert result["artifact_id"] == expected_hash
@@ -109,7 +109,7 @@ class TestLargeFileUploads:
         content, expected_hash = sized_content(SIZE_100MB, seed=300)

         result = upload_test_file(
-            integration_client, project, package, content, tag="large-100mb"
+            integration_client, project, package, content, version="large-100mb"
         )

         assert result["artifact_id"] == expected_hash
@@ -126,7 +126,7 @@ class TestLargeFileUploads:
         content, expected_hash = sized_content(SIZE_1GB, seed=400)

         result = upload_test_file(
-            integration_client, project, package, content, tag="large-1gb"
+            integration_client, project, package, content, version="large-1gb"
         )

         assert result["artifact_id"] == expected_hash
@@ -147,14 +147,14 @@ class TestLargeFileUploads:

         # First upload
         result1 = upload_test_file(
-            integration_client, project, package, content, tag=f"dedup-{unique_test_id}-1"
+            integration_client, project, package, content, version=f"dedup-{unique_test_id}-1"
         )
         # Note: may be True if previous test uploaded same content
         first_dedupe = result1["deduplicated"]

         # Second upload of same content
         result2 = upload_test_file(
-            integration_client, project, package, content, tag=f"dedup-{unique_test_id}-2"
+            integration_client, project, package, content, version=f"dedup-{unique_test_id}-2"
         )
         assert result2["artifact_id"] == expected_hash
         # Second upload MUST be deduplicated
@@ -277,7 +277,7 @@ class TestUploadSizeLimits:
         content = b"X"

         result = upload_test_file(
-            integration_client, project, package, content, tag="min-size"
+            integration_client, project, package, content, version="min-size"
         )

         assert result["size"] == 1
@@ -289,7 +289,7 @@ class TestUploadSizeLimits:
         content = b"content length verification test"

         result = upload_test_file(
-            integration_client, project, package, content, tag="content-length-test"
+            integration_client, project, package, content, version="content-length-test"
         )

         # Size in response should match actual content length
@@ -336,7 +336,7 @@ class TestUploadErrorHandling:

         response = integration_client.post(
             f"/api/v1/project/{project}/{package}/upload",
-            data={"tag": "no-file"},
+            data={"version": "no-file"},
         )

         assert response.status_code == 422
@@ -459,7 +459,7 @@ class TestUploadTimeout:

         # httpx client should handle this quickly
         result = upload_test_file(
-            integration_client, project, package, content, tag="timeout-small"
+            integration_client, project, package, content, version="timeout-small"
         )

         assert result["artifact_id"] is not None
@@ -474,7 +474,7 @@ class TestUploadTimeout:

         start = time.time()
         result = upload_test_file(
-            integration_client, project, package, content, tag="timeout-check"
+            integration_client, project, package, content, version="timeout-check"
         )
         duration = time.time() - start

@@ -525,7 +525,7 @@ class TestConcurrentUploads:
                 response = client.post(
                     f"/api/v1/project/{project}/{package}/upload",
                     files=files,
-                    data={"tag": f"concurrent-diff-{idx}"},
+                    data={"version": f"concurrent-diff-{idx}"},
                     headers={"Authorization": f"Bearer {api_key}"},
                 )
                 if response.status_code == 200:
@@ -175,7 +175,7 @@ class TestPackageStats:
         assert "package_id" in data
         assert "package_name" in data
         assert "project_name" in data
-        assert "tag_count" in data
+        assert "version_count" in data
         assert "artifact_count" in data
         assert "total_size_bytes" in data
         assert "upload_count" in data
@@ -234,7 +234,11 @@ class TestPackageCascadeDelete:
     def test_ref_count_decrements_on_package_delete(
         self, integration_client, unique_test_id
     ):
-        """Test ref_count decrements for all tags when package is deleted."""
+        """Test ref_count decrements when package is deleted.
+
+        Each package can only have one version per artifact (same content = same version).
+        This test verifies that deleting a package decrements the artifact's ref_count.
+        """
         project_name = f"cascade-pkg-{unique_test_id}"
         package_name = f"test-pkg-{unique_test_id}"

@@ -256,23 +260,17 @@ class TestPackageCascadeDelete:
         )
         assert response.status_code == 200

-        # Upload content with multiple tags
+        # Upload content with version
         content = f"cascade delete test {unique_test_id}".encode()
         expected_hash = compute_sha256(content)

         upload_test_file(
-            integration_client, project_name, package_name, content, tag="v1"
-        )
-        upload_test_file(
-            integration_client, project_name, package_name, content, tag="v2"
-        )
-        upload_test_file(
-            integration_client, project_name, package_name, content, tag="v3"
+            integration_client, project_name, package_name, content, version="1.0.0"
         )

-        # Verify ref_count is 3
+        # Verify ref_count is 1
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
-        assert response.json()["ref_count"] == 3
+        assert response.json()["ref_count"] == 1

         # Delete the package
         delete_response = integration_client.delete(
|
|||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
data = response.json()
|
data = response.json()
|
||||||
names = [p["name"] for p in data["items"]]
|
# Filter out system projects (names starting with "_") as they may have
|
||||||
|
# collation-specific sort behavior and aren't part of the test data
|
||||||
|
names = [p["name"] for p in data["items"] if not p["name"].startswith("_")]
|
||||||
assert names == sorted(names)
|
assert names == sorted(names)
|
||||||
|
|
||||||
|
|
||||||
@@ -147,7 +149,7 @@ class TestProjectStats:
|
|||||||
assert "project_id" in data
|
assert "project_id" in data
|
||||||
assert "project_name" in data
|
assert "project_name" in data
|
||||||
assert "package_count" in data
|
assert "package_count" in data
|
||||||
assert "tag_count" in data
|
assert "version_count" in data
|
||||||
assert "artifact_count" in data
|
assert "artifact_count" in data
|
||||||
assert "total_size_bytes" in data
|
assert "total_size_bytes" in data
|
||||||
assert "upload_count" in data
|
assert "upload_count" in data
|
||||||
@@ -227,7 +229,11 @@ class TestProjectCascadeDelete:
|
|||||||
def test_ref_count_decrements_on_project_delete(
|
def test_ref_count_decrements_on_project_delete(
|
||||||
self, integration_client, unique_test_id
|
self, integration_client, unique_test_id
|
||||||
):
|
):
|
||||||
"""Test ref_count decrements for all tags when project is deleted."""
|
"""Test ref_count decrements for all versions when project is deleted.
|
||||||
|
|
||||||
|
Each package can only have one version per artifact (same content = same version).
|
||||||
|
With 2 packages, ref_count should be 2, and go to 0 when project is deleted.
|
||||||
|
"""
|
||||||
project_name = f"cascade-proj-{unique_test_id}"
|
project_name = f"cascade-proj-{unique_test_id}"
|
||||||
package1_name = f"pkg1-{unique_test_id}"
|
package1_name = f"pkg1-{unique_test_id}"
|
||||||
package2_name = f"pkg2-{unique_test_id}"
|
package2_name = f"pkg2-{unique_test_id}"
|
||||||
@@ -251,26 +257,20 @@ class TestProjectCascadeDelete:
|
|||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
# Upload same content with tags in both packages
|
# Upload same content to both packages
|
||||||
content = f"project cascade test {unique_test_id}".encode()
|
content = f"project cascade test {unique_test_id}".encode()
|
||||||
expected_hash = compute_sha256(content)
|
expected_hash = compute_sha256(content)
|
||||||
|
|
||||||
upload_test_file(
|
upload_test_file(
|
||||||
integration_client, project_name, package1_name, content, tag="v1"
|
integration_client, project_name, package1_name, content, version="1.0.0"
|
||||||
)
|
)
|
||||||
upload_test_file(
|
upload_test_file(
|
||||||
integration_client, project_name, package1_name, content, tag="v2"
|
integration_client, project_name, package2_name, content, version="1.0.0"
|
||||||
)
|
|
||||||
upload_test_file(
|
|
||||||
integration_client, project_name, package2_name, content, tag="latest"
|
|
||||||
)
|
|
||||||
upload_test_file(
|
|
||||||
integration_client, project_name, package2_name, content, tag="stable"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Verify ref_count is 4 (2 tags in each of 2 packages)
|
# Verify ref_count is 2 (1 version in each of 2 packages)
|
||||||
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
|
||||||
assert response.json()["ref_count"] == 4
|
assert response.json()["ref_count"] == 2
|
||||||
|
|
||||||
# Delete the project
|
# Delete the project
|
||||||
delete_response = integration_client.delete(f"/api/v1/projects/{project_name}")
|
delete_response = integration_client.delete(f"/api/v1/projects/{project_name}")
|
||||||
|
|||||||
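Throughout these hunks the tests.factories.upload_test_file helper switches from a tag= keyword to a version= keyword. Its definition is not part of this compare view; the sketch below is a hypothetical reconstruction of its post-rename shape, inferred only from the call sites and from the keys the tests read back (artifact_id, size, original_name, version). The form field used to pass the version to the upload endpoint is an assumption.

    import io

    def upload_test_file(client, project, package, content, filename="test.bin", version=None):
        """Hypothetical test factory: upload bytes and return the server's JSON response."""
        files = {"file": (filename, io.BytesIO(content), "application/octet-stream")}
        data = {"version": version} if version is not None else {}
        response = client.post(
            f"/api/v1/project/{project}/{package}/upload", files=files, data=data
        )
        assert response.status_code == 200
        # Callers rely on keys such as artifact_id, size, original_name and version.
        return response.json()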
153  backend/tests/integration/test_pypi_proxy.py  (new file)
@@ -0,0 +1,153 @@
"""Integration tests for PyPI transparent proxy."""

import os
import pytest
import httpx


def get_base_url():
    """Get the base URL for the Orchard server from environment."""
    return os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")


class TestPyPIProxyEndpoints:
    """Tests for PyPI proxy endpoints.

    These endpoints are public (no auth required) since pip needs to use them.
    """

    @pytest.mark.integration
    def test_pypi_simple_index(self):
        """Test that /pypi/simple/ returns HTML response."""
        with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
            response = client.get("/pypi/simple/")
            # Returns 200 if sources configured, 503 if not
            assert response.status_code in (200, 503)
            if response.status_code == 200:
                assert "text/html" in response.headers.get("content-type", "")
            else:
                assert "No PyPI upstream sources configured" in response.json()["detail"]

    @pytest.mark.integration
    def test_pypi_package_endpoint(self):
        """Test that /pypi/simple/{package}/ returns appropriate response."""
        with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
            response = client.get("/pypi/simple/requests/")
            # Returns 200 if sources configured and package found,
            # 404 if package not found, 503 if no sources
            assert response.status_code in (200, 404, 503)
            if response.status_code == 200:
                assert "text/html" in response.headers.get("content-type", "")
            elif response.status_code == 404:
                assert "not found" in response.json()["detail"].lower()
            else:  # 503
                assert "No PyPI upstream sources configured" in response.json()["detail"]

    @pytest.mark.integration
    def test_pypi_download_missing_upstream_param(self):
        """Test that /pypi/simple/{package}/{filename} requires upstream param."""
        with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
            response = client.get("/pypi/simple/requests/requests-2.31.0.tar.gz")
            assert response.status_code == 400
            assert "upstream" in response.json()["detail"].lower()


class TestPyPILinkRewriting:
    """Tests for URL rewriting in PyPI proxy responses."""

    def test_rewrite_package_links(self):
        """Test that download links are rewritten to go through proxy."""
        from app.pypi_proxy import _rewrite_package_links

        html = '''
        <html>
        <body>
        <a href="https://files.pythonhosted.org/packages/ab/cd/requests-2.31.0.tar.gz#sha256=abc123">requests-2.31.0.tar.gz</a>
        <a href="https://files.pythonhosted.org/packages/ef/gh/requests-2.31.0-py3-none-any.whl#sha256=def456">requests-2.31.0-py3-none-any.whl</a>
        </body>
        </html>
        '''

        # upstream_base_url is used to resolve relative URLs (not needed here since URLs are absolute)
        result = _rewrite_package_links(
            html,
            "http://localhost:8080",
            "requests",
            "https://pypi.org/simple/requests/"
        )

        # Links should be rewritten to go through our proxy
        assert "/pypi/simple/requests/requests-2.31.0.tar.gz?upstream=" in result
        assert "/pypi/simple/requests/requests-2.31.0-py3-none-any.whl?upstream=" in result
        # Original URLs should be encoded in upstream param
        assert "files.pythonhosted.org" in result
        # Hash fragments should be preserved
        assert "#sha256=abc123" in result
        assert "#sha256=def456" in result

    def test_rewrite_relative_links(self):
        """Test that relative URLs are resolved to absolute URLs."""
        from app.pypi_proxy import _rewrite_package_links

        # Artifactory-style relative URLs
        html = '''
        <html>
        <body>
        <a href="../../packages/ab/cd/requests-2.31.0.tar.gz#sha256=abc123">requests-2.31.0.tar.gz</a>
        </body>
        </html>
        '''

        result = _rewrite_package_links(
            html,
            "https://orchard.example.com",
            "requests",
            "https://artifactory.example.com/api/pypi/pypi-remote/simple/requests/"
        )

        # The relative URL should be resolved to absolute
        # ../../packages/ab/cd/... from /api/pypi/pypi-remote/simple/requests/ resolves to /api/pypi/pypi-remote/packages/ab/cd/...
        assert "upstream=https%3A%2F%2Fartifactory.example.com%2Fapi%2Fpypi%2Fpypi-remote%2Fpackages" in result
        # Hash fragment should be preserved
        assert "#sha256=abc123" in result


class TestPyPIPackageNormalization:
    """Tests for PyPI package name normalization."""

    @pytest.mark.integration
    def test_package_name_normalized(self):
        """Test that package names are normalized per PEP 503.

        Different capitalizations/separators should all be valid paths.
        The endpoint normalizes to lowercase with hyphens before lookup.
        """
        with httpx.Client(base_url=get_base_url(), timeout=30.0) as client:
            # Test various name formats - all should be valid endpoint paths
            for package_name in ["Requests", "some_package", "some-package"]:
                response = client.get(f"/pypi/simple/{package_name}/")
                # 200 = found, 404 = not found, 503 = no sources configured
                assert response.status_code in (200, 404, 503), \
                    f"Unexpected status {response.status_code} for {package_name}"

                # Verify response is appropriate for the status code
                if response.status_code == 200:
                    assert "text/html" in response.headers.get("content-type", "")
                elif response.status_code == 503:
                    assert "No PyPI upstream sources configured" in response.json()["detail"]


class TestPyPIProxyInfrastructure:
    """Tests for PyPI proxy infrastructure integration."""

    @pytest.mark.integration
    def test_health_endpoint_includes_infrastructure(self, integration_client):
        """Health endpoint should report infrastructure status."""
        response = integration_client.get("/health")
        assert response.status_code == 200

        data = response.json()
        assert data["status"] == "ok"
        # Infrastructure status should be present
        assert "http_pool" in data
        assert "cache" in data
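For orientation, this is roughly what the link rewriting and PEP 503 name normalization exercised by the new tests could look like. It is an illustrative sketch only: the real app.pypi_proxy._rewrite_package_links is not included in this compare view, rewrite_package_links_sketch below is a stand-in written against the behaviour the assertions describe, and its parameter order simply mirrors the test calls. The normalize_name rule is the one PEP 503 itself specifies.

    import re
    from urllib.parse import urljoin, quote

    def normalize_name(name: str) -> str:
        """PEP 503 normalization: lowercase, runs of -, _ and . collapse to a single hyphen."""
        return re.sub(r"[-_.]+", "-", name).lower()

    def rewrite_package_links_sketch(html: str, proxy_base: str, package: str, upstream_page_url: str) -> str:
        """Rewrite <a href> targets to point at the proxy, carrying the original
        (absolute-resolved) URL in an ?upstream= query parameter."""
        def _replace(match: re.Match) -> str:
            href = match.group(1)
            # Resolve relative links (e.g. Artifactory-style ../../packages/...) against the upstream page
            absolute = urljoin(upstream_page_url, href)
            # Keep the #sha256=... fragment on the rewritten link
            base, _, fragment = absolute.partition("#")
            filename = base.rsplit("/", 1)[-1]
            new_href = f"{proxy_base}/pypi/simple/{package}/{filename}?upstream={quote(base, safe='')}"
            if fragment:
                new_href += f"#{fragment}"
            return f'href="{new_href}"'
        return re.sub(r'href="([^"]+)"', _replace, html)

Under these assumptions, urljoin resolves "../../packages/ab/cd/requests-2.31.0.tar.gz" against ".../simple/requests/" to ".../packages/ab/cd/requests-2.31.0.tar.gz", and quote(..., safe='') produces the %3A/%2F-encoded upstream parameter that the relative-link test asserts on.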
@@ -48,7 +48,7 @@ class TestSmallFileSizes:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="1byte.bin", tag="1byte"
+            filename="1byte.bin", version="1byte"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_1B

@@ -70,7 +70,7 @@ class TestSmallFileSizes:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="1kb.bin", tag="1kb"
+            filename="1kb.bin", version="1kb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_1KB

@@ -90,7 +90,7 @@ class TestSmallFileSizes:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="10kb.bin", tag="10kb"
+            filename="10kb.bin", version="10kb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_10KB

@@ -110,7 +110,7 @@ class TestSmallFileSizes:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="100kb.bin", tag="100kb"
+            filename="100kb.bin", version="100kb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_100KB

@@ -134,7 +134,7 @@ class TestMediumFileSizes:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="1mb.bin", tag="1mb"
+            filename="1mb.bin", version="1mb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_1MB

@@ -155,7 +155,7 @@ class TestMediumFileSizes:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="5mb.bin", tag="5mb"
+            filename="5mb.bin", version="5mb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_5MB

@@ -177,7 +177,7 @@ class TestMediumFileSizes:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="10mb.bin", tag="10mb"
+            filename="10mb.bin", version="10mb"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == SIZE_10MB

@@ -200,7 +200,7 @@ class TestMediumFileSizes:
        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
-            filename="50mb.bin", tag="50mb"
+            filename="50mb.bin", version="50mb"
        )
        upload_time = time.time() - start_time

@@ -240,7 +240,7 @@ class TestLargeFileSizes:
        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
-            filename="100mb.bin", tag="100mb"
+            filename="100mb.bin", version="100mb"
        )
        upload_time = time.time() - start_time

@@ -271,7 +271,7 @@ class TestLargeFileSizes:
        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
-            filename="250mb.bin", tag="250mb"
+            filename="250mb.bin", version="250mb"
        )
        upload_time = time.time() - start_time

@@ -302,7 +302,7 @@ class TestLargeFileSizes:
        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
-            filename="500mb.bin", tag="500mb"
+            filename="500mb.bin", version="500mb"
        )
        upload_time = time.time() - start_time

@@ -336,7 +336,7 @@ class TestLargeFileSizes:
        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content,
-            filename="1gb.bin", tag="1gb"
+            filename="1gb.bin", version="1gb"
        )
        upload_time = time.time() - start_time

@@ -368,7 +368,7 @@ class TestChunkBoundaries:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="chunk.bin", tag="chunk-exact"
+            filename="chunk.bin", version="chunk-exact"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == CHUNK_SIZE

@@ -389,7 +389,7 @@ class TestChunkBoundaries:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="chunk_plus.bin", tag="chunk-plus"
+            filename="chunk_plus.bin", version="chunk-plus"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == size

@@ -410,7 +410,7 @@ class TestChunkBoundaries:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="chunk_minus.bin", tag="chunk-minus"
+            filename="chunk_minus.bin", version="chunk-minus"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == size

@@ -431,7 +431,7 @@ class TestChunkBoundaries:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="multi_chunk.bin", tag="multi-chunk"
+            filename="multi_chunk.bin", version="multi-chunk"
        )
        assert result["artifact_id"] == expected_hash
        assert result["size"] == size

@@ -457,7 +457,7 @@ class TestDataIntegrity:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="binary.bin", tag="binary"
+            filename="binary.bin", version="binary"
        )
        assert result["artifact_id"] == expected_hash

@@ -477,7 +477,7 @@ class TestDataIntegrity:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="text.txt", tag="text"
+            filename="text.txt", version="text"
        )
        assert result["artifact_id"] == expected_hash

@@ -498,7 +498,7 @@ class TestDataIntegrity:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="nulls.bin", tag="nulls"
+            filename="nulls.bin", version="nulls"
        )
        assert result["artifact_id"] == expected_hash

@@ -519,7 +519,7 @@ class TestDataIntegrity:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="文件名.txt", tag="unicode-name"
+            filename="文件名.txt", version="unicode-name"
        )
        assert result["artifact_id"] == expected_hash
        assert result["original_name"] == "文件名.txt"

@@ -543,7 +543,7 @@ class TestDataIntegrity:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename="data.gz", tag="compressed"
+            filename="data.gz", version="compressed"
        )
        assert result["artifact_id"] == expected_hash

@@ -568,7 +568,7 @@ class TestDataIntegrity:

        result = upload_test_file(
            integration_client, project, package, content,
-            filename=f"hash_test_{size}.bin", tag=f"hash-{size}"
+            filename=f"hash_test_{size}.bin", version=f"hash-{size}"
        )

        # Verify artifact_id matches expected hash
@@ -32,7 +32,7 @@ class TestRangeRequests:
        """Test range request for first N bytes."""
        project, package = test_package
        content = b"0123456789" * 100  # 1000 bytes
-        upload_test_file(integration_client, project, package, content, tag="range-test")
+        upload_test_file(integration_client, project, package, content, version="range-test")

        # Request first 10 bytes
        response = integration_client.get(

@@ -50,7 +50,7 @@ class TestRangeRequests:
        """Test range request for bytes in the middle."""
        project, package = test_package
        content = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-        upload_test_file(integration_client, project, package, content, tag="range-mid")
+        upload_test_file(integration_client, project, package, content, version="range-mid")

        # Request bytes 10-19 (KLMNOPQRST)
        response = integration_client.get(

@@ -66,7 +66,7 @@ class TestRangeRequests:
        """Test range request for last N bytes (suffix range)."""
        project, package = test_package
        content = b"0123456789ABCDEF"  # 16 bytes
-        upload_test_file(integration_client, project, package, content, tag="range-suffix")
+        upload_test_file(integration_client, project, package, content, version="range-suffix")

        # Request last 4 bytes
        response = integration_client.get(

@@ -82,7 +82,7 @@ class TestRangeRequests:
        """Test range request from offset to end."""
        project, package = test_package
        content = b"0123456789"
-        upload_test_file(integration_client, project, package, content, tag="range-open")
+        upload_test_file(integration_client, project, package, content, version="range-open")

        # Request from byte 5 to end
        response = integration_client.get(

@@ -100,7 +100,7 @@ class TestRangeRequests:
        """Test that range requests include Accept-Ranges header."""
        project, package = test_package
        content = b"test content"
-        upload_test_file(integration_client, project, package, content, tag="accept-ranges")
+        upload_test_file(integration_client, project, package, content, version="accept-ranges")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/accept-ranges",

@@ -117,7 +117,7 @@ class TestRangeRequests:
        """Test that full downloads advertise range support."""
        project, package = test_package
        content = b"test content"
-        upload_test_file(integration_client, project, package, content, tag="full-accept")
+        upload_test_file(integration_client, project, package, content, version="full-accept")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/full-accept",

@@ -136,7 +136,7 @@ class TestConditionalRequests:
        project, package = test_package
        content = b"conditional request test content"
        expected_hash = compute_sha256(content)
-        upload_test_file(integration_client, project, package, content, tag="cond-etag")
+        upload_test_file(integration_client, project, package, content, version="cond-etag")

        # Request with matching ETag
        response = integration_client.get(

@@ -153,7 +153,7 @@ class TestConditionalRequests:
        project, package = test_package
        content = b"etag no quotes test"
        expected_hash = compute_sha256(content)
-        upload_test_file(integration_client, project, package, content, tag="cond-noquote")
+        upload_test_file(integration_client, project, package, content, version="cond-noquote")

        # Request with ETag without quotes
        response = integration_client.get(

@@ -168,7 +168,7 @@ class TestConditionalRequests:
        """Test If-None-Match with non-matching ETag returns 200."""
        project, package = test_package
        content = b"etag mismatch test"
-        upload_test_file(integration_client, project, package, content, tag="cond-mismatch")
+        upload_test_file(integration_client, project, package, content, version="cond-mismatch")

        # Request with different ETag
        response = integration_client.get(

@@ -184,7 +184,7 @@ class TestConditionalRequests:
        """Test If-Modified-Since with future date returns 304."""
        project, package = test_package
        content = b"modified since test"
-        upload_test_file(integration_client, project, package, content, tag="cond-modified")
+        upload_test_file(integration_client, project, package, content, version="cond-modified")

        # Request with future date (artifact was definitely created before this)
        future_date = formatdate(time.time() + 86400, usegmt=True)  # Tomorrow

@@ -202,7 +202,7 @@ class TestConditionalRequests:
        """Test If-Modified-Since with old date returns 200."""
        project, package = test_package
        content = b"old date test"
-        upload_test_file(integration_client, project, package, content, tag="cond-old")
+        upload_test_file(integration_client, project, package, content, version="cond-old")

        # Request with old date (2020-01-01)
        old_date = "Wed, 01 Jan 2020 00:00:00 GMT"

@@ -220,7 +220,7 @@ class TestConditionalRequests:
        project, package = test_package
        content = b"304 etag test"
        expected_hash = compute_sha256(content)
-        upload_test_file(integration_client, project, package, content, tag="304-etag")
+        upload_test_file(integration_client, project, package, content, version="304-etag")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/304-etag",

@@ -236,7 +236,7 @@ class TestConditionalRequests:
        project, package = test_package
        content = b"304 cache test"
        expected_hash = compute_sha256(content)
-        upload_test_file(integration_client, project, package, content, tag="304-cache")
+        upload_test_file(integration_client, project, package, content, version="304-cache")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/304-cache",

@@ -255,7 +255,7 @@ class TestCachingHeaders:
        """Test download response includes Cache-Control header."""
        project, package = test_package
        content = b"cache control test"
-        upload_test_file(integration_client, project, package, content, tag="cache-ctl")
+        upload_test_file(integration_client, project, package, content, version="cache-ctl")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/cache-ctl",

@@ -272,7 +272,7 @@ class TestCachingHeaders:
        """Test download response includes Last-Modified header."""
        project, package = test_package
        content = b"last modified test"
-        upload_test_file(integration_client, project, package, content, tag="last-mod")
+        upload_test_file(integration_client, project, package, content, version="last-mod")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/last-mod",

@@ -290,7 +290,7 @@ class TestCachingHeaders:
        project, package = test_package
        content = b"etag header test"
        expected_hash = compute_sha256(content)
-        upload_test_file(integration_client, project, package, content, tag="etag-hdr")
+        upload_test_file(integration_client, project, package, content, version="etag-hdr")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/etag-hdr",

@@ -308,7 +308,7 @@ class TestDownloadResume:
        """Test resuming download from where it left off."""
        project, package = test_package
        content = b"ABCDEFGHIJ" * 100  # 1000 bytes
-        upload_test_file(integration_client, project, package, content, tag="resume-test")
+        upload_test_file(integration_client, project, package, content, version="resume-test")

        # Simulate partial download (first 500 bytes)
        response1 = integration_client.get(

@@ -340,7 +340,7 @@ class TestDownloadResume:
        project, package = test_package
        content = b"resume etag verification test content"
        expected_hash = compute_sha256(content)
-        upload_test_file(integration_client, project, package, content, tag="resume-etag")
+        upload_test_file(integration_client, project, package, content, version="resume-etag")

        # Get ETag from first request
        response1 = integration_client.get(

@@ -373,7 +373,7 @@ class TestLargeFileStreaming:
        project, package = test_package
        content, expected_hash = sized_content(SIZE_1MB, seed=500)

-        upload_test_file(integration_client, project, package, content, tag="stream-1mb")
+        upload_test_file(integration_client, project, package, content, version="stream-1mb")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/stream-1mb",

@@ -391,7 +391,7 @@ class TestLargeFileStreaming:
        project, package = test_package
        content, expected_hash = sized_content(SIZE_100KB, seed=501)

-        upload_test_file(integration_client, project, package, content, tag="stream-hdr")
+        upload_test_file(integration_client, project, package, content, version="stream-hdr")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/stream-hdr",

@@ -410,7 +410,7 @@ class TestLargeFileStreaming:
        project, package = test_package
        content, _ = sized_content(SIZE_100KB, seed=502)

-        upload_test_file(integration_client, project, package, content, tag="range-large")
+        upload_test_file(integration_client, project, package, content, version="range-large")

        # Request a slice from the middle
        start = 50000

@@ -433,7 +433,7 @@ class TestDownloadModes:
        """Test proxy mode streams content through backend."""
        project, package = test_package
        content = b"proxy mode test content"
-        upload_test_file(integration_client, project, package, content, tag="mode-proxy")
+        upload_test_file(integration_client, project, package, content, version="mode-proxy")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/mode-proxy",

@@ -447,7 +447,7 @@ class TestDownloadModes:
        """Test presigned mode returns JSON with URL."""
        project, package = test_package
        content = b"presigned mode test"
-        upload_test_file(integration_client, project, package, content, tag="mode-presign")
+        upload_test_file(integration_client, project, package, content, version="mode-presign")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/mode-presign",

@@ -464,7 +464,7 @@ class TestDownloadModes:
        """Test redirect mode returns 302 to presigned URL."""
        project, package = test_package
        content = b"redirect mode test"
-        upload_test_file(integration_client, project, package, content, tag="mode-redir")
+        upload_test_file(integration_client, project, package, content, version="mode-redir")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/mode-redir",

@@ -484,7 +484,7 @@ class TestIntegrityDuringStreaming:
        project, package = test_package
        content = b"integrity check content"
        expected_hash = compute_sha256(content)
-        upload_test_file(integration_client, project, package, content, tag="integrity")
+        upload_test_file(integration_client, project, package, content, version="integrity")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/integrity",

@@ -505,7 +505,7 @@ class TestIntegrityDuringStreaming:
        project, package = test_package
        content = b"etag integrity test"
        expected_hash = compute_sha256(content)
-        upload_test_file(integration_client, project, package, content, tag="etag-int")
+        upload_test_file(integration_client, project, package, content, version="etag-int")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/etag-int",

@@ -524,7 +524,7 @@ class TestIntegrityDuringStreaming:
        """Test Digest header is present in RFC 3230 format."""
        project, package = test_package
        content = b"digest header test"
-        upload_test_file(integration_client, project, package, content, tag="digest")
+        upload_test_file(integration_client, project, package, content, version="digest")

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/digest",
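The download tests above assert that responses carry ETag (the artifact's sha256), Last-Modified, Accept-Ranges and an RFC 3230 Digest header, and that partial content can be requested with Range. As a rough client-side illustration of the resume flow those tests exercise, here is a hedged sketch, not code from this repository; it assumes, as the assertions do, that the ETag value is the artifact's sha256.

    import hashlib
    import httpx

    def resume_download(client: httpx.Client, url: str, first: bytes) -> bytes:
        """Fetch the remainder of a partially downloaded artifact and verify it."""
        # Ask only for the bytes we do not have yet
        resp = client.get(url, headers={"Range": f"bytes={len(first)}-"})
        assert resp.status_code == 206  # Partial Content
        data = first + resp.content
        # ETag is assumed to carry the artifact's sha256 (possibly quoted)
        etag = resp.headers.get("etag", "").strip('"')
        if etag:
            assert hashlib.sha256(data).hexdigest() == etag
        return data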
@@ -1,403 +0,0 @@  (entire file removed)
"""
Integration tests for tag API endpoints.

Tests cover:
- Tag CRUD operations
- Tag listing with pagination and search
- Tag history tracking
- ref_count behavior with tag operations
"""

import pytest
from tests.factories import compute_sha256, upload_test_file


class TestTagCRUD:
    """Tests for tag create, read, delete operations."""

    @pytest.mark.integration
    def test_create_tag_via_upload(self, integration_client, test_package):
        """Test creating a tag via upload endpoint."""
        project_name, package_name = test_package

        result = upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"tag create test",
            tag="v1.0.0",
        )

        assert result["tag"] == "v1.0.0"
        assert result["artifact_id"]

    @pytest.mark.integration
    def test_create_tag_via_post(
        self, integration_client, test_package, unique_test_id
    ):
        """Test creating a tag via POST /tags endpoint."""
        project_name, package_name = test_package

        # First upload an artifact
        result = upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"artifact for tag",
        )
        artifact_id = result["artifact_id"]

        # Create tag via POST
        tag_name = f"post-tag-{unique_test_id}"
        response = integration_client.post(
            f"/api/v1/project/{project_name}/{package_name}/tags",
            json={"name": tag_name, "artifact_id": artifact_id},
        )
        assert response.status_code == 200

        data = response.json()
        assert data["name"] == tag_name
        assert data["artifact_id"] == artifact_id

    @pytest.mark.integration
    def test_get_tag(self, integration_client, test_package):
        """Test getting a tag by name."""
        project_name, package_name = test_package

        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"get tag test",
            tag="get-tag",
        )

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/tags/get-tag"
        )
        assert response.status_code == 200

        data = response.json()
        assert data["name"] == "get-tag"
        assert "artifact_id" in data
        assert "artifact_size" in data
        assert "artifact_content_type" in data

    @pytest.mark.integration
    def test_list_tags(self, integration_client, test_package):
        """Test listing tags for a package."""
        project_name, package_name = test_package

        # Create some tags
        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"list tags test",
            tag="list-v1",
        )

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/tags"
        )
        assert response.status_code == 200

        data = response.json()
        assert "items" in data
        assert "pagination" in data

        tag_names = [t["name"] for t in data["items"]]
        assert "list-v1" in tag_names

    @pytest.mark.integration
    def test_delete_tag(self, integration_client, test_package):
        """Test deleting a tag."""
        project_name, package_name = test_package

        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"delete tag test",
            tag="to-delete",
        )

        # Delete tag
        response = integration_client.delete(
            f"/api/v1/project/{project_name}/{package_name}/tags/to-delete"
        )
        assert response.status_code == 204

        # Verify deleted
        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/tags/to-delete"
        )
        assert response.status_code == 404


class TestTagListingFilters:
    """Tests for tag listing with filters and search."""

    @pytest.mark.integration
    def test_tags_pagination(self, integration_client, test_package):
        """Test tag listing respects pagination."""
        project_name, package_name = test_package

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/tags?limit=5"
        )
        assert response.status_code == 200

        data = response.json()
        assert len(data["items"]) <= 5
        assert data["pagination"]["limit"] == 5

    @pytest.mark.integration
    def test_tags_search(self, integration_client, test_package, unique_test_id):
        """Test tag search by name."""
        project_name, package_name = test_package

        tag_name = f"searchable-{unique_test_id}"
        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"search test",
            tag=tag_name,
        )

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/tags?search=searchable"
        )
        assert response.status_code == 200

        data = response.json()
        tag_names = [t["name"] for t in data["items"]]
        assert tag_name in tag_names


class TestTagHistory:
    """Tests for tag history tracking."""

    @pytest.mark.integration
    def test_tag_history_on_create(self, integration_client, test_package):
        """Test tag history is created when tag is created."""
        project_name, package_name = test_package

        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"history create test",
            tag="history-create",
        )

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/tags/history-create/history"
        )
        assert response.status_code == 200

        data = response.json()
        assert len(data) >= 1

    @pytest.mark.integration
    def test_tag_history_on_update(
        self, integration_client, test_package, unique_test_id
    ):
        """Test tag history is created when tag is updated."""
        project_name, package_name = test_package

        tag_name = f"history-update-{unique_test_id}"

        # Create tag with first artifact
        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"first content",
            tag=tag_name,
        )

        # Update tag with second artifact
        upload_test_file(
            integration_client,
            project_name,
            package_name,
            b"second content",
            tag=tag_name,
        )

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/tags/{tag_name}/history"
        )
        assert response.status_code == 200

        data = response.json()
        # Should have at least 2 history entries (create + update)
        assert len(data) >= 2


class TestTagRefCount:
    """Tests for ref_count behavior with tag operations."""

    @pytest.mark.integration
    def test_ref_count_decrements_on_tag_delete(self, integration_client, test_package):
        """Test ref_count decrements when a tag is deleted."""
        project_name, package_name = test_package
        content = b"ref count delete test"
        expected_hash = compute_sha256(content)

        # Upload with two tags
        upload_test_file(
            integration_client, project_name, package_name, content, tag="rc-v1"
        )
        upload_test_file(
            integration_client, project_name, package_name, content, tag="rc-v2"
        )

        # Verify ref_count is 2
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.json()["ref_count"] == 2

        # Delete one tag
        delete_response = integration_client.delete(
            f"/api/v1/project/{project_name}/{package_name}/tags/rc-v1"
        )
        assert delete_response.status_code == 204

        # Verify ref_count is now 1
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.json()["ref_count"] == 1

    @pytest.mark.integration
    def test_ref_count_zero_after_all_tags_deleted(
        self, integration_client, test_package
    ):
        """Test ref_count goes to 0 when all tags are deleted."""
        project_name, package_name = test_package
        content = b"orphan test content"
        expected_hash = compute_sha256(content)

        # Upload with one tag
        upload_test_file(
            integration_client, project_name, package_name, content, tag="only-tag"
        )

        # Delete the tag
        integration_client.delete(
            f"/api/v1/project/{project_name}/{package_name}/tags/only-tag"
        )

        # Verify ref_count is 0
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.json()["ref_count"] == 0

    @pytest.mark.integration
    def test_ref_count_adjusts_on_tag_update(
        self, integration_client, test_package, unique_test_id
    ):
        """Test ref_count adjusts when a tag is updated to point to different artifact."""
        project_name, package_name = test_package

        # Upload two different artifacts
        content1 = f"artifact one {unique_test_id}".encode()
        content2 = f"artifact two {unique_test_id}".encode()
        hash1 = compute_sha256(content1)
        hash2 = compute_sha256(content2)

        # Upload first artifact with tag "latest"
        upload_test_file(
            integration_client, project_name, package_name, content1, tag="latest"
        )

        # Verify first artifact has ref_count 1
        response = integration_client.get(f"/api/v1/artifact/{hash1}")
        assert response.json()["ref_count"] == 1

        # Upload second artifact with different tag
        upload_test_file(
            integration_client, project_name, package_name, content2, tag="stable"
        )

        # Now update "latest" tag to point to second artifact
        upload_test_file(
            integration_client, project_name, package_name, content2, tag="latest"
        )

        # Verify first artifact ref_count decreased to 0
        response = integration_client.get(f"/api/v1/artifact/{hash1}")
        assert response.json()["ref_count"] == 0

        # Verify second artifact ref_count increased to 2
        response = integration_client.get(f"/api/v1/artifact/{hash2}")
        assert response.json()["ref_count"] == 2

    @pytest.mark.integration
    def test_ref_count_unchanged_when_tag_same_artifact(
        self, integration_client, test_package, unique_test_id
    ):
        """Test ref_count doesn't change when tag is 'updated' to same artifact."""
        project_name, package_name = test_package

        content = f"same artifact {unique_test_id}".encode()
        expected_hash = compute_sha256(content)

        # Upload with tag
        upload_test_file(
            integration_client, project_name, package_name, content, tag="same-v1"
        )

        # Verify ref_count is 1
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.json()["ref_count"] == 1

        # Upload same content with same tag (no-op)
        upload_test_file(
            integration_client, project_name, package_name, content, tag="same-v1"
        )

        # Verify ref_count is still 1
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.json()["ref_count"] == 1

    @pytest.mark.integration
    def test_tag_via_post_endpoint_increments_ref_count(
        self, integration_client, test_package, unique_test_id
    ):
        """Test creating tag via POST /tags endpoint increments ref_count."""
        project_name, package_name = test_package

        content = f"tag endpoint test {unique_test_id}".encode()
        expected_hash = compute_sha256(content)

        # Upload artifact without tag
        result = upload_test_file(
            integration_client, project_name, package_name, content, filename="test.bin"
        )
        artifact_id = result["artifact_id"]

        # Verify ref_count is 0 (no tags yet)
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.json()["ref_count"] == 0

        # Create tag via POST endpoint
        tag_response = integration_client.post(
            f"/api/v1/project/{project_name}/{package_name}/tags",
            json={"name": "post-v1", "artifact_id": artifact_id},
        )
        assert tag_response.status_code == 200

        # Verify ref_count is now 1
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.json()["ref_count"] == 1

        # Create another tag via POST endpoint
        tag_response = integration_client.post(
            f"/api/v1/project/{project_name}/{package_name}/tags",
            json={"name": "post-latest", "artifact_id": artifact_id},
        )
        assert tag_response.status_code == 200

        # Verify ref_count is now 2
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.json()["ref_count"] == 2
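The removed file above tested mutable tags, where re-pointing a tag shifts a reference from one artifact to another and ref_count has to move with it. The upload tests that follow instead assume immutable versions, where re-uploading the same content resolves to the existing version and nothing moves. A toy contrast, not code from this repository and with purely illustrative names:

    refs: dict[str, int] = {}   # artifact_id -> ref_count
    tags: dict[str, str] = {}   # tag name -> artifact_id

    def point_tag(name: str, artifact_id: str) -> None:
        """Re-pointing a mutable tag moves one reference between artifacts."""
        old = tags.get(name)
        if old == artifact_id:
            return                                  # same artifact: ref_count unchanged
        if old is not None:
            refs[old] -= 1                          # e.g. "latest" leaves the first artifact
        tags[name] = artifact_id
        refs[artifact_id] = refs.get(artifact_id, 0) + 1

    # With immutable versions there is no re-pointing step: uploading identical bytes to the
    # same package returns the existing version, so the artifact's ref_count stays put.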
@@ -47,7 +47,7 @@ class TestUploadBasics:
|
|||||||
expected_hash = compute_sha256(content)
|
expected_hash = compute_sha256(content)
|
||||||
|
|
||||||
result = upload_test_file(
|
result = upload_test_file(
|
||||||
integration_client, project_name, package_name, content, tag="v1"
|
integration_client, project_name, package_name, content, version="v1"
|
||||||
)
|
)
|
||||||
|
|
||||||
assert result["artifact_id"] == expected_hash
|
assert result["artifact_id"] == expected_hash
|
||||||
@@ -116,31 +116,23 @@ class TestUploadBasics:
         assert result["created_at"] is not None
 
     @pytest.mark.integration
-    def test_upload_without_tag_succeeds(self, integration_client, test_package):
-        """Test upload without tag succeeds (no tag created)."""
+    def test_upload_without_version_succeeds(self, integration_client, test_package):
+        """Test upload without version succeeds (no version created)."""
         project, package = test_package
-        content = b"upload without tag test"
+        content = b"upload without version test"
         expected_hash = compute_sha256(content)
 
-        files = {"file": ("no_tag.bin", io.BytesIO(content), "application/octet-stream")}
+        files = {"file": ("no_version.bin", io.BytesIO(content), "application/octet-stream")}
         response = integration_client.post(
             f"/api/v1/project/{project}/{package}/upload",
             files=files,
-            # No tag parameter
+            # No version parameter
         )
         assert response.status_code == 200
         result = response.json()
         assert result["artifact_id"] == expected_hash
-
-        # Verify no tag was created - list tags and check
-        tags_response = integration_client.get(
-            f"/api/v1/project/{project}/{package}/tags"
-        )
-        assert tags_response.status_code == 200
-        tags = tags_response.json()
-        # Filter for tags pointing to this artifact
-        artifact_tags = [t for t in tags.get("items", tags) if t.get("artifact_id") == expected_hash]
-        assert len(artifact_tags) == 0, "Tag should not be created when not specified"
+        # Version should be None when not specified
+        assert result.get("version") is None
 
     @pytest.mark.integration
     def test_upload_creates_artifact_in_database(self, integration_client, test_package):
@@ -172,25 +164,29 @@ class TestUploadBasics:
         assert s3_object_exists(expected_hash), "S3 object should exist after upload"
 
     @pytest.mark.integration
-    def test_upload_with_tag_creates_tag_record(self, integration_client, test_package):
-        """Test upload with tag creates tag record."""
+    def test_upload_with_version_creates_version_record(self, integration_client, test_package):
+        """Test upload with version creates version record."""
         project, package = test_package
-        content = b"tag creation test"
+        content = b"version creation test"
         expected_hash = compute_sha256(content)
-        tag_name = "my-tag-v1"
+        version_name = "1.0.0"
 
-        upload_test_file(
-            integration_client, project, package, content, tag=tag_name
+        result = upload_test_file(
+            integration_client, project, package, content, version=version_name
         )
 
-        # Verify tag exists
-        tags_response = integration_client.get(
-            f"/api/v1/project/{project}/{package}/tags"
+        # Verify version was created
+        assert result.get("version") == version_name
+        assert result["artifact_id"] == expected_hash
+
+        # Verify version exists in versions list
+        versions_response = integration_client.get(
+            f"/api/v1/project/{project}/{package}/versions"
         )
-        assert tags_response.status_code == 200
-        tags = tags_response.json()
-        tag_names = [t["name"] for t in tags.get("items", tags)]
-        assert tag_name in tag_names
+        assert versions_response.status_code == 200
+        versions = versions_response.json()
+        version_names = [v["version"] for v in versions.get("items", [])]
+        assert version_name in version_names
 
 
 class TestDuplicateUploads:
@@ -207,36 +203,44 @@ class TestDuplicateUploads:
 
         # First upload
         result1 = upload_test_file(
-            integration_client, project, package, content, tag="first"
+            integration_client, project, package, content, version="first"
         )
         assert result1["artifact_id"] == expected_hash
 
         # Second upload
         result2 = upload_test_file(
-            integration_client, project, package, content, tag="second"
+            integration_client, project, package, content, version="second"
         )
         assert result2["artifact_id"] == expected_hash
         assert result1["artifact_id"] == result2["artifact_id"]
 
     @pytest.mark.integration
-    def test_same_file_twice_increments_ref_count(
+    def test_same_file_twice_returns_existing_version(
         self, integration_client, test_package
     ):
-        """Test uploading same file twice increments ref_count to 2."""
+        """Test uploading same file twice in same package returns existing version.
+
+        Same artifact can only have one version per package. Uploading the same content
+        with a different version name returns the existing version, not a new one.
+        ref_count stays at 1 because there's still only one PackageVersion reference.
+        """
         project, package = test_package
         content = b"content for ref count increment test"
 
         # First upload
         result1 = upload_test_file(
-            integration_client, project, package, content, tag="v1"
+            integration_client, project, package, content, version="v1"
         )
         assert result1["ref_count"] == 1
 
-        # Second upload
+        # Second upload with different version name returns existing version
         result2 = upload_test_file(
-            integration_client, project, package, content, tag="v2"
+            integration_client, project, package, content, version="v2"
         )
-        assert result2["ref_count"] == 2
+        # Same artifact, same package = same version returned, ref_count stays 1
+        assert result2["ref_count"] == 1
+        assert result2["deduplicated"] is True
+        assert result1["version"] == result2["version"]  # Both return "v1"
 
     @pytest.mark.integration
     def test_same_file_different_packages_shares_artifact(
@@ -261,12 +265,12 @@ class TestDuplicateUploads:
         )
 
         # Upload to first package
-        result1 = upload_test_file(integration_client, project, pkg1, content, tag="v1")
+        result1 = upload_test_file(integration_client, project, pkg1, content, version="v1")
         assert result1["artifact_id"] == expected_hash
         assert result1["deduplicated"] is False
 
         # Upload to second package
-        result2 = upload_test_file(integration_client, project, pkg2, content, tag="v1")
+        result2 = upload_test_file(integration_client, project, pkg2, content, version="v1")
         assert result2["artifact_id"] == expected_hash
         assert result2["deduplicated"] is True
 
@@ -286,7 +290,7 @@ class TestDuplicateUploads:
             package,
             content,
             filename="file1.bin",
-            tag="v1",
+            version="v1",
         )
         assert result1["artifact_id"] == expected_hash
 
@@ -297,7 +301,7 @@ class TestDuplicateUploads:
             package,
             content,
             filename="file2.bin",
-            tag="v2",
+            version="v2",
         )
         assert result2["artifact_id"] == expected_hash
         assert result2["deduplicated"] is True
@@ -307,17 +311,17 @@ class TestDownload:
     """Tests for download functionality."""
 
     @pytest.mark.integration
-    def test_download_by_tag(self, integration_client, test_package):
-        """Test downloading artifact by tag name."""
+    def test_download_by_version(self, integration_client, test_package):
+        """Test downloading artifact by version."""
         project, package = test_package
-        original_content = b"download by tag test"
+        original_content = b"download by version test"
 
         upload_test_file(
-            integration_client, project, package, original_content, tag="download-tag"
+            integration_client, project, package, original_content, version="1.0.0"
         )
 
         response = integration_client.get(
-            f"/api/v1/project/{project}/{package}/+/download-tag",
+            f"/api/v1/project/{project}/{package}/+/1.0.0",
             params={"mode": "proxy"},
         )
         assert response.status_code == 200
@@ -340,29 +344,29 @@ class TestDownload:
         assert response.content == original_content
 
     @pytest.mark.integration
-    def test_download_by_tag_prefix(self, integration_client, test_package):
-        """Test downloading artifact using tag: prefix."""
+    def test_download_by_version_prefix(self, integration_client, test_package):
+        """Test downloading artifact using version: prefix."""
         project, package = test_package
-        original_content = b"download by tag prefix test"
+        original_content = b"download by version prefix test"
 
         upload_test_file(
-            integration_client, project, package, original_content, tag="prefix-tag"
+            integration_client, project, package, original_content, version="2.0.0"
        )
 
         response = integration_client.get(
-            f"/api/v1/project/{project}/{package}/+/tag:prefix-tag",
+            f"/api/v1/project/{project}/{package}/+/version:2.0.0",
             params={"mode": "proxy"},
         )
         assert response.status_code == 200
         assert response.content == original_content
 
     @pytest.mark.integration
-    def test_download_nonexistent_tag(self, integration_client, test_package):
-        """Test downloading nonexistent tag returns 404."""
+    def test_download_nonexistent_version(self, integration_client, test_package):
+        """Test downloading nonexistent version returns 404."""
         project, package = test_package
 
         response = integration_client.get(
-            f"/api/v1/project/{project}/{package}/+/nonexistent-tag"
+            f"/api/v1/project/{project}/{package}/+/nonexistent-version"
         )
         assert response.status_code == 404
 
@@ -400,7 +404,7 @@ class TestDownload:
         original_content = b"exact content verification test data 12345"
 
         upload_test_file(
-            integration_client, project, package, original_content, tag="verify"
+            integration_client, project, package, original_content, version="verify"
         )
 
         response = integration_client.get(
@@ -421,7 +425,7 @@ class TestDownloadHeaders:
 
         upload_test_file(
             integration_client, project, package, content,
-            filename="test.txt", tag="content-type-test"
+            filename="test.txt", version="content-type-test"
         )
 
         response = integration_client.get(
@@ -440,7 +444,7 @@ class TestDownloadHeaders:
         expected_length = len(content)
 
         upload_test_file(
-            integration_client, project, package, content, tag="content-length-test"
+            integration_client, project, package, content, version="content-length-test"
         )
 
         response = integration_client.get(
@@ -460,7 +464,7 @@ class TestDownloadHeaders:
 
         upload_test_file(
             integration_client, project, package, content,
-            filename=filename, tag="disposition-test"
+            filename=filename, version="disposition-test"
         )
 
         response = integration_client.get(
@@ -481,7 +485,7 @@ class TestDownloadHeaders:
         expected_hash = compute_sha256(content)
 
         upload_test_file(
-            integration_client, project, package, content, tag="checksum-headers"
+            integration_client, project, package, content, version="checksum-headers"
         )
 
         response = integration_client.get(
@@ -501,7 +505,7 @@ class TestDownloadHeaders:
         expected_hash = compute_sha256(content)
 
         upload_test_file(
-            integration_client, project, package, content, tag="etag-test"
+            integration_client, project, package, content, version="etag-test"
         )
 
         response = integration_client.get(
@@ -519,17 +523,31 @@ class TestConcurrentUploads:
     """Tests for concurrent upload handling."""
 
     @pytest.mark.integration
-    def test_concurrent_uploads_same_file(self, integration_client, test_package):
-        """Test concurrent uploads of same file handle deduplication correctly."""
-        project, package = test_package
+    def test_concurrent_uploads_same_file(self, integration_client, test_project, unique_test_id):
+        """Test concurrent uploads of same file to different packages handle deduplication correctly.
+
+        Same artifact can only have one version per package, so we create multiple packages
+        to test that concurrent uploads to different packages correctly increment ref_count.
+        """
         content = b"content for concurrent upload test"
         expected_hash = compute_sha256(content)
         num_concurrent = 5
 
+        # Create packages for each concurrent upload
+        packages = []
+        for i in range(num_concurrent):
+            pkg_name = f"concurrent-pkg-{unique_test_id}-{i}"
+            response = integration_client.post(
+                f"/api/v1/project/{test_project}/packages",
+                json={"name": pkg_name},
+            )
+            assert response.status_code == 200
+            packages.append(pkg_name)
+
         # Create an API key for worker threads
         api_key_response = integration_client.post(
             "/api/v1/auth/keys",
-            json={"name": "concurrent-test-key"},
+            json={"name": f"concurrent-test-key-{unique_test_id}"},
         )
         assert api_key_response.status_code == 200, f"Failed to create API key: {api_key_response.text}"
         api_key = api_key_response.json()["key"]
@@ -537,7 +555,7 @@ class TestConcurrentUploads:
         results = []
         errors = []
 
-        def upload_worker(tag_suffix):
+        def upload_worker(idx):
            try:
                 from httpx import Client
 
@@ -545,15 +563,15 @@ class TestConcurrentUploads:
                 with Client(base_url=base_url, timeout=30.0) as client:
                     files = {
                         "file": (
-                            f"concurrent-{tag_suffix}.bin",
+                            f"concurrent-{idx}.bin",
                             io.BytesIO(content),
                             "application/octet-stream",
                         )
                     }
                     response = client.post(
-                        f"/api/v1/project/{project}/{package}/upload",
+                        f"/api/v1/project/{test_project}/{packages[idx]}/upload",
                         files=files,
-                        data={"tag": f"concurrent-{tag_suffix}"},
+                        data={"version": "1.0.0"},
                         headers={"Authorization": f"Bearer {api_key}"},
                     )
                     if response.status_code == 200:
@@ -576,7 +594,7 @@ class TestConcurrentUploads:
         assert len(artifact_ids) == 1
         assert expected_hash in artifact_ids
 
-        # Verify final ref_count
+        # Verify final ref_count equals number of packages
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
         assert response.status_code == 200
         assert response.json()["ref_count"] == num_concurrent
@@ -605,7 +623,7 @@ class TestFileSizeValidation:
         content = b"X"
 
         result = upload_test_file(
-            integration_client, project, package, content, tag="tiny"
+            integration_client, project, package, content, version="tiny"
         )
 
         assert result["artifact_id"] is not None
@@ -621,7 +639,7 @@ class TestFileSizeValidation:
         expected_size = len(content)
 
         result = upload_test_file(
-            integration_client, project, package, content, tag="size-test"
+            integration_client, project, package, content, version="size-test"
         )
 
         assert result["size"] == expected_size
@@ -649,7 +667,7 @@ class TestUploadFailureCleanup:
         response = integration_client.post(
             f"/api/v1/project/nonexistent-project-{unique_test_id}/nonexistent-pkg/upload",
             files=files,
-            data={"tag": "test"},
+            data={"version": "test"},
         )
 
         assert response.status_code == 404
@@ -672,7 +690,7 @@ class TestUploadFailureCleanup:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
             files=files,
-            data={"tag": "test"},
+            data={"version": "test"},
         )
 
         assert response.status_code == 404
@@ -693,7 +711,7 @@ class TestUploadFailureCleanup:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
             files=files,
-            data={"tag": "test"},
+            data={"version": "test"},
         )
 
         assert response.status_code == 404
@@ -719,7 +737,7 @@ class TestS3StorageVerification:
 
         # Upload same content multiple times
         for tag in ["s3test1", "s3test2", "s3test3"]:
-            upload_test_file(integration_client, project, package, content, tag=tag)
+            upload_test_file(integration_client, project, package, content, version=tag)
 
         # Verify only one S3 object exists
         s3_objects = list_s3_objects_by_hash(expected_hash)
@@ -735,16 +753,26 @@ class TestS3StorageVerification:
 
     @pytest.mark.integration
     def test_artifact_table_single_row_after_duplicates(
-        self, integration_client, test_package
+        self, integration_client, test_project, unique_test_id
     ):
-        """Test artifact table contains only one row after duplicate uploads."""
-        project, package = test_package
+        """Test artifact table contains only one row after duplicate uploads to different packages.
+
+        Same artifact can only have one version per package, so we create multiple packages
+        to test deduplication across packages.
+        """
         content = b"content for single row test"
         expected_hash = compute_sha256(content)
 
-        # Upload same content multiple times
-        for tag in ["v1", "v2", "v3"]:
-            upload_test_file(integration_client, project, package, content, tag=tag)
+        # Create 3 packages and upload same content to each
+        for i in range(3):
+            pkg_name = f"single-row-pkg-{unique_test_id}-{i}"
+            integration_client.post(
+                f"/api/v1/project/{test_project}/packages",
+                json={"name": pkg_name},
+            )
+            upload_test_file(
+                integration_client, test_project, pkg_name, content, version="1.0.0"
+            )
 
         # Query artifact
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -783,7 +811,7 @@ class TestSecurityPathTraversal:
         response = integration_client.post(
             f"/api/v1/project/{project}/{package}/upload",
             files=files,
-            data={"tag": "traversal-test"},
+            data={"version": "traversal-test"},
         )
         assert response.status_code == 200
         result = response.json()
@@ -801,48 +829,16 @@ class TestSecurityPathTraversal:
         assert response.status_code in [400, 404, 422]
 
     @pytest.mark.integration
-    def test_path_traversal_in_tag_name(self, integration_client, test_package):
-        """Test tag names with path traversal are handled safely."""
+    def test_path_traversal_in_version_name(self, integration_client, test_package):
+        """Test version names with path traversal are handled safely."""
         project, package = test_package
-        content = b"tag traversal test"
+        content = b"version traversal test"
 
         files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
         response = integration_client.post(
             f"/api/v1/project/{project}/{package}/upload",
             files=files,
-            data={"tag": "../../../etc/passwd"},
-        )
-        assert response.status_code in [200, 400, 422]
-
-    @pytest.mark.integration
-    def test_download_path_traversal_in_ref(self, integration_client, test_package):
-        """Test download ref with path traversal is rejected."""
-        project, package = test_package
-
-        response = integration_client.get(
-            f"/api/v1/project/{project}/{package}/+/../../../etc/passwd"
-        )
-        assert response.status_code in [400, 404, 422]
-
-    @pytest.mark.integration
-    def test_path_traversal_in_package_name(self, integration_client, test_project):
-        """Test package names with path traversal sequences are rejected."""
-        response = integration_client.get(
-            f"/api/v1/project/{test_project}/packages/../../../etc/passwd"
-        )
-        assert response.status_code in [400, 404, 422]
-
-    @pytest.mark.integration
-    def test_path_traversal_in_tag_name(self, integration_client, test_package):
-        """Test tag names with path traversal are rejected or handled safely."""
-        project, package = test_package
-        content = b"tag traversal test"
-
-        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
-        response = integration_client.post(
-            f"/api/v1/project/{project}/{package}/upload",
-            files=files,
-            data={"tag": "../../../etc/passwd"},
+            data={"version": "../../../etc/passwd"},
         )
         assert response.status_code in [200, 400, 422]
 
@@ -867,7 +863,7 @@ class TestSecurityMalformedRequests:
 
         response = integration_client.post(
             f"/api/v1/project/{project}/{package}/upload",
-            data={"tag": "no-file"},
+            data={"version": "no-file"},
         )
         assert response.status_code == 422
 
@@ -39,31 +39,6 @@ class TestVersionCreation:
         assert result.get("version") == "1.0.0"
         assert result.get("version_source") == "explicit"
 
-    @pytest.mark.integration
-    def test_upload_with_version_and_tag(self, integration_client, test_package):
-        """Test upload with both version and tag creates both records."""
-        project, package = test_package
-        content = b"version and tag test"
-
-        files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")}
-        response = integration_client.post(
-            f"/api/v1/project/{project}/{package}/upload",
-            files=files,
-            data={"version": "2.0.0", "tag": "latest"},
-        )
-        assert response.status_code == 200
-        result = response.json()
-        assert result.get("version") == "2.0.0"
-
-        # Verify tag was also created
-        tags_response = integration_client.get(
-            f"/api/v1/project/{project}/{package}/tags"
-        )
-        assert tags_response.status_code == 200
-        tags = tags_response.json()
-        tag_names = [t["name"] for t in tags.get("items", tags)]
-        assert "latest" in tag_names
-
     @pytest.mark.integration
     def test_duplicate_version_same_content_succeeds(self, integration_client, test_package):
         """Test uploading same version with same content succeeds (deduplication)."""
@@ -262,11 +237,10 @@ class TestDownloadByVersion:
         assert response.status_code == 404
 
     @pytest.mark.integration
-    def test_version_resolution_priority(self, integration_client, test_package):
-        """Test that version: prefix explicitly resolves to version, not tag."""
+    def test_version_resolution_with_prefix(self, integration_client, test_package):
+        """Test that version: prefix explicitly resolves to version."""
         project, package = test_package
         version_content = b"this is the version content"
-        tag_content = b"this is the tag content"
 
         # Create a version 6.0.0
         files1 = {"file": ("app-v.tar.gz", io.BytesIO(version_content), "application/octet-stream")}
@@ -276,14 +250,6 @@ class TestDownloadByVersion:
             data={"version": "6.0.0"},
         )
 
-        # Create a tag named "6.0.0" pointing to different content
-        files2 = {"file": ("app-t.tar.gz", io.BytesIO(tag_content), "application/octet-stream")}
-        integration_client.post(
-            f"/api/v1/project/{project}/{package}/upload",
-            files=files2,
-            data={"tag": "6.0.0"},
-        )
-
         # Download with version: prefix should get version content
         response = integration_client.get(
             f"/api/v1/project/{project}/{package}/+/version:6.0.0",
@@ -292,14 +258,6 @@ class TestDownloadByVersion:
         assert response.status_code == 200
         assert response.content == version_content
 
-        # Download with tag: prefix should get tag content
-        response2 = integration_client.get(
-            f"/api/v1/project/{project}/{package}/+/tag:6.0.0",
-            params={"mode": "proxy"},
-        )
-        assert response2.status_code == 200
-        assert response2.content == tag_content
-
 
 class TestVersionDeletion:
     """Tests for deleting versions."""
@@ -27,11 +27,9 @@ class TestVersionCreation:
             project_name,
             package_name,
             b"version create test",
-            tag="latest",
             version="1.0.0",
         )
 
-        assert result["tag"] == "latest"
         assert result["version"] == "1.0.0"
         assert result["version_source"] == "explicit"
         assert result["artifact_id"]
@@ -149,7 +147,6 @@ class TestVersionCRUD:
             package_name,
             b"version with info",
             version="1.0.0",
-            tag="release",
         )
 
         response = integration_client.get(
@@ -166,8 +163,6 @@ class TestVersionCRUD:
         assert version_item is not None
         assert "size" in version_item
         assert "artifact_id" in version_item
-        assert "tags" in version_item
-        assert "release" in version_item["tags"]
 
     @pytest.mark.integration
     def test_get_version(self, integration_client, test_package):
@@ -272,94 +267,9 @@ class TestVersionDownload:
             follow_redirects=False,
         )
 
-        # Should resolve version first (before tag)
+        # Should resolve version
         assert response.status_code in [200, 302, 307]
 
-    @pytest.mark.integration
-    def test_version_takes_precedence_over_tag(self, integration_client, test_package):
-        """Test that version is checked before tag when resolving refs."""
-        project_name, package_name = test_package
-
-        # Upload with version "1.0"
-        version_result = upload_test_file(
-            integration_client,
-            project_name,
-            package_name,
-            b"version content",
-            version="1.0",
-        )
-
-        # Create a tag with the same name "1.0" pointing to different artifact
-        tag_result = upload_test_file(
-            integration_client,
-            project_name,
-            package_name,
-            b"tag content different",
-            tag="1.0",
-        )
-
-        # Download by "1.0" should resolve to version, not tag
-        # Since version:1.0 artifact was uploaded first
-        response = integration_client.get(
-            f"/api/v1/project/{project_name}/{package_name}/+/1.0",
-            follow_redirects=False,
-        )
-
-        assert response.status_code in [200, 302, 307]
-
-
-class TestTagVersionEnrichment:
-    """Tests for tag responses including version information."""
-
-    @pytest.mark.integration
-    def test_tag_response_includes_version(self, integration_client, test_package):
-        """Test that tag responses include version of the artifact."""
-        project_name, package_name = test_package
-
-        # Upload with both version and tag
-        upload_test_file(
-            integration_client,
-            project_name,
-            package_name,
-            b"enriched tag test",
-            version="7.0.0",
-            tag="stable",
-        )
-
-        # Get tag and check version field
-        response = integration_client.get(
-            f"/api/v1/project/{project_name}/{package_name}/tags/stable"
-        )
-        assert response.status_code == 200
-
-        data = response.json()
-        assert data["name"] == "stable"
-        assert data["version"] == "7.0.0"
-
-    @pytest.mark.integration
-    def test_tag_list_includes_versions(self, integration_client, test_package):
-        """Test that tag list responses include version for each tag."""
-        project_name, package_name = test_package
-
-        upload_test_file(
-            integration_client,
-            project_name,
-            package_name,
-            b"list version test",
-            version="8.0.0",
-            tag="latest",
-        )
-
-        response = integration_client.get(
-            f"/api/v1/project/{project_name}/{package_name}/tags"
-        )
-        assert response.status_code == 200
-
-        data = response.json()
-        tag_item = next((t for t in data["items"] if t["name"] == "latest"), None)
-        assert tag_item is not None
-        assert tag_item.get("version") == "8.0.0"
-
 
 class TestVersionPagination:
     """Tests for version listing pagination and sorting."""
@@ -39,7 +39,7 @@ class TestDependencySchema:
         response = integration_client.post(
             f"/api/v1/project/{project_name}/{package_name}/upload",
             files=files,
-            data={"tag": f"v1.0.0-{unique_test_id}"},
+            data={"version": f"v1.0.0-{unique_test_id}"},
         )
         assert response.status_code == 200
 
@@ -59,29 +59,17 @@ class TestDependencySchema:
            integration_client.delete(f"/api/v1/projects/{dep_project_name}")
 
     @pytest.mark.integration
-    def test_dependency_requires_version_or_tag(self, integration_client):
-        """Test that dependency must have either version or tag, not both or neither."""
+    def test_dependency_requires_version(self, integration_client):
+        """Test that dependency requires version."""
         from app.schemas import DependencyCreate
 
-        # Test: neither version nor tag
-        with pytest.raises(ValidationError) as exc_info:
+        # Test: missing version
+        with pytest.raises(ValidationError):
             DependencyCreate(project="proj", package="pkg")
-        assert "Either 'version' or 'tag' must be specified" in str(exc_info.value)
-
-        # Test: both version and tag
-        with pytest.raises(ValidationError) as exc_info:
-            DependencyCreate(project="proj", package="pkg", version="1.0.0", tag="stable")
-        assert "Cannot specify both 'version' and 'tag'" in str(exc_info.value)
 
         # Test: valid with version
         dep = DependencyCreate(project="proj", package="pkg", version="1.0.0")
         assert dep.version == "1.0.0"
-        assert dep.tag is None
-
-        # Test: valid with tag
-        dep = DependencyCreate(project="proj", package="pkg", tag="stable")
-        assert dep.tag == "stable"
-        assert dep.version is None
 
     @pytest.mark.integration
     def test_dependency_unique_constraint(
@@ -126,7 +114,7 @@ class TestEnsureFileParsing:
         response = integration_client.post(
             f"/api/v1/project/{project_name}/{package_name}/upload",
             files=files,
-            data={"tag": f"v1.0.0-{unique_test_id}"},
+            data={"version": f"v1.0.0-{unique_test_id}"},
         )
         assert response.status_code == 200
         data = response.json()
@@ -162,7 +150,7 @@ class TestEnsureFileParsing:
         response = integration_client.post(
             f"/api/v1/project/{project_name}/{package_name}/upload",
             files=files,
-            data={"tag": f"v1.0.0-{unique_test_id}"},
+            data={"version": f"v1.0.0-{unique_test_id}"},
         )
         assert response.status_code == 400
         assert "Invalid ensure file" in response.json().get("detail", "")
@@ -188,7 +176,7 @@ class TestEnsureFileParsing:
         response = integration_client.post(
             f"/api/v1/project/{project_name}/{package_name}/upload",
             files=files,
-            data={"tag": f"v1.0.0-{unique_test_id}"},
+            data={"version": f"v1.0.0-{unique_test_id}"},
        )
         assert response.status_code == 400
         assert "Project" in response.json().get("detail", "")
@@ -208,7 +196,7 @@ class TestEnsureFileParsing:
         response = integration_client.post(
             f"/api/v1/project/{project_name}/{package_name}/upload",
             files=files,
-            data={"tag": f"v1.0.0-nodeps-{unique_test_id}"},
+            data={"version": f"v1.0.0-nodeps-{unique_test_id}"},
         )
         assert response.status_code == 200
 
@@ -226,13 +214,14 @@ class TestEnsureFileParsing:
         assert response.status_code == 200
 
         try:
+            # Test with missing version field (version is now required)
             ensure_content = yaml.dump({
                 "dependencies": [
-                    {"project": dep_project_name, "package": "pkg", "version": "1.0.0", "tag": "stable"}
+                    {"project": dep_project_name, "package": "pkg"}  # Missing version
                 ]
             })
 
-            content = unique_content("test-both", unique_test_id, "constraint")
+            content = unique_content("test-missing-version", unique_test_id, "constraint")
             files = {
                 "file": ("test.tar.gz", BytesIO(content), "application/gzip"),
                 "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
@@ -240,11 +229,10 @@ class TestEnsureFileParsing:
             response = integration_client.post(
                 f"/api/v1/project/{project_name}/{package_name}/upload",
                 files=files,
-                data={"tag": f"v1.0.0-{unique_test_id}"},
+                data={"version": f"v1.0.0-{unique_test_id}"},
             )
             assert response.status_code == 400
-            assert "both" in response.json().get("detail", "").lower() or \
-                "version" in response.json().get("detail", "").lower()
+            assert "version" in response.json().get("detail", "").lower()
         finally:
             integration_client.delete(f"/api/v1/projects/{dep_project_name}")
 
@@ -271,7 +259,7 @@ class TestDependencyQueryEndpoints:
             ensure_content = yaml.dump({
                 "dependencies": [
                     {"project": dep_project_name, "package": "lib-a", "version": "1.0.0"},
-                    {"project": dep_project_name, "package": "lib-b", "tag": "stable"},
+                    {"project": dep_project_name, "package": "lib-b", "version": "2.0.0"},
                 ]
             })
 
@@ -283,7 +271,7 @@ class TestDependencyQueryEndpoints:
             response = integration_client.post(
                 f"/api/v1/project/{project_name}/{package_name}/upload",
                 files=files,
-                data={"tag": f"v2.0.0-{unique_test_id}"},
+                data={"version": f"v2.0.0-{unique_test_id}"},
             )
             assert response.status_code == 200
             artifact_id = response.json()["artifact_id"]
@@ -299,10 +287,8 @@ class TestDependencyQueryEndpoints:
             deps = {d["package"]: d for d in data["dependencies"]}
             assert "lib-a" in deps
             assert deps["lib-a"]["version"] == "1.0.0"
-            assert deps["lib-a"]["tag"] is None
             assert "lib-b" in deps
-            assert deps["lib-b"]["tag"] == "stable"
-            assert deps["lib-b"]["version"] is None
+            assert deps["lib-b"]["version"] == "2.0.0"
 
         finally:
             integration_client.delete(f"/api/v1/projects/{dep_project_name}")
@@ -336,7 +322,7 @@ class TestDependencyQueryEndpoints:
         response = integration_client.post(
             f"/api/v1/project/{project_name}/{package_name}/upload",
             files=files,
-            data={"tag": tag_name},
+            data={"version": tag_name},
         )
         assert response.status_code == 200
 
@@ -381,7 +367,7 @@ class TestDependencyQueryEndpoints:
         response = integration_client.post(
             f"/api/v1/project/{dep_project_name}/target-lib/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
         )
         assert response.status_code == 200
 
@@ -400,7 +386,7 @@ class TestDependencyQueryEndpoints:
         response = integration_client.post(
             f"/api/v1/project/{project_name}/{package_name}/upload",
             files=files,
-            data={"tag": f"v4.0.0-{unique_test_id}"},
+            data={"version": f"v4.0.0-{unique_test_id}"},
         )
         assert response.status_code == 200
 
@@ -419,7 +405,6 @@ class TestDependencyQueryEndpoints:
         for dep in data["dependents"]:
             if dep["project"] == project_name:
                 found = True
-                assert dep["constraint_type"] == "version"
                 assert dep["constraint_value"] == "1.0.0"
                 break
         assert found, "Our package should be in the dependents list"
@@ -442,7 +427,7 @@ class TestDependencyQueryEndpoints:
         response = integration_client.post(
             f"/api/v1/project/{project_name}/{package_name}/upload",
             files=files,
-            data={"tag": f"v5.0.0-nodeps-{unique_test_id}"},
+            data={"version": f"v5.0.0-nodeps-{unique_test_id}"},
         )
         assert response.status_code == 200
         artifact_id = response.json()["artifact_id"]
@@ -482,7 +467,7 @@ class TestDependencyResolution:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_c}/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
         )
         assert response.status_code == 200
 
@@ -500,7 +485,7 @@ class TestDependencyResolution:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_b}/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
         )
         assert response.status_code == 200
 
@@ -518,7 +503,7 @@ class TestDependencyResolution:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_a}/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
         )
         assert response.status_code == 200
 
@@ -566,7 +551,7 @@ class TestDependencyResolution:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_d}/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
         )
         assert response.status_code == 200
 
@@ -584,7 +569,7 @@ class TestDependencyResolution:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_b}/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
         )
         assert response.status_code == 200
 
@@ -602,7 +587,7 @@ class TestDependencyResolution:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_c}/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
         )
         assert response.status_code == 200
 
@@ -621,7 +606,7 @@ class TestDependencyResolution:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_a}/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
        )
         assert response.status_code == 200
 
@@ -663,7 +648,7 @@ class TestDependencyResolution:
         response = integration_client.post(
             f"/api/v1/project/{project_name}/{package_name}/upload",
             files=files,
-            data={"tag": f"solo-{unique_test_id}"},
+            data={"version": f"solo-{unique_test_id}"},
         )
         assert response.status_code == 200
 
@@ -698,17 +683,21 @@ class TestDependencyResolution:
         response = integration_client.post(
             f"/api/v1/project/{project_name}/{package_name}/upload",
             files=files,
-            data={"tag": f"missing-dep-{unique_test_id}"},
+            data={"version": f"missing-dep-{unique_test_id}"},
         )
         # Should fail at upload time since package doesn't exist
         # OR succeed at upload but fail at resolution
         # Depending on implementation choice
         if response.status_code == 200:
-            # Resolution should fail
+            # Resolution should return missing dependencies
            response = integration_client.get(
                 f"/api/v1/project/{project_name}/{package_name}/+/missing-dep-{unique_test_id}/resolve"
             )
-            assert response.status_code == 404
+            # Expect 200 with missing dependencies listed
+            assert response.status_code == 200
+            data = response.json()
+            # The missing dependency should be in the 'missing' list
+            assert len(data.get("missing", [])) >= 1
 
 
 class TestCircularDependencyDetection:
@@ -736,7 +725,7 @@ class TestCircularDependencyDetection:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_a}/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
         )
         assert response.status_code == 200
 
@@ -754,7 +743,7 @@ class TestCircularDependencyDetection:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_b}/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
         )
         assert response.status_code == 200
 
@@ -772,7 +761,7 @@ class TestCircularDependencyDetection:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_a}/upload",
             files=files,
-            data={"tag": "2.0.0"},
+            data={"version": "2.0.0"},
         )
         # Should be rejected with 400 (circular dependency)
         assert response.status_code == 400
@@ -807,7 +796,7 @@ class TestCircularDependencyDetection:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_a}/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
        )
         assert response.status_code == 200
 
@@ -825,7 +814,7 @@ class TestCircularDependencyDetection:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_b}/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
         )
         assert response.status_code == 200
 
@@ -843,7 +832,7 @@ class TestCircularDependencyDetection:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_c}/upload",
             files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
         )
         assert response.status_code == 200
 
@@ -861,7 +850,7 @@ class TestCircularDependencyDetection:
         response = integration_client.post(
             f"/api/v1/project/{test_project}/{pkg_a}/upload",
             files=files,
-            data={"tag": "2.0.0"},
+            data={"version": "2.0.0"},
         )
         assert response.status_code == 400
         data = response.json()
@@ -884,10 +873,14 @@ class TestCircularDependencyDetection:
 
 
 class TestConflictDetection:
-    """Tests for #81: Dependency Conflict Detection and Reporting"""
+    """Tests for dependency conflict handling.
+
+    The resolver uses "first version wins" strategy for version conflicts,
+    allowing resolution to succeed rather than failing with an error.
+    """
 
     @pytest.mark.integration
-    def test_detect_version_conflict(
+    def test_version_conflict_uses_first_version(
         self, integration_client, test_project, unique_test_id
     ):
         """Test conflict when two deps require different versions of same package."""
@@ -910,7 +903,7 @@ class TestConflictDetection:
  response = integration_client.post(
  f"/api/v1/project/{test_project}/{pkg_common}/upload",
  files=files,
- data={"tag": "1.0.0"},
+ data={"version": "1.0.0"},
  )
  assert response.status_code == 200

@@ -920,7 +913,7 @@ class TestConflictDetection:
  response = integration_client.post(
  f"/api/v1/project/{test_project}/{pkg_common}/upload",
  files=files,
- data={"tag": "2.0.0"},
+ data={"version": "2.0.0"},
  )
  assert response.status_code == 200

@@ -938,7 +931,7 @@ class TestConflictDetection:
  response = integration_client.post(
  f"/api/v1/project/{test_project}/{pkg_lib_a}/upload",
  files=files,
- data={"tag": "1.0.0"},
+ data={"version": "1.0.0"},
  )
  assert response.status_code == 200

@@ -956,7 +949,7 @@ class TestConflictDetection:
  response = integration_client.post(
  f"/api/v1/project/{test_project}/{pkg_lib_b}/upload",
  files=files,
- data={"tag": "1.0.0"},
+ data={"version": "1.0.0"},
  )
  assert response.status_code == 200

@@ -975,25 +968,23 @@ class TestConflictDetection:
  response = integration_client.post(
  f"/api/v1/project/{test_project}/{pkg_app}/upload",
  files=files,
- data={"tag": "1.0.0"},
+ data={"version": "1.0.0"},
  )
  assert response.status_code == 200

- # Try to resolve app - should report conflict
+ # Try to resolve app - with lenient conflict handling, this should succeed
+ # The resolver uses "first version wins" strategy for conflicting versions
  response = integration_client.get(
  f"/api/v1/project/{test_project}/{pkg_app}/+/1.0.0/resolve"
  )
- assert response.status_code == 409
+ assert response.status_code == 200
  data = response.json()
- # Error details are nested in "detail" for HTTPException
- detail = data.get("detail", data)
- assert detail.get("error") == "dependency_conflict"
- assert len(detail.get("conflicts", [])) > 0

- # Verify conflict details
- conflict = detail["conflicts"][0]
- assert conflict["package"] == pkg_common
- assert len(conflict["requirements"]) == 2
+ # Resolution should succeed with first-encountered version of common
+ assert data["artifact_count"] >= 1
+ # Find the common package in resolved list
+ common_resolved = [r for r in data["resolved"] if r["package"] == pkg_common]
+ assert len(common_resolved) == 1  # Only one version should be included

  finally:
  for pkg in [pkg_app, pkg_lib_a, pkg_lib_b, pkg_common]:
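Aside for readers: the assertions above rely on the lenient behaviour described in the class docstring. Below is a minimal, self-contained sketch of a "first version wins" pass; the function name and tuple shapes are illustrative assumptions, not the service's actual resolver.

# Hypothetical sketch of "first version wins" conflict handling (shapes assumed).
def resolve_first_version_wins(requirements):
    """requirements: iterable of (project, package, version) in traversal order."""
    resolved = {}   # (project, package) -> version chosen
    conflicts = []  # recorded for reporting, but they do not fail resolution
    for project, package, version in requirements:
        key = (project, package)
        if key not in resolved:
            resolved[key] = version  # first version encountered wins
        elif resolved[key] != version:
            conflicts.append((key, resolved[key], version))  # noted, not fatal
    return resolved, conflicts

# Example: two libraries requiring different versions of the same package.
resolved, conflicts = resolve_first_version_wins([
    ("proj", "common", "1.0.0"),
    ("proj", "common", "2.0.0"),
])
assert resolved[("proj", "common")] == "1.0.0"
assert len(conflicts) == 1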
@@ -1023,7 +1014,7 @@ class TestConflictDetection:
  response = integration_client.post(
  f"/api/v1/project/{test_project}/{pkg_common}/upload",
  files=files,
- data={"tag": "1.0.0"},
+ data={"version": "1.0.0"},
  )
  assert response.status_code == 200

@@ -1042,7 +1033,7 @@ class TestConflictDetection:
  response = integration_client.post(
  f"/api/v1/project/{test_project}/{lib_pkg}/upload",
  files=files,
- data={"tag": "1.0.0"},
+ data={"version": "1.0.0"},
  )
  assert response.status_code == 200

@@ -1061,7 +1052,7 @@ class TestConflictDetection:
  response = integration_client.post(
  f"/api/v1/project/{test_project}/{pkg_app}/upload",
  files=files,
- data={"tag": "1.0.0"},
+ data={"version": "1.0.0"},
  )
  assert response.status_code == 200

@@ -1078,3 +1069,277 @@ class TestConflictDetection:
  finally:
  for pkg in [pkg_app, pkg_lib_a, pkg_lib_b, pkg_common]:
  integration_client.delete(f"/api/v1/project/{test_project}/packages/{pkg}")
+
+
+ class TestAutoFetchDependencies:
+     """Tests for auto-fetch functionality in dependency resolution.
+
+     These tests verify:
+     - Resolution with auto_fetch=true (default) fetches missing dependencies from upstream
+     - Resolution with auto_fetch=false skips network calls for fast resolution
+     - Proper handling of missing/non-existent packages
+     - Response schema includes fetched artifacts list
+     """
+
+     @pytest.mark.integration
+     def test_resolve_auto_fetch_true_is_default(
+         self, integration_client, test_package, unique_test_id
+     ):
+         """Test that auto_fetch=true is the default (no fetch needed when all deps cached)."""
+         project_name, package_name = test_package
+
+         # Upload a simple artifact without dependencies
+         content = unique_content("autofetch-default", unique_test_id, "nodeps")
+         files = {"file": ("default.tar.gz", BytesIO(content), "application/gzip")}
+         response = integration_client.post(
+             f"/api/v1/project/{project_name}/{package_name}/upload",
+             files=files,
+             data={"version": f"v1.0.0-{unique_test_id}"},
+         )
+         assert response.status_code == 200
+
+         # Resolve without the auto_fetch param (auto_fetch defaults to true; nothing
+         # needs fetching here because all dependencies are already cached)
+         response = integration_client.get(
+             f"/api/v1/project/{project_name}/{package_name}/+/v1.0.0-{unique_test_id}/resolve"
+         )
+         assert response.status_code == 200
+         data = response.json()
+
+         # Should have empty fetched list
+         assert data.get("fetched", []) == []
+         assert data["artifact_count"] == 1
+
+     @pytest.mark.integration
+     def test_resolve_auto_fetch_explicit_false(
+         self, integration_client, test_package, unique_test_id
+     ):
+         """Test that auto_fetch=false works explicitly."""
+         project_name, package_name = test_package
+
+         content = unique_content("autofetch-explicit-false", unique_test_id, "nodeps")
+         files = {"file": ("explicit.tar.gz", BytesIO(content), "application/gzip")}
+         response = integration_client.post(
+             f"/api/v1/project/{project_name}/{package_name}/upload",
+             files=files,
+             data={"version": f"v2.0.0-{unique_test_id}"},
+         )
+         assert response.status_code == 200
+
+         # Resolve with explicit auto_fetch=false
+         response = integration_client.get(
+             f"/api/v1/project/{project_name}/{package_name}/+/v2.0.0-{unique_test_id}/resolve",
+             params={"auto_fetch": "false"},
+         )
+         assert response.status_code == 200
+         data = response.json()
+         assert data.get("fetched", []) == []
+
+     @pytest.mark.integration
+     def test_resolve_auto_fetch_true_no_missing_deps(
+         self, integration_client, test_project, unique_test_id
+     ):
+         """Test that auto_fetch=true works when all deps are already cached."""
+         pkg_a = f"fetch-a-{unique_test_id}"
+         pkg_b = f"fetch-b-{unique_test_id}"
+
+         for pkg in [pkg_a, pkg_b]:
+             response = integration_client.post(
+                 f"/api/v1/project/{test_project}/packages",
+                 json={"name": pkg}
+             )
+             assert response.status_code == 200
+
+         try:
+             # Upload B (no deps)
+             content_b = unique_content("B", unique_test_id, "fetch")
+             files = {"file": ("b.tar.gz", BytesIO(content_b), "application/gzip")}
+             response = integration_client.post(
+                 f"/api/v1/project/{test_project}/{pkg_b}/upload",
+                 files=files,
+                 data={"version": "1.0.0"},
+             )
+             assert response.status_code == 200
+
+             # Upload A (depends on B)
+             ensure_a = yaml.dump({
+                 "dependencies": [
+                     {"project": test_project, "package": pkg_b, "version": "1.0.0"}
+                 ]
+             })
+             content_a = unique_content("A", unique_test_id, "fetch")
+             files = {
+                 "file": ("a.tar.gz", BytesIO(content_a), "application/gzip"),
+                 "ensure": ("orchard.ensure", BytesIO(ensure_a.encode()), "application/x-yaml"),
+             }
+             response = integration_client.post(
+                 f"/api/v1/project/{test_project}/{pkg_a}/upload",
+                 files=files,
+                 data={"version": "1.0.0"},
+             )
+             assert response.status_code == 200
+
+             # Resolve with auto_fetch=true - should work since deps are cached
+             response = integration_client.get(
+                 f"/api/v1/project/{test_project}/{pkg_a}/+/1.0.0/resolve",
+                 params={"auto_fetch": "true"},
+             )
+             assert response.status_code == 200
+             data = response.json()
+
+             # Should resolve successfully
+             assert data["artifact_count"] == 2
+             # Nothing fetched since everything was cached
+             assert len(data.get("fetched", [])) == 0
+             # No missing deps
+             assert len(data.get("missing", [])) == 0
+
+         finally:
+             for pkg in [pkg_a, pkg_b]:
+                 integration_client.delete(f"/api/v1/project/{test_project}/packages/{pkg}")
+
+     @pytest.mark.integration
+     def test_resolve_missing_dep_with_auto_fetch_false(
+         self, integration_client, test_package, unique_test_id
+     ):
+         """Test that missing deps are reported when auto_fetch=false."""
+         project_name, package_name = test_package
+
+         # Create _pypi system project if it doesn't exist
+         response = integration_client.get("/api/v1/projects/_pypi")
+         if response.status_code == 404:
+             response = integration_client.post(
+                 "/api/v1/projects",
+                 json={"name": "_pypi", "description": "System project for PyPI packages"}
+             )
+             # May fail if already exists or can't create - that's ok
+
+         # Upload artifact with dependency on _pypi package that doesn't exist locally
+         ensure_content = yaml.dump({
+             "dependencies": [
+                 {"project": "_pypi", "package": "nonexistent-pkg-xyz123", "version": ">=1.0.0"}
+             ]
+         })
+
+         content = unique_content("missing-pypi", unique_test_id, "dep")
+         files = {
+             "file": ("missing-pypi-dep.tar.gz", BytesIO(content), "application/gzip"),
+             "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
+         }
+         response = integration_client.post(
+             f"/api/v1/project/{project_name}/{package_name}/upload",
+             files=files,
+             data={"version": f"v3.0.0-{unique_test_id}"},
+         )
+         # Upload should succeed - validation is loose for system projects
+         if response.status_code == 200:
+             # Resolve without auto_fetch - should report missing
+             response = integration_client.get(
+                 f"/api/v1/project/{project_name}/{package_name}/+/v3.0.0-{unique_test_id}/resolve",
+                 params={"auto_fetch": "false"},
+             )
+             assert response.status_code == 200
+             data = response.json()
+
+             # Should have missing dependencies
+             assert len(data.get("missing", [])) >= 1
+
+             # Verify missing dependency structure
+             missing = data["missing"][0]
+             assert missing["project"] == "_pypi"
+             assert missing["package"] == "nonexistent-pkg-xyz123"
+             # Without auto_fetch, these should be false/None
+             assert missing.get("fetch_attempted", False) is False
+
+     @pytest.mark.integration
+     def test_resolve_response_schema_has_fetched_field(
+         self, integration_client, test_package, unique_test_id
+     ):
+         """Test that the resolve response always includes the fetched field."""
+         project_name, package_name = test_package
+
+         content = unique_content("schema-check", unique_test_id, "nodeps")
+         files = {"file": ("schema.tar.gz", BytesIO(content), "application/gzip")}
+         response = integration_client.post(
+             f"/api/v1/project/{project_name}/{package_name}/upload",
+             files=files,
+             data={"version": f"v4.0.0-{unique_test_id}"},
+         )
+         assert response.status_code == 200
+
+         # Check both auto_fetch modes include fetched field
+         for auto_fetch in ["false", "true"]:
+             response = integration_client.get(
+                 f"/api/v1/project/{project_name}/{package_name}/+/v4.0.0-{unique_test_id}/resolve",
+                 params={"auto_fetch": auto_fetch},
+             )
+             assert response.status_code == 200
+             data = response.json()
+
+             # Required fields
+             assert "requested" in data
+             assert "resolved" in data
+             assert "missing" in data
+             assert "fetched" in data  # New field
+             assert "total_size" in data
+             assert "artifact_count" in data
+
+             # Types
+             assert isinstance(data["fetched"], list)
+             assert isinstance(data["missing"], list)
+
+     @pytest.mark.integration
+     def test_missing_dep_schema_has_fetch_fields(
+         self, integration_client, test_package, unique_test_id
+     ):
+         """Test that missing dependency entries have fetch_attempted and fetch_error fields."""
+         project_name, package_name = test_package
+
+         # Create a dependency on a non-existent package in a real project
+         dep_project_name = f"dep-test-{unique_test_id}"
+         response = integration_client.post(
+             "/api/v1/projects", json={"name": dep_project_name}
+         )
+         assert response.status_code == 200
+
+         try:
+             ensure_content = yaml.dump({
+                 "dependencies": [
+                     {"project": dep_project_name, "package": "nonexistent-pkg", "version": "1.0.0"}
+                 ]
+             })
+
+             content = unique_content("missing-schema", unique_test_id, "check")
+             files = {
+                 "file": ("missing-schema.tar.gz", BytesIO(content), "application/gzip"),
+                 "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
+             }
+             response = integration_client.post(
+                 f"/api/v1/project/{project_name}/{package_name}/upload",
+                 files=files,
+                 data={"version": f"v5.0.0-{unique_test_id}"},
+             )
+             assert response.status_code == 200
+
+             # Resolve
+             response = integration_client.get(
+                 f"/api/v1/project/{project_name}/{package_name}/+/v5.0.0-{unique_test_id}/resolve",
+                 params={"auto_fetch": "true"},
+             )
+             assert response.status_code == 200
+             data = response.json()
+
+             # Should have missing dependencies
+             assert len(data.get("missing", [])) >= 1
+
+             # Check schema for missing dependency
+             missing = data["missing"][0]
+             assert "project" in missing
+             assert "package" in missing
+             assert "constraint" in missing
+             assert "required_by" in missing
+             # New fields
+             assert "fetch_attempted" in missing
+             assert "fetch_error" in missing  # May be None
+
+         finally:
+             integration_client.delete(f"/api/v1/projects/{dep_project_name}")
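To make the schema assertions in the tests above easier to read, here is an illustrative example of a resolve response carrying all six required fields. The field names come straight from the assertions; every value, including the shape of "requested", is invented for illustration only.

# Illustrative resolve response (field names from the tests; values are made up).
example_response = {
    "requested": {"project": "demo", "package": "app", "version": "1.0.0"},
    "resolved": [
        {"project": "demo", "package": "app", "version": "1.0.0"},
        {"project": "demo", "package": "lib", "version": "2.1.0"},
    ],
    "missing": [],      # unmet dependencies, each with fetch_attempted / fetch_error
    "fetched": [],      # artifacts pulled from upstream during this resolution
    "total_size": 2048,
    "artifact_count": 2,
}

for field in ("requested", "resolved", "missing", "fetched", "total_size", "artifact_count"):
    assert field in example_response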
@@ -26,16 +26,16 @@ def upload_test_file(integration_client):
  Factory fixture to upload a test file and return its artifact ID.

  Usage:
-     artifact_id = upload_test_file(project, package, content, tag="v1.0")
+     artifact_id = upload_test_file(project, package, content, version="v1.0")
  """

- def _upload(project_name: str, package_name: str, content: bytes, tag: str = None):
+ def _upload(project_name: str, package_name: str, content: bytes, version: str = None):
  files = {
  "file": ("test-file.bin", io.BytesIO(content), "application/octet-stream")
  }
  data = {}
- if tag:
+ if version:
- data["tag"] = tag
+ data["version"] = version

  response = integration_client.post(
  f"/api/v1/project/{project_name}/{package_name}/upload",
@@ -66,7 +66,7 @@ class TestDownloadChecksumHeaders:

  # Upload file
  artifact_id = upload_test_file(
- project_name, package_name, content, tag="sha256-header-test"
+ project_name, package_name, content, version="sha256-header-test"
  )

  # Download with proxy mode
@@ -88,7 +88,7 @@ class TestDownloadChecksumHeaders:
  content = b"Content for ETag header test"

  artifact_id = upload_test_file(
- project_name, package_name, content, tag="etag-test"
+ project_name, package_name, content, version="etag-test"
  )

  response = integration_client.get(
@@ -110,7 +110,7 @@ class TestDownloadChecksumHeaders:
  content = b"Content for Digest header test"
  sha256 = hashlib.sha256(content).hexdigest()

- upload_test_file(project_name, package_name, content, tag="digest-test")
+ upload_test_file(project_name, package_name, content, version="digest-test")

  response = integration_client.get(
  f"/api/v1/project/{project_name}/{package_name}/+/digest-test",
@@ -137,7 +137,7 @@ class TestDownloadChecksumHeaders:
  project_name, package_name = test_package
  content = b"Content for X-Content-Length test"

- upload_test_file(project_name, package_name, content, tag="content-length-test")
+ upload_test_file(project_name, package_name, content, version="content-length-test")

  response = integration_client.get(
  f"/api/v1/project/{project_name}/{package_name}/+/content-length-test",
@@ -156,7 +156,7 @@ class TestDownloadChecksumHeaders:
  project_name, package_name = test_package
  content = b"Content for X-Verified false test"

- upload_test_file(project_name, package_name, content, tag="verified-false-test")
+ upload_test_file(project_name, package_name, content, version="verified-false-test")

  response = integration_client.get(
  f"/api/v1/project/{project_name}/{package_name}/+/verified-false-test",
@@ -184,7 +184,7 @@ class TestPreVerificationMode:
  project_name, package_name = test_package
  content = b"Content for pre-verification success test"

- upload_test_file(project_name, package_name, content, tag="pre-verify-success")
+ upload_test_file(project_name, package_name, content, version="pre-verify-success")

  response = integration_client.get(
  f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-success",
@@ -205,7 +205,7 @@ class TestPreVerificationMode:
  # Use binary content to verify no corruption
  content = bytes(range(256)) * 10  # 2560 bytes of all byte values

- upload_test_file(project_name, package_name, content, tag="pre-verify-content")
+ upload_test_file(project_name, package_name, content, version="pre-verify-content")

  response = integration_client.get(
  f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-content",
@@ -233,7 +233,7 @@ class TestStreamingVerificationMode:
  content = b"Content for streaming verification success test"

  upload_test_file(
- project_name, package_name, content, tag="stream-verify-success"
+ project_name, package_name, content, version="stream-verify-success"
  )

  response = integration_client.get(
@@ -255,7 +255,7 @@ class TestStreamingVerificationMode:
  # 100KB of content
  content = b"x" * (100 * 1024)

- upload_test_file(project_name, package_name, content, tag="stream-verify-large")
+ upload_test_file(project_name, package_name, content, version="stream-verify-large")

  response = integration_client.get(
  f"/api/v1/project/{project_name}/{package_name}/+/stream-verify-large",
@@ -283,7 +283,7 @@ class TestHeadRequestHeaders:
  content = b"Content for HEAD SHA256 test"

  artifact_id = upload_test_file(
- project_name, package_name, content, tag="head-sha256-test"
+ project_name, package_name, content, version="head-sha256-test"
  )

  response = integration_client.head(
@@ -303,7 +303,7 @@ class TestHeadRequestHeaders:
  content = b"Content for HEAD ETag test"

  artifact_id = upload_test_file(
- project_name, package_name, content, tag="head-etag-test"
+ project_name, package_name, content, version="head-etag-test"
  )

  response = integration_client.head(
@@ -322,7 +322,7 @@ class TestHeadRequestHeaders:
  project_name, package_name = test_package
  content = b"Content for HEAD Digest test"

- upload_test_file(project_name, package_name, content, tag="head-digest-test")
+ upload_test_file(project_name, package_name, content, version="head-digest-test")

  response = integration_client.head(
  f"/api/v1/project/{project_name}/{package_name}/+/head-digest-test"
@@ -340,7 +340,7 @@ class TestHeadRequestHeaders:
  project_name, package_name = test_package
  content = b"Content for HEAD Content-Length test"

- upload_test_file(project_name, package_name, content, tag="head-length-test")
+ upload_test_file(project_name, package_name, content, version="head-length-test")

  response = integration_client.head(
  f"/api/v1/project/{project_name}/{package_name}/+/head-length-test"
@@ -356,7 +356,7 @@ class TestHeadRequestHeaders:
  project_name, package_name = test_package
  content = b"Content for HEAD no-body test"

- upload_test_file(project_name, package_name, content, tag="head-no-body-test")
+ upload_test_file(project_name, package_name, content, version="head-no-body-test")

  response = integration_client.head(
  f"/api/v1/project/{project_name}/{package_name}/+/head-no-body-test"
@@ -382,7 +382,7 @@ class TestRangeRequestHeaders:
  project_name, package_name = test_package
  content = b"Content for range request checksum header test"

- upload_test_file(project_name, package_name, content, tag="range-checksum-test")
+ upload_test_file(project_name, package_name, content, version="range-checksum-test")

  response = integration_client.get(
  f"/api/v1/project/{project_name}/{package_name}/+/range-checksum-test",
@@ -412,7 +412,7 @@ class TestClientSideVerification:
  project_name, package_name = test_package
  content = b"Content for client-side verification test"

- upload_test_file(project_name, package_name, content, tag="client-verify-test")
+ upload_test_file(project_name, package_name, content, version="client-verify-test")

  response = integration_client.get(
  f"/api/v1/project/{project_name}/{package_name}/+/client-verify-test",
@@ -438,7 +438,7 @@ class TestClientSideVerification:
  project_name, package_name = test_package
  content = b"Content for Digest header verification"

- upload_test_file(project_name, package_name, content, tag="digest-verify-test")
+ upload_test_file(project_name, package_name, content, version="digest-verify-test")

  response = integration_client.get(
  f"/api/v1/project/{project_name}/{package_name}/+/digest-verify-test",
backend/tests/test_upstream_caching.py (new file, 1905 lines; file diff suppressed because it is too large)

backend/tests/unit/test_cache_service.py (new file, 374 lines)
@@ -0,0 +1,374 @@
|
|||||||
|
"""Tests for CacheService."""
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import MagicMock, AsyncMock, patch
|
||||||
|
|
||||||
|
|
||||||
|
class TestCacheCategory:
|
||||||
|
"""Tests for cache category enum."""
|
||||||
|
|
||||||
|
@pytest.mark.unit
|
||||||
|
def test_immutable_categories_have_no_ttl(self):
|
||||||
|
"""Immutable categories should return None for TTL."""
|
||||||
|
from app.cache_service import CacheCategory, get_category_ttl
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings()
|
||||||
|
|
||||||
|
assert get_category_ttl(CacheCategory.ARTIFACT_METADATA, settings) is None
|
||||||
|
assert get_category_ttl(CacheCategory.ARTIFACT_DEPENDENCIES, settings) is None
|
||||||
|
assert get_category_ttl(CacheCategory.DEPENDENCY_RESOLUTION, settings) is None
|
||||||
|
|
||||||
|
@pytest.mark.unit
|
||||||
|
def test_mutable_categories_have_ttl(self):
|
||||||
|
"""Mutable categories should return configured TTL."""
|
||||||
|
from app.cache_service import CacheCategory, get_category_ttl
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(
|
||||||
|
cache_ttl_index=300,
|
||||||
|
cache_ttl_upstream=3600,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert get_category_ttl(CacheCategory.PACKAGE_INDEX, settings) == 300
|
||||||
|
assert get_category_ttl(CacheCategory.UPSTREAM_SOURCES, settings) == 3600
|
||||||
|
|
||||||
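A rough sketch of the TTL policy these two tests pin down, under the assumption (taken from the mocked-Redis tests further below) that immutable categories are written with plain SET and mutable ones with SETEX. The category strings and default values here are placeholders, not the app's real configuration.

# Hypothetical sketch of TTL selection and the write path (names and values assumed).
from typing import Optional

IMMUTABLE = {"artifact", "artifact_deps", "resolution"}  # content-addressed data never expires

def get_ttl(category: str, ttl_index: int = 300, ttl_upstream: int = 3600) -> Optional[int]:
    if category in IMMUTABLE:
        return None  # no TTL: cache indefinitely
    return {"index": ttl_index, "upstream": ttl_upstream}.get(category, ttl_index)

async def cache_set(redis, category: str, key: str, value: bytes) -> None:
    ttl = get_ttl(category)
    if ttl is None:
        await redis.set(key, value)          # immutable: plain SET
    else:
        await redis.setex(key, ttl, value)   # mutable: SET with expiry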
|
|
||||||
|
class TestCacheService:
|
||||||
|
"""Tests for Redis cache service."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_disabled_cache_returns_none(self):
|
||||||
|
"""When Redis disabled, get() should return None."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=False)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
await cache.startup()
|
||||||
|
|
||||||
|
result = await cache.get(CacheCategory.PACKAGE_INDEX, "test-key")
|
||||||
|
|
||||||
|
assert result is None
|
||||||
|
await cache.shutdown()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_disabled_cache_set_is_noop(self):
|
||||||
|
"""When Redis disabled, set() should be a no-op."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=False)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
await cache.startup()
|
||||||
|
|
||||||
|
# Should not raise
|
||||||
|
await cache.set(CacheCategory.PACKAGE_INDEX, "test-key", b"test-value")
|
||||||
|
|
||||||
|
await cache.shutdown()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_cache_key_namespacing(self):
|
||||||
|
"""Cache keys should be properly namespaced."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
|
||||||
|
key = CacheService._make_key(CacheCategory.PACKAGE_INDEX, "pypi", "numpy")
|
||||||
|
|
||||||
|
assert key == "orchard:index:pypi:numpy"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_ping_returns_false_when_disabled(self):
|
||||||
|
"""ping() should return False when Redis is disabled."""
|
||||||
|
from app.cache_service import CacheService
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=False)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
await cache.startup()
|
||||||
|
|
||||||
|
result = await cache.ping()
|
||||||
|
|
||||||
|
assert result is False
|
||||||
|
await cache.shutdown()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_enabled_property(self):
|
||||||
|
"""enabled property should reflect Redis state."""
|
||||||
|
from app.cache_service import CacheService
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=False)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
|
||||||
|
assert cache.enabled is False
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_delete_is_noop_when_disabled(self):
|
||||||
|
"""delete() should be a no-op when Redis is disabled."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=False)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
await cache.startup()
|
||||||
|
|
||||||
|
# Should not raise
|
||||||
|
await cache.delete(CacheCategory.PACKAGE_INDEX, "test-key")
|
||||||
|
|
||||||
|
await cache.shutdown()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_invalidate_pattern_returns_zero_when_disabled(self):
|
||||||
|
"""invalidate_pattern() should return 0 when Redis is disabled."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=False)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
await cache.startup()
|
||||||
|
|
||||||
|
result = await cache.invalidate_pattern(CacheCategory.PACKAGE_INDEX)
|
||||||
|
|
||||||
|
assert result == 0
|
||||||
|
await cache.shutdown()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_startup_already_started(self):
|
||||||
|
"""startup() should be idempotent."""
|
||||||
|
from app.cache_service import CacheService
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=False)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
await cache.startup()
|
||||||
|
await cache.startup() # Should not raise
|
||||||
|
|
||||||
|
assert cache._started is True
|
||||||
|
await cache.shutdown()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_shutdown_not_started(self):
|
||||||
|
"""shutdown() should handle not-started state."""
|
||||||
|
from app.cache_service import CacheService
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=False)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
|
||||||
|
# Should not raise
|
||||||
|
await cache.shutdown()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_make_key_with_default_protocol(self):
|
||||||
|
"""_make_key should work with default protocol."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
|
||||||
|
key = CacheService._make_key(CacheCategory.ARTIFACT_METADATA, "default", "abc123")
|
||||||
|
|
||||||
|
assert key == "orchard:artifact:default:abc123"
|
||||||
|
|
||||||
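As a reading aid, here is a minimal key builder consistent with the orchard:<category>:<protocol>:<key> layout asserted in these two tests. The enum values are inferred from the asserted keys only and may not match the module's actual definitions.

from enum import Enum

class CacheCategory(str, Enum):
    # Values assumed from the asserted keys ("orchard:index:...", "orchard:artifact:...").
    PACKAGE_INDEX = "index"
    ARTIFACT_METADATA = "artifact"

def make_key(category: CacheCategory, protocol: str, key: str) -> str:
    """Namespace cache keys as orchard:<category>:<protocol>:<key>."""
    return f"orchard:{category.value}:{protocol}:{key}"

assert make_key(CacheCategory.PACKAGE_INDEX, "pypi", "numpy") == "orchard:index:pypi:numpy"
assert make_key(CacheCategory.ARTIFACT_METADATA, "default", "abc123") == "orchard:artifact:default:abc123"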
|
|
||||||
|
class TestCacheServiceWithMockedRedis:
|
||||||
|
"""Tests for CacheService with mocked Redis client."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_get_returns_cached_value(self):
|
||||||
|
"""get() should return cached value when available."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=True)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
|
||||||
|
# Mock the redis client
|
||||||
|
mock_redis = AsyncMock()
|
||||||
|
mock_redis.get.return_value = b"cached-data"
|
||||||
|
cache._redis = mock_redis
|
||||||
|
cache._enabled = True
|
||||||
|
cache._started = True
|
||||||
|
|
||||||
|
result = await cache.get(CacheCategory.PACKAGE_INDEX, "test-key", "pypi")
|
||||||
|
|
||||||
|
assert result == b"cached-data"
|
||||||
|
mock_redis.get.assert_called_once_with("orchard:index:pypi:test-key")
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_set_with_ttl(self):
|
||||||
|
"""set() should use setex for mutable categories."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=True, cache_ttl_index=300)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
|
||||||
|
mock_redis = AsyncMock()
|
||||||
|
cache._redis = mock_redis
|
||||||
|
cache._enabled = True
|
||||||
|
cache._started = True
|
||||||
|
|
||||||
|
await cache.set(CacheCategory.PACKAGE_INDEX, "test-key", b"test-value", "pypi")
|
||||||
|
|
||||||
|
mock_redis.setex.assert_called_once_with(
|
||||||
|
"orchard:index:pypi:test-key", 300, b"test-value"
|
||||||
|
)
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_set_without_ttl(self):
|
||||||
|
"""set() should use set (no expiry) for immutable categories."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=True)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
|
||||||
|
mock_redis = AsyncMock()
|
||||||
|
cache._redis = mock_redis
|
||||||
|
cache._enabled = True
|
||||||
|
cache._started = True
|
||||||
|
|
||||||
|
await cache.set(
|
||||||
|
CacheCategory.ARTIFACT_METADATA, "abc123", b"metadata", "pypi"
|
||||||
|
)
|
||||||
|
|
||||||
|
mock_redis.set.assert_called_once_with(
|
||||||
|
"orchard:artifact:pypi:abc123", b"metadata"
|
||||||
|
)
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_delete_calls_redis_delete(self):
|
||||||
|
"""delete() should call Redis delete."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=True)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
|
||||||
|
mock_redis = AsyncMock()
|
||||||
|
cache._redis = mock_redis
|
||||||
|
cache._enabled = True
|
||||||
|
cache._started = True
|
||||||
|
|
||||||
|
await cache.delete(CacheCategory.PACKAGE_INDEX, "test-key", "pypi")
|
||||||
|
|
||||||
|
mock_redis.delete.assert_called_once_with("orchard:index:pypi:test-key")
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_invalidate_pattern_deletes_matching_keys(self):
|
||||||
|
"""invalidate_pattern() should delete all matching keys."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=True)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
|
||||||
|
mock_redis = AsyncMock()
|
||||||
|
|
||||||
|
# Create an async generator for scan_iter
|
||||||
|
async def mock_scan_iter(match=None):
|
||||||
|
for key in [b"orchard:index:pypi:numpy", b"orchard:index:pypi:requests"]:
|
||||||
|
yield key
|
||||||
|
|
||||||
|
mock_redis.scan_iter = mock_scan_iter
|
||||||
|
mock_redis.delete.return_value = 2
|
||||||
|
cache._redis = mock_redis
|
||||||
|
cache._enabled = True
|
||||||
|
cache._started = True
|
||||||
|
|
||||||
|
result = await cache.invalidate_pattern(CacheCategory.PACKAGE_INDEX, "*", "pypi")
|
||||||
|
|
||||||
|
assert result == 2
|
||||||
|
mock_redis.delete.assert_called_once()
|
||||||
|
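For context, a hedged sketch of the SCAN-then-DELETE style of invalidation this test mocks, written against redis-py's asyncio client; the key-prefix handling is an assumption, not the service's code.

# Hypothetical pattern invalidation: collect matching keys, then delete them in one call.
async def invalidate_pattern(redis, prefix: str, pattern: str = "*") -> int:
    keys = [key async for key in redis.scan_iter(match=f"{prefix}:{pattern}")]
    if not keys:
        return 0
    return await redis.delete(*keys)  # DEL returns the number of keys removed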
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_ping_returns_true_when_connected(self):
|
||||||
|
"""ping() should return True when Redis responds."""
|
||||||
|
from app.cache_service import CacheService
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=True)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
|
||||||
|
mock_redis = AsyncMock()
|
||||||
|
mock_redis.ping.return_value = True
|
||||||
|
cache._redis = mock_redis
|
||||||
|
cache._enabled = True
|
||||||
|
cache._started = True
|
||||||
|
|
||||||
|
result = await cache.ping()
|
||||||
|
|
||||||
|
assert result is True
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_get_handles_exception(self):
|
||||||
|
"""get() should return None and log warning on exception."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=True)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
|
||||||
|
mock_redis = AsyncMock()
|
||||||
|
mock_redis.get.side_effect = Exception("Connection lost")
|
||||||
|
cache._redis = mock_redis
|
||||||
|
cache._enabled = True
|
||||||
|
cache._started = True
|
||||||
|
|
||||||
|
result = await cache.get(CacheCategory.PACKAGE_INDEX, "test-key")
|
||||||
|
|
||||||
|
assert result is None
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_set_handles_exception(self):
|
||||||
|
"""set() should log warning on exception."""
|
||||||
|
from app.cache_service import CacheService, CacheCategory
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=True, cache_ttl_index=300)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
|
||||||
|
mock_redis = AsyncMock()
|
||||||
|
mock_redis.setex.side_effect = Exception("Connection lost")
|
||||||
|
cache._redis = mock_redis
|
||||||
|
cache._enabled = True
|
||||||
|
cache._started = True
|
||||||
|
|
||||||
|
# Should not raise
|
||||||
|
await cache.set(CacheCategory.PACKAGE_INDEX, "test-key", b"value")
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_ping_returns_false_on_exception(self):
|
||||||
|
"""ping() should return False on exception."""
|
||||||
|
from app.cache_service import CacheService
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(redis_enabled=True)
|
||||||
|
cache = CacheService(settings)
|
||||||
|
|
||||||
|
mock_redis = AsyncMock()
|
||||||
|
mock_redis.ping.side_effect = Exception("Connection lost")
|
||||||
|
cache._redis = mock_redis
|
||||||
|
cache._enabled = True
|
||||||
|
cache._started = True
|
||||||
|
|
||||||
|
result = await cache.ping()
|
||||||
|
|
||||||
|
assert result is False
|
||||||
|
|
||||||
backend/tests/unit/test_db_utils.py (new file, 167 lines)
@@ -0,0 +1,167 @@
|
|||||||
|
"""Tests for database utility functions."""
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import MagicMock, patch
|
||||||
|
|
||||||
|
|
||||||
|
class TestArtifactRepository:
|
||||||
|
"""Tests for ArtifactRepository."""
|
||||||
|
|
||||||
|
def test_batch_dependency_values_formatting(self):
|
||||||
|
"""batch_upsert_dependencies should format values correctly."""
|
||||||
|
from app.db_utils import ArtifactRepository
|
||||||
|
|
||||||
|
deps = [
|
||||||
|
("_pypi", "numpy", ">=1.21.0"),
|
||||||
|
("_pypi", "requests", "*"),
|
||||||
|
("myproject", "mylib", "==1.0.0"),
|
||||||
|
]
|
||||||
|
|
||||||
|
values = ArtifactRepository._format_dependency_values("abc123", deps)
|
||||||
|
|
||||||
|
assert len(values) == 3
|
||||||
|
assert values[0] == {
|
||||||
|
"artifact_id": "abc123",
|
||||||
|
"dependency_project": "_pypi",
|
||||||
|
"dependency_package": "numpy",
|
||||||
|
"version_constraint": ">=1.21.0",
|
||||||
|
}
|
||||||
|
assert values[2]["dependency_project"] == "myproject"
|
||||||
|
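A small sketch of value formatting that would satisfy the assertions above; the standalone function name is hypothetical, while the dictionary keys are taken directly from the test.

# Hypothetical formatter: (project, package, constraint) tuples -> row dicts for a batch upsert.
def format_dependency_values(artifact_id, deps):
    """deps: iterable of (project, package, version_constraint) tuples."""
    return [
        {
            "artifact_id": artifact_id,
            "dependency_project": project,
            "dependency_package": package,
            "version_constraint": constraint,
        }
        for project, package, constraint in deps
    ]

values = format_dependency_values("abc123", [("_pypi", "numpy", ">=1.21.0")])
assert values[0]["dependency_package"] == "numpy"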
|
||||||
|
def test_empty_dependencies_returns_empty_list(self):
|
||||||
|
"""Empty dependency list should return empty values."""
|
||||||
|
from app.db_utils import ArtifactRepository
|
||||||
|
|
||||||
|
values = ArtifactRepository._format_dependency_values("abc123", [])
|
||||||
|
|
||||||
|
assert values == []
|
||||||
|
|
||||||
|
def test_format_dependency_values_preserves_special_characters(self):
|
||||||
|
"""Version constraints with special characters should be preserved."""
|
||||||
|
from app.db_utils import ArtifactRepository
|
||||||
|
|
||||||
|
deps = [
|
||||||
|
("_pypi", "package-name", ">=1.0.0,<2.0.0"),
|
||||||
|
("_pypi", "another_pkg", "~=1.4.2"),
|
||||||
|
]
|
||||||
|
|
||||||
|
values = ArtifactRepository._format_dependency_values("hash123", deps)
|
||||||
|
|
||||||
|
assert values[0]["version_constraint"] == ">=1.0.0,<2.0.0"
|
||||||
|
assert values[1]["version_constraint"] == "~=1.4.2"
|
||||||
|
|
||||||
|
def test_batch_upsert_dependencies_returns_zero_for_empty(self):
|
||||||
|
"""batch_upsert_dependencies should return 0 for empty list without DB call."""
|
||||||
|
from app.db_utils import ArtifactRepository
|
||||||
|
|
||||||
|
mock_db = MagicMock()
|
||||||
|
repo = ArtifactRepository(mock_db)
|
||||||
|
|
||||||
|
result = repo.batch_upsert_dependencies("abc123", [])
|
||||||
|
|
||||||
|
assert result == 0
|
||||||
|
# Verify no DB operations were performed
|
||||||
|
mock_db.execute.assert_not_called()
|
||||||
|
|
||||||
|
def test_get_or_create_artifact_builds_correct_statement(self):
|
||||||
|
"""get_or_create_artifact should use ON CONFLICT DO UPDATE."""
|
||||||
|
from app.db_utils import ArtifactRepository
|
||||||
|
from app.models import Artifact
|
||||||
|
|
||||||
|
mock_db = MagicMock()
|
||||||
|
mock_result = MagicMock()
|
||||||
|
mock_artifact = MagicMock()
|
||||||
|
mock_artifact.ref_count = 1
|
||||||
|
mock_result.scalar_one.return_value = mock_artifact
|
||||||
|
mock_db.execute.return_value = mock_result
|
||||||
|
|
||||||
|
repo = ArtifactRepository(mock_db)
|
||||||
|
artifact, created = repo.get_or_create_artifact(
|
||||||
|
sha256="abc123def456",
|
||||||
|
size=1024,
|
||||||
|
filename="test.whl",
|
||||||
|
content_type="application/zip",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert mock_db.execute.called
|
||||||
|
assert created is True
|
||||||
|
assert artifact == mock_artifact
|
||||||
|
|
||||||
|
def test_get_or_create_artifact_existing_not_created(self):
|
||||||
|
"""get_or_create_artifact should return created=False for existing artifact."""
|
||||||
|
from app.db_utils import ArtifactRepository
|
||||||
|
|
||||||
|
mock_db = MagicMock()
|
||||||
|
mock_result = MagicMock()
|
||||||
|
mock_artifact = MagicMock()
|
||||||
|
mock_artifact.ref_count = 5 # Existing artifact with ref_count > 1
|
||||||
|
mock_result.scalar_one.return_value = mock_artifact
|
||||||
|
mock_db.execute.return_value = mock_result
|
||||||
|
|
||||||
|
repo = ArtifactRepository(mock_db)
|
||||||
|
artifact, created = repo.get_or_create_artifact(
|
||||||
|
sha256="abc123def456",
|
||||||
|
size=1024,
|
||||||
|
filename="test.whl",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert created is False
|
||||||
|
|
||||||
|
def test_get_cached_url_with_artifact_returns_tuple(self):
|
||||||
|
"""get_cached_url_with_artifact should return (CachedUrl, Artifact) tuple."""
|
||||||
|
from app.db_utils import ArtifactRepository
|
||||||
|
|
||||||
|
mock_db = MagicMock()
|
||||||
|
mock_cached_url = MagicMock()
|
||||||
|
mock_artifact = MagicMock()
|
||||||
|
mock_db.query.return_value.join.return_value.filter.return_value.first.return_value = (
|
||||||
|
mock_cached_url,
|
||||||
|
mock_artifact,
|
||||||
|
)
|
||||||
|
|
||||||
|
repo = ArtifactRepository(mock_db)
|
||||||
|
result = repo.get_cached_url_with_artifact("url_hash_123")
|
||||||
|
|
||||||
|
assert result == (mock_cached_url, mock_artifact)
|
||||||
|
|
||||||
|
def test_get_cached_url_with_artifact_returns_none_when_not_found(self):
|
||||||
|
"""get_cached_url_with_artifact should return None when URL not cached."""
|
||||||
|
from app.db_utils import ArtifactRepository
|
||||||
|
|
||||||
|
mock_db = MagicMock()
|
||||||
|
mock_db.query.return_value.join.return_value.filter.return_value.first.return_value = None
|
||||||
|
|
||||||
|
repo = ArtifactRepository(mock_db)
|
||||||
|
result = repo.get_cached_url_with_artifact("nonexistent_hash")
|
||||||
|
|
||||||
|
assert result is None
|
||||||
|
|
||||||
|
def test_get_artifact_dependencies_returns_list(self):
|
||||||
|
"""get_artifact_dependencies should return list of dependencies."""
|
||||||
|
from app.db_utils import ArtifactRepository
|
||||||
|
|
||||||
|
mock_db = MagicMock()
|
||||||
|
mock_dep1 = MagicMock()
|
||||||
|
mock_dep2 = MagicMock()
|
||||||
|
mock_db.query.return_value.filter.return_value.all.return_value = [
|
||||||
|
mock_dep1,
|
||||||
|
mock_dep2,
|
||||||
|
]
|
||||||
|
|
||||||
|
repo = ArtifactRepository(mock_db)
|
||||||
|
result = repo.get_artifact_dependencies("artifact_hash_123")
|
||||||
|
|
||||||
|
assert len(result) == 2
|
||||||
|
assert result[0] == mock_dep1
|
||||||
|
assert result[1] == mock_dep2
|
||||||
|
|
||||||
|
def test_get_artifact_dependencies_returns_empty_list(self):
|
||||||
|
"""get_artifact_dependencies should return empty list when no dependencies."""
|
||||||
|
from app.db_utils import ArtifactRepository
|
||||||
|
|
||||||
|
mock_db = MagicMock()
|
||||||
|
mock_db.query.return_value.filter.return_value.all.return_value = []
|
||||||
|
|
||||||
|
repo = ArtifactRepository(mock_db)
|
||||||
|
result = repo.get_artifact_dependencies("artifact_without_deps")
|
||||||
|
|
||||||
|
assert result == []
|
||||||
backend/tests/unit/test_http_client.py (new file, 194 lines)
@@ -0,0 +1,194 @@
|
|||||||
|
"""Tests for HttpClientManager."""
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import MagicMock, AsyncMock, patch
|
||||||
|
|
||||||
|
|
||||||
|
class TestHttpClientManager:
|
||||||
|
"""Tests for HTTP client pool management."""
|
||||||
|
|
||||||
|
@pytest.mark.unit
|
||||||
|
def test_manager_initializes_with_settings(self):
|
||||||
|
"""Manager should initialize with config settings."""
|
||||||
|
from app.http_client import HttpClientManager
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(
|
||||||
|
http_max_connections=50,
|
||||||
|
http_connect_timeout=15.0,
|
||||||
|
)
|
||||||
|
manager = HttpClientManager(settings)
|
||||||
|
|
||||||
|
assert manager.max_connections == 50
|
||||||
|
assert manager.connect_timeout == 15.0
|
||||||
|
assert manager._default_client is None # Not started yet
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_startup_creates_client(self):
|
||||||
|
"""Startup should create the default async client."""
|
||||||
|
from app.http_client import HttpClientManager
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings()
|
||||||
|
manager = HttpClientManager(settings)
|
||||||
|
|
||||||
|
await manager.startup()
|
||||||
|
|
||||||
|
assert manager._default_client is not None
|
||||||
|
|
||||||
|
await manager.shutdown()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_shutdown_closes_client(self):
|
||||||
|
"""Shutdown should close all clients gracefully."""
|
||||||
|
from app.http_client import HttpClientManager
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings()
|
||||||
|
manager = HttpClientManager(settings)
|
||||||
|
|
||||||
|
await manager.startup()
|
||||||
|
client = manager._default_client
|
||||||
|
|
||||||
|
await manager.shutdown()
|
||||||
|
|
||||||
|
assert manager._default_client is None
|
||||||
|
assert client.is_closed
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_get_client_returns_default(self):
|
||||||
|
"""get_client() should return the default client."""
|
||||||
|
from app.http_client import HttpClientManager
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings()
|
||||||
|
manager = HttpClientManager(settings)
|
||||||
|
await manager.startup()
|
||||||
|
|
||||||
|
client = manager.get_client()
|
||||||
|
|
||||||
|
assert client is manager._default_client
|
||||||
|
|
||||||
|
await manager.shutdown()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_get_client_raises_if_not_started(self):
|
||||||
|
"""get_client() should raise RuntimeError if manager not started."""
|
||||||
|
from app.http_client import HttpClientManager
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings()
|
||||||
|
manager = HttpClientManager(settings)
|
||||||
|
|
||||||
|
with pytest.raises(RuntimeError, match="not started"):
|
||||||
|
manager.get_client()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_run_blocking_executes_in_thread_pool(self):
|
||||||
|
"""run_blocking should execute sync functions in thread pool."""
|
||||||
|
from app.http_client import HttpClientManager
|
||||||
|
from app.config import Settings
|
||||||
|
import threading
|
||||||
|
|
||||||
|
settings = Settings()
|
||||||
|
manager = HttpClientManager(settings)
|
||||||
|
await manager.startup()
|
||||||
|
|
||||||
|
main_thread = threading.current_thread()
|
||||||
|
execution_thread = None
|
||||||
|
|
||||||
|
def blocking_func():
|
||||||
|
nonlocal execution_thread
|
||||||
|
execution_thread = threading.current_thread()
|
||||||
|
return "result"
|
||||||
|
|
||||||
|
result = await manager.run_blocking(blocking_func)
|
||||||
|
|
||||||
|
assert result == "result"
|
||||||
|
assert execution_thread is not main_thread
|
||||||
|
|
||||||
|
await manager.shutdown()
|
||||||
|
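For reference, a minimal way to offload a blocking call to a thread pool from async code, which is the behaviour this test checks; the class and parameter names are assumptions, not the app's implementation.

# Hypothetical sketch: run a synchronous function on a worker thread from async code.
import asyncio
from concurrent.futures import ThreadPoolExecutor

class BlockingRunner:
    def __init__(self, max_workers: int = 8):
        self._executor = ThreadPoolExecutor(max_workers=max_workers)

    async def run_blocking(self, func, *args):
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(self._executor, func, *args)

    def shutdown(self):
        self._executor.shutdown(wait=True)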
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_run_blocking_raises_if_not_started(self):
|
||||||
|
"""run_blocking should raise RuntimeError if manager not started."""
|
||||||
|
from app.http_client import HttpClientManager
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings()
|
||||||
|
manager = HttpClientManager(settings)
|
||||||
|
|
||||||
|
with pytest.raises(RuntimeError, match="not started"):
|
||||||
|
await manager.run_blocking(lambda: None)
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_startup_idempotent(self):
|
||||||
|
"""Calling startup multiple times should be safe."""
|
||||||
|
from app.http_client import HttpClientManager
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings()
|
||||||
|
manager = HttpClientManager(settings)
|
||||||
|
|
||||||
|
await manager.startup()
|
||||||
|
client1 = manager._default_client
|
||||||
|
|
||||||
|
await manager.startup() # Should not create a new client
|
||||||
|
client2 = manager._default_client
|
||||||
|
|
||||||
|
assert client1 is client2 # Same client instance
|
||||||
|
|
||||||
|
await manager.shutdown()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_shutdown_idempotent(self):
|
||||||
|
"""Calling shutdown multiple times should be safe."""
|
||||||
|
from app.http_client import HttpClientManager
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings()
|
||||||
|
manager = HttpClientManager(settings)
|
||||||
|
|
||||||
|
await manager.startup()
|
||||||
|
await manager.shutdown()
|
||||||
|
await manager.shutdown() # Should not raise
|
||||||
|
|
||||||
|
assert manager._default_client is None
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_properties_return_configured_values(self):
|
||||||
|
"""Properties should return configured values."""
|
||||||
|
from app.http_client import HttpClientManager
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings(
|
||||||
|
http_max_connections=75,
|
||||||
|
http_worker_threads=16,
|
||||||
|
)
|
||||||
|
manager = HttpClientManager(settings)
|
||||||
|
await manager.startup()
|
||||||
|
|
||||||
|
assert manager.pool_size == 75
|
||||||
|
assert manager.executor_max == 16
|
||||||
|
|
||||||
|
await manager.shutdown()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
@pytest.mark.unit
|
||||||
|
async def test_active_connections_when_not_started(self):
|
||||||
|
"""active_connections should return 0 when not started."""
|
||||||
|
from app.http_client import HttpClientManager
|
||||||
|
from app.config import Settings
|
||||||
|
|
||||||
|
settings = Settings()
|
||||||
|
manager = HttpClientManager(settings)
|
||||||
|
|
||||||
|
assert manager.active_connections == 0
|
||||||
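These tests pin down the HttpClientManager contract: startup and shutdown are idempotent, get_client and run_blocking refuse to work before startup, and pool/executor sizes come from Settings. A minimal sketch of a manager that would satisfy them (names and behavior inferred from the tests above, not taken from the real app.http_client module) could look like:

```python
import asyncio
from concurrent.futures import ThreadPoolExecutor

import httpx


class HttpClientManagerSketch:
    """Hypothetical manager consistent with the tests above (illustrative only)."""

    def __init__(self, settings):
        self._settings = settings
        self._default_client = None   # created by startup()
        self._executor = None         # thread pool used by run_blocking()

    async def startup(self):
        if self._default_client is None:  # idempotent: second call is a no-op
            self._default_client = httpx.AsyncClient(
                limits=httpx.Limits(
                    max_connections=getattr(self._settings, "http_max_connections", 100)
                )
            )
            self._executor = ThreadPoolExecutor(
                max_workers=getattr(self._settings, "http_worker_threads", 8)
            )

    async def shutdown(self):
        if self._default_client is not None:  # idempotent: safe to call twice
            await self._default_client.aclose()
            self._default_client = None
            self._executor.shutdown(wait=False)
            self._executor = None

    def get_client(self) -> httpx.AsyncClient:
        if self._default_client is None:
            raise RuntimeError("HttpClientManager not started")
        return self._default_client

    async def run_blocking(self, func, *args):
        if self._executor is None:
            raise RuntimeError("HttpClientManager not started")
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(self._executor, func, *args)
```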
243  backend/tests/unit/test_metadata.py  Normal file
@@ -0,0 +1,243 @@
"""Unit tests for metadata extraction functionality."""

import io
import gzip
import tarfile
import zipfile
import pytest
from app.metadata import (
    extract_metadata,
    extract_deb_metadata,
    extract_wheel_metadata,
    extract_tarball_metadata,
    extract_jar_metadata,
    parse_deb_control,
)


class TestDebMetadata:
    """Tests for Debian package metadata extraction."""

    def test_parse_deb_control_basic(self):
        """Test parsing a basic control file."""
        control = """Package: my-package
Version: 1.2.3
Architecture: amd64
Maintainer: Test <test@example.com>
Description: A test package
"""
        result = parse_deb_control(control)
        assert result["package_name"] == "my-package"
        assert result["version"] == "1.2.3"
        assert result["architecture"] == "amd64"
        assert result["format"] == "deb"

    def test_parse_deb_control_with_epoch(self):
        """Test parsing version with epoch."""
        control = """Package: another-pkg
Version: 2:1.0.0-1
"""
        result = parse_deb_control(control)
        assert result["version"] == "2:1.0.0-1"
        assert result["package_name"] == "another-pkg"
        assert result["format"] == "deb"

    def test_extract_deb_metadata_invalid_magic(self):
        """Test that invalid ar magic returns empty dict."""
        file = io.BytesIO(b"not an ar archive")
        result = extract_deb_metadata(file)
        assert result == {}

    def test_extract_deb_metadata_valid_ar_no_control(self):
        """Test ar archive without control.tar returns empty."""
        # Create minimal ar archive with just debian-binary
        ar_data = b"!<arch>\n"
        ar_data += b"debian-binary/ 0 0 0 100644 4 `\n"
        ar_data += b"2.0\n"

        file = io.BytesIO(ar_data)
        result = extract_deb_metadata(file)
        # Should return empty since no control.tar found
        assert result == {} or "version" not in result


class TestWheelMetadata:
    """Tests for Python wheel metadata extraction."""

    def _create_wheel_with_metadata(self, metadata_content: str) -> io.BytesIO:
        """Helper to create a wheel file with given METADATA content."""
        buf = io.BytesIO()
        with zipfile.ZipFile(buf, 'w') as zf:
            zf.writestr('package-1.0.0.dist-info/METADATA', metadata_content)
        buf.seek(0)
        return buf

    def test_extract_wheel_version(self):
        """Test extracting version from wheel METADATA."""
        metadata = """Metadata-Version: 2.1
Name: my-package
Version: 2.3.4
Summary: A test package
"""
        file = self._create_wheel_with_metadata(metadata)
        result = extract_wheel_metadata(file)
        assert result.get("version") == "2.3.4"
        assert result.get("package_name") == "my-package"
        assert result.get("format") == "wheel"

    def test_extract_wheel_no_version(self):
        """Test wheel without version field."""
        metadata = """Metadata-Version: 2.1
Name: no-version-pkg
"""
        file = self._create_wheel_with_metadata(metadata)
        result = extract_wheel_metadata(file)
        assert "version" not in result
        assert result.get("package_name") == "no-version-pkg"
        assert result.get("format") == "wheel"

    def test_extract_wheel_invalid_zip(self):
        """Test that invalid zip returns format-only dict."""
        file = io.BytesIO(b"not a zip file")
        result = extract_wheel_metadata(file)
        assert result == {"format": "wheel"}

    def test_extract_wheel_no_metadata_file(self):
        """Test wheel without METADATA file returns format-only dict."""
        buf = io.BytesIO()
        with zipfile.ZipFile(buf, 'w') as zf:
            zf.writestr('some_file.py', 'print("hello")')
        buf.seek(0)
        result = extract_wheel_metadata(buf)
        assert result == {"format": "wheel"}


class TestTarballMetadata:
    """Tests for tarball metadata extraction from filename."""

    def test_extract_version_from_filename_standard(self):
        """Test standard package-version.tar.gz format."""
        file = io.BytesIO(b"")  # Content doesn't matter for filename extraction
        result = extract_tarball_metadata(file, "mypackage-1.2.3.tar.gz")
        assert result.get("version") == "1.2.3"
        assert result.get("package_name") == "mypackage"
        assert result.get("format") == "tarball"

    def test_extract_version_with_v_prefix(self):
        """Test version with v prefix."""
        file = io.BytesIO(b"")
        result = extract_tarball_metadata(file, "package-v2.0.0.tar.gz")
        assert result.get("version") == "2.0.0"
        assert result.get("package_name") == "package"
        assert result.get("format") == "tarball"

    def test_extract_version_underscore_separator(self):
        """Test package_version format."""
        file = io.BytesIO(b"")
        result = extract_tarball_metadata(file, "my_package_3.1.4.tar.gz")
        assert result.get("version") == "3.1.4"
        assert result.get("package_name") == "my_package"
        assert result.get("format") == "tarball"

    def test_extract_version_complex(self):
        """Test complex version string."""
        file = io.BytesIO(b"")
        result = extract_tarball_metadata(file, "package-1.0.0-beta.1.tar.gz")
        # The regex handles versions with suffix like -beta_1
        assert result.get("format") == "tarball"
        # May or may not extract version depending on regex match
        if "version" in result:
            assert result.get("package_name") == "package"

    def test_extract_no_version_in_filename(self):
        """Test filename without version returns format-only dict."""
        file = io.BytesIO(b"")
        result = extract_tarball_metadata(file, "package.tar.gz")
        # Should return format but no version
        assert result.get("version") is None
        assert result.get("format") == "tarball"


class TestJarMetadata:
    """Tests for JAR/Java metadata extraction."""

    def _create_jar_with_manifest(self, manifest_content: str) -> io.BytesIO:
        """Helper to create a JAR file with given MANIFEST.MF content."""
        buf = io.BytesIO()
        with zipfile.ZipFile(buf, 'w') as zf:
            zf.writestr('META-INF/MANIFEST.MF', manifest_content)
        buf.seek(0)
        return buf

    def test_extract_jar_version_from_manifest(self):
        """Test extracting version from MANIFEST.MF."""
        manifest = """Manifest-Version: 1.0
Implementation-Title: my-library
Implementation-Version: 4.5.6
"""
        file = self._create_jar_with_manifest(manifest)
        result = extract_jar_metadata(file)
        assert result.get("version") == "4.5.6"
        assert result.get("package_name") == "my-library"
        assert result.get("format") == "jar"

    def test_extract_jar_bundle_version(self):
        """Test extracting OSGi Bundle-Version."""
        manifest = """Manifest-Version: 1.0
Bundle-Version: 2.1.0
Bundle-Name: Test Bundle
"""
        file = self._create_jar_with_manifest(manifest)
        result = extract_jar_metadata(file)
        # Bundle-Version is stored in bundle_version, not version
        assert result.get("bundle_version") == "2.1.0"
        assert result.get("bundle_name") == "Test Bundle"
        assert result.get("format") == "jar"

    def test_extract_jar_invalid_zip(self):
        """Test that invalid JAR returns format-only dict."""
        file = io.BytesIO(b"not a jar file")
        result = extract_jar_metadata(file)
        assert result == {"format": "jar"}


class TestExtractMetadataDispatch:
    """Tests for the main extract_metadata dispatcher function."""

    def test_dispatch_to_wheel(self):
        """Test that .whl files use wheel extractor."""
        buf = io.BytesIO()
        with zipfile.ZipFile(buf, 'w') as zf:
            zf.writestr('pkg-1.0.dist-info/METADATA', 'Version: 1.0.0\nName: pkg')
        buf.seek(0)

        result = extract_metadata(buf, "package-1.0.0-py3-none-any.whl")
        assert result.get("version") == "1.0.0"
        assert result.get("package_name") == "pkg"
        assert result.get("format") == "wheel"

    def test_dispatch_to_tarball(self):
        """Test that .tar.gz files use tarball extractor."""
        file = io.BytesIO(b"")
        result = extract_metadata(file, "mypackage-2.3.4.tar.gz")
        assert result.get("version") == "2.3.4"
        assert result.get("package_name") == "mypackage"
        assert result.get("format") == "tarball"

    def test_dispatch_unknown_extension(self):
        """Test that unknown extensions return empty dict."""
        file = io.BytesIO(b"some content")
        result = extract_metadata(file, "unknown.xyz")
        assert result == {}

    def test_file_position_reset_after_extraction(self):
        """Test that file position is reset to start after extraction."""
        buf = io.BytesIO()
        with zipfile.ZipFile(buf, 'w') as zf:
            zf.writestr('pkg-1.0.dist-info/METADATA', 'Version: 1.0.0\nName: pkg')
        buf.seek(0)

        extract_metadata(buf, "package.whl")

        # File should be back at position 0
        assert buf.tell() == 0
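The control-file tests above fully describe the mapping from Debian control fields to the returned dict. A minimal sketch of a parser consistent with those tests (illustrative only, not the actual app.metadata implementation):

```python
def parse_deb_control_sketch(control: str) -> dict:
    """Map Debian control fields to the metadata dict shape used in the tests."""
    fields = {}
    for line in control.splitlines():
        # Ignore continuation lines (multi-line Description blocks start with whitespace).
        if ":" in line and not line.startswith((" ", "\t")):
            key, _, value = line.partition(":")
            fields[key.strip().lower()] = value.strip()

    result = {"format": "deb"}
    if "package" in fields:
        result["package_name"] = fields["package"]
    if "version" in fields:
        result["version"] = fields["version"]
    if "architecture" in fields:
        result["architecture"] = fields["architecture"]
    return result
```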
@@ -145,54 +145,6 @@ class TestPackageModel:
        assert platform_col.default.arg == "any"


class TestTagModel:
    """Tests for the Tag model."""

    @pytest.mark.unit
    def test_tag_requires_package_id(self):
        """Test tag requires package_id."""
        from app.models import Tag

        tag = Tag(
            name="v1.0.0",
            package_id=uuid.uuid4(),
            artifact_id="f" * 64,
            created_by="test-user",
        )

        assert tag.package_id is not None
        assert tag.artifact_id == "f" * 64


class TestTagHistoryModel:
    """Tests for the TagHistory model."""

    @pytest.mark.unit
    def test_tag_history_default_change_type(self):
        """Test tag history change_type column has default value of 'update'."""
        from app.models import TagHistory

        # Check the column definition has the right default
        change_type_col = TagHistory.__table__.columns["change_type"]
        assert change_type_col.default is not None
        assert change_type_col.default.arg == "update"

    @pytest.mark.unit
    def test_tag_history_allows_null_old_artifact(self):
        """Test tag history allows null old_artifact_id (for create events)."""
        from app.models import TagHistory

        history = TagHistory(
            tag_id=uuid.uuid4(),
            old_artifact_id=None,
            new_artifact_id="h" * 64,
            change_type="create",
            changed_by="test-user",
        )

        assert history.old_artifact_id is None


class TestUploadModel:
    """Tests for the Upload model."""
85  backend/tests/unit/test_pypi_proxy.py  Normal file
@@ -0,0 +1,85 @@
"""Unit tests for PyPI proxy functionality."""

import pytest
from app.pypi_proxy import _parse_requires_dist


class TestParseRequiresDist:
    """Tests for _parse_requires_dist function."""

    def test_simple_package(self):
        """Test parsing a simple package name."""
        name, version = _parse_requires_dist("numpy")
        assert name == "numpy"
        assert version is None

    def test_package_with_version(self):
        """Test parsing package with version constraint."""
        name, version = _parse_requires_dist("numpy>=1.21.0")
        assert name == "numpy"
        assert version == ">=1.21.0"

    def test_package_with_parenthesized_version(self):
        """Test parsing package with parenthesized version."""
        name, version = _parse_requires_dist("requests (>=2.25.0)")
        assert name == "requests"
        assert version == ">=2.25.0"

    def test_package_with_python_version_marker(self):
        """Test that deps with python_version markers are kept but the marker is stripped."""
        name, version = _parse_requires_dist("typing-extensions; python_version < '3.8'")
        assert name == "typing-extensions"
        assert version is None

    def test_filters_extra_dependencies(self):
        """Test that extra dependencies are filtered out."""
        # Extra dependencies should return (None, None)
        name, version = _parse_requires_dist("pytest; extra == 'test'")
        assert name is None
        assert version is None

        name, version = _parse_requires_dist("sphinx; extra == 'docs'")
        assert name is None
        assert version is None

    def test_filters_platform_specific_darwin(self):
        """Test that macOS-specific dependencies are filtered out."""
        name, version = _parse_requires_dist("pyobjc; sys_platform == 'darwin'")
        assert name is None
        assert version is None

    def test_filters_platform_specific_win32(self):
        """Test that Windows-specific dependencies are filtered out."""
        name, version = _parse_requires_dist("pywin32; sys_platform == 'win32'")
        assert name is None
        assert version is None

    def test_filters_platform_system_marker(self):
        """Test that platform_system markers are filtered out."""
        name, version = _parse_requires_dist("jaraco-windows; platform_system == 'Windows'")
        assert name is None
        assert version is None

    def test_normalizes_package_name(self):
        """Test that package names are normalized (PEP 503)."""
        name, version = _parse_requires_dist("Typing_Extensions>=3.7.4")
        assert name == "typing-extensions"
        assert version == ">=3.7.4"

    def test_complex_version_constraint(self):
        """Test parsing complex version constraints."""
        name, version = _parse_requires_dist("gast!=0.5.0,!=0.5.1,!=0.5.2,>=0.2.1")
        assert name == "gast"
        assert version == "!=0.5.0,!=0.5.1,!=0.5.2,>=0.2.1"

    def test_version_range(self):
        """Test parsing version range constraints."""
        name, version = _parse_requires_dist("grpcio<2.0,>=1.24.3")
        assert name == "grpcio"
        assert version == "<2.0,>=1.24.3"

    def test_tilde_version(self):
        """Test parsing tilde version constraints."""
        name, version = _parse_requires_dist("tensorboard~=2.20.0")
        assert name == "tensorboard"
        assert version == "~=2.20.0"
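Taken together, these cases describe a parser that drops extras and platform-specific requirements, strips environment markers, normalizes names per PEP 503, and returns the raw specifier string. One way such a parser could be written (a sketch that matches the tests above, not the actual code in app.pypi_proxy):

```python
import re


def parse_requires_dist_sketch(requirement: str):
    """Return (normalized_name, version_spec_or_None), or (None, None) for filtered deps."""
    req, _, marker = requirement.partition(";")
    marker = marker.strip()
    # Drop extras and platform/OS-specific requirements entirely.
    if any(key in marker for key in ("extra ==", "sys_platform", "platform_system")):
        return None, None

    req = req.strip().replace("(", "").replace(")", "")
    match = re.match(r"^([A-Za-z0-9._-]+)\s*(.*)$", req)
    if not match:
        return None, None
    name, spec = match.group(1), match.group(2).strip().replace(" ", "")

    # PEP 503 normalization: lowercase, collapse runs of -, _, . into a single dash.
    name = re.sub(r"[-_.]+", "-", name).lower()
    return name, (spec or None)
```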
65  backend/tests/unit/test_rate_limit.py  Normal file
@@ -0,0 +1,65 @@
"""Unit tests for rate limiting configuration."""

import os
import pytest


class TestRateLimitConfiguration:
    """Tests for rate limit configuration."""

    def test_default_login_rate_limit(self):
        """Test default login rate limit is 5/minute."""
        # Import fresh to get default value
        import importlib
        import app.rate_limit as rate_limit_module

        # Save original env value
        original = os.environ.get("ORCHARD_LOGIN_RATE_LIMIT")

        try:
            # Clear env variable to test default
            if "ORCHARD_LOGIN_RATE_LIMIT" in os.environ:
                del os.environ["ORCHARD_LOGIN_RATE_LIMIT"]

            # Reload module to pick up new env
            importlib.reload(rate_limit_module)

            assert rate_limit_module.LOGIN_RATE_LIMIT == "5/minute"
        finally:
            # Restore original env value
            if original is not None:
                os.environ["ORCHARD_LOGIN_RATE_LIMIT"] = original
            importlib.reload(rate_limit_module)

    def test_custom_login_rate_limit(self):
        """Test custom login rate limit from environment."""
        import importlib
        import app.rate_limit as rate_limit_module

        # Save original env value
        original = os.environ.get("ORCHARD_LOGIN_RATE_LIMIT")

        try:
            # Set custom rate limit
            os.environ["ORCHARD_LOGIN_RATE_LIMIT"] = "10/minute"

            # Reload module to pick up new env
            importlib.reload(rate_limit_module)

            assert rate_limit_module.LOGIN_RATE_LIMIT == "10/minute"
        finally:
            # Restore original env value
            if original is not None:
                os.environ["ORCHARD_LOGIN_RATE_LIMIT"] = original
            else:
                if "ORCHARD_LOGIN_RATE_LIMIT" in os.environ:
                    del os.environ["ORCHARD_LOGIN_RATE_LIMIT"]
            importlib.reload(rate_limit_module)

    def test_limiter_exists(self):
        """Test that limiter object is created."""
        from app.rate_limit import limiter

        assert limiter is not None
        # Limiter should have a key_func set
        assert limiter._key_func is not None
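The tests above assume a module-level LOGIN_RATE_LIMIT read from the environment at import time plus a limiter object carrying a key function. A minimal sketch of such a module, assuming slowapi is the limiter library (the real app.rate_limit may be structured differently):

```python
import os

from slowapi import Limiter
from slowapi.util import get_remote_address

# Read at import time so importlib.reload() picks up environment changes,
# as exercised by the tests above.
LOGIN_RATE_LIMIT = os.environ.get("ORCHARD_LOGIN_RATE_LIMIT", "5/minute")

# Rate-limit key is the client address; slowapi stores it internally as _key_func.
limiter = Limiter(key_func=get_remote_address)
```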
300  backend/tests/unit/test_registry_client.py  Normal file
@@ -0,0 +1,300 @@
"""Unit tests for registry client functionality."""

import pytest
from unittest.mock import AsyncMock, MagicMock, patch
import httpx
from packaging.specifiers import SpecifierSet

from app.registry_client import (
    PyPIRegistryClient,
    VersionInfo,
    FetchResult,
    get_registry_client,
)


class TestPyPIRegistryClient:
    """Tests for PyPI registry client."""

    @pytest.fixture
    def mock_http_client(self):
        """Create a mock async HTTP client."""
        return AsyncMock(spec=httpx.AsyncClient)

    @pytest.fixture
    def client(self, mock_http_client):
        """Create a PyPI registry client with mocked HTTP."""
        return PyPIRegistryClient(
            http_client=mock_http_client,
            upstream_sources=[],
            pypi_api_url="https://pypi.org/pypi",
        )

    def test_source_type(self, client):
        """Test source_type returns 'pypi'."""
        assert client.source_type == "pypi"

    def test_normalize_package_name(self, client):
        """Test package name normalization per PEP 503."""
        assert client._normalize_package_name("My_Package") == "my-package"
        assert client._normalize_package_name("my.package") == "my-package"
        assert client._normalize_package_name("my-package") == "my-package"
        assert client._normalize_package_name("MY-PACKAGE") == "my-package"
        assert client._normalize_package_name("my__package") == "my-package"
        assert client._normalize_package_name("my..package") == "my-package"

    @pytest.mark.asyncio
    async def test_get_available_versions_success(self, client, mock_http_client):
        """Test fetching available versions from PyPI."""
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.json.return_value = {
            "releases": {
                "1.0.0": [{"packagetype": "bdist_wheel"}],
                "1.1.0": [{"packagetype": "bdist_wheel"}],
                "2.0.0": [{"packagetype": "bdist_wheel"}],
            }
        }
        mock_http_client.get.return_value = mock_response

        versions = await client.get_available_versions("test-package")

        assert "1.0.0" in versions
        assert "1.1.0" in versions
        assert "2.0.0" in versions
        mock_http_client.get.assert_called_once()

    @pytest.mark.asyncio
    async def test_get_available_versions_empty(self, client, mock_http_client):
        """Test handling package with no releases."""
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.json.return_value = {"releases": {}}
        mock_http_client.get.return_value = mock_response

        versions = await client.get_available_versions("empty-package")

        assert versions == []

    @pytest.mark.asyncio
    async def test_get_available_versions_404(self, client, mock_http_client):
        """Test handling non-existent package."""
        mock_response = MagicMock()
        mock_response.status_code = 404
        mock_http_client.get.return_value = mock_response

        versions = await client.get_available_versions("nonexistent")

        assert versions == []

    @pytest.mark.asyncio
    async def test_resolve_constraint_wildcard(self, client, mock_http_client):
        """Test resolving wildcard constraint returns latest."""
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.json.return_value = {
            "info": {"version": "2.0.0"},
            "releases": {
                "1.0.0": [
                    {
                        "packagetype": "bdist_wheel",
                        "url": "https://files.pythonhosted.org/test-1.0.0.whl",
                        "filename": "test-1.0.0.whl",
                        "digests": {"sha256": "abc123"},
                        "size": 1000,
                    }
                ],
                "2.0.0": [
                    {
                        "packagetype": "bdist_wheel",
                        "url": "https://files.pythonhosted.org/test-2.0.0.whl",
                        "filename": "test-2.0.0.whl",
                        "digests": {"sha256": "def456"},
                        "size": 2000,
                    }
                ],
            },
        }
        mock_http_client.get.return_value = mock_response

        result = await client.resolve_constraint("test-package", "*")

        assert result is not None
        assert result.version == "2.0.0"

    @pytest.mark.asyncio
    async def test_resolve_constraint_specific_version(self, client, mock_http_client):
        """Test resolving specific version constraint."""
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.json.return_value = {
            "releases": {
                "1.0.0": [
                    {
                        "packagetype": "bdist_wheel",
                        "url": "https://files.pythonhosted.org/test-1.0.0.whl",
                        "filename": "test-1.0.0.whl",
                        "digests": {"sha256": "abc123"},
                        "size": 1000,
                    }
                ],
                "2.0.0": [
                    {
                        "packagetype": "bdist_wheel",
                        "url": "https://files.pythonhosted.org/test-2.0.0.whl",
                        "filename": "test-2.0.0.whl",
                    }
                ],
            },
        }
        mock_http_client.get.return_value = mock_response

        result = await client.resolve_constraint("test-package", ">=1.0.0,<2.0.0")

        assert result is not None
        assert result.version == "1.0.0"

    @pytest.mark.asyncio
    async def test_resolve_constraint_no_match(self, client, mock_http_client):
        """Test resolving constraint with no matching version."""
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.json.return_value = {
            "releases": {
                "1.0.0": [
                    {
                        "packagetype": "bdist_wheel",
                        "url": "https://files.pythonhosted.org/test-1.0.0.whl",
                        "filename": "test-1.0.0.whl",
                    }
                ],
            },
        }
        mock_http_client.get.return_value = mock_response

        result = await client.resolve_constraint("test-package", ">=5.0.0")

        assert result is None

    @pytest.mark.asyncio
    async def test_resolve_constraint_bare_version(self, client, mock_http_client):
        """Test resolving bare version string as exact match."""
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.json.return_value = {
            "info": {"version": "2.0.0"},
            "releases": {
                "1.0.0": [
                    {
                        "packagetype": "bdist_wheel",
                        "url": "https://files.pythonhosted.org/test-1.0.0.whl",
                        "filename": "test-1.0.0.whl",
                        "digests": {"sha256": "abc123"},
                        "size": 1000,
                    }
                ],
                "2.0.0": [
                    {
                        "packagetype": "bdist_wheel",
                        "url": "https://files.pythonhosted.org/test-2.0.0.whl",
                        "filename": "test-2.0.0.whl",
                        "digests": {"sha256": "def456"},
                        "size": 2000,
                    }
                ],
            },
        }
        mock_http_client.get.return_value = mock_response

        # Bare version "1.0.0" should resolve to exactly 1.0.0, not latest
        result = await client.resolve_constraint("test-package", "1.0.0")

        assert result is not None
        assert result.version == "1.0.0"


class TestVersionInfo:
    """Tests for VersionInfo dataclass."""

    def test_create_version_info(self):
        """Test creating VersionInfo with all fields."""
        info = VersionInfo(
            version="1.0.0",
            download_url="https://example.com/pkg-1.0.0.whl",
            filename="pkg-1.0.0.whl",
            sha256="abc123",
            size=5000,
            content_type="application/zip",
        )
        assert info.version == "1.0.0"
        assert info.download_url == "https://example.com/pkg-1.0.0.whl"
        assert info.filename == "pkg-1.0.0.whl"
        assert info.sha256 == "abc123"
        assert info.size == 5000

    def test_create_version_info_minimal(self):
        """Test creating VersionInfo with only required fields."""
        info = VersionInfo(
            version="1.0.0",
            download_url="https://example.com/pkg.whl",
            filename="pkg.whl",
        )
        assert info.sha256 is None
        assert info.size is None


class TestFetchResult:
    """Tests for FetchResult dataclass."""

    def test_create_fetch_result(self):
        """Test creating FetchResult."""
        result = FetchResult(
            artifact_id="abc123def456",
            size=10000,
            version="2.0.0",
            filename="pkg-2.0.0.whl",
            already_cached=True,
        )
        assert result.artifact_id == "abc123def456"
        assert result.size == 10000
        assert result.version == "2.0.0"
        assert result.already_cached is True

    def test_fetch_result_default_not_cached(self):
        """Test FetchResult defaults to not cached."""
        result = FetchResult(
            artifact_id="xyz",
            size=100,
            version="1.0.0",
            filename="pkg.whl",
        )
        assert result.already_cached is False


class TestGetRegistryClient:
    """Tests for registry client factory function."""

    def test_get_pypi_client(self):
        """Test getting PyPI client."""
        mock_client = MagicMock()
        mock_sources = []

        client = get_registry_client("pypi", mock_client, mock_sources)

        assert isinstance(client, PyPIRegistryClient)

    def test_get_unsupported_client(self):
        """Test getting unsupported registry type returns None."""
        mock_client = MagicMock()

        client = get_registry_client("npm", mock_client, [])

        assert client is None

    def test_get_unknown_client(self):
        """Test getting unknown registry type returns None."""
        mock_client = MagicMock()

        client = get_registry_client("unknown", mock_client, [])

        assert client is None
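The resolve_constraint cases above pin down the selection rules: "*" means latest, a bare version means an exact match, and a PEP 440 range picks the highest satisfying release. A sketch of that selection logic using packaging (illustrative; the real PyPIRegistryClient may differ):

```python
from typing import Optional

from packaging.specifiers import SpecifierSet
from packaging.version import Version


def pick_version_sketch(releases: dict, constraint: str) -> Optional[str]:
    """Pick the highest release satisfying the constraint, or None if nothing matches."""
    if constraint == "*":
        spec = SpecifierSet()                    # empty specifier matches everything
    elif constraint[0].isdigit():
        spec = SpecifierSet(f"=={constraint}")   # bare version string -> exact match
    else:
        spec = SpecifierSet(constraint)          # e.g. ">=1.0.0,<2.0.0"

    candidates = [
        Version(v)
        for v, files in releases.items()
        if files and Version(v) in spec          # only releases that actually have files
    ]
    return str(max(candidates)) if candidates else None
```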
672  docs/epic-upstream-caching.md  Normal file
@@ -0,0 +1,672 @@
# Epic: Upstream Artifact Caching for Hermetic Builds

## Overview

Orchard will act as a permanent, content-addressable cache for upstream artifacts (npm, PyPI, Maven, Docker, etc.). Once an artifact is cached, it is stored forever by SHA256 hash - enabling reproducible builds years later regardless of whether the upstream source still exists.

## Problem Statement

Build reproducibility is critical for enterprise environments:
- Packages get deleted, yanked, or modified upstream
- Registries go down or change URLs
- Version constraints resolve differently over time
- Air-gapped environments cannot access public internet

Teams need to guarantee that a build from 5 years ago produces the exact same output today.

## Solution

Orchard becomes "the cache that never forgets":

1. **Fetch once, store forever** - When a build needs `lodash@4.17.21`, Orchard fetches it from npm, stores it by SHA256 hash, and never deletes it
2. **Content-addressable** - Same hash = same bytes, guaranteed
3. **Format-agnostic** - Orchard doesn't need to understand npm/PyPI/Maven protocols; the client provides the URL, Orchard fetches and stores
4. **Air-gap support** - Disable public internet entirely, only allow configured private upstreams

## User Workflow

```
1. Build tool resolves dependencies     npm install / pip install / mvn resolve
                                        ↓
2. Generate lockfile with URLs          package-lock.json / requirements.txt
                                        ↓
3. Cache all URLs in Orchard            orchard cache --file urls.txt
                                        ↓
4. Pin by SHA256 hash                   lodash = "sha256:abc123..."
                                        ↓
5. Future builds fetch by hash          Always get exact same bytes
```

## Key Features

- **Multiple upstream sources** - Configure npm, PyPI, Maven Central, private Artifactory, etc.
- **Per-source authentication** - Basic auth, bearer tokens, API keys
- **System cache projects** - `_npm`, `_pypi`, `_maven` organize cached packages by format
- **Cross-referencing** - Link cached artifacts to user projects for visibility
- **URL tracking** - Know which URLs map to which hashes, audit provenance
- **Air-gap mode** - Global kill switch for all public internet access
- **Environment variable config** - 12-factor friendly for containerized deployments

## Architecture

```
┌──────────────────────────────────────────────────────────────────┐
│                          Orchard Server                          │
├──────────────────────────────────────────────────────────────────┤
│  POST /api/v1/cache                                               │
│    ├── Check if URL already cached (url_hash lookup)              │
│    ├── Match URL to upstream source (get auth)                    │
│    ├── Fetch via UpstreamClient (stream + compute SHA256)         │
│    ├── Store artifact in S3 (content-addressable)                 │
│    ├── Create tag in system project (_npm/lodash:4.17.21)         │
│    ├── Optionally create tag in user project                      │
│    └── Record in cached_urls table (provenance)                   │
├──────────────────────────────────────────────────────────────────┤
│  Tables                                                           │
│    ├── upstream_sources (npm-public, pypi-public, artifactory)    │
│    ├── cache_settings (allow_public_internet, etc.)               │
│    ├── cached_urls (url → artifact_id mapping)                    │
│    └── projects.is_system (for _npm, _pypi, etc.)                 │
└──────────────────────────────────────────────────────────────────┘
```

## Issues Summary

| Issue | Title | Status | Dependencies |
|-------|-------|--------|--------------|
| #68 | Schema: Upstream Sources & Cache Tracking | ✅ Complete | None |
| #69 | HTTP Client: Generic URL Fetcher | Pending | None |
| #70 | Cache API Endpoint | Pending | #68, #69 |
| #71 | System Projects (Cache Namespaces) | Pending | #68, #70 |
| #72 | Upstream Sources Admin API | Pending | #68 |
| #73 | Global Cache Settings API | Pending | #68 |
| #74 | Environment Variable Overrides | Pending | #68, #72, #73 |
| #75 | Frontend: Upstream Sources Management | Pending | #72, #73 |
| #105 | Frontend: System Projects Integration | Pending | #71 |
| #77 | CLI: Cache Command | Pending | #70 |

## Implementation Phases

**Phase 1 - Core (MVP):**
- #68 Schema ✅
- #69 HTTP Client
- #70 Cache API
- #71 System Projects

**Phase 2 - Admin:**
- #72 Upstream Sources API
- #73 Cache Settings API
- #74 Environment Variables

**Phase 3 - Frontend:**
- #75 Upstream Sources UI
- #105 System Projects UI

**Phase 4 - CLI:**
- #77 Cache Command

---

# Issue #68: Schema - Upstream Sources & Cache Tracking

**Status: ✅ Complete**

## Description

Create database schema for flexible multi-source upstream configuration and URL-to-artifact tracking. This replaces the previous singleton proxy_config design with a more flexible model supporting multiple upstream sources, air-gap mode, and provenance tracking.

## Acceptance Criteria

- [x] `upstream_sources` table:
  - id (UUID, primary key)
  - name (VARCHAR(255), unique, e.g., "npm-public", "artifactory-private")
  - source_type (VARCHAR(50), enum: npm, pypi, maven, docker, helm, nuget, deb, rpm, generic)
  - url (VARCHAR(2048), base URL of upstream)
  - enabled (BOOLEAN, default false)
  - is_public (BOOLEAN, true if this is a public internet source)
  - auth_type (VARCHAR(20), enum: none, basic, bearer, api_key)
  - username (VARCHAR(255), nullable)
  - password_encrypted (BYTEA, nullable, Fernet encrypted)
  - headers_encrypted (BYTEA, nullable, for custom headers like API keys)
  - priority (INTEGER, default 100, lower = checked first)
  - created_at, updated_at timestamps
- [x] `cache_settings` table (singleton, id always 1):
  - id (INTEGER, primary key, check id = 1)
  - allow_public_internet (BOOLEAN, default true, air-gap kill switch)
  - auto_create_system_projects (BOOLEAN, default true)
  - created_at, updated_at timestamps
- [x] `cached_urls` table:
  - id (UUID, primary key)
  - url (VARCHAR(4096), original URL fetched)
  - url_hash (VARCHAR(64), SHA256 of URL for fast lookup, indexed)
  - artifact_id (VARCHAR(64), FK to artifacts)
  - source_id (UUID, FK to upstream_sources, nullable for manual imports)
  - fetched_at (TIMESTAMP WITH TIME ZONE)
  - response_headers (JSONB, original upstream headers for provenance)
  - created_at timestamp
- [x] Add `is_system` BOOLEAN column to projects table (default false)
- [x] Migration SQL file in migrations/
- [x] Runtime migration in database.py
- [x] SQLAlchemy models for all new tables
- [x] Pydantic schemas for API input/output (passwords write-only)
- [x] Encryption helpers for password/headers fields
- [x] Seed default upstream sources (disabled by default):
  - npm-public: https://registry.npmjs.org
  - pypi-public: https://pypi.org/simple
  - maven-central: https://repo1.maven.org/maven2
  - docker-hub: https://registry-1.docker.io
- [x] Unit tests for models and schemas

## Files Modified

- `migrations/010_upstream_caching.sql`
- `backend/app/database.py` (migrations 016-020)
- `backend/app/models.py` (UpstreamSource, CacheSettings, CachedUrl, Project.is_system)
- `backend/app/schemas.py` (all caching schemas)
- `backend/app/encryption.py` (renamed env var)
- `backend/app/config.py` (renamed setting)
- `backend/tests/test_upstream_caching.py` (37 tests)
- `frontend/src/components/Layout.tsx` (footer tagline)
- `CHANGELOG.md`
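For orientation, a condensed sketch of how the `upstream_sources` and `cached_urls` tables described above could map onto SQLAlchemy models (column names follow the acceptance criteria; types, defaults, and the FK target for artifacts are abbreviated assumptions, and the real backend/app/models.py is authoritative):

```python
import uuid

from sqlalchemy import (
    Boolean, Column, DateTime, ForeignKey, Integer, LargeBinary, String, func,
)
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class UpstreamSource(Base):
    __tablename__ = "upstream_sources"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = Column(String(255), unique=True, nullable=False)    # e.g. "npm-public"
    source_type = Column(String(50), nullable=False)           # npm, pypi, maven, ...
    url = Column(String(2048), nullable=False)
    enabled = Column(Boolean, default=False)
    is_public = Column(Boolean, default=False)
    auth_type = Column(String(20), default="none")             # none, basic, bearer, api_key
    username = Column(String(255), nullable=True)
    password_encrypted = Column(LargeBinary, nullable=True)    # Fernet-encrypted
    headers_encrypted = Column(LargeBinary, nullable=True)
    priority = Column(Integer, default=100)                    # lower = checked first
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())


class CachedUrl(Base):
    __tablename__ = "cached_urls"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    url = Column(String(4096), nullable=False)
    url_hash = Column(String(64), index=True, nullable=False)  # SHA256 of the URL
    artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)  # FK target assumed
    source_id = Column(UUID(as_uuid=True), ForeignKey("upstream_sources.id"), nullable=True)
    fetched_at = Column(DateTime(timezone=True))
    response_headers = Column(JSONB)                           # upstream headers, provenance
    created_at = Column(DateTime(timezone=True), server_default=func.now())
```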
---

# Issue #69: HTTP Client - Generic URL Fetcher

**Status: Pending**

## Description

Create a reusable HTTP client for fetching artifacts from upstream sources. Supports multiple auth methods, streaming for large files, and computes SHA256 while downloading.

## Acceptance Criteria

- [ ] `UpstreamClient` class in `backend/app/upstream.py`
- [ ] `fetch(url)` method that:
  - Streams response body (doesn't load large files into memory)
  - Computes SHA256 hash while streaming
  - Returns file content, hash, size, and response headers
- [ ] Auth support based on upstream source configuration:
  - None (anonymous)
  - Basic auth (username/password)
  - Bearer token (Authorization: Bearer {token})
  - API key (custom header name/value)
- [ ] URL-to-source matching:
  - Match URL to configured upstream source by URL prefix
  - Apply auth from matched source
  - Respect source priority for multiple matches
- [ ] Configuration options:
  - Timeout (connect and read, default 30s/300s)
  - Max retries (default 3)
  - Follow redirects (default true, max 5)
  - Max file size (reject if Content-Length exceeds limit)
- [ ] Respect `allow_public_internet` setting:
  - If false, reject URLs matching `is_public=true` sources
  - If false, reject URLs not matching any configured source
- [ ] Capture response headers for provenance tracking
- [ ] Proper error handling:
  - Connection errors (retry with backoff)
  - HTTP errors (4xx, 5xx)
  - Timeout errors
  - SSL/TLS errors
- [ ] Logging for debugging (URL, source matched, status, timing)
- [ ] Unit tests with mocked HTTP responses
- [ ] Integration tests against httpbin.org or similar (optional, marked)

## Technical Notes

- Use `httpx` for async HTTP support (already in requirements)
- Stream to temp file to avoid memory issues with large artifacts
- Consider checksum verification if upstream provides it (e.g., npm provides shasum)
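The core of the fetcher is streaming the body while hashing it, so large artifacts never sit in memory. A minimal sketch of that part of `fetch()` with httpx (an assumed shape only; the auth handling, retries, and size limits listed above are omitted):

```python
import hashlib
import tempfile

import httpx


async def fetch_sketch(url: str, client: httpx.AsyncClient) -> dict:
    """Stream a URL to a temp file, computing SHA256 and size on the way."""
    sha256 = hashlib.sha256()
    size = 0
    tmp = tempfile.NamedTemporaryFile(delete=False)

    async with client.stream("GET", url, follow_redirects=True) as response:
        response.raise_for_status()
        async for chunk in response.aiter_bytes():
            sha256.update(chunk)
            size += len(chunk)
            tmp.write(chunk)

    tmp.close()
    return {
        "path": tmp.name,
        "sha256": sha256.hexdigest(),
        "size": size,
        "headers": dict(response.headers),  # kept for provenance (cached_urls.response_headers)
    }
```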
---

# Issue #70: Cache API Endpoint

**Status: Pending**

## Description

API endpoint to cache an artifact from an upstream URL. This is the core endpoint that fetches from upstream, stores in Orchard, and creates appropriate tags.

## Acceptance Criteria

- [ ] `POST /api/v1/cache` endpoint
- [ ] Request body:
  ```json
  {
    "url": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
    "source_type": "npm",
    "package_name": "lodash",
    "tag": "4.17.21",
    "user_project": "my-app",
    "user_package": "npm-deps",
    "user_tag": "lodash-4.17.21",
    "expected_hash": "sha256:abc123..."
  }
  ```
  - `url` (required): URL to fetch
  - `source_type` (required): Determines system project (_npm, _pypi, etc.)
  - `package_name` (optional): Package name in system project, derived from URL if not provided
  - `tag` (optional): Tag name in system project, derived from URL if not provided
  - `user_project`, `user_package`, `user_tag` (optional): Cross-reference in user's project
  - `expected_hash` (optional): Verify downloaded content matches
- [ ] Response:
  ```json
  {
    "artifact_id": "abc123...",
    "sha256": "abc123...",
    "size": 12345,
    "content_type": "application/gzip",
    "already_cached": false,
    "source_url": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
    "source_name": "npm-public",
    "system_project": "_npm",
    "system_package": "lodash",
    "system_tag": "4.17.21",
    "user_reference": "my-app/npm-deps:lodash-4.17.21"
  }
  ```
- [ ] Behavior:
  - Check if URL already cached (by url_hash in cached_urls)
  - If cached: return existing artifact, optionally create user tag
  - If not cached: fetch via UpstreamClient, store artifact, create tags
  - Create/get system project if needed (e.g., `_npm`)
  - Create package in system project (e.g., `_npm/lodash`)
  - Create tag in system project (e.g., `_npm/lodash:4.17.21`)
  - If user reference provided, create tag in user's project
  - Record in cached_urls table with provenance
- [ ] Error handling:
  - 400: Invalid request (bad URL format, missing required fields)
  - 403: Air-gap mode enabled and URL is from public source
  - 404: Upstream returned 404
  - 409: Hash mismatch (if expected_hash provided)
  - 502: Upstream fetch failed (connection error, timeout)
  - 503: Upstream source disabled
- [ ] Authentication required (any authenticated user can cache)
- [ ] Audit logging for cache operations
- [ ] Integration tests covering success and error cases

## Technical Notes

- URL parsing for package_name/tag derivation is format-specific:
  - npm: `/{package}/-/{package}-{version}.tgz` → package=lodash, tag=4.17.21
  - pypi: `/packages/.../requests-2.28.0.tar.gz` → package=requests, tag=2.28.0
  - maven: `/{group}/{artifact}/{version}/{artifact}-{version}.jar`
- Deduplication: if same SHA256 already exists, just create new tag pointing to it
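As a usage illustration, a client could drive this endpoint with a few lines of httpx; the base URL and token here are placeholders, not part of the spec above:

```python
import httpx

ORCHARD_URL = "https://orchard.example.com"  # placeholder base URL

payload = {
    "url": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
    "source_type": "npm",
    "package_name": "lodash",
    "tag": "4.17.21",
}

response = httpx.post(
    f"{ORCHARD_URL}/api/v1/cache",
    json=payload,
    headers={"Authorization": "Bearer <token>"},  # placeholder credential
    timeout=300,
)
response.raise_for_status()
result = response.json()
print(result["artifact_id"], result["already_cached"])
```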
---

# Issue #71: System Projects (Cache Namespaces)

**Status: Pending**

## Description

Implement auto-created system projects for organizing cached artifacts by format type. These are special projects that provide a browsable namespace for all cached upstream packages.

## Acceptance Criteria

- [ ] System project names: `_npm`, `_pypi`, `_maven`, `_docker`, `_helm`, `_nuget`, `_deb`, `_rpm`, `_generic`
- [ ] Auto-creation:
  - Created automatically on first cache request for that format
  - Created by cache endpoint, not at startup
  - Uses system user as creator (`created_by = "system"`)
- [ ] System project properties:
  - `is_system = true`
  - `is_public = true` (readable by all authenticated users)
  - `description` = "System cache for {format} packages"
- [ ] Restrictions:
  - Cannot be deleted (return 403 with message)
  - Cannot be renamed
  - Cannot change `is_public` to false
  - Only admins can modify description
- [ ] Helper function: `get_or_create_system_project(source_type)` in routes.py or new cache.py module
- [ ] Update project deletion endpoint to check `is_system` flag
- [ ] Update project update endpoint to enforce restrictions
- [ ] Query helper: list all system projects for UI dropdown
- [ ] Unit tests for restrictions
- [ ] Integration tests for auto-creation and restrictions

## Technical Notes

- System projects are identified by `is_system=true`, not just naming convention
- The `_` prefix is a convention for display purposes
- Packages within system projects follow upstream naming (e.g., `_npm/lodash`, `_npm/@types/node`)
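A sketch of the `get_or_create_system_project` helper named above, assuming an async SQLAlchemy session and a Project model with the fields from Issue #68 (the session handling and field names are assumptions, not the final implementation):

```python
from sqlalchemy import select

from app.models import Project  # assumed import path


async def get_or_create_system_project(session, source_type: str) -> "Project":
    """Return the `_<source_type>` system project, creating it on first use."""
    name = f"_{source_type}"  # e.g. "_npm", "_pypi"

    existing = await session.scalar(select(Project).where(Project.name == name))
    if existing is not None:
        return existing

    project = Project(
        name=name,
        is_system=True,                 # deletion/rename restrictions hang off this flag
        is_public=True,                 # readable by all authenticated users
        description=f"System cache for {source_type} packages",
        created_by="system",
    )
    session.add(project)
    await session.flush()               # assign the primary key without committing yet
    return project
```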
---

# Issue #72: Upstream Sources Admin API

**Status: Pending**

## Description

CRUD API endpoints for managing upstream sources configuration. Admin-only access.

## Acceptance Criteria

- [ ] `GET /api/v1/admin/upstream-sources` - List all upstream sources
  - Returns array of sources with id, name, source_type, url, enabled, is_public, auth_type, priority, has_credentials, created_at, updated_at
  - Supports `?enabled=true/false` filter
  - Supports `?source_type=npm,pypi` filter
  - Passwords/tokens never returned
- [ ] `POST /api/v1/admin/upstream-sources` - Create upstream source
  - Request: name, source_type, url, enabled, is_public, auth_type, username, password, headers, priority
  - Validates unique name
  - Validates URL format
  - Encrypts password/headers before storage
  - Returns created source (without secrets)
- [ ] `GET /api/v1/admin/upstream-sources/{id}` - Get source details
  - Returns source with `has_credentials` boolean, not actual credentials
- [ ] `PUT /api/v1/admin/upstream-sources/{id}` - Update source
  - Partial update supported
  - If password provided, re-encrypt; if omitted, keep existing
  - Special value `password: null` clears credentials
- [ ] `DELETE /api/v1/admin/upstream-sources/{id}` - Delete source
  - Returns 400 if source has cached_urls referencing it (optional: cascade or reassign)
- [ ] `POST /api/v1/admin/upstream-sources/{id}/test` - Test connectivity
  - Attempts HEAD request to source URL
  - Returns success/failure with status code and timing
  - Does not cache anything
- [ ] All endpoints require admin role
- [ ] Audit logging for all mutations
- [ ] Pydantic schemas: UpstreamSourceCreate, UpstreamSourceUpdate, UpstreamSourceResponse
- [ ] Integration tests for all endpoints

## Technical Notes

- Test endpoint should respect auth configuration to verify credentials work
- Consider adding `last_used_at` and `last_error` fields for observability (future enhancement)

---

# Issue #73: Global Cache Settings API

**Status: Pending**

## Description

API endpoints for managing global cache settings including air-gap mode.

## Acceptance Criteria

- [ ] `GET /api/v1/admin/cache-settings` - Get current settings
  - Returns: allow_public_internet, auto_create_system_projects, created_at, updated_at
- [ ] `PUT /api/v1/admin/cache-settings` - Update settings
  - Partial update supported
  - Returns updated settings
- [ ] Settings fields:
  - `allow_public_internet` (boolean): When false, blocks all requests to sources marked `is_public=true`
  - `auto_create_system_projects` (boolean): When false, system projects must be created manually
- [ ] Admin-only access
- [ ] Audit logging for changes (especially air-gap mode changes)
- [ ] Pydantic schemas: CacheSettingsResponse, CacheSettingsUpdate
- [ ] Initialize singleton row on first access if not exists
- [ ] Integration tests

## Technical Notes

- Air-gap mode change should be logged prominently (security-relevant)
- Consider requiring confirmation header for disabling air-gap mode (similar to factory reset)

---

# Issue #74: Environment Variable Overrides

**Status: Pending**

## Description

Allow cache and upstream configuration via environment variables for containerized deployments. Environment variables override database settings following 12-factor app principles.

## Acceptance Criteria

- [ ] Global settings overrides:
  - `ORCHARD_CACHE_ALLOW_PUBLIC_INTERNET=true/false`
  - `ORCHARD_CACHE_AUTO_CREATE_SYSTEM_PROJECTS=true/false`
  - `ORCHARD_CACHE_ENCRYPTION_KEY` (Fernet key for credential encryption)
- [ ] Upstream source definition via env vars:
  - `ORCHARD_UPSTREAM__{NAME}__URL` (double underscore as separator)
  - `ORCHARD_UPSTREAM__{NAME}__TYPE` (npm, pypi, maven, etc.)
  - `ORCHARD_UPSTREAM__{NAME}__ENABLED` (true/false)
  - `ORCHARD_UPSTREAM__{NAME}__IS_PUBLIC` (true/false)
  - `ORCHARD_UPSTREAM__{NAME}__AUTH_TYPE` (none, basic, bearer, api_key)
  - `ORCHARD_UPSTREAM__{NAME}__USERNAME`
  - `ORCHARD_UPSTREAM__{NAME}__PASSWORD`
  - `ORCHARD_UPSTREAM__{NAME}__PRIORITY`
  - Example: `ORCHARD_UPSTREAM__NPM_PRIVATE__URL=https://npm.corp.com`
- [ ] Env var sources:
  - Loaded at startup
  - Merged with database sources
  - Env var sources have `source = "env"` marker
  - Cannot be modified via API (return 400)
  - Cannot be deleted via API (return 400)
- [ ] Update Settings class in config.py
- [ ] Update get/list endpoints to include env-defined sources
- [ ] Document all env vars in CLAUDE.md
- [ ] Unit tests for env var parsing
- [ ] Integration tests with env vars set

## Technical Notes

- Double underscore (`__`) separator allows source names with single underscores
- Env-defined sources should appear in API responses but marked as read-only
- Consider startup validation that warns about invalid env var combinations
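A sketch of how the `ORCHARD_UPSTREAM__{NAME}__*` variables could be folded into source definitions at startup (parsing only; merging with database rows and validation are left out, and field defaults are assumptions):

```python
import os
from collections import defaultdict

PREFIX = "ORCHARD_UPSTREAM__"


def parse_upstream_env(environ=os.environ) -> list[dict]:
    """Group ORCHARD_UPSTREAM__{NAME}__{FIELD} variables into per-source dicts."""
    grouped: dict[str, dict] = defaultdict(dict)

    for key, value in environ.items():
        if not key.startswith(PREFIX):
            continue
        # Split on the first double underscore so names like NPM_PRIVATE stay intact.
        name, _, field = key[len(PREFIX):].partition("__")
        if not name or not field:
            continue
        grouped[name.lower()][field.lower()] = value

    sources = []
    for name, fields in grouped.items():
        sources.append({
            "name": name,
            "url": fields.get("url"),
            "source_type": fields.get("type", "generic"),
            "enabled": fields.get("enabled", "false").lower() == "true",
            "is_public": fields.get("is_public", "false").lower() == "true",
            "auth_type": fields.get("auth_type", "none"),
            "username": fields.get("username"),
            "password": fields.get("password"),
            "priority": int(fields.get("priority", 100)),
            "source": "env",  # marker: read-only, not editable or deletable via the API
        })
    return sources
```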
---
|
||||||
|
|
||||||
|
# Issue #75: Frontend - Upstream Sources Management
|
||||||
|
|
||||||
|
**Status: Pending**
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
Admin UI for managing upstream sources and cache settings.
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
- [ ] New admin page: `/admin/cache` or `/admin/upstream-sources`
|
||||||
|
- [ ] Upstream sources section:
|
||||||
|
- Table listing all sources with: name, type, URL, enabled toggle, public badge, priority, actions
|
||||||
|
- Visual distinction for env-defined sources (locked icon, no edit/delete)
|
||||||
|
- Create button opens modal/form
|
||||||
|
- Edit button for DB-defined sources
|
||||||
|
- Delete with confirmation modal
|
||||||
|
- Test connection button with status indicator
|
||||||
|
- [ ] Create/edit form fields:
|
||||||
|
- Name (text, required)
|
||||||
|
- Source type (dropdown)
|
||||||
|
- URL (text, required)
|
||||||
|
- Priority (number)
|
||||||
|
- Is public (checkbox)
|
||||||
|
- Enabled (checkbox)
|
||||||
|
- Auth type (dropdown: none, basic, bearer, api_key)
|
||||||
|
- Conditional auth fields based on type:
|
||||||
|
- Basic: username, password
|
||||||
|
- Bearer: token
|
||||||
|
- API key: header name, header value
|
||||||
|
- Password fields masked, "unchanged" placeholder on edit
|
||||||
|
- [ ] Cache settings section:
|
||||||
|
- Air-gap mode toggle with warning
|
||||||
|
- Auto-create system projects toggle
|
||||||
|
- "Air-gap mode" shows prominent warning banner when enabled
|
||||||
|
- [ ] Link from main admin navigation
|
||||||
|
- [ ] Loading and error states
|
||||||
|
- [ ] Success/error toast notifications
|
||||||
|
|
||||||
|
## Technical Notes
|
||||||
|
|
||||||
|
- Use existing admin page patterns from user management
|
||||||
|
- Air-gap toggle should require confirmation (modal with warning text)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# Issue #105: Frontend - System Projects Integration
|
||||||
|
|
||||||
|
**Status: Pending**
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
Integrate system projects into the frontend UI with appropriate visual treatment and navigation.
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
- [ ] Home page project dropdown:
|
||||||
|
- System projects shown in separate "Cached Packages" section
|
||||||
|
- Visual distinction (icon, different background, or badge)
|
||||||
|
- Format icon for each type (npm, pypi, maven, etc.)
|
||||||
|
- [ ] Project list/grid:
|
||||||
|
- System projects can be filtered: "Show system projects" toggle
|
||||||
|
- Or separate tab: "Projects" | "Package Cache"
|
||||||
|
- [ ] System project page:
|
||||||
|
- "System Cache" badge in header
|
||||||
|
- Description explains that this is an auto-managed cache
|
||||||
|
- Settings/delete buttons hidden or disabled
|
||||||
|
- Shows format type prominently
|
||||||
|
- [ ] Package page within system project:
|
||||||
|
- Shows "Cached from" with source URL (linked)
|
||||||
|
- Shows "First cached" timestamp
|
||||||
|
- Shows which upstream source provided it
|
||||||
|
- [ ] Artifact page:
|
||||||
|
- If artifact came from cache, show provenance:
|
||||||
|
- Original URL
|
||||||
|
- Upstream source name
|
||||||
|
- Fetch timestamp
|
||||||
|
- [ ] Search includes system projects (with filter option)
|
||||||
|
|
||||||
|
## Technical Notes
|
||||||
|
|
||||||
|
- Use React context or query params for system project filtering
|
||||||
|
- Consider dedicated route: `/cache/npm/lodash` as alias for `/_npm/lodash`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# Issue #77: CLI - Cache Command
|
||||||
|
|
||||||
|
**Status: Pending**
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
Add a new `orchard cache` command to the existing CLI for caching artifacts from upstream URLs. This integrates with the new cache API endpoint and can optionally update `orchard.ensure` with cached artifacts.
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
- [ ] New command: `orchard cache <url>` in `orchard/commands/cache.py`
|
||||||
|
- [ ] Basic usage:
|
||||||
|
```bash
|
||||||
|
# Cache a URL, print artifact info
|
||||||
|
orchard cache https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz
|
||||||
|
|
||||||
|
# Output:
|
||||||
|
# Caching https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz...
|
||||||
|
# Source type: npm
|
||||||
|
# Package: lodash
|
||||||
|
# Version: 4.17.21
|
||||||
|
#
|
||||||
|
# Successfully cached artifact
|
||||||
|
# Artifact ID: abc123...
|
||||||
|
# Size: 1.2 MB
|
||||||
|
# System project: _npm
|
||||||
|
# System package: lodash
|
||||||
|
# System tag: 4.17.21
|
||||||
|
```
|
||||||
|
- [ ] Options:
|
||||||
|
| Option | Description |
|
||||||
|
|--------|-------------|
|
||||||
|
| `--type, -t TYPE` | Source type: npm, pypi, maven, docker, helm, generic (auto-detected from URL if not provided) |
|
||||||
|
| `--package, -p NAME` | Package name in system project (auto-derived from URL if not provided) |
|
||||||
|
| `--tag TAG` | Tag name in system project (auto-derived from URL if not provided) |
|
||||||
|
| `--project PROJECT` | Also create tag in this user project |
|
||||||
|
| `--user-package PKG` | Package name in user project (required if --project specified) |
|
||||||
|
| `--user-tag TAG` | Tag name in user project (default: same as system tag) |
|
||||||
|
| `--expected-hash HASH` | Verify downloaded content matches this SHA256 |
|
||||||
|
| `--add` | Add to orchard.ensure after caching |
|
||||||
|
| `--add-path PATH` | Extraction path for --add (default: `<package>/`) |
|
||||||
|
| `--file, -f FILE` | Path to orchard.ensure file |
|
||||||
|
| `--verbose, -v` | Show detailed output |
|
||||||
|
- [ ] URL type auto-detection:
|
||||||
|
- `registry.npmjs.org` → npm
|
||||||
|
- `pypi.org` or `files.pythonhosted.org` → pypi
|
||||||
|
- `repo1.maven.org` or contains `/maven2/` → maven
|
||||||
|
- `registry-1.docker.io` or `docker.io` → docker
|
||||||
|
- Otherwise → generic
|
||||||
|
- [ ] Package/version extraction from URL patterns:
|
||||||
|
- npm: `/{package}/-/{package}-{version}.tgz`
|
||||||
|
- pypi: `/packages/.../requests-{version}.tar.gz`
|
||||||
|
- maven: `/{group}/{artifact}/{version}/{artifact}-{version}.jar`
|
||||||
|
- [ ] Add `cache_artifact()` function to `orchard/api.py`
|
||||||
|
- [ ] Integration with `--add` flag:
|
||||||
|
- Parse existing orchard.ensure
|
||||||
|
- Add new dependency entry pointing to cached artifact
|
||||||
|
- Use artifact_id (SHA256) for hermetic pinning
|
||||||
|
- [ ] Batch mode: `orchard cache --file urls.txt`
|
||||||
|
- One URL per line
|
||||||
|
- Lines starting with `#` are comments
|
||||||
|
- Report success/failure for each
|
||||||
|
- [ ] Exit codes:
|
||||||
|
- 0: Success (or already cached)
|
||||||
|
- 1: Fetch failed
|
||||||
|
- 2: Hash mismatch
|
||||||
|
- 3: Air-gap mode blocked request
|
||||||
|
- [ ] Error handling consistent with existing CLI patterns
|
||||||
|
- [ ] Unit tests in `test/test_cache.py`
|
||||||
|
- [ ] Update README.md with cache command documentation
|
||||||
|
|
||||||
|
## Technical Notes
|
||||||
|
|
||||||
|
- Follow existing Click patterns from other commands
|
||||||
|
- Use `get_auth_headers()` from `orchard/auth.py`
|
||||||
|
- URL parsing can use `urllib.parse`
|
||||||
|
- Consider adding a URL pattern registry for extensibility (a detection sketch follows these notes)
|
||||||
|
- The `--add` flag should integrate with existing ensure file parsing in `orchard/ensure.py`
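
A rough sketch of the auto-detection and package/version extraction rules from the acceptance criteria, using only `urllib.parse` and `re`; the host checks and regexes are illustrative starting points that would live behind the pattern registry suggested above, not the final implementation:

```python
import re
from urllib.parse import urlparse

def detect_source_type(url: str) -> str:
    """Map a URL onto npm/pypi/maven/docker/generic per the rules above."""
    parsed = urlparse(url)
    host, path = parsed.netloc.lower(), parsed.path
    if "registry.npmjs.org" in host:
        return "npm"
    if "pypi.org" in host or "files.pythonhosted.org" in host:
        return "pypi"
    if "repo1.maven.org" in host or "/maven2/" in path:
        return "maven"
    if "registry-1.docker.io" in host or host == "docker.io":
        return "docker"
    return "generic"

# Illustrative patterns only; scoped npm packages, wheels with build tags, etc.
# would need additional entries in a real pattern registry.
_PATTERNS = {
    "npm": re.compile(r"/(?P<package>[^/]+)/-/(?P=package)-(?P<version>\d[^/]*)\.tgz$"),
    "pypi": re.compile(r"/packages/.*/(?P<package>[A-Za-z0-9_.\-]+)-(?P<version>\d[^/]*)\.(?:tar\.gz|zip|whl)$"),
    "maven": re.compile(r"/(?P<package>[^/]+)/(?P<version>[^/]+)/(?P=package)-(?P=version)\.(?:jar|pom|war)$"),
}

def extract_package_version(url: str, source_type: str):
    """Best-effort (package, version); (None, None) means the user must pass --package/--tag."""
    pattern = _PATTERNS.get(source_type)
    match = pattern.search(urlparse(url).path) if pattern else None
    return (match.group("package"), match.group("version")) if match else (None, None)
```
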
|
||||||
|
|
||||||
|
## Example Workflows
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Simple: cache a single URL
|
||||||
|
orchard cache https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz
|
||||||
|
|
||||||
|
# Cache and add to orchard.ensure for current project
|
||||||
|
orchard cache https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz \
|
||||||
|
--add --add-path libs/lodash/
|
||||||
|
|
||||||
|
# Cache with explicit metadata
|
||||||
|
orchard cache https://internal.corp/files/custom-lib.tar.gz \
|
||||||
|
--type generic \
|
||||||
|
--package custom-lib \
|
||||||
|
--tag v1.0.0
|
||||||
|
|
||||||
|
# Cache and cross-reference to user project
|
||||||
|
orchard cache https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz \
|
||||||
|
--project my-app \
|
||||||
|
--user-package npm-deps \
|
||||||
|
--user-tag lodash-4.17.21
|
||||||
|
|
||||||
|
# Batch cache from file
|
||||||
|
orchard cache --file deps-urls.txt
|
||||||
|
|
||||||
|
# Verify hash while caching
|
||||||
|
orchard cache https://example.com/file.tar.gz \
|
||||||
|
--expected-hash sha256:abc123...
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Out of Scope (Future Enhancements)
|
||||||
|
|
||||||
|
- Automatic transitive dependency resolution (client's responsibility)
|
||||||
|
- Lockfile parsing (`package-lock.json`, `requirements.txt`) - stretch goal for CLI
|
||||||
|
- Cache eviction policies (we cache forever by design)
|
||||||
|
- Mirroring/sync between Orchard instances
|
||||||
|
- Format-specific metadata extraction (npm package.json parsing, etc.)
|
||||||
|
|
||||||
|
## Success Criteria
|
||||||
|
|
||||||
|
- [ ] Can cache any URL and retrieve by SHA256 hash
|
||||||
|
- [ ] Cached artifacts persist indefinitely
|
||||||
|
- [ ] Air-gap mode blocks all public internet access
|
||||||
|
- [ ] Multiple upstream sources with different auth
|
||||||
|
- [ ] System projects organize cached packages by format
|
||||||
|
- [ ] CLI can cache URLs and update orchard.ensure
|
||||||
|
- [ ] Admin UI for upstream source management
|
||||||
228
docs/plans/2026-02-04-pypi-proxy-performance-design.md
Normal file
@@ -0,0 +1,228 @@
|
|||||||
|
# PyPI Proxy Performance & Multi-Protocol Architecture Design
|
||||||
|
|
||||||
|
**Date:** 2026-02-04
|
||||||
|
**Status:** Approved
|
||||||
|
**Branch:** fix/pypi-proxy-timeout
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Comprehensive infrastructure overhaul to address latency, throughput, and resource consumption issues in the PyPI proxy, while establishing a foundation for npm, Maven, and other package protocols.
|
||||||
|
|
||||||
|
## Goals
|
||||||
|
|
||||||
|
1. **Reduce latency** - Eliminate per-request connection overhead, cache aggressively
|
||||||
|
2. **Increase throughput** - Handle hundreds of concurrent requests without degradation
|
||||||
|
3. **Lower resource usage** - Connection pooling, efficient DB queries, proper async I/O
|
||||||
|
4. **Enable multi-protocol** - Abstract base class ready for npm/Maven/etc.
|
||||||
|
5. **Maintain hermetic builds** - Immutable artifact content and metadata, mutable discovery data
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────────────────────────────────────┐
|
||||||
|
│ FastAPI Application │
|
||||||
|
│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │
|
||||||
|
│ │ PyPI Proxy │ │ npm Proxy │ │ Maven Proxy │ │ (future) │ │
|
||||||
|
│ │ Router │ │ Router │ │ Router │ │ │ │
|
||||||
|
│ └──────┬──────┘ └──────┬──────┘ └──────┬──────┘ └─────────────┘ │
|
||||||
|
│ │ │ │ │
|
||||||
|
│ └────────────────┼────────────────┘ │
|
||||||
|
│ ▼ │
|
||||||
|
│ ┌───────────────────────┐ │
|
||||||
|
│ │ PackageProxyBase │ ← Abstract base class │
|
||||||
|
│ │ - check_cache() │ │
|
||||||
|
│ │ - fetch_upstream() │ │
|
||||||
|
│ │ - store_artifact() │ │
|
||||||
|
│ │ - serve_artifact() │ │
|
||||||
|
│ └───────────┬───────────┘ │
|
||||||
|
│ │ │
|
||||||
|
│ ┌────────────────┼────────────────┐ │
|
||||||
|
│ ▼ ▼ ▼ │
|
||||||
|
│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │
|
||||||
|
│ │ HttpClient │ │ CacheService│ │ ThreadPool │ │
|
||||||
|
│ │ Manager │ │ (Redis) │ │ Executor │ │
|
||||||
|
│ └─────────────┘ └─────────────┘ └─────────────┘ │
|
||||||
|
│ │ │ │ │
|
||||||
|
└─────────┼────────────────┼────────────────┼──────────────────────────┘
|
||||||
|
▼ ▼ ▼
|
||||||
|
┌──────────┐ ┌──────────┐ ┌──────────────┐
|
||||||
|
│ Upstream │ │ Redis │ │ S3/MinIO │
|
||||||
|
│ Sources │ │ │ │ │
|
||||||
|
└──────────┘ └──────────┘ └──────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## Components
|
||||||
|
|
||||||
|
### 1. HttpClientManager
|
||||||
|
|
||||||
|
Manages httpx.AsyncClient pools with FastAPI lifespan integration.
|
||||||
|
|
||||||
|
**Features:**
|
||||||
|
- Default pool for general requests
|
||||||
|
- Per-upstream pools for sources needing specific config/auth
|
||||||
|
- Graceful shutdown drains in-flight requests
|
||||||
|
- Dedicated thread pool for blocking operations
|
||||||
|
|
||||||
|
**Configuration:**
|
||||||
|
```bash
|
||||||
|
ORCHARD_HTTP_MAX_CONNECTIONS=100 # Default pool size
|
||||||
|
ORCHARD_HTTP_KEEPALIVE_CONNECTIONS=20 # Keep-alive connections
|
||||||
|
ORCHARD_HTTP_CONNECT_TIMEOUT=30 # Connection timeout (seconds)
|
||||||
|
ORCHARD_HTTP_READ_TIMEOUT=60 # Read timeout (seconds)
|
||||||
|
ORCHARD_HTTP_WORKER_THREADS=32 # Thread pool size
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `backend/app/http_client.py`
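
A minimal sketch of what `http_client.py` could look like, assuming `httpx` and the environment variables above; the class and method names are placeholders rather than the final interface:

```python
import os
from concurrent.futures import ThreadPoolExecutor

import httpx

class HttpClientManager:
    """Shared httpx.AsyncClient plus per-upstream clients and a worker thread pool."""

    def __init__(self) -> None:
        limits = httpx.Limits(
            max_connections=int(os.getenv("ORCHARD_HTTP_MAX_CONNECTIONS", "100")),
            max_keepalive_connections=int(os.getenv("ORCHARD_HTTP_KEEPALIVE_CONNECTIONS", "20")),
        )
        timeout = httpx.Timeout(
            connect=float(os.getenv("ORCHARD_HTTP_CONNECT_TIMEOUT", "30")),
            read=float(os.getenv("ORCHARD_HTTP_READ_TIMEOUT", "60")),
            write=60.0,
            pool=60.0,
        )
        self.default = httpx.AsyncClient(limits=limits, timeout=timeout, follow_redirects=True)
        self._per_upstream: dict[str, httpx.AsyncClient] = {}
        self.executor = ThreadPoolExecutor(
            max_workers=int(os.getenv("ORCHARD_HTTP_WORKER_THREADS", "32"))
        )

    def for_upstream(self, name: str, **client_kwargs) -> httpx.AsyncClient:
        # Lazily create a dedicated client when a source needs its own auth/config.
        if name not in self._per_upstream:
            self._per_upstream[name] = httpx.AsyncClient(**client_kwargs)
        return self._per_upstream[name]

    async def aclose(self) -> None:
        # Close clients and stop the thread pool; a fuller implementation would
        # explicitly drain in-flight requests before closing.
        await self.default.aclose()
        for client in self._per_upstream.values():
            await client.aclose()
        self.executor.shutdown(wait=True)
```
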
|
||||||
|
|
||||||
|
### 2. CacheService (Redis Layer)
|
||||||
|
|
||||||
|
Redis-backed caching with category-aware TTL and invalidation.
|
||||||
|
|
||||||
|
**Cache Categories:**
|
||||||
|
|
||||||
|
| Category | TTL | Invalidation | Purpose |
|
||||||
|
|----------|-----|--------------|---------|
|
||||||
|
| ARTIFACT_METADATA | Forever | Never (immutable) | Artifact info by SHA256 |
|
||||||
|
| ARTIFACT_DEPENDENCIES | Forever | Never (immutable) | Extracted deps by SHA256 |
|
||||||
|
| DEPENDENCY_RESOLUTION | Forever | Manual/refresh param | Resolution results |
|
||||||
|
| UPSTREAM_SOURCES | 1 hour | On DB change | Upstream config |
|
||||||
|
| PACKAGE_INDEX | 5 min | TTL only | PyPI/npm index pages |
|
||||||
|
| PACKAGE_VERSIONS | 5 min | TTL only | Version listings |
|
||||||
|
|
||||||
|
**Key format:** `orchard:{category}:{protocol}:{identifier}`
|
||||||
|
|
||||||
|
**Configuration:**
|
||||||
|
```bash
|
||||||
|
ORCHARD_REDIS_HOST=redis
|
||||||
|
ORCHARD_REDIS_PORT=6379
|
||||||
|
ORCHARD_REDIS_DB=0
|
||||||
|
ORCHARD_CACHE_TTL_INDEX=300 # Package index: 5 minutes
|
||||||
|
ORCHARD_CACHE_TTL_VERSIONS=300 # Version listings: 5 minutes
|
||||||
|
ORCHARD_CACHE_TTL_UPSTREAM=3600 # Upstream config: 1 hour
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `backend/app/cache_service.py`
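
A sketch of the service, assuming `redis.asyncio` and the key format above; the category names and constructor shape are illustrative:

```python
import json
from enum import Enum
from typing import Any, Optional

import redis.asyncio as redis

class CacheCategory(str, Enum):
    ARTIFACT_METADATA = "artifact_metadata"    # immutable, no TTL
    PACKAGE_INDEX = "package_index"            # TTL only (default 300s)
    UPSTREAM_SOURCES = "upstream_sources"      # TTL + invalidation on DB change

class CacheService:
    def __init__(self, client: redis.Redis, ttls: dict):
        self._redis = client
        self._ttls = ttls  # category -> seconds; missing entry => cache forever

    @staticmethod
    def key(category: CacheCategory, protocol: str, identifier: str) -> str:
        # Matches the documented format: orchard:{category}:{protocol}:{identifier}
        return f"orchard:{category.value}:{protocol}:{identifier}"

    async def get(self, category, protocol, identifier) -> Optional[Any]:
        raw = await self._redis.get(self.key(category, protocol, identifier))
        return json.loads(raw) if raw is not None else None

    async def set(self, category, protocol, identifier, value: Any) -> None:
        await self._redis.set(
            self.key(category, protocol, identifier),
            json.dumps(value),
            ex=self._ttls.get(category),  # ex=None stores without expiry
        )

    async def invalidate_upstream_sources(self) -> None:
        # Event-driven invalidation when upstream config changes in the DB.
        async for k in self._redis.scan_iter("orchard:upstream_sources:*"):
            await self._redis.delete(k)
```
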
|
||||||
|
|
||||||
|
### 3. PackageProxyBase
|
||||||
|
|
||||||
|
Abstract base class defining the cache→fetch→store→serve flow.
|
||||||
|
|
||||||
|
**Abstract methods (protocol-specific):**
|
||||||
|
- `get_protocol_name()` - Return 'pypi', 'npm', 'maven'
|
||||||
|
- `get_system_project_name()` - Return '_pypi', '_npm'
|
||||||
|
- `rewrite_index_html()` - Rewrite upstream index to Orchard URLs
|
||||||
|
- `extract_metadata()` - Extract deps from package file
|
||||||
|
- `parse_package_url()` - Parse URL into package/version/filename
|
||||||
|
|
||||||
|
**Concrete methods (shared):**
|
||||||
|
- `serve_index()` - Serve package index with caching
|
||||||
|
- `serve_artifact()` - Full cache→fetch→store→serve flow
|
||||||
|
|
||||||
|
**File:** `backend/app/proxy_base.py`
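
A structural sketch of the base class; the helpers shown raising `NotImplementedError` would delegate to the CacheService, HttpClientManager, and ArtifactRepository components described in this document:

```python
from abc import ABC, abstractmethod
from typing import Optional

class PackageProxyBase(ABC):
    """Shared cache→fetch→store→serve flow; one subclass per protocol."""

    def __init__(self, cache, http, repo):
        # CacheService, HttpClientManager, ArtifactRepository from this design.
        self.cache, self.http, self.repo = cache, http, repo

    # Protocol-specific hooks ---------------------------------------------------
    @abstractmethod
    def get_protocol_name(self) -> str: ...            # 'pypi', 'npm', 'maven'
    @abstractmethod
    def get_system_project_name(self) -> str: ...      # '_pypi', '_npm'
    @abstractmethod
    def rewrite_index_html(self, html: str, base_url: str) -> str: ...
    @abstractmethod
    def extract_metadata(self, content: bytes, filename: str) -> dict: ...
    @abstractmethod
    def parse_package_url(self, path: str) -> tuple[str, str, str]: ...

    # Shared flow ----------------------------------------------------------------
    async def serve_artifact(self, path: str, upstream_url: str) -> bytes:
        package, version, filename = self.parse_package_url(path)
        cached = await self.check_cache(upstream_url)
        if cached is not None:
            return cached
        content = await self.fetch_upstream(upstream_url)
        await self.store_artifact(package, version, filename, content)
        return content

    async def check_cache(self, upstream_url: str) -> Optional[bytes]:
        # Joined lookup via ArtifactRepository; returns artifact bytes on a hit.
        raise NotImplementedError("sketch: delegates to self.repo and object storage")

    async def fetch_upstream(self, upstream_url: str) -> bytes:
        resp = await self.http.default.get(upstream_url)
        resp.raise_for_status()
        return resp.content

    async def store_artifact(self, package, version, filename, content) -> None:
        # Upsert artifact row + batch-insert dependencies from extract_metadata().
        raise NotImplementedError("sketch: delegates to self.repo")
```
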
|
||||||
|
|
||||||
|
### 4. ArtifactRepository (DB Optimization)
|
||||||
|
|
||||||
|
Optimized database operations eliminating N+1 queries.
|
||||||
|
|
||||||
|
**Key methods:**
|
||||||
|
- `get_or_create_artifact()` - Atomic upsert via ON CONFLICT
|
||||||
|
- `batch_upsert_dependencies()` - Single INSERT for all deps
|
||||||
|
- `get_cached_url_with_artifact()` - Joined query for cache lookup
|
||||||
|
|
||||||
|
**Query reduction:**
|
||||||
|
|
||||||
|
| Operation | Before | After |
|
||||||
|
|-----------|--------|-------|
|
||||||
|
| Cache hit check | 2 queries | 1 query (joined) |
|
||||||
|
| Store artifact | 3-4 queries | 1 query (upsert) |
|
||||||
|
| Store 50 deps | 50+ queries | 1 query (batch) |
|
||||||
|
|
||||||
|
**Configuration:**
|
||||||
|
```bash
|
||||||
|
ORCHARD_DATABASE_POOL_SIZE=20 # Base connections (up from 5)
|
||||||
|
ORCHARD_DATABASE_MAX_OVERFLOW=30 # Burst capacity (up from 10)
|
||||||
|
ORCHARD_DATABASE_POOL_TIMEOUT=30 # Wait timeout
|
||||||
|
ORCHARD_DATABASE_POOL_PRE_PING=false # Disable in prod for performance
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `backend/app/db_utils.py`
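
A sketch of the repository using raw SQL through SQLAlchemy's async session; the table and column names (`artifacts`, `artifact_dependencies`, `cached_urls`) are assumptions for illustration, not the actual schema:

```python
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

class ArtifactRepository:
    """Thin query layer; names below are illustrative, not the real models."""

    def __init__(self, session: AsyncSession):
        self.session = session

    async def get_or_create_artifact(self, artifact_id: str, size: int, storage_key: str) -> None:
        # Single atomic upsert instead of SELECT-then-INSERT (3-4 queries before).
        await self.session.execute(
            text(
                "INSERT INTO artifacts (id, size, storage_key) "
                "VALUES (:id, :size, :storage_key) "
                "ON CONFLICT (id) DO NOTHING"
            ),
            {"id": artifact_id, "size": size, "storage_key": storage_key},
        )

    async def batch_upsert_dependencies(self, artifact_id: str, deps: list[dict]) -> None:
        if not deps:
            return
        # One executemany call for all dependency rows instead of 50+ round trips.
        await self.session.execute(
            text(
                "INSERT INTO artifact_dependencies (artifact_id, project, package, constraint_spec) "
                "VALUES (:artifact_id, :project, :package, :constraint_spec) "
                "ON CONFLICT DO NOTHING"
            ),
            [{"artifact_id": artifact_id, **dep} for dep in deps],
        )

    async def get_cached_url_with_artifact(self, url: str):
        # Joined lookup: cache-hit check in one query instead of two.
        result = await self.session.execute(
            text(
                "SELECT a.* FROM cached_urls c "
                "JOIN artifacts a ON a.id = c.artifact_id "
                "WHERE c.url = :url"
            ),
            {"url": url},
        )
        return result.first()
```
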
|
||||||
|
|
||||||
|
### 5. Dependency Resolution Caching
|
||||||
|
|
||||||
|
Cache resolution results for ensure files and API queries.
|
||||||
|
|
||||||
|
**Cache key:** Hash of (artifact_id, max_depth, include_optional)
|
||||||
|
|
||||||
|
**Invalidation:** Manual only (immutable artifact deps mean cached resolutions stay valid)
|
||||||
|
|
||||||
|
**Refresh:** `?refresh=true` parameter forces fresh resolution
|
||||||
|
|
||||||
|
**File:** Updates to `backend/app/dependencies.py`
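
A sketch of the cache-key derivation and the `refresh` bypass; `get_raw`/`set_raw` are hypothetical helpers standing in for whatever the CacheService actually exposes:

```python
import hashlib
import json

def resolution_cache_key(artifact_id: str, max_depth: int, include_optional: bool) -> str:
    """Stable key: the same resolution inputs always hash to the same entry."""
    payload = json.dumps(
        {"artifact_id": artifact_id, "max_depth": max_depth, "include_optional": include_optional},
        sort_keys=True,
    )
    # "any" stands in for the protocol slot of orchard:{category}:{protocol}:{identifier}.
    return "orchard:dependency_resolution:any:" + hashlib.sha256(payload.encode()).hexdigest()

async def resolve_with_cache(cache, resolver, artifact_id, max_depth=10,
                             include_optional=False, refresh=False):
    key = resolution_cache_key(artifact_id, max_depth, include_optional)
    if not refresh:                            # ?refresh=true skips the lookup
        cached = await cache.get_raw(key)      # get_raw/set_raw: hypothetical helpers
        if cached is not None:
            return cached
    result = await resolver(artifact_id, max_depth, include_optional)
    await cache.set_raw(key, result)           # stored without TTL: deps are immutable
    return result
```
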
|
||||||
|
|
||||||
|
### 6. FastAPI Integration
|
||||||
|
|
||||||
|
Lifespan-managed infrastructure with dependency injection.
|
||||||
|
|
||||||
|
**Startup:**
|
||||||
|
1. Initialize HttpClientManager (connection pools)
|
||||||
|
2. Initialize CacheService (Redis connection)
|
||||||
|
3. Load upstream source configs
|
||||||
|
|
||||||
|
**Shutdown:**
|
||||||
|
1. Drain in-flight HTTP requests
|
||||||
|
2. Close Redis connections
|
||||||
|
3. Shutdown thread pool
|
||||||
|
|
||||||
|
**Health endpoint additions:**
|
||||||
|
- Database connection status
|
||||||
|
- Redis ping
|
||||||
|
- HTTP pool active/max connections
|
||||||
|
- Thread pool active/max workers
|
||||||
|
|
||||||
|
**File:** Updates to `backend/app/main.py`
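
A sketch of the lifespan wiring, assuming the modules above and `redis.asyncio`; the import paths and health payload are illustrative:

```python
import os
from contextlib import asynccontextmanager

import redis.asyncio as redis
from fastapi import FastAPI

# Import paths assume the new modules described in this plan.
from app.http_client import HttpClientManager
from app.cache_service import CacheService, CacheCategory


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup: pools and Redis connections are created once, not per request.
    app.state.http = HttpClientManager()
    app.state.redis = redis.Redis(
        host=os.getenv("ORCHARD_REDIS_HOST", "redis"),
        port=int(os.getenv("ORCHARD_REDIS_PORT", "6379")),
        db=int(os.getenv("ORCHARD_REDIS_DB", "0")),
    )
    app.state.cache = CacheService(
        app.state.redis,
        ttls={
            CacheCategory.PACKAGE_INDEX: int(os.getenv("ORCHARD_CACHE_TTL_INDEX", "300")),
            CacheCategory.UPSTREAM_SOURCES: int(os.getenv("ORCHARD_CACHE_TTL_UPSTREAM", "3600")),
        },
    )
    yield
    # Shutdown: drain HTTP clients and the thread pool, then close Redis.
    await app.state.http.aclose()
    await app.state.redis.aclose()  # close() on older redis-py releases


app = FastAPI(lifespan=lifespan)


@app.get("/health")
async def health():
    # A fuller check would also report DB pool and thread pool utilization.
    return {"redis": await app.state.redis.ping()}
```
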
|
||||||
|
|
||||||
|
## Files Summary
|
||||||
|
|
||||||
|
**New files:**
|
||||||
|
- `backend/app/http_client.py` - HttpClientManager
|
||||||
|
- `backend/app/cache_service.py` - CacheService
|
||||||
|
- `backend/app/proxy_base.py` - PackageProxyBase
|
||||||
|
- `backend/app/db_utils.py` - ArtifactRepository
|
||||||
|
|
||||||
|
**Modified files:**
|
||||||
|
- `backend/app/config.py` - New settings
|
||||||
|
- `backend/app/main.py` - Lifespan integration
|
||||||
|
- `backend/app/pypi_proxy.py` - Refactor to use base class
|
||||||
|
- `backend/app/dependencies.py` - Resolution caching
|
||||||
|
- `backend/app/routes.py` - Health endpoint, DI
|
||||||
|
|
||||||
|
## Hermetic Build Guarantees
|
||||||
|
|
||||||
|
**Immutable (cached forever):**
|
||||||
|
- Artifact content (by SHA256)
|
||||||
|
- Extracted dependencies for a specific artifact
|
||||||
|
- Dependency resolution results
|
||||||
|
|
||||||
|
**Mutable (TTL + event invalidation):**
|
||||||
|
- Package index listings
|
||||||
|
- Version discovery
|
||||||
|
- Upstream source configuration
|
||||||
|
|
||||||
|
Once an artifact is cached with SHA256 `abc123` and dependencies extracted, that data never changes.
|
||||||
|
|
||||||
|
## Performance Expectations
|
||||||
|
|
||||||
|
| Metric | Before | After |
|
||||||
|
|--------|--------|-------|
|
||||||
|
| HTTP connection setup | Per request (~100-500ms) | Pooled (~5ms) |
|
||||||
|
| Cache hit (index page) | N/A | ~5ms (Redis) |
|
||||||
|
| Store 50 dependencies | ~500ms (50 queries) | ~10ms (1 query) |
|
||||||
|
| Dependency resolution (cached) | N/A | ~5ms |
|
||||||
|
| Concurrent request capacity | ~15 (DB pool) | ~50 (configurable) |
|
||||||
|
|
||||||
|
## Testing Requirements
|
||||||
|
|
||||||
|
- Unit tests for each new component
|
||||||
|
- Integration tests for full proxy flow
|
||||||
|
- Load tests to verify pool sizing
|
||||||
|
- Cache hit/miss verification tests
|
||||||
1587
docs/plans/2026-02-04-pypi-proxy-performance-implementation.md
Normal file
File diff suppressed because it is too large
1117
frontend/package-lock.json
generated
File diff suppressed because it is too large
@@ -12,9 +12,12 @@
|
|||||||
"test:coverage": "vitest run --coverage"
|
"test:coverage": "vitest run --coverage"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@types/dagre": "^0.7.53",
|
||||||
|
"dagre": "^0.8.5",
|
||||||
"react": "^18.2.0",
|
"react": "^18.2.0",
|
||||||
"react-dom": "^18.2.0",
|
"react-dom": "^18.2.0",
|
||||||
"react-router-dom": "6.28.0"
|
"react-router-dom": "6.28.0",
|
||||||
|
"reactflow": "^11.11.4"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@testing-library/jest-dom": "^6.4.2",
|
"@testing-library/jest-dom": "^6.4.2",
|
||||||
@@ -34,6 +37,15 @@
|
|||||||
"ufo": "1.5.4",
|
"ufo": "1.5.4",
|
||||||
"rollup": "4.52.4",
|
"rollup": "4.52.4",
|
||||||
"caniuse-lite": "1.0.30001692",
|
"caniuse-lite": "1.0.30001692",
|
||||||
"baseline-browser-mapping": "2.9.5"
|
"baseline-browser-mapping": "2.9.5",
|
||||||
|
"lodash": "4.17.21",
|
||||||
|
"electron-to-chromium": "1.5.72",
|
||||||
|
"@babel/core": "7.26.0",
|
||||||
|
"@babel/traverse": "7.26.4",
|
||||||
|
"@babel/types": "7.26.3",
|
||||||
|
"@babel/compat-data": "7.26.3",
|
||||||
|
"@babel/parser": "7.26.3",
|
||||||
|
"@babel/generator": "7.26.3",
|
||||||
|
"@babel/code-frame": "7.26.2"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ import ChangePasswordPage from './pages/ChangePasswordPage';
|
|||||||
import APIKeysPage from './pages/APIKeysPage';
|
import APIKeysPage from './pages/APIKeysPage';
|
||||||
import AdminUsersPage from './pages/AdminUsersPage';
|
import AdminUsersPage from './pages/AdminUsersPage';
|
||||||
import AdminOIDCPage from './pages/AdminOIDCPage';
|
import AdminOIDCPage from './pages/AdminOIDCPage';
|
||||||
|
import AdminCachePage from './pages/AdminCachePage';
|
||||||
import ProjectSettingsPage from './pages/ProjectSettingsPage';
|
import ProjectSettingsPage from './pages/ProjectSettingsPage';
|
||||||
import TeamsPage from './pages/TeamsPage';
|
import TeamsPage from './pages/TeamsPage';
|
||||||
import TeamDashboardPage from './pages/TeamDashboardPage';
|
import TeamDashboardPage from './pages/TeamDashboardPage';
|
||||||
@@ -50,6 +51,7 @@ function AppRoutes() {
|
|||||||
<Route path="/settings/api-keys" element={<APIKeysPage />} />
|
<Route path="/settings/api-keys" element={<APIKeysPage />} />
|
||||||
<Route path="/admin/users" element={<AdminUsersPage />} />
|
<Route path="/admin/users" element={<AdminUsersPage />} />
|
||||||
<Route path="/admin/oidc" element={<AdminOIDCPage />} />
|
<Route path="/admin/oidc" element={<AdminOIDCPage />} />
|
||||||
|
<Route path="/admin/cache" element={<AdminCachePage />} />
|
||||||
<Route path="/teams" element={<TeamsPage />} />
|
<Route path="/teams" element={<TeamsPage />} />
|
||||||
<Route path="/teams/:slug" element={<TeamDashboardPage />} />
|
<Route path="/teams/:slug" element={<TeamDashboardPage />} />
|
||||||
<Route path="/teams/:slug/settings" element={<TeamSettingsPage />} />
|
<Route path="/teams/:slug/settings" element={<TeamSettingsPage />} />
|
||||||
|
|||||||
@@ -1,14 +1,11 @@
|
|||||||
import {
|
import {
|
||||||
Project,
|
Project,
|
||||||
Package,
|
Package,
|
||||||
Tag,
|
|
||||||
TagDetail,
|
|
||||||
Artifact,
|
|
||||||
ArtifactDetail,
|
ArtifactDetail,
|
||||||
|
PackageArtifact,
|
||||||
UploadResponse,
|
UploadResponse,
|
||||||
PaginatedResponse,
|
PaginatedResponse,
|
||||||
ListParams,
|
ListParams,
|
||||||
TagListParams,
|
|
||||||
PackageListParams,
|
PackageListParams,
|
||||||
ArtifactListParams,
|
ArtifactListParams,
|
||||||
ProjectListParams,
|
ProjectListParams,
|
||||||
@@ -42,6 +39,10 @@ import {
|
|||||||
TeamUpdate,
|
TeamUpdate,
|
||||||
TeamMemberCreate,
|
TeamMemberCreate,
|
||||||
TeamMemberUpdate,
|
TeamMemberUpdate,
|
||||||
|
UpstreamSource,
|
||||||
|
UpstreamSourceCreate,
|
||||||
|
UpstreamSourceUpdate,
|
||||||
|
UpstreamSourceTestResult,
|
||||||
} from './types';
|
} from './types';
|
||||||
|
|
||||||
const API_BASE = '/api/v1';
|
const API_BASE = '/api/v1';
|
||||||
@@ -74,7 +75,13 @@ export class ForbiddenError extends ApiError {
|
|||||||
async function handleResponse<T>(response: Response): Promise<T> {
|
async function handleResponse<T>(response: Response): Promise<T> {
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
|
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
|
||||||
const message = error.detail || `HTTP ${response.status}`;
|
// Handle detail as string or object (backend may return structured errors)
|
||||||
|
let message: string;
|
||||||
|
if (typeof error.detail === 'object') {
|
||||||
|
message = JSON.stringify(error.detail);
|
||||||
|
} else {
|
||||||
|
message = error.detail || `HTTP ${response.status}`;
|
||||||
|
}
|
||||||
|
|
||||||
if (response.status === 401) {
|
if (response.status === 401) {
|
||||||
throw new UnauthorizedError(message);
|
throw new UnauthorizedError(message);
|
||||||
@@ -230,32 +237,6 @@ export async function createPackage(projectName: string, data: { name: string; d
|
|||||||
return handleResponse<Package>(response);
|
return handleResponse<Package>(response);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Tag API
|
|
||||||
export async function listTags(projectName: string, packageName: string, params: TagListParams = {}): Promise<PaginatedResponse<TagDetail>> {
|
|
||||||
const query = buildQueryString(params as Record<string, unknown>);
|
|
||||||
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags${query}`);
|
|
||||||
return handleResponse<PaginatedResponse<TagDetail>>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function listTagsSimple(projectName: string, packageName: string, params: TagListParams = {}): Promise<TagDetail[]> {
|
|
||||||
const data = await listTags(projectName, packageName, params);
|
|
||||||
return data.items;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getTag(projectName: string, packageName: string, tagName: string): Promise<TagDetail> {
|
|
||||||
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags/${tagName}`);
|
|
||||||
return handleResponse<TagDetail>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function createTag(projectName: string, packageName: string, data: { name: string; artifact_id: string }): Promise<Tag> {
|
|
||||||
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/tags`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify(data),
|
|
||||||
});
|
|
||||||
return handleResponse<Tag>(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Artifact API
|
// Artifact API
|
||||||
export async function getArtifact(artifactId: string): Promise<ArtifactDetail> {
|
export async function getArtifact(artifactId: string): Promise<ArtifactDetail> {
|
||||||
const response = await fetch(`${API_BASE}/artifact/${artifactId}`);
|
const response = await fetch(`${API_BASE}/artifact/${artifactId}`);
|
||||||
@@ -266,10 +247,10 @@ export async function listPackageArtifacts(
|
|||||||
projectName: string,
|
projectName: string,
|
||||||
packageName: string,
|
packageName: string,
|
||||||
params: ArtifactListParams = {}
|
params: ArtifactListParams = {}
|
||||||
): Promise<PaginatedResponse<Artifact & { tags: string[] }>> {
|
): Promise<PaginatedResponse<PackageArtifact>> {
|
||||||
const query = buildQueryString(params as Record<string, unknown>);
|
const query = buildQueryString(params as Record<string, unknown>);
|
||||||
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/artifacts${query}`);
|
const response = await fetch(`${API_BASE}/project/${projectName}/${packageName}/artifacts${query}`);
|
||||||
return handleResponse<PaginatedResponse<Artifact & { tags: string[] }>>(response);
|
return handleResponse<PaginatedResponse<PackageArtifact>>(response);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Upload
|
// Upload
|
||||||
@@ -277,14 +258,10 @@ export async function uploadArtifact(
|
|||||||
projectName: string,
|
projectName: string,
|
||||||
packageName: string,
|
packageName: string,
|
||||||
file: File,
|
file: File,
|
||||||
tag?: string,
|
|
||||||
version?: string
|
version?: string
|
||||||
): Promise<UploadResponse> {
|
): Promise<UploadResponse> {
|
||||||
const formData = new FormData();
|
const formData = new FormData();
|
||||||
formData.append('file', file);
|
formData.append('file', file);
|
||||||
if (tag) {
|
|
||||||
formData.append('tag', tag);
|
|
||||||
}
|
|
||||||
if (version) {
|
if (version) {
|
||||||
formData.append('version', version);
|
formData.append('version', version);
|
||||||
}
|
}
|
||||||
@@ -682,3 +659,64 @@ export async function searchUsers(query: string, limit: number = 10): Promise<Us
|
|||||||
});
|
});
|
||||||
return handleResponse<UserSearchResult[]>(response);
|
return handleResponse<UserSearchResult[]>(response);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Upstream Sources Admin API
|
||||||
|
export interface UpstreamSourceListParams {
|
||||||
|
enabled?: boolean;
|
||||||
|
source_type?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function listUpstreamSources(params: UpstreamSourceListParams = {}): Promise<UpstreamSource[]> {
|
||||||
|
const query = buildQueryString(params as Record<string, unknown>);
|
||||||
|
const response = await fetch(`${API_BASE}/admin/upstream-sources${query}`, {
|
||||||
|
credentials: 'include',
|
||||||
|
});
|
||||||
|
return handleResponse<UpstreamSource[]>(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createUpstreamSource(data: UpstreamSourceCreate): Promise<UpstreamSource> {
|
||||||
|
const response = await fetch(`${API_BASE}/admin/upstream-sources`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify(data),
|
||||||
|
credentials: 'include',
|
||||||
|
});
|
||||||
|
return handleResponse<UpstreamSource>(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getUpstreamSource(id: string): Promise<UpstreamSource> {
|
||||||
|
const response = await fetch(`${API_BASE}/admin/upstream-sources/${id}`, {
|
||||||
|
credentials: 'include',
|
||||||
|
});
|
||||||
|
return handleResponse<UpstreamSource>(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function updateUpstreamSource(id: string, data: UpstreamSourceUpdate): Promise<UpstreamSource> {
|
||||||
|
const response = await fetch(`${API_BASE}/admin/upstream-sources/${id}`, {
|
||||||
|
method: 'PUT',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify(data),
|
||||||
|
credentials: 'include',
|
||||||
|
});
|
||||||
|
return handleResponse<UpstreamSource>(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function deleteUpstreamSource(id: string): Promise<void> {
|
||||||
|
const response = await fetch(`${API_BASE}/admin/upstream-sources/${id}`, {
|
||||||
|
method: 'DELETE',
|
||||||
|
credentials: 'include',
|
||||||
|
});
|
||||||
|
if (!response.ok) {
|
||||||
|
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
|
||||||
|
throw new ApiError(error.detail || `HTTP ${response.status}`, response.status);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function testUpstreamSource(id: string): Promise<UpstreamSourceTestResult> {
|
||||||
|
const response = await fetch(`${API_BASE}/admin/upstream-sources/${id}/test`, {
|
||||||
|
method: 'POST',
|
||||||
|
credentials: 'include',
|
||||||
|
});
|
||||||
|
return handleResponse<UpstreamSourceTestResult>(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -55,6 +55,10 @@
|
|||||||
font-size: 0.8125rem;
|
font-size: 0.8125rem;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.missing-count {
|
||||||
|
color: #f59e0b;
|
||||||
|
}
|
||||||
|
|
||||||
.close-btn {
|
.close-btn {
|
||||||
background: transparent;
|
background: transparent;
|
||||||
border: none;
|
border: none;
|
||||||
@@ -72,171 +76,115 @@
|
|||||||
color: var(--text-primary);
|
color: var(--text-primary);
|
||||||
}
|
}
|
||||||
|
|
||||||
.dependency-graph-toolbar {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 8px;
|
|
||||||
padding: 12px 20px;
|
|
||||||
border-bottom: 1px solid var(--border-primary);
|
|
||||||
background: var(--bg-secondary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.zoom-level {
|
|
||||||
margin-left: auto;
|
|
||||||
font-size: 0.8125rem;
|
|
||||||
color: var(--text-muted);
|
|
||||||
font-family: 'JetBrains Mono', monospace;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dependency-graph-container {
|
.dependency-graph-container {
|
||||||
flex: 1;
|
flex: 1;
|
||||||
overflow: hidden;
|
overflow: hidden;
|
||||||
position: relative;
|
position: relative;
|
||||||
background:
|
background: var(--bg-primary);
|
||||||
linear-gradient(90deg, var(--border-primary) 1px, transparent 1px),
|
|
||||||
linear-gradient(var(--border-primary) 1px, transparent 1px);
|
|
||||||
background-size: 20px 20px;
|
|
||||||
background-position: center center;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.graph-canvas {
|
/* React Flow Customization */
|
||||||
padding: 40px;
|
.react-flow__background {
|
||||||
min-width: 100%;
|
background-color: var(--bg-primary) !important;
|
||||||
min-height: 100%;
|
|
||||||
transform-origin: center center;
|
|
||||||
transition: transform 0.1s ease-out;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Graph Nodes */
|
.react-flow__controls {
|
||||||
.graph-node-container {
|
background: var(--bg-tertiary);
|
||||||
display: flex;
|
border: 1px solid var(--border-primary);
|
||||||
flex-direction: column;
|
border-radius: var(--radius-md);
|
||||||
align-items: flex-start;
|
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3);
|
||||||
}
|
}
|
||||||
|
|
||||||
.graph-node {
|
.react-flow__controls-button {
|
||||||
|
background: var(--bg-tertiary);
|
||||||
|
border: none;
|
||||||
|
border-bottom: 1px solid var(--border-primary);
|
||||||
|
color: var(--text-secondary);
|
||||||
|
width: 28px;
|
||||||
|
height: 28px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.react-flow__controls-button:hover {
|
||||||
|
background: var(--bg-hover);
|
||||||
|
color: var(--text-primary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.react-flow__controls-button:last-child {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.react-flow__controls-button svg {
|
||||||
|
fill: currentColor;
|
||||||
|
}
|
||||||
|
|
||||||
|
.react-flow__attribution {
|
||||||
|
background: transparent !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.react-flow__attribution a {
|
||||||
|
color: var(--text-muted) !important;
|
||||||
|
font-size: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Custom Flow Nodes */
|
||||||
|
.flow-node {
|
||||||
background: var(--bg-tertiary);
|
background: var(--bg-tertiary);
|
||||||
border: 2px solid var(--border-primary);
|
border: 2px solid var(--border-primary);
|
||||||
border-radius: var(--radius-md);
|
border-radius: var(--radius-md);
|
||||||
padding: 12px 16px;
|
padding: 12px 16px;
|
||||||
min-width: 200px;
|
min-width: 160px;
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
transition: all var(--transition-fast);
|
transition: all var(--transition-fast);
|
||||||
position: relative;
|
text-align: center;
|
||||||
}
|
}
|
||||||
|
|
||||||
.graph-node:hover {
|
.flow-node:hover {
|
||||||
border-color: var(--accent-primary);
|
border-color: var(--accent-primary);
|
||||||
box-shadow: 0 4px 12px rgba(16, 185, 129, 0.2);
|
box-shadow: 0 4px 12px rgba(16, 185, 129, 0.2);
|
||||||
}
|
}
|
||||||
|
|
||||||
.graph-node--root {
|
.flow-node--root {
|
||||||
background: linear-gradient(135deg, rgba(16, 185, 129, 0.15) 0%, rgba(5, 150, 105, 0.15) 100%);
|
background: linear-gradient(135deg, rgba(16, 185, 129, 0.15) 0%, rgba(5, 150, 105, 0.15) 100%);
|
||||||
border-color: var(--accent-primary);
|
border-color: var(--accent-primary);
|
||||||
}
|
}
|
||||||
|
|
||||||
.graph-node--hovered {
|
.flow-node__name {
|
||||||
transform: scale(1.02);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node__header {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 8px;
|
|
||||||
margin-bottom: 4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node__name {
|
|
||||||
font-weight: 600;
|
font-weight: 600;
|
||||||
color: var(--accent-primary);
|
color: var(--accent-primary);
|
||||||
font-family: 'JetBrains Mono', monospace;
|
font-family: 'JetBrains Mono', monospace;
|
||||||
font-size: 0.875rem;
|
font-size: 0.8125rem;
|
||||||
|
margin-bottom: 4px;
|
||||||
|
word-break: break-word;
|
||||||
}
|
}
|
||||||
|
|
||||||
.graph-node__toggle {
|
.flow-node__details {
|
||||||
background: var(--bg-hover);
|
|
||||||
border: 1px solid var(--border-primary);
|
|
||||||
border-radius: 4px;
|
|
||||||
width: 20px;
|
|
||||||
height: 20px;
|
|
||||||
display: flex;
|
display: flex;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
justify-content: center;
|
justify-content: center;
|
||||||
cursor: pointer;
|
gap: 8px;
|
||||||
font-size: 0.875rem;
|
font-size: 0.6875rem;
|
||||||
color: var(--text-secondary);
|
|
||||||
font-weight: 600;
|
|
||||||
margin-left: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node__toggle:hover {
|
|
||||||
background: var(--bg-tertiary);
|
|
||||||
color: var(--text-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-node__details {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 12px;
|
|
||||||
font-size: 0.75rem;
|
|
||||||
color: var(--text-muted);
|
color: var(--text-muted);
|
||||||
}
|
}
|
||||||
|
|
||||||
.graph-node__version {
|
.flow-node__version {
|
||||||
font-family: 'JetBrains Mono', monospace;
|
font-family: 'JetBrains Mono', monospace;
|
||||||
color: var(--text-secondary);
|
color: var(--text-secondary);
|
||||||
}
|
}
|
||||||
|
|
||||||
.graph-node__size {
|
.flow-node__size {
|
||||||
color: var(--text-muted);
|
color: var(--text-muted);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Graph Children / Tree Structure */
|
/* Flow Handles (connection points) */
|
||||||
.graph-children {
|
.flow-handle {
|
||||||
display: flex;
|
width: 8px !important;
|
||||||
padding-left: 24px;
|
height: 8px !important;
|
||||||
margin-top: 8px;
|
background: var(--border-primary) !important;
|
||||||
position: relative;
|
border: 2px solid var(--bg-tertiary) !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
.graph-connector {
|
.flow-node:hover .flow-handle {
|
||||||
position: absolute;
|
background: var(--accent-primary) !important;
|
||||||
left: 12px;
|
|
||||||
top: 0;
|
|
||||||
bottom: 50%;
|
|
||||||
width: 12px;
|
|
||||||
border-left: 2px solid var(--border-primary);
|
|
||||||
border-bottom: 2px solid var(--border-primary);
|
|
||||||
border-bottom-left-radius: 8px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-children-list {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 8px;
|
|
||||||
position: relative;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-children-list::before {
|
|
||||||
content: '';
|
|
||||||
position: absolute;
|
|
||||||
left: -12px;
|
|
||||||
top: 20px;
|
|
||||||
bottom: 20px;
|
|
||||||
border-left: 2px solid var(--border-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-children-list > .graph-node-container {
|
|
||||||
position: relative;
|
|
||||||
}
|
|
||||||
|
|
||||||
.graph-children-list > .graph-node-container::before {
|
|
||||||
content: '';
|
|
||||||
position: absolute;
|
|
||||||
left: -12px;
|
|
||||||
top: 20px;
|
|
||||||
width: 12px;
|
|
||||||
border-top: 2px solid var(--border-primary);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Loading, Error, Empty States */
|
/* Loading, Error, Empty States */
|
||||||
@@ -279,39 +227,76 @@
|
|||||||
line-height: 1.5;
|
line-height: 1.5;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Tooltip */
|
.graph-warning {
|
||||||
.graph-tooltip {
|
display: flex;
|
||||||
position: fixed;
|
align-items: center;
|
||||||
bottom: 24px;
|
gap: 8px;
|
||||||
left: 50%;
|
padding: 8px 16px;
|
||||||
transform: translateX(-50%);
|
background: rgba(245, 158, 11, 0.1);
|
||||||
background: var(--bg-tertiary);
|
border-top: 1px solid rgba(245, 158, 11, 0.3);
|
||||||
border: 1px solid var(--border-primary);
|
color: var(--warning-color, #f59e0b);
|
||||||
border-radius: var(--radius-md);
|
font-size: 0.875rem;
|
||||||
padding: 12px 16px;
|
|
||||||
font-size: 0.8125rem;
|
|
||||||
box-shadow: 0 8px 24px rgba(0, 0, 0, 0.4);
|
|
||||||
z-index: 1001;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.graph-tooltip strong {
|
.graph-warning svg {
|
||||||
display: block;
|
flex-shrink: 0;
|
||||||
color: var(--accent-primary);
|
|
||||||
font-family: 'JetBrains Mono', monospace;
|
|
||||||
margin-bottom: 4px;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.graph-tooltip div {
|
/* Missing Dependencies */
|
||||||
color: var(--text-secondary);
|
.missing-dependencies {
|
||||||
margin-top: 2px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.tooltip-hint {
|
|
||||||
margin-top: 8px;
|
|
||||||
padding-top: 8px;
|
|
||||||
border-top: 1px solid var(--border-primary);
|
border-top: 1px solid var(--border-primary);
|
||||||
color: var(--text-muted);
|
padding: 16px 20px;
|
||||||
|
background: rgba(245, 158, 11, 0.05);
|
||||||
|
max-height: 200px;
|
||||||
|
overflow-y: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.missing-dependencies h3 {
|
||||||
|
margin: 0 0 8px 0;
|
||||||
|
font-size: 0.875rem;
|
||||||
|
font-weight: 600;
|
||||||
|
color: #f59e0b;
|
||||||
|
}
|
||||||
|
|
||||||
|
.missing-hint {
|
||||||
|
margin: 0 0 12px 0;
|
||||||
font-size: 0.75rem;
|
font-size: 0.75rem;
|
||||||
|
color: var(--text-muted);
|
||||||
|
}
|
||||||
|
|
||||||
|
.missing-list {
|
||||||
|
list-style: none;
|
||||||
|
padding: 0;
|
||||||
|
margin: 0;
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
gap: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.missing-item {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 4px;
|
||||||
|
background: var(--bg-tertiary);
|
||||||
|
border: 1px solid rgba(245, 158, 11, 0.3);
|
||||||
|
border-radius: var(--radius-sm);
|
||||||
|
padding: 4px 8px;
|
||||||
|
font-size: 0.75rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.missing-name {
|
||||||
|
font-family: 'JetBrains Mono', monospace;
|
||||||
|
color: var(--text-secondary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.missing-constraint {
|
||||||
|
color: var(--text-muted);
|
||||||
|
font-family: 'JetBrains Mono', monospace;
|
||||||
|
}
|
||||||
|
|
||||||
|
.missing-required-by {
|
||||||
|
color: var(--text-muted);
|
||||||
|
font-size: 0.6875rem;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Responsive */
|
/* Responsive */
|
||||||
|
|||||||
@@ -1,5 +1,19 @@
|
|||||||
import { useState, useEffect, useCallback, useRef } from 'react';
|
import { useState, useEffect, useCallback, useMemo } from 'react';
|
||||||
import { useNavigate } from 'react-router-dom';
|
import { useNavigate } from 'react-router-dom';
|
||||||
|
import ReactFlow, {
|
||||||
|
Node,
|
||||||
|
Edge,
|
||||||
|
Controls,
|
||||||
|
Background,
|
||||||
|
useNodesState,
|
||||||
|
useEdgesState,
|
||||||
|
MarkerType,
|
||||||
|
NodeProps,
|
||||||
|
Handle,
|
||||||
|
Position,
|
||||||
|
} from 'reactflow';
|
||||||
|
import dagre from 'dagre';
|
||||||
|
import 'reactflow/dist/style.css';
|
||||||
import { ResolvedArtifact, DependencyResolutionResponse, Dependency } from '../types';
|
import { ResolvedArtifact, DependencyResolutionResponse, Dependency } from '../types';
|
||||||
import { resolveDependencies, getArtifactDependencies } from '../api';
|
import { resolveDependencies, getArtifactDependencies } from '../api';
|
||||||
import './DependencyGraph.css';
|
import './DependencyGraph.css';
|
||||||
@@ -11,15 +25,14 @@ interface DependencyGraphProps {
|
|||||||
onClose: () => void;
|
onClose: () => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface GraphNode {
|
interface NodeData {
|
||||||
id: string;
|
label: string;
|
||||||
project: string;
|
project: string;
|
||||||
package: string;
|
package: string;
|
||||||
version: string | null;
|
version: string | null;
|
||||||
size: number;
|
size: number;
|
||||||
depth: number;
|
isRoot: boolean;
|
||||||
children: GraphNode[];
|
onNavigate: (project: string, pkg: string) => void;
|
||||||
isRoot?: boolean;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function formatBytes(bytes: number): string {
|
function formatBytes(bytes: number): string {
|
||||||
@@ -30,84 +43,185 @@ function formatBytes(bytes: number): string {
|
|||||||
return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
|
return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Custom node component
|
||||||
|
function DependencyNode({ data }: NodeProps<NodeData>) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
className={`flow-node ${data.isRoot ? 'flow-node--root' : ''}`}
|
||||||
|
onClick={() => data.onNavigate(data.project, data.package)}
|
||||||
|
>
|
||||||
|
<Handle type="target" position={Position.Top} className="flow-handle" />
|
||||||
|
<div className="flow-node__name">{data.package}</div>
|
||||||
|
<div className="flow-node__details">
|
||||||
|
{data.version && <span className="flow-node__version">{data.version}</span>}
|
||||||
|
<span className="flow-node__size">{formatBytes(data.size)}</span>
|
||||||
|
</div>
|
||||||
|
<Handle type="source" position={Position.Bottom} className="flow-handle" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const nodeTypes = { dependency: DependencyNode };
|
||||||
|
|
||||||
|
// Dagre layout function
|
||||||
|
function getLayoutedElements(
|
||||||
|
nodes: Node<NodeData>[],
|
||||||
|
edges: Edge[],
|
||||||
|
direction: 'TB' | 'LR' = 'TB'
|
||||||
|
) {
|
||||||
|
const dagreGraph = new dagre.graphlib.Graph();
|
||||||
|
dagreGraph.setDefaultEdgeLabel(() => ({}));
|
||||||
|
|
||||||
|
const nodeWidth = 180;
|
||||||
|
const nodeHeight = 60;
|
||||||
|
|
||||||
|
dagreGraph.setGraph({ rankdir: direction, nodesep: 50, ranksep: 80 });
|
||||||
|
|
||||||
|
nodes.forEach((node) => {
|
||||||
|
dagreGraph.setNode(node.id, { width: nodeWidth, height: nodeHeight });
|
||||||
|
});
|
||||||
|
|
||||||
|
edges.forEach((edge) => {
|
||||||
|
dagreGraph.setEdge(edge.source, edge.target);
|
||||||
|
});
|
||||||
|
|
||||||
|
dagre.layout(dagreGraph);
|
||||||
|
|
||||||
|
const layoutedNodes = nodes.map((node) => {
|
||||||
|
const nodeWithPosition = dagreGraph.node(node.id);
|
||||||
|
return {
|
||||||
|
...node,
|
||||||
|
position: {
|
||||||
|
x: nodeWithPosition.x - nodeWidth / 2,
|
||||||
|
y: nodeWithPosition.y - nodeHeight / 2,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
return { nodes: layoutedNodes, edges };
|
||||||
|
}
|
||||||
|
|
||||||
function DependencyGraph({ projectName, packageName, tagName, onClose }: DependencyGraphProps) {
|
function DependencyGraph({ projectName, packageName, tagName, onClose }: DependencyGraphProps) {
|
||||||
const navigate = useNavigate();
|
const navigate = useNavigate();
|
||||||
const containerRef = useRef<HTMLDivElement>(null);
|
|
||||||
|
|
||||||
const [loading, setLoading] = useState(true);
|
const [loading, setLoading] = useState(true);
|
||||||
const [error, setError] = useState<string | null>(null);
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
const [warning, setWarning] = useState<string | null>(null);
|
||||||
const [resolution, setResolution] = useState<DependencyResolutionResponse | null>(null);
|
const [resolution, setResolution] = useState<DependencyResolutionResponse | null>(null);
|
||||||
const [graphRoot, setGraphRoot] = useState<GraphNode | null>(null);
|
const [nodes, setNodes, onNodesChange] = useNodesState<NodeData>([]);
|
||||||
const [hoveredNode, setHoveredNode] = useState<GraphNode | null>(null);
|
const [edges, setEdges, onEdgesChange] = useEdgesState([]);
|
||||||
const [zoom, setZoom] = useState(1);
|
|
||||||
const [pan, setPan] = useState({ x: 0, y: 0 });
|
const handleNavigate = useCallback((project: string, pkg: string) => {
|
||||||
const [isDragging, setIsDragging] = useState(false);
|
navigate(`/project/${project}/${pkg}`);
|
||||||
const [dragStart, setDragStart] = useState({ x: 0, y: 0 });
|
onClose();
|
||||||
const [collapsedNodes, setCollapsedNodes] = useState<Set<string>>(new Set());
|
}, [navigate, onClose]);
|
||||||
|
|
||||||
// Build graph structure from resolution data
|
// Build graph structure from resolution data
|
||||||
const buildGraph = useCallback(async (resolutionData: DependencyResolutionResponse) => {
|
const buildFlowGraph = useCallback(async (
|
||||||
|
resolutionData: DependencyResolutionResponse,
|
||||||
|
onNavigate: (project: string, pkg: string) => void
|
||||||
|
) => {
|
||||||
const artifactMap = new Map<string, ResolvedArtifact>();
|
const artifactMap = new Map<string, ResolvedArtifact>();
|
||||||
resolutionData.resolved.forEach(artifact => {
|
resolutionData.resolved.forEach(artifact => {
|
||||||
artifactMap.set(artifact.artifact_id, artifact);
|
artifactMap.set(artifact.artifact_id, artifact);
|
||||||
});
|
});
|
||||||
|
|
||||||
// Fetch dependencies for each artifact to build the tree
|
// Fetch dependencies for each artifact
|
||||||
const depsMap = new Map<string, Dependency[]>();
|
const depsMap = new Map<string, Dependency[]>();
|
||||||
|
const failedFetches: string[] = [];
|
||||||
|
|
||||||
for (const artifact of resolutionData.resolved) {
|
for (const artifact of resolutionData.resolved) {
|
||||||
try {
|
try {
|
||||||
const deps = await getArtifactDependencies(artifact.artifact_id);
|
const deps = await getArtifactDependencies(artifact.artifact_id);
|
||||||
depsMap.set(artifact.artifact_id, deps.dependencies);
|
depsMap.set(artifact.artifact_id, deps.dependencies);
|
||||||
} catch {
|
} catch (err) {
|
||||||
|
console.warn(`Failed to fetch dependencies for ${artifact.package}:`, err);
|
||||||
|
failedFetches.push(artifact.package);
|
||||||
depsMap.set(artifact.artifact_id, []);
|
depsMap.set(artifact.artifact_id, []);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Find the root artifact (the requested one)
|
// Report warning if some fetches failed
|
||||||
|
if (failedFetches.length > 0) {
|
||||||
|
setWarning(`Could not load dependency details for: ${failedFetches.slice(0, 3).join(', ')}${failedFetches.length > 3 ? ` and ${failedFetches.length - 3} more` : ''}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the root artifact
|
||||||
const rootArtifact = resolutionData.resolved.find(
|
const rootArtifact = resolutionData.resolved.find(
|
||||||
a => a.project === resolutionData.requested.project &&
|
a => a.project === resolutionData.requested.project &&
|
||||||
a.package === resolutionData.requested.package
|
a.package === resolutionData.requested.package
|
||||||
);
|
);
|
||||||
|
|
||||||
if (!rootArtifact) {
|
if (!rootArtifact) {
|
||||||
return null;
|
return { nodes: [], edges: [] };
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build tree recursively
|
const flowNodes: Node<NodeData>[] = [];
|
||||||
|
const flowEdges: Edge[] = [];
|
||||||
const visited = new Set<string>();
|
const visited = new Set<string>();
|
||||||
|
const nodeIdMap = new Map<string, string>(); // artifact_id -> node id
|
||||||
|
|
||||||
|
// Build nodes and edges recursively
|
||||||
|
const processNode = (artifact: ResolvedArtifact, isRoot: boolean) => {
|
||||||
|
if (visited.has(artifact.artifact_id)) {
|
||||||
|
return nodeIdMap.get(artifact.artifact_id);
|
||||||
|
}
|
||||||
|
|
||||||
const buildNode = (artifact: ResolvedArtifact, depth: number): GraphNode => {
|
|
||||||
const nodeId = `${artifact.project}/${artifact.package}`;
|
|
||||||
visited.add(artifact.artifact_id);
|
visited.add(artifact.artifact_id);
|
||||||
|
const nodeId = `node-${flowNodes.length}`;
|
||||||
|
nodeIdMap.set(artifact.artifact_id, nodeId);
|
||||||
|
|
||||||
|
flowNodes.push({
|
||||||
|
id: nodeId,
|
||||||
|
type: 'dependency',
|
||||||
|
position: { x: 0, y: 0 }, // Will be set by dagre
|
||||||
|
data: {
|
||||||
|
label: `${artifact.project}/${artifact.package}`,
|
||||||
|
project: artifact.project,
|
||||||
|
package: artifact.package,
|
||||||
|
version: artifact.version,
|
||||||
|
size: artifact.size,
|
||||||
|
isRoot,
|
||||||
|
onNavigate,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
const deps = depsMap.get(artifact.artifact_id) || [];
|
const deps = depsMap.get(artifact.artifact_id) || [];
|
||||||
const children: GraphNode[] = [];
|
|
||||||
|
|
||||||
for (const dep of deps) {
|
for (const dep of deps) {
|
||||||
// Find the resolved artifact for this dependency
|
|
||||||
const childArtifact = resolutionData.resolved.find(
|
const childArtifact = resolutionData.resolved.find(
|
||||||
a => a.project === dep.project && a.package === dep.package
|
a => a.project === dep.project && a.package === dep.package
|
||||||
);
|
);
|
||||||
|
|
||||||
if (childArtifact && !visited.has(childArtifact.artifact_id)) {
|
if (childArtifact) {
|
||||||
children.push(buildNode(childArtifact, depth + 1));
|
const childNodeId = processNode(childArtifact, false);
|
||||||
|
if (childNodeId) {
|
||||||
|
flowEdges.push({
|
||||||
|
id: `edge-${nodeId}-${childNodeId}`,
|
||||||
|
source: nodeId,
|
||||||
|
target: childNodeId,
|
||||||
|
markerEnd: {
|
||||||
|
type: MarkerType.ArrowClosed,
|
||||||
|
width: 15,
|
||||||
|
height: 15,
|
||||||
|
color: 'var(--accent-primary)',
|
||||||
|
},
|
||||||
|
style: {
|
||||||
|
stroke: 'var(--border-primary)',
|
||||||
|
strokeWidth: 2,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return nodeId;
|
||||||
id: nodeId,
|
|
||||||
project: artifact.project,
|
|
||||||
package: artifact.package,
|
|
||||||
version: artifact.version || artifact.tag,
|
|
||||||
size: artifact.size,
|
|
||||||
depth,
|
|
||||||
children,
|
|
||||||
isRoot: depth === 0,
|
|
||||||
};
|
|
||||||
};
|
};
|
||||||
|
|
||||||
return buildNode(rootArtifact, 0);
|
processNode(rootArtifact, true);
|
||||||
|
|
||||||
|
// Apply dagre layout
|
||||||
|
return getLayoutedElements(flowNodes, flowEdges);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -117,13 +231,21 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
const result = await resolveDependencies(projectName, packageName, tagName);
|
const result = await resolveDependencies(projectName, packageName, tagName);
|
||||||
|
|
||||||
|
// If only the root package (no dependencies) and no missing deps, close the modal
|
||||||
|
const hasDeps = result.artifact_count > 1 || (result.missing && result.missing.length > 0);
|
||||||
|
if (!hasDeps) {
|
||||||
|
onClose();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
setResolution(result);
|
setResolution(result);
|
||||||
|
|
||||||
const graph = await buildGraph(result);
|
const { nodes: layoutedNodes, edges: layoutedEdges } = await buildFlowGraph(result, handleNavigate);
|
||||||
setGraphRoot(graph);
|
setNodes(layoutedNodes);
|
||||||
|
setEdges(layoutedEdges);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
if (err instanceof Error) {
|
if (err instanceof Error) {
|
||||||
// Check if it's a resolution error
|
|
||||||
try {
|
try {
|
||||||
const errorData = JSON.parse(err.message);
|
const errorData = JSON.parse(err.message);
|
||||||
if (errorData.error === 'circular_dependency') {
|
if (errorData.error === 'circular_dependency') {
|
||||||
@@ -145,95 +267,9 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
|
|||||||
}
|
}
|
||||||
|
|
||||||
loadData();
|
loadData();
|
||||||
}, [projectName, packageName, tagName, buildGraph]);
|
}, [projectName, packageName, tagName, buildFlowGraph, handleNavigate, onClose, setNodes, setEdges]);
|
||||||
|
|
||||||
const handleNodeClick = (node: GraphNode) => {
|
const defaultViewport = useMemo(() => ({ x: 50, y: 50, zoom: 0.8 }), []);
|
||||||
navigate(`/project/${node.project}/${node.package}`);
|
|
||||||
onClose();
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleNodeToggle = (node: GraphNode, e: React.MouseEvent) => {
|
|
||||||
e.stopPropagation();
|
|
||||||
setCollapsedNodes(prev => {
|
|
||||||
const next = new Set(prev);
|
|
||||||
if (next.has(node.id)) {
|
|
||||||
next.delete(node.id);
|
|
||||||
} else {
|
|
||||||
next.add(node.id);
|
|
||||||
}
|
|
||||||
return next;
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleWheel = (e: React.WheelEvent) => {
|
|
||||||
e.preventDefault();
|
|
||||||
const delta = e.deltaY > 0 ? -0.1 : 0.1;
|
|
||||||
setZoom(z => Math.max(0.25, Math.min(2, z + delta)));
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleMouseDown = (e: React.MouseEvent) => {
|
|
||||||
if (e.target === containerRef.current || (e.target as HTMLElement).classList.contains('graph-canvas')) {
|
|
||||||
setIsDragging(true);
|
|
||||||
setDragStart({ x: e.clientX - pan.x, y: e.clientY - pan.y });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleMouseMove = (e: React.MouseEvent) => {
|
|
||||||
if (isDragging) {
|
|
||||||
setPan({ x: e.clientX - dragStart.x, y: e.clientY - dragStart.y });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleMouseUp = () => {
|
|
||||||
setIsDragging(false);
|
|
||||||
};
|
|
||||||
|
|
||||||
const resetView = () => {
|
|
||||||
setZoom(1);
|
|
||||||
setPan({ x: 0, y: 0 });
|
|
||||||
};
|
|
||||||
|
|
||||||
const renderNode = (node: GraphNode, index: number = 0): JSX.Element => {
|
|
||||||
const isCollapsed = collapsedNodes.has(node.id);
|
|
||||||
const hasChildren = node.children.length > 0;
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div key={`${node.id}-${index}`} className="graph-node-container">
|
|
||||||
<div
|
|
||||||
className={`graph-node ${node.isRoot ? 'graph-node--root' : ''} ${hoveredNode?.id === node.id ? 'graph-node--hovered' : ''}`}
|
|
||||||
onClick={() => handleNodeClick(node)}
|
|
||||||
onMouseEnter={() => setHoveredNode(node)}
|
|
||||||
onMouseLeave={() => setHoveredNode(null)}
|
|
||||||
>
|
|
||||||
<div className="graph-node__header">
|
|
||||||
<span className="graph-node__name">{node.project}/{node.package}</span>
|
|
||||||
{hasChildren && (
|
|
||||||
<button
|
|
||||||
className="graph-node__toggle"
|
|
||||||
onClick={(e) => handleNodeToggle(node, e)}
|
|
||||||
title={isCollapsed ? 'Expand' : 'Collapse'}
|
|
||||||
>
|
|
||||||
{isCollapsed ? '+' : '-'}
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
<div className="graph-node__details">
|
|
||||||
{node.version && <span className="graph-node__version">@ {node.version}</span>}
|
|
||||||
<span className="graph-node__size">{formatBytes(node.size)}</span>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{hasChildren && !isCollapsed && (
|
|
||||||
<div className="graph-children">
|
|
||||||
<div className="graph-connector"></div>
|
|
||||||
<div className="graph-children-list">
|
|
||||||
{node.children.map((child, i) => renderNode(child, i))}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="dependency-graph-modal" onClick={onClose}>
|
<div className="dependency-graph-modal" onClick={onClose}>
|
||||||
@@ -244,7 +280,11 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
          <span>{projectName}/{packageName} @ {tagName}</span>
          {resolution && (
            <span className="graph-stats">
-              {resolution.artifact_count} packages • {formatBytes(resolution.total_size)} total
+              {resolution.artifact_count} cached
+              {resolution.missing && resolution.missing.length > 0 && (
+                <span className="missing-count"> • {resolution.missing.length} not cached</span>
+              )}
+              • {formatBytes(resolution.total_size)} total
            </span>
          )}
        </div>
@@ -256,28 +296,7 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
          </button>
        </div>

-        <div className="dependency-graph-toolbar">
-          <button className="btn btn-secondary btn-small" onClick={() => setZoom(z => Math.min(2, z + 0.25))}>
-            Zoom In
-          </button>
-          <button className="btn btn-secondary btn-small" onClick={() => setZoom(z => Math.max(0.25, z - 0.25))}>
-            Zoom Out
-          </button>
-          <button className="btn btn-secondary btn-small" onClick={resetView}>
-            Reset View
-          </button>
-          <span className="zoom-level">{Math.round(zoom * 100)}%</span>
-        </div>
-
-        <div
-          ref={containerRef}
-          className="dependency-graph-container"
-          onWheel={handleWheel}
-          onMouseDown={handleMouseDown}
-          onMouseMove={handleMouseMove}
-          onMouseUp={handleMouseUp}
-          onMouseLeave={handleMouseUp}
-        >
+        <div className="dependency-graph-container">
          {loading ? (
            <div className="graph-loading">
              <div className="spinner"></div>
@@ -292,27 +311,52 @@ function DependencyGraph({ projectName, packageName, tagName, onClose }: Depende
              </svg>
              <p>{error}</p>
            </div>
-          ) : graphRoot ? (
-            <div
-              className="graph-canvas"
-              style={{
-                transform: `translate(${pan.x}px, ${pan.y}px) scale(${zoom})`,
-                cursor: isDragging ? 'grabbing' : 'grab',
-              }}
+          ) : nodes.length > 0 ? (
+            <ReactFlow
+              nodes={nodes}
+              edges={edges}
+              onNodesChange={onNodesChange}
+              onEdgesChange={onEdgesChange}
+              nodeTypes={nodeTypes}
+              defaultViewport={defaultViewport}
+              fitView
+              fitViewOptions={{ padding: 0.2 }}
+              minZoom={0.1}
+              maxZoom={2}
+              attributionPosition="bottom-left"
            >
-              {renderNode(graphRoot)}
-            </div>
+              <Controls />
+              <Background color="var(--border-primary)" gap={20} />
+            </ReactFlow>
          ) : (
            <div className="graph-empty">No dependencies to display</div>
          )}
        </div>

-        {hoveredNode && (
-          <div className="graph-tooltip">
-            <strong>{hoveredNode.project}/{hoveredNode.package}</strong>
-            {hoveredNode.version && <div>Version: {hoveredNode.version}</div>}
-            <div>Size: {formatBytes(hoveredNode.size)}</div>
-            <div className="tooltip-hint">Click to navigate</div>
-          </div>
-        )}
+        {warning && (
+          <div className="graph-warning">
+            <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
+              <path d="M10.29 3.86L1.82 18a2 2 0 0 0 1.71 3h16.94a2 2 0 0 0 1.71-3L13.71 3.86a2 2 0 0 0-3.42 0z"></path>
+              <line x1="12" y1="9" x2="12" y2="13"></line>
+              <line x1="12" y1="17" x2="12.01" y2="17"></line>
+            </svg>
+            <span>{warning}</span>
+          </div>
+        )}
+
+        {resolution && resolution.missing && resolution.missing.length > 0 && (
+          <div className="missing-dependencies">
+            <h3>Not Cached ({resolution.missing.length})</h3>
+            <p className="missing-hint">These dependencies are referenced but not yet cached on the server.</p>
+            <ul className="missing-list">
+              {resolution.missing.map((dep, i) => (
+                <li key={i} className="missing-item">
+                  <span className="missing-name">{dep.project}/{dep.package}</span>
+                  {dep.constraint && <span className="missing-constraint">@{dep.constraint}</span>}
+                  {dep.required_by && <span className="missing-required-by">← {dep.required_by}</span>}
+                </li>
+              ))}
+            </ul>
          </div>
        )}
      </div>
@@ -290,20 +290,25 @@
  color: var(--error-color, #dc3545);
}

-/* Progress Bar */
-.progress-bar {
+/* Progress Bar - scoped to upload component */
+.drag-drop-upload .progress-bar,
+.upload-queue .progress-bar {
  height: 8px;
  background: var(--border-color, #ddd);
  border-radius: 4px;
  overflow: hidden;
+  width: 100%;
+  max-width: 100%;
}

-.progress-bar--small {
+.drag-drop-upload .progress-bar--small,
+.upload-queue .progress-bar--small {
  height: 4px;
  margin-top: 0.25rem;
}

-.progress-bar__fill {
+.drag-drop-upload .progress-bar__fill,
+.upload-queue .progress-bar__fill {
  height: 100%;
  background: var(--accent-color, #007bff);
  border-radius: 4px;
@@ -504,42 +504,4 @@ describe('DragDropUpload', () => {
      });
    });
  });
-
-  describe('Tag Support', () => {
-    it('includes tag in upload request', async () => {
-      let capturedFormData: FormData | null = null;
-
-      class MockXHR {
-        status = 200;
-        responseText = JSON.stringify({ artifact_id: 'abc123', size: 100 });
-        timeout = 0;
-        upload = { addEventListener: vi.fn() };
-        addEventListener = vi.fn((event: string, handler: () => void) => {
-          if (event === 'load') setTimeout(handler, 10);
-        });
-        open = vi.fn();
-        send = vi.fn((data: FormData) => {
-          capturedFormData = data;
-        });
-      }
-      vi.stubGlobal('XMLHttpRequest', MockXHR);
-
-      render(<DragDropUpload {...defaultProps} tag="v1.0.0" />);
-
-      const input = document.querySelector('input[type="file"]') as HTMLInputElement;
-      const file = createMockFile('test.txt', 100, 'text/plain');
-
-      Object.defineProperty(input, 'files', {
-        value: Object.assign([file], { item: (i: number) => [file][i] }),
-      });
-
-      fireEvent.change(input);
-
-      await vi.advanceTimersByTimeAsync(100);
-
-      await waitFor(() => {
-        expect(capturedFormData?.get('tag')).toBe('v1.0.0');
-      });
-    });
-  });
});
@@ -13,7 +13,6 @@ interface StoredUploadState {
  completedParts: number[];
  project: string;
  package: string;
-  tag?: string;
  createdAt: number;
}

@@ -87,7 +86,6 @@ export interface DragDropUploadProps {
  maxFileSize?: number; // in bytes
  maxConcurrentUploads?: number;
  maxRetries?: number;
-  tag?: string;
  className?: string;
  disabled?: boolean;
  disabledReason?: string;

@@ -230,7 +228,6 @@ export function DragDropUpload({
  maxFileSize,
  maxConcurrentUploads = 3,
  maxRetries = 3,
-  tag,
  className = '',
  disabled = false,
  disabledReason,

@@ -368,7 +365,6 @@ export function DragDropUpload({
          expected_hash: fileHash,
          filename: item.file.name,
          size: item.file.size,
-          tag: tag || undefined,
        }),
      }
    );

@@ -392,7 +388,6 @@ export function DragDropUpload({
      completedParts: [],
      project: projectName,
      package: packageName,
-      tag: tag || undefined,
      createdAt: Date.now(),
    });

@@ -438,7 +433,6 @@ export function DragDropUpload({
      completedParts,
      project: projectName,
      package: packageName,
-      tag: tag || undefined,
      createdAt: Date.now(),
    });

@@ -459,7 +453,7 @@ export function DragDropUpload({
      {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ tag: tag || undefined }),
+        body: JSON.stringify({}),
      }
    );

@@ -475,7 +469,7 @@ export function DragDropUpload({
      size: completeData.size,
      deduplicated: false,
    };
-  }, [projectName, packageName, tag, isOnline]);
+  }, [projectName, packageName, isOnline]);

  const uploadFileSimple = useCallback((item: UploadItem): Promise<UploadResult> => {
    return new Promise((resolve, reject) => {

@@ -484,9 +478,6 @@ export function DragDropUpload({

      const formData = new FormData();
      formData.append('file', item.file);
-      if (tag) {
-        formData.append('tag', tag);
-      }

      let lastLoaded = 0;
      let lastTime = Date.now();

@@ -555,7 +546,7 @@ export function DragDropUpload({
          : u
      ));
    });
-  }, [projectName, packageName, tag]);
+  }, [projectName, packageName]);

  const uploadFile = useCallback((item: UploadItem): Promise<UploadResult> => {
    if (item.file.size >= CHUNKED_UPLOAD_THRESHOLD) {
@@ -233,7 +233,7 @@ export function GlobalSearch() {
            const flatIndex = results.projects.length + results.packages.length + index;
            return (
              <button
-                key={artifact.tag_id}
+                key={artifact.artifact_id}
                className={`global-search__result ${selectedIndex === flatIndex ? 'selected' : ''}`}
                onClick={() => navigateToResult({ type: 'artifact', item: artifact })}
                onMouseEnter={() => setSelectedIndex(flatIndex)}

@@ -243,7 +243,7 @@ export function GlobalSearch() {
                  <line x1="7" y1="7" x2="7.01" y2="7" />
                </svg>
                <div className="global-search__result-content">
-                  <span className="global-search__result-name">{artifact.tag_name}</span>
+                  <span className="global-search__result-name">{artifact.version}</span>
                  <span className="global-search__result-path">
                    {artifact.project_name} / {artifact.package_name}
                  </span>
@@ -272,7 +272,7 @@
.footer {
  background: var(--bg-secondary);
  border-top: 1px solid var(--border-primary);
-  padding: 24px 0;
+  padding: 12px 0;
}

.footer-content {
@@ -84,29 +84,6 @@ function Layout({ children }: LayoutProps) {
              </svg>
              Projects
            </Link>
-            <Link to="/dashboard" className={location.pathname === '/dashboard' ? 'active' : ''}>
-              <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
-                <rect x="3" y="3" width="7" height="7" rx="1"/>
-                <rect x="14" y="3" width="7" height="7" rx="1"/>
-                <rect x="3" y="14" width="7" height="7" rx="1"/>
-                <rect x="14" y="14" width="7" height="7" rx="1"/>
-              </svg>
-              Dashboard
-            </Link>
-            {user && userTeams.length > 0 && (
-              <Link
-                to={userTeams.length === 1 ? `/teams/${userTeams[0].slug}` : '/teams'}
-                className={location.pathname.startsWith('/teams') ? 'active' : ''}
-              >
-                <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
-                  <path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
-                  <circle cx="9" cy="7" r="4"/>
-                  <path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
-                  <path d="M16 3.13a4 4 0 0 1 0 7.75"/>
-                </svg>
-                {userTeams.length === 1 ? 'Team' : 'Teams'}
-              </Link>
-            )}
            <a href="/docs" className="nav-link-muted">
              <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
                <path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"/>

@@ -148,6 +125,35 @@ function Layout({ children }: LayoutProps) {
                  )}
                </div>
                <div className="user-menu-divider"></div>
+                <NavLink
+                  to="/dashboard"
+                  className="user-menu-item"
+                  onClick={() => setShowUserMenu(false)}
+                >
+                  <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
+                    <rect x="3" y="3" width="7" height="7" rx="1"/>
+                    <rect x="14" y="3" width="7" height="7" rx="1"/>
+                    <rect x="3" y="14" width="7" height="7" rx="1"/>
+                    <rect x="14" y="14" width="7" height="7" rx="1"/>
+                  </svg>
+                  Dashboard
+                </NavLink>
+                {userTeams.length > 0 && (
+                  <NavLink
+                    to={userTeams.length === 1 ? `/teams/${userTeams[0].slug}` : '/teams'}
+                    className="user-menu-item"
+                    onClick={() => setShowUserMenu(false)}
+                  >
+                    <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
+                      <path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
+                      <circle cx="9" cy="7" r="4"/>
+                      <path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
+                      <path d="M16 3.13a4 4 0 0 1 0 7.75"/>
+                    </svg>
+                    {userTeams.length === 1 ? 'Team' : 'Teams'}
+                  </NavLink>
+                )}
+                <div className="user-menu-divider"></div>
                <NavLink
                  to="/settings/api-keys"
                  className="user-menu-item"

@@ -183,6 +189,18 @@ function Layout({ children }: LayoutProps) {
                  </svg>
                  SSO Configuration
                </NavLink>
+                <NavLink
+                  to="/admin/cache"
+                  className="user-menu-item"
+                  onClick={() => setShowUserMenu(false)}
+                >
+                  <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
+                    <path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z"/>
+                    <polyline points="3.27 6.96 12 12.01 20.73 6.96"/>
+                    <line x1="12" y1="22.08" x2="12" y2="12"/>
+                  </svg>
+                  Cache Management
+                </NavLink>
              </>
            )}
            <div className="user-menu-divider"></div>

@@ -229,7 +247,7 @@ function Layout({ children }: LayoutProps) {
            </svg>
            <span className="footer-logo">Orchard</span>
            <span className="footer-separator">·</span>
-            <span className="footer-tagline">Content-Addressable Storage</span>
+            <span className="footer-tagline">The cache that never forgets</span>
          </div>
          <div className="footer-links">
            <a href="/docs">Documentation</a>
377 frontend/src/pages/AdminCachePage.css (Normal file)
@@ -0,0 +1,377 @@
.admin-cache-page { padding: 2rem; max-width: 1400px; margin: 0 auto; }
.admin-cache-page h1 { margin-bottom: 2rem; color: var(--text-primary); }
.admin-cache-page h2 { margin-bottom: 1rem; color: var(--text-primary); font-size: 1.25rem; }

/* Success/Error Messages */
.success-message { padding: 0.75rem 1rem; background-color: #d4edda; border: 1px solid #c3e6cb; border-radius: 4px; color: #155724; margin-bottom: 1rem; }
.error-message { padding: 0.75rem 1rem; background-color: #f8d7da; border: 1px solid #f5c6cb; border-radius: 4px; color: #721c24; margin-bottom: 1rem; }

/* Sources Section */
.sources-section { background: var(--bg-secondary); border: 1px solid var(--border-color); border-radius: 8px; padding: 1.5rem; }
.section-header { display: flex; justify-content: space-between; align-items: center; margin-bottom: 1rem; }
.section-header h2 { margin: 0; }

/* Sources Table */
.sources-table { width: 100%; border-collapse: collapse; background: var(--bg-primary); border-radius: 4px; overflow: hidden; }
.sources-table th,
.sources-table td { padding: 0.75rem 1rem; text-align: center; border-bottom: 1px solid var(--border-color); }
.sources-table th { background: var(--bg-tertiary); font-weight: 600; color: var(--text-secondary); font-size: 0.85rem; text-transform: uppercase; }
.sources-table tr:last-child td { border-bottom: none; }
.sources-table tr.disabled-row { opacity: 0.6; }
.source-name { font-weight: 500; color: var(--text-primary); white-space: nowrap; }

/* Name column should be left-aligned */
.sources-table td:first-child { text-align: left; }
.url-cell { font-family: monospace; font-size: 0.9rem; max-width: 300px; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; text-align: left; }

/* Badges */
.env-badge,
.status-badge { display: inline-block; padding: 0.2rem 0.5rem; border-radius: 4px; font-size: 0.75rem; font-weight: 500; margin-left: 0.5rem; }
.env-badge { background-color: #fff3e0; color: #e65100; }
.status-badge.enabled { background-color: #e8f5e9; color: #2e7d32; }
.status-badge.disabled { background-color: #ffebee; color: #c62828; }
.coming-soon-badge { color: #9e9e9e; font-style: italic; font-size: 0.85em; }

/* Actions */
.actions-cell { white-space: nowrap; }
.actions-cell .btn { margin-right: 0.5rem; }
.actions-cell .btn:last-child { margin-right: 0; }
.test-cell { text-align: center; width: 2rem; }
.test-dot { font-size: 1rem; cursor: default; }
.test-dot.success { color: #2e7d32; }
.test-dot.failure { color: #c62828; cursor: pointer; }
.test-dot.failure:hover { color: #b71c1c; }
.test-dot.testing { color: #1976d2; animation: pulse 1s infinite; }
@keyframes pulse { 0%, 100% { opacity: 1; } 50% { opacity: 0.4; } }

/* Error Modal */
.error-modal-content { background: var(--bg-primary); border-radius: 8px; padding: 2rem; width: 100%; max-width: 500px; }
.error-modal-content h3 { margin-top: 0; color: #c62828; }
.error-modal-content .error-details { background: var(--bg-tertiary); padding: 1rem; border-radius: 4px; font-family: monospace; font-size: 0.9rem; word-break: break-word; white-space: pre-wrap; }
.error-modal-content .modal-actions { display: flex; justify-content: flex-end; margin-top: 1.5rem; }

/* Buttons */
.btn { padding: 0.5rem 1rem; border: 1px solid var(--border-color); border-radius: 4px; background: var(--bg-primary); color: var(--text-primary); cursor: pointer; font-size: 0.875rem; }
.btn:hover { background: var(--bg-tertiary); }
.btn:disabled { opacity: 0.6; cursor: not-allowed; }
.btn-primary { background-color: var(--color-primary); border-color: var(--color-primary); color: white; }
.btn-primary:hover { background-color: var(--color-primary-hover); }
.btn-danger { background-color: #dc3545; border-color: #dc3545; color: white; }
.btn-danger:hover { background-color: #c82333; }
.btn-sm { padding: 0.25rem 0.75rem; font-size: 0.8rem; }
.btn-secondary { background-color: var(--bg-tertiary); border-color: var(--border-color); color: var(--text-primary); font-weight: 500; }
.btn-secondary:hover { background-color: var(--bg-secondary); border-color: var(--text-secondary); }
.empty-message { color: var(--text-secondary); font-style: italic; padding: 2rem; text-align: center; }

/* Modal */
.modal-overlay { position: fixed; top: 0; left: 0; right: 0; bottom: 0; background: rgba(0, 0, 0, 0.5); display: flex; align-items: center; justify-content: center; z-index: 1000; }
.modal-content { background: var(--bg-primary); border-radius: 8px; padding: 2rem; width: 100%; max-width: 600px; max-height: 90vh; overflow-y: auto; }
.modal-content h2 { margin-top: 0; }

/* Form */
.form-group { margin-bottom: 1rem; }
.form-group label { display: block; margin-bottom: 0.5rem; font-weight: 500; color: var(--text-primary); }
.form-group input,
.form-group select { width: 100%; padding: 0.5rem; border: 1px solid var(--border-color); border-radius: 4px; background: var(--bg-primary); color: var(--text-primary); font-size: 1rem; }
.form-group input:focus,
.form-group select:focus { outline: none; border-color: var(--color-primary); }
.form-row { display: flex; gap: 1rem; }
.form-row .form-group { flex: 1; }
.checkbox-group label { display: flex; align-items: center; gap: 0.5rem; cursor: pointer; }
.checkbox-group input[type="checkbox"] { width: auto; }
.help-text { display: block; font-size: 0.8rem; color: var(--text-secondary); margin-top: 0.25rem; }
.form-actions { display: flex; justify-content: space-between; align-items: center; margin-top: 1.5rem; padding-top: 1rem; border-top: 1px solid var(--border-color); }
.form-actions-right { display: flex; gap: 0.5rem; }
509 frontend/src/pages/AdminCachePage.tsx (Normal file)
@@ -0,0 +1,509 @@
import { useState, useEffect } from 'react';
import { useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import {
  listUpstreamSources,
  createUpstreamSource,
  updateUpstreamSource,
  deleteUpstreamSource,
  testUpstreamSource,
} from '../api';
import { UpstreamSource, SourceType, AuthType } from '../types';
import './AdminCachePage.css';

const SOURCE_TYPES: SourceType[] = ['npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic'];
const SUPPORTED_SOURCE_TYPES: Set<SourceType> = new Set(['pypi', 'generic']);
const AUTH_TYPES: AuthType[] = ['none', 'basic', 'bearer', 'api_key'];

function AdminCachePage() {
  const { user, loading: authLoading } = useAuth();
  const navigate = useNavigate();

  // Upstream sources state
  const [sources, setSources] = useState<UpstreamSource[]>([]);
  const [loadingSources, setLoadingSources] = useState(true);
  const [sourcesError, setSourcesError] = useState<string | null>(null);

  // Create/Edit form state
  const [showForm, setShowForm] = useState(false);
  const [editingSource, setEditingSource] = useState<UpstreamSource | null>(null);
  const [formData, setFormData] = useState({
    name: '',
    source_type: 'generic' as SourceType,
    url: '',
    enabled: true,
    auth_type: 'none' as AuthType,
    username: '',
    password: '',
    priority: 100,
  });
  const [formError, setFormError] = useState<string | null>(null);
  const [isSaving, setIsSaving] = useState(false);

  // Test result state
  const [testingId, setTestingId] = useState<string | null>(null);
  const [testResults, setTestResults] = useState<Record<string, { success: boolean; message: string }>>({});

  // Delete confirmation state
  const [deletingId, setDeletingId] = useState<string | null>(null);

  // Success message
  const [successMessage, setSuccessMessage] = useState<string | null>(null);

  // Error modal state
  const [showErrorModal, setShowErrorModal] = useState(false);
  const [selectedError, setSelectedError] = useState<{ sourceName: string; error: string } | null>(null);

  useEffect(() => {
    if (!authLoading && !user) {
      navigate('/login', { state: { from: '/admin/cache' } });
    }
  }, [user, authLoading, navigate]);

  useEffect(() => {
    if (user && user.is_admin) {
      loadSources();
    }
  }, [user]);

  useEffect(() => {
    if (successMessage) {
      const timer = setTimeout(() => setSuccessMessage(null), 3000);
      return () => clearTimeout(timer);
    }
  }, [successMessage]);

  async function loadSources() {
    setLoadingSources(true);
    setSourcesError(null);
    try {
      const data = await listUpstreamSources();
      setSources(data);
    } catch (err) {
      setSourcesError(err instanceof Error ? err.message : 'Failed to load sources');
    } finally {
      setLoadingSources(false);
    }
  }

  function openCreateForm() {
    setEditingSource(null);
    setFormData({
      name: '',
      source_type: 'generic',
      url: '',
      enabled: true,
      auth_type: 'none',
      username: '',
      password: '',
      priority: 100,
    });
    setFormError(null);
    setShowForm(true);
  }

  function openEditForm(source: UpstreamSource) {
    setEditingSource(source);
    setFormData({
      name: source.name,
      source_type: source.source_type,
      url: source.url,
      enabled: source.enabled,
      auth_type: source.auth_type,
      username: source.username || '',
      password: '',
      priority: source.priority,
    });
    setFormError(null);
    setShowForm(true);
  }

  async function handleFormSubmit(e: React.FormEvent) {
    e.preventDefault();
    if (!formData.name.trim()) {
      setFormError('Name is required');
      return;
    }
    if (!formData.url.trim()) {
      setFormError('URL is required');
      return;
    }

    setIsSaving(true);
    setFormError(null);

    try {
      let savedSourceId: string | null = null;

      if (editingSource) {
        // Update existing source
        await updateUpstreamSource(editingSource.id, {
          name: formData.name.trim(),
          source_type: formData.source_type,
          url: formData.url.trim(),
          enabled: formData.enabled,
          auth_type: formData.auth_type,
          username: formData.username.trim() || undefined,
          password: formData.password || undefined,
          priority: formData.priority,
        });
        savedSourceId = editingSource.id;
        setSuccessMessage('Source updated successfully');
      } else {
        // Create new source
        const newSource = await createUpstreamSource({
          name: formData.name.trim(),
          source_type: formData.source_type,
          url: formData.url.trim(),
          enabled: formData.enabled,
          auth_type: formData.auth_type,
          username: formData.username.trim() || undefined,
          password: formData.password || undefined,
          priority: formData.priority,
        });
        savedSourceId = newSource.id;
        setSuccessMessage('Source created successfully');
      }
      setShowForm(false);
      await loadSources();

      // Auto-test the source after save
      if (savedSourceId) {
        testSourceById(savedSourceId);
      }
    } catch (err) {
      setFormError(err instanceof Error ? err.message : 'Failed to save source');
    } finally {
      setIsSaving(false);
    }
  }

  async function handleDelete(source: UpstreamSource) {
    if (!window.confirm(`Delete upstream source "${source.name}"? This cannot be undone.`)) {
      return;
    }

    setDeletingId(source.id);
    try {
      await deleteUpstreamSource(source.id);
      setSuccessMessage(`Source "${source.name}" deleted`);
      await loadSources();
    } catch (err) {
      setSourcesError(err instanceof Error ? err.message : 'Failed to delete source');
    } finally {
      setDeletingId(null);
    }
  }

  async function handleTest(source: UpstreamSource) {
    testSourceById(source.id);
  }

  async function testSourceById(sourceId: string) {
    setTestingId(sourceId);
    setTestResults((prev) => ({ ...prev, [sourceId]: { success: true, message: 'Testing...' } }));

    try {
      const result = await testUpstreamSource(sourceId);
      setTestResults((prev) => ({
        ...prev,
        [sourceId]: {
          success: result.success,
          message: result.success
            ? `OK (${result.elapsed_ms}ms)`
            : result.error || `HTTP ${result.status_code}`,
        },
      }));
    } catch (err) {
      setTestResults((prev) => ({
        ...prev,
        [sourceId]: {
          success: false,
          message: err instanceof Error ? err.message : 'Test failed',
        },
      }));
    } finally {
      setTestingId(null);
    }
  }

  function showError(sourceName: string, error: string) {
    setSelectedError({ sourceName, error });
    setShowErrorModal(true);
  }

  if (authLoading) {
    return <div className="admin-cache-page">Loading...</div>;
  }

  if (!user?.is_admin) {
    return (
      <div className="admin-cache-page">
        <div className="error-message">Access denied. Admin privileges required.</div>
      </div>
    );
  }

  return (
    <div className="admin-cache-page">
      <h1>Upstream Sources</h1>

      {successMessage && <div className="success-message">{successMessage}</div>}

      {/* Upstream Sources Section */}
      <section className="sources-section">
        <div className="section-header">
          <button className="btn btn-primary" onClick={openCreateForm}>
            Add Source
          </button>
        </div>

        {loadingSources ? (
          <p>Loading sources...</p>
        ) : sourcesError ? (
          <div className="error-message">{sourcesError}</div>
        ) : sources.length === 0 ? (
          <p className="empty-message">No upstream sources configured.</p>
        ) : (
          <table className="sources-table">
            <thead>
              <tr>
                <th>Name</th>
                <th>Type</th>
                <th>URL</th>
                <th>Priority</th>
                <th>Status</th>
                <th>Test</th>
                <th>Actions</th>
              </tr>
            </thead>
            <tbody>
              {sources.map((source) => (
                <tr key={source.id} className={source.enabled ? '' : 'disabled-row'}>
                  <td>
                    <span className="source-name">{source.name}</span>
                    {source.source === 'env' && (
                      <span className="env-badge" title="Defined via environment variable">ENV</span>
                    )}
                  </td>
                  <td>
                    {source.source_type}
                    {!SUPPORTED_SOURCE_TYPES.has(source.source_type) && (
                      <span className="coming-soon-badge"> (coming soon)</span>
                    )}
                  </td>
                  <td className="url-cell" title={source.url}>{source.url}</td>
                  <td>{source.priority}</td>
                  <td>
                    <span className={`status-badge ${source.enabled ? 'enabled' : 'disabled'}`}>
                      {source.enabled ? 'Enabled' : 'Disabled'}
                    </span>
                  </td>
                  <td className="test-cell">
                    {testingId === source.id ? (
                      <span className="test-dot testing" title="Testing...">●</span>
                    ) : testResults[source.id] ? (
                      testResults[source.id].success ? (
                        <span className="test-dot success" title={testResults[source.id].message}>●</span>
                      ) : (
                        <span
                          className="test-dot failure"
                          title="Click to see error"
                          onClick={() => showError(source.name, testResults[source.id].message)}
                        >●</span>
                      )
                    ) : null}
                  </td>
                  <td className="actions-cell">
                    <button
                      className="btn btn-sm btn-secondary"
                      onClick={() => handleTest(source)}
                      disabled={testingId === source.id}
                    >
                      Test
                    </button>
                    {source.source !== 'env' && (
                      <button className="btn btn-sm btn-secondary" onClick={() => openEditForm(source)}>
                        Edit
                      </button>
                    )}
                  </td>
                </tr>
              ))}
            </tbody>
          </table>
        )}
      </section>

      {/* Create/Edit Modal */}
      {showForm && (
        <div className="modal-overlay" onClick={() => setShowForm(false)}>
          <div className="modal-content" onClick={(e) => e.stopPropagation()}>
            <h2>{editingSource ? 'Edit Upstream Source' : 'Add Upstream Source'}</h2>
            <form onSubmit={handleFormSubmit}>
              {formError && <div className="error-message">{formError}</div>}

              <div className="form-group">
                <label htmlFor="name">Name</label>
                <input
                  type="text"
                  id="name"
                  value={formData.name}
                  onChange={(e) => setFormData({ ...formData, name: e.target.value })}
                  placeholder="e.g., npm-private"
                  required
                />
              </div>

              <div className="form-row">
                <div className="form-group">
                  <label htmlFor="source_type">Type</label>
                  <select
                    id="source_type"
                    value={formData.source_type}
                    onChange={(e) => setFormData({ ...formData, source_type: e.target.value as SourceType })}
                  >
                    {SOURCE_TYPES.map((type) => (
                      <option key={type} value={type}>
                        {type}{!SUPPORTED_SOURCE_TYPES.has(type) ? ' (coming soon)' : ''}
                      </option>
                    ))}
                  </select>
                </div>

                <div className="form-group">
                  <label htmlFor="priority">Priority</label>
                  <input
                    type="number"
                    id="priority"
                    value={formData.priority}
                    onChange={(e) => setFormData({ ...formData, priority: parseInt(e.target.value) || 100 })}
                    min="1"
                  />
                  <span className="help-text">Lower = higher priority</span>
                </div>
              </div>

              <div className="form-group">
                <label htmlFor="url">URL</label>
                <input
                  type="url"
                  id="url"
                  value={formData.url}
                  onChange={(e) => setFormData({ ...formData, url: e.target.value })}
                  placeholder="https://registry.example.com"
                  required
                />
              </div>

              <div className="form-row">
                <div className="form-group checkbox-group">
                  <label>
                    <input
                      type="checkbox"
                      checked={formData.enabled}
                      onChange={(e) => setFormData({ ...formData, enabled: e.target.checked })}
                    />
                    Enabled
                  </label>
                </div>
              </div>

              <div className="form-group">
                <label htmlFor="auth_type">Authentication</label>
                <select
                  id="auth_type"
                  value={formData.auth_type}
                  onChange={(e) => setFormData({ ...formData, auth_type: e.target.value as AuthType })}
                >
                  {AUTH_TYPES.map((type) => (
                    <option key={type} value={type}>
                      {type === 'none' ? 'None' : type === 'api_key' ? 'API Key' : type.charAt(0).toUpperCase() + type.slice(1)}
                    </option>
                  ))}
                </select>
              </div>

              {formData.auth_type !== 'none' && (
                <div className="form-row">
                  {(formData.auth_type === 'basic' || formData.auth_type === 'api_key') && (
                    <div className="form-group">
                      <label htmlFor="username">{formData.auth_type === 'api_key' ? 'Header Name' : 'Username'}</label>
                      <input
                        type="text"
                        id="username"
                        value={formData.username}
                        onChange={(e) => setFormData({ ...formData, username: e.target.value })}
                        placeholder={formData.auth_type === 'api_key' ? 'X-API-Key' : 'username'}
                      />
                    </div>
                  )}
                  <div className="form-group">
                    <label htmlFor="password">
                      {formData.auth_type === 'bearer'
                        ? 'Token'
                        : formData.auth_type === 'api_key'
                        ? 'API Key Value'
                        : 'Password'}
                    </label>
                    <input
                      type="password"
                      id="password"
                      value={formData.password}
                      onChange={(e) => setFormData({ ...formData, password: e.target.value })}
                      placeholder={editingSource ? '(unchanged)' : ''}
                    />
                    {editingSource && (
                      <span className="help-text">Leave empty to keep existing {formData.auth_type === 'bearer' ? 'token' : 'credentials'}</span>
                    )}
                  </div>
                </div>
              )}

              <div className="form-actions">
                {editingSource && (
                  <button
                    type="button"
                    className="btn btn-danger"
                    onClick={() => {
                      handleDelete(editingSource);
                      setShowForm(false);
                    }}
                    disabled={deletingId === editingSource.id}
                  >
                    {deletingId === editingSource.id ? 'Deleting...' : 'Delete'}
                  </button>
                )}
                <div className="form-actions-right">
                  <button type="button" className="btn" onClick={() => setShowForm(false)}>
                    Cancel
                  </button>
                  <button type="submit" className="btn btn-primary" disabled={isSaving}>
                    {isSaving ? 'Saving...' : editingSource ? 'Update' : 'Create'}
                  </button>
                </div>
              </div>
            </form>
          </div>
        </div>
      )}

      {/* Error Details Modal */}
      {showErrorModal && selectedError && (
        <div className="modal-overlay" onClick={() => setShowErrorModal(false)}>
          <div className="error-modal-content" onClick={(e) => e.stopPropagation()}>
            <h3>Connection Error: {selectedError.sourceName}</h3>
            <div className="error-details">{selectedError.error}</div>
            <div className="modal-actions">
              <button className="btn" onClick={() => setShowErrorModal(false)}>
                Close
              </button>
            </div>
          </div>
        </div>
      )}
    </div>
  );
}

export default AdminCachePage;
@@ -493,3 +493,16 @@
  gap: 6px;
  flex-wrap: wrap;
}
+
+/* Cell name styles */
+.cell-name {
+  display: flex;
+  align-items: center;
+  gap: 8px;
+}
+
+/* System project badge */
+.system-badge {
+  font-size: 0.7rem;
+  padding: 2px 6px;
+}
@@ -224,6 +224,9 @@ function Home() {
        <span className="cell-name">
          {!project.is_public && <LockIcon />}
          {project.name}
+          {project.is_system && (
+            <Badge variant="warning" className="system-badge">Cache</Badge>
+          )}
        </span>
      ),
    },

@@ -246,7 +249,7 @@ function Home() {
      key: 'created_by',
      header: 'Owner',
      className: 'cell-owner',
-      render: (project) => project.created_by,
+      render: (project) => project.team_name || project.created_by,
    },
    ...(user
      ? [
@@ -185,56 +185,6 @@ h2 {
  color: var(--warning-color, #f59e0b);
}

-/* Usage Section */
-.usage-section { margin-top: 32px; background: var(--bg-secondary); }
-.usage-section h3 { margin-bottom: 12px; color: var(--text-primary); font-size: 1rem; font-weight: 600; }
-.usage-section p { color: var(--text-secondary); margin-bottom: 12px; font-size: 0.875rem; }
-.usage-section pre { background: #0d0d0f; border: 1px solid var(--border-primary); padding: 16px 20px; border-radius: var(--radius-md); overflow-x: auto; margin-bottom: 16px; }
-.usage-section code { font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace; font-size: 0.8125rem; color: #e2e8f0; }
-
-/* Syntax highlighting for code blocks */
-.usage-section pre { position: relative; }
-.usage-section pre::before { content: 'bash'; position: absolute; top: 8px; right: 12px; font-size: 0.6875rem; color: var(--text-muted); text-transform: uppercase; letter-spacing: 0.05em; }
-
/* Copy button for code blocks (optional enhancement) */
.code-block {
  position: relative;

@@ -642,6 +592,11 @@ tr:hover .copy-btn {
  padding: 20px;
}

+/* Ensure file modal needs higher z-index when opened from deps modal */
+.modal-overlay:has(.ensure-file-modal) {
+  z-index: 1100;
+}
+
.ensure-file-modal {
  background: var(--bg-secondary);
  border: 1px solid var(--border-primary);

@@ -793,4 +748,194 @@ tr:hover .copy-btn {
  .ensure-file-modal {
    max-height: 90vh;
  }
+
+  .action-menu-dropdown { right: 0; left: auto; }
+}
+
+/* Header upload button */
+.header-upload-btn { margin-left: auto; }
+
+/* Tag/Version cell */
+.tag-version-cell { display: flex; flex-direction: column; gap: 4px; }
+.tag-version-cell .version-badge { font-size: 0.75rem; color: var(--text-muted); }
+
+/* Icon buttons */
+.btn-icon { display: flex; align-items: center; justify-content: center; width: 32px; height: 32px; padding: 0; background: transparent; border: 1px solid transparent; border-radius: var(--radius-sm); color: var(--text-secondary); cursor: pointer; transition: all var(--transition-fast); }
+.btn-icon:hover { background: var(--bg-hover); color: var(--text-primary); }
+
+/* Action menu */
+.action-buttons { display: flex; align-items: center; gap: 4px; }
+.action-menu { position: relative; }
+
+/* Action menu backdrop for click-outside */
+.action-menu-backdrop { position: fixed; top: 0; left: 0; right: 0; bottom: 0; z-index: 999; }
+
+.action-menu-dropdown { position: fixed; z-index: 1000; min-width: 180px; padding: 4px 0; background: var(--bg-secondary); border: 1px solid var(--border-primary); border-radius: var(--radius-md); box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15); }
+.action-menu-dropdown button { display: block; width: 100%; padding: 8px 12px; background: none; border: none; text-align: left; font-size: 0.875rem; color: var(--text-primary); cursor: pointer; transition: background var(--transition-fast); }
+.action-menu-dropdown button:hover { background: var(--bg-hover); }
+
+/* Upload Modal */
+.upload-modal,
+.create-tag-modal { background: var(--bg-secondary); border-radius: var(--radius-lg); width: 90%; max-width: 500px; max-height: 90vh; overflow: hidden; }
+
+.modal-header { display: flex; align-items: center; justify-content: space-between; padding: 16px 20px; border-bottom: 1px solid var(--border-primary); }
+.modal-header h3 { margin: 0; font-size: 1.125rem; font-weight: 600; }
+.modal-body { padding: 20px; }
+.modal-description { margin-bottom: 16px; color: var(--text-secondary); font-size: 0.875rem; }
+.modal-actions { display: flex; justify-content: flex-end; gap: 12px; margin-top: 20px; padding-top: 16px; border-top: 1px solid var(--border-primary); }
+
+/* Dependencies Modal */
+.deps-modal { background: var(--bg-secondary); border-radius: var(--radius-lg); width: 90%; max-width: 600px; max-height: 80vh; overflow: hidden; display: flex; flex-direction: column; }
+.deps-modal .modal-body { overflow-y: auto; flex: 1; }
+.deps-modal-controls { display: flex; gap: 8px; margin-bottom: 16px; }
+
+/* Artifact ID Modal */
+.artifact-id-modal { background: var(--bg-secondary); border-radius: var(--radius-lg); width: 90%; max-width: 500px; }
+
+.artifact-id-display { display: flex; align-items: center; gap: 12px; padding: 16px; background: var(--bg-tertiary); border-radius: var(--radius-md); border: 1px solid var(--border-primary); }
+.artifact-id-display code { font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace; font-size: 0.8125rem; color: var(--text-primary); word-break: break-all; flex: 1; }
+.artifact-id-display .copy-btn { opacity: 1; flex-shrink: 0;
}
@@ -1,7 +1,7 @@
|
|||||||
import { useState, useEffect, useCallback } from 'react';
|
import { useState, useEffect, useCallback } from 'react';
|
||||||
import { useParams, useSearchParams, useNavigate, useLocation, Link } from 'react-router-dom';
|
import { useParams, useSearchParams, useNavigate, useLocation, Link } from 'react-router-dom';
|
||||||
import { TagDetail, Package, PaginatedResponse, AccessLevel, Dependency, DependentInfo } from '../types';
|
import { PackageArtifact, Package, PaginatedResponse, AccessLevel, Dependency, DependentInfo } from '../types';
|
||||||
import { listTags, getDownloadUrl, getPackage, getMyProjectAccess, createTag, getArtifactDependencies, getReverseDependencies, getEnsureFile, UnauthorizedError, ForbiddenError } from '../api';
|
import { listPackageArtifacts, getDownloadUrl, getPackage, getMyProjectAccess, getArtifactDependencies, getReverseDependencies, getEnsureFile, UnauthorizedError, ForbiddenError } from '../api';
|
||||||
import { Breadcrumb } from '../components/Breadcrumb';
|
import { Breadcrumb } from '../components/Breadcrumb';
|
||||||
import { Badge } from '../components/Badge';
|
import { Badge } from '../components/Badge';
|
||||||
import { SearchInput } from '../components/SearchInput';
|
import { SearchInput } from '../components/SearchInput';
|
||||||
@@ -57,20 +57,20 @@ function PackagePage() {
   const { user } = useAuth();

   const [pkg, setPkg] = useState<Package | null>(null);
-  const [tagsData, setTagsData] = useState<PaginatedResponse<TagDetail> | null>(null);
+  const [artifactsData, setArtifactsData] = useState<PaginatedResponse<PackageArtifact> | null>(null);
   const [loading, setLoading] = useState(true);
   const [error, setError] = useState<string | null>(null);
   const [accessDenied, setAccessDenied] = useState(false);
-  const [uploadTag, setUploadTag] = useState('');
   const [uploadSuccess, setUploadSuccess] = useState<string | null>(null);
-  const [artifactIdInput, setArtifactIdInput] = useState('');
   const [accessLevel, setAccessLevel] = useState<AccessLevel | null>(null);
-  const [createTagName, setCreateTagName] = useState('');
-  const [createTagArtifactId, setCreateTagArtifactId] = useState('');
-  const [createTagLoading, setCreateTagLoading] = useState(false);
+  // UI state
+  const [showUploadModal, setShowUploadModal] = useState(false);
+  const [openMenuId, setOpenMenuId] = useState<string | null>(null);
+  const [menuPosition, setMenuPosition] = useState<{ top: number; left: number } | null>(null);

   // Dependencies state
-  const [selectedTag, setSelectedTag] = useState<TagDetail | null>(null);
+  const [selectedArtifact, setSelectedArtifact] = useState<PackageArtifact | null>(null);
   const [dependencies, setDependencies] = useState<Dependency[]>([]);
   const [depsLoading, setDepsLoading] = useState(false);
   const [depsError, setDepsError] = useState<string | null>(null);
@@ -86,6 +86,13 @@ function PackagePage() {
   // Dependency graph modal state
   const [showGraph, setShowGraph] = useState(false);

+  // Dependencies modal state
+  const [showDepsModal, setShowDepsModal] = useState(false);
+
+  // Artifact ID modal state
+  const [showArtifactIdModal, setShowArtifactIdModal] = useState(false);
+  const [viewArtifactId, setViewArtifactId] = useState<string | null>(null);
+
   // Ensure file modal state
   const [showEnsureFile, setShowEnsureFile] = useState(false);
   const [ensureFileContent, setEnsureFileContent] = useState<string | null>(null);
@@ -96,11 +103,15 @@ function PackagePage() {
   // Derived permissions
   const canWrite = accessLevel === 'write' || accessLevel === 'admin';

+  // Detect system projects (convention: name starts with "_")
+  const isSystemProject = projectName?.startsWith('_') ?? false;
+
   // Get params from URL
+  // Valid sort fields for artifacts: created_at, size, original_name
   const page = parseInt(searchParams.get('page') || '1', 10);
   const search = searchParams.get('search') || '';
-  const sort = searchParams.get('sort') || 'name';
-  const order = (searchParams.get('order') || 'asc') as 'asc' | 'desc';
+  const sort = searchParams.get('sort') || 'created_at';
+  const order = (searchParams.get('order') || 'desc') as 'asc' | 'desc';

   const updateParams = useCallback(
     (updates: Record<string, string | undefined>) => {
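The body of updateParams is not shown in this hunk; a plausible shape for it, consistent with the new created_at/desc defaults above (hook name and exact implementation are assumptions, not part of this changeset):

import { useCallback } from 'react';
import { useSearchParams } from 'react-router-dom';

// Sketch: merge partial updates into the current query string, dropping empty values.
export function useListParams() {
  const [searchParams, setSearchParams] = useSearchParams();

  const updateParams = useCallback(
    (updates: Record<string, string | undefined>) => {
      const next = new URLSearchParams(searchParams);
      for (const [key, value] of Object.entries(updates)) {
        if (value === undefined || value === '') next.delete(key);
        else next.set(key, value);
      }
      setSearchParams(next);
    },
    [searchParams, setSearchParams]
  );

  const sort = searchParams.get('sort') || 'created_at';
  const order = (searchParams.get('order') || 'desc') as 'asc' | 'desc';
  return { updateParams, sort, order };
}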
@@ -123,13 +134,13 @@ function PackagePage() {
     try {
       setLoading(true);
       setAccessDenied(false);
-      const [pkgData, tagsResult, accessResult] = await Promise.all([
+      const [pkgData, artifactsResult, accessResult] = await Promise.all([
         getPackage(projectName, packageName),
-        listTags(projectName, packageName, { page, search, sort, order }),
+        listPackageArtifacts(projectName, packageName, { page, search, sort, order }),
         getMyProjectAccess(projectName),
       ]);
       setPkg(pkgData);
-      setTagsData(tagsResult);
+      setArtifactsData(artifactsResult);
       setAccessLevel(accessResult.access_level);
       setError(null);
     } catch (err) {
@@ -153,25 +164,15 @@ function PackagePage() {
     loadData();
   }, [loadData]);

-  // Auto-select tag when tags are loaded (prefer version from URL, then first tag)
-  // Re-run when package changes to pick up new tags
+  // Auto-select artifact when artifacts are loaded (prefer first artifact)
+  // Re-run when package changes to pick up new artifacts
   useEffect(() => {
-    if (tagsData?.items && tagsData.items.length > 0) {
-      const versionParam = searchParams.get('version');
-      if (versionParam) {
-        // Find tag matching the version parameter
-        const matchingTag = tagsData.items.find(t => t.version === versionParam);
-        if (matchingTag) {
-          setSelectedTag(matchingTag);
-          setDependencies([]);
-          return;
-        }
-      }
-      // Fall back to first tag
-      setSelectedTag(tagsData.items[0]);
+    if (artifactsData?.items && artifactsData.items.length > 0) {
+      // Fall back to first artifact
+      setSelectedArtifact(artifactsData.items[0]);
       setDependencies([]);
     }
-  }, [tagsData, searchParams, projectName, packageName]);
+  }, [artifactsData, projectName, packageName]);

   // Fetch dependencies when selected tag changes
   const fetchDependencies = useCallback(async (artifactId: string) => {
@@ -189,10 +190,10 @@ function PackagePage() {
   }, []);

   useEffect(() => {
-    if (selectedTag) {
-      fetchDependencies(selectedTag.artifact_id);
+    if (selectedArtifact) {
+      fetchDependencies(selectedArtifact.id);
     }
-  }, [selectedTag, fetchDependencies]);
+  }, [selectedArtifact, fetchDependencies]);

   // Fetch reverse dependencies
   const fetchReverseDeps = useCallback(async (pageNum: number = 1) => {
@@ -220,15 +221,15 @@ function PackagePage() {
     }
   }, [projectName, packageName, loading, fetchReverseDeps]);

-  // Fetch ensure file for a specific tag
-  const fetchEnsureFileForTag = useCallback(async (tagName: string) => {
+  // Fetch ensure file for a specific version or artifact
+  const fetchEnsureFileForRef = useCallback(async (ref: string) => {
     if (!projectName || !packageName) return;

-    setEnsureFileTagName(tagName);
+    setEnsureFileTagName(ref);
     setEnsureFileLoading(true);
     setEnsureFileError(null);
     try {
-      const content = await getEnsureFile(projectName, packageName, tagName);
+      const content = await getEnsureFile(projectName, packageName, ref);
       setEnsureFileContent(content);
       setShowEnsureFile(true);
     } catch (err) {
@@ -239,11 +240,13 @@ function PackagePage() {
     }
   }, [projectName, packageName]);

-  // Fetch ensure file for selected tag
+  // Fetch ensure file for selected artifact
   const fetchEnsureFile = useCallback(async () => {
-    if (!selectedTag) return;
-    fetchEnsureFileForTag(selectedTag.name);
-  }, [selectedTag, fetchEnsureFileForTag]);
+    if (!selectedArtifact) return;
+    const version = getArtifactVersion(selectedArtifact);
+    const ref = version || `artifact:${selectedArtifact.id}`;
+    fetchEnsureFileForRef(ref);
+  }, [selectedArtifact, fetchEnsureFileForRef]);

   // Keyboard navigation - go back with backspace
   useEffect(() => {
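The ensure-file and download calls above resolve a ref as either a version string or an "artifact:<sha256>" reference. A self-contained restatement of that convention (the interface here is a trimmed stand-in for PackageArtifact; names are illustrative):

// Minimal shape needed to derive a ref; mirrors fields on PackageArtifact.
interface ArtifactRefSource {
  id: string;
  version?: string | null;
  format_metadata?: Record<string, unknown> | null;
}

// Prefer the direct version field, fall back to format_metadata.version.
function artifactVersion(a: ArtifactRefSource): string | null {
  return a.version || (a.format_metadata?.version as string | undefined) || null;
}

// Versioned artifacts are referenced by version; unversioned ones by content hash.
function artifactRef(a: ArtifactRefSource): string {
  return artifactVersion(a) || `artifact:${a.id}`;
}

// artifactRef({ id: 'abc123', version: '1.2.3' })       -> "1.2.3"
// artifactRef({ id: 'abc123', format_metadata: null })  -> "artifact:abc123"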
@@ -263,7 +266,6 @@ function PackagePage() {
       ? `Uploaded successfully! Artifact ID: ${results[0].artifact_id}`
       : `${count} files uploaded successfully!`;
     setUploadSuccess(message);
-    setUploadTag('');
     loadData();

     // Auto-dismiss success message after 5 seconds
@@ -274,30 +276,6 @@ function PackagePage() {
     setError(errorMsg);
   }, []);

-  const handleCreateTag = async (e: React.FormEvent) => {
-    e.preventDefault();
-    if (!createTagName.trim() || createTagArtifactId.length !== 64) return;
-
-    setCreateTagLoading(true);
-    setError(null);
-
-    try {
-      await createTag(projectName!, packageName!, {
-        name: createTagName.trim(),
-        artifact_id: createTagArtifactId,
-      });
-      setUploadSuccess(`Tag "${createTagName}" created successfully!`);
-      setCreateTagName('');
-      setCreateTagArtifactId('');
-      loadData();
-      setTimeout(() => setUploadSuccess(null), 5000);
-    } catch (err) {
-      setError(err instanceof Error ? err.message : 'Failed to create tag');
-    } finally {
-      setCreateTagLoading(false);
-    }
-  };
-
   const handleSearchChange = (value: string) => {
     updateParams({ search: value, page: '1' });
   };
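The removed tag form enforced that an artifact ID is exactly 64 lowercase hex characters (a SHA-256 digest). The same checks as standalone helpers, for reference (helper names are illustrative, not part of this changeset):

// True only for a full SHA-256 digest in lowercase hex.
function isValidArtifactId(value: string): boolean {
  return /^[a-f0-9]{64}$/.test(value);
}

// Mirrors the input sanitizing the old form fields applied on change.
function normalizeArtifactIdInput(raw: string): string {
  return raw.toLowerCase().replace(/[^a-f0-9]/g, '').slice(0, 64);
}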
@@ -316,101 +294,225 @@ function PackagePage() {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const hasActiveFilters = search !== '';
|
const hasActiveFilters = search !== '';
|
||||||
const tags = tagsData?.items || [];
|
const artifacts = artifactsData?.items || [];
|
||||||
const pagination = tagsData?.pagination;
|
const pagination = artifactsData?.pagination;
|
||||||
|
|
||||||
const handleTagSelect = (tag: TagDetail) => {
|
const handleArtifactSelect = (artifact: PackageArtifact) => {
|
||||||
setSelectedTag(tag);
|
setSelectedArtifact(artifact);
|
||||||
};
|
};
|
||||||
|
|
||||||
const columns = [
|
const handleMenuOpen = (e: React.MouseEvent, artifactId: string) => {
|
||||||
|
e.stopPropagation();
|
||||||
|
if (openMenuId === artifactId) {
|
||||||
|
setOpenMenuId(null);
|
||||||
|
setMenuPosition(null);
|
||||||
|
} else {
|
||||||
|
const rect = e.currentTarget.getBoundingClientRect();
|
||||||
|
setMenuPosition({ top: rect.bottom + 4, left: rect.right - 180 });
|
||||||
|
setOpenMenuId(artifactId);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Helper to get version from artifact - prefer direct version field, fallback to metadata
|
||||||
|
const getArtifactVersion = (a: PackageArtifact): string | null => {
|
||||||
|
return a.version || (a.format_metadata?.version as string) || null;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Helper to get download ref - prefer version, fallback to artifact ID
|
||||||
|
const getDownloadRef = (a: PackageArtifact): string => {
|
||||||
|
const version = getArtifactVersion(a);
|
||||||
|
return version || `artifact:${a.id}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
// System projects show Version first, regular projects show Tag first
|
||||||
|
const columns = isSystemProject
|
||||||
|
? [
|
||||||
|
// System project columns: Version first, then Filename
|
||||||
{
|
{
|
||||||
key: 'name',
|
key: 'version',
|
||||||
header: 'Tag',
|
header: 'Version',
|
||||||
sortable: true,
|
// version is from format_metadata, not a sortable DB field
|
||||||
render: (t: TagDetail) => (
|
render: (a: PackageArtifact) => (
|
||||||
<strong
|
<strong
|
||||||
className={`tag-name-link ${selectedTag?.id === t.id ? 'selected' : ''}`}
|
className={`tag-name-link ${selectedArtifact?.id === a.id ? 'selected' : ''}`}
|
||||||
onClick={() => handleTagSelect(t)}
|
onClick={() => handleArtifactSelect(a)}
|
||||||
style={{ cursor: 'pointer' }}
|
style={{ cursor: 'pointer' }}
|
||||||
>
|
>
|
||||||
{t.name}
|
<span className="version-badge">{getArtifactVersion(a) || a.id.slice(0, 12)}</span>
|
||||||
</strong>
|
</strong>
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
key: 'version',
|
key: 'original_name',
|
||||||
header: 'Version',
|
header: 'Filename',
|
||||||
render: (t: TagDetail) => (
|
sortable: true,
|
||||||
<span className="version-badge">{t.version || '-'}</span>
|
className: 'cell-truncate',
|
||||||
|
render: (a: PackageArtifact) => (
|
||||||
|
<span title={a.original_name || a.id}>{a.original_name || a.id.slice(0, 12)}</span>
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
key: 'artifact_id',
|
key: 'size',
|
||||||
header: 'Artifact ID',
|
header: 'Size',
|
||||||
render: (t: TagDetail) => (
|
sortable: true,
|
||||||
<div className="artifact-id-cell">
|
render: (a: PackageArtifact) => <span>{formatBytes(a.size)}</span>,
|
||||||
<code className="artifact-id">{t.artifact_id.substring(0, 12)}...</code>
|
},
|
||||||
<CopyButton text={t.artifact_id} />
|
{
|
||||||
|
key: 'created_at',
|
||||||
|
header: 'Cached',
|
||||||
|
sortable: true,
|
||||||
|
render: (a: PackageArtifact) => (
|
||||||
|
<span>{new Date(a.created_at).toLocaleDateString()}</span>
|
||||||
|
),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'actions',
|
||||||
|
header: '',
|
||||||
|
render: (a: PackageArtifact) => (
|
||||||
|
<div className="action-buttons">
|
||||||
|
<a
|
||||||
|
href={getDownloadUrl(projectName!, packageName!, getDownloadRef(a))}
|
||||||
|
className="btn btn-icon"
|
||||||
|
download
|
||||||
|
title="Download"
|
||||||
|
>
|
||||||
|
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
||||||
|
<path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4" />
|
||||||
|
<polyline points="7 10 12 15 17 10" />
|
||||||
|
<line x1="12" y1="15" x2="12" y2="3" />
|
||||||
|
</svg>
|
||||||
|
</a>
|
||||||
|
<button
|
||||||
|
className="btn btn-icon"
|
||||||
|
onClick={(e) => handleMenuOpen(e, a.id)}
|
||||||
|
title="More actions"
|
||||||
|
>
|
||||||
|
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
||||||
|
<circle cx="12" cy="12" r="1" />
|
||||||
|
<circle cx="12" cy="5" r="1" />
|
||||||
|
<circle cx="12" cy="19" r="1" />
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
</div>
|
</div>
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
|
]
|
||||||
|
: [
|
||||||
|
// Regular project columns: Version, Filename, Size, Created
|
||||||
|
// Valid sort fields: created_at, size, original_name
|
||||||
{
|
{
|
||||||
key: 'artifact_size',
|
key: 'version',
|
||||||
header: 'Size',
|
header: 'Version',
|
||||||
render: (t: TagDetail) => <span>{formatBytes(t.artifact_size)}</span>,
|
// version is from format_metadata, not a sortable DB field
|
||||||
},
|
render: (a: PackageArtifact) => (
|
||||||
{
|
<strong
|
||||||
key: 'artifact_content_type',
|
className={`tag-name-link ${selectedArtifact?.id === a.id ? 'selected' : ''}`}
|
||||||
header: 'Type',
|
onClick={() => handleArtifactSelect(a)}
|
||||||
render: (t: TagDetail) => (
|
style={{ cursor: 'pointer' }}
|
||||||
<span className="content-type">{t.artifact_content_type || '-'}</span>
|
>
|
||||||
|
<span className="version-badge">{getArtifactVersion(a) || a.id.slice(0, 12)}</span>
|
||||||
|
</strong>
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
key: 'artifact_original_name',
|
key: 'original_name',
|
||||||
header: 'Filename',
|
header: 'Filename',
|
||||||
|
sortable: true,
|
||||||
className: 'cell-truncate',
|
className: 'cell-truncate',
|
||||||
render: (t: TagDetail) => (
|
render: (a: PackageArtifact) => (
|
||||||
<span title={t.artifact_original_name || undefined}>{t.artifact_original_name || '-'}</span>
|
<span title={a.original_name || undefined}>{a.original_name || '—'}</span>
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
key: 'size',
|
||||||
|
header: 'Size',
|
||||||
|
sortable: true,
|
||||||
|
render: (a: PackageArtifact) => <span>{formatBytes(a.size)}</span>,
|
||||||
|
},
|
||||||
{
|
{
|
||||||
key: 'created_at',
|
key: 'created_at',
|
||||||
header: 'Created',
|
header: 'Created',
|
||||||
sortable: true,
|
sortable: true,
|
||||||
render: (t: TagDetail) => (
|
render: (a: PackageArtifact) => (
|
||||||
<div className="created-cell">
|
<span title={`by ${a.created_by}`}>{new Date(a.created_at).toLocaleDateString()}</span>
|
||||||
<span>{new Date(t.created_at).toLocaleString()}</span>
|
|
||||||
<span className="created-by">by {t.created_by}</span>
|
|
||||||
</div>
|
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
key: 'actions',
|
key: 'actions',
|
||||||
header: 'Actions',
|
header: '',
|
||||||
render: (t: TagDetail) => (
|
render: (a: PackageArtifact) => (
|
||||||
<div className="action-buttons">
|
<div className="action-buttons">
|
||||||
<button
|
|
||||||
className="btn btn-secondary btn-small"
|
|
||||||
onClick={() => fetchEnsureFileForTag(t.name)}
|
|
||||||
title="View orchard.ensure file"
|
|
||||||
>
|
|
||||||
Ensure
|
|
||||||
</button>
|
|
||||||
<a
|
<a
|
||||||
href={getDownloadUrl(projectName!, packageName!, t.name)}
|
href={getDownloadUrl(projectName!, packageName!, getDownloadRef(a))}
|
||||||
className="btn btn-secondary btn-small"
|
className="btn btn-icon"
|
||||||
download
|
download
|
||||||
|
title="Download"
|
||||||
>
|
>
|
||||||
Download
|
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
||||||
|
<path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4" />
|
||||||
|
<polyline points="7 10 12 15 17 10" />
|
||||||
|
<line x1="12" y1="15" x2="12" y2="3" />
|
||||||
|
</svg>
|
||||||
</a>
|
</a>
|
||||||
|
<button
|
||||||
|
className="btn btn-icon"
|
||||||
|
onClick={(e) => handleMenuOpen(e, a.id)}
|
||||||
|
title="More actions"
|
||||||
|
>
|
||||||
|
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
||||||
|
<circle cx="12" cy="12" r="1" />
|
||||||
|
<circle cx="12" cy="5" r="1" />
|
||||||
|
<circle cx="12" cy="19" r="1" />
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
</div>
|
</div>
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
if (loading && !tagsData) {
|
// Find the artifact for the open menu
|
||||||
|
const openMenuArtifact = artifacts.find(a => a.id === openMenuId);
|
||||||
|
|
||||||
|
// Close menu when clicking outside
|
||||||
|
const handleClickOutside = () => {
|
||||||
|
if (openMenuId) {
|
||||||
|
setOpenMenuId(null);
|
||||||
|
setMenuPosition(null);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Render dropdown menu as a portal-like element
|
||||||
|
const renderActionMenu = () => {
|
||||||
|
if (!openMenuId || !menuPosition || !openMenuArtifact) return null;
|
||||||
|
const a = openMenuArtifact;
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
className="action-menu-backdrop"
|
||||||
|
onClick={handleClickOutside}
|
||||||
|
>
|
||||||
|
<div
|
||||||
|
className="action-menu-dropdown"
|
||||||
|
style={{ top: menuPosition.top, left: menuPosition.left }}
|
||||||
|
onClick={(e) => e.stopPropagation()}
|
||||||
|
>
|
||||||
|
<button onClick={() => { setViewArtifactId(a.id); setShowArtifactIdModal(true); setOpenMenuId(null); setMenuPosition(null); }}>
|
||||||
|
View Artifact ID
|
||||||
|
</button>
|
||||||
|
<button onClick={() => { navigator.clipboard.writeText(a.id); setOpenMenuId(null); setMenuPosition(null); }}>
|
||||||
|
Copy Artifact ID
|
||||||
|
</button>
|
||||||
|
<button onClick={() => { const version = getArtifactVersion(a); const ref = version || `artifact:${a.id}`; fetchEnsureFileForRef(ref); setOpenMenuId(null); setMenuPosition(null); }}>
|
||||||
|
View Ensure File
|
||||||
|
</button>
|
||||||
|
<button onClick={() => { handleArtifactSelect(a); setShowDepsModal(true); setOpenMenuId(null); setMenuPosition(null); }}>
|
||||||
|
View Dependencies
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
if (loading && !artifactsData) {
|
||||||
return <div className="loading">Loading...</div>;
|
return <div className="loading">Loading...</div>;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -451,6 +553,19 @@ function PackagePage() {
|
|||||||
<div className="page-header__title-row">
|
<div className="page-header__title-row">
|
||||||
<h1>{packageName}</h1>
|
<h1>{packageName}</h1>
|
||||||
{pkg && <Badge variant="default">{pkg.format}</Badge>}
|
{pkg && <Badge variant="default">{pkg.format}</Badge>}
|
||||||
|
{user && canWrite && !isSystemProject && (
|
||||||
|
<button
|
||||||
|
className="btn btn-primary btn-small header-upload-btn"
|
||||||
|
onClick={() => setShowUploadModal(true)}
|
||||||
|
>
|
||||||
|
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" style={{ marginRight: '6px' }}>
|
||||||
|
<path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4" />
|
||||||
|
<polyline points="17 8 12 3 7 8" />
|
||||||
|
<line x1="12" y1="3" x2="12" y2="15" />
|
||||||
|
</svg>
|
||||||
|
Upload
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
{pkg?.description && <p className="description">{pkg.description}</p>}
|
{pkg?.description && <p className="description">{pkg.description}</p>}
|
||||||
<div className="page-header__meta">
|
<div className="page-header__meta">
|
||||||
@@ -466,16 +581,11 @@ function PackagePage() {
|
|||||||
</>
|
</>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
{pkg && (pkg.tag_count !== undefined || pkg.artifact_count !== undefined) && (
|
{pkg && pkg.artifact_count !== undefined && (
|
||||||
<div className="package-header-stats">
|
<div className="package-header-stats">
|
||||||
{pkg.tag_count !== undefined && (
|
|
||||||
<span className="stat-item">
|
|
||||||
<strong>{pkg.tag_count}</strong> tags
|
|
||||||
</span>
|
|
||||||
)}
|
|
||||||
{pkg.artifact_count !== undefined && (
|
{pkg.artifact_count !== undefined && (
|
||||||
<span className="stat-item">
|
<span className="stat-item">
|
||||||
<strong>{pkg.artifact_count}</strong> artifacts
|
<strong>{pkg.artifact_count}</strong> {isSystemProject ? 'versions' : 'artifacts'}
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
{pkg.total_size !== undefined && pkg.total_size > 0 && (
|
{pkg.total_size !== undefined && pkg.total_size > 0 && (
|
||||||
@@ -483,11 +593,6 @@ function PackagePage() {
|
|||||||
<strong>{formatBytes(pkg.total_size)}</strong> total
|
<strong>{formatBytes(pkg.total_size)}</strong> total
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
{pkg.latest_tag && (
|
|
||||||
<span className="stat-item">
|
|
||||||
Latest: <strong className="accent">{pkg.latest_tag}</strong>
|
|
||||||
</span>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
@@ -496,51 +601,16 @@ function PackagePage() {
|
|||||||
{error && <div className="error-message">{error}</div>}
|
{error && <div className="error-message">{error}</div>}
|
||||||
{uploadSuccess && <div className="success-message">{uploadSuccess}</div>}
|
{uploadSuccess && <div className="success-message">{uploadSuccess}</div>}
|
||||||
|
|
||||||
{user && (
|
|
||||||
<div className="upload-section card">
|
|
||||||
<h3>Upload Artifact</h3>
|
|
||||||
{canWrite ? (
|
|
||||||
<div className="upload-form">
|
|
||||||
<div className="form-group">
|
|
||||||
<label htmlFor="upload-tag">Tag (optional)</label>
|
|
||||||
<input
|
|
||||||
id="upload-tag"
|
|
||||||
type="text"
|
|
||||||
value={uploadTag}
|
|
||||||
onChange={(e) => setUploadTag(e.target.value)}
|
|
||||||
placeholder="v1.0.0, latest, stable..."
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<DragDropUpload
|
|
||||||
projectName={projectName!}
|
|
||||||
packageName={packageName!}
|
|
||||||
tag={uploadTag || undefined}
|
|
||||||
onUploadComplete={handleUploadComplete}
|
|
||||||
onUploadError={handleUploadError}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
) : (
|
|
||||||
<DragDropUpload
|
|
||||||
projectName={projectName!}
|
|
||||||
packageName={packageName!}
|
|
||||||
disabled={true}
|
|
||||||
disabledReason="You have read-only access to this project and cannot upload artifacts."
|
|
||||||
onUploadComplete={handleUploadComplete}
|
|
||||||
onUploadError={handleUploadError}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<div className="section-header">
|
<div className="section-header">
|
||||||
<h2>Tags / Versions</h2>
|
<h2>{isSystemProject ? 'Versions' : 'Artifacts'}</h2>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="list-controls">
|
<div className="list-controls">
|
||||||
<SearchInput
|
<SearchInput
|
||||||
value={search}
|
value={search}
|
||||||
onChange={handleSearchChange}
|
onChange={handleSearchChange}
|
||||||
placeholder="Filter tags..."
|
placeholder="Filter artifacts..."
|
||||||
className="list-controls__search"
|
className="list-controls__search"
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
@@ -553,13 +623,13 @@ function PackagePage() {
|
|||||||
|
|
||||||
<div className="data-table--responsive">
|
<div className="data-table--responsive">
|
||||||
<DataTable
|
<DataTable
|
||||||
data={tags}
|
data={artifacts}
|
||||||
columns={columns}
|
columns={columns}
|
||||||
keyExtractor={(t) => t.id}
|
keyExtractor={(a) => a.id}
|
||||||
emptyMessage={
|
emptyMessage={
|
||||||
hasActiveFilters
|
hasActiveFilters
|
||||||
? 'No tags match your filters. Try adjusting your search.'
|
? 'No artifacts match your filters. Try adjusting your search.'
|
||||||
: 'No tags yet. Upload an artifact with a tag to create one!'
|
: 'No artifacts yet. Upload a file to get started!'
|
||||||
}
|
}
|
||||||
onSort={handleSortChange}
|
onSort={handleSortChange}
|
||||||
sortKey={sort}
|
sortKey={sort}
|
||||||
@@ -577,121 +647,13 @@ function PackagePage() {
|
|||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* Dependencies Section */}
|
{/* Used By (Reverse Dependencies) Section - only show if there are reverse deps or error */}
|
||||||
{tags.length > 0 && (
|
{(reverseDeps.length > 0 || reverseDepsError) && (
|
||||||
<div className="dependencies-section card">
|
|
||||||
<div className="dependencies-header">
|
|
||||||
<h3>Dependencies</h3>
|
|
||||||
<div className="dependencies-controls">
|
|
||||||
{selectedTag && (
|
|
||||||
<>
|
|
||||||
<button
|
|
||||||
className="btn btn-secondary btn-small"
|
|
||||||
onClick={fetchEnsureFile}
|
|
||||||
disabled={ensureFileLoading}
|
|
||||||
title="View orchard.ensure file"
|
|
||||||
>
|
|
||||||
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" style={{ marginRight: '6px' }}>
|
|
||||||
<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path>
|
|
||||||
<polyline points="14 2 14 8 20 8"></polyline>
|
|
||||||
<line x1="16" y1="13" x2="8" y2="13"></line>
|
|
||||||
<line x1="16" y1="17" x2="8" y2="17"></line>
|
|
||||||
<polyline points="10 9 9 9 8 9"></polyline>
|
|
||||||
</svg>
|
|
||||||
{ensureFileLoading ? 'Loading...' : 'View Ensure File'}
|
|
||||||
</button>
|
|
||||||
<button
|
|
||||||
className="btn btn-secondary btn-small"
|
|
||||||
onClick={() => setShowGraph(true)}
|
|
||||||
title="View full dependency tree"
|
|
||||||
>
|
|
||||||
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" style={{ marginRight: '6px' }}>
|
|
||||||
<circle cx="12" cy="12" r="3"></circle>
|
|
||||||
<circle cx="4" cy="4" r="2"></circle>
|
|
||||||
<circle cx="20" cy="4" r="2"></circle>
|
|
||||||
<circle cx="4" cy="20" r="2"></circle>
|
|
||||||
<circle cx="20" cy="20" r="2"></circle>
|
|
||||||
<line x1="9.5" y1="9.5" x2="5.5" y2="5.5"></line>
|
|
||||||
<line x1="14.5" y1="9.5" x2="18.5" y2="5.5"></line>
|
|
||||||
<line x1="9.5" y1="14.5" x2="5.5" y2="18.5"></line>
|
|
||||||
<line x1="14.5" y1="14.5" x2="18.5" y2="18.5"></line>
|
|
||||||
</svg>
|
|
||||||
View Graph
|
|
||||||
</button>
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div className="dependencies-tag-select">
|
|
||||||
{selectedTag && (
|
|
||||||
<select
|
|
||||||
className="tag-selector"
|
|
||||||
value={selectedTag.id}
|
|
||||||
onChange={(e) => {
|
|
||||||
const tag = tags.find(t => t.id === e.target.value);
|
|
||||||
if (tag) setSelectedTag(tag);
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
{tags.map(t => (
|
|
||||||
<option key={t.id} value={t.id}>
|
|
||||||
{t.name}{t.version ? ` (${t.version})` : ''}
|
|
||||||
</option>
|
|
||||||
))}
|
|
||||||
</select>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{depsLoading ? (
|
|
||||||
<div className="deps-loading">Loading dependencies...</div>
|
|
||||||
) : depsError ? (
|
|
||||||
<div className="deps-error">{depsError}</div>
|
|
||||||
) : dependencies.length === 0 ? (
|
|
||||||
<div className="deps-empty">
|
|
||||||
{selectedTag ? (
|
|
||||||
<span><strong>{selectedTag.name}</strong> has no dependencies</span>
|
|
||||||
) : (
|
|
||||||
<span>No dependencies</span>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
) : (
|
|
||||||
<div className="deps-list">
|
|
||||||
<div className="deps-summary">
|
|
||||||
<strong>{selectedTag?.name}</strong> has {dependencies.length} {dependencies.length === 1 ? 'dependency' : 'dependencies'}:
|
|
||||||
</div>
|
|
||||||
<ul className="deps-items">
|
|
||||||
{dependencies.map((dep) => (
|
|
||||||
<li key={dep.id} className="dep-item">
|
|
||||||
<Link
|
|
||||||
to={`/project/${dep.project}/${dep.package}`}
|
|
||||||
className="dep-link"
|
|
||||||
>
|
|
||||||
{dep.project}/{dep.package}
|
|
||||||
</Link>
|
|
||||||
<span className="dep-constraint">
|
|
||||||
@ {dep.version || dep.tag}
|
|
||||||
</span>
|
|
||||||
<span className="dep-status dep-status--ok" title="Package exists">
|
|
||||||
✓
|
|
||||||
</span>
|
|
||||||
</li>
|
|
||||||
))}
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{/* Used By (Reverse Dependencies) Section */}
|
|
||||||
<div className="used-by-section card">
|
<div className="used-by-section card">
|
||||||
<h3>Used By</h3>
|
<h3>Used By</h3>
|
||||||
|
{reverseDepsError && (
|
||||||
{reverseDepsLoading ? (
|
<div className="error-message">{reverseDepsError}</div>
|
||||||
<div className="deps-loading">Loading reverse dependencies...</div>
|
)}
|
||||||
) : reverseDepsError ? (
|
|
||||||
<div className="deps-error">{reverseDepsError}</div>
|
|
||||||
) : reverseDeps.length === 0 ? (
|
|
||||||
<div className="deps-empty">No packages depend on this package</div>
|
|
||||||
) : (
|
|
||||||
<div className="reverse-deps-list">
|
<div className="reverse-deps-list">
|
||||||
<div className="deps-summary">
|
<div className="deps-summary">
|
||||||
{reverseDepsTotal} {reverseDepsTotal === 1 ? 'package depends' : 'packages depend'} on this:
|
{reverseDepsTotal} {reverseDepsTotal === 1 ? 'package depends' : 'packages depend'} on this:
|
||||||
@@ -734,103 +696,51 @@ function PackagePage() {
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="download-by-id-section card">
|
|
||||||
<h3>Download by Artifact ID</h3>
|
|
||||||
<div className="download-by-id-form">
|
|
||||||
<input
|
|
||||||
type="text"
|
|
||||||
value={artifactIdInput}
|
|
||||||
onChange={(e) => setArtifactIdInput(e.target.value.toLowerCase().replace(/[^a-f0-9]/g, '').slice(0, 64))}
|
|
||||||
placeholder="Enter SHA256 artifact ID (64 hex characters)"
|
|
||||||
className="artifact-id-input"
|
|
||||||
/>
|
|
||||||
<a
|
|
||||||
href={artifactIdInput.length === 64 ? getDownloadUrl(projectName!, packageName!, `artifact:${artifactIdInput}`) : '#'}
|
|
||||||
className={`btn btn-primary ${artifactIdInput.length !== 64 ? 'btn-disabled' : ''}`}
|
|
||||||
download
|
|
||||||
onClick={(e) => {
|
|
||||||
if (artifactIdInput.length !== 64) {
|
|
||||||
e.preventDefault();
|
|
||||||
}
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
Download
|
|
||||||
</a>
|
|
||||||
</div>
|
|
||||||
{artifactIdInput.length > 0 && artifactIdInput.length !== 64 && (
|
|
||||||
<p className="validation-hint">Artifact ID must be exactly 64 hex characters ({artifactIdInput.length}/64)</p>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{user && canWrite && (
|
|
||||||
<div className="create-tag-section card">
|
|
||||||
<h3>Create / Update Tag</h3>
|
|
||||||
<p className="section-description">Point a tag at any existing artifact by its ID</p>
|
|
||||||
<form onSubmit={handleCreateTag} className="create-tag-form">
|
|
||||||
<div className="form-row">
|
|
||||||
<div className="form-group">
|
|
||||||
<label htmlFor="create-tag-name">Tag Name</label>
|
|
||||||
<input
|
|
||||||
id="create-tag-name"
|
|
||||||
type="text"
|
|
||||||
value={createTagName}
|
|
||||||
onChange={(e) => setCreateTagName(e.target.value)}
|
|
||||||
placeholder="latest, stable, v1.0.0..."
|
|
||||||
disabled={createTagLoading}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<div className="form-group form-group--wide">
|
|
||||||
<label htmlFor="create-tag-artifact">Artifact ID</label>
|
|
||||||
<input
|
|
||||||
id="create-tag-artifact"
|
|
||||||
type="text"
|
|
||||||
value={createTagArtifactId}
|
|
||||||
onChange={(e) => setCreateTagArtifactId(e.target.value.toLowerCase().replace(/[^a-f0-9]/g, '').slice(0, 64))}
|
|
||||||
placeholder="SHA256 hash (64 hex characters)"
|
|
||||||
className="artifact-id-input"
|
|
||||||
disabled={createTagLoading}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<button
|
|
||||||
type="submit"
|
|
||||||
className="btn btn-primary"
|
|
||||||
disabled={createTagLoading || !createTagName.trim() || createTagArtifactId.length !== 64}
|
|
||||||
>
|
|
||||||
{createTagLoading ? 'Creating...' : 'Create Tag'}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
{createTagArtifactId.length > 0 && createTagArtifactId.length !== 64 && (
|
|
||||||
<p className="validation-hint">Artifact ID must be exactly 64 hex characters ({createTagArtifactId.length}/64)</p>
|
|
||||||
)}
|
|
||||||
</form>
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
<div className="usage-section card">
|
|
||||||
<h3>Usage</h3>
|
|
||||||
<p>Download artifacts using:</p>
|
|
||||||
<pre>
|
|
||||||
<code>curl -O {window.location.origin}/api/v1/project/{projectName}/{packageName}/+/latest</code>
|
|
||||||
</pre>
|
|
||||||
<p>Or with a specific tag:</p>
|
|
||||||
<pre>
|
|
||||||
<code>curl -O {window.location.origin}/api/v1/project/{projectName}/{packageName}/+/v1.0.0</code>
|
|
||||||
</pre>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Dependency Graph Modal */}
|
{/* Dependency Graph Modal */}
|
||||||
{showGraph && selectedTag && (
|
{showGraph && selectedArtifact && (
|
||||||
<DependencyGraph
|
<DependencyGraph
|
||||||
projectName={projectName!}
|
projectName={projectName!}
|
||||||
packageName={packageName!}
|
packageName={packageName!}
|
||||||
tagName={selectedTag.name}
|
tagName={getArtifactVersion(selectedArtifact) || `artifact:${selectedArtifact.id}`}
|
||||||
onClose={() => setShowGraph(false)}
|
onClose={() => setShowGraph(false)}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
{/* Upload Modal */}
|
||||||
|
{showUploadModal && (
|
||||||
|
<div className="modal-overlay" onClick={() => setShowUploadModal(false)}>
|
||||||
|
<div className="upload-modal" onClick={(e) => e.stopPropagation()}>
|
||||||
|
<div className="modal-header">
|
||||||
|
<h3>Upload Artifact</h3>
|
||||||
|
<button
|
||||||
|
className="modal-close"
|
||||||
|
onClick={() => setShowUploadModal(false)}
|
||||||
|
title="Close"
|
||||||
|
>
|
||||||
|
<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
||||||
|
<line x1="18" y1="6" x2="6" y2="18"></line>
|
||||||
|
<line x1="6" y1="6" x2="18" y2="18"></line>
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div className="modal-body">
|
||||||
|
<DragDropUpload
|
||||||
|
projectName={projectName!}
|
||||||
|
packageName={packageName!}
|
||||||
|
onUploadComplete={(result) => {
|
||||||
|
handleUploadComplete(result);
|
||||||
|
setShowUploadModal(false);
|
||||||
|
}}
|
||||||
|
onUploadError={handleUploadError}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
{/* Ensure File Modal */}
|
{/* Ensure File Modal */}
|
||||||
{showEnsureFile && (
|
{showEnsureFile && (
|
||||||
<div className="modal-overlay" onClick={() => setShowEnsureFile(false)}>
|
<div className="modal-overlay" onClick={() => setShowEnsureFile(false)}>
|
||||||
@@ -872,6 +782,107 @@ function PackagePage() {
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
{/* Dependencies Modal */}
|
||||||
|
{showDepsModal && selectedArtifact && (
|
||||||
|
<div className="modal-overlay" onClick={() => setShowDepsModal(false)}>
|
||||||
|
<div className="deps-modal" onClick={(e) => e.stopPropagation()}>
|
||||||
|
<div className="modal-header">
|
||||||
|
<h3>Dependencies for {selectedArtifact.original_name || selectedArtifact.id.slice(0, 12)}</h3>
|
||||||
|
<button
|
||||||
|
className="modal-close"
|
||||||
|
onClick={() => setShowDepsModal(false)}
|
||||||
|
title="Close"
|
||||||
|
>
|
||||||
|
<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
||||||
|
<line x1="18" y1="6" x2="6" y2="18"></line>
|
||||||
|
<line x1="6" y1="6" x2="18" y2="18"></line>
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div className="modal-body">
|
||||||
|
<div className="deps-modal-controls">
|
||||||
|
<button
|
||||||
|
className="btn btn-secondary btn-small"
|
||||||
|
onClick={fetchEnsureFile}
|
||||||
|
disabled={ensureFileLoading}
|
||||||
|
>
|
||||||
|
View Ensure File
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
className="btn btn-secondary btn-small"
|
||||||
|
onClick={() => { setShowDepsModal(false); setShowGraph(true); }}
|
||||||
|
>
|
||||||
|
View Graph
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
{depsLoading ? (
|
||||||
|
<div className="deps-loading">Loading dependencies...</div>
|
||||||
|
) : depsError ? (
|
||||||
|
<div className="deps-error">{depsError}</div>
|
||||||
|
) : dependencies.length === 0 ? (
|
||||||
|
<div className="deps-empty">No dependencies</div>
|
||||||
|
) : (
|
||||||
|
<div className="deps-list">
|
||||||
|
<div className="deps-summary">
|
||||||
|
{dependencies.length} {dependencies.length === 1 ? 'dependency' : 'dependencies'}:
|
||||||
|
</div>
|
||||||
|
<ul className="deps-items">
|
||||||
|
{dependencies.map((dep) => (
|
||||||
|
<li key={dep.id} className="dep-item">
|
||||||
|
<Link
|
||||||
|
to={`/project/${dep.project}/${dep.package}`}
|
||||||
|
className="dep-link"
|
||||||
|
onClick={() => setShowDepsModal(false)}
|
||||||
|
>
|
||||||
|
{dep.project}/{dep.package}
|
||||||
|
</Link>
|
||||||
|
<span className="dep-constraint">
|
||||||
|
@ {dep.version}
|
||||||
|
</span>
|
||||||
|
<span className="dep-status dep-status--ok" title="Package exists">
|
||||||
|
✓
|
||||||
|
</span>
|
||||||
|
</li>
|
||||||
|
))}
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Artifact ID Modal */}
|
||||||
|
{showArtifactIdModal && viewArtifactId && (
|
||||||
|
<div className="modal-overlay" onClick={() => setShowArtifactIdModal(false)}>
|
||||||
|
<div className="artifact-id-modal" onClick={(e) => e.stopPropagation()}>
|
||||||
|
<div className="modal-header">
|
||||||
|
<h3>Artifact ID</h3>
|
||||||
|
<button
|
||||||
|
className="modal-close"
|
||||||
|
onClick={() => setShowArtifactIdModal(false)}
|
||||||
|
title="Close"
|
||||||
|
>
|
||||||
|
<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
||||||
|
<line x1="18" y1="6" x2="6" y2="18"></line>
|
||||||
|
<line x1="6" y1="6" x2="18" y2="18"></line>
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div className="modal-body">
|
||||||
|
<p className="modal-description">SHA256 hash identifying this artifact:</p>
|
||||||
|
<div className="artifact-id-display">
|
||||||
|
<code>{viewArtifactId}</code>
|
||||||
|
<CopyButton text={viewArtifactId} />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Action Menu Dropdown */}
|
||||||
|
{renderActionMenu()}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -195,6 +195,9 @@ function ProjectPage() {
           <Badge variant={project.is_public ? 'public' : 'private'}>
             {project.is_public ? 'Public' : 'Private'}
           </Badge>
+          {project.is_system && (
+            <Badge variant="warning">System Cache</Badge>
+          )}
           {accessLevel && (
             <Badge variant={accessLevel === 'admin' ? 'success' : accessLevel === 'write' ? 'info' : 'default'}>
               {isOwner ? 'Owner' : accessLevel.charAt(0).toUpperCase() + accessLevel.slice(1)}
@@ -211,7 +214,7 @@ function ProjectPage() {
           </div>
         </div>
         <div className="page-header__actions">
-          {canAdmin && !project.team_id && (
+          {canAdmin && !project.team_id && !project.is_system && (
             <button
               className="btn btn-secondary"
               onClick={() => navigate(`/project/${projectName}/settings`)}
@@ -224,11 +227,11 @@ function ProjectPage() {
               Settings
             </button>
           )}
-          {canWrite ? (
+          {canWrite && !project.is_system ? (
             <button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
               {showForm ? 'Cancel' : '+ New Package'}
             </button>
-          ) : user ? (
+          ) : user && !project.is_system ? (
             <span className="text-muted" title="You have read-only access to this project">
               Read-only access
             </span>
@@ -291,6 +294,7 @@ function ProjectPage() {
           placeholder="Filter packages..."
           className="list-controls__search"
         />
+        {!project?.is_system && (
         <select
           className="list-controls__select"
           value={format}
@@ -303,6 +307,7 @@ function ProjectPage() {
             </option>
           ))}
         </select>
+        )}
       </div>

       {hasActiveFilters && (
@@ -338,19 +343,19 @@ function ProjectPage() {
      className: 'cell-description',
      render: (pkg) => pkg.description || '—',
    },
-    {
+    ...(!project?.is_system ? [{
      key: 'format',
      header: 'Format',
-     render: (pkg) => <Badge variant="default">{pkg.format}</Badge>,
-    },
-    {
-     key: 'tag_count',
-     header: 'Tags',
-     render: (pkg) => pkg.tag_count ?? '—',
-    },
+     render: (pkg: Package) => <Badge variant="default">{pkg.format}</Badge>,
+    }] : []),
+    ...(!project?.is_system ? [{
+     key: 'version_count',
+     header: 'Versions',
+     render: (pkg: Package) => pkg.version_count ?? '—',
+    }] : []),
    {
      key: 'artifact_count',
-     header: 'Artifacts',
+     header: project?.is_system ? 'Versions' : 'Artifacts',
      render: (pkg) => pkg.artifact_count ?? '—',
    },
    {
@@ -359,12 +364,12 @@ function ProjectPage() {
      render: (pkg) =>
        pkg.total_size !== undefined && pkg.total_size > 0 ? formatBytes(pkg.total_size) : '—',
    },
-    {
-     key: 'latest_tag',
+    ...(!project?.is_system ? [{
+     key: 'latest_version',
      header: 'Latest',
-     render: (pkg) =>
-       pkg.latest_tag ? <strong style={{ color: 'var(--accent-primary)' }}>{pkg.latest_tag}</strong> : '—',
-    },
+     render: (pkg: Package) =>
+       pkg.latest_version ? <strong style={{ color: 'var(--accent-primary)' }}>{pkg.latest_version}</strong> : '—',
+    }] : []),
    {
      key: 'created_at',
      header: 'Created',
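The column arrays above include the format, version, and latest columns only for regular (non-system) projects via a spread-with-ternary. A reduced, self-contained sketch of that pattern (types and names here are illustrative, not from this changeset):

interface ColumnDef<T> {
  key: string;
  header: string;
  render: (row: T) => string;
}

// Columns common to all projects, plus format only when the project is not a system cache.
function packageColumns(isSystemProject: boolean): ColumnDef<{ name: string; format: string }>[] {
  return [
    { key: 'name', header: 'Name', render: (p) => p.name },
    ...(!isSystemProject
      ? [{ key: 'format', header: 'Format', render: (p: { name: string; format: string }) => p.format }]
      : []),
  ];
}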
|
|||||||
@@ -6,6 +6,7 @@ export interface Project {
   name: string;
   description: string | null;
   is_public: boolean;
+  is_system?: boolean; // True for system cache projects (_npm, _pypi, etc.)
   created_at: string;
   updated_at: string;
   created_by: string;
@@ -18,12 +19,6 @@ export interface Project {
   team_name?: string | null;
 }

-export interface TagSummary {
-  name: string;
-  artifact_id: string;
-  created_at: string;
-}
-
 export interface Package {
   id: string;
   project_id: string;
@@ -34,12 +29,11 @@ export interface Package {
   created_at: string;
   updated_at: string;
   // Aggregated fields (from PackageDetailResponse)
-  tag_count?: number;
   artifact_count?: number;
+  version_count?: number;
   total_size?: number;
-  latest_tag?: string | null;
   latest_upload_at?: string | null;
-  recent_tags?: TagSummary[];
+  latest_version?: string | null;
 }

 export interface Artifact {
@@ -52,22 +46,19 @@ export interface Artifact {
   ref_count: number;
 }

-export interface Tag {
+export interface PackageArtifact {
   id: string;
-  package_id: string;
-  name: string;
-  artifact_id: string;
+  sha256: string;
+  size: number;
+  content_type: string | null;
+  original_name: string | null;
+  checksum_md5?: string | null;
+  checksum_sha1?: string | null;
+  s3_etag?: string | null;
   created_at: string;
   created_by: string;
-}
-
-export interface TagDetail extends Tag {
-  artifact_size: number;
-  artifact_content_type: string | null;
-  artifact_original_name: string | null;
-  artifact_created_at: string;
-  artifact_format_metadata: Record<string, unknown> | null;
-  version: string | null;
+  format_metadata?: Record<string, unknown> | null;
+  version?: string | null; // Version from PackageVersion if exists
 }

 export interface PackageVersion {
@@ -82,20 +73,9 @@ export interface PackageVersion {
   size?: number;
   content_type?: string | null;
   original_name?: string | null;
-  tags?: string[];
 }

-export interface ArtifactTagInfo {
-  id: string;
-  name: string;
-  package_id: string;
-  package_name: string;
-  project_name: string;
-}
-
-export interface ArtifactDetail extends Artifact {
-  tags: ArtifactTagInfo[];
-}
+export interface ArtifactDetail extends Artifact {}

 export interface PaginatedResponse<T> {
   items: T[];
@@ -115,8 +95,6 @@ export interface ListParams {
   order?: 'asc' | 'desc';
 }

-export interface TagListParams extends ListParams {}
-
 export interface PackageListParams extends ListParams {
   format?: string;
   platform?: string;
@@ -141,7 +119,6 @@ export interface UploadResponse {
   size: number;
   project: string;
   package: string;
-  tag: string | null;
   version: string | null;
   version_source: string | null;
 }
@@ -164,9 +141,8 @@ export interface SearchResultPackage {
 }

 export interface SearchResultArtifact {
-  tag_id: string;
-  tag_name: string;
   artifact_id: string;
+  version: string | null;
   package_id: string;
   package_name: string;
   project_name: string;
@@ -389,8 +365,7 @@ export interface Dependency {
   artifact_id: string;
   project: string;
   package: string;
-  version: string | null;
-  tag: string | null;
+  version: string;
   created_at: string;
 }
@@ -404,7 +379,6 @@ export interface DependentInfo {
   project: string;
   package: string;
   version: string | null;
-  constraint_type: 'version' | 'tag';
   constraint_value: string;
 }
@@ -427,11 +401,17 @@ export interface ResolvedArtifact {
   project: string;
   package: string;
   version: string | null;
-  tag: string | null;
   size: number;
   download_url: string;
 }

+export interface MissingDependency {
+  project: string;
+  package: string;
+  constraint: string | null;
+  required_by: string | null;
+}
+
 export interface DependencyResolutionResponse {
   requested: {
     project: string;
@@ -439,6 +419,7 @@ export interface DependencyResolutionResponse {
     ref: string;
   };
   resolved: ResolvedArtifact[];
+  missing: MissingDependency[];
   total_size: number;
   artifact_count: number;
 }
@@ -503,3 +484,56 @@ export interface TeamMemberCreate {
 export interface TeamMemberUpdate {
   role: TeamRole;
 }
+
+// Upstream Source types
+export type SourceType = 'npm' | 'pypi' | 'maven' | 'docker' | 'helm' | 'nuget' | 'deb' | 'rpm' | 'generic';
+export type AuthType = 'none' | 'basic' | 'bearer' | 'api_key';
+
+export interface UpstreamSource {
+  id: string;
+  name: string;
+  source_type: SourceType;
+  url: string;
+  enabled: boolean;
+  auth_type: AuthType;
+  username: string | null;
+  has_password: boolean;
+  has_headers: boolean;
+  priority: number;
+  source: 'database' | 'env';
+  created_at: string | null;
+  updated_at: string | null;
+}
+
+export interface UpstreamSourceCreate {
+  name: string;
+  source_type: SourceType;
+  url: string;
+  enabled?: boolean;
+  auth_type?: AuthType;
+  username?: string;
+  password?: string;
+  headers?: Record<string, string>;
+  priority?: number;
+}
+
+export interface UpstreamSourceUpdate {
+  name?: string;
+  source_type?: SourceType;
+  url?: string;
+  enabled?: boolean;
+  auth_type?: AuthType;
+  username?: string;
+  password?: string;
+  headers?: Record<string, string> | null;
+  priority?: number;
+}
+
+export interface UpstreamSourceTestResult {
+  success: boolean;
+  status_code: number | null;
+  elapsed_ms: number;
+  error: string | null;
+  source_id: string;
+  source_name: string;
+}
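Two small helpers showing how the reshaped PackageArtifact and PaginatedResponse types compose (the './types' import path and helper names are assumptions for illustration; only fields defined above are used):

import type { PackageArtifact, PaginatedResponse } from './types';

// Sum the byte size of one page of artifacts; only PaginatedResponse<T>.items is relied on here.
function pageArtifactBytes(page: PaginatedResponse<PackageArtifact>): number {
  return page.items.reduce((sum, a) => sum + a.size, 0);
}

// Treat an artifact as "versioned" when either the direct version field or
// format_metadata.version is present (mirrors the UI logic in PackagePage).
function hasVersion(a: PackageArtifact): boolean {
  return Boolean(a.version || (a.format_metadata?.version as string | undefined));
}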
|
|||||||
@@ -128,6 +128,10 @@ spec:
             value: {{ .Values.orchard.rateLimit.login | quote }}
           {{- end }}
           {{- end }}
+          {{- if .Values.orchard.purgeSeedData }}
+          - name: ORCHARD_PURGE_SEED_DATA
+            value: "true"
+          {{- end }}
           {{- if .Values.orchard.database.poolSize }}
           - name: ORCHARD_DATABASE_POOL_SIZE
             value: {{ .Values.orchard.database.poolSize | quote }}
@@ -140,6 +144,20 @@ spec:
           - name: ORCHARD_DATABASE_POOL_TIMEOUT
             value: {{ .Values.orchard.database.poolTimeout | quote }}
           {{- end }}
+          {{- if .Values.orchard.pypiCache }}
+          {{- if .Values.orchard.pypiCache.workers }}
+          - name: ORCHARD_PYPI_CACHE_WORKERS
+            value: {{ .Values.orchard.pypiCache.workers | quote }}
+          {{- end }}
+          {{- if .Values.orchard.pypiCache.maxDepth }}
+          - name: ORCHARD_PYPI_CACHE_MAX_DEPTH
+            value: {{ .Values.orchard.pypiCache.maxDepth | quote }}
+          {{- end }}
+          {{- if .Values.orchard.pypiCache.maxAttempts }}
+          - name: ORCHARD_PYPI_CACHE_MAX_ATTEMPTS
+            value: {{ .Values.orchard.pypiCache.maxAttempts | quote }}
+          {{- end }}
+          {{- end }}
          {{- if .Values.orchard.auth }}
          {{- if or .Values.orchard.auth.secretsManager .Values.orchard.auth.existingSecret .Values.orchard.auth.adminPassword }}
          - name: ORCHARD_ADMIN_PASSWORD
|||||||

@@ -59,10 +59,10 @@ ingress:
 resources:
   limits:
     cpu: 500m
-    memory: 512Mi
+    memory: 1Gi
   requests:
     cpu: 200m
-    memory: 512Mi
+    memory: 1Gi
 
 livenessProbe:
   httpGet:
@@ -124,6 +124,12 @@ orchard:
     mode: "presigned"
     presignedUrlExpiry: 3600
 
+  # PyPI Cache Worker settings (reduced workers to limit memory usage)
+  pypiCache:
+    workers: 1
+    maxDepth: 10
+    maxAttempts: 3
+
   # Relaxed rate limits for dev/feature environments (allows integration tests to run)
   rateLimit:
     login: "1000/minute"  # Default is 5/minute, relaxed for CI integration tests
@@ -222,7 +228,7 @@ minioIngress:
   secretName: minio-tls  # Overridden by CI
 
 redis:
-  enabled: false
+  enabled: true
 
 waitForDatabase: true
 

@@ -57,10 +57,10 @@ ingress:
 resources:
   limits:
     cpu: 500m
-    memory: 512Mi
+    memory: 768Mi
   requests:
     cpu: 500m
-    memory: 512Mi
+    memory: 768Mi
 
 livenessProbe:
   httpGet:
@@ -121,6 +121,12 @@ orchard:
     mode: "presigned"
     presignedUrlExpiry: 3600
 
+  # PyPI Cache Worker settings (reduced workers to limit memory usage)
+  pypiCache:
+    workers: 2
+    maxDepth: 10
+    maxAttempts: 3
+
 # PostgreSQL subchart - disabled in prod, using RDS
 postgresql:
   enabled: false
@@ -134,7 +140,7 @@ minioIngress:
   enabled: false
 
 redis:
-  enabled: false
+  enabled: true
 
 waitForDatabase: true
 

@@ -56,10 +56,10 @@ ingress:
 resources:
   limits:
     cpu: 500m
-    memory: 512Mi
+    memory: 768Mi
   requests:
     cpu: 500m
-    memory: 512Mi
+    memory: 768Mi
 
 livenessProbe:
   httpGet:
@@ -91,6 +91,7 @@ affinity: {}
 # Orchard server configuration
 orchard:
   env: "development"  # Allows seed data for testing
+  purgeSeedData: true  # Remove public seed data (npm-public, pypi-public, etc.)
   server:
     host: "0.0.0.0"
     port: 8080
@@ -121,6 +122,12 @@ orchard:
     mode: "presigned"  # presigned, redirect, or proxy
     presignedUrlExpiry: 3600  # Presigned URL expiry in seconds
 
+  # PyPI Cache Worker settings (reduced workers to limit memory usage)
+  pypiCache:
+    workers: 2
+    maxDepth: 10
+    maxAttempts: 3
+
   # Relaxed rate limits for stage (allows CI integration tests to run)
   rateLimit:
     login: "1000/minute"  # Default is 5/minute, relaxed for CI integration tests
@@ -139,7 +146,7 @@ minioIngress:
 
 # Redis subchart configuration (for future caching)
 redis:
-  enabled: false
+  enabled: true
   image:
     registry: containers.global.bsf.tools
     repository: bitnami/redis

@@ -54,10 +54,10 @@ ingress:
 resources:
   limits:
     cpu: 500m
-    memory: 512Mi
+    memory: 768Mi
   requests:
     cpu: 500m
-    memory: 512Mi
+    memory: 768Mi
 
 livenessProbe:
   httpGet:
@@ -120,6 +120,12 @@ orchard:
     mode: "presigned"  # presigned, redirect, or proxy
     presignedUrlExpiry: 3600  # Presigned URL expiry in seconds
 
+  # PyPI Cache Worker settings
+  pypiCache:
+    workers: 2      # Number of concurrent cache workers (reduced to limit memory usage)
+    maxDepth: 10    # Maximum recursion depth for dependency caching
+    maxAttempts: 3  # Maximum retry attempts for failed cache tasks
+
   # Authentication settings
   auth:
     # Option 1: Plain admin password (creates K8s secret)

migrations/010_upstream_caching.sql (new file, 137 lines)

-- Migration 010: Upstream Artifact Caching
-- Adds support for caching artifacts from upstream registries (npm, PyPI, Maven, etc.)
-- Part of "The cache that never forgets" epic for hermetic builds

-- =============================================================================
-- upstream_sources: Configure upstream registries for artifact caching
-- =============================================================================
CREATE TABLE IF NOT EXISTS upstream_sources (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name VARCHAR(255) NOT NULL UNIQUE,
    source_type VARCHAR(50) NOT NULL DEFAULT 'generic',
    url VARCHAR(2048) NOT NULL,
    enabled BOOLEAN NOT NULL DEFAULT FALSE,
    is_public BOOLEAN NOT NULL DEFAULT TRUE,
    auth_type VARCHAR(20) NOT NULL DEFAULT 'none',
    username VARCHAR(255),
    password_encrypted BYTEA,
    headers_encrypted BYTEA,
    priority INTEGER NOT NULL DEFAULT 100,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,

    -- Source type must be one of the supported types
    CONSTRAINT check_source_type CHECK (
        source_type IN ('npm', 'pypi', 'maven', 'docker', 'helm', 'nuget', 'deb', 'rpm', 'generic')
    ),

    -- Auth type must be valid
    CONSTRAINT check_auth_type CHECK (
        auth_type IN ('none', 'basic', 'bearer', 'api_key')
    ),

    -- Priority must be positive
    CONSTRAINT check_priority_positive CHECK (priority > 0)
);

-- Indexes for upstream_sources
CREATE INDEX IF NOT EXISTS idx_upstream_sources_enabled ON upstream_sources(enabled);
CREATE INDEX IF NOT EXISTS idx_upstream_sources_source_type ON upstream_sources(source_type);
CREATE INDEX IF NOT EXISTS idx_upstream_sources_is_public ON upstream_sources(is_public);
CREATE INDEX IF NOT EXISTS idx_upstream_sources_priority ON upstream_sources(priority);

-- Comments for upstream_sources
COMMENT ON TABLE upstream_sources IS 'Configuration for upstream artifact registries (npm, PyPI, Maven, etc.)';
COMMENT ON COLUMN upstream_sources.name IS 'Unique human-readable name (e.g., npm-public, artifactory-private)';
COMMENT ON COLUMN upstream_sources.source_type IS 'Type of registry: npm, pypi, maven, docker, helm, nuget, deb, rpm, generic';
COMMENT ON COLUMN upstream_sources.url IS 'Base URL of the upstream registry';
COMMENT ON COLUMN upstream_sources.enabled IS 'Whether this source is active for caching';
COMMENT ON COLUMN upstream_sources.is_public IS 'True if this is a public internet source (for air-gap mode)';
COMMENT ON COLUMN upstream_sources.auth_type IS 'Authentication type: none, basic, bearer, api_key';
COMMENT ON COLUMN upstream_sources.username IS 'Username for basic auth';
COMMENT ON COLUMN upstream_sources.password_encrypted IS 'Fernet-encrypted password/token';
COMMENT ON COLUMN upstream_sources.headers_encrypted IS 'Fernet-encrypted custom headers (JSON)';
COMMENT ON COLUMN upstream_sources.priority IS 'Priority for source selection (lower = higher priority)';

-- =============================================================================
-- cache_settings: Global cache configuration (singleton table)
-- =============================================================================
CREATE TABLE IF NOT EXISTS cache_settings (
    id INTEGER PRIMARY KEY DEFAULT 1,
    allow_public_internet BOOLEAN NOT NULL DEFAULT TRUE,
    auto_create_system_projects BOOLEAN NOT NULL DEFAULT TRUE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,

    -- Singleton constraint
    CONSTRAINT check_cache_settings_singleton CHECK (id = 1)
);

-- Insert default row
INSERT INTO cache_settings (id, allow_public_internet, auto_create_system_projects)
VALUES (1, TRUE, TRUE)
ON CONFLICT (id) DO NOTHING;

-- Comments for cache_settings
COMMENT ON TABLE cache_settings IS 'Global cache settings (singleton table)';
COMMENT ON COLUMN cache_settings.allow_public_internet IS 'Air-gap mode: when false, blocks all public internet sources';
COMMENT ON COLUMN cache_settings.auto_create_system_projects IS 'Auto-create system projects (_npm, _pypi, etc.) on first cache';

-- =============================================================================
-- cached_urls: Track URL to artifact mappings for provenance
-- =============================================================================
CREATE TABLE IF NOT EXISTS cached_urls (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    url VARCHAR(4096) NOT NULL,
    url_hash VARCHAR(64) NOT NULL,
    artifact_id VARCHAR(64) NOT NULL REFERENCES artifacts(id),
    source_id UUID REFERENCES upstream_sources(id) ON DELETE SET NULL,
    fetched_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
    response_headers JSONB DEFAULT '{}',
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,

    -- URL hash must be unique (same URL = same cached artifact)
    CONSTRAINT unique_url_hash UNIQUE (url_hash)
);

-- Indexes for cached_urls
CREATE INDEX IF NOT EXISTS idx_cached_urls_url_hash ON cached_urls(url_hash);
CREATE INDEX IF NOT EXISTS idx_cached_urls_artifact_id ON cached_urls(artifact_id);
CREATE INDEX IF NOT EXISTS idx_cached_urls_source_id ON cached_urls(source_id);
CREATE INDEX IF NOT EXISTS idx_cached_urls_fetched_at ON cached_urls(fetched_at);

-- Comments for cached_urls
COMMENT ON TABLE cached_urls IS 'Tracks which URLs have been cached and maps to artifacts';
COMMENT ON COLUMN cached_urls.url IS 'Original URL that was fetched';
COMMENT ON COLUMN cached_urls.url_hash IS 'SHA256 hash of URL for fast lookup';
COMMENT ON COLUMN cached_urls.artifact_id IS 'The cached artifact (by SHA256 content hash)';
COMMENT ON COLUMN cached_urls.source_id IS 'Which upstream source provided this (null if manual)';
COMMENT ON COLUMN cached_urls.fetched_at IS 'When the URL was fetched from upstream';
COMMENT ON COLUMN cached_urls.response_headers IS 'Original response headers from upstream (for debugging)';

-- =============================================================================
-- Add is_system column to projects table for system cache projects
-- =============================================================================
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'projects' AND column_name = 'is_system'
    ) THEN
        ALTER TABLE projects ADD COLUMN is_system BOOLEAN NOT NULL DEFAULT FALSE;
        CREATE INDEX IF NOT EXISTS idx_projects_is_system ON projects(is_system);
    END IF;
END $$;

COMMENT ON COLUMN projects.is_system IS 'True for system cache projects (_npm, _pypi, etc.)';

-- =============================================================================
-- Seed default upstream sources (disabled by default for safety)
-- =============================================================================
INSERT INTO upstream_sources (id, name, source_type, url, enabled, is_public, auth_type, priority)
VALUES
    (gen_random_uuid(), 'npm-public', 'npm', 'https://registry.npmjs.org', FALSE, TRUE, 'none', 100),
    (gen_random_uuid(), 'pypi-public', 'pypi', 'https://pypi.org/simple', FALSE, TRUE, 'none', 100),
    (gen_random_uuid(), 'maven-central', 'maven', 'https://repo1.maven.org/maven2', FALSE, TRUE, 'none', 100),
    (gen_random_uuid(), 'docker-hub', 'docker', 'https://registry-1.docker.io', FALSE, TRUE, 'none', 100)
ON CONFLICT (name) DO NOTHING;
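
To illustrate the provenance flow this migration enables, here is a minimal sketch of the cached_urls lookup: hash the original URL, check for an existing mapping, and record a new one on a cache miss. This is illustrative only; the actual Orchard server code is not part of this diff (and is presumably not TypeScript), and the DATABASE_URL connection handling is an assumption.

// Sketch of the cached_urls lookup flow implied by migration 010 (assumptions noted above).
import { createHash } from 'node:crypto';
import { Pool } from 'pg';

const pool = new Pool({ connectionString: process.env.DATABASE_URL });

// url_hash is the SHA-256 of the original URL, so repeated fetches of the
// same URL resolve to the same cached artifact without re-downloading.
function urlHash(url: string): string {
  return createHash('sha256').update(url).digest('hex');
}

async function findCachedArtifact(url: string): Promise<string | null> {
  const { rows } = await pool.query<{ artifact_id: string }>(
    'SELECT artifact_id FROM cached_urls WHERE url_hash = $1',
    [urlHash(url)],
  );
  return rows.length > 0 ? rows[0].artifact_id : null;
}

async function recordCachedUrl(url: string, artifactId: string, sourceId: string | null): Promise<void> {
  // ON CONFLICT mirrors the unique_url_hash constraint: the first fetch wins.
  await pool.query(
    `INSERT INTO cached_urls (url, url_hash, artifact_id, source_id)
     VALUES ($1, $2, $3, $4)
     ON CONFLICT (url_hash) DO NOTHING`,
    [url, urlHash(url), artifactId, sourceId],
  );
}

Because artifact_id references the content-addressed artifacts table, two different URLs that serve identical bytes can map to the same stored artifact.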

migrations/011_pypi_cache_tasks.sql (new file, 55 lines)

-- Migration: 011_pypi_cache_tasks
-- Description: Add table for tracking PyPI dependency caching tasks
-- Date: 2026-02-02

-- Table for tracking PyPI cache tasks with retry support
CREATE TABLE pypi_cache_tasks (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    -- What to cache
    package_name VARCHAR(255) NOT NULL,
    version_constraint VARCHAR(255),

    -- Origin tracking
    parent_task_id UUID REFERENCES pypi_cache_tasks(id) ON DELETE SET NULL,
    depth INTEGER NOT NULL DEFAULT 0,
    triggered_by_artifact VARCHAR(64) REFERENCES artifacts(id) ON DELETE SET NULL,

    -- Status
    status VARCHAR(20) NOT NULL DEFAULT 'pending',
    attempts INTEGER NOT NULL DEFAULT 0,
    max_attempts INTEGER NOT NULL DEFAULT 3,

    -- Results
    cached_artifact_id VARCHAR(64) REFERENCES artifacts(id) ON DELETE SET NULL,
    error_message TEXT,

    -- Timing
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    started_at TIMESTAMP WITH TIME ZONE,
    completed_at TIMESTAMP WITH TIME ZONE,
    next_retry_at TIMESTAMP WITH TIME ZONE,

    -- Constraints
    CONSTRAINT check_task_status CHECK (status IN ('pending', 'in_progress', 'completed', 'failed')),
    CONSTRAINT check_depth_non_negative CHECK (depth >= 0),
    CONSTRAINT check_attempts_non_negative CHECK (attempts >= 0)
);

-- Index for finding tasks ready to process (pending with retry time passed)
CREATE INDEX idx_pypi_cache_tasks_status_retry ON pypi_cache_tasks(status, next_retry_at);

-- Index for deduplication check (is this package already queued?)
CREATE INDEX idx_pypi_cache_tasks_package_status ON pypi_cache_tasks(package_name, status);

-- Index for tracing dependency chains
CREATE INDEX idx_pypi_cache_tasks_parent ON pypi_cache_tasks(parent_task_id);

-- Index for finding tasks by artifact that triggered them
CREATE INDEX idx_pypi_cache_tasks_triggered_by ON pypi_cache_tasks(triggered_by_artifact);

-- Index for finding tasks by cached artifact
CREATE INDEX idx_pypi_cache_tasks_cached_artifact ON pypi_cache_tasks(cached_artifact_id);

-- Index for sorting by depth and creation time (processing order)
CREATE INDEX idx_pypi_cache_tasks_depth_created ON pypi_cache_tasks(depth, created_at);
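
The table and its indexes suggest a simple polled work queue: workers pick pending tasks whose retry time has passed, ordered by depth and creation time, and either complete them or reschedule them until max_attempts is exhausted. The sketch below shows one way such a claim-and-retry loop could be written; it is an assumption for illustration only, not the Orchard worker implementation (which is not part of this diff), and the 60-second retry backoff is invented for the example.

// Illustrative-only sketch of claiming and retrying pypi_cache_tasks rows.
import { Pool } from 'pg';

const pool = new Pool({ connectionString: process.env.DATABASE_URL });

interface CacheTask {
  id: string;
  package_name: string;
  version_constraint: string | null;
  depth: number;
  attempts: number;
  max_attempts: number;
}

// Claim one pending task whose retry time has passed. FOR UPDATE SKIP LOCKED
// lets several workers (e.g. ORCHARD_PYPI_CACHE_WORKERS > 1) poll without contention.
async function claimTask(): Promise<CacheTask | null> {
  const { rows } = await pool.query<CacheTask>(
    `UPDATE pypi_cache_tasks
        SET status = 'in_progress', started_at = NOW(), attempts = attempts + 1
      WHERE id = (
        SELECT id FROM pypi_cache_tasks
         WHERE status = 'pending'
           AND (next_retry_at IS NULL OR next_retry_at <= NOW())
         ORDER BY depth, created_at
         FOR UPDATE SKIP LOCKED
         LIMIT 1
      )
      RETURNING id, package_name, version_constraint, depth, attempts, max_attempts`,
  );
  return rows[0] ?? null;
}

// On failure, either schedule a retry or mark the task permanently failed.
async function markFailed(task: CacheTask, error: string): Promise<void> {
  if (task.attempts >= task.max_attempts) {
    await pool.query(
      `UPDATE pypi_cache_tasks
          SET status = 'failed', error_message = $2, completed_at = NOW()
        WHERE id = $1`,
      [task.id, error],
    );
  } else {
    await pool.query(
      `UPDATE pypi_cache_tasks
          SET status = 'pending', error_message = $2, next_retry_at = NOW() + INTERVAL '60 seconds'
        WHERE id = $1`,
      [task.id, error],
    );
  }
}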

migrations/012_remove_tags.sql (new file, 33 lines)

-- Migration: Remove tag system
-- Date: 2026-02-03
-- Description: Remove tags table and related objects, keeping only versions for artifact references

-- Drop triggers on tags table
DROP TRIGGER IF EXISTS tags_ref_count_insert_trigger ON tags;
DROP TRIGGER IF EXISTS tags_ref_count_delete_trigger ON tags;
DROP TRIGGER IF EXISTS tags_ref_count_update_trigger ON tags;
DROP TRIGGER IF EXISTS tags_updated_at_trigger ON tags;
DROP TRIGGER IF EXISTS tag_changes_trigger ON tags;

-- Drop the tag change tracking function
DROP FUNCTION IF EXISTS track_tag_changes();

-- Remove tag_constraint from artifact_dependencies
-- First drop the constraint that requires either version or tag
ALTER TABLE artifact_dependencies DROP CONSTRAINT IF EXISTS check_constraint_type;

-- Remove the tag_constraint column
ALTER TABLE artifact_dependencies DROP COLUMN IF EXISTS tag_constraint;

-- Make version_constraint NOT NULL (now the only option)
UPDATE artifact_dependencies SET version_constraint = '*' WHERE version_constraint IS NULL;
ALTER TABLE artifact_dependencies ALTER COLUMN version_constraint SET NOT NULL;

-- Drop tag_history table first (depends on tags)
DROP TABLE IF EXISTS tag_history;

-- Drop tags table
DROP TABLE IF EXISTS tags;

-- Rename uploads.tag_name to uploads.version (historical data field)
ALTER TABLE uploads RENAME COLUMN tag_name TO version;

provisioners/modules/aws-s3/.devcontainer/devcontainer.json (new file, 19 lines)

{
  "name": "EC2 Provisioner Dev Container",
  "image": "registry.global.bsf.tools/esv/bsf/bsf-integration/dev-env-setup/provisioner_image:v0.18.1",
  "mounts": [
    "source=${localEnv:HOME}/.ssh,target=/home/user/.ssh,type=bind,consistency=cached",
    "source=${localEnv:HOME}/.okta,target=/home/user/.okta,type=bind,consistency=cached",
    "source=${localEnv:HOME}/.netrc,target=/home/user/.netrc,type=bind,consistency=cached"
  ],
  "forwardPorts": [
    8000
  ],
  "runArgs": [
    "--network=host"
  ],
  "containerUser": "ubuntu",
  "remoteUser": "ubuntu",
  "updateRemoteUserUID": true,
  "onCreateCommand": "sudo usermod -s /bin/bash ubuntu"
}

provisioners/modules/aws-s3/data.tf (new file, 70 lines)

data "aws_caller_identity" "current" {}

# Main S3 bucket policy to reject non-HTTPS (insecure) requests
data "aws_iam_policy_document" "s3_reject_https_policy" {
  statement {
    sid    = "s3RejectHTTPS"
    effect = "Deny"

    principals {
      type        = "*"
      identifiers = ["*"]
    }

    actions = ["s3:*"]

    resources = [
      aws_s3_bucket.s3_bucket.arn,
      "${aws_s3_bucket.s3_bucket.arn}/*",
    ]

    condition {
      test     = "Bool"
      variable = "aws:SecureTransport"
      values   = ["false"]
    }
  }
}

# Logging bucket policy to reject non-HTTPS requests and accept access logs
data "aws_iam_policy_document" "logging_bucket_policy" {
  statement {
    principals {
      identifiers = ["logging.s3.amazonaws.com"]
      type        = "Service"
    }

    actions = ["s3:PutObject"]

    resources = ["${aws_s3_bucket.logging.arn}/*"]

    condition {
      test     = "StringEquals"
      variable = "aws:SourceAccount"
      values   = [data.aws_caller_identity.current.account_id]
    }
  }

  statement {
    sid    = "loggingRejectHTTPS"
    effect = "Deny"

    principals {
      type        = "*"
      identifiers = ["*"]
    }

    actions = ["s3:*"]

    resources = [
      aws_s3_bucket.logging.arn,
      "${aws_s3_bucket.logging.arn}/*"
    ]

    condition {
      test     = "Bool"
      variable = "aws:SecureTransport"
      values   = ["false"]
    }
  }
}

provisioners/modules/aws-s3/main.tf (new file, 12 lines)

terraform {
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = ">= 6.28"
    }
  }
}

provider "aws" {
  region = "us-gov-west-1"
}

provisioners/modules/aws-s3/s3.tf (new file, 137 lines)

# Disable warnings about MFA delete and IAM access analyzer (currently cannot support them)
# kics-scan disable=c5b31ab9-0f26-4a49-b8aa-4cc064392f4d,e592a0c5-5bdb-414c-9066-5dba7cdea370

# Bucket to actually store artifacts
resource "aws_s3_bucket" "s3_bucket" {
  bucket = var.bucket

  tags = {
    Name        = "Orchard S3 Provisioning Bucket"
    Environment = var.environment
  }
}

# Control public access
resource "aws_s3_bucket_public_access_block" "s3_bucket_public_access_block" {
  bucket = aws_s3_bucket.s3_bucket.id

  block_public_acls       = true
  block_public_policy     = true
  ignore_public_acls      = true
  restrict_public_buckets = true
}

/*
Our lifecycle rule is as follows:
- Standard storage
  -> OneZone IA storage after 30 days
  -> Glacier storage after 180 days
*/
resource "aws_s3_bucket_lifecycle_configuration" "s3_bucket_lifecycle_configuration" {
  bucket = aws_s3_bucket.s3_bucket.id

  rule {
    id = "Standard to OneZone"

    filter {}

    status = "Enabled"

    transition {
      days          = 30
      storage_class = "ONEZONE_IA"
    }
  }

  rule {
    id = "OneZone to Glacier"

    filter {}

    status = "Enabled"

    transition {
      days          = 180
      storage_class = "GLACIER"
    }
  }
}

# Enable versioning but without MFA delete enabled
resource "aws_s3_bucket_versioning" "s3_bucket_versioning" {
  bucket = aws_s3_bucket.s3_bucket.id

  versioning_configuration {
    status = "Enabled"
  }
}

# Give preference to the bucket owner
resource "aws_s3_bucket_ownership_controls" "s3_bucket_ownership_controls" {
  bucket = aws_s3_bucket.s3_bucket.id

  rule {
    object_ownership = "BucketOwnerPreferred"
  }
}

# Set access control list to private
resource "aws_s3_bucket_acl" "s3_bucket_acl" {
  depends_on = [aws_s3_bucket_ownership_controls.s3_bucket_ownership_controls]

  bucket = aws_s3_bucket.s3_bucket.id
  acl    = var.acl
}

# Bucket for logging
resource "aws_s3_bucket" "logging" {
  bucket = "orchard-logging-bucket"

  tags = {
    Name        = "Orchard S3 Logging Bucket"
    Environment = var.environment
  }
}

# Versioning for the logging bucket
resource "aws_s3_bucket_versioning" "orchard_logging_bucket_versioning" {
  bucket = aws_s3_bucket.logging.id

  versioning_configuration {
    status = "Enabled"
  }
}

# Policies for the main s3 bucket and the logging bucket
resource "aws_s3_bucket_policy" "s3_bucket_https_policy" {
  bucket = aws_s3_bucket.s3_bucket.id
  policy = data.aws_iam_policy_document.s3_reject_https_policy.json
}

resource "aws_s3_bucket_policy" "logging_policy" {
  bucket = aws_s3_bucket.logging.bucket
  policy = data.aws_iam_policy_document.logging_bucket_policy.json
}

# Set up the logging bucket with folders holding logs for both buckets
resource "aws_s3_bucket_logging" "s3_bucket_logging" {
  bucket = aws_s3_bucket.s3_bucket.bucket

  target_bucket = aws_s3_bucket.logging.bucket
  target_prefix = "s3_log/"
  target_object_key_format {
    partitioned_prefix {
      partition_date_source = "EventTime"
    }
  }
}

resource "aws_s3_bucket_logging" "logging_bucket_logging" {
  bucket = aws_s3_bucket.logging.bucket

  target_bucket = aws_s3_bucket.logging.bucket
  target_prefix = "log/"
  target_object_key_format {
    partitioned_prefix {
      partition_date_source = "EventTime"
    }
  }
}

provisioners/modules/aws-s3/variables.tf (new file, 17 lines)

variable "bucket" {
  description = "Name of the S3 bucket"
  type        = string
  default     = "orchard-provisioning-bucket"
}

variable "acl" {
  description = "Access control list for the bucket"
  type        = string
  default     = "private"
}

variable "environment" {
  description = "Environment of the bucket"
  type        = string
  default     = "Development"
}