Fix remaining tag references in tests

- Update CacheRequest test to use version field
- Fix upload_test_file calls that still used tag parameter
- Update artifact history test to check versions instead of tags
- Update artifact stats tests to check versions instead of tags
- Fix garbage collection tests to delete versions instead of tags
- Remove TestGlobalTags class (endpoint removed)
- Update project/package stats tests to check version_count
- Fix upload_test_file fixture in test_download_verification

Author: Mondo Diaz
Date:   2026-02-03 12:51:31 -06:00
Parent: 87f30ea898
Commit: 9a95421064
7 changed files with 40 additions and 84 deletions


@@ -46,27 +46,27 @@ class TestArtifactRetrieval:
         assert response.status_code == 404

     @pytest.mark.integration
-    def test_artifact_includes_tags(self, integration_client, test_package):
-        """Test artifact response includes tags pointing to it."""
+    def test_artifact_includes_versions(self, integration_client, test_package):
+        """Test artifact response includes versions pointing to it."""
         project_name, package_name = test_package
-        content = b"artifact with tags test"
+        content = b"artifact with versions test"
         expected_hash = compute_sha256(content)

         upload_test_file(
-            integration_client, project_name, package_name, content, version="tagged-v1"
+            integration_client, project_name, package_name, content, version="1.0.0"
         )

         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
         assert response.status_code == 200

         data = response.json()
-        assert "tags" in data
-        assert len(data["tags"]) >= 1
-        tag = data["tags"][0]
-        assert "name" in tag
-        assert "package_name" in tag
-        assert "project_name" in tag
+        assert "versions" in data
+        assert len(data["versions"]) >= 1
+        version = data["versions"][0]
+        assert "version" in version
+        assert "package_name" in version
+        assert "project_name" in version


 class TestArtifactStats:
@@ -94,7 +94,7 @@ class TestArtifactStats:
assert "size" in data assert "size" in data
assert "ref_count" in data assert "ref_count" in data
assert "storage_savings" in data assert "storage_savings" in data
assert "tags" in data assert "versions" in data
assert "projects" in data assert "projects" in data
assert "packages" in data assert "packages" in data
@@ -203,7 +203,7 @@ class TestArtifactProvenance:
assert "first_uploaded_by" in data assert "first_uploaded_by" in data
assert "upload_count" in data assert "upload_count" in data
assert "packages" in data assert "packages" in data
assert "tags" in data assert "versions" in data
assert "uploads" in data assert "uploads" in data
@pytest.mark.integration @pytest.mark.integration
@@ -214,17 +214,17 @@ class TestArtifactProvenance:
         assert response.status_code == 404

     @pytest.mark.integration
-    def test_artifact_history_with_tag(self, integration_client, test_package):
-        """Test artifact history includes tag information when tagged."""
+    def test_artifact_history_with_version(self, integration_client, test_package):
+        """Test artifact history includes version information when versioned."""
         project_name, package_name = test_package
         upload_result = upload_test_file(
             integration_client,
             project_name,
             package_name,
-            b"tagged provenance test",
-            "tagged.txt",
-            tag="v1.0.0",
+            b"versioned provenance test",
+            "versioned.txt",
+            version="v1.0.0",
         )
         artifact_id = upload_result["artifact_id"]
@@ -232,12 +232,12 @@ class TestArtifactProvenance:
         assert response.status_code == 200
         data = response.json()

-        assert len(data["tags"]) >= 1
-        tag = data["tags"][0]
-        assert "project_name" in tag
-        assert "package_name" in tag
-        assert "tag_name" in tag
+        assert len(data["versions"]) >= 1
+        version = data["versions"][0]
+        assert "project_name" in version
+        assert "package_name" in version
+        assert "version" in version


 class TestArtifactUploads:
@@ -306,24 +306,24 @@ class TestOrphanedArtifacts:
         assert len(response.json()) <= 5

     @pytest.mark.integration
-    def test_artifact_becomes_orphaned_when_tag_deleted(
+    def test_artifact_becomes_orphaned_when_version_deleted(
         self, integration_client, test_package, unique_test_id
     ):
-        """Test artifact appears in orphaned list after tag is deleted."""
+        """Test artifact appears in orphaned list after version is deleted."""
         project, package = test_package
         content = f"orphan test {unique_test_id}".encode()
         expected_hash = compute_sha256(content)

-        # Upload with tag
-        upload_test_file(integration_client, project, package, content, version="temp-tag")
+        # Upload with version
+        upload_test_file(integration_client, project, package, content, version="1.0.0-temp")

         # Verify not in orphaned list
         response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
         orphaned_ids = [a["id"] for a in response.json()]
         assert expected_hash not in orphaned_ids

-        # Delete the tag
-        integration_client.delete(f"/api/v1/project/{project}/{package}/tags/temp-tag")
+        # Delete the version
+        integration_client.delete(f"/api/v1/project/{project}/{package}/versions/1.0.0-temp")

         # Verify now in orphaned list
         response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
@@ -356,9 +356,9 @@ class TestGarbageCollection:
content = f"dry run test {unique_test_id}".encode() content = f"dry run test {unique_test_id}".encode()
expected_hash = compute_sha256(content) expected_hash = compute_sha256(content)
# Upload and delete tag to create orphan # Upload and delete version to create orphan
upload_test_file(integration_client, project, package, content, version="dry-run") upload_test_file(integration_client, project, package, content, version="1.0.0-dryrun")
integration_client.delete(f"/api/v1/project/{project}/{package}/tags/dry-run") integration_client.delete(f"/api/v1/project/{project}/{package}/versions/1.0.0-dryrun")
# Verify artifact exists # Verify artifact exists
response = integration_client.get(f"/api/v1/artifact/{expected_hash}") response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -534,50 +534,6 @@ class TestGlobalArtifacts:
         assert response.status_code == 400


-class TestGlobalTags:
-    """Tests for global tags endpoint."""
-
-    @pytest.mark.integration
-    def test_global_tags_returns_200(self, integration_client):
-        """Test global tags endpoint returns 200."""
-        response = integration_client.get("/api/v1/tags")
-        assert response.status_code == 200
-        data = response.json()
-        assert "items" in data
-        assert "pagination" in data
-
-    @pytest.mark.integration
-    def test_global_tags_pagination(self, integration_client):
-        """Test global tags endpoint respects pagination."""
-        response = integration_client.get("/api/v1/tags?limit=5&page=1")
-        assert response.status_code == 200
-        data = response.json()
-        assert len(data["items"]) <= 5
-        assert data["pagination"]["limit"] == 5
-
-    @pytest.mark.integration
-    def test_global_tags_has_project_context(self, integration_client):
-        """Test global tags response includes project/package context."""
-        response = integration_client.get("/api/v1/tags?limit=1")
-        assert response.status_code == 200
-        data = response.json()
-        if len(data["items"]) > 0:
-            item = data["items"][0]
-            assert "project_name" in item
-            assert "package_name" in item
-            assert "artifact_id" in item
-
-    @pytest.mark.integration
-    def test_global_tags_search_with_wildcard(self, integration_client):
-        """Test global tags search supports wildcards."""
-        response = integration_client.get("/api/v1/tags?search=v*")
-        assert response.status_code == 200
-        # Just verify it doesn't error; results may vary
-
-
 class TestAuditLogs:
     """Tests for global audit logs endpoint."""


@@ -450,7 +450,7 @@ class TestConcurrentDownloads:
             content, expected_hash = generate_content_with_hash(1024, seed=700 + i)
             upload_test_file(
                 integration_client, project, package, content,
-                tag=f"multi-download-{i}"
+                version=f"multi-download-{i}"
             )
             uploads.append((f"multi-download-{i}", content))


@@ -175,7 +175,7 @@ class TestPackageStats:
assert "package_id" in data assert "package_id" in data
assert "package_name" in data assert "package_name" in data
assert "project_name" in data assert "project_name" in data
assert "tag_count" in data assert "version_count" in data
assert "artifact_count" in data assert "artifact_count" in data
assert "total_size_bytes" in data assert "total_size_bytes" in data
assert "upload_count" in data assert "upload_count" in data


@@ -149,7 +149,7 @@ class TestProjectStats:
assert "project_id" in data assert "project_id" in data
assert "project_name" in data assert "project_name" in data
assert "package_count" in data assert "package_count" in data
assert "tag_count" in data assert "version_count" in data
assert "artifact_count" in data assert "artifact_count" in data
assert "total_size_bytes" in data assert "total_size_bytes" in data
assert "upload_count" in data assert "upload_count" in data


@@ -282,7 +282,7 @@ class TestDuplicateUploads:
             package,
             content,
             filename="file1.bin",
-            tag="v1",
+            version="v1",
         )
         assert result1["artifact_id"] == expected_hash
@@ -293,7 +293,7 @@ class TestDuplicateUploads:
             package,
             content,
             filename="file2.bin",
-            tag="v2",
+            version="v2",
         )
         assert result2["artifact_id"] == expected_hash
         assert result2["deduplicated"] is True


@@ -29,13 +29,13 @@ def upload_test_file(integration_client):
         artifact_id = upload_test_file(project, package, content, version="v1.0")
     """

-    def _upload(project_name: str, package_name: str, content: bytes, tag: str = None):
+    def _upload(project_name: str, package_name: str, content: bytes, version: str = None):
         files = {
             "file": ("test-file.bin", io.BytesIO(content), "application/octet-stream")
         }
         data = {}
-        if tag:
-            data["tag"] = tag
+        if version:
+            data["version"] = version
         response = integration_client.post(
             f"/api/v1/project/{project_name}/{package_name}/upload",


@@ -1132,7 +1132,7 @@ class TestCacheRequestValidation:
url="https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", url="https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
source_type="npm", source_type="npm",
package_name="lodash", package_name="lodash",
tag="4.17.21", version="4.17.21",
) )
assert request.url == "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" assert request.url == "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz"
assert request.source_type == "npm" assert request.source_type == "npm"