Loading...
Loading...
Expert Harbor container registry administrator specializing in registry operations, vulnerability scanning with Trivy, artifact signing with Notary, RBAC, and multi-region replication. Use when managing container registries, implementing security policies, configuring image scanning, or setting up disaster recovery.
npx skill4agent add martinholovsky/claude-skills-generator harbor-expert

# docker-compose.yml - Production Harbor with external database
version: '3.8'

services:
  registry:
    image: goharbor/registry-photon:v2.10.0
    restart: always
    volumes:
      - /data/registry:/storage
    networks:
      - harbor
    # NOTE(review): postgresql/redis are not defined as services in this file
    # (the header says external database) — `depends_on` entries naming
    # undefined services fail `docker compose config`; confirm whether these
    # should be removed or the services added.
    depends_on:
      - postgresql
      - redis

  core:
    image: goharbor/harbor-core:v2.10.0
    restart: always
    env_file:
      - ./harbor.env
    environment:
      CORE_SECRET: ${CORE_SECRET}
      JOBSERVICE_SECRET: ${JOBSERVICE_SECRET}
    volumes:
      - /data/ca_download:/etc/core/ca
    networks:
      - harbor
    depends_on:
      - postgresql
      - redis

  jobservice:
    image: goharbor/harbor-jobservice:v2.10.0
    restart: always
    env_file:
      - ./harbor.env
    volumes:
      - /data/job_logs:/var/log/jobs
    networks:
      - harbor

  trivy:
    image: goharbor/trivy-adapter-photon:v2.10.0
    restart: always
    environment:
      SCANNER_TRIVY_VULN_TYPE: "os,library"
      SCANNER_TRIVY_SEVERITY: "UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL"
      SCANNER_TRIVY_TIMEOUT: "10m"
    networks:
      - harbor

  notary-server:
    image: goharbor/notary-server-photon:v2.10.0
    restart: always
    env_file:
      - ./notary.env
    networks:
      - harbor

  nginx:
    image: goharbor/nginx-photon:v2.10.0
    restart: always
    ports:
      # Quoted to avoid YAML's sexagesimal-integer trap on HOST:CONTAINER.
      - "443:8443"
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf:ro
      - /data/cert:/etc/nginx/cert:ro
    networks:
      - harbor

networks:
  harbor:
    driver: bridge

# harbor.env - Core configuration
POSTGRESQL_HOST=postgres.example.com
POSTGRESQL_PORT=5432
POSTGRESQL_DATABASE=registry
POSTGRESQL_USERNAME=harbor
POSTGRESQL_PASSWORD=${DB_PASSWORD}
POSTGRESQL_SSLMODE=require
REDIS_HOST=redis.example.com:6379
REDIS_PASSWORD=${REDIS_PASSWORD}
REDIS_DB_INDEX=0
HARBOR_ADMIN_PASSWORD=${ADMIN_PASSWORD}
REGISTRY_STORAGE_PROVIDER_NAME=s3
REGISTRY_STORAGE_PROVIDER_CONFIG={"bucket":"harbor-artifacts","region":"us-east-1"}

# Configure Trivy scanner via Harbor API
curl -X POST "https://harbor.example.com/api/v2.0/scanners" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{
"name": "Trivy",
"url": "http://trivy:8080",
"description": "Primary vulnerability scanner",
"vendor": "Aqua Security",
"version": "0.48.0"
}'
# Set scanner as default
curl -X PATCH "https://harbor.example.com/api/v2.0/scanners/1" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{"is_default": true}'// Project-level CVE policy
{
"cve_allowlist": {
"items": [
{
"cve_id": "CVE-2023-12345"
}
],
"expires_at": 1735689600
},
"severity": "high",
"scan_on_push": true,
"prevent_vulnerable": true,
"auto_scan": true
}

{
"deployment_policy": {
"vulnerability_severity": "critical",
"signature_enabled": true
}
}

/home/user/ai-coding/new-skills/harbor-expert/references/security-scanning.md

# Create robot account with scoped permissions
curl -X POST "https://harbor.example.com/api/v2.0/projects/library/robots" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{
"name": "github-actions",
"description": "CI/CD pipeline for GitHub Actions",
"duration": 90,
"level": "project",
"disable": false,
"permissions": [
{
"kind": "project",
"namespace": "library",
"access": [
{"resource": "repository", "action": "pull"},
{"resource": "repository", "action": "push"},
{"resource": "artifact", "action": "read"}
]
}
]
}'

{
"id": 1,
"name": "robot$github-actions",
"secret": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9...",
"expires_at": 1735689600,
"level": "project"
}

# .github/workflows/build.yml
- name: Login to Harbor
uses: docker/login-action@v3
with:
registry: harbor.example.com
username: robot$github-actions
password: ${{ secrets.HARBOR_ROBOT_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v5
with:
push: true
tags: harbor.example.com/library/app:${{ github.sha }}

# Create replication endpoint
curl -X POST "https://harbor.example.com/api/v2.0/registries" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{
"name": "harbor-eu",
"url": "https://harbor-eu.example.com",
"credential": {
"access_key": "robot$replication",
"access_secret": "token_here"
},
"type": "harbor",
"insecure": false
}'
# Create pull-based replication rule
curl -X POST "https://harbor.example.com/api/v2.0/replication/policies" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{
"name": "replicate-production",
"description": "Pull production images from primary",
"src_registry": {
"id": 1
},
"dest_namespace": "production",
"trigger": {
"type": "scheduled",
"trigger_settings": {
"cron": "0 2 * * *"
}
},
"filters": [
{
"type": "name",
"value": "library/app-*"
},
{
"type": "tag",
"value": "v*"
},
{
"type": "label",
"value": "environment=production"
}
],
"deletion": false,
"override": true,
"enabled": true,
"speed": 0
}'

/home/user/ai-coding/new-skills/harbor-expert/references/replication-guide.md

# Enable content trust in Harbor project settings
curl -X PUT "https://harbor.example.com/api/v2.0/projects/1/metadata/enable_content_trust" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{"enable_content_trust": "true"}'
# Sign image with Cosign (keyless with OIDC)
export COSIGN_EXPERIMENTAL=1
cosign sign --oidc-issuer https://token.actions.githubusercontent.com \
harbor.example.com/library/app:v1.0.0
# Verify signature
cosign verify --certificate-identity-regexp "https://github.com/example/*" \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
harbor.example.com/library/app:v1.0.0
# Attach SBOM
cosign attach sbom --sbom sbom.spdx.json \
harbor.example.com/library/app:v1.0.0

# Kyverno policy: admit only Harbor library images carrying a valid
# keyless (Fulcio/Rekor) signature from the GitHub Actions OIDC issuer.
apiVersion: kyverno.io/v1
kind: ClusterPolicy
metadata:
  name: verify-harbor-images
spec:
  validationFailureAction: Enforce
  background: false
  rules:
    - name: verify-signature
      match:
        any:
          - resources:
              kinds: [Pod]
      verifyImages:
        - imageReferences:
            - "harbor.example.com/library/*"
          attestors:
            - count: 1
              entries:
                - keyless:
                    subject: "https://github.com/example/*"
                    issuer: "https://token.actions.githubusercontent.com"
                    rekor:
                      url: https://rekor.sigstore.dev

# Configure retention policy
curl -X POST "https://harbor.example.com/api/v2.0/projects/library/retentions" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{
"rules": [
{
"disabled": false,
"action": "retain",
"template": "latestPushedK",
"params": {
"latestPushedK": 10
},
"tag_selectors": [
{
"kind": "doublestar",
"decoration": "matches",
"pattern": "v*"
}
],
"scope_selectors": {
"repository": [
{
"kind": "doublestar",
"decoration": "repoMatches",
"pattern": "**"
}
]
}
},
{
"disabled": false,
"action": "retain",
"template": "nDaysSinceLastPush",
"params": {
"nDaysSinceLastPush": 90
},
"tag_selectors": [
{
"kind": "doublestar",
"decoration": "matches",
"pattern": "main-*"
}
]
}
],
"algorithm": "or",
"trigger": {
"kind": "Schedule",
"settings": {
"cron": "0 0 * * 0"
}
}
}'
# Enable tag immutability for production
curl -X POST "https://harbor.example.com/api/v2.0/projects/library/immutabletagrules" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{
"tag_selectors": [
{
"kind": "doublestar",
"decoration": "matches",
"pattern": "v*.*.*"
}
],
"scope_selectors": {
"repository": [
{
"kind": "doublestar",
"decoration": "repoMatches",
"pattern": "production/**"
}
]
}
}'

# Configure webhook for vulnerability scan results
curl -X POST "https://harbor.example.com/api/v2.0/projects/library/webhook/policies" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{
"name": "notify-security-team",
"description": "Alert on critical vulnerabilities",
"enabled": true,
"event_types": [
"SCANNING_COMPLETED",
"SCANNING_FAILED"
],
"targets": [
{
"type": "http",
"address": "https://slack.com/api/webhooks/xxx",
"skip_cert_verify": false,
"payload_format": "CloudEvents"
}
]
}'

{
"specversion": "1.0",
"type": "harbor.scanning.completed",
"source": "harbor.example.com",
"id": "unique-id",
"time": "2024-01-15T10:30:00Z",
"data": {
"repository": "library/app",
"tag": "v1.0.0",
"scan_overview": {
"severity": "High",
"total_count": 5,
"fixable_count": 3,
"summary": {
"Critical": 0,
"High": 5,
"Medium": 12
}
}
}
}

# tests/test_harbor_config.py
import pytest
import requests
from unittest.mock import patch, MagicMock

# NOTE(review): validate_vulnerability_policy, validate_robot_permissions,
# validate_replication_policy and HarborClient are referenced below but never
# imported in this snippet — presumably they come from harbor_client (the
# companion test file imports validators from there); confirm and import.


class TestHarborProjectConfiguration:
    """Test Harbor project settings before implementation."""

    def test_project_vulnerability_policy_blocks_critical(self):
        """Test that CVE policy blocks critical vulnerabilities."""
        # Arrange
        project_config = {
            "prevent_vulnerable": True,
            "severity": "critical",
            "scan_on_push": True
        }
        # Act
        result = validate_vulnerability_policy(project_config)
        # Assert
        assert result["blocks_critical"] == True
        assert result["scan_enabled"] == True

    def test_robot_account_follows_least_privilege(self):
        """Test robot account has minimal required permissions."""
        # Arrange
        robot_permissions = {
            "namespace": "library",
            "access": [
                {"resource": "repository", "action": "pull"},
                {"resource": "repository", "action": "push"}
            ]
        }
        # Act
        result = validate_robot_permissions(robot_permissions)
        # Assert
        assert result["is_scoped"] == True
        assert result["has_admin"] == False
        assert len(result["permissions"]) <= 3

    def test_replication_policy_has_filters(self):
        """Test replication policy includes proper filters."""
        # Arrange
        replication_config = {
            "filters": [
                {"type": "name", "value": "library/app-*"},
                {"type": "tag", "value": "v*"}
            ],
            "trigger": {"type": "scheduled"}
        }
        # Act
        result = validate_replication_policy(replication_config)
        # Assert
        assert result["has_name_filter"] == True
        assert result["has_tag_filter"] == True
        assert result["is_scheduled"] == True


class TestHarborAPIIntegration:
    """Integration tests for Harbor API operations."""

    @pytest.fixture
    def harbor_client(self):
        """Create Harbor API client for testing."""
        return HarborClient(
            url="https://harbor.example.com",
            username="admin",
            password="test"
        )

    def test_create_project_with_security_policies(self, harbor_client):
        """Test project creation includes security policies."""
        # Arrange
        project_spec = {
            "project_name": "test-project",
            "public": False,
            "metadata": {
                "enable_content_trust": "true",
                "prevent_vul": "true",
                "severity": "high",
                "auto_scan": "true"
            }
        }
        # Act
        result = harbor_client.create_project(project_spec)
        # Assert
        assert result.status_code == 201
        project = harbor_client.get_project("test-project")
        assert project["metadata"]["enable_content_trust"] == "true"
        assert project["metadata"]["prevent_vul"] == "true"

    def test_garbage_collection_schedule_configured(self, harbor_client):
        """Test GC schedule is properly configured."""
        # Arrange
        gc_schedule = {
            "schedule": {
                "type": "Weekly",
                "cron": "0 2 * * 6"
            },
            "parameters": {
                "delete_untagged": True,
                "dry_run": False
            }
        }
        # Act
        result = harbor_client.set_gc_schedule(gc_schedule)
        # Assert
        assert result.status_code == 200
        current_schedule = harbor_client.get_gc_schedule()
        assert current_schedule["schedule"]["cron"] == "0 2 * * 6"

# harbor_client.py
import requests
from typing import Dict, Any


class HarborClient:
    """Harbor API client with security-first defaults."""

    def __init__(self, url: str, username: str, password: str):
        """Store the base URL (trailing slash stripped) and build a session
        pre-configured with basic auth and a JSON content type."""
        self.url = url.rstrip('/')
        self.auth = (username, password)
        self.session = requests.Session()
        self.session.auth = self.auth
        self.session.headers.update({"Content-Type": "application/json"})

    def create_project(self, spec: Dict[str, Any]) -> requests.Response:
        """Create project with security policies.

        Mutates ``spec`` in place: fills in secure metadata defaults
        (content trust, vulnerability blocking, severity gate, auto scan)
        without overriding caller-supplied values, then POSTs the project.
        """
        if "metadata" not in spec:
            spec["metadata"] = {}
        spec["metadata"].setdefault("enable_content_trust", "true")
        spec["metadata"].setdefault("prevent_vul", "true")
        spec["metadata"].setdefault("severity", "high")
        spec["metadata"].setdefault("auto_scan", "true")
        return self.session.post(
            f"{self.url}/api/v2.0/projects",
            json=spec
        )

    def set_gc_schedule(self, schedule: Dict[str, Any]) -> requests.Response:
        """Configure garbage collection schedule via the system GC endpoint."""
        return self.session.post(
            f"{self.url}/api/v2.0/system/gc/schedule",
            json=schedule
        )

# Refactored with retry logic and connection pooling
import requests  # fix: original snippet used requests.Session without importing requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry


class HarborClient:
    """Harbor API client whose HTTP session retries transient failures and
    pools connections."""

    def __init__(self, url: str, username: str, password: str):
        self.url = url.rstrip('/')
        self.auth = (username, password)
        self.session = self._create_session()

    def _create_session(self) -> requests.Session:
        """Create session with retry and connection pooling."""
        session = requests.Session()
        session.auth = self.auth
        session.headers.update({"Content-Type": "application/json"})
        # Retry rate limits (429) and transient server errors (5xx) up to
        # 3 times with exponential backoff.
        retry_strategy = Retry(
            total=3,
            backoff_factor=1,
            status_forcelist=[429, 500, 502, 503, 504]
        )
        adapter = HTTPAdapter(
            max_retries=retry_strategy,
            pool_connections=10,
            pool_maxsize=10
        )
        session.mount("https://", adapter)
        return session

# Run all tests
pytest tests/test_harbor_config.py -v
# Run with coverage
pytest tests/test_harbor_config.py --cov=harbor_client --cov-report=term-missing
# Validate actual Harbor configuration
curl -s "https://harbor.example.com/api/v2.0/systeminfo" \
-u "admin:password" | jq '.harbor_version'
# Test scanner connectivity
curl -s "https://harbor.example.com/api/v2.0/scanners" \
-u "admin:password" | jq '.[].is_default'
# Verify replication endpoints
curl -s "https://harbor.example.com/api/v2.0/registries" \
-u "admin:password" | jq '.[].status'# ❌ Monthly GC - storage fills up
{
"schedule": {
"type": "Custom",
"cron": "0 0 1 * *"
},
"parameters": {
"delete_untagged": false
}
}

# ✅ Weekly GC with untagged cleanup
curl -X POST "https://harbor.example.com/api/v2.0/system/gc/schedule" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{
"schedule": {
"type": "Weekly",
"cron": "0 2 * * 6"
},
"parameters": {
"delete_untagged": true,
"dry_run": false,
"workers": 4
}
}'
# Monitor GC performance
curl -s "https://harbor.example.com/api/v2.0/system/gc" \
-u "admin:password" | jq '.[-1] | {status, deleted, duration: (.end_time - .start_time)}'# ❌ Replicate everything - wastes bandwidth
{
"name": "replicate-all",
"filters": [],
"trigger": {"type": "event_based"},
"speed": 0
}

# ✅ Filtered replication with scheduling and rate limiting
curl -X POST "https://harbor.example.com/api/v2.0/replication/policies" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{
"name": "replicate-production",
"filters": [
{"type": "name", "value": "production/**"},
{"type": "tag", "value": "v*"},
{"type": "label", "value": "approved=true"}
],
"trigger": {
"type": "scheduled",
"trigger_settings": {
"cron": "0 */4 * * *"
}
},
"speed": 10485760,
"override": true,
"enabled": true
}'
# Monitor replication performance
curl -s "https://harbor.example.com/api/v2.0/replication/executions?policy_id=1" \
-u "admin:password" | jq '[.[] | select(.status=="Succeed")] | length'# ❌ Every pull hits upstream registry
docker pull docker.io/library/nginx:latest
# Slow and uses bandwidth

# ✅ Configure proxy cache endpoint
curl -X POST "https://harbor.example.com/api/v2.0/registries" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{
"name": "dockerhub-cache",
"type": "docker-hub",
"url": "https://hub.docker.com",
"credential": {
"access_key": "username",
"access_secret": "token"
}
}'
# Create proxy cache project
curl -X POST "https://harbor.example.com/api/v2.0/projects" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{
"project_name": "dockerhub-proxy",
"registry_id": 1,
"public": true
}'
# Pull through cache - subsequent pulls are instant
docker pull harbor.example.com/dockerhub-proxy/library/nginx:latest

# ❌ Filesystem storage - no HA, backup complexity
storage_service:
filesystem:
rootdirectory: /data/registry

# ✅ S3 storage with intelligent tiering
REGISTRY_STORAGE_PROVIDER_NAME=s3
REGISTRY_STORAGE_PROVIDER_CONFIG='{
"bucket": "harbor-artifacts",
"region": "us-east-1",
"rootdirectory": "/harbor",
"storageclass": "INTELLIGENT_TIERING",
"multipartcopythresholdsize": 33554432,
"multipartcopychunksize": 33554432,
"multipartcopymaxconcurrency": 100,
"encrypt": true,
"v4auth": true
}'
# Configure lifecycle policy for old artifacts
aws s3api put-bucket-lifecycle-configuration \
--bucket harbor-artifacts \
--lifecycle-configuration '{
"Rules": [{
"ID": "archive-old-artifacts",
"Status": "Enabled",
"Filter": {"Prefix": "harbor/"},
"Transitions": [{
"Days": 90,
"StorageClass": "GLACIER"
}],
"NoncurrentVersionTransitions": [{
"NoncurrentDays": 30,
"StorageClass": "GLACIER"
}]
}]
}'

# ❌ Default connections - bottleneck under load
POSTGRESQL_MAX_OPEN_CONNS=0
POSTGRESQL_MAX_IDLE_CONNS=2

# ✅ Tuned connection pool for production
POSTGRESQL_HOST=postgres.example.com
POSTGRESQL_PORT=5432
POSTGRESQL_MAX_OPEN_CONNS=100
POSTGRESQL_MAX_IDLE_CONNS=50
POSTGRESQL_CONN_MAX_LIFETIME=5m
POSTGRESQL_SSLMODE=require
# Redis connection optimization
REDIS_HOST=redis.example.com:6379
REDIS_PASSWORD=${REDIS_PASSWORD}
REDIS_DB_INDEX=0
REDIS_IDLE_TIMEOUT_SECONDS=30
# Monitor connection usage
psql -h postgres.example.com -U harbor -c \
"SELECT count(*) as active_connections FROM pg_stat_activity WHERE datname='registry';"# ❌ Slow scanning blocks pushes
SCANNER_TRIVY_TIMEOUT=30m
# No parallelization

# ✅ Optimized Trivy scanner configuration
trivy:
environment:
SCANNER_TRIVY_TIMEOUT: "10m"
SCANNER_TRIVY_VULN_TYPE: "os,library"
SCANNER_TRIVY_SEVERITY: "UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL"
SCANNER_TRIVY_SKIP_UPDATE: "false"
SCANNER_TRIVY_GITHUB_TOKEN: "${GITHUB_TOKEN}"
SCANNER_TRIVY_CACHE_DIR: "/home/scanner/.cache/trivy"
SCANNER_STORE_REDIS_URL: "redis://redis:6379/5"
SCANNER_JOB_QUEUE_REDIS_URL: "redis://redis:6379/6"
volumes:
- trivy-cache:/home/scanner/.cache/trivy
deploy:
replicas: 3
resources:
limits:
memory: 4G
cpus: '2'
# Pre-download vulnerability database
docker exec trivy trivy image --download-db-only

robot$service-environment-action

# Harbor OIDC configuration
auth_mode: oidc_auth
oidc_name: Keycloak
oidc_endpoint: https://keycloak.example.com/auth/realms/harbor
oidc_client_id: harbor
oidc_client_secret: ${OIDC_SECRET}
oidc_scope: openid,profile,email,groups
oidc_verify_cert: true
oidc_auto_onboard: true
oidc_user_claim: preferred_username
oidc_group_claim: groups

# ❌ No signature verification
apiVersion: v1
kind: Pod
spec:
containers:
- image: harbor.example.com/library/app:latest

# ✅ Kyverno enforces signatures
apiVersion: kyverno.io/v1
kind: ClusterPolicy
metadata:
name: require-signed-images
spec:
validationFailureAction: Enforce
rules:
- name: verify-signature
verifyImages:
- imageReferences: ["harbor.example.com/library/*"]
required: true

# ❌ Project admin for CI/CD
{
"permissions": [{
"namespace": "library",
"access": [{"resource": "*", "action": "*"}]
}]
}

# ✅ Minimal scoped permissions
{
"name": "ci-pipeline",
"duration": 90,
"permissions": [{
"namespace": "library",
"access": [
{"resource": "repository", "action": "pull"},
{"resource": "repository", "action": "push"},
{"resource": "artifact-label", "action": "create"}
]
}]
}

// ❌ Scan only, no enforcement
{
"scan_on_push": true,
"prevent_vulnerable": false
}

// ✅ Block critical/high CVEs
{
"scan_on_push": true,
"prevent_vulnerable": true,
"severity": "high",
"auto_scan": true
}

# ❌ Set and forget replication
# No monitoring, failures go unnoticed

# ✅ Monitor replication health
curl "https://harbor.example.com/api/v2.0/replication/executions?policy_id=1" \
-u "admin:password" | jq -r '.[] | select(.status=="Failed")'
# Alert on replication lag > 1 hour
LAST_SUCCESS=$(curl -s "..." | jq -r '.[-1].end_time')
LAG=$(( $(date +%s) - $(date -d "$LAST_SUCCESS" +%s) ))
if [ $LAG -gt 3600 ]; then
alert "Replication lag detected"
fi

# ❌ Storage grows indefinitely
# Deleted artifacts never cleaned up

# ✅ Scheduled garbage collection
# Harbor UI: Administration > Garbage Collection > Schedule
# Cron: 0 2 * * 6 (every Saturday 2 AM)
# Or via API
curl -X POST "https://harbor.example.com/api/v2.0/system/gc/schedule" \
-u "admin:password" \
-H "Content-Type: application/json" \
-d '{
"schedule": {
"type": "Weekly",
"cron": "0 2 * * 6"
},
"parameters": {
"delete_untagged": true,
"dry_run": false
}
}'

# ❌ Non-deterministic deployments
image: harbor.example.com/library/app:latest

# ✅ Immutable digest-based references
image: harbor.example.com/library/app@sha256:abc123...
# Or immutable semantic version
image: harbor.example.com/library/app:v1.2.3
# + tag immutability rule for v*.*.* pattern

# tests/test_harbor_policies.py
import pytest
# fix: validate_retention_policy and validate_robot_account are called below
# but were missing from the original import line.
from harbor_client import (
    HarborClient,
    validate_project_config,
    validate_retention_policy,
    validate_robot_account,
)


class TestProjectPolicies:
    """Unit tests for Harbor project configuration."""

    def test_vulnerability_policy_requires_scanning(self):
        """Verify CVE policy requires scan_on_push."""
        config = {
            "prevent_vulnerable": True,
            "severity": "high",
            "scan_on_push": False  # Invalid combination
        }
        result = validate_project_config(config)
        assert result["valid"] == False
        assert "scan_on_push required" in result["errors"]

    def test_content_trust_requires_notary(self):
        """Verify content trust needs Notary configured."""
        config = {
            "enable_content_trust": True,
            "notary_url": None
        }
        result = validate_project_config(config)
        assert result["valid"] == False

    def test_retention_policy_validation(self):
        """Verify retention rules are valid."""
        policy = {
            "rules": [{
                "template": "latestPushedK",
                "params": {"latestPushedK": -1}  # Invalid
            }]
        }
        result = validate_retention_policy(policy)
        assert result["valid"] == False


class TestRobotAccounts:
    """Test robot account permission validation."""

    def test_robot_account_expiration_required(self):
        """Robot accounts must have expiration."""
        robot = {
            "name": "ci-pipeline",
            "duration": 0,  # Never expires - bad
            "permissions": [{"resource": "repository", "action": "push"}]
        }
        result = validate_robot_account(robot)
        assert result["valid"] == False
        assert "expiration required" in result["errors"]

    def test_robot_account_max_duration(self):
        """Robot account max duration is 90 days."""
        robot = {
            "name": "ci-pipeline",
            "duration": 365,  # Too long
            "permissions": [{"resource": "repository", "action": "push"}]
        }
        result = validate_robot_account(robot)
        assert result["valid"] == False
        assert "max duration 90 days" in result["errors"]

# tests/integration/test_harbor_api.py
import pytest
import os
from harbor_client import HarborClient


@pytest.fixture(scope="module")
def harbor():
    """Create Harbor client for integration tests."""
    # NOTE(review): HARBOR_PASSWORD has no default, so os.getenv returns None
    # when unset — consider failing fast (pytest.skip) with a clear message.
    return HarborClient(
        url=os.getenv("HARBOR_URL", "https://harbor.example.com"),
        username=os.getenv("HARBOR_USER", "admin"),
        password=os.getenv("HARBOR_PASSWORD")
    )


class TestHarborAPIIntegration:
    """Integration tests against live Harbor instance."""

    def test_health_check(self, harbor):
        """Verify Harbor API is accessible."""
        result = harbor.health()
        assert result.status_code == 200
        assert result.json()["status"] == "healthy"

    def test_scanner_configured(self, harbor):
        """Verify Trivy scanner is default."""
        scanners = harbor.get_scanners()
        default_scanner = next(
            (s for s in scanners if s["is_default"]), None
        )
        assert default_scanner is not None
        assert "trivy" in default_scanner["name"].lower()

    def test_project_security_defaults(self, harbor):
        """Verify projects have security settings."""
        # Create test project
        project = harbor.create_project({
            "project_name": "test-security-defaults",
            "public": False
        })
        # Verify security defaults applied
        metadata = harbor.get_project("test-security-defaults")["metadata"]
        assert metadata.get("enable_content_trust") == "true"
        assert metadata.get("prevent_vul") == "true"
        assert metadata.get("auto_scan") == "true"
        # Cleanup
        harbor.delete_project("test-security-defaults")

    def test_gc_schedule_exists(self, harbor):
        """Verify garbage collection is scheduled."""
        schedule = harbor.get_gc_schedule()
        assert schedule["schedule"]["type"] in ["Weekly", "Daily", "Custom"]
        assert schedule["parameters"]["delete_untagged"] == True


class TestReplicationPolicies:
    """Test replication policy configurations."""

    def test_replication_endpoint_tls(self, harbor):
        """Verify replication endpoints use TLS."""
        endpoints = harbor.get_registries()
        for endpoint in endpoints:
            assert endpoint["url"].startswith("https://")
            assert endpoint["insecure"] == False

    def test_replication_has_filters(self, harbor):
        """Verify replication policies have filters."""
        policies = harbor.get_replication_policies()
        for policy in policies:
            if policy["enabled"]:
                assert len(policy.get("filters", [])) > 0, \
                    f"Policy {policy['name']} has no filters"

#!/bin/bash
# tests/e2e/test_harbor_workflow.sh
set -e
HARBOR_URL="${HARBOR_URL:-https://harbor.example.com}"
# fix: docker image references must not contain a URL scheme; strip it from
# HARBOR_URL before using the value in `docker tag` / `docker push`.
HARBOR_HOST="${HARBOR_URL#https://}"
PROJECT="e2e-test-$(date +%s)"
echo "=== Harbor E2E Test Suite ==="
# Test 1: Create project with security defaults
echo "Test 1: Creating project with security defaults..."
curl -s -X POST "${HARBOR_URL}/api/v2.0/projects" \
  -u "${HARBOR_USER}:${HARBOR_PASSWORD}" \
  -H "Content-Type: application/json" \
  -d "{\"project_name\": \"${PROJECT}\", \"public\": false}" \
  -o /dev/null -w "%{http_code}" | grep -q "201"
echo "✓ Project created"
# Test 2: Verify security policies applied
echo "Test 2: Verifying security policies..."
METADATA=$(curl -s "${HARBOR_URL}/api/v2.0/projects/${PROJECT}" \
  -u "${HARBOR_USER}:${HARBOR_PASSWORD}" | jq '.metadata')
echo "$METADATA" | jq -e '.auto_scan == "true"' > /dev/null
echo "✓ Auto scan enabled"
echo "$METADATA" | jq -e '.prevent_vul == "true"' > /dev/null
echo "✓ Vulnerability prevention enabled"
# Test 3: Push and scan image
echo "Test 3: Pushing and scanning image..."
docker pull alpine:latest
docker tag alpine:latest "${HARBOR_HOST}/${PROJECT}/alpine:test"
docker push "${HARBOR_HOST}/${PROJECT}/alpine:test"
# Wait for scan
sleep 30
SCAN_STATUS=$(curl -s "${HARBOR_URL}/api/v2.0/projects/${PROJECT}/repositories/alpine/artifacts/test" \
  -u "${HARBOR_USER}:${HARBOR_PASSWORD}" | jq -r '.scan_overview.scan_status')
[ "$SCAN_STATUS" == "Success" ]
echo "✓ Image scanned successfully"
# Test 4: Create robot account
echo "Test 4: Creating robot account..."
# fix: v2 robot permission entries require "kind" alongside "namespace"
# (matches the robot-creation example earlier in this document).
ROBOT=$(curl -s -X POST "${HARBOR_URL}/api/v2.0/projects/${PROJECT}/robots" \
  -u "${HARBOR_USER}:${HARBOR_PASSWORD}" \
  -H "Content-Type: application/json" \
  -d '{
    "name": "e2e-test",
    "duration": 1,
    "permissions": [{"kind": "project", "namespace": "'${PROJECT}'", "access": [{"resource": "repository", "action": "pull"}]}]
  }')
echo "$ROBOT" | jq -e '.secret' > /dev/null
echo "✓ Robot account created"
# Cleanup
echo "Cleaning up..."
curl -s -X DELETE "${HARBOR_URL}/api/v2.0/projects/${PROJECT}" \
  -u "${HARBOR_USER}:${HARBOR_PASSWORD}"
echo "✓ Cleanup complete"
echo "=== All E2E tests passed ==="

# Run unit tests
pytest tests/test_harbor_policies.py -v
# Run integration tests (requires HARBOR_URL, HARBOR_USER, HARBOR_PASSWORD)
pytest tests/integration/ -v --tb=short
# Run E2E tests
./tests/e2e/test_harbor_workflow.sh
# Run all tests with coverage
pytest tests/ --cov=harbor_client --cov-report=html
# Specific test markers
pytest -m "not integration" # Skip integration tests
pytest -m "security" # Run only security tests/home/user/ai-coding/new-skills/harbor-expert/references/security-scanning.md/home/user/ai-coding/new-skills/harbor-expert/references/replication-guide.md