# Terraform Provisioner & Multi-Environment Operational Pitfalls
Operational traps for Terraform provisioners, multi-environment isolation, and zero-to-deployment reliability. Covers provisioner timing races, SSH connection conflicts, DNS record duplication, volume permissions, database bootstrap gaps, snapshot cross-contamination, Cloudflare credential format errors, hardcoded domains in Caddyfiles/compose, and init-data-only-on-first-boot pitfalls. Activate when writing null_resource provisioners, creating multi-environment Terraform setups, debugging containers that are Restarting/unhealthy after terraform apply, setting up fresh instances with cloud-init, or any IaC code that SSHs into remote hosts. Also activate when the user mentions terraform plan/apply errors, provisioner failures, infrastructure drift, TLS certificate errors, or Caddy/gateway configuration.
Install: `npx skill4agent add daymade/claude-code-skills terraform-skill`

## Never assume Docker exists when the provisioner connects

Symptom: `docker: not found` inside a `remote-exec` provisioner. cloud-init installs packages asynchronously, so the SSH session Terraform opens can land before Docker is ready. Gate every remote step:

```hcl
provisioner "remote-exec" {
  inline = [
    "cloud-init status --wait || true",
    "which docker || { echo 'FATAL: Docker not ready'; exit 1; }",
  ]
}
```

## Ship source as a tarball, not rsync

Symptom: `rsync: connection unexpectedly closed` — rsync may be absent on a fresh instance, and parallel SSH sessions conflict. Build one archive locally (use `--exclude` to keep it small), push it with the `file` provisioner, unpack remotely:

```hcl
provisioner "local-exec" {
  command = "tar czf /tmp/src.tar.gz --exclude=node_modules --exclude=.git -C ${path.module}/../../.. myproject"
}
provisioner "file" {
  source      = "/tmp/src.tar.gz"
  destination = "/tmp/src.tar.gz"
}
provisioner "remote-exec" {
  inline = ["tar xzf /tmp/src.tar.gz -C /data/ && rm -f /tmp/src.tar.gz"]
}
```
## Preseed interactive apt packages in cloud-init

`cloud-init status --wait` hangs forever when `apt-get -y` stalls on a debconf prompt (`iptables-persistent` is the classic offender; `postfix`, `mysql-server`, and `wireshark-common` behave the same). Preseed the answers first:

```yaml
runcmd:
  - |
    echo iptables-persistent iptables-persistent/autosave_v4 boolean true | debconf-set-selections
    echo iptables-persistent iptables-persistent/autosave_v6 boolean true | debconf-set-selections
    DEBIAN_FRONTEND=noninteractive apt-get install -y iptables-persistent
```

## Create volume directories with the container's UID

Symptom: `EACCES: permission denied` right after `docker compose up`. Bind-mounted host directories must be owned by the UID the image's entrypoint runs as (check the Dockerfile's `adduser ... -u USER` line):

```sh
mkdir -p /data/myapp/data /data/myapp/logs
chown -R 1001:1001 /data/myapp/data /data/myapp/logs
```
## Gate `terraform apply` on real container health

Symptom: apply "succeeds" but the container is stuck `Restarting`. Use `set -u` (not `set -e`) so the diagnostic `docker logs` / `docker ps` lines still run before the hard health check fails the provisioner:

```hcl
provisioner "remote-exec" {
  inline = [
    "set -u",
    "docker compose up -d",
    "sleep 15",
    "docker logs myapp --tail 20 2>&1 || true",
    "docker ps --format 'table {{.Names}}\\t{{.Status}}' || true",
    "docker ps --filter name=myapp --format '{{.Status}}' | grep -q healthy || exit 1",
  ]
}
```
## Database bootstrap only happens on first boot

`docker-entrypoint-initdb.d` scripts run only when the data volume is empty — a reused volume silently skips them. Bootstrap explicitly and keep migrations idempotent:

```sh
# After postgres healthy:
docker exec pg psql -U postgres -tc "SELECT 1 FROM pg_database WHERE datname='mydb'" | grep -q 1 \
  || docker exec pg psql -U postgres -c "CREATE DATABASE mydb;"
# Idempotent migrations:
for f in migrations/*.sql; do
  VER=$(basename "$f")
  APPLIED=$($PSQL -tAc "SELECT 1 FROM schema_migrations WHERE version='$VER'" | tr -d ' ')
  [ "$APPLIED" = "1" ] && continue
  { echo 'BEGIN;'; cat "$f"; echo 'COMMIT;'; } | $PSQL
  $PSQL -tAc "INSERT INTO schema_migrations(version) VALUES ('$VER') ON CONFLICT DO NOTHING"
done
```
## `docker compose build` reads `.env`, not your shell

Build args sourced from `.env` ignore inline shell assignments like `VAR=x docker compose build`:

```sh
# WRONG
DOCKER_WITH_PROXY_MODE=disabled docker compose build
# RIGHT
grep -q DOCKER_WITH_PROXY_MODE .env || echo 'DOCKER_WITH_PROXY_MODE=disabled' >> .env
docker compose build
```

## Cloudflare: API Token, not Global API Key

Symptoms: `Invalid format for Authorization header`, `HTTP 400 Code:6003`. DNS-01 TLS via Caddy needs a scoped API Token (prefix `cfut_`), not the Global Key:

```sh
# Verify token format before deploy:
TOKEN=$(grep CLOUDFLARE_API_TOKEN .env | cut -d= -f2)
echo "$TOKEN" | grep -q "^cfut_" || echo "FATAL: needs API Token, not Global Key"
```
To mint a correctly scoped token (DNS Write + Zone Read) from the Global Key:

```sh
curl -s "https://api.cloudflare.com/client/v4/user/tokens" -X POST \
  -H "X-Auth-Email: $CF_EMAIL" -H "X-Auth-Key: $CF_GLOBAL_KEY" \
  -d '{"name":"caddy-dns-acme","policies":[{"effect":"allow",
    "resources":{"com.cloudflare.api.account.zone.<ZONE_ID>":"*"},
    "permission_groups":[
      {"id":"4755a26eedb94da69e1066d98aa820be","name":"DNS Write"},
      {"id":"c8fed203ed3043cba015a93ad1616f1f","name":"Zone Read"}]}]}'
```
## No hardcoded domains in the Caddyfile

Use Caddy's `{$VAR}` environment placeholder so one Caddyfile serves every environment:

```caddyfile
# WRONG
gpt-6.pro { tls { dns cloudflare {env.CLOUDFLARE_API_TOKEN} } }
# RIGHT
{$LOBEHUB_DOMAIN} { tls { dns cloudflare {env.CLOUDFLARE_API_TOKEN} } }
```
## Compose: fail fast on missing environment variables

`${VAR:?required}` aborts `docker compose` with a clear message instead of silently interpolating an empty string:

```yaml
# WRONG
- APP_URL=https://gpt-6.pro
# RIGHT
- APP_URL=${APP_URL:?APP_URL is required}
```

```yaml
environment:
  - LOBEHUB_DOMAIN=${LOBEHUB_DOMAIN:?LOBEHUB_DOMAIN is required}
  - CLOUDFLARE_API_TOKEN=${CLOUDFLARE_API_TOKEN:?required for DNS-01 TLS}
```
## Casdoor: redirect URIs are stored in the database

Symptom: `Social sign in failed` in a cloned environment. `init_data.json` is applied only on first boot with `--createDatabase=true`; a database restored from a snapshot keeps the production domain:

```sh
# Replace production domain with staging in existing Casdoor DB
$PSQL -c "UPDATE application SET redirect_uris = REPLACE(redirect_uris,
    'gpt-6.pro', 'staging.gpt-6.pro')
  WHERE name='lobechat'
    AND redirect_uris LIKE '%gpt-6.pro%'
    AND redirect_uris NOT LIKE '%staging.gpt-6.pro%';"
```

Also point `AUTH_CASDOOR_ISSUER` in the `.tf` variables at the per-environment auth host (e.g. `auth.staging.example.com`).
## Account/region-scoped resources collide across environments

Name shared resources per environment, and give every environment its own hostnames (`staging.example.com`, `auth.staging`, `minio.staging`):

| Resource | Scope | Fix |
|---|---|---|
| SSH key pair | Region | suffix the name with the environment |
| SLS log project | Account | suffix the name with the environment |
| CloudMonitor contact | Account | suffix the name with the environment |
## Snapshot recovery must not cross environments

Filter `data "alicloud_ecs_snapshots"` by the environment's own tags/name so staging never restores a production snapshot, and guard the empty case:

```hcl
locals {
  latest_snapshot_id = (var.enable_snapshot_recovery && length(local.available_snapshots) > 0
    ? local.available_snapshots[0].snapshot_id : null)
}
```

Gate dependent resources with `count` on `local.latest_snapshot_id`, and run `terraform validate` before `terraform apply`.

Pre-deploy checklist:

- Required env vars set: `LOBEHUB_DOMAIN`, `CLAUDE4DEV_DOMAIN`, `CLOUDFLARE_API_TOKEN` (`cfut_` prefix), `APP_URL`, and the `auth.*` hosts.
- Run `make pre-deploy ENV=staging` before `make apply`.
- Host directories exist with correct ownership (`mkdir -p /data/{svc1,svc2}`) before the `file` provisioner runs.
- `CREATE DATABASE` and `schema_migrations` bootstrap are idempotent; service ordering uses `depends_on`.
- Caddyfile uses `{$VAR}` placeholders; compose uses `${VAR:?required}`.