# Wrangler — Cloudflare Workers CLI
Cloudflare Workers CLI for deploying, developing, and managing Workers, KV, R2, D1, Vectorize, Hyperdrive, Workers AI, Containers, Queues, Workflows, Pipelines, and Secrets Store. Load before running wrangler commands to ensure correct syntax and best practices.
npx skill4agent add cloudflare/cloudflare-docs wrangler

wrangler --version # Requires v4.x+
npm install -D wrangler@latest

Key terms: `wrangler.jsonc`, `compatibility_date`, `wrangler types`, `remote: true`, `wrangler check`, `env.staging`, `env.production`

# Initialize new project
npx wrangler init my-worker
# Or with a framework
npx create-cloudflare@latest my-app

| Task | Command |
|---|---|
| Start local dev server | `wrangler dev` |
| Deploy to Cloudflare | `wrangler deploy` |
| Deploy dry run | `wrangler deploy --dry-run` |
| Generate TypeScript types | `wrangler types` |
| Validate configuration | `wrangler check` |
| View live logs | `wrangler tail` |
| Delete Worker | `wrangler delete` |
| Auth status | `wrangler whoami` |
{
"$schema": "./node_modules/wrangler/config-schema.json",
"name": "my-worker",
"main": "src/index.ts",
"compatibility_date": "2026-01-01"
}

{
"$schema": "./node_modules/wrangler/config-schema.json",
"name": "my-worker",
"main": "src/index.ts",
"compatibility_date": "2026-01-01",
"compatibility_flags": ["nodejs_compat_v2"],
// Environment variables
"vars": {
"ENVIRONMENT": "production"
},
// KV Namespace
"kv_namespaces": [
{ "binding": "KV", "id": "<KV_NAMESPACE_ID>" }
],
// R2 Bucket
"r2_buckets": [
{ "binding": "BUCKET", "bucket_name": "my-bucket" }
],
// D1 Database
"d1_databases": [
{ "binding": "DB", "database_name": "my-db", "database_id": "<DB_ID>" }
],
// Workers AI (always remote)
"ai": { "binding": "AI" },
// Vectorize
"vectorize": [
{ "binding": "VECTOR_INDEX", "index_name": "my-index" }
],
// Hyperdrive
"hyperdrive": [
{ "binding": "HYPERDRIVE", "id": "<HYPERDRIVE_ID>" }
],
// Durable Objects
"durable_objects": {
"bindings": [
{ "name": "COUNTER", "class_name": "Counter" }
]
},
// Cron triggers
"triggers": {
"crons": ["0 * * * *"]
},
// Environments
"env": {
"staging": {
"name": "my-worker-staging",
"vars": { "ENVIRONMENT": "staging" }
}
}
}

# Generate worker-configuration.d.ts
wrangler types
# Custom output path
wrangler types ./src/env.d.ts
# Check types are up to date (CI)
wrangler types --check

# Local mode (default) - uses local storage simulation
wrangler dev
# With specific environment
wrangler dev --env staging
# Force local-only (disable remote bindings)
wrangler dev --local
# Remote mode - runs on Cloudflare edge (legacy)
wrangler dev --remote
# Custom port
wrangler dev --port 8787
# Live reload for HTML changes
wrangler dev --live-reload
# Test scheduled/cron handlers
wrangler dev --test-scheduled
# Then visit: http://localhost:8787/__scheduled

To use real Cloudflare resources during local dev, set `remote: true` on the binding:

{
"r2_buckets": [
{ "binding": "BUCKET", "bucket_name": "my-bucket", "remote": true }
],
"ai": { "binding": "AI", "remote": true },
"vectorize": [
{ "binding": "INDEX", "index_name": "my-index", "remote": true }
]
}

Local dev secrets/variables go in `.dev.vars`:

API_KEY=local-dev-key
DATABASE_URL=postgres://localhost:5432/dev

# Deploy to production
wrangler deploy
# Deploy specific environment
wrangler deploy --env staging
# Dry run (validate without deploying)
wrangler deploy --dry-run
# Keep dashboard-set variables
wrangler deploy --keep-vars
# Minify code
wrangler deploy --minify

# Set secret interactively
wrangler secret put API_KEY
# Set from stdin
echo "secret-value" | wrangler secret put API_KEY
# List secrets
wrangler secret list
# Delete secret
wrangler secret delete API_KEY
# Bulk secrets from JSON file
wrangler secret bulk secrets.json

# List recent versions
wrangler versions list
# View specific version
wrangler versions view <VERSION_ID>
# Rollback to previous version
wrangler rollback
# Rollback to specific version
wrangler rollback <VERSION_ID>

# Create namespace
wrangler kv namespace create MY_KV
# List namespaces
wrangler kv namespace list
# Delete namespace
wrangler kv namespace delete --namespace-id <ID>

# Put value
wrangler kv key put --namespace-id <ID> "key" "value"
# Put with expiration (seconds)
wrangler kv key put --namespace-id <ID> "key" "value" --expiration-ttl 3600
# Get value
wrangler kv key get --namespace-id <ID> "key"
# List keys
wrangler kv key list --namespace-id <ID>
# Delete key
wrangler kv key delete --namespace-id <ID> "key"
# Bulk put from JSON
wrangler kv bulk put --namespace-id <ID> data.json

{
"kv_namespaces": [
{ "binding": "CACHE", "id": "<NAMESPACE_ID>" }
]
}

# Create bucket
wrangler r2 bucket create my-bucket
# Create with location hint
wrangler r2 bucket create my-bucket --location wnam
# List buckets
wrangler r2 bucket list
# Get bucket info
wrangler r2 bucket info my-bucket
# Delete bucket
wrangler r2 bucket delete my-bucket

# Upload object
wrangler r2 object put my-bucket/path/file.txt --file ./local-file.txt
# Download object
wrangler r2 object get my-bucket/path/file.txt
# Delete object
wrangler r2 object delete my-bucket/path/file.txt

{
"r2_buckets": [
{ "binding": "ASSETS", "bucket_name": "my-bucket" }
]
}

# Create database
wrangler d1 create my-database
# Create with location
wrangler d1 create my-database --location wnam
# List databases
wrangler d1 list
# Get database info
wrangler d1 info my-database
# Delete database
wrangler d1 delete my-database

# Execute SQL command (remote)
wrangler d1 execute my-database --remote --command "SELECT * FROM users"
# Execute SQL file (remote)
wrangler d1 execute my-database --remote --file ./schema.sql
# Execute locally
wrangler d1 execute my-database --local --command "SELECT * FROM users"

# Create migration
wrangler d1 migrations create my-database create_users_table
# List pending migrations
wrangler d1 migrations list my-database --local
# Apply migrations locally
wrangler d1 migrations apply my-database --local
# Apply migrations to remote
wrangler d1 migrations apply my-database --remote

# Export schema and data
wrangler d1 export my-database --remote --output backup.sql
# Export schema only
wrangler d1 export my-database --remote --output schema.sql --no-data

{
"d1_databases": [
{
"binding": "DB",
"database_name": "my-database",
"database_id": "<DATABASE_ID>",
"migrations_dir": "./migrations"
}
]
}

# Create index with dimensions
wrangler vectorize create my-index --dimensions 768 --metric cosine
# Create with preset (auto-configures dimensions/metric)
wrangler vectorize create my-index --preset @cf/baai/bge-base-en-v1.5
# List indexes
wrangler vectorize list
# Get index info
wrangler vectorize get my-index
# Delete index
wrangler vectorize delete my-index

# Insert vectors from NDJSON file
wrangler vectorize insert my-index --file vectors.ndjson
# Query vectors
wrangler vectorize query my-index --vector "[0.1, 0.2, ...]" --top-k 10

{
"vectorize": [
{ "binding": "SEARCH_INDEX", "index_name": "my-index" }
]
}

# Create config
wrangler hyperdrive create my-hyperdrive \
--connection-string "postgres://user:pass@host:5432/database"
# List configs
wrangler hyperdrive list
# Get config details
wrangler hyperdrive get <HYPERDRIVE_ID>
# Update config
wrangler hyperdrive update <HYPERDRIVE_ID> --origin-password "new-password"
# Delete config
wrangler hyperdrive delete <HYPERDRIVE_ID>

{
"compatibility_flags": ["nodejs_compat_v2"],
"hyperdrive": [
{ "binding": "HYPERDRIVE", "id": "<HYPERDRIVE_ID>" }
]
}

# List available models
wrangler ai models
# List finetunes
wrangler ai finetune list

{
"ai": { "binding": "AI" }
}

# Create queue
wrangler queues create my-queue
# List queues
wrangler queues list
# Delete queue
wrangler queues delete my-queue
# Add consumer to queue
wrangler queues consumer add my-queue my-worker
# Remove consumer
wrangler queues consumer remove my-queue my-worker

{
"queues": {
"producers": [
{ "binding": "MY_QUEUE", "queue": "my-queue" }
],
"consumers": [
{
"queue": "my-queue",
"max_batch_size": 10,
"max_batch_timeout": 30
}
]
}
}

# Build container image
wrangler containers build -t my-app:latest .
# Build and push in one command
wrangler containers build -t my-app:latest . --push
# Push existing image to Cloudflare registry
wrangler containers push my-app:latest

# List containers
wrangler containers list
# Get container info
wrangler containers info <CONTAINER_ID>
# Delete container
wrangler containers delete <CONTAINER_ID>

# List images in registry
wrangler containers images list
# Delete image
wrangler containers images delete my-app:latest

# List configured registries
wrangler containers registries list
# Configure external registry (e.g., ECR)
wrangler containers registries configure <DOMAIN> \
--public-credential <AWS_ACCESS_KEY_ID>
# Delete registry configuration
wrangler containers registries delete <DOMAIN>

# List workflows
wrangler workflows list
# Describe workflow
wrangler workflows describe my-workflow
# Trigger workflow instance
wrangler workflows trigger my-workflow
# Trigger with parameters
wrangler workflows trigger my-workflow --params '{"key": "value"}'
# Delete workflow
wrangler workflows delete my-workflow

# List instances
wrangler workflows instances list my-workflow
# Describe instance
wrangler workflows instances describe my-workflow <INSTANCE_ID>
# Terminate instance
wrangler workflows instances terminate my-workflow <INSTANCE_ID>

{
"workflows": [
{
"binding": "MY_WORKFLOW",
"name": "my-workflow",
"class_name": "MyWorkflow"
}
]
}

# Create pipeline
wrangler pipelines create my-pipeline --r2 my-bucket
# List pipelines
wrangler pipelines list
# Show pipeline details
wrangler pipelines show my-pipeline
# Update pipeline
wrangler pipelines update my-pipeline --batch-max-mb 100
# Delete pipeline
wrangler pipelines delete my-pipeline

{
"pipelines": [
{ "binding": "MY_PIPELINE", "pipeline": "my-pipeline" }
]
}

# Create store
wrangler secrets-store store create my-store
# List stores
wrangler secrets-store store list
# Delete store
wrangler secrets-store store delete <STORE_ID>

# Add secret to store
wrangler secrets-store secret put <STORE_ID> my-secret
# List secrets in store
wrangler secrets-store secret list <STORE_ID>
# Get secret
wrangler secrets-store secret get <STORE_ID> my-secret
# Delete secret from store
wrangler secrets-store secret delete <STORE_ID> my-secret

{
"secrets_store_secrets": [
{
"binding": "MY_SECRET",
"store_id": "<STORE_ID>",
"secret_name": "my-secret"
}
]
}

# Create Pages project
wrangler pages project create my-site
# Deploy directory to Pages
wrangler pages deploy ./dist
# Deploy with specific branch
wrangler pages deploy ./dist --branch main
# List deployments
wrangler pages deployment list --project-name my-site

# Stream live logs
wrangler tail
# Tail specific Worker
wrangler tail my-worker
# Filter by status
wrangler tail --status error
# Filter by search term
wrangler tail --search "error"
# JSON output
wrangler tail --format json

{
"observability": {
"enabled": true,
"head_sampling_rate": 1
}
}

npm install -D @cloudflare/vitest-pool-workers vitest

Create `vitest.config.ts`:

import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config";
export default defineWorkersConfig({
test: {
poolOptions: {
workers: {
wrangler: { configPath: "./wrangler.jsonc" },
},
},
},
});

# Enable in dev
wrangler dev --test-scheduled
# Trigger via HTTP
curl http://localhost:8787/__scheduled

| Issue | Solution |
|---|---|
| `wrangler: command not found` | Install: `npm install -D wrangler` |
| Auth errors | Run `wrangler login` |
| Config validation errors | Run `wrangler check` |
| Type errors after config change | Run `wrangler types` |
| Local storage not persisting | Check the local state directory (`.wrangler/state`) and any `--persist-to` flag |
| Binding undefined in Worker | Verify binding name matches config exactly |
# Check auth status
wrangler whoami
# Validate config
wrangler check
# View config schema
wrangler docs configuration

Key terms: `wrangler.jsonc`, `wrangler types`, `env.staging`, `env.production`, `compatibility_date`, `.dev.vars`, `wrangler dev`, `--dry-run`