# Drizzle ORM + Cloudflare D1
Build type-safe D1 databases with Drizzle ORM. Includes schema definition, migrations with Drizzle Kit, relations, and D1 batch API patterns. Prevents 18 errors including SQL BEGIN failures, cascade data loss, 100-parameter limits, and foreign key issues. Use when: defining D1 schemas, managing migrations, bulk inserts, or troubleshooting D1_ERROR, BEGIN TRANSACTION, foreign keys, "too many SQL variables".
npx skill4agent add jezweb/claude-skills drizzle-orm-d1

| Command | Purpose |
|---|---|
| Set up Drizzle ORM with D1 (schema, config, migrations) |
| Generate and apply database migrations |
| Seed database with initial or test data |
| Latest Version: drizzle-orm@0.45.1, drizzle-kit@0.31.8, better-sqlite3@12.5.0 | |
| Dependencies: cloudflare-d1, cloudflare-worker-base |
# 1. Install
npm install drizzle-orm
npm install -D drizzle-kit
# 2. Configure drizzle.config.ts
import { defineConfig } from 'drizzle-kit';
export default defineConfig({
schema: './src/db/schema.ts',
out: './migrations',
dialect: 'sqlite',
driver: 'd1-http',
dbCredentials: {
accountId: process.env.CLOUDFLARE_ACCOUNT_ID!,
databaseId: process.env.CLOUDFLARE_DATABASE_ID!,
token: process.env.CLOUDFLARE_D1_TOKEN!,
},
});
# 3. Configure wrangler.jsonc
{
"d1_databases": [{
"binding": "DB",
"database_name": "my-database",
"database_id": "your-database-id",
"migrations_dir": "./migrations" // CRITICAL: Points to Drizzle migrations
}]
}
# 4. Define schema (src/db/schema.ts)
import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core';
export const users = sqliteTable('users', {
// Auto-incrementing integer primary key (SQLite rowid-backed).
id: integer('id').primaryKey({ autoIncrement: true }),
email: text('email').notNull().unique(),
// Stored as an integer Unix timestamp; $defaultFn runs in JS at insert time
// (unlike .default(), which bakes a constant into the generated SQL).
createdAt: integer('created_at', { mode: 'timestamp' }).$defaultFn(() => new Date()),
});
# 5. Generate & apply migrations
npx drizzle-kit generate
npx wrangler d1 migrations apply my-database --local # Test first
npx wrangler d1 migrations apply my-database --remote # Then production
# 6. Query in Worker
import { drizzle } from 'drizzle-orm/d1';
import { users } from './db/schema';
const db = drizzle(env.DB);
const allUsers = await db.select().from(users).all();

Key concepts: `db.batch()`, `--local` vs `--remote`, `integer` columns with `mode: 'timestamp'`, `.$defaultFn()` vs `.default()`, `migrations_dir` pointing at `./migrations`, the `BEGIN TRANSACTION` limitation, and `drizzle-kit push` vs `generate`/`apply`.

npx drizzle-kit studio
# Opens http://local.drizzle.studio
# For remote D1 database
npx drizzle-kit studio --port 3001

| Command | Purpose |
|---|---|
| Generate SQL migrations from schema changes |
| Push schema directly (dev only, not for production) |
| Introspect existing database → Drizzle schema |
| Validate migration integrity (race conditions) |
| Upgrade migration snapshots to latest format |
# Introspect existing D1 database
npx drizzle-kit pull
# Validate migrations haven't collided
npx drizzle-kit check

Dynamic query building with `.$dynamic()`:

import { eq, and, or, like, sql } from 'drizzle-orm';
// Base query
function getUsers(filters: { name?: string; email?: string; active?: boolean }) {
let query = db.select().from(users).$dynamic();
if (filters.name) {
query = query.where(like(users.name, `%${filters.name}%`));
}
if (filters.email) {
query = query.where(eq(users.email, filters.email));
}
if (filters.active !== undefined) {
query = query.where(eq(users.active, filters.active));
}
return query;
}
// Usage
const results = await getUsers({ name: 'John', active: true });

import { users } from './schema';
// Insert or ignore if exists
await db.insert(users)
.values({ id: 1, email: 'test@example.com', name: 'Test' })
.onConflictDoNothing();
// Insert or update specific fields on conflict
await db.insert(users)
.values({ id: 1, email: 'test@example.com', name: 'Test' })
.onConflictDoUpdate({
target: users.email, // Conflict on unique email
set: {
name: sql`excluded.name`, // Use value from INSERT
updatedAt: new Date(),
},
});

import { drizzle } from 'drizzle-orm/d1';
// Enable query logging
const db = drizzle(env.DB, { logger: true });
// Custom logger
const db = drizzle(env.DB, {
logger: {
logQuery(query, params) {
console.log('SQL:', query);
console.log('Params:', params);
},
},
});
// Get SQL without executing (for debugging)
const query = db.select().from(users).where(eq(users.id, 1));
const sql = query.toSQL();
console.log(sql.sql, sql.params);

## Troubleshooting

- `D1_ERROR: Cannot use BEGIN TRANSACTION` — D1 does not support `BEGIN TRANSACTION`; use `db.batch([...])` instead of `db.transaction()`.
- `FOREIGN KEY constraint failed: SQLITE_CONSTRAINT` — `PRAGMA foreign_keys = OFF;` is ignored by D1; declare relations in the schema, e.g. `.references(() => users.id, { onDelete: 'cascade' })`.
- `Error: No such module "wrangler"` — don't import `wrangler` at runtime; use `import { drizzle } from 'drizzle-orm/d1'` and let `wrangler` provide the binding.
- `TypeError: Cannot read property 'prepare' of undefined` — the `"binding": "DB"` entry is missing or does not match the `env.DB` name used in code.
- `Migration failed to apply: near "...": syntax error` — apply with `--local` first to catch SQL errors before production.
- `Type instantiation is excessively deep and possibly infinite` — use `InferSelectModel<typeof users>`, prefer `.all()`/`.get()`, and enable `strict: true`; note `.get()` returns `Promise<User | undefined>`.
- `Cannot find drizzle.config.ts` — run commands from the directory containing `drizzle.config.ts`; `--local`/`--remote` targets come from `wrangler.jsonc`.
- `too many SQL variables at offset` — a single statement bound more than 100 parameters, i.e. (rows × columns) > 100.

// 35 rows × 3 columns = 105 parameters → FAILS
const books = Array(35).fill({}).map((_, i) => ({
id: i.toString(),
title: "Book",
author: "Author",
}));
await db.insert(schema.books).values(books);
// Error: too many SQL variables at offsetasync function batchInsert<T>(
db: any,
table: any,
items: T[],
chunkSize = 32
) {
for (let i = 0; i < items.length; i += chunkSize) {
await db.insert(table).values(items.slice(i, i + chunkSize));
}
}
await batchInsert(db, schema.books, books);

const D1_MAX_PARAMETERS = 100;
/**
 * Split `items` into chunks so that each chunk's bound-parameter count
 * (keys-per-item × items, plus `otherParametersCount` reserved for the rest
 * of the statement) stays within D1's limit, then run `cb` on each chunk
 * sequentially (preserving order) and return the flattened results.
 *
 * Fixes vs. original: never emits an empty chunk — the original pushed the
 * still-empty first chunk whenever the very first item overflowed the limit,
 * making `cb` run (e.g. an INSERT) with zero rows. The limit is also now a
 * parameter, defaulting to D1's 100-bound-parameter maximum.
 */
async function autochunk<T extends Record<string, unknown>, U>(
  { items, otherParametersCount = 0, maxParameters = 100 }: {
    items: T[];
    otherParametersCount?: number;
    maxParameters?: number; // D1 allows at most 100 bound parameters per statement
  },
  cb: (chunk: T[]) => Promise<U>,
) {
  const chunks: T[][] = [];
  let chunk: T[] = [];
  let chunkParameters = 0;
  for (const item of items) {
    const itemParameters = Object.keys(item).length;
    if (chunkParameters + itemParameters + otherParametersCount > maxParameters) {
      if (chunk.length) chunks.push(chunk); // never push an empty chunk
      chunkParameters = itemParameters;
      chunk = [item];
      continue;
    }
    chunk.push(item);
    chunkParameters += itemParameters;
  }
  if (chunk.length) chunks.push(chunk);
  // Run sequentially, one chunk at a time, so results keep input order.
  const results: U[] = [];
  for (const c of chunks) {
    results.push(await cb(c));
  }
  return results.flat();
}
// Usage
const inserted = await autochunk(
{ items: books },
(chunk) => db.insert(schema.books).values(chunk).returning()
);drizzle-seedseed(db, schema, { count: 10 })findFirstTypeError: Cannot read properties of undefined (reading '0')findFirstnullundefinedpnpm patchfindFirst// Works fine - returns null/undefined when not found
const result = await db.query.table.findFirst({
where: eq(schema.table.key, 'not-existing'),
});
// Throws TypeError instead of returning undefined
const [result] = await db.batch([
db.query.table.findFirst({
where: eq(schema.table.key, 'not-existing'),
}),
]);
// Error: TypeError: Cannot read properties of undefined (reading '0')# Create patch with pnpm
pnpm patch drizzle-ormnode_modules/drizzle-orm/d1/session.js// In mapGetResult method, add null check:
if (!result) {
return undefined;
}
if (this.customResultMapper) {
return this.customResultMapper([result]);
}// Instead of batch with findFirst, use separate queries
const result = await db.query.table.findFirst({
where: eq(schema.table.key, key),
});-- D1 supports this, but Drizzle has no JS equivalent
CREATE TABLE products (
id INTEGER PRIMARY KEY,
data TEXT,
price REAL GENERATED ALWAYS AS (json_extract(data, '$.price')) STORED
);
CREATE INDEX idx_price ON products(price);import { sql } from 'drizzle-orm';
// Current workaround - raw SQL only
await db.run(sql`
CREATE TABLE products (
id INTEGER PRIMARY KEY,
data TEXT,
price REAL GENERATED ALWAYS AS (json_extract(data, '$.price')) STORED
)
`);
// Or in migration file (migrations/XXXX_add_generated.sql)
CREATE INDEX idx_price ON products(price);PRAGMA foreign_keys=OFFonDelete: "cascade"// Schema with cascade relationships
export const account = sqliteTable("account", {
accountId: integer("account_id").primaryKey(),
name: text("name"),
});
export const property = sqliteTable("property", {
propertyId: integer("property_id").primaryKey(),
accountId: integer("account_id").references(() => account.accountId, {
onDelete: "cascade" // ⚠️ CASCADE DELETE
}),
});
// Change account schema (e.g., add a column)
// npx drizzle-kit generate creates:
// DROP TABLE account; -- ⚠️ Silently destroys ALL properties via cascade!
// CREATE TABLE account (...);-- Manually rewrite migration to backup related data
PRAGMA foreign_keys=OFF; -- D1 ignores this, but include anyway
-- 1. Backup related tables
CREATE TABLE backup_property AS SELECT * FROM property;
-- 2. Drop and recreate parent table
DROP TABLE account;
CREATE TABLE account (
account_id INTEGER PRIMARY KEY,
name TEXT,
-- new columns here
);
-- 3. Restore related data
INSERT INTO property SELECT * FROM backup_property;
DROP TABLE backup_property;
PRAGMA foreign_keys=ON;DROP TABLEonDelete: "cascade"onDelete: "set null""cascade"sqlTypeError: Cannot read properties of undefined (reading 'bind')sqldb.batch()sqlconst upsertSql = sql`insert into ${schema.subscriptions}
(id, status) values (${id}, ${status})
on conflict (id) do update set status = ${status}
returning *`;
// Works fine
const [subscription] = await db.all<Subscription>(upsertSql);
// Throws TypeError: Cannot read properties of undefined (reading 'bind')
const [[batchSubscription]] = await db.batch([
db.all<Subscription>(upsertSql),
]);// Use Drizzle query builder instead
const [result] = await db.batch([
db.insert(schema.subscriptions)
.values({ id, status })
.onConflictDoUpdate({
target: schema.subscriptions.id,
set: { status }
})
.returning()
]);import { SQLiteSyncDialect } from 'drizzle-orm/sqlite-core';
const sqliteDialect = new SQLiteSyncDialect();
const upsertQuery = sqliteDialect.sqlToQuery(upsertSql);
const [result] = await D1.batch([
D1.prepare(upsertQuery.sql).bind(...upsertQuery.params),
]);wrangler d1 migrations apply# Drizzle 1.0 beta generates this:
migrations/
20260116123456_random/
migration.sql
20260117234567_another/
migration.sql
# But wrangler expects this:
migrations/
20260116123456_random.sql
20260117234567_another.sqlnpx wrangler d1 migrations apply my-db --remote
# Output: "No migrations found" (even though migrations exist)// scripts/flatten-migrations.ts
import fs from 'fs/promises';
import path from 'path';

const migrationsDir = './migrations';

/**
 * Move each `migrations/<name>/migration.sql` (Drizzle 1.0-beta layout) up to
 * `migrations/<name>.sql` (the flat layout wrangler expects) and remove the
 * emptied folder.
 *
 * Fix vs. original: folders without a migration.sql no longer abort the run.
 * drizzle-kit always generates a `migrations/meta/` snapshot folder, so the
 * original `fs.rename` threw ENOENT on it and stopped before flattening
 * anything after it.
 */
async function flattenMigrations() {
  const entries = await fs.readdir(migrationsDir, { withFileTypes: true });
  for (const entry of entries) {
    if (!entry.isDirectory()) continue;
    const sqlFile = path.join(migrationsDir, entry.name, 'migration.sql');
    const flatFile = path.join(migrationsDir, `${entry.name}.sql`);
    try {
      // Move migration.sql out of the folder.
      await fs.rename(sqlFile, flatFile);
    } catch (err) {
      if ((err as NodeJS.ErrnoException).code === 'ENOENT') {
        console.warn(`Skipped: ${entry.name} (no migration.sql)`);
        continue;
      }
      throw err;
    }
    // Remove the now-empty folder.
    await fs.rmdir(path.join(migrationsDir, entry.name));
    console.log(`Flattened: ${entry.name}/migration.sql → ${entry.name}.sql`);
  }
}
flattenMigrations().catch(console.error);{
"scripts": {
"db:generate": "drizzle-kit generate",
"db:flatten": "tsx scripts/flatten-migrations.ts",
"db:migrate": "npm run db:generate && npm run db:flatten && wrangler d1 migrations apply my-db"
}
}npx drizzle-kit generate
tsx scripts/flatten-migrations.ts
npx wrangler d1 migrations apply my-db --remoteflat: true// ❌ DON'T: Use traditional transactions (fails with D1_ERROR)
await db.transaction(async (tx) => { /* ... */ });
// ✅ DO: Use D1 batch API
const results = await db.batch([
db.insert(users).values({ email: 'test@example.com', name: 'Test' }),
db.insert(posts).values({ title: 'Post', content: 'Content', authorId: 1 }),
]);
// With error handling
try {
await db.batch([...]);
} catch (error) {
console.error('Batch failed:', error);
// Manual cleanup if needed
}./scripts/check-versions.shChecking Drizzle ORM versions...
✓ drizzle-orm: 0.44.7 (latest)
✓ drizzle-kit: 0.31.5 (latest)drizzle-orm@0.45.1drizzle-kit@0.31.8better-sqlite3@12.4.6@cloudflare/workers-types@4.20251125.0/drizzle-team/drizzle-orm-docs{
"dependencies": {
"drizzle-orm": "^0.45.1"
},
"devDependencies": {
"drizzle-kit": "^0.31.8",
"@cloudflare/workers-types": "^4.20260103.0",
"better-sqlite3": "^12.5.0"
}
}