# V8 JIT Optimization Patterns
V8 JIT optimization patterns for writing high-performance JavaScript in Next.js server internals. Use when writing or reviewing hot-path code in app-render, stream-utils, routing, caching, or any per-request code path. Covers hidden classes / shapes, monomorphic call sites, inline caches, megamorphic deopt, closure allocation, array packing, and profiling with --trace-opt / --trace-deopt.
npx skill4agent add vercel/next.js v8-jit

// GOOD — consistent shape, single hidden class transition chain
class RequestContext {
url: string
method: string
headers: Record<string, string>
startTime: number
cached: boolean
constructor(url: string, method: string, headers: Record<string, string>) {
this.url = url
this.method = method
this.headers = headers
this.startTime = performance.now()
this.cached = false // always initialize, even defaults
}
}

// BAD — conditional property addition creates multiple hidden classes
class RequestContext {
constructor(url, method, headers, options) {
this.url = url
this.method = method
if (options.timing) {
this.startTime = performance.now() // shape fork!
}
if (options.cache) {
this.cached = false // another shape fork!
}
this.headers = headers
}
}

Always initialize every property in the constructor — even to a default like `null`, `undefined`, or `false`. Never `delete` a property, and never add one after construction via `obj.newProp = x`; both force a hidden-class transition.

// GOOD — same key order, shares hidden class
const a = { type: 'static', value: 1 }
const b = { type: 'dynamic', value: 2 }
// BAD — different key order, different hidden classes
const a = { type: 'static', value: 1 }
const b = { value: 2, type: 'dynamic' }

Example in practice: the `Span` class in `src/trace/trace.ts` (fields `name`, `parentId`, `attrs`, `status`, `id`, `_start` set via `now`) initializes every field in its constructor in a fixed order, so all `Span` instances share one hidden class.

| IC State | Shapes Seen | Speed |
|---|---|---|
| Monomorphic | 1 | Fastest — single direct check |
| Polymorphic | 2–4 | Fast — linear search through cases |
| Megamorphic | 5+ | Slow — hash-table lookup, no inlining |
// GOOD — always called with the same argument shape
function processChunk(chunk: Uint8Array): void {
// chunk is always Uint8Array → monomorphic
}
// BAD — called with different types at the same call site
function processChunk(chunk: Uint8Array | Buffer | string): void {
// IC becomes polymorphic/megamorphic
}

Note: `Buffer` and `Uint8Array` count as distinct shapes at a call site, and so do `null` and `undefined` — mixing any of them pollutes the inline cache.

// Entry point dispatches once
function handleStream(stream: ReadableStream | Readable) {
if (stream instanceof ReadableStream) {
return handleWebStream(stream) // monomorphic call
}
return handleNodeStream(stream) // monomorphic call
}

This dispatch-once pattern appears in `stream-ops.ts`.

// BAD — closure allocated for every request
function handleRequest(req) {
stream.on('data', (chunk) => processChunk(chunk, req.id))
}
// GOOD — shared listener, request context looked up by stream
const requestIdByStream = new WeakMap()
function onData(chunk) {
const id = requestIdByStream.get(this)
if (id !== undefined) processChunk(chunk, id)
}
function processChunk(chunk, id) {
/* ... */
}
function handleRequest(req) {
requestIdByStream.set(stream, req.id)
stream.on('data', onData)
}

// BEST — pre-allocate the callback as a method on a context object
class StreamProcessor {
id: string
constructor(id: string) {
this.id = id
}
handleChunk(chunk: Uint8Array) {
processChunk(chunk, this.id)
}
}

// BAD — allocates a new object per iteration
for (const item of items) {
doSomething({ key: item.key, value: item.value })
}
// GOOD — reuse a mutable scratch object
const scratch = { key: '', value: '' }
for (const item of items) {
scratch.key = item.key
scratch.value = item.value
doSomething(scratch)
}

Examples in practice: `node-stream-helpers.ts` keeps its `encoder` and `BUFFER_TAGS` constants at module scope, and `bufferIndexOf` delegates to the built-in `Buffer.indexOf` rather than allocating per call.

| Element Kind | Description | Speed |
|---|---|---|
| `PACKED_SMI_ELEMENTS` | Small integers only, no holes | Fastest |
| `PACKED_DOUBLE_ELEMENTS` | Numbers only, no holes | Fast |
| `PACKED_ELEMENTS` | Mixed/objects, no holes | Moderate |
| `HOLEY_*` variants | Any of above with holes | Slower (extra bounds check) |
Stay out of `HOLEY` kinds and keep arrays packed (`PACKED_ELEMENTS` or better). Avoid `new Array(n)`, which is holey from the start — build with `[]` plus `push()`, or `Array.from({ length: n }, initFn)`. Never write past the end (`arr[100] = x` punches holes), and mixed literals like `[1, 'two', {}]` demote the array to `PACKED_ELEMENTS` at best.

// GOOD — packed SMI array
const indices: number[] = []
for (let i = 0; i < n; i++) {
indices.push(i)
}
// BAD — holey from the start
const indices = new Array(n)
for (let i = 0; i < n; i++) {
indices[i] = i
}

In practice: `accumulateStreamChunks` in `app-render.tsx` declares `const staticChunks: Array<Uint8Array> = []` and grows it with `push()`, keeping the array packed.

Other deopt triggers: leaking `arguments` (only read it as `arguments[i]` with an integer `i`, or prefer rest parameters), `eval`, `with`, and `for...in` over objects (prefer `Object.keys()` / `Object.entries()`).

// GOOD — predictable: always returns same type
function getStatus(code: number): string {
if (code === 200) return 'ok'
if (code === 404) return 'not found'
return 'error'
}
// BAD — returns different types
function getStatus(code: number): string | null | undefined {
if (code === 200) return 'ok'
if (code === 404) return null
// implicitly returns undefined
}

A `switch` over a discriminant keeps dispatch predictable, but watch the object shapes feeding it:

// WATCH OUT — `node.type` IC can go megamorphic if many shapes hit one site
function render(node) {
switch (node.type) {
case 'div':
return { tag: 'div', children: node.children }
case 'span':
return { tag: 'span', text: node.text }
case 'img':
return { src: node.src, alt: node.alt }
// Many distinct node layouts can make this dispatch site polymorphic
}
}

Loading `node.type` from many distinct node layouts drives the property-load IC megamorphic. Also watch per-call allocation in hot paths: repeated `Buffer.concat()`, `string.indexOf()` over growing strings, and especially `new RegExp()` created inside a function body.

// GOOD — regex hoisted to module scope
const ROUTE_PATTERN = /^\/api\//
function isApiRoute(path: string): boolean {
return ROUTE_PATTERN.test(path)
}
// BAD — regex recreated on every call
function isApiRoute(path: string): boolean {
return /^\/api\//.test(path) // V8 may or may not cache this
}

Prefer `Map`/`Set` over plain objects for dynamic keys: writing `obj[key] = true` with varying keys pushes a plain object into dictionary mode, while `Map` and `Set` are built for dynamic keys. For constant lookup tables, `Object.freeze({...})` helps V8 treat the shape as stable.

# Trace which functions get optimized
node --trace-opt server.js 2>&1 | grep "my-function-name"
# Trace deoptimizations (critical for finding perf regressions)
node --trace-deopt server.js 2>&1 | grep "my-function-name"
# Combined: see the full opt/deopt lifecycle
node --trace-opt --trace-deopt server.js 2>&1 | tee /tmp/v8-trace.log
# Show IC state transitions (verbose)
node --trace-ic server.js 2>&1 | tee /tmp/ic-trace.log
# Print optimized code (advanced)
node --print-opt-code --code-comments server.js

# Profile a production build
node --cpu-prof --cpu-prof-dir=/tmp/profiles \
node_modules/.bin/next build
# Profile the server during a benchmark
node --cpu-prof --cpu-prof-dir=/tmp/profiles \
node_modules/.bin/next start &
# ... run benchmark ...
# Analyze in Chrome DevTools: chrome://inspect → Open dedicated DevTools
# Quick trace-deopt check on a specific test
node --trace-deopt $(which jest) --runInBand test/path/to/test.ts \
  2>&1 | grep -i "deopt" | head -50

V8 natives (the `%`-prefixed intrinsics) are available when running with `--allow-natives-syntax`:

function hotFunction(x) {
return x + 1
}
// Force optimization
%PrepareFunctionForOptimization(hotFunction)
hotFunction(1)
hotFunction(2)
%OptimizeFunctionOnNextCall(hotFunction)
hotFunction(3)
// Check optimization status
// 1 = optimized, 2 = not optimized, 3 = always optimized, 6 = maglev
console.log(%GetOptimizationStatus(hotFunction))

Hot-path review checklist: no `delete`; `Map`/`Set` for dynamic keys; no leaked `arguments`; no `try/catch` wrapping the hottest inner loop; batch with `join()` / `Buffer.concat()` instead of repeated concatenation; avoid mixed return types like `string | null | undefined`.

Related skills: $dce-edge, $runtime-debug