# @blecsd/media
Render images, GIFs, PNGs, and video in the terminal with @blecsd/media. Covers GIF/PNG parsing, ANSI rendering, image widgets, video playback, and W3M overlay.
Install the skill:

    npx skill4agent add kadajett/blecsd-skill blecsd-media

Install the package with `pnpm add @blecsd/media` (requires `blecsd >= 0.6.0`).

## Imports

import { parseGIF, frameToRGBA } from '@blecsd/media/gif';
import { parsePNG, extractPixels } from '@blecsd/media/png';
import { renderToAnsi, scaleBitmap, rgbTo256Color } from '@blecsd/media/render';
import { createImage, setImageData, play, pause } from '@blecsd/media/widgets/image';
import { createVideo, detectVideoPlayer } from '@blecsd/media/widgets/video';
import { createW3MOverlay } from '@blecsd/media/overlay';

Or import the namespaced modules from the package root:

import { gif, png, ansiRender, imageWidget, videoWidget, w3m } from '@blecsd/media';

## GIF parsing

import { parseGIF, frameToRGBA, validateGIFSignature } from '@blecsd/media/gif';
// Read file
const buffer = await fs.readFile('animation.gif');
// Validate
if (!validateGIFSignature(buffer)) throw new Error('Not a GIF');
// Parse
const result = parseGIF(buffer);
// result: { header, frames[], globalColorTable }
// Get RGBA pixels for a frame
const rgba = frameToRGBA(result.frames[0]);
// rgba: Uint8Array of [r, g, b, a, r, g, b, a, ...]

GIF module API:

- parseGIF(buffer)
- parseGIFHeader(buffer)
- validateGIFSignature(buffer)
- frameToRGBA(frame)
- deinterlace(pixels, width, height)
- parseColorTable(buffer, offset, size)
- readSubBlocks(buffer, offset)
- decompressLZW(data, minCodeSize)
- createBitReader(data)
- readCode(reader, codeSize)

## PNG parsing

import { parsePNG, extractPixels, parseChunks } from '@blecsd/media/png';
const buffer = await fs.readFile('image.png');
const result = parsePNG(buffer);
// result: { header (IHDR), pixels, chunks[] }
// Or parse step by step
const chunks = parseChunks(buffer);
const header = parseIHDR(chunks[0].data);
const pixels = extractPixels(chunks);

PNG module API:

- parsePNG(buffer)
- parseChunks(buffer)
- parseIHDR(data)
- reconstructFilters(scanlines, width, height, bitDepth)
- paethPredictor(a, b, c)
- extractPixels(chunks)
- parsePLTE(buffer)

## ANSI rendering

import { renderToAnsi, scaleBitmap, cellMapToString, rgbTo256Color } from '@blecsd/media/render';
// Render bitmap to ANSI cells
const cellMap = renderToAnsi(bitmap, {
width: 40, // Target width in terminal columns
height: 20, // Target height in terminal rows
mode: 'halfblock' // 'halfblock' | 'braille' | 'ascii'
});
// Convert to string for output
const output = cellMapToString(cellMap);
// Scale a bitmap
const scaled = scaleBitmap(bitmap, 40, 20);
// Color conversion
const color256 = rgbTo256Color(255, 128, 0); // RGB to 256-color palette
const lum = rgbLuminance(255, 128, 0); // Compute luminance
const char = luminanceToChar(lum); // Map to ASCII char
// Blend colors
const blended = blendWithBackground([255, 128, 0], [0, 0, 0]);

Render modes: 'halfblock', 'braille', 'ascii'

## Image widget

import { createImage, setImageData, play, pause, stop } from '@blecsd/media/widgets/image';
// Create image widget
const eid = createImage(world, {
position: { x: 0, y: 0 },
dimensions: { width: 40, height: 20 },
renderMode: 'halfblock',
});
// Set image data (from parsed GIF/PNG)
setImageData(eid, {
frames: gifResult.frames.map(f => frameToRGBA(f)),
width: gifResult.header.width,
height: gifResult.header.height,
frameCount: gifResult.frames.length,
delays: gifResult.frames.map(f => f.delay),
});
// Animation control (for animated GIFs)
play(eid);
pause(eid);
stop(eid);
// Set specific frame
setFrame(eid, 3);
// Get current state
const bitmap = getImageBitmap(eid);
const cellMap = getImageCellMap(eid);
// Aspect ratio helper
const { width, height } = calculateAspectRatioDimensions(
sourceWidth, sourceHeight,
targetWidth, targetHeight
);
// Cleanup
clearImageCache(eid);
clearAllImageCaches();
// Type guard
if (isImage(world, eid)) { /* ... */ }

## Video widget

import { createVideo, detectVideoPlayer, getVideoPlaybackState } from '@blecsd/media/widgets/video';
// Auto-detect available video player
const player = detectVideoPlayer(); // 'mpv' | 'mplayer' | undefined
// Create video widget
const eid = createVideo(world, {
position: { x: 0, y: 0 },
dimensions: { width: 80, height: 24 },
source: '/path/to/video.mp4',
player: player, // 'mpv' | 'mplayer'
autoplay: true,
});
// Check state
const state = getVideoPlaybackState(eid); // 'stopped' | 'playing' | 'paused'
// Get detected player
const detectedPlayer = getVideoPlayer(eid);
// Build command args (for manual control)
const args = buildMpvArgs({ source: 'video.mp4', width: 80, height: 24 });
const args2 = buildMplayerArgs({ source: 'video.mp4' });
const args3 = buildPlayerArgs({ source: 'video.mp4', player: 'mpv' });
// Send commands to running player
sendPauseCommand(handle);
sendSeekCommand(handle, 10); // Seek 10 seconds
// Type guard
if (isVideo(world, eid)) { /* ... */ }

## W3M overlay

import { createW3MOverlay } from '@blecsd/media/overlay';
const eid = createW3MOverlay(world, {
position: { x: 0, y: 0 },
dimensions: { width: 40, height: 20 },
imagePath: '/path/to/image.png',
});

## Example: play an animated GIF in the terminal

import { parseGIF, frameToRGBA } from '@blecsd/media/gif';
import { renderToAnsi, cellMapToString, scaleBitmap } from '@blecsd/media/render';
const buffer = await fs.readFile('cat.gif');
const gif = parseGIF(buffer);
for (const frame of gif.frames) {
const rgba = frameToRGBA(frame);
const bitmap = { data: rgba, width: gif.header.width, height: gif.header.height };
const scaled = scaleBitmap(bitmap, 40, 20);
const cellMap = renderToAnsi(scaled, { mode: 'halfblock' });
console.log(cellMapToString(cellMap));
}

## Example: render a PNG with braille cells

import { parsePNG } from '@blecsd/media/png';
import { renderToAnsi, cellMapToString } from '@blecsd/media/render';
const buffer = await fs.readFile('photo.png');
const png = parsePNG(buffer);
const cellMap = renderToAnsi(
{ data: png.pixels, width: png.header.width, height: png.header.height },
{ width: 60, height: 30, mode: 'braille' }
);
console.log(cellMapToString(cellMap));

Keywords: braille, halfblock, ascii, scaleBitmap, detectVideoPlayer(), clearImageCache(eid), clearAllImageCaches(), @blecsd/media/gif, @blecsd/media