# convert-buddy-js

TypeScript wrapper for convert-buddy (Rust/WASM core).

> ⚠️ Experimental / In Development
> This project is under active development and may introduce breaking changes without notice.

convert-buddy-js is a TypeScript wrapper around a Rust → WASM core, designed for high throughput and low memory overhead on large files, with unified APIs for Node.js and modern browsers.

## Installation

```bash
npm install convert-buddy-js
```
## Quick Start

### Simple conversion

```ts
import { convert, convertToString } from "convert-buddy-js";
// From URL
const json = await convertToString("https://example.com/data.csv", {
outputFormat: "json",
});
// From File (browser)
const file = fileInput.files![0];
const ndjson = await convertToString(file, { outputFormat: "ndjson" });
// From string data
const csv = "name,age\nAda,36";
const out = await convertToString(csv, { outputFormat: "json" });
// Returns Uint8Array instead of string
const bytes = await convert(file, { outputFormat: "json" });
```

### Streaming conversion

```ts
import { ConvertBuddy } from "convert-buddy-js";
const buddy = new ConvertBuddy();
// Process records as they're converted
const controller = buddy.stream("large-file.csv", {
outputFormat: "ndjson",
onRecords: async (controller, records) => {
// Automatically waits for async operations
await saveToDatabase(records);
console.log(`Saved ${records.length} records`);
},
onDone: (finalStats) => {
console.log(`Processed ${finalStats.recordsOut} records in ${finalStats.durationMs}ms`);
}
});
// Optional: await final stats
const stats = await controller.done;
console.log(`Throughput: ${stats.throughputMbPerSec.toFixed(2)} MB/s`);
```

### Using a ConvertBuddy instance

```ts
import { ConvertBuddy } from "convert-buddy-js";
const buddy = new ConvertBuddy({
debug: true,
maxMemoryMB: 512,
});
// Simple conversion
const result = await buddy.convert("https://example.com/data.csv", {
outputFormat: "json",
});
// Or use streaming for large files
const controller = buddy.stream("https://example.com/data.csv", {
outputFormat: "json",
onRecords: (controller, records, stats) => {
console.log(`Progress: ${stats.throughputMbPerSec.toFixed(2)} MB/s`);
}
});
```

### Platform-specific entry points

#### Browser

```ts
import { ConvertBuddy } from "convert-buddy-js/browser";
const buddy = new ConvertBuddy();
const file = document.querySelector<HTMLInputElement>('input[type="file"]')!.files![0];
// Simple conversion
const json = await buddy.convertToString(file, {
inputFormat: "auto",
outputFormat: "json",
});
// Or stream large files
const controller = buddy.stream(file, {
outputFormat: "json",
onRecords: (ctrl, records) => {
displayRecords(records);
}
});
```

#### Node.js

```ts
import { ConvertBuddy } from "convert-buddy-js/node";
const buddy = new ConvertBuddy();
// Simple conversion
const json = await buddy.convertToString("input.csv", {
outputFormat: "json",
});
// Or stream large files
const controller = buddy.stream("input.csv", {
outputFormat: "ndjson",
onRecords: async (ctrl, records) => {
await saveToDatabase(records);
}
});
await controller.done; // Wait for completion
```

---

## API Overview

Convert Buddy offers multiple layers of control, from one-liners to fully managed streaming.

### Simple conversion functions

```ts
import { convert, convertToString } from "convert-buddy-js";
const json = await convertToString(input, { outputFormat: "json" });
```
Supported input types:
- URLs (string)
- Browser File / Blob
- Uint8Array / Node Buffer
- Raw strings
- ReadableStream
- Node.js streams
- Node.js file paths
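For example, the same one-shot helper works across several of these sources. A minimal sketch (the sample data and `data.csv` path are placeholders):

```ts
import { convertToString } from "convert-buddy-js";
import { createReadStream } from "node:fs";

// Raw string data
const fromString = await convertToString("name,age\nAda,36", { outputFormat: "json" });

// Bytes (Uint8Array / Node Buffer)
const bytes = new TextEncoder().encode("name,age\nAda,36");
const fromBytes = await convertToString(bytes, { outputFormat: "ndjson" });

// Node.js stream (a plain file path string would also work)
const fromStream = await convertToString(createReadStream("data.csv"), { outputFormat: "json" });
```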
### Managed streaming

#### Automatic backpressure management

```ts
import { ConvertBuddy } from "convert-buddy-js";
const buddy = new ConvertBuddy();
const controller = buddy.stream("large-file.csv", {
outputFormat: "ndjson",
recordBatchSize: 500, // Records per batch (default: 500)
onRecords: async (controller, records, stats, totalCount) => {
// Stream automatically waits for async operations
await processRecords(records);
console.log(`Processed ${totalCount} records so far`);
},
onData: (chunk, stats) => {
// Optional: get raw output chunks
writeToFile(chunk);
},
onError: (error) => {
console.error("Conversion failed:", error);
},
onDone: (finalStats) => {
console.log(`Complete! ${finalStats.recordsOut} records in ${finalStats.durationMs}ms`);
}
});
// Optional: await final stats
const finalStats = await controller.done;
console.log(`Throughput: ${finalStats.throughputMbPerSec.toFixed(2)} MB/s`);
```

#### Manual flow control

```ts
const controller = buddy.stream(dataSource, {
outputFormat: "json",
onRecords: (controller, records) => {
if (needsToSlowDown()) {
controller.pause();
// Later...
setTimeout(() => controller.resume(), 1000);
}
}
});
// Cancel processing
controller.cancel("User requested cancellation");
```

#### Fire-and-forget processing

```ts
// No need to await if you don't need stats
buddy.stream(fileUrl, {
outputFormat: "ndjson",
onRecords: (ctrl, records) => {
displayRecords(records);
},
onDone: () => {
console.log("Processing complete!");
}
});
// Continues processing in the background
```

#### Parser-only mode (no output)

```ts
// Set emitOutput to false to parse without generating output data
const controller = buddy.stream(csvFile, {
emitOutput: false,
onRecords: async (ctrl, records) => {
// Just process records, no output chunks generated
await validateAndStore(records);
}
});
```

#### Standalone stream function

```ts
import { stream } from "convert-buddy-js";
// Use without creating a ConvertBuddy instance
const controller = stream(input, {
inputFormat: "csv",
outputFormat: "json",
onRecords: handleRecords
});
```

### Profiling

```ts
import { ConvertBuddy } from "convert-buddy-js";
const buddy = new ConvertBuddy({
profile: true,
progressIntervalBytes: 1024 * 1024,
});
const out = await buddy.convert(file, { outputFormat: "ndjson" });
console.log(buddy.lastStats()); // Get conversion stats
```

### Low-level streaming

#### Manual chunked streaming

```ts
import { ConvertBuddy } from "convert-buddy-js";
const converter = await ConvertBuddy.create({
inputFormat: "xml",
outputFormat: "ndjson",
xmlConfig: { recordElement: "row", includeAttributes: true },
});
converter.push(new Uint8Array([/* bytes */]));
converter.push(new Uint8Array([/* bytes */]));
const final = converter.finish();
console.log(converter.stats());
```

#### Node.js Transform stream

```ts
import { createNodeTransform } from "convert-buddy-js/node";
import { createReadStream, createWriteStream } from "node:fs";
const transform = await createNodeTransform({
inputFormat: "csv",
outputFormat: "ndjson",
csvConfig: { hasHeaders: true },
profile: true,
});
createReadStream("input.csv")
.pipe(transform)
.pipe(createWriteStream("output.ndjson"));
```

#### Web Streams

```ts
import { ConvertBuddyTransformStream } from "convert-buddy-js";
const transform = new ConvertBuddyTransformStream({
inputFormat: "csv",
outputFormat: "ndjson",
});
const response = await fetch("/data.csv");
const output = response.body?.pipeThrough(transform);
```

---

## Stats & Monitoring

Every conversion provides detailed statistics for monitoring performance and progress.

### Available stats

All streaming operations provide real-time stats:

```ts
interface BuddyStats {
bytesIn: number; // Total bytes consumed
bytesOut: number; // Total bytes generated
recordsOut: number; // Total records processed
batchesOut: number; // Number of batches emitted
durationMs: number; // Elapsed time in milliseconds
throughputMbPerSec: number; // Processing speed (MB/s)
startedAt: number; // Timestamp when started
endedAt?: number; // Timestamp when completed
isPaused: boolean; // Current pause state
isDone: boolean; // Whether processing is complete
error?: string; // Error message if failed
}
```

### Accessing stats

During streaming:

```ts
const controller = buddy.stream(input, {
outputFormat: "json",
onRecords: (controller, records, stats) => {
// Stats snapshot at this point in time
console.log(`Progress: ${stats.recordsOut} records`);
console.log(`Speed: ${stats.throughputMbPerSec.toFixed(2)} MB/s`);
}
});
// Get current stats at any time
const currentStats = controller.stats();
// Wait for final stats
const finalStats = await controller.done;
console.log(`Total: ${finalStats.recordsOut} records`);
console.log(`Duration: ${finalStats.durationMs}ms`);
console.log(`Avg speed: ${finalStats.throughputMbPerSec.toFixed(2)} MB/s`);
```

After simple conversion:

```ts
const buddy = new ConvertBuddy({ profile: true });
await buddy.convert(input, { outputFormat: "json" });
const stats = buddy.lastStats();
console.log(`Processed ${stats.recordsOut} records in ${stats.durationMs}ms`);
```

### Immutable stats

All stats objects returned are frozen (immutable) to prevent accidental modification:

```ts
const stats = controller.stats();
stats.recordsOut = 999; // Error: Cannot modify frozen object
```
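If you need a mutable copy (for example, to aggregate totals across runs), spread the frozen snapshot into a new object. A tiny sketch (`previousTotal` is a hypothetical accumulator):

```ts
const snapshot = { ...controller.stats() }; // plain, mutable copy
snapshot.recordsOut += previousTotal.recordsOut; // hypothetical accumulator
```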
---
## Format Detection & Structure Analysis

Convert Buddy can automatically detect input formats and analyze data structure.

### Format detection

```ts
import { detectFormat } from "convert-buddy-js";
// From any input source
const format = await detectFormat(fileOrUrlOrStream);
console.log(format); // "csv" | "json" | "ndjson" | "xml" | "unknown"
// With options
const formatWithHint = await detectFormat(input, {
maxBytes: 256 * 1024, // Sample up to 256KB
preferredFormat: "csv" // Hint if ambiguous
});
```
Supported inputs:
- Files (browser File / Blob)
- URLs (string)
- Streams (ReadableStream, Node.js streams)
- Buffers (Uint8Array, Buffer)
- Strings
### Structure analysis

Analyze the structure of your data:

```ts
import { detectStructure } from "convert-buddy-js";
const structure = await detectStructure(input);
console.log(structure);
// {
// format: "csv",
// confidence: 0.95,
// details: {
// hasHeaders: true,
// delimiter: ",",
// quote: '"',
// estimatedColumns: 5,
// estimatedRows: 1000,
// sampleFields: ["name", "email", "age", "city", "country"]
// }
// }
```
For CSV:
- Detects delimiter (`,`, `;`, `\t`, `|`)
- Identifies quote character
- Determines if headers are present
- Samples field names
For XML:
- Identifies record elements
- Detects namespace usage
- Finds attribute patterns
For JSON/NDJSON:
- Determines if newline-delimited or array
- Analyzes nesting depth
- Samples field names
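One way to use these details is to feed them back into an explicit conversion. A sketch assuming a re-readable input (File, string, or URL) and a CSV result shaped like the example above:

```ts
import { detectStructure, convertToString } from "convert-buddy-js";

const structure = await detectStructure(input);

if (structure.format === "csv") {
  const json = await convertToString(input, {
    inputFormat: "csv",
    outputFormat: "json",
    csvConfig: {
      delimiter: structure.details.delimiter,
      hasHeaders: structure.details.hasHeaders,
    },
  });
}
```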
### Auto-detection during conversion

```ts
// Input format defaults to "auto"
const json = await buddy.convertToString(unknownFile, {
outputFormat: "json"
});
// Explicitly use auto-detection
const controller = buddy.stream(input, {
inputFormat: "auto", // Detect automatically
outputFormat: "ndjson",
onRecords: handleRecords
});
```

---

## Real-World Examples

### CSV import into a database

```ts
import { ConvertBuddy } from "convert-buddy-js";
const buddy = new ConvertBuddy();
const controller = buddy.stream("large-export.csv", {
outputFormat: "ndjson",
recordBatchSize: 1000, // Process 1000 records at a time
onRecords: async (controller, records, stats) => {
// Batch insert to database
await db.users.insertMany(records);
console.log(`Inserted ${stats.recordsOut} records (${stats.throughputMbPerSec.toFixed(2)} MB/s)`);
},
onError: (error) => {
console.error("Import failed:", error);
// Rollback or cleanup
},
onDone: (finalStats) => {
console.log(`Import complete! ${finalStats.recordsOut} total records in ${finalStats.durationMs}ms`);
}
});
await controller.done; // Wait for completion
```

### Validating uploaded records

```ts
const validRecords = [];
const invalidRecords = [];
const controller = buddy.stream(uploadedFile, {
outputFormat: "json",
onRecords: (ctrl, records) => {
for (const record of records) {
if (validateRecord(record)) {
validRecords.push(record);
} else {
invalidRecords.push(record);
}
}
},
onDone: () => {
console.log(`Valid: ${validRecords.length}, Invalid: ${invalidRecords.length}`);
}
});
```

### Progress reporting

```ts
const controller = buddy.stream(file, {
outputFormat: "ndjson",
onRecords: (ctrl, records, stats) => {
// Update progress bar
const progressPercent = (stats.bytesIn / file.size) * 100;
updateProgressBar(progressPercent, stats.throughputMbPerSec);
}
});
```

### Server-side export endpoint

```ts
// Node.js server endpoint
app.get('/export/users', async (req, res) => {
const buddy = new ConvertBuddy();
res.setHeader('Content-Type', 'text/csv');
res.setHeader('Content-Disposition', 'attachment; filename="users.csv"');
const controller = buddy.stream(getUsersStream(), {
outputFormat: "csv",
onData: (chunk) => {
res.write(chunk);
},
onDone: () => {
res.end();
},
onError: (error) => {
res.status(500).send(error.message);
}
});
});
```

### Convert and download in the browser

```ts
// Convert and download in browser
import { ConvertBuddy } from "convert-buddy-js/browser";
async function convertAndDownload(file: File, outputFormat: string) {
const buddy = new ConvertBuddy();
const chunks = [];
const controller = buddy.stream(file, {
outputFormat,
onData: (chunk) => {
chunks.push(chunk);
},
onDone: (stats) => {
// Create blob and download
const blob = new Blob(chunks, { type: getMimeType(outputFormat) });
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = `converted.${outputFormat}`;
a.click();
URL.revokeObjectURL(url);
console.log(`Converted in ${stats.durationMs}ms`);
}
});
}
```

---

## Formats & Configuration

### Supported formats
- csv
- xml
- ndjson
- json
- auto
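CSV appears as both an input and an output format elsewhere in this README; assuming the formats above can be paired freely, converting a JSON array string to CSV would look like this sketch:

```ts
import { convertToString } from "convert-buddy-js";

const json = '[{"name":"Ada","age":36},{"name":"Lin","age":29}]';
const csv = await convertToString(json, {
  inputFormat: "json",
  outputFormat: "csv",
});
```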
### CSV options

```ts
{
csvConfig: {
delimiter: ",",
quote: '"',
hasHeaders: true,
trimWhitespace: false,
}
}
```

### XML options

```ts
{
xmlConfig: {
recordElement: "row",
trimText: true,
includeAttributes: true,
}
}
```

### General options

```ts
{
chunkTargetBytes: 1024 * 1024,
parallelism: 4,
profile: true,
debug: false,
}
```
---
## Transformations & Field Mapping

Convert Buddy supports field-level transformations during conversion via the `transform` option. Example:

```ts
const out = await buddy.convert(csvString, {
outputFormat: "json",
transform: {
mode: "augment",
fields: [
{ targetFieldName: "full_name", compute: "concat(first, ' ', last)" },
{ targetFieldName: "age", coerce: { type: "i64" }, defaultValue: 0 },
],
onMissingField: "null",
onCoerceError: "null",
},
});
```

Runtime compute helpers depend on the Rust/WASM core build. For complex transforms, consider pre- or post-processing in JS, as sketched below.
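For example, records can be reshaped in an `onRecords` callback instead (post-processing in JS). A minimal sketch, reusing the field names from the transform example; `writeOut` is a placeholder for whatever sink you use:

```ts
const controller = buddy.stream(csvFile, {
  emitOutput: false, // parse only; we build the output ourselves
  onRecords: async (ctrl, records) => {
    const reshaped = records.map((r) => ({
      full_name: `${r.first} ${r.last}`,
      age: Number(r.age ?? 0),
    }));
    await writeOut(reshaped); // placeholder sink
  },
});
await controller.done;
```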
---
## How it works
- Rust core implements streaming parsers and conversion
- WASM bindings generated via wasm-bindgen
- TypeScript wrapper exposes high-level APIs and stream adapters
### What ships in the npm package
- Prebuilt WASM binaries
- Compiled JS / TypeScript output
Rust sources, demos, and benchmarks live in the repository but are not published in the npm package.
---
## Benchmarks (repository)

```bash
cd packages/convert-buddy-js
npm run bench
npm run bench:check
npm run bench:competitors
```