This is a library for DBF files, supporting operations such as reading, writing, and updating DBF files.

To get started, simply install the module using npm:
```shell
$ npm install dbf-node
```

and then import it:

```javascript
const { Dbf } = require("dbf-node");
```
The module exports the Dbf class, which has the following shape:
```typescript
/** Represents a DBF file. */
class Dbf {
    /** Opens an existing DBF file. */
    static open(path, options);

    /** Creates a new DBF file with no records. */
    static create(path, fields, options);

    /** Reads the specified records from this DBF file. */
    readRecords(startIndex, endIndex);

    /** Writes the specified records to this DBF file. */
    write(records);

    /** Updates the specified records in this DBF file. */
    update(records);
}
```
```javascript
import { DBFFile } from 'dbffile';

async function iterativeRead() {
    let dbf = await DBFFile.open('<full path to .dbf file>');
    console.log(`DBF file contains ${dbf.recordCount} records.`);
    console.log(`Field names: ${dbf.fields.map(f => f.name).join(', ')}`);
    for await (const record of dbf) console.log(record);
}
```
```javascript
import { DBFFile } from 'dbffile';

async function batchRead() {
    let dbf = await DBFFile.open('<full path to .dbf file>');
    console.log(`DBF file contains ${dbf.recordCount} records.`);
    console.log(`Field names: ${dbf.fields.map(f => f.name).join(', ')}`);
    let records = await dbf.readRecords(100); // batch-reads up to 100 records, returned as an array
    for (let record of records) console.log(record);
}
```
```javascript
import { DBFFile } from 'dbffile';

async function batchWrite() {
    let fieldDescriptors = [
        { name: 'fname', type: 'C', size: 255 },
        { name: 'lname', type: 'C', size: 255 }
    ];
    let records = [
        { fname: 'Joe', lname: 'Bloggs' },
        { fname: 'Mary', lname: 'Smith' }
    ];
    let dbf = await DBFFile.create('<full path to .dbf file>', fieldDescriptors);
    console.log('DBF file created.');
    await dbf.appendRecords(records);
    console.log(`${records.length} records added.`);
}
```