# Convert CSV File to JSON

Package: https://www.npmjs.com/package/convert-csv-to-json

This guide will help you transition from the synchronous API to the new asynchronous API in csvToJson.
```js
// After (async) - using Promises
csvToJson.getJsonFromCsvAsync('input.csv')
  .then(json => console.log(json))
  .catch(err => console.error('Error:', err));
```
```js
// After (async) - using async/await
async function readCsv() {
  try {
    const json = await csvToJson.getJsonFromCsvAsync('input.csv');
    console.log(json);
  } catch (err) {
    console.error('Error:', err);
  }
}
```
2. File generation:
```js
// Before (sync)
csvToJson.generateJsonFileFromCsv('input.csv', 'output.json');
// After (async) - using Promises
csvToJson.generateJsonFileFromCsvAsync('input.csv', 'output.json')
.then(() => console.log('File created'))
.catch(err => console.error('Error:', err));
// After (async) - with chained configuration
await csvToJson
  .fieldDelimiter(',')
  .formatValueByType()
  .getJsonFromCsvAsync('input.csv');
```
## Common Patterns and Best Practices
1. Processing multiple files:
```js
// Sequential processing
async function processFiles(files) {
const results = [];
for (const file of files) {
const json = await csvToJson.getJsonFromCsvAsync(file);
results.push(json);
}
return results;
}
// Parallel processing
async function processFilesParallel(files) {
const promises = files.map(file =>
csvToJson.getJsonFromCsvAsync(file)
);
return Promise.all(promises);
}
// Robust error handling
async function processWithRetry(file, maxRetries = 3) {
for (let i = 0; i < maxRetries; i++) {
try {
return await csvToJson.getJsonFromCsvAsync(file);
} catch (err) {
if (i === maxRetries - 1) throw err;
await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1)));
}
}
}
// Processing CSV from network request
async function processCsvFromApi() {
const response = await fetch('https://api.example.com/data.csv');
const csvText = await response.text();
return csvToJson.getJsonFromCsvAsync(csvText, { raw: true });
}
async function* processLargeCsv(filePath) { const fileStream = createReadStream(filePath); const lines = createInterface({ input: fileStream, crlfDelay: Infinity });
const headers = await lines.next(); for await (const line of lines) { const json = await csvToJson .getJsonFromCsvAsync(headers.value + ‘\n’ + line, { raw: true }); yield json[0]; } }
// Usage
for await (const record of processLargeCsv('large.csv')) {
  console.log(record);
}
```
2. Custom data transformation:
```js
async function processWithTransform(file) {
const json = await csvToJson
.formatValueByType()
.getJsonFromCsvAsync(file);
return json.map(record => ({
...record,
timestamp: new Date().toISOString(),
processed: true
}));
}
  // (Fragment — the enclosing validation function's header was lost.)
  return json.filter(record => {
    // Validate required fields
    if (!record.id || !record.name) return false;
    // Validate data types
    if (typeof record.age !== 'number') return false;
    return true;
  });
}
```
Use `Promise.all()` for parallel processing of independent files.