Since the input JSON could be really large, here is a Node.js streaming version (using the stream-json package):
#!/usr/bin/env node
// Stream a JSON array from stdin and emit one `name,creditcard` CSV row per
// element on stdout. stream-json parses the array incrementally, so memory
// stays constant regardless of input size.
const { Transform } = require('stream');
const StreamArray = require('stream-json/utils/StreamArray');

const stream = StreamArray.make();

/**
 * Make a value safe for use as a CSV field (RFC 4180).
 * Fields containing a quote, comma, or line break are wrapped in double
 * quotes, with embedded quotes escaped by doubling them (`""`), not by a
 * backslash — backslash-escaping produces CSV most parsers reject.
 * @param {*} str - field value; null/undefined become the empty string
 * @returns {string} CSV-safe field text
 */
function escapeCSV(str) {
  if (str == null) {
    return '';
  }
  // Coerce explicitly: `.replace` would throw on non-string values (numbers, etc.).
  const s = String(str);
  return /[",\r\n]/.test(s) ? `"${s.replace(/"/g, '""')}"` : s;
}

/**
 * Object-mode Transform: converts each { key, value } pair emitted by
 * StreamArray into one CSV line ending in '\n'.
 */
class CsvStream extends Transform {
  constructor() {
    super({ objectMode: true });
  }

  _transform(chunk, enc, cb) {
    const { name, creditcard } = chunk.value;
    const line = [name, creditcard].map(escapeCSV).join(',');
    this.push(`${line}\n`);
    cb();
  }
}

process.stdin.pipe(stream.input);
stream.output
  .pipe(new CsvStream())
  .pipe(process.stdout);
Nice! There is also csv-write-stream, which could save you some code :)
Are you sure you want to hide this comment? It will become hidden in your post, but will still be visible via the comment's permalink.
Hide child comments as well
Confirm
For further actions, you may consider blocking this person and/or reporting abuse
We're a place where coders share, stay up-to-date and grow their careers.
Since the input JSON could be really large, here is a Node.js streaming version (using the stream-json package):
Nice! There is also csv-write-stream, which could save you some code :)