Parse and Stringify for CSV strings. It works like the JSON object, but for CSV. It can also work row by row, and since it can parse strings, it can be used to parse files or streams too.
With npm do:
$ npm install csv-string
Use mocha to run the tests.
$ npm install mocha
$ mocha test
Parses the input string and converts it to an array of rows.
var CSV = require('csv-string'),
arr = CSV.parse('a,b,c\na,b,c');
console.log(arr);
Output:
[ [ 'a', 'b', 'c' ], [ 'a', 'b', 'c' ] ]
If the separator parameter is not provided, it is detected automatically.
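Both modes can be seen side by side in this sketch (assuming parse, like forEach and read below, accepts an optional separator as its second argument):
var CSV = require('csv-string');

// Separator detected automatically (a semicolon here)
console.log(CSV.parse('a;b;c'));
// Separator given explicitly
console.log(CSV.parse('a;b;c', ';'));
Both calls should print:
[ [ 'a', 'b', 'c' ] ]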
Converts input to a CSV string.
var CSV = require('csv-string');
console.log(CSV.stringify(['a', 'b', 'c']));
console.log(CSV.stringify([['c', 'd', 'e'], ['c','d','e']]));
console.log(CSV.stringify({a:'e', b:'f', c:'g'}));
Output:
a,b,c
c,d,e
c,d,e
e,f,g
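stringify should also take care of quoting; a minimal sketch, assuming it applies standard CSV quoting to fields that contain the separator:
var CSV = require('csv-string');

// A field containing the separator should come out quoted,
// e.g. something like "a,b",c (assuming RFC 4180-style quoting)
console.log(CSV.stringify(['a,b', 'c']));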
Detects the best separator.
var CSV = require('csv-string');
console.log(CSV.detect('a,b,c'));
console.log(CSV.detect('a;b;c'));
console.log(CSV.detect('a|b|c'));
console.log(CSV.detect('a\tb\tc'));
Output:
,
;
|
\t
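detect combines naturally with the row-oriented functions below. A small sketch that detects the separator once and reuses it:
var CSV = require('csv-string');
var data = 'a;b;c\nd;e;f';

// Detect the separator once, then reuse it for row-by-row processing
var sep = CSV.detect(data);
CSV.forEach(data, sep, function(row, index) {
  console.log('#' + index + ' : ', row);
});
Output:
#0 : [ 'a', 'b', 'c' ]
#1 : [ 'd', 'e', 'f' ]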
callback(row : Array, index : Number) : undefined
Calls callback for each CSV row/line. The Array passed to callback contains the fields of the current row.
var CSV = require('csv-string');
var data = 'a,b,c\nd,e,f';
CSV.forEach(data, ',', function(row, index) {
console.log('#' + index + ' : ', row);
});
Output:
#0 : [ 'a', 'b', 'c' ]
#1 : [ 'd', 'e', 'f' ]
callback(row : Array) : undefined
Calls callback when a CSV row has been read. The Array passed to callback contains the fields of the row.
Returns the offset of the first character after the row.
var CSV = require('csv-string');
var data = 'a,b,c\nd,e,f';
var index = CSV.read(data, ',', function(row) {
console.log(row);
});
console.log(data.slice(index));
Output:
[ 'a', 'b', 'c' ]
d,e,f
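Because read returns the offset just past the row, it can drive a row-by-row loop. A minimal sketch, assuming the returned offset is relative to the string passed in (as the slice above suggests):
var CSV = require('csv-string');
var data = 'a,b,c\nd,e,f\ng,h,i';
var offset = 0;

// Consume one row per iteration, advancing past each parsed row
while (offset < data.length) {
  offset += CSV.read(data.slice(offset), ',', function(row) {
    console.log(row);
  });
}
This should print each row in turn: [ 'a', 'b', 'c' ], then [ 'd', 'e', 'f' ], then [ 'g', 'h', 'i' ].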
callback(rows : Array) : undefined
Calls callback once all the CSV rows have been read. The Array passed to callback contains the rows of the file.
Returns the offset of the end of parsing (generally the end of the input string).
var CSV = require('csv-string');
var data = 'a,b,c\nd,e,f';
var index = CSV.readAll(data, function(rows) {
console.log(rows);
});
console.log('-' + data.slice(index) + '-');
Output:
[ [ 'a', 'b', 'c' ], [ 'd', 'e', 'f' ] ]
--
callback(rows : Array) : undefined
Calls callback once all the complete CSV rows in the chunk have been read. The last row may be ignored, because its remainder could be in another chunk.
The Array passed to callback contains the rows of the chunk.
Returns the offset of the end of parsing. When the last row is ignored, the offset points at the beginning of that row.
var CSV = require('csv-string');
var data = 'a,b,c\nd,e';
var index = CSV.readChunk(data, function(rows) {
console.log(rows);
});
console.log('-' + data.slice(index) + '-');
Output:
[ [ 'a', 'b', 'c' ] ]
-d,e-
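This offset makes incremental parsing straightforward: keep the unparsed remainder and prepend it to the next chunk. A sketch (the chunk boundaries are just illustrative):
var CSV = require('csv-string');
var remainder = '';

// Feed each incoming chunk, re-parsing any leftover from the previous one
function onChunk(chunk) {
  var buf = remainder + chunk;
  var offset = CSV.readChunk(buf, function(rows) {
    rows.forEach(function(row) {
      console.log(row);
    });
  });
  remainder = buf.slice(offset);
}

onChunk('a,b,c\nd,e'); // prints [ 'a', 'b', 'c' ]; 'd,e' is kept as remainder
onChunk(',f\n'); // completes the second row: [ 'd', 'e', 'f' ]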
Creates a writable stream for CSV chunks. Options are :
- separator : the CSV separator. By default it is detected automatically (see the detect function)
Example : read CSV data from the standard input.
var stream = CSV.createStream();
stream.on('data', function (row) {
console.log(row);
});
process.stdin.resume();
process.stdin.setEncoding('utf8');
process.stdin.pipe(stream);
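The same stream should also work for files; a sketch reading a hypothetical data.csv (assuming UTF-8 content):
var fs = require('fs');
var CSV = require('csv-string');

var stream = CSV.createStream();
stream.on('data', function(row) {
  console.log(row);
});

// 'data.csv' is a hypothetical file name
fs.createReadStream('data.csv', { encoding: 'utf8' }).pipe(stream);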
See also :
- https://npmjs.org/browse/keyword/csv
- http://www.uselesscode.org/javascript/csv/
- https://github.com/archan937/csonv.js
As for files and streams, many other packages already exist. To compare them, I made a very basic benchmark (see ./bench for the source code):
time node ./SCRIPTNAME.js >/dev/null
| Package | Input equals output | Time for ~1 200 000 rows |
|---|---|---|
| a-csv | almost | 0m13.903s |
| csv-streamer | yes | 0m15.599s |
| csv-stream | yes | 0m17.265s |
| csv-string | yes | 0m15.432s |
| fast-csv | no | - |
| nodecsv | yes | 0m22.129s |

