Parse and Stringify for CSV strings.
- API similar to the JSON parser (CSV.parse and CSV.stringify).
- Can also work row by row.
- Can also be used to parse strings from readable streams (e.g. file streams).
With npm:
$ npm install csv-string
Use mocha to run the tests.
$ npm install mocha
$ mocha test
Converts a CSV string input
to array output.
var CSV = require('csv-string'),
arr = CSV.parse('a,b,c\na,b,c');
console.log(arr);
Output:
[ [ 'a', 'b', 'c' ], [ 'a', 'b', 'c' ] ]
If separator parameter is not provided, it is automatically detected.
Converts object input
to a CSV string.
var CSV = require('csv-string');
console.log(CSV.stringify(['a', 'b', 'c']));
console.log(CSV.stringify([['c', 'd', 'e'], ['c','d','e']]));
console.log(CSV.stringify({a:'e', b:'f', c:'g'}));
Output:
a,b,c
c,d,e
c,d,e
e,f,g
Detects the best separator.
var CSV = require('csv-string');
console.log(CSV.detect('a,b,c'));
console.log(CSV.detect('a;b;c'));
console.log(CSV.detect('a|b|c'));
console.log(CSV.detect('a\tb\tc'));
Output:
,
;
|
\t
callback(row : Array, index : Number) : undefined
Calls callback
for each CSV row/line. The Array passed to callback contains the fields of the current row.
var CSV = require('csv-string');
var data = 'a,b,c\nd,e,f';
CSV.forEach(data, ',', function(row, index) {
console.log('#' + index + ' : ', row);
});
Output:
#0 : [ 'a', 'b', 'c' ]
#1 : [ 'd', 'e', 'f' ]
callback(row : Array) : undefined
Calls callback
when a CSV row is read. The Array passed to callback contains the fields of the row.
Returns the first offset after the row.
var CSV = require('csv-string');
var data = 'a,b,c\nd,e,f';
var index = CSV.read(data, ',', function(row) {
console.log(row);
});
console.log(data.slice(index));
Output:
[ 'a', 'b', 'c' ]
d,e,f
callback(rows : Array) : undefined
Calls callback
when all CSV rows are read. The Array passed to callback contains the rows of the file.
Returns the offset of the end of parsing (generally it's the end of the input string).
var CSV = require('csv-string');
var data = 'a,b,c\nd,e,f';
index = CSV.readAll(data, function(row) {
console.log(row);
});
console.log('-' + data.slice(index) + '-');
Output:
[ [ 'a', 'b', 'c' ], [ 'd', 'e', 'f' ] ]
--
callback(rows : Array) : undefined
Calls callback
when all CSV rows are read. The last row could be ignored, because the remainder could be in another chunk.
The Array passed to callback contains the rows of the file.
Returns the offset of the end of parsing. If the last row is ignored, the offset will point to the beginning of the row.
var CSV = require('csv-string');
var data = 'a,b,c\nd,e';
index = CSV.readChunk(data, function(row) {
console.log(row);
});
console.log('-' + data.slice(index) + '-');
Output:
[ [ 'a', 'b', 'c' ] ]
--
Create a writable stream for CSV chunk. Options are :
- separator : To indicate the CSV separator. By default it is auto-detected (see the detect function)
Example : Read CSV file from the standard input.
var stream = CSV.createStream();
stream.on('data', function (row) {
console.log(row);
}
)
process.stdin.resume();
process.stdin.setEncoding('utf8');
process.stdin.pipe(stream);
- https://npmjs.org/browse/keyword/csv
- http://www.uselesscode.org/javascript/csv/
- https://github.com/archan937/csonv.js
I made a very basic benchmark to compare this project to other related projects, using file streams as input. See ./bench
for source code.
time node ./SCRIPTNAME.js >/dev/null
Package | Input equal Output | Time for ~1 200 000 rows |
---|---|---
a-csv | almost | 0m13.903s |
csv-streamer | yes | 0m15.599s |
csv-stream | yes | 0m17.265s |
csv-string | yes | 0m15.432s |
fast-csv | no | - |
nodecsv | yes | 0m22.129s |