Javascript CSV Strings
Parse and Stringify for CSV strings.
- API similar to the JSON parser (CSV.parse and CSV.stringify).
- Can also work row by row.
- Can also be used to parse strings from readable streams (e.g. file streams).
- Tolerant of malformed or unusual data
- ES6 support
import * as CSV from 'csv-string';
// with String
const arr = CSV.parse('a,b,c\na,b,c');
const str = CSV.stringify(arr);
// with Stream
const stream = CSV.createStream();
stream.on('data', (rows) => {
process.stdout.write(CSV.stringify(rows, ','));
})
process.stdin.pipe(stream);
Contributors
- Nicolas Thouvenin
- Stéphane Gully
- J. Baumbach
- Sam Hauglustaine
- Rick Huizinga
- doleksy1
- François Parmentier
Installation
With npm:
$ npm install csv-string
Tests
Use mocha to run the tests.
$ npm install mocha
$ mocha test
API Documentation
parse(input : String, [separator : String], [quote : String]) : Object
Converts a CSV string input to array output.
var CSV = require('csv-string'),
arr = CSV.parse('a,b,c\na,b,c');
console.log(arr);
Output:
[ [ 'a', 'b', 'c' ], [ 'a', 'b', 'c' ] ]
If separator parameter is not provided, it is automatically detected.
stringify(input : Object, [separator : String]) : String
Converts object input to a CSV string.
var CSV = require('csv-string');
console.log(CSV.stringify(['a', 'b', 'c']));
console.log(CSV.stringify([['c', 'd', 'e'], ['c','d','e']]));
console.log(CSV.stringify({a:'e', b:'f', c:'g'}));
Output:
a,b,c
c,d,e
c,d,e
e,f,g
detect(input : String) : String
Detects the best separator.
var CSV = require('csv-string');
console.log(CSV.detect('a,b,c'));
console.log(CSV.detect('a;b;c'));
console.log(CSV.detect('a|b|c'));
console.log(CSV.detect('a\tb\tc'));
Output:
,
;
|
\t
forEach(input : String, sep : String, quo : String, callback : Function)
forEach(input : String, sep : String, callback : Function)
forEach(input : String, callback : Function)
callback(row : Array, index : Number) : undefined
Calls callback for each CSV row/line. The Array passed to callback contains the fields of the current row.
var CSV = require('csv-string');
var data = 'a,b,c\nd,e,f';
CSV.forEach(data, ',', function(row, index) {
console.log('#' + index + ' : ', row);
});
Output:
#0 : [ 'a', 'b', 'c' ]
#1 : [ 'd', 'e', 'f' ]
read(input : String, sep : String, quo : String, callback : Function) : Number
read(input : String, sep : String, callback : Function) : Number
read(input : String, callback : Function) : Number
callback(row : Array) : undefined
Calls callback when a CSV row is read. The Array passed to callback contains the fields of the row.
Returns the first offset after the row.
var CSV = require('csv-string');
var data = 'a,b,c\nd,e,f';
var index = CSV.read(data, ',', function(row) {
console.log(row);
});
console.log(data.slice(index));
Output:
[ 'a', 'b', 'c' ]
d,e,f
readAll(input : String, sep : String, quo : String, callback : Function) : Number
readAll(input : String, sep : String, callback : Function) : Number
readAll(input : String, callback : Function) : Number
callback(rows : Array) : undefined
Calls callback when all CSV rows are read. The Array passed to callback contains the rows of the file.
Returns the offset of the end of parsing (generally it's the end of the input string).
var CSV = require('csv-string');
var data = 'a,b,c\nd,e,f';
var index = CSV.readAll(data, function(row) {
console.log(row);
});
console.log('-' + data.slice(index) + '-');
Output:
[ [ 'a', 'b', 'c' ], [ 'd', 'e', 'f' ] ]
--
readChunk(input : String, sep : String, quo : String, callback : Function) : Number
readChunk(input : String, sep : String, callback : Function) : Number
readChunk(input : String, callback : Function) : Number
callback(rows : Array) : undefined
Calls callback when all CSV rows are read. The last row could be ignored, because the remainder could be in another chunk.
The Array passed to callback contains the rows of the file.
Returns the offset of the end of parsing. If the last row is ignored, the offset will point to the beginning of that row.
var CSV = require('csv-string');
var data = 'a,b,c\nd,e';
var index = CSV.readChunk(data, function(row) {
console.log(row);
});
console.log('-' + data.slice(index) + '-');
Output:
[ [ 'a', 'b', 'c' ] ]
--
createStream(options : Object) : WritableStream
createStream() : WritableStream
Create a writable stream for CSV chunk. Options are :
- separator : To indicate the CSV separator. By default it is auto-detected (see the detect function).
- quote : To indicate the CSV quote character.
Example : Read CSV file from the standard input.
var stream = CSV.createStream();
stream.on('data', function (row) {
console.log(row);
}
)
process.stdin.resume();
process.stdin.setEncoding('utf8');
process.stdin.pipe(stream);
Related projects
- https://npmjs.org/browse/keyword/csv
- http://www.uselesscode.org/javascript/csv/
- https://github.com/archan937/csonv.js
Benchmark
I made a very basic benchmark to compare this project to other related projects, using file streams as input. See ./bench for source code.
The test
time node ./SCRIPTNAME.js >/dev/null
The result
| Package | Input equal Output | Time for ~1 200 000 rows |
|---|---|---|
| a-csv | almost | 0m13.903s |
| csv-streamer | yes | 0m15.599s |
| csv-stream | yes | 0m17.265s |
| csv-string | yes | 0m15.432s |
| fast-csv | no | - |
| nodecsv | yes | 0m22.129s |
