I'm having a problem with streams2 transform. I have a stream which reads
from a file and transforms the csv data to json. the transform works fine
but at present if I have an incomplete chunk of data my 'unshift' command
moves the data to my transform output rather than back to my transform
input !
Transform code :
var Transform = require('stream').Transform,
util = require('util')
module.exports = function (columns, opts, cb) { return new
CSVStream(columns, opts, cb) }
module.exports.CSVStream = CSVStream
function CSVStream (columns, opts, cb) {
opts = opts || {}
Transform.call(this, columns, opts)
this.delimiter = opts.delimiter || ','
this.newline = opts.newline || '\n'
this.columns = columns;
}
util.inherits(CSVStream, Transform)
CSVStream.prototype._transform = function (chunk, encoding, done)
{
try {
this._parse(chunk)
done()
} catch (err) {
done(err)
}
}
CSVStream.prototype._parse = function (data)
{
console.log('_parse start');
var i=0,j=0;
var fieldsArray;
var output={};
var rowsArray = data.toString().split(this.newline);//split entire buffer
into rows
for (i=0;i<rowsArray.length;i++){
fieldsArray = rowsArray[i].toString().split(this.delimiter);//split
each row into fields
//console.log('row:'+rowsArray[i]);
if (fieldsArray.length<this.columns.length){//check we have an entire
row
console.log('unshift:'+ rowsArray[i]);
this.unshift(rowsArray[i]);//push incomplete data back onto the
beginning of the next buffer to arrive
}
else{
for (j=0;j<this.columns.length;j++){
key = this.columns[j];
output[key] = fieldsArray[j];
}
//console.log('push');
//console.log(JSON.stringify(output));
this.push(JSON.stringify(output));
}
}
console.log('_parse end');
}
CSVStream.prototype.end = function (buf) {
Transform.prototype.end.call(this, buf)
}
Stream/pipe code
var options = {};
options.delimiter = '\t';
var columns =
['geonameid','name','asciiname','alternatenames','latitude','longitude','feature
class','feature code','country code','cc2','admin1 code','admin2
code','admin3 code','admin4
code','population','elevation','demmeters','timezone','modification date'];
var fstream = fs.createReadStream(source),
parser = csv(columns, options);
parser.on('readable', function () {
var line = parser.read();
console.log('line:'+line.toString());
})
parser.on('end', function () {
console.log('end');
})
// now pump some data into it (and pipe it somewhere else)
fstream.pipe(parser);
--
--
Job Board: http://jobs.nodejs.org/
Posting guidelines:
https://github.com/joyent/node/wiki/Mailing-List-Posting-Guidelines
You received this message because you are subscribed to the Google
Groups "nodejs" group.
To post to this group, send email to [email protected]
To unsubscribe from this group, send email to
[email protected]
For more options, visit this group at
http://groups.google.com/group/nodejs?hl=en?hl=en
---
You received this message because you are subscribed to the Google Groups
"nodejs" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.