I've been using the async module for this kind of thing. The above example
could be rewritten as:
```
var fs = require('fs'); // NOTE(review): unused in this snippet — confirm it's needed before removing.
var reader = require('buffered-reader');
var DataReader = reader.DataReader;
var async = require('async');

// Fold every file into a single accumulator object (map-reduce style).
// Files are processed one at a time; the same `hash` object is threaded
// through each iteration via the callback.
async.reduce(
  // Source array — replace with your actual file list.
  ['file1', 'file2'],
  // Initial accumulator: the hash built up across all files.
  {},
  // Iterator: called once per file; must invoke cb(err, hash) exactly once.
  function (hash, file, cb) {
    new DataReader(file, { encoding: "utf8" })
      .on('end', function () {
        cb(null, hash);
      })
      .on('error', cb)
      .on('line', function (line) {
        // Split on ';' directly — no need to build a fresh RegExp per line.
        var fields = line.split(';');
        var value = fields[0];
        var key = fields[1];
        hash[key] = value;
      })
      .read();
  },
  // Final callback: err is the first error encountered (if any),
  // hash is the fully accumulated object.
  function (err, hash) {
    if (err) {
      // Don't silently ignore a reader failure.
      console.error(err);
      return;
    }
    console.log(hash);
  }
);
```
The counter example is perfectly fine (and what I started out with), but
ever since switching to async I've found my code is a little more readable.
On Friday, January 18, 2013 3:57:30 AM UTC-5, Jean-Michel Hiver wrote:
>
> This is what I was looking for : thank you!
--
Job Board: http://jobs.nodejs.org/
Posting guidelines:
https://github.com/joyent/node/wiki/Mailing-List-Posting-Guidelines
You received this message because you are subscribed to the Google
Groups "nodejs" group.
To post to this group, send email to [email protected]
To unsubscribe from this group, send email to
[email protected]
For more options, visit this group at
http://groups.google.com/group/nodejs?hl=en