Hi again
Here are the map functions we have.
I shall give that "include_docs=true" thing a try. Is this just appended
to the url like a key?
Thanks for your help guys.
Rob
function(doc)
{
if(doc.type.toLowerCase() != 'user' && doc.jobname != undefined){
emit(doc.jobname, doc);
}
}
function(doc)
{ if(doc.type != undefined && doc._id != undefined && doc._rev !=
undefined){
var docType = doc.type.toLowerCase();
if(docType == 'job' || docType == 'npc' || docType == 'cpc' ||
docType == 'txt' || docType == 'page' || docType == 'style' || docType
== 'tag')
emit(doc._id, doc._rev);
}
}
function(doc)
{
if(doc.type.toLowerCase() == 'cpc' && doc.jobindex != undefined){
// Split up job index into an array
var ji = doc.jobindex.split('.');
// Convert from strings to numbers
for (i in ji)
ji[i] = parseInt(ji[i]);
emit([doc.jobname, ji], doc);
}
}
function(doc)
{
if(doc.type != undefined && doc._id != undefined && doc._rev !=
undefined && doc.jobname != undefined){
var docType = doc.type.toLowerCase();
if(docType == 'job' || docType == 'npc' || docType == 'cpc' ||
docType == 'txt' || docType == 'page' || docType == 'tag')
emit(doc.jobname, {"_id":doc._id, "_rev":doc._rev,
"type":doc.type});
}
}
function(doc)
{
if(doc.type.toLowerCase() == 'job' && doc.jobname != undefined){
emit(doc.jobname, doc);
}
}
function(doc)
{
if(doc.type.toLowerCase() == 'job' && doc._id != undefined &&
doc.jobname != undefined)
emit(doc._id, doc.jobname);
}
function(doc)
{
if(doc.type.toLowerCase() == 'job' && doc.jobname != undefined &&
doc.revlevels != undefined)
emit(doc.jobname, doc.revlevels);
}
function(doc)
{
if(doc.type.toLowerCase() == 'npc' && doc.jobindex != undefined &&
doc.jobname != undefined){
// Split up job index into an array
var ji = doc.jobindex.split('.');
// Convert from strings to numbers
for (i in ji)
ji[i] = parseInt(ji[i]);
emit([doc.jobname, ji], doc);
}
}
function(doc)
{
if(doc.type.toLowerCase() == 'page' && doc.jobname != undefined &&
doc.jobindex != undefined){
// Split up job index into an array
var ji = doc.jobindex.split('.');
// Convert from strings to numbers
for (i in ji)
ji[i] = parseInt(ji[i]);
emit([doc.jobname, ji], doc);
}
}
function(doc)
{
if(doc.type.toLowerCase() == 'page' && doc.jobname != undefined &&
doc.sectionID != undefined && doc.status != undefined && doc.table !=
undefined)
{
var batch = ( doc.batch == undefined ) ? "NONE" : doc.batch;
emit(doc.jobname,{ "_id":doc._id, "jobindex": doc.jobindex ,
"sectionID" : doc.sectionID , "status" : doc.status , "batch" : batch,
"table" : doc.table});
}
}
function(doc)
{
if(doc.jobindex != undefined && doc.jobname != undefined &&
doc.data != undefined){
if(doc.type.toLowerCase() == 'job' || doc.type.toLowerCase() ==
'page')
{
// Split up job index into an array
var ji = doc.jobindex.split('.');
// Convert from strings to numbers
for (i in ji)
ji[i] = parseInt(ji[i]);
emit({"jobname" : doc.jobname, "jobindex" : ji},{
"jobindex": doc.jobindex, "data" : doc.data });
}
}
}
function(doc)
{
var docType = doc.type.toLowerCase();
if( docType == 'job'){
emit([doc.customer, doc.jobname, 0, 0], doc);
} else if ( docType == 'page'){
// Add order job before page
var ji = doc.jobindex.split('.');
// Convert from strings to numbers
for (i in ji) ji[i] = parseInt(ji[i]);
emit([doc.customer, doc.jobname, ji, 1], doc);
}
}
function(doc)
{
if(doc.type.toLowerCase() == 'style' && doc.stylename != undefined)
{
emit(doc.stylename,doc);
}
}
function(doc)
{
if(doc.type.toLowerCase() == 'style')
emit(doc._id, doc.stylename);
}
function(doc)
{
if(doc.type.toLowerCase() == 'txt' && doc.jobname != undefined){
// Split up job index into an array
var ji = doc.jobindex.split('.');
// Convert from strings to numbers
for (i in ji)
ji[i] = parseInt(ji[i]);
emit([doc.jobname, ji], doc);
}
}
function(doc)
{
if(doc.type.toLowerCase() == 'user' && doc.username != undefined &&
doc.password != undefined && doc.user_level_user != undefined &&
doc.user_level_user != undefined && doc.user_level_user != undefined){
emit(doc._id, {"username" : doc.username,
"password" : doc.password,
"user_level_user" : doc.user_level_user,
"user_level_dev" : doc.user_level_dev,
"user_level_admin" : doc.user_level_admin});
}
}
On 09/02/2011 10:41, Robert Newson wrote:
One reason I can see for the view update taking so long is you have a
lot of views that emit the full 'doc' as the value. A lighter, faster
alternative is to emit null for the value and use ?include_docs=true
to get the doc at query time from the database file instead of the
view file.
That shouldn't be necessary though, and receiving this timeout means
it took a very long time to get a response. I read as much of your
view code as I could in the form above but didn't see anything
obviously contentious. If you could post your map/reduce functions in
a clearer form (i.e, without all the escaping), perhaps something will
stand out.
B.
On 9 February 2011 09:45, Dave Cottlehuber<[email protected]> wrote:
On 9 February 2011 06:20, Rob Pettefar<[email protected]> wrote:
Hi guys
I have an issue with views crashing.
This has occurred on both Linux and Windows distributions of CouchDB 1.0.1
Has this issue occurred on those platforms, on a previous version?
Sometimes the views for a particular database will break. In Futon they
appear unresponsive.
The only way round this that I have seen is to delete and rebuild the view
file for the database in question.
This seems to happen more often on the Windows version but I don't think it
is anything to do with the >4Gb file issue.
Any help you could lend would be invaluable.
Thanks
Rob
I have included the error that was logged in the couchdb log file:
[Fri, 21 Jan 2011 12:18:28 GMT] [debug] [<0.738.0>] Exit from linked pid:
{<0.742.0>,
{timeout,
{gen_server,call,
[couch_query_servers,
{get_proc,<<"javascript">>}]}}}
[Fri, 21 Jan 2011 12:18:28 GMT] [error] [<0.738.0>] ** Generic server
<0.738.0> terminating
** Last message in was {'EXIT',<0.742.0>,
{timeout,
{gen_server,call,
[couch_query_servers,
{get_proc,<<"javascript">>}]}}}
** When Server state == {group_state,undefined,<<"testdb">>,
{"../var/lib/couchdb",<<"testdb">>,
{group,
[<<"npc">>],
Hi Rob
by no means am I am expert but it looks like this is a "normal"
timeout in couch_query_servers.
ProcTimeout = list_to_integer(couch_config:get(
"couchdb", "os_process_timeout", "5000")),
If so you can try upping this from the default 5 seconds in local.ini:
[couchdb]
os_process_timeout = 5000 ; 5 seconds. for view and external servers.
& restart.
The more important question is - why should these views take so long to process?
A+
Dave