It looks as if you would do better to run your job as one or more transactions and to use the Sqlite API, with the sequence:
          sqlite3_prepare
          begin transaction
            loop
              sqlite3_bind
              sqlite3_step
              sqlite3_reset
            end
          commit
          sqlite3_finalize
You may need to split your job into multiple transactions to manage their size.

RaghavendraK 70574 wrote:
Hi All,

Pls find the sample test below,
A) Using Sqlite 3.3.17,SuSE9 version,8GB Ram,4 CPU machine,
1) Compile the test-data generator program and run it to generate the data
2) Import the data to DB
3) Compile the perf Data program and then generate perf result.
4) Result: for 100,000 records it takes 17 sec, fetching one record at a time.

regards
ragha
@@1) Program to generate the test data:

//Compile: g++ -g gen.cpp -o gen

#include <unistd.h>
#include <iostream>
#include <stdio.h>
#include <fstream>

using namespace std;


int main()
{
 fstream fout("data.sql",ios::out);
 fstream fout1("query.sql",ios::out);
  //Schema
  fout<<"create table test (serName text,doid text,primary key 
(serName,doid));"<<endl;

 for(int j=0; j<100000;j++)
 {
char* ptr = tmpnam(0); string key = "111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111.";
     key += ptr;
     fout1<<key<<endl;
    fout<<"insert into test values 
('"<<key<<"',"<<"'22222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222');"<<endl;

 }

 return 0;
}

@@2) Use the sqlite3 shell to import the data into the database file "abc".
Wrap the import in BEGIN/COMMIT to make it faster.

@@3) Program to check the performance
//VersionUsed:3.3.17
//Compile:g++ -g main.cpp -I. -L. -lsqlite3  -lpthread

#include <fstream>
#include <iostream>
#include <list>
#include <string>
#include <vector>

#include <time.h>
#include <unistd.h>

#include "sqlite3.h"

using namespace std;

int main()
{
  sqlite3* db;
  int ret = sqlite3_open("abc",&db);

  char* errmsg;
  char buf[1024];
sprintf(buf,"create temporary table ttest(column1 text,column2 text,primary key (column1,column2));"); ret = sqlite3_exec(db , buf, NULL ,NULL,&errmsg);


  sprintf(buf,"insert into ttest select * from test;");
  ret = sqlite3_exec(db , buf, NULL ,NULL,&errmsg);


  //read all the srvName from file and store it in arr and query.
  fstream fin("query.sql",ios::in);
  string data[100000];
for(int j=0;!fin.eof();j++)
   {
      fin>>data[j];
      //cout<<data[j]<<endl;
} fin.close();

   cout<<"Calling Test Now"<<endl;
   sleep(1);

   //Now Query Data.
   time_t start = time(0);
   list<string> lst;

   char* szError=0;
   const char* szTail=0;
   sqlite3_stmt* pVM;

   for(int k=0;k<100000;k++)
   {
sprintf(buf,"select * from ttest where column1 = '%s'",data[k].c_str()); ret = sqlite3_prepare(db, buf, -1, &pVM, &szTail); ret = sqlite3_step(pVM); //lst.push_back(); ret = sqlite3_finalize(pVM); }
   //
   time_t end = time(0);
   cout<<"start="<<start<<endl;
   cout<<"end="<<end<<endl;

  return 0;
}

//Result: It takes 17 sec to read 100,000 records. Can it be reduced to 1 sec?
//All help appreciated.

//*************

regards
ragha



******************************************************************************************
 This email and its attachments contain confidential information from HUAWEI, 
which is intended only for the person or entity whose address is listed above. 
Any use of the information contained herein in any way (including, but not 
limited to, total or partial disclosure, reproduction, or dissemination) by 
persons other than the intended recipient(s) is prohibited. If you receive this 
e-mail in error, please notify the sender by phone or email immediately and 
delete it!
 
*****************************************************************************************

----- Original Message -----
From: "A. Pagaltzis" <[EMAIL PROTECTED]>
Date: Saturday, June 30, 2007 9:48 pm
Subject: [sqlite] Re: In Mem Query Performance


Hi RaghavendraK,

* RaghavendraK 70574 <[EMAIL PROTECTED]> [2007-06-25 08:45]:

When the DB is opened in "in Memory mode",performance of query
does not improve. For table which has 10 columns of type Text
and each column having 128bytes data and having a total of
10000 records.

that is small enough to fit into memory, and therefore small
enough to fit into the OS disk cache. In such a case, the
performance of SQLite does not differ significantly between
in-memory and on-disk databases.

Your problem is elsewhere. If you provide your schema and
queries, someone might be able to tell you what about them makes
SQLite go so slow and how to make it faster.

Regards,
--
Aristotle Pagaltzis // <http://plasmasturm.org/>

-------------------------------------------------------------------
----------
To unsubscribe, send email to [EMAIL PROTECTED]
-------------------------------------------------------------------
----------




-----------------------------------------------------------------------------
To unsubscribe, send email to [EMAIL PROTECTED]
-----------------------------------------------------------------------------



-----------------------------------------------------------------------------
To unsubscribe, send email to [EMAIL PROTECTED]
-----------------------------------------------------------------------------

Reply via email to