Finally this works.
By doing this, it avoids pickling: instead it opens a database connection
that is local to each process.
def pool_initalizer(*val_arr):
    """Pool worker initializer: open one DB connection per worker process.

    Runs once inside each freshly-spawned worker. The connection is kept
    in the module-global ``database`` so the DAL object never has to be
    pickled and sent through the Pool's task queue (DAL connections are
    not picklable, which caused the original PicklingError).

    val_arr: the ``initargs`` sequence from Pool, i.e. (db_name, db_path).
    """
    global database
    db_name, db_path = val_arr
    database = generate_db(db_name, db_path)
    # Parenthesized print is valid on both Python 2 and Python 3,
    # unlike the bare print statement used originally.
    print("obj id %s " % id(database))
def pooler(fpath, db_name, db_path, processes=4):
    """Fan ``generated_edoc_pooling`` out over every file under ``fpath``.

    Each worker opens its own database connection via ``pool_initalizer``,
    so no unpicklable DAL object ever crosses the process boundary.

    fpath:     root directory scanned by recursive_flist.
    db_name:   database name, forwarded to the per-worker initializer.
    db_path:   database folder, forwarded to the per-worker initializer.
    processes: number of worker processes (default 4).

    Returns the list produced by Pool.map.
    """
    lst = recursive_flist(fpath)
    # NOTE(review): the mail client wrapped this assignment onto two
    # lines, which is a SyntaxError; rejoined into one statement here.
    processor_pool = Pool(initializer=pool_initalizer,
                          initargs=[db_name, db_path],
                          processes=processes)
    enum_lst = enumerate(lst)
    return processor_pool.map(generated_edoc_pooling, enum_lst)
On Thu, Jan 12, 2012 at 10:01 PM, Massimo Di Pierro <
[email protected]> wrote:
> no. you cannot pickle a database connection.
>
> On Jan 12, 9:04 am, Phyo Arkar <[email protected]> wrote:
> > I think what i am trying to do is pickling db object.
> > This will never work , right?
> >
> > On Thu, Jan 12, 2012 at 9:10 PM, Phyo Arkar <[email protected]
> >wrote:
> >
> >
> >
> >
> >
> >
> >
> > > Hello Web2py , happy new year!
> >
> > > I am trying to use DAL outside in a script file , here is the code :
> >
> > > def generate_dbs(db_name,db_path):
> > > #casesdb = DAL( 'mysql://root@localhost/' + db_name, folder =
> db_path
> > > )
> > > #casesdb.define_table( 'email_data', migrate = True,
> *email_halfschema
> > > )
> > > #casesdb.define_table( 'loosefile_data', migrate = True,
> > > *file_halfschema )
> > > #casesdb.define_table( 'attach_data', migrate = True,
> *file_halfschema
> > > )
> > > #casesdb.define_table( 'meta_data', migrate = True,
> *metadatas_schema )
> > > #return casesdb
> > > casesdb = DAL('mysql://root@localhost/' + db_name, folder =
> db_path)
> > > casesdb.define_table('files_data', migrate = False, *files_schema)
> > > casesdb.define_table('files_meta_data', migrate = False,
> > > *files_meta_schema)
> > > casesdb.define_table('email_meta_data', migrate = False,
> > > *email_meta_schema)
> > > return casesdb
> >
> > > def pooler(fpath, db_name, db_path, processes=4):
> > > lst = recursive_flist(fpath)
> > > print lst
> > > databases = [generate_dbs(db_name,db_path) for i in
> range(1,processes)]
> > > print databases
> > > processor_pool = Pool(processes=processes)
> > > print "Pooling Ready"
> > > matcher_db_lst =list(enumerate(zip(lst,cycle(databases))))
> > > #this gets something like this
> > > [1,["path1",dbcon1],2,["path2",dbcon2],...,21,["path21",dbcon1]]
> > > return processor_pool.map(generated_edoc_pooling ,matcher_db_lst)
> >
> > > Here is the error :
> >
> > > Exception in thread Thread-2:
> > > Traceback (most recent call last):
> > > File "/usr/lib64/python2.7/threading.py", line 552, in
> __bootstrap_inner
> > > self.run()
> > > File "/usr/lib64/python2.7/threading.py", line 505, in run
> > > self.__target(*self.__args, **self.__kwargs)
> > > File "/usr/lib64/python2.7/multiprocessing/pool.py", line 313, in
> > > _handle_tasks
> > > put(task)
> > > PicklingError: Can't pickle <type 'NoneType'>: attribute lookup
> > > __builtin__.NoneType failed
> >
> > > The problem is that there are too many Nones in DAL's database object
> > > and it can't be serialized into a pickle. How can I solve this?
> > > What i want is to pool only 4 db connections however large the list is.
> >
> > > Thanks
> >
> > > Phyo.
>