If only those announcements came with actual reproducible tests... life 
would be easier for us :-P
I'd guess your use case is either 1k rows with 50 columns or a really slow 
Python server, because I can't reproduce that kind of "advertised 
magnitude gains". I see just a 4x difference, which is quite expected since 
Rows carries around lots of helpers.

CPython 2.7.3
speed_executesql() total=0.846748 avg=0.008467
speed_normal_rows() total=4.214647 avg=0.042146
speed_rows_with_cacheable() total=3.705196 avg=0.037052
speed_executesql_with_colnames() total=0.844287 avg=0.008443
speed_rows_with_simple_processor() total=0.891577 avg=0.008916

pypy 1.9.0
speed_executesql() total=3.344496 avg=0.033445
speed_normal_rows() total=3.832903 avg=0.038329
speed_rows_with_cacheable() total=3.472066 avg=0.034721
speed_executesql_with_colnames() total=2.808536 avg=0.028085
speed_rows_with_simple_processor() total=3.268108 avg=0.032681

Attached the test code.

-- 
Resources:
- http://web2py.com
- http://web2py.com/book (Documentation)
- http://github.com/web2py/web2py (Source code)
- https://code.google.com/p/web2py/issues/list (Report Issues)
--- 
You received this message because you are subscribed to the Google Groups 
"web2py-users" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.
import timeit
from gluon.dal import DAL, Field
import datetime

# Benchmark harness setup: builds an in-memory SQLite database with one
# small table and 1000 rows, used by all the speed_* functions below.
times = 100  # iterations per benchmarked statement
print("Times are for %i iterations" % times)

# Prepare the test database with 1k records.
print('Preparing....')

db = DAL('sqlite:memory')

# Three-column test table: string, integer, datetime.
db.define_table('test_table',
    Field('f_a'),
    Field('f_b', 'integer'),
    Field('f_c', 'datetime')
)

db.commit()
# Start from a clean slate in case the table already held rows.
db(db.test_table.id > 0).delete()

# Load 1k records into the test table.
for a in range(1000):
    db.test_table.insert(f_a='a', f_b=a, f_c=datetime.datetime.now())

tb = db.test_table

fields = [tb.f_a, tb.f_b, tb.f_c]

def bench(cmd, imprt, number=None):
    """Time *cmd* with timeit and print its total and per-call average.

    Args:
        cmd: statement to benchmark, as a string.
        imprt: setup statement (typically an import), as a string.
        number: iteration count; defaults to the module-level ``times``
            so existing two-argument callers keep their behavior.

    Returns:
        Total elapsed seconds as a float.
    """
    n = times if number is None else number
    t = timeit.Timer(cmd, imprt)
    s = t.timeit(number=n)
    # NOTE: "%02f" means min-width 2 with zero padding, which never binds
    # for these values, so it prints exactly like plain "%f" (6 decimals).
    print("%s total=%02f avg=%02f" % (cmd, s, (s / n)))
    return s

def myprocessor(rows, fields, colnames, blob_decode=True, cacheable=False):
    """Minimal select() processor: map each raw row to a colname->value dict.

    Bypasses Rows construction entirely; ``fields``, ``blob_decode`` and
    ``cacheable`` are accepted for interface compatibility but ignored.
    """
    result = []
    for row in rows:
        result.append(dict(zip(colnames, row)))
    return result

def speed_executesql():
    """Benchmark body: raw SQL via executesql(), rows returned as dicts."""
    db.executesql("select f_a, f_b, f_c from test_table", as_dict=True)

def speed_executesql_with_colnames():
    """Benchmark body: raw SQL with explicit colnames, rows as dicts."""
    cols = ['test_table.f_a', 'test_table.f_b', 'test_table.f_c']
    db.executesql("select f_a, f_b, f_c from test_table",
                  colnames=cols, as_dict=True)

def speed_normal_rows():
    """Benchmark body: full DAL select() producing a regular Rows object."""
    db(db.test_table.id > 0).select()

def speed_rows_with_cacheable():
    """Benchmark body: DAL select() with cacheable=True (lighter Rows)."""
    db(db.test_table.id > 0).select(cacheable=True)

def speed_rows_with_simple_processor():
    """Benchmark body: DAL select() routed through the plain-dict processor."""
    db(db.test_table.id > 0).select(processor=myprocessor)

def full_bench():
    """Run every benchmark once, printing each total and per-call average.

    The ``from __main__ import ...`` setup strings let timeit resolve the
    benchmark functions defined in this script.
    """
    print('started bench')
    # Return values of bench() are printed inside it; nothing to keep here.
    bench('speed_executesql()', 'from __main__ import speed_executesql')
    bench('speed_normal_rows()', 'from __main__ import speed_normal_rows')
    bench('speed_rows_with_cacheable()', 'from __main__ import speed_rows_with_cacheable')
    bench('speed_executesql_with_colnames()', 'from __main__ import speed_executesql_with_colnames')
    bench('speed_rows_with_simple_processor()', 'from __main__ import speed_rows_with_simple_processor, myprocessor')
    
# Only run the benchmarks when executed as a script; timeit re-imports
# __main__ for its setup strings, so this guard prevents re-entry.
if __name__ == '__main__':
    full_bench()

Reply via email to