Massimo,
Sure, just downloaded the nightly source zip and managed to reproduce with
a simple app. Note it does seem to have to be using pyodbc & MS SQL Server,
it doesn't happen if you're using SQLite.
Model
db.define_table('person',
Field('name', 'string', notnull=True)
)
db.define_table('pet',
Field('name', 'string', notnull=True),
Field('person_id', 'reference person', notnull=True)
)
Controllers
def setup():
# initial population of database
db(db.pet.id>0).delete()
db(db.person.id>0).delete()
person_id = db.person.insert(name='Charlie Brown')
db.pet.insert(name='Snoopy', person_id=person_id)
person_id = db.person.insert(name='John Arbuckle')
db.pet.insert(name='Garfield', person_id=person_id)
db.pet.insert(name='Odie', person_id=person_id)
pet_owners = db(db.pet.person_id==db.person.id).select()
return dict(pet_owners=pet_owners)
def RAMcache():
pet_owners = db(db.person.id > 0).select(
db.person.id,
db.person.name,
db.pet.id,
db.pet.name,
join=[
db.pet.on(db.person.id==db.pet.person_id)
],
cache=(cache.ram, 3600), cacheable=True
)
return dict(pet_owners=pet_owners, last_sql=last_sql)
def DISKcache_Failing():
# Fails because multiple fields share the same name (id & name)
pet_owners = db(db.person.id > 0).select(
db.person.id,
db.person.name,
db.pet.id,
db.pet.name,
join=[
db.pet.on(db.person.id==db.pet.person_id)
],
cache=(cache.disk, 3600), cacheable=True
)
return dict(pet_owners=pet_owners)
def DISKcache_Working():
# Works fine since all field names are unique
pet_owners = db(db.person.id > 0).select(
db.person.id,
db.person.name,
db.pet.id.with_alias('pet_id'),
db.pet.name.with_alias('pet_name'),
join=[
db.pet.on(db.person.id==db.pet.person_id)
],
cache=(cache.disk, 3600), cacheable=True
)
return dict(pet_owners=pet_owners)
Attempting to load the DISKcache_Failing controller a second time results
in an error:
<type 'exceptions.TypeError'> ("cannot create 'pyodbc.Row' instances",
<type 'pyodbc.Row'>, (((u'id', <type 'int'>, None, 10, 10, 0, False),
(u'name', <type 'str'>, None, 512, 512, 0, False), (u'id', <type 'int'>,
None, 10, 10, 0, False), (u'name', <type 'str'>, None, 512, 512, 0,
False)), {u'id': 2, u'name': 3}, 1, u'Charlie Brown', 1, u'Snoopy')) Version
web2py™ Version 2.16.1-stable+timestamp.2017.11.14.05.54.25 Traceback
1.
2.
3.
4.
5.
6.
7.
8.
9.
10.
11.
12.
13.
14.
15.
16.
17.
18.
19.
20.
21.
22.
Traceback (most recent call last):
File
"C:\Users\Brian\Documents\source_code\web2py_src\web2py\gluon\restricted.py",
line 219, in restricted
exec(ccode, environment)
File
"C:/Users/Brian/Documents/source_code/web2py_src/web2py/applications/caching_demo/controllers/cache_demo.py"
<http://localhost:8000/admin/default/edit/caching_demo/controllers/cache_demo.py>,
line 48, in <module>
File
"C:\Users\Brian\Documents\source_code\web2py_src\web2py\gluon\globals.py", line
419, in <lambda>
self._caller = lambda f: f()
File
"C:/Users/Brian/Documents/source_code/web2py_src/web2py/applications/caching_demo/controllers/cache_demo.py"
<http://localhost:8000/admin/default/edit/caching_demo/controllers/cache_demo.py>,
line 43, in DISKcache
cache=(cache.disk, 3600), cacheable=True
File
"C:\Users\Brian\Documents\source_code\web2py_src\web2py\gluon\packages\dal\pydal\objects.py",
line 2250, in select
return adapter.select(self.query, fields, attributes)
File
"C:\Users\Brian\Documents\source_code\web2py_src\web2py\gluon\packages\dal\pydal\adapters\base.py",
line 761, in select
cache, sql, fields, attributes, colnames)
File
"C:\Users\Brian\Documents\source_code\web2py_src\web2py\gluon\packages\dal\pydal\adapters\base.py",
line 752, in _cached_select
time_expire)
File "C:\Users\Brian\Documents\source_code\web2py_src\web2py\gluon\cache.py",
line 444, in __call__
item = self.storage.get(key)
File "C:\Users\Brian\Documents\source_code\web2py_src\web2py\gluon\cache.py",
line 401, in get
return self[key]
File "C:\Users\Brian\Documents\source_code\web2py_src\web2py\gluon\cache.py",
line 350, in __getitem__
value = pickle.load(val_file)
TypeError: ("cannot create 'pyodbc.Row' instances", <type 'pyodbc.Row'>,
(((u'id', <type 'int'>, None, 10, 10, 0, False), (u'name', <type 'str'>, None,
512, 512, 0, False), (u'id', <type 'int'>, None, 10, 10, 0, False), (u'name',
<type 'str'>, None, 512, 512, 0, False)), {u'id': 2, u'name': 3}, 1, u'Charlie
Brown', 1, u'Snoopy'))
While running DISKcache_Working multiple times works just fine. The
difference seems to be that the failing version ends up with two "id"
and two "name" fields, while the working version has all unique field
names.
pet_owners :
person.idperson.namepet_idpet_name
1 Charlie Brown 1 Snoopy
2 John Arbuckle 2 Garfield
2 John Arbuckle 3 Odie
On Tuesday, April 24, 2018 at 4:44:46 PM UTC-5, Massimo Di Pierro wrote:
>
> please post a minimal model to reproduce the problem.
>
> On Tuesday, 3 April 2018 17:15:25 UTC-5, Brian M wrote:
>>
>> Further investigation shows that cache.redis plays nicely with pyodbc *as
>> long as you have all uniquely named fields in your select*. The selects
>> that I was working with involved multiple tables that each had a "Name"
>> field that was being selected. If I add .with_alias() to provide unique
>> names then caching worked but that's also not really convenient. Is this
>> the way it is supposed to work?
>>
>> On Tuesday, April 3, 2018 at 2:16:30 PM UTC-5, Anthony wrote:
>>>
>>> You can also see what happens if you do not set cacheable=True.
>>>
>>> On Tuesday, April 3, 2018 at 3:15:55 PM UTC-4, Anthony wrote:
>>>>
>>>> It should work. Could be a bug specific to using pyodbc. Have you tried
>>>> it with other databases/drivers? Feel free to file a PyDAL issue.
>>>>
>>>> Anthony
>>>>
>>>> On Tuesday, April 3, 2018 at 12:40:50 PM UTC-4, Brian M wrote:
>>>>>
>>>>> I've been playing with caching and looking through the mailing list
>>>>> and have a bit of confusion about whether or not you can actually cache
>>>>> DAL
>>>>> Selects with anything other than cache.ram due to issues with what
>>>>> can/cannot be pickled. When I use
>>>>>
>>>>> db(...).select(...,cache=(cache.ram, 300), cacheable=True)
>>>>>
>>>>> Caching works just fine. But if I try it with cache.disk or
>>>>> cache.redis instead then I get errors which seem to be related to not
>>>>> being
>>>>> able to unpickle the cached select results.
>>>>>
>>>>> type 'exceptions.TypeError'> ("cannot create 'pyodbc.Row' instances",
>>>>> <type 'pyodbc.Row'>, ....)
>>>>>
>>>>>
>>>>> This, admittedly old, post seems to suggest that cache.redis should be
>>>>> pretty much drop-in for cache.ram but that's not my experience.
>>>>> https://groups.google.com/forum/?fromgroups#!searchin/web2py/caching$20model$20data/web2py/Tn4TiLPScII/jtMR1KtT4V8J
>>>>>
>>>>>
>>>>> So does the ability to cache DAL Selects really only exist for
>>>>> cache.ram or am I missing something?
>>>>>
>>>>>
>>>>> Brian
>>>>>
>>>>
--
Resources:
- http://web2py.com
- http://web2py.com/book (Documentation)
- http://github.com/web2py/web2py (Source code)
- https://code.google.com/p/web2py/issues/list (Report Issues)
---
You received this message because you are subscribed to the Google Groups
"web2py-users" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/d/optout.