On Apr 12, 2012, at 3:27 PM, binadam wrote:

> On Thursday, April 12, 2012 12:24:42 PM UTC-7, binadam wrote:
> Hello all,
> 
> I used the following cookbook for an asynchronous setup:
> http://michael.merickel.org/2011/6/21/tictactoe-and-long-polling-with-pyramid/
>  
> but ran into problems.
> 
> First I'd be interested to know if anyone has successfully done this (in 
> production environment) using the following packages (or something similar):
> pyramid
> gunicorn + gevent
> postgresql (with psycopg2 made green)
> 
> I should also add sqlalchemy 

I'm playing with this right now.

It's working for me, here are the two things I'm observing so far.

1. it might be better to use NullPool with create_engine(), not sure yet.   
This eliminates all connection pooling.   I'm not sure if there's some kind of 
twinge with using a psycopg2 connection in a greenlet that it wasn't created 
in, the statement at 
http://initd.org/psycopg/docs/advanced.html#support-to-coroutine-libraries 
doesn't seem to say this, but I am seeing it "hang" more often if I don't use 
NullPool.

2. then it runs great, but watching this go, I can see that there might be a 
greater chance of old fashioned deadlocks occurring, it's not clear yet.  Try 
running "ps -ef | grep post" or select from pg_stat_activity to see if anything 
is just locking.

script is attached

with gevent I can run through about 55K rows of work in 53 seconds, with 
threads it takes 66 seconds.





> 
> 
> -- 
> You received this message because you are subscribed to the Google Groups 
> "pylons-discuss" group.
> To view this discussion on the web visit 
> https://groups.google.com/d/msg/pylons-discuss/-/SvFCNJmz7A0J.
> To post to this group, send email to [email protected].
> To unsubscribe from this group, send email to 
> [email protected].
> For more options, visit this group at 
> http://groups.google.com/group/pylons-discuss?hl=en.

-- 
You received this message because you are subscribed to the Google Groups 
"pylons-discuss" group.
To post to this group, send email to [email protected].
To unsubscribe from this group, send email to 
[email protected].
For more options, visit this group at 
http://groups.google.com/group/pylons-discuss?hl=en.






--
You received this message because you are subscribed to the Google Groups "pylons-discuss" group.
To view this discussion on the web visit https://groups.google.com/d/msg/pylons-discuss/-/SvFCNJmz7A0J.
To post to this group, send email to [email protected].
To unsubscribe from this group, send email to [email protected].
For more options, visit this group at http://groups.google.com/group/pylons-discuss?hl=en.

from sqlalchemy import Column, Integer, create_engine, ForeignKey, \
    String, Numeric

from sqlalchemy.orm import Session, relationship

from sqlalchemy.ext.declarative import declarative_base
import random
from decimal import Decimal

# Shared declarative base; all mapped classes below register against its metadata.
Base = declarative_base()

class Employee(Base):
    """Root of a joined-table inheritance hierarchy.

    The concrete subtype of each row is recorded in the 'type'
    discriminator column (see __mapper_args__).
    """
    __tablename__ = 'employee'

    id = Column(Integer, primary_key=True)
    name = Column(String(100), nullable=False)
    type = Column(String(50), nullable=False)

    # 'type' drives polymorphic loading; each subclass supplies its own
    # polymorphic_identity.  NOTE(review): no identity is configured for
    # Employee itself, so the base class is presumably never instantiated
    # directly in this script -- confirm if reused elsewhere.
    __mapper_args__ = {'polymorphic_on':type}

class Boss(Employee):
    """Employee subtype stored via joined-table inheritance in 'boss'."""
    __tablename__ = 'boss'

    # Shares its primary key with the parent employee row.
    id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
    golf_average = Column(Numeric)

    # Rows with type == 'boss' load as Boss instances.
    __mapper_args__ = {'polymorphic_identity':'boss'}

class Grunt(Employee):
    """Employee subtype stored via joined-table inheritance in 'grunt'.

    Each Grunt optionally references a Boss through employer_id.
    """
    __tablename__ = 'grunt'

    # Shares its primary key with the parent employee row.
    id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
    savings = Column(Numeric)

    employer_id = Column(Integer, ForeignKey('boss.id'))

    # Explicit primaryjoin: presumably needed because Boss inherits from
    # Employee, giving SQLAlchemy more than one candidate join path between
    # the two mappings -- confirm against the SQLAlchemy version in use.
    employer = relationship("Boss", backref="employees", 
                                primaryjoin=Boss.id==employer_id)

    # Rows with type == 'grunt' load as Grunt instances.
    __mapper_args__ = {'polymorphic_identity':'grunt'}

def runit(engine):
    """Exercise the DB: insert Bosses and Grunts, then read back a report.

    Creates 1000 Boss rows and 10000 Grunt rows, assigns each batch of
    100 Grunts a Boss (querying as it goes so reads are interleaved with
    the pending writes -- autoflush fires on each query), flushes, and
    finally builds an in-memory report pairing each Grunt with its Boss.

    :param engine: a bound SQLAlchemy Engine; one Session (and hence one
        connection) is opened against it for the duration of the call.
    :return: list of (grunt name, savings, boss name, golf average)
        tuples.  (The original computed this and discarded it.)

    Note: the work is only flushed, never committed, matching the
    original benchmark's behavior.
    """
    sess = Session(engine)
    try:
        # create 1000 Boss objects.
        bosses = [
            Boss(
                name="Boss %d" % i, 
                golf_average=Decimal(random.randint(40, 150))
            )
            for i in xrange(1000)
        ]

        sess.add_all(bosses)

        # create 10000 Grunt objects.
        grunts = [
            Grunt(
                name="Grunt %d" % i,
                savings=Decimal(random.randint(5000000, 15000000) / 100)
            )
            for i in xrange(10000)
        ]

        # Assign each Grunt a Boss.  Look them up in the DB
        # to simulate a little bit of two-way activity with the 
        # DB while we populate.  Autoflush occurs on each query.
        while grunts:
            boss = sess.query(Boss).\
                        filter_by(name="Boss %d" % (101 - len(grunts) / 100)).\
                        first()
            for grunt in grunts[0:100]:
                grunt.employer = boss

            grunts = grunts[100:]

        sess.flush()

        report = []

        # load all the Grunts, print a report with their name, stats,
        # and their bosses' stats.
        for grunt in sess.query(Grunt):
            report.append((
                            grunt.name, 
                            grunt.savings, 
                            grunt.employer.name, 
                            grunt.employer.golf_average
                        ))
        return report
    finally:
        # Release the connection even on error: the original never closed
        # the Session, leaking one connection per call (costly under
        # NullPool, where nothing is recycled).
        sess.close()

def timeit(fn):
    """Decorator that prints the wall-clock duration of each call to *fn*.

    Fixes over the original: the wrapped function's return value is now
    passed through instead of being discarded, and functools.wraps
    preserves fn's __name__/docstring on the wrapper.  The printed
    message is unchanged (whole seconds, as before); the single-argument
    print(...) form behaves identically under Python 2 and 3.
    """
    import functools
    import time

    @functools.wraps(fn)
    def time_fn(*arg, **kw):
        now = time.time()
        result = fn(*arg, **kw)
        total = time.time() - now
        print("total time for %s: %d" % (fn.__name__, total))
        return result
    return time_fn

@timeit
def run_with_gevent():
    """Rebuild the schema, then run five concurrent runit() greenlets."""
    # Patch psycopg2 so its socket waits cooperate with gevent's hub.
    from psyco_gevent import make_psycopg_green
    make_psycopg_green()

    from sqlalchemy.pool import NullPool
    # NullPool: every checkout opens a fresh connection, nothing is reused.
    engine = create_engine(
        'postgresql+psycopg2://scott:tiger@localhost/test',
        echo=True,
        poolclass=NullPool,
    )
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)

    import gevent
    greenlets = [gevent.spawn(runit, engine) for _ in xrange(5)]
    for greenlet in greenlets:
        greenlet.join()

@timeit
def run_with_threads():
    """Rebuild the schema, then run five runit() calls on OS threads."""
    from sqlalchemy.pool import NullPool
    # NullPool: every checkout opens a fresh connection, nothing is reused.
    engine = create_engine(
        'postgresql+psycopg2://scott:tiger@localhost/test',
        echo=True,
        poolclass=NullPool,
    )
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)

    import threading
    workers = [
        threading.Thread(target=runit, args=(engine, ))
        for _ in xrange(5)
    ]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()

if __name__ == '__main__':
    # Guard the entry point: the original ran the benchmark as a module-level
    # side effect, hitting the database on mere import of this file.
    run_with_gevent()
    #run_with_threads()

Reply via email to