Author: lukeplant
Date: 2010-11-23 07:54:58 -0600 (Tue, 23 Nov 2010)
New Revision: 14687

Modified:
   django/trunk/django/db/models/query.py
   django/trunk/django/db/models/sql/compiler.py
Log:
Fixed #14697 - sped up model instance creation by moving work outside of loops

Thanks to akaariai for the report and initial patch.
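
For anyone skimming the diff: both files get the same CPython micro-optimization, namely hoisting loop-invariant work (attribute lookups, an empty-dict check) out of the per-row loop. A rough standalone sketch of the pattern follows; Point, Source and the printed timings are illustrative stand-ins, not Django code, and the actual speedup depends on the workload:

import timeit

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

class Source(object):
    """Illustrative stand-in for a queryset-like object."""
    def __init__(self, rows, db='default'):
        self.rows = rows
        self.db = db
        self.model = Point
        self.extra_select = {}          # usually empty in practice

    def iterate_slow(self):
        for row in self.rows:
            obj = self.model(*row)      # attribute lookup on every iteration
            obj.db = self.db            # and again here
            # enumerate() and loop setup run even when the dict is empty
            for i, k in enumerate(self.extra_select):
                setattr(obj, k, row[i])
            yield obj

    def iterate_fast(self):
        db = self.db                    # cached once, outside the loop
        model = self.model
        extra_select = self.extra_select
        for row in self.rows:
            obj = model(*row)           # local read instead of attribute lookup
            obj.db = db
            if extra_select:            # skip loop setup for the common empty case
                for i, k in enumerate(extra_select):
                    setattr(obj, k, row[i])
            yield obj

if __name__ == '__main__':
    src = Source([(i, i) for i in range(1000)])
    print(timeit.timeit(lambda: list(src.iterate_slow()), number=500))
    print(timeit.timeit(lambda: list(src.iterate_fast()), number=500))

The win comes purely from doing less work per row: local-variable reads replace repeated attribute lookups, and the guards avoid building an enumerate iterator over a dict that is empty for most queries.
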

Modified: django/trunk/django/db/models/query.py
===================================================================
--- django/trunk/django/db/models/query.py      2010-11-23 03:54:13 UTC (rev 14686)
+++ django/trunk/django/db/models/query.py      2010-11-23 13:54:58 UTC (rev 14687)
@@ -266,11 +266,14 @@
                     init_list.append(field.attname)
             model_cls = deferred_class_factory(self.model, skip)
 
-        compiler = self.query.get_compiler(using=self.db)
+        # Cache db and model outside the loop
+        db = self.db
+        model = self.model
+        compiler = self.query.get_compiler(using=db)
         for row in compiler.results_iter():
             if fill_cache:
-                obj, _ = get_cached_row(self.model, row,
-                            index_start, using=self.db, max_depth=max_depth,
+                obj, _ = get_cached_row(model, row,
+                            index_start, using=db, max_depth=max_depth,
                             requested=requested, offset=len(aggregate_select),
                             only_load=only_load)
             else:
@@ -280,19 +283,21 @@
                     obj = model_cls(**dict(zip(init_list, row_data)))
                 else:
                     # Omit aggregates in object creation.
-                    obj = self.model(*row[index_start:aggregate_start])
+                    obj = model(*row[index_start:aggregate_start])
 
                 # Store the source database of the object
-                obj._state.db = self.db
+                obj._state.db = db
                 # This object came from the database; it's not being added.
                 obj._state.adding = False
 
-            for i, k in enumerate(extra_select):
-                setattr(obj, k, row[i])
+            if extra_select:
+                for i, k in enumerate(extra_select):
+                    setattr(obj, k, row[i])
 
             # Add the aggregates to the model
-            for i, aggregate in enumerate(aggregate_select):
-                setattr(obj, aggregate, row[i+aggregate_start])
+            if aggregate_select:
+                for i, aggregate in enumerate(aggregate_select):
+                    setattr(obj, aggregate, row[i+aggregate_start])
 
             yield obj
 

Modified: django/trunk/django/db/models/sql/compiler.py
===================================================================
--- django/trunk/django/db/models/sql/compiler.py       2010-11-23 03:54:13 UTC (rev 14686)
+++ django/trunk/django/db/models/sql/compiler.py       2010-11-23 13:54:58 UTC (rev 14687)
@@ -672,6 +672,7 @@
         """
         resolve_columns = hasattr(self, 'resolve_columns')
         fields = None
+        has_aggregate_select = bool(self.query.aggregate_select)
         for rows in self.execute_sql(MULTI):
             for row in rows:
                 if resolve_columns:
@@ -692,7 +693,7 @@
                                       f.column in only_load[db_table]]
                     row = self.resolve_columns(row, fields)
 
-                if self.query.aggregate_select:
+                if has_aggregate_select:
                     aggregate_start = len(self.query.extra_select.keys()) + len(self.query.select)
                     aggregate_end = aggregate_start + len(self.query.aggregate_select)
                     row = tuple(row[:aggregate_start]) + tuple([

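The compiler.py hunk applies the same idea inside results_iter(): whether the query has aggregates cannot change from row to row, so the check is computed once before the nested loops instead of re-evaluating self.query.aggregate_select for every row. A minimal sketch, with results_iter and FakeQuery as illustrative stand-ins rather than the real API:

def results_iter(query, row_batches):
    # Hoisted invariant: evaluated once, not once per row.
    has_aggregate_select = bool(query.aggregate_select)
    for rows in row_batches:
        for row in rows:
            if has_aggregate_select:
                # here the real code splits the row into plain columns
                # and aggregate values; omitted in this sketch
                pass
            yield row

class FakeQuery(object):
    aggregate_select = {}

for r in results_iter(FakeQuery(), [[(1, 2), (3, 4)]]):
    print(r)

The benefit is the same as in the query.py hunk: the per-row cost drops to a single local-variable truth test.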