dabla commented on code in PR #38715:
URL: https://github.com/apache/airflow/pull/38715#discussion_r1551139206


##########
airflow/providers/common/sql/hooks/sql.py:
##########
@@ -545,35 +541,19 @@ def insert_rows(
             conn.commit()
 
             with closing(conn.cursor()) as cur:
-                if executemany:
-                    for chunked_rows in chunked(rows, commit_every):
-                        values = list(
-                            map(
-                                lambda row: tuple(map(lambda cell: 
self._serialize_cell(cell, conn), row)),
-                                chunked_rows,
-                            )
+                for chunked_rows in chunked(rows, commit_every):
+                    values = list(
+                        map(
+                            lambda row: tuple(map(lambda cell: 
self._serialize_cell(cell, conn), row)),
+                            chunked_rows,
                         )
-                        sql = self._generate_insert_sql(table, values[0], 
target_fields, replace, **kwargs)
-                        self.log.debug("Generated sql: %s", sql)
-                        cur.fast_executemany = True
-                        cur.executemany(sql, values)
-                        conn.commit()
-                        self.log.info("Loaded %s rows into %s so far", 
len(chunked_rows), table)
-                else:
-                    for i, row in enumerate(rows, 1):
-                        lst = []
-                        for cell in row:
-                            lst.append(self._serialize_cell(cell, conn))
-                        values = tuple(lst)
-                        sql = self._generate_insert_sql(table, values, 
target_fields, replace, **kwargs)
-                        self.log.debug("Generated sql: %s", sql)
-                        cur.execute(sql, values)
-                        if commit_every and i % commit_every == 0:
-                            conn.commit()
-                            self.log.info("Loaded %s rows into %s so far", i, 
table)
-
-            if not executemany:
-                conn.commit()
+                    )
+                    sql = self._generate_insert_sql(table, values[0], 
target_fields, replace, **kwargs)
+                    self.log.debug("Generated sql: %s", sql)
+                    cur.fast_executemany = True

Review Comment:
   Yes, that's why in the meanwhile I've put a condition for it; @Joffreybvn 
made the same remark ;-)  But it doesn't make a difference in our case, as it 
is still faster even without setting that property — I removed it locally in our 
patched Airflow env and re-ran the same DAG, and we still have the same 
performance.  I've also looked it up, and it seems setting this property won't 
raise an exception when not supported; anyway, I've put a condition on it so 
by default it won't be set.
   
![Capture](https://github.com/apache/airflow/assets/189402/785c0dc3-e395-498b-8346-f27cb188d072)
   



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to