deniskuzZ commented on a change in pull request #2266:
URL: https://github.com/apache/hive/pull/2266#discussion_r642235334



##########
File path: standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
##########
@@ -5394,6 +5410,476 @@ public void countOpenTxns() throws MetaException {
     }
   }
 
+  private void cleanOldStatsFromPartColStatTable(Map<PartitionInfo, ColumnStatistics> statsPartInfoMap,
+                                                 Connection dbConn) throws SQLException {
+    PreparedStatement preparedStatement = null;
+    int numRows = 0;
+    int maxNumRows = MetastoreConf.getIntVar(conf, ConfVars.DIRECT_SQL_MAX_ELEMENTS_VALUES_CLAUSE);
+
+    // Index is present on DB_NAME,TABLE_NAME,COLUMN_NAME,PARTITION_NAME. use that.
+    // TODO : Need to add catalog name to the index
+    String delete = "DELETE FROM \"PART_COL_STATS\" where \"DB_NAME\" = ? AND "
+            + "\"TABLE_NAME\" = ? AND \"COLUMN_NAME\" = ? AND \"PARTITION_NAME\" = ? "
+            + "AND \"PART_ID\" = ?";
+
+    try {
+      preparedStatement = sqlGenerator.prepareStmtWithParameters(dbConn, delete, null);
+      for (Map.Entry entry : statsPartInfoMap.entrySet()) {
+        ColumnStatistics colStats = (ColumnStatistics) entry.getValue();
+        PartitionInfo partitionInfo = (PartitionInfo) entry.getKey();
+        for (ColumnStatisticsObj statisticsObj : colStats.getStatsObj()) {
+          preparedStatement.setString(1, colStats.getStatsDesc().getDbName());
+          preparedStatement.setString(2, colStats.getStatsDesc().getTableName());
+          preparedStatement.setString(3, statisticsObj.getColName());
+          preparedStatement.setString(4, colStats.getStatsDesc().getPartName());
+          preparedStatement.setLong(5, partitionInfo.partitionId);
+          numRows++;
+          preparedStatement.addBatch();
+          if (numRows == maxNumRows) {
+            preparedStatement.executeBatch();
+            LOG.debug("Executed delete " + delete + " for numRows " + numRows);
+            numRows = 0;
+          }
+        }
+      }
+
+      if (numRows != 0) {
+        preparedStatement.executeBatch();
+        LOG.debug("Executed delete " + delete + " for numRows " + numRows);
+      }
+    } finally {
+      closeStmt(preparedStatement);
+    }
+  }
+
+  private void insertIntoPartColStatTable(Map<PartitionInfo, ColumnStatistics> partitionInfoMap,
+                                          long maxCsId,
+                                          Connection dbConn) throws SQLException, MetaException, NoSuchObjectException {
+    PreparedStatement preparedStatement = null;
+    int numRows = 0;
+    int maxNumRows = MetastoreConf.getIntVar(conf, ConfVars.DIRECT_SQL_MAX_ELEMENTS_VALUES_CLAUSE);

Review comment:
       Same as above: check whether maxBatchSize is applicable here as well, rather than reusing DIRECT_SQL_MAX_ELEMENTS_VALUES_CLAUSE as the batch threshold.
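       For illustration only, a minimal sketch of the batching pattern the comment is getting at: queue bind values on the PreparedStatement and flush whenever a configurable batch size is reached, then flush the remainder. The class and method names (BatchedDeleteSketch, executeInBatches) and the batchSize parameter are hypothetical stand-ins for whatever maxBatchSize setting is meant here, not the actual TxnHandler code.

       import java.sql.Connection;
       import java.sql.PreparedStatement;
       import java.sql.SQLException;
       import java.util.List;

       public final class BatchedDeleteSketch {

         private BatchedDeleteSketch() {
         }

         /**
          * Runs the given parameterized DELETE once per element of paramRows,
          * flushing the JDBC batch whenever batchSize rows have been queued.
          */
         static void executeInBatches(Connection dbConn, String deleteSql,
             List<Object[]> paramRows, int batchSize) throws SQLException {
           try (PreparedStatement ps = dbConn.prepareStatement(deleteSql)) {
             int numRows = 0;
             for (Object[] row : paramRows) {
               for (int i = 0; i < row.length; i++) {
                 ps.setObject(i + 1, row[i]); // JDBC bind parameters are 1-based
               }
               ps.addBatch();
               numRows++;
               if (numRows == batchSize) {
                 ps.executeBatch(); // flush a full batch
                 numRows = 0;
               }
             }
             if (numRows != 0) {
               ps.executeBatch(); // flush the trailing partial batch
             }
           }
         }
       }

       Keying the flush off a dedicated batch-size setting would bound the JDBC batch independently of the direct-SQL VALUES-clause tuning, which exists for a different purpose.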




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
