[ https://issues.apache.org/jira/browse/HIVE-27732?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Aman Raj updated HIVE-27732:
----------------------------
    Description: 
Added additional functions for OSS Spark 3.3 and HMS 3.1.2 compatibility.
These are the functions used by Spark when it integrates with Hive:

List of all functions called by HiveShim.scala, as found in the Spark 3.3 codebase. A sketch of the two call patterns these entries use follows the list.
1. hive.dropDatabase(dbName, deleteData, ignoreUnknownDb, cascade)
2. hive.alterDatabase(dbName, d)
3. hive.getDatabase(dbName)
4. hive.getAllDatabases.asScala.toSeq
5. hive.getDatabasesByPattern(pattern).asScala.toSeq
6. hive.databaseExists(dbName)
7. getAllPartitionsMethod.invoke(hive, table)
8. getPartitionsByFilterMethod.invoke(hive, table, filter)
9. alterTableMethod.invoke(hive, tableName, table, environmentContextInAlterTable)
10. alterPartitionsMethod.invoke(hive, tableName, newParts, environmentContextInAlterTable)
11. hive.createTable(table, ifNotExists)
12. hive.getTable(database, tableName)
13. hive.getTable(dbName, tableName, throwException)
14. hive.getTable(tableName)
15. getTablesByTypeMethod.invoke(hive, dbName, pattern, tableType)
16. hive.getTablesByPattern(dbName, pattern).asScala.toSeq
17. hive.getAllTables(dbName).asScala.toSeq
18. hive.dropTable(dbName, tableName, deleteData, ignoreIfNotExists)
19. hive.dropTable(dbName, tableName)
20. dropTableMethod.invoke(hive, dbName, tableName, deleteData: JBoolean, ignoreIfNotExists: JBoolean, purge: JBoolean)
21. hive.getPartition(table, partSpec, forceCreate)
22. hive.getPartitions(table, partSpec).asScala.toSeq
23. hive.getPartitionNames(dbName, tableName, max).asScala.toSeq
24. hive.getPartitionNames(dbName, tableName, partSpec, max).asScala.toSeq
25. createPartitionMethod.invoke(
          hive,
          table,
          spec,
          location,
          params, // partParams
          null, // inputFormat
          null, // outputFormat
          -1: JInteger, // numBuckets
          null, // cols
          null, // serializationLib
          null, // serdeParams
          null, // bucketCols
          null) // sortCols
26. hive.createPartitions(addPartitionDesc)
27. loadPartitionMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      inheritTableSpecs: JBoolean, isSkewedStoreAsSubdir: JBoolean,
      isSrcLocal: JBoolean, isAcid, hasFollowingStatsTask)
28. hive.renamePartition(table, oldPartSpec, newPart)
29. loadTableMethod.invoke(hive, loadPath, tableName, loadFileType.get, isSrcLocal: JBoolean,
      isSkewedStoreAsSubdir, isAcidIUDoperation, hasFollowingStatsTask,
      writeIdInLoadTableOrPartition, stmtIdInLoadTableOrPartition: JInteger, replace: JBoolean)
30. loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, loadFileType.get,
      numDP: JInteger, listBucketingLevel, isAcid, writeIdInLoadTableOrPartition,
      stmtIdInLoadTableOrPartition, hasFollowingStatsTask, AcidUtils.Operation.NOT_ACID,
      replace: JBoolean)
31. hive.createFunction(toHiveFunction(func, db))
32. hive.dropFunction(db, name)
33. hive.alterFunction(db, oldName, hiveFunc)
34. hive.getFunctions(db, pattern).asScala.toSeq
35. dropIndexMethod.invoke(hive, dbName, tableName, indexName, throwExceptionInDropIndex,
      deleteDataInDropIndex)
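
The calls above fall into two patterns: plain calls on org.apache.hadoop.hive.ql.metadata.Hive whose java.util.List results are converted with asScala, and version-dependent methods that are resolved once by reflection and then invoked (the *Method.invoke entries). The sketch below illustrates both patterns. It is not the actual Spark source; the object name and the findMethod helper are only illustrative, while the Hive client methods it calls (getAllTables and the five-argument dropTable) are ones I believe exist in Hive 3.x.

// Illustrative sketch only, not the Spark source. Object name and the
// findMethod helper are invented for this example; the Hive client methods
// called (getAllTables, five-argument dropTable) are the real ones from the
// list above.
import java.lang.reflect.Method
import java.lang.{Boolean => JBoolean}

import scala.collection.JavaConverters._

import org.apache.hadoop.hive.ql.metadata.Hive

object HiveShimSketch {

  // Pattern 1: direct call on the Hive client, converting the returned
  // java.util.List to a Scala Seq (items 4, 5, 16, 17, 22, 23, 24, 34).
  def getAllTables(hive: Hive, dbName: String): Seq[String] =
    hive.getAllTables(dbName).asScala.toSeq

  // Pattern 2: resolve a version-dependent method once by reflection, then
  // invoke it (items 7-10, 15, 20, 25, 27, 29, 30, 35). This lets the shim
  // compile against one Hive version but call signatures that differ across
  // versions.
  private def findMethod(klass: Class[_], name: String, args: Class[_]*): Method =
    klass.getMethod(name, args: _*)

  private lazy val dropTableMethod: Method =
    findMethod(classOf[Hive], "dropTable",
      classOf[String], classOf[String],
      java.lang.Boolean.TYPE, java.lang.Boolean.TYPE, java.lang.Boolean.TYPE)

  // Mirrors item 20 above: dropTable with deleteData / ignoreIfNotExists / purge.
  def dropTable(hive: Hive, dbName: String, tableName: String,
                deleteData: Boolean, ignoreIfNotExists: Boolean, purge: Boolean): Unit = {
    dropTableMethod.invoke(hive, dbName, tableName,
      deleteData: JBoolean, ignoreIfNotExists: JBoolean, purge: JBoolean)
  }
}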

> Backward compatibility for Hive with Components like Spark
> ----------------------------------------------------------
>
>                 Key: HIVE-27732
>                 URL: https://issues.apache.org/jira/browse/HIVE-27732
>             Project: Hive
>          Issue Type: Sub-task
>    Affects Versions: 3.2.0
>            Reporter: Aman Raj
>            Assignee: Aman Raj
>            Priority: Major
>



--
This message was sent by Atlassian Jira
(v8.20.10#820010)
