[ https://issues.apache.org/jira/browse/OMID-292?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17844567#comment-17844567 ]

Nikita Pande edited comment on OMID-292 at 5/9/24 11:44 AM:
------------------------------------------------------------

I have observed the issue on an HBase 2.6.0 cluster with Phoenix 5.1.4 and phoenix-omid 1.1.1, on a transactional table, running JDK 17. I have identified the issue and will take it up in the subsequent tasks.
 * Omid is added as shaded jars in the Phoenix 5.1 branch. After fixing this in my environment (the map reduce job then started), I went ahead and tried a CSV bulk load and got the following error.

{code:java}
2024-05-08 07:06:18,436 INFO  [main] mapreduce.Job:  map 0% reduce 0%
2024-05-08 07:06:28,470 INFO  [main] mapreduce.Job: Task Id : attempt_1715115365998_0006_m_000000_2, Status : FAILED
Error: java.lang.RuntimeException: java.lang.RuntimeException: java.sql.SQLException: ERROR 1077 (44A08): Transaction Failure  Could not get new timestamp
        at org.apache.phoenix.mapreduce.FormatToBytesWritableMapper.map(FormatToBytesWritableMapper.java:206)
        at org.apache.phoenix.mapreduce.FormatToBytesWritableMapper.map(FormatToBytesWritableMapper.java:77)
        at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:146)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:800)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:348)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:178)
        at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
        at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1899)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:172)
Caused by: java.lang.RuntimeException: java.sql.SQLException: ERROR 1077 (44A08): Transaction Failure  Could not get new timestamp
        at org.apache.phoenix.thirdparty.com.google.common.base.Throwables.propagate(Throwables.java:231)
        at org.apache.phoenix.mapreduce.FormatToBytesWritableMapper$MapperUpsertListener.errorOnRecord(FormatToBytesWritableMapper.java:408)
        at org.apache.phoenix.util.csv.CsvUpsertExecutor.execute(CsvUpsertExecutor.java:103)
        at org.apache.phoenix.util.csv.CsvUpsertExecutor.execute(CsvUpsertExecutor.java:55)
        at org.apache.phoenix.util.UpsertExecutor.execute(UpsertExecutor.java:133)
        at org.apache.phoenix.mapreduce.FormatToBytesWritableMapper.map(FormatToBytesWritableMapper.java:174)
        ... 9 more
Caused by: java.sql.SQLException: ERROR 1077 (44A08): Transaction Failure  Could not get new timestamp
        at org.apache.phoenix.exception.SQLExceptionCode$Factory$1.newException(SQLExceptionCode.java:623)
        at org.apache.phoenix.exception.SQLExceptionInfo.buildException(SQLExceptionInfo.java:217)
        at org.apache.phoenix.transaction.OmidTransactionContext.begin(OmidTransactionContext.java:114)
        at org.apache.phoenix.execute.MutationState.startTransaction(MutationState.java:404)
        at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:442)
        at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:430)
        at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
        at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:429)
        at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:417)
        at org.apache.phoenix.jdbc.PhoenixPreparedStatement.execute(PhoenixPreparedStatement.java:182)
        at org.apache.phoenix.util.csv.CsvUpsertExecutor.execute(CsvUpsertExecutor.java:95)
        ... 12 more
Caused by: org.apache.phoenix.shaded.org.apache.omid.transaction.TransactionException: Could not get new timestamp
        at org.apache.phoenix.shaded.org.apache.omid.transaction.AbstractTransactionManager.begin(AbstractTransactionManager.java:159)
        at org.apache.phoenix.transaction.OmidTransactionContext.begin(OmidTransactionContext.java:109)
        ... 20 more
Caused by: java.util.concurrent.ExecutionException: org.apache.phoenix.shaded.org.apache.omid.tso.client.ConnectionException
        at org.apache.phoenix.thirdparty.com.google.common.util.concurrent.AbstractFuture.getDoneValue(AbstractFuture.java:592)
        at org.apache.phoenix.thirdparty.com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:571)
        at org.apache.phoenix.shaded.org.apache.omid.tso.client.ForwardingTSOFuture.get(ForwardingTSOFuture.java:51)
        at org.apache.phoenix.shaded.org.apache.omid.transaction.AbstractTransactionManager.begin(AbstractTransactionManager.java:144)
        ... 21 more
Caused by: org.apache.phoenix.shaded.org.apache.omid.tso.client.ConnectionException
        at org.apache.phoenix.shaded.org.apache.omid.tso.client.TSOClient$DisconnectedState$1.operationComplete(TSOClient.java:586)
        at org.apache.phoenix.shaded.org.apache.omid.tso.client.TSOClient$DisconnectedState$1.operationComplete(TSOClient.java:577)
        at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590)
        at io.netty.util.concurrent.DefaultPromise.notifyListeners0(DefaultPromise.java:583)
        at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:559)
        at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492)
        at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636)
        at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629)
        at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:118)
        at io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.fulfillConnectPromise(AbstractNioChannel.java:321)
        at io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.finishConnect(AbstractNioChannel.java:337)
        at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:776)
        at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:724)
        at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:650)
        at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:562)
        at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
        at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
        at java.base/java.lang.Thread.run(Thread.java:833){code}
 * Should [https://github.com/apache/phoenix/commit/d0ed96f2aa23c487f60207062835183e35cabfb4] be applied on the Phoenix 5.1 branch as well? If Omid is added as a shaded jar, CSV bulk load will fail with the error mentioned in this Jira, since the config files refer to the Omid classes without the 'org.apache.phoenix.shaded.org.apache.omid' prefix.
 * I also tested by commenting out the following lines in phoenix-omid, which were causing the shading:
{code:java}
<!--                    <relocation>-->
<!--                        <pattern>org.apache.omid</pattern>-->
<!--                        <shadedPattern>${shaded.package}.org.apache.omid</shadedPattern>-->
<!--                    </relocation>--> {code}
With this change, the CSV bulk load issue mentioned above is fixed. So the question here is whether, in the YAMLUtils class, we should maintain both lists, or make the trusted list configurable so that loading won't fail regardless of whether Omid is shaded (see the sketch after this list).
 * Also, Phoenix CSV bulk load doesn't create shadow cells, so scanning the transactional table does not return data; without shadow cells, transactions can't be used through Omid. I discussed this with [~chrajeshbab...@gmail.com] offline, to work on getting shadow cells created with bulk load as well.
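To make the YAMLUtils question above concrete, here is a rough, hypothetical sketch of how a trusted-tag check could accept the Omid configuration classes both with and without the 'org.apache.phoenix.shaded' relocation prefix. It is not the actual YAMLUtils code; it assumes the SnakeYAML 2.x TagInspector API (which is where the "Global tag is not allowed" check in this Jira comes from), and the class and method names (OmidYamlLoaderSketch, isTrusted, newYaml) are made up for illustration.
{code:java}
// Hypothetical sketch only, not the actual org.apache.omid.YAMLUtils code.
// Assumes SnakeYAML 2.x, where the "Global tag is not allowed" check lives.
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.yaml.snakeyaml.LoaderOptions;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.Constructor;
import org.yaml.snakeyaml.nodes.Tag;

public class OmidYamlLoaderSketch {

    // Trust the Omid classes whether or not they have been relocated by the
    // Phoenix shading; this list could also be made configurable.
    private static final List<String> TRUSTED_PREFIXES = Arrays.asList(
            "org.apache.omid",
            "org.apache.phoenix.shaded.org.apache.omid");

    static boolean isTrusted(Tag tag) {
        // Allow a global YAML tag only when the class it names starts with
        // one of the trusted prefixes above.
        return TRUSTED_PREFIXES.stream()
                .anyMatch(prefix -> tag.getClassName().startsWith(prefix));
    }

    static Yaml newYaml() {
        LoaderOptions options = new LoaderOptions();
        options.setTagInspector(OmidYamlLoaderSketch::isTrusted);
        // Map root so arbitrary Omid config keys can be read; tagged values
        // are only constructed when their tag passes isTrusted().
        return new Yaml(new Constructor(Map.class, options));
    }

    public static void main(String[] args) {
        // The tag from this Jira's error and its shaded counterpart:
        // both pass the trusted-prefix check.
        Tag unshaded = new Tag(Tag.PREFIX
                + "org.apache.omid.tso.client.OmidClientConfiguration");
        Tag shaded = new Tag(Tag.PREFIX
                + "org.apache.phoenix.shaded.org.apache.omid.tso.client.OmidClientConfiguration");
        System.out.println(isTrusted(unshaded) + " " + isTrusted(shaded));
    }
}
{code}
Whether to hard-code both prefixes, derive the relocated prefix at build time, or expose the list through configuration is exactly the open question above.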


> Phoenix CSV BulkloadTool fails with "Global tag is not allowed" error on 
> transactional table
> --------------------------------------------------------------------------------------------
>
>                 Key: OMID-292
>                 URL: https://issues.apache.org/jira/browse/OMID-292
>             Project: Phoenix Omid
>          Issue Type: Bug
>            Reporter: Nikita Pande
>            Priority: Major
>
> I get the following error when I invoke CSV Bulkload for transactional table 
> CLI:
> hbase org.apache.phoenix.mapreduce.CsvBulkLoadTool --table <Transactional table> --input <csv file>
> {code:java}
> org.apache.phoenix.mapreduce.CsvBulkLoadTool.main(CsvBulkLoadTool.java:117)
> Exception in thread "main" Global tag is not allowed: 
> tag:yaml.org,2002:org.apache.omid.tso.client.OmidClientConfiguration
>  in 'string', line 5, column 26:
>     omidClientConfiguration: !!org.apache.omid.tso.client.Omi ... 
>                              ^
>       at 
> org.yaml.snakeyaml.composer.Composer.composeSequenceNode(Composer.java:259)
>       at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:207)
>       at 
> org.yaml.snakeyaml.composer.Composer.composeValueNode(Composer.java:369)
>       at 
> org.yaml.snakeyaml.composer.Composer.composeMappingChildren(Composer.java:348)
>       at 
> org.yaml.snakeyaml.composer.Composer.composeMappingNode(Composer.java:323)
>       at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:209)
>       at org.yaml.snakeyaml.composer.Composer.getNode(Composer.java:131)
>       at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:157)
>       at 
> org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:178)
>       at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:493)
>       at org.yaml.snakeyaml.Yaml.loadAs(Yaml.java:473)
>       at 
> org.apache.phoenix.shaded.org.apache.omid.YAMLUtils.loadStringAsMap(YAMLUtils.java:87)
>       at 
> org.apache.phoenix.shaded.org.apache.omid.YAMLUtils.loadAsMap(YAMLUtils.java:75)
>       at 
> org.apache.phoenix.shaded.org.apache.omid.YAMLUtils.loadSettings(YAMLUtils.java:62)
>       at 
> org.apache.phoenix.shaded.org.apache.omid.YAMLUtils.loadSettings(YAMLUtils.java:45)
>       at 
> org.apache.phoenix.shaded.org.apache.omid.transaction.HBaseOmidClientConfiguration.<init>(HBaseOmidClientConfiguration.java:71)
>       at 
> org.apache.phoenix.shaded.org.apache.omid.transaction.HBaseOmidClientConfiguration.<init>(HBaseOmidClientConfiguration.java:58)
>       at 
> org.apache.phoenix.transaction.OmidTransactionProvider.getTransactionClient(OmidTransactionProvider.java:72)
>       at 
> org.apache.phoenix.query.ConnectionQueryServicesImpl.initTransactionClient(ConnectionQueryServicesImpl.java:5907)
>       at 
> org.apache.phoenix.transaction.OmidTransactionContext.<init>(OmidTransactionContext.java:60)
>       at 
> org.apache.phoenix.transaction.OmidTransactionProvider.getTransactionContext(OmidTransactionProvider.java:65)
>       at 
> org.apache.phoenix.execute.MutationState.startTransaction(MutationState.java:390)
>       at 
> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:613)
>       at 
> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:547)
>       at 
> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:777)
>       at 
> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:447)
>       at 
> org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:232)
>       at 
> org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:210)
>       at org.apache.phoenix.util.ParseNodeUtil.rewrite(ParseNodeUtil.java:177)
>       at 
> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:537)
>       at 
> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:510)
>       at 
> org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:314)
>       at 
> org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:303)
>       at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>       at 
> org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:302)
>       at 
> org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:295)
>       at 
> org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:2024)
>       at 
> org.apache.phoenix.mapreduce.AbstractBulkLoadTool.loadData(AbstractBulkLoadTool.java:238)
>       at 
> org.apache.phoenix.mapreduce.AbstractBulkLoadTool.run(AbstractBulkLoadTool.java:186)
>       at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:82)
>       at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:97)
>       at 
> org.apache.phoenix.mapreduce.CsvBulkLoadTool.main(CsvBulkLoadTool.java:117)  
> {code}


