[ https://issues.apache.org/jira/browse/CARBONDATA-4204?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Kunal Kapoor resolved CARBONDATA-4204.
--------------------------------------
    Fix Version/s: 2.2.0
       Resolution: Fixed

> When the path is empty in Carbon ADD SEGMENT, a "String index out of range" error is thrown.
> --------------------------------------------------------------------------------------------
>
>                 Key: CARBONDATA-4204
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-4204
>             Project: CarbonData
>          Issue Type: Bug
>    Affects Versions: 2.1.1
>         Environment: 3 node FI cluster
>            Reporter: Prasanna Ravichandran
>            Priority: Minor
>             Fix For: 2.2.0
>
>          Time Spent: 12.5h
>  Remaining Estimate: 0h
>
> Test queries:
> CREATE TABLE uniqdata(
>   cust_id int, cust_name string, active_emui_version string,
>   dob timestamp, doj timestamp,
>   bigint_column1 bigint, bigint_column2 bigint,
>   decimal_column1 decimal(30,10), decimal_column2 decimal(36,36),
>   double_column1 double, double_column2 double,
>   integer_column1 int)
> STORED AS carbondata;
> LOAD DATA INPATH 'hdfs://hacluster/user/prasanna/2000_UniqData.csv'
> INTO TABLE uniqdata
> OPTIONS('fileheader'='cust_id,cust_name,active_emui_version,dob,doj,bigint_column1,bigint_column2,decimal_column1,decimal_column2,double_column1,double_column2,integer_column1',
>         'bad_records_action'='force');
> ALTER TABLE uniqdata ADD SEGMENT OPTIONS('path'='', 'format'='carbon');
> --
> Error: org.apache.hive.service.cli.HiveSQLException: Error running query: java.lang.StringIndexOutOfBoundsException: String index out of range: -1
>  at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:396)
>  at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.$anonfun$run$3(SparkExecuteStatementOperation.scala:281)
>  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>  at org.apache.spark.sql.hive.thriftserver.SparkOperation.withLocalProperties(SparkOperation.scala:78)
>  at org.apache.spark.sql.hive.thriftserver.SparkOperation.withLocalProperties$(SparkOperation.scala:62)
>  at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.withLocalProperties(SparkExecuteStatementOperation.scala:46)
>  at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.run(SparkExecuteStatementOperation.scala:281)
>  at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.run(SparkExecuteStatementOperation.scala:268)
>  at java.security.AccessController.doPrivileged(Native Method)
>  at javax.security.auth.Subject.doAs(Subject.java:422)
>  at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1761)
>  at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2.run(SparkExecuteStatementOperation.scala:295)
>  at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>  at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>  at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>  at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>  at java.lang.Thread.run(Thread.java:748)
> Caused by: java.lang.StringIndexOutOfBoundsException: String index out of range: -1
>  at java.lang.String.charAt(String.java:658)
>  at org.apache.spark.sql.execution.command.management.CarbonAddLoadCommand.processMetadata(CarbonAddLoadCommand.scala:93)
>  at org.apache.spark.sql.execution.command.MetadataCommand.$anonfun$run$1(package.scala:137)
>  at org.apache.spark.sql.execution.command.Auditable.runWithAudit(package.scala:118)
>  at org.apache.spark.sql.execution.command.Auditable.runWithAudit$(package.scala:114)
>  at org.apache.spark.sql.execution.command.MetadataCommand.runWithAudit(package.scala:134)
>  at org.apache.spark.sql.execution.command.MetadataCommand.run(package.scala:137)
>  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:71)
>  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:69)
>  at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:80)
>  at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:231)
>  at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3697)
>  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:108)
>  at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:170)
>  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:91)
>  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:777)
>  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:65)
>  at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3695)
>  at org.apache.spark.sql.Dataset.<init>(Dataset.scala:231)
>  at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
>  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:777)
>  at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
>  at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:615)
>  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:777)
>  at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:610)
>  at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:650)
>  at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:356)
>  ... 16 more (state=,code=0)
>  
> Expected result: A clear validation error message should be shown for an empty 'path'.
> Actual result: The complete stack trace is thrown instead of a proper error message.
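>
> The "Caused by" frames point at the root cause: CarbonAddLoadCommand.processMetadata ends up calling String.charAt with index -1, which is what happens when the user-supplied 'path' option is an empty string. Below is a minimal Scala sketch of that failure mode and a defensive check; the object and helper names (AddSegmentPathCheck, normalizePath, normalizePathSafe) are hypothetical illustrations, not CarbonData's actual implementation.
>
> // Sketch, assuming the command inspects the last character of the
> // 'path' option (e.g. to strip a trailing '/'). Hypothetical code.
> object AddSegmentPathCheck {
>
>   def normalizePath(path: String): String = {
>     // For path == "", path.length - 1 == -1, so charAt(-1) throws
>     // StringIndexOutOfBoundsException: String index out of range: -1
>     if (path.charAt(path.length - 1) == '/') path.dropRight(1) else path
>   }
>
>   // Defensive variant: fail fast with a readable message instead.
>   def normalizePathSafe(path: String): String = {
>     require(path != null && path.nonEmpty,
>       "'path' option in ADD SEGMENT must not be empty")
>     if (path.endsWith("/")) path.dropRight(1) else path
>   }
>
>   def main(args: Array[String]): Unit = {
>     // Prints the path without its trailing '/'
>     println(normalizePathSafe("hdfs://hacluster/user/prasanna/segment1/"))
>     // Throws IllegalArgumentException with a clear message
>     normalizePathSafe("")
>   }
> }
>
> With a check like this in place, an empty 'path' would surface as a proper validation error rather than the raw StringIndexOutOfBoundsException trace above.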



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
