[ https://issues.apache.org/jira/browse/PHOENIX-5707?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17029325 ]
Hadoop QA commented on PHOENIX-5707:
------------------------------------
{color:red}-1 overall{color}. Here are the results of testing the latest attachment
http://issues.apache.org/jira/secure/attachment/12992521/PHOENIX-5707.4.x-HBase-1.3.v1.patch
against 4.x-HBase-1.3 branch at commit b27168de1922d429229935b16eef49c00a88aab1.
ATTACHMENT ID: 12992521
{color:green}+1 @author{color}. The patch does not contain any @author tags.
{color:red}-1 tests included{color}. The patch doesn't appear to include any new or modified tests.
Please justify why no new tests are needed for this patch.
Also please list what manual steps were performed to verify this patch.
{color:green}+1 javac{color}. The applied patch does not increase the total number of javac compiler warnings.
{color:green}+1 release audit{color}. The applied patch does not increase the total number of release audit warnings.
{color:green}+1 lineLengths{color}. The patch does not introduce lines longer than 100 characters.
{color:red}-1 core tests{color}. The patch failed these unit tests:
org.apache.phoenix.hbase.index.covered.NonTxIndexBuilderTest
Test results: https://builds.apache.org/job/PreCommit-PHOENIX-Build/3384//testReport/
Console output: https://builds.apache.org/job/PreCommit-PHOENIX-Build/3384//console
This message is automatically generated.
> Index rebuild after truncate incorrectly writes the included column value
> -------------------------------------------------------------------------
>
> Key: PHOENIX-5707
> URL: https://issues.apache.org/jira/browse/PHOENIX-5707
> Project: Phoenix
> Issue Type: Bug
> Affects Versions: 4.15.0
> Reporter: Swaroopa Kadam
> Assignee: Swaroopa Kadam
> Priority: Minor
> Fix For: 5.1.0, 4.15.1
>
> Attachments: PHOENIX-5707.4.x-HBase-1.3.v1.patch
>
> Time Spent: 1h 40m
> Remaining Estimate: 0h
>
>
> {code:java}
> @Test
> public void testIncorrectRebuild() throws Exception {
>     String schemaName = generateUniqueName();
>     String dataTableName = generateUniqueName();
>     String dataTableFullName = SchemaUtil.getTableName(schemaName, dataTableName);
>     String indexTableName = generateUniqueName();
>     String indexTableFullName = SchemaUtil.getTableName(schemaName, indexTableName);
>     Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
>     try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
>         conn.setAutoCommit(true);
>         conn.createStatement().execute("create table " + dataTableFullName +
>             " (id varchar(10) not null primary key, val1 varchar(10), " +
>             "val2 varchar(10), val3 varchar(10)) COLUMN_ENCODED_BYTES=0");
>         conn.createStatement().execute("CREATE INDEX " + indexTableName + " on " +
>             dataTableFullName + " (val1) include (val2, val3)");
>         // insert a full row
>         conn.createStatement().execute("upsert into " + dataTableFullName +
>             " values ('a', 'ab', 'efgh', 'abcd')");
>         // sleep between upserts so each update gets a distinct cell timestamp
>         Thread.sleep(1000);
>         // insert a partial row
>         conn.createStatement().execute("upsert into " + dataTableFullName +
>             " (id, val3) values ('a', 'uvwx')");
>         Thread.sleep(1000);
>         // insert a full row
>         conn.createStatement().execute("upsert into " + dataTableFullName +
>             " values ('a', 'ab', 'efgh', 'yuio')");
>         Thread.sleep(1000);
>         // insert a partial row
>         conn.createStatement().execute("upsert into " + dataTableFullName +
>             " (id, val3) values ('a', 'asdf')");
>         // truncate the index table
>         ConnectionQueryServices queryServices =
>             conn.unwrap(PhoenixConnection.class).getQueryServices();
>         Admin admin = queryServices.getAdmin();
>         TableName tableName = TableName.valueOf(indexTableFullName);
>         admin.disableTable(tableName);
>         admin.truncateTable(tableName, false);
>         // rebuild the index
>         runIndexTool(true, false, schemaName, dataTableName, indexTableName);
>         // we expect 2 versions to be written after the rebuild: one for the last
>         // full row update and one for the latest update
>         // assert
>         Scan scan = new Scan();
>         scan.setRaw(true);
>         scan.setMaxVersions(10);
>         HTable indexTable = new HTable(getUtility().getConfiguration(), indexTableFullName);
>         HTable dataTable = new HTable(getUtility().getConfiguration(), dataTableFullName);
>         // find the timestamp of the last full-row update in the data table
>         long dataFullRowTS = 0;
>         ResultScanner rs = dataTable.getScanner(scan);
>         for (Result r : rs) {
>             for (Cell c : r.listCells()) {
>                 String column = new String(CellUtil.cloneQualifier(c));
>                 String value = new String(CellUtil.cloneValue(c));
>                 if (column.equalsIgnoreCase("VAL3") && value.equalsIgnoreCase("yuio")) {
>                     dataFullRowTS = c.getTimestamp();
>                 }
>             }
>         }
>         rs = indexTable.getScanner(scan);
>         for (Result r : rs) {
>             for (Cell c : r.listCells()) {
>                 long indexTS = c.getTimestamp();
>                 String column = new String(CellUtil.cloneQualifier(c));
>                 if (column.equalsIgnoreCase("0:VAL3") && indexTS == dataFullRowTS) {
>                     String value = new String(CellUtil.cloneValue(c));
>                     // if the ts is from the full rebuild row, the value should also
>                     // be from the full rebuild row
>                     assertEquals("yuio", value);
>                 }
>             }
>         }
>     }
> }
> {code}
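> (Note for anyone reproducing this outside the IT harness: {{runIndexTool}} above is a test helper. Below is a minimal sketch of driving the rebuild directly through Phoenix's MapReduce {{org.apache.phoenix.mapreduce.index.IndexTool}}; the schema/table names are placeholders and the option names are assumptions, so verify them against IndexTool's usage output for your Phoenix version.)
> {code:java}
> import org.apache.hadoop.conf.Configuration;
> import org.apache.hadoop.hbase.HBaseConfiguration;
> import org.apache.hadoop.util.ToolRunner;
> import org.apache.phoenix.mapreduce.index.IndexTool;
>
> public class RebuildIndexExample {
>     public static void main(String[] args) throws Exception {
>         Configuration conf = HBaseConfiguration.create();
>         // Kick off a full rebuild of the (already truncated) index table.
>         // Option names and table names below are assumptions for illustration;
>         // check IndexTool's help output for the exact flags in your version.
>         int exitCode = ToolRunner.run(conf, new IndexTool(), new String[] {
>                 "--schema", "MY_SCHEMA",
>                 "--data-table", "MY_DATA_TABLE",
>                 "--index-table", "MY_INDEX",
>                 "--output-path", "/tmp/MY_INDEX_REBUILD" });
>         System.exit(exitCode);
>     }
> }
> {code}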
--
This message was sent by Atlassian Jira
(v8.3.4#803005)