What does your local result for this test show? Perhaps the JSON is being
sorted differently in your environment... Comparing the two dumps below: the
keys do come back in a different order, but the partition values
("cluster.partitions", "node.partitions", "metadata.partition") are also
serialized as JSON objects in your result rather than as the formatted
strings in the expected file.
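
For what it's worth, if a response like this is built from a plain HashMap and
serialized with Jackson, the key order is unspecified unless sorting is
enabled explicitly. A minimal sketch of the difference (hypothetical code, not
the actual AsterixDB serialization path):

{noformat}
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;

import java.util.HashMap;
import java.util.Map;

public class KeyOrderCheck {
    public static void main(String[] args) throws Exception {
        Map<String, Object> cc = new HashMap<>();
        cc.put("statsUri", "http://127.0.0.1:19002/admin/cluster/cc/stats");
        cc.put("configUri", "http://127.0.0.1:19002/admin/cluster/cc/config");

        // A HashMap's iteration order is unspecified and can differ
        // across JVMs and JDK versions.
        ObjectMapper unsorted = new ObjectMapper();
        System.out.println(unsorted.writeValueAsString(cc));

        // With this feature enabled, map entries are always emitted in
        // alphabetical key order, which keeps textual diffs stable.
        ObjectMapper sorted = new ObjectMapper()
                .enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS);
        System.out.println(sorted.writeValueAsString(cc));
    }
}
{noformat}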

On Tue, Nov 1, 2016 at 11:16 PM 李文海 <[email protected]> wrote:

> BTW, the expected result:
>
> {
>     "cc": {
>         "configUri": "http://127.0.0.1:19002/admin/cluster/cc/config";,
>         "statsUri": "http://127.0.0.1:19002/admin/cluster/cc/stats";,
>         "threadDumpUri": "
> http://127.0.0.1:19002/admin/cluster/cc/threaddump";
>     },
>     "config": {
>         "api.port": 19002,
>         "cluster.partitions": {
>             "0": "ID:0, Original Node: asterix_nc1, IODevice: 0, Active
> Node: asterix_nc1",
>             "1": "ID:1, Original Node: asterix_nc1, IODevice: 1, Active
> Node: asterix_nc1",
>             "2": "ID:2, Original Node: asterix_nc2, IODevice: 0, Active
> Node: asterix_nc2",
>             "3": "ID:3, Original Node: asterix_nc2, IODevice: 1, Active
> Node: asterix_nc2"
>         },
>         "compiler.framesize": 32768,
>         "compiler.groupmemory": 163840,
>         "compiler.joinmemory": 163840,
>         "compiler.pregelix.home": "~/pregelix",
>         "compiler.sortmemory": 327680,
>         "core.dump.paths": {},
>         "feed.central.manager.port": 4500,
>         "feed.max.threshold.period": 5,
>         "feed.memory.available.wait.timeout": 10,
>         "feed.memory.global.budget": 67108864,
>         "feed.pending.work.threshold": 50,
>         "feed.port": 19003,
>         "instance.name": null,
>         "log.level": "INFO",
>         "max.wait.active.cluster": 60,
>         "metadata.callback.port": 0,
>         "metadata.node": "asterix_nc1",
>         "metadata.partition": "ID:0, Original Node: asterix_nc1, IODevice:
> 0, Active Node: asterix_nc1",
>         "metadata.port": 0,
>         "metadata.registration.timeout.secs": 60,
>         "node.partitions": {
>             "asterix_nc1": [
>                 "ID:0, Original Node: asterix_nc1, IODevice: 0, Active
> Node: asterix_nc1",
>                 "ID:1, Original Node: asterix_nc1, IODevice: 1, Active
> Node: asterix_nc1"
>             ],
>             "asterix_nc2": [
>                 "ID:2, Original Node: asterix_nc2, IODevice: 0, Active
> Node: asterix_nc2",
>                 "ID:3, Original Node: asterix_nc2, IODevice: 1, Active
> Node: asterix_nc2"
>             ]
>         },
>         "node.stores": {
>             "asterix_nc1": [
>                 "iodevice0",
>                 "iodevice1"
>             ],
>             "asterix_nc2": [
>                 "iodevice0",
>                 "iodevice1"
>             ]
>         },
>         "plot.activate": false,
>         "storage.buffercache.maxopenfiles": 2147483647 <(214)%20748-3647>,
>         "storage.buffercache.pagesize": 32768,
>         "storage.buffercache.size": 33554432,
>         "storage.lsm.bloomfilter.falsepositiverate": 0.01,
>         "storage.memorycomponent.globalbudget": 536870912,
>         "storage.memorycomponent.numcomponents": 2,
>         "storage.memorycomponent.numpages": 8,
>         "storage.memorycomponent.pagesize": 131072,
>         "storage.metadata.memorycomponent.numpages": 256,
>         "transaction.log.dirs": {
>             "asterix_nc1": "target/txnLogDir/asterix_nc1",
>             "asterix_nc2": "target/txnLogDir/asterix_nc2"
>         },
>         "txn.commitprofiler.reportinterval": 5,
>         "txn.job.recovery.memorysize": 67108864,
>         "txn.lock.escalationthreshold": 1000,
>         "txn.lock.shrinktimer": 5000,
>         "txn.lock.timeout.sweepthreshold": 10000,
>         "txn.lock.timeout.waitthreshold": 60000,
>         "txn.log.buffer.numpages": 8,
>         "txn.log.buffer.pagesize": 131072,
>         "txn.log.checkpoint.history": 0,
>         "txn.log.checkpoint.lsnthreshold": 67108864,
>         "txn.log.checkpoint.pollfrequency": 120,
>         "txn.log.partitionsize": 268435456,
>         "web.port": 19001,
>         "web.queryinterface.port": 19006,
>         "web.secondary.port": 19005
>     },
>     "diagnosticsUri": "http://127.0.0.1:19002/admin/diagnostics";,
>     "fullShutdownUri": "http://127.0.0.1:19002/admin/shutdown?all=true";,
>     "metadata_node": "asterix_nc1",
>     "ncs": [
>         {
>             "configUri": "
> http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/config";,
>             "node_id": "asterix_nc1",
>             "partitions": [
>                 {
>                     "active": true,
>                     "partition_id": "partition_0"
>                 },
>                 {
>                     "active": true,
>                     "partition_id": "partition_1"
>                 }
>             ],
>             "state": "ACTIVE",
>             "statsUri": "
> http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/stats";,
>             "threadDumpUri": "
> http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/threaddump";
>         },
>         {
>             "configUri": "
> http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/config";,
>             "node_id": "asterix_nc2",
>             "partitions": [
>                 {
>                     "active": true,
>                     "partition_id": "partition_2"
>                 },
>                 {
>                     "active": true,
>                     "partition_id": "partition_3"
>                 }
>             ],
>             "state": "ACTIVE",
>             "statsUri": "
> http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/stats";,
>             "threadDumpUri": "
> http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/threaddump";
>         }
>     ],
>     "replicationUri": "http://127.0.0.1:19002/admin/cluster/replication";,
>     "shutdownUri": "http://127.0.0.1:19002/admin/shutdown";,
>     "state": "ACTIVE",
>     "versionUri": "http://127.0.0.1:19002/admin/version";
> }
>
> But I got:
>
> {
>     "cc": {
>         "statsUri": "http://127.0.0.1:19002/admin/cluster/cc/stats";,
>         "configUri": "http://127.0.0.1:19002/admin/cluster/cc/config";,
>         "threadDumpUri": "
> http://127.0.0.1:19002/admin/cluster/cc/threaddump";
>     },
>     "replicationUri": "http://127.0.0.1:19002/admin/cluster/replication";,
>     "fullShutdownUri": "http://127.0.0.1:19002/admin/shutdown?all=true";,
>     "state": "ACTIVE",
>     "metadata_node": "asterix_nc1",
>     "shutdownUri": "http://127.0.0.1:19002/admin/shutdown";,
>     "versionUri": "http://127.0.0.1:19002/admin/version";,
>     "config": {
>         "feed.port": 19003,
>         "txn.log.checkpoint.history": 0,
>         "compiler.joinmemory": 163840,
>         "feed.central.manager.port": 4500,
>         "compiler.sortmemory": 327680,
>         "node.partitions": {
>             "asterix_nc1": [
>                 {
>                     "partitionId": 0,
>                     "IODeviceNum": 0,
>                     "active": true,
>                     "activeNodeId": "asterix_nc1",
>                     "nodeId": "asterix_nc1"
>                 },
>                 {
>                     "partitionId": 1,
>                     "IODeviceNum": 1,
>                     "active": true,
>                     "activeNodeId": "asterix_nc1",
>                     "nodeId": "asterix_nc1"
>                 }
>             ],
>             "asterix_nc2": [
>                 {
>                     "partitionId": 2,
>                     "IODeviceNum": 0,
>                     "active": true,
>                     "activeNodeId": "asterix_nc2",
>                     "nodeId": "asterix_nc2"
>                 },
>                 {
>                     "partitionId": 3,
>                     "IODeviceNum": 1,
>                     "active": true,
>                     "activeNodeId": "asterix_nc2",
>                     "nodeId": "asterix_nc2"
>                 }
>             ]
>         },
>         "txn.lock.shrinktimer": 5000,
>         "metadata.node": "asterix_nc1",
>         "metadata.port": 0,
>         "log.level": "INFO",
>         "api.port": 19002,
>         "cluster.partitions": {
>             "0": {
>                 "partitionId": 0,
>                 "IODeviceNum": 0,
>                 "active": true,
>                 "activeNodeId": "asterix_nc1",
>                 "nodeId": "asterix_nc1"
>             },
>             "1": {
>                 "partitionId": 1,
>                 "IODeviceNum": 1,
>                 "active": true,
>                 "activeNodeId": "asterix_nc1",
>                 "nodeId": "asterix_nc1"
>             },
>             "2": {
>                 "partitionId": 2,
>                 "IODeviceNum": 0,
>                 "active": true,
>                 "activeNodeId": "asterix_nc2",
>                 "nodeId": "asterix_nc2"
>             },
>             "3": {
>                 "partitionId": 3,
>                 "IODeviceNum": 1,
>                 "active": true,
>                 "activeNodeId": "asterix_nc2",
>                 "nodeId": "asterix_nc2"
>             }
>         },
>         "storage.buffercache.size": 33554432,
>         "web.port": 19001,
>         "txn.log.checkpoint.lsnthreshold": 67108864,
>         "txn.log.partitionsize": 268435456,
>         "compiler.groupmemory": 163840,
>         "max.wait.active.cluster": 60,
>         "metadata.registration.timeout.secs": 60,
>         "storage.lsm.bloomfilter.falsepositiverate": 0.01,
>         "plot.activate": false,
>         "storage.memorycomponent.globalbudget": 536870912,
>         "web.queryinterface.port": 19006,
>         "txn.log.buffer.numpages": 8,
>         "web.secondary.port": 19005,
>         "feed.memory.global.budget": 67108864,
>         "compiler.pregelix.home": "~/pregelix",
>         "compiler.framesize": 32768,
>         "feed.memory.available.wait.timeout": 10,
>         "core.dump.paths": {},
>         "txn.log.buffer.pagesize": 131072,
>         "txn.job.recovery.memorysize": 67108864,
>         "storage.metadata.memorycomponent.numpages": 256,
>         "metadata.partition": {
>             "partitionId": 0,
>             "IODeviceNum": 0,
>             "active": true,
>             "activeNodeId": "asterix_nc1",
>             "nodeId": "asterix_nc1"
>         },
>         "txn.log.checkpoint.pollfrequency": 120,
>         "feed.pending.work.threshold": 50,
>         "feed.max.threshold.period": 5,
>         "storage.buffercache.maxopenfiles": 2147483647 <(214)%20748-3647>,
>         "txn.lock.escalationthreshold": 1000,
>         "txn.lock.timeout.waitthreshold": 60000,
>         "storage.memorycomponent.numcomponents": 2,
>         "storage.buffercache.pagesize": 32768,
>         "storage.memorycomponent.numpages": 8,
>         "transaction.log.dirs": {
>             "asterix_nc1": "target/txnLogDir/asterix_nc1",
>             "asterix_nc2": "target/txnLogDir/asterix_nc2"
>         },
>         "txn.commitprofiler.reportinterval": 5,
>         "metadata.callback.port": 0,
>         "txn.lock.timeout.sweepthreshold": 10000,
>         "storage.memorycomponent.pagesize": 131072,
>         "node.stores": {
>             "asterix_nc1": [
>                 "iodevice0",
>                 "iodevice1"
>             ],
>             "asterix_nc2": [
>                 "iodevice0",
>                 "iodevice1"
>             ]
>         }
>     },
>     "diagnosticsUri": "http://127.0.0.1:19002/admin/diagnostics";,
>     "ncs": [
>         {
>             "partitions": [
>                 {
>                     "partition_id": "partition_0",
>                     "active": true
>                 },
>                 {
>                     "partition_id": "partition_1",
>                     "active": true
>                 }
>             ],
>             "statsUri": "
> http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/stats";,
>             "configUri": "
> http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/config";,
>             "state": "ACTIVE",
>             "node_id": "asterix_nc1",
>             "threadDumpUri": "
> http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/threaddump";
>         },
>         {
>             "partitions": [
>                 {
>                     "partition_id": "partition_2",
>                     "active": true
>                 },
>                 {
>                     "partition_id": "partition_3",
>                     "active": true
>                 }
>             ],
>             "statsUri": "
> http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/stats";,
>             "configUri": "
> http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/config";,
>             "state": "ACTIVE",
>             "node_id": "asterix_nc2",
>             "threadDumpUri": "
> http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/threaddump";
>         }
>     ]
> }
>
>
> -----Original Messages-----
> From: "李文海" <[email protected]>
> Sent Time: 2016-11-02 11:13:58 (Wednesday)
> To: [email protected]
> Cc:
> Subject: Re: Re: Question about the execution test issue.
>
> Hi, Till.
>     This is a local error. Since it didn't pass the execution tests, I have
> not pushed it onto Gerrit.
> I remember that before the error appeared, I had added org.reflection to
> asterix-app/pom.xml. After that I removed the dependency again, but the
> error still happens. In addition, in another local branch, this error does
> not exist. I am very confused about what happened on my local branch.
> Best,
> Wenhai
>
>
> > -----Original Messages-----
> > From: "Till Westmann" <[email protected]>
> > Sent Time: Wednesday, November 2, 2016
> > To: [email protected]
> > Cc:
> > Subject: Re: Question about the execution test issue.
> >
> > Hi Wenhai,
> >
> > These are relatively simple tests that validate the results of some HTTP
> > APIs providing information on the status of the cluster. AFAIK they work
> > reliably in the regression tests, so I'm not sure why that would change
> > in your branch.
> > Did you get those errors locally or on Jenkins?
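> >
> > For reference, you can hit one of these endpoints directly and eyeball the
> > raw payload. A minimal sketch (the /admin/cluster URI is an assumption on
> > my side, based on the cluster_state_1 test name):
> >
> > {noformat}
> > import java.io.BufferedReader;
> > import java.io.InputStreamReader;
> > import java.net.HttpURLConnection;
> > import java.net.URL;
> >
> > public class ClusterStateProbe {
> >     public static void main(String[] args) throws Exception {
> >         // Assumed cluster-state endpoint; adjust if your instance
> >         // serves it elsewhere.
> >         URL url = new URL("http://127.0.0.1:19002/admin/cluster");
> >         HttpURLConnection conn = (HttpURLConnection) url.openConnection();
> >         try (BufferedReader in = new BufferedReader(
> >                 new InputStreamReader(conn.getInputStream()))) {
> >             String line;
> >             while ((line = in.readLine()) != null) {
> >                 System.out.println(line);
> >             }
> >         }
> >     }
> > }
> > {noformat}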
> >
> > Cheers,
> > Till
> >
> > On 1 Nov 2016, at 8:51, 李文海 wrote:
> >
> > > Hi, all
> > >     In a recent merge from master into one of my local branches, I got
> > > the following error, which should not occur. Does anyone know why this
> > > happened?
> > >
> > > Best,
> > > Wenhai
> > >
> > > Hi, all
> > > Yesterday, after applying the latest master onto the branch, I got a
> > > series of errors (13 test cases) that should not be caused by my branch,
> > > like the following:
> > > {noformat}
> > > java.lang.Exception: Test "src/test/resources/runtimets/queries/api/cluster_state_1/cluster_state_1.1.cstate.aql" FAILED!
> > >     at org.apache.asterix.test.aql.TestExecutor.executeTest(TestExecutor.java:1028)
> > >     at org.apache.asterix.test.runtime.ExecutionTest.test(ExecutionTest.java:127)
> > >     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> > >     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> > >     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> > >     at java.lang.reflect.Method.invoke(Method.java:497)
> > >     at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
> > >     at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
> > >     at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
> > >     at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
> > >     at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
> > >     at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
> > >     at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
> > >     at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
> > >     at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
> > >     at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
> > >     at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
> > >     at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
> > >     at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
> > >     at org.junit.runners.Suite.runChild(Suite.java:128)
> > >     at org.junit.runners.Suite.runChild(Suite.java:27)
> > >     at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
> > >     at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
> > >     at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
> > >     at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
> > >     at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
> > >     at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
> > >     at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
> > >     at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
> > >     at org.eclipse.jdt.internal.junit4.runner.JUnit4TestReference.run(JUnit4TestReference.java:86)
> > >     at org.eclipse.jdt.internal.junit.runner.TestExecution.run(TestExecution.java:38)
> > >     at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:459)
> > >     at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:675)
> > >     at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:382)
> > >     at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:192)
> > > Caused by: org.apache.asterix.test.base.ComparisonException: Result for src/test/resources/runtimets/queries/api/cluster_state_1/cluster_state_1.1.cstate.aql changed at line 3:
> > > <         "configUri": "http://127.0.0.1:19002/admin/cluster/cc/config",
> > > >         "statsUri": "http://127.0.0.1:19002/admin/cluster/cc/stats",
> > >     at org.apache.asterix.test.aql.TestExecutor.throwLineChanged(TestExecutor.java:203)
> > >     at org.apache.asterix.test.aql.TestExecutor.runScriptAndCompareWithResult(TestExecutor.java:163)
> > >     at org.apache.asterix.test.aql.TestExecutor.executeTest(TestExecutor.java:870)
> > >     at org.apache.asterix.test.aql.TestExecutor.executeTest(TestExecutor.java:1000)
> > >     ... 34 more
> > >
> > > {noformat}
>
>
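
One more note on the failure mode: the ComparisonException in the stack trace
above comes from a line-by-line textual diff of the result file, so a
semantically identical response still fails when keys come back in a different
order. For illustration only (TestExecutor's real comparison is line-based),
an order-insensitive check with Jackson looks like this:

{noformat}
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonCompare {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Same fields, different order -- a textual diff reports a change
        // at the first reordered line, exactly as in the failure above.
        JsonNode expected = mapper.readTree("{\"configUri\":1,\"statsUri\":2}");
        JsonNode actual = mapper.readTree("{\"statsUri\":2,\"configUri\":1}");
        // ObjectNode.equals() compares fields by name, not by position, so
        // only genuine content differences (e.g. partition objects vs.
        // formatted strings) would still surface.
        System.out.println(expected.equals(actual)); // prints: true
    }
}
{noformat}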
