Build failed in Jenkins: beam_PreCommit_Python2_PVR_Flink_Cron #253

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
[...truncated 32.91 MB...]
at 
org.apache.beam.vendor.grpc.v1p21p0.io.grpc.internal.AbstractManagedChannelImplBuilder.build(AbstractManagedChannelImplBuilder.java:514)
at 
org.apache.beam.sdk.fn.channel.ManagedChannelFactory.forDescriptor(ManagedChannelFactory.java:44)
at 
org.apache.beam.runners.fnexecution.environment.ExternalEnvironmentFactory$1.close(ExternalEnvironmentFactory.java:155)
at 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory$WrappedSdkHarnessClient.$closeResource(DefaultJobBundleFactory.java:381)
at 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory$WrappedSdkHarnessClient.close(DefaultJobBundleFactory.java:381)
at 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory$WrappedSdkHarnessClient.unref(DefaultJobBundleFactory.java:401)
at 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory$WrappedSdkHarnessClient.access$800(DefaultJobBundleFactory.java:347)
at 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory.lambda$createEnvironmentCaches$3(DefaultJobBundleFactory.java:154)
at 
org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.processPendingNotifications(LocalCache.java:1809)
at 
org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.runUnlockedCleanup(LocalCache.java:3462)
at 
org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.postWriteCleanup(LocalCache.java:3438)
at 
org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.clear(LocalCache.java:3215)
at 
org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.clear(LocalCache.java:4270)
at 
org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LocalManualCache.invalidateAll(LocalCache.java:4909)
at 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory.close(DefaultJobBundleFactory.java:224)
at 
org.apache.beam.runners.fnexecution.control.DefaultExecutableStageContext.close(DefaultExecutableStageContext.java:43)
at 
org.apache.beam.runners.fnexecution.control.ReferenceCountingExecutableStageContextFactory$WrappedContext.closeActual(ReferenceCountingExecutableStageContextFactory.java:208)
at 
org.apache.beam.runners.fnexecution.control.ReferenceCountingExecutableStageContextFactory$WrappedContext.access$200(ReferenceCountingExecutableStageContextFactory.java:184)
at 
org.apache.beam.runners.fnexecution.control.ReferenceCountingExecutableStageContextFactory.release(ReferenceCountingExecutableStageContextFactory.java:173)
at 
org.apache.beam.runners.fnexecution.control.ReferenceCountingExecutableStageContextFactory.scheduleRelease(ReferenceCountingExecutableStageContextFactory.java:132)
at 
org.apache.beam.runners.fnexecution.control.ReferenceCountingExecutableStageContextFactory.access$300(ReferenceCountingExecutableStageContextFactory.java:44)
at 
org.apache.beam.runners.fnexecution.control.ReferenceCountingExecutableStageContextFactory$WrappedContext.close(ReferenceCountingExecutableStageContextFactory.java:204)
at 
org.apache.beam.runners.flink.translation.wrappers.streaming.ExecutableStageDoFnOperator.$closeResource(ExecutableStageDoFnOperator.java:489)
at 
org.apache.beam.runners.flink.translation.wrappers.streaming.ExecutableStageDoFnOperator.dispose(ExecutableStageDoFnOperator.java:489)
at 
org.apache.flink.streaming.runtime.tasks.StreamTask.tryDisposeAllOperators(StreamTask.java:562)
at 
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:443)
at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:705)
at org.apache.flink.runtime.taskmanager.Task.run(Task.java:530)
at java.lang.Thread.run(Thread.java:748)

[assert_that/Group/GroupByKey -> [3]assert_that/{Group, Unkey, Match} (1/2)] 
INFO org.apache.flink.runtime.state.heap.HeapKeyedStateBackend - Initializing 
heap keyed state backend with stream factory.
[assert_that/Group/GroupByKey -> [3]assert_that/{Group, Unkey, Match} (2/2)] 
INFO org.apache.flink.runtime.state.heap.HeapKeyedStateBackend - Initializing 
heap keyed state backend with stream factory.
INFO:apache_beam.runners.worker.statecache:Creating state cache with size 0
INFO:apache_beam.runners.worker.sdk_worker:Creating insecure control channel 
for localhost:37717.
INFO:apache_beam.runners.worker.sdk_worker:Control channel established.
INFO:apache_beam.runners.worker.sdk_worker:Initializing SDKHarness with 
unbounded number of workers.
[grpc-default-executor-1] INFO 
org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService - 
Beam Fn Control client connected 

Build failed in Jenkins: beam_PostCommit_Python2 #1133

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
[...truncated 1.64 MB...]
"name": "dataflow_python_sdk.tar"
  }, 
  {
"location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-1204070151-897789.1575442911.897929/dataflow-worker.jar",
 
"name": "dataflow-worker.jar"
  }
], 
"taskrunnerSettings": {
  "parallelWorkerSettings": {
"baseUrl": "https://dataflow.googleapis.com;, 
"servicePath": "https://dataflow.googleapis.com;
  }
}, 
"workerHarnessContainerImage": 
"gcr.io/cloud-dataflow/v1beta3/python:beam-master-20191112"
  }
]
  }, 
  "name": "beamapp-jenkins-1204070151-897789", 
  "steps": [
{
  "kind": "ParallelRead", 
  "name": "s1", 
  "properties": {
"bigquery_export_format": "FORMAT_AVRO", 
"bigquery_flatten_results": true, 
"bigquery_query": "SELECT bytes, date, time FROM 
[python_query_to_table_15754429102349.python_new_types_table]", 
"bigquery_use_legacy_sql": true, 
"display_data": [
  {
"key": "source", 
"label": "Read Source", 
"namespace": "apache_beam.io.iobase.Read", 
"shortValue": "BigQuerySource", 
"type": "STRING", 
"value": "apache_beam.io.gcp.bigquery.BigQuerySource"
  }, 
  {
"key": "query", 
"label": "Query", 
"namespace": "apache_beam.io.gcp.bigquery.BigQuerySource", 
"type": "STRING", 
"value": "SELECT bytes, date, time FROM 
[python_query_to_table_15754429102349.python_new_types_table]"
  }, 
  {
"key": "validation", 
"label": "Validation Enabled", 
"namespace": "apache_beam.io.gcp.bigquery.BigQuerySource", 
"type": "BOOLEAN", 
"value": false
  }
], 
"format": "bigquery", 
"output_info": [
  {
"encoding": {
  "@type": "kind:windowed_value", 
  "component_encodings": [
{
  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
  "component_encodings": [
{
  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
  "component_encodings": []
}, 
{
  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
  "component_encodings": []
}
  ], 
  "is_pair_like": true
}, 
{
  "@type": "kind:global_window"
}
  ], 
  "is_wrapper": true
}, 
"output_name": "out", 
"user_name": "read.out"
  }
], 
"user_name": "read"
  }
}, 
{
  "kind": "ParallelWrite", 
  "name": "s2", 
  "properties": {
"create_disposition": "CREATE_IF_NEEDED", 
"dataset": "python_query_to_table_15754429102349", 
"display_data": [], 
"encoding": {
  "@type": "kind:windowed_value", 
  "component_encodings": [
{
  "@type": 
"RowAsDictJsonCoder$eNprYEpOLEhMzkiNT0pNzNXLzNdLTy7QS8pMLyxNLaqML8nPzynmCsovdyx2yUwu8SrOz3POT0kt4ipk0GwsZKwtZErSAwBKpRfo",
 
  "component_encodings": []
}, 
{
  "@type": "kind:global_window"
}
  ], 
  "is_wrapper": true
}, 
"format": "bigquery", 
"parallel_input": {
  "@type": "OutputReference", 
  "output_name": "out", 
  "step_name": "s1"
}, 
"schema": "{\"fields\": [{\"type\": \"BYTES\", \"name\": \"bytes\", 
\"mode\": \"NULLABLE\"}, {\"type\": \"DATE\", \"name\": \"date\", \"mode\": 
\"NULLABLE\"}, {\"type\": \"TIME\", \"name\": \"time\", \"mode\": 
\"NULLABLE\"}]}", 
"table": "output_table", 
"user_name": "write/WriteToBigQuery/NativeWrite", 
"write_disposition": "WRITE_EMPTY"
  }
}
  ], 
  "type": "JOB_TYPE_BATCH"
}
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: 
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id: 
[2019-12-03_23_02_01-13470319390034103051]
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow 
monitoring console, please navigate to 

beam_PostCommit_Py_VR_Dataflow - Build # 5219 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Py_VR_Dataflow (build 
#5219)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/5219/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Build failed in Jenkins: beam_PreCommit_JavaPortabilityApi_Cron #1579

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
[...truncated 31.20 KB...]
Resolving github.com/pelletier/go-toml: 
commit='acdc4509485b587f5e675510c4f2c63e90ff68a8', 
urls=[https://github.com/pelletier/go-toml.git, 
g...@github.com:pelletier/go-toml.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/pierrec/lz4: 
commit='ed8d4cc3b461464e69798080a0092bd028910298', 
urls=[https://github.com/pierrec/lz4.git, g...@github.com:pierrec/lz4.git]
Resolving github.com/pierrec/xxHash: 
commit='a0006b13c722f7f12368c00a3d3c2ae8a999a0c6', 
urls=[https://github.com/pierrec/xxHash.git, g...@github.com:pierrec/xxHash.git]
Resolving github.com/pkg/errors: 
commit='30136e27e2ac8d167177e8a583aa4c3fea5be833', 
urls=[https://github.com/pkg/errors.git, g...@github.com:pkg/errors.git]
Resolving github.com/pkg/sftp: 
commit='22e9c1ccc02fc1b9fa3264572e49109b68a86947', 
urls=[https://github.com/pkg/sftp.git, g...@github.com:pkg/sftp.git]

> Task :sdks:java:core:shadowJar
> Task :vendor:sdks-java-extensions-protobuf:compileJava FROM-CACHE
> Task :vendor:sdks-java-extensions-protobuf:classes UP-TO-DATE
> Task :sdks:java:extensions:protobuf:extractIncludeProto
> Task :sdks:java:extensions:protobuf:generateProto NO-SOURCE
> Task :sdks:java:io:kafka:compileJava FROM-CACHE
> Task :sdks:java:io:kafka:classes UP-TO-DATE
> Task :runners:local-java:compileJava FROM-CACHE
> Task :runners:local-java:classes UP-TO-DATE
> Task :sdks:java:extensions:protobuf:compileJava FROM-CACHE
> Task :sdks:java:extensions:protobuf:classes UP-TO-DATE
> Task :runners:local-java:jar
> Task :sdks:java:extensions:protobuf:jar
> Task :vendor:sdks-java-extensions-protobuf:shadowJar
> Task :runners:core-construction-java:compileJava FROM-CACHE
> Task :runners:core-construction-java:classes UP-TO-DATE
> Task :sdks:java:extensions:google-cloud-platform-core:compileJava FROM-CACHE
> Task :sdks:java:extensions:google-cloud-platform-core:classes UP-TO-DATE
> Task :sdks:java:io:kafka:jar
> Task :sdks:java:extensions:google-cloud-platform-core:jar
> Task :sdks:java:fn-execution:compileJava FROM-CACHE
> Task :sdks:java:fn-execution:classes UP-TO-DATE
> Task :sdks:java:fn-execution:jar
> Task :runners:core-construction-java:jar
> Task :sdks:java:io:google-cloud-platform:compileJava FROM-CACHE
> Task :sdks:java:io:google-cloud-platform:classes UP-TO-DATE
> Task :runners:core-java:compileJava FROM-CACHE
> Task :runners:core-java:classes UP-TO-DATE
> Task :runners:core-java:jar
> Task :sdks:java:io:google-cloud-platform:jar
> Task :sdks:java:harness:compileJava FROM-CACHE
> Task :sdks:java:harness:classes UP-TO-DATE
> Task :sdks:java:harness:jar
> Task :examples:java:compileJava FROM-CACHE
> Task :examples:java:classes UP-TO-DATE
> Task :runners:google-cloud-dataflow-java:compileJava FROM-CACHE
> Task :runners:google-cloud-dataflow-java:classes
> Task :examples:java:jar
> Task :runners:google-cloud-dataflow-java:jar
> Task :examples:java:compileTestJava FROM-CACHE
> Task :examples:java:testClasses
> Task :examples:java:testJar
> Task :sdks:java:core:compileTestJava FROM-CACHE
> Task :sdks:java:core:testClasses

> Task :sdks:go:resolveBuildDependencies
Resolving github.com/prometheus/client_golang: 
commit='9bb6ab929dcbe1c8393cd9ef70387cb69811bd1c', 
urls=[https://github.com/prometheus/client_golang.git, 
g...@github.com:prometheus/client_golang.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/prometheus/procfs: 
commit='cb4147076ac75738c9a7d279075a253c0cc5acbd', 
urls=[https://github.com/prometheus/procfs.git, 
g...@github.com:prometheus/procfs.git]
Resolving github.com/rcrowley/go-metrics: 
commit='8732c616f52954686704c8645fe1a9d59e9df7c1', 
urls=[https://github.com/rcrowley/go-metrics.git, 
g...@github.com:rcrowley/go-metrics.git]
Resolving github.com/cpuguy83/go-md2man: 
commit='dc9f53734905c233adfc09fd4f063dce63ce3daf', 
urls=[https://github.com/cpuguy83/go-md2man.git, 
g...@github.com:cpuguy83/go-md2man.git]
Resolving cached github.com/cpuguy83/go-md2man: 
commit='dc9f53734905c233adfc09fd4f063dce63ce3daf', 
urls=[https://github.com/cpuguy83/go-md2man.git, 
g...@github.com:cpuguy83/go-md2man.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, 

Build failed in Jenkins: beam_PostCommit_XVR_Flink #1101

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
[...truncated 3.22 MB...]
[CHAIN MapPartition (MapPartition at 
[3]{ExternalTransform(beam:transforms:xlang:filter_less_than_eq), 
ExternalTransform(beam:transforms:xlang:count)}) -> FlatMap (FlatMap at 
ExtractOutput[0]) -> Map (Key Extractor) -> GroupCombine (GroupCombine at 
GroupCombine: 
ExternalTransform(beam:transforms:xlang:count)/Combine.perKey(Count)/Group) -> 
Map (Key Extractor) (5/16)] WARN org.apache.flink.metrics.MetricGroup - The 
operator name GroupCombine (GroupCombine at GroupCombine: 
ExternalTransform(beam:transforms:xlang:count)/Combine.perKey(Count)/Group) 
exceeded the 80 characters length limit and was truncated.
[CHAIN MapPartition (MapPartition at 
[3]{ExternalTransform(beam:transforms:xlang:filter_less_than_eq), 
ExternalTransform(beam:transforms:xlang:count)}) -> FlatMap (FlatMap at 
ExtractOutput[0]) -> Map (Key Extractor) -> GroupCombine (GroupCombine at 
GroupCombine: 
ExternalTransform(beam:transforms:xlang:count)/Combine.perKey(Count)/Group) -> 
Map (Key Extractor) (8/16)] INFO org.apache.flink.runtime.taskmanager.Task - 
Registering task at network: CHAIN MapPartition (MapPartition at 
[3]{ExternalTransform(beam:transforms:xlang:filter_less_than_eq), 
ExternalTransform(beam:transforms:xlang:count)}) -> FlatMap (FlatMap at 
ExtractOutput[0]) -> Map (Key Extractor) -> GroupCombine (GroupCombine at 
GroupCombine: 
ExternalTransform(beam:transforms:xlang:count)/Combine.perKey(Count)/Group) -> 
Map (Key Extractor) (8/16) (e961b78aafc796a16eea9fca8b7501eb) [DEPLOYING].
[CHAIN MapPartition (MapPartition at 
[3]{ExternalTransform(beam:transforms:xlang:filter_less_than_eq), 
ExternalTransform(beam:transforms:xlang:count)}) -> FlatMap (FlatMap at 
ExtractOutput[0]) -> Map (Key Extractor) -> GroupCombine (GroupCombine at 
GroupCombine: 
ExternalTransform(beam:transforms:xlang:count)/Combine.perKey(Count)/Group) -> 
Map (Key Extractor) (8/16)] INFO org.apache.flink.runtime.taskmanager.Task - 
CHAIN MapPartition (MapPartition at 
[3]{ExternalTransform(beam:transforms:xlang:filter_less_than_eq), 
ExternalTransform(beam:transforms:xlang:count)}) -> FlatMap (FlatMap at 
ExtractOutput[0]) -> Map (Key Extractor) -> GroupCombine (GroupCombine at 
GroupCombine: 
ExternalTransform(beam:transforms:xlang:count)/Combine.perKey(Count)/Group) -> 
Map (Key Extractor) (8/16) (e961b78aafc796a16eea9fca8b7501eb) switched from 
DEPLOYING to RUNNING.
[CHAIN MapPartition (MapPartition at 
[3]{ExternalTransform(beam:transforms:xlang:filter_less_than_eq), 
ExternalTransform(beam:transforms:xlang:count)}) -> FlatMap (FlatMap at 
ExtractOutput[0]) -> Map (Key Extractor) -> GroupCombine (GroupCombine at 
GroupCombine: 
ExternalTransform(beam:transforms:xlang:count)/Combine.perKey(Count)/Group) -> 
Map (Key Extractor) (8/16)] WARN org.apache.flink.metrics.MetricGroup - The 
operator name MapPartition (MapPartition at 
[3]{ExternalTransform(beam:transforms:xlang:filter_less_than_eq), 
ExternalTransform(beam:transforms:xlang:count)}) exceeded the 80 characters 
length limit and was truncated.
[flink-akka.actor.default-dispatcher-5] INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph - CHAIN MapPartition 
(MapPartition at 
[3]{ExternalTransform(beam:transforms:xlang:filter_less_than_eq), 
ExternalTransform(beam:transforms:xlang:count)}) -> FlatMap (FlatMap at 
ExtractOutput[0]) -> Map (Key Extractor) -> GroupCombine (GroupCombine at 
GroupCombine: 
ExternalTransform(beam:transforms:xlang:count)/Combine.perKey(Count)/Group) -> 
Map (Key Extractor) (8/16) (e961b78aafc796a16eea9fca8b7501eb) switched from 
DEPLOYING to RUNNING.
[CHAIN MapPartition (MapPartition at 
[3]{ExternalTransform(beam:transforms:xlang:filter_less_than_eq), 
ExternalTransform(beam:transforms:xlang:count)}) -> FlatMap (FlatMap at 
ExtractOutput[0]) -> Map (Key Extractor) -> GroupCombine (GroupCombine at 
GroupCombine: 
ExternalTransform(beam:transforms:xlang:count)/Combine.perKey(Count)/Group) -> 
Map (Key Extractor) (8/16)] WARN org.apache.flink.metrics.MetricGroup - The 
operator name GroupCombine (GroupCombine at GroupCombine: 
ExternalTransform(beam:transforms:xlang:count)/Combine.perKey(Count)/Group) 
exceeded the 80 characters length limit and was truncated.
[CHAIN MapPartition (MapPartition at [3]{Create, Map(unicode), Map()}) -> FlatMap (FlatMap at ExtractOutput[0]) (5/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - CHAIN MapPartition (MapPartition at 
[3]{Create, Map(unicode), Map()}) -> FlatMap 
(FlatMap at ExtractOutput[0]) (5/16) (4b8a79353b4d2ddba1f4824a6f41275d) 
switched from RUNNING to FINISHED.
[CHAIN MapPartition (MapPartition at [3]{Create, Map(unicode), Map()}) -> FlatMap (FlatMap at ExtractOutput[0]) (5/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Freeing task resources for CHAIN 
MapPartition (MapPartition at [3]{Create, Map(unicode), Map()}) -> FlatMap 

Build failed in Jenkins: beam_BiqQueryIO_Streaming_Performance_Test_Java #205

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
[...truncated 409.02 KB...]
"message" : "Invalid table ID 
\"bqio_write_10GB_java_2f8340d2-f155-4936-bfe8-d8710ce6c8a8\". Table IDs must 
be alphanumeric (plus underscores) and must be at most 1024 characters long. 
Also, Table decorators cannot be used.",
"reason" : "invalid"
  } ],
  "message" : "Invalid table ID 
\"bqio_write_10GB_java_2f8340d2-f155-4936-bfe8-d8710ce6c8a8\". Table IDs must 
be alphanumeric (plus underscores) and must be at most 1024 characters long. 
Also, Table decorators cannot be used.",
  "status" : "INVALID_ARGUMENT"
}
at 
com.google.api.client.googleapis.json.GoogleJsonResponseException.from(GoogleJsonResponseException.java:150)
at 
com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest.newExceptionOnError(AbstractGoogleJsonClientRequest.java:113)
at 
com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest.newExceptionOnError(AbstractGoogleJsonClientRequest.java:40)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest$1.interceptResponse(AbstractGoogleClientRequest.java:417)
at com.google.api.client.http.HttpRequest.execute(HttpRequest.java:1132)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:515)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:448)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.execute(AbstractGoogleClientRequest.java:565)
at 
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$DatasetServiceImpl.tryCreateTable(BigQueryServicesImpl.java:520)
at 
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$DatasetServiceImpl.createTable(BigQueryServicesImpl.java:505)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.tryCreateTable(CreateTables.java:205)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.getTableDestination(CreateTables.java:160)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.lambda$processElement$0(CreateTables.java:113)
at java.util.HashMap.computeIfAbsent(HashMap.java:1126)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.processElement(CreateTables.java:112)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn$DoFnInvoker.invokeProcessElement(Unknown
 Source)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:218)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:183)
at 
org.apache.beam.runners.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:335)
at 
org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
at 
org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:49)
at 
org.apache.beam.runners.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:280)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:252)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:74)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:576)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:564)
at 
org.apache.beam.sdk.io.gcp.bigquery.PrepareWrite$1.processElement(PrepareWrite.java:82)
at 
org.apache.beam.sdk.io.gcp.bigquery.PrepareWrite$1$DoFnInvoker.invokeProcessElement(Unknown
 Source)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:218)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:180)
at 
org.apache.beam.runners.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:335)
at 
org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
at 
org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:49)
at 

Build failed in Jenkins: beam_PostCommit_Python_VR_Spark #1676

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[echauchot] [BEAM-8470] Update capability matrix: add Spark Structured Streaming

[echauchot] [BEAM-8470] Update Spark runner page: add Spark Structured Streaming


--
[...truncated 1.44 MB...]
  File "apache_beam/runners/portability/portable_runner_test.py", line 75, in 
handler
raise BaseException(msg)
BaseException: Timed out after 60 seconds.

==
ERROR: test_pardo_timers (__main__.SparkRunnerTest)
--
Traceback (most recent call last):
  File "apache_beam/runners/portability/fn_api_runner_test.py", line 328, in 
test_pardo_timers
assert_that(actual, equal_to(expected))
  File "apache_beam/pipeline.py", line 436, in __exit__
self.run().wait_until_finish()
  File "apache_beam/runners/portability/portable_runner.py", line 428, in 
wait_until_finish
l_finish_read, started daemon 139703307187968)>
for state_response in self._state_stream:

  File 
"
 line 395, in next
return self._next()
# Thread: 

  File 
"
 line 552, in _next
# Thread: 

_common.wait(self._state.condition.wait, _response_ready)
# Thread: 

  File 
"
 line 140, in wait
# Thread: 

_wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
# Thread: 

  File 
"
 line 105, in _wait_once
# Thread: 

# Thread: 

wait_fn(timeout=timeout)
# Thread: 

  File "/usr/lib/python2.7/threading.py", line 359, in wait
_sleep(delay)
# Thread: 

# Thread: 
 Timed out after 60 seconds. 

  File "apache_beam/runners/portability/portable_runner_test.py", line 75, in 
handler
raise BaseException(msg)
BaseException: Timed out after 60 seconds.

# Thread: 

# Thread: 

# Thread: <_MainThread(MainThread, started 139705073448704)>

# Thread: 

# Thread: 

# Thread: 

# Thread: 

# Thread: 

# Thread: 

# Thread: 

# Thread: 
==
ERROR: test_pardo_unfusable_side_inputs (__main__.SparkRunnerTest)
--
Traceback (most recent call last):
  File "apache_beam/runners/portability/fn_api_runner_test.py", line 244, in 
test_pardo_unfusable_side_inputs
equal_to([('a', 'a'), ('a', 'b'), ('b', 'a'), ('b', 'b')]))
  File "apache_beam/pipeline.py", line 436, in __exit__
self.run().wait_until_finish()
  File "apache_beam/runners/portability/portable_runner.py", line 428, in 
wait_until_finish
for state_response in self._state_stream:
  File 
"
 line 395, in next
return self._next()
  File 
"
 line 552, in _next
_common.wait(self._state.condition.wait, _response_ready)
  File 
"
 line 140, in wait
_wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File 
"
 line 105, in _wait_once
wait_fn(timeout=timeout)
  File "/usr/lib/python2.7/threading.py", line 359, in wait
_sleep(delay)
  File "apache_beam/runners/portability/portable_runner_test.py", line 75, in 
handler
raise BaseException(msg)
BaseException: Timed out after 60 seconds.

==
ERROR: test_pardo_windowed_side_inputs (__main__.SparkRunnerTest)
--
Traceback (most recent call last):
  File "apache_beam/runners/portability/fn_api_runner_test.py", line 181, in 
test_pardo_windowed_side_inputs
label='windowed')
  File "apache_beam/pipeline.py", line 436, in __exit__
self.run().wait_until_finish()
  File "apache_beam/runners/portability/portable_runner.py", line 428, in 
wait_until_finish

beam_PostCommit_Java_PortabilityApi - Build # 3510 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Java_PortabilityApi 
(build #3510)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Java_PortabilityApi/3510/ to view 
the results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Build failed in Jenkins: beam_PostCommit_Python2 #1124

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[echauchot] [BEAM-8470] Update capability matrix: add Spark Structured Streaming

[echauchot] [BEAM-8470] Update Spark runner page: add Spark Structured Streaming


--
[...truncated 1.72 MB...]
  {
"encoding": {
  "@type": "kind:windowed_value", 
  "component_encodings": [
{
  "@type": "kind:pair", 
  "component_encodings": [
{
  "@type": "kind:bytes"
}, 
{
  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
  "component_encodings": [
{
  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
  "component_encodings": [], 
  "pipeline_proto_coder_id": 
"ref_Coder_FastPrimitivesCoder_3"
}, 
{
  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
  "component_encodings": [], 
  "pipeline_proto_coder_id": 
"ref_Coder_FastPrimitivesCoder_3"
}
  ], 
  "is_pair_like": true, 
  "pipeline_proto_coder_id": 
"ref_Coder_FastPrimitivesCoder_3"
}
  ], 
  "is_pair_like": true
}, 
{
  "@type": "kind:global_window"
}
  ], 
  "is_wrapper": true
}, 
"output_name": "out", 
"user_name": "write/Write/WriteImpl/FinalizeWrite/MapToVoidKey0.out"
  }
], 
"parallel_input": {
  "@type": "OutputReference", 
  "output_name": "out", 
  "step_name": "s17"
}, 
"serialized_fn": 
"ref_AppliedPTransform_write/Write/WriteImpl/FinalizeWrite/MapToVoidKey0_38", 
"user_name": "write/Write/WriteImpl/FinalizeWrite/MapToVoidKey0"
  }
}, 
{
  "kind": "ParallelDo", 
  "name": "s41", 
  "properties": {
"display_data": [
  {
"key": "fn", 
"label": "Transform Function", 
"namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
"type": "STRING", 
"value": ""
  }, 
  {
"key": "fn", 
"label": "Transform Function", 
"namespace": "apache_beam.transforms.core.ParDo", 
"shortValue": "CallableWrapperDoFn", 
"type": "STRING", 
"value": "apache_beam.transforms.core.CallableWrapperDoFn"
  }
], 
"non_parallel_inputs": {}, 
"output_info": [
  {
"encoding": {
  "@type": "kind:windowed_value", 
  "component_encodings": [
{
  "@type": "kind:pair", 
  "component_encodings": [
{
  "@type": "kind:bytes"
}, 
{
  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
  "component_encodings": [
{
  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
  "component_encodings": [], 
  "pipeline_proto_coder_id": 
"ref_Coder_FastPrimitivesCoder_3"
}, 
{
  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
  "component_encodings": [], 
  "pipeline_proto_coder_id": 
"ref_Coder_FastPrimitivesCoder_3"
}
  ], 
  "is_pair_like": true, 
  "pipeline_proto_coder_id": 
"ref_Coder_FastPrimitivesCoder_3"
}
  ], 
  "is_pair_like": true
}, 
{
  "@type": "kind:global_window"
}
  ], 
  

Build failed in Jenkins: beam_PostCommit_Python_VR_Spark #1677

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[chadrik] [BEAM-8523] JobAPI: Give access to timestamped state change history

[chadrik] Rename GetJobStateResponse to JobStateEvent

[chadrik] Move state history utilities to AbstractBeamJob

[chadrik] Small bugfix to FlinkBeamJob job state mapping

[chadrik] Fix existing bugs in AbstractJobServiceServicer

[chadrik] Use timestamp.Timestamp instead of float


--
[...truncated 1.32 MB...]
19/12/03 11:35:02 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:41335
19/12/03 11:35:02 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/03 11:35:02 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/03 11:35:02 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575372899.81_4d5c66d7-175f-482a-966c-31a058f1590f',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/03 11:35:02 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 'experiments': [u'beam_fn_api'], 
'environment_cache_millis': u'0', 'artifact_port': u'0', 'environment_type': 
u'PROCESS', 'sdk_location': u'container', 'job_name': 
u'test_windowing_1575372899.81', 'environment_config': u'{"command": 
"
 'expansion_port': u'0', 'sdk_worker_parallelism': u'1', 'job_endpoint': 
u'localhost:36757', 'job_port': u'0'}
19/12/03 11:35:02 INFO statecache.__init__: Creating state cache with size 0
19/12/03 11:35:02 INFO sdk_worker.__init__: Creating insecure control channel 
for localhost:45955.
19/12/03 11:35:02 INFO sdk_worker.__init__: Control channel established.
19/12/03 11:35:02 INFO 
org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: Beam 
Fn Control client connected with id 262-1
19/12/03 11:35:02 INFO sdk_worker.__init__: Initializing SDKHarness with 
unbounded number of workers.
19/12/03 11:35:02 INFO sdk_worker.create_state_handler: Creating insecure state 
channel for localhost:41491.
19/12/03 11:35:02 INFO sdk_worker.create_state_handler: State channel 
established.
19/12/03 11:35:02 INFO data_plane.create_data_channel: Creating client data 
channel for localhost:42757
19/12/03 11:35:02 INFO 
org.apache.beam.runners.fnexecution.data.GrpcDataService: Beam Fn Data client 
connected.
19/12/03 11:35:02 INFO 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory: Closing 
environment urn: "beam:env:process:v1"
payload: 
"\032\202\001

19/12/03 11:35:02 INFO sdk_worker.run: No more requests from control plane
19/12/03 11:35:02 INFO sdk_worker.run: SDK Harness waiting for in-flight 
requests to complete
19/12/03 11:35:02 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/03 11:35:02 INFO data_plane.close: Closing all cached grpc data channels.
19/12/03 11:35:02 INFO sdk_worker.close: Closing all cached gRPC state handlers.
19/12/03 11:35:02 INFO sdk_worker.run: Done consuming work.
19/12/03 11:35:02 INFO sdk_worker_main.main: Python sdk harness exiting.
19/12/03 11:35:02 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Logging client 
hanged up.
19/12/03 11:35:03 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/03 11:35:03 INFO 
org.apache.beam.runners.fnexecution.artifact.AbstractArtifactRetrievalService: 
GetManifest for __no_artifacts_staged__
19/12/03 11:35:03 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Beam Fn Logging 
client connected.
19/12/03 11:35:03 INFO sdk_worker_main.main: Logging handler created.
19/12/03 11:35:03 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:36193
19/12/03 11:35:03 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/03 11:35:03 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/03 11:35:03 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575372899.81_4d5c66d7-175f-482a-966c-31a058f1590f',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/03 11:35:03 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 

Build failed in Jenkins: beam_PreCommit_Python_pytest_Cron #117

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[chadrik] [BEAM-8523] JobAPI: Give access to timestamped state change history

[chadrik] Rename GetJobStateResponse to JobStateEvent

[chadrik] Move state history utilities to AbstractBeamJob

[chadrik] Small bugfix to FlinkBeamJob job state mapping

[chadrik] Fix existing bugs in AbstractJobServiceServicer

[chadrik] Use timestamp.Timestamp instead of float

[echauchot] [BEAM-8470] Update capability matrix: add Spark Structured Streaming

[echauchot] [BEAM-8470] Update Spark runner page: add Spark Structured Streaming


--
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-4 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 >  # 
 > timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision f8496d318b789716242f2f8b6381ee862b84ef3f (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f f8496d318b789716242f2f8b6381ee862b84ef3f
Commit message: "Merge pull request #9959: [BEAM-8523] JobAPI: Give access to 
timestamped state change history"
 > git rev-list --no-walk 08d9e7b453e99c2812915407a74dfbf473c0ffcb # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g :pythonPreCommitPytest
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.

FAILURE: Build failed with an exception.

* What went wrong:
Could not determine the dependencies of task 
':sdks:python:test-suites:tox:py35:preCommitPy35Pytest'.
> Task with path 'lint' not found in project 
> ':sdks:python:test-suites:tox:py35'.

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See 
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 13s

Publishing build scan...
https://scans.gradle.com/s/ut5j6a6u4olns

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_PostCommit_Python_VR_Spark #1678

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
[...truncated 1.31 MB...]
19/12/03 12:09:39 INFO org.apache.beam.runners.spark.SparkPipelineRunner: 
Running job test_windowing_1575374977.56_6084a658-2be8-4d03-8ebc-83558061f5a5 
on Spark master local
19/12/03 12:09:39 WARN 
org.apache.beam.runners.spark.translation.GroupNonMergingWindowsFunctions: 
Either coder LengthPrefixCoder(ByteArrayCoder) or GlobalWindow$Coder is not 
consistent with equals. That might cause issues on some runners.
19/12/03 12:09:39 INFO org.apache.beam.runners.spark.SparkPipelineRunner: Job 
test_windowing_1575374977.56_6084a658-2be8-4d03-8ebc-83558061f5a5: Pipeline 
translated successfully. Computing outputs
19/12/03 12:09:39 INFO 
org.apache.beam.runners.fnexecution.artifact.AbstractArtifactRetrievalService: 
GetManifest for __no_artifacts_staged__
19/12/03 12:09:40 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Beam Fn Logging 
client connected.
19/12/03 12:09:40 INFO sdk_worker_main.main: Logging handler created.
19/12/03 12:09:40 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:36999
19/12/03 12:09:40 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/03 12:09:40 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/03 12:09:40 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575374977.56_6084a658-2be8-4d03-8ebc-83558061f5a5',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/03 12:09:40 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 'experiments': [u'beam_fn_api'], 
'environment_cache_millis': u'0', 'artifact_port': u'0', 'environment_type': 
u'PROCESS', 'sdk_location': u'container', 'job_name': 
u'test_windowing_1575374977.56', 'environment_config': u'{"command": 
"
 'expansion_port': u'0', 'sdk_worker_parallelism': u'1', 'job_endpoint': 
u'localhost:36381', 'job_port': u'0'}
19/12/03 12:09:40 INFO statecache.__init__: Creating state cache with size 0
19/12/03 12:09:40 INFO sdk_worker.__init__: Creating insecure control channel 
for localhost:41905.
19/12/03 12:09:40 INFO 
org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: Beam 
Fn Control client connected with id 261-1
19/12/03 12:09:40 INFO sdk_worker.__init__: Control channel established.
19/12/03 12:09:40 INFO sdk_worker.__init__: Initializing SDKHarness with 
unbounded number of workers.
19/12/03 12:09:40 INFO sdk_worker.create_state_handler: Creating insecure state 
channel for localhost:41869.
19/12/03 12:09:40 INFO sdk_worker.create_state_handler: State channel 
established.
19/12/03 12:09:40 INFO data_plane.create_data_channel: Creating client data 
channel for localhost:32883
19/12/03 12:09:40 INFO 
org.apache.beam.runners.fnexecution.data.GrpcDataService: Beam Fn Data client 
connected.
19/12/03 12:09:40 INFO 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory: Closing 
environment urn: "beam:env:process:v1"
payload: 
"\032\202\001

19/12/03 12:09:40 INFO sdk_worker.run: No more requests from control plane
19/12/03 12:09:40 INFO sdk_worker.run: SDK Harness waiting for in-flight 
requests to complete
19/12/03 12:09:40 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/03 12:09:40 INFO data_plane.close: Closing all cached grpc data channels.
19/12/03 12:09:40 INFO sdk_worker.close: Closing all cached gRPC state handlers.
19/12/03 12:09:40 INFO sdk_worker.run: Done consuming work.
19/12/03 12:09:40 INFO sdk_worker_main.main: Python sdk harness exiting.
19/12/03 12:09:40 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Logging client 
hanged up.
19/12/03 12:09:40 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/03 12:09:40 INFO 
org.apache.beam.runners.fnexecution.artifact.AbstractArtifactRetrievalService: 
GetManifest for __no_artifacts_staged__
19/12/03 12:09:41 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Beam Fn Logging 
client connected.
19/12/03 12:09:41 INFO sdk_worker_main.main: Logging handler created.
19/12/03 12:09:41 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:41361
19/12/03 12:09:41 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/03 12:09:41 WARN sdk_worker_main._load_main_session: No 

Build failed in Jenkins: beam_PostCommit_Java_ValidatesRunner_Flink #5994

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[echauchot] [BEAM-8470] Update capability matrix: add Spark Structured Streaming

[echauchot] [BEAM-8470] Update Spark runner page: add Spark Structured Streaming


--
Started by GitHub push by aromanenko-dev
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-7 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 72a2d0490b3cee903a4315e59d157565ba4b0e75 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 72a2d0490b3cee903a4315e59d157565ba4b0e75
Commit message: "Merge pull request #10210: Update capability matrix and spark 
website pages  to add details about Spark Structured Streaming Runner."
 > git rev-list --no-walk 08d9e7b453e99c2812915407a74dfbf473c0ffcb # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g :runners:flink:1.9:validatesRunner
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :runners:flink:1.9:copyResourcesOverrides NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :runners:core-java:processTestResources NO-SOURCE
> Task :model:job-management:extractProto
> Task :model:fn-execution:extractProto
> Task :model:job-management:processResources
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :model:fn-execution:processResources
> Task :runners:flink:1.9:copySourceOverrides
> Task :runners:flink:1.9:copyTestResourcesOverrides NO-SOURCE
> Task :runners:flink:1.9:processResources
> Task :sdks:java:build-tools:compileJava FROM-CACHE
> Task :sdks:java:core:processResources
> Task :sdks:java:core:generateTestAvroProtocol NO-SOURCE
> Task :sdks:java:build-tools:processResources
> Task :sdks:java:build-tools:classes
> Task :sdks:java:build-tools:jar
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task :sdks:java:core:generateTestAvroJava
> Task :sdks:java:core:generateTestGrammarSource NO-SOURCE
> Task :sdks:java:core:processTestResources
> Task :model:pipeline:generateProto
> Task 

Jenkins build is back to normal : beam_PostCommit_Python35 #1130

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Jenkins build is back to normal : beam_PostCommit_Java #4801

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_PostCommit_Java_ValidatesRunner_PortabilityApi_Dataflow #2557

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
[...truncated 31.24 KB...]
> Task :sdks:java:io:common:compileTestJava FROM-CACHE
> Task :sdks:java:io:common:testClasses UP-TO-DATE
> Task :runners:local-java:jar
> Task :vendor:sdks-java-extensions-protobuf:shadowJar
> Task :sdks:java:io:kafka:compileJava FROM-CACHE
> Task :sdks:java:io:kafka:classes UP-TO-DATE
> Task :sdks:java:io:common:testJar
> Task :runners:core-construction-java:compileJava FROM-CACHE
> Task :runners:core-construction-java:classes UP-TO-DATE
> Task :sdks:java:io:kafka:jar
> Task :sdks:java:fn-execution:compileJava FROM-CACHE
> Task :sdks:java:fn-execution:classes UP-TO-DATE
> Task :sdks:java:fn-execution:jar
> Task :runners:core-construction-java:jar
> Task :sdks:java:extensions:protobuf:compileJava FROM-CACHE
> Task :sdks:java:extensions:protobuf:classes UP-TO-DATE
> Task :sdks:java:extensions:protobuf:jar
> Task :runners:core-java:compileJava FROM-CACHE
> Task :runners:core-java:classes UP-TO-DATE
> Task :sdks:java:extensions:google-cloud-platform-core:compileJava FROM-CACHE
> Task :sdks:java:extensions:google-cloud-platform-core:classes UP-TO-DATE
> Task :sdks:java:extensions:google-cloud-platform-core:jar
> Task :runners:core-java:jar
> Task :sdks:java:testing:test-utils:compileJava FROM-CACHE
> Task :sdks:java:testing:test-utils:classes UP-TO-DATE
> Task :sdks:java:testing:test-utils:jar
> Task :sdks:java:testing:test-utils:compileTestJava FROM-CACHE
> Task :sdks:java:testing:test-utils:testClasses UP-TO-DATE
> Task :sdks:java:harness:compileJava FROM-CACHE
> Task :sdks:java:harness:classes UP-TO-DATE
> Task :sdks:java:testing:test-utils:testJar
> Task :sdks:java:harness:jar
> Task :sdks:java:io:google-cloud-platform:compileJava FROM-CACHE
> Task :sdks:java:io:google-cloud-platform:classes UP-TO-DATE
> Task :sdks:java:core:compileTestJava FROM-CACHE
> Task :sdks:java:core:testClasses
> Task :sdks:java:io:google-cloud-platform:jar
> Task :runners:google-cloud-dataflow-java:compileJava FROM-CACHE
> Task :runners:google-cloud-dataflow-java:classes

> Task :sdks:go:resolveBuildDependencies
Resolving github.com/ianlancetaylor/demangle: 
commit='4883227f66371e02c4948937d3e2be1664d9be38', 
urls=[https://github.com/ianlancetaylor/demangle.git, 
g...@github.com:ianlancetaylor/demangle.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/kr/fs: commit='2788f0dbd16903de03cb8186e5c7d97b69ad387b', 
urls=[https://github.com/kr/fs.git, g...@github.com:kr/fs.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/magiconair/properties: 
commit='49d762b9817ba1c2e9d0c69183c2b4a8b8f1d934', 
urls=[https://github.com/magiconair/properties.git, 
g...@github.com:magiconair/properties.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/mitchellh/go-homedir: 
commit='b8bc1bf767474819792c23f32d8286a45736f1c6', 
urls=[https://github.com/mitchellh/go-homedir.git, 
g...@github.com:mitchellh/go-homedir.git]
Resolving github.com/mitchellh/mapstructure: 
commit='a4e142e9c047c904fa2f1e144d9a84e6133024bc', 
urls=[https://github.com/mitchellh/mapstructure.git, 
g...@github.com:mitchellh/mapstructure.git]
Resolving github.com/nightlyone/lockfile: 
commit='0ad87eef1443f64d3d8c50da647e2b1552851124', 
urls=[https://github.com/nightlyone/lockfile, 
g...@github.com:nightlyone/lockfile.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/openzipkin/zipkin-go: 
commit='3741243b287094fda649c7f0fa74bd51f37dc122', 
urls=[https://github.com/openzipkin/zipkin-go.git, 
g...@github.com:openzipkin/zipkin-go.git]
Resolving github.com/pelletier/go-toml: 
commit='acdc4509485b587f5e675510c4f2c63e90ff68a8', 
urls=[https://github.com/pelletier/go-toml.git, 
g...@github.com:pelletier/go-toml.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/pierrec/lz4: 
commit='ed8d4cc3b461464e69798080a0092bd028910298', 

Jenkins build is back to normal : beam_PostCommit_Java11_ValidatesRunner_PortabilityApi_Dataflow #1661

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_PostCommit_Java_ValidatesRunner_Flink #5995

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[chadrik] [BEAM-8523] JobAPI: Give access to timestamped state change history

[chadrik] Rename GetJobStateResponse to JobStateEvent

[chadrik] Move state history utilities to AbstractBeamJob

[chadrik] Small bugfix to FlinkBeamJob job state mapping

[chadrik] Fix existing bugs in AbstractJobServiceServicer

[chadrik] Use timestamp.Timestamp instead of float


--
[...truncated 46 B...]
Building remotely on apache-beam-jenkins-7 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision f8496d318b789716242f2f8b6381ee862b84ef3f (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f f8496d318b789716242f2f8b6381ee862b84ef3f
Commit message: "Merge pull request #9959: [BEAM-8523] JobAPI: Give access to 
timestamped state change history"
 > git rev-list --no-walk 72a2d0490b3cee903a4315e59d157565ba4b0e75 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g :runners:flink:1.9:validatesRunner
Starting a Gradle Daemon (subsequent builds will be faster)
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :runners:flink:1.9:copyResourcesOverrides NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :runners:core-java:processTestResources NO-SOURCE
> Task :model:job-management:extractProto
> Task :model:fn-execution:extractProto
> Task :model:job-management:processResources
> Task :runners:flink:1.9:copySourceOverrides
> Task :runners:flink:1.9:copyTestResourcesOverrides NO-SOURCE
> Task :model:fn-execution:processResources
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :runners:flink:1.9:processResources
> Task :sdks:java:build-tools:compileJava FROM-CACHE
> Task :sdks:java:build-tools:processResources
> Task :sdks:java:build-tools:classes
> Task :sdks:java:core:processResources
> Task :sdks:java:core:generateTestAvroProtocol NO-SOURCE
> Task :sdks:java:build-tools:jar
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task :sdks:java:core:generateTestAvroJava
> Task :sdks:java:core:generateTestGrammarSource NO-SOURCE
> Task :sdks:java:core:processTestResources

Jenkins build is back to normal : beam_PostCommit_Java11_Examples_Dataflow #2244

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



beam_PostCommit_Py_VR_Dataflow - Build # 5211 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Py_VR_Dataflow (build 
#5211)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/5211/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Jenkins build is back to normal : beam_PostCommit_Java11_ValidatesRunner_Dataflow #1653

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Jenkins build is back to normal : beam_PostCommit_Python2 #1125

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



beam_PostCommit_Py_VR_Dataflow - Build # 5212 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Py_VR_Dataflow (build 
#5212)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/5212/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

beam_PostCommit_Python37 - Build # 1051 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Python37 (build #1051)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Python37/1051/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

beam_PostCommit_Python36 - Build # 1129 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Python36 (build #1129)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Python36/1129/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

beam_PostCommit_Python37 - Build # 1052 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Python37 (build #1052)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Python37/1052/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Build failed in Jenkins: beam_PostCommit_Java_ValidatesRunner_Flink #5996

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-7 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision f8496d318b789716242f2f8b6381ee862b84ef3f (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f f8496d318b789716242f2f8b6381ee862b84ef3f
Commit message: "Merge pull request #9959: [BEAM-8523] JobAPI: Give access to 
timestamped state change history"
 > git rev-list --no-walk f8496d318b789716242f2f8b6381ee862b84ef3f # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g :runners:flink:1.9:validatesRunner
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :runners:flink:1.9:copyResourcesOverrides NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :runners:core-java:processTestResources NO-SOURCE
> Task :model:fn-execution:extractProto
> Task :model:job-management:extractProto
> Task :model:job-management:processResources
> Task :model:fn-execution:processResources
> Task :runners:flink:1.9:copySourceOverrides
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :runners:flink:1.9:copyTestResourcesOverrides NO-SOURCE
> Task :runners:flink:1.9:processResources
> Task :sdks:java:build-tools:compileJava FROM-CACHE
> Task :sdks:java:build-tools:processResources
> Task :sdks:java:build-tools:classes
> Task :sdks:java:core:processResources
> Task :sdks:java:core:generateTestAvroProtocol NO-SOURCE
> Task :sdks:java:build-tools:jar
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task :sdks:java:core:generateTestAvroJava
> Task :sdks:java:core:generateTestGrammarSource NO-SOURCE
> Task :sdks:java:core:processTestResources
> Task :model:pipeline:generateProto
> Task :model:pipeline:compileJava FROM-CACHE
> Task :model:pipeline:processResources
> Task :model:pipeline:classes
> Task :model:pipeline:jar
> Task :model:fn-execution:extractIncludeProto
> Task :model:job-management:extractIncludeProto
> Task 

Build failed in Jenkins: beam_PostCommit_Java11_Examples_Dataflow #2243

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[echauchot] [BEAM-8470] Update capability matrix: add Spark Structured Streaming

[echauchot] [BEAM-8470] Update Spark runner page: add Spark Structured Streaming


--
Started by GitHub push by aromanenko-dev
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-12 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 72a2d0490b3cee903a4315e59d157565ba4b0e75 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 72a2d0490b3cee903a4315e59d157565ba4b0e75
Commit message: "Merge pull request #10210: Update capability matrix and spark 
website pages  to add details about Spark Structured Streaming Runner."
 > git rev-list --no-walk 08d9e7b453e99c2812915407a74dfbf473c0ffcb # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=120 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g 
:runners:google-cloud-dataflow-java:examples:java11PostCommit
Starting a Gradle Daemon, 1 stopped Daemon could not be reused, use --status 
for details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.

> Configure project :sdks:java:container
Found go 1.12 in /usr/bin/go, use it.

> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :sdks:java:io:google-cloud-platform:processResources NO-SOURCE
> Task 
> :runners:google-cloud-dataflow-java:worker:legacy-worker:processResources 
> NO-SOURCE
> Task :runners:google-cloud-dataflow-java:examples:compileJava NO-SOURCE
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :runners:direct-java:processResources NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :runners:local-java:processResources NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :runners:google-cloud-dataflow-java:examples:processResources NO-SOURCE
> Task :examples:java:processResources NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :runners:google-cloud-dataflow-java:examples:classes UP-TO-DATE
> Task :examples:java:processTestResources
> Task :sdks:java:extensions:protobuf:extractProto
> Task :model:fn-execution:extractProto
> Task :runners:google-cloud-dataflow-java:examples:compileTestJava NO-SOURCE
> Task :model:job-management:extractProto
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :runners:google-cloud-dataflow-java:examples:processTestResources 

Jenkins build is back to normal : beam_PostCommit_Java_ValidatesRunner_Dataflow #3928

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_PostRelease_NightlySnapshot #818

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[ehudm] [BEAM-4132] Set multi-output PCollections types to Any

[ehudm] [BEAM-7594] Fix flaky filename generation

[migryz] Bump Release Build Timeout

[migryz] fix syntax

[github] Bump time to 5 hours.

[sunjincheng121] [BEAM-8733]  Handle the registration request synchronously in 
the Python

[ehudm] [BEAM-8842] Disable the correct test

[kcweaver] [BEAM-8863] experiment=beam_fn_api in runtime/environments page

[robertwb] [BEAM-8645] A test case for TimestampCombiner. (#10081)

[lcwik] [BEAM-2929] Ensure that the Beam Java SDK sends the property

[lcwik] [BEAM-2929] Ensure that the Beam Go SDK sends the property

[lcwik] [BEAM-2929] Ensure that the Beam Python SDK sends the property

[lostluck] [BEAM-2929] Fix go code format for

[altay] Increase overhaed budget for test_sampler_transition_overhead

[aaltay] [BEAM-8814] Changed no_auth option from bool to store_true (#10202)

[echauchot] [BEAM-8470] Update capability matrix: add Spark Structured Streaming

[echauchot] [BEAM-8470] Update Spark runner page: add Spark Structured Streaming


--
[...truncated 3.45 MB...]
Dec 03, 2019 11:30:07 AM org.apache.beam.runners.dataflow.util.PackageUtil 
stageClasspathElements
INFO: Uploading 133 files from PipelineOptions.filesToStage to staging location 
to prepare for execution.
Dec 03, 2019 11:30:07 AM org.apache.beam.runners.dataflow.util.PackageUtil 
tryStagePackage
INFO: Uploading 
/tmp/groovy-generated-6514975238164265212-tmpdir/word-count-beam/target/classes 
to 
gs://temp-storage-for-release-validation-tests/nightly-snapshot-validation/tmp/staging/classes-AxTjNxNGo-s3-12ECHEpgw.jar
Dec 03, 2019 11:30:08 AM org.apache.beam.runners.dataflow.util.PackageUtil 
stageClasspathElements
INFO: Staging files complete: 132 files cached, 1 files newly uploaded in 1 
seconds
Dec 03, 2019 11:30:08 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding ReadLines/Read as step s1
Dec 03, 2019 11:30:10 AM org.apache.beam.sdk.io.FileBasedSource 
getEstimatedSizeBytes
INFO: Filepattern gs://apache-beam-samples/shakespeare/* matched 44 files with 
total size 5443510
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding WordCount.CountWords/ParDo(ExtractWords) as step s2
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding WordCount.CountWords/Count.PerElement/Init/Map as step s3
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding 
WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey as step 
s4
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding 
WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues
 as step s5
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding MapElements/Map as step s6
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding WriteCounts/WriteFiles/RewindowIntoGlobal/Window.Assign as step s7
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding 
WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles 
as step s8
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding 
WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten as step 
s9
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding 
WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnwritten as step 
s10
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding 
WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/DropShardNum as step s11
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding 
WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/Flatten.PCollections as 
step s12
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding 
WriteCounts/WriteFiles/GatherTempFileResults/View.AsList/ParDo(ToIsmRecordForGlobalWindow)
 as step s13
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding 
WriteCounts/WriteFiles/GatherTempFileResults/View.AsList/CreateDataflowView as 
step s14
Dec 03, 2019 11:30:10 AM 
org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator addStep
INFO: Adding 

Build failed in Jenkins: beam_sonarqube_report #1108

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[chadrik] [BEAM-8523] JobAPI: Give access to timestamped state change history

[chadrik] Rename GetJobStateResponse to JobStateEvent

[chadrik] Move state history utilities to AbstractBeamJob

[chadrik] Small bugfix to FlinkBeamJob job state mapping

[chadrik] Fix existing bugs in AbstractJobServiceServicer

[chadrik] Use timestamp.Timestamp instead of float

[echauchot] [BEAM-8470] Update capability matrix: add Spark Structured Streaming

[echauchot] [BEAM-8470] Update Spark runner page: add Spark Structured Streaming


--
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-6 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init  # 
 > timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision f8496d318b789716242f2f8b6381ee862b84ef3f (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f f8496d318b789716242f2f8b6381ee862b84ef3f
Commit message: "Merge pull request #9959: [BEAM-8523] JobAPI: Give access to 
timestamped state change history"
 > git rev-list --no-walk 08d9e7b453e99c2812915407a74dfbf473c0ffcb # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
ERROR: SonarQube installation defined in this job (ASF Sonar Analysis) does not 
match any configured installation. Number of installations that can be 
configured: 0.
If you want to reassign jobs to a different SonarQube installation, check the 
documentation under https://redirect.sonarsource.com/plugins/jenkins.html

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_PostCommit_Java_ValidatesRunner_PortabilityApi_Dataflow #2556

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[ehudm] [BEAM-7594] Fix flaky filename generation

[ehudm] [BEAM-8842] Disable the correct test

[altay] Increase overhaed budget for test_sampler_transition_overhead

[aaltay] [BEAM-8814] Changed no_auth option from bool to store_true (#10202)


--
[...truncated 34.72 KB...]
> Task :sdks:java:extensions:protobuf:classes UP-TO-DATE
> Task :sdks:java:extensions:protobuf:jar
> Task :sdks:java:testing:test-utils:compileJava FROM-CACHE
> Task :sdks:java:testing:test-utils:classes UP-TO-DATE
> Task :sdks:java:testing:test-utils:jar
> Task :runners:core-java:compileJava FROM-CACHE
> Task :runners:core-java:classes UP-TO-DATE
> Task :sdks:java:testing:test-utils:compileTestJava FROM-CACHE
> Task :sdks:java:testing:test-utils:testClasses UP-TO-DATE
> Task :sdks:java:testing:test-utils:testJar
> Task :runners:core-java:jar
> Task :sdks:java:io:google-cloud-platform:compileJava FROM-CACHE
> Task :sdks:java:io:google-cloud-platform:classes UP-TO-DATE
> Task :sdks:java:harness:compileJava FROM-CACHE
> Task :sdks:java:harness:classes UP-TO-DATE
> Task :sdks:java:harness:jar
> Task :sdks:java:core:compileTestJava FROM-CACHE
> Task :sdks:java:core:testClasses
> Task :sdks:java:io:google-cloud-platform:jar

> Task :sdks:go:resolveBuildDependencies
Resolving github.com/google/pprof: 
commit='a8f279b7952b27edbcb72e5a6c69ee9be4c8ad93', 
urls=[https://github.com/google/pprof.git, g...@github.com:google/pprof.git]
Resolving github.com/googleapis/gax-go: 
commit='317e0006254c44a0ac427cc52a0e083ff0b9622f', 
urls=[https://github.com/googleapis/gax-go.git, 
g...@github.com:googleapis/gax-go.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/hashicorp/hcl: 
commit='23c074d0eceb2b8a5bfdbb271ab780cde70f05a8', 
urls=[https://github.com/hashicorp/hcl.git, g...@github.com:hashicorp/hcl.git]
Resolving github.com/ianlancetaylor/demangle: 
commit='4883227f66371e02c4948937d3e2be1664d9be38', 
urls=[https://github.com/ianlancetaylor/demangle.git, 
g...@github.com:ianlancetaylor/demangle.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/kr/fs: commit='2788f0dbd16903de03cb8186e5c7d97b69ad387b', 
urls=[https://github.com/kr/fs.git, g...@github.com:kr/fs.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/magiconair/properties: 
commit='49d762b9817ba1c2e9d0c69183c2b4a8b8f1d934', 
urls=[https://github.com/magiconair/properties.git, 
g...@github.com:magiconair/properties.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/mitchellh/go-homedir: 
commit='b8bc1bf767474819792c23f32d8286a45736f1c6', 
urls=[https://github.com/mitchellh/go-homedir.git, 
g...@github.com:mitchellh/go-homedir.git]
Resolving github.com/mitchellh/mapstructure: 
commit='a4e142e9c047c904fa2f1e144d9a84e6133024bc', 
urls=[https://github.com/mitchellh/mapstructure.git, 
g...@github.com:mitchellh/mapstructure.git]
Resolving github.com/nightlyone/lockfile: 
commit='0ad87eef1443f64d3d8c50da647e2b1552851124', 
urls=[https://github.com/nightlyone/lockfile, 
g...@github.com:nightlyone/lockfile.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/openzipkin/zipkin-go: 
commit='3741243b287094fda649c7f0fa74bd51f37dc122', 
urls=[https://github.com/openzipkin/zipkin-go.git, 
g...@github.com:openzipkin/zipkin-go.git]
Resolving github.com/pelletier/go-toml: 
commit='acdc4509485b587f5e675510c4f2c63e90ff68a8', 
urls=[https://github.com/pelletier/go-toml.git, 
g...@github.com:pelletier/go-toml.git]
Resolving cached github.com/coreos/etcd: 

Build failed in Jenkins: beam_PostCommit_Java11_ValidatesRunner_PortabilityApi_Dataflow #1660

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[ehudm] [BEAM-7594] Fix flaky filename generation

[ehudm] [BEAM-8842] Disable the correct test

[altay] Increase overhaed budget for test_sampler_transition_overhead

[aaltay] [BEAM-8814] Changed no_auth option from bool to store_true (#10202)


--
[...truncated 37.02 KB...]
> Task :sdks:java:testing:test-utils:compileTestJava FROM-CACHE
> Task :sdks:java:testing:test-utils:testClasses UP-TO-DATE
> Task :sdks:java:testing:test-utils:testJar
> Task :sdks:java:extensions:protobuf:compileJava FROM-CACHE
> Task :sdks:java:extensions:protobuf:classes UP-TO-DATE
> Task :sdks:java:extensions:protobuf:jar
> Task :runners:core-java:compileJava FROM-CACHE
> Task :runners:core-java:classes UP-TO-DATE
> Task :runners:core-java:jar
> Task :sdks:java:io:google-cloud-platform:compileJava FROM-CACHE
> Task :sdks:java:io:google-cloud-platform:classes UP-TO-DATE
> Task :sdks:java:harness:compileJava FROM-CACHE
> Task :sdks:java:harness:classes UP-TO-DATE
> Task :sdks:java:harness:jar
> Task :sdks:java:io:google-cloud-platform:jar
> Task :sdks:java:core:compileTestJava FROM-CACHE
> Task :sdks:java:core:testClasses
> Task :runners:google-cloud-dataflow-java:compileJava FROM-CACHE
> Task :runners:google-cloud-dataflow-java:classes
> Task :runners:google-cloud-dataflow-java:jar

> Task :sdks:go:resolveBuildDependencies
Resolving github.com/magiconair/properties: 
commit='49d762b9817ba1c2e9d0c69183c2b4a8b8f1d934', 
urls=[https://github.com/magiconair/properties.git, 
g...@github.com:magiconair/properties.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/mitchellh/go-homedir: 
commit='b8bc1bf767474819792c23f32d8286a45736f1c6', 
urls=[https://github.com/mitchellh/go-homedir.git, 
g...@github.com:mitchellh/go-homedir.git]
Resolving github.com/mitchellh/mapstructure: 
commit='a4e142e9c047c904fa2f1e144d9a84e6133024bc', 
urls=[https://github.com/mitchellh/mapstructure.git, 
g...@github.com:mitchellh/mapstructure.git]
Resolving github.com/nightlyone/lockfile: 
commit='0ad87eef1443f64d3d8c50da647e2b1552851124', 
urls=[https://github.com/nightlyone/lockfile, 
g...@github.com:nightlyone/lockfile.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/openzipkin/zipkin-go: 
commit='3741243b287094fda649c7f0fa74bd51f37dc122', 
urls=[https://github.com/openzipkin/zipkin-go.git, 
g...@github.com:openzipkin/zipkin-go.git]
Resolving github.com/pelletier/go-toml: 
commit='acdc4509485b587f5e675510c4f2c63e90ff68a8', 
urls=[https://github.com/pelletier/go-toml.git, 
g...@github.com:pelletier/go-toml.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/pierrec/lz4: 
commit='ed8d4cc3b461464e69798080a0092bd028910298', 
urls=[https://github.com/pierrec/lz4.git, g...@github.com:pierrec/lz4.git]
Resolving github.com/pierrec/xxHash: 
commit='a0006b13c722f7f12368c00a3d3c2ae8a999a0c6', 
urls=[https://github.com/pierrec/xxHash.git, g...@github.com:pierrec/xxHash.git]
Resolving github.com/pkg/errors: 
commit='30136e27e2ac8d167177e8a583aa4c3fea5be833', 
urls=[https://github.com/pkg/errors.git, g...@github.com:pkg/errors.git]
Resolving github.com/pkg/sftp: 
commit='22e9c1ccc02fc1b9fa3264572e49109b68a86947', 
urls=[https://github.com/pkg/sftp.git, g...@github.com:pkg/sftp.git]
Resolving github.com/prometheus/client_golang: 
commit='9bb6ab929dcbe1c8393cd9ef70387cb69811bd1c', 
urls=[https://github.com/prometheus/client_golang.git, 
g...@github.com:prometheus/client_golang.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/prometheus/procfs: 
commit='cb4147076ac75738c9a7d279075a253c0cc5acbd', 
urls=[https://github.com/prometheus/procfs.git, 
g...@github.com:prometheus/procfs.git]
Resolving github.com/rcrowley/go-metrics: 
commit='8732c616f52954686704c8645fe1a9d59e9df7c1', 
urls=[https://github.com/rcrowley/go-metrics.git, 
g...@github.com:rcrowley/go-metrics.git]
Resolving github.com/cpuguy83/go-md2man: 
commit='dc9f53734905c233adfc09fd4f063dce63ce3daf', 

Build failed in Jenkins: beam_PostCommit_Python36 #1131

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
[...truncated 619.19 KB...]
19/12/03 13:12:23 INFO 
org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: Beam 
Fn Control client connected with id 1-1
INFO:apache_beam.runners.worker.sdk_worker:Creating insecure state channel for 
localhost:45803.
INFO:apache_beam.runners.worker.sdk_worker:State channel established.
INFO:apache_beam.runners.worker.data_plane:Creating client data channel for 
localhost:39613
19/12/03 13:12:23 INFO 
org.apache.beam.runners.fnexecution.data.GrpcDataService: Beam Fn Data client 
connected.
19/12/03 13:12:24 WARN 
org.apache.beam.runners.spark.translation.GroupNonMergingWindowsFunctions: 
Either coder LengthPrefixCoder(ByteArrayCoder) or GlobalWindow$Coder is not 
consistent with equals. That might cause issues on some runners.
WARNING:apache_beam.io.filebasedsink:Deleting 4 existing files in target path 
matching: -*-of-%(num_shards)05d
19/12/03 13:12:48 INFO org.apache.beam.runners.spark.SparkPipelineRunner: Job 
BeamApp-jenkins-1203131218-762e0a0e_3511b277-9b25-41d0-a7f2-abe9d0b4263d: 
Pipeline translated successfully. Computing outputs
INFO:apache_beam.io.filebasedsink:Starting finalize_write threads with 
num_shards: 4 (skipped: 0), batches: 4, num_threads: 4
INFO:apache_beam.io.filebasedsink:Renamed 4 shards in 0.10 seconds.
19/12/03 13:13:09 INFO org.apache.beam.runners.spark.SparkPipelineRunner: Job 
BeamApp-jenkins-1203131218-762e0a0e_3511b277-9b25-41d0-a7f2-abe9d0b4263d 
finished.
19/12/03 13:13:09 WARN 
org.apache.beam.runners.spark.SparkPipelineResult$BatchMode: Collecting 
monitoring infos is not implemented yet in Spark portable runner.
19/12/03 13:13:09 INFO 
org.apache.beam.runners.fnexecution.artifact.AbstractArtifactRetrievalService: 
Manifest at 
/tmp/beam-temp87p126i3/artifactsdg3zpgje/job_aea19955-b894-4818-bc3b-ead66fc874f1/MANIFEST
 has 1 artifact locations
19/12/03 13:13:09 INFO 
org.apache.beam.runners.fnexecution.artifact.BeamFileSystemArtifactStagingService:
 Removed dir 
/tmp/beam-temp87p126i3/artifactsdg3zpgje/job_aea19955-b894-4818-bc3b-ead66fc874f1/
INFO:apache_beam.runners.portability.portable_runner:Job state changed to DONE
19/12/03 13:13:09 INFO 
org.apache.beam.runners.fnexecution.jobsubmission.InMemoryJobService: Getting 
job metrics for 
BeamApp-jenkins-1203131218-762e0a0e_3511b277-9b25-41d0-a7f2-abe9d0b4263d
19/12/03 13:13:09 INFO 
org.apache.beam.runners.fnexecution.jobsubmission.InMemoryJobService: Finished 
getting job metrics for 
BeamApp-jenkins-1203131218-762e0a0e_3511b277-9b25-41d0-a7f2-abe9d0b4263d
Exception in thread read_state:
Traceback (most recent call last):
  File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
  File "/usr/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
  File 
"
 line 530, in pull_responses
for response in responses:
  File 
"
 line 392, in __next__
return self._next()
  File 
"
 line 561, in _next
raise self
grpc._channel._Rendezvous: <_Rendezvous of RPC that terminated with:
status = StatusCode.UNAVAILABLE
details = "Socket closed"
debug_error_string = 
"{"created":"@1575378790.098989259","description":"Error received from peer 
ipv4:127.0.0.1:45803","file":"src/core/lib/surface/call.cc","file_line":1055,"grpc_message":"Socket
 closed","grpc_status":14}"
>

ERROR:apache_beam.runners.worker.data_plane:Failed to read inputs in the data 
plane.
Traceback (most recent call last):
  File 
"
 line 272, in _read_inputs
for elements in elements_iterator:
  File 
"
 line 392, in __next__
return self._next()
  File 
"
 line 561, in _next
raise self
grpc._channel._Rendezvous: <_Rendezvous of RPC that terminated with:
status = StatusCode.UNAVAILABLE
details = "Socket closed"
debug_error_string = 
"{"created":"@1575378790.098958411","description":"Error received from peer 
ipv4:127.0.0.1:39613","file":"src/core/lib/surface/call.cc","file_line":1055,"grpc_message":"Socket
 closed","grpc_status":14}"
>
Exception in thread run_worker_1-1:

Jenkins build is back to normal : beam_PostCommit_Python37 #1053

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



beam_PostCommit_Py_VR_Dataflow - Build # 5213 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Py_VR_Dataflow (build 
#5213)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/5213/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Build failed in Jenkins: beam_PreCommit_Python_Cron #2111

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[chadrik] [BEAM-8523] JobAPI: Give access to timestamped state change history

[chadrik] Rename GetJobStateResponse to JobStateEvent

[chadrik] Move state history utilities to AbstractBeamJob

[chadrik] Small bugfix to FlinkBeamJob job state mapping

[chadrik] Fix existing bugs in AbstractJobServiceServicer

[chadrik] Use timestamp.Timestamp instead of float

[echauchot] [BEAM-8470] Update capability matrix: add Spark Structured Streaming

[echauchot] [BEAM-8470] Update Spark runner page: add Spark Structured Streaming


--
[...truncated 6.10 MB...]
self._pipeline_proto)
  File 
"
 line 399, in run_via_runner_api
return self.run_stages(stage_context, stages)
  File 
"
 line 481, in run_stages
stage_context.safe_coders)
  File 
"
 line 777, in _run_stage
result, splits = bundle_manager.process_bundle(data_input, data_output)
  File 
"
 line 1814, in process_bundle
part, expected_outputs), part_inputs):
  File 
"
 line 641, in result_iterator
yield fs.pop().result()
  File 
"
 line 462, in result
return self.__get_result()
  File 
"
 line 42, in run
self._future.set_result(self._fn(*self._fn_args, **self._fn_kwargs))
  File 
"
 line 1814, in 
part, expected_outputs), part_inputs):
  File 
"
 line 1750, in process_bundle
result_future = self._worker_handler.control_conn.push(process_bundle_req)
  File 
"
 line 1176, in push
response = self.worker.do_instruction(request)
  File 
"
 line 284, in do_instruction
request.instruction_id)
  File 
"
 line 310, in process_bundle
bundle_processor.process_bundle(instruction_id))
  File 
"
 line 675, in process_bundle
data.transform_id].process_encoded(data.data)
  File 
"
 line 146, in process_encoded
self.output(decoded_value)
  File "apache_beam/runners/worker/operations.py", line 258, in 
apache_beam.runners.worker.operations.Operation.output
def output(self, windowed_value, output_index=0):
  File "apache_beam/runners/worker/operations.py", line 259, in 
apache_beam.runners.worker.operations.Operation.output
cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)
  File "apache_beam/runners/worker/operations.py", line 146, in 
apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
  File "apache_beam/runners/worker/operations.py", line 596, in 

Build failed in Jenkins: beam_PostCommit_Python_Chicago_Taxi_Dataflow #127

2019-12-03 Thread Apache Jenkins Server
21)) (1.5)
Requirement already satisfied: more-itertools in 
<https://builds.apache.org/job/beam_PostCommit_Python_Chicago_Taxi_Dataflow/ws/src/build/gradleenv/-194514014/lib/python2.7/site-packages>
 (from zipp>=0.5->importlib-metadata; python_version < 
"3.8"->jsonschema!=2.5.0,>=2.4->nbformat->notebook<5.8,>=5.7.8->-r 
requirements.txt (line 18)) (5.0.0)
ERROR: tensorflow-serving-api 1.13.1 has requirement tensorflow<2,>=1.2.0, but 
you'll have tensorflow 2.0.0 which is incompatible.
Installing collected packages: webencodings, bleach, entrypoints, defusedxml, 
mistune, ipython-genutils, decorator, traitlets, jupyter-core, functools32, 
pyrsistent, jsonschema, nbformat, pandocfilters, pygments, testpath, 
MarkupSafe, jinja2, nbconvert, prompt-toolkit, ptyprocess, pexpect, 
backports.shutil-get-terminal-size, simplegeneric, pickleshare, IPython, 
backports-abc, singledispatch, tornado, pyzmq, jupyter-client, Send2Trash, 
terminado, ipykernel, prometheus-client, ipaddress, notebook, 
widgetsnbextension, ipywidgets, qtconsole, jupyter-console, jupyter, protobuf, 
absl-py, wrapt, keras-preprocessing, backports.weakref, astor, h5py, 
keras-applications, termcolor, tensorflow-estimator, google-pasta, opt-einsum, 
gast, markdown, oauthlib, requests-oauthlib, google-auth-oauthlib, werkzeug, 
tensorboard, tensorflow, joblib, tensorflow-metadata, scipy, scikit-learn, 
pydot, tensorflow-transform, tensorflow-data-validation, 
tensorflow-model-analysis, tensorflow-serving-api
  Found existing installation: protobuf 3.11.1
Uninstalling protobuf-3.11.1:
  Successfully uninstalled protobuf-3.11.1
  Found existing installation: pydot 1.4.1
Uninstalling pydot-1.4.1:
  Successfully uninstalled pydot-1.4.1
Successfully installed IPython-5.8.0 MarkupSafe-1.1.1 Send2Trash-1.5.0 
absl-py-0.8.1 astor-0.8.0 backports-abc-0.5 
backports.shutil-get-terminal-size-1.0.0 backports.weakref-1.0.post1 
bleach-3.1.0 decorator-4.4.1 defusedxml-0.6.0 entrypoints-0.3 
functools32-3.2.3.post2 gast-0.2.2 google-auth-oauthlib-0.4.1 
google-pasta-0.1.8 h5py-2.10.0 ipaddress-1.0.23 ipykernel-4.10.1 
ipython-genutils-0.2.0 ipywidgets-7.5.1 jinja2-2.10.3 joblib-0.14.0 
jsonschema-3.2.0 jupyter-1.0.0 jupyter-client-5.3.4 jupyter-console-5.2.0 
jupyter-core-4.6.1 keras-applications-1.0.8 keras-preprocessing-1.1.0 
markdown-3.1.1 mistune-0.8.4 nbconvert-5.6.1 nbformat-4.4.0 notebook-5.7.8 
oauthlib-3.1.0 opt-einsum-2.3.2 pandocfilters-1.4.2 pexpect-4.7.0 
pickleshare-0.7.5 prometheus-client-0.7.1 prompt-toolkit-1.0.18 protobuf-3.7.1 
ptyprocess-0.6.0 pydot-1.2.4 pygments-2.5.2 pyrsistent-0.15.6 pyzmq-18.1.1 
qtconsole-4.6.0 requests-oauthlib-1.3.0 scikit-learn-0.20.4 scipy-1.1.0 
simplegeneric-0.8.1 singledispatch-3.4.0.3 tensorboard-2.0.2 tensorflow-2.0.0 
tensorflow-data-validation-0.13.1 tensorflow-estimator-2.0.1 
tensorflow-metadata-0.13.0 tensorflow-model-analysis-0.13.2 
tensorflow-serving-api-1.13.1 tensorflow-transform-0.13.0 termcolor-1.1.0 
terminado-0.8.3 testpath-0.4.4 tornado-5.1.1 traitlets-4.3.3 webencodings-0.5.1 
werkzeug-0.16.0 widgetsnbextension-3.5.1 wrapt-1.11.2
Starting distributed TFDV stats computation and schema generation...
Using GCP project: apache-beam-testing
Job output path: 
gs://temp-storage-for-perf-tests/chicago-taxi/chicago-taxi-tfdv-20191203-142743/chicago_taxi_output
TFDV output path: 
gs://temp-storage-for-perf-tests/chicago-taxi/chicago-taxi-tfdv-20191203-142743/chicago_taxi_output/tfdv_output
<https://builds.apache.org/job/beam_PostCommit_Python_Chicago_Taxi_Dataflow/ws/src/sdks/python/apache_beam/__init__.py>:84:
 UserWarning: You are using Apache Beam with Python 2. New releases of Apache 
Beam will soon support Python 3 only.
  'You are using Apache Beam with Python 2. '
Traceback (most recent call last):
  File "tfdv_analyze_and_validate.py", line 24, in 
import tensorflow_data_validation as tfdv
  File 
"<https://builds.apache.org/job/beam_PostCommit_Python_Chicago_Taxi_Dataflow/ws/src/build/gradleenv/-194514014/local/lib/python2.7/site-packages/tensorflow_data_validation/__init__.py;,>
 line 18, in 
from tensorflow_data_validation.api.stats_api import GenerateStatistics
  File 
"<https://builds.apache.org/job/beam_PostCommit_Python_Chicago_Taxi_Dataflow/ws/src/build/gradleenv/-194514014/local/lib/python2.7/site-packages/tensorflow_data_validation/api/stats_api.py;,>
 line 51, in 
from tensorflow_data_validation.statistics import stats_impl
  File 
"<https://builds.apache.org/job/beam_PostCommit_Python_Chicago_Taxi_Dataflow/ws/src/build/gradleenv/-194514014/local/lib/python2.7/site-packages/tensorflow_data_validation/statistics/stats_impl.py;,>
 line 27, in 
from tensorflow_data_validation.statistics.generators import 
basic_stats_generator
  File 
"<https://builds.apache.org/job/beam_PostCommit_Python_Chicago_Taxi_Dataflow/ws/src/build/gradleenv/-194514014/local/li

Jenkins build is back to normal : beam_PostCommit_Java_ValidatesRunner_PortabilityApi_Dataflow #2558

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_PreCommit_Java_Cron #2111

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[chadrik] [BEAM-8523] JobAPI: Give access to timestamped state change history

[chadrik] Rename GetJobStateResponse to JobStateEvent

[chadrik] Move state history utilities to AbstractBeamJob

[chadrik] Small bugfix to FlinkBeamJob job state mapping

[chadrik] Fix existing bugs in AbstractJobServiceServicer

[chadrik] Use timestamp.Timestamp instead of float

[echauchot] [BEAM-8470] Update capability matrix: add Spark Structured Streaming

[echauchot] [BEAM-8470] Update Spark runner page: add Spark Structured Streaming


--
[...truncated 458.24 KB...]
> Task :runners:extensions-java:metrics:assemble
> Task :runners:extensions-java:metrics:analyzeClassesDependencies SKIPPED
> Task :vendor:sdks-java-extensions-protobuf:test NO-SOURCE
> Task 
> :vendor:sdks-java-extensions-protobuf:validateShadedJarDoesntLeakNonProjectClasses
> Task :vendor:sdks-java-extensions-protobuf:check
> Task :vendor:sdks-java-extensions-protobuf:build
> Task :runners:samza:job-server:startShadowScripts
> Task :sdks:java:core:packageTests
> Task :sdks:java:core:assemble
> Task :sdks:java:core:analyzeClassesDependencies SKIPPED
> Task :sdks:java:core:analyzeTestClassesDependencies SKIPPED
> Task :sdks:java:core:analyzeDependencies SKIPPED
> Task :runners:samza:job-server:shadowDistTar
> Task :runners:extensions-java:metrics:compileTestJava
> Task :runners:extensions-java:metrics:testClasses
> Task :runners:extensions-java:metrics:analyzeTestClassesDependencies SKIPPED
> Task :runners:extensions-java:metrics:analyzeDependencies SKIPPED
> Task :runners:extensions-java:metrics:checkstyleMain
> Task :runners:extensions-java:metrics:checkstyleTest
> Task :sdks:java:fn-execution:assemble
> Task :sdks:java:fn-execution:analyzeClassesDependencies SKIPPED
> Task :runners:local-java:test
> Task :runners:local-java:check
> Task :runners:local-java:build
> Task :sdks:java:extensions:google-cloud-platform-core:assemble
> Task 
> :sdks:java:extensions:google-cloud-platform-core:analyzeClassesDependencies 
> SKIPPED
> Task 
> :sdks:java:extensions:google-cloud-platform-core:analyzeTestClassesDependencies
>  SKIPPED
> Task :sdks:java:extensions:google-cloud-platform-core:analyzeDependencies 
> SKIPPED
> Task :sdks:java:extensions:google-cloud-platform-core:checkstyleMain
> Task :sdks:java:extensions:google-cloud-platform-core:checkstyleTest
> Task :runners:extensions-java:metrics:javadoc
> Task :sdks:java:core:checkstyleMain
[main] INFO org.gradle.internal.nativeintegration.services.NativeServices - 
Initialized native services in: /home/jenkins/.gradle/native

> Task :runners:extensions-java:metrics:spotbugsMain
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further 
details.

> Task :sdks:java:extensions:google-cloud-platform-core:javadoc
[main] INFO org.gradle.internal.nativeintegration.services.NativeServices - 
Initialized native services in: /home/jenkins/.gradle/native
> Task :runners:samza:job-server:shadowDistZip
> Task :runners:samza:job-server:assemble

> Task :sdks:java:extensions:google-cloud-platform-core:spotbugsMain
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further 
details.

> Task :sdks:java:fn-execution:compileTestJava
> Task :runners:samza:job-server:analyzeClassesDependencies SKIPPED
> Task :runners:samza:job-server:analyzeTestClassesDependencies SKIPPED
> Task :runners:samza:job-server:analyzeDependencies SKIPPED
> Task :runners:samza:job-server:checkstyleMain NO-SOURCE
> Task :runners:samza:job-server:checkstyleTest NO-SOURCE
> Task :runners:samza:job-server:javadoc NO-SOURCE
> Task :runners:samza:job-server:spotbugsMain NO-SOURCE
> Task :runners:samza:job-server:test NO-SOURCE
> Task :runners:samza:job-server:check UP-TO-DATE
> Task :runners:samza:job-server:build
> Task :runners:samza:job-server:buildDependents
> Task :runners:samza:buildDependents
> Task :sdks:java:fn-execution:testClasses
> Task :sdks:java:fn-execution:analyzeTestClassesDependencies SKIPPED
> Task :sdks:java:fn-execution:analyzeDependencies SKIPPED
> Task :sdks:java:fn-execution:checkstyleMain
> Task :sdks:java:fn-execution:checkstyleTest
> Task :sdks:java:io:elasticsearch-tests:elasticsearch-tests-2:check
> Task :sdks:java:io:elasticsearch-tests:elasticsearch-tests-2:build
> Task :sdks:java:io:elasticsearch-tests:elasticsearch-tests-2:buildDependents
> Task :sdks:java:extensions:protobuf:assemble
> Task :sdks:java:extensions:protobuf:analyzeClassesDependencies SKIPPED
> Task :sdks:java:extensions:protobuf:extractIncludeTestProto
> Task :sdks:java:extensions:protobuf:generateTestProto
> Task :sdks:java:harness:test
> Task 

beam_PostCommit_Python36 - Build # 1130 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Python36 (build #1130)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Python36/1130/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

beam_PostCommit_Py_VR_Dataflow - Build # 5214 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Py_VR_Dataflow (build 
#5214)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/5214/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Build failed in Jenkins: beam_BiqQueryIO_Streaming_Performance_Test_Java #202

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[chadrik] [BEAM-8523] JobAPI: Give access to timestamped state change history

[chadrik] Rename GetJobStateResponse to JobStateEvent

[chadrik] Move state history utilities to AbstractBeamJob

[chadrik] Small bugfix to FlinkBeamJob job state mapping

[chadrik] Fix existing bugs in AbstractJobServiceServicer

[chadrik] Use timestamp.Timestamp instead of float

[echauchot] [BEAM-8470] Update capability matrix: add Spark Structured Streaming

[echauchot] [BEAM-8470] Update Spark runner page: add Spark Structured Streaming


--
[...truncated 430.20 KB...]
"message" : "Invalid table ID 
\"bqio_write_10GB_java_f01b6481-ec2d-406c-8db7-573756f21160\". Table IDs must 
be alphanumeric (plus underscores) and must be at most 1024 characters long. 
Also, Table decorators cannot be used.",
"reason" : "invalid"
  } ],
  "message" : "Invalid table ID 
\"bqio_write_10GB_java_f01b6481-ec2d-406c-8db7-573756f21160\". Table IDs must 
be alphanumeric (plus underscores) and must be at most 1024 characters long. 
Also, Table decorators cannot be used.",
  "status" : "INVALID_ARGUMENT"
}
at 
com.google.api.client.googleapis.json.GoogleJsonResponseException.from(GoogleJsonResponseException.java:150)
at 
com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest.newExceptionOnError(AbstractGoogleJsonClientRequest.java:113)
at 
com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest.newExceptionOnError(AbstractGoogleJsonClientRequest.java:40)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest$1.interceptResponse(AbstractGoogleClientRequest.java:417)
at com.google.api.client.http.HttpRequest.execute(HttpRequest.java:1132)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:515)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:448)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.execute(AbstractGoogleClientRequest.java:565)
at 
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$DatasetServiceImpl.tryCreateTable(BigQueryServicesImpl.java:520)
at 
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$DatasetServiceImpl.createTable(BigQueryServicesImpl.java:505)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.tryCreateTable(CreateTables.java:205)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.getTableDestination(CreateTables.java:160)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.lambda$processElement$0(CreateTables.java:113)
at java.util.HashMap.computeIfAbsent(HashMap.java:1126)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.processElement(CreateTables.java:112)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn$DoFnInvoker.invokeProcessElement(Unknown
 Source)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:218)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:183)
at 
org.apache.beam.runners.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:335)
at 
org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
at 
org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:49)
at 
org.apache.beam.runners.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:280)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:252)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:74)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:576)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:564)
at 
org.apache.beam.sdk.io.gcp.bigquery.PrepareWrite$1.processElement(PrepareWrite.java:82)
at 
org.apache.beam.sdk.io.gcp.bigquery.PrepareWrite$1$DoFnInvoker.invokeProcessElement(Unknown
 Source)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:218)
 

Jenkins build is back to normal : beam_BiqQueryIO_Read_Performance_Test_Python #147

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_BiqQueryIO_Write_Performance_Test_Python_Batch #147

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[ehudm] [BEAM-4132] Set multi-output PCollections types to Any

[ehudm] [BEAM-7594] Fix flaky filename generation

[migryz] Bump Release Build Timeout

[migryz] fix syntax

[github] Bump time to 5 hours.

[sunjincheng121] [BEAM-8733]  Handle the registration request synchronously in 
the Python

[kamil.wasilewski] Fixed a bug where the output PCollection was assigned to 
self.result

[ehudm] [BEAM-8842] Disable the correct test

[kcweaver] [BEAM-8863] experiment=beam_fn_api in runtime/environments page

[robertwb] [BEAM-8645] A test case for TimestampCombiner. (#10081)

[chadrik] [BEAM-8523] JobAPI: Give access to timestamped state change history

[chadrik] Rename GetJobStateResponse to JobStateEvent

[chadrik] Move state history utilities to AbstractBeamJob

[chadrik] Small bugfix to FlinkBeamJob job state mapping

[chadrik] Fix existing bugs in AbstractJobServiceServicer

[chadrik] Use timestamp.Timestamp instead of float

[lcwik] [BEAM-2929] Ensure that the Beam Java SDK sends the property

[lcwik] [BEAM-2929] Ensure that the Beam Go SDK sends the property

[lcwik] [BEAM-2929] Ensure that the Beam Python SDK sends the property

[lostluck] [BEAM-2929] Fix go code format for

[altay] Increase overhaed budget for test_sampler_transition_overhead

[aaltay] [BEAM-8814] Changed no_auth option from bool to store_true (#10202)

[echauchot] [BEAM-8470] Update capability matrix: add Spark Structured Streaming

[echauchot] [BEAM-8470] Update Spark runner page: add Spark Structured Streaming


--
[...truncated 1.76 KB...]
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[beam_BiqQueryIO_Write_Performance_Test_Python_Batch] $ /bin/bash -xe 
/tmp/jenkins4695907597206945565.sh
+ echo src BigQueryIO Write Performance Test Python Batch 10 GB src
src BigQueryIO Write Performance Test Python Batch 10 GB src
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g 
-PloadTest.mainClass=apache_beam.io.gcp.bigquery_write_perf_test:BigQueryWritePerfTest.test
 -Prunner=TestDataflowRunner 
'-PloadTest.args=--job_name=performance-tests-bqio-write-python-batch-10gb1203160839
 --project=apache-beam-testing 
--temp_location=gs://temp-storage-for-perf-tests/loadtests 
--output_dataset=beam_performance --output_table=bqio_write_10GB 
--publish_to_big_query=true --metrics_dataset=beam_performance 
--metrics_table=bqio_write_10GB_results --input_options='{"num_records": 
10485760,"key_size": 1,"value_size": 1024}' --num_workers=5 
--autoscaling_algorithm=NONE --runner=DataflowRunner' 
:sdks:python:apache_beam:testing:load_tests:run
Starting a Gradle Daemon, 1 busy and 1 stopped Daemons could not be reused, use 
--status for details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.

> Task :sdks:python:setupVirtualenv
New python executable in 

Also creating executable in 

Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python2.7
DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. 
Please upgrade your Python as Python 2.7 won't be maintained after that date. A 
future version of pip will drop support for Python 2.7.
Collecting tox==3.11.1
  Using cached 
https://files.pythonhosted.org/packages/8b/38/71c2fe0c3915fc0e93bdd1bf8cd697be48cdacedbdcd438e0f0629c69024/tox-3.11.1-py2.py3-none-any.whl
Collecting grpcio-tools==1.3.5
  Using cached 

beam_PostCommit_Py_VR_Dataflow - Build # 5215 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Py_VR_Dataflow (build 
#5215)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/5215/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Build failed in Jenkins: beam_PostCommit_Java_ValidatesRunner_Flink #5997

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[kamil.wasilewski] Fixed a bug where the output PCollection was assigned to 
self.result


--
Started by GitHub push by lgajowy
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-6 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision c1e759c10a1a5650ba7cc07ff6676637aa17dff1 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f c1e759c10a1a5650ba7cc07ff6676637aa17dff1
Commit message: "Merge pull request #10249: Fixed flaky BigQueryIO read 
performance test"
 > git rev-list --no-walk f8496d318b789716242f2f8b6381ee862b84ef3f # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g :runners:flink:1.9:validatesRunner
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :runners:flink:1.9:copyResourcesOverrides NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :model:job-management:extractProto
> Task :model:fn-execution:extractProto
> Task :runners:core-java:processTestResources NO-SOURCE
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :model:fn-execution:processResources
> Task :model:job-management:processResources
> Task :runners:flink:1.9:copySourceOverrides
> Task :runners:flink:1.9:copyTestResourcesOverrides NO-SOURCE
> Task :runners:flink:1.9:processResources
> Task :sdks:java:build-tools:compileJava FROM-CACHE
> Task :sdks:java:build-tools:processResources
> Task :sdks:java:build-tools:classes
> Task :sdks:java:core:processResources
> Task :sdks:java:core:generateTestAvroProtocol NO-SOURCE
> Task :sdks:java:build-tools:jar
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task :sdks:java:core:generateTestAvroJava
> Task :sdks:java:core:generateTestGrammarSource NO-SOURCE
> Task :sdks:java:core:processTestResources
> Task :model:pipeline:generateProto
> Task :model:pipeline:compileJava FROM-CACHE
> Task :model:pipeline:processResources
> Task :model:pipeline:classes
> Task :model:pipeline:jar
> Task 

Build failed in Jenkins: beam_PostCommit_Python_VR_Spark #1679

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[kamil.wasilewski] Fixed a bug where the output PCollection was assigned to 
self.result


--
[...truncated 1.32 MB...]
19/12/03 15:50:45 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:43901
19/12/03 15:50:45 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/03 15:50:45 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/03 15:50:45 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575388242.88_c278ce29-8768-480a-a1af-1a7aa80c9420',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/03 15:50:45 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 'experiments': [u'beam_fn_api'], 
'environment_cache_millis': u'0', 'artifact_port': u'0', 'environment_type': 
u'PROCESS', 'sdk_location': u'container', 'job_name': 
u'test_windowing_1575388242.88', 'environment_config': u'{"command": 
"
 'expansion_port': u'0', 'sdk_worker_parallelism': u'1', 'job_endpoint': 
u'localhost:39685', 'job_port': u'0'}
19/12/03 15:50:45 INFO statecache.__init__: Creating state cache with size 0
19/12/03 15:50:45 INFO sdk_worker.__init__: Creating insecure control channel 
for localhost:38113.
19/12/03 15:50:45 INFO sdk_worker.__init__: Control channel established.
19/12/03 15:50:45 INFO sdk_worker.__init__: Initializing SDKHarness with 
unbounded number of workers.
19/12/03 15:50:45 INFO 
org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: Beam 
Fn Control client connected with id 262-1
19/12/03 15:50:45 INFO sdk_worker.create_state_handler: Creating insecure state 
channel for localhost:34311.
19/12/03 15:50:45 INFO sdk_worker.create_state_handler: State channel 
established.
19/12/03 15:50:45 INFO data_plane.create_data_channel: Creating client data 
channel for localhost:42601
19/12/03 15:50:45 INFO 
org.apache.beam.runners.fnexecution.data.GrpcDataService: Beam Fn Data client 
connected.
19/12/03 15:50:45 INFO 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory: Closing 
environment urn: "beam:env:process:v1"
payload: 
"\032\202\001

19/12/03 15:50:45 INFO sdk_worker.run: No more requests from control plane
19/12/03 15:50:45 INFO sdk_worker.run: SDK Harness waiting for in-flight 
requests to complete
19/12/03 15:50:45 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/03 15:50:45 INFO data_plane.close: Closing all cached grpc data channels.
19/12/03 15:50:45 INFO sdk_worker.close: Closing all cached gRPC state handlers.
19/12/03 15:50:45 INFO sdk_worker.run: Done consuming work.
19/12/03 15:50:45 INFO sdk_worker_main.main: Python sdk harness exiting.
19/12/03 15:50:45 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Logging client 
hanged up.
19/12/03 15:50:45 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/03 15:50:45 INFO 
org.apache.beam.runners.fnexecution.artifact.AbstractArtifactRetrievalService: 
GetManifest for __no_artifacts_staged__
19/12/03 15:50:46 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Beam Fn Logging 
client connected.
19/12/03 15:50:46 INFO sdk_worker_main.main: Logging handler created.
19/12/03 15:50:46 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:34583
19/12/03 15:50:46 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/03 15:50:46 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/03 15:50:46 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575388242.88_c278ce29-8768-480a-a1af-1a7aa80c9420',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/03 15:50:46 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 'experiments': [u'beam_fn_api'], 
'environment_cache_millis': u'0', 'artifact_port': u'0', 'environment_type': 
u'PROCESS', 'sdk_location': u'container', 'job_name': 
u'test_windowing_1575388242.88', 'environment_config': u'{"command": 

Build failed in Jenkins: beam_PostCommit_Java11_ValidatesRunner_Dataflow #1656

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[kamil.wasilewski] Fixed a bug where the output PCollection was assigned to 
self.result


--
Started by GitHub push by lgajowy
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-1 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision c1e759c10a1a5650ba7cc07ff6676637aa17dff1 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f c1e759c10a1a5650ba7cc07ff6676637aa17dff1
Commit message: "Merge pull request #10249: Fixed flaky BigQueryIO read 
performance test"
 > git rev-list --no-walk f8496d318b789716242f2f8b6381ee862b84ef3f # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g 
-Dorg.gradle.java.home=/usr/lib/jvm/java-8-openjdk-amd64 
:runners:google-cloud-dataflow-java:shadowJar 
:runners:google-cloud-dataflow-java:shadowTestJar
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.

> Configure project :sdks:java:container
Found go 1.12 in /usr/bin/go; using it.

FAILURE: Build failed with an exception.

* What went wrong:
Task 'shadowJar' not found in project ':runners:google-cloud-dataflow-java'.

* Try:
Run gradlew tasks to get a list of available tasks. Run with --stacktrace 
option to get the stack trace. Run with --info or --debug option to get more 
log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See 
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 13s

Publishing build scan...
https://scans.gradle.com/s/b6kzulqu77rey

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Jenkins build is back to normal : beam_PostCommit_Python36 #1132

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_PostCommit_Python_VR_Spark #1680

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
[...truncated 1.32 MB...]
19/12/03 18:22:56 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:41071
19/12/03 18:22:56 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/03 18:22:56 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/03 18:22:56 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575397373.58_ad87b30f-be4f-4393-94ac-6f006cce3e50',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/03 18:22:56 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 'experiments': [u'beam_fn_api'], 
'environment_cache_millis': u'0', 'artifact_port': u'0', 'environment_type': 
u'PROCESS', 'sdk_location': u'container', 'job_name': 
u'test_windowing_1575397373.58', 'environment_config': u'{"command": 
"
 'expansion_port': u'0', 'sdk_worker_parallelism': u'1', 'job_endpoint': 
u'localhost:35285', 'job_port': u'0'}
19/12/03 18:22:56 INFO statecache.__init__: Creating state cache with size 0
19/12/03 18:22:56 INFO sdk_worker.__init__: Creating insecure control channel 
for localhost:46753.
19/12/03 18:22:56 INFO 
org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: Beam 
Fn Control client connected with id 262-1
19/12/03 18:22:56 INFO sdk_worker.__init__: Control channel established.
19/12/03 18:22:56 INFO sdk_worker.__init__: Initializing SDKHarness with 
unbounded number of workers.
19/12/03 18:22:56 INFO sdk_worker.create_state_handler: Creating insecure state 
channel for localhost:34779.
19/12/03 18:22:56 INFO sdk_worker.create_state_handler: State channel 
established.
19/12/03 18:22:56 INFO data_plane.create_data_channel: Creating client data 
channel for localhost:46315
19/12/03 18:22:56 INFO 
org.apache.beam.runners.fnexecution.data.GrpcDataService: Beam Fn Data client 
connected.
19/12/03 18:22:56 INFO 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory: Closing 
environment urn: "beam:env:process:v1"
payload: 
"\032\202\001

19/12/03 18:22:56 INFO sdk_worker.run: No more requests from control plane
19/12/03 18:22:56 INFO sdk_worker.run: SDK Harness waiting for in-flight 
requests to complete
19/12/03 18:22:56 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hung up for unknown endpoint.
19/12/03 18:22:56 INFO data_plane.close: Closing all cached grpc data channels.
19/12/03 18:22:56 INFO sdk_worker.close: Closing all cached gRPC state handlers.
19/12/03 18:22:56 INFO sdk_worker.run: Done consuming work.
19/12/03 18:22:56 INFO sdk_worker_main.main: Python sdk harness exiting.
19/12/03 18:22:56 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Logging client 
hung up.
19/12/03 18:22:57 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hung up for unknown endpoint.
19/12/03 18:22:57 INFO 
org.apache.beam.runners.fnexecution.artifact.AbstractArtifactRetrievalService: 
GetManifest for __no_artifacts_staged__
19/12/03 18:22:57 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Beam Fn Logging 
client connected.
19/12/03 18:22:57 INFO sdk_worker_main.main: Logging handler created.
19/12/03 18:22:57 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:44997
19/12/03 18:22:57 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/03 18:22:57 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/03 18:22:57 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575397373.58_ad87b30f-be4f-4393-94ac-6f006cce3e50',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/03 18:22:57 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 'experiments': [u'beam_fn_api'], 
'environment_cache_millis': u'0', 'artifact_port': u'0', 'environment_type': 
u'PROCESS', 'sdk_location': u'container', 'job_name': 
u'test_windowing_1575397373.58', 'environment_config': u'{"command": 
"
 

Build failed in Jenkins: beam_PostCommit_XVR_Flink #1096

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
[...truncated 4.28 MB...]
[DataSink (DiscardingOutput) (15/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Freeing task resources for DataSink 
(DiscardingOutput) (15/16) (97f995ec3dd4b90893c3662f4aa1be2c).
[DataSink (DiscardingOutput) (15/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Ensuring all FileSystem streams are 
closed for task DataSink (DiscardingOutput) (15/16) 
(97f995ec3dd4b90893c3662f4aa1be2c) [FINISHED]
[flink-akka.actor.default-dispatcher-3] INFO 
org.apache.flink.runtime.taskexecutor.TaskExecutor - Received task DataSink 
(DiscardingOutput) (12/16).
[flink-akka.actor.default-dispatcher-8] INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph - MapPartition 
(MapPartition at [3]assert_that/{Group, Unkey, Match}) (12/16) 
(9b78b638bfc602b15bdf242b51e0d85e) switched from RUNNING to FINISHED.
[flink-akka.actor.default-dispatcher-3] INFO 
org.apache.flink.runtime.taskexecutor.TaskExecutor - Un-registering task and 
sending final execution state FINISHED to JobManager for task DataSink 
(DiscardingOutput) 97f995ec3dd4b90893c3662f4aa1be2c.
[DataSink (DiscardingOutput) (12/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - DataSink (DiscardingOutput) (12/16) 
(64a4f20c4b7d2a6545dfe3865f9ae06b) switched from CREATED to DEPLOYING.
[DataSink (DiscardingOutput) (12/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Creating FileSystem stream leak 
safety net for task DataSink (DiscardingOutput) (12/16) 
(64a4f20c4b7d2a6545dfe3865f9ae06b) [DEPLOYING]
[DataSink (DiscardingOutput) (12/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Loading JAR files for task DataSink 
(DiscardingOutput) (12/16) (64a4f20c4b7d2a6545dfe3865f9ae06b) [DEPLOYING].
[flink-akka.actor.default-dispatcher-3] INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph - DataSink 
(DiscardingOutput) (15/16) (97f995ec3dd4b90893c3662f4aa1be2c) switched from 
RUNNING to FINISHED.
[DataSink (DiscardingOutput) (12/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Registering task at network: 
DataSink (DiscardingOutput) (12/16) (64a4f20c4b7d2a6545dfe3865f9ae06b) 
[DEPLOYING].
[DataSink (DiscardingOutput) (12/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - DataSink (DiscardingOutput) (12/16) 
(64a4f20c4b7d2a6545dfe3865f9ae06b) switched from DEPLOYING to RUNNING.
[flink-akka.actor.default-dispatcher-3] INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph - DataSink 
(DiscardingOutput) (12/16) (64a4f20c4b7d2a6545dfe3865f9ae06b) switched from 
DEPLOYING to RUNNING.
[DataSink (DiscardingOutput) (12/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - DataSink (DiscardingOutput) (12/16) 
(64a4f20c4b7d2a6545dfe3865f9ae06b) switched from RUNNING to FINISHED.
[DataSink (DiscardingOutput) (12/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Freeing task resources for DataSink 
(DiscardingOutput) (12/16) (64a4f20c4b7d2a6545dfe3865f9ae06b).
[DataSink (DiscardingOutput) (12/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Ensuring all FileSystem streams are 
closed for task DataSink (DiscardingOutput) (12/16) 
(64a4f20c4b7d2a6545dfe3865f9ae06b) [FINISHED]
[flink-akka.actor.default-dispatcher-10] INFO 
org.apache.flink.runtime.taskexecutor.TaskExecutor - Un-registering task and 
sending final execution state FINISHED to JobManager for task DataSink 
(DiscardingOutput) 64a4f20c4b7d2a6545dfe3865f9ae06b.
[flink-akka.actor.default-dispatcher-9] INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph - DataSink 
(DiscardingOutput) (12/16) (64a4f20c4b7d2a6545dfe3865f9ae06b) switched from 
RUNNING to FINISHED.
[MapPartition (MapPartition at [3]assert_that/{Group, Unkey, Match}) (14/16)] 
INFO org.apache.flink.runtime.taskmanager.Task - MapPartition (MapPartition at 
[3]assert_that/{Group, Unkey, Match}) (14/16) 
(6eb2ef6b2f6fe0879913912d5f3551d1) switched from RUNNING to FINISHED.
[MapPartition (MapPartition at [3]assert_that/{Group, Unkey, Match}) (14/16)] 
INFO org.apache.flink.runtime.taskmanager.Task - Freeing task resources for 
MapPartition (MapPartition at [3]assert_that/{Group, Unkey, Match}) (14/16) 
(6eb2ef6b2f6fe0879913912d5f3551d1).
[flink-akka.actor.default-dispatcher-3] INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph - DataSink 
(DiscardingOutput) (14/16) (5c21a459e1b278ec7e46e0cdc55d7ea0) switched from 
CREATED to SCHEDULED.
[MapPartition (MapPartition at [3]assert_that/{Group, Unkey, Match}) (14/16)] 
INFO org.apache.flink.runtime.taskmanager.Task - Ensuring all FileSystem 
streams are closed for task MapPartition (MapPartition at 
[3]assert_that/{Group, Unkey, Match}) (14/16) 
(6eb2ef6b2f6fe0879913912d5f3551d1) [FINISHED]
[flink-akka.actor.default-dispatcher-9] INFO 
org.apache.flink.runtime.taskexecutor.TaskExecutor - Un-registering task and 
sending final execution state FINISHED to JobManager for task 

beam_PostCommit_Java_PVR_Spark_Batch - Build # 1445 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Java_PVR_Spark_Batch 
(build #1445)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Java_PVR_Spark_Batch/1445/ to 
view the results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Build failed in Jenkins: beam_PostCommit_Java_ValidatesRunner_Flink #5998

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-6 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision c1e759c10a1a5650ba7cc07ff6676637aa17dff1 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f c1e759c10a1a5650ba7cc07ff6676637aa17dff1
Commit message: "Merge pull request #10249: Fixed flaky BigQueryIO read 
performance test"
 > git rev-list --no-walk c1e759c10a1a5650ba7cc07ff6676637aa17dff1 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g :runners:flink:1.9:validatesRunner
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :runners:flink:1.9:copyResourcesOverrides NO-SOURCE
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :runners:core-java:processTestResources NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :model:job-management:extractProto
> Task :model:fn-execution:extractProto
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :runners:flink:1.9:copySourceOverrides
> Task :model:job-management:processResources
> Task :runners:flink:1.9:copyTestResourcesOverrides NO-SOURCE
> Task :model:fn-execution:processResources
> Task :sdks:java:build-tools:compileJava FROM-CACHE
> Task :runners:flink:1.9:processResources
> Task :sdks:java:build-tools:processResources
> Task :sdks:java:build-tools:classes
> Task :sdks:java:core:processResources
> Task :sdks:java:core:generateTestAvroProtocol NO-SOURCE
> Task :sdks:java:build-tools:jar
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task :sdks:java:core:generateTestAvroJava
> Task :sdks:java:core:generateTestGrammarSource NO-SOURCE
> Task :sdks:java:core:processTestResources
> Task :model:pipeline:generateProto
> Task :model:pipeline:compileJava FROM-CACHE
> Task :model:pipeline:processResources
> Task :model:pipeline:classes
> Task :model:pipeline:jar
> Task :model:fn-execution:extractIncludeProto
> Task :model:job-management:extractIncludeProto
> Task 

Build failed in Jenkins: beam_PreCommit_Python_pytest_Cron #118

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[kamil.wasilewski] Fixed a bug where the output PCollection was assigned to 
self.result


--
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-15 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 >  # 
 > timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision c1e759c10a1a5650ba7cc07ff6676637aa17dff1 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f c1e759c10a1a5650ba7cc07ff6676637aa17dff1
Commit message: "Merge pull request #10249: Fixed flaky BigQueryIO read 
performance test"
 > git rev-list --no-walk f8496d318b789716242f2f8b6381ee862b84ef3f # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g :pythonPreCommitPytest
Starting a Gradle Daemon, 2 busy Daemons could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.

FAILURE: Build failed with an exception.

* What went wrong:
Could not determine the dependencies of task 
':sdks:python:test-suites:tox:py35:preCommitPy35Pytest'.
> Task with path 'lint' not found in project 
> ':sdks:python:test-suites:tox:py35'.

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See 
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 33s

Publishing build scan...
https://scans.gradle.com/s/a2hawzzybju6y

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_sonarqube_report #1109

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[kamil.wasilewski] Fixed a bug where the output PCollection was assigned to 
self.result


--
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-1 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init  # 
 > timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision c1e759c10a1a5650ba7cc07ff6676637aa17dff1 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f c1e759c10a1a5650ba7cc07ff6676637aa17dff1
Commit message: "Merge pull request #10249: Fixed flaky BigQueryIO read 
performance test"
 > git rev-list --no-walk f8496d318b789716242f2f8b6381ee862b84ef3f # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
ERROR: SonarQube installation defined in this job (ASF Sonar Analysis) does not 
match any configured installation. Number of installations that can be 
configured: 0.
If you want to reassign jobs to a different SonarQube installation, check the 
documentation under https://redirect.sonarsource.com/plugins/jenkins.html

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_SQLBigQueryIO_Batch_Performance_Test_Java #2

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-10 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 2cc161967a0e5db6291f19296fd019cdcbb66e45 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 2cc161967a0e5db6291f19296fd019cdcbb66e45
Commit message: "Merge pull request #10150 from ibzib/df-env"
First time build. Skipping changelog.
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g --info 
-DintegrationTestPipelineOptions=["--project=apache-beam-testing","--tempLocation=gs://temp-storage-for-perf-tests/loadtests","--tempRoot=gs://temp-storage-for-perf-tests/loadtests","--metricsBigQueryDataset=beam_performance","--metricsBigQueryTable=sql_bqio_read_java_batch","--runner=DataflowRunner","--maxNumWorkers=5","--numWorkers=5","--autoscalingAlgorithm=NONE"]
 -DintegrationTestRunner=dataflow 
:sdks:java:extensions:sql:perf-tests:integrationTest --tests 
org.apache.beam.sdk.extensions.sql.meta.provider.bigquery.BigQueryIOPushDownIT
Initialized native services in: /home/jenkins/.gradle/native
Removing 0 daemon stop events from registry
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
Starting process 'Gradle build daemon'. Working directory: 
/home/jenkins/.gradle/daemon/5.2.1 Command: 
/usr/lib/jvm/java-8-openjdk-amd64/bin/java -Xmx4g -Dfile.encoding=UTF-8 
-Duser.country=US -Duser.language=en -Duser.variant -cp 
/home/jenkins/.gradle/wrapper/dists/gradle-5.2.1-all/bviwmvmbexq6idcscbicws5me/gradle-5.2.1/lib/gradle-launcher-5.2.1.jar
 org.gradle.launcher.daemon.bootstrap.GradleDaemon 5.2.1
Successfully started process 'Gradle build daemon'
An attempt to start the daemon took 3.722 secs.
The client will now receive all logging from the daemon (pid: 19991). The 
daemon log file: /home/jenkins/.gradle/daemon/5.2.1/daemon-19991.out.log
Starting build in new daemon [memory: 3.8 GB]
Closing daemon's stdin at end of input.
The daemon will no longer process any standard input.
Using 12 worker leases.
Starting Build
Using local directory build cache for build ':buildSrc' (location = 
/home/jenkins/.gradle/caches/build-cache-1, removeUnusedEntriesAfter = 7 days).

> Configure project :buildSrc
Evaluating project ':buildSrc' using build file 
'
Selected primary task 'build' from project :
file or directory 
'
 not found
:buildSrc:compileJava (Thread[Execution worker for ':buildSrc' Thread 
2,5,main]) started.

> Task :buildSrc:compileJava NO-SOURCE
file or directory 
'
 not found
Skipping task ':buildSrc:compileJava' as it has no source files and no previous 
output files.
:buildSrc:compileJava (Thread[Execution worker for ':buildSrc' Thread 
2,5,main]) completed. Took 0.04 secs.
:buildSrc:compileGroovy (Thread[Execution worker for ':buildSrc' Thread 
2,5,main]) started.

> Task :buildSrc:compileGroovy FROM-CACHE
Build cache key for task ':buildSrc:compileGroovy' is 
e883e260111f9524ce6886f17d1e2c5d
Task 

Build failed in Jenkins: beam_BiqQueryIO_Streaming_Performance_Test_Java #203

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[kcweaver] [BEAM-8251] plumb worker_(region|zone) to Environment proto

[kcweaver] Add null checks for worker region/zone options

[kamil.wasilewski] Fixed a bug where the output PCollection was assigned to 
self.result


--
[...truncated 413.41 KB...]
"message" : "Invalid table ID 
\"bqio_write_10GB_java_d7609323-305e-4c6f-ac0c-cbe1737c8cec\". Table IDs must 
be alphanumeric (plus underscores) and must be at most 1024 characters long. 
Also, Table decorators cannot be used.",
"reason" : "invalid"
  } ],
  "message" : "Invalid table ID 
\"bqio_write_10GB_java_d7609323-305e-4c6f-ac0c-cbe1737c8cec\". Table IDs must 
be alphanumeric (plus underscores) and must be at most 1024 characters long. 
Also, Table decorators cannot be used.",
  "status" : "INVALID_ARGUMENT"
}
at 
com.google.api.client.googleapis.json.GoogleJsonResponseException.from(GoogleJsonResponseException.java:150)
at 
com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest.newExceptionOnError(AbstractGoogleJsonClientRequest.java:113)
at 
com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest.newExceptionOnError(AbstractGoogleJsonClientRequest.java:40)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest$1.interceptResponse(AbstractGoogleClientRequest.java:417)
at com.google.api.client.http.HttpRequest.execute(HttpRequest.java:1132)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:515)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:448)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.execute(AbstractGoogleClientRequest.java:565)
at 
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$DatasetServiceImpl.tryCreateTable(BigQueryServicesImpl.java:520)
at 
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$DatasetServiceImpl.createTable(BigQueryServicesImpl.java:505)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.tryCreateTable(CreateTables.java:205)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.getTableDestination(CreateTables.java:160)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.lambda$processElement$0(CreateTables.java:113)
at java.util.HashMap.computeIfAbsent(HashMap.java:1126)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.processElement(CreateTables.java:112)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn$DoFnInvoker.invokeProcessElement(Unknown
 Source)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:218)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:183)
at 
org.apache.beam.runners.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:335)
at 
org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
at 
org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:49)
at 
org.apache.beam.runners.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:280)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:252)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:74)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:576)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:564)
at 
org.apache.beam.sdk.io.gcp.bigquery.PrepareWrite$1.processElement(PrepareWrite.java:82)
at 
org.apache.beam.sdk.io.gcp.bigquery.PrepareWrite$1$DoFnInvoker.invokeProcessElement(Unknown
 Source)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:218)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:180)
at 
org.apache.beam.runners.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:335)
at 

beam_PostCommit_Py_VR_Dataflow - Build # 5216 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Py_VR_Dataflow (build 
#5216)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/5216/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

beam_PostCommit_Python37 - Build # 1055 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Python37 (build #1055)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Python37/1055/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Jenkins build is back to normal : beam_PreCommit_Java_Cron #2112

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_PostCommit_Java_ValidatesRunner_Flink #5999

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[kcweaver] [BEAM-8251] plumb worker_(region|zone) to Environment proto

[kcweaver] Add null checks for worker region/zone options


--
Started by GitHub push by ibzib
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-14 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 2cc161967a0e5db6291f19296fd019cdcbb66e45 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 2cc161967a0e5db6291f19296fd019cdcbb66e45
Commit message: "Merge pull request #10150 from ibzib/df-env"
 > git rev-list --no-walk c1e759c10a1a5650ba7cc07ff6676637aa17dff1 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g :runners:flink:1.9:validatesRunner
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :runners:flink:1.9:copyResourcesOverrides NO-SOURCE
> Task :runners:core-java:processTestResources NO-SOURCE
> Task :model:fn-execution:extractProto
> Task :model:job-management:extractProto
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :model:job-management:processResources
> Task :model:fn-execution:processResources
> Task :runners:flink:1.9:copySourceOverrides
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :runners:flink:1.9:copyTestResourcesOverrides NO-SOURCE
> Task :runners:flink:1.9:processResources
> Task :sdks:java:build-tools:compileJava FROM-CACHE
> Task :sdks:java:build-tools:processResources
> Task :sdks:java:build-tools:classes
> Task :sdks:java:core:processResources
> Task :sdks:java:core:generateTestAvroProtocol NO-SOURCE
> Task :sdks:java:build-tools:jar
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task :sdks:java:core:generateTestAvroJava
> Task :sdks:java:core:generateTestGrammarSource NO-SOURCE
> Task :sdks:java:core:processTestResources
> Task :model:pipeline:generateProto
> Task :model:pipeline:compileJava FROM-CACHE
> Task :model:pipeline:processResources
> Task :model:pipeline:classes
> Task 

Build failed in Jenkins: beam_PostCommit_Python_VR_Spark #1681

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[kcweaver] [BEAM-8251] plumb worker_(region|zone) to Environment proto

[kcweaver] Add null checks for worker region/zone options


--
[...truncated 1.32 MB...]
19/12/03 19:01:18 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:41195
19/12/03 19:01:18 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/03 19:01:18 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/03 19:01:18 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575399675.95_649faa8c-3a04-4926-a03b-27dd2c3e5f7c',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/03 19:01:18 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 'experiments': [u'beam_fn_api'], 
'environment_cache_millis': u'0', 'artifact_port': u'0', 'environment_type': 
u'PROCESS', 'sdk_location': u'container', 'job_name': 
u'test_windowing_1575399675.95', 'environment_config': u'{"command": 
"
 'expansion_port': u'0', 'sdk_worker_parallelism': u'1', 'job_endpoint': 
u'localhost:50261', 'job_port': u'0'}
19/12/03 19:01:18 INFO statecache.__init__: Creating state cache with size 0
19/12/03 19:01:18 INFO sdk_worker.__init__: Creating insecure control channel 
for localhost:40043.
19/12/03 19:01:18 INFO sdk_worker.__init__: Control channel established.
19/12/03 19:01:18 INFO 
org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: Beam 
Fn Control client connected with id 262-1
19/12/03 19:01:18 INFO sdk_worker.__init__: Initializing SDKHarness with 
unbounded number of workers.
19/12/03 19:01:18 INFO sdk_worker.create_state_handler: Creating insecure state 
channel for localhost:36633.
19/12/03 19:01:18 INFO sdk_worker.create_state_handler: State channel 
established.
19/12/03 19:01:18 INFO data_plane.create_data_channel: Creating client data 
channel for localhost:43169
19/12/03 19:01:18 INFO 
org.apache.beam.runners.fnexecution.data.GrpcDataService: Beam Fn Data client 
connected.
19/12/03 19:01:18 INFO 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory: Closing 
environment urn: "beam:env:process:v1"
payload: 
"\032\202\001

19/12/03 19:01:18 INFO sdk_worker.run: No more requests from control plane
19/12/03 19:01:18 INFO sdk_worker.run: SDK Harness waiting for in-flight 
requests to complete
19/12/03 19:01:18 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/03 19:01:18 INFO data_plane.close: Closing all cached grpc data channels.
19/12/03 19:01:18 INFO sdk_worker.close: Closing all cached gRPC state handlers.
19/12/03 19:01:18 INFO sdk_worker.run: Done consuming work.
19/12/03 19:01:18 INFO sdk_worker_main.main: Python sdk harness exiting.
19/12/03 19:01:18 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Logging client 
hanged up.
19/12/03 19:01:18 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/03 19:01:18 INFO 
org.apache.beam.runners.fnexecution.artifact.AbstractArtifactRetrievalService: 
GetManifest for __no_artifacts_staged__
19/12/03 19:01:19 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Beam Fn Logging 
client connected.
19/12/03 19:01:19 INFO sdk_worker_main.main: Logging handler created.
19/12/03 19:01:19 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:33569
19/12/03 19:01:19 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/03 19:01:19 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/03 19:01:19 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575399675.95_649faa8c-3a04-4926-a03b-27dd2c3e5f7c',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/03 19:01:19 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 'experiments': [u'beam_fn_api'], 
'environment_cache_millis': u'0', 'artifact_port': u'0', 'environment_type': 
u'PROCESS', 'sdk_location': u'container', 'job_name': 
u'test_windowing_1575399675.95', 'environment_config': 

Build failed in Jenkins: beam_PostCommit_Python35 #1135

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[kcweaver] [BEAM-8251] plumb worker_(region|zone) to Environment proto

[kcweaver] Add null checks for worker region/zone options


--
[...truncated 566.34 KB...]
19/12/03 19:41:32 INFO 
org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: Beam 
Fn Control client connected with id 1-1
INFO:apache_beam.runners.worker.sdk_worker:Creating insecure state channel for 
localhost:33585.
INFO:apache_beam.runners.worker.sdk_worker:State channel established.
INFO:apache_beam.runners.worker.data_plane:Creating client data channel for 
localhost:36391
19/12/03 19:41:32 INFO 
org.apache.beam.runners.fnexecution.data.GrpcDataService: Beam Fn Data client 
connected.
19/12/03 19:41:32 WARN 
org.apache.beam.runners.spark.translation.GroupNonMergingWindowsFunctions: 
Either coder LengthPrefixCoder(ByteArrayCoder) or GlobalWindow$Coder is not 
consistent with equals. That might cause issues on some runners.
WARNING:apache_beam.io.filebasedsink:Deleting 4 existing files in target path 
matching: -*-of-%(num_shards)05d
19/12/03 19:41:48 INFO org.apache.beam.runners.spark.SparkPipelineRunner: Job 
BeamApp-jenkins-1203194127-581d7def_f9da9117-529c-47f3-bf0f-aef9cdbb7d95: 
Pipeline translated successfully. Computing outputs
INFO:apache_beam.io.filebasedsink:Starting finalize_write threads with 
num_shards: 4 (skipped: 0), batches: 4, num_threads: 4
INFO:apache_beam.io.filebasedsink:Renamed 4 shards in 0.10 seconds.
19/12/03 19:42:03 INFO org.apache.beam.runners.spark.SparkPipelineRunner: Job 
BeamApp-jenkins-1203194127-581d7def_f9da9117-529c-47f3-bf0f-aef9cdbb7d95 
finished.
19/12/03 19:42:03 WARN 
org.apache.beam.runners.spark.SparkPipelineResult$BatchMode: Collecting 
monitoring infos is not implemented yet in Spark portable runner.
19/12/03 19:42:03 INFO 
org.apache.beam.runners.fnexecution.artifact.AbstractArtifactRetrievalService: 
Manifest at 
/tmp/beam-temphg7027vw/artifacts54h443r_/job_8bdffb47-5788-4beb-bcd9-f882bc82c94a/MANIFEST
 has 1 artifact locations
19/12/03 19:42:03 INFO 
org.apache.beam.runners.fnexecution.artifact.BeamFileSystemArtifactStagingService:
 Removed dir 
/tmp/beam-temphg7027vw/artifacts54h443r_/job_8bdffb47-5788-4beb-bcd9-f882bc82c94a/
INFO:apache_beam.runners.portability.portable_runner:Job state changed to DONE
19/12/03 19:42:03 INFO 
org.apache.beam.runners.fnexecution.jobsubmission.InMemoryJobService: Getting 
job metrics for 
BeamApp-jenkins-1203194127-581d7def_f9da9117-529c-47f3-bf0f-aef9cdbb7d95
19/12/03 19:42:03 INFO 
org.apache.beam.runners.fnexecution.jobsubmission.InMemoryJobService: Finished 
getting job metrics for 
BeamApp-jenkins-1203194127-581d7def_f9da9117-529c-47f3-bf0f-aef9cdbb7d95
Exception in thread run_worker_1-1:
Traceback (most recent call last):
  File "/usr/lib/python3.5/threading.py", line 914, in _bootstrap_inner
self.run()
  File "/usr/lib/python3.5/threading.py", line 862, in run
self._target(*self._args, **self._kwargs)
  File 
"
 line 111, in run
for work_request in control_stub.Control(get_responses()):
  File 
"
 line 392, in __next__
return self._next()
  File 
"
 line 561, in _next
raise self
grpc._channel._Rendezvous: <_Rendezvous of RPC that terminated with:
status = StatusCode.UNAVAILABLE
details = "Socket closed"
debug_error_string = 
"{"created":"@1575402124.323893814","description":"Error received from peer 
ipv4:127.0.0.1:40323","file":"src/core/lib/surface/call.cc","file_line":1055,"grpc_message":"Socket
 closed","grpc_status":14}"
>

ERROR:apache_beam.runners.worker.data_plane:Failed to read inputs in the data 
plane.
Traceback (most recent call last):
  File 
"
 line 272, in _read_inputs
for elements in elements_iterator:
  File 
"
 line 392, in __next__
return self._next()
  File 
"
 line 561, in _next
raise self
grpc._channel._Rendezvous: <_Rendezvous of RPC that terminated with:
status = StatusCode.UNAVAILABLE
details = "Socket closed"
debug_error_string = 
"{"created":"@1575402124.323843227","description":"Error received from peer 

Build failed in Jenkins: beam_PostCommit_XVR_Flink #1097

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[kcweaver] [BEAM-8251] plumb worker_(region|zone) to Environment proto

[kcweaver] Add null checks for worker region/zone options


--
[...truncated 4.40 MB...]
[DataSink (DiscardingOutput) (3/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Ensuring all FileSystem streams are 
closed for task DataSink (DiscardingOutput) (3/16) 
(e4ca025b8c1ad94e6f78cd607ffaaea3) [FINISHED]
[flink-akka.actor.default-dispatcher-9] INFO 
org.apache.flink.runtime.taskexecutor.TaskExecutor - Un-registering task and 
sending final execution state FINISHED to JobManager for task DataSink 
(DiscardingOutput) e4ca025b8c1ad94e6f78cd607ffaaea3.
[flink-akka.actor.default-dispatcher-9] INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph - DataSink 
(DiscardingOutput) (3/16) (e4ca025b8c1ad94e6f78cd607ffaaea3) switched from 
RUNNING to FINISHED.
[MapPartition (MapPartition at [3]assert_that/{Group, Unkey, Match}) (4/16)] 
INFO org.apache.flink.runtime.taskmanager.Task - MapPartition (MapPartition at 
[3]assert_that/{Group, Unkey, Match}) (4/16) (d994ba843914ee0e40a6f9e60741914f) 
switched from RUNNING to FINISHED.
[MapPartition (MapPartition at [3]assert_that/{Group, Unkey, Match}) (4/16)] 
INFO org.apache.flink.runtime.taskmanager.Task - Freeing task resources for 
MapPartition (MapPartition at [3]assert_that/{Group, Unkey, Match}) (4/16) 
(d994ba843914ee0e40a6f9e60741914f).
[flink-akka.actor.default-dispatcher-9] INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph - DataSink 
(DiscardingOutput) (4/16) (bea865bd5035e6269acf507d4785e5cb) switched from 
CREATED to SCHEDULED.
[MapPartition (MapPartition at [3]assert_that/{Group, Unkey, Match}) (4/16)] 
INFO org.apache.flink.runtime.taskmanager.Task - Ensuring all FileSystem 
streams are closed for task MapPartition (MapPartition at 
[3]assert_that/{Group, Unkey, Match}) (4/16) (d994ba843914ee0e40a6f9e60741914f) 
[FINISHED]
[flink-akka.actor.default-dispatcher-8] INFO 
org.apache.flink.runtime.taskexecutor.TaskExecutor - Un-registering task and 
sending final execution state FINISHED to JobManager for task MapPartition 
(MapPartition at [3]assert_that/{Group, Unkey, Match}) 
d994ba843914ee0e40a6f9e60741914f.
[flink-akka.actor.default-dispatcher-9] INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph - DataSink 
(DiscardingOutput) (4/16) (bea865bd5035e6269acf507d4785e5cb) switched from 
SCHEDULED to DEPLOYING.
[flink-akka.actor.default-dispatcher-9] INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph - Deploying DataSink 
(DiscardingOutput) (4/16) (attempt #0) to d14a12fd-486a-4347-85b0-5ae235f575cd 
@ localhost (dataPort=-1)
[flink-akka.actor.default-dispatcher-9] INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph - MapPartition 
(MapPartition at [3]assert_that/{Group, Unkey, Match}) (4/16) 
(d994ba843914ee0e40a6f9e60741914f) switched from RUNNING to FINISHED.
[flink-akka.actor.default-dispatcher-8] INFO 
org.apache.flink.runtime.taskexecutor.TaskExecutor - Received task DataSink 
(DiscardingOutput) (4/16).
[DataSink (DiscardingOutput) (4/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - DataSink (DiscardingOutput) (4/16) 
(bea865bd5035e6269acf507d4785e5cb) switched from CREATED to DEPLOYING.
[DataSink (DiscardingOutput) (4/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Creating FileSystem stream leak 
safety net for task DataSink (DiscardingOutput) (4/16) 
(bea865bd5035e6269acf507d4785e5cb) [DEPLOYING]
[DataSink (DiscardingOutput) (4/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Loading JAR files for task DataSink 
(DiscardingOutput) (4/16) (bea865bd5035e6269acf507d4785e5cb) [DEPLOYING].
[DataSink (DiscardingOutput) (4/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Registering task at network: 
DataSink (DiscardingOutput) (4/16) (bea865bd5035e6269acf507d4785e5cb) 
[DEPLOYING].
[DataSink (DiscardingOutput) (4/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - DataSink (DiscardingOutput) (4/16) 
(bea865bd5035e6269acf507d4785e5cb) switched from DEPLOYING to RUNNING.
[flink-akka.actor.default-dispatcher-8] INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph - DataSink 
(DiscardingOutput) (4/16) (bea865bd5035e6269acf507d4785e5cb) switched from 
DEPLOYING to RUNNING.
[DataSink (DiscardingOutput) (4/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - DataSink (DiscardingOutput) (4/16) 
(bea865bd5035e6269acf507d4785e5cb) switched from RUNNING to FINISHED.
[DataSink (DiscardingOutput) (4/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Freeing task resources for DataSink 
(DiscardingOutput) (4/16) (bea865bd5035e6269acf507d4785e5cb).
[DataSink (DiscardingOutput) (4/16)] INFO 
org.apache.flink.runtime.taskmanager.Task - Ensuring all FileSystem streams are 
closed for task DataSink (DiscardingOutput) (4/16) 
(bea865bd5035e6269acf507d4785e5cb) 

Jenkins build is back to normal : beam_PostCommit_Java11_ValidatesRunner_Dataflow #1657

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_PostCommit_Python2 #1129

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[kcweaver] [BEAM-8251] plumb worker_(region|zone) to Environment proto

[kcweaver] Add null checks for worker region/zone options


--
[...truncated 1.63 MB...]
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.state.TaskExecutorLocalStateStoresManager - Shutting 
down TaskExecutorLocalStateStoresManager.
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager 
removed spill file directory /tmp/flink-io-b16fb077-bbd1-4675-aa4f-231f77131e89
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.io.network.NettyShuffleEnvironment - Shutting down the 
network environment and its components.
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager 
removed spill file directory 
/tmp/flink-netty-shuffle-0ec9fa07-ab48-4e17-88c2-be6e5d850bb6
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.taskexecutor.KvStateService - Shutting down the 
kvState service and its components.
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.taskexecutor.JobLeaderService - Stop job leader 
service.
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.filecache.FileCache - removed file cache directory 
/tmp/flink-dist-cache-a94579ff-db93-45a4-aa69-ad448108754d
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.taskexecutor.TaskExecutor - Stopped TaskExecutor 
akka://flink/user/taskmanager_0.
[ForkJoinPool.commonPool-worker-9] INFO 
org.apache.flink.runtime.dispatcher.DispatcherRestEndpoint - Removing cache 
directory /tmp/flink-web-ui
[ForkJoinPool.commonPool-worker-9] INFO 
org.apache.flink.runtime.dispatcher.DispatcherRestEndpoint - Shut down complete.
[flink-akka.actor.default-dispatcher-10] INFO 
org.apache.flink.runtime.resourcemanager.StandaloneResourceManager - Shut down 
cluster because application is in CANCELED, diagnostics 
DispatcherResourceManagerComponent has been closed..
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.dispatcher.StandaloneDispatcher - Stopping dispatcher 
akka://flink/user/dispatcher.
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.dispatcher.StandaloneDispatcher - Stopping all 
currently running jobs of dispatcher akka://flink/user/dispatcher.
[flink-akka.actor.default-dispatcher-14] INFO 
org.apache.flink.runtime.resourcemanager.slotmanager.SlotManagerImpl - Closing 
the SlotManager.
[flink-akka.actor.default-dispatcher-14] INFO 
org.apache.flink.runtime.resourcemanager.slotmanager.SlotManagerImpl - 
Suspending the SlotManager.
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.rest.handler.legacy.backpressure.StackTraceSampleCoordinator
 - Shutting down stack trace sample coordinator.
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.dispatcher.StandaloneDispatcher - Stopped dispatcher 
akka://flink/user/dispatcher.
[flink-akka.actor.default-dispatcher-12] INFO 
org.apache.flink.runtime.rpc.akka.AkkaRpcService - Stopping Akka RPC service.
[flink-metrics-2] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - 
Shutting down remote daemon.
[flink-metrics-2] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - 
Remote daemon shut down; proceeding with flushing remote transports.
[flink-metrics-2] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - 
Remoting shut down.
[flink-metrics-2] INFO org.apache.flink.runtime.rpc.akka.AkkaRpcService - 
Stopping Akka RPC service.
[flink-metrics-2] INFO org.apache.flink.runtime.rpc.akka.AkkaRpcService - 
Stopped Akka RPC service.
[flink-akka.actor.default-dispatcher-8] INFO 
org.apache.flink.runtime.blob.PermanentBlobCache - Shutting down BLOB cache
[flink-akka.actor.default-dispatcher-8] INFO 
org.apache.flink.runtime.blob.TransientBlobCache - Shutting down BLOB cache
[flink-akka.actor.default-dispatcher-8] INFO 
org.apache.flink.runtime.blob.BlobServer - Stopped BLOB server at 0.0.0.0:37771
[flink-akka.actor.default-dispatcher-8] INFO 
org.apache.flink.runtime.rpc.akka.AkkaRpcService - Stopped Akka RPC service.
[flink-runner-job-invoker] INFO 
org.apache.beam.runners.flink.FlinkPipelineRunner - Execution finished in 43693 
msecs
[flink-runner-job-invoker] INFO 
org.apache.beam.runners.flink.FlinkPipelineRunner - Final accumulator values:
[flink-runner-job-invoker] INFO 
org.apache.beam.runners.flink.FlinkPipelineRunner - __metricscontainers : 
MetricQueryResults(Counters(19group/GroupByWindow.None/beam:env:external:v1:0:beam:metric:pardo_execution_time:process_bundle_msecs:v1
 {PTRANSFORM=ref_AppliedPTransform_format_24}: 0, 
6format.None/beam:env:external:v1:0:beam:metric:pardo_execution_time:start_bundle_msecs:v1
 

Build failed in Jenkins: beam_PostCommit_Java_ValidatesRunner_Flink #6000

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[rohde.samuel] change definition of has_unbounded_sources in PIN to a 
pre-determined

[rohde.samuel] typo

[rohde.samuel] lint

[rohde.samuel] remove BigQueryReader from list

[rohde.samuel] lint

[rohde.samuel] remove external

[rohde.samuel] remove external

[github] Merge pull request #10248: [BEAM-7274] Add type conversions factory


--
Started by GitHub push by reuvenlax
Started by GitHub push by reuvenlax
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-15 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 7681f8a7d86755cad69bfc4ebd20d7268df1efab (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 7681f8a7d86755cad69bfc4ebd20d7268df1efab
Commit message: "[BEAM-8335] Change has_unbounded_sources to predetermined list 
of sources"
 > git rev-list --no-walk 2cc161967a0e5db6291f19296fd019cdcbb66e45 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g :runners:flink:1.9:validatesRunner
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :runners:flink:1.9:copyResourcesOverrides NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :runners:core-java:processTestResources NO-SOURCE
> Task :model:fn-execution:extractProto
> Task :model:job-management:extractProto
> Task :model:job-management:processResources
> Task :model:fn-execution:processResources
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :runners:flink:1.9:copySourceOverrides
> Task :runners:flink:1.9:copyTestResourcesOverrides NO-SOURCE
> Task :runners:flink:1.9:processResources
> Task :sdks:java:build-tools:compileJava FROM-CACHE
> Task :sdks:java:build-tools:processResources
> Task :sdks:java:build-tools:classes
> Task :sdks:java:core:processResources
> Task :sdks:java:core:generateTestAvroProtocol NO-SOURCE
> Task :sdks:java:build-tools:jar
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task :sdks:java:core:generateTestAvroJava

Jenkins build is back to normal : beam_PostCommit_XVR_Flink #1098

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Jenkins build is back to normal : beam_PostCommit_Python2 #1130

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_PostCommit_Go_VR_Spark #1704

2019-12-03 Thread Apache Jenkins Server
See 
<https://builds.apache.org/job/beam_PostCommit_Go_VR_Spark/1704/display/redirect?page=changes>

Changes:

[chamikara] Merge pull request #10262: [BEAM-8575] Revert validates runner test 
tag

[github] [BEAM-8835] Disable Flink Uber Jar by default. (#10270)

[github] [BEAM-8651] [BEAM-8874] Change pickle_lock to be a reentrant lock, and


--
[...truncated 158.87 KB...]
key: "n1"
value: <
  unique_name: "n1"
  coder_id: "c0"
  is_bounded: BOUNDED
  windowing_strategy_id: "w0"
>
  >
  pcollections: <
key: "n2"
value: <
  unique_name: "n2"
  coder_id: "c3"
  is_bounded: BOUNDED
  windowing_strategy_id: "w0"
>
  >
  pcollections: <
key: "n3"
value: <
  unique_name: "n3"
  coder_id: "c0"
  is_bounded: BOUNDED
  windowing_strategy_id: "w0"
>
  >
  pcollections: <
key: "n4"
value: <
  unique_name: "n4"
  coder_id: "c3"
  is_bounded: BOUNDED
  windowing_strategy_id: "w0"
>
  >
  pcollections: <
key: "n5"
value: <
  unique_name: "n5"
  coder_id: "c0"
  is_bounded: BOUNDED
  windowing_strategy_id: "w0"
>
  >
  pcollections: <
key: "n6"
value: <
  unique_name: "n6"
  coder_id: "c3"
  is_bounded: BOUNDED
  windowing_strategy_id: "w0"
>
  >
  pcollections: <
key: "n7"
value: <
  unique_name: "n7"
  coder_id: "c3"
  is_bounded: BOUNDED
  windowing_strategy_id: "w0"
>
  >
  pcollections: <
key: "n8"
value: <
  unique_name: "n8"
  coder_id: "c4"
  is_bounded: BOUNDED
  windowing_strategy_id: "w0"
>
  >
  pcollections: <
key: "n9"
value: <
  unique_name: "n9"
  coder_id: "c6"
  is_bounded: BOUNDED
  windowing_strategy_id: "w0"
>
  >
  windowing_strategies: <
key: "w0"
value: <
  window_fn: <
spec: <
  urn: "beam:windowfn:global_windows:v0.1"
>
  >
  merge_status: NON_MERGING
  window_coder_id: "c1"
  trigger: <
default: <
>
  >
  accumulation_mode: DISCARDING
  output_time: END_OF_WINDOW
  closing_behavior: EMIT_IF_NONEMPTY
  OnTimeBehavior: FIRE_ALWAYS
>
  >
  coders: <
key: "c0"
value: <
  spec: <
urn: "beam:coder:bytes:v1"
  >
>
  >
  coders: <
key: "c1"
value: <
  spec: <
urn: "beam:coder:global_window:v1"
  >
>
  >
  coders: <
key: "c2"
value: <
  spec: <
urn: "beam:go:coder:custom:v1"
payload: 
"Cgd2YXJpbnR6EgIIAhqFAQpxZ2l0aHViLmNvbS9hcGFjaGUvYmVhbS9zZGtzL2dvL3Rlc3QvdmVuZG9yL2dpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby9wa2cvYmVhbS9jb3JlL3J1bnRpbWUvY29kZXJ4LmVuY1ZhckludFoSEAgWIgQIGUAPKgYIFBICCAgikQEKcWdpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby90ZXN0L3ZlbmRvci9naXRodWIuY29tL2FwYWNoZS9iZWFtL3Nka3MvZ28vcGtnL2JlYW0vY29yZS9ydW50aW1lL2NvZGVyeC5kZWNWYXJJbnRaEhwIFiIECBlAAyIGCBQSAggIKgQIGUAPKgQIGUAB"
  >
>
  >
  coders: <
key: "c3"
value: <
  spec: <
urn: "beam:coder:length_prefix:v1"
  >
  component_coder_ids: "c2"
>
  >
  coders: <
key: "c4"
value: <
  spec: <
urn: "beam:coder:kv:v1"
  >
  component_coder_ids: "c3"
  component_coder_ids: "c3"
>
  >
  coders: <
key: "c5"
value: <
  spec: <
urn: "beam:coder:iterable:v1"
  >
  component_coder_ids: "c3"
>
  >
  coders: <
key: "c6"
value: <
  spec: <
urn: "beam:coder:kv:v1"
  >
  component_coder_ids: "c3"
  component_coder_ids: "c5"
>
  >
  environments: <
key: "go"
value: <
  urn: "beam:env:docker:v1"
  payload: "\n
  >
>
root_transform_ids: "e5"
root_transform_ids: "e3"
root_transform_ids: "e4"
root_transform_ids: "e6"
root_transform_ids: "e1"
root_transform_ids: "e2"
root_transform_ids: "e7"
root_transform_ids: "s1"
2019/12/03 23:00:17 Test flatten:flatten failed:connecting 

Build failed in Jenkins: beam_PostCommit_Java_ValidatesRunner_Direct #2003

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[rohde.samuel] change definition of has_unbounded_sources in PIN to a 
pre-determined

[rohde.samuel] typo

[rohde.samuel] lint

[rohde.samuel] remove BigQueryReader from list

[rohde.samuel] lint

[rohde.samuel] remove external

[rohde.samuel] remove external

[github] Merge pull request #10248: [BEAM-7274] Add type conversions factory

[chamikara] Merge pull request #10262: [BEAM-8575] Revert validates runner test 
tag


--
Started by GitHub push by reuvenlax
Started by GitHub push by reuvenlax
Started by GitHub push by reuvenlax
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-15 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 2ee01c7f86fd061f6f6da528e0bc145cc3c40903 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 2ee01c7f86fd061f6f6da528e0bc145cc3c40903
Commit message: "Merge pull request #10262: [BEAM-8575] Revert validates runner 
test tag to some tests. They caus…"
 > git rev-list --no-walk 2cc161967a0e5db6291f19296fd019cdcbb66e45 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 :runners:direct-java:validatesRunner
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :runners:direct-java:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :runners:local-java:processResources NO-SOURCE
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :runners:direct-java:processTestResources NO-SOURCE
> Task :runners:core-java:processTestResources NO-SOURCE
> Task :runners:core-construction-java:processTestResources NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :model:fn-execution:extractProto
> Task :model:job-management:extractProto
> Task :model:job-management:processResources
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :model:fn-execution:processResources
> Task :sdks:java:core:processResources
> Task :sdks:java:core:generateTestAvroProtocol NO-SOURCE
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task :sdks:java:core:generateTestAvroJava
> Task :sdks:java:core:generateTestGrammarSource NO-SOURCE
> Task 

Build failed in Jenkins: beam_PostCommit_Python_VR_Spark #1682

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[rohde.samuel] change definition of has_unbounded_sources in PIN to a 
pre-determined

[rohde.samuel] typo

[rohde.samuel] lint

[rohde.samuel] remove BigQueryReader from list

[rohde.samuel] lint

[rohde.samuel] remove external

[rohde.samuel] remove external

[github] Merge pull request #10248: [BEAM-7274] Add type conversions factory

[chamikara] Merge pull request #10262: [BEAM-8575] Revert validates runner test 
tag


--
[...truncated 1.31 MB...]
19/12/03 21:51:57 INFO org.apache.beam.runners.spark.SparkPipelineRunner: 
Running job test_windowing_1575409916.95_17d55ea6-f15e-4fb4-921d-c3ebbaeb60c6 
on Spark master local
19/12/03 21:51:57 WARN 
org.apache.beam.runners.spark.translation.GroupNonMergingWindowsFunctions: 
Either coder LengthPrefixCoder(ByteArrayCoder) or GlobalWindow$Coder is not 
consistent with equals. That might cause issues on some runners.
19/12/03 21:51:57 INFO org.apache.beam.runners.spark.SparkPipelineRunner: Job 
test_windowing_1575409916.95_17d55ea6-f15e-4fb4-921d-c3ebbaeb60c6: Pipeline 
translated successfully. Computing outputs
19/12/03 21:51:58 INFO 
org.apache.beam.runners.fnexecution.artifact.AbstractArtifactRetrievalService: 
GetManifest for __no_artifacts_staged__
19/12/03 21:51:58 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Beam Fn Logging 
client connected.
19/12/03 21:51:58 INFO sdk_worker_main.main: Logging handler created.
19/12/03 21:51:58 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:42641
19/12/03 21:51:58 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/03 21:51:58 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/03 21:51:58 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575409916.95_17d55ea6-f15e-4fb4-921d-c3ebbaeb60c6',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/03 21:51:58 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 'experiments': [u'beam_fn_api'], 
'environment_cache_millis': u'0', 'artifact_port': u'0', 'environment_type': 
u'PROCESS', 'sdk_location': u'container', 'job_name': 
u'test_windowing_1575409916.95', 'environment_config': u'{"command": 
"
 'expansion_port': u'0', 'sdk_worker_parallelism': u'1', 'job_endpoint': 
u'localhost:53383', 'job_port': u'0'}
19/12/03 21:51:58 INFO statecache.__init__: Creating state cache with size 0
19/12/03 21:51:58 INFO sdk_worker.__init__: Creating insecure control channel 
for localhost:36467.
19/12/03 21:51:58 INFO sdk_worker.__init__: Control channel established.
19/12/03 21:51:58 INFO 
org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: Beam 
Fn Control client connected with id 261-1
19/12/03 21:51:58 INFO sdk_worker.__init__: Initializing SDKHarness with 
unbounded number of workers.
19/12/03 21:51:58 INFO sdk_worker.create_state_handler: Creating insecure state 
channel for localhost:41631.
19/12/03 21:51:58 INFO sdk_worker.create_state_handler: State channel 
established.
19/12/03 21:51:58 INFO data_plane.create_data_channel: Creating client data 
channel for localhost:43143
19/12/03 21:51:58 INFO 
org.apache.beam.runners.fnexecution.data.GrpcDataService: Beam Fn Data client 
connected.
19/12/03 21:51:58 INFO 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory: Closing 
environment urn: "beam:env:process:v1"
payload: 
"\032\202\001

19/12/03 21:51:58 INFO sdk_worker.run: No more requests from control plane
19/12/03 21:51:58 INFO sdk_worker.run: SDK Harness waiting for in-flight 
requests to complete
19/12/03 21:51:58 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/03 21:51:58 INFO data_plane.close: Closing all cached grpc data channels.
19/12/03 21:51:58 INFO sdk_worker.close: Closing all cached gRPC state handlers.
19/12/03 21:51:58 INFO sdk_worker.run: Done consuming work.
19/12/03 21:51:58 INFO sdk_worker_main.main: Python sdk harness exiting.
19/12/03 21:51:58 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Logging client 
hanged up.
19/12/03 21:51:58 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/03 21:51:58 INFO 
org.apache.beam.runners.fnexecution.artifact.AbstractArtifactRetrievalService: 
GetManifest for 

Build failed in Jenkins: beam_PostCommit_Java_PortabilityApi #3517

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[rohde.samuel] change definition of has_unbounded_sources in PIN to a 
pre-determined

[rohde.samuel] typo

[rohde.samuel] lint

[rohde.samuel] remove BigQueryReader from list

[rohde.samuel] lint

[rohde.samuel] remove external

[rohde.samuel] remove external

[github] Merge pull request #10248: [BEAM-7274] Add type conversions factory


--
[...truncated 32.63 KB...]
> Task :sdks:java:fn-execution:jar
> Task :runners:core-construction-java:jar
> Task :runners:core-java:compileJava FROM-CACHE
> Task :runners:core-java:classes UP-TO-DATE
> Task :runners:core-java:jar
> Task :sdks:java:io:kafka:compileJava FROM-CACHE
> Task :sdks:java:io:kafka:classes UP-TO-DATE
> Task :sdks:java:extensions:google-cloud-platform-core:compileJava FROM-CACHE
> Task :sdks:java:extensions:google-cloud-platform-core:classes UP-TO-DATE
> Task :sdks:java:io:common:compileTestJava FROM-CACHE
> Task :sdks:java:io:common:testClasses UP-TO-DATE
> Task :sdks:java:io:common:testJar
> Task :sdks:java:io:kafka:jar
> Task :sdks:java:extensions:google-cloud-platform-core:jar
> Task :sdks:java:testing:test-utils:compileJava FROM-CACHE
> Task :sdks:java:testing:test-utils:classes UP-TO-DATE
> Task :sdks:java:testing:test-utils:jar
> Task :sdks:java:harness:compileJava FROM-CACHE
> Task :sdks:java:harness:classes UP-TO-DATE
> Task :sdks:java:testing:test-utils:compileTestJava FROM-CACHE
> Task :sdks:java:testing:test-utils:testClasses UP-TO-DATE
> Task :sdks:java:testing:test-utils:testJar
> Task :sdks:java:io:google-cloud-platform:compileJava FROM-CACHE
> Task :sdks:java:io:google-cloud-platform:classes UP-TO-DATE
> Task :sdks:java:io:google-cloud-platform:jar
> Task :sdks:java:core:compileTestJava FROM-CACHE
> Task :sdks:java:core:testClasses
> Task :runners:google-cloud-dataflow-java:compileJava FROM-CACHE
> Task :runners:google-cloud-dataflow-java:classes
> Task :runners:google-cloud-dataflow-java:jar

> Task :sdks:go:resolveBuildDependencies
Resolving github.com/google/pprof: 
commit='a8f279b7952b27edbcb72e5a6c69ee9be4c8ad93', 
urls=[https://github.com/google/pprof.git, g...@github.com:google/pprof.git]
Resolving github.com/googleapis/gax-go: 
commit='317e0006254c44a0ac427cc52a0e083ff0b9622f', 
urls=[https://github.com/googleapis/gax-go.git, 
g...@github.com:googleapis/gax-go.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/hashicorp/hcl: 
commit='23c074d0eceb2b8a5bfdbb271ab780cde70f05a8', 
urls=[https://github.com/hashicorp/hcl.git, g...@github.com:hashicorp/hcl.git]
Resolving github.com/ianlancetaylor/demangle: 
commit='4883227f66371e02c4948937d3e2be1664d9be38', 
urls=[https://github.com/ianlancetaylor/demangle.git, 
g...@github.com:ianlancetaylor/demangle.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/kr/fs: commit='2788f0dbd16903de03cb8186e5c7d97b69ad387b', 
urls=[https://github.com/kr/fs.git, g...@github.com:kr/fs.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/magiconair/properties: 
commit='49d762b9817ba1c2e9d0c69183c2b4a8b8f1d934', 
urls=[https://github.com/magiconair/properties.git, 
g...@github.com:magiconair/properties.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: 
commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', 
urls=[https://github.com/coreos/etcd.git, g...@github.com:coreos/etcd.git]
Resolving github.com/mitchellh/go-homedir: 
commit='b8bc1bf767474819792c23f32d8286a45736f1c6', 
urls=[https://github.com/mitchellh/go-homedir.git, 
g...@github.com:mitchellh/go-homedir.git]
Resolving github.com/mitchellh/mapstructure: 
commit='a4e142e9c047c904fa2f1e144d9a84e6133024bc', 
urls=[https://github.com/mitchellh/mapstructure.git, 
g...@github.com:mitchellh/mapstructure.git]
Resolving github.com/nightlyone/lockfile: 
commit='0ad87eef1443f64d3d8c50da647e2b1552851124', 

beam_PostCommit_Py_VR_Dataflow - Build # 5217 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Py_VR_Dataflow (build 
#5217)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/5217/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

beam_PostCommit_Python36 - Build # 1134 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Python36 (build #1134)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Python36/1134/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Jenkins build is back to normal : beam_PostCommit_Python35 #1136

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



beam_PostCommit_Python37 - Build # 1056 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Python37 (build #1056)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Python37/1056/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

beam_PreCommit_Python_Cron - Build # 2112 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PreCommit_Python_Cron (build 
#2112)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PreCommit_Python_Cron/2112/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Build failed in Jenkins: beam_PostCommit_Java_ValidatesRunner_Flink #6001

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[chamikara] Merge pull request #10262: [BEAM-8575] Revert validates runner test 
tag

[github] [BEAM-8835] Disable Flink Uber Jar by default. (#10270)

[github] [BEAM-8651] [BEAM-8874] Change pickle_lock to be a reentrant lock, and


--
Started by GitHub push by chamikaramj
Started by GitHub push by chamikaramj
Started by GitHub push by chamikaramj
Started by GitHub push by chamikaramj
Started by GitHub push by chamikaramj
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-6 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision fd8299d290b70206789805386a57af61e9e7b628 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f fd8299d290b70206789805386a57af61e9e7b628
Commit message: "[BEAM-8651] [BEAM-8874] Change pickle_lock to be a reentrant 
lock, and use it on Py3 only. (#10265)"
 > git rev-list --no-walk 7681f8a7d86755cad69bfc4ebd20d7268df1efab # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g :runners:flink:1.9:validatesRunner
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :runners:flink:1.9:copyResourcesOverrides NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :model:fn-execution:extractProto
> Task :model:job-management:extractProto
> Task :runners:core-java:processTestResources NO-SOURCE
> Task :model:job-management:processResources
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :model:fn-execution:processResources
> Task :runners:flink:1.9:copySourceOverrides
> Task :runners:flink:1.9:copyTestResourcesOverrides NO-SOURCE
> Task :runners:flink:1.9:processResources
> Task :sdks:java:build-tools:compileJava FROM-CACHE
> Task :sdks:java:build-tools:processResources
> Task :sdks:java:build-tools:classes
> Task :sdks:java:core:processResources
> Task :sdks:java:core:generateTestAvroProtocol NO-SOURCE
> Task :sdks:java:build-tools:jar
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task 

beam_PostCommit_Python37 - Build # 1057 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Python37 (build #1057)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Python37/1057/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Jenkins build is back to normal : beam_PostCommit_Java_ValidatesRunner_Direct #2004

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_PostCommit_Python2 #1131

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[ehudm] [BEAM-8489] Filter: don't use callable's output type

[lostluck] [GoSDK] Handle data write errors & stream recreate

[github] [BEAM-8835] Disable Flink Uber Jar by default. (#10270)

[lostluck] [GoSDK] Cancel stream context on dataWriter error

[github] [BEAM-8651] [BEAM-8874] Change pickle_lock to be a reentrant lock, and

[lostluck] [GoSDK] Don't panic if debug symbols are striped

[lcwik] [BEAM-8523] Regenerate Go protos with respect to changes in #9959


--
[...truncated 281.69 KB...]
test_1  | DEBUG Uploading 'kinglear.txt' to '/kinglear.txt'.
test_1  | INFO  Writing to '/kinglear.txt'.
test_1  | DEBUG Resolved path '/kinglear.txt' to '/kinglear.txt'.
test_1  | DEBUG http://namenode:50070 "PUT 
/webhdfs/v1/kinglear.txt?user.name=root=True=CREATE HTTP/1.1" 307 0
test_1  | DEBUG Starting new HTTP connection (1): datanode:50075
datanode_1  | 19/12/04 01:55:34 INFO datanode.webhdfs: 192.168.112.4 PUT 
/webhdfs/v1/kinglear.txt?op=CREATE=root=namenode:8020==true=true=root
 201
namenode_1  | 19/12/04 01:55:34 INFO hdfs.StateChange: BLOCK* allocate 
blk_1073741825_1001, replicas=192.168.112.3:50010 for /kinglear.txt
datanode_1  | 19/12/04 01:55:34 INFO datanode.DataNode: Receiving 
BP-9727828-192.168.112.2-1575424483590:blk_1073741825_1001 src: 
/192.168.112.3:44030 dest: /192.168.112.3:50010
datanode_1  | 19/12/04 01:55:34 INFO DataNode.clienttrace: src: 
/192.168.112.3:44030, dest: /192.168.112.3:50010, bytes: 157283, op: 
HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_-230449465_67, offset: 0, srvID: 
a7ae053e-45d1-426c-b0c6-48633d0e1fe0, blockid: 
BP-9727828-192.168.112.2-1575424483590:blk_1073741825_1001, duration: 15100102
datanode_1  | 19/12/04 01:55:34 INFO datanode.DataNode: PacketResponder: 
BP-9727828-192.168.112.2-1575424483590:blk_1073741825_1001, 
type=LAST_IN_PIPELINE terminating
namenode_1  | 19/12/04 01:55:34 INFO namenode.FSNamesystem: BLOCK* 
blk_1073741825_1001 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) 
in file /kinglear.txt
namenode_1  | 19/12/04 01:55:34 INFO namenode.EditLogFileOutputStream: Nothing 
to flush
namenode_1  | 19/12/04 01:55:34 INFO hdfs.StateChange: DIR* completeFile: 
/kinglear.txt is closed by DFSClient_NONMAPREDUCE_-230449465_67
test_1  | DEBUG Upload of 'kinglear.txt' to '/kinglear.txt' complete.
test_1  | 
/usr/local/lib/python2.7/site-packages/apache_beam/__init__.py:84: UserWarning: 
You are using Apache Beam with Python 2. New releases of Apache Beam will soon 
support Python 3 only.
test_1  |   'You are using Apache Beam with Python 2. '
test_1  | INFO:root:Missing pipeline option (runner). Executing pipeline 
using the default runner: DirectRunner.
test_1  | 
INFO:apache_beam.runners.portability.fn_api_runner_transforms:
  

test_1  | 
INFO:apache_beam.runners.portability.fn_api_runner_transforms:
  
test_1  | 
INFO:apache_beam.runners.portability.fn_api_runner_transforms:
  
test_1  | 
INFO:apache_beam.runners.portability.fn_api_runner_transforms:
  
test_1  | 
INFO:apache_beam.runners.portability.fn_api_runner_transforms:
  
test_1  | 
INFO:apache_beam.runners.portability.fn_api_runner_transforms:
  
test_1  | 
INFO:apache_beam.runners.portability.fn_api_runner_transforms:
  
test_1  | 
INFO:apache_beam.runners.portability.fn_api_runner_transforms:
  
test_1  | 
INFO:apache_beam.runners.portability.fn_api_runner_transforms:
  
test_1  | 
INFO:apache_beam.runners.portability.fn_api_runner_transforms:
  
test_1  | 
INFO:apache_beam.runners.portability.fn_api_runner_transforms:
  
test_1  | 
INFO:apache_beam.runners.portability.fn_api_runner_transforms:
  
test_1  | INFO:apache_beam.runners.worker.statecache:Creating state cache 
with size 100
test_1  | INFO:apache_beam.runners.portability.fn_api_runner:Created Worker 
handler  for environment urn: "beam:env:embedded_python:v1"
test_1  | 
test_1  | INFO:apache_beam.runners.portability.fn_api_runner:Running 
(ref_AppliedPTransform_read/Read_3)+((ref_AppliedPTransform_split_4)+((ref_AppliedPTransform_pair_with_one_5)+(group/Write)))
datanode_1  | 19/12/04 01:55:38 INFO datanode.webhdfs: 192.168.112.4 GET 
/webhdfs/v1/kinglear.txt?op=OPEN=root=namenode:8020=157284=0
 200
test_1  | INFO:apache_beam.runners.portability.fn_api_runner:Running 

Jenkins build is back to normal : beam_SQLBigQueryIO_Batch_Performance_Test_Java #3

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Jenkins build is back to normal : beam_PostCommit_Java_PortabilityApi #3518

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Build failed in Jenkins: beam_PostCommit_Python_VR_Spark #1683

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[ehudm] [BEAM-8489] Filter: don't use callable's output type

[lostluck] [GoSDK] Handle data write errors & stream recreate

[github] [BEAM-8835] Disable Flink Uber Jar by default. (#10270)

[lostluck] [GoSDK] Cancel stream context on dataWriter error

[github] [BEAM-8651] [BEAM-8874] Change pickle_lock to be a reentrant lock, and

[lostluck] [GoSDK] Don't panic if debug symbols are striped

[lcwik] [BEAM-8523] Regenerate Go protos with respect to changes in #9959


--
[...truncated 1.32 MB...]
19/12/04 01:40:08 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:35573
19/12/04 01:40:08 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/04 01:40:08 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/04 01:40:08 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575423603.25_e9c31a43-4921-421e-b4c0-3413b1f8578c',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/04 01:40:08 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 'experiments': [u'beam_fn_api'], 
'environment_cache_millis': u'0', 'artifact_port': u'0', 'environment_type': 
u'PROCESS', 'sdk_location': u'container', 'job_name': 
u'test_windowing_1575423603.25', 'environment_config': u'{"command": 
"
 'expansion_port': u'0', 'sdk_worker_parallelism': u'1', 'job_endpoint': 
u'localhost:47731', 'job_port': u'0'}
19/12/04 01:40:08 INFO statecache.__init__: Creating state cache with size 0
19/12/04 01:40:08 INFO sdk_worker.__init__: Creating insecure control channel 
for localhost:39001.
19/12/04 01:40:08 INFO sdk_worker.__init__: Control channel established.
19/12/04 01:40:08 INFO sdk_worker.__init__: Initializing SDKHarness with 
unbounded number of workers.
19/12/04 01:40:08 INFO 
org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: Beam 
Fn Control client connected with id 262-1
19/12/04 01:40:08 INFO sdk_worker.create_state_handler: Creating insecure state 
channel for localhost:38877.
19/12/04 01:40:08 INFO sdk_worker.create_state_handler: State channel 
established.
19/12/04 01:40:08 INFO data_plane.create_data_channel: Creating client data 
channel for localhost:40891
19/12/04 01:40:08 INFO 
org.apache.beam.runners.fnexecution.data.GrpcDataService: Beam Fn Data client 
connected.
19/12/04 01:40:08 INFO 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory: Closing 
environment urn: "beam:env:process:v1"
payload: 
"\032\202\001

19/12/04 01:40:08 INFO sdk_worker.run: No more requests from control plane
19/12/04 01:40:08 INFO sdk_worker.run: SDK Harness waiting for in-flight 
requests to complete
19/12/04 01:40:08 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/04 01:40:08 INFO data_plane.close: Closing all cached grpc data channels.
19/12/04 01:40:08 INFO sdk_worker.close: Closing all cached gRPC state handlers.
19/12/04 01:40:08 INFO sdk_worker.run: Done consuming work.
19/12/04 01:40:08 INFO sdk_worker_main.main: Python sdk harness exiting.
19/12/04 01:40:08 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Logging client 
hanged up.
19/12/04 01:40:08 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/04 01:40:08 INFO 
org.apache.beam.runners.fnexecution.artifact.AbstractArtifactRetrievalService: 
GetManifest for __no_artifacts_staged__
19/12/04 01:40:09 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Beam Fn Logging 
client connected.
19/12/04 01:40:09 INFO sdk_worker_main.main: Logging handler created.
19/12/04 01:40:09 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:46009
19/12/04 01:40:09 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/04 01:40:09 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/04 01:40:09 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575423603.25_e9c31a43-4921-421e-b4c0-3413b1f8578c',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/04 

Build failed in Jenkins: beam_PostCommit_Java_ValidatesRunner_Flink #6002

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[ehudm] [BEAM-8489] Filter: don't use callable's output type

[lostluck] [GoSDK] Handle data write errors & stream recreate

[lostluck] [GoSDK] Cancel stream context on dataWriter error

[lostluck] [GoSDK] Don't panic if debug symbols are striped

[lcwik] [BEAM-8523] Regenerate Go protos with respect to changes in #9959


--
Started by GitHub push by lostluck
Started by GitHub push by lostluck
Started by GitHub push by lostluck
Started by GitHub push by lostluck
Started by GitHub push by lostluck
Started by GitHub push by lostluck
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-15 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision b7fad128a710213267683f3ecf3f890c7a3b82d9 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f b7fad128a710213267683f3ecf3f890c7a3b82d9
Commit message: "Merge pull request #9890 [BEAM-8489] Filter: don't use 
callable's output type"
 > git rev-list --no-walk fd8299d290b70206789805386a57af61e9e7b628 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g :runners:flink:1.9:validatesRunner
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :runners:flink:1.9:copyResourcesOverrides NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :model:fn-execution:extractProto
> Task :model:job-management:extractProto
> Task :runners:core-java:processTestResources NO-SOURCE
> Task :model:job-management:processResources
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :runners:flink:1.9:copySourceOverrides
> Task :model:fn-execution:processResources
> Task :runners:flink:1.9:copyTestResourcesOverrides NO-SOURCE
> Task :runners:flink:1.9:processResources
> Task :sdks:java:build-tools:compileJava FROM-CACHE
> Task :sdks:java:build-tools:processResources
> Task :sdks:java:build-tools:classes
> Task :sdks:java:core:processResources
> Task :sdks:java:core:generateTestAvroProtocol NO-SOURCE
> Task 

Build failed in Jenkins: beam_BiqQueryIO_Streaming_Performance_Test_Java #204

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[ehudm] [BEAM-8489] Filter: don't use callable's output type

[rohde.samuel] change definition of has_unbounded_sources in PIN to a 
pre-determined

[rohde.samuel] typo

[rohde.samuel] lint

[rohde.samuel] remove BigQueryReader from list

[rohde.samuel] lint

[lostluck] [GoSDK] Handle data write errors & stream recreate

[rohde.samuel] remove external

[rohde.samuel] remove external

[github] Merge pull request #10248: [BEAM-7274] Add type conversions factory

[chamikara] Merge pull request #10262: [BEAM-8575] Revert validates runner test 
tag

[github] [BEAM-8835] Disable Flink Uber Jar by default. (#10270)

[lostluck] [GoSDK] Cancel stream context on dataWriter error

[github] [BEAM-8651] [BEAM-8874] Change pickle_lock to be a reentrant lock, and

[lostluck] [GoSDK] Don't panic if debug symbols are striped

[lcwik] [BEAM-8523] Regenerate Go protos with respect to changes in #9959


--
[...truncated 377.38 KB...]
"message" : "Invalid table ID 
\"bqio_write_10GB_java_5e100b4a-61bf-4f99-9d4b-3fa89b0daa9d\". Table IDs must 
be alphanumeric (plus underscores) and must be at most 1024 characters long. 
Also, Table decorators cannot be used.",
"reason" : "invalid"
  } ],
  "message" : "Invalid table ID 
\"bqio_write_10GB_java_5e100b4a-61bf-4f99-9d4b-3fa89b0daa9d\". Table IDs must 
be alphanumeric (plus underscores) and must be at most 1024 characters long. 
Also, Table decorators cannot be used.",
  "status" : "INVALID_ARGUMENT"
}
at 
com.google.api.client.googleapis.json.GoogleJsonResponseException.from(GoogleJsonResponseException.java:150)
at 
com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest.newExceptionOnError(AbstractGoogleJsonClientRequest.java:113)
at 
com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest.newExceptionOnError(AbstractGoogleJsonClientRequest.java:40)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest$1.interceptResponse(AbstractGoogleClientRequest.java:417)
at com.google.api.client.http.HttpRequest.execute(HttpRequest.java:1132)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:515)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:448)
at 
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.execute(AbstractGoogleClientRequest.java:565)
at 
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$DatasetServiceImpl.tryCreateTable(BigQueryServicesImpl.java:520)
at 
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$DatasetServiceImpl.createTable(BigQueryServicesImpl.java:505)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.tryCreateTable(CreateTables.java:205)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.getTableDestination(CreateTables.java:160)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.lambda$processElement$0(CreateTables.java:113)
at java.util.HashMap.computeIfAbsent(HashMap.java:1126)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn.processElement(CreateTables.java:112)
at 
org.apache.beam.sdk.io.gcp.bigquery.CreateTables$CreateTablesFn$DoFnInvoker.invokeProcessElement(Unknown
 Source)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:218)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:183)
at 
org.apache.beam.runners.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:335)
at 
org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
at 
org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:49)
at 
org.apache.beam.runners.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:280)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:252)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:74)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:576)
at 
org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:564)
 

Build failed in Jenkins: beam_PostCommit_Java11_ValidatesRunner_Direct #2625

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[ehudm] [BEAM-8489] Filter: don't use callable's output type

[lostluck] [GoSDK] Handle data write errors & stream recreate

[lostluck] [GoSDK] Cancel stream context on dataWriter error

[lostluck] [GoSDK] Don't panic if debug symbols are striped

[lcwik] [BEAM-8523] Regenerate Go protos with respect to changes in #9959


--
[...truncated 225 B...]
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-3 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision b7fad128a710213267683f3ecf3f890c7a3b82d9 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f b7fad128a710213267683f3ecf3f890c7a3b82d9
Commit message: "Merge pull request #9890 [BEAM-8489] Filter: don't use 
callable's output type"
 > git rev-list --no-walk fd8299d290b70206789805386a57af61e9e7b628 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 -Dorg.gradle.java.home=/usr/lib/jvm/java-8-openjdk-amd64 
:runners:direct-java:shadowJar :runners:direct-java:shadowTestJar
Starting a Gradle Daemon (subsequent builds will be faster)
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :runners:local-java:processResources NO-SOURCE
> Task :runners:direct-java:processResources NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :runners:core-java:processTestResources NO-SOURCE
> Task :runners:direct-java:processTestResources NO-SOURCE
> Task :model:job-management:extractProto
> Task :model:fn-execution:extractProto
> Task :model:job-management:processResources
> Task :model:fn-execution:processResources
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :sdks:java:core:processResources
> Task :sdks:java:core:generateTestAvroProtocol NO-SOURCE
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task :sdks:java:core:generateTestAvroJava
> Task :sdks:java:core:generateTestGrammarSource NO-SOURCE
> Task :sdks:java:core:processTestResources
> Task :model:pipeline:generateProto
> Task :model:pipeline:compileJava FROM-CACHE
> Task :model:pipeline:processResources
> Task :model:pipeline:classes
> Task :model:pipeline:jar
> Task 

Jenkins build is back to normal : beam_PostCommit_Go_VR_Spark #1705

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



beam_PostCommit_Py_VR_Dataflow - Build # 5218 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Py_VR_Dataflow (build 
#5218)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/5218/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

beam_PostCommit_Python36 - Build # 1135 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Python36 (build #1135)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Python36/1135/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Build failed in Jenkins: beam_PostCommit_Python_VR_Spark #1684

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
[...truncated 1.32 MB...]
19/12/04 04:58:28 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:33121
19/12/04 04:58:28 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/04 04:58:28 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/04 04:58:28 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575435506.04_1a77ed46-84e8-4661-8b1d-2474e24a29cf',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/04 04:58:28 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 'experiments': [u'beam_fn_api'], 
'environment_cache_millis': u'0', 'artifact_port': u'0', 'environment_type': 
u'PROCESS', 'sdk_location': u'container', 'job_name': 
u'test_windowing_1575435506.04', 'environment_config': u'{"command": 
"
 'expansion_port': u'0', 'sdk_worker_parallelism': u'1', 'job_endpoint': 
u'localhost:54053', 'job_port': u'0'}
19/12/04 04:58:28 INFO statecache.__init__: Creating state cache with size 0
19/12/04 04:58:28 INFO sdk_worker.__init__: Creating insecure control channel 
for localhost:35433.
19/12/04 04:58:28 INFO 
org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: Beam 
Fn Control client connected with id 262-1
19/12/04 04:58:28 INFO sdk_worker.__init__: Control channel established.
19/12/04 04:58:28 INFO sdk_worker.__init__: Initializing SDKHarness with 
unbounded number of workers.
19/12/04 04:58:28 INFO sdk_worker.create_state_handler: Creating insecure state 
channel for localhost:34081.
19/12/04 04:58:28 INFO sdk_worker.create_state_handler: State channel 
established.
19/12/04 04:58:28 INFO data_plane.create_data_channel: Creating client data 
channel for localhost:45463
19/12/04 04:58:28 INFO 
org.apache.beam.runners.fnexecution.data.GrpcDataService: Beam Fn Data client 
connected.
19/12/04 04:58:28 INFO 
org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory: Closing 
environment urn: "beam:env:process:v1"
payload: 
"\032\202\001

19/12/04 04:58:28 INFO sdk_worker.run: No more requests from control plane
19/12/04 04:58:28 INFO sdk_worker.run: SDK Harness waiting for in-flight 
requests to complete
19/12/04 04:58:28 INFO data_plane.close: Closing all cached grpc data channels.
19/12/04 04:58:28 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/04 04:58:28 INFO sdk_worker.close: Closing all cached gRPC state handlers.
19/12/04 04:58:28 INFO sdk_worker.run: Done consuming work.
19/12/04 04:58:28 INFO sdk_worker_main.main: Python sdk harness exiting.
19/12/04 04:58:28 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Logging client 
hanged up.
19/12/04 04:58:29 WARN org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: 
Hanged up for unknown endpoint.
19/12/04 04:58:29 INFO 
org.apache.beam.runners.fnexecution.artifact.AbstractArtifactRetrievalService: 
GetManifest for __no_artifacts_staged__
19/12/04 04:58:29 INFO 
org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Beam Fn Logging 
client connected.
19/12/04 04:58:29 INFO sdk_worker_main.main: Logging handler created.
19/12/04 04:58:29 INFO sdk_worker_main.start: Status HTTP server running at 
localhost:34309
19/12/04 04:58:29 INFO sdk_worker_main.main: semi_persistent_directory: /tmp
19/12/04 04:58:29 WARN sdk_worker_main._load_main_session: No session file 
found: /tmp/staged/pickled_main_session. Functions defined in __main__ 
(interactive session) may fail. 
19/12/04 04:58:29 WARN pipeline_options.get_all_options: Discarding unparseable 
args: 
[u'--app_name=test_windowing_1575435506.04_1a77ed46-84e8-4661-8b1d-2474e24a29cf',
 u'--job_server_timeout=60', u'--pipeline_type_check', 
u'--direct_runner_use_stacked_bundle', u'--spark_master=local', 
u'--options_id=30', u'--enable_spark_metric_sinks'] 
19/12/04 04:58:29 INFO sdk_worker_main.main: Python sdk harness started with 
pipeline_options: {'runner': u'None', 'experiments': [u'beam_fn_api'], 
'environment_cache_millis': u'0', 'artifact_port': u'0', 'environment_type': 
u'PROCESS', 'sdk_location': u'container', 'job_name': 
u'test_windowing_1575435506.04', 'environment_config': u'{"command": 
"
 

Build failed in Jenkins: beam_PreCommit_Python_pytest_Cron #119

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[ehudm] [BEAM-8489] Filter: don't use callable's output type

[kcweaver] [BEAM-8251] plumb worker_(region|zone) to Environment proto

[rohde.samuel] change definition of has_unbounded_sources in PIN to a 
pre-determined

[rohde.samuel] typo

[rohde.samuel] lint

[rohde.samuel] remove BigQueryReader from list

[rohde.samuel] lint

[kcweaver] Add null checks for worker region/zone options

[lostluck] [GoSDK] Handle data write errors & stream recreate

[rohde.samuel] remove external

[rohde.samuel] remove external

[github] Merge pull request #10248: [BEAM-7274] Add type conversions factory

[chamikara] Merge pull request #10262: [BEAM-8575] Revert validates runner test 
tag

[github] [BEAM-8835] Disable Flink Uber Jar by default. (#10270)

[lostluck] [GoSDK] Cancel stream context on dataWriter error

[github] [BEAM-8651] [BEAM-8874] Change pickle_lock to be a reentrant lock, and

[lostluck] [GoSDK] Don't panic if debug symbols are striped

[lcwik] [BEAM-8523] Regenerate Go protos with respect to changes in #9959


--
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-7 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 >  # 
 > timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision b7fad128a710213267683f3ecf3f890c7a3b82d9 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f b7fad128a710213267683f3ecf3f890c7a3b82d9
Commit message: "Merge pull request #9890 [BEAM-8489] Filter: don't use 
callable's output type"
 > git rev-list --no-walk c1e759c10a1a5650ba7cc07ff6676637aa17dff1 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g :pythonPreCommitPytest
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.

FAILURE: Build failed with an exception.

* What went wrong:
Could not determine the dependencies of task 
':sdks:python:test-suites:tox:py35:preCommitPy35Pytest'.
> Task with path 'lint' not found in project 
> ':sdks:python:test-suites:tox:py35'.

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See 
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 15s

Publishing build scan...
https://scans.gradle.com/s/c266h52pimw7g

Build step 'Invoke Gradle script' changed build 

Build failed in Jenkins: beam_sonarqube_report #1110

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[ehudm] [BEAM-8489] Filter: don't use callable's output type

[kcweaver] [BEAM-8251] plumb worker_(region|zone) to Environment proto

[rohde.samuel] change definition of has_unbounded_sources in PIN to a 
pre-determined

[rohde.samuel] typo

[rohde.samuel] lint

[rohde.samuel] remove BigQueryReader from list

[rohde.samuel] lint

[kcweaver] Add null checks for worker region/zone options

[lostluck] [GoSDK] Handle data write errors & stream recreate

[rohde.samuel] remove external

[rohde.samuel] remove external

[github] Merge pull request #10248: [BEAM-7274] Add type conversions factory

[chamikara] Merge pull request #10262: [BEAM-8575] Revert validates runner test 
tag

[github] [BEAM-8835] Disable Flink Uber Jar by default. (#10270)

[lostluck] [GoSDK] Cancel stream context on dataWriter error

[github] [BEAM-8651] [BEAM-8874] Change pickle_lock to be a reentrant lock, and

[lostluck] [GoSDK] Don't panic if debug symbols are striped

[lcwik] [BEAM-8523] Regenerate Go protos with respect to changes in #9959


--
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-8 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init  # 
 > timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision b7fad128a710213267683f3ecf3f890c7a3b82d9 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f b7fad128a710213267683f3ecf3f890c7a3b82d9
Commit message: "Merge pull request #9890 [BEAM-8489] Filter: don't use 
callable's output type"
 > git rev-list --no-walk c1e759c10a1a5650ba7cc07ff6676637aa17dff1 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
ERROR: SonarQube installation defined in this job (ASF Sonar Analysis) does not 
match any configured installation. Number of installations that can be 
configured: 0.
If you want to reassign jobs to a different SonarQube installation, check the 
documentation under https://redirect.sonarsource.com/plugins/jenkins.html

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



Jenkins build is back to normal : beam_PostCommit_Python2 #1132

2019-12-03 Thread Apache Jenkins Server
See 



-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org



beam_PostCommit_Python37 - Build # 1058 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Python37 (build #1058)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Python37/1058/ to view the 
results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Build failed in Jenkins: beam_SQLBigQueryIO_Batch_Performance_Test_Java #5

2019-12-03 Thread Apache Jenkins Server
See 


Changes:

[ehudm] [BEAM-8489] Filter: don't use callable's output type

[rohde.samuel] change definition of has_unbounded_sources in PIN to a 
pre-determined

[rohde.samuel] typo

[rohde.samuel] lint

[rohde.samuel] remove BigQueryReader from list

[rohde.samuel] lint

[lostluck] [GoSDK] Handle data write errors & stream recreate

[rohde.samuel] remove external

[rohde.samuel] remove external

[github] Merge pull request #10248: [BEAM-7274] Add type conversions factory

[chamikara] Merge pull request #10262: [BEAM-8575] Revert validates runner test 
tag

[github] [BEAM-8835] Disable Flink Uber Jar by default. (#10270)

[lostluck] [GoSDK] Cancel stream context on dataWriter error

[github] [BEAM-8651] [BEAM-8874] Change pickle_lock to be a reentrant lock, and

[lostluck] [GoSDK] Don't panic if debug symbols are striped

[lcwik] [BEAM-8523] Regenerate Go protos with respect to changes in #9959


--
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-4 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision b7fad128a710213267683f3ecf3f890c7a3b82d9 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f b7fad128a710213267683f3ecf3f890c7a3b82d9
Commit message: "Merge pull request #9890 [BEAM-8489] Filter: don't use 
callable's output type"
 > git rev-list --no-walk 2cc161967a0e5db6291f19296fd019cdcbb66e45 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g 
-Dorg.gradle.jvmargs=-Xmx4g --info 
-DintegrationTestPipelineOptions=["--project=apache-beam-testing","--tempLocation=gs://temp-storage-for-perf-tests/loadtests","--tempRoot=gs://temp-storage-for-perf-tests/loadtests","--metricsBigQueryDataset=beam_performance","--metricsBigQueryTable=sql_bqio_read_java_batch","--runner=DataflowRunner","--maxNumWorkers=5","--numWorkers=5","--autoscalingAlgorithm=NONE"]
 -DintegrationTestRunner=dataflow 
:sdks:java:extensions:sql:perf-tests:integrationTest --tests 
org.apache.beam.sdk.extensions.sql.meta.provider.bigquery.BigQueryIOPushDownIT
Initialized native services in: /home/jenkins/.gradle/native
Removing 0 daemon stop events from registry
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for 
details
Starting process 'Gradle build daemon'. Working directory: 
/home/jenkins/.gradle/daemon/5.2.1 Command: 
/usr/lib/jvm/java-8-openjdk-amd64/bin/java -Xmx4g -Dfile.encoding=UTF-8 
-Duser.country=US -Duser.language=en -Duser.variant -cp 
/home/jenkins/.gradle/wrapper/dists/gradle-5.2.1-all/bviwmvmbexq6idcscbicws5me/gradle-5.2.1/lib/gradle-launcher-5.2.1.jar
 org.gradle.launcher.daemon.bootstrap.GradleDaemon 5.2.1
Successfully started process 'Gradle build daemon'
An attempt to start the daemon took 0.769 secs.
The client will now receive all logging from the daemon (pid: 2593). The daemon 
log file: /home/jenkins/.gradle/daemon/5.2.1/daemon-2593.out.log
Starting build in new daemon [memory: 3.8 GB]
Closing daemon's stdin at end of input.
The daemon will no longer process any standard input.
Using 12 worker leases.
Starting Build
Using local directory build cache for build ':buildSrc' (location = 
/home/jenkins/.gradle/caches/build-cache-1, removeUnusedEntriesAfter = 7 days).

> Configure project :buildSrc
Evaluating project ':buildSrc' using build file 

beam_PostCommit_Java_PVR_Spark_Batch - Build # 1448 - Aborted

2019-12-03 Thread Apache Jenkins Server
The Apache Jenkins build system has built beam_PostCommit_Java_PVR_Spark_Batch 
(build #1448)

Status: Aborted

Check console output at 
https://builds.apache.org/job/beam_PostCommit_Java_PVR_Spark_Batch/1448/ to 
view the results.

-
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org

Build failed in Jenkins: beam_PostCommit_Java11_ValidatesRunner_Direct #2626

2019-12-03 Thread Apache Jenkins Server
See 


Changes:


--
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-4 (beam) in workspace 

No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init 
 > 
 >  # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # 
 > timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git 
 > +refs/heads/*:refs/remotes/origin/* 
 > +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision b7fad128a710213267683f3ecf3f890c7a3b82d9 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f b7fad128a710213267683f3ecf3f890c7a3b82d9
Commit message: "Merge pull request #9890 [BEAM-8489] Filter: don't use 
callable's output type"
 > git rev-list --no-walk b7fad128a710213267683f3ecf3f890c7a3b82d9 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ 

 -Dorg.gradle.java.home=/usr/lib/jvm/java-8-openjdk-amd64 
:runners:direct-java:shadowJar :runners:direct-java:shadowTestJar
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources 
> NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :runners:direct-java:processResources NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :runners:local-java:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :runners:core-java:processTestResources NO-SOURCE
> Task :runners:direct-java:processTestResources NO-SOURCE
> Task :model:job-management:extractProto
> Task :model:fn-execution:extractProto
> Task :model:job-management:processResources
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :model:fn-execution:processResources
> Task :sdks:java:core:processResources
> Task :sdks:java:core:generateTestAvroProtocol NO-SOURCE
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task :sdks:java:core:generateTestAvroJava
> Task :sdks:java:core:generateTestGrammarSource NO-SOURCE
> Task :sdks:java:core:processTestResources
> Task :model:pipeline:generateProto
> Task :model:pipeline:compileJava FROM-CACHE
> Task :model:pipeline:processResources
> Task :model:pipeline:classes
> Task :model:pipeline:jar
> Task :model:fn-execution:extractIncludeProto
> Task :model:job-management:extractIncludeProto
> Task :model:job-management:generateProto
> Task :model:fn-execution:generateProto
> Task :model:job-management:compileJava FROM-CACHE
> Task :model:job-management:classes
> Task :model:fn-execution:compileJava FROM-CACHE
> Task :model:fn-execution:classes
> Task :model:pipeline:shadowJar
> Task 

  1   2   >