This is an automated email from the ASF dual-hosted git repository.
zhaijia pushed a commit to branch branch-4.6
in repository https://gitbox.apache.org/repos/asf/bookkeeper.git
The following commit(s) were added to refs/heads/branch-4.6 by this push:
new 60197b8 ISSUE #734: bookkeeper-benchmark is broken after protobuf
files are moved to bookkeeper-proto
60197b8 is described below
commit 60197b82f7ab474405f453fcb74ccffe8a522feb
Author: Sijie Guo <[email protected]>
AuthorDate: Thu Nov 16 21:06:26 2017 +0800
ISSUE #734: bookkeeper-benchmark is broken after protobuf files are moved
to bookkeeper-proto
Descriptions of the changes in this PR:
Problem:
The issue was introduced after the protobuf files were moved to
bookkeeper-proto, so protobuf is no longer shaded correctly.
As a result, in bookkeeper-benchmark it conflicts with the protobuf version
introduced by hadoop.
Solution:
- update the bookkeeper-server pom.xml to shade protobuf correctly
- remove hadoop dependency from bookkeeper-benchmark.
Author: Sijie Guo <[email protected]>
Reviewers: Ivan Kelly <[email protected]>, Enrico Olivelli
<[email protected]>, Jia Zhai <None>
This closes #735 from sijie/protobuf_issue, closes #734
(cherry picked from commit 9b538aff7da5cba15ae59b70e0b0c9bd2464dca3)
Signed-off-by: Jia Zhai <[email protected]>
---
bookkeeper-benchmark/pom.xml | 49 --------
.../apache/bookkeeper/benchmark/MySqlClient.java | 138 ---------------------
.../apache/bookkeeper/benchmark/TestClient.java | 56 ---------
bookkeeper-server/pom.xml | 9 +-
pom.xml | 7 ++
5 files changed, 9 insertions(+), 250 deletions(-)
diff --git a/bookkeeper-benchmark/pom.xml b/bookkeeper-benchmark/pom.xml
index 35cf238..b9cb781 100644
--- a/bookkeeper-benchmark/pom.xml
+++ b/bookkeeper-benchmark/pom.xml
@@ -84,24 +84,6 @@
</build>
<dependencies>
<dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- <version>${slf4j.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.zookeeper</groupId>
- <artifactId>zookeeper</artifactId>
- <version>${zookeeper.version}</version>
- <type>jar</type>
- <scope>compile</scope>
- <exclusions>
- <exclusion>
- <groupId>net.java.dev.javacc</groupId>
- <artifactId>javacc</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<version>${zookeeper.version}</version>
@@ -127,17 +109,6 @@
</exclusions>
</dependency>
<dependency>
- <groupId>io.netty</groupId>
- <artifactId>netty-all</artifactId>
- <version>${netty.version}</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- <version>${guava.version}</version>
- </dependency>
- <dependency>
<groupId>org.apache.bookkeeper</groupId>
<artifactId>bookkeeper-server</artifactId>
<version>${project.parent.version}</version>
@@ -151,25 +122,5 @@
<scope>test</scope>
<type>test-jar</type>
</dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <version>0.23.1</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
- <version>0.23.1</version>
- <scope>compile</scope>
- <!-- commons-daemon has a malformed pom, which can cause the build to
fail in an
- environment which uses an artifactory cache. -->
- <exclusions>
- <exclusion>
- <groupId>commons-daemon</groupId>
- <artifactId>commons-daemon</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
</dependencies>
</project>
diff --git
a/bookkeeper-benchmark/src/main/java/org/apache/bookkeeper/benchmark/MySqlClient.java
b/bookkeeper-benchmark/src/main/java/org/apache/bookkeeper/benchmark/MySqlClient.java
deleted file mode 100644
index f3903f3..0000000
---
a/bookkeeper-benchmark/src/main/java/org/apache/bookkeeper/benchmark/MySqlClient.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bookkeeper.benchmark;
-
-import static com.google.common.base.Charsets.UTF_8;
-
-import java.io.FileOutputStream;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.HashMap;
-import org.apache.bookkeeper.client.BookKeeper;
-import org.apache.bookkeeper.client.LedgerHandle;
-import org.apache.zookeeper.KeeperException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * A mysql client performing writes and reads that used for benchmark
comparison with BookKeeper.
- */
-public class MySqlClient {
- static final Logger LOG = LoggerFactory.getLogger(MySqlClient.class);
-
- BookKeeper x;
- LedgerHandle lh;
- Integer entryId;
- HashMap<Integer, Integer> map;
-
- FileOutputStream fStream;
- FileOutputStream fStreamLocal;
- long start, lastId;
- Connection con;
- Statement stmt;
-
-
- public MySqlClient(String hostport, String user, String pass)
- throws ClassNotFoundException {
- entryId = 0;
- map = new HashMap<Integer, Integer>();
- Class.forName("com.mysql.jdbc.Driver");
- // database is named "bookkeeper"
- String url = "jdbc:mysql://" + hostport + "/bookkeeper";
- try {
- con = DriverManager.getConnection(url, user, pass);
- stmt = con.createStatement();
- // drop table and recreate it
- stmt.execute("DROP TABLE IF EXISTS data;");
- stmt.execute("create table data(transaction_id bigint PRIMARY KEY
AUTO_INCREMENT, content TEXT);");
- LOG.info("Database initialization terminated");
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
-
- public void closeHandle() throws KeeperException, InterruptedException,
SQLException {
- con.close();
- }
-
- /**
- * First parameter is an integer defining the length of the message.
- * Second parameter is the number of writes.
- * Third parameter is host:port.
- * Fourth parameter is username.
- * Fifth parameter is password.
- * @param args
- * @throws ClassNotFoundException
- * @throws SQLException
- */
- public static void main(String[] args) throws ClassNotFoundException,
SQLException {
- int lenght = Integer.parseInt(args[1]);
- StringBuilder sb = new StringBuilder();
- while (lenght-- > 0) {
- sb.append('a');
- }
- try {
- MySqlClient c = new MySqlClient(args[2], args[3], args[4]);
- c.writeSameEntryBatch(sb.toString().getBytes(UTF_8),
Integer.parseInt(args[0]));
- c.writeSameEntry(sb.toString().getBytes(UTF_8),
Integer.parseInt(args[0]));
- c.closeHandle();
- } catch (NumberFormatException e) {
- e.printStackTrace();
- } catch (InterruptedException e) {
- e.printStackTrace();
- } catch (KeeperException e) {
- e.printStackTrace();
- }
-
- }
-
- /**
- * Adds data entry to the DB.
- * @param data the entry to be written, given as a byte array
- * @param times the number of times the entry should be written on the DB
- */
- void writeSameEntryBatch(byte[] data, int times) throws
InterruptedException, SQLException {
- start = System.currentTimeMillis();
- int count = times;
- String content = new String(data, UTF_8);
- System.out.println("Data: " + content + ", " + data.length);
- while (count-- > 0) {
- stmt.addBatch("insert into data(content) values(\"" + content +
"\");");
- }
- LOG.info("Finished writing batch SQL command in ms: " +
(System.currentTimeMillis() - start));
- start = System.currentTimeMillis();
- stmt.executeBatch();
- System.out.println("Finished " + times + " writes in ms: " +
(System.currentTimeMillis() - start));
- LOG.info("Ended computation");
- }
-
- void writeSameEntry(byte[] data, int times) throws InterruptedException,
SQLException {
- start = System.currentTimeMillis();
- int count = times;
- String content = new String(data, UTF_8);
- System.out.println("Data: " + content + ", " + data.length);
- while (count-- > 0) {
- stmt.executeUpdate("insert into data(content) values(\"" + content
+ "\");");
- }
- System.out.println("Finished " + times + " writes in ms: " +
(System.currentTimeMillis() - start));
- LOG.info("Ended computation");
- }
-
-}
diff --git
a/bookkeeper-benchmark/src/main/java/org/apache/bookkeeper/benchmark/TestClient.java
b/bookkeeper-benchmark/src/main/java/org/apache/bookkeeper/benchmark/TestClient.java
index 6c1ee05..49f98a7 100644
---
a/bookkeeper-benchmark/src/main/java/org/apache/bookkeeper/benchmark/TestClient.java
+++
b/bookkeeper-benchmark/src/main/java/org/apache/bookkeeper/benchmark/TestClient.java
@@ -48,10 +48,6 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -140,19 +136,6 @@ public class TestClient {
for (int i = 0; i < numFiles; i++) {
clients.add(new BKClient(handles, data, runfor,
cmd.hasOption("sync")));
}
- } else if (target.equals("hdfs")) {
- FileSystem fs = FileSystem.get(new Configuration());
- LOG.info("Default replication for HDFS: {}",
fs.getDefaultReplication());
-
- List<FSDataOutputStream> streams = new
ArrayList<FSDataOutputStream>();
- for (int i = 0; i < numFiles; i++) {
- String path = cmd.getOptionValue("path", "/foobar");
- streams.add(fs.create(new Path(path + runid + "_" + i)));
- }
-
- for (int i = 0; i < numThreads; i++) {
- clients.add(new HDFSClient(streams, data, runfor));
- }
} else if (target.equals("fs")) {
List<FileOutputStream> streams = new
ArrayList<FileOutputStream>();
for (int i = 0; i < numFiles; i++) {
@@ -211,45 +194,6 @@ public class TestClient {
timeouter.cancel();
}
- static class HDFSClient implements Callable<Long> {
- final List<FSDataOutputStream> streams;
- final byte[] data;
- final long time;
- final Random r;
-
- HDFSClient(List<FSDataOutputStream> streams, byte[] data, long time) {
- this.streams = streams;
- this.data = data;
- this.time = time;
- this.r = new Random(System.identityHashCode(this));
- }
-
- public Long call() {
- try {
- long count = 0;
- long start = System.currentTimeMillis();
- long stopat = start + time;
- while (System.currentTimeMillis() < stopat) {
- FSDataOutputStream stream =
streams.get(r.nextInt(streams.size()));
- synchronized (stream) {
- stream.write(data);
- stream.flush();
- stream.hflush();
- }
- count++;
- }
-
- long time = (System.currentTimeMillis() - start);
- LOG.info("Worker finished processing writes (ms): {} TPT: {}
op/s", time,
- count / ((double) time / 1000));
- return count;
- } catch (IOException ioe) {
- LOG.error("Exception in worker thread", ioe);
- return 0L;
- }
- }
- }
-
static class FileClient implements Callable<Long> {
final List<FileOutputStream> streams;
final byte[] data;
diff --git a/bookkeeper-server/pom.xml b/bookkeeper-server/pom.xml
index 2adc3ee..6243844 100644
--- a/bookkeeper-server/pom.xml
+++ b/bookkeeper-server/pom.xml
@@ -180,13 +180,6 @@
</dependency>
</dependencies>
<build>
- <extensions>
- <extension>
- <groupId>kr.motd.maven</groupId>
- <artifactId>os-maven-plugin</artifactId>
- <version>1.4.1.Final</version>
- </extension>
- </extensions>
<plugins>
<plugin>
<!-- for mini-kdc -->
@@ -215,8 +208,10 @@
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>shaded</shadedClassifierName>
<minimizeJar>true</minimizeJar>
+ <shadeTestJar>true</shadeTestJar>
<artifactSet>
<includes>
+ <include>org.apache.bookkeeper:bookkeeper-proto</include>
<include>com.google.protobuf:protobuf-java</include>
<include>com.google.guava:guava</include>
</includes>
diff --git a/pom.xml b/pom.xml
index 2e1cc95..1614cff 100644
--- a/pom.xml
+++ b/pom.xml
@@ -187,6 +187,13 @@
</dependencies>
<build>
+ <extensions>
+ <extension>
+ <groupId>kr.motd.maven</groupId>
+ <artifactId>os-maven-plugin</artifactId>
+ <version>1.4.1.Final</version>
+ </extension>
+ </extensions>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
--
To stop receiving notification emails like this one, please contact
['"[email protected]" <[email protected]>'].