This is an automated email from the ASF dual-hosted git repository.

otto pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/metron-bro-plugin-kafka.git


The following commit(s) were added to refs/heads/master by this push:
     new 853285c  METRON-2353 Plugin e2e tests should support multiple brokers (JonZeolla via ottobackwards) closes apache/metron-bro-plugin-kafka#45
853285c is described below

commit 853285cca8bcac6927dc983151fb03a1a2062cc9
Author: JonZeolla <zeo...@gmail.com>
AuthorDate: Mon May 18 18:28:17 2020 -0400

    METRON-2353 Plugin e2e tests should support multiple brokers (JonZeolla via ottobackwards) closes apache/metron-bro-plugin-kafka#45
---
 .gitignore                                         | 136 ++++++++++++++++++++-
 README.md                                          |  16 +--
 docker/README.md                                   |  29 +++--
 docker/containers/kafka/Dockerfile                 |  11 +-
 docker/containers/zeek/Dockerfile                  |  37 +++---
 docker/containers/zeek/requirements.txt            |   6 +-
 docker/containers/zookeeper/Dockerfile             |   7 +-
 docker/docker-compose.yml                          |  30 +++--
 docker/finish_end_to_end.sh                        |   1 +
 docker/in_docker_scripts/build_plugin.sh           |   2 +-
 docker/in_docker_scripts/configure_plugin.sh       |   2 +-
 docker/in_docker_scripts/process_data_file.sh      |   8 +-
 docker/remove_timeout_message.sh                   |  51 --------
 docker/run_end_to_end.sh                           | 110 ++++++++---------
 docker/scripts/docker_execute_build_plugin.sh      |   5 +-
 docker/scripts/docker_execute_configure_plugin.sh  |   3 -
 .../docker_execute_configure_zeek_plugin.sh        |  94 --------------
 .../docker_execute_create_topic_in_kafka.sh        |  24 ++--
 docker/scripts/docker_execute_process_data_file.sh |   5 +-
 docker/scripts/docker_run_consume_kafka.sh         |  24 ++--
 docker/scripts/docker_run_get_offset_kafka.sh      |   8 +-
 docker/scripts/split_kafka_output_by_log.sh        |  21 ++--
 src/KafkaWriter.cc                                 |   2 +-
 src/KafkaWriter.h                                  |   1 -
 .../output                                         |   2 +-
 .../kafka/resolved-topic-override-and-config.zeek  |   4 +-
 26 files changed, 323 insertions(+), 316 deletions(-)

diff --git a/.gitignore b/.gitignore
index e08b80e..2fd1f45 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,12 +1,22 @@
-#ide stuff
+# ide stuff
 .idea
 *.iml
 *.iws
-.DS_Store
 /cmake-build-*
 .state
 build
 
+# Log files
+*.log
+
+
+# Created by https://www.gitignore.io/api/vim,c++,emacs,git,macos
+# Edit at https://www.gitignore.io/?templates=vim,c++,emacs,git,macos
+
+### C++ ###
+# Prerequisites
+*.d
+
 # Compiled Object files
 *.slo
 *.lo
@@ -24,6 +34,7 @@ build
 
 # Fortran module files
 *.mod
+*.smod
 
 # Compiled Static libraries
 *.lai
@@ -36,5 +47,122 @@ build
 *.out
 *.app
 
-# Log files
-*.log
+### Emacs ###
+# -*- mode: gitignore; -*-
+*~
+\#*\#
+/.emacs.desktop
+/.emacs.desktop.lock
+*.elc
+auto-save-list
+tramp
+.\#*
+
+# Org-mode
+.org-id-locations
+*_archive
+
+# flymake-mode
+*_flymake.*
+
+# eshell files
+/eshell/history
+/eshell/lastdir
+
+# elpa packages
+/elpa/
+
+# reftex files
+*.rel
+
+# AUCTeX auto folder
+/auto/
+
+# cask packages
+.cask/
+dist/
+
+# Flycheck
+flycheck_*.el
+
+# server auth directory
+/server/
+
+# projectiles files
+.projectile
+
+# directory configuration
+.dir-locals.el
+
+# network security
+/network-security.data
+
+
+### Git ###
+# Created by git for backups. To disable backups in Git:
+# $ git config --global mergetool.keepBackup false
+*.orig
+
+# Created by git when using merge tools for conflicts
+*.BACKUP.*
+*.BASE.*
+*.LOCAL.*
+*.REMOTE.*
+*_BACKUP_*.txt
+*_BASE_*.txt
+*_LOCAL_*.txt
+*_REMOTE_*.txt
+
+### macOS ###
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+### Vim ###
+# Swap
+[._]*.s[a-v][a-z]
+[._]*.sw[a-p]
+[._]s[a-rt-v][a-z]
+[._]ss[a-gi-z]
+[._]sw[a-p]
+
+# Session
+Session.vim
+Sessionx.vim
+
+# Temporary
+.netrwhist
+
+# Auto-generated tag files
+tags
+
+# Persistent undo
+[._]*.un~
+
+# Coc configuration directory
+.vim
+
+# End of https://www.gitignore.io/api/vim,c++,emacs,git,macos
diff --git a/README.md b/README.md
index 054ca04..b4aa98d 100644
--- a/README.md
+++ b/README.md
@@ -16,13 +16,13 @@ This software is a part of the [Apache Metron](https://metron.apache.org/) proje
 
 `zkg` is the preferred mechanism for installing this plugin, as it will dynamically retrieve, build, test, and load the plugin.  Note, that you will still need to [activate](#activation) and configure the plugin after your installation.
 
-1. Install [librdkafka](https://github.com/edenhill/librdkafka), a native client library for Kafka.  This plugin has been tested against librdkafka v1.4.2-RC3.
+1. Install [librdkafka](https://github.com/edenhill/librdkafka), a native client library for Kafka.  This plugin has been tested against librdkafka v1.4.2.
 
     In order to use this plugin within a kerberized Kafka environment, you will also need `libsasl2` installed and will need to pass `--enable-sasl` to the `configure` script.
 
     ```
-    $ curl -L https://github.com/edenhill/librdkafka/archive/v1.4.2-RC3.tar.gz | tar xvz
-    $ cd librdkafka-1.4.2-RC3/
+    $ curl -L https://github.com/edenhill/librdkafka/archive/v1.4.2.tar.gz | tar xvz
+    $ cd librdkafka-1.4.2/
     $ ./configure --enable-sasl
     $ make
     $ sudo make install
@@ -40,7 +40,7 @@ This software is a part of the [Apache Metron](https://metron.apache.org/) proje
     Verify the following REQUIRED external dependencies:
     (Ensure their installation on all relevant systems before proceeding):
       from zeek/apache/metron-bro-plugin-kafka (master):
-        librdkafka ~1.4.2-RC3
+        librdkafka ~1.4.2
 
     Proceed? [Y/n]
     zeek/apache/metron-bro-plugin-kafka asks for LIBRDKAFKA_ROOT (Path to librdkafka installation tree) ? [/usr/local/lib]
@@ -67,13 +67,13 @@ Manually installing the plugin should *only* occur in situations where installin
 
 These instructions could also be helpful if you were interested in distributing this as a package (such as a deb or rpm).
 
-1. Install [librdkafka](https://github.com/edenhill/librdkafka), a native client library for Kafka.  This plugin has been tested against librdkafka v1.4.2-RC3.
+1. Install [librdkafka](https://github.com/edenhill/librdkafka), a native client library for Kafka.  This plugin has been tested against librdkafka v1.4.2.
 
     In order to use this plugin within a kerberized Kafka environment, you will also need `libsasl2` installed and will need to pass `--enable-sasl` to the `configure` script.
 
     ```
-    $ curl -L https://github.com/edenhill/librdkafka/archive/v1.4.2-RC3.tar.gz | tar xvz
-    $ cd librdkafka-1.4.2-RC3/
+    $ curl -L https://github.com/edenhill/librdkafka/archive/v1.4.2.tar.gz | tar xvz
+    $ cd librdkafka-1.4.2/
     $ ./configure --enable-sasl
     $ make
     $ sudo make install
@@ -306,7 +306,7 @@ redef Kafka::topic_name = "zeek";
 The global configuration settings for Kafka.  These values are passed through
 directly to librdkafka.  Any valid librdkafka settings can be defined in this
 table.  The full set of valid librdkafka settings are available
-[here](https://github.com/edenhill/librdkafka/blob/v1.4.2-RC3/CONFIGURATION.md).
+[here](https://github.com/edenhill/librdkafka/blob/v1.4.2/CONFIGURATION.md).
 
 ```
 redef Kafka::kafka_conf = table(
diff --git a/docker/README.md b/docker/README.md
index 4155d6b..3c9b89c 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -46,13 +46,13 @@ testing scripts to be added to a pull request, and subsequently to a test suite.
 #### Scripts that execute _in_ the docker container
 
 ```bash
-├── build_zeek_plugin.sh
-├── configure_zeek_plugin.sh
+├── build_plugin.sh
+├── configure_plugin.sh
 ├── process_data_file.sh
 ```
 
-- `build_zeek_plugin.sh`: Runs `zeek-pkg` to build and install the provided version of the plugin.
-- `configure_zeek_plugin.sh`: Configures the plugin for the kafka container, and routes all traffic types.
+- `build_plugin.sh`: Runs `zkg` to build and install the provided version of the plugin.
+- `configure_plugin.sh`: Configures the plugin for the kafka container, and routes all traffic types.
   ###### Parameters
   ```bash
  --kafka-topic                  [OPTIONAL] The kafka topic to configure. Default: zeek"
@@ -64,8 +64,8 @@ testing scripts to be added to a pull request, and subsequently to a test suite.
 
 ```bash
 ├── analyze_results.sh
-├── docker_execute_build_zeek_plugin.sh
-├── docker_execute_configure_zeek_plugin.sh
+├── docker_execute_build_plugin.sh
+├── docker_execute_configure_plugin.sh
 ├── docker_execute_create_topic_in_kafka.sh
 ├── docker_execute_process_data_file.sh
 ├── docker_execute_shell.sh
@@ -81,12 +81,12 @@ testing scripts to be added to a pull request, and subsequently to a test suite.
   ```bash
   --test-directory               [REQUIRED] The directory for the tests
   ```
-- `docker_execute_build_zeek_plugin.sh`: Executes `build_zeek_plugin.sh` in the zeek container
+- `docker_execute_build_plugin.sh`: Executes `build_plugin.sh` in the zeek container
   ###### Parameters
   ```bash
   --container-name              [OPTIONAL] The Docker container name. Default: metron-bro-plugin-kafka_zeek_1
   ```
-- `docker_execute_configure_zeek_plugin.sh`: Executes `configure_zeek_plugin.sh` in the zeek container
+- `docker_execute_configure_plugin.sh`: Executes `configure_plugin.sh` in the zeek container
   ###### Parameters
   ```bash
  --container-name               [OPTIONAL] The Docker container name. Default: metron-bro-plugin-kafka_zeek_1
@@ -94,8 +94,9 @@ testing scripts to be added to a pull request, and subsequently to a test suite.
 - `docker_execute_create_topic_in_kafka.sh`: Creates the specified kafka topic in the kafka container
   ###### Parameters
   ```bash
-  --container-name               [OPTIONAL] The Docker container name. Default: metron-bro-plugin-kafka_kafka_1
+  --container-name               [OPTIONAL] The Docker container name. Default: metron-bro-plugin-kafka_kafka-1_1
   --kafka-topic                  [OPTIONAL] The kafka topic to create. Default: zeek
+  --partitions                   [OPTIONAL] The number of kafka partitions to create. Default: 2
   ```
 - `docker_execute_process_data_file.sh`: Executes `process_data_file.sh` in the zeek container
   ###### Parameters
@@ -107,11 +108,12 @@ testing scripts to be added to a pull request, and subsequently to a test suite.
   ```bash
  --container-name               [OPTIONAL] The Docker container name. Default: metron-bro-plugin-kafka_zeek_1
   ```
-- `docker_run_consume_kafka.sh`: Runs an instance of the kafka container, with the console consumer `kafka-console-consumer.sh --topic $KAFKA_TOPIC --offset $OFFSET --partition 0 --bootstrap-server kafka:9092`
+- `docker_run_consume_kafka.sh`: Runs an instance of the kafka container, with the console consumer `kafka-console-consumer.sh --topic $KAFKA_TOPIC --offset $OFFSET --partition $PARTITION --bootstrap-server kafka-1:9092`
   ###### Parameters
   ```bash
  --network-name                 [OPTIONAL] The Docker network name. Default: metron-bro-plugin-kafka_default
-  --offset                       [OPTIONAL] The kafka offset. Default: 0
+  --offset                       [OPTIONAL] The kafka offset to read from. Default: 0
+  --partition                    [OPTIONAL] The kafka partition to read from. Default: 0
  --kafka-topic                  [OPTIONAL] The kafka topic to consume from. Default: zeek
   ```
 - `docker_run_get_offset_kafka.sh`: Runs an instance of the kafka container and gets the current offset for the specified topic
@@ -151,7 +153,7 @@ testing scripts to be added to a pull request, and subsequently to a test suite.
 This script does the following:
 
 1. Runs docker compose
-1. Creates the specified topic
+1. Creates the specified topic with the specified number of partitions
 1. Downloads sample PCAP data
 1. Runs the zeek container in the background
 1. Builds the zeek plugin
@@ -218,8 +220,9 @@ Other scripts may then be used to do your testing, for example running:
 --no-pcaps                      [OPTIONAL] Do not run pcaps.
 --data-path                     [OPTIONAL] The pcap data path. Default: ./data
 --kafka-topic                   [OPTIONAL] The kafka topic name to use. Default: zeek
+--partitions                    [OPTIONAL] The number of kafka partitions to create. Default: 2
 --plugin-version                [OPTIONAL] The plugin version. Default: the current branch name
 ```
 
-> NOTE: The provided `--plugin-version` is passed to the [`zeek-pkg install`](https://docs.zeek.org/projects/package-manager/en/stable/zeek-pkg.html#install-command) command within the container, which allows you to specify a version tag, branch name, or commit hash.  However, that tag, branch, or commit *must* be available in the currently checked out plugin repository.
+> NOTE: The provided `--plugin-version` is passed to the [`zkg install`](https://docs.zeek.org/projects/package-manager/en/stable/zeek-pkg.html#install-command) command within the container, which allows you to specify a version tag, branch name, or commit hash.  However, that tag, branch, or commit *must* be available in the currently checked out plugin repository.
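
For example, a full run against a two-partition topic might look like the following (run from the docker/ directory; the topic name, partition count, and plugin version shown are illustrative values, not defaults mandated by the scripts):

    ./run_end_to_end.sh --kafka-topic=zeek --partitions=2 --plugin-version=master

    # tear the environment down once you are finished working with the containers
    ./finish_end_to_end.sh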
 
diff --git a/docker/containers/kafka/Dockerfile 
b/docker/containers/kafka/Dockerfile
index 3755d35..4c0af8f 100644
--- a/docker/containers/kafka/Dockerfile
+++ b/docker/containers/kafka/Dockerfile
@@ -14,6 +14,11 @@
 #  See the License for the specific language governing permissions and
 #  limitations under the License.
 #
-FROM ches/kafka:0.10.2.1
-HEALTHCHECK --interval=5s --timeout=10s --start-period=2s --retries=2 \
-  CMD JMX_PORT= /kafka/bin/kafka-configs.sh --describe --zookeeper 
zookeeper:2181 --entity-type brokers || exit 1
+ARG FROM_IMAGE="wurstmeister/kafka"
+ARG FROM_IMAGE_TAG="2.12-2.5.0"
+
+FROM "${FROM_IMAGE}":"${FROM_IMAGE_TAG}"
+
+HEALTHCHECK --interval=5s --timeout=10s --start-period=2s --retries=3 \
+  CMD JMX_PORT= /opt/kafka/bin/kafka-configs.sh --describe --zookeeper 
zookeeper:2181 --entity-type brokers || exit 1
+
diff --git a/docker/containers/zeek/Dockerfile 
b/docker/containers/zeek/Dockerfile
index 51014e4..dba31d7 100644
--- a/docker/containers/zeek/Dockerfile
+++ b/docker/containers/zeek/Dockerfile
@@ -14,17 +14,24 @@
 #  See the License for the specific language governing permissions and
 #  limitations under the License.
 #
-FROM centos:8
+ARG FROM_IMAGE="centos"
+ARG FROM_IMAGE_TAG="8"
+
+FROM "${FROM_IMAGE}":"${FROM_IMAGE_TAG}"
+
 ARG ZEEK_VERSION
 ARG LIBRDKAFKA_VERSION
 
-# install powertools for libpcap-devel
-RUN dnf install -y 'dnf-command(config-manager)' && \
-    yum config-manager --set-enabled PowerTools
-
 # install epel for screen
 RUN dnf install -y epel-release
 
+# copy in the .screenrc
+COPY .screenrc /root
+
+# install powertools for libpcap-devel
+RUN dnf install -y 'dnf-command(config-manager)' && \
+    dnf config-manager --set-enabled PowerTools
+
 # install prereqs then clean dnf cache
 RUN dnf -y update && \
    dnf -y install cmake make gcc gcc-c++ \
@@ -35,18 +42,15 @@ RUN dnf -y update && \
    git jq screen tree vim && \
    dnf -y clean all
 
-# copy in the .screenrc
-COPY .screenrc /root
-
 # install zeek
 WORKDIR /root
 RUN git clone https://github.com/zeek/zeek
 WORKDIR zeek/
-RUN git checkout "v${ZEEK_VERSION}"
-RUN git submodule update --init --recursive
-RUN ./configure
-RUN make
-RUN make install
+RUN git checkout "v${ZEEK_VERSION}" && \
+  git submodule update --init --recursive && \
+  ./configure && \
+  make && \
+  make install
 ENV PATH="${PATH}:/usr/local/zeek/bin"
 ENV PATH="${PATH}:/usr/bin"
 
@@ -63,6 +67,7 @@ RUN dnf -y install python3-pip && \
 WORKDIR /root
 RUN curl -L "https://github.com/edenhill/librdkafka/archive/v${LIBRDKAFKA_VERSION}.tar.gz" | tar xvz
 WORKDIR "librdkafka-${LIBRDKAFKA_VERSION}/"
-RUN ./configure --enable-sasl
-RUN make
-RUN make install
+RUN ./configure --enable-sasl && \
+  make && \
+  make install
+
diff --git a/docker/containers/zeek/requirements.txt 
b/docker/containers/zeek/requirements.txt
index 21d0054..0dc05d7 100644
--- a/docker/containers/zeek/requirements.txt
+++ b/docker/containers/zeek/requirements.txt
@@ -1,7 +1,7 @@
 btest==0.61
 configparser==5.0.0
-gitdb==4.0.4
-GitPython==3.1.1
+gitdb==4.0.5
+GitPython==3.1.2
 semantic-version==2.8.5
-smmap==3.0.2
+smmap==3.0.4
 zkg==2.1.2
diff --git a/docker/containers/zookeeper/Dockerfile 
b/docker/containers/zookeeper/Dockerfile
index 71af679..4fcec72 100644
--- a/docker/containers/zookeeper/Dockerfile
+++ b/docker/containers/zookeeper/Dockerfile
@@ -14,6 +14,11 @@
 #  See the License for the specific language governing permissions and
 #  limitations under the License.
 #
-FROM zookeeper:3.4
+ARG FROM_IMAGE="zookeeper"
+ARG FROM_IMAGE_TAG="3.4"
+
+FROM "${FROM_IMAGE}":"${FROM_IMAGE_TAG}"
+
 HEALTHCHECK --interval=2s --timeout=1s --start-period=.5s --retries=4 \
   CMD echo ruok | nc localhost 2181 && echo stat | nc localhost 2181 | grep 
Mode || exit 1
+
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 8e54926..0579887 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -3,29 +3,41 @@ services:
   zookeeper:
     build: containers/zookeeper
     image: metron-bro-plugin-kafka_zookeeper:latest
-    ports:
-      - "2181:2181"
-  kafka:
+  kafka-1:
     build: containers/kafka
     image: metron-bro-plugin-kafka_kafka:latest
     depends_on:
       zookeeper:
         condition: service_healthy
     environment:
-      - ZOOKEEPER_IP=zookeeper
-    ports:
-      - "9092:9092"
+      - KAFKA_BROKER_ID=1
+      - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
+      - KAFKA_LISTENERS=PLAINTEXT://kafka-1:9092
+      - KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://kafka-1:9092
+  kafka-2:
+    build: containers/kafka
+    image: metron-bro-plugin-kafka_kafka:latest
+    depends_on:
+      zookeeper:
+        condition: service_healthy
+    environment:
+      - KAFKA_BROKER_ID=2
+      - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
+      - KAFKA_LISTENERS=PLAINTEXT://kafka-2:9092
+      - KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://kafka-2:9092
   zeek:
     build:
       context: containers/zeek
       args:
-        ZEEK_VERSION: "3.1.2"
-        LIBRDKAFKA_VERSION: "1.4.2-RC3"
+        ZEEK_VERSION: "3.1.3"
+        LIBRDKAFKA_VERSION: "1.4.2"
     image: metron-bro-plugin-kafka_zeek:latest
     depends_on:
       zookeeper:
         condition: service_healthy
-      kafka:
+      kafka-1:
+        condition: service_healthy
+      kafka-2:
         condition: service_healthy
     volumes:
       - "${DATA_PATH}:/root/data"
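
With the compose file above, both brokers register themselves in zookeeper. A quick sanity check, assuming the default container name used by the helper scripts and the /opt/kafka install path referenced elsewhere in this commit:

    docker exec metron-bro-plugin-kafka_kafka-1_1 bash -c \
      "JMX_PORT= /opt/kafka/bin/zookeeper-shell.sh zookeeper:2181 ls /brokers/ids"
    # the last line of output should list both broker ids, e.g. [1, 2]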
diff --git a/docker/finish_end_to_end.sh b/docker/finish_end_to_end.sh
index bb753e4..5b4f45d 100755
--- a/docker/finish_end_to_end.sh
+++ b/docker/finish_end_to_end.sh
@@ -32,3 +32,4 @@ PROJECT_NAME="metron-bro-plugin-kafka"
 
 # Stop docker compose
 COMPOSE_PROJECT_NAME="${PROJECT_NAME}" docker-compose down
+
diff --git a/docker/in_docker_scripts/build_plugin.sh 
b/docker/in_docker_scripts/build_plugin.sh
index b6cf9f7..691b20c 100755
--- a/docker/in_docker_scripts/build_plugin.sh
+++ b/docker/in_docker_scripts/build_plugin.sh
@@ -97,7 +97,7 @@ cd /root || exit 1
 
 echo "==================================================="
 
-zkg -vvv test code
+zkg -vvv test code --version "${PLUGIN_VERSION}"
 rc=$?; if [[ ${rc} != 0 ]]; then
   echo "ERROR running zkg test ${rc}"
   printfiles
diff --git a/docker/in_docker_scripts/configure_plugin.sh 
b/docker/in_docker_scripts/configure_plugin.sh
index 1c79f14..c4479db 100755
--- a/docker/in_docker_scripts/configure_plugin.sh
+++ b/docker/in_docker_scripts/configure_plugin.sh
@@ -73,7 +73,7 @@ echo "Configuring kafka plugin"
   echo "redef Kafka::logs_to_send = set(HTTP::LOG, DNS::LOG, Conn::LOG, 
DPD::LOG, FTP::LOG, Files::LOG, Known::CERTS_LOG, SMTP::LOG, SSL::LOG, 
Weird::LOG, Notice::LOG, DHCP::LOG, SSH::LOG, Software::LOG, RADIUS::LOG, 
X509::LOG, RFB::LOG, Stats::LOG, CaptureLoss::LOG, SIP::LOG);"
   echo "redef Kafka::topic_name = \"${KAFKA_TOPIC}\";"
   echo "redef Kafka::tag_json = T;"
-  echo "redef Kafka::kafka_conf = table([\"metadata.broker.list\"] = 
\"kafka:9092\");"
+  echo "redef Kafka::kafka_conf = table([\"metadata.broker.list\"] = 
\"kafka-1:9092,kafka-2:9092\");"
   echo "redef Kafka::logs_to_exclude = set(Conn::LOG, DHCP::LOG);"
   echo "redef Known::cert_tracking = ALL_HOSTS;"
   echo "redef Software::asset_tracking = ALL_HOSTS;"
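
The redef lines echoed above presumably end up in Zeek's local.zeek, the site script that process_data_file.sh later runs. A rough stand-alone equivalent of the resulting multi-broker configuration (the @load name and site path here are assumptions for illustration, not taken from this diff):

    cat >> /usr/local/zeek/share/zeek/site/local.zeek <<'EOF'
    @load Apache/Kafka
    redef Kafka::topic_name = "zeek";
    redef Kafka::tag_json = T;
    redef Kafka::kafka_conf = table(["metadata.broker.list"] = "kafka-1:9092,kafka-2:9092");
    EOF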
diff --git a/docker/in_docker_scripts/process_data_file.sh 
b/docker/in_docker_scripts/process_data_file.sh
index 584f0a4..13fb4e6 100755
--- a/docker/in_docker_scripts/process_data_file.sh
+++ b/docker/in_docker_scripts/process_data_file.sh
@@ -67,15 +67,13 @@ done
 echo "PCAP_FILE_NAME = ${PCAP_FILE_NAME}"
 echo "OUTPUT_DIRECTORY_NAME = ${OUTPUT_DIRECTORY_NAME}"
 
-cd /root || exit 1
 echo "================================"
 if [ ! -d /root/data ]; then
-  echo "DATA_PATH has not been set and mapped"
+  echo "DATA_PATH is not available"
   exit 1
 fi
+
 cd /root/test_output/"${OUTPUT_DIRECTORY_NAME}" || exit 1
 find /root/data -type f -name "${PCAP_FILE_NAME}" -print0 | xargs -0 zeek 
/usr/local/zeek/share/zeek/site/local.zeek -C -r
-rc=$?; if [[ ${rc} != 0 ]]; then
-  exit ${rc}
-fi
 echo "done with ${PCAP_FILE_NAME}"
+
diff --git a/docker/remove_timeout_message.sh b/docker/remove_timeout_message.sh
deleted file mode 100755
index dc34f2c..0000000
--- a/docker/remove_timeout_message.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#! /usr/bin/env bash
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-#
-# Remove the exception text from piped input when we have purposefully timed
-# out reading kafka
-#
-
-shopt -s nocasematch
-set -u # nounset
-set -e # errexit
-set -E # errtrap
-set -o pipefail
-
-LAST_CMD=
-SKIP_EXCEPTION_TEXT=false
-
-while read -r CMD; do
-    if [[ ${CMD} =~ ('ERROR Error processing message') ]]; then
-        LAST_CMD=${CMD}
-    elif [[ ${CMD} =~ ('kafka.consumer.ConsumerTimeoutException') ]]; then
-        SKIP_EXCEPTION_TEXT=true
-    elif [[ "$SKIP_EXCEPTION_TEXT" = true ]]; then
-        if [[ ! ${CMD} =~ (^at) ]]; then
-            echo "${CMD}"
-        fi
-    else
-        if [[ -n "$LAST_CMD" ]]; then
-            LAST_CMD=
-        fi
-        if [[ ! ${CMD} =~ (^--) ]]; then
-            echo "${CMD}"
-        fi
-    fi
-done
-
diff --git a/docker/run_end_to_end.sh b/docker/run_end_to_end.sh
index cb0e24c..9c669a1 100755
--- a/docker/run_end_to_end.sh
+++ b/docker/run_end_to_end.sh
@@ -28,6 +28,7 @@ function help {
   echo "    --skip-docker-build             [OPTIONAL] Skip build of zeek 
docker machine."
   echo "    --data-path                     [OPTIONAL] The pcap data path. 
Default: ./data"
   echo "    --kafka-topic                   [OPTIONAL] The kafka topic to 
consume from. Default: zeek"
+  echo "    --partitions                    [OPTIONAL] The number of kafka 
partitions to create. Default: 2"
   echo "    --plugin-version                [OPTIONAL] The plugin version. 
Default: the current branch name"
   echo "    --no-pcap                       [OPTIONAL] Do not run pcaps."
   echo "    -h/--help                       Usage information."
@@ -54,6 +55,7 @@ DATE=$(date)
 LOG_DATE=${DATE// /_}
 TEST_OUTPUT_PATH="${ROOT_DIR}/test_output/"${LOG_DATE//:/_}
 KAFKA_TOPIC="zeek"
+PARTITIONS=2
 PROJECT_NAME="metron-bro-plugin-kafka"
 OUR_SCRIPTS_PATH="${PLUGIN_ROOT_DIR}/docker/in_docker_scripts"
 
@@ -85,7 +87,8 @@ fi
 # set errexit for the rest of the run
 set -e
 
-PLUGIN_VERSION=$(git rev-parse --symbolic-full-name --abbrev-ref HEAD)
+# use the local hash as refs will use remotes by default
+PLUGIN_VERSION=$(git rev-parse --verify HEAD)
 
 # Handle command line options
 for i in "$@"; do
@@ -108,7 +111,6 @@ for i in "$@"; do
       NO_PCAP=true
       shift # past argument
     ;;
-
   #
   # DATA_PATH
   #
@@ -116,7 +118,6 @@ for i in "$@"; do
       DATA_PATH="${i#*=}"
       shift # past argument=value
     ;;
-
   #
   # KAFKA_TOPIC
   #
@@ -126,7 +127,15 @@ for i in "$@"; do
       KAFKA_TOPIC="${i#*=}"
       shift # past argument=value
     ;;
-
+  #
+  # PARTITIONS
+  #
+  #   --partitions
+  #
+    --partitions=*)
+      PARTITIONS="${i#*=}"
+      shift # past argument=value
+    ;;
   #
   # PLUGIN_VERSION
   #
@@ -136,7 +145,6 @@ for i in "$@"; do
       PLUGIN_VERSION="${i#*=}"
       shift # past argument=value
     ;;
-
   #
   # -h/--help
   #
@@ -148,12 +156,17 @@ for i in "$@"; do
   esac
 done
 
-cd "${ROOT_DIR}" || { echo "NO ROOT" ; exit 1; }
-echo "Running docker compose with "
-echo "SKIP_REBUILD_ZEEK = ${SKIP_REBUILD_ZEEK}"
-echo "DATA_PATH         = ${DATA_PATH}"
-echo "KAFKA_TOPIC       = ${KAFKA_TOPIC}"
-echo "PLUGIN_VERSION    = ${PLUGIN_VERSION}"
+cd "${ROOT_DIR}" || { echo "ROOT_DIR unavailable" ; exit 1; }
+echo "Running the end to end tests with"
+echo "COMPOSE_PROJECT_NAME = ${PROJECT_NAME}"
+echo "SKIP_REBUILD_ZEEK    = ${SKIP_REBUILD_ZEEK}"
+echo "KAFKA_TOPIC          = ${KAFKA_TOPIC}"
+echo "PARTITIONS           = ${PARTITIONS}"
+echo "PLUGIN_VERSION       = ${PLUGIN_VERSION}"
+echo "DATA_PATH            = ${DATA_PATH}"
+echo "TEST_OUTPUT_PATH     = ${TEST_OUTPUT_PATH}"
+echo "PLUGIN_ROOT_DIR      = ${PLUGIN_ROOT_DIR}"
+echo "OUR_SCRIPTS_PATH     = ${OUR_SCRIPTS_PATH}"
 echo "==================================================="
 
 # Run docker compose, rebuilding as specified
@@ -164,9 +177,6 @@ if [[ "$SKIP_REBUILD_ZEEK" = false ]]; then
     PLUGIN_ROOT_DIR=${PLUGIN_ROOT_DIR} \
     OUR_SCRIPTS_PATH=${OUR_SCRIPTS_PATH} \
     docker-compose up -d --build
-  rc=$?; if [[ ${rc} != 0 ]]; then
-    exit ${rc}
-  fi
 else
   COMPOSE_PROJECT_NAME="${PROJECT_NAME}" \
     DATA_PATH=${DATA_PATH} \
@@ -174,43 +184,25 @@ else
     PLUGIN_ROOT_DIR=${PLUGIN_ROOT_DIR} \
     OUR_SCRIPTS_PATH=${OUR_SCRIPTS_PATH} \
     docker-compose up -d
-  rc=$?; if [[ ${rc} != 0 ]]; then
-    exit ${rc}
-  fi
 fi
 
 # Create the kafka topic
-bash "${SCRIPT_DIR}"/docker_execute_create_topic_in_kafka.sh 
--kafka-topic="${KAFKA_TOPIC}"
-rc=$?; if [[ ${rc} != 0 ]]; then
-  exit ${rc}
-fi
+"${SCRIPT_DIR}"/docker_execute_create_topic_in_kafka.sh 
--kafka-topic="${KAFKA_TOPIC}" --partitions="${PARTITIONS}"
 
 # Download the pcaps
-bash "${SCRIPT_DIR}"/download_sample_pcaps.sh --data-path="${DATA_PATH}"
-# By not catching $? here we are accepting that a failed pcap download will not
-# exit the script
+"${SCRIPT_DIR}"/download_sample_pcaps.sh --data-path="${DATA_PATH}"
 
 # Build the zeek plugin
-bash "${SCRIPT_DIR}"/docker_execute_build_plugin.sh 
--plugin-version="${PLUGIN_VERSION}"
-rc=$?; if [[ ${rc} != 0 ]]; then
-  echo "ERROR> FAILED TO BUILD PLUGIN.  CHECK LOGS  ${rc}"
-  exit ${rc}
-fi
+"${SCRIPT_DIR}"/docker_execute_build_plugin.sh 
--plugin-version="${PLUGIN_VERSION}"
 
 # Configure the plugin
-bash "${SCRIPT_DIR}"/docker_execute_configure_plugin.sh 
--kafka-topic="${KAFKA_TOPIC}"
-rc=$?; if [[ ${rc} != 0 ]]; then
-  echo "ERROR> FAILED TO CONFIGURE PLUGIN.  CHECK LOGS  ${rc}"
-  exit ${rc}
-fi
+"${SCRIPT_DIR}"/docker_execute_configure_plugin.sh 
--kafka-topic="${KAFKA_TOPIC}"
 
-if [[ "$NO_PCAP" = false ]]; then
+if [[ "$NO_PCAP" == false ]]; then
   # for each pcap in the data directory, we want to
   # run zeek then read the output from kafka
   # and output both of them to the same directory named
   # for the date/pcap
-
-
   for file in "${DATA_PATH}"/**/*.pcap*
   do
     # get the file name
@@ -220,43 +212,37 @@ if [[ "$NO_PCAP" = false ]]; then
     mkdir "${TEST_OUTPUT_PATH}/${DOCKER_DIRECTORY_NAME}" || exit 1
     echo "MADE ${TEST_OUTPUT_PATH}/${DOCKER_DIRECTORY_NAME}"
 
-    # get the current offset in kafka
-    # this is where we are going to _start_
-    OFFSET=$(bash "${SCRIPT_DIR}"/docker_run_get_offset_kafka.sh 
--kafka-topic="${KAFKA_TOPIC}" | sed "s/^${KAFKA_TOPIC}:0:\(.*\)$/\1/")
-    echo "OFFSET------------------> ${OFFSET}"
+    # get the offsets in kafka for the provided topic
+    # this is where we are going to _start_, and must happen
+    # before processing the pcap
+    OFFSETS=$("${SCRIPT_DIR}"/docker_run_get_offset_kafka.sh 
--kafka-topic="${KAFKA_TOPIC}")
 
-    bash "${SCRIPT_DIR}"/docker_execute_process_data_file.sh 
--pcap-file-name="${BASE_FILE_NAME}" 
--output-directory-name="${DOCKER_DIRECTORY_NAME}"
-    rc=$?; if [[ ${rc} != 0 ]]; then
-      echo "ERROR> FAILED TO PROCESS ${file} DATA.  CHECK LOGS, please run the 
finish_end_to_end.sh when you are done."
-      exit ${rc}
-    fi
+    "${SCRIPT_DIR}"/docker_execute_process_data_file.sh 
--pcap-file-name="${BASE_FILE_NAME}" 
--output-directory-name="${DOCKER_DIRECTORY_NAME}"
 
-    
KAFKA_OUTPUT_FILE="${TEST_OUTPUT_PATH}/${DOCKER_DIRECTORY_NAME}/kafka-output.log"
-    bash "${SCRIPT_DIR}"/docker_run_consume_kafka.sh --offset="${OFFSET}" 
--kafka-topic="${KAFKA_TOPIC}" | "${ROOT_DIR}"/remove_timeout_message.sh | tee 
"${KAFKA_OUTPUT_FILE}"
+    # loop through each partition
+    while IFS= read -r line; do
+      # shellcheck disable=SC2001
+      OFFSET=$(echo "${line}" | sed "s/^${KAFKA_TOPIC}:.*:\(.*\)$/\1/")
+      # shellcheck disable=SC2001
+      PARTITION=$(echo "${line}" | sed "s/^${KAFKA_TOPIC}:\(.*\):.*$/\1/")
 
-    rc=$?; if [[ ${rc} != 0 ]]; then
-      echo "ERROR> FAILED TO PROCESS ${DATA_PATH} DATA.  CHECK LOGS"
-    fi
+      echo "PARTITION---------------> ${PARTITION}"
+      echo "OFFSET------------------> ${OFFSET}"
+
+      
KAFKA_OUTPUT_FILE="${TEST_OUTPUT_PATH}/${DOCKER_DIRECTORY_NAME}/kafka-output.log"
+      "${SCRIPT_DIR}"/docker_run_consume_kafka.sh --offset="${OFFSET}" 
--partition="${PARTITION}" --kafka-topic="${KAFKA_TOPIC}" 
1>>"${KAFKA_OUTPUT_FILE}" 2>/dev/null
+    done <<< "${OFFSETS}"
 
     "${SCRIPT_DIR}"/split_kafka_output_by_log.sh 
--log-directory="${TEST_OUTPUT_PATH}/${DOCKER_DIRECTORY_NAME}"
-    rc=$?; if [[ ${rc} != 0 ]]; then
-      echo "ERROR> ISSUE ENCOUNTERED WHEN SPLITTING KAFKA OUTPUT LOGS"
-    fi
   done
 
   "${SCRIPT_DIR}"/print_results.sh --test-directory="${TEST_OUTPUT_PATH}"
-  rc=$?; if [[ ${rc} != 0 ]]; then
-    echo "ERROR> ISSUE ENCOUNTERED WHEN PRINTING RESULTS"
-    exit ${rc}
-  fi
 
   "${SCRIPT_DIR}"/analyze_results.sh --test-directory="${TEST_OUTPUT_PATH}"
-  rc=$?; if [[ ${rc} != 0 ]]; then
-    echo "ERROR> ISSUE ENCOUNTERED WHEN ANALYZING RESULTS"
-    exit ${rc}
-  fi
 fi
+
 echo ""
 echo "Run complete"
 echo "The kafka and zeek output can be found at ${TEST_OUTPUT_PATH}"
 echo "You may now work with the containers if you will.  You need to call 
finish_end_to_end.sh when you are done"
+
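
In the loop added above, GetOffsetShell reports one line per partition in the form topic:partition:offset, which the script splits apart with sed. A simplified sketch of the same idea, using read instead of sed (paths assume the docker/ directory and the default topic name):

    OFFSETS=$(./scripts/docker_run_get_offset_kafka.sh --kafka-topic=zeek)
    while IFS=: read -r _topic partition offset; do
      # read everything written to this partition since the recorded offset
      ./scripts/docker_run_consume_kafka.sh --kafka-topic=zeek \
        --partition="${partition}" --offset="${offset}" >> kafka-output.log
    done <<< "${OFFSETS}"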
diff --git a/docker/scripts/docker_execute_build_plugin.sh 
b/docker/scripts/docker_execute_build_plugin.sh
index d1f8d7f..7d26e40 100755
--- a/docker/scripts/docker_execute_build_plugin.sh
+++ b/docker/scripts/docker_execute_build_plugin.sh
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash -x
+#!/usr/bin/env bash
 
 #
 #  Licensed to the Apache Software Foundation (ASF) under one or more
@@ -93,9 +93,6 @@ echo "CONTAINER_NAME = $CONTAINER_NAME"
 echo "==================================================="
 
 docker exec -w /root "${CONTAINER_NAME}" bash -c 
"/root/built_in_scripts/build_plugin.sh --plugin-version=${PLUGIN_VERSION}"
-rc=$?; if [[ ${rc} != 0 ]]; then
-  exit ${rc};
-fi
 
 echo "Built the plugin"
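
The explicit rc checks removed here (and in the scripts below) are redundant because these helpers enable errexit in their shared preamble (set -e), so any failing command already aborts the script. A minimal illustration of that behavior:

    #!/usr/bin/env bash
    set -e            # errexit: a non-zero exit status aborts the script
    false             # execution stops here
    echo "never reached"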
 
diff --git a/docker/scripts/docker_execute_configure_plugin.sh 
b/docker/scripts/docker_execute_configure_plugin.sh
index 0e853fe..768eb23 100755
--- a/docker/scripts/docker_execute_configure_plugin.sh
+++ b/docker/scripts/docker_execute_configure_plugin.sh
@@ -86,9 +86,6 @@ echo "KAFKA_TOPIC = ${KAFKA_TOPIC}"
 echo "==================================================="
 
 docker exec -w /root "${CONTAINER_NAME}" bash -c 
"/root/built_in_scripts/configure_plugin.sh --kafka-topic=\"${KAFKA_TOPIC}\""
-rc=$?; if [[ ${rc} != 0 ]]; then
-  exit ${rc};
-fi
 
 echo "configured the kafka plugin"
 
diff --git a/docker/scripts/docker_execute_configure_zeek_plugin.sh 
b/docker/scripts/docker_execute_configure_zeek_plugin.sh
deleted file mode 100755
index 0e853fe..0000000
--- a/docker/scripts/docker_execute_configure_zeek_plugin.sh
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-shopt -s nocasematch
-set -u # nounset
-set -e # errexit
-set -E # errtrap
-set -o pipefail
-
-#
-# Executes the configure_plugin.sh in the docker container
-#
-
-function help {
-  echo " "
-  echo "usage: ${0}"
-  echo "    --container-name                [OPTIONAL] The Docker container 
name. Default: metron-bro-plugin-kafka_zeek_1"
-  echo "    --kafka-topic                   [OPTIONAL] The kafka topic to 
create. Default: zeek"
-  echo "    -h/--help                       Usage information."
-  echo " "
-  echo " "
-}
-
-CONTAINER_NAME=metron-bro-plugin-kafka_zeek_1
-KAFKA_TOPIC=zeek
-
-# Handle command line options
-for i in "$@"; do
-  case $i in
-  #
-  # CONTAINER_NAME
-  #
-  #   --container-name
-  #
-    --container-name=*)
-      CONTAINER_NAME="${i#*=}"
-      shift # past argument=value
-    ;;
-  #
-  # KAFKA_TOPIC
-  #
-  #   --kafka-topic
-  #
-    --kafka-topic=*)
-      KAFKA_TOPIC="${i#*=}"
-      shift # past argument=value
-    ;;
-  #
-  # -h/--help
-  #
-    -h | --help)
-      help
-      exit 0
-      shift # past argument with no value
-    ;;
-  #
-  # Unknown option
-  #
-    *)
-      UNKNOWN_OPTION="${i#*=}"
-      echo "Error: unknown option: $UNKNOWN_OPTION"
-      help
-    ;;
-  esac
-done
-
-echo "Running docker_execute_configure_plugin.sh with "
-echo "CONTAINER_NAME = ${CONTAINER_NAME}"
-echo "KAFKA_TOPIC = ${KAFKA_TOPIC}"
-echo "==================================================="
-
-docker exec -w /root "${CONTAINER_NAME}" bash -c 
"/root/built_in_scripts/configure_plugin.sh --kafka-topic=\"${KAFKA_TOPIC}\""
-rc=$?; if [[ ${rc} != 0 ]]; then
-  exit ${rc};
-fi
-
-echo "configured the kafka plugin"
-
diff --git a/docker/scripts/docker_execute_create_topic_in_kafka.sh 
b/docker/scripts/docker_execute_create_topic_in_kafka.sh
index 470e160..6e5b8cc 100755
--- a/docker/scripts/docker_execute_create_topic_in_kafka.sh
+++ b/docker/scripts/docker_execute_create_topic_in_kafka.sh
@@ -26,14 +26,16 @@ set -o pipefail
 function help {
   echo " "
   echo "usage: ${0}"
-  echo "    --container-name                [OPTIONAL] The Docker container 
name. Default: metron-bro-plugin-kafka_kafka_1"
+  echo "    --container-name                [OPTIONAL] The Docker container 
name. Default: metron-bro-plugin-kafka_kafka-1_1"
   echo "    --kafka-topic                   [OPTIONAL] The kafka topic to 
create. Default: zeek"
+  echo "    --partitions                    [OPTIONAL] The number of kafka 
partitions to create. Default: 2"
   echo "    -h/--help                       Usage information."
   echo " "
 }
 
-CONTAINER_NAME="metron-bro-plugin-kafka_kafka_1"
+CONTAINER_NAME="metron-bro-plugin-kafka_kafka-1_1"
 KAFKA_TOPIC=zeek
+PARTITIONS=2
 
 # handle command line options
 for i in "$@"; do
@@ -57,6 +59,15 @@ for i in "$@"; do
       shift # past argument=value
     ;;
   #
+  # PARTITIONS
+  #
+  #   --partitions
+  #
+    --partitions=*)
+      PARTITIONS="${i#*=}"
+      shift # past argument=value
+    ;;
+  #
   # -h/--help
   #
     -h | --help)
@@ -79,10 +90,9 @@ done
 echo "Running docker_execute_create_topic_in_kafka.sh with "
 echo "CONTAINER_NAME = ${CONTAINER_NAME}"
 echo "KAFKA_TOPIC = ${KAFKA_TOPIC}"
+echo "PARTITIONS = ${PARTITIONS}"
 echo "==================================================="
 
-docker exec -w /kafka/bin/ "${CONTAINER_NAME}" \
-  bash -c "JMX_PORT= ./kafka-topics.sh --create --topic ${KAFKA_TOPIC} 
--replication-factor 1 --partitions 1 --zookeeper zookeeper:2181"
-rc=$?; if [[ ${rc} != 0 ]]; then
-  exit ${rc}
-fi
+docker exec -w /opt/kafka/bin/ "${CONTAINER_NAME}" \
+  bash -c "JMX_PORT= ./kafka-topics.sh --create --topic ${KAFKA_TOPIC} 
--replication-factor 1 --partitions ${PARTITIONS} --zookeeper zookeeper:2181"
+
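
To confirm the topic was created with the requested number of partitions, the same kafka-topics.sh tool can describe it (the container name below is the default assumed by this script):

    docker exec -w /opt/kafka/bin/ metron-bro-plugin-kafka_kafka-1_1 \
      bash -c "JMX_PORT= ./kafka-topics.sh --describe --topic zeek --zookeeper zookeeper:2181"
    # PartitionCount in the output should match the --partitions value used above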
diff --git a/docker/scripts/docker_execute_process_data_file.sh 
b/docker/scripts/docker_execute_process_data_file.sh
index 37d25c9..1b5480f 100755
--- a/docker/scripts/docker_execute_process_data_file.sh
+++ b/docker/scripts/docker_execute_process_data_file.sh
@@ -106,8 +106,5 @@ echo " "
 
 docker exec -w /root "${CONTAINER_NAME}" bash -c 
"built_in_scripts/process_data_file.sh --pcap-file-name=${PCAP_FILE_NAME} 
--output-directory-name=${OUTPUT_DIRECTORY_NAME}"
 
-rc=$?; if [[ ${rc} != 0 ]]; then
-  exit ${rc};
-fi
-
 echo "done processing ${PCAP_FILE_NAME}"
+
diff --git a/docker/scripts/docker_run_consume_kafka.sh 
b/docker/scripts/docker_run_consume_kafka.sh
index e8ac98e..a5728b9 100755
--- a/docker/scripts/docker_run_consume_kafka.sh
+++ b/docker/scripts/docker_run_consume_kafka.sh
@@ -25,21 +25,24 @@ set -o pipefail
 
 #
 # Runs a kafka container with the console consumer for the appropriate topic.
-# The consumer should quit when it has read all of the messages available.
+# The consumer should quit when it has read all of the messages available on
+# the given partition.
 #
 
 function help {
   echo " "
   echo "usage: ${0}"
   echo "    --network-name                  [OPTIONAL] The Docker network 
name. Default: metron-bro-plugin-kafka_default"
-  echo "    --offset                        [OPTIONAL] The kafka offset to 
read from. Default: -1"
+  echo "    --offset                        [OPTIONAL] The kafka offset to 
read from. Default: 0"
+  echo "    --partition                     [OPTIONAL] The kafka partition to 
read from. Default: 0"
   echo "    --kafka-topic                   [OPTIONAL] The kafka topic to 
consume from. Default: zeek"
   echo "    -h/--help                       Usage information."
   echo " "
 }
 
 NETWORK_NAME=metron-bro-plugin-kafka_default
-OFFSET=-1
+OFFSET=0
+PARTITION=0
 KAFKA_TOPIC=zeek
 
 # handle command line options
@@ -64,6 +67,15 @@ for i in "$@"; do
       shift # past argument=value
     ;;
   #
+  # PARTITION
+  #
+  #   --partition
+  #
+    --partition=*)
+      PARTITION="${i#*=}"
+      shift # past argument=value
+    ;;
+  #
   # KAFKA_TOPIC
   #
   #   --kafka-topic
@@ -80,7 +92,6 @@ for i in "$@"; do
       exit 0
       shift # past argument with no value
     ;;
-
   #
   # Unknown option
   #
@@ -93,8 +104,5 @@ for i in "$@"; do
 done
 
 docker run --rm --network "${NETWORK_NAME}" metron-bro-plugin-kafka_kafka \
-  kafka-console-consumer.sh --topic "${KAFKA_TOPIC}" --offset "${OFFSET}" 
--partition 0 --bootstrap-server kafka:9092 --timeout-ms 1000
-rc=$?; if [[ ${rc} != 0 ]]; then
-  exit ${rc}
-fi
+  kafka-console-consumer.sh --topic "${KAFKA_TOPIC}" --offset "${OFFSET}" 
--partition "${PARTITION}" --bootstrap-server kafka-1:9092 --timeout-ms 5000
 
diff --git a/docker/scripts/docker_run_get_offset_kafka.sh 
b/docker/scripts/docker_run_get_offset_kafka.sh
index 6f53926..86ea68f 100755
--- a/docker/scripts/docker_run_get_offset_kafka.sh
+++ b/docker/scripts/docker_run_get_offset_kafka.sh
@@ -24,8 +24,7 @@ set -E # errtrap
 set -o pipefail
 
 #
-# Runs a kafka container with the console consumer for the provided topic.  The
-# consumer should quit when it has read all of the messages available.
+# Runs a kafka container to retrieve the offset for the provided topic
 #
 
 function help {
@@ -82,8 +81,5 @@ for i in "$@"; do
 done
 
 docker run --rm --network "${NETWORK_NAME}" metron-bro-plugin-kafka_kafka \
-  kafka-run-class.sh kafka.tools.GetOffsetShell --topic "${KAFKA_TOPIC}" 
--broker-list kafka:9092
-rc=$?; if [[ ${rc} != 0 ]]; then
-  exit ${rc}
-fi
+  kafka-run-class.sh kafka.tools.GetOffsetShell --topic "${KAFKA_TOPIC}" 
--broker-list "kafka-1:9092,kafka-2:9092"
 
diff --git a/docker/scripts/split_kafka_output_by_log.sh 
b/docker/scripts/split_kafka_output_by_log.sh
index 04139a9..61ad23d 100755
--- a/docker/scripts/split_kafka_output_by_log.sh
+++ b/docker/scripts/split_kafka_output_by_log.sh
@@ -79,7 +79,7 @@ if [[ -z "$LOG_DIRECTORY" ]]; then
 fi
 
 echo "Running ${SCRIPT_NAME} with"
-echo "$LOG_DIRECTORY = $LOG_DIRECTORY"
+echo "LOG_DIRECTORY = $LOG_DIRECTORY"
 echo "==================================================="
 
 # Move over to the docker area
@@ -93,15 +93,20 @@ echo "LOG,ZEEK_COUNT,KAFKA_COUNT" >> "${RESULTS_FILE}"
 for log in "${LOG_DIRECTORY}"/*.log
 do
   BASE_LOG_FILE_NAME=$(basename "$log" .log)
-  if [[ ! "$BASE_LOG_FILE_NAME" == "kafka-output.log" ]]; then
-    if [[ $(grep {\""${BASE_LOG_FILE_NAME}"\": 
"${LOG_DIRECTORY}"/kafka-output.log) ]]; then
-      grep {\""${BASE_LOG_FILE_NAME}"\": "${LOG_DIRECTORY}"/kafka-output.log > 
"${LOG_DIRECTORY}"/"${BASE_LOG_FILE_NAME}".kafka.log
 
-      KAKFA_COUNT=$(cat "${LOG_DIRECTORY}/${BASE_LOG_FILE_NAME}.kafka.log" | 
wc -l)
-      ZEEK_COUNT=$(grep -v "^#" "${log}" | wc -l)
+  # skip kafka-output.log
+  if [[ "$BASE_LOG_FILE_NAME" == "kafka-output" ]]; then
+    continue
+  fi
+
+  # search the kafka output for each log and count them
+  if grep -q \{\""${BASE_LOG_FILE_NAME}"\": 
"${LOG_DIRECTORY}"/kafka-output.log ; then
+    grep \{\""${BASE_LOG_FILE_NAME}"\": "${LOG_DIRECTORY}"/kafka-output.log > 
"${LOG_DIRECTORY}"/"${BASE_LOG_FILE_NAME}".kafka.log
+
+    KAKFA_COUNT=$(cat "${LOG_DIRECTORY}/${BASE_LOG_FILE_NAME}.kafka.log" | wc 
-l)
+    ZEEK_COUNT=$(grep -v "^#" "${log}" | wc -l)
 
-      echo "${BASE_LOG_FILE_NAME},${ZEEK_COUNT},${KAKFA_COUNT}" >> 
"${RESULTS_FILE}"
-    fi
+    echo "${BASE_LOG_FILE_NAME},${ZEEK_COUNT},${KAKFA_COUNT}" >> 
"${RESULTS_FILE}"
   fi
 done
 
diff --git a/src/KafkaWriter.cc b/src/KafkaWriter.cc
index 62e4ac3..7f26092 100644
--- a/src/KafkaWriter.cc
+++ b/src/KafkaWriter.cc
@@ -290,7 +290,7 @@ bool KafkaWriter::DoSetBuf(bool enabled)
 bool KafkaWriter::DoFlush(double network_time)
 {
     if (!mocking) {
-        producer->poll(0);
+        producer->flush(0);
     }
     return true;
 }
diff --git a/src/KafkaWriter.h b/src/KafkaWriter.h
index e9193f8..5ebf4ef 100644
--- a/src/KafkaWriter.h
+++ b/src/KafkaWriter.h
@@ -20,7 +20,6 @@
 
 #include <librdkafka/rdkafkacpp.h>
 #include <string>
-#include <Type.h>
 #include <Desc.h>
 #include <logging/WriterBackend.h>
 #include <threading/formatters/JSON.h>
diff --git a/tests/Baseline/kafka.resolved-topic-override-and-config/output 
b/tests/Baseline/kafka.resolved-topic-override-and-config/output
index 54bee9c..1a30216 100644
--- a/tests/Baseline/kafka.resolved-topic-override-and-config/output
+++ b/tests/Baseline/kafka.resolved-topic-override-and-config/output
@@ -1,2 +1,2 @@
-Kafka topic set to const-variable-topic
 Kafka topic set to configuration-table-topic
+Kafka topic set to const-variable-topic
diff --git a/tests/kafka/resolved-topic-override-and-config.zeek 
b/tests/kafka/resolved-topic-override-and-config.zeek
index 37d1258..0d12e12 100644
--- a/tests/kafka/resolved-topic-override-and-config.zeek
+++ b/tests/kafka/resolved-topic-override-and-config.zeek
@@ -15,12 +15,11 @@
 #  limitations under the License.
 #
 
-# @TEST-EXEC: zeek -r ../../../tests/pcaps/exercise-traffic.pcap 
../../../scripts/Apache/Kafka/ %INPUT > output
+# @TEST-EXEC: zeek -r ../../../tests/pcaps/exercise-traffic.pcap 
../../../scripts/Apache/Kafka/ %INPUT | sort > output
 # @TEST-EXEC: btest-diff output
 
 module Kafka;
 
-
 redef Kafka::logs_to_send = set(Conn::LOG);
 redef Kafka::topic_name = "const-variable-topic";
 redef Kafka::mock = T;
@@ -35,3 +34,4 @@ event zeek_init() &priority=-10
     ];
     Log::add_filter(Conn::LOG, xxx_filter);
 }
+
