Edenhill has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/93858


Change subject: Configuration property namespace fixup
......................................................................

Configuration property namespace fixup

* Kafka properties are now prefixed with "kafka.".
* Varnish log related properties are now prefixed with "logline." so as not
  to interfere with the "log." syslog properties.

Change-Id: I526fa8028d9be17716da2cb537c6fc5c58847004
---
M config.c
M varnishkafka.conf.example
2 files changed, 43 insertions(+), 44 deletions(-)


  git pull 
ssh://gerrit.wikimedia.org:29418/operations/software/varnish/varnishkafka 
refs/changes/58/93858/1

diff --git a/config.c b/config.c
index e5b1f07..6b10b41 100644
--- a/config.c
+++ b/config.c
@@ -91,33 +91,29 @@
                     char *errstr, size_t errstr_size) {
        rd_kafka_conf_res_t res;
 
-       /* Try librdkafka configuration properties first.
-        * If it doesnt match, we try our own. */
 
-       if (!strncmp(name, "topic.", strlen("topic."))) {
+       /* Kafka configuration */
+       if (!strncmp(name, "kafka.", strlen("kafka."))) {
+               name += strlen("kafka.");
+
                /* Kafka topic configuration. */
-               
-               res = rd_kafka_topic_conf_set(conf.topic_conf,
-                                             name+strlen("topic."), val,
-                                             errstr, errstr_size);
-               if (res == RD_KAFKA_CONF_INVALID)
-                       return -1;
-               else if (res == RD_KAFKA_CONF_OK)
+               if (!strncmp(name, "topic.", strlen("topic.")))
+                       res = rd_kafka_topic_conf_set(conf.topic_conf,
+                                                     name+strlen("topic."),
+                                                     val,
+                                                     errstr, errstr_size);
+               else /* Kafka global configuration */
+                       res = rd_kafka_conf_set(conf.rk_conf, name,
+                                               val, errstr, errstr_size);
+
+               if (res == RD_KAFKA_CONF_OK)
                        return 0;
                else if (res != RD_KAFKA_CONF_UNKNOWN)
-                       return 0;
-       }
-
-       /* Kafka main configuration */
-       res = rd_kafka_conf_set(conf.rk_conf, name, val, errstr, errstr_size);
-       if (res == RD_KAFKA_CONF_INVALID)
-               return -1;
-       else if (res == RD_KAFKA_CONF_OK)
-               return 0;
-       else if (res != RD_KAFKA_CONF_UNKNOWN)
-               return 0;
-
+                       return -1;
                
+       /* Unknown configs: fall through */
+               name -= strlen("kafka.");
+       }
 
        /* librdkafka handles NULL configuration values, we dont. */
        if (!val) {
@@ -127,9 +123,9 @@
        }
                         
        /* varnishkafka configuration options */
-       if (!strcmp(name, "topic"))
+       if (!strcmp(name, "kafka.topic"))
                conf.topic = strdup(val);
-       else if (!strcmp(name, "partition"))
+       else if (!strcmp(name, "kafka.partition"))
                conf.partition = atoi(val);
        else if (!strcmp(name, "format"))
                conf.format[FMT_CONF_MAIN] = strdup(val);
@@ -191,13 +187,13 @@
                                 "try \"stdout\" or \"kafka\"", val);
                        return -1;
                }
-       } else if (!strcmp(name, "log.data.copy"))
+       } else if (!strcmp(name, "logline.data.copy"))
                conf.datacopy = conf_tof(val);
-       else if (!strcmp(name, "log.hash.size"))
+       else if (!strcmp(name, "logline.hash.size"))
                conf.loglines_hsize = atoi(val);
-       else if (!strcmp(name, "log.hash.max"))
+       else if (!strcmp(name, "logline.hash.max"))
                conf.loglines_hmax = atoi(val);
-       else if (!strcmp(name, "log.line.scratch.size"))
+       else if (!strcmp(name, "logline.scratch.size"))
                conf.scratch_size = atoi(val);
        else if (!strncmp(name, "varnish.arg.", strlen("varnish.arg."))) {
                const char *t = name + strlen("varnish.arg.");
diff --git a/varnishkafka.conf.example b/varnishkafka.conf.example
index f35aac3..b79155c 100644
--- a/varnishkafka.conf.example
+++ b/varnishkafka.conf.example
@@ -119,32 +119,32 @@
 # NOTE:
 #   Must be set to true for offline files (-r file..) due to the way
 #   libvarnishapi reads its data.
-log.data.copy = true
+logline.data.copy = true
 
 
 # TUNING
 # Logline cache hash tuning
-# 'log.hash.size * log.hash.max' dictates the maximum number of cached logline
-# entries in memory.
+# 'logline.hash.size * logline.hash.max' dictates the maximum number of
+# cached logline entries in memory.
 
 # Number of hash buckets (keyed by log id).
 # Higher number yields more performance at the expense of memory.
 # Set this to avg_requests_per_second / 5.
 # Defaults to 5000
-#log.hash.size = 5000
+#logline.hash.size = 5000
 
 # Maximum number of loglines per hash bucket
 # Higher number yields less memory consumption at the expense of performance.
-# Set this to avg_requests_per_second / log.hash.size.
+# Set this to avg_requests_per_second / logline.hash.size.
 # Defaults to 5
-#log.hash.max = 5
+#logline.hash.max = 5
 
 # Size of per logline scratch buffer.
 # The scratch buffer is used as a temporary storage space while
 # collecting tags for the log line.
 # If the scratch size is too small the logline tag match will be incomplete.
 # Defaults to 4096 bytes.
-#log.line.scratch.size = 4096
+#logline.scratch.size = 4096
 
 
 # Start for sequence number (%n)
@@ -217,6 +217,9 @@
 #                                                                     #
 # Kafka configuration                                                 #
 #                                                                     #
+# Kafka configuration properties are prefixed with "kafka."           #
+# and topic properties are prefixed with "kafka.topic.".              #
+#                                                                     #
 # For the full range of Kafka handle and topic configuration          #
 # properties, see:                                                    #
 #  https://github.com/edenhill/librdkafka/blob/master/rdkafka.h       #
@@ -227,14 +230,14 @@
 #######################################################################
 
 # Initial list of kafka brokers
-metadata.broker.list = localhost:9092
+kafka.metadata.broker.list = localhost:9092
 
 # Maximum number of messages allowed on the local producer queue
 # Defaults to 1000000
-queue.buffering.max.messages = 1000000
+kafka.queue.buffering.max.messages = 1000000
 
 # Maximum number of retries per messageset.
-message.send.max.retries = 3
+kafka.message.send.max.retries = 3
 
 
 #
@@ -242,17 +245,17 @@
 #
 
 # Topic to produce messages to
-topic = varnish
+kafka.topic = varnish
 
 # Partition (-1: random, else one of the available partitions)
-partition = -1
+kafka.partition = -1
 
 
 # Required number of acks
-topic.request.required.acks = 1
+kafka.topic.request.required.acks = 1
 
 # Local message timeout (milliseconds)
-topic.message.timeout.ms = 60000
+kafka.topic.message.timeout.ms = 60000
 
 
 #
@@ -260,10 +263,10 @@
 #
 
 # partition on user name
-#partitioner = crc32(%u)
+#kafka.partitioner = crc32(%u)
 
 # partition on client IP
-#partitioner = iphash(%{X-Forwarded-For}i)
+#kafka.partitioner = iphash(%{X-Forwarded-For}i)
 
 
 

-- 
To view, visit https://gerrit.wikimedia.org/r/93858
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I526fa8028d9be17716da2cb537c6fc5c58847004
Gerrit-PatchSet: 1
Gerrit-Project: operations/software/varnish/varnishkafka
Gerrit-Branch: master
Gerrit-Owner: Edenhill <[email protected]>

_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits

Reply via email to