Repository: trafficserver
Updated Branches:
  refs/heads/master 61a49ad0f -> 851a52bc2


TS-3171: minor style updates to Tokenizer

Make some minor style tweaks to Tokenizer so that it is a little
more conventional and easier to use. Rewrite the regression test
so it tests something.


Project: http://git-wip-us.apache.org/repos/asf/trafficserver/repo
Commit: http://git-wip-us.apache.org/repos/asf/trafficserver/commit/851a52bc
Tree: http://git-wip-us.apache.org/repos/asf/trafficserver/tree/851a52bc
Diff: http://git-wip-us.apache.org/repos/asf/trafficserver/diff/851a52bc

Branch: refs/heads/master
Commit: 851a52bc2787bb758b75af5799c47379f61d5049
Parents: 61a49ad
Author: James Peach <[email protected]>
Authored: Fri Oct 31 11:58:37 2014 -0700
Committer: James Peach <[email protected]>
Committed: Wed Nov 5 15:01:16 2014 -0800

----------------------------------------------------------------------
 CHANGES                     |  2 ++
 lib/ts/Tokenizer.cc         | 56 ++++++++++++++++++++++++++++++----------
 lib/ts/Tokenizer.h          | 41 +++++++++++++++--------------
 mgmt/api/CfgContextUtils.cc | 18 ++++++-------
 mgmt/web2/WebHttpMessage.cc |  2 +-
 proxy/InkAPITest.cc         | 10 -------
 6 files changed, 77 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/trafficserver/blob/851a52bc/CHANGES
----------------------------------------------------------------------
diff --git a/CHANGES b/CHANGES
index 9bb9488..9bbe980 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,6 +1,8 @@
                                                          -*- coding: utf-8 -*-
 Changes with Apache Traffic Server 5.2.0
 
+  *) [TS-3171] Minor style updates to Tokenizer interface.
+
   *) [TS-3156] Remove MutexLock bool operators.
    Author: Powell Molleti <[email protected]>
 

http://git-wip-us.apache.org/repos/asf/trafficserver/blob/851a52bc/lib/ts/Tokenizer.cc
----------------------------------------------------------------------
diff --git a/lib/ts/Tokenizer.cc b/lib/ts/Tokenizer.cc
index cb9c6d7..b4c039d 100644
--- a/lib/ts/Tokenizer.cc
+++ b/lib/ts/Tokenizer.cc
@@ -50,7 +50,7 @@ Tokenizer::Tokenizer(const char *StrOfDelimiters)
   memset(&start_node, 0, sizeof(tok_node));
 
   numValidTokens = 0;
-  maxTokens = -1;
+  maxTokens = UINT_MAX;
   options = 0;
 
   add_node = &start_node;
@@ -85,7 +85,7 @@ Tokenizer::~Tokenizer()
   }
 }
 
-int
+unsigned
 Tokenizer::Initialize(const char *str)
 {
   return Initialize((char *) str, COPY_TOKS);
@@ -113,13 +113,13 @@ Tokenizer::isDelimiter(char c)
   return 0;
 }
 
-int
-Tokenizer::Initialize(char *str, int opt)
+unsigned
+Tokenizer::Initialize(char *str, unsigned opt)
 {
   char *strStart;
   int priorCharWasDelimit = 1;
   char *tokStart = NULL;
-  int tok_count = 0;
+  unsigned tok_count = 0;
   bool max_limit_hit = false;
 
   // We can't depend on ReUse() being called so just do it
@@ -138,7 +138,6 @@ Tokenizer::Initialize(char *str, int opt)
   // Make sure that both options are not set
   ink_assert(!((opt & COPY_TOKS) && (opt & SHARE_TOKS)));
 
-
   str = strStart;
   priorCharWasDelimit = 1;
 
@@ -273,12 +272,11 @@ Tokenizer::addToken(char *startAddr, int length)
 
 
 const char *
-Tokenizer::operator[] (int index)
+Tokenizer::operator[] (unsigned index) const
 {
-  tok_node *
-    cur_node = &start_node;
-  int
-    cur_start = 0;
+  const tok_node * cur_node = &start_node;
+  unsigned cur_start = 0;
+
   if (index >= numValidTokens) {
     return NULL;
   } else {
@@ -291,8 +289,8 @@ Tokenizer::operator[] (int index)
   }
 }
 
-int
-Tokenizer::getNumber()
+unsigned
+Tokenizer::count() const
 {
   return numValidTokens;
 }
@@ -374,3 +372,35 @@ Tokenizer::ReUse()
   add_node = &start_node;
   add_index = 0;
 }
+
+#if TS_HAS_TESTS
+#include "TestBox.h"
+
+REGRESSION_TEST(libts_Tokenizer) (RegressionTest * test, int /* atype ATS_UNUSED */, int *pstatus)
+{
+  TestBox box(test, pstatus);
+  box = REGRESSION_TEST_PASSED;
+
+  Tokenizer remap(" \t");
+
+  const char * line = "map https://abc.com https://abc.com @plugin=conf_remap.so @pparam=proxy.config.abc='ABC DEF'";
+
+  const char * toks[] = {
+    "map",
+    "https://abc.com",
+    "https://abc.com",
+    "@plugin=conf_remap.so",
+    "@pparam=proxy.config.abc='ABC DEF'"
+  };
+
+  unsigned count = remap.Initialize(const_cast<char *>(line), (COPY_TOKS | ALLOW_SPACES));
+
+  box.check(count == 5, "check that we parsed 5 tokens");
+  box.check(count == remap.count(), "parsed %u tokens, but now we have %u tokens", count, remap.count());
+
+  for (unsigned i = 0; i < count; ++i) {
+    box.check(strcmp(remap[i], toks[i]) == 0, "expected token %u to be '%s' but found '%s'",
+        count, toks[i], remap[i]);
+  }
+}
+#endif

http://git-wip-us.apache.org/repos/asf/trafficserver/blob/851a52bc/lib/ts/Tokenizer.h
----------------------------------------------------------------------
diff --git a/lib/ts/Tokenizer.h b/lib/ts/Tokenizer.h
index bfb6a4a..81ac7cf 100644
--- a/lib/ts/Tokenizer.h
+++ b/lib/ts/Tokenizer.h
@@ -89,14 +89,14 @@
  *      Using iterFirst, iterNext the running time is O(n), so use
  *      the iteration where possible
  *
- *  getNumber() - returns the number of tokens
+ *  count() - returns the number of tokens
  *
  *  setMaxTokens() - sets the maximum number of tokens.  Once maxTokens
  *                     is reached, delimiters are ignored and the
  *                     last token is rest of the string.  Negative numbers
  *                     mean no limit on the number of tokens
  *
- *  getMaxTokens() - returns maxTokens.  Negative number mean no limit
+ *  getMaxTokens() - returns maxTokens.  UINT_MAX means no limit
  *
  *  Print() - Debugging method to print out the tokens
  *
@@ -104,12 +104,13 @@
 
 #include "ink_apidefs.h"
 
-#define COPY_TOKS         1 << 0
-#define SHARE_TOKS        1 << 1
-#define ALLOW_EMPTY_TOKS  1 << 2
-#define ALLOW_SPACES      1 << 3
+#define COPY_TOKS         (1u << 0)
+#define SHARE_TOKS        (1u << 1)
+#define ALLOW_EMPTY_TOKS  (1u << 2)
+#define ALLOW_SPACES      (1u << 3)
 
 #define TOK_NODE_ELEMENTS  16
+
 struct tok_node
 {
   char *el[TOK_NODE_ELEMENTS];
@@ -122,28 +123,30 @@ struct tok_iter_state
   int index;
 };
 
-
-
 class Tokenizer
 {
 public:
   inkcoreapi Tokenizer(const char *StrOfDelimiters);
-    inkcoreapi ~ Tokenizer();
-  int Initialize(char *str, int opt);
-  inkcoreapi int Initialize(const char *str);   // Automatically sets option to copy
-  const char *operator [] (int index);
-  void setMaxTokens(int max)
-  {
+  inkcoreapi ~Tokenizer();
+
+  unsigned Initialize(char *str, unsigned options);
+  inkcoreapi unsigned Initialize(const char *str);   // Automatically sets option to copy
+  const char * operator[] (unsigned index) const;
+
+  void setMaxTokens(unsigned max) {
     maxTokens = max;
   };
-  int getMaxTokens()
-  {
+
+  unsigned getMaxTokens() const {
     return maxTokens;
   };
-  int getNumber();
+
+  unsigned count() const;
   void Print();                 // Debugging print out
+
   inkcoreapi const char *iterFirst(tok_iter_state * state);
   inkcoreapi const char *iterNext(tok_iter_state * state);
+
 private:
   Tokenizer & operator=(const Tokenizer &);
   Tokenizer(const Tokenizer &);
@@ -152,8 +155,8 @@ private:
   void ReUse();
   char *strOfDelimit;
   tok_node start_node;
-  int numValidTokens;
-  int maxTokens;
+  unsigned numValidTokens;
+  unsigned maxTokens;
   int options;
   bool quoteFound;
 

http://git-wip-us.apache.org/repos/asf/trafficserver/blob/851a52bc/mgmt/api/CfgContextUtils.cc
----------------------------------------------------------------------
diff --git a/mgmt/api/CfgContextUtils.cc b/mgmt/api/CfgContextUtils.cc
index 244d9ce..9d39c4a 100644
--- a/mgmt/api/CfgContextUtils.cc
+++ b/mgmt/api/CfgContextUtils.cc
@@ -63,12 +63,12 @@ string_to_ip_addr_ele(const char *str)
 
   // determine if range or single type
   range_tokens.Initialize(buf, COPY_TOKS);
-  numTokens = range_tokens.getNumber();
+  numTokens = range_tokens.count();
   if (numTokens == 1) {         // SINGLE TYPE
     ele->type = TS_IP_SINGLE;
     // determine if cidr type
     cidr_tokens.Initialize(buf, COPY_TOKS);
-    numTokens = cidr_tokens.getNumber();
+    numTokens = cidr_tokens.count();
     if (numTokens == 1) {       // Single, NON-CIDR TYPE
       ele->ip_a = string_to_ip_addr(str);
     } else {                    // Single, CIDR TYPE
@@ -88,7 +88,7 @@ string_to_ip_addr_ele(const char *str)
 
     // determine if ip's are cidr type; only test if ip_a is cidr, assume both are same
     cidr_tokens.Initialize(ip_a, COPY_TOKS);
-    numTokens = cidr_tokens.getNumber();
+    numTokens = cidr_tokens.count();
     if (numTokens == 1) {       // Range, NON-CIDR TYPE
       ele->ip_a = string_to_ip_addr(ip_a);
       ele->ip_b = string_to_ip_addr(ip_b);
@@ -287,7 +287,7 @@ string_to_ip_addr_list(const char *str_list, const char *delimiter)
     return TS_INVALID_LIST;
 
   tokens.Initialize(str_list);
-  numToks = tokens.getNumber();
+  numToks = tokens.count();
 
   ip_list = TSIpAddrListCreate();
 
@@ -386,7 +386,7 @@ string_to_port_list(const char *str_list, const char *delimiter)
 
   tokens.Initialize(str_list);
 
-  numToks = tokens.getNumber();
+  numToks = tokens.count();
 
   port_list = TSPortListCreate();
 
@@ -454,7 +454,7 @@ string_to_port_ele(const char *str)
   ele = TSPortEleCreate();
   if (tokens.Initialize(copy, COPY_TOKS) > 2)
     goto Lerror;
-  if (tokens.getNumber() == 1) {        // Not a Range of ports
+  if (tokens.count() == 1) {        // Not a Range of ports
     if (!isNumber(str))
       goto Lerror;
     ele->port_a = ink_atoi(str);
@@ -534,7 +534,7 @@ string_to_string_list(const char *str, const char *delimiter)
     return TS_INVALID_LIST;
 
   TSStringList str_list = TSStringListCreate();
-  for (int i = 0; i < tokens.getNumber(); i++) {
+  for (unsigned i = 0; i < tokens.count(); i++) {
     TSStringListEnqueue(str_list, ats_strdup(tokens[i]));
   }
 
@@ -601,7 +601,7 @@ string_to_int_list(const char *str_list, const char *delimiter)
 
   tokens.Initialize(str_list);
 
-  numToks = tokens.getNumber();
+  numToks = tokens.count();
   list = TSIntListCreate();
 
   for (i = 0; i < numToks; i++) {
@@ -640,7 +640,7 @@ string_to_domain_list(const char *str_list, const char *delimiter)
 
   tokens.Initialize(str_list);
 
-  numToks = tokens.getNumber();
+  numToks = tokens.count();
 
   list = TSDomainListCreate();
 

http://git-wip-us.apache.org/repos/asf/trafficserver/blob/851a52bc/mgmt/web2/WebHttpMessage.cc
----------------------------------------------------------------------
diff --git a/mgmt/web2/WebHttpMessage.cc b/mgmt/web2/WebHttpMessage.cc
index 00bd238..a272e1c 100644
--- a/mgmt/web2/WebHttpMessage.cc
+++ b/mgmt/web2/WebHttpMessage.cc
@@ -333,7 +333,7 @@ httpMessage::getModDate()
     delete[]dateStr;
 
     // Now figure out the content length from if modified
-    if (parser->getNumber() > numDateFields + 1) {
+    if (parser->count() > (unsigned)(numDateFields + 1)) {
       clStr = (*parser)[numDateFields + 1];
       equalTok = new Tokenizer("=\r\n");
       equalTok->Initialize(clStr);

http://git-wip-us.apache.org/repos/asf/trafficserver/blob/851a52bc/proxy/InkAPITest.cc
----------------------------------------------------------------------
diff --git a/proxy/InkAPITest.cc b/proxy/InkAPITest.cc
index 16f818e..613eddf 100644
--- a/proxy/InkAPITest.cc
+++ b/proxy/InkAPITest.cc
@@ -7512,16 +7512,6 @@ REGRESSION_TEST(SDK_API_OVERRIDABLE_CONFIGS) (RegressionTest * test, int /* atyp
     *pstatus = REGRESSION_TEST_FAILED;
   }
 
-  Tokenizer whiteTok(" \t");
-
-  char cur_line[] = "map https://abc.com https://abc.com @plugin=conf_remap.so @pparam=proxy.config.abc='ABC DEF'";
-
-  int tok_count = whiteTok.Initialize((char*)cur_line, (SHARE_TOKS | ALLOW_SPACES));
-
-  for (int i=0; i<tok_count; i++) {
-    SDK_RPRINT(test, "Tokenizer::Initialize", "TokenizerTest", TC_PASS, "Token %d : %s\n", i, whiteTok[i]);
-  }
-
   return;
 }
 

Reply via email to