common/Authorization.cpp |    6 ++++--
 common/Protocol.hpp      |   29 +++++++++++++++++++++--------
 2 files changed, 25 insertions(+), 10 deletions(-)

New commits:
commit 8aa9b37b8601a01751369112742d012cc6800ecd
Author:     Pranam Lashkari <lpra...@collabora.com>
AuthorDate: Sat Nov 23 22:23:11 2019 +0530
Commit:     Michael Meeks <michael.me...@collabora.com>
CommitDate: Mon May 4 15:11:26 2020 +0200

    Removed the tokenize method that used a regex.
    Added a new tokenize method that takes a const char* delimiter.
    
    Change-Id: Id1c4e89e5418d66aaf348ff4d8c3855f80fb4656
    Reviewed-on: https://gerrit.libreoffice.org/c/online/+/83574
    Tested-by: Jenkins CollaboraOffice <jenkinscollaboraoff...@gmail.com>
    Reviewed-by: Michael Meeks <michael.me...@collabora.com>

diff --git a/common/Authorization.cpp b/common/Authorization.cpp
index 138f98889..cb605fd41 100644
--- a/common/Authorization.cpp
+++ b/common/Authorization.cpp
@@ -53,9 +53,11 @@ void Authorization::authorizeRequest(Poco::Net::HTTPRequest& 
request) const
             //   Authorization: Basic ....
             //   X-Something-Custom: Huh
             // Regular expression evaluates and finds "\n\r" and tokenizes 
accordingly
-            std::vector<std::string> tokens(LOOLProtocol::tokenize(_data, 
std::regex(R"(\n\r)"), /*skipEmpty =*/ true));
-            for (const auto& token : tokens)
+            StringVector tokens(LOOLProtocol::tokenize(_data, "\n\r"));
+            for (auto it = tokens.begin(); it != tokens.end(); ++it)
             {
+                std::string token = tokens.getParam(*it);
+
                 size_t separator = token.find_first_of(':');
                 if (separator != std::string::npos)
                 {
diff --git a/common/Protocol.hpp b/common/Protocol.hpp
index 72a9aaa34..1327c5fc5 100644
--- a/common/Protocol.hpp
+++ b/common/Protocol.hpp
@@ -138,16 +138,29 @@ namespace LOOLProtocol
         return tokenize(s.data(), s.size(), delimiter);
     }
 
-    /// Tokenize according to the regex, potentially skip empty tokens.
     inline
-    std::vector<std::string> tokenize(const std::string& s, const std::regex& 
pattern, bool skipEmpty = false)
+    StringVector tokenize(const std::string& s, const char* delimiter)
     {
-        std::vector<std::string> tokens;
-        if (skipEmpty)
-            std::copy_if(std::sregex_token_iterator(s.begin(), s.end(), 
pattern, -1), std::sregex_token_iterator(), std::back_inserter(tokens), 
[](std::string in) { return !in.empty(); });
-        else
-            std::copy(std::sregex_token_iterator(s.begin(), s.end(), pattern, 
-1), std::sregex_token_iterator(), std::back_inserter(tokens));
-        return tokens;
+        std::vector<StringToken> tokens;
+        if (s.size() == 0)
+        {
+            return StringVector(std::string(), {});
+        }
+
+        size_t start = 0;
+        size_t end = s.find(delimiter, start);
+
+        tokens.emplace_back(start, end - start);
+        start = end + std::strlen(delimiter);
+
+        while(end != std::string::npos)
+        {
+            end = s.find(delimiter, start);
+            tokens.emplace_back(start, end - start);
+            start = end + std::strlen(delimiter);
+        }
+
+        return StringVector(s, tokens);
     }
 
     inline
_______________________________________________
Libreoffice-commits mailing list
libreoffice-comm...@lists.freedesktop.org
https://lists.freedesktop.org/mailman/listinfo/libreoffice-commits

Reply via email to