Matthias Brantner has proposed merging lp:~zorba-coders/zorba/tokenize into 
lp:zorba.

Requested reviews:
  William Candillon (wcandillon)
  Paul J. Lucas (paul-lucas)
Related bugs:
  Bug #898074 in Zorba: "fn:tokenize() doesn't stream"
  https://bugs.launchpad.net/zorba/+bug/898074

For more details, see:
https://code.launchpad.net/~zorba-coders/zorba/tokenize/+merge/86626

implementation of string:tokenize function that doesn't accept regular 
expressions but allows for streamable processing of the input (resolves bug 
#898074)
-- 
https://code.launchpad.net/~zorba-coders/zorba/tokenize/+merge/86626
Your team Zorba Coders is subscribed to branch lp:zorba.
=== modified file 'ChangeLog'
--- ChangeLog	2011-12-21 14:40:33 +0000
+++ ChangeLog	2011-12-21 21:44:29 +0000
@@ -9,6 +9,8 @@
     set multiple times via the c++ api).
   * Fixed bug #905050 (setting and getting the context item type via the c++ api)
   * Added createDayTimeDuration, createYearMonthDuration, createDocumentNode, createCommentNode, createPiNode to api's ItemFactory.
+  * Added tokenize function to the string module that allows for streamable tokenization but doesn't have regular expression
+    support.
 
 version 2.1
 

=== modified file 'modules/com/zorba-xquery/www/modules/CMakeLists.txt'
--- modules/com/zorba-xquery/www/modules/CMakeLists.txt	2011-12-21 14:40:33 +0000
+++ modules/com/zorba-xquery/www/modules/CMakeLists.txt	2011-12-21 21:44:29 +0000
@@ -58,7 +58,7 @@
  URI "http://www.zorba-xquery.com/modules/reflection")
 DECLARE_ZORBA_MODULE(FILE schema.xq VERSION 2.0
   URI "http://www.zorba-xquery.com/modules/schema")
 -DECLARE_ZORBA_MODULE(FILE string.xq VERSION 2.0
 +DECLARE_ZORBA_MODULE(FILE string.xq VERSION 2.1
   URI "http://www.zorba-xquery.com/modules/string")
 DECLARE_ZORBA_MODULE(FILE xml.xq VERSION 2.0
   URI "http://www.zorba-xquery.com/modules/xml")

=== modified file 'modules/com/zorba-xquery/www/modules/string.xq'
--- modules/com/zorba-xquery/www/modules/string.xq	2011-08-03 15:12:40 +0000
+++ modules/com/zorba-xquery/www/modules/string.xq	2011-12-21 21:44:29 +0000
@@ -25,7 +25,7 @@
  :)
 module namespace string = "http://www.zorba-xquery.com/modules/string";
 declare namespace ver = "http://www.zorba-xquery.com/options/versioning";
-declare option ver:module-version "2.0";
+declare option ver:module-version "2.1";
 
 (:~
  : This function materializes a streamable string.
@@ -63,3 +63,23 @@
  :
  :)
 declare function string:is-streamable($s as xs:string) as xs:boolean external;
+
+(:~
+ : Returns a sequence of strings constructed by splitting the input wherever the given
+ : separator is found.
+ :
+ : The function is different from fn:tokenize. It doesn't allow
+ : the separator to be a regular expression. This restriction allows for more
+ : performant implementation. Specifically, the function processes
+ : streamable strings as input in a streamable way which is particularly useful
+ : to tokenize huge strings (e.g. if returned by the file module's read-text
+ : function).
+ :
+ : @param $s the input string to tokenize
+ : @param $separator the separator used for splitting the input string $s
+ :
+ : @return a sequence of strings constructed by splitting the input
+ :)
+declare function string:tokenize(
+  $s as xs:string,
+  $separator as xs:string) as xs:string* external;

=== modified file 'src/functions/pregenerated/func_strings.cpp'
--- src/functions/pregenerated/func_strings.cpp	2011-12-21 14:40:33 +0000
+++ src/functions/pregenerated/func_strings.cpp	2011-12-21 21:44:29 +0000
@@ -320,6 +320,16 @@
   return new StringIsStreamableIterator(sctx, loc, argv);
 }
 
+PlanIter_t fn_zorba_string_tokenize::codegen(
+  CompilerCB*,
+  static_context* sctx,
+  const QueryLoc& loc,
+  std::vector<PlanIter_t>& argv,
+  AnnotationHolder& ann) const
+{
+  return new StringTokenizeIterator(sctx, loc, argv);
+}
+
 void populate_context_strings(static_context* sctx)
 {
   {
@@ -890,6 +900,19 @@
 
   }
 
+
+  {
+    
+
+    DECL_WITH_KIND(sctx, fn_zorba_string_tokenize,
+        (createQName("http://www.zorba-xquery.com/modules/string","","tokenize"), 
+        GENV_TYPESYSTEM.STRING_TYPE_ONE, 
+        GENV_TYPESYSTEM.STRING_TYPE_ONE, 
+        GENV_TYPESYSTEM.STRING_TYPE_STAR),
+        FunctionConsts::FN_ZORBA_STRING_TOKENIZE_2);
+
+  }
+
 }
 
 

=== modified file 'src/functions/pregenerated/func_strings.h'
--- src/functions/pregenerated/func_strings.h	2011-12-21 14:40:33 +0000
+++ src/functions/pregenerated/func_strings.h	2011-12-21 21:44:29 +0000
@@ -423,6 +423,19 @@
 };
 
 
+//fn-zorba-string:tokenize
+class fn_zorba_string_tokenize : public function
+{
+public:
+  fn_zorba_string_tokenize(const signature& sig, FunctionConsts::FunctionKind kind)
+    : function(sig, kind) {
+
+}
+
+  CODEGEN_DECL();
+};
+
+
 } //namespace zorba
 
 

=== modified file 'src/functions/pregenerated/function_enum.h'
--- src/functions/pregenerated/function_enum.h	2011-12-21 14:40:33 +0000
+++ src/functions/pregenerated/function_enum.h	2011-12-21 21:44:29 +0000
@@ -371,6 +371,7 @@
   FN_ANALYZE_STRING_3,
   FN_ZORBA_STRING_MATERIALIZE_1,
   FN_ZORBA_STRING_IS_STREAMABLE_1,
+  FN_ZORBA_STRING_TOKENIZE_2,
   FN_ZORBA_XQDOC_XQDOC_1,
   FN_ZORBA_XQDOC_XQDOC_CONTENT_1,
 

=== modified file 'src/runtime/spec/strings/strings.xml'
--- src/runtime/spec/strings/strings.xml	2011-12-21 14:40:33 +0000
+++ src/runtime/spec/strings/strings.xml	2011-12-21 21:44:29 +0000
@@ -729,4 +729,35 @@
 
 </zorba:iterator>
 
+<!--
+/*******************************************************************************
+ * string:tokenize
+********************************************************************************/
+-->
+<zorba:iterator name="StringTokenizeIterator">
+
+  <zorba:description author="Matthias Brantner">
+    string:tokenize
+  </zorba:description>
+
+  <zorba:function>
+    <zorba:signature localname="tokenize" prefix="fn-zorba-string">
+      <zorba:param>xs:string</zorba:param>
+      <zorba:param>xs:string</zorba:param>
+      <zorba:output>xs:string*</zorba:output>
+    </zorba:signature>
+  </zorba:function>
+
+  <zorba:state>
+    <zorba:member type="zstring" name="theSeparator"
+      brief="separator for the tokenization"/>
+    <zorba:member type="std::istream*" name="theIStream"
+      brief="the remaining string (if the input is streamable)"/>
+    <zorba:member type="zstring" name="theInput"
+      brief="the string to tokenize (if the input is not streamable)"/>
+    <zorba:member type="size_t" name="theNextStartPos" defaultValue="0"/>
+  </zorba:state>
+
+</zorba:iterator>
+
 </zorba:iterators>

=== modified file 'src/runtime/strings/pregenerated/strings.cpp'
--- src/runtime/strings/pregenerated/strings.cpp	2011-12-21 14:40:33 +0000
+++ src/runtime/strings/pregenerated/strings.cpp	2011-12-21 21:44:29 +0000
@@ -830,6 +830,48 @@
 // </StringIsStreamableIterator>
 
 
+// <StringTokenizeIterator>
+const char* StringTokenizeIterator::class_name_str = "StringTokenizeIterator";
+StringTokenizeIterator::class_factory<StringTokenizeIterator>
+StringTokenizeIterator::g_class_factory;
+
+const serialization::ClassVersion 
+StringTokenizeIterator::class_versions[] ={{ 1, 0x000905, false}};
+
+const int StringTokenizeIterator::class_versions_count =
+sizeof(StringTokenizeIterator::class_versions)/sizeof(struct serialization::ClassVersion);
+
+void StringTokenizeIterator::accept(PlanIterVisitor& v) const {
+  v.beginVisit(*this);
+
+  std::vector<PlanIter_t>::const_iterator lIter = theChildren.begin();
+  std::vector<PlanIter_t>::const_iterator lEnd = theChildren.end();
+  for ( ; lIter != lEnd; ++lIter ){
+    (*lIter)->accept(v);
+  }
+
+  v.endVisit(*this);
+}
+
+StringTokenizeIterator::~StringTokenizeIterator() {}
+
+StringTokenizeIteratorState::StringTokenizeIteratorState() {}
+
+StringTokenizeIteratorState::~StringTokenizeIteratorState() {}
+
+
+void StringTokenizeIteratorState::init(PlanState& planState) {
+  PlanIteratorState::init(planState);
+  theNextStartPos = 0;
+}
+
+void StringTokenizeIteratorState::reset(PlanState& planState) {
+  PlanIteratorState::reset(planState);
+  theNextStartPos = 0;
+}
+// </StringTokenizeIterator>
+
+
 
 }
 

=== modified file 'src/runtime/strings/pregenerated/strings.h'
--- src/runtime/strings/pregenerated/strings.h	2011-12-21 14:40:33 +0000
+++ src/runtime/strings/pregenerated/strings.h	2011-12-21 21:44:29 +0000
@@ -1075,6 +1075,58 @@
 };
 
 
+/**
+ * 
+ *    string:tokenize
+ *  
+ * Author: Matthias Brantner
+ */
+class StringTokenizeIteratorState : public PlanIteratorState
+{
+public:
+  zstring theSeparator; //separator for the tokenization
+  std::istream* theIStream; //the remaining string (if the input is streamable)
+  zstring theInput; //the string to tokenize (if the input is not streamable)
+  size_t theNextStartPos; //
+
+  StringTokenizeIteratorState();
+
+  ~StringTokenizeIteratorState();
+
+  void init(PlanState&);
+  void reset(PlanState&);
+};
+
+class StringTokenizeIterator : public NaryBaseIterator<StringTokenizeIterator, StringTokenizeIteratorState>
+{ 
+public:
+  SERIALIZABLE_CLASS(StringTokenizeIterator);
+
+  SERIALIZABLE_CLASS_CONSTRUCTOR2T(StringTokenizeIterator,
+    NaryBaseIterator<StringTokenizeIterator, StringTokenizeIteratorState>);
+
+  void serialize( ::zorba::serialization::Archiver& ar)
+  {
+    serialize_baseclass(ar,
+    (NaryBaseIterator<StringTokenizeIterator, StringTokenizeIteratorState>*)this);
+  }
+
+  StringTokenizeIterator(
+    static_context* sctx,
+    const QueryLoc& loc,
+    std::vector<PlanIter_t>& children)
+    : 
+    NaryBaseIterator<StringTokenizeIterator, StringTokenizeIteratorState>(sctx, loc, children)
+  {}
+
+  virtual ~StringTokenizeIterator();
+
+  void accept(PlanIterVisitor& v) const;
+
+  bool nextImpl(store::Item_t& result, PlanState& aPlanState) const;
+};
+
+
 }
 #endif
 /*

=== modified file 'src/runtime/strings/strings_impl.cpp'
--- src/runtime/strings/strings_impl.cpp	2011-12-21 14:40:33 +0000
+++ src/runtime/strings/strings_impl.cpp	2011-12-21 21:44:29 +0000
@@ -140,6 +140,7 @@
       p = ec;
 
       if ( utf8::read( *state->theStream, ec ) == utf8::npos )
+      {
         if ( state->theStream->good() ) {
           //
           // If read() failed but the stream state is good, it means that an
@@ -165,6 +166,7 @@
             zerr::ZOSE0003_STREAM_READ_FAILURE, ERROR_LOC( loc )
           );
         }
+      }
       state->theResult.clear();
       state->theResult.push_back( utf8::next_char( p ) );
       
@@ -2284,5 +2286,117 @@
   STACK_END(state);
 }
 
+/**
+ *______________________________________________________________________
+ *
+ * http://www.zorba-xquery.com/modules/string
+ * string:tokenize
+ */
+bool StringTokenizeIterator::nextImpl(
+    store::Item_t& result,
+    PlanState& planState) const
+{
+  store::Item_t item;
+  size_t lNewPos = 0;
+  zstring lToken;
+  zstring lPartialMatch;
+
+  StringTokenizeIteratorState* state;
+  DEFAULT_STACK_INIT(StringTokenizeIteratorState, state, planState);
+
+  // init phase, get input string and tokens
+  consumeNext(item, theChildren[0].getp(), planState);
+
+  if (item->isStreamable())
+  {
+    state->theIStream = &item->getStream();
+  }
+  else
+  {
+    state->theIStream = 0;
+    item->getStringValue2(state->theInput);
+  }
+
+  consumeNext(item, theChildren[1].getp(), planState);
+
+  item->getStringValue2(state->theSeparator);
+
+  // working phase, do the tokenization
+  if (state->theIStream)
+  {
+    while ( !state->theIStream->eof() )
+    {
+      utf8::encoded_char_type ec;
+      memset( ec, '\0' , sizeof(ec) );
+      utf8::storage_type *p;
+      p = ec;
+
+      if ( utf8::read( *state->theIStream, ec ) != utf8::npos )
+      {
+        assert(state->theIStream->good()); // otherwise, we got an invalid byte
+
+        if (state->theSeparator.compare(lNewPos, 1, ec) == 0)
+        {
+          if (++lNewPos == state->theSeparator.length())
+          {
+            GENV_ITEMFACTORY->createString(result, lToken);
+            STACK_PUSH(true, state);
+          }
+          else
+          {
+            lPartialMatch.append(ec);
+          }
+        }
+        else
+        {
+          lToken.append(lPartialMatch);
+          lToken.append(ec);
+        }
+      }
+      else
+      {
+        if (!lToken.empty())
+        {
+          GENV_ITEMFACTORY->createString(result, lToken);
+          STACK_PUSH(true, state);
+        }
+        break;
+      }
+    }
+  }
+  else
+  {
+    while (true)
+    {
+      if (state->theNextStartPos == zstring::npos)
+      {
+        break;
+      }
+
+      lNewPos =
+        state->theInput.find(state->theSeparator, state->theNextStartPos);
+      if (lNewPos != zstring::npos)
+      {
+        zstring lSubStr = state->theInput.substr(
+            state->theNextStartPos,
+            lNewPos - state->theNextStartPos);
+        GENV_ITEMFACTORY->createString(result, lSubStr);
+        state->theNextStartPos =
+          lNewPos==state->theInput.length() - state->theSeparator.length()
+          ? zstring::npos
+          : lNewPos + state->theSeparator.length();
+      }
+      else
+      {
+        zstring lSubStr = state->theInput.substr(state->theNextStartPos);
+        GENV_ITEMFACTORY->createString(result, lSubStr);
+        state->theNextStartPos = zstring::npos;
+      }
+      STACK_PUSH(true, state);
+    }
+  }
+
+  STACK_END(state);
+}
 } // namespace zorba
 /* vim:set et sw=2 ts=2: */

=== modified file 'src/runtime/visitors/pregenerated/planiter_visitor.h'
--- src/runtime/visitors/pregenerated/planiter_visitor.h	2011-12-21 14:40:33 +0000
+++ src/runtime/visitors/pregenerated/planiter_visitor.h	2011-12-21 21:44:29 +0000
@@ -582,6 +582,8 @@
 
     class StringIsStreamableIterator;
 
+    class StringTokenizeIterator;
+
     class XQDocIterator;
 
     class XQDocContentIterator;
@@ -1423,6 +1425,9 @@
     virtual void beginVisit ( const StringIsStreamableIterator& ) = 0;
     virtual void endVisit   ( const StringIsStreamableIterator& ) = 0;
 
+    virtual void beginVisit ( const StringTokenizeIterator& ) = 0;
+    virtual void endVisit   ( const StringTokenizeIterator& ) = 0;
+
     virtual void beginVisit ( const XQDocIterator& ) = 0;
     virtual void endVisit   ( const XQDocIterator& ) = 0;
 

=== modified file 'src/runtime/visitors/pregenerated/printer_visitor.cpp'
--- src/runtime/visitors/pregenerated/printer_visitor.cpp	2011-12-21 14:40:33 +0000
+++ src/runtime/visitors/pregenerated/printer_visitor.cpp	2011-12-21 21:44:29 +0000
@@ -3961,6 +3961,20 @@
 // </StringIsStreamableIterator>
 
 
+// <StringTokenizeIterator>
+void PrinterVisitor::beginVisit ( const StringTokenizeIterator& a) {
+  thePrinter.startBeginVisit("StringTokenizeIterator", ++theId);
+  printCommons( &a, theId );
+  thePrinter.endBeginVisit( theId );
+}
+
+void PrinterVisitor::endVisit ( const StringTokenizeIterator& ) {
+  thePrinter.startEndVisit();
+  thePrinter.endEndVisit();
+}
+// </StringTokenizeIterator>
+
+
 // <XQDocIterator>
 void PrinterVisitor::beginVisit ( const XQDocIterator& a) {
   thePrinter.startBeginVisit("XQDocIterator", ++theId);

=== modified file 'src/runtime/visitors/pregenerated/printer_visitor.h'
--- src/runtime/visitors/pregenerated/printer_visitor.h	2011-12-21 14:40:33 +0000
+++ src/runtime/visitors/pregenerated/printer_visitor.h	2011-12-21 21:44:29 +0000
@@ -876,6 +876,9 @@
     void beginVisit( const StringIsStreamableIterator& );
     void endVisit  ( const StringIsStreamableIterator& );
 
+    void beginVisit( const StringTokenizeIterator& );
+    void endVisit  ( const StringTokenizeIterator& );
+
     void beginVisit( const XQDocIterator& );
     void endVisit  ( const XQDocIterator& );
 

=== added file 'test/rbkt/ExpQueryResults/zorba/string/tokenize01.xml.res'
--- test/rbkt/ExpQueryResults/zorba/string/tokenize01.xml.res	1970-01-01 00:00:00 +0000
+++ test/rbkt/ExpQueryResults/zorba/string/tokenize01.xml.res	2011-12-21 21:44:29 +0000
@@ -0,0 +1,1 @@
+a d a d

=== added file 'test/rbkt/ExpQueryResults/zorba/string/tokenize02.xml.res'
--- test/rbkt/ExpQueryResults/zorba/string/tokenize02.xml.res	1970-01-01 00:00:00 +0000
+++ test/rbkt/ExpQueryResults/zorba/string/tokenize02.xml.res	2011-12-21 21:44:29 +0000
@@ -0,0 +1,1 @@
+a a

=== added file 'test/rbkt/ExpQueryResults/zorba/string/tokenize03.xml.res'
--- test/rbkt/ExpQueryResults/zorba/string/tokenize03.xml.res	1970-01-01 00:00:00 +0000
+++ test/rbkt/ExpQueryResults/zorba/string/tokenize03.xml.res	2011-12-21 21:44:29 +0000
@@ -0,0 +1,1 @@
+ d  d

=== added file 'test/rbkt/ExpQueryResults/zorba/string/tokenize04.xml.res'
--- test/rbkt/ExpQueryResults/zorba/string/tokenize04.xml.res	1970-01-01 00:00:00 +0000
+++ test/rbkt/ExpQueryResults/zorba/string/tokenize04.xml.res	2011-12-21 21:44:29 +0000
@@ -0,0 +1,1 @@
+abcd abcd

=== added file 'test/rbkt/Queries/zorba/string/token01.txt'
--- test/rbkt/Queries/zorba/string/token01.txt	1970-01-01 00:00:00 +0000
+++ test/rbkt/Queries/zorba/string/token01.txt	2011-12-21 21:44:29 +0000
@@ -0,0 +1,1 @@
+abcd
\ No newline at end of file

=== added file 'test/rbkt/Queries/zorba/string/token02.txt'
--- test/rbkt/Queries/zorba/string/token02.txt	1970-01-01 00:00:00 +0000
+++ test/rbkt/Queries/zorba/string/token02.txt	2011-12-21 21:44:29 +0000
@@ -0,0 +1,1 @@
+abc
\ No newline at end of file

=== added file 'test/rbkt/Queries/zorba/string/token03.txt'
--- test/rbkt/Queries/zorba/string/token03.txt	1970-01-01 00:00:00 +0000
+++ test/rbkt/Queries/zorba/string/token03.txt	2011-12-21 21:44:29 +0000
@@ -0,0 +1,1 @@
+bcd
\ No newline at end of file

=== added file 'test/rbkt/Queries/zorba/string/token04.txt'
--- test/rbkt/Queries/zorba/string/token04.txt	1970-01-01 00:00:00 +0000
+++ test/rbkt/Queries/zorba/string/token04.txt	2011-12-21 21:44:29 +0000
@@ -0,0 +1,1 @@
+abcd
\ No newline at end of file

=== added file 'test/rbkt/Queries/zorba/string/tokenize01.xq'
--- test/rbkt/Queries/zorba/string/tokenize01.xq	1970-01-01 00:00:00 +0000
+++ test/rbkt/Queries/zorba/string/tokenize01.xq	2011-12-21 21:44:29 +0000
@@ -0,0 +1,5 @@
+import module namespace f = "http://expath.org/ns/file";
+import module namespace s = "http://www.zorba-xquery.com/modules/string";
+
+s:tokenize(f:read-text(fn:resolve-uri("token01.txt")), "bc"),
+s:tokenize(s:materialize(f:read-text(fn:resolve-uri("token01.txt"))), "bc")

=== added file 'test/rbkt/Queries/zorba/string/tokenize02.xq'
--- test/rbkt/Queries/zorba/string/tokenize02.xq	1970-01-01 00:00:00 +0000
+++ test/rbkt/Queries/zorba/string/tokenize02.xq	2011-12-21 21:44:29 +0000
@@ -0,0 +1,5 @@
+import module namespace f = "http://expath.org/ns/file";
+import module namespace s = "http://www.zorba-xquery.com/modules/string";
+
+s:tokenize(f:read-text(fn:resolve-uri("token02.txt")), "bc"),
+s:tokenize(s:materialize(f:read-text(fn:resolve-uri("token02.txt"))), "bc")

=== added file 'test/rbkt/Queries/zorba/string/tokenize03.xq'
--- test/rbkt/Queries/zorba/string/tokenize03.xq	1970-01-01 00:00:00 +0000
+++ test/rbkt/Queries/zorba/string/tokenize03.xq	2011-12-21 21:44:29 +0000
@@ -0,0 +1,5 @@
+import module namespace f = "http://expath.org/ns/file";
+import module namespace s = "http://www.zorba-xquery.com/modules/string";
+
+s:tokenize(f:read-text(fn:resolve-uri("token03.txt")), "bc"),
+s:tokenize(s:materialize(f:read-text(fn:resolve-uri("token03.txt"))), "bc")

=== added file 'test/rbkt/Queries/zorba/string/tokenize04.xq'
--- test/rbkt/Queries/zorba/string/tokenize04.xq	1970-01-01 00:00:00 +0000
+++ test/rbkt/Queries/zorba/string/tokenize04.xq	2011-12-21 21:44:29 +0000
@@ -0,0 +1,5 @@
+import module namespace f = "http://expath.org/ns/file";
+import module namespace s = "http://www.zorba-xquery.com/modules/string";
+
+s:tokenize(f:read-text(fn:resolve-uri("token04.txt")), "f"),
+s:tokenize(s:materialize(f:read-text(fn:resolve-uri("token04.txt"))), "f")

-- 
Mailing list: https://launchpad.net/~zorba-coders
Post to     : zorba-coders@lists.launchpad.net
Unsubscribe : https://launchpad.net/~zorba-coders
More help   : https://help.launchpad.net/ListHelp

Reply via email to