Gitweb links:

...log 
http://git.netsurf-browser.org/libhubbub.git/shortlog/d0aa58c3647593044286a565294e94700c9a50a0
...commit 
http://git.netsurf-browser.org/libhubbub.git/commit/d0aa58c3647593044286a565294e94700c9a50a0
...tree 
http://git.netsurf-browser.org/libhubbub.git/tree/d0aa58c3647593044286a565294e94700c9a50a0

The branch, master has been updated
       via  d0aa58c3647593044286a565294e94700c9a50a0 (commit)
      from  873ed6e236f7669afd3ef44259c34addc6dc95b6 (commit)

Those revisions listed above that are new to this repository have
not appeared on any other notification email, so we list those
revisions in full below.

- Log -----------------------------------------------------------------
commitdiff 
http://git.netsurf-browser.org/libhubbub.git/commit/?id=d0aa58c3647593044286a565294e94700c9a50a0
commit d0aa58c3647593044286a565294e94700c9a50a0
Author: John-Mark Bell <[email protected]>
Commit: John-Mark Bell <[email protected]>

    tests/tokenizer[23]: fix handling of CHARACTER tokens
    
    Where a CHARACTER token is emitted but the expected data is shorter
    than the data in the token, we want to consume the expected data
    and then process the remaining token data as if it were emitted
    separately. Sadly this didn't happen as we never initialised the
    replacement token correctly. Make this so, and also tell the
    compiler that the recursive call is actually a tail call so it
    can optimise it appropriately.

diff --git a/test/tokeniser2.c b/test/tokeniser2.c
index f38f7ab..f468d1c 100644
--- a/test/tokeniser2.c
+++ b/test/tokeniser2.c
@@ -438,7 +438,7 @@ hubbub_error token_handler(const hubbub_token *token, void 
*pw)
                        /* Expected token only contained part of the data
                         * Calculate how much is left, then try again with
                         * the next expected token */
-                       hubbub_token t;
+                       hubbub_token t = *token;
 
                        t.type = HUBBUB_TOKEN_CHARACTER;
                        t.data.character.ptr += len;
@@ -446,7 +446,7 @@ hubbub_error token_handler(const hubbub_token *token, void 
*pw)
 
                        ctx->char_off = 0;
 
-                       token_handler(&t, pw);
+                       return token_handler(&t, pw);
                } else if (strlen(expstr + ctx->char_off) >
                                token->data.character.len) {
                        /* Tokeniser output only contained part of the data
diff --git a/test/tokeniser3.c b/test/tokeniser3.c
index 416ff5d..eb921ce 100644
--- a/test/tokeniser3.c
+++ b/test/tokeniser3.c
@@ -447,7 +447,7 @@ hubbub_error token_handler(const hubbub_token *token, void 
*pw)
                        /* Expected token only contained part of the data
                         * Calculate how much is left, then try again with
                         * the next expected token */
-                       hubbub_token t;
+                       hubbub_token t = *token;
 
                        t.type = HUBBUB_TOKEN_CHARACTER;
                        t.data.character.ptr += len;
@@ -455,7 +455,7 @@ hubbub_error token_handler(const hubbub_token *token, void 
*pw)
 
                        ctx->char_off = 0;
 
-                       token_handler(&t, pw);
+                       return token_handler(&t, pw);
                } else if (strlen(expstr + ctx->char_off) >
                                token->data.character.len) {
                        /* Tokeniser output only contained part of the data


-----------------------------------------------------------------------

Summary of changes:
 test/tokeniser2.c |    4 ++--
 test/tokeniser3.c |    4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/test/tokeniser2.c b/test/tokeniser2.c
index f38f7ab..f468d1c 100644
--- a/test/tokeniser2.c
+++ b/test/tokeniser2.c
@@ -438,7 +438,7 @@ hubbub_error token_handler(const hubbub_token *token, void 
*pw)
                        /* Expected token only contained part of the data
                         * Calculate how much is left, then try again with
                         * the next expected token */
-                       hubbub_token t;
+                       hubbub_token t = *token;
 
                        t.type = HUBBUB_TOKEN_CHARACTER;
                        t.data.character.ptr += len;
@@ -446,7 +446,7 @@ hubbub_error token_handler(const hubbub_token *token, void 
*pw)
 
                        ctx->char_off = 0;
 
-                       token_handler(&t, pw);
+                       return token_handler(&t, pw);
                } else if (strlen(expstr + ctx->char_off) >
                                token->data.character.len) {
                        /* Tokeniser output only contained part of the data
diff --git a/test/tokeniser3.c b/test/tokeniser3.c
index 416ff5d..eb921ce 100644
--- a/test/tokeniser3.c
+++ b/test/tokeniser3.c
@@ -447,7 +447,7 @@ hubbub_error token_handler(const hubbub_token *token, void 
*pw)
                        /* Expected token only contained part of the data
                         * Calculate how much is left, then try again with
                         * the next expected token */
-                       hubbub_token t;
+                       hubbub_token t = *token;
 
                        t.type = HUBBUB_TOKEN_CHARACTER;
                        t.data.character.ptr += len;
@@ -455,7 +455,7 @@ hubbub_error token_handler(const hubbub_token *token, void 
*pw)
 
                        ctx->char_off = 0;
 
-                       token_handler(&t, pw);
+                       return token_handler(&t, pw);
                } else if (strlen(expstr + ctx->char_off) >
                                token->data.character.len) {
                        /* Tokeniser output only contained part of the data


-- 
HTML5 parser library
_______________________________________________
netsurf-commits mailing list -- [email protected]
To unsubscribe send an email to [email protected]

Reply via email to