This is an automated email from the ASF dual-hosted git repository.

yiguolei pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git

commit 000b81f0c7f99a3e6712c20d9e0678bdf48b6006
Author: xy720 <[email protected]>
AuthorDate: Thu Feb 29 11:09:14 2024 +0800

    [chore](docs) fix some typo in english docs in inverted index (#31497)
    
    Issue Number: close #30051
---
 docs/en/docs/data-table/index/inverted-index.md | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/docs/en/docs/data-table/index/inverted-index.md b/docs/en/docs/data-table/index/inverted-index.md
index a8dfccaf5cb..3e76e325b91 100644
--- a/docs/en/docs/data-table/index/inverted-index.md
+++ b/docs/en/docs/data-table/index/inverted-index.md
@@ -203,7 +203,7 @@ SELECT * FROM table_name WHERE op_type IN ('add', 'delete');
 
 To evaluate the actual effects of tokenization or to tokenize a block of text, the `tokenize` function can be utilized.
 ```sql
-mysql> SELECT TOKENIZE('武汉长江大桥','"parser"="chinese","parser_mode"="fine_grained");
+mysql> SELECT TOKENIZE('武汉长江大桥','"parser"="chinese","parser_mode"="fine_grained"');
 +-----------------------------------------------------------------------------------+
 | tokenize('武汉长江大桥', '"parser"="chinese","parser_mode"="fine_grained"')       |
 +-----------------------------------------------------------------------------------+
@@ -211,7 +211,7 @@ mysql> SELECT TOKENIZE('武汉市长江大桥','"parser"="chinese","parser_mode"="f
 +--------------------------------------------------------------------------------------+
 1 row in set (0.02 sec)
 
-mysql> SELECT TOKENIZE('武汉市长江大桥','"parser"="chinese","parser_mode"="fine_grained");
+mysql> SELECT TOKENIZE('武汉市长江大桥','"parser"="chinese","parser_mode"="fine_grained"');
 +--------------------------------------------------------------------------------------+
 | tokenize('武汉市长江大桥', '"parser"="chinese","parser_mode"="fine_grained"')       |
 +--------------------------------------------------------------------------------------+
@@ -219,7 +219,7 @@ mysql> SELECT TOKENIZE('武汉市长江大桥','"parser"="chinese","parser_mode"
 +--------------------------------------------------------------------------------------+
 1 row in set (0.02 sec)
 
-mysql> SELECT TOKENIZE('武汉市长江大桥','"parser"="chinese","parser_mode"="coarse_grained");
+mysql> SELECT TOKENIZE('武汉市长江大桥','"parser"="chinese","parser_mode"="coarse_grained"');
 +----------------------------------------------------------------------------------------+
 | tokenize('武汉市长江大桥', '"parser"="chinese","parser_mode"="coarse_grained"')       |
 +----------------------------------------------------------------------------------------+
@@ -227,7 +227,7 @@ mysql> SELECT TOKENIZE('武汉市长江大桥','"parser"="chinese","parser_mode"
 +----------------------------------------------------------------------------------------+
 1 row in set (0.02 sec)
 
-mysql> SELECT TOKENIZE('I love CHINA','"parser"="english");
+mysql> SELECT TOKENIZE('I love CHINA','"parser"="english"');
 +------------------------------------------------+
 | tokenize('I love CHINA', '"parser"="english"') |
 +------------------------------------------------+
@@ -235,7 +235,7 @@ mysql> SELECT TOKENIZE('I love CHINA','"parser"="english");
 +------------------------------------------------+
 1 row in set (0.02 sec)
 
-mysql> SELECT TOKENIZE('I love CHINA 我爱我的祖国','"parser"="unicode");
+mysql> SELECT TOKENIZE('I love CHINA 我爱我的祖国','"parser"="unicode"');
 +-------------------------------------------------------------------+
 | tokenize('I love CHINA 我爱我的祖国', '"parser"="unicode"')       |
 +-------------------------------------------------------------------+


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]