This is an automated email from the ASF dual-hosted git repository.

yiguolei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 2664d1cffb [chore](vec) Make this copy constructor of StringRef explicit (#25337)
2664d1cffb is described below

commit 2664d1cffb9352460f069a474a65adf148f3c8f4
Author: Jerry Hu <[email protected]>
AuthorDate: Thu Oct 12 01:12:46 2023 -0500

    [chore](vec) Make this copy constructor of StringRef explicit (#25337)
---
 be/src/vec/columns/column_object.cpp            | 2 +-
 be/src/vec/common/string_ref.h                  | 3 ++-
 be/src/vec/exec/format/json/new_json_reader.cpp | 4 ++--
 be/src/vec/json/parse2column.cpp                | 2 +-
 4 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/be/src/vec/columns/column_object.cpp b/be/src/vec/columns/column_object.cpp
index e66b9aa968..f3571c8ba2 100644
--- a/be/src/vec/columns/column_object.cpp
+++ b/be/src/vec/columns/column_object.cpp
@@ -610,7 +610,7 @@ void ColumnObject::try_insert_from(const IColumn& src, size_t n) {
 
 void ColumnObject::try_insert(const Field& field) {
     const auto& object = field.get<const VariantMap&>();
-    phmap::flat_hash_set<StringRef, StringRefHash> inserted;
+    phmap::flat_hash_set<std::string> inserted;
     size_t old_size = size();
     for (const auto& [key_str, value] : object) {
         PathInData key(key_str);
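
For context (the same key-type change appears in parse2column.cpp at the end of this diff): once the constructor is explicit, inserting the std::string key_str into a phmap::flat_hash_set<StringRef, StringRefHash> no longer compiles through an implicit conversion. The patch resolves that by letting the set own its keys; a rough sketch of the two options, using only names from the hunk above:

    // What the patch does: the set copies and owns each key, so nothing
    // in it can dangle when the surrounding map entries go away.
    phmap::flat_hash_set<std::string> inserted;
    inserted.insert(key_str);

    // The non-owning alternative would keep the old key type, but needs an
    // explicit conversion and requires key_str to outlive the set:
    //     phmap::flat_hash_set<StringRef, StringRefHash> inserted;
    //     inserted.insert(StringRef {key_str});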
diff --git a/be/src/vec/common/string_ref.h b/be/src/vec/common/string_ref.h
index abc745b1db..587924b45e 100644
--- a/be/src/vec/common/string_ref.h
+++ b/be/src/vec/common/string_ref.h
@@ -193,7 +193,8 @@ struct StringRef {
     StringRef(const unsigned char* data_, size_t size_)
             : StringRef(reinterpret_cast<const char*>(data_), size_) {}
 
-    StringRef(const std::string& s) : data(s.data()), size(s.size()) {}
+    /// Make this copy constructor explicit to prevent inadvertently constructing a StringRef from a temporary std::string variable.
+    explicit StringRef(const std::string& s) : data(s.data()), size(s.size()) {}
     explicit StringRef(const char* str) : data(str), size(strlen(str)) {}
 
     std::string to_string() const { return std::string(data, size); }
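
To illustrate the hazard the explicit constructor closes, here is a minimal, self-contained sketch; MiniRef is a stripped-down stand-in for StringRef, not the struct above:

    #include <cstddef>
    #include <string>

    struct MiniRef {
        const char* data;
        size_t size;
        // Mirrors the patched converting constructor: explicit.
        explicit MiniRef(const std::string& s) : data(s.data()), size(s.size()) {}
    };

    std::string make_name() { return "k1"; }  // returns a temporary

    int main() {
        // Before this patch, the commented line compiled via the implicit
        // conversion and left r.data pointing into a destroyed temporary:
        //     MiniRef r = make_name();   // now ill-formed: ctor is explicit
        std::string owned = make_name();
        MiniRef r {owned};  // explicit; `owned` visibly outlives `r`
        return r.size == owned.size() ? 0 : 1;
    }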
diff --git a/be/src/vec/exec/format/json/new_json_reader.cpp b/be/src/vec/exec/format/json/new_json_reader.cpp
index 230dfb4c80..53fbff8e5d 100644
--- a/be/src/vec/exec/format/json/new_json_reader.cpp
+++ b/be/src/vec/exec/format/json/new_json_reader.cpp
@@ -192,7 +192,7 @@ Status NewJsonReader::init_reader(
         }
     }
     for (int i = 0; i < _file_slot_descs.size(); ++i) {
-        _slot_desc_index[_file_slot_descs[i]->col_name()] = i;
+        _slot_desc_index[StringRef {_file_slot_descs[i]->col_name()}] = i;
     }
     return Status::OK();
 }
@@ -1003,7 +1003,7 @@ Status NewJsonReader::_simdjson_init_reader() {
     }
     _ondemand_json_parser = std::make_unique<simdjson::ondemand::parser>();
     for (int i = 0; i < _file_slot_descs.size(); ++i) {
-        _slot_desc_index[_file_slot_descs[i]->col_name()] = i;
+        _slot_desc_index[StringRef {_file_slot_descs[i]->col_name()}] = i;
     }
     _simdjson_ondemand_padding_buffer.resize(_padded_size);
     _simdjson_ondemand_unscape_padding_buffer.resize(_padded_size);
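
Call sites that key a container on StringRef now spell out the conversion, as in the two hunks above. A sketch with assumed types (the declaration of _slot_desc_index is not part of this diff, and col_name() is assumed to return a reference to storage that outlives the index):

    // Assumed shape of the member, for illustration only:
    //     std::unordered_map<StringRef, int, StringRefHash> _slot_desc_index;
    //
    // StringRef {col_name()} borrows the column name's bytes rather than
    // copying them, so the explicit form makes that lifetime dependency
    // visible at the call site:
    _slot_desc_index[StringRef {_file_slot_descs[i]->col_name()}] = i;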
diff --git a/be/src/vec/json/parse2column.cpp b/be/src/vec/json/parse2column.cpp
index 1ae5d0d591..134e2e934b 100644
--- a/be/src/vec/json/parse2column.cpp
+++ b/be/src/vec/json/parse2column.cpp
@@ -210,7 +210,7 @@ void parse_json_to_variant(IColumn& column, const char* src, size_t length,
     }
     auto& [paths, values] = *result;
     assert(paths.size() == values.size());
-    phmap::flat_hash_set<StringRef, StringRefHash> paths_set;
+    phmap::flat_hash_set<std::string> paths_set;
     size_t num_rows = column_object.size();
     for (size_t i = 0; i < paths.size(); ++i) {
         FieldInfo field_info;


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
