This is an automated email from the ASF dual-hosted git repository.

twice pushed a commit to branch unstable
in repository https://gitbox.apache.org/repos/asf/kvrocks.git


The following commit(s) were added to refs/heads/unstable by this push:
     new 012d5295 Support format options for `JSON.GET` (#1840)
012d5295 is described below

commit 012d5295d8232007c9a6252a31ff8e1c8e1d6e50
Author: Twice <[email protected]>
AuthorDate: Sat Oct 21 09:37:15 2023 +0900

    Support format options for `JSON.GET` (#1840)
---
 src/commands/cmd_json.cc                 | 48 +++++++++++++++++++++++-
 src/config/config.cc                     |  1 +
 src/config/config.h                      |  3 ++
 src/types/json.h                         | 57 +++++++++++++++++++++++++----
 src/types/redis_json.cc                  |  9 +++--
 tests/cppunit/types/json_test.cc         | 63 +++++++++++++++++++++-----------
 tests/gocase/unit/type/json/json_test.go |  9 +++++
 7 files changed, 155 insertions(+), 35 deletions(-)

diff --git a/src/commands/cmd_json.cc b/src/commands/cmd_json.cc
index 0a622dd7..0d5e6aae 100644
--- a/src/commands/cmd_json.cc
+++ b/src/commands/cmd_json.cc
@@ -18,7 +18,10 @@
  *
  */
 
+#include <algorithm>
+
 #include "commander.h"
+#include "commands/command_parser.h"
 #include "server/redis_reply.h"
 #include "server/server.h"
 #include "types/redis_json.h"
@@ -40,16 +43,57 @@ class CommandJsonSet : public Commander {
 
 class CommandJsonGet : public Commander {
  public:
+  Status Parse(const std::vector<std::string> &args) override {
+    CommandParser parser(args, 2);
+
+    while (parser.Good()) {
+      if (parser.EatEqICase("INDENT")) {
+        auto indent = GET_OR_RET(parser.TakeStr());
+
+        if (std::any_of(indent.begin(), indent.end(), [](char v) { return v != ' '; })) {
+          return {Status::RedisParseErr, "Currently only all-space INDENT is supported"};
+        }
+
+        indent_size_ = indent.size();
+      } else if (parser.EatEqICase("NEWLINE")) {
+        new_line_chars_ = GET_OR_RET(parser.TakeStr());
+      } else if (parser.EatEqICase("SPACE")) {
+        auto space = GET_OR_RET(parser.TakeStr());
+
+        if (space != "" && space != " ") {
+          return {Status::RedisParseErr, "Currently only SPACE ' ' is supported"};
+        }
+
+        spaces_after_colon_ = !space.empty();
+      } else {
+        break;
+      }
+    }
+
+    while (parser.Good()) {
+      paths_.push_back(GET_OR_RET(parser.TakeStr()));
+    }
+
+    return Status::OK();
+  }
+
   Status Execute(Server *svr, Connection *conn, std::string *output) override {
     redis::Json json(svr->storage, conn->GetNamespace());
 
     JsonValue result;
-    auto s = json.Get(args_[1], {args_.begin() + 2, args_.end()}, &result);
+    auto s = json.Get(args_[1], paths_, &result);
     if (!s.ok()) return {Status::RedisExecErr, s.ToString()};
 
-    *output = redis::BulkString(result.Dump());
+    *output = redis::BulkString(GET_OR_RET(result.Print(indent_size_, spaces_after_colon_, new_line_chars_)));
     return Status::OK();
   }
+
+ private:
+  uint8_t indent_size_ = 0;
+  bool spaces_after_colon_ = false;
+  std::string new_line_chars_;
+
+  std::vector<std::string> paths_;
 };
 
 class CommandJsonArrAppend : public Commander {
diff --git a/src/config/config.cc b/src/config/config.cc
index d3549e59..d7ee57e4 100644
--- a/src/config/config.cc
+++ b/src/config/config.cc
@@ -160,6 +160,7 @@ Config::Config() {
       {"persist-cluster-nodes-enabled", false, new YesNoField(&persist_cluster_nodes_enabled, true)},
       {"redis-cursor-compatible", false, new YesNoField(&redis_cursor_compatible, false)},
       {"repl-namespace-enabled", false, new YesNoField(&repl_namespace_enabled, false)},
+      {"json-max-nesting-depth", false, new IntField(&json_max_nesting_depth, 1024, 0, INT_MAX)},
 
       /* rocksdb options */
       {"rocksdb.compression", false,
diff --git a/src/config/config.h b/src/config/config.h
index d540906f..d78ff5e6 100644
--- a/src/config/config.h
+++ b/src/config/config.h
@@ -161,6 +161,9 @@ struct Config {
   std::set<std::string> profiling_sample_commands;
   bool profiling_sample_all_commands = false;
 
+  // json
+  int json_max_nesting_depth = 1024;
+
   struct RocksDB {
     int block_size;
     bool cache_index_and_filter_blocks;
diff --git a/src/types/json.h b/src/types/json.h
index 04be4150..eb5d7878 100644
--- a/src/types/json.h
+++ b/src/types/json.h
@@ -22,19 +22,25 @@
 
 #include <jsoncons/json.hpp>
 #include <jsoncons/json_error.hpp>
+#include <jsoncons/json_options.hpp>
 #include <jsoncons_ext/jsonpath/json_query.hpp>
+#include <jsoncons_ext/jsonpath/jsonpath_error.hpp>
+#include <limits>
 
-#include "jsoncons_ext/jsonpath/jsonpath_error.hpp"
 #include "status.h"
 
 struct JsonValue {
   JsonValue() = default;
   explicit JsonValue(jsoncons::basic_json<char> value) : value(std::move(value)) {}
 
-  static StatusOr<JsonValue> FromString(std::string_view str) {
+  static StatusOr<JsonValue> FromString(std::string_view str, int max_nesting_depth = std::numeric_limits<int>::max()) {
     jsoncons::json val;
+
+    jsoncons::json_options options;
+    options.max_nesting_depth(max_nesting_depth);
+
     try {
-      val = jsoncons::json::parse(str);
+      val = jsoncons::json::parse(str, options);
     } catch (const jsoncons::ser_error &e) {
       return {Status::NotOK, e.what()};
     }
@@ -42,15 +48,50 @@ struct JsonValue {
     return JsonValue(std::move(val));
   }
 
-  std::string Dump() const {
+  StatusOr<std::string> Dump(int max_nesting_depth = std::numeric_limits<int>::max()) const {
     std::string res;
-    Dump(&res);
+    GET_OR_RET(Dump(&res, max_nesting_depth));
     return res;
   }
 
-  void Dump(std::string *buffer) const {
-    jsoncons::compact_json_string_encoder encoder{*buffer};
-    value.dump(encoder);
+  Status Dump(std::string *buffer, int max_nesting_depth = std::numeric_limits<int>::max()) const {
+    jsoncons::json_options options;
+    options.max_nesting_depth(max_nesting_depth);
+
+    jsoncons::compact_json_string_encoder encoder{*buffer, options};
+    std::error_code ec;
+    value.dump(encoder, ec);
+    if (ec) {
+      return {Status::NotOK, ec.message()};
+    }
+
+    return Status::OK();
+  }
+
+  StatusOr<std::string> Print(uint8_t indent_size = 0, bool spaces_after_colon = false,
+                              const std::string &new_line_chars = "") const {
+    std::string res;
+    GET_OR_RET(Print(&res, indent_size, spaces_after_colon, new_line_chars));
+    return res;
+  }
+
+  Status Print(std::string *buffer, uint8_t indent_size = 0, bool spaces_after_colon = false,
+               const std::string &new_line_chars = "") const {
+    jsoncons::json_options options;
+    options.indent_size(indent_size);
+    options.spaces_around_colon(spaces_after_colon ? jsoncons::spaces_option::space_after
+                                                   : jsoncons::spaces_option::no_spaces);
+    options.spaces_around_comma(jsoncons::spaces_option::no_spaces);
+    options.new_line_chars(new_line_chars);
+
+    jsoncons::json_string_encoder encoder{*buffer, options};
+    std::error_code ec;
+    value.dump(encoder, ec);
+    if (ec) {
+      return {Status::NotOK, ec.message()};
+    }
+
+    return Status::OK();
   }
 
   Status Set(std::string_view path, JsonValue &&new_value) {
diff --git a/src/types/redis_json.cc b/src/types/redis_json.cc
index 0cb5f9a3..291d14b5 100644
--- a/src/types/redis_json.cc
+++ b/src/types/redis_json.cc
@@ -35,7 +35,10 @@ rocksdb::Status Json::write(Slice ns_key, JsonMetadata 
*metadata, const JsonValu
 
   std::string val;
   metadata->Encode(&val);
-  json_val.Dump(&val);
+  auto s = json_val.Dump(&val, storage_->GetConfig()->json_max_nesting_depth);
+  if (!s) {
+    return rocksdb::Status::InvalidArgument("Failed to encode JSON into storage: " + s.Msg());
+  }
 
   batch->Put(metadata_cf_handle_, ns_key, val);
 
@@ -55,7 +58,7 @@ rocksdb::Status Json::Set(const std::string &user_key, const 
std::string &path,
   if (s.IsNotFound()) {
     if (path != "$") return rocksdb::Status::InvalidArgument("new objects must 
be created at the root");
 
-    auto json_res = JsonValue::FromString(value);
+    auto json_res = JsonValue::FromString(value, storage_->GetConfig()->json_max_nesting_depth);
     if (!json_res) return rocksdb::Status::InvalidArgument(json_res.Msg());
     auto json_val = *std::move(json_res);
 
@@ -67,7 +70,7 @@ rocksdb::Status Json::Set(const std::string &user_key, const 
std::string &path,
   if (metadata.format != JsonStorageFormat::JSON)
     return rocksdb::Status::NotSupported("JSON storage format not supported");
 
-  auto new_res = JsonValue::FromString(value);
+  auto new_res = JsonValue::FromString(value, storage_->GetConfig()->json_max_nesting_depth);
   if (!new_res) return rocksdb::Status::InvalidArgument(new_res.Msg());
   auto new_val = *std::move(new_res);
 
diff --git a/tests/cppunit/types/json_test.cc b/tests/cppunit/types/json_test.cc
index b559fae9..3f3db030 100644
--- a/tests/cppunit/types/json_test.cc
+++ b/tests/cppunit/types/json_test.cc
@@ -47,37 +47,37 @@ TEST_F(RedisJsonTest, Set) {
 
   ASSERT_TRUE(json_->Set(key_, "$", "  \t{\n  }  ").ok());
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), "{}");
+  ASSERT_EQ(json_val_.Dump().GetValue(), "{}");
 
   ASSERT_TRUE(json_->Set(key_, "$", "1").ok());
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), "1");
+  ASSERT_EQ(json_val_.Dump().GetValue(), "1");
 
   ASSERT_TRUE(json_->Set(key_, "$", "[1, 2, 3]").ok());
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), "[1,2,3]");
+  ASSERT_EQ(json_val_.Dump().GetValue(), "[1,2,3]");
 
   ASSERT_TRUE(json_->Set(key_, "$[1]", "233").ok());
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), "[1,233,3]");
+  ASSERT_EQ(json_val_.Dump().GetValue(), "[1,233,3]");
 
   ASSERT_TRUE(json_->Set(key_, "$", "[[1,2],[3,4],[5,6]]").ok());
   ASSERT_TRUE(json_->Set(key_, "$[*][1]", R"("x")").ok());
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"([[1,"x"],[3,"x"],[5,"x"]])");
+  ASSERT_EQ(json_val_.Dump().GetValue(), R"([[1,"x"],[3,"x"],[5,"x"]])");
 
   ASSERT_TRUE(json_->Set(key_, "$", R"({"x":1,"y":2, "z":3})").ok());
   ASSERT_TRUE(json_->Set(key_, "$.x", "[1,2,3]").ok());
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"({"x":[1,2,3],"y":2,"z":3})");
+  ASSERT_EQ(json_val_.Dump().GetValue(), R"({"x":[1,2,3],"y":2,"z":3})");
 
   ASSERT_TRUE(json_->Set(key_, "$.y", R"({"a":"xxx","x":2})").ok());
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"({"x":[1,2,3],"y":{"a":"xxx","x":2},"z":3})");
+  ASSERT_EQ(json_val_.Dump().GetValue(), 
R"({"x":[1,2,3],"y":{"a":"xxx","x":2},"z":3})");
 
   ASSERT_TRUE(json_->Set(key_, "$..x", "true").ok());
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"({"x":true,"y":{"a":"xxx","x":true},"z":3})");
+  ASSERT_EQ(json_val_.Dump().GetValue(), 
R"({"x":true,"y":{"a":"xxx","x":true},"z":3})");
 
   ASSERT_THAT(json_->Set(key_, "...", "1").ToString(), 
MatchesRegex("Invalid.*"));
   ASSERT_THAT(json_->Set(key_, "[", "1").ToString(), 
MatchesRegex("Invalid.*"));
@@ -85,12 +85,12 @@ TEST_F(RedisJsonTest, Set) {
   ASSERT_TRUE(json_->Set(key_, "$", "[[1,2],[[5,6],4]] ").ok());
   ASSERT_TRUE(json_->Set(key_, "$..[0]", "{}").ok());
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"([{},[{},4]])");
+  ASSERT_EQ(json_val_.Dump().GetValue(), R"([{},[{},4]])");
 
   ASSERT_TRUE(json_->Del(key_).ok());
   ASSERT_TRUE(json_->Set(key_, "$", "[{ }, [ ]]").ok());
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), "[{},[]]");
+  ASSERT_EQ(json_val_.Dump().GetValue(), "[{},[]]");
   ASSERT_THAT(json_->Set(key_, "$[1]", "invalid").ToString(), 
MatchesRegex(".*syntax_error.*"));
   ASSERT_TRUE(json_->Del(key_).ok());
 }
@@ -98,26 +98,44 @@ TEST_F(RedisJsonTest, Set) {
 TEST_F(RedisJsonTest, Get) {
   ASSERT_TRUE(json_->Set(key_, "$", R"({"x":[1,2,{"z":3}],"y":[]})").ok());
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"({"x":[1,2,{"z":3}],"y":[]})");
+  ASSERT_EQ(json_val_.Dump().GetValue(), R"({"x":[1,2,{"z":3}],"y":[]})");
   ASSERT_TRUE(json_->Get(key_, {"$"}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"([{"x":[1,2,{"z":3}],"y":[]}])");
+  ASSERT_EQ(json_val_.Dump().GetValue(), R"([{"x":[1,2,{"z":3}],"y":[]}])");
   ASSERT_TRUE(json_->Get(key_, {"$.y"}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"([[]])");
+  ASSERT_EQ(json_val_.Dump().GetValue(), R"([[]])");
   ASSERT_TRUE(json_->Get(key_, {"$.y[0]"}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"([])");
+  ASSERT_EQ(json_val_.Dump().GetValue(), R"([])");
   ASSERT_TRUE(json_->Get(key_, {"$.z"}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"([])");
+  ASSERT_EQ(json_val_.Dump().GetValue(), R"([])");
   ASSERT_THAT(json_->Get(key_, {"[[["}, &json_val_).ToString(), 
MatchesRegex("Invalid.*"));
 
   ASSERT_TRUE(json_->Set(key_, "$", R"([[[1,2],[3]],[4,5]])").ok());
   ASSERT_TRUE(json_->Get(key_, {"$..[0]"}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"([[[1,2],[3]],[1,2],1,3,4])");
+  ASSERT_EQ(json_val_.Dump().GetValue(), R"([[[1,2],[3]],[1,2],1,3,4])");
   ASSERT_TRUE(json_->Get(key_, {"$[0][1][0]", "$[1][1]"}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"({"$[0][1][0]":[3],"$[1][1]":[5]})");
+  ASSERT_EQ(json_val_.Dump().GetValue(), 
R"({"$[0][1][0]":[3],"$[1][1]":[5]})");
 
   ASSERT_TRUE(json_->Set(key_, "$", 
R"({"x":{"y":1},"y":[2,{"z":3}],"z":{"a":{"x":4}}})").ok());
   ASSERT_TRUE(json_->Get(key_, {"$..x", "$..y", "$..z"}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), 
R"({"$..x":[{"y":1},4],"$..y":[[2,{"z":3}],1],"$..z":[{"a":{"x":4}},3]})");
+  ASSERT_EQ(json_val_.Dump().GetValue(), 
R"({"$..x":[{"y":1},4],"$..y":[[2,{"z":3}],1],"$..z":[{"a":{"x":4}},3]})");
+}
+
+TEST_F(RedisJsonTest, Print) {
+  auto json = *JsonValue::FromString("[1,2,3]");
+  ASSERT_EQ(json.Print().GetValue(), "[1,2,3]");
+  ASSERT_EQ(json.Print(1).GetValue(), "[ 1, 2, 3]");
+  ASSERT_EQ(json.Print(0, true).GetValue(), "[1,2,3]");
+  ASSERT_EQ(json.Print(0, false, std::string("\n")).GetValue(), "[\n1,\n2,\n3\n]");
+  ASSERT_EQ(json.Print(1, false, std::string("\n")).GetValue(), "[\n 1,\n 2,\n 3\n]");
+  ASSERT_EQ(json.Print(1, true, std::string("\n")).GetValue(), "[\n 1,\n 2,\n 3\n]");
+
+  json = *JsonValue::FromString(R"({"a":1      ,"b":2})");
+  ASSERT_EQ(json.Print().GetValue(), R"({"a":1,"b":2})");
+  ASSERT_EQ(json.Print(1).GetValue(), R"({ "a":1, "b":2})");
+  ASSERT_EQ(json.Print(0, true).GetValue(), R"({"a": 1,"b": 2})");
+  ASSERT_EQ(json.Print(0, false, std::string("\n")).GetValue(), "{\n\"a\":1,\n\"b\":2\n}");
+  ASSERT_EQ(json.Print(1, false, std::string("\n")).GetValue(), "{\n \"a\":1,\n \"b\":2\n}");
+  ASSERT_EQ(json.Print(1, true, std::string("\n")).GetValue(), "{\n \"a\": 1,\n \"b\": 2\n}");
 }
 
 TEST_F(RedisJsonTest, ArrAppend) {
@@ -130,7 +148,7 @@ TEST_F(RedisJsonTest, ArrAppend) {
   ASSERT_EQ(res.size(), 1);
   ASSERT_EQ(res[0], 0);
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"({"x":1,"y":[]})");
+  ASSERT_EQ(json_val_.Dump().GetValue(), R"({"x":1,"y":[]})");
   res.clear();
 
   ASSERT_TRUE(json_->Set(key_, "$", R"({"x":[1,2,{"z":3}],"y":[]})").ok());
@@ -138,14 +156,14 @@ TEST_F(RedisJsonTest, ArrAppend) {
   ASSERT_EQ(res.size(), 1);
   ASSERT_EQ(res[0], 4);
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"({"x":[1,2,{"z":3},1],"y":[]})");
+  ASSERT_EQ(json_val_.Dump().GetValue(), R"({"x":[1,2,{"z":3},1],"y":[]})");
   res.clear();
 
   ASSERT_TRUE(json_->ArrAppend(key_, "$..y", {"1", "2", "3"}, &res).ok());
   ASSERT_EQ(res.size(), 1);
   ASSERT_EQ(res[0], 3);
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), R"({"x":[1,2,{"z":3},1],"y":[1,2,3]})");
+  ASSERT_EQ(json_val_.Dump().GetValue(), 
R"({"x":[1,2,{"z":3},1],"y":[1,2,3]})");
   res.clear();
 
   ASSERT_TRUE(json_->Set(key_, "$.x[2]", 
R"({"x":[1,2,{"z":3,"y":[]}],"y":[{"y":1}]})").ok());
@@ -157,6 +175,7 @@ TEST_F(RedisJsonTest, ArrAppend) {
   ASSERT_EQ(res[2], 4);
   ASSERT_EQ(res[3], 6);
   ASSERT_TRUE(json_->Get(key_, {}, &json_val_).ok());
-  ASSERT_EQ(json_val_.Dump(), 
R"({"x":[1,2,{"x":[1,2,{"y":[1,2,3],"z":3}],"y":[{"y":1},1,2,3]},1],"y":[1,2,3,1,2,3]})");
+  ASSERT_EQ(json_val_.Dump().GetValue(),
+            
R"({"x":[1,2,{"x":[1,2,{"y":[1,2,3],"z":3}],"y":[{"y":1},1,2,3]},1],"y":[1,2,3,1,2,3]})");
   res.clear();
 }
diff --git a/tests/gocase/unit/type/json/json_test.go 
b/tests/gocase/unit/type/json/json_test.go
index f59746e6..14ac3372 100644
--- a/tests/gocase/unit/type/json/json_test.go
+++ b/tests/gocase/unit/type/json/json_test.go
@@ -56,6 +56,15 @@ func TestJson(t *testing.T) {
                require.Equal(t, rdb.Do(ctx, "JSON.GET", "a", "$..x", 
"$..y").Val(), `{"$..x":[1,{"y":2}],"$..y":[{"x":{"y":2},"y":3},3,2]}`)
        })
 
+       t.Run("JSON.GET with options", func(t *testing.T) {
+               require.NoError(t, rdb.Do(ctx, "JSON.SET", "a", "$", ` {"x":1, "y":2} `).Err())
+               require.Equal(t, rdb.Do(ctx, "JSON.GET", "a", "INDENT", " ").Val(), `{ "x":1, "y":2}`)
+               require.Equal(t, rdb.Do(ctx, "JSON.GET", "a", "INDENT", " ", "SPACE", " ").Val(), `{ "x": 1, "y": 2}`)
+               require.Equal(t, rdb.Do(ctx, "JSON.GET", "a", "NEWLINE", "\n").Val(), "{\n\"x\":1,\n\"y\":2\n}")
+               require.Equal(t, rdb.Do(ctx, "JSON.GET", "a", "NEWLINE", "\n", "INDENT", " ", "SPACE", " ").Val(), "{\n \"x\": 1,\n \"y\": 2\n}")
+               require.Equal(t, rdb.Do(ctx, "JSON.GET", "a", "INDENT", " ", "$").Val(), `[ {  "x":1,  "y":2 }]`)
+       })
+
        t.Run("JSON.ARRAPPEND basics", func(t *testing.T) {
                require.NoError(t, rdb.Do(ctx, "SET", "a", `1`).Err())
                require.Error(t, rdb.Do(ctx, "JSON.ARRAPPEND", "a", "$", 
`1`).Err())

Reply via email to