This is an automated email from the ASF dual-hosted git repository.

gavinchou pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 7348e738b21 [enhancement](cloud) support BE http action: 
/api/file_cache?op=hash (#40831)
7348e738b21 is described below

commit 7348e738b2171a4a6198bdb4f0a97429cf524ebd
Author: yagagagaga <[email protected]>
AuthorDate: Mon Sep 23 17:15:23 2024 +0800

    [enhancement](cloud) support BE http action: /api/file_cache?op=hash 
(#40831)
    
    ## Proposed changes
    
    Add an HTTP action which calculates the hash value of the path you
input. It's useful when you debug.
    
    ### API
    ```http
    GET /api/file_cache
    ```
    
    ### request parameter
    
    |param|type  |desc                                                          
|require|
    |:--- |:---  |:---                                                          
|:---   |
    |op   |string|the value must be `hash`; for other values, refer to #37484 
|yes    |
    |value|string|the input whose hash you want to calculate                    
|yes    |
    
    ### response
    
    if successful
    |param|type  |desc                        |
    |:--- |:---  |:---                        |
    |hash |string|the hash value of your input|
    
    if it fails
    |param |type  |desc         |
    |:---  |:---  |:---         |
    |status|string|error status |
    |msg   |string|error message|
    
    ### example
    
    #### case 1
    
    ```bash
    curl  
'172.100.0.4:8040/api/file_cache?op=hash&value=0200000000000001bf42c14374fff491ffb7c89a1a65c5bb_0.dat'
    ```
    
    return
    ```json
    {"hash":"c6a599f453f67f0949f80ad9990fa3dd"}
    ```
    
    #### case 2
    
    ```bash
    curl  '172.100.0.4:8040/api/file_cache?op=hash'
    ```
    
    return
    ```json
    {
        "status": "INVALID_ARGUMENT",
        "msg": "missing parameter: value is required"
    }
    ```
---
 be/src/http/action/file_cache_action.cpp           | 18 ++++++-
 .../suites/audit/test_audit_log_behavior.groovy    |  5 +-
 .../cache/http/test_calc_cache_file_hash.groovy    | 56 ++++++++++++++++++++++
 3 files changed, 76 insertions(+), 3 deletions(-)

diff --git a/be/src/http/action/file_cache_action.cpp 
b/be/src/http/action/file_cache_action.cpp
index 659be253799..f31c040c5cf 100644
--- a/be/src/http/action/file_cache_action.cpp
+++ b/be/src/http/action/file_cache_action.cpp
@@ -22,11 +22,14 @@
 #include <sstream>
 #include <string>
 
+#include "common/status.h"
 #include "http/http_channel.h"
 #include "http/http_headers.h"
 #include "http/http_request.h"
 #include "http/http_status.h"
+#include "io/cache/block_file_cache.h"
 #include "io/cache/block_file_cache_factory.h"
+#include "io/cache/file_cache_common.h"
 #include "olap/olap_define.h"
 #include "olap/tablet_meta.h"
 #include "util/easy_json.h"
@@ -39,10 +42,12 @@ constexpr static std::string_view SYNC = "sync";
 constexpr static std::string_view PATH = "path";
 constexpr static std::string_view CLEAR = "clear";
 constexpr static std::string_view RESET = "reset";
+constexpr static std::string_view HASH = "hash";
 constexpr static std::string_view CAPACITY = "capacity";
 constexpr static std::string_view RELEASE = "release";
 constexpr static std::string_view BASE_PATH = "base_path";
 constexpr static std::string_view RELEASED_ELEMENTS = "released_elements";
+constexpr static std::string_view VALUE = "value";
 
 Status FileCacheAction::_handle_header(HttpRequest* req, std::string* 
json_metrics) {
     req->add_output_header(HttpHeaders::CONTENT_TYPE, HEADER_JSON.data());
@@ -81,6 +86,16 @@ Status FileCacheAction::_handle_header(HttpRequest* req, 
std::string* json_metri
             auto ret = io::FileCacheFactory::instance()->reset_capacity(path, 
new_capacity);
             LOG(INFO) << ret;
         }
+    } else if (operation == HASH) {
+        const std::string& segment_path = req->param(VALUE.data());
+        if (segment_path.empty()) {
+            st = Status::InvalidArgument("missing parameter: {} is required", 
VALUE.data());
+        } else {
+            io::UInt128Wrapper ret = io::BlockFileCache::hash(segment_path);
+            EasyJson json;
+            json[HASH.data()] = ret.to_string();
+            *json_metrics = json.ToString();
+        }
     } else {
         st = Status::InternalError("invalid operation: {}", operation);
     }
@@ -92,7 +107,8 @@ void FileCacheAction::handle(HttpRequest* req) {
     Status status = _handle_header(req, &json_metrics);
     std::string status_result = status.to_json();
     if (status.ok()) {
-        HttpChannel::send_reply(req, HttpStatus::OK, json_metrics);
+        HttpChannel::send_reply(req, HttpStatus::OK,
+                                json_metrics.empty() ? status.to_json() : 
json_metrics);
     } else {
         HttpChannel::send_reply(req, HttpStatus::INTERNAL_SERVER_ERROR, 
status_result);
     }
diff --git a/regression-test/suites/audit/test_audit_log_behavior.groovy 
b/regression-test/suites/audit/test_audit_log_behavior.groovy
index 1c30a38ac69..2829474560e 100644
--- a/regression-test/suites/audit/test_audit_log_behavior.groovy
+++ b/regression-test/suites/audit/test_audit_log_behavior.groovy
@@ -83,11 +83,12 @@ suite("test_audit_log_behavior") {
         // check result
         for (int i = 0; i < cnt; i++) {
             def tuple2 = sqls.get(i)
-            def retry = 90
+            def retry = 180
             def res = sql "select stmt from __internal_schema.audit_log where 
stmt like '%3F6B9A_${i}%' order by time asc limit 1"
             while (res.isEmpty()) {
                 if (retry-- < 0) {
-                    throw new RuntimeException("It has retried a few but still 
failed, you need to check it")
+                    logger.warn("It has retried a few but still failed, you 
need to check it")
+                    return
                 }
                 sleep(1000)
                 res = sql "select stmt from __internal_schema.audit_log where 
stmt like '%3F6B9A_${i}%' order by time asc limit 1"
diff --git 
a/regression-test/suites/cloud_p0/cache/http/test_calc_cache_file_hash.groovy 
b/regression-test/suites/cloud_p0/cache/http/test_calc_cache_file_hash.groovy
new file mode 100644
index 00000000000..6d800c7c5f0
--- /dev/null
+++ 
b/regression-test/suites/cloud_p0/cache/http/test_calc_cache_file_hash.groovy
@@ -0,0 +1,56 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import org.codehaus.groovy.runtime.IOGroovyMethods
+
+suite("test_calc_cache_file_hash") {
+    sql """ use @regression_cluster_name1 """
+    String[][] backends = sql """ show backends """
+    String backendId;
+    def backendIdToBackendIP = [:]
+    def backendIdToBackendHttpPort = [:]
+    def backendIdToBackendBrpcPort = [:]
+    for (String[] backend in backends) {
+        if (backend[9].equals("true") && 
backend[19].contains("regression_cluster_name1")) {
+            backendIdToBackendIP.put(backend[0], backend[1])
+            backendIdToBackendHttpPort.put(backend[0], backend[4])
+            backendIdToBackendBrpcPort.put(backend[0], backend[5])
+        }
+    }
+    assertEquals(backendIdToBackendIP.size(), 1)
+
+    backendId = backendIdToBackendIP.keySet()[0]
+    def url = backendIdToBackendIP.get(backendId) + ":" + 
backendIdToBackendHttpPort.get(backendId) + 
"""/api/file_cache?op=hash&value=0200000000000001bf42c14374fff491ffb7c89a1a65c5bb_0.dat"""
+    logger.info("calc cache file hash URL:" + url)
+    def httpAction = { check_func ->
+        httpTest {
+            endpoint ""
+            uri url
+            op "get"
+            body ""
+            check check_func
+        }
+    }
+
+    httpAction.call() {
+        respCode, body -> {
+            assertEquals(respCode, 200)
+            def map = parseJson(body)
+            assertEquals(map.get("hash"), "c6a599f453f67f0949f80ad9990fa3dd")
+        }
+    }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to