This is an automated email from the ASF dual-hosted git repository.

zhangyonglun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-shardingsphere-benchmark.git


The following commit(s) were added to refs/heads/master by this push:
     new 92d5ad5  update refactor
92d5ad5 is described below

commit 92d5ad5074164961ec66620ee5f23d160205daf8
Author: tuohai666 <zhangyong...@apache.org>
AuthorDate: Fri Feb 14 16:07:10 2020 +0800

    update refactor
---
 report/script/gen_jtl_data.py                      | 122 +++++++++------------
 report/script/gen_report.sh                        |   2 +-
 .../{output/update.jtl => input/mysql_all.jtl}     |   0
 .../{output/select.jtl => input/mysql_select.jtl}  |   0
 .../insert.jtl => input/sharding-jdbc_all.jtl}     |   0
 .../delete.jtl => input/sharding-jdbc_select.jtl}  |   0
 .../update.jtl => input/sharding-proxy_all.jtl}    |   0
 .../update.jtl => input/sharding-proxy_select.jtl} |   0
 report/script/jtl_json_test/test.json              |   2 +-
 report/script/output/mysql_all.jtl                 |   5 +
 report/script/output/mysql_select.jtl              |   5 +
 report/script/output/sharding-jdbc_all.jtl         |   5 +
 report/script/output/sharding-jdbc_select.jtl      |   5 +
 report/script/output/sharding-proxy_all.jtl        |   5 +
 report/script/output/sharding-proxy_select.jtl     |   5 +
 15 files changed, 82 insertions(+), 74 deletions(-)

diff --git a/report/script/gen_jtl_data.py b/report/script/gen_jtl_data.py
index 91663dd..667b3d3 100644
--- a/report/script/gen_jtl_data.py
+++ b/report/script/gen_jtl_data.py
@@ -3,75 +3,80 @@ import os,sys
 import json
 import time
 from datetime import datetime
-
+from collections import OrderedDict
 def gen_desc(res_json):
     """
     replace your scene description as following examples. 
     """
     desc_json={
        "mysqlVerison":"5.7.24",
-       "tableNumber":"single table(e.g:t_test)",
-       "sceneDescription":"This is a model for connecting directly to a database through a proxy.We used id, k, c, pad fields in the table.Including a database and a table",
+       "tableDescription":"id  bigint(20)  primary key,\nk int(11),\nc char(120),\npad  char(60)\n",
+       "shardingRule":"tables:\n  tbl:\n    actualDataNodes: ds_${0..3}.tbl${0..1023}\n    tableStrategy:\n      inline:\n        shardingColumn: k\n        algorithmExpression: tbl${k % 1024}\n    keyGenerator:\n        type: SNOWFLAKE\n        column: id\ndefaultDatabaseStrategy:\n  inline:\n    shardingColumn: id\n    algorithmExpression: ds_${id % 4}",
+       "masterSlaveRule":"None",
+       "encryptRule":"None",
+       "INSERT+UPDATE+DELETE":{
+           "SqlExample":"INSERT INTO tbl(k, c, pad) VALUES(1, '###-###-###', '###-###');\nUPDATE tbl SET c='####-####-####', pad='####-####' WHERE id=?;\nDELETE FROM tbl WHERE id=?",
+           "ComparativeType": "INSERT+UPDATE+DELETE"
+       },
        "SELECT":{
-           "SqlExample":"SELECT id,k FROM t_test WHERE id = # AND k = #",
+           "SqlExample":"SELECT id,k FROM tbl ignore index(`PRIMARY`) WHERE id=? AND k=?",
            "ComparativeType": "SELECT"
        },
-       "INSERT":{
-           "SqlExample":"INSERT INTO t_test(k,c,pad) VALUES(#,#,#)",
-           "ComparativeType": "INSERT"
-       },
-       "DELETE":{
-           "SqlExample":"DELETE FROM t_test WHERE id = # AND k = #",
-           "ComparativeType": "DELETE"
-       },
-       "UPDATE":{
-           "SqlExample":"UPDATE t_test SET k = # WHERE id = # AND k = #",
-           "ComparativeType": "UPDATE"
-       }
     }
     res_json['DESC']=desc_json
     return res_json
-
-def gen_insert(insert_file_name,res_json):
-    if not os.path.exists(insert_file_name):
-        os.system(r"touch {}".format(insert_file_name))
-    with open(insert_file_name) as f:
+    
+def gen_select(input_file_name,res_json):
+    #if not os.path.exists(select_file_name):
+    #    os.system(r"touch {}".format(select_file_name))
+    mysql_file_name = '{}/{}'.format(input_file_name, "mysql_select.jtl")
+    sharding_proxy_file_name = '{}/{}'.format(input_file_name, "sharding-proxy_select.jtl")
+    sharding_jdbc_file_name = '{}/{}'.format(input_file_name, "sharding-jdbc_select.jtl")
+    with open(sharding_proxy_file_name) as f:
         for line in f:
             line = line.strip("\n")
             if line != "":
-                res_json['INSERT'][0]['data'].append(json.loads(line))
-    
-def gen_select(select_file_name,res_json):
-    if not os.path.exists(select_file_name):
-        os.system(r"touch {}".format(select_file_name))
-    with open(select_file_name) as f:
+                res_json['SELECT'][0]['data'].append(json.loads(line))
+    with open(sharding_jdbc_file_name) as f:
         for line in f:
             line = line.strip("\n")
             if line != "":
-                res_json['SELECT'][0]['data'].append(json.loads(line))
-def gen_update(update_file_name,res_json):
-    if not os.path.exists(update_file_name):
-        os.system(r"touch {}".format(update_file_name))
-    with open(update_file_name) as f:
+                res_json['SELECT'][1]['data'].append(json.loads(line))
+    with open(mysql_file_name) as f:
         for line in f:
             line = line.strip("\n")
             if line != "":
-                res_json['UPDATE'][0]['data'].append(json.loads(line))
-def gen_delete(delete_file_name,res_json):
-    if not os.path.exists(delete_file_name):
-        os.system(r"touch {}".format(delete_file_name))
-    with open(delete_file_name) as f:
+                res_json['SELECT'][2]['data'].append(json.loads(line))
+
+def gen_all(input_file_name,res_json):
+    mysql_file_name = '{}/{}'.format(input_file_name, "mysql_all.jtl")
+    sharding_proxy_file_name = '{}/{}'.format(input_file_name, "sharding-proxy_all.jtl")
+    sharding_jdbc_file_name = '{}/{}'.format(input_file_name, "sharding-jdbc_all.jtl")
+    with open(sharding_proxy_file_name) as f:
         for line in f:
             line = line.strip("\n")
             if line != "":
-                res_json['DELETE'][0]['data'].append(json.loads(line))
+                res_json['INSERT+UPDATE+DELETE'][0]['data'].append(json.loads(line))
+    with open(sharding_jdbc_file_name) as f:
+        for line in f:
+            line = line.strip("\n")
+            if line != "":
+                res_json['INSERT+UPDATE+DELETE'][1]['data'].append(json.loads(line))
+    with open(mysql_file_name) as f:
+        for line in f:
+            line = line.strip("\n")
+            if line != "":
+                res_json['INSERT+UPDATE+DELETE'][2]['data'].append(json.loads(line))
+
 def cur_file_dir():
     path = sys.path[0]
     if os.path.isdir(path):
         return path
     elif os.path.isfile(path):
         return os.path.dirname(path)
-def gen_json(select_file_name, insert_file_name, update_file_name, delete_file_name, out_name):
+
+def gen_json(input_file_name, out_name):
+    res_json = OrderedDict()
     res_json = {
         "SELECT":[
              {"type":"Sharding-Proxy",
@@ -84,7 +89,7 @@ def gen_json(select_file_name, insert_file_name, update_file_name, delete_file_n
              "data":[]
              }
         ],
-        "INSERT":[
+        "INSERT+UPDATE+DELETE":[
              {"type":"Sharding-Proxy",
               "data":[]
              },
@@ -95,36 +100,12 @@ def gen_json(select_file_name, insert_file_name, update_file_name, delete_file_n
              "data":[]
              }
          ],
-        "UPDATE":[
-             {"type":"Sharding-Proxy",
-              "data":[]
-             },
-             {"type":"Sharding-JDBC",
-             "data":[]
-             },
-             {"type":"MySQL", 
-             "data":[]
-             }
-        ],
-        "DELETE":[
-             {"type":"Sharding-Proxy",
-              "data":[]
-             },
-             {"type":"Sharding-JDBC",
-             "data":[]
-             },
-             {"type":"MySQL", 
-             "data":[]
-             }
-        ],
         "DESC":{
          }
     }
     res_json=gen_desc(res_json)
-    gen_insert(insert_file_name,res_json)
-    gen_select(select_file_name,res_json)
-    gen_update(update_file_name,res_json)
-    gen_delete(delete_file_name,res_json)
+    gen_select(input_file_name,res_json)
+    gen_all(input_file_name,res_json)
     save_dir = cur_file_dir()
     newfile='%s/%s'%(save_dir,out_name)
     with open(newfile,'w') as f:
@@ -132,10 +113,7 @@ def gen_json(select_file_name, insert_file_name, update_file_name, delete_file_n
 
      
 if __name__ == '__main__':
-    select_file_name = sys.argv[1]
-    insert_file_name = sys.argv[2]
-    update_file_name = sys.argv[3]
-    delete_file_name = sys.argv[4]
-    out_name = sys.argv[5]
-    gen_json(select_file_name, insert_file_name, update_file_name, delete_file_name, out_name)
+    input_file_name = sys.argv[1]
+    out_name = sys.argv[2]
+    gen_json(input_file_name, out_name)
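[Editor's note on the refactor above: gen_jtl_data.py no longer takes one JTL file per statement type. It now reads a single input directory holding six per-product files (mysql / sharding-jdbc / sharding-proxy, each in a _select and an _all variant) and appends every JSON line to the SELECT or INSERT+UPDATE+DELETE series, in the order Sharding-Proxy, Sharding-JDBC, MySQL. A minimal standalone sketch of that flow follows; it is not the committed script, and the directory layout and file names are assumed from this diff.]

    import json
    import os
    from collections import OrderedDict

    # Series order mirrors the lists declared in gen_json: Sharding-Proxy, Sharding-JDBC, MySQL.
    PRODUCTS = [("sharding-proxy", "Sharding-Proxy"),
                ("sharding-jdbc", "Sharding-JDBC"),
                ("mysql", "MySQL")]

    def load_jtl(path):
        """Read one .jtl file of newline-delimited JSON records."""
        records = []
        with open(path) as f:
            for line in f:
                line = line.strip()
                if line:
                    records.append(json.loads(line))
        return records

    def build_result(input_dir):
        # "*_select.jtl" files feed the SELECT series, "*_all.jtl" files feed INSERT+UPDATE+DELETE.
        res = OrderedDict()
        for key, suffix in (("SELECT", "select"), ("INSERT+UPDATE+DELETE", "all")):
            res[key] = [{"type": label,
                         "data": load_jtl(os.path.join(input_dir, "{}_{}.jtl".format(product, suffix)))}
                        for product, label in PRODUCTS]
        return res

    if __name__ == '__main__':
        # assumes the six .jtl files sit in ./output, as in this commit
        print(json.dumps(build_result("output"), indent=2))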
    
diff --git a/report/script/gen_report.sh b/report/script/gen_report.sh
index 02cd7c2..269dee8 100644
--- a/report/script/gen_report.sh
+++ b/report/script/gen_report.sh
@@ -21,5 +21,5 @@ else
 fi
 
 #parse history jtl data to json data to ui, such as the following examples
-python gen_jtl_data.py output/select.jtl output/insert.jtl output/update.jtl output/delete.jtl jtl_json_test/test.json
+python gen_jtl_data.py output jtl_json_test/test.json
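[Editor's note: the script now takes just two arguments, the input directory and the output JSON name. A hedged sketch of the equivalent direct call from Python, assuming gen_jtl_data.py is importable from the script directory and output/ holds the six .jtl files added in this commit:]

    import gen_jtl_data

    # Same effect as the single gen_report.sh line above: read every *_select.jtl and
    # *_all.jtl under output/ and write the aggregated JSON relative to the script directory.
    gen_jtl_data.gen_json("output", "jtl_json_test/test.json")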
 
diff --git a/report/script/output/update.jtl b/report/script/input/mysql_all.jtl
similarity index 100%
copy from report/script/output/update.jtl
copy to report/script/input/mysql_all.jtl
diff --git a/report/script/output/select.jtl b/report/script/input/mysql_select.jtl
similarity index 100%
rename from report/script/output/select.jtl
rename to report/script/input/mysql_select.jtl
diff --git a/report/script/output/insert.jtl b/report/script/input/sharding-jdbc_all.jtl
similarity index 100%
rename from report/script/output/insert.jtl
rename to report/script/input/sharding-jdbc_all.jtl
diff --git a/report/script/output/delete.jtl b/report/script/input/sharding-jdbc_select.jtl
similarity index 100%
rename from report/script/output/delete.jtl
rename to report/script/input/sharding-jdbc_select.jtl
diff --git a/report/script/output/update.jtl b/report/script/input/sharding-proxy_all.jtl
similarity index 100%
copy from report/script/output/update.jtl
copy to report/script/input/sharding-proxy_all.jtl
diff --git a/report/script/output/update.jtl b/report/script/input/sharding-proxy_select.jtl
similarity index 100%
rename from report/script/output/update.jtl
rename to report/script/input/sharding-proxy_select.jtl
diff --git a/report/script/jtl_json_test/test.json b/report/script/jtl_json_test/test.json
index b8fb7fd..749748e 100644
--- a/report/script/jtl_json_test/test.json
+++ b/report/script/jtl_json_test/test.json
@@ -1 +1 @@
-{"SELECT": [{"type": "Sharding-Proxy", "data": []}, {"type": "Sharding-JDBC", "data": []}, {"type": "MySQL", "data": []}], "INSERT": [{"type": "Sharding-Proxy", "data": []}, {"type": "Sharding-JDBC", "data": []}, {"type": "MySQL", "data": []}], "UPDATE": [{"type": "Sharding-Proxy", "data": []}, {"type": "Sharding-JDBC", "data": []}, {"type": "MySQL", "data": []}], "DELETE": [{"type": "Sharding-Proxy", "data": []}, {"type": "Sharding-JDBC", "data": []}, {"type": "MySQL", "data": []}], "DE [...]
\ No newline at end of file
+{"SELECT": [{"type": "Sharding-Proxy", "data": [{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:20 "}, {"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:48 "}, {"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:01:11 "}, {"Samples": 0, "Througho [...]
\ No newline at end of file
diff --git a/report/script/output/mysql_all.jtl b/report/script/output/mysql_all.jtl
new file mode 100644
index 0000000..7c3ac74
--- /dev/null
+++ b/report/script/output/mysql_all.jtl
@@ -0,0 +1,5 @@
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:19 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:48 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:01:11 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:04:26 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:05:41 "}
diff --git a/report/script/output/mysql_select.jtl b/report/script/output/mysql_select.jtl
new file mode 100644
index 0000000..7c3ac74
--- /dev/null
+++ b/report/script/output/mysql_select.jtl
@@ -0,0 +1,5 @@
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:19 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:48 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:01:11 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:04:26 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:05:41 "}
diff --git a/report/script/output/sharding-jdbc_all.jtl b/report/script/output/sharding-jdbc_all.jtl
new file mode 100644
index 0000000..58e7345
--- /dev/null
+++ b/report/script/output/sharding-jdbc_all.jtl
@@ -0,0 +1,5 @@
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:20 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:48 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:01:11 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:04:26 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:05:41 "}
diff --git a/report/script/output/sharding-jdbc_select.jtl b/report/script/output/sharding-jdbc_select.jtl
new file mode 100644
index 0000000..58e7345
--- /dev/null
+++ b/report/script/output/sharding-jdbc_select.jtl
@@ -0,0 +1,5 @@
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:20 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:48 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:01:11 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:04:26 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:05:41 "}
diff --git a/report/script/output/sharding-proxy_all.jtl b/report/script/output/sharding-proxy_all.jtl
new file mode 100644
index 0000000..58e7345
--- /dev/null
+++ b/report/script/output/sharding-proxy_all.jtl
@@ -0,0 +1,5 @@
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:20 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:48 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:01:11 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:04:26 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:05:41 "}
diff --git a/report/script/output/sharding-proxy_select.jtl b/report/script/output/sharding-proxy_select.jtl
new file mode 100644
index 0000000..58e7345
--- /dev/null
+++ b/report/script/output/sharding-proxy_select.jtl
@@ -0,0 +1,5 @@
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:20 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 15:59:48 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:01:11 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:04:26 "}
+{"Samples": 0, "Throughout": 0, "50th": 0, "95th": 0, "99.9th": 0, "Avg": 0, "Min": 0, "Max": 0, "Err": 0, "Date": "2020.02.14 16:05:41 "}

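[Editor's note: each of the new .jtl fixtures above holds one JSON object per line with the fields Samples, Throughout, 50th, 95th, 99.9th, Avg, Min, Max, Err and Date. A small sketch of appending one placeholder record in that layout; the field names, including the "Throughout" spelling, are copied verbatim from the files above, and the benchmark job that actually produces them is outside this commit.]

    import json
    from datetime import datetime

    record = {
        "Samples": 0, "Throughout": 0,           # sample count and throughput
        "50th": 0, "95th": 0, "99.9th": 0,       # latency percentiles
        "Avg": 0, "Min": 0, "Max": 0, "Err": 0,  # aggregates and error count
        "Date": datetime.now().strftime("%Y.%m.%d %H:%M:%S "),
    }
    # hypothetical target; assumes an output/ directory exists next to the script
    with open("output/mysql_all.jtl", "a") as f:
        f.write(json.dumps(record) + "\n")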