diff --git a/cmake/taostools_CMakeLists.txt.in b/cmake/taostools_CMakeLists.txt.in index 83b7ade4074dcdca6c8a90feb4774b5e91dd62ac..e0d5250d84fe36ddb2fb9fea57b13ff2046adabf 100644 --- a/cmake/taostools_CMakeLists.txt.in +++ b/cmake/taostools_CMakeLists.txt.in @@ -2,7 +2,7 @@ # taos-tools ExternalProject_Add(taos-tools GIT_REPOSITORY https://github.com/taosdata/taos-tools.git - GIT_TAG 16eb34f + GIT_TAG 0fb640b SOURCE_DIR "${TD_SOURCE_DIR}/tools/taos-tools" BINARY_DIR "" #BUILD_IN_SOURCE TRUE diff --git a/tests/develop-test/5-taos-tools/taosbenchmark/json/queryInsertdata.json b/tests/develop-test/5-taos-tools/taosbenchmark/json/queryInsertdata.json new file mode 100644 index 0000000000000000000000000000000000000000..7e3ffe16973429200301aef7996119d3082cf7c1 --- /dev/null +++ b/tests/develop-test/5-taos-tools/taosbenchmark/json/queryInsertdata.json @@ -0,0 +1,74 @@ +{ + "filetype": "insert", + "cfgdir": "/etc/taos", + "host": "127.0.0.1", + "port": 6030, + "user": "root", + "password": "taosdata", + "thread_count": 4, + "thread_count_create_tbl": 4, + "result_file": "./insert_res.txt", + "confirm_parameter_prompt": "no", + "insert_interval": 0, + "interlace_rows": 0, + "num_of_records_per_req": 3000, + "max_sql_len": 1024000, + "databases": [{ + "dbinfo": { + "name": "db", + "drop": "yes", + "replica": 1, + "precision": "ms" + }, + "super_tables": [{ + "name": "stb0", + "child_table_exists":"no", + "childtable_count": 10, + "childtable_prefix": "stb00_", + "auto_create_table": "no", + "batch_create_tbl_num": 10, + "data_source": "rand", + "insert_mode": "taosc", + "insert_rows": 100, + "childtable_limit": 0, + "childtable_offset": 0, + "interlace_rows": 0, + "insert_interval": 0, + "max_sql_len": 1024000, + "disorder_ratio": 0, + "disorder_range": 1000, + "timestamp_step": 1, + "start_timestamp": "2020-11-01 00:00:00.000", + "sample_format": "csv", + "sample_file": "./sample.csv", + "tags_file": "", + "columns": [{"type": "BINARY", "len": 1, "count":1}, {"type": "BINARY", "len": 3, "count":1}, {"type": "INT"}, {"type": "DOUBLE", "count":1}], + "tags": [{"type": "TINYINT", "count":2}, {"type": "BINARY", "len": 16, "count":5}] + }, + { + "name": "stb1", + "child_table_exists":"no", + "childtable_count": 10, + "childtable_prefix": "stb01_", + "auto_create_table": "no", + "batch_create_tbl_num": 10, + "data_source": "rand", + "insert_mode": "taosc", + "insert_rows": 200, + "childtable_limit": 0, + "childtable_offset": 0, + "interlace_rows": 0 , + "insert_interval": 0, + "max_sql_len": 1024000, + "disorder_ratio": 0, + "disorder_range": 1000, + "timestamp_step": 1, + "start_timestamp": "2020-11-01 00:00:00.000", + "sample_format": "csv", + "sample_file": "./sample.csv", + "tags_file": "", + "columns": [{"type": "INT"}, {"type": "DOUBLE", "count":6}, {"type": "BINARY", "len": 1, "count":3}, {"type": "BINARY", "len": 2, "count":6}], + "tags": [{"type": "TINYINT", "count":2}, {"type": "BINARY", "len": 16, "count":5}] + }] + }] +} diff --git a/tests/develop-test/5-taos-tools/taosbenchmark/json/queryInsertrestdata.json b/tests/develop-test/5-taos-tools/taosbenchmark/json/queryInsertrestdata.json new file mode 100644 index 0000000000000000000000000000000000000000..67144977665c0140d2e613d1f14bb72301217a78 --- /dev/null +++ b/tests/develop-test/5-taos-tools/taosbenchmark/json/queryInsertrestdata.json @@ -0,0 +1,73 @@ +{ + "filetype": "insert", + "cfgdir": "/etc/taos", + "host": "127.0.0.1", + "port": 6030, + "user": "root", + "password": "taosdata", + "thread_count": 4, + "thread_count_create_tbl": 4, + 
"result_file": "./insert_res.txt", + "confirm_parameter_prompt": "no", + "insert_interval": 0, + "interlace_rows": 0, + "num_of_records_per_req": 3000, + "max_sql_len": 1024000, + "databases": [{ + "dbinfo": { + "name": "db", + "drop": "yes", + "precision": "ms" + }, + "super_tables": [{ + "name": "stb0", + "child_table_exists":"no", + "childtable_count": 2, + "childtable_prefix": "stb00_", + "auto_create_table": "no", + "batch_create_tbl_num": 10, + "data_source": "rand", + "insert_mode": "taosc", + "insert_rows": 10, + "childtable_limit": 0, + "childtable_offset": 0, + "interlace_rows": 0, + "insert_interval": 0, + "max_sql_len": 1024000, + "disorder_ratio": 0, + "disorder_range": 1000, + "timestamp_step": 1, + "start_timestamp": "2020-11-01 00:00:00.000", + "sample_format": "csv", + "sample_file": "./sample.csv", + "tags_file": "", + "columns": [{"type": "BINARY", "len": 1, "count":1}, {"type": "BINARY", "len": 3, "count":1}, {"type": "INT"}, {"type": "DOUBLE", "count":1}], + "tags": [{"type": "TINYINT", "count":2}, {"type": "BINARY", "len": 16, "count":5}] + }, + { + "name": "stb1", + "child_table_exists":"no", + "childtable_count": 2, + "childtable_prefix": "stb01_", + "auto_create_table": "no", + "batch_create_tbl_num": 10, + "data_source": "rand", + "insert_mode": "taosc", + "insert_rows": 5, + "childtable_limit": 0, + "childtable_offset": 0, + "interlace_rows": 0 , + "insert_interval": 0, + "max_sql_len": 1024000, + "disorder_ratio": 0, + "disorder_range": 1000, + "timestamp_step": 1, + "start_timestamp": "2020-11-01 00:00:00.000", + "sample_format": "csv", + "sample_file": "./sample.csv", + "tags_file": "", + "columns": [{"type": "INT"}, {"type": "DOUBLE", "count":6}, {"type": "BINARY", "len": 1, "count":3}, {"type": "BINARY", "len": 2, "count":6}], + "tags": [{"type": "TINYINT", "count":2}, {"type": "BINARY", "len": 16, "count":5}] + }] + }] +} diff --git a/tests/develop-test/5-taos-tools/taosbenchmark/json/queryQps.json b/tests/develop-test/5-taos-tools/taosbenchmark/json/queryQps.json new file mode 100644 index 0000000000000000000000000000000000000000..9e75d52a6cd360674e633be886f348556e470342 --- /dev/null +++ b/tests/develop-test/5-taos-tools/taosbenchmark/json/queryQps.json @@ -0,0 +1,35 @@ +{ + "filetype": "query", + "cfgdir": "/etc/taos", + "host": "127.0.0.1", + "port": 6030, + "user": "root", + "password": "taosdata", + "confirm_parameter_prompt": "no", + "databases": "db", + "query_times": 1, + "specified_table_query": { + "query_interval": 10, + "threads": 4, + "sqls": [ + { + "sql": "select last_row(*) from stb00_0", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_9 ", + "result": "./query_res1.txt" + }] + }, + "super_table_query": { + "stblname": "stb1", + "query_interval":20, + "threads": 4, + "sqls": [ + { + "sql": "select last_row(ts) from xxxx", + "result": "./query_res2.txt" + } + ] + } +} diff --git a/tests/develop-test/5-taos-tools/taosbenchmark/json/queryRestful.json b/tests/develop-test/5-taos-tools/taosbenchmark/json/queryRestful.json new file mode 100644 index 0000000000000000000000000000000000000000..5de560fd217d17ca94690bc39c98a5f903ad1634 --- /dev/null +++ b/tests/develop-test/5-taos-tools/taosbenchmark/json/queryRestful.json @@ -0,0 +1,38 @@ +{ + "filetype": "query", + "cfgdir": "/etc/taos", + "host": "127.0.0.1", + "port": 6030, + "user": "root", + "password": "taosdata", + "confirm_parameter_prompt": "no", + "databases": "db", + "query_times": 2, + "query_mode": "rest", + "specified_table_query": { + "query_interval": 
1, + "threads": 3, + "sqls": [ + { + "sql": "select last_row(*) from db.stb0 ", + "result": "./query_res0.txt" + }, + { + "sql": "select count(*) from db.stb00_1", + "result": "./query_res1.txt" + } + ] + }, + "super_table_query": { + "stblname": "stb1", + "query_interval": 1, + "threads": 3, + "sqls": [ + { + "sql": "select last_row(ts) from xxxx", + "result": "./query_res2.txt" + } + ] + } + } + diff --git a/tests/develop-test/5-taos-tools/taosbenchmark/json/querySpeciMutisql100.json b/tests/develop-test/5-taos-tools/taosbenchmark/json/querySpeciMutisql100.json new file mode 100644 index 0000000000000000000000000000000000000000..a86d22d69dc4365e072e04233b27db1c87c5225e --- /dev/null +++ b/tests/develop-test/5-taos-tools/taosbenchmark/json/querySpeciMutisql100.json @@ -0,0 +1,429 @@ +{ + "filetype": "query", + "cfgdir": "/etc/taos", + "host": "127.0.0.1", + "port": 6030, + "user": "root", + "password": "taosdata", + "confirm_parameter_prompt": "no", + "databases": "db", + "query_times": 2, + "specified_table_query": { + "query_interval": 1, + "threads": 3, + "sqls": [ + { + "sql": "select last_row(*) from stb00_0", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_1", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_2", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_3", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_4", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_5", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_6", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_7", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_8", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_9", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_10 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_11 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_12 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_13 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_14 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_15 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_16 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_17 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_18 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_19 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_20 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_21 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_22 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_23 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_24 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_25 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_26 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_27 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_28 ", + "result": "./query_res0.txt" 
+ }, + { + "sql": "select last_row(*) from stb00_29 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_30 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_31 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_32 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_33 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_34 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_35 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_36 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_37 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_38 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_39 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_40 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_41 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_42 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_43 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_44 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_45 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_46 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_47 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_48 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_49 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_50 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_51 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_52 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_53 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_54 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_55 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_56 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_57 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_58 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_59 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_60", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_61", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_62", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_63", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_64", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_65", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_66", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_67", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_68", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_69", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) 
from stb00_70 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_71 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_72 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_73 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_74 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_75 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_76 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_77 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_78 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_79 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_80 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_81 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_82 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_83 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_84 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_85 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_86 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_87 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_88 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_89 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_90 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_91 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_92 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_93 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_94 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_95 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_96 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_97 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_98 ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from stb00_99 ", + "result": "./query_res0.txt" + + }] + }, + "super_table_query": { + "stblname": "stb1", + "query_interval": 1, + "threads": 3, + "sqls": [ + { + "sql": "select last_row(ts) from xxxx", + "result": "./query_res2.txt" + } + ] + } +} + \ No newline at end of file diff --git a/tests/develop-test/5-taos-tools/taosbenchmark/json/querySuperMutisql100.json b/tests/develop-test/5-taos-tools/taosbenchmark/json/querySuperMutisql100.json new file mode 100644 index 0000000000000000000000000000000000000000..0f21df47e668d4e8cb5c137bf497192dc915f2f5 --- /dev/null +++ b/tests/develop-test/5-taos-tools/taosbenchmark/json/querySuperMutisql100.json @@ -0,0 +1,419 @@ +{ + "filetype": "query", + "cfgdir": "/etc/taos", + "host": "127.0.0.1", + "port": 6030, + "user": "root", + "password": "taosdata", + "confirm_parameter_prompt": "no", + "databases": "db", + "query_times": 3, + "super_table_query": { + "stblname": "stb0", + "query_interval": 10, + "threads": 9, + "sqls": [ + { + "sql": "select last_row(*) from xxxx", + "result": "./query_res0.txt" 
+ }, + { + "sql": "select last_row(*) from xxxx", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) 
from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": 
"./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select last_row(*) from xxxx ", + "result": "./query_res0.txt" + }, + { + "sql": "select * from xxxx ", + "result": "./query_res0.txt" + + }] + } +} + diff --git a/tests/develop-test/5-taos-tools/taosbenchmark/json/queryTaosc.json b/tests/develop-test/5-taos-tools/taosbenchmark/json/queryTaosc.json new file mode 100644 index 0000000000000000000000000000000000000000..9ce423766047e0ab7394c622ab110b7574edf013 --- /dev/null +++ b/tests/develop-test/5-taos-tools/taosbenchmark/json/queryTaosc.json @@ -0,0 +1,37 @@ +{ + "filetype": "query", + "cfgdir": "/etc/taos", + "host": "127.0.0.1", + "port": 6030, + "user": "root", + "password": "taosdata", + "confirm_parameter_prompt": "no", + "databases": "db", + "query_times": 2, + "query_mode": "taosc", + "specified_table_query": { + "query_interval": 1, + "threads": 3, + "sqls": [ + { + "sql": "select last_row(*) from stb0 ", + "result": "./query_res0.txt" + }, + { + "sql": "select count(*) from stb00_1", + "result": "./query_res1.txt" + } + ] + }, + "super_table_query": { + "stblname": "stb1", + "query_interval": 1, + "threads": 3, + "sqls": [ + { + "sql": "select last_row(ts) from xxxx", + "result": "./query_res2.txt" + } + ] + } +} diff --git a/tests/develop-test/5-taos-tools/taosbenchmark/taosdemoTestQueryWithJson.py b/tests/develop-test/5-taos-tools/taosbenchmark/taosdemoTestQueryWithJson.py new file mode 100644 index 0000000000000000000000000000000000000000..19500c7dca946f3c0a1887eaa3d4bdcfa35390ef --- /dev/null +++ b/tests/develop-test/5-taos-tools/taosbenchmark/taosdemoTestQueryWithJson.py @@ -0,0 +1,241 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted,
+# disclosed or used in any form or by any means other than as
+# expressly provided by the written permission from Jianhui Tao
+#
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+import sys
+import os
+from util.log import *
+from util.cases import *
+from util.sql import *
+from util.dnodes import *
+import time
+from datetime import datetime
+import ast
+import re
+
+# from assertpy import assert_that
+import subprocess
+
+
+class TDTestCase:
+    def init(self, conn, logSql, replicaVar=1):
+        tdLog.debug("start to execute %s" % __file__)
+        tdSql.init(conn.cursor(), logSql)
+
+    def getPath(self, tool="taosBenchmark"):
+        selfPath = os.path.dirname(os.path.realpath(__file__))
+
+        if "community" in selfPath:
+            projPath = selfPath[: selfPath.find("community")]
+        elif "src" in selfPath:
+            projPath = selfPath[: selfPath.find("src")]
+        elif "/tools/" in selfPath:
+            projPath = selfPath[: selfPath.find("/tools/")]
+        elif "/tests/" in selfPath:
+            projPath = selfPath[: selfPath.find("/tests/")]
+        else:
+            tdLog.info("cannot find %s in path: %s, use system's" % (tool, selfPath))
+            projPath = "/usr/local/taos/bin/"
+
+        paths = []
+        for root, dirs, files in os.walk(projPath):
+            if (tool) in files:
+                rootRealPath = os.path.dirname(os.path.realpath(root))
+                if "packaging" not in rootRealPath:
+                    paths.append(os.path.join(root, tool))
+                    break
+        if len(paths) == 0:
+            return ""
+        return paths[0]
+
+    # Read the taosc query result file, take every line, and assert the value in the first column.
+    def assertfileDataTaosc(self, filename, expectResult):
+        self.filename = filename
+        self.expectResult = expectResult
+        with open("%s" % filename, "r+") as f1:
+            for line in f1.readlines():
+                queryResultTaosc = line.strip().split()[0]
+                self.assertCheck(filename, queryResultTaosc, expectResult)
+
+    # Extract the key content from the RESTful query result file. The loop breaks at the first
+    # "data" key found, so only a single value is returned; handling multiple result files can
+    # be added later.
+    def getfileDataRestful(self, filename):
+        self.filename = filename
+        with open("%s" % filename, "r+") as f1:
+            for line in f1.readlines():
+                contents = line.strip()
+                if contents.find("data") != -1:
+                    pattern = re.compile("{.*}")
+                    contents = pattern.search(contents).group()
+                    contentsDict = ast.literal_eval(contents)  # convert the string to a dict
+                    queryResultRest = contentsDict["data"][0][0]
+                    break
+                else:
+                    queryResultRest = ""
+        return queryResultRest
+
+    # get the number of queries executed through the taosc interface
+    def queryTimesTaosc(self, filename):
+        self.filename = filename
+        command = "cat %s |wc -l" % filename
+        times = int(subprocess.getstatusoutput(command)[1])
+        return times
+
+    # get the number of queries executed through the RESTful interface
+    def queryTimesRestful(self, filename):
+        self.filename = filename
+        command = 'cat %s |grep "200 OK" |wc -l' % filename
+        times = int(subprocess.getstatusoutput(command)[1])
+        return times
+
+    # Assert that the query result matches the expected value; report the file and both values on mismatch.
+    def assertCheck(self, filename, queryResult, expectResult):
+        self.filename = filename
+        self.queryResult = queryResult
+        self.expectResult = expectResult
+        args0 = (filename, queryResult, expectResult)
+        assert queryResult == expectResult, (
+            "Queryfile:%s ,result is %s != expect: %s" % args0
+        )
+
+    def run(self):
+        binPath = self.getPath()
+        if binPath == "":
+            tdLog.exit("taosBenchmark not found!")
+        else:
+            tdLog.info("taosBenchmark use %s" % binPath)
+
+        # delete useless files
+        os.system("rm -rf ./query_res*")
+        os.system("rm -rf ./all_query*")
+
+        # taosc query: query specified table and query super table
+        os.system("%s -f ./5-taos-tools/taosbenchmark/json/queryInsertdata.json" % binPath)
+        os.system("%s -f ./5-taos-tools/taosbenchmark/json/queryTaosc.json" % binPath)
+        os.system("cat query_res0.txt* > all_query_res0_taosc.txt")
+        os.system("cat query_res1.txt* > all_query_res1_taosc.txt")
+        os.system("cat query_res2.txt* > all_query_res2_taosc.txt")
+
+        # verify the query execution counts
+        queryTimes0Taosc = self.queryTimesTaosc("all_query_res0_taosc.txt")
+        self.assertCheck("all_query_res0_taosc.txt", queryTimes0Taosc, 6)
+
+        queryTimes1Taosc = self.queryTimesTaosc("all_query_res1_taosc.txt")
+        self.assertCheck("all_query_res1_taosc.txt", queryTimes1Taosc, 6)
+
+        queryTimes2Taosc = self.queryTimesTaosc("all_query_res2_taosc.txt")
+        self.assertCheck("all_query_res2_taosc.txt", queryTimes2Taosc, 20)
+
+        # verify the query result data
+        self.assertfileDataTaosc("all_query_res0_taosc.txt", "1604160000099")
+        self.assertfileDataTaosc("all_query_res1_taosc.txt", "100")
+        self.assertfileDataTaosc("all_query_res2_taosc.txt", "1604160000199")
+
+        # delete useless files
+        os.system("rm -rf ./query_res*")
+        os.system("rm -rf ./all_query*")
+
+        # use the RESTful API to query
+        os.system("%s -f ./5-taos-tools/taosbenchmark/json/queryInsertrestdata.json" % binPath)
+        os.system("%s -f ./5-taos-tools/taosbenchmark/json/queryRestful.json" % binPath)
+        os.system("cat query_res0.txt* > all_query_res0_rest.txt")
+        os.system("cat query_res1.txt* > all_query_res1_rest.txt")
+        os.system("cat query_res2.txt* > all_query_res2_rest.txt")
+
+        # verify the query execution counts
+        queryTimes0Restful = self.queryTimesRestful("all_query_res0_rest.txt")
+        self.assertCheck("all_query_res0_rest.txt", queryTimes0Restful, 6)
+
+        queryTimes1Restful = self.queryTimesRestful("all_query_res1_rest.txt")
+        self.assertCheck("all_query_res1_rest.txt", queryTimes1Restful, 6)
+
+        queryTimes2Restful = self.queryTimesRestful("all_query_res2_rest.txt")
+        self.assertCheck("all_query_res2_rest.txt", queryTimes2Restful, 4)
+
+        # verify the query result data
+        data0 = self.getfileDataRestful("all_query_res0_rest.txt")
+        if data0 != "2020-11-01 00:00:00.009" and data0 != "2020-10-31T16:00:00.009Z":
+            tdLog.exit(
+                "data0 is not 2020-11-01 00:00:00.009 and 2020-10-31T16:00:00.009Z"
+            )
+
+        data1 = self.getfileDataRestful("all_query_res1_rest.txt")
+        self.assertCheck("all_query_res1_rest.txt", data1, 10)
+
+        data2 = self.getfileDataRestful("all_query_res2_rest.txt")
+        if data2 != "2020-11-01 00:00:00.004" and data2 != "2020-10-31T16:00:00.004Z":
+            tdLog.exit(
+                "data2 is not 2020-11-01 00:00:00.004 and 2020-10-31T16:00:00.004Z"
+            )
+
+        # query times less than or equal to 100
+        assert (
+            os.system("%s -f ./5-taos-tools/taosbenchmark/json/queryInsertdata.json" % binPath) == 0
+        )
+        assert (
+            os.system("%s -f ./5-taos-tools/taosbenchmark/json/querySpeciMutisql100.json" % binPath)
+            != 0
+        )
+        assert (
+            os.system("%s -f ./5-taos-tools/taosbenchmark/json/querySuperMutisql100.json" % binPath)
+            == 0
+        )
+
+        # query result print QPS
+        os.system("%s -f ./5-taos-tools/taosbenchmark/json/queryInsertdata.json" % binPath)
+        exceptcode = os.system("%s -f ./5-taos-tools/taosbenchmark/json/queryQps.json" % binPath)
+        assert exceptcode == 0
+
+        # 2021.02.09 need to modify taosBenchmark code
+        # use illegal or out-of-range parameters in the query json file
+        os.system("%s -f ./5-taos-tools/taosbenchmark/json/queryInsertdata.json" % binPath)
+        # 2021.02.09 need to modify taosBenchmark code
+        # exceptcode = os.system(
+        #     "%s -f ./taosbenchmark/json/queryTimes0.json" %
+        #     binPath)
+        # assert exceptcode != 0
+
+        # 2021.02.09 need to modify taosBenchmark code
+        # exceptcode0 = os.system(
+        #     "%s -f 
./taosbenchmark/json/queryTimesless0.json" % + # binPath) + # assert exceptcode0 != 0 + + # exceptcode1 = os.system( + # "%s -f ./taosbenchmark/json/queryConcurrent0.json" % + # binPath) + # assert exceptcode2 != 0 + + # exceptcode3 = os.system( + # "%s -f ./taosbenchmark/json/querrThreadsless0.json" % + # binPath) + # assert exceptcode3 != 0 + + # exceptcode4 = os.system( + # "%s -f ./taosbenchmark/json/querrThreads0.json" % + # binPath) + # assert exceptcode4 != 0 + + # delete useless files + os.system("rm -rf ./insert_res.txt") + os.system("rm -rf 5-taos-tools/taosbenchmark/*.py.sql") + os.system("rm -rf ./querySystemInfo*") + os.system("rm -rf ./query_res*") + os.system("rm -rf ./all_query*") + os.system("rm -rf ./test_query_res0.txt") + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/develop-test/fulltest.sh b/tests/develop-test/fulltest.sh index 69cade3855b087fc7638eea22b4926d088b5d86b..e986ed69663b6333110251273330a1ead19db4e6 100644 --- a/tests/develop-test/fulltest.sh +++ b/tests/develop-test/fulltest.sh @@ -18,3 +18,4 @@ python3 ./test.py -f 5-taos-tools/taosbenchmark/sml_json_alltypes.py #python3 ./test.py -f 5-taos-tools/taosbenchmark/sml_telnet_alltypes.py #python3 ./test.py -f 5-taos-tools/taosbenchmark/taosadapter_json.py #python3 ./test.py -f 5-taos-tools/taosbenchmark/telnet_tcp.py +python3 ./test.py -f 5-taos-tools/taosbenchmark/taosdemoTestQueryWithJson.py -R diff --git a/tests/develop-test/test.py b/tests/develop-test/test.py index b25bda4a3b1b8c9a19b9e6687ca95bb0eb70d9b9..062e48b94b013d370f8be1dd785fdee35c74aef0 100644 --- a/tests/develop-test/test.py +++ b/tests/develop-test/test.py @@ -24,6 +24,7 @@ import socket import threading import toml + sys.path.append("../pytest") from util.log import * from util.dnodes import * @@ -34,14 +35,16 @@ from util.taosadapter import * import taos import taosrest + def checkRunTimeError(): import win32gui + timeCount = 0 while 1: time.sleep(1) timeCount = timeCount + 1 - print("checkRunTimeError",timeCount) - if (timeCount>600): + print("checkRunTimeError", timeCount) + if timeCount > 600: print("stop the test.") os.system("TASKKILL /F /IM taosd.exe") os.system("TASKKILL /F /IM taos.exe") @@ -53,6 +56,7 @@ def checkRunTimeError(): if hwnd: os.system("TASKKILL /F /IM taosd.exe") + if __name__ == "__main__": fileName = "all" @@ -73,102 +77,124 @@ if __name__ == "__main__": createDnodeNums = 1 restful = False replicaVar = 1 - opts, args = getopt.gnu_getopt(sys.argv[1:], 'f:p:m:l:scghrd:k:e:N:M:Q:C:RD:n:', [ - 'file=', 'path=', 'master', 'logSql', 'stop', 'cluster', 'valgrind', 'help', 'restart', 'updateCfgDict', 'killv', 'execCmd','dnodeNums','mnodeNums','queryPolicy','createDnodeNums','restful','adaptercfgupdate','replicaVar']) + opts, args = getopt.gnu_getopt( + sys.argv[1:], + "f:p:m:l:scghrd:k:e:N:M:Q:C:RD:n:", + [ + "file=", + "path=", + "master", + "logSql", + "stop", + "cluster", + "valgrind", + "help", + "restart", + "updateCfgDict", + "killv", + "execCmd", + "dnodeNums", + "mnodeNums", + "queryPolicy", + "createDnodeNums", + "restful", + "adaptercfgupdate", + "replicaVar", + ], + ) for key, value in opts: - if key in ['-h', '--help']: - tdLog.printNoPrefix( - 'A collection of test cases written using Python') - tdLog.printNoPrefix('-f Name of test case file written by Python') - tdLog.printNoPrefix('-p Deploy Path for Simulator') - tdLog.printNoPrefix('-m Master Ip for Simulator') 
- tdLog.printNoPrefix('-l logSql Flag') - tdLog.printNoPrefix('-s stop All dnodes') - tdLog.printNoPrefix('-c Test Cluster Flag') - tdLog.printNoPrefix('-g valgrind Test Flag') - tdLog.printNoPrefix('-r taosd restart test') - tdLog.printNoPrefix('-d update cfg dict, base64 json str') - tdLog.printNoPrefix('-k not kill valgrind processer') - tdLog.printNoPrefix('-e eval str to run') - tdLog.printNoPrefix('-N start dnodes numbers in clusters') - tdLog.printNoPrefix('-M create mnode numbers in clusters') - tdLog.printNoPrefix('-Q set queryPolicy in one dnode') - tdLog.printNoPrefix('-C create Dnode Numbers in one cluster') - tdLog.printNoPrefix('-R restful realization form') - tdLog.printNoPrefix('-D taosadapter update cfg dict ') - tdLog.printNoPrefix('-n the number of replicas') + if key in ["-h", "--help"]: + tdLog.printNoPrefix("A collection of test cases written using Python") + tdLog.printNoPrefix("-f Name of test case file written by Python") + tdLog.printNoPrefix("-p Deploy Path for Simulator") + tdLog.printNoPrefix("-m Master Ip for Simulator") + tdLog.printNoPrefix("-l logSql Flag") + tdLog.printNoPrefix("-s stop All dnodes") + tdLog.printNoPrefix("-c Test Cluster Flag") + tdLog.printNoPrefix("-g valgrind Test Flag") + tdLog.printNoPrefix("-r taosd restart test") + tdLog.printNoPrefix("-d update cfg dict, base64 json str") + tdLog.printNoPrefix("-k not kill valgrind processer") + tdLog.printNoPrefix("-e eval str to run") + tdLog.printNoPrefix("-N start dnodes numbers in clusters") + tdLog.printNoPrefix("-M create mnode numbers in clusters") + tdLog.printNoPrefix("-Q set queryPolicy in one dnode") + tdLog.printNoPrefix("-C create Dnode Numbers in one cluster") + tdLog.printNoPrefix("-R restful realization form") + tdLog.printNoPrefix("-D taosadapter update cfg dict ") + tdLog.printNoPrefix("-n the number of replicas") sys.exit(0) - if key in ['-r', '--restart']: + if key in ["-r", "--restart"]: restart = True - if key in ['-f', '--file']: + if key in ["-f", "--file"]: fileName = value - if key in ['-p', '--path']: + if key in ["-p", "--path"]: deployPath = value - if key in ['-m', '--master']: + if key in ["-m", "--master"]: masterIp = value - if key in ['-l', '--logSql']: - if (value.upper() == "TRUE"): + if key in ["-l", "--logSql"]: + if value.upper() == "TRUE": logSql = True - elif (value.upper() == "FALSE"): + elif value.upper() == "FALSE": logSql = False else: tdLog.printNoPrefix("logSql value %s is invalid" % logSql) sys.exit(0) - if key in ['-c', '--cluster']: + if key in ["-c", "--cluster"]: testCluster = True - if key in ['-g', '--valgrind']: + if key in ["-g", "--valgrind"]: valgrind = 1 - if key in ['-s', '--stop']: + if key in ["-s", "--stop"]: stop = 1 - if key in ['-d', '--updateCfgDict']: + if key in ["-d", "--updateCfgDict"]: try: updateCfgDict = eval(base64.b64decode(value.encode()).decode()) except: - print('updateCfgDict convert fail.') + print("updateCfgDict convert fail.") sys.exit(0) - if key in ['-k', '--killValgrind']: + if key in ["-k", "--killValgrind"]: killValgrind = 0 - if key in ['-e', '--execCmd']: + if key in ["-e", "--execCmd"]: try: execCmd = base64.b64decode(value.encode()).decode() except: - print('execCmd run fail.') + print("execCmd run fail.") sys.exit(0) - if key in ['-N', '--dnodeNums']: + if key in ["-N", "--dnodeNums"]: dnodeNums = value - if key in ['-M', '--mnodeNums']: + if key in ["-M", "--mnodeNums"]: mnodeNums = value - if key in ['-Q', '--queryPolicy']: + if key in ["-Q", "--queryPolicy"]: queryPolicy = value - if key in ['-C', 
'--createDnodeNums']: + if key in ["-C", "--createDnodeNums"]: createDnodeNums = value - if key in ['-R', '--restful']: + if key in ["-R", "--restful"]: restful = True - if key in ['-D', '--adaptercfgupdate']: + if key in ["-D", "--adaptercfgupdate"]: try: adaptercfgupdate = eval(base64.b64decode(value.encode()).decode()) except: - print('adapter cfg update convert fail.') + print("adapter cfg update convert fail.") sys.exit(0) - if key in ['-n', '--replicaVar']: + if key in ["-n", "--replicaVar"]: replicaVar = value if not execCmd == "": @@ -180,18 +206,21 @@ if __name__ == "__main__": exec(execCmd) quit() - if (stop != 0): - if (valgrind == 0): + if stop != 0: + if valgrind == 0: toBeKilled = "taosd" else: toBeKilled = "valgrind.bin" - killCmd = "ps -ef|grep -w %s| grep -v grep | awk '{print $2}' | xargs kill -TERM > /dev/null 2>&1" % toBeKilled + killCmd = ( + "ps -ef|grep -w %s| grep -v grep | awk '{print $2}' | xargs kill -TERM > /dev/null 2>&1" + % toBeKilled + ) psCmd = "ps -ef|grep -w %s| grep -v grep | awk '{print $2}'" % toBeKilled processID = subprocess.check_output(psCmd, shell=True) - while(processID): + while processID: os.system(killCmd) time.sleep(1) processID = subprocess.check_output(psCmd, shell=True) @@ -218,7 +247,7 @@ if __name__ == "__main__": # psCmd = f"pgrep {toBeKilled}" processID = subprocess.check_output(psCmd, shell=True) - while(processID): + while processID: os.system(killCmd) time.sleep(1) processID = subprocess.check_output(psCmd, shell=True) @@ -233,9 +262,9 @@ if __name__ == "__main__": fuserCmd = f"fuser -k -n tcp {port}" os.system(fuserCmd) - tdLog.info('stop taosadapter') + tdLog.info("stop taosadapter") - tdLog.info('stop All dnodes') + tdLog.info("stop All dnodes") if masterIp == "": host = socket.gethostname() @@ -247,33 +276,40 @@ if __name__ == "__main__": host = masterIp tdLog.info("Procedures for tdengine deployed in %s" % (host)) - if platform.system().lower() == 'windows': + if platform.system().lower() == "windows": fileName = fileName.replace("/", os.sep) - if (masterIp == "" and not fileName == "0-others\\udf_create.py"): - threading.Thread(target=checkRunTimeError,daemon=True).start() + if masterIp == "" and not fileName == "0-others\\udf_create.py": + threading.Thread(target=checkRunTimeError, daemon=True).start() tdLog.info("Procedures for testing self-deployment") tdDnodes.init(deployPath, masterIp) tdDnodes.setTestCluster(testCluster) tdDnodes.setValgrind(valgrind) tdDnodes.stopAll() - key_word = 'tdCases.addWindows' + key_word = "tdCases.addWindows" is_test_framework = 0 try: - if key_word in open(fileName, encoding='UTF-8').read(): + if key_word in open(fileName, encoding="UTF-8").read(): is_test_framework = 1 except Exception as r: print(r) - updateCfgDictStr = '' + updateCfgDictStr = "" # adapter_cfg_dict_str = '' if is_test_framework: moduleName = fileName.replace(".py", "").replace(os.sep, ".") uModule = importlib.import_module(moduleName) try: ucase = uModule.TDTestCase() - if ((json.dumps(updateCfgDict) == '{}') and hasattr(ucase, 'updatecfgDict')): + if (json.dumps(updateCfgDict) == "{}") and hasattr( + ucase, "updatecfgDict" + ): updateCfgDict = ucase.updatecfgDict - updateCfgDictStr = "-d %s"%base64.b64encode(json.dumps(updateCfgDict).encode()).decode() - if ((json.dumps(adapter_cfg_dict) == '{}') and hasattr(ucase, 'taosadapter_cfg_dict')): + updateCfgDictStr = ( + "-d %s" + % base64.b64encode(json.dumps(updateCfgDict).encode()).decode() + ) + if (json.dumps(adapter_cfg_dict) == "{}") and hasattr( + ucase, 
"taosadapter_cfg_dict" + ): adapter_cfg_dict = ucase.taosadapter_cfg_dict # adapter_cfg_dict_str = f"-D {base64.b64encode(toml.dumps(adapter_cfg_dict).encode()).decode()}" except Exception as r: @@ -284,8 +320,8 @@ if __name__ == "__main__": tAdapter.init(deployPath, masterIp) tAdapter.stop(force_kill=True) - if dnodeNums == 1 : - tdDnodes.deploy(1,updateCfgDict) + if dnodeNums == 1: + tdDnodes.deploy(1, updateCfgDict) tdDnodes.start(1) tdCases.logSql(logSql) if restful: @@ -293,11 +329,11 @@ if __name__ == "__main__": tAdapter.start() if queryPolicy != 1: - queryPolicy=int(queryPolicy) + queryPolicy = int(queryPolicy) if restful: conn = taosrest.connect(url=f"http://{host}:6041") else: - conn = taos.connect(host,config=tdDnodes.getSimCfgPath()) + conn = taos.connect(host, config=tdDnodes.getSimCfgPath()) cursor = conn.cursor() cursor.execute("create qnode on dnode 1") @@ -305,51 +341,58 @@ if __name__ == "__main__": cursor.execute("show local variables") res = cursor.fetchall() for i in range(cursor.rowcount): - if res[i][0] == "queryPolicy" : + if res[i][0] == "queryPolicy": if int(res[i][1]) == int(queryPolicy): - tdLog.success(f'alter queryPolicy to {queryPolicy} successfully') + tdLog.success( + f"alter queryPolicy to {queryPolicy} successfully" + ) else: tdLog.debug(res) tdLog.exit(f"alter queryPolicy to {queryPolicy} failed") - else : - tdLog.debug("create an cluster with %s nodes and make %s dnode as independent mnode"%(dnodeNums,mnodeNums)) - dnodeslist = cluster.configure_cluster(dnodeNums=dnodeNums,mnodeNums=mnodeNums) + else: + tdLog.debug( + "create an cluster with %s nodes and make %s dnode as independent mnode" + % (dnodeNums, mnodeNums) + ) + dnodeslist = cluster.configure_cluster( + dnodeNums=dnodeNums, mnodeNums=mnodeNums + ) tdDnodes = ClusterDnodes(dnodeslist) tdDnodes.init(deployPath, masterIp) tdDnodes.setTestCluster(testCluster) tdDnodes.setValgrind(valgrind) tdDnodes.stopAll() for dnode in tdDnodes.dnodes: - tdDnodes.deploy(dnode.index,{}) + tdDnodes.deploy(dnode.index, {}) for dnode in tdDnodes.dnodes: tdDnodes.starttaosd(dnode.index) tdCases.logSql(logSql) - + if restful: tAdapter.deploy(adapter_cfg_dict) tAdapter.start() if not restful: - conn = taos.connect(host,config=tdDnodes.getSimCfgPath()) + conn = taos.connect(host, config=tdDnodes.getSimCfgPath()) else: conn = taosrest.connect(url=f"http://{host}:6041") # tdLog.info(tdDnodes.getSimCfgPath(),host) if createDnodeNums == 1: - createDnodeNums=dnodeNums + createDnodeNums = dnodeNums else: - createDnodeNums=createDnodeNums - cluster.create_dnode(conn,createDnodeNums) + createDnodeNums = createDnodeNums + cluster.create_dnode(conn, createDnodeNums) try: - if cluster.check_dnode(conn) : + if cluster.check_dnode(conn): print("check dnode ready") except Exception as r: print(r) if queryPolicy != 1: - queryPolicy=int(queryPolicy) + queryPolicy = int(queryPolicy) if restful: conn = taosrest.connect(url=f"http://{host}:6041") else: - conn = taos.connect(host,config=tdDnodes.getSimCfgPath()) + conn = taos.connect(host, config=tdDnodes.getSimCfgPath()) cursor = conn.cursor() cursor.execute("create qnode on dnode 1") @@ -357,18 +400,20 @@ if __name__ == "__main__": cursor.execute("show local variables") res = cursor.fetchall() for i in range(cursor.rowcount): - if res[i][0] == "queryPolicy" : + if res[i][0] == "queryPolicy": if int(res[i][1]) == int(queryPolicy): - tdLog.success(f'alter queryPolicy to {queryPolicy} successfully') + tdLog.success( + f"alter queryPolicy to {queryPolicy} successfully" + ) else: 
tdLog.debug(res) tdLog.exit(f"alter queryPolicy to {queryPolicy} failed") - - if ucase is not None and hasattr(ucase, 'noConn') and ucase.noConn == True: + + if ucase is not None and hasattr(ucase, "noConn") and ucase.noConn == True: conn = None else: if not restful: - conn = taos.connect(host="%s"%(host), config=tdDnodes.sim.getCfgDir()) + conn = taos.connect(host="%s" % (host), config=tdDnodes.sim.getCfgDir()) else: conn = taosrest.connect(url=f"http://{host}:6041") if is_test_framework: @@ -382,7 +427,7 @@ if __name__ == "__main__": tdDnodes.setValgrind(valgrind) tdDnodes.stopAll() is_test_framework = 0 - key_word = 'tdCases.addLinux' + key_word = "tdCases.addLinux" try: if key_word in open(fileName).read(): is_test_framework = 1 @@ -393,9 +438,9 @@ if __name__ == "__main__": uModule = importlib.import_module(moduleName) try: ucase = uModule.TDTestCase() - if (json.dumps(updateCfgDict) == '{}'): + if json.dumps(updateCfgDict) == "{}": updateCfgDict = ucase.updatecfgDict - if (json.dumps(adapter_cfg_dict) == '{}'): + if json.dumps(adapter_cfg_dict) == "{}": adapter_cfg_dict = ucase.taosadapter_cfg_dict except: pass @@ -404,8 +449,8 @@ if __name__ == "__main__": tAdapter.init(deployPath, masterIp) tAdapter.stop(force_kill=True) - if dnodeNums == 1 : - tdDnodes.deploy(1,updateCfgDict) + if dnodeNums == 1: + tdDnodes.deploy(1, updateCfgDict) tdDnodes.start(1) tdCases.logSql(logSql) @@ -414,9 +459,9 @@ if __name__ == "__main__": tAdapter.start() if queryPolicy != 1: - queryPolicy=int(queryPolicy) + queryPolicy = int(queryPolicy) if not restful: - conn = taos.connect(host,config=tdDnodes.getSimCfgPath()) + conn = taos.connect(host, config=tdDnodes.getSimCfgPath()) else: conn = taosrest.connect(url=f"http://{host}:6041") # tdSql.init(conn.cursor()) @@ -437,23 +482,30 @@ if __name__ == "__main__": cursor.execute("show local variables") res = cursor.fetchall() for i in range(cursor.rowcount): - if res[i][0] == "queryPolicy" : + if res[i][0] == "queryPolicy": if int(res[i][1]) == int(queryPolicy): - tdLog.success(f'alter queryPolicy to {queryPolicy} successfully') + tdLog.success( + f"alter queryPolicy to {queryPolicy} successfully" + ) else: tdLog.debug(res) tdLog.exit(f"alter queryPolicy to {queryPolicy} failed") - else : - tdLog.debug("create an cluster with %s nodes and make %s dnode as independent mnode"%(dnodeNums,mnodeNums)) - dnodeslist = cluster.configure_cluster(dnodeNums=dnodeNums,mnodeNums=mnodeNums) + else: + tdLog.debug( + "create an cluster with %s nodes and make %s dnode as independent mnode" + % (dnodeNums, mnodeNums) + ) + dnodeslist = cluster.configure_cluster( + dnodeNums=dnodeNums, mnodeNums=mnodeNums + ) tdDnodes = ClusterDnodes(dnodeslist) tdDnodes.init(deployPath, masterIp) tdDnodes.setTestCluster(testCluster) tdDnodes.setValgrind(valgrind) tdDnodes.stopAll() for dnode in tdDnodes.dnodes: - tdDnodes.deploy(dnode.index,{}) + tdDnodes.deploy(dnode.index, {}) for dnode in tdDnodes.dnodes: tdDnodes.starttaosd(dnode.index) tdCases.logSql(logSql) @@ -463,27 +515,27 @@ if __name__ == "__main__": tAdapter.start() if not restful: - conn = taos.connect(host,config=tdDnodes.getSimCfgPath()) + conn = taos.connect(host, config=tdDnodes.getSimCfgPath()) else: conn = taosrest.connect(url=f"http://{host}:6041") - print(tdDnodes.getSimCfgPath(),host) + print(tdDnodes.getSimCfgPath(), host) if createDnodeNums == 1: - createDnodeNums=dnodeNums + createDnodeNums = dnodeNums else: - createDnodeNums=createDnodeNums - cluster.create_dnode(conn,createDnodeNums) + createDnodeNums = createDnodeNums 
+ cluster.create_dnode(conn, createDnodeNums) try: - if cluster.check_dnode(conn) : + if cluster.check_dnode(conn): print("check dnode ready") except Exception as r: print(r) if queryPolicy != 1: - queryPolicy=int(queryPolicy) + queryPolicy = int(queryPolicy) if restful: conn = taosrest.connect(url=f"http://{host}:6041") else: - conn = taos.connect(host,config=tdDnodes.getSimCfgPath()) + conn = taos.connect(host, config=tdDnodes.getSimCfgPath()) cursor = conn.cursor() cursor.execute("create qnode on dnode 1") @@ -491,13 +543,14 @@ if __name__ == "__main__": cursor.execute("show local variables") res = cursor.fetchall() for i in range(cursor.rowcount): - if res[i][0] == "queryPolicy" : + if res[i][0] == "queryPolicy": if int(res[i][1]) == int(queryPolicy): - tdLog.success(f'alter queryPolicy to {queryPolicy} successfully') + tdLog.success( + f"alter queryPolicy to {queryPolicy} successfully" + ) else: tdLog.debug(res) tdLog.exit(f"alter queryPolicy to {queryPolicy} failed") - if testCluster: tdLog.info("Procedures for testing cluster") @@ -508,7 +561,7 @@ if __name__ == "__main__": else: tdLog.info("Procedures for testing self-deployment") if not restful: - conn = taos.connect(host,config=tdDnodes.getSimCfgPath()) + conn = taos.connect(host, config=tdDnodes.getSimCfgPath()) else: conn = taosrest.connect(url=f"http://{host}:6041") @@ -527,7 +580,7 @@ if __name__ == "__main__": tdDnodes.start(1) time.sleep(1) if not restful: - conn = taos.connect( host, config=tdDnodes.getSimCfgPath()) + conn = taos.connect(host, config=tdDnodes.getSimCfgPath()) else: conn = taosrest.connect(url=f"http://{host}:6041") tdLog.info("Procedures for tdengine deployed in %s" % (host))