diff --git a/tests/pytest/fulltest.sh b/tests/pytest/fulltest.sh
index ec06eb38ef038eab2d8cfa798070be84e7f7623e..cff6db202849c16e70476c887ce5d2512a3b4071 100755
--- a/tests/pytest/fulltest.sh
+++ b/tests/pytest/fulltest.sh
@@ -9,6 +9,11 @@ python3 ./test.py $1 -f insert/smallint.py
 python3 ./test.py $1 -f insert/tinyint.py
 python3 ./test.py $1 -f insert/date.py
 python3 ./test.py $1 -f insert/binary.py
+
+python3 ./test.py $1 -f table/column_name.py
+python3 ./test.py $1 -f table/column_num.py
+python3 ./test.py $1 -f table/db_table.py
+
 python3 ./test.py $1 -f import_merge/importBlock1HO.py
 python3 ./test.py $1 -f import_merge/importBlock1HPO.py
 python3 ./test.py $1 -f import_merge/importBlock1H.py
diff --git a/tests/pytest/import_merge/importDataH2.py b/tests/pytest/import_merge/importDataH2.py
index d49abff374b864e80079996a9cb5c81be248d4ba..73a412fb8046ff1f6baf9c42d39221345c22fbee 100644
--- a/tests/pytest/import_merge/importDataH2.py
+++ b/tests/pytest/import_merge/importDataH2.py
@@ -27,7 +27,7 @@ class TDTestCase:
     def run(self):
         self.ntables = 1
         self.startTime = 1520000010000
-        self.rows = 200
+        self.maxrows = 200
 
         tdDnodes.stop(1)
         tdDnodes.deploy(1)
@@ -35,7 +35,7 @@ class TDTestCase:
 
         tdSql.execute('reset query cache')
         tdSql.execute('drop database if exists db')
-        tdSql.execute('create database db rows %d' % self.rows)
+        tdSql.execute('create database db maxrows %d' % self.maxrows)
         tdSql.execute('use db')
 
         tdLog.info("================= step1")
@@ -43,19 +43,19 @@ class TDTestCase:
         tdSql.execute('create table tb1 (ts timestamp, speed int)')
         tdLog.info(
             "More than 10 rows less than %d rows will go to data file" %
-            self.rows)
+            self.maxrows)
 
         tdLog.info("================= step2")
-        tdLog.info("import %d sequential data" % (self.rows / 2))
+        tdLog.info("import %d sequential data" % (self.maxrows / 2))
         startTime = self.startTime
         sqlcmd = ['import into tb1 values']
-        for rid in range(1, self.rows / 2 + 1):
+        for rid in range(1, self.maxrows / 2 + 1):
             sqlcmd.append('(%ld, %d)' % (startTime + rid, rid))
         tdSql.execute(" ".join(sqlcmd))
 
         tdLog.info("================= step3")
         tdSql.query('select * from tb1')
-        tdSql.checkRows(self.rows / 2)
+        tdSql.checkRows(self.maxrows / 2)
 
         tdLog.info("================= step4")
         tdDnodes.stop(1)
@@ -70,7 +70,7 @@ class TDTestCase:
         tdLog.info("================= step7")
         tdSql.execute('reset query cache')
         tdSql.query('select * from tb1 order by ts desc')
-        tdSql.checkRows(self.rows / 2 + 1)
+        tdSql.checkRows(self.maxrows / 2 + 1)
 
         tdLog.info("================= step8")
         tdLog.info("import 10 data in batch before")
@@ -83,7 +83,7 @@ class TDTestCase:
         tdLog.info("================= step9")
         tdSql.execute('reset query cache')
         tdSql.query('select * from tb1 order by ts desc')
-        tdSql.checkRows(self.rows / 2 + 11)
+        tdSql.checkRows(self.maxrows / 2 + 11)
 
     def stop(self):
         tdSql.close()
diff --git a/tests/pytest/import_merge/importDataHO.py b/tests/pytest/import_merge/importDataHO.py
index 0483e6844c083766a36441926ec096c3219a9130..0fe6ab71d58dcd4849ca88bf22323cfc536b6135 100644
--- a/tests/pytest/import_merge/importDataHO.py
+++ b/tests/pytest/import_merge/importDataHO.py
@@ -27,7 +27,7 @@ class TDTestCase:
     def run(self):
         self.ntables = 1
         self.startTime = 1520000010000
-        self.rows = 200
+        self.maxrows = 200
         self.rowsPerTable = 20
 
         tdDnodes.stop(1)
@@ -36,7 +36,7 @@ class TDTestCase:
 
         tdSql.execute('reset query cache')
         tdSql.execute('drop database if exists db')
-        tdSql.execute('create database db rows %d' % self.rows)
+        tdSql.execute('create database db maxrows %d' % self.maxrows)
         tdSql.execute('use db')
 
         tdLog.info("================= step1")
@@ -44,7 +44,7 @@ class TDTestCase:
         tdSql.execute('create table tb1 (ts timestamp, speed int)')
         tdLog.info(
             "More than 10 rows less than %d rows will go to data file" %
-            self.rows)
+            self.maxrows)
 
         tdLog.info("================= step2")
         tdLog.info("import %d sequential data" % self.rowsPerTable)
diff --git a/tests/pytest/import_merge/importDataHO2.py b/tests/pytest/import_merge/importDataHO2.py
index ab7044d2a745822e27f507b5fb20f1d058fa65f6..6246b55b324b5ccbd2543af0d7f68c153a89d0bd 100644
--- a/tests/pytest/import_merge/importDataHO2.py
+++ b/tests/pytest/import_merge/importDataHO2.py
@@ -27,7 +27,7 @@ class TDTestCase:
     def run(self):
         self.ntables = 1
         self.startTime = 1520000010000
-        self.rows = 200
+        self.maxrows = 200
         self.rowsPerTable = 100
 
         tdDnodes.stop(1)
@@ -36,7 +36,7 @@ class TDTestCase:
 
         tdSql.execute('reset query cache')
         tdSql.execute('drop database if exists db')
-        tdSql.execute('create database db rows %d' % self.rows)
+        tdSql.execute('create database db maxrows %d' % self.maxrows)
         tdSql.execute('use db')
 
         tdLog.info("================= step1")
@@ -44,7 +44,7 @@ class TDTestCase:
         tdSql.execute('create table tb1 (ts timestamp, speed int)')
         tdLog.info(
             "More than 10 rows less than %d rows will go to data file" %
-            self.rows)
+            self.maxrows)
 
         tdLog.info("================= step2")
         tdLog.info("import %d sequential data" % self.rowsPerTable)
diff --git a/tests/pytest/import_merge/importDataHPO.py b/tests/pytest/import_merge/importDataHPO.py
index f165bd7b5a734bc21e5005e9413a095de5a202c4..c749dbd1138ac56942e4a38752e01d18226946e2 100644
--- a/tests/pytest/import_merge/importDataHPO.py
+++ b/tests/pytest/import_merge/importDataHPO.py
@@ -27,7 +27,7 @@ class TDTestCase:
     def run(self):
         self.ntables = 1
         self.startTime = 1520000010000
-        self.rows = 200
+        self.maxrows = 200
         self.rowsPerTable = 20
 
         tdDnodes.stop(1)
@@ -36,7 +36,7 @@ class TDTestCase:
 
         tdSql.execute('reset query cache')
         tdSql.execute('drop database if exists db')
-        tdSql.execute('create database db rows %d' % self.rows)
+        tdSql.execute('create database db maxrows %d' % self.maxrows)
         tdSql.execute('use db')
 
         tdLog.info("================= step1")
@@ -44,7 +44,7 @@ class TDTestCase:
         tdSql.execute('create table tb1 (ts timestamp, speed int)')
         tdLog.info(
             "More than 10 rows less than %d rows will go to data file" %
-            self.rows)
+            self.maxrows)
 
         tdLog.info("================= step2")
         tdLog.info("import %d sequential data" % self.rowsPerTable)
diff --git a/tests/pytest/import_merge/importDataLastH.py b/tests/pytest/import_merge/importDataLastH.py
index 319fd40677d96e447e167079c4a5cf19fdc63ba5..830711a420428e27dc5edf4b56b71d0dc866ba24 100644
--- a/tests/pytest/import_merge/importDataLastH.py
+++ b/tests/pytest/import_merge/importDataLastH.py
@@ -27,7 +27,7 @@ class TDTestCase:
     def run(self):
         self.ntables = 1
         self.startTime = 1520000010000
-        self.rows = 200
+        self.maxrows = 200
 
         tdDnodes.stop(1)
         tdDnodes.deploy(1)
@@ -35,7 +35,7 @@ class TDTestCase:
 
         tdSql.execute('reset query cache')
         tdSql.execute('drop database if exists db')
-        tdSql.execute('create database db rows %d' % self.rows)
+        tdSql.execute('create database db maxrows %d' % self.maxrows)
         tdSql.execute('use db')
 
         tdLog.info("================= step1")
@@ -43,7 +43,7 @@ class TDTestCase:
         tdSql.execute('create table tb1 (ts timestamp, speed int)')
         tdLog.info(
             "More than %d rows less than %d rows will go to data and last file" %
-            (self.rows, 10 + self.rows))
+            (self.maxrows, 10 + self.maxrows))
tdLog.info("================= step2") tdLog.info("import 205 sequential data") diff --git a/tests/pytest/import_merge/importDataLastHO.py b/tests/pytest/import_merge/importDataLastHO.py index 5a71c5db654a8b40ba1cee6f435b15d3ba6297e2..037c81f0872517e3343bb46aaa9694a658d38cc8 100644 --- a/tests/pytest/import_merge/importDataLastHO.py +++ b/tests/pytest/import_merge/importDataLastHO.py @@ -27,7 +27,7 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdDnodes.stop(1) tdDnodes.deploy(1) @@ -35,7 +35,7 @@ class TDTestCase: tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") @@ -43,7 +43,7 @@ class TDTestCase: tdSql.execute('create table tb1 (ts timestamp, speed int)') tdLog.info( "More than %d rows less than %d rows will go to data and last file" % - (self.rows, 10 + self.rows)) + (self.maxrows, 10 + self.maxrows)) tdLog.info("================= step2") tdLog.info("import 205 sequential data") diff --git a/tests/pytest/import_merge/importDataLastHPO.py b/tests/pytest/import_merge/importDataLastHPO.py index f2c95cbd4d735d1cf1648cd07754bc940a99523e..46a7e5909da922cbc554d4f5c9893346c19e9c87 100644 --- a/tests/pytest/import_merge/importDataLastHPO.py +++ b/tests/pytest/import_merge/importDataLastHPO.py @@ -27,7 +27,7 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdDnodes.stop(1) tdDnodes.deploy(1) @@ -35,7 +35,7 @@ class TDTestCase: tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") @@ -43,7 +43,7 @@ class TDTestCase: tdSql.execute('create table tb1 (ts timestamp, speed int)') tdLog.info( "More than %d rows less than %d rows will go to data and last file" % - (self.rows, 10 + self.rows)) + (self.maxrows, 10 + self.maxrows)) tdLog.info("================= step2") tdLog.info("import 205 sequential data") diff --git a/tests/pytest/import_merge/importDataLastS.py b/tests/pytest/import_merge/importDataLastS.py index 929e02dd1e5f134f229c101db1de9c5c7526ede1..2dd7cdb744e600c0d2b0508afe4b8ad46b139425 100644 --- a/tests/pytest/import_merge/importDataLastS.py +++ b/tests/pytest/import_merge/importDataLastS.py @@ -27,7 +27,7 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdDnodes.stop(1) tdDnodes.deploy(1) @@ -35,7 +35,7 @@ class TDTestCase: tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") @@ -43,7 +43,7 @@ class TDTestCase: tdSql.execute('create table tb1 (ts timestamp, speed int)') tdLog.info( "More than %d rows less than %d rows will go to data and last file" % - (self.rows, 10 + self.rows)) + (self.maxrows, 10 + self.maxrows)) tdLog.info("================= step2") tdLog.info("import 205 sequential data") diff --git a/tests/pytest/import_merge/importDataLastSub.py b/tests/pytest/import_merge/importDataLastSub.py index 
158fa0fb3c7950360979a16d5916719462b841a9..bb9953057e10a29298d613a1e4047d1d6e5d1299 100644 --- a/tests/pytest/import_merge/importDataLastSub.py +++ b/tests/pytest/import_merge/importDataLastSub.py @@ -27,7 +27,7 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdDnodes.stop(1) tdDnodes.deploy(1) @@ -35,7 +35,7 @@ class TDTestCase: tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") @@ -43,7 +43,7 @@ class TDTestCase: tdSql.execute('create table tb1 (ts timestamp, speed int)') tdLog.info( "More than %d rows less than %d rows will go to data and last file" % - (self.rows, 10 + self.rows)) + (self.maxrows, 10 + self.maxrows)) tdLog.info("================= step2") tdLog.info("import 205 sequential data") diff --git a/tests/pytest/import_merge/importDataLastT.py b/tests/pytest/import_merge/importDataLastT.py index 9bc90a8275d842e9b64de79e9c8c143e2d048ed2..29f0afaf1a3a003734c1cc42674e9b96b5a7ced2 100644 --- a/tests/pytest/import_merge/importDataLastT.py +++ b/tests/pytest/import_merge/importDataLastT.py @@ -27,11 +27,11 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") @@ -39,7 +39,7 @@ class TDTestCase: tdSql.execute('create table tb1 (ts timestamp, speed int)') tdLog.info( "More than %d rows less than %d rows will go to data and last file" % - (self.rows, 10 + self.rows)) + (self.maxrows, 10 + self.maxrows)) tdLog.info("================= step2") tdLog.info("import 205 sequential data") diff --git a/tests/pytest/import_merge/importDataLastTO.py b/tests/pytest/import_merge/importDataLastTO.py index 0c93ac430b4fa90f9d535071869eb22ad122cb18..47639130b59f2cc67be66bbf9e2f4a2f35a50112 100644 --- a/tests/pytest/import_merge/importDataLastTO.py +++ b/tests/pytest/import_merge/importDataLastTO.py @@ -27,11 +27,11 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") @@ -39,7 +39,7 @@ class TDTestCase: tdSql.execute('create table tb1 (ts timestamp, speed int)') tdLog.info( "More than %d rows less than %d rows will go to data and last file" % - (self.rows, 10 + self.rows)) + (self.maxrows, 10 + self.maxrows)) tdLog.info("================= step2") tdLog.info("import 205 sequential data") diff --git a/tests/pytest/import_merge/importDataLastTPO.py b/tests/pytest/import_merge/importDataLastTPO.py index 188e93e0dbc2a4742308cb5a4a03fb7accfdba4e..41908365051586bbe4628582e0bfffa3bb97d838 100644 --- a/tests/pytest/import_merge/importDataLastTPO.py +++ b/tests/pytest/import_merge/importDataLastTPO.py @@ -27,11 +27,11 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdSql.execute('reset query cache') 
tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") @@ -39,7 +39,7 @@ class TDTestCase: tdSql.execute('create table tb1 (ts timestamp, speed int)') tdLog.info( "More than %d rows less than %d rows will go to data and last file" % - (self.rows, 10 + self.rows)) + (self.maxrows, 10 + self.maxrows)) tdLog.info("================= step2") tdLog.info("import 205 sequential data") diff --git a/tests/pytest/import_merge/importDataS.py b/tests/pytest/import_merge/importDataS.py index 65d4087c3d79ebf266048d1971e1f4cac513cba4..daa4b2e0252920e8f98779f01d81c6a8353c4071 100644 --- a/tests/pytest/import_merge/importDataS.py +++ b/tests/pytest/import_merge/importDataS.py @@ -27,11 +27,11 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") @@ -39,7 +39,7 @@ class TDTestCase: tdSql.execute('create table tb1 (ts timestamp, speed int)') tdLog.info( "More than 10 rows less than %d rows will go to data file" % - self.rows) + self.maxrows) tdLog.info("================= step2") tdLog.info("import 20 sequential data") diff --git a/tests/pytest/import_merge/importDataSub.py b/tests/pytest/import_merge/importDataSub.py index 4bf85f2bdd39f1734d6e78ad8b9c813ce361b4fb..2359ca214fd7313b93802c3c34a0406f98745647 100644 --- a/tests/pytest/import_merge/importDataSub.py +++ b/tests/pytest/import_merge/importDataSub.py @@ -27,7 +27,7 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdDnodes.stop(1) tdDnodes.deploy(1) @@ -35,7 +35,7 @@ class TDTestCase: tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") @@ -43,19 +43,19 @@ class TDTestCase: tdSql.execute('create table tb1 (ts timestamp, speed int)') tdLog.info( "More than 10 rows less than %d rows will go to data file" % - self.rows) + self.maxrows) tdLog.info("================= step2") - tdLog.info("import %d sequential data" % (self.rows / 2)) + tdLog.info("import %d sequential data" % (self.maxrows / 2)) startTime = self.startTime sqlcmd = ['import into tb1 values'] - for rid in range(1, self.rows / 2 + 1): + for rid in range(1, self.maxrows / 2 + 1): sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) tdSql.execute(" ".join(sqlcmd)) tdLog.info("================= step3") tdSql.query('select * from tb1') - tdSql.checkRows(self.rows / 2) + tdSql.checkRows(self.maxrows / 2) tdLog.info("================= step4") tdDnodes.stop(1) @@ -73,7 +73,7 @@ class TDTestCase: tdLog.info("================= step9") tdSql.execute('reset query cache') tdSql.query('select * from tb1 order by ts desc') - tdSql.checkRows(self.rows / 2) + tdSql.checkRows(self.maxrows / 2) def stop(self): tdSql.close() diff --git a/tests/pytest/import_merge/importDataT.py b/tests/pytest/import_merge/importDataT.py index 66016c5555f2b1dcc3fcad8735d8142ae4575263..abb5e312ef5217e3b1b67583d28a09ea07d6a896 100644 --- 
a/tests/pytest/import_merge/importDataT.py +++ b/tests/pytest/import_merge/importDataT.py @@ -27,11 +27,11 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") @@ -39,7 +39,7 @@ class TDTestCase: tdSql.execute('create table tb1 (ts timestamp, speed int)') tdLog.info( "More than 10 rows less than %d rows will go to data file" % - self.rows) + self.maxrows) tdLog.info("================= step2") tdLog.info("import 20 sequential data") diff --git a/tests/pytest/import_merge/importDataTO.py b/tests/pytest/import_merge/importDataTO.py index a3c17b2846c2dbdfaa115212223568a802d458e4..2a6d9e272b1cbe5c5bb4925538ba54a2d54b2791 100644 --- a/tests/pytest/import_merge/importDataTO.py +++ b/tests/pytest/import_merge/importDataTO.py @@ -27,11 +27,11 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") @@ -39,7 +39,7 @@ class TDTestCase: tdSql.execute('create table tb1 (ts timestamp, speed int)') tdLog.info( "More than 10 rows less than %d rows will go to data file" % - self.rows) + self.maxrows) tdLog.info("================= step2") tdLog.info("import 20 sequential data") diff --git a/tests/pytest/import_merge/importDataTPO.py b/tests/pytest/import_merge/importDataTPO.py index 20eb41cc08755ad09237568fc1f4973e55d354c8..06d5cf3c1a2ff2d9326065f45b2451807effbe22 100644 --- a/tests/pytest/import_merge/importDataTPO.py +++ b/tests/pytest/import_merge/importDataTPO.py @@ -27,11 +27,11 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") @@ -39,7 +39,7 @@ class TDTestCase: tdSql.execute('create table tb1 (ts timestamp, speed int)') tdLog.info( "More than 10 rows less than %d rows will go to data file" % - self.rows) + self.maxrows) tdLog.info("================= step2") tdLog.info("import 20 sequential data") diff --git a/tests/pytest/import_merge/importLastH.py b/tests/pytest/import_merge/importLastH.py index c69f453971eb036382ad78cfc6c451dc6c8fad57..a6f9fa087c7db215a03745054ac6f8367d218e2f 100644 --- a/tests/pytest/import_merge/importLastH.py +++ b/tests/pytest/import_merge/importLastH.py @@ -27,11 +27,11 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") diff --git a/tests/pytest/import_merge/importLastHO.py b/tests/pytest/import_merge/importLastHO.py index 
ec930d1807c3ed9cf1d9b3b4096921c9389f79be..e6468b243e05941cc82310de2c3dc197ab4da7d9 100644 --- a/tests/pytest/import_merge/importLastHO.py +++ b/tests/pytest/import_merge/importLastHO.py @@ -27,11 +27,11 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") diff --git a/tests/pytest/import_merge/importLastHPO.py b/tests/pytest/import_merge/importLastHPO.py index 9603a7b8529d2d435a1e5e174f1d3df7bc41fe30..4a299ed82369a1b81b2a419f3d0f45d0b64404f9 100644 --- a/tests/pytest/import_merge/importLastHPO.py +++ b/tests/pytest/import_merge/importLastHPO.py @@ -27,11 +27,11 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") diff --git a/tests/pytest/import_merge/importLastS.py b/tests/pytest/import_merge/importLastS.py index 7dbe74e2ca0f802861c7b66c63e83fba296d0f14..2a5de46eb2d0a55039caddf352af9dba7f258fca 100644 --- a/tests/pytest/import_merge/importLastS.py +++ b/tests/pytest/import_merge/importLastS.py @@ -27,11 +27,11 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") diff --git a/tests/pytest/import_merge/importLastSub.py b/tests/pytest/import_merge/importLastSub.py index f028ba5fd77603a283bd7e5daf02ffb5fb738813..fa1b2387f317560784bb2b0e28cfe6309a408c89 100644 --- a/tests/pytest/import_merge/importLastSub.py +++ b/tests/pytest/import_merge/importLastSub.py @@ -27,11 +27,11 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") diff --git a/tests/pytest/import_merge/importLastT.py b/tests/pytest/import_merge/importLastT.py index 3fe4e0006c80958822e4ccd7dd1f14c562308820..b7a1e58bc5d19356c1228abe2bfb552aa0277f74 100644 --- a/tests/pytest/import_merge/importLastT.py +++ b/tests/pytest/import_merge/importLastT.py @@ -27,7 +27,7 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdDnodes.stop(1) tdDnodes.deploy(1) @@ -35,7 +35,7 @@ class TDTestCase: tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") diff --git a/tests/pytest/import_merge/importLastTO.py b/tests/pytest/import_merge/importLastTO.py index 
76e5016bdbf87d850caf9b39b9aa4b4277080b5d..541cbd29ca08b61f1a94b0180673552f66452c09 100644 --- a/tests/pytest/import_merge/importLastTO.py +++ b/tests/pytest/import_merge/importLastTO.py @@ -27,7 +27,7 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdDnodes.stop(1) tdDnodes.deploy(1) @@ -35,7 +35,7 @@ class TDTestCase: tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") diff --git a/tests/pytest/import_merge/importLastTPO.py b/tests/pytest/import_merge/importLastTPO.py index 08f416806328b0f3ba391bff350bd6aa954fb7f9..6ec21d0c79e935b7581d7414d3dc098217f76204 100644 --- a/tests/pytest/import_merge/importLastTPO.py +++ b/tests/pytest/import_merge/importLastTPO.py @@ -27,7 +27,7 @@ class TDTestCase: def run(self): self.ntables = 1 self.startTime = 1520000010000 - self.rows = 200 + self.maxrows = 200 tdDnodes.stop(1) tdDnodes.deploy(1) @@ -35,7 +35,7 @@ class TDTestCase: tdSql.execute('reset query cache') tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('create database db maxrows %d' % self.maxrows) tdSql.execute('use db') tdLog.info("================= step1") diff --git a/tests/pytest/smoketest.sh b/tests/pytest/smoketest.sh index 7dbefa94023ce16ec769b203905a8d1d56f920d2..af597fb6c53123da7eb514967a93cf4d7d162642 100755 --- a/tests/pytest/smoketest.sh +++ b/tests/pytest/smoketest.sh @@ -34,12 +34,12 @@ python3 ./test.py $1 -f table/db_table.py python3 ./test.py -s $1 sleep 1 -#python3 ./test.py $1 -f import_merge/importDataLastTO.py -#python3 ./test.py -s $1 -#sleep 1 -#python3 ./test.py $1 -f import_merge/importDataLastT.py -#python3 ./test.py -s $1 -#sleep 1 +python3 ./test.py $1 -f import_merge/importDataLastTO.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importDataLastT.py +python3 ./test.py -s $1 +sleep 1 python3 ./test.py $1 -f import_merge/importDataTO.py python3 ./test.py -s $1 sleep 1
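
For reference, a minimal sketch of the pattern these cases now share, assuming the tdSql helper module from tests/pytest/util behaves as in the hunks above. The condensed body below is hypothetical (it is not a file touched by this patch), and the floor division is shown only because python3's range() requires an integer; the patch itself keeps the original "/" expressions.

# Hypothetical, condensed test body illustrating the rows -> maxrows rename.
from util.sql import tdSql   # assumed helper, as used by the cases above


class TDTestCase:
    def run(self):
        self.maxrows = 200                       # renamed from self.rows
        tdSql.execute('reset query cache')
        tdSql.execute('drop database if exists db')
        # the database option is renamed the same way: "rows" -> "maxrows"
        tdSql.execute('create database db maxrows %d' % self.maxrows)
        tdSql.execute('use db')

        tdSql.execute('create table tb1 (ts timestamp, speed int)')
        half = self.maxrows // 2                 # floor division so range() gets an int
        sqlcmd = ['import into tb1 values']
        for rid in range(1, half + 1):
            sqlcmd.append('(%d, %d)' % (1520000010000 + rid, rid))
        tdSql.execute(" ".join(sqlcmd))

        tdSql.query('select * from tb1')
        tdSql.checkRows(half)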