提交 f7fb724c 编写于 作者: J Jim Doty 提交者: David Krieger

Cleanup gpexpand behave tests and use dump of ICW database

- Some tests were expanding into /tmp which ran out of space, so
now expand into /data/gpdata.
- Consolidate test to verify redistribution after expand.
- Actually use dump of ICW database in the relevant test.
Co-authored-by: David Krieger <dkrieger@pivotal.io>
Co-authored-by: Jim Doty <jdoty@pivotal.io>
Co-authored-by: Kalen Krempely <kkrempely@pivotal.io>
上级 99d48e1e
......@@ -12,7 +12,7 @@
## file (example: templates/gpdb-tpl.yml) and regenerate the pipeline
## using appropriate tool (example: gen_pipeline.py -t prod).
## ----------------------------------------------------------------------
## Generated by gen_pipeline.py at: 2018-11-19 13:46:58.098114
## Generated by gen_pipeline.py at: 2018-11-29 09:21:43.022135
## Template file: gpdb-tpl.yml
## OS Types: ['centos6', 'centos7', 'sles', 'aix7', 'win', 'ubuntu16']
## Test Sections: ['ICW', 'Replication', 'ResourceGroups', 'Interconnect', 'CLI', 'UD', 'AA', 'Extensions']
......@@ -300,15 +300,6 @@ resources:
region_name: {{aws-region}}
bucket: gpdb5-pipeline-dynamic-terraform
bucket_path: clusters-google/
- name: snowflake-simple-database-dump # remove this resource in favor of icw_gporca_centos6_dump once gpexpand works with a dump of ICW
type: s3
source:
access_key_id: {{bucket-access-key-id}}
bucket: {{test-data-bucket-name}}
secret_access_key: {{bucket-secret-access-key}}
region_name: {{aws-region}}
versioned_file: snowflake-simple-database/dump.sql.xz
- name: aix_environments
type: pool
source:
......@@ -1832,7 +1823,6 @@ jobs:
- get: centos-gpdb-dev-6
- get: icw_gporca_centos6_dump
passed: [gate_icw_end]
- get: snowflake-simple-database-dump # remove once icw_gporca_centos6_dump works with gpexpand
- put: terraform
params:
<<: *ccp_default_params
......@@ -1846,7 +1836,7 @@ jobs:
- task: pre_run_test_setup
file: gpdb_src/concourse/tasks/setup_for_gpexpand_to_make_new_gpdb.yml
image: centos-gpdb-dev-6
input_mapping: {sqldump: snowflake-simple-database-dump} # use icw_gporca_centos6_dump when it works with gpexpand
input_mapping: {sqldump: icw_gporca_centos6_dump}
- task: run_tests
file: gpdb_src/concourse/tasks/run_behave.yml
params:
......@@ -1871,7 +1861,7 @@ jobs:
trigger: true
- get: ccp_src
- get: centos-gpdb-dev-6
- get: snowflake-simple-database-dump # remove once icw_gporca_centos6_dump works with gpexpand
- get: icw_gporca_centos6_dump
- put: terraform
params:
<<: *ccp_default_params
......@@ -1887,7 +1877,7 @@ jobs:
- task: pre_run_test_setup
image: centos-gpdb-dev-6
file: gpdb_src/concourse/tasks/setup_for_gpexpand_to_make_new_gpdb.yml
input_mapping: {sqldump: snowflake-simple-database-dump} # use icw_gporca_centos6_dump when it works with gpexpand
input_mapping: {sqldump: icw_gporca_centos6_dump}
- task: run_tests
file: gpdb_src/concourse/tasks/run_tinc.yml
image: centos-gpdb-dev-6
......@@ -1912,7 +1902,7 @@ jobs:
trigger: true
- get: ccp_src
- get: centos-gpdb-dev-6
- get: snowflake-simple-database-dump # remove once icw_gporca_centos6_dump works with gpexpand
- get: icw_gporca_centos6_dump
- put: terraform
params:
<<: *ccp_default_params
......@@ -1928,7 +1918,7 @@ jobs:
- task: pre_run_test_setup
image: centos-gpdb-dev-6
file: gpdb_src/concourse/tasks/setup_for_gpexpand_to_make_new_gpdb.yml
input_mapping: {sqldump: snowflake-simple-database-dump} # use icw_gporca_centos6_dump when it works with gpexpand
input_mapping: {sqldump: icw_gporca_centos6_dump}
- task: run_tests
file: gpdb_src/concourse/tasks/run_tinc.yml
image: centos-gpdb-dev-6
......
......@@ -407,15 +407,6 @@ resources:
region_name: {{aws-region}}
bucket: gpdb5-pipeline-dynamic-terraform
bucket_path: clusters-google/
- name: snowflake-simple-database-dump # remove this resource in favor of icw_gporca_centos6_dump once gpexpand works with a dump of ICW
type: s3
source:
access_key_id: {{bucket-access-key-id}}
bucket: {{test-data-bucket-name}}
secret_access_key: {{bucket-secret-access-key}}
region_name: {{aws-region}}
versioned_file: snowflake-simple-database/dump.sql.xz
{% endif %}
{% if "aix7" in os_types %}
- name: aix_environments
......@@ -1924,7 +1915,6 @@ jobs:
- get: centos-gpdb-dev-6
- get: icw_gporca_centos6_dump
passed: [gate_icw_end]
- get: snowflake-simple-database-dump # remove once icw_gporca_centos6_dump works with gpexpand
- put: terraform
params:
<<: *ccp_default_params
......@@ -1938,7 +1928,7 @@ jobs:
- task: pre_run_test_setup
file: gpdb_src/concourse/tasks/setup_for_gpexpand_to_make_new_gpdb.yml
image: centos-gpdb-dev-6
input_mapping: {sqldump: snowflake-simple-database-dump} # use icw_gporca_centos6_dump when it works with gpexpand
input_mapping: {sqldump: icw_gporca_centos6_dump}
- task: run_tests
file: gpdb_src/concourse/tasks/run_behave.yml
params:
......@@ -1964,7 +1954,7 @@ jobs:
trigger: [[ test_trigger ]]
- get: ccp_src
- get: centos-gpdb-dev-6
- get: snowflake-simple-database-dump # remove once icw_gporca_centos6_dump works with gpexpand
- get: icw_gporca_centos6_dump
- put: terraform
params:
<<: *ccp_default_params
......@@ -1980,7 +1970,7 @@ jobs:
- task: pre_run_test_setup
image: centos-gpdb-dev-6
file: gpdb_src/concourse/tasks/setup_for_gpexpand_to_make_new_gpdb.yml
input_mapping: {sqldump: snowflake-simple-database-dump} # use icw_gporca_centos6_dump when it works with gpexpand
input_mapping: {sqldump: icw_gporca_centos6_dump}
- task: run_tests
file: gpdb_src/concourse/tasks/run_tinc.yml
image: centos-gpdb-dev-6
......
......@@ -2779,9 +2779,9 @@ def main(options, args, parser):
if not _gp_expand.validate_unalterable_tables():
raise ValidationError()
if _gp_expand.check_unique_indexes():
logger.info("Tables with unique indexes exist. Until these tables are successfully")
logger.info("redistributed, unique constraints may be violated. For more information")
logger.info("on this issue, see the Greenplum Database Administrator Guide")
logger.warn("Tables with unique indexes exist. Until these tables are successfully")
logger.warn("redistributed, unique constraints may be violated. For more information")
logger.warn("on this issue, see the Greenplum Database Administrator Guide")
if not options.silent:
if not ask_yesno(None, "Would you like to continue with System Expansion", 'N'):
raise ValidationError()
......
-- Replace this with a dump of ICW once it works with gpexpand
-- Minimal SQL fixture used by the gpexpand pipeline tests: it creates and
-- populates one tiny table so the expanded cluster has data to redistribute.
-- Session settings mirror what a pg_dump-style restore would emit.
SET client_encoding = 'UTF8';
SET standard_conforming_strings = off;
SET escape_string_warning = 'off';
-- Simple two-column heap table; no distribution clause, so the server's
-- default distribution policy applies.
CREATE TABLE test (
    name text,
    age int
);
-- Seed rows so redistribution after expansion has something to move.
INSERT INTO test (name, age)
VALUES ('a', 1), ('b', 2);
......@@ -2089,11 +2089,15 @@ def impl(context, num_of_segments, num_of_hosts, hostnames):
if num_of_hosts != len(hosts):
raise Exception("Incorrect amount of hosts. number of hosts:%s\nhostnames: %s" % (num_of_hosts, hosts))
temp_base_dir = context.temp_base_dir
primary_dir = os.path.join(temp_base_dir, 'data', 'primary')
base_dir = "/tmp"
if hasattr(context, "temp_base_dir"):
base_dir = context.temp_base_dir
elif hasattr(context, "working_directory"):
base_dir = context.working_directory
primary_dir = os.path.join(base_dir, 'data', 'primary')
mirror_dir = ''
if context.gpexpand_mirrors_enabled:
mirror_dir = os.path.join(temp_base_dir, 'data', 'mirror')
mirror_dir = os.path.join(base_dir, 'data', 'mirror')
directory_pairs = []
# we need to create the tuples for the interview to work.
......@@ -2112,10 +2116,10 @@ def impl(context, num_of_segments, num_of_hosts, hostnames):
def impl(context):
map(os.remove, glob.glob("gpexpand_inputfile*"))
@when('the user runs gpexpand with the latest gpexpand_inputfile')
def impl(context):
@when('the user runs gpexpand with the latest gpexpand_inputfile with additional parameters {additional_params}')
def impl(context, additional_params=''):
gpexpand = Gpexpand(context, working_directory=context.working_directory, database='gptest')
ret_code, std_err, std_out = gpexpand.initialize_segments()
ret_code, std_err, std_out = gpexpand.initialize_segments(additional_params)
if ret_code != 0:
raise Exception("gpexpand exited with return code: %d.\nstderr=%s\nstdout=%s" % (ret_code, std_err, std_out))
......@@ -2264,10 +2268,14 @@ def impl(context, num_of_segments):
@given('the cluster is setup for an expansion on hosts "{hostnames}"')
def impl(context, hostnames):
hosts = hostnames.split(",")
temp_base_dir = context.temp_base_dir
base_dir = "/tmp"
if hasattr(context, "temp_base_dir"):
base_dir = context.temp_base_dir
elif hasattr(context, "working_directory"):
base_dir = context.working_directory
for host in hosts:
cmd = Command(name='create data directories for expansion',
cmdStr="mkdir -p %s/data/primary; mkdir -p %s/data/mirror" % (temp_base_dir, temp_base_dir),
cmdStr="mkdir -p %s/data/primary; mkdir -p %s/data/mirror" % (base_dir, base_dir),
ctxt=REMOTE,
remoteHost=host)
cmd.run(validateAfter=True)
......@@ -2283,8 +2291,10 @@ def impl(context, tmp_base_dir):
@given('the new host "{hostnames}" is ready to go')
def impl(context, hostnames):
hosts = hostnames.split(',')
reset_hosts(hosts, context.working_directory)
reset_hosts(hosts, context.temp_base_dir)
if hasattr(context, "working_directory"):
reset_hosts(hosts, context.working_directory)
if hasattr(context, "temp_base_dir"):
reset_hosts(hosts, context.temp_base_dir)
@then('the database is killed on hosts "{hostnames}"')
@given('the database is killed on hosts "{hostnames}"')
......
......@@ -63,9 +63,9 @@ class Gpexpand:
return output, p1.wait()
def initialize_segments(self):
def initialize_segments(self, additional_params=''):
input_files = sorted(glob.glob('%s/gpexpand_inputfile*' % self.working_directory))
return run_gpcommand(self.context, "gpexpand -D %s -i %s" % (self.database, input_files[-1]))
return run_gpcommand(self.context, "gpexpand -D %s -i %s %s" % (self.database, input_files[-1], additional_params))
def redistribute(self, duration, endtime):
# Can flake with "[ERROR]:-End time occurs in the past"
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册