Commit cea22a4a authored by Christopher Hajas

Print more output when behave backup and transfer tests fail. (#2082)

Parent 2415aff4
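In short, the commit replaces the plain equality checks in the behave steps with a difflib-based diff_files() helper so that failures report the actual difference between the expected (.ans) and result (.out) files, and it makes the incremental dirty-list check in the TINC backup tests report which tables differ. A minimal, standalone sketch of the difflib approach follows; the file names and contents below are hypothetical and only illustrate what the new failure output contains, they are not part of the commit.

```python
import difflib

# Hypothetical expected (.ans) and actual (.out) lines, for illustration only.
expected_lines = ['1|apple\n', '2|banana\n', '3|cherry\n']
result_lines = ['1|apple\n', '2|blueberry\n', '3|cherry\n']

# unified_diff yields nothing when the inputs match, so an empty string means
# the files are identical; otherwise the diff itself goes into the error message.
diff_contents = ''.join(difflib.unified_diff(expected_lines, result_lines,
                                             fromfile='check_metadata.ans',
                                             tofile='check_metadata.out'))
if diff_contents:
    print('Expected file does not match result file.\nDiff contents:\n%s' % diff_contents)
```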
......@@ -2129,7 +2129,7 @@ Feature: Validate command line arguments
When the user runs "gpdbrestore -a -t 30160101010101 -u /tmp"
Then gpdbrestore should return a return code of 0
And the user runs "psql -f test/behave/mgmt_utils/steps/data/check_metadata.sql bkdb > /tmp/check_metadata.out"
And verify that the contents of the files "/tmp/check_metadata.out" and "test/behave/mgmt_utils/steps/data/check_metadata.ans" are identical
And verify that the contents of the files "test/behave/mgmt_utils/steps/data/check_metadata.ans" and "/tmp/check_metadata.out" are identical
And the directory "/tmp/db_dumps" is removed or does not exist
And the directory "/tmp/check_metadata.out" is removed or does not exist
......@@ -3126,7 +3126,7 @@ Feature: Validate command line arguments
And the user runs command "psql -f test/behave/mgmt_utils/steps/data/special_chars/select_from_special_table.sql " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " > /tmp/special_table_data.ans"
And the user runs gpdbrestore with the stored timestamp
And the user runs command "psql -f test/behave/mgmt_utils/steps/data/special_chars/select_from_special_table.sql " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " > /tmp/special_table_data.out"
And verify that the contents of the files "/tmp/special_table_data.out" and "/tmp/special_table_data.ans" are identical
And verify that the contents of the files "/tmp/special_table_data.ans" and "/tmp/special_table_data.out" are identical
# -s option
When the user runs command "gpcrondump -a -x " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " -s " S\`~@#\$%^&*()-+[{]}|\\;: \\'\"/?><1 ""
......@@ -3135,7 +3135,7 @@ Feature: Validate command line arguments
And the user runs command "psql -f test/behave/mgmt_utils/steps/data/special_chars/select_from_special_table.sql " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " > /tmp/special_table_data.ans"
And the user runs gpdbrestore with the stored timestamp
And the user runs command "psql -f test/behave/mgmt_utils/steps/data/special_chars/select_from_special_table.sql " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " > /tmp/special_table_data.out"
And verify that the contents of the files "/tmp/special_table_data.out" and "/tmp/special_table_data.ans" are identical
And verify that the contents of the files "/tmp/special_table_data.ans" and "/tmp/special_table_data.out" are identical
# --exclude-schema-file option
When the user runs command "gpcrondump -a -x " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " --exclude-schema-file test/behave/mgmt_utils/steps/data/special_chars/schema-file.txt"
......@@ -3252,7 +3252,7 @@ Feature: Validate command line arguments
When the user runs command "psql -f test/behave/mgmt_utils/steps/data/special_chars/select_from_special_table.sql " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " > /tmp/special_table_data.ans"
And the user runs gpdbrestore with the stored timestamp
And the user runs command "psql -f test/behave/mgmt_utils/steps/data/special_chars/select_from_special_table.sql " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " > /tmp/special_table_data.out"
Then verify that the contents of the files "/tmp/special_table_data.out" and "/tmp/special_table_data.ans" are identical
Then verify that the contents of the files "/tmp/special_table_data.ans" and "/tmp/special_table_data.out" are identical
# cleanup
And the user runs "psql -f test/behave/mgmt_utils/steps/data/special_chars/drop_special_database.sql template1"
......@@ -3272,7 +3272,7 @@ Feature: Validate command line arguments
When the user runs command "psql -f test/behave/mgmt_utils/steps/data/special_chars/select_from_special_table.sql " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " > /tmp/special_table_data.ans"
When the user runs gpdbrestore with the stored timestamp and options "--redirect " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;2 "" without -e option
And the user runs command "psql -f test/behave/mgmt_utils/steps/data/special_chars/select_from_special_table.sql " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;2 " > /tmp/special_table_data.out"
Then verify that the contents of the files "/tmp/special_table_data.out" and "/tmp/special_table_data.ans" are identical
Then verify that the contents of the files "/tmp/special_table_data.ans" and "/tmp/special_table_data.out" are identical
# cleanup
And the directory "/tmp/special_table_data.out" is removed or does not exist
......@@ -3298,13 +3298,13 @@ Feature: Validate command line arguments
When the user runs command "psql -f test/behave/mgmt_utils/steps/data/special_chars/select_from_special_table.sql " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " > /tmp/special_table_data.ans"
When the user runs gpdbrestore with the stored timestamp and options "-S " S\`~@#\$%^&*()-+[{]}|\\;: \\'\"/?><1 ""
And the user runs command "psql -f test/behave/mgmt_utils/steps/data/special_chars/select_from_special_table.sql " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " > /tmp/special_table_data.out"
Then verify that the contents of the files "/tmp/special_table_data.out" and "/tmp/special_table_data.ans" are identical
Then verify that the contents of the files "/tmp/special_table_data.ans" and "/tmp/special_table_data.out" are identical
# -S with truncate option
When the user runs "gpdbrestore -S " S\`~@#\$%^&*()-+[{]}|\\;: \\'\"/?><1 " -a --truncate" with the stored timestamp
Then gpdbrestore should return a return code of 0
And the user runs command "psql -f test/behave/mgmt_utils/steps/data/special_chars/select_from_special_table.sql " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " > /tmp/special_table_data.out"
Then verify that the contents of the files "/tmp/special_table_data.out" and "/tmp/special_table_data.ans" are identical
Then verify that the contents of the files "/tmp/special_table_data.ans" and "/tmp/special_table_data.out" are identical
# cleanup
And the directory "/tmp/special_table_data.out" is removed or does not exist
......@@ -3328,7 +3328,7 @@ Feature: Validate command line arguments
And the user runs "psql -f test/behave/mgmt_utils/steps/data/special_chars/truncate_special_ao_table.sql template1"
And the user runs gpdbrestore with the stored timestamp and options "--noplan" without -e option
And the user runs command "psql -f test/behave/mgmt_utils/steps/data/special_chars/select_from_special_ao_table.sql " DB\`~@#\$%^&*()_-+[{]}|\\;: \\'/?><;1 " > /tmp/special_ao_table_data.out"
Then verify that the contents of the files "/tmp/special_ao_table_data.out" and "/tmp/special_ao_table_data.ans" are identical
Then verify that the contents of the files "/tmp/special_ao_table_data.ans" and "/tmp/special_ao_table_data.out" are identical
# cleanup
And the directory "/tmp/special_ao_table_data.out" is removed or does not exist
......
......@@ -768,49 +768,53 @@ def get_timestamp_from_output_for_db(context):
return db_timestamps
@then('verify data integrity of database "{dbname}" between source and destination system, work-dir "{dir}"')
def impl(context, dbname, dir):
@then('verify data integrity of database "{dbname}" between source and destination system, work-dir "{dirname}"')
def impl(context, dbname, dirname):
dbconn_src = 'psql -p $GPTRANSFER_SOURCE_PORT -h $GPTRANSFER_SOURCE_HOST -U $GPTRANSFER_SOURCE_USER -d %s' % dbname
dbconn_dest = 'psql -p $GPTRANSFER_DEST_PORT -h $GPTRANSFER_DEST_HOST -U $GPTRANSFER_DEST_USER -d %s' % dbname
for file in os.listdir(dir):
if file.endswith('.sql'):
filename_prefix = os.path.splitext(file)[0]
ans_file_path = os.path.join(dir,filename_prefix + '.ans')
out_file_path = os.path.join(dir,filename_prefix + '.out')
diff_file_path = os.path.join(dir,filename_prefix + '.diff')
for filename in os.listdir(dirname):
if filename.endswith('.sql'):
filename_prefix = os.path.splitext(filename)[0]
ans_file_path = os.path.join(dirname,filename_prefix + '.ans')
out_file_path = os.path.join(dirname,filename_prefix + '.out')
diff_file_path = os.path.join(dirname,filename_prefix + '.diff')
# run the command to get the exact data from the source system
command = '%s -f %s > %s' % (dbconn_src, os.path.join(dir, file), ans_file_path)
command = '%s -f %s > %s' % (dbconn_src, os.path.join(dirname, filename), ans_file_path)
run_command(context, command)
# run the command to get the data from the destination system, locally
command = '%s -f %s > %s' % (dbconn_dest, os.path.join(dir, file), out_file_path)
command = '%s -f %s > %s' % (dbconn_dest, os.path.join(dirname, filename), out_file_path)
run_command(context, command)
gpdiff_cmd = 'gpdiff.pl -w -I NOTICE: -I HINT: -I CONTEXT: -I GP_IGNORE: --gpd_init=test/behave/mgmt_utils/steps/data/global_init_file %s %s > %s' % (ans_file_path, out_file_path, diff_file_path)
run_command(context, gpdiff_cmd)
if context.ret_code != 0:
raise Exception ("Found difference between source and destination system, see %s" % file)
with open(diff_file_path, 'r') as diff_file:
diff_file_contents = diff_file.read()
raise Exception ("Found difference between source and destination system, see %s. \n Diff contents: \n %s" % (diff_file_path, diff_file_contents))
@then('run post verifying workload under "{dir}"')
def impl(context, dir):
for file in os.listdir(dir):
if file.endswith('.sql'):
filename_prefix = os.path.splitext(file)[0]
ans_file_path = os.path.join(dir,filename_prefix+'.ans')
out_file_path = os.path.join(dir,filename_prefix+'.out')
diff_file_path = os.path.join(dir,filename_prefix+'.diff')
@then('run post verifying workload under "{dirname}"')
def impl(context, dirname):
for filename in os.listdir(dirname):
if filename.endswith('.sql'):
filename_prefix = os.path.splitext(filename)[0]
ans_file_path = os.path.join(dirname,filename_prefix+'.ans')
out_file_path = os.path.join(dirname,filename_prefix+'.out')
diff_file_path = os.path.join(dirname,filename_prefix+'.diff')
# run the command to get the data from the destination system, locally
dbconn = 'psql -d template1 -p $GPTRANSFER_DEST_PORT -U $GPTRANSFER_DEST_USER -h $GPTRANSFER_DEST_HOST'
command = '%s -f %s > %s'%(dbconn, os.path.join(dir,file), out_file_path)
command = '%s -f %s > %s'%(dbconn, os.path.join(dirname,filename), out_file_path)
run_command(context, command)
gpdiff_cmd = 'gpdiff.pl -w -I NOTICE: -I HINT: -I CONTEXT: -I GP_IGNORE: --gpd_init=test/behave/mgmt_utils/steps/data/global_init_file %s %s > %s'%(ans_file_path, out_file_path, diff_file_path)
run_command(context, gpdiff_cmd)
for file in os.listdir(dir):
if file.endswith('.diff') and os.path.getsize(os.path.join(dir,file)) > 0:
for filename in os.listdir(dirname):
if filename.endswith('.diff') and os.path.getsize(os.path.join(dirname,filename)) > 0:
with open(filename, 'r') as diff_file:
diff_file_contents = diff_file.read()
# if there is some difference generated into the diff file, raise exception
raise Exception ("Found difference between source and destination system, see %s"%file)
raise Exception ("Found difference between source and destination system, see %s. \n Diff contents: \n %s" % (filename, diff_file_contents))
@then('verify that the incremental file has the stored timestamp')
def impl(context):
......@@ -1386,8 +1390,7 @@ def impl(context, filename):
current_dir = os.path.dirname(current_path)
golden_filename = "%s/%s" % (current_dir, filename)
generated_filename = get_plan_filename(context)
if not filecmp.cmp(generated_filename, golden_filename):
raise Exception("File contents do not match '%s' and '%s'" % (generated_filename, golden_filename))
diff_files(golden_filename, generated_filename)
def parse_plan_file(filename):
plan = {}
......@@ -3564,20 +3567,11 @@ def impl(context, query, dbname, sec):
thread.start_new_thread(getRows, (dbname, query))
time.sleep(30)
@given('verify that the contents of the files "{filepath1}" and "{filepath2}" are identical')
@when('verify that the contents of the files "{filepath1}" and "{filepath2}" are identical')
@then('verify that the contents of the files "{filepath1}" and "{filepath2}" are identical')
def impl(context, filepath1, filepath2):
contents1 = []
contents2 = []
with open(filepath1) as fr1:
contents1 = fr1.readlines()
with open(filepath2) as fr2:
contents2 = fr2.readlines()
if (contents1 != contents2):
raise Exception("Contents of the files: %s and %s do not match" % (filepath1, filepath2))
@given('verify that the contents of the files "{expected_filepath}" and "{result_filepath}" are identical')
@when('verify that the contents of the files "{expected_filepath}" and "{result_filepath}" are identical')
@then('verify that the contents of the files "{expected_filepath}" and "{result_filepath}" are identical')
def impl(context, expected_filepath, result_filepath):
diff_files(expected_filepath, result_filepath)
@given('the standby is not initialized')
@then('the standby is not initialized')
......
......@@ -8,6 +8,7 @@ import stat
import time
import glob
import shutil
import difflib
import yaml
......@@ -295,9 +296,13 @@ def get_table_data_to_file(filename, tablename, dbname):
print "Exception: %s" % str(e)
conn.close()
def diff_backup_restore_data(context, backup_file, restore_file):
if not filecmp.cmp(backup_file, restore_file):
raise Exception('%s and %s do not match' % (backup_file, restore_file))
def diff_files(expected_file, result_file):
with open (expected_file,'r') as expected_f:
with open(result_file, 'r') as result_f:
diff_contents = difflib.unified_diff(expected_f.readlines(), result_f.readlines())
diff_contents = ''.join(diff_contents)
if diff_contents:
raise Exception('Expected file %s does not match result file %s. Diff Contents: %s\r' % (expected_file, result_file, diff_contents))
def validate_restore_data(context, new_table, dbname, backedup_table=None, backedup_dbname=None):
if new_table == "public.gpcrondump_history":
......@@ -322,7 +327,7 @@ def validate_restore_data(context, new_table, dbname, backedup_table=None, backe
restore_filename = dbname + '_' + new_table.strip()
restore_path = os.path.join(current_dir, './test/data', restore_filename + "_restore")
diff_backup_restore_data(context, backup_path, restore_path)
diff_files(backup_path, restore_path)
def validate_restore_data_in_file(context, tablename, dbname, file_name, backedup_table=None):
filename = file_name + "_restore"
......@@ -333,7 +338,7 @@ def validate_restore_data_in_file(context, tablename, dbname, file_name, backedu
else:
backup_file = os.path.join(current_dir, './test/data', file_name + "_backup")
restore_file = os.path.join(current_dir, './test/data', file_name + "_restore")
diff_backup_restore_data(context, backup_file, restore_file)
diff_files(backup_file, restore_file)
def validate_db_data(context, dbname, expected_table_count, backedup_dbname=None):
tbls = get_table_names(dbname)
......@@ -854,7 +859,7 @@ def validate_distribution_policy(context, dbname):
current_dir = os.getcwd()
backup_file = os.path.join(current_dir, './test/data', dbname.strip() + "_dist_policy_backup")
restore_file = os.path.join(current_dir, './test/data', dbname.strip() + "_dist_policy_restore")
diff_backup_restore_data(context, backup_file, restore_file)
diff_files(backup_file, restore_file)
def check_row_count(tablename, dbname, nrows):
NUM_ROWS_QUERY = 'select count(*) from %s' % tablename
......
......@@ -608,9 +608,15 @@ class BackupTestCase(TINCTestCase):
dump_dirty_list = self.sort_file_contents(os.path.join(dump_dir, 'db_dumps', '%s' % self.backup_timestamp[0:8] ,'%sgp_dump_%s_dirty_list' % (prefix, self.backup_timestamp)))
tinctest.logger.info("output dump_dirty_list_file: %s" % (os.path.join(dump_dir, 'db_dumps', '%s' % self.backup_timestamp[0:8] ,'%sgp_dump_%s_dirty_list' % (prefix, self.backup_timestamp))))
if dirty_list != dump_dirty_list :
ans_set = set(dirty_list)
out_set = set(dump_dirty_list)
if ans_set > out_set:
msg = "The following tables are present in the answer file but not the output: " + ','.join(ans_set - out_set)
else:
msg = "The following tables are present in the output but not the answer file: " + ','.join(out_set - ans_set)
tinctest.logger.info("dirty_list:\n%s" % dirty_list)
tinctest.logger.info("dump_dirty_list:\n%s" % dump_dirty_list)
raise Exception("Incremental backup validation failed with diff")
raise Exception("Incremental backup validation failed with diff: %s" % msg)
else:
tinctest.logger.info("The tablelist for the incremental backup is validated")
......
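The dirty-list validation above now explains which table names differ instead of failing with a generic message. A small standalone illustration of that set-difference reporting (the table names are made up for this sketch):

```python
# Hypothetical answer-file list and dumped dirty list, for illustration only.
dirty_list = ['public.t1', 'public.t2', 'public.t3']
dump_dirty_list = ['public.t1', 'public.t3']

ans_set = set(dirty_list)
out_set = set(dump_dirty_list)
if ans_set > out_set:
    # The answer file is a strict superset: report tables missing from the output.
    msg = "The following tables are present in the answer file but not the output: " + ','.join(ans_set - out_set)
else:
    # Otherwise report tables that appear in the output but not the answer file.
    msg = "The following tables are present in the output but not the answer file: " + ','.join(out_set - ans_set)
print(msg)
```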