tests: Clean up failed test list in parallel-vm.py
Instead of printing a very long line of the failed tests, print the test case names on separate lines up to the number of available lines at the bottom of the screen. This avoids some issues with curses and overlong lines. Furthermore, display the last failed test cases instead of the somewhat confusing sequence of test case names from the VMs. Signed-off-by: Jouni Malinen <j@w1.fi>
This commit is contained in:
parent
e36a7c7940
commit
72641f924e
1 changed files with 18 additions and 11 deletions
|
@ -96,6 +96,7 @@ def vm_read_stdout(vm, test_queue):
|
||||||
global total_started, total_passed, total_failed, total_skipped
|
global total_started, total_passed, total_failed, total_skipped
|
||||||
global rerun_failures
|
global rerun_failures
|
||||||
global first_run_failures
|
global first_run_failures
|
||||||
|
global all_failed
|
||||||
|
|
||||||
ready = False
|
ready = False
|
||||||
try:
|
try:
|
||||||
|
@ -136,6 +137,7 @@ def vm_read_stdout(vm, test_queue):
|
||||||
name = vals[1]
|
name = vals[1]
|
||||||
logger.debug("VM[%d] test case failed: %s" % (vm['idx'], name))
|
logger.debug("VM[%d] test case failed: %s" % (vm['idx'], name))
|
||||||
vm['failed'].append(name)
|
vm['failed'].append(name)
|
||||||
|
all_failed.append(name)
|
||||||
if name != vm['current_name']:
|
if name != vm['current_name']:
|
||||||
logger.info("VM[%d] test result mismatch: %s (expected %s)" % (vm['idx'], name, vm['current_name']))
|
logger.info("VM[%d] test result mismatch: %s (expected %s)" % (vm['idx'], name, vm['current_name']))
|
||||||
else:
|
else:
|
||||||
|
@ -267,20 +269,20 @@ def update_screen(scr, total_tests):
|
||||||
scr.addstr("{} %".format(int(100.0 * (total_passed + total_failed + total_skipped) / total_tests)))
|
scr.addstr("{} %".format(int(100.0 * (total_passed + total_failed + total_skipped) / total_tests)))
|
||||||
scr.addstr(num_servers + 1, 20,
|
scr.addstr(num_servers + 1, 20,
|
||||||
"TOTAL={} STARTED={} PASS={} FAIL={} SKIP={}".format(total_tests, total_started, total_passed, total_failed, total_skipped))
|
"TOTAL={} STARTED={} PASS={} FAIL={} SKIP={}".format(total_tests, total_started, total_passed, total_failed, total_skipped))
|
||||||
failed = get_failed(vm)
|
global all_failed
|
||||||
if len(failed) > 0:
|
max_y, max_x = scr.getmaxyx()
|
||||||
|
max_lines = max_y - num_servers - 3
|
||||||
|
if len(all_failed) > 0 and max_lines > 0:
|
||||||
scr.move(num_servers + 2, 0)
|
scr.move(num_servers + 2, 0)
|
||||||
scr.clrtoeol()
|
scr.addstr("Last failed test cases:")
|
||||||
scr.addstr("Failed test cases: ")
|
if max_lines >= len(all_failed):
|
||||||
|
max_lines = len(all_failed)
|
||||||
count = 0
|
count = 0
|
||||||
for f in failed:
|
for i in range(len(all_failed) - max_lines, len(all_failed)):
|
||||||
count += 1
|
count += 1
|
||||||
if count > 30:
|
scr.move(num_servers + 2 + count, 0)
|
||||||
scr.addstr('...')
|
scr.addstr(all_failed[i])
|
||||||
scr.clrtoeol()
|
scr.clrtoeol()
|
||||||
break
|
|
||||||
scr.addstr(f)
|
|
||||||
scr.addstr(' ')
|
|
||||||
scr.refresh()
|
scr.refresh()
|
||||||
|
|
||||||
def show_progress(scr):
|
def show_progress(scr):
|
||||||
|
@ -366,6 +368,7 @@ def main():
|
||||||
import os
|
import os
|
||||||
global num_servers
|
global num_servers
|
||||||
global vm
|
global vm
|
||||||
|
global all_failed
|
||||||
global dir
|
global dir
|
||||||
global timestamp
|
global timestamp
|
||||||
global tests
|
global tests
|
||||||
|
@ -484,6 +487,7 @@ def main():
|
||||||
log_handler.setFormatter(log_formatter)
|
log_handler.setFormatter(log_formatter)
|
||||||
logger.addHandler(log_handler)
|
logger.addHandler(log_handler)
|
||||||
|
|
||||||
|
all_failed = []
|
||||||
vm = {}
|
vm = {}
|
||||||
for i in range(0, num_servers):
|
for i in range(0, num_servers):
|
||||||
cmd = [os.path.join(scriptsdir, 'vm-run.sh'),
|
cmd = [os.path.join(scriptsdir, 'vm-run.sh'),
|
||||||
|
@ -520,6 +524,9 @@ def main():
|
||||||
pass
|
pass
|
||||||
def clrtoeol(self):
|
def clrtoeol(self):
|
||||||
pass
|
pass
|
||||||
|
def getmaxyx(self):
|
||||||
|
return (25, 80)
|
||||||
|
|
||||||
show_progress(FakeScreen())
|
show_progress(FakeScreen())
|
||||||
|
|
||||||
with open('{}/{}-parallel.log'.format(dir, timestamp), 'w') as f:
|
with open('{}/{}-parallel.log'.format(dir, timestamp), 'w') as f:
|
||||||
|
|
Loading…
Reference in a new issue