tests: Maintain test result counters through the run
This avoids having to go through the full output buffer from each VM for each update during a test run. Signed-off-by: Jouni Malinen <j@w1.fi>
This commit is contained in:
parent
8041102874
commit
1f4de34ec1
1 changed file with 44 additions and 28 deletions
|
@@ -16,25 +16,15 @@ import time
|
||||||
|
|
||||||
logger = logging.getLogger()
|
logger = logging.getLogger()
|
||||||
|
|
||||||
def get_results():
|
def get_failed(vm):
|
||||||
global vm
|
|
||||||
started = []
|
|
||||||
passed = []
|
|
||||||
failed = []
|
failed = []
|
||||||
skipped = []
|
for i in range(num_servers):
|
||||||
for i in range(0, num_servers):
|
failed += vm[i]['failed']
|
||||||
lines = vm[i]['out'].splitlines()
|
return failed
|
||||||
started += [ l for l in lines if l.startswith('START ') ]
|
|
||||||
passed += [ l for l in lines if l.startswith('PASS ') ]
|
|
||||||
failed += [ l for l in lines if l.startswith('FAIL ') ]
|
|
||||||
skipped += [ l for l in lines if l.startswith('SKIP ') ]
|
|
||||||
return (started, passed, failed, skipped)
|
|
||||||
|
|
||||||
def vm_ready_str(s):
|
|
||||||
return s.startswith("READY") or s.startswith("PASS") or \
|
|
||||||
s.startswith("FAIL") or s.startswith("SKIP")
|
|
||||||
|
|
||||||
def vm_read_stdout(vm, i):
|
def vm_read_stdout(vm, i):
|
||||||
|
global total_started, total_passed, total_failed, total_skipped
|
||||||
|
|
||||||
ready = False
|
ready = False
|
||||||
try:
|
try:
|
||||||
out = vm['proc'].stdout.read()
|
out = vm['proc'].stdout.read()
|
||||||
|
@@ -50,8 +40,26 @@ def vm_read_stdout(vm, i):
|
||||||
line = pending[0:pos].rstrip()
|
line = pending[0:pos].rstrip()
|
||||||
pending = pending[(pos + 1):]
|
pending = pending[(pos + 1):]
|
||||||
logger.debug("VM[%d] stdout full line[%s]" % (i, line))
|
logger.debug("VM[%d] stdout full line[%s]" % (i, line))
|
||||||
if vm_ready_str(line):
|
if line.startswith("READY"):
|
||||||
ready = True
|
ready = True
|
||||||
|
elif line.startswith("PASS"):
|
||||||
|
ready = True
|
||||||
|
total_passed += 1
|
||||||
|
elif line.startswith("FAIL"):
|
||||||
|
ready = True
|
||||||
|
total_failed += 1
|
||||||
|
name = line.split(' ')[1]
|
||||||
|
logger.debug("VM[%d] test case failed: %s" % (i, name))
|
||||||
|
vm['failed'].append(name)
|
||||||
|
elif line.startswith("NOT-FOUND"):
|
||||||
|
ready = True
|
||||||
|
total_failed += 1
|
||||||
|
logger.info("VM[%d] test case not found" % i)
|
||||||
|
elif line.startswith("SKIP"):
|
||||||
|
ready = True
|
||||||
|
total_skipped += 1
|
||||||
|
elif line.startswith("START"):
|
||||||
|
total_started += 1
|
||||||
vm['out'] += line + '\n'
|
vm['out'] += line + '\n'
|
||||||
lines.append(line)
|
lines.append(line)
|
||||||
vm['pending'] = pending
|
vm['pending'] = pending
|
||||||
|
@@ -64,6 +72,7 @@ def show_progress(scr):
|
||||||
global timestamp
|
global timestamp
|
||||||
global tests
|
global tests
|
||||||
global first_run_failures
|
global first_run_failures
|
||||||
|
global total_started, total_passed, total_failed, total_skipped
|
||||||
|
|
||||||
total_tests = len(tests)
|
total_tests = len(tests)
|
||||||
logger.info("Total tests: %d" % total_tests)
|
logger.info("Total tests: %d" % total_tests)
|
||||||
|
@@ -137,9 +146,7 @@ def show_progress(scr):
|
||||||
logger.info("Unexpected test cases remaining from first round: " + str(tests))
|
logger.info("Unexpected test cases remaining from first round: " + str(tests))
|
||||||
raise Exception("Unexpected test cases remaining from first round")
|
raise Exception("Unexpected test cases remaining from first round")
|
||||||
completed_first_pass = True
|
completed_first_pass = True
|
||||||
(started, passed, failed, skipped) = get_results()
|
for name in get_failed(vm):
|
||||||
for f in failed:
|
|
||||||
name = f.split(' ')[1]
|
|
||||||
rerun_tests.append(name)
|
rerun_tests.append(name)
|
||||||
first_run_failures.append(name)
|
first_run_failures.append(name)
|
||||||
|
|
||||||
|
@@ -195,17 +202,17 @@ def show_progress(scr):
|
||||||
break
|
break
|
||||||
|
|
||||||
if updated:
|
if updated:
|
||||||
(started, passed, failed, skipped) = get_results()
|
|
||||||
scr.move(num_servers + 1, 10)
|
scr.move(num_servers + 1, 10)
|
||||||
scr.clrtoeol()
|
scr.clrtoeol()
|
||||||
scr.addstr("{} %".format(int(100.0 * (len(passed) + len(failed) + len(skipped)) / total_tests)))
|
scr.addstr("{} %".format(int(100.0 * (total_passed + total_failed + total_skipped) / total_tests)))
|
||||||
scr.addstr(num_servers + 1, 20, "TOTAL={} STARTED={} PASS={} FAIL={} SKIP={}".format(total_tests, len(started), len(passed), len(failed), len(skipped)))
|
scr.addstr(num_servers + 1, 20, "TOTAL={} STARTED={} PASS={} FAIL={} SKIP={}".format(total_tests, total_started, total_passed, total_failed, total_skipped))
|
||||||
|
failed = get_failed(vm)
|
||||||
if len(failed) > 0:
|
if len(failed) > 0:
|
||||||
scr.move(num_servers + 2, 0)
|
scr.move(num_servers + 2, 0)
|
||||||
scr.clrtoeol()
|
scr.clrtoeol()
|
||||||
scr.addstr("Failed test cases: ")
|
scr.addstr("Failed test cases: ")
|
||||||
for f in failed:
|
for f in failed:
|
||||||
scr.addstr(f.split(' ')[1])
|
scr.addstr(f)
|
||||||
scr.addstr(' ')
|
scr.addstr(' ')
|
||||||
|
|
||||||
scr.move(0, 35)
|
scr.move(0, 35)
|
||||||
|
@@ -229,6 +236,12 @@ def main():
|
||||||
global timestamp
|
global timestamp
|
||||||
global tests
|
global tests
|
||||||
global first_run_failures
|
global first_run_failures
|
||||||
|
global total_started, total_passed, total_failed, total_skipped
|
||||||
|
|
||||||
|
total_started = 0
|
||||||
|
total_passed = 0
|
||||||
|
total_failed = 0
|
||||||
|
total_skipped = 0
|
||||||
|
|
||||||
debug_level = logging.INFO
|
debug_level = logging.INFO
|
||||||
|
|
||||||
|
@@ -344,6 +357,7 @@ def main():
|
||||||
vm[i]['out'] = ""
|
vm[i]['out'] = ""
|
||||||
vm[i]['pending'] = ""
|
vm[i]['pending'] = ""
|
||||||
vm[i]['err'] = ""
|
vm[i]['err'] = ""
|
||||||
|
vm[i]['failed'] = []
|
||||||
for stream in [ vm[i]['proc'].stdout, vm[i]['proc'].stderr ]:
|
for stream in [ vm[i]['proc'].stdout, vm[i]['proc'].stderr ]:
|
||||||
fd = stream.fileno()
|
fd = stream.fileno()
|
||||||
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
|
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
|
||||||
|
@@ -356,7 +370,7 @@ def main():
|
||||||
for i in range(0, num_servers):
|
for i in range(0, num_servers):
|
||||||
f.write('VM {}\n{}\n{}\n'.format(i, vm[i]['out'], vm[i]['err']))
|
f.write('VM {}\n{}\n{}\n'.format(i, vm[i]['out'], vm[i]['err']))
|
||||||
|
|
||||||
(started, passed, failed, skipped) = get_results()
|
failed = get_failed(vm)
|
||||||
|
|
||||||
if first_run_failures:
|
if first_run_failures:
|
||||||
print "Failed test cases:"
|
print "Failed test cases:"
|
||||||
|
@@ -365,8 +379,7 @@ def main():
|
||||||
logger.info("Failed: " + f)
|
logger.info("Failed: " + f)
|
||||||
print
|
print
|
||||||
double_failed = []
|
double_failed = []
|
||||||
for f in failed:
|
for name in failed:
|
||||||
name = f.split(' ')[1]
|
|
||||||
double_failed.append(name)
|
double_failed.append(name)
|
||||||
for test in first_run_failures:
|
for test in first_run_failures:
|
||||||
double_failed.remove(test)
|
double_failed.remove(test)
|
||||||
|
@@ -379,7 +392,10 @@
|
||||||
print f,
|
print f,
|
||||||
logger.info("Failed on retry: " + f)
|
logger.info("Failed on retry: " + f)
|
||||||
print
|
print
|
||||||
res = "TOTAL={} PASS={} FAIL={} SKIP={}".format(len(started), len(passed), len(failed), len(skipped))
|
res = "TOTAL={} PASS={} FAIL={} SKIP={}".format(total_started,
|
||||||
|
total_passed,
|
||||||
|
total_failed,
|
||||||
|
total_skipped)
|
||||||
print(res)
|
print(res)
|
||||||
logger.info(res)
|
logger.info(res)
|
||||||
print "Logs: " + dir + '/' + str(timestamp)
|
print "Logs: " + dir + '/' + str(timestamp)
|
||||||
|
|
Loading…
Add table
Reference in a new issue