test: runner support for flaky tests
Add a --flaky-tests option that allows flaky test failures to be regarded as non-fatal. The option is currently only observed by the TapProgressIndicator, which appends a # TODO directive to tests classified as flaky. According to the TAP specification, the test harness is supposed to treat failures that carry a # TODO directive as non-fatal.
parent 13ad06e095
commit 7f87b82fc4
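For context, the TAP "# TODO" directive marks a test whose failure is expected, so a conforming harness counts it as non-fatal. With this change, a run under --flaky-tests=dontcare would report a failing flaky test along these lines (test names here are hypothetical):

    ok 1 - test-http-simple.js
    not ok 2 - test-net-connect.js # TODO : Fix flaky test
    ok 3 - test-fs-read.js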
@@ -55,8 +55,9 @@ VERBOSE = False
 
 class ProgressIndicator(object):
 
-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     self.cases = cases
+    self.flaky_tests_mode = flaky_tests_mode
     self.queue = Queue(len(cases))
     for case in cases:
       self.queue.put_nowait(case)
@@ -234,13 +235,19 @@ class TapProgressIndicator(SimpleProgressIndicator):
     self._done += 1
     command = basename(output.command[-1])
     if output.UnexpectedOutput():
-      print 'not ok %i - %s' % (self._done, command)
+      status_line = 'not ok %i - %s' % (self._done, command)
+      if FLAKY in output.test.outcomes and self.flaky_tests_mode == "dontcare":
+        status_line = status_line + " # TODO : Fix flaky test"
+      print status_line
       for l in output.output.stderr.splitlines():
         print '#' + l
       for l in output.output.stdout.splitlines():
         print '#' + l
     else:
-      print 'ok %i - %s' % (self._done, command)
+      status_line = 'ok %i - %s' % (self._done, command)
+      if FLAKY in output.test.outcomes:
+        status_line = status_line + " # TODO : Fix flaky test"
+      print status_line
 
     duration = output.test.duration
 
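To see the branch logic in one place, here is a minimal standalone sketch of the status-line construction above; tap_status_line and its parameters are illustrative stand-ins for the indicator's real state, not part of the commit:

    FLAKY = 'flaky'

    def tap_status_line(done, command, failed, outcomes, flaky_tests_mode):
        # Build the plain TAP line first, then append the # TODO directive:
        # on failure only when --flaky-tests=dontcare, on success whenever
        # the test is marked flaky.
        line = '%s %i - %s' % ('not ok' if failed else 'ok', done, command)
        if FLAKY in outcomes and (not failed or flaky_tests_mode == 'dontcare'):
            line += ' # TODO : Fix flaky test'
        return line

    print(tap_status_line(2, 'test-net-connect.js', True, ['flaky'], 'dontcare'))
    # not ok 2 - test-net-connect.js # TODO : Fix flaky test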
@@ -258,8 +265,8 @@ class TapProgressIndicator(SimpleProgressIndicator):
 
 class CompactProgressIndicator(ProgressIndicator):
 
-  def __init__(self, cases, templates):
-    super(CompactProgressIndicator, self).__init__(cases)
+  def __init__(self, cases, flaky_tests_mode, templates):
+    super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode)
     self.templates = templates
     self.last_status_length = 0
     self.start_time = time.time()
@@ -314,13 +321,13 @@ class CompactProgressIndicator(ProgressIndicator):
 
 class ColorProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
       'stdout': "\033[1m%s\033[0m",
       'stderr': "\033[31m%s\033[0m",
     }
-    super(ColorProgressIndicator, self).__init__(cases, templates)
+    super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
 
   def ClearLine(self, last_line_length):
     print "\033[1K\r",
@@ -328,7 +335,7 @@ class ColorProgressIndicator(CompactProgressIndicator):
 
 class MonochromeProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
       'stdout': '%s',
@@ -336,7 +343,7 @@ class MonochromeProgressIndicator(CompactProgressIndicator):
       'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
       'max_length': 78
     }
-    super(MonochromeProgressIndicator, self).__init__(cases, templates)
+    super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
 
   def ClearLine(self, last_line_length):
     print ("\r" + (" " * last_line_length) + "\r"),
@@ -737,8 +744,8 @@ class Context(object):
   def GetTimeout(self, mode):
     return self.timeout * TIMEOUT_SCALEFACTOR[mode]
 
-def RunTestCases(cases_to_run, progress, tasks):
-  progress = PROGRESS_INDICATORS[progress](cases_to_run)
+def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
+  progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
   return progress.Run(tasks)
 
 
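The signature changes ripple through every indicator class because RunTestCases instantiates whichever class the PROGRESS_INDICATORS table selects, with one fixed argument list. A reduced sketch of that dispatch, assuming a name-to-class dict as in the runner (the table entries shown are illustrative, not the runner's full set):

    PROGRESS_INDICATORS = {
        'tap': TapProgressIndicator,          # the only one that acts on the mode
        'color': ColorProgressIndicator,      # accepts it and forwards it
        'mono': MonochromeProgressIndicator,
    }

    def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
        # Every class in the table must share this constructor shape, which
        # is why flaky_tests_mode is threaded through all __init__ methods.
        progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
        return progress.Run(tasks)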
@@ -762,6 +769,7 @@ OKAY = 'okay'
 TIMEOUT = 'timeout'
 CRASH = 'crash'
 SLOW = 'slow'
+FLAKY = 'flaky'
 
 
 class Expression(object):
@@ -1209,6 +1217,9 @@ def BuildOptions():
       default=False, action="store_true")
   result.add_option("--cat", help="Print the source of the tests",
       default=False, action="store_true")
+  result.add_option("--flaky-tests",
+      help="Regard tests marked as flaky (run|skip|dontcare)",
+      default="run")
   result.add_option("--warn-unused", help="Report unused rules",
       default=False, action="store_true")
   result.add_option("-j", help="The number of parallel tasks to run",
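Assuming the runner sits at tools/test.py as in the Node.js tree, a TAP run that downgrades flaky failures would be invoked roughly like this (the progress-indicator flag is taken from the runner's existing options and may vary by revision):

    python tools/test.py --progress=tap --flaky-tests=dontcare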
@@ -1234,6 +1245,13 @@ def ProcessOptions(options):
   VERBOSE = options.verbose
   options.arch = options.arch.split(',')
   options.mode = options.mode.split(',')
+  def CheckTestMode(name, option):
+    if not option in ["run", "skip", "dontcare"]:
+      print "Unknown %s mode %s" % (name, option)
+      return False
+    return True
+  if not CheckTestMode("--flaky-tests", options.flaky_tests):
+    return False
   return True
 
 
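CheckTestMode makes typos fail fast: an unrecognized value is reported and ProcessOptions returns False, so the runner stops before executing any tests. A hypothetical session:

    $ python tools/test.py --flaky-tests=ignore
    Unknown --flaky-tests mode ignore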
@@ -1436,15 +1454,15 @@ def Main():
 
   result = None
   def DoSkip(case):
-    return SKIP in case.outcomes or SLOW in case.outcomes
+    return SKIP in case.outcomes or SLOW in case.outcomes or (FLAKY in case.outcomes and options.flaky_tests == "skip")
   cases_to_run = [ c for c in all_cases if not DoSkip(c) ]
   if len(cases_to_run) == 0:
     print "No tests to run."
-    return 0
+    return 1
   else:
     try:
       start = time.time()
-      if RunTestCases(cases_to_run, options.progress, options.j):
+      if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests):
         result = 0
       else:
         result = 1
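Taken together, the three accepted values of --flaky-tests behave as follows:

- run (the default): flaky tests execute and their failures count like any other failure; a passing flaky test is still tagged # TODO as a reminder to fix it.
- skip: DoSkip filters tests marked FLAKY out of cases_to_run, so they never execute.
- dontcare: flaky tests execute, but the TAP indicator appends # TODO to their failures, which a conforming harness treats as non-fatal.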
|
Loading…
x
Reference in New Issue
Block a user