def _run_flaky_test(self, case, result, flaky):
    """
    Run a test that has been decorated with the `@flaky` decorator.

    The test is attempted repeatedly until it has either passed
    ``flaky.min_passes`` times, or failed often enough that the minimum
    number of passes can no longer be reached within ``flaky.max_runs``
    attempts.

    :param TestCase case: A ``testtools.TestCase`` to run.
    :param TestResult result: A ``TestResult`` object that conforms to the
        testtools extended result interface.
    :param _FlakyAnnotation flaky: A description of the conditions of
        flakiness.
    :return: A ``TestResult`` with the result of running the flaky test.
    """
    result.startTest(case)
    successes = 0
    # One (result_type, details) pair per attempt, in attempt order.
    results = []
    # Optimization to stop running early if there's no way that we can
    # reach the minimum number of successes.  With ``max_fails`` failures
    # we can still pass exactly when every remaining attempt succeeds, so
    # the loop below keeps going while failures <= max_fails.
    max_fails = flaky.max_runs - flaky.min_passes
    while (successes < flaky.min_passes and
           len(results) - successes <= max_fails):
        was_successful, result_type, details = self._attempt_test(case)
        if was_successful:
            successes += 1
        results.append((result_type, details))
    successful = successes >= flaky.min_passes

    # Attach a human-readable summary of the flakiness parameters and the
    # observed runs/passes to whatever outcome we report.
    flaky_data = flaky.to_dict()
    flaky_data.update({'runs': len(results), 'passes': successes})
    flaky_details = {
        'flaky': text_content(pformat(flaky_data)),
    }
    combined_details = _combine_details(
        [flaky_details] + list(r[1] for r in results))

    if successful:
        # If any attempt skipped, report the skip(s) instead of a success:
        # a skip means the test body didn't really run.
        skip_reported = False
        for result_type, details in results:
            if result_type == _ResultType.skip:
                result.addSkip(case, details=details)
                skip_reported = True
        if not skip_reported:
            # Log one message per genuinely-flaky pass.  ``runs`` is the
            # total number of attempts, matching the ``runs``/``passes``
            # vocabulary used in ``flaky_data`` above.  (Previously this
            # field was misnamed ``passes=len(results)``, contradicting
            # ``flaky_data['passes']`` which counts successes.)
            Message.new(
                message_type=u"flocker:test:flaky",
                id=case.id(),
                successes=successes,
                runs=len(results),
                min_passes=flaky.min_passes,
                max_runs=flaky.max_runs,
            ).write()
            result.addSuccess(case, details=combined_details)
    else:
        # XXX: How are we going to report on tests that sometimes fail,
        # sometimes error, sometimes skip? Currently we just error.
        result.addError(case, details=combined_details)
    result.stopTest(case)
    return result