add @ok_to_fail decorator
andrewhsu authored and savex committed Sep 12, 2023
1 parent 873a139 commit 7074119
Showing 11 changed files with 119 additions and 13 deletions.
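The decorator lets a known-broken test keep running while its outcome is tracked as OPASS or OFAIL instead of PASS or FAIL, so it no longer skews the real pass/fail totals. A minimal usage sketch based on the diff below (the test class and method names are illustrative, not part of this commit):

from ducktape.mark import ok_to_fail
from ducktape.tests.test import Test


class ExampleTest(Test):  # illustrative test class
    @ok_to_fail
    def test_known_flaky_path(self):
        # Runs like any other test, but the runner records the result
        # as OPASS or OFAIL rather than PASS or FAIL.
        assert True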
1 change: 1 addition & 0 deletions .gitignore
@@ -115,3 +115,4 @@ venv.bak/
.idea
/.vagrant/
.vscode
tags
2 changes: 1 addition & 1 deletion ducktape/mark/__init__.py
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from ._mark import parametrize, matrix, defaults, ignore, parametrized, ignored, env, is_env # NOQA
from ._mark import parametrize, matrix, defaults, ignore, ok_to_fail, parametrized, ignored, oked_to_fail, env, is_env # NOQA
43 changes: 43 additions & 0 deletions ducktape/mark/_mark.py
@@ -107,6 +107,24 @@ def __init__(self):
self.injected_args = None


class OkToFail(Mark):
"""Run the test but categorize status as OPASS or OFAIL instead of PASS or FAIL."""

def __init__(self):
super(OkToFail, self).__init__()
self.injected_args = None

@property
def name(self):
return "OK_TO_FAIL"

def apply(self, seed_context, context_list):
        assert len(context_list) > 0, "ok_to_fail annotation is not being applied to any test cases"
for ctx in context_list:
ctx.ok_to_fail = ctx.ok_to_fail or self.injected_args is None
return context_list


class Matrix(Mark):
"""Parametrize with a matrix of arguments.
    Assume each value in self.injected_args is iterable
@@ -218,6 +236,7 @@ def __eq__(self, other):
MATRIX = Matrix()
DEFAULTS = Defaults()
IGNORE = Ignore()
OK_TO_FAIL = OkToFail()
ENV = Env()


@@ -235,6 +254,11 @@ def ignored(f):
return Mark.marked(f, IGNORE)


def oked_to_fail(f):
"""Is this function or object decorated with @ok_to_fail?"""
return Mark.marked(f, OK_TO_FAIL)


def is_env(f):
return Mark.marked(f, ENV)

@@ -411,6 +435,25 @@ def ignorer(f):
return ignorer


def ok_to_fail(*args, **kwargs):
"""
    Test method decorator which signals the test runner to run the test but record its
    status as OPASS or OFAIL, keeping these results separate from PASS and FAIL.
Example::
@ok_to_fail
def the_test(...):
...
"""
if len(args) == 1 and len(kwargs) == 0:
# this corresponds to the usage of the decorator with no arguments
# @ok_to_fail
    # def test_function():
# ...
Mark.mark(args[0], OkToFail())
return args[0]


def env(**kwargs):
def environment(f):
Mark.mark(f, Env(**kwargs))
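The module also gains a companion predicate, oked_to_fail, mirroring ignored. A quick sketch of marking and checking a plain function (assuming, as with the other marks, that Mark.marked returns False for unmarked callables):

from ducktape.mark import ok_to_fail, oked_to_fail


@ok_to_fail
def sample_check():
    pass


assert oked_to_fail(sample_check)      # the OK_TO_FAIL mark is attached
assert not oked_to_fail(lambda: None)  # unmarked callables are not flagged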
8 changes: 8 additions & 0 deletions ducktape/templates/report/report.css
@@ -78,6 +78,14 @@ h1, h2, h3, h4, h5, h6 {
background-color: #555;
}

.ofail {
background-color: #ffc;
}

.opass {
background-color: #9cf;
}

.testcase {
margin-left: 2em;
}
12 changes: 12 additions & 0 deletions ducktape/templates/report/report.html
@@ -12,6 +12,8 @@
<div id="failed_test_panel"></div>
<div id="ignored_test_panel"></div>
<div id="flaky_test_panel"></div>
<div id="opassed_test_panel"></div>
<div id="ofailed_test_panel"></div>
<div id="passed_test_panel"></div>
<script type="text/jsx">
/* This small block makes it possible to use React dev tools in the Chrome browser */
@@ -42,6 +44,8 @@ <h1>
<td colSpan='5' align='center'>{this.props.summary_prop.flaky}</td>
<td colSpan='5' align='center'>{this.props.summary_prop.failures}</td>
<td colSpan='5' align='center'>{this.props.summary_prop.ignored}</td>
<td colSpan='5' align='center'>{this.props.summary_prop.opassed}</td>
<td colSpan='5' align='center'>{this.props.summary_prop.ofailed}</td>
<td colSpan='5' align='center'>{this.props.summary_prop.run_time}</td>
</tr>
);
@@ -59,6 +63,8 @@ <h1>
<th colSpan='5' align='center'>Flaky</th>
<th colSpan='5' align='center'>Failures</th>
<th colSpan='5' align='center'>Ignored</th>
<th colSpan='5' align='center'>OPassed</th>
<th colSpan='5' align='center'>OFailed</th>
<th colSpan='5' align='center'>Time</th>
</tr>
</thead>
@@ -183,6 +189,8 @@ <h2>{this.props.title}</h2>
"flaky": %(num_flaky)d,
"failures": %(num_failures)d,
"ignored": %(num_ignored)d,
"opassed": %(num_opassed)d,
"ofailed": %(num_ofailed)d,
"run_time": '%(run_time)s'
}];

@@ -197,13 +205,17 @@ <h2>{this.props.title}</h2>
FLAKY_TESTS=[%(flaky_tests)s];
FAILED_TESTS=[%(failed_tests)s];
IGNORED_TESTS=[%(ignored_tests)s];
OPASSED_TESTS=[%(opassed_tests)s];
OFAILED_TESTS=[%(ofailed_tests)s];

React.render(<Heading heading={HEADING}/>, document.getElementById('heading'));
React.render(<ColorKeyPanel test_status_names={COLOR_KEYS}/>, document.getElementById('color_key_panel'));
React.render(<SummaryPanel summary_props={SUMMARY}/>, document.getElementById('summary_panel'));
React.render(<TestPanel title="Failed Tests" tests={FAILED_TESTS}/>, document.getElementById('failed_test_panel'));
React.render(<TestPanel title="Ignored Tests" tests={IGNORED_TESTS}/>, document.getElementById('ignored_test_panel'));
React.render(<TestPanel title="Flaky Tests" tests={FLAKY_TESTS}/>, document.getElementById('flaky_test_panel'));
React.render(<TestPanel title="OPassed Tests" tests={OPASSED_TESTS}/>, document.getElementById('opassed_test_panel'));
React.render(<TestPanel title="OFailed Tests" tests={OFAILED_TESTS}/>, document.getElementById('ofailed_test_panel'));
React.render(<TestPanel title="Passed Tests" tests={PASSED_TESTS}/>, document.getElementById('passed_test_panel'));
</script>
</body>
34 changes: 30 additions & 4 deletions ducktape/tests/reporter.py
@@ -26,7 +26,7 @@

from ducktape.utils.terminal_size import get_terminal_size
from ducktape.utils.util import ducktape_version
from ducktape.tests.status import PASS, FAIL, IGNORE, FLAKY
from ducktape.tests.status import PASS, FAIL, IGNORE, FLAKY, OPASS, OFAIL
from ducktape.json_serializable import DucktapeJSONEncoder


@@ -115,6 +115,8 @@ def footer_string(self):
"flaky: %d" % self.results.num_flaky,
"failed: %d" % self.results.num_failed,
"ignored: %d" % self.results.num_ignored,
"opassed: %d" % self.results.num_opassed,
"ofailed: %d" % self.results.num_ofailed,
"=" * self.width
]

@@ -126,15 +128,21 @@ def report_string(self):
passed = []
ignored = []
failed = []
ofail = []
opass = []
for result in self.results:
if result.test_status == FAIL:
failed.append(result)
elif result.test_status == IGNORE:
ignored.append(result)
elif result.test_status == OPASS:
opass.append(result)
elif result.test_status == OFAIL:
ofail.append(result)
else:
passed.append(result)

ordered_results = passed + ignored + failed
ordered_results = passed + ignored + failed + opass + ofail

report_lines = \
[SingleResultReporter(result).result_string() + "\n" + "-" * self.width for result in ordered_results]
@@ -193,8 +201,13 @@ def report(self):
testsuite['failures'] += 1
elif result.test_status == IGNORE:
testsuite['skipped'] += 1
elif result.test_status == OPASS:
testsuite['skipped'] += 1
elif result.test_status == OFAIL:
testsuite['skipped'] += 1

total = self.results.num_failed + self.results.num_ignored + self.results.num_passed + self.results.num_flaky
        total = self.results.num_failed + self.results.num_ignored + self.results.num_passed + \
            self.results.num_flaky + self.results.num_opassed + self.results.num_ofailed
# Now start building XML document
root = ET.Element('testsuites', attrib=dict(
name="ducktape", time=str(self.results.run_time_seconds),
@@ -282,6 +295,8 @@ def format_report(self):
passed_result_string = []
ignored_result_string = []
flaky_result_string = []
opassed_result_string = []
ofailed_result_string = []

for result in self.results:
json_string = json.dumps(self.format_result(result))
@@ -298,6 +313,12 @@
elif result.test_status == FLAKY:
flaky_result_string.append(json_string)
flaky_result_string.append(",")
elif result.test_status == OPASS:
opassed_result_string.append(json_string)
opassed_result_string.append(",")
elif result.test_status == OFAIL:
ofailed_result_string.append(json_string)
ofailed_result_string.append(",")
else:
raise Exception("Unknown test status in report: {}".format(result.test_status.to_json()))

@@ -308,13 +329,18 @@
'num_flaky': self.results.num_flaky,
'num_failures': self.results.num_failed,
'num_ignored': self.results.num_ignored,
'num_opassed': self.results.num_opassed,
'num_ofailed': self.results.num_ofailed,
'run_time': format_time(self.results.run_time_seconds),
'session': self.results.session_context.session_id,
'passed_tests': "".join(passed_result_string),
'flaky_tests': "".join(flaky_result_string),
'failed_tests': "".join(failed_result_string),
'ignored_tests': "".join(ignored_result_string),
'test_status_names': ",".join(["\'%s\'" % str(status) for status in [PASS, FAIL, IGNORE, FLAKY]])
'ofailed_tests': "".join(ofailed_result_string),
'opassed_tests': "".join(opassed_result_string),
'test_status_names': ",".join(["\'%s\'" % str(status) for status in [PASS, FAIL, IGNORE, FLAKY, OPASS,
OFAIL]])
}

html = template % args
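One design note on the XML reporter above: OPASS and OFAIL are both folded into the JUnit 'skipped' bucket, so CI systems consuming the XML treat ok-to-fail outcomes as neither passes nor failures. A condensed, equivalent sketch of that mapping (tally is a hypothetical helper, not part of this commit):

from ducktape.tests.status import FAIL, IGNORE, OPASS, OFAIL


def tally(testsuite, status):
    # condensed equivalent of the elif chain in report() above
    if status == FAIL:
        testsuite['failures'] += 1
    elif status in (IGNORE, OPASS, OFAIL):
        testsuite['skipped'] += 1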
12 changes: 11 additions & 1 deletion ducktape/tests/result.py
@@ -21,7 +21,7 @@
from ducktape.tests.reporter import SingleResultFileReporter
from ducktape.utils.local_filesystem_utils import mkdir_p
from ducktape.utils.util import ducktape_version
from ducktape.tests.status import FLAKY, PASS, FAIL, IGNORE
from ducktape.tests.status import FLAKY, PASS, FAIL, IGNORE, OPASS, OFAIL


class TestResult(object):
@@ -166,6 +166,14 @@ def num_ignored(self):
def num_flaky(self):
return len([r for r in self._results if r.test_status == FLAKY])

@property
def num_opassed(self):
return len([r for r in self._results if r.test_status == OPASS])

@property
def num_ofailed(self):
return len([r for r in self._results if r.test_status == OFAIL])

@property
def run_time_seconds(self):
if self.start_time < 0:
@@ -222,6 +230,8 @@ def to_json(self):
"num_passed": self.num_passed,
"num_failed": self.num_failed,
"num_ignored": self.num_ignored,
"num_opassed": self.num_opassed,
"num_ofailed": self.num_ofailed,
"parallelism": parallelism,
"results": [r for r in self._results]
}
8 changes: 5 additions & 3 deletions ducktape/tests/runner_client.py
@@ -29,7 +29,7 @@
from ducktape.tests.status import FLAKY
from ducktape.tests.test import test_logger, TestContext

from ducktape.tests.result import TestResult, IGNORE, PASS, FAIL
from ducktape.tests.result import TestResult, IGNORE, PASS, FAIL, OPASS, OFAIL
from ducktape.utils.local_filesystem_utils import mkdir_p


@@ -240,8 +240,10 @@ def _check_cluster_utilization(self, result, summary):
# only check node utilization on test pass
if result == PASS or result == FLAKY:
self.log(logging.INFO, "FAIL: " + message)

            result = FAIL
elif result == OPASS:
self.log(logging.INFO, "OFAIL: " + message)
result = OFAIL
summary += message
else:
self.log(logging.WARN, message)
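The runner now demotes within each track: a test that passes but misuses cluster resources goes from PASS/FLAKY to FAIL, while OPASS goes to OFAIL, so ok-to-fail results never leak into the real failure count. A condensed sketch of that rule (demote_on_bad_utilization is a hypothetical helper, written only to illustrate the branches above):

from ducktape.tests.status import PASS, FLAKY, FAIL, OPASS, OFAIL


def demote_on_bad_utilization(result):
    # mirrors _check_cluster_utilization: passing statuses are demoted
    # to the failing status of the same track
    if result == PASS or result == FLAKY:
        return FAIL
    if result == OPASS:
        return OFAIL
    return result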
2 changes: 2 additions & 0 deletions ducktape/tests/status.py
@@ -31,3 +31,5 @@ def to_json(self):
FLAKY = TestStatus("flaky")
FAIL = TestStatus("fail")
IGNORE = TestStatus("ignore")
OPASS = TestStatus("opass")
OFAIL = TestStatus("ofail")
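The new constants follow the existing TestStatus pattern: module-level singletons wrapping a status name. The HTML reporter builds its color keys from str(status), which is assumed here to yield the lowercase name matching the new .opass/.ofail CSS classes:

from ducktape.tests.status import OPASS, OFAIL

# assumed behavior, inferred from the reporter's color-key code
assert str(OPASS) == "opass"
assert str(OFAIL) == "ofail"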
9 changes: 5 additions & 4 deletions ducktape/tests/test.py
@@ -28,7 +28,7 @@
from ducktape.services.service_registry import ServiceRegistry
from ducktape.template import TemplateRenderer
from ducktape.mark.resource import CLUSTER_SPEC_KEYWORD, CLUSTER_SIZE_KEYWORD
from ducktape.tests.status import FAIL
from ducktape.tests.status import FAIL, OFAIL


class Test(TemplateRenderer):
@@ -151,7 +151,7 @@ def copy_service_logs(self, test_status):
# Gather locations of logs to collect
node_logs = []
for log_name in log_dirs.keys():
if test_status == FAIL or self.should_collect_log(log_name, service):
if test_status == FAIL or test_status == OFAIL or self.should_collect_log(log_name, service):
node_logs.append(log_dirs[log_name]["path"])

self.test_context.logger.debug("Preparing to copy logs from %s: %s" %
@@ -304,6 +304,7 @@ def __init__(self, **kwargs):
self.function = kwargs.get("function")
self.injected_args = kwargs.get("injected_args")
self.ignore = kwargs.get("ignore", False)
self.ok_to_fail = kwargs.get("ok_to_fail", False)

# cluster_use_metadata is a dict containing information about how this test will use cluster resources
self.cluster_use_metadata = copy.copy(kwargs.get("cluster_use_metadata", {}))
@@ -320,9 +321,9 @@ def __init__(self, **kwargs):
def __repr__(self):
return \
"<module=%s, cls=%s, function=%s, injected_args=%s, file=%s, ignore=%s, " \
"cluster_size=%s, cluster_spec=%s>" % \
"ok_to_fail=%s, cluster_size=%s, cluster_spec=%s>" % \
(self.module, self.cls_name, self.function_name, str(self.injected_args), str(self.file),
str(self.ignore), str(self.expected_num_nodes), str(self.expected_cluster_spec))
str(self.ignore), str(self.ok_to_fail), str(self.expected_num_nodes), str(self.expected_cluster_spec))

def copy(self, **kwargs):
"""Construct a new TestContext object from another TestContext object
1 change: 1 addition & 0 deletions tests/mark/check_ok_to_fail.py
@@ -3,6 +3,7 @@

import pytest


class CheckOkToFail(object):
def check_simple(self):
@ok_to_fail
