Skip to content

Commit

Permalink
Code formatting using black (infra) (#1163)
Browse files Browse the repository at this point in the history
* Set line length to 79 when calling black

black uses pyproject.toml to store its configuration. Update the
different projects in the monorepo to use a line length of 79 when
calling black.

See black documentation[1] for more information.

[1]
https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-via-a-file

* Update contributing guide to mention black

* Add black GitHub Action

* Ignore vendorized code when running black

* Format all the codebase using black

Command used:

    black . --line-length 79 --extend-exclude "/vendor/"


* Ignore E203 (Whitespace before ':')

As explained in this issue[1], the E203 rule[2] is not PEP 8 compliant,
so it should be ignored when running flake8.

Black is now being used in the codebase, and it takes care of the
formatting.

[1] psf/black#315
[2] https://www.flake8rules.com/rules/E203.html

* Ignore flake8 complaints for a few specific lines in the base provider

Although Black is configured with a line length of 79 characters, it
keeps a few lines at 80, 81, or 82 characters. Flake8 will complain
about these lines, but the warnings can be safely ignored.
  • Loading branch information
pieqq authored Apr 11, 2024
1 parent 91a4ce8 commit 3789fdd
Show file tree
Hide file tree
Showing 483 changed files with 31,331 additions and 20,025 deletions.
17 changes: 17 additions & 0 deletions .github/workflows/black.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
name: Black formatting (79 chars lines)

on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
workflow_dispatch:

jobs:
black:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: psf/black@stable
with:
options: "--check --diff --line-length 79 --extend-exclude '/vendor/'"
5 changes: 5 additions & 0 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ its providers and its documentation.

## General recommendations

- The codebase uses [black] for formatting, with `line-length` set to `79`.
- Use `git config blame.ignoreRevsFile .git-blame-ignore-revs` if you want to
[ignore commits related to black formatting].
- Setup your editor of choice to run [autopep8] on save. This helps keep
everything passing [flake8].
- The code doesn’t have to be pylint-clean, but
Expand Down Expand Up @@ -385,6 +388,8 @@ review of the source files.
Once all is good, you can submit your documentation change like any other
changes using a pull request.

[black]: https://black.readthedocs.io/
[ignore commits related to black formatting]: https://black.readthedocs.io/en/stable/guides/introducing_black_to_your_project.html#avoiding-ruining-git-blame
[autopep8]: https://pypi.org/project/autopep8/
[flake8]: https://flake8.pycqa.org/en/latest/
[pylint]: https://www.pylint.org/
Expand Down
5 changes: 3 additions & 2 deletions checkbox-ng/checkbox_ng/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,5 +33,6 @@
__version__ = version(__package__)
except PackageNotFoundError:
import logging
logging.error('Failed to retrieve checkbox-ng version')
__version__ = 'unknown'

logging.error("Failed to retrieve checkbox-ng version")
__version__ = "unknown"
26 changes: 14 additions & 12 deletions checkbox-ng/checkbox_ng/certification.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ class SubmissionServiceTransport(TransportBase):
- Payload can be in:
* LZMA compressed tarball that includes a submission.json and results
from checkbox.
"""
"""

def __init__(self, where, options):
"""
Expand All @@ -57,7 +57,7 @@ def __init__(self, where, options):
a 15-character (or longer) alphanumeric ID for the system.
"""
super().__init__(where, options)
self._secure_id = self.options.get('secure_id')
self._secure_id = self.options.get("secure_id")
if self._secure_id is not None:
self._validate_secure_id(self._secure_id)

Expand Down Expand Up @@ -91,19 +91,19 @@ def send(self, data, config=None, session_state=None):
if secure_id is None:
raise InvalidSecureIDError(_("Secure ID not specified"))
self._validate_secure_id(secure_id)
logger.debug(
_("Sending to %s, Secure ID is %s"), self.url, secure_id)
logger.debug(_("Sending to %s, Secure ID is %s"), self.url, secure_id)
try:
response = requests.post(self.url, data=data)
except requests.exceptions.Timeout as exc:
raise TransportError(
_("Request to {0} timed out: {1}").format(self.url, exc))
_("Request to {0} timed out: {1}").format(self.url, exc)
)
except requests.exceptions.InvalidSchema as exc:
raise TransportError(
_("Invalid destination URL: {0}").format(exc))
raise TransportError(_("Invalid destination URL: {0}").format(exc))
except requests.exceptions.ConnectionError as exc:
raise TransportError(
_("Unable to connect to {0}: {1}").format(self.url, exc))
_("Unable to connect to {0}: {1}").format(self.url, exc)
)
if response is not None:
try:
# This will raise HTTPError for status != 20x
Expand All @@ -121,8 +121,10 @@ def send(self, data, config=None, session_state=None):

def _validate_secure_id(self, secure_id):
if not re.match(SECURE_ID_PATTERN, secure_id):
message = _((
"{} is not a valid secure_id. secure_id must be a "
"15-character (or more) alphanumeric string"
).format(secure_id))
message = _(
(
"{} is not a valid secure_id. secure_id must be a "
"15-character (or more) alphanumeric string"
).format(secure_id)
)
raise InvalidSecureIDError(message)
4 changes: 3 additions & 1 deletion checkbox-ng/checkbox_ng/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,9 @@ def load_configs(launcher_file=None, cfg=None):
- anything that /etc/xdg/A imports has the lowest possible
priority
"""
assert not (launcher_file and cfg), "config_filename in cfg will be ignored, FIXME"
assert not (
launcher_file and cfg
), "config_filename in cfg will be ignored, FIXME"
if not cfg:
cfg = Configuration()
if launcher_file:
Expand Down
1 change: 1 addition & 0 deletions checkbox-ng/checkbox_ng/launcher/checkbox_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ class Context:
def __init__(self, args, sa):
self.args = args
self.sa = sa

def reset_sa(self):
self.sa = SessionAssistant()

Expand Down
4 changes: 3 additions & 1 deletion checkbox-ng/checkbox_ng/launcher/controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -365,7 +365,9 @@ def should_start_via_launcher(self):
tp_forced = self.launcher.get_value("test plan", "forced")
chosen_tp = self.launcher.get_value("test plan", "unit")
if tp_forced and not chosen_tp:
raise SystemExit("The test plan selection was forced but no unit was provided") # split me into lines
raise SystemExit(
"The test plan selection was forced but no unit was provided"
) # split me into lines
return tp_forced

@contextlib.contextmanager
Expand Down
126 changes: 72 additions & 54 deletions checkbox-ng/checkbox_ng/launcher/merge_reports.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,88 +36,99 @@


#: Name-space prefix for Canonical Certification
CERTIFICATION_NS = 'com.canonical.certification::'
CERTIFICATION_NS = "com.canonical.certification::"


class MergeReports():
class MergeReports:
def register_arguments(self, parser):
parser.add_argument(
'submission', nargs='*', metavar='SUBMISSION',
help='submission tarball')
"submission",
nargs="*",
metavar="SUBMISSION",
help="submission tarball",
)
parser.add_argument(
'-o', '--output-file', metavar='FILE', required=True,
help='save combined test results to the specified FILE')
"-o",
"--output-file",
metavar="FILE",
required=True,
help="save combined test results to the specified FILE",
)

def _parse_submission(self, submission, tmpdir, mode="list"):
try:
with tarfile.open(submission) as tar:
tar.extractall(tmpdir.name)
with open(os.path.join(
tmpdir.name, 'submission.json')) as f:
with open(os.path.join(tmpdir.name, "submission.json")) as f:
data = json.load(f)
for result in data['results']:
result['plugin'] = 'shell' # Required so default to shell
result['summary'] = result['name']
for result in data["results"]:
result["plugin"] = "shell" # Required so default to shell
result["summary"] = result["name"]
# 'id' field in json file only contains partial id
result['id'] = result.get('full_id', result['id'])
if "::" not in result['id']:
result['id'] = CERTIFICATION_NS + result['id']
result["id"] = result.get("full_id", result["id"])
if "::" not in result["id"]:
result["id"] = CERTIFICATION_NS + result["id"]
if mode == "list":
self.job_list.append(JobDefinition(result))
elif mode == "dict":
self.job_dict[result['id']] = JobDefinition(result)
for result in data['resource-results']:
result['plugin'] = 'resource'
result['summary'] = result['name']
self.job_dict[result["id"]] = JobDefinition(result)
for result in data["resource-results"]:
result["plugin"] = "resource"
result["summary"] = result["name"]
# 'id' field in json file only contains partial id
result['id'] = result.get('full_id', result['id'])
if "::" not in result['id']:
result['id'] = CERTIFICATION_NS + result['id']
result["id"] = result.get("full_id", result["id"])
if "::" not in result["id"]:
result["id"] = CERTIFICATION_NS + result["id"]
if mode == "list":
self.job_list.append(JobDefinition(result))
elif mode == "dict":
self.job_dict[result['id']] = JobDefinition(result)
for result in data['attachment-results']:
result['plugin'] = 'attachment'
result['summary'] = result['name']
self.job_dict[result["id"]] = JobDefinition(result)
for result in data["attachment-results"]:
result["plugin"] = "attachment"
result["summary"] = result["name"]
# 'id' field in json file only contains partial id
result['id'] = result.get('full_id', result['id'])
if "::" not in result['id']:
result['id'] = CERTIFICATION_NS + result['id']
result["id"] = result.get("full_id", result["id"])
if "::" not in result["id"]:
result["id"] = CERTIFICATION_NS + result["id"]
if mode == "list":
self.job_list.append(JobDefinition(result))
elif mode == "dict":
self.job_dict[result['id']] = JobDefinition(result)
for cat_id, cat_name in data['category_map'].items():
self.job_dict[result["id"]] = JobDefinition(result)
for cat_id, cat_name in data["category_map"].items():
if mode == "list":
self.category_list.append(
CategoryUnit({'id': cat_id, 'name': cat_name}))
CategoryUnit({"id": cat_id, "name": cat_name})
)
elif mode == "dict":
self.category_dict[cat_id] = CategoryUnit(
{'id': cat_id, 'name': cat_name})
{"id": cat_id, "name": cat_name}
)
except OSError as e:
raise SystemExit(e)
except KeyError as e:
self._output_potential_action(str(e))
raise SystemExit(e)
return data['title']
return data["title"]

def _populate_session_state(self, job, state):
io_log = [
IOLogRecord(count, 'stdout', line.encode('utf-8'))
IOLogRecord(count, "stdout", line.encode("utf-8"))
for count, line in enumerate(
job.get_record_value('io_log').splitlines(
keepends=True))
job.get_record_value("io_log").splitlines(keepends=True)
)
]
result = MemoryJobResult({
'outcome': job.get_record_value('outcome',
job.get_record_value('status')),
'comments': job.get_record_value('comments'),
'execution_duration': job.get_record_value('duration'),
'io_log': io_log,
})
result = MemoryJobResult(
{
"outcome": job.get_record_value(
"outcome", job.get_record_value("status")
),
"comments": job.get_record_value("comments"),
"execution_duration": job.get_record_value("duration"),
"io_log": io_log,
}
)
state.update_job_result(job, result)
if job.plugin == 'resource':
if job.plugin == "resource":
new_resource_list = []
for record in gen_rfc822_records_from_io_log(job, result):
resource = Resource(record.data)
Expand All @@ -127,29 +138,34 @@ def _populate_session_state(self, job, state):
state.set_resource_list(job.id, new_resource_list)
job_state = state.job_state_map[job.id]
job_state.effective_category_id = job.get_record_value(
'category_id', 'com.canonical.plainbox::uncategorised')
"category_id", "com.canonical.plainbox::uncategorised"
)
job_state.effective_certification_status = job.get_record_value(
'certification_status', 'unspecified')
"certification_status", "unspecified"
)

def _create_exporter(self, exporter_id):
exporter_map = {}
exporter_units = get_exporters().unit_list
for unit in exporter_units:
if unit.Meta.name == 'exporter':
if unit.Meta.name == "exporter":
support = unit.support
if support:
exporter_map[unit.id] = support
exporter_support = exporter_map[exporter_id]
return exporter_support.exporter_cls(
[], exporter_unit=exporter_support)
[], exporter_unit=exporter_support
)

def _output_potential_action(self, message):
hint = ""
keys = ['resource', 'attachment']
keys = ["resource", "attachment"]
for key in keys:
if key in message:
hint = ("Make sure your input submission provides {}-related "
"information.".format(key))
hint = (
"Make sure your input submission provides {}-related "
"information.".format(key)
)
if hint:
print("Fail to merge. " + hint)
else:
Expand All @@ -163,13 +179,15 @@ def invoked(self, ctx):
self.category_list = []
session_title = self._parse_submission(submission, tmpdir)
manager = SessionManager.create_with_unit_list(
self.job_list + self.category_list)
self.job_list + self.category_list
)
manager.state.metadata.title = session_title
for job in self.job_list:
self._populate_session_state(job, manager.state)
manager_list.append(manager)
exporter = self._create_exporter(
'com.canonical.plainbox::html-multi-page')
with open(ctx.args.output_file, 'wb') as stream:
"com.canonical.plainbox::html-multi-page"
)
with open(ctx.args.output_file, "wb") as stream:
exporter.dump_from_session_manager_list(manager_list, stream)
print(ctx.args.output_file)
Loading

0 comments on commit 3789fdd

Please sign in to comment.