
Commit

Merge branch 'main' into fix-pipewire-comparision-bug
fernando79513 authored Apr 16, 2024
2 parents 5ee3f2f + 5b342a6, commit 59d0798
Showing 497 changed files with 32,075 additions and 20,036 deletions.
2 changes: 2 additions & 0 deletions .git-blame-ignore-revs
@@ -0,0 +1,2 @@
# Black formatting
3789fdd9225f5bf7f871af1e69ae11cca90cd555
17 changes: 17 additions & 0 deletions .github/workflows/black.yml
@@ -0,0 +1,17 @@
name: Black formatting (79 chars lines)

on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
workflow_dispatch:

jobs:
black:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: psf/black@stable
with:
options: "--check --diff --line-length 79 --extend-exclude '/vendor/'"
36 changes: 36 additions & 0 deletions .github/workflows/checkbox-ce-oem-daily-build.yml
@@ -0,0 +1,36 @@
name: checkbox-ce-oem daily builds

on:
schedule:
- cron: '00 21 * * *'
workflow_dispatch:

jobs:
check_for_commits:
runs-on: [self-hosted, linux, jammy, large]
name: Check for commits
outputs:
new_commit_count: ${{ steps.commit_check.outputs.new_commit_count }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Check for commits
id: commit_check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# get the last daily build that passed by
# 1. getting all workflows that are daily builds (path == ...)
# 2. removing those that did not result in success
# 3. selecting the date/time they started at (format: %Y-%m-%dT%H:%M:%sZ)
# 4. removing the Z postfix from the date to make it usable by git directly
export LAST_PASS_DAILY_BUILD=`gh api repos/canonical/checkbox/actions/runs --paginate --jq '.workflow_runs[] | select(.path == ".github/workflows/checkbox-ce-oem-daily-build.yml") | select(.conclusion == "success") | .run_started_at | .[:-1]' | head -1`
commit_count=$(git log --since="$LAST_PASS_DAILY_BUILD" --oneline -- contrib/checkbox-ce-oem | wc -l)
echo "new_commit_count=$commit_count" | tee $GITHUB_OUTPUT
checkbox-ce-oem-snap-daily:
needs: check_for_commits
if: ${{ needs.check_for_commits.outputs.new_commit_count > 0 || github.event_name == 'workflow_dispatch' }}
uses: ./.github/workflows/checkbox-ce-oem-edge-builds.yml
secrets: inherit
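The shell step above chains gh api, jq, and git log. For readers who find that pipeline terse, here is a rough Python sketch of the same check; it is an illustration under stated assumptions (it uses the requests library, reads GITHUB_TOKEN from the environment, and only inspects the first page of runs, whereas the real step paginates with gh api --paginate):

    import os
    import subprocess

    import requests

    WORKFLOW_PATH = ".github/workflows/checkbox-ce-oem-daily-build.yml"

    # Fetch recent workflow runs for the repository (newest first).
    resp = requests.get(
        "https://api.github.com/repos/canonical/checkbox/actions/runs",
        headers={"Authorization": "Bearer " + os.environ["GITHUB_TOKEN"]},
    )
    runs = resp.json()["workflow_runs"]
    # Start time of the newest successful daily build, with the trailing "Z"
    # dropped so git can consume the timestamp directly.
    last_pass = next(
        run["run_started_at"].rstrip("Z")
        for run in runs
        if run["path"] == WORKFLOW_PATH and run["conclusion"] == "success"
    )
    # Count commits touching contrib/checkbox-ce-oem since that run started.
    log = subprocess.run(
        ["git", "log", "--since=" + last_pass, "--oneline", "--",
         "contrib/checkbox-ce-oem"],
        capture_output=True, text=True, check=True,
    )
    print("new_commit_count={}".format(len(log.stdout.splitlines())))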
5 changes: 0 additions & 5 deletions .github/workflows/checkbox-ce-oem-edge-builds.yml
@@ -1,11 +1,6 @@
name: checkbox-ce-oem snap edge build

on:
push:
branches:
- main
paths:
- 'contrib/checkbox-ce-oem/**'
workflow_dispatch:
workflow_call:

7 changes: 7 additions & 0 deletions CONTRIBUTING.md
@@ -7,6 +7,11 @@ its providers and its documentation.

## General recommendations

- The codebase uses [black] for formatting, with `line-length` set to `79`.
- Install [black] inside your virtual environment using pip instead of using
Debian packages from apt, as the versions in pip are more up-to-date.
- Use `git config blame.ignoreRevsFile .git-blame-ignore-revs` if you want to
[ignore commits related to black formatting].
- Setup your editor of choice to run [autopep8] on save. This helps keep
everything passing [flake8].
- The code doesn’t have to be pylint-clean, but
@@ -385,6 +390,8 @@ review of the source files.
Once all is good, you can submit your documentation change like any other
changes using a pull request.

[black]: https://black.readthedocs.io/
[ignore commits related to black formatting]: https://black.readthedocs.io/en/stable/guides/introducing_black_to_your_project.html#avoiding-ruining-git-blame
[autopep8]: https://pypi.org/project/autopep8/
[flake8]: https://flake8.pycqa.org/en/latest/
[pylint]: https://www.pylint.org/
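Since the new guidance above recommends installing black with pip, its effect on the 79-character limit can be previewed without touching any files. The snippet below is a sketch only, using black's Python API on a hypothetical over-long line (black's library API is not covered by its stability policy):

    import black

    # Hypothetical over-long line used purely for demonstration.
    src = (
        "def send(self, data, config=None, session_state=None):"
        " return requests.post(self.url, data=data)\n"
    )
    # black.Mode(line_length=79) mirrors the CLI's --line-length 79 option.
    print(black.format_str(src, mode=black.Mode(line_length=79)))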
5 changes: 3 additions & 2 deletions checkbox-ng/checkbox_ng/__init__.py
@@ -33,5 +33,6 @@
__version__ = version(__package__)
except PackageNotFoundError:
import logging
logging.error('Failed to retrieve checkbox-ng version')
__version__ = 'unknown'

logging.error("Failed to retrieve checkbox-ng version")
__version__ = "unknown"
26 changes: 14 additions & 12 deletions checkbox-ng/checkbox_ng/certification.py
@@ -47,7 +47,7 @@ class SubmissionServiceTransport(TransportBase):
- Payload can be in:
* LZMA compressed tarball that includes a submission.json and results
from checkbox.
"""
"""

def __init__(self, where, options):
"""
@@ -57,7 +57,7 @@ def __init__(self, where, options):
a 15-character (or longer) alphanumeric ID for the system.
"""
super().__init__(where, options)
self._secure_id = self.options.get('secure_id')
self._secure_id = self.options.get("secure_id")
if self._secure_id is not None:
self._validate_secure_id(self._secure_id)

@@ -91,19 +91,19 @@ def send(self, data, config=None, session_state=None):
if secure_id is None:
raise InvalidSecureIDError(_("Secure ID not specified"))
self._validate_secure_id(secure_id)
logger.debug(
_("Sending to %s, Secure ID is %s"), self.url, secure_id)
logger.debug(_("Sending to %s, Secure ID is %s"), self.url, secure_id)
try:
response = requests.post(self.url, data=data)
except requests.exceptions.Timeout as exc:
raise TransportError(
_("Request to {0} timed out: {1}").format(self.url, exc))
_("Request to {0} timed out: {1}").format(self.url, exc)
)
except requests.exceptions.InvalidSchema as exc:
raise TransportError(
_("Invalid destination URL: {0}").format(exc))
raise TransportError(_("Invalid destination URL: {0}").format(exc))
except requests.exceptions.ConnectionError as exc:
raise TransportError(
_("Unable to connect to {0}: {1}").format(self.url, exc))
_("Unable to connect to {0}: {1}").format(self.url, exc)
)
if response is not None:
try:
# This will raise HTTPError for status != 20x
@@ -121,8 +121,10 @@ def send(self, data, config=None, session_state=None):

def _validate_secure_id(self, secure_id):
if not re.match(SECURE_ID_PATTERN, secure_id):
message = _((
"{} is not a valid secure_id. secure_id must be a "
"15-character (or more) alphanumeric string"
).format(secure_id))
message = _(
(
"{} is not a valid secure_id. secure_id must be a "
"15-character (or more) alphanumeric string"
).format(secure_id)
)
raise InvalidSecureIDError(message)
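The error message above summarises the rule: a secure ID must be 15 or more alphanumeric characters. A standalone sketch of that check follows; note the regex is an assumption made for illustration, since the real SECURE_ID_PATTERN is defined elsewhere in checkbox_ng and may differ:

    import re

    # Assumed pattern, for illustration only; may differ from checkbox_ng's.
    SECURE_ID_PATTERN = r"^[a-zA-Z0-9]{15,}$"

    def is_valid_secure_id(secure_id):
        """Return True when secure_id is a 15+ character alphanumeric string."""
        return re.match(SECURE_ID_PATTERN, secure_id) is not None

    print(is_valid_secure_id("abc123"))            # False: too short
    print(is_valid_secure_id("abcdefgh12345678"))  # True: 16 alphanumeric chars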
4 changes: 3 additions & 1 deletion checkbox-ng/checkbox_ng/config.py
@@ -84,7 +84,9 @@ def load_configs(launcher_file=None, cfg=None):
- anything that /etc/xdg/A imports has the lowest possible
priority
"""
assert not (launcher_file and cfg), "config_filename in cfg will be ignored, FIXME"
assert not (
launcher_file and cfg
), "config_filename in cfg will be ignored, FIXME"
if not cfg:
cfg = Configuration()
if launcher_file:
1 change: 1 addition & 0 deletions checkbox-ng/checkbox_ng/launcher/checkbox_cli.py
@@ -57,6 +57,7 @@ class Context:
def __init__(self, args, sa):
self.args = args
self.sa = sa

def reset_sa(self):
self.sa = SessionAssistant()

4 changes: 3 additions & 1 deletion checkbox-ng/checkbox_ng/launcher/controller.py
@@ -365,7 +365,9 @@ def should_start_via_launcher(self):
tp_forced = self.launcher.get_value("test plan", "forced")
chosen_tp = self.launcher.get_value("test plan", "unit")
if tp_forced and not chosen_tp:
raise SystemExit("The test plan selection was forced but no unit was provided") # split me into lines
raise SystemExit(
"The test plan selection was forced but no unit was provided"
) # split me into lines
return tp_forced

@contextlib.contextmanager
126 changes: 72 additions & 54 deletions checkbox-ng/checkbox_ng/launcher/merge_reports.py
@@ -36,88 +36,99 @@


#: Name-space prefix for Canonical Certification
CERTIFICATION_NS = 'com.canonical.certification::'
CERTIFICATION_NS = "com.canonical.certification::"


class MergeReports():
class MergeReports:
def register_arguments(self, parser):
parser.add_argument(
'submission', nargs='*', metavar='SUBMISSION',
help='submission tarball')
"submission",
nargs="*",
metavar="SUBMISSION",
help="submission tarball",
)
parser.add_argument(
'-o', '--output-file', metavar='FILE', required=True,
help='save combined test results to the specified FILE')
"-o",
"--output-file",
metavar="FILE",
required=True,
help="save combined test results to the specified FILE",
)

def _parse_submission(self, submission, tmpdir, mode="list"):
try:
with tarfile.open(submission) as tar:
tar.extractall(tmpdir.name)
with open(os.path.join(
tmpdir.name, 'submission.json')) as f:
with open(os.path.join(tmpdir.name, "submission.json")) as f:
data = json.load(f)
for result in data['results']:
result['plugin'] = 'shell' # Required so default to shell
result['summary'] = result['name']
for result in data["results"]:
result["plugin"] = "shell" # Required so default to shell
result["summary"] = result["name"]
# 'id' field in json file only contains partial id
result['id'] = result.get('full_id', result['id'])
if "::" not in result['id']:
result['id'] = CERTIFICATION_NS + result['id']
result["id"] = result.get("full_id", result["id"])
if "::" not in result["id"]:
result["id"] = CERTIFICATION_NS + result["id"]
if mode == "list":
self.job_list.append(JobDefinition(result))
elif mode == "dict":
self.job_dict[result['id']] = JobDefinition(result)
for result in data['resource-results']:
result['plugin'] = 'resource'
result['summary'] = result['name']
self.job_dict[result["id"]] = JobDefinition(result)
for result in data["resource-results"]:
result["plugin"] = "resource"
result["summary"] = result["name"]
# 'id' field in json file only contains partial id
result['id'] = result.get('full_id', result['id'])
if "::" not in result['id']:
result['id'] = CERTIFICATION_NS + result['id']
result["id"] = result.get("full_id", result["id"])
if "::" not in result["id"]:
result["id"] = CERTIFICATION_NS + result["id"]
if mode == "list":
self.job_list.append(JobDefinition(result))
elif mode == "dict":
self.job_dict[result['id']] = JobDefinition(result)
for result in data['attachment-results']:
result['plugin'] = 'attachment'
result['summary'] = result['name']
self.job_dict[result["id"]] = JobDefinition(result)
for result in data["attachment-results"]:
result["plugin"] = "attachment"
result["summary"] = result["name"]
# 'id' field in json file only contains partial id
result['id'] = result.get('full_id', result['id'])
if "::" not in result['id']:
result['id'] = CERTIFICATION_NS + result['id']
result["id"] = result.get("full_id", result["id"])
if "::" not in result["id"]:
result["id"] = CERTIFICATION_NS + result["id"]
if mode == "list":
self.job_list.append(JobDefinition(result))
elif mode == "dict":
self.job_dict[result['id']] = JobDefinition(result)
for cat_id, cat_name in data['category_map'].items():
self.job_dict[result["id"]] = JobDefinition(result)
for cat_id, cat_name in data["category_map"].items():
if mode == "list":
self.category_list.append(
CategoryUnit({'id': cat_id, 'name': cat_name}))
CategoryUnit({"id": cat_id, "name": cat_name})
)
elif mode == "dict":
self.category_dict[cat_id] = CategoryUnit(
{'id': cat_id, 'name': cat_name})
{"id": cat_id, "name": cat_name}
)
except OSError as e:
raise SystemExit(e)
except KeyError as e:
self._output_potential_action(str(e))
raise SystemExit(e)
return data['title']
return data["title"]

def _populate_session_state(self, job, state):
io_log = [
IOLogRecord(count, 'stdout', line.encode('utf-8'))
IOLogRecord(count, "stdout", line.encode("utf-8"))
for count, line in enumerate(
job.get_record_value('io_log').splitlines(
keepends=True))
job.get_record_value("io_log").splitlines(keepends=True)
)
]
result = MemoryJobResult({
'outcome': job.get_record_value('outcome',
job.get_record_value('status')),
'comments': job.get_record_value('comments'),
'execution_duration': job.get_record_value('duration'),
'io_log': io_log,
})
result = MemoryJobResult(
{
"outcome": job.get_record_value(
"outcome", job.get_record_value("status")
),
"comments": job.get_record_value("comments"),
"execution_duration": job.get_record_value("duration"),
"io_log": io_log,
}
)
state.update_job_result(job, result)
if job.plugin == 'resource':
if job.plugin == "resource":
new_resource_list = []
for record in gen_rfc822_records_from_io_log(job, result):
resource = Resource(record.data)
@@ -127,29 +138,34 @@ def _populate_session_state(self, job, state):
state.set_resource_list(job.id, new_resource_list)
job_state = state.job_state_map[job.id]
job_state.effective_category_id = job.get_record_value(
'category_id', 'com.canonical.plainbox::uncategorised')
"category_id", "com.canonical.plainbox::uncategorised"
)
job_state.effective_certification_status = job.get_record_value(
'certification_status', 'unspecified')
"certification_status", "unspecified"
)

def _create_exporter(self, exporter_id):
exporter_map = {}
exporter_units = get_exporters().unit_list
for unit in exporter_units:
if unit.Meta.name == 'exporter':
if unit.Meta.name == "exporter":
support = unit.support
if support:
exporter_map[unit.id] = support
exporter_support = exporter_map[exporter_id]
return exporter_support.exporter_cls(
[], exporter_unit=exporter_support)
[], exporter_unit=exporter_support
)

def _output_potential_action(self, message):
hint = ""
keys = ['resource', 'attachment']
keys = ["resource", "attachment"]
for key in keys:
if key in message:
hint = ("Make sure your input submission provides {}-related "
"information.".format(key))
hint = (
"Make sure your input submission provides {}-related "
"information.".format(key)
)
if hint:
print("Fail to merge. " + hint)
else:
@@ -163,13 +179,15 @@ def invoked(self, ctx):
self.category_list = []
session_title = self._parse_submission(submission, tmpdir)
manager = SessionManager.create_with_unit_list(
self.job_list + self.category_list)
self.job_list + self.category_list
)
manager.state.metadata.title = session_title
for job in self.job_list:
self._populate_session_state(job, manager.state)
manager_list.append(manager)
exporter = self._create_exporter(
'com.canonical.plainbox::html-multi-page')
with open(ctx.args.output_file, 'wb') as stream:
"com.canonical.plainbox::html-multi-page"
)
with open(ctx.args.output_file, "wb") as stream:
exporter.dump_from_session_manager_list(manager_list, stream)
print(ctx.args.output_file)
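Although most of this file is mechanical reformatting, _parse_submission applies one small rule to every result type: prefer full_id over the partial id, then prefix the Canonical Certification namespace when the id is not yet qualified. A minimal sketch of that rule, with a hypothetical helper name, is:

    CERTIFICATION_NS = "com.canonical.certification::"

    def qualify_id(result):
        # Hypothetical helper mirroring the merge logic above: prefer full_id,
        # then add the certification namespace when "::" is missing.
        result_id = result.get("full_id", result["id"])
        if "::" not in result_id:
            result_id = CERTIFICATION_NS + result_id
        return result_id

    print(qualify_id({"id": "audio/playback"}))
    # -> com.canonical.certification::audio/playback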