Skip to content

Commit

Permalink
Replace dd call by internal write call
Browse files Browse the repository at this point in the history
We would create a file with 0 padding to force it to a certain size.
This seems fairly ugly, in particular when we later want to use the
blob in other contexts. Also, this embedded the assumption that padding
to 4096 is always appropriate in the function that generates the signature.
Let's just calculate the right amount when writing to the target partition.

Incidentally, this seems to fix the issue where we'd have the backup LBA
header inserted over the output partition, as discussed in
systemd#798 (comment).
But I don't entirely understand the issue, so maybe this change is only
hiding the bug.
  • Loading branch information
keszybz committed Oct 8, 2021
1 parent 7d9fe45 commit b82d76c
Showing 1 changed file with 23 additions and 18 deletions.
41 changes: 23 additions & 18 deletions mkosi/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,6 @@
nspawn_params_for_blockdev_access,
patch_file,
path_relative_to_cwd,
roundup,
run,
run_workspace_command,
should_compress_fs,
Expand All @@ -91,7 +90,7 @@
workspace,
write_grub_config,
)
from .fdutil import copy_fd, copy_file_object
from .fdutil import blkgetsize64, copy_fd, copy_file_object
from .manifest import Manifest

complete_step = MkosiPrinter.complete_step
Expand Down Expand Up @@ -3589,6 +3588,8 @@ def insert_partition(

assert args.partition_table is not None

blob.seek(0)

luks_extra = 16 * 1024 * 1024 if args.encrypt == "all" else 0
blob_size = os.stat(blob.name).st_size
part = args.partition_table.add(ident, blob_size + luks_extra, type_uuid, description, part_uuid)
Expand All @@ -3613,7 +3614,17 @@ def insert_partition(

with cm as dev:
path = dev if dev is not None else part.blockdev(loopdev)
run(["dd", f"if={blob.name}", f"of={path}", "conv=nocreat,sparse"])

with open(path, "wb") as out:
# We need to check the actual size here, to take into
# account space added for padding and removed for the
# luks headers.
out_size = blkgetsize64(out.fileno())
assert out_size >= blob_size

shutil.copyfileobj(blob, out)
padding = b'\0' * (out_size - blob_size)
out.write(padding)

return part

Expand Down Expand Up @@ -3740,34 +3751,28 @@ def make_verity_sig(
encoding=serialization.Encoding.DER
)

# We base64 the DER result, because we want to include it in
# JSON. This is not PEM (i.e. not header/footer line, no line
# breaks), but just base64 encapsulated DER).
# We base64 the DER result, because we want to include it in JSON. This is not PEM
# (i.e. no header/footer line, no line breaks), but just base64 encapsulated DER).
b64encoded = base64.b64encode(sigbytes).decode("ascii")

print(b64encoded)

# This is supposed to be extensible, but care should be taken
# not to include unprotected data here.
# This is supposed to be extensible, but care should be taken not to include unprotected
# data here.
j = json.dumps({
"rootHash": root_hash,
"certificateFingerprint": fingerprint,
"signature": b64encoded
}).encode("utf-8")

# Pad to next multiple of 4K with NUL bytes
padded = j + b"\0" * (roundup(len(j), 4096) - len(j))

f: BinaryIO = cast(BinaryIO, tempfile.NamedTemporaryFile(mode="w+b", dir=args.output.parent, prefix=".mkosi-"))
f.write(padded)
f.write(j)
f.flush()

# Returns a file with zero-padded JSON data to insert as
# signature partition as first element, and the DER PKCS7
# signature bytes as second argument (to store as detached
# PKCS7 file), and finally the SHA256 fingerprint of the
# certificate used (which is used to deterministically
# generate the partition UUID for the signature partition).
# Returns a file with JSON data to insert as signature partition as the first element, and
# the DER PKCS7 signature bytes as second argument (to store as a detached PKCS7 file), and
# finally the SHA256 fingerprint of the certificate used (which is used to
# deterministically generate the partition UUID for the signature partition).

return f, sigbytes, fingerprint

Expand Down

0 comments on commit b82d76c

Please sign in to comment.