Change default keytype to RSA #154

Merged: 5 commits, Aug 20, 2024
32 changes: 27 additions & 5 deletions tuf_conformance/repository_simulator.py
@@ -61,6 +61,12 @@

SPEC_VER = ".".join(SPECIFICATION_VERSION)

# Generate some signers once (to avoid all tests generating them)
NUM_SIGNERS = 8
RSA_PKCS_SIGNERS = [
CryptoSigner.generate_rsa(scheme="rsa-pkcs1v15-sha256") for _ in range(NUM_SIGNERS)
]


@dataclass
class Artifact:
@@ -104,6 +110,8 @@ def __init__(self, dump_dir: str | None) -> None:
now = datetime.datetime.utcnow()
self.safe_expiry = now.replace(microsecond=0) + datetime.timedelta(days=30)

self._rsa_pkcs_signers = RSA_PKCS_SIGNERS.copy()

# initialize a basic repository structure
self._initialize()

@@ -140,6 +148,20 @@ def all_targets(self) -> Iterator[tuple[str, Targets]]:
if role not in [Root.type, Timestamp.type, Snapshot.type]:
yield role, md.signed

def new_signer(
self, keytype: str = "rsa", scheme: str = "rsa-pkcs1v15-sha256"
) -> CryptoSigner:
"""Return a Signer (from a set of pre-generated signers)."""
try:
if keytype == "rsa" and scheme == "rsa-pkcs1v15-sha256":
return self._rsa_pkcs_signers.pop()
except IndexError:
raise RuntimeError(
f"Test ran out of {keytype}/{scheme} keys (NUM_SIGNERS = {NUM_SIGNERS})"
)

raise ValueError(f"{keytype}/{scheme} not supported yet")

def add_signer(self, role: str, signer: CryptoSigner) -> None:
if role not in self.signers:
self.signers[role] = {}
@@ -151,7 +173,7 @@ def rotate_keys(self, role: str) -> None:
self.root.roles[role].keyids.clear()
self.signers[role].clear()
for _ in range(0, self.root.roles[role].threshold):
signer = CryptoSigner.generate_ecdsa()
signer = self.new_signer()
self.root.add_key(signer.public_key, role)
self.add_signer(role, signer)

@@ -164,7 +186,7 @@ def _initialize(self) -> None:
self.mds[Root.type] = MetadataTest(RootTest(expires=self.safe_expiry))

for role in TOP_LEVEL_ROLE_NAMES:
signer = CryptoSigner.generate_ecdsa()
signer = self.new_signer()
self.root.add_key(signer.public_key, role)
self.add_signer(role, signer)

@@ -338,7 +360,7 @@ def add_delegation(
delegator.delegations.roles[role.name] = role

# By default add one new key for the role
signer = CryptoSigner.generate_ecdsa()
signer = self.new_signer()
delegator.add_key(signer.public_key, role.name)
self.add_signer(role.name, signer)

@@ -364,7 +386,7 @@ def add_succinct_roles(
):
raise ValueError("Can't add a succinct_roles when delegated roles are used")

signer = CryptoSigner.generate_ecdsa()
signer = self.new_signer()
succinct_roles = SuccinctRoles([], 1, bit_length, name_prefix)
delegator.delegations = Delegations({}, None, succinct_roles)

@@ -404,7 +426,7 @@ def debug_dump(self) -> None:

def add_key(self, role: str, delegator_name: str = Root.type) -> None:
"""add new public key to delegating metadata and store the signer for role"""
signer = CryptoSigner.generate_ecdsa()
signer = self.new_signer()

# Add key to delegating metadata
delegator = self.mds[delegator_name].signed
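For context: with this change the simulator hands out rsa / rsa-pkcs1v15-sha256 keys by default, so conformance clients get exercised against RSA signatures instead of ECDSA ones. A minimal sketch of signing and verifying with this keytype/scheme pair through securesystemslib's Signer API (illustrative only, not part of this diff; "payload" is a stand-in for serialized metadata bytes):

from securesystemslib.signer import CryptoSigner

# Generate an RSA signer with the scheme the simulator now uses by default
signer = CryptoSigner.generate_rsa(scheme="rsa-pkcs1v15-sha256")

payload = b'{"signed": "..."}'  # stand-in for canonical metadata bytes
sig = signer.sign(payload)

# The public half is what ends up in the delegating metadata;
# verify_signature() raises UnverifiedSignatureError on a bad signature.
signer.public_key.verify_signature(sig, payload)
print(signer.public_key.keytype, signer.public_key.scheme)  # rsa rsa-pkcs1v15-sha256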
27 changes: 13 additions & 14 deletions tuf_conformance/test_basic.py
@@ -15,7 +15,6 @@ def test_basic_init_and_refresh(client: ClientRunner, server: SimulatorServer) -
Run a refresh, verify client trusted metadata and requests made by the client
"""
init_data, repo = server.new_test(client.test_name)

# Run the test: step 1: initialize client
assert client.init_client(init_data) == 0

@@ -166,29 +165,29 @@ def test_timestamp_content_changes(
assert client.version(Timestamp.type) == initial_timestamp_meta_ver


def test_new_targets_hash_mismatch(
def test_basic_metadata_hash_support(
client: ClientRunner, server: SimulatorServer
) -> None:
# Check against snapshot role's targets hashes
"""Verify that clients supports hashes for metadata"""
init_data, repo = server.new_test(client.test_name)

assert client.init_client(init_data) == 0
client.refresh(init_data)

# Construct repository with hashes in timestamp/snapshot
repo.compute_metafile_hashes_length = True
repo.update_snapshot()
repo.update_snapshot() # v2

client.refresh(init_data)
assert client.init_client(init_data) == 0
# Verify client accepts correct hashes
assert client.refresh(init_data) == 0

# Modify targets contents without updating
# snapshot's targets hashes
repo.targets.version += 1
# Modify targets metadata, leaving the hashes in snapshot at now-wrong values
repo.targets.version += 1 # v2
repo.snapshot.meta["targets.json"].version = repo.targets.version
repo.snapshot.version += 1
repo.snapshot.version += 1 # v3
repo.update_timestamp()

client.refresh(init_data)
assert client.version(Snapshot.type) == 1
# Verify client refuses targets that do not match the hashes
assert client.refresh(init_data) == 1
assert client.version(Snapshot.type) == 3
Contributor:
shouldn't this be v2, since the refresh fails due to the snapshot hash mismatch?

Member (Author):
The mismatch is in targets hash (that is stored in snapshot) -- the docstring could make this clearer:

  • snapshot v3 is a valid snapshot (with meta that contains targets v2).
  • Updating to targets v2 fails because it does not match the hash/length in snapshot meta.

Contributor:
ah i think this is actually surfacing a bug in my implementation!

Contributor:
assert client.version(Targets.type) == 1
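The check the reviewers are discussing -- the downloaded targets bytes are verified against the hash/length that the trusted snapshot records for "targets.json" -- amounts to roughly the following sketch, assuming python-tuf's Metadata API (illustrative only, not part of this PR; snapshot_md and targets_bytes are hypothetical local variables):

from tuf.api.metadata import Metadata, Snapshot

def check_targets_against_snapshot(
    snapshot_md: Metadata[Snapshot], targets_bytes: bytes
) -> None:
    """Raise if targets_bytes does not match snapshot's recorded length/hashes."""
    meta = snapshot_md.signed.meta["targets.json"]
    # verify_length_and_hashes() raises LengthOrHashMismatchError on mismatch,
    # so the client keeps its trusted targets (v1) while snapshot itself is at v3.
    meta.verify_length_and_hashes(targets_bytes)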


6 changes: 2 additions & 4 deletions tuf_conformance/test_keys.py
@@ -1,4 +1,3 @@
from securesystemslib.signer import CryptoSigner
from tuf.api.metadata import Root, Snapshot

from tuf_conformance.client_runner import ClientRunner
@@ -62,8 +61,7 @@ def test_root_has_keys_but_not_snapshot(

# Add two keyids only to root and expect the client
# to fail updating
signer = CryptoSigner.generate_ecdsa()

signer = repo.new_signer()
repo.root.roles[Snapshot.type].keyids.append(signer.public_key.keyid)

# Sanity check
@@ -145,7 +143,7 @@ def test_duplicate_keys_root(client: ClientRunner, server: SimulatorServer) -> N

assert client.init_client(init_data) == 0

signer = CryptoSigner.generate_ecdsa()
signer = repo.new_signer()

# Add one key 9 times to root
for n in range(0, 9):
48 changes: 28 additions & 20 deletions tuf_conformance/test_rollback.py
@@ -1,10 +1,11 @@
import pytest
from tuf.api.metadata import Root, Snapshot, Targets, Timestamp

from tuf_conformance.client_runner import ClientRunner
from tuf_conformance.simulator_server import SimulatorServer


def test_new_snapshot_version_rollback(
def test_simple_snapshot_rollback(
client: ClientRunner, server: SimulatorServer
) -> None:
"""Test a simple snapshot version rollback attack.
@@ -62,8 +63,9 @@ def test_new_timestamp_version_rollback(
assert repo.metadata_statistics[-1] == (Timestamp.type, None)


def test_new_timestamp_snapshot_rollback(
client: ClientRunner, server: SimulatorServer
@pytest.mark.parametrize("use_hashes", [False, True], ids=["basic", "with hashes"])
def test_snapshot_rollback(
client: ClientRunner, server: SimulatorServer, use_hashes: bool
) -> None:
"""Test a complete snapshot version rollback attack.

@@ -72,6 +74,11 @@ def test_new_timestamp_snapshot_rollback(
Expect client to refuse the update.
"""
init_data, repo = server.new_test(client.test_name)
# When hashes are used and timestamp update happens:
# 1) snapshot hash is computed
# 2) hash is stored in timestamp.snapshot_meta
# The local snapshot must be used in rollback check even when hashes are used
repo.compute_metafile_hashes_length = use_hashes

assert client.init_client(init_data) == 0

@@ -211,34 +218,35 @@ def test_new_timestamp_fast_forward_recovery(
assert client.version(Timestamp.type) == 1


def test_snapshot_rollback_with_local_snapshot_hash_mismatch(
client: ClientRunner, server: SimulatorServer
@pytest.mark.parametrize("use_hashes", [False, True], ids=["basic", "with hashes"])
def test_targets_rollback(
client: ClientRunner, server: SimulatorServer, use_hashes: bool
) -> None:
# Test triggering snapshot rollback check on a newly downloaded snapshot
# when the local snapshot is loaded even when there is a hash mismatch
# with timestamp.snapshot_meta.
"""Test targets rollback

The targets version info in the local snapshot.meta should be used in the rollback check.
"""
init_data, repo = server.new_test(client.test_name)

# When hashes are used and timestamp update happens:
# 1) snapshot hash is computed
# 2) hash is stored in timestamp.snapshot_meta
# The local snapshot must be used in rollback check even when hashes are used
repo.compute_metafile_hashes_length = use_hashes

assert client.init_client(init_data) == 0

# Initialize all metadata and assign targets
# version higher than 1.
repo.targets.version = 2
repo.update_snapshot()
repo.update_snapshot() # v2
assert client.refresh(init_data) == 0
assert client.version(Targets.type) == 2

# By raising this flag on timestamp update the simulator would:
# 1) compute the hash of the new modified version of snapshot
# 2) assign the hash to timestamp.snapshot_meta
# The purpose is to create a hash mismatch between timestamp.meta and
# the local snapshot, but to have hash match between timestamp.meta and
# the next snapshot version.
repo.compute_metafile_hashes_length = True

# The new targets must have a lower version than the local trusted one.
repo.targets.version = 1
repo.update_snapshot()
repo.update_snapshot() # v3

# Client refresh should fail because there is a hash mismatch.
# Client refresh should fail because of targets rollback
assert client.refresh(init_data) == 1
assert client.version(Snapshot.type) == 2
assert client.version(Targets.type) == 2
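For context on the compute_metafile_hashes_length flag used in both parametrized tests above: when it is enabled, the repository records the snapshot's length and hashes in timestamp.snapshot_meta (and the targets' in snapshot.meta), which is exactly the situation where the client must still consult its locally trusted metadata for the rollback checks. A rough sketch of how such a meta entry can be built with python-tuf's MetaFile (illustrative only; the simulator's actual helper may differ):

import hashlib

from tuf.api.metadata import MetaFile

def meta_entry(metadata_bytes: bytes, version: int, with_hashes: bool) -> MetaFile:
    """Build the MetaFile entry that timestamp.snapshot_meta (or snapshot.meta) carries."""
    if not with_hashes:
        # Version-only entry: the client has no length/hashes to check against
        return MetaFile(version=version)
    digest = hashlib.sha256(metadata_bytes).hexdigest()
    return MetaFile(
        version=version,
        length=len(metadata_bytes),
        hashes={"sha256": digest},
    )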