Skip to content

Commit

Permalink
wip from stash
Browse files Browse the repository at this point in the history
  • Loading branch information
touilleMan committed Dec 17, 2024
1 parent e562441 commit 6e00794
Show file tree
Hide file tree
Showing 4 changed files with 652 additions and 195 deletions.
123 changes: 123 additions & 0 deletions server/parsec/components/memory/realm.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,10 @@
RealmCreateStoreBadOutcome,
RealmCreateValidateBadOutcome,
RealmDumpRealmsGrantedRolesBadOutcome,
RealmExportCertificates,
RealmExportDoBaseInfo,
RealmExportDoBaseInfoBadOutcome,
RealmExportDoCertificatesBadOutcome,
RealmGetCurrentRealmsForUserBadOutcome,
RealmGetKeysBundleBadOutcome,
RealmGetStatsAsUserBadOutcome,
Expand Down Expand Up @@ -817,3 +821,122 @@ async def dump_realms_granted_roles(
)

return granted_roles

@override
async def export_do_base_info(
self, organization_id: OrganizationID, realm_id: VlobID
) -> RealmExportDoBaseInfo | RealmExportDoBaseInfoBadOutcome:
try:
org = self._data.organizations[organization_id]
except KeyError:
return RealmExportDoBaseInfoBadOutcome.ORGANIZATION_NOT_FOUND

if not org.is_bootstrapped:
return RealmExportDoBaseInfoBadOutcome.ORGANIZATION_NOT_FOUND

root_verify_key = org.root_verify_key
assert root_verify_key is not None

try:
realm = org.realms[realm_id]
except KeyError:
return RealmExportDoBaseInfoBadOutcome.REALM_NOT_FOUND

return RealmExportDoBaseInfo(
root_verify_key=root_verify_key,
)

    @override
    async def export_do_certificates(
        self, organization_id: OrganizationID, realm_id: VlobID, snapshot_timestamp: DateTime
    ) -> RealmExportCertificates | RealmExportDoCertificatesBadOutcome:
        """Collect all certificates relevant to a realm export, up to `snapshot_timestamp`.

        Three independent certificate streams are returned, each ordered by
        timestamp and truncated to certificates issued at or before the
        snapshot timestamp: common (user/device/revoked/update), sequester,
        and realm-specific certificates.
        """
        try:
            org = self._data.organizations[organization_id]
        except KeyError:
            return RealmExportDoCertificatesBadOutcome.ORGANIZATION_NOT_FOUND

        try:
            realm = org.realms[realm_id]
        except KeyError:
            return RealmExportDoCertificatesBadOutcome.REALM_NOT_FOUND

        # 1) Common certificates (i.e. user/device/revoked/update)

        # Certificates must be returned ordered by timestamp, however there is a trick
        # for the common certificates: when a new user is created, the corresponding
        # user and device certificates have the same timestamp, but we must return
        # the user certificate first (given device references the user).
        # So to achieve this we use a tuple (timestamp, priority, certificate) where
        # only the first two fields should be used for sorting (the priority field
        # handling the case where user and device have the same timestamp).

        common_certificates_unordered: list[tuple[DateTime, int, bytes]] = []
        for user in org.users.values():
            # Priority 0: user certificates must come before same-timestamp device ones.
            common_certificates_unordered.append((user.cooked.timestamp, 0, user.user_certificate))

            if user.is_revoked:
                assert user.cooked_revoked is not None
                assert user.revoked_user_certificate is not None
                common_certificates_unordered.append(
                    (user.cooked_revoked.timestamp, 1, user.revoked_user_certificate)
                )

            for update in user.profile_updates:
                common_certificates_unordered.append(
                    (update.cooked.timestamp, 1, update.user_update_certificate)
                )

        for device in org.devices.values():
            common_certificates_unordered.append(
                (device.cooked.timestamp, 1, device.device_certificate)
            )

        # Sort on (timestamp, priority), then drop anything newer than the snapshot.
        common_certificates = [
            c for ts, _, c in sorted(common_certificates_unordered) if ts <= snapshot_timestamp
        ]

        # 2) Sequester certificates

        # Empty unless the organization is sequestered (authority certificate present).
        sequester_certificates: list[bytes] = []
        if org.sequester_authority_certificate is not None:
            assert org.cooked_sequester_authority is not None
            assert org.sequester_services is not None

            sequester_certificates_unordered: list[tuple[DateTime, bytes]] = []
            # The authority certificate comes first chronologically by construction.
            sequester_certificates_unordered.append(
                (org.cooked_sequester_authority.timestamp, org.sequester_authority_certificate)
            )
            sequester_certificates_unordered += [
                (service.cooked.timestamp, service.sequester_service_certificate)
                for service in org.sequester_services.values()
            ]

            sequester_certificates = [
                c for ts, c in sorted(sequester_certificates_unordered) if ts <= snapshot_timestamp
            ]

        # 3) Realm certificates

        # Collect all the certificates related to the realm
        realm_certificates_unordered: list[tuple[DateTime, bytes]] = []
        realm_certificates_unordered += [
            (role.cooked.timestamp, role.realm_role_certificate) for role in realm.roles
        ]
        realm_certificates_unordered += [
            (role.cooked.timestamp, role.realm_key_rotation_certificate)
            for role in realm.key_rotations
        ]
        realm_certificates_unordered += [
            (role.cooked.timestamp, role.realm_name_certificate) for role in realm.renames
        ]
        # TODO: support archiving here !

        realm_certificates = [
            c for ts, c in sorted(realm_certificates_unordered) if ts <= snapshot_timestamp
        ]

        return RealmExportCertificates(
            common_certificates=common_certificates,
            sequester_certificates=sequester_certificates,
            realm_certificates=realm_certificates,
        )
103 changes: 72 additions & 31 deletions server/parsec/components/realm.py
Original file line number Diff line number Diff line change
Expand Up @@ -351,59 +351,87 @@ class RealmDumpRealmsGrantedRolesBadOutcome(BadOutcomeEnum):
@dataclass(slots=True)
class RealmExportDoBaseInfo:
root_verify_key: VerifyKey
common_certificates: list[bytes]
realm_certificates: list[bytes]
vlob_upper_marker: RealmExportBatchOffsetMarker
vlobs_total: int
block_upper_marker: RealmExportBatchOffsetMarker
blocks_total: int

# Offset marker is basically the internal primary key of the vlob in the PostgreSQL
# database:
# - The primary key is a serial integer that is strictly growing (i.e. the older
# the row the lower the ID).
# - The table contains multiple realms, so the IDs are not growing continuously (also
# the serial type in PostgreSQL give no guarantee on avoiding hole when e.g. a
# transaction is rolled back).
#
# So the idea here is to use this primary key from PostgreSQL as primary in
# our SQLite export. This way we end up with the rows in the correct historical
# order, and also easily know if the export is complete (i.e. if the upper
# bound is part of the export).
vlob_offset_marker_upper_bound: int
block_offset_marker_upper_bound: int

vlob_items: int
blocks_items: int

class RealmExportDoBaseInfoBadOutcome(BadOutcome):

class RealmExportDoBaseInfoBadOutcome(BadOutcomeEnum):
ORGANIZATION_NOT_FOUND = auto()
REALM_NOT_FOUND = auto()


@dataclass(slots=True)
class RealmExportCertificates:
# List of (<DB primary key>, <realm_role_certificate>)
realm_role_certificates: list[tuple[int, bytes]]

# List of (<DB primary key>, <user_certificate>, <revoked_user_certificate>)
user_certificates: list[tuple[int, bytes, bytes | None]]

# List of (<DB primary key>, <user_update_certificate>)
user_update_certificates: list[tuple[int, bytes]]

# List of (<DB primary key>, <device_certificate>)
device_certificates: list[tuple[int, bytes]]
common_certificates: list[bytes]
sequester_certificates: list[bytes]
realm_certificates: list[bytes]


class RealmExportDoCertificatesBadOutcome(BadOutcome):
class RealmExportDoCertificatesBadOutcome(BadOutcomeEnum):
ORGANIZATION_NOT_FOUND = auto()
REALM_NOT_FOUND = auto()


@dataclass(slots=True)
class RealmExportVlobsBatchItem:
    # Monotonic per-realm update counter; orders vlob updates within the realm.
    realm_vlob_update_index: int
    vlob_id: VlobID
    # Version of this vlob (starts at 1, incremented on each update).
    version: int
    # Index of the realm key used to encrypt `blob`.
    key_index: int
    # Encrypted vlob payload.
    blob: bytes
    # Size in bytes of `blob`.
    size: int
    # Device that authored this vlob version.
    author: DeviceID
    timestamp: DateTime


@dataclass(slots=True)
class RealmExportVlobsBatch:
batch_offset_marker: RealmExportBatchOffsetMarker
# List of (<DB primary key>, <vlob_id>, <version>, <blob>, <author's DB primary key>, <timestamp>)
vlobs: list[tuple[int, VlobID, int, bytes, int, DateTime]]
items: list[RealmExportVlobsBatchItem]


class RealmExportDoVlobsBatchBadOutcome(BadOutcome):
class RealmExportDoVlobsBatchBadOutcome(BadOutcomeEnum):
ORGANIZATION_NOT_FOUND = auto()
REALM_NOT_FOUND = auto()


@dataclass(slots=True)
class RealmExportBlocksBatch:
class RealmExportBlocksBatchItem:
    # DB primary key, also used as the batch offset marker — presumably the
    # PostgreSQL serial id (see RealmExportDoBaseInfo notes); TODO confirm.
    id: int
    block_id: BlockID
    # Device that uploaded the block.
    author: DeviceID
    # Index of the realm key used to encrypt the block's data.
    key_index: int
    # Size in bytes of the block's (encrypted) data; the data itself is
    # fetched separately via `export_do_blocks_data`.
    size: int


@dataclass(slots=True)
class RealmExportBlocksMetadataBatch:
batch_offset_marker: RealmExportBatchOffsetMarker
# List of (<DB primary key>, <block_id>, <block>, <author's DB primary key>)
blocks: list[tuple[int, BlockID, bytes, int]]
items: list[RealmExportBlocksBatchItem]


class RealmExportDoBlocksBatchMetadatBadOutcome(BadOutcomeEnum):

Check warning on line 429 in server/parsec/components/realm.py

View workflow job for this annotation

GitHub Actions / spelling / cspell

Unknown word (Metadat)
ORGANIZATION_NOT_FOUND = auto()
REALM_NOT_FOUND = auto()


class RealmExportDoBlocksBatchBadOutcome(BadOutcome):
class RealmExportDoBlocksDataBadOutcome(BadOutcomeEnum):
ORGANIZATION_NOT_FOUND = auto()
REALM_NOT_FOUND = auto()

Expand Down Expand Up @@ -544,18 +572,31 @@ async def export_do_base_info(
raise NotImplementedError

async def export_do_certificates(
self, organization_id: OrganizationID, realm_id: VlobID
self, organization_id: OrganizationID, realm_id: VlobID, snapshot_timestamp: DateTime
) -> RealmExportCertificates | RealmExportDoCertificatesBadOutcome:
raise NotImplementedError

async def export_do_vlobs_batch(
self, batch_offset_marker: RealmExportBatchOffsetMarker, batch_size: int = 1000
self,
organization_id: OrganizationID,
realm_id: VlobID,
batch_offset_marker: RealmExportBatchOffsetMarker,
batch_size: int,
) -> RealmExportVlobsBatch | RealmExportDoVlobsBatchBadOutcome:
raise NotImplementedError

async def export_do_blocks_batch(
self, batch_offset_marker: RealmExportBatchOffsetMarker, batch_size: int = 1000
) -> RealmExportBlocksBatch | RealmExportDoBlocksBatchBadOutcome:
async def export_do_blocks_metadata_batch(
self,
organization_id: OrganizationID,
realm_id: VlobID,
batch_offset_marker: RealmExportBatchOffsetMarker,
batch_size: int = 1000,
) -> RealmExportBlocksMetadataBatch | RealmExportDoBlocksBatchMetadatBadOutcome:

Check warning on line 594 in server/parsec/components/realm.py

View workflow job for this annotation

GitHub Actions / spelling / cspell

Unknown word (Metadat)
raise NotImplementedError

async def export_do_blocks_data(
self, organization_id: OrganizationID, realm_id: VlobID, block_id: BlockID
) -> bytes | RealmExportDoBlocksDataBadOutcome:
raise NotImplementedError

#
Expand Down
Loading

0 comments on commit 6e00794

Please sign in to comment.