Automatically create array types for all scalars (#7970)
This is mostly for the benefit of introspection so that we always have
a corresponding schema type for every backend type (identified by
`backend_id`).
elprans authored Nov 8, 2024
1 parent a6b4afc commit 1bc18b0
Showing 12 changed files with 211 additions and 89 deletions.
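To make the commit description concrete, here is the kind of introspection this enables (an illustrative query, not part of the change, using the built-in std::int64 scalar): with an array type materialized for every scalar, the array's backend_id (the OID of the corresponding Postgres type) is always available through ordinary schema reflection.

# Illustrative only: every non-abstract scalar is now expected to have a
# matching schema::Array whose backend_id identifies the Postgres array type.
select schema::Array {
    name,
    backend_id,
    element_type: { name },
}
filter .element_type.name = 'std::int64';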
2 changes: 1 addition & 1 deletion edb/buildmeta.py
@@ -60,7 +60,7 @@
# The merge conflict there is a nice reminder that you probably need
# to write a patch in edb/pgsql/patches.py, and then you should preserve
# the old value.
-EDGEDB_CATALOG_VERSION = 2024_11_08_00_00
+EDGEDB_CATALOG_VERSION = 2024_11_08_01_00
EDGEDB_MAJOR_VERSION = 6


Expand Down
9 changes: 7 additions & 2 deletions edb/pgsql/metaschema.py
@@ -4456,8 +4456,13 @@ class GetPgTypeForEdgeDBTypeFunction2(trampoline.VersionedFunction):
'invalid_parameter_value',
msg => (
format(
-'cannot determine OID of Gel type %L',
-"typeid"::text
+'cannot determine Postgres OID of Gel %s(%L)%s',
+"kind",
+"typeid"::text,
+(case when "elemid" is not null
+ then ' with element type ' || "elemid"::text
+ else ''
+ end)
)
)
)
17 changes: 16 additions & 1 deletion edb/schema/delta.py
@@ -3758,7 +3758,7 @@ def _delete_finalize(
orig_schema = ctx.original_schema
if refs:
for ref in refs:
-if (not context.is_deleting(ref)
+if (not self._is_deleting_ref(schema, context, ref)
and ref.is_blocking_ref(orig_schema, self.scls)):
ref_strs.append(
ref.get_verbosename(orig_schema, with_parent=True))
@@ -3781,6 +3781,21 @@ def _delete_finalize(

return schema

def _is_deleting_ref(
self,
schema: s_schema.Schema,
context: CommandContext,
ref: so.Object,
) -> bool:
if context.is_deleting(ref):
return True

for op in self.get_prerequisites():
if isinstance(op, DeleteObject) and op.scls == ref:
return True

return False

def _has_outside_references(
self,
schema: s_schema.Schema,
2 changes: 1 addition & 1 deletion edb/schema/objects.py
@@ -3222,7 +3222,7 @@ def get_explicit_local_field_value(
def allow_ref_propagation(
self,
schema: s_schema.Schema,
-constext: sd.CommandContext,
+context: sd.CommandContext,
refdict: RefDict,
) -> bool:
return True
6 changes: 2 additions & 4 deletions edb/schema/objtypes.py
@@ -324,7 +324,7 @@ def _issubclass(
def allow_ref_propagation(
self,
schema: s_schema.Schema,
-constext: sd.CommandContext,
+context: sd.CommandContext,
refdict: so.RefDict,
) -> bool:
return not self.is_view(schema) or refdict.attr == 'pointers'
@@ -333,9 +333,7 @@ def as_type_delete_if_unused(
self,
schema: s_schema.Schema,
) -> Optional[sd.DeleteObject[ObjectType]]:
-if not schema.get_by_id(self.id, default=None):
-    # this type was already deleted by some other op
-    # (probably alias types cleanup)
+if not self._is_deletable(schema):
return None

# References to aliases can only occur inside other aliases,
2 changes: 1 addition & 1 deletion edb/schema/pointers.py
@@ -857,7 +857,7 @@ def has_user_defined_properties(self, schema: s_schema.Schema) -> bool:
def allow_ref_propagation(
self,
schema: s_schema.Schema,
-constext: sd.CommandContext,
+context: sd.CommandContext,
refdict: so.RefDict,
) -> bool:
object_type = self.get_source(schema)
2 changes: 1 addition & 1 deletion edb/schema/properties.py
@@ -146,7 +146,7 @@ def is_link_property(self, schema: s_schema.Schema) -> bool:
def allow_ref_propagation(
self,
schema: s_schema.Schema,
-constext: sd.CommandContext,
+context: sd.CommandContext,
refdict: so.RefDict,
) -> bool:
source = self.get_source(schema)
36 changes: 36 additions & 0 deletions edb/schema/scalars.py
@@ -595,6 +595,26 @@ def _cmd_tree_from_ast(

return cmd

def _create_begin(
self,
schema: s_schema.Schema,
context: sd.CommandContext,
) -> s_schema.Schema:
schema = super()._create_begin(schema, context)
if (
not context.canonical
and not self.scls.get_abstract(schema)
and not self.scls.get_transient(schema)
):
# Create an array type for this scalar eagerly.
# We mostly do this so that we know the `backend_id`
# of the array type when running translation of SQL
# involving arrays of scalars.
schema2, arr_t = s_types.Array.from_subtypes(schema, [self.scls])
self.add_caused(arr_t.as_shell(schema2).as_create_delta(schema2))

return schema

def validate_create(
self,
schema: s_schema.Schema,
@@ -819,3 +839,19 @@ def _get_ast(
return None
else:
return super()._get_ast(schema, context, parent_node=parent_node)

def _delete_begin(
self,
schema: s_schema.Schema,
context: sd.CommandContext,
) -> s_schema.Schema:
if not context.canonical:
schema2, arr_typ = s_types.Array.from_subtypes(schema, [self.scls])
arr_op = arr_typ.init_delta_command(
schema2,
sd.DeleteObject,
if_exists=True,
)
self.add_prerequisite(arr_op)

return super()._delete_begin(schema, context)
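A sketch of the intended effect of the new `_create_begin` hook above (the scalar name default::ticket_id is hypothetical): creating a scalar now eagerly creates its array type as well, so the array's backend_id is already known when SQL involving arrays of that scalar is translated.

# Hypothetical user-defined scalar, for illustration only.
create scalar type default::ticket_id extending std::int64;

# The implicitly created array type should now be introspectable,
# including its backend_id.
select schema::Array { name, backend_id }
filter .element_type.name = 'default::ticket_id';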
41 changes: 33 additions & 8 deletions edb/schema/types.py
@@ -533,6 +533,14 @@ def as_type_delete_if_unused(

return None

def _is_deletable(
self,
schema: s_schema.Schema,
) -> bool:
# this type was already deleted by some other op
# (probably alias types cleanup)
return schema.get_by_id(self.id, default=None) is not None


class QualifiedType(so.QualifiedObject, Type):
pass
@@ -988,7 +996,14 @@ def as_delete_delta(
assert isinstance(delta, sd.DeleteObject)
if not isinstance(self, CollectionExprAlias):
delta.if_exists = True
-delta.if_unused = True
+if not (
+    isinstance(self, Array)
+    and self.get_element_type(schema).is_scalar()
+):
+    # Arrays of scalars are special, because we create them
+    # implicitly and overload reference checks to never
+    # delete them unless the scalar is also deleted.
+    delta.if_unused = True
return delta

@classmethod
@@ -1140,9 +1155,7 @@ def as_type_delete_if_unused(
self: CollectionTypeT,
schema: s_schema.Schema,
) -> Optional[sd.DeleteObject[CollectionTypeT]]:
-if not schema.get_by_id(self.id, default=None):
-    # this type was already deleted by some other op
-    # (probably alias types cleanup)
+if not self._is_deletable(schema):
return None

return self.init_delta_command(
@@ -1202,9 +1215,7 @@ def as_type_delete_if_unused(
self: CollectionExprAliasT,
schema: s_schema.Schema,
) -> Optional[sd.DeleteObject[CollectionExprAliasT]]:
-if not schema.get_by_id(self.id, default=None):
-    # this type was already deleted by some other op
-    # (probably alias types cleanup)
+if not self._is_deletable(schema):
return None

cmd = self.init_delta_command(schema, sd.DeleteObject, if_exists=True)
@@ -3365,7 +3376,21 @@ class DeleteTupleExprAlias(DeleteCollectionExprAlias[TupleExprAlias]):


class DeleteArray(DeleteCollectionType[Array]):
-pass
+# Prevent array types from getting deleted unless the element
+# type is being deleted too.
+def _has_outside_references(
+    self,
+    schema: s_schema.Schema,
+    context: sd.CommandContext,
+) -> bool:
+    if super()._has_outside_references(schema, context):
+        return True
+
+    el_type = self.scls.get_element_type(schema)
+    if el_type.is_scalar() and not context.is_deleting(el_type):
+        return True
+
+    return False


class DeleteArrayExprAlias(DeleteCollectionExprAlias[ArrayExprAlias]):
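An illustrative counterpart to the deletion-side changes above, reusing the hypothetical default::ticket_id scalar: DeleteArray now reports an outside reference whenever its scalar element type is not itself being deleted, so the implicit array type survives until the scalar is dropped, at which point the scalar's _delete_begin (in scalars.py above) schedules the array's deletion as a prerequisite.

# Dropping the scalar is expected to remove the implicitly created array
# type as well, since the array's deletion is added as a prerequisite of
# the scalar's deletion.
drop scalar type default::ticket_id;

# Afterwards, no array type with that element type should remain.
select schema::Array { name }
filter .element_type.name = 'default::ticket_id';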
88 changes: 54 additions & 34 deletions edb/server/bootstrap.py
@@ -2026,45 +2026,65 @@ def compile_sys_queries(
# The code below re-syncs backend_id properties of Gel builtin
# types with the actual OIDs in the DB.
backend_id_fixup_edgeql = '''
-WITH
-_ := (
-UPDATE {schema::ScalarType, schema::Tuple}
-FILTER
-NOT (.abstract ?? False)
-AND NOT (.transient ?? False)
-SET {
-backend_id := sys::_get_pg_type_for_edgedb_type(
-.id,
-.__type__.name,
-<uuid>{},
-[is schema::ScalarType].sql_type ?? (
-select [is schema::ScalarType]
-.bases[is schema::ScalarType] limit 1
-).sql_type,
-)
-}
-),
-_ := (
-UPDATE {schema::Array, schema::Range, schema::MultiRange}
-FILTER
-NOT (.abstract ?? False)
-AND NOT (.transient ?? False)
-SET {
-backend_id := sys::_get_pg_type_for_edgedb_type(
-.id,
-.__type__.name,
-.element_type.id,
-<str>{},
-)
-}
-),
-SELECT 1;
+UPDATE schema::ScalarType
+FILTER
+NOT (.abstract ?? False)
+AND NOT (.transient ?? False)
+SET {
+backend_id := sys::_get_pg_type_for_edgedb_type(
+.id,
+.__type__.name,
+<uuid>{},
+[is schema::ScalarType].sql_type ?? (
+select [is schema::ScalarType]
+.bases[is schema::ScalarType] limit 1
+).sql_type,
+)
+};
+UPDATE schema::Tuple
+FILTER
+NOT (.abstract ?? False)
+AND NOT (.transient ?? False)
+SET {
+backend_id := sys::_get_pg_type_for_edgedb_type(
+.id,
+.__type__.name,
+<uuid>{},
+[is schema::ScalarType].sql_type ?? (
+select [is schema::ScalarType]
+.bases[is schema::ScalarType] limit 1
+).sql_type,
+)
+};
+UPDATE {schema::Range, schema::MultiRange}
+FILTER
+NOT (.abstract ?? False)
+AND NOT (.transient ?? False)
+SET {
+backend_id := sys::_get_pg_type_for_edgedb_type(
+.id,
+.__type__.name,
+.element_type.id,
+<str>{},
+)
+};
+UPDATE schema::Array
+FILTER
+NOT (.abstract ?? False)
+AND NOT (.transient ?? False)
+SET {
+backend_id := sys::_get_pg_type_for_edgedb_type(
+.id,
+.__type__.name,
+.element_type.id,
+<str>{},
+)
+};
'''
_, sql = compile_bootstrap_script(
compiler,
schema,
backend_id_fixup_edgeql,
-expected_cardinality_one=True,
)
queries['backend_id_fixup'] = sql

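A rough way to sanity-check the fixup above after bootstrap (an illustrative query, not part of the commit): every non-abstract scalar should end up with a backend_id that resolves to a concrete Postgres type OID.

# Illustrative check only: list a few scalars together with their backend OIDs.
select schema::ScalarType { name, backend_id }
filter not (.abstract ?? false)
limit 5;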
4 changes: 2 additions & 2 deletions tests/schemas/dump02_setup.edgeql
@@ -19,8 +19,8 @@

SET MODULE default;

-CREATE MIGRATION m12hdldnmvzj5weaevxsmizppnl2poo6nconx2hcfkklbwcghqsmaq
-ONTO m1vyvlra26tef6oe6yu37m7lfw7i3ef3n62m6om353dvnbm3mynqqa {
+CREATE MIGRATION m1t2phsw6j2rgl4ieihm6mnvoln3ssayxncjzl2kwkxmunn2f6aqha
+ONTO m1iej6dr3hk33wykqwqgg4xxo3tivpiznpb2mto7qsw2zgipsbfihq {
CREATE TYPE default::Migrated;
create type default::Migrated2 {};
};
(diff for 1 more changed file not shown)