diff --git a/config/settings/common_settings.py b/config/settings/common_settings.py index 0aeab18afc..2d38507417 100644 --- a/config/settings/common_settings.py +++ b/config/settings/common_settings.py @@ -68,7 +68,6 @@ def pipe_delim(pipe_string): 'mathesar.rpc.collaborators', 'mathesar.rpc.columns', 'mathesar.rpc.columns.metadata', - 'mathesar.rpc.connections', 'mathesar.rpc.constraints', 'mathesar.rpc.data_modeling', 'mathesar.rpc.databases', diff --git a/docs/docs/api/rpc.md b/docs/docs/api/rpc.md index 133c416432..b01c54193e 100644 --- a/docs/docs/api/rpc.md +++ b/docs/docs/api/rpc.md @@ -17,7 +17,7 @@ To use an RPC function: !!! example - To call function `add_from_known_connection` from the `connections` section of this page, you'd send something like: + To call function `tables.list` from the Tables section of this page, you'd send something like: `POST /api/rpc/v0/` @@ -25,11 +25,11 @@ To use an RPC function: { "jsonrpc": "2.0", "id": 234, - "method": "connections.add_from_known_connection", + "method": "tables.list", "params": { - "nickname": "anewconnection", - "db_name": "mynewcooldb" - }, + "schema_oid": 47324, + "database_id": 1 + } } ``` @@ -128,17 +128,6 @@ Unrecognized errors from a given library return a "round number" code, so an unk - disconnect - ConfiguredDatabaseInfo - -## Connections - -::: connections - options: - members: - - add_from_known_connection - - add_from_scratch - - grant_access_to_user - - ConnectionReturn - ## Constraints ::: constraints diff --git a/mathesar/admin.py b/mathesar/admin.py index 6afc0e326d..9c195a904f 100644 --- a/mathesar/admin.py +++ b/mathesar/admin.py @@ -4,7 +4,6 @@ from mathesar.models.deprecated import Table, Schema, DataFile from mathesar.models.users import User from mathesar.models.query import Exploration -from mathesar.models.shares import SharedTable, SharedQuery class MathesarUserAdmin(UserAdmin): @@ -25,5 +24,3 @@ class MathesarUserAdmin(UserAdmin): admin.site.register(DataFile) admin.site.register(User, MathesarUserAdmin) admin.site.register(Exploration) -admin.site.register(SharedTable) -admin.site.register(SharedQuery) diff --git a/mathesar/api/db/permissions/__init__.py b/mathesar/api/db/permissions/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/mathesar/api/db/permissions/columns.py b/mathesar/api/db/permissions/columns.py deleted file mode 100644 index 24615d03ec..0000000000 --- a/mathesar/api/db/permissions/columns.py +++ /dev/null @@ -1,30 +0,0 @@ -from rest_access_policy import AccessPolicy - - -class ColumnAccessPolicy(AccessPolicy): - """ - Anyone can view a Column as long as they have - at least a Viewer access to the Schema or the database - Only superuser or schema/database manager can delete/modify/create a Column - """ - - statements = [ - { - 'action': ['list', 'retrieve'], - 'principal': '*', - 'effect': 'allow', - 'condition_expression': 'is_atleast_viewer_nested_table_resource' - }, - { - 'action': ['dependents'], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition_expression': 'is_atleast_viewer_nested_table_resource' - }, - { - 'action': ['destroy', 'update', 'partial_update', 'create'], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition_expression': 'is_atleast_manager_nested_table_resource' - }, - ] diff --git a/mathesar/api/db/permissions/constraint.py b/mathesar/api/db/permissions/constraint.py deleted file mode 100644 index dbc55ef75d..0000000000 --- a/mathesar/api/db/permissions/constraint.py +++ /dev/null @@ -1,23 +0,0 @@ -from 
rest_access_policy import AccessPolicy - - -class ConstraintAccessPolicy(AccessPolicy): - """ - Anyone can view Constraint as long as they have - at least a Viewer access to the schema or its database - Only superuser or schema/database manager can create/delete/update the Constraint - """ - statements = [ - { - 'action': ['list', 'retrieve'], - 'principal': '*', - 'effect': 'allow', - 'condition_expression': 'is_atleast_viewer_nested_table_resource' - }, - { - 'action': ['create', 'destroy', 'update', 'partial_update'], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition_expression': 'is_atleast_manager_nested_table_resource' - }, - ] diff --git a/mathesar/api/db/permissions/database.py b/mathesar/api/db/permissions/database.py deleted file mode 100644 index 41374c91a4..0000000000 --- a/mathesar/api/db/permissions/database.py +++ /dev/null @@ -1,36 +0,0 @@ -from django.db.models import Q -from rest_access_policy import AccessPolicy - -from mathesar.models.users import Role - - -class DatabaseAccessPolicy(AccessPolicy): - """ - Anyone can view Database objects and - Database properties like types and functions if they have a Viewer access - """ - statements = [ - { - 'action': ['list', 'retrieve', 'types', 'functions'], - 'principal': 'authenticated', - 'effect': 'allow', - }, - { - 'action': ['create', 'partial_update', 'destroy'], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition': 'is_superuser' - } - ] - - @classmethod - def scope_queryset(cls, request, qs): - if not (request.user.is_superuser or request.user.is_anonymous): - allowed_roles = (Role.MANAGER.value,) - if request.method.lower() == 'get': - allowed_roles = allowed_roles + (Role.EDITOR.value, Role.VIEWER.value) - qs = qs.filter( - Q(database_role__role__in=allowed_roles) - & Q(database_role__user=request.user) - ) - return qs diff --git a/mathesar/api/db/permissions/query.py b/mathesar/api/db/permissions/query.py deleted file mode 100644 index 5c6c0427fe..0000000000 --- a/mathesar/api/db/permissions/query.py +++ /dev/null @@ -1,94 +0,0 @@ -from django.db.models import Q -from rest_access_policy import AccessPolicy - -from mathesar.api.utils import SHARED_LINK_UUID_QUERY_PARAM -from mathesar.api.permission_utils import QueryAccessInspector -from mathesar.models.users import Role - -_statement_for_retrieving_single_queries = { - 'action': [ - 'retrieve', - 'results', - ], - 'principal': '*', - 'effect': 'allow', - 'condition_expression': 'is_atleast_query_viewer' -} - -_statement_for_other_actions = { - # Restrictions for the create method is done by the Serializers when creating the query, - # As the permissions depend on the base_table object and not on the query object. - 'action': [ - 'list', - 'destroy', - 'create', - 'run', - 'update', - 'partial_update', - 'columns', - 'records', - - ], - 'principal': 'authenticated', - 'effect': 'allow', -} - - -class QueryAccessPolicy(AccessPolicy): - - statements = [ - _statement_for_retrieving_single_queries, - _statement_for_other_actions - ] - - @staticmethod - def get_should_queryset_be_unscoped(viewset_action): - """ - Tells you if the queryset for passed viewset action should be scoped - using this class's `scope_queryset`. - - For purposes of access control, we split viewset actions (e.g. 'retrieve') - into those that are for retrieving single queries and the rest. The reason - is that single query retrieval might be performed anonymously, which - requires different access controls. 
- - More specifically, access during possibly-anonymous query retrieval is - controlled via `is_atleast_query_viewer`. While, for the rest of the - actions, access is controlled via `QueryAccessPolicy.scope_queryset`. - This is defined in `QueryAccessPolicy.statements`. - - Note, it is essential to handle action being `None` here, because it's - called again by `filter_queryset` with action `None` after the query is - formed. - """ - return ( - _get_is_action_for_retrieving_single_query(viewset_action) - or viewset_action is None - ) - - @classmethod - def scope_queryset(cls, request, qs): - if not (request.user.is_superuser or request.user.is_anonymous): - allowed_roles = (Role.MANAGER.value, Role.EDITOR.value, Role.VIEWER.value) - permissible_database_role_filter = ( - Q(base_table__schema__database__database_role__role__in=allowed_roles) - & Q(base_table__schema__database__database_role__user=request.user) - ) - permissible_schema_roles_filter = ( - Q(base_table__schema__schema_role__role__in=allowed_roles) - & Q(base_table__schema__schema_role__user=request.user) - ) - qs = qs.filter(permissible_database_role_filter | permissible_schema_roles_filter) - return qs - - def is_atleast_query_viewer(self, request, view, action): - query = view.get_object() - return QueryAccessInspector( - request.user, - query, - token=request.query_params.get(SHARED_LINK_UUID_QUERY_PARAM) - ).is_atleast_viewer() - - -def _get_is_action_for_retrieving_single_query(action): - return action in _statement_for_retrieving_single_queries['action'] diff --git a/mathesar/api/db/permissions/query_table.py b/mathesar/api/db/permissions/query_table.py deleted file mode 100644 index 540212ac99..0000000000 --- a/mathesar/api/db/permissions/query_table.py +++ /dev/null @@ -1,25 +0,0 @@ -from django.db.models import Q -from rest_access_policy import AccessPolicy - -from mathesar.models.users import Role - - -class QueryTableAccessPolicy(AccessPolicy): - - """ - Used for scoping Table queryset when creating a query. - We cannot use TableAccessPolicy as it restricts creation if a user does not have write access but a Query can be created by a Viewer too - """ - @classmethod - def scope_queryset(cls, request, qs): - allowed_roles = (Role.MANAGER.value, Role.EDITOR.value, Role.VIEWER.value) - if not (request.user.is_superuser or request.user.is_anonymous): - permissible_database_role_filter = ( - Q(schema__database__database_role__role__in=allowed_roles) - & Q(schema__database__database_role__user=request.user) - ) - permissible_schema_roles_filter = ( - Q(schema__schema_role__role__in=allowed_roles) & Q(schema__schema_role__user=request.user) - ) - qs = qs.filter(permissible_database_role_filter | permissible_schema_roles_filter) - return qs diff --git a/mathesar/api/db/permissions/records.py b/mathesar/api/db/permissions/records.py deleted file mode 100644 index a30d3121a3..0000000000 --- a/mathesar/api/db/permissions/records.py +++ /dev/null @@ -1,26 +0,0 @@ -from rest_access_policy import AccessPolicy - - -class RecordAccessPolicy(AccessPolicy): - """ - Anyone can view a Record as long as they are a superuser or have - at least a Viewer access to the Schema or the Database of the Table. 
- The permissions trickle down, so if someone has a Viewer Access for a Database - They automatically become a Schema Viewer - Refer https://wiki.mathesar.org/en/product/specs/users-permissions#database-permissions - Only superuser or schema/database Manager/Editor can delete/modify/create a Record - """ - statements = [ - { - 'action': ['list', 'retrieve'], - 'principal': '*', - 'effect': 'allow', - 'condition_expression': 'is_atleast_viewer_nested_table_resource' - }, - { - 'action': ['destroy', 'update', 'partial_update', 'create', 'delete'], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition_expression': 'is_atleast_editor_nested_table_resource' - }, - ] diff --git a/mathesar/api/db/permissions/schema.py b/mathesar/api/db/permissions/schema.py deleted file mode 100644 index 965cbb70ac..0000000000 --- a/mathesar/api/db/permissions/schema.py +++ /dev/null @@ -1,75 +0,0 @@ -from django.db.models import Q -from rest_access_policy import AccessPolicy - -from mathesar.models.users import DatabaseRole, Role, SchemaRole - - -class SchemaAccessPolicy(AccessPolicy): - """ - Anyone can view a Schema as long as they have at least a Viewer access to that Schema. - Creating is restricted to superusers or managers of the Database - Destroying/Modifying is restricted to superusers or managers of the Schema - """ - statements = [ - # Restrictions for the create method is done by the Serializers when creating the schema, - # As the permissions depend on the database object. - { - 'action': ['list', 'retrieve', 'create', 'dependents'], - 'principal': 'authenticated', - 'effect': 'allow', - }, - { - 'action': ['destroy', 'update', 'partial_update'], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition_expression': ['(is_superuser or is_schema_manager)'] - }, - ] - - @classmethod - def _scope_queryset(cls, request, qs, allowed_roles): - if not (request.user.is_superuser or request.user.is_anonymous): - permissible_database_role_filter = ( - Q(database__database_role__role__in=allowed_roles) & Q(database__database_role__user=request.user) - ) - permissible_schema_roles_filter = ( - Q(schema_role__role__in=allowed_roles) & Q(schema_role__user=request.user) - ) - qs = qs.filter(permissible_database_role_filter | permissible_schema_roles_filter) - return qs - - @classmethod - def scope_queryset(cls, request, qs): - """ - Used for scoping the queryset of Serializer RelatedField which reference a Schema - """ - allowed_roles = (Role.MANAGER.value,) - - if request.method.lower() == 'get': - allowed_roles = allowed_roles + (Role.EDITOR.value, Role.VIEWER.value) - return SchemaAccessPolicy._scope_queryset(request, qs, allowed_roles) - - @classmethod - def scope_viewset_queryset(cls, request, qs): - """ - Used for scoping queryset of the SchemaViewSet. - Filters out all the schema the user has Viewer access, - Restrictions are then applied based on the request method using the Policy statements. 
- This helps us to throw correct error status code instead of a 404 error code - """ - allowed_roles = (Role.MANAGER.value, Role.EDITOR.value, Role.VIEWER.value) - return SchemaAccessPolicy._scope_queryset(request, qs, allowed_roles) - - def is_schema_manager(self, request, view, action): - schema = view.get_object() - is_schema_manager = SchemaRole.objects.filter( - user=request.user, - schema=schema, - role=Role.MANAGER.value - ).exists() - is_db_manager = DatabaseRole.objects.filter( - user=request.user, - database=schema.database, - role=Role.MANAGER.value - ).exists() - return is_db_manager or is_schema_manager diff --git a/mathesar/api/db/permissions/table.py b/mathesar/api/db/permissions/table.py deleted file mode 100644 index 2ea4001a7d..0000000000 --- a/mathesar/api/db/permissions/table.py +++ /dev/null @@ -1,127 +0,0 @@ -from django.db.models import Q -from rest_access_policy import AccessPolicy - -from mathesar.api.utils import SHARED_LINK_UUID_QUERY_PARAM -from mathesar.api.permission_utils import TableAccessInspector -from mathesar.models.users import Role - - -class TableAccessPolicy(AccessPolicy): - """ - Anyone can view Table as long as they have - at least a Viewer access to the schema or its database - Create Access is restricted to superusers or managers of the schema or the database the table is part of. - Only superuser or schema/database manager can delete/modify/update the Table - """ - - statements = [ - { - 'action': [ - 'retrieve', - ], - 'principal': '*', - 'effect': 'allow', - 'condition_expression': 'is_atleast_table_viewer' - }, - { - # Restrictions for the create method is done by the Serializers when creating the schema, - # As the permissions depend on the database object. - 'action': [ - 'list', - 'create', - 'type_suggestions', - 'dependents', - 'ui_dependents', - 'joinable_tables', - ], - 'principal': 'authenticated', - 'effect': 'allow', - }, - { - 'action': [ - 'destroy', - 'update', - 'partial_update', - 'split_table', - 'move_columns', - 'previews', - 'existing_import', - 'map_imported_columns' - ], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition_expression': 'is_atleast_table_manager' - }, - ] - - @classmethod - def _scope_queryset(cls, request, qs, allowed_roles): - if not (request.user.is_superuser or request.user.is_anonymous): - db_role_access_filter = ( - Q(schema__database__database_role__role__in=allowed_roles) - & Q(schema__database__database_role__user=request.user) - ) - schema_role_access_filter = ( - Q(schema__schema_role__role__in=allowed_roles) - & Q(schema__schema_role__user=request.user) - ) - - # Filters to check whether user is a db/schema manager - is_database_manager = ( - Q(schema__database__database_role__role='manager') - & Q(schema__database__database_role__user=request.user) - ) - is_schema_manager = ( - Q(schema__schema_role__role='manager') - & Q(schema__schema_role__user=request.user) - ) - - # Filter for confirmed tables - cnf_table_filter = (Q(import_verified=True) | Q(import_verified__isnull=True)) - - # Filters for the purpose of including/removing unconfirmed tables based on user's role - permissible_database_role_filter = ( - is_database_manager & Q(import_verified=False) - | cnf_table_filter & db_role_access_filter - ) - permissible_schema_roles_filter = ( - is_schema_manager & Q(import_verified=False) - | cnf_table_filter & schema_role_access_filter - ) - - qs = qs.filter(permissible_database_role_filter | permissible_schema_roles_filter) - return qs - - @classmethod - def scope_queryset(cls, 
request, qs): - """ - Used for scoping the queryset of Serializer RelatedField which reference a Table - """ - allowed_roles = (Role.MANAGER.value,) - - if request.method.lower() == 'get': - allowed_roles = allowed_roles + (Role.EDITOR.value, Role.VIEWER.value) - return TableAccessPolicy._scope_queryset(request, qs, allowed_roles) - - @classmethod - def scope_viewset_queryset(cls, request, qs): - """ - Used for scoping queryset of the TableViewSet. - It is used for listing all the table the user has Viewer access. - Restrictions are then applied based on the request method using the Policy statements. - This helps us to throw correct error status code instead of a 404 error code - """ - allowed_roles = (Role.MANAGER.value, Role.EDITOR.value, Role.VIEWER.value) - return TableAccessPolicy._scope_queryset(request, qs, allowed_roles) - - def is_atleast_table_viewer(self, request, view, action): - table = view.get_object() - return TableAccessInspector( - request.user, - table, - token=request.query_params.get(SHARED_LINK_UUID_QUERY_PARAM) - ).is_atleast_viewer() - - def is_atleast_table_manager(self, request, view, action): - table = view.get_object() - return TableAccessInspector(request.user, table).is_atleast_manager() diff --git a/mathesar/api/db/permissions/table_settings.py b/mathesar/api/db/permissions/table_settings.py deleted file mode 100644 index 660cdac021..0000000000 --- a/mathesar/api/db/permissions/table_settings.py +++ /dev/null @@ -1,57 +0,0 @@ -from django.db.models import Q -from rest_access_policy import AccessPolicy - -from mathesar.models.users import DatabaseRole, Role, SchemaRole - - -class TableSettingAccessPolicy(AccessPolicy): - # Anyone can view a Table Setting as long as they have - # at least a Viewer access to the schema or its database - # Create Access is restricted to superusers or managers of the schema or the database. 
- statements = [ - { - 'action': ['list', 'retrieve', 'create'], - 'principal': 'authenticated', - 'effect': 'allow', - }, - # Only superuser or schema/database manager can delete the setting - { - 'action': ['destroy', 'update', 'partial_update'], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition_expression': ['(is_superuser or is_table_editor)'] - }, - ] - - @classmethod - def scope_queryset(cls, request, qs): - if not (request.user.is_superuser or request.user.is_anonymous): - allowed_roles = (Role.MANAGER.value, Role.EDITOR.value, Role.VIEWER.value,) - permissible_database_role_filter = ( - Q(table__schema__database__database_role__role__in=allowed_roles) - & Q(table__schema__database__database_role__user=request.user) - ) - permissible_schema_roles_filter = ( - Q(table__schema__schema_role__role__in=allowed_roles) - & Q(table__schema__schema_role__user=request.user) - ) - qs = qs.filter(permissible_database_role_filter | permissible_schema_roles_filter) - - return qs - - def is_table_editor(self, request, view, action): - # Column access control is based on Schema and Database Roles as of now - # TODO Include Table Role based access when Table Roles are introduced - setting = view.get_object() - editor_permission_roles = (Role.MANAGER.value, Role.EDITOR.value) - is_schema_manager = SchemaRole.objects.filter( - user=request.user, - schema=setting.table.schema, - role__in=editor_permission_roles - ).exists() - is_db_manager = DatabaseRole.objects.filter( - user=request.user, - database=setting.table.schema.database, - role__in=editor_permission_roles - ).exists() - return is_db_manager or is_schema_manager diff --git a/mathesar/api/db/viewsets/__init__.py b/mathesar/api/db/viewsets/__init__.py index 4ac927fdbc..4b4ffe8ae4 100644 --- a/mathesar/api/db/viewsets/__init__.py +++ b/mathesar/api/db/viewsets/__init__.py @@ -1,10 +1 @@ -from mathesar.api.db.viewsets.columns import ColumnViewSet # noqa -from mathesar.api.db.viewsets.constraints import ConstraintViewSet # noqa from mathesar.api.db.viewsets.data_files import DataFileViewSet # noqa -from mathesar.api.db.viewsets.databases import ConnectionViewSet # noqa -from mathesar.api.db.viewsets.records import RecordViewSet # noqa -from mathesar.api.db.viewsets.schemas import SchemaViewSet # noqa -from mathesar.api.db.viewsets.table_settings import TableSettingsViewSet # noqa -from mathesar.api.db.viewsets.tables import TableViewSet # noqa -from mathesar.api.db.viewsets.queries import QueryViewSet # noqa -from mathesar.api.db.viewsets.links import LinkViewSet # noqa diff --git a/mathesar/api/db/viewsets/columns.py b/mathesar/api/db/viewsets/columns.py deleted file mode 100644 index 6e6b2b76af..0000000000 --- a/mathesar/api/db/viewsets/columns.py +++ /dev/null @@ -1,199 +0,0 @@ -import warnings -from psycopg.errors import DuplicateColumn, InvalidTextRepresentation, NotNullViolation -from psycopg2.errors import StringDataRightTruncation -from rest_access_policy import AccessViewSetMixin -from rest_framework import status, viewsets -from rest_framework.decorators import action -from rest_framework.exceptions import NotFound -from rest_framework.response import Response -from rest_framework.permissions import IsAuthenticatedOrReadOnly - -from mathesar.api.db.permissions.columns import ColumnAccessPolicy -from mathesar.api.exceptions.database_exceptions import exceptions as database_api_exceptions -from mathesar.api.exceptions.generic_exceptions import base_exceptions as base_api_exceptions -from db.columns.exceptions import ( - 
DynamicDefaultWarning, InvalidDefaultError, InvalidTypeOptionError, InvalidTypeError -) -from db.types.exceptions import InvalidTypeParameters -from mathesar.api.serializers.dependents import DependentSerializer, DependentFilterSerializer -from db.records.exceptions import UndefinedFunction -from mathesar.api.pagination import DefaultLimitOffsetPagination -from mathesar.api.serializers.columns import ColumnSerializer -from mathesar.api.utils import get_table_or_404 -from mathesar.models.deprecated import Column - - -class ColumnViewSet(AccessViewSetMixin, viewsets.ModelViewSet): - serializer_class = ColumnSerializer - pagination_class = DefaultLimitOffsetPagination - permission_classes = [IsAuthenticatedOrReadOnly] - access_policy = ColumnAccessPolicy - - def get_queryset(self): - queryset = Column.objects.filter(table=self.kwargs['table_pk']).order_by('attnum') - # Prefetching instead of using select_related because select_related uses joins, - # and we need a reuse of individual Django object instead of its data - prefetched_queryset = queryset.prefetch_related('table').prefetch('name') - return prefetched_queryset - - def create(self, request, table_pk=None): - table = get_table_or_404(table_pk) - # We only support adding a single column through the API. - serializer = ColumnSerializer(data=request.data, context={'request': request}) - serializer.is_valid(raise_exception=True) - type_options = request.data.get('type_options', None) - if 'source_column' in serializer.validated_data: - column_attnum = table.duplicate_column( - serializer.validated_data['source_column'], - serializer.validated_data['copy_source_data'], - serializer.validated_data['copy_source_constraints'], - serializer.validated_data.get('name'), - ) - else: - try: - # TODO Refactor add_column to user serializer validated date instead of request data - column_attnum = table.add_column(request.data)[0] - except DuplicateColumn as e: - name = request.data['name'] - raise database_api_exceptions.DuplicateTableAPIException( - e, - message=f'Column {name} already exists', - field='name', - status_code=status.HTTP_400_BAD_REQUEST - ) - except TypeError as e: - raise base_api_exceptions.TypeErrorAPIException( - e, - message="Unknown type_option passed", - status_code=status.HTTP_400_BAD_REQUEST - ) - except InvalidDefaultError as e: - raise database_api_exceptions.InvalidDefaultAPIException( - e, - message=f'default "{request.data["default"]}" is invalid for type {request.data["type"]}', - status_code=status.HTTP_400_BAD_REQUEST - ) - except (InvalidTypeOptionError, InvalidTypeParameters) as e: - type_options = request.data.get('type_options', '') - raise database_api_exceptions.InvalidTypeOptionAPIException( - e, - message=f'parameter dict {type_options} is invalid for type {request.data["type"]}', - field="type_options", - status_code=status.HTTP_400_BAD_REQUEST - ) - except InvalidTypeError as e: - raise database_api_exceptions.InvalidTypeCastAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST - ) - # The created column's Django model was automatically reflected. It can be reflected. - dj_column = Column.objects.get( - table=table, - attnum=column_attnum, - ) - # Some properties of the column are not reflected (e.g. display options). Here we add those - # attributes to the reflected model. 
- if serializer.validated_model_fields: - for k, v in serializer.validated_model_fields.items(): - setattr(dj_column, k, v) - dj_column.save() - out_serializer = ColumnSerializer(dj_column) - return Response(out_serializer.data, status=status.HTTP_201_CREATED) - - def partial_update(self, request, pk=None, table_pk=None): - column_instance = self.get_object() - table = column_instance.table - serializer = ColumnSerializer(instance=column_instance, data=request.data, partial=True) - serializer.is_valid(raise_exception=True) - with warnings.catch_warnings(): - warnings.filterwarnings("error", category=DynamicDefaultWarning) - try: - attnum = column_instance._sa_column.column_attnum - table.alter_column(attnum, serializer.validated_data) - except UndefinedFunction as e: - raise database_api_exceptions.UndefinedFunctionAPIException( - e, - message='This type cast is not implemented', - status_code=status.HTTP_400_BAD_REQUEST - ) - except InvalidTextRepresentation as e: - raise database_api_exceptions.InvalidTypeCastAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST - ) - except IndexError as e: - raise base_api_exceptions.NotFoundAPIException(e) - except TypeError as e: - # TODO this error is actually much more general than just badly specified - # type_options problems. e.g. if a bad keyword argument is passed to a function, - # TypeError will be raised. - raise database_api_exceptions.InvalidTypeOptionAPIException( - e, - message="Unknown type_option passed", - status_code=status.HTTP_400_BAD_REQUEST - ) - except InvalidDefaultError as e: - raise database_api_exceptions.InvalidDefaultAPIException( - e, - message=f'default "{request.data["default"]}" is invalid for this column', - status_code=status.HTTP_400_BAD_REQUEST - ) - except DynamicDefaultWarning as e: - raise database_api_exceptions.DynamicDefaultAPIException( - e, - message='Changing type of columns with dynamically-generated defaults is not supported.' 
- 'Delete or change the default first.', - status_code=status.HTTP_400_BAD_REQUEST - ) - except (InvalidTypeOptionError, InvalidTypeParameters) as e: - type_options = request.data.get('type_options', '') - raise database_api_exceptions.InvalidTypeOptionAPIException( - e, - message=f'parameter dict {type_options} is invalid for type {request.data["type"]}', - status_code=status.HTTP_400_BAD_REQUEST - ) - except InvalidTypeError as e: - raise database_api_exceptions.InvalidTypeCastAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST - ) - except NotNullViolation as e: - raise database_api_exceptions.NotNullViolationAPIException( - e, - field="nullable", - status_code=status.HTTP_400_BAD_REQUEST, - table=table, - ) - except StringDataRightTruncation as e: - raise database_api_exceptions.InvalidTypeOptionAPIException( - e, - message='The requested string length is too short for the data in the selected column', - status_code=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - raise base_api_exceptions.MathesarAPIException(e) - - serializer.update(column_instance, serializer.validated_model_fields) - # Invalidate the cache as the underlying columns have changed - column_instance = self.get_object() - out_serializer = ColumnSerializer(column_instance) - return Response(out_serializer.data) - - @action(methods=['get'], detail=True) - def dependents(self, request, pk=None, table_pk=None): - serializer = DependentFilterSerializer(data=request.GET) - serializer.is_valid(raise_exception=True) - types_exclude = serializer.validated_data['exclude'] - - column = self.get_object() - serializer = DependentSerializer(column.get_dependents(types_exclude), many=True, context={'request': request}) - return Response(serializer.data) - - def destroy(self, request, pk=None, table_pk=None): - column_instance = self.get_object() - table = column_instance.table - try: - table.drop_column(column_instance.attnum) - except IndexError: - raise NotFound - return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/mathesar/api/db/viewsets/constraints.py b/mathesar/api/db/viewsets/constraints.py deleted file mode 100644 index 9e4aece735..0000000000 --- a/mathesar/api/db/viewsets/constraints.py +++ /dev/null @@ -1,42 +0,0 @@ -from psycopg2.errors import UndefinedObject -from rest_access_policy import AccessViewSetMixin -from rest_framework import status, viewsets -from rest_framework.mixins import CreateModelMixin, ListModelMixin, RetrieveModelMixin -from rest_framework.response import Response -from rest_framework.permissions import IsAuthenticatedOrReadOnly -from sqlalchemy.exc import ProgrammingError - -import mathesar.api.exceptions.database_exceptions.base_exceptions as base_database_api_exceptions -import mathesar.api.exceptions.generic_exceptions.base_exceptions as base_api_exceptions -from mathesar.api.db.permissions.constraint import ConstraintAccessPolicy -from mathesar.api.pagination import DefaultLimitOffsetPagination -from mathesar.api.serializers.constraints import ConstraintSerializer -from mathesar.api.utils import get_table_or_404 -from mathesar.models.deprecated import Constraint - - -class ConstraintViewSet(AccessViewSetMixin, ListModelMixin, RetrieveModelMixin, CreateModelMixin, viewsets.GenericViewSet): - serializer_class = ConstraintSerializer - pagination_class = DefaultLimitOffsetPagination - permission_classes = [IsAuthenticatedOrReadOnly] - access_policy = ConstraintAccessPolicy - - def get_queryset(self): - return 
Constraint.objects.filter(table__id=self.kwargs['table_pk']).order_by('-created_at') - - def get_serializer_context(self): - context = super().get_serializer_context() - context['table'] = get_table_or_404(self.kwargs['table_pk']) - context['table_id'] = self.kwargs['table_pk'] - return context - - def destroy(self, request, pk=None, table_pk=None): - constraint = self.get_object() - try: - constraint.drop() - except ProgrammingError as e: - if type(e.orig) is UndefinedObject: - raise base_api_exceptions.NotFoundAPIException(e) - else: - raise base_database_api_exceptions.ProgrammingAPIException(e) - return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/mathesar/api/db/viewsets/databases.py b/mathesar/api/db/viewsets/databases.py deleted file mode 100644 index 0edac6828e..0000000000 --- a/mathesar/api/db/viewsets/databases.py +++ /dev/null @@ -1,56 +0,0 @@ -from django_filters import rest_framework as filters -from rest_access_policy import AccessViewSetMixin -from rest_framework import viewsets, status -from rest_framework.decorators import action -from rest_framework.response import Response - -from mathesar.api.db.permissions.database import DatabaseAccessPolicy -from mathesar.models.deprecated import Connection -from mathesar.api.dj_filters import DatabaseFilter -from mathesar.api.pagination import DefaultLimitOffsetPagination - -from mathesar.api.serializers.databases import ConnectionSerializer - -from db.functions.operations.check_support import get_supported_db_functions -from mathesar.api.serializers.functions import DBFunctionSerializer -from db.types.base import get_available_known_db_types -from db.types.install import uninstall_mathesar_from_database -from mathesar.api.serializers.db_types import DBTypeSerializer - - -class ConnectionViewSet(AccessViewSetMixin, viewsets.ModelViewSet): - serializer_class = ConnectionSerializer - pagination_class = DefaultLimitOffsetPagination - filter_backends = (filters.DjangoFilterBackend,) - filterset_class = DatabaseFilter - access_policy = DatabaseAccessPolicy - - def get_queryset(self): - return self.access_policy.scope_queryset( - self.request, - Connection.objects.all().order_by('-created_at') - ) - - def destroy(self, request, pk=None): - db_object = self.get_object() - if request.query_params.get('del_msar_schemas').lower() == 'true': - engine = db_object._sa_engine - uninstall_mathesar_from_database(engine) - db_object.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - - @action(methods=['get'], detail=True) - def functions(self, request, pk=None): - database = self.get_object() - engine = database._sa_engine - supported_db_functions = get_supported_db_functions(engine) - serializer = DBFunctionSerializer(supported_db_functions, many=True) - return Response(serializer.data) - - @action(methods=['get'], detail=True) - def types(self, request, pk=None): - database = self.get_object() - engine = database._sa_engine - available_known_db_types = get_available_known_db_types(engine) - serializer = DBTypeSerializer(available_known_db_types, many=True) - return Response(serializer.data) diff --git a/mathesar/api/db/viewsets/links.py b/mathesar/api/db/viewsets/links.py deleted file mode 100644 index aeb4a42757..0000000000 --- a/mathesar/api/db/viewsets/links.py +++ /dev/null @@ -1,16 +0,0 @@ -from rest_framework.mixins import CreateModelMixin -from rest_framework.viewsets import GenericViewSet -from mathesar.api.pagination import DefaultLimitOffsetPagination -from mathesar.api.serializers.links import 
LinksMappingSerializer - - -class LinkViewSet(CreateModelMixin, GenericViewSet): - serializer_class = LinksMappingSerializer - pagination_class = DefaultLimitOffsetPagination - - def get_queryset(self): - return [] - - def create(self, request, *args, **kwargs): - response = super().create(request, *args, **kwargs) - return response diff --git a/mathesar/api/db/viewsets/queries.py b/mathesar/api/db/viewsets/queries.py deleted file mode 100644 index c9549fd238..0000000000 --- a/mathesar/api/db/viewsets/queries.py +++ /dev/null @@ -1,160 +0,0 @@ -import json -from django_filters import rest_framework as filters -from rest_access_policy import AccessViewSetMixin - -from rest_framework import viewsets -from rest_framework.mixins import ListModelMixin, RetrieveModelMixin, CreateModelMixin, UpdateModelMixin, DestroyModelMixin -from rest_framework.response import Response -from rest_framework.permissions import IsAuthenticatedOrReadOnly -from rest_framework.decorators import action - -from mathesar.api.db.permissions.query import QueryAccessPolicy -from mathesar.api.dj_filters import ExplorationFilter - -from mathesar.api.exceptions.query_exceptions.exceptions import DeletedColumnAccess, DeletedColumnAccessAPIException -from mathesar.api.pagination import DefaultLimitOffsetPagination, TableLimitOffsetPagination -from mathesar.api.serializers.queries import BaseQuerySerializer, QuerySerializer -from mathesar.api.serializers.records import RecordListParameterSerializer -from mathesar.models.query import Exploration - - -class QueryViewSet( - AccessViewSetMixin, - CreateModelMixin, - UpdateModelMixin, - RetrieveModelMixin, - ListModelMixin, - DestroyModelMixin, - viewsets.GenericViewSet -): - serializer_class = QuerySerializer - pagination_class = DefaultLimitOffsetPagination - filter_backends = (filters.DjangoFilterBackend,) - filterset_class = ExplorationFilter - permission_classes = [IsAuthenticatedOrReadOnly] - access_policy = QueryAccessPolicy - - def get_queryset(self): - queryset = self._get_scoped_queryset() - schema_id = self.request.query_params.get('schema') - if schema_id: - queryset = queryset.filter(base_table__schema=schema_id) - return queryset.order_by('-created_at') - - def _get_scoped_queryset(self): - """ - Returns a properly scoped queryset. - - Access to queries may require different access controls, some of which - include scoping while others do not. See - `QueryAccessPolicy.get_should_queryset_be_unscoped` docstring for more - information. 
- """ - should_queryset_be_scoped = \ - not QueryAccessPolicy.get_should_queryset_be_unscoped(self.action) - if should_queryset_be_scoped: - queryset = self.access_policy.scope_queryset( - self.request, - Exploration.objects.all() - ) - else: - queryset = Exploration.objects.all() - return queryset - - @action(methods=['get'], detail=True) - def records(self, request, pk=None): - paginator = TableLimitOffsetPagination() - query = self.get_object() - serializer = RecordListParameterSerializer(data=request.GET) - serializer.is_valid(raise_exception=True) - records = paginator.paginate_queryset( - queryset=self.get_queryset(), - request=request, - table=query, - filters=serializer.validated_data['filter'], - order_by=serializer.validated_data['order_by'], - grouping=serializer.validated_data['grouping'], - search=serializer.validated_data['search_fuzzy'], - duplicate_only=serializer.validated_data['duplicate_only'], - ) - return paginator.get_paginated_response(records) - - @action(methods=['get'], detail=True) - def columns(self, request, pk=None): - query = self.get_object() - output_col_desc = query.output_columns_described - return Response(output_col_desc) - - @action(methods=['get'], detail=True) - def results(self, request, pk=None): - paginator = TableLimitOffsetPagination() - query = self.get_object() - serializer = RecordListParameterSerializer(data=request.GET) - serializer.is_valid(raise_exception=True) - records = paginator.paginate_queryset( - queryset=self.get_queryset(), - request=request, - table=query, - filters=serializer.validated_data['filter'], - order_by=serializer.validated_data['order_by'], - grouping=serializer.validated_data['grouping'], - search=serializer.validated_data['search_fuzzy'], - duplicate_only=serializer.validated_data['duplicate_only'], - ) - paginated_records = paginator.get_paginated_response(records) - columns = query.output_columns_simple - column_metadata = query.all_columns_description_map - return Response( - { - "records": paginated_records.data, - "output_columns": columns, - "column_metadata": column_metadata, - } - ) - - @action(methods=['post'], detail=False) - def run(self, request): - params = request.data.pop("parameters", {}) - request.GET |= {k: [json.dumps(v)] for k, v in params.items()} - paginator = TableLimitOffsetPagination() - input_serializer = BaseQuerySerializer(data=request.data, context={'request': request}) - input_serializer.is_valid(raise_exception=True) - query = Exploration(**input_serializer.validated_data) - try: - query.replace_transformations_with_processed_transformations() - query.add_defaults_to_display_names() - record_serializer = RecordListParameterSerializer(data=request.GET) - record_serializer.is_valid(raise_exception=True) - output_serializer = BaseQuerySerializer(query) - records = paginator.paginate_queryset( - queryset=self.get_queryset(), - request=request, - table=query, - filters=record_serializer.validated_data['filter'], - order_by=record_serializer.validated_data['order_by'], - grouping=record_serializer.validated_data['grouping'], - search=record_serializer.validated_data['search_fuzzy'], - duplicate_only=record_serializer.validated_data['duplicate_only'], - ) - paginated_records = paginator.get_paginated_response(records) - except DeletedColumnAccess as e: - output_serializer = BaseQuerySerializer(query) - raise DeletedColumnAccessAPIException(e, query=output_serializer.data) - columns = query.output_columns_simple - column_metadata = query.all_columns_description_map - - def _get_param_val(val): 
- try: - ret_val = json.loads(val) - except json.JSONDecodeError: - ret_val = val - return ret_val - return Response( - { - "query": output_serializer.data, - "records": paginated_records.data, - "output_columns": columns, - "column_metadata": column_metadata, - "parameters": {k: _get_param_val(request.GET[k]) for k in request.GET}, - } - ) diff --git a/mathesar/api/db/viewsets/records.py b/mathesar/api/db/viewsets/records.py deleted file mode 100644 index b28782371f..0000000000 --- a/mathesar/api/db/viewsets/records.py +++ /dev/null @@ -1,252 +0,0 @@ -from psycopg2.errors import ForeignKeyViolation, InvalidDatetimeFormat, DatetimeFieldOverflow -from rest_access_policy import AccessViewSetMixin -from rest_framework import status, viewsets -from rest_framework.exceptions import NotFound, MethodNotAllowed -from rest_framework.renderers import BrowsableAPIRenderer -from rest_framework.response import Response -from rest_framework.permissions import IsAuthenticatedOrReadOnly -from sqlalchemy.exc import IntegrityError, DataError - -from mathesar.api.db.permissions.records import RecordAccessPolicy -from mathesar.api.exceptions.error_codes import ErrorCodes -import mathesar.api.exceptions.database_exceptions.exceptions as database_api_exceptions -import mathesar.api.exceptions.generic_exceptions.base_exceptions as generic_api_exceptions -from db.functions.exceptions import ( - BadDBFunctionFormat, ReferencedColumnsDontExist, UnknownDBFunctionID, -) -from db.records.exceptions import ( - BadGroupFormat, GroupFieldNotFound, InvalidGroupType, UndefinedFunction, - BadSortFormat, SortFieldNotFound -) -from mathesar.api.pagination import TableLimitOffsetPagination -from mathesar.api.serializers.records import RecordListParameterSerializer, RecordSerializer -from mathesar.api.utils import get_table_or_404 -from mathesar.functions.operations.convert import rewrite_db_function_spec_column_ids_to_names -from mathesar.models.deprecated import Table -from mathesar.utils.json import MathesarJSONRenderer - - -class RecordViewSet(AccessViewSetMixin, viewsets.ViewSet): - permission_classes = [IsAuthenticatedOrReadOnly] - access_policy = RecordAccessPolicy - - # There is no 'update' method. - # We're not supporting PUT requests because there aren't a lot of use cases - # where the entire record needs to be replaced, PATCH suffices for updates. 
- def get_queryset(self): - return Table.objects.all().order_by('-created_at') - - renderer_classes = [MathesarJSONRenderer, BrowsableAPIRenderer] - - # For filter parameter formatting, see: - # db/functions/operations/deserialize.py::get_db_function_from_ma_function_spec function doc> - def list(self, request, table_pk=None): - paginator = TableLimitOffsetPagination() - - serializer = RecordListParameterSerializer(data=request.GET) - serializer.is_valid(raise_exception=True) - table = get_table_or_404(table_pk) - - filter_unprocessed = serializer.validated_data['filter'] - order_by = serializer.validated_data['order_by'] - grouping = serializer.validated_data['grouping'] - search_fuzzy = serializer.validated_data['search_fuzzy'] - filter_processed = None - column_names_to_ids = table.get_column_name_id_bidirectional_map() - column_ids_to_names = column_names_to_ids.inverse - if filter_unprocessed: - filter_processed = rewrite_db_function_spec_column_ids_to_names( - column_ids_to_names=column_ids_to_names, - spec=filter_unprocessed, - ) - # Replace column id value used in the `field` property with column name - name_converted_group_by = None - if grouping: - group_by_columns_names = [column_ids_to_names[column_id] for column_id in grouping['columns']] - name_converted_group_by = {**grouping, 'columns': group_by_columns_names} - name_converted_order_by = [{**column, 'field': column_ids_to_names[column['field']]} for column in order_by] - name_converted_search = [{**column, 'column': column_ids_to_names[column['field']]} for column in search_fuzzy] - - try: - records = paginator.paginate_queryset( - self.get_queryset(), request, table, column_names_to_ids, - filters=filter_processed, - order_by=name_converted_order_by, - grouping=name_converted_group_by, - search=name_converted_search, - duplicate_only=serializer.validated_data['duplicate_only'] - ) - except (BadDBFunctionFormat, UnknownDBFunctionID, ReferencedColumnsDontExist) as e: - raise database_api_exceptions.BadFilterAPIException( - e, - field='filters', - status_code=status.HTTP_400_BAD_REQUEST - ) - except (BadSortFormat, SortFieldNotFound) as e: - raise database_api_exceptions.BadSortAPIException( - e, - field='order_by', - status_code=status.HTTP_400_BAD_REQUEST - ) - except (BadGroupFormat, GroupFieldNotFound, InvalidGroupType) as e: - raise database_api_exceptions.BadGroupAPIException( - e, - field='grouping', - status_code=status.HTTP_400_BAD_REQUEST - ) - except UndefinedFunction as e: - raise database_api_exceptions.UndefinedFunctionAPIException( - e, - details=e.args[0], - status_code=status.HTTP_400_BAD_REQUEST - ) - except DataError as e: - if isinstance(e.orig, InvalidDatetimeFormat): - raise database_api_exceptions.InvalidDateFormatAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST, - ) - elif isinstance(e.orig, DatetimeFieldOverflow): - raise database_api_exceptions.InvalidDateAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST, - ) - else: - raise database_api_exceptions.MathesarAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST - ) - - serializer = RecordSerializer( - records, - many=True, - context=self.get_serializer_context(table) - ) - return paginator.get_paginated_response(serializer.data) - - def retrieve(self, request, pk=None, table_pk=None): - table = get_table_or_404(table_pk) - # TODO refactor to use serializer for more DRY response logic - paginator = TableLimitOffsetPagination() - record_filters = { - "equal": [ - {"column_name": [table.primary_key_column_name]}, - 
{"literal": [pk]} - ] - } - column_names_to_ids = table.get_column_name_id_bidirectional_map() - records = paginator.paginate_queryset( - table, - request, - table, - column_names_to_ids, - filters=record_filters - ) - if not records: - raise NotFound - serializer = RecordSerializer( - records, - many=True, - context=self.get_serializer_context(table) - ) - return paginator.get_paginated_response(serializer.data) - - def create(self, request, table_pk=None): - table = get_table_or_404(table_pk) - primary_key_column_name = None - try: - primary_key_column_name = table.primary_key_column_name - except AssertionError: - raise generic_api_exceptions.MethodNotAllowedAPIException( - MethodNotAllowed, - error_code=ErrorCodes.MethodNotAllowed.value, - message="You cannot insert into tables without a primary key" - ) - serializer = RecordSerializer(data=request.data, context=self.get_serializer_context(table)) - serializer.is_valid(raise_exception=True) - serializer.save() - # TODO refactor to use serializer for more DRY response logic - column_name_id_map = table.get_column_name_id_bidirectional_map() - table_pk_column_id = column_name_id_map[primary_key_column_name] - pk_value = serializer.data[table_pk_column_id] - paginator = TableLimitOffsetPagination() - record_filters = { - "equal": [ - {"column_name": [primary_key_column_name]}, - {"literal": [pk_value]} - ] - } - column_names_to_ids = table.get_column_name_id_bidirectional_map() - records = paginator.paginate_queryset( - table, - request, - table, - column_names_to_ids, - filters=record_filters - ) - serializer = RecordSerializer( - records, - many=True, - context=self.get_serializer_context(table) - ) - response = paginator.get_paginated_response(serializer.data) - response.status_code = status.HTTP_201_CREATED - return response - - def partial_update(self, request, pk=None, table_pk=None): - table = get_table_or_404(table_pk) - serializer = RecordSerializer( - {'id': pk}, - data=request.data, - context=self.get_serializer_context(table), - partial=True - ) - serializer.is_valid(raise_exception=True) - serializer.save() - # TODO refactor to use serializer for more DRY response logic - paginator = TableLimitOffsetPagination() - record_filters = { - "equal": [ - {"column_name": [table.primary_key_column_name]}, - {"literal": [pk]} - ] - } - column_names_to_ids = table.get_column_name_id_bidirectional_map() - records = paginator.paginate_queryset( - table, - request, - table, - column_names_to_ids, - filters=record_filters - ) - serializer = RecordSerializer( - records, - many=True, - context=self.get_serializer_context(table) - ) - return paginator.get_paginated_response(serializer.data) - - def destroy(self, request, pk=None, table_pk=None): - table = get_table_or_404(table_pk) - if table.get_record(pk) is None: - raise generic_api_exceptions.NotFoundAPIException( - NotFound, - error_code=ErrorCodes.RecordNotFound.value, - message="Record doesn't exist" - ) - try: - table.delete_record(pk) - except IntegrityError as e: - if isinstance(e.orig, ForeignKeyViolation): - raise database_api_exceptions.ForeignKeyViolationAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST, - referent_table=table, - ) - - return Response(status=status.HTTP_204_NO_CONTENT) - - def get_serializer_context(self, table): - columns_map = table.get_column_name_id_bidirectional_map() - context = {'columns_map': columns_map, 'table': table} - return context diff --git a/mathesar/api/db/viewsets/schemas.py b/mathesar/api/db/viewsets/schemas.py deleted file mode 
100644 index 153c081198..0000000000 --- a/mathesar/api/db/viewsets/schemas.py +++ /dev/null @@ -1,77 +0,0 @@ -from django_filters import rest_framework as filters -from rest_access_policy import AccessViewSetMixin -from rest_framework import status, viewsets -from rest_framework.decorators import action -from rest_framework.mixins import ListModelMixin, RetrieveModelMixin -from rest_framework.response import Response - -from mathesar.api.db.permissions.schema import SchemaAccessPolicy -from mathesar.api.dj_filters import SchemaFilter -from mathesar.api.pagination import DefaultLimitOffsetPagination -from mathesar.api.serializers.dependents import DependentSerializer, DependentFilterSerializer -from mathesar.api.serializers.schemas import SchemaSerializer -from mathesar.models.deprecated import Schema -from mathesar.utils.schemas import create_schema_and_object -from mathesar.api.exceptions.validation_exceptions.exceptions import EditingPublicSchemaIsDisallowed - - -class SchemaViewSet(AccessViewSetMixin, viewsets.GenericViewSet, ListModelMixin, RetrieveModelMixin): - serializer_class = SchemaSerializer - pagination_class = DefaultLimitOffsetPagination - filter_backends = (filters.DjangoFilterBackend,) - filterset_class = SchemaFilter - access_policy = SchemaAccessPolicy - - def get_queryset(self): - qs = Schema.objects.all().order_by('-created_at') - connection_id = self.request.query_params.get('connection_id') - if connection_id: - qs = qs.filter(database=connection_id) - return self.access_policy.scope_viewset_queryset(self.request, qs) - - def create(self, request): - serializer = SchemaSerializer(data=request.data, context={'request': request}) - serializer.is_valid(raise_exception=True) - connection_id = serializer.validated_data['database'].id - schema = create_schema_and_object( - serializer.validated_data['name'], - connection_id, - comment=serializer.validated_data.get('description') - ) - serializer = SchemaSerializer(schema, context={'request': request}) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - def partial_update(self, request, pk=None): - serializer = SchemaSerializer( - data=request.data, context={'request': request}, partial=True - ) - serializer.is_valid(raise_exception=True) - - schema = self.get_object() - - # We forbid editing the public schema - if schema.name == "public": - raise EditingPublicSchemaIsDisallowed() - - schema.update_sa_schema(serializer.validated_data) - - # Reload the schema to avoid cached properties - schema = self.get_object() - schema.clear_name_cache() - serializer = SchemaSerializer(schema, context={'request': request}) - return Response(serializer.data) - - def destroy(self, request, pk=None): - schema = self.get_object() - schema.delete_sa_schema() - return Response(status=status.HTTP_204_NO_CONTENT) - - @action(methods=['get'], detail=True) - def dependents(self, request, pk=None): - serializer = DependentFilterSerializer(data=request.GET) - serializer.is_valid(raise_exception=True) - types_exclude = serializer.validated_data['exclude'] - - schema = self.get_object() - serializer = DependentSerializer(schema.get_dependents(types_exclude), many=True, context={'request': request}) - return Response(serializer.data) diff --git a/mathesar/api/db/viewsets/table_settings.py b/mathesar/api/db/viewsets/table_settings.py deleted file mode 100644 index 62690e0ad0..0000000000 --- a/mathesar/api/db/viewsets/table_settings.py +++ /dev/null @@ -1,26 +0,0 @@ -from rest_access_policy import AccessViewSetMixin -from 
rest_framework.viewsets import ModelViewSet - -from mathesar.api.db.permissions.table_settings import TableSettingAccessPolicy -from mathesar.api.pagination import DefaultLimitOffsetPagination -from mathesar.api.serializers.table_settings import TableSettingsSerializer -from mathesar.api.utils import get_table_or_404 -from mathesar.models.deprecated import TableSettings - - -class TableSettingsViewSet(AccessViewSetMixin, ModelViewSet): - serializer_class = TableSettingsSerializer - pagination_class = DefaultLimitOffsetPagination - access_policy = TableSettingAccessPolicy - - def get_queryset(self): - return self.access_policy.scope_queryset( - self.request, - TableSettings.objects.filter(table=self.kwargs['table_pk']) - ) - - def get_serializer_context(self): - context = super().get_serializer_context() - context['table'] = get_table_or_404(self.kwargs['table_pk']) - - return context diff --git a/mathesar/api/db/viewsets/tables.py b/mathesar/api/db/viewsets/tables.py deleted file mode 100644 index 6bb4a533f6..0000000000 --- a/mathesar/api/db/viewsets/tables.py +++ /dev/null @@ -1,275 +0,0 @@ -import json -from django_filters import rest_framework as filters -from psycopg2.errors import CheckViolation, InvalidTextRepresentation -from rest_access_policy import AccessViewSetMixin -from rest_framework import status, viewsets -from rest_framework.decorators import action -from rest_framework.mixins import CreateModelMixin, ListModelMixin, RetrieveModelMixin -from rest_framework.response import Response -from rest_framework.permissions import IsAuthenticatedOrReadOnly -from sqlalchemy.exc import DataError, IntegrityError, ProgrammingError - -from db.types.exceptions import UnsupportedTypeException -from db.columns.exceptions import NotNullError, ForeignKeyError, TypeMismatchError, UniqueValueError, ExclusionError, ColumnMappingsNotFound -from mathesar.api.db.permissions.table import TableAccessPolicy -from mathesar.api.serializers.dependents import DependentFilterSerializer, DependentSerializer -from mathesar.api.utils import get_table_or_404 -from mathesar.api.dj_filters import TableFilter -from mathesar.api.exceptions.database_exceptions import ( - base_exceptions as database_base_api_exceptions, - exceptions as database_api_exceptions, -) -from mathesar.api.pagination import DefaultLimitOffsetPagination -from mathesar.api.serializers.tables import ( - SplitTableRequestSerializer, - SplitTableResponseSerializer, - TablePreviewSerializer, - TableSerializer, - TableImportSerializer, - MoveTableRequestSerializer -) -from mathesar.models.deprecated import Table -from mathesar.utils.tables import get_table_column_types -from mathesar.utils.joins import get_processed_joinable_tables - - -class TableViewSet(AccessViewSetMixin, CreateModelMixin, RetrieveModelMixin, ListModelMixin, viewsets.GenericViewSet): - serializer_class = TableSerializer - pagination_class = DefaultLimitOffsetPagination - filter_backends = (filters.DjangoFilterBackend,) - filterset_class = TableFilter - permission_classes = [IsAuthenticatedOrReadOnly] - access_policy = TableAccessPolicy - - def get_queryset(self): - # Better to use prefetch_related for schema and database, - # because select_related would lead to duplicate object instances and could result in multiple engines instances - # We prefetch `columns` using Django prefetch_related to get list of column objects and - # then prefetch column properties like `column name` using prefetch library. 
- return self.access_policy.scope_viewset_queryset(self.request, Table.objects.prefetch_related('schema', 'schema__database', 'columns').prefetch('_sa_table', 'columns').order_by('-created_at')) - - def partial_update(self, request, pk=None): - table = self.get_object() - serializer = TableSerializer( - table, data=request.data, context={'request': request}, partial=True - ) - serializer.is_valid(raise_exception=True) - serializer.save() - - # Reload the table to avoid cached properties - table = self.get_object() - serializer = TableSerializer(table, context={'request': request}) - return Response(serializer.data) - - def destroy(self, request, pk=None): - table = self.get_object() - table.delete_sa_table() - return Response(status=status.HTTP_204_NO_CONTENT) - - @action(methods=['get'], detail=True) - def dependents(self, request, pk=None): - serializer = DependentFilterSerializer(data=request.GET) - serializer.is_valid(raise_exception=True) - types_exclude = serializer.validated_data['exclude'] - - table = self.get_object() - serializer = DependentSerializer(table.get_dependents(types_exclude), many=True, context={'request': request}) - return Response(serializer.data) - - @action(methods=['get'], detail=True) - def ui_dependents(self, request, pk=None): - table = self.get_object() - ui_dependents = table.get_ui_dependents() - return Response(ui_dependents) - - @action(methods=['get'], detail=True) - def joinable_tables(self, request, pk=None): - table = self.get_object() - limit = request.query_params.get('limit') - offset = request.query_params.get('offset') - max_depth = request.query_params.get('max_depth', 3) - processed_joinable_tables = get_processed_joinable_tables( - table, limit=limit, offset=offset, max_depth=max_depth - ) - return Response(processed_joinable_tables) - - @action(methods=['get'], detail=True) - def type_suggestions(self, request, pk=None): - table = self.get_object() - columns_might_have_defaults = _get_boolean_query_param( - request, 'columns_might_have_defaults', True - ) - col_types = get_table_column_types( - table, - columns_might_have_defaults=columns_might_have_defaults, - ) - return Response(col_types) - - @action(methods=['post'], detail=True) - def split_table(self, request, pk=None): - table = self.get_object() - column_names_id_map = table.get_column_name_id_bidirectional_map() - serializer = SplitTableRequestSerializer(data=request.data, context={"request": request, 'table': table}) - if serializer.is_valid(raise_exception=True): - # We need to get the column names before splitting the table, - # as they are the only reference to the new column after it is moved to a new table - columns_to_extract = serializer.validated_data['extract_columns'] - extracted_table_name = serializer.validated_data['extracted_table_name'] - relationship_fk_column_name = serializer.validated_data['relationship_fk_column_name'] - extracted_table, remainder_table, remainder_fk_column = table.split_table( - columns_to_extract=columns_to_extract, - extracted_table_name=extracted_table_name, - column_names_id_map=column_names_id_map, - relationship_fk_column_name=relationship_fk_column_name - ) - split_table_response = { - 'extracted_table': extracted_table.id, - 'remainder_table': remainder_table.id, - 'fk_column': remainder_fk_column.id - } - response_serializer = SplitTableResponseSerializer(data=split_table_response) - response_serializer.is_valid(raise_exception=True) - return Response(response_serializer.data, status=status.HTTP_201_CREATED) - - 
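For orientation, the `split_table` action that ends above captures the column name-to-id map before splitting, because names are the only stable reference once columns move to the extracted table. Below is a minimal client-side sketch of driving this now-removed endpoint; the base URL, token, table id, and column ids are hypothetical placeholders, and the path assumes DRF's default `url_path` for the action.

```python
# Hedged sketch: exercising the deprecated split_table action shown above.
# Host, auth token, table id, and column ids are all hypothetical.
import requests

BASE = "http://localhost:8000/api/db/v0"  # assumed deployment URL
session = requests.Session()
session.headers["Authorization"] = "Token <api-token>"  # assumed token auth

payload = {
    "extract_columns": [12, 13],          # ids of columns to move out
    "extracted_table_name": "addresses",  # new table to create
    "relationship_fk_column_name": "address",
}
resp = session.post(f"{BASE}/tables/42/split_table/", json=payload)
resp.raise_for_status()
# Response shape per SplitTableResponseSerializer:
# {"extracted_table": <id>, "remainder_table": <id>, "fk_column": <id>}
print(resp.json())
```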
@action(methods=['post'], detail=True) - def move_columns(self, request, pk=None): - table = self.get_object() - serializer = MoveTableRequestSerializer(data=request.data, context={"request": request, 'table': table}) - if serializer.is_valid(raise_exception=True): - target_table = serializer.validated_data['target_table'] - move_columns = serializer.validated_data['move_columns'] - table.move_columns( - columns_to_move=move_columns, - target_table=target_table, - ) - return Response(status=status.HTTP_201_CREATED) - - @action(methods=['post'], detail=True) - def previews(self, request, pk=None): - table = self.get_object() - serializer = TablePreviewSerializer(data=request.data, context={"request": request, 'table': table}) - serializer.is_valid(raise_exception=True) - columns_field_key = "columns" - columns = serializer.data[columns_field_key] - table_data = TableSerializer(table, context={"request": request}).data - try: - preview_records = table.get_preview(columns) - except (DataError, IntegrityError) as e: - if type(e.orig) is InvalidTextRepresentation or type(e.orig) is CheckViolation: - raise database_api_exceptions.InvalidTypeCastAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST, - field='columns' - ) - else: - raise database_base_api_exceptions.IntegrityAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST, - field='columns' - ) - except UnsupportedTypeException as e: - raise database_api_exceptions.UnsupportedTypeAPIException( - e, - field='columns', - status_code=status.HTTP_400_BAD_REQUEST - ) - table_data.update( - { - # There's no way to reflect actual column data without - # creating a view, so we just use the submission, assuming - # no errors means we changed to the desired names and types - "columns": columns, - "records": preview_records - } - ) - - return Response(table_data) - - @action(methods=['post'], detail=True) - def existing_import(self, request, pk=None): - temp_table = self.get_object() - serializer = TableImportSerializer(data=request.data, context={"request": request}) - serializer.is_valid(raise_exception=True) - target_table = serializer.validated_data['import_target'] - data_files = serializer.validated_data['data_files'] - mappings = serializer.validated_data['mappings'] - - try: - temp_table.insert_records_to_existing_table( - target_table, data_files, mappings - ) - except NotNullError as e: - raise database_api_exceptions.NotNullViolationAPIException( - e, - message='Null values cannot be inserted into this column', - status_code=status.HTTP_400_BAD_REQUEST - ) - except ForeignKeyError as e: - raise database_api_exceptions.ForeignKeyViolationAPIException( - e, - message='Cannot add an invalid reference to a record', - status_code=status.HTTP_400_BAD_REQUEST - ) - except TypeMismatchError as e: - raise database_api_exceptions.TypeMismatchViolationAPIException( - e, - message='Type mismatch error', - status_code=status.HTTP_400_BAD_REQUEST - ) - except UniqueValueError as e: - raise database_api_exceptions.UniqueImportViolationAPIException( - e, - message='This column has uniqueness constraint set so non-unique values cannot be inserted', - status_code=status.HTTP_400_BAD_REQUEST - ) - except ExclusionError as e: - raise database_api_exceptions.ExclusionViolationAPIException( - e, - message='This record violates exclusion constraint', - status_code=status.HTTP_400_BAD_REQUEST - ) - except IntegrityError as e: - raise database_base_api_exceptions.IntegrityAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST - ) - except 
ProgrammingError as e: - raise database_base_api_exceptions.ProgrammingAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST - ) - # Reload the table to avoid cached properties - existing_table = get_table_or_404(target_table.id) - serializer = TableSerializer( - existing_table, context={'request': request} - ) - table_data = serializer.data - return Response(table_data) - - @action(methods=['post'], detail=True) - def map_imported_columns(self, request, pk=None): - temp_table = self.get_object() - target_table_id = request.data.get('import_target', None) - target_table = get_table_or_404(target_table_id) - try: - mappings = temp_table.suggest_col_mappings_for_import(target_table) - return Response({"mappings": mappings}) - except ColumnMappingsNotFound as e: - raise database_api_exceptions.ColumnMappingsNotFound( - e, - status_code=status.HTTP_400_BAD_REQUEST - ) - - -def _get_boolean_query_param(request, name, default): - """ - Deserializes a query parameter from JSON into a boolean. A default is provided in case the - parameter is undefined. - """ - value = request.query_params.get(name) - if value is not None: - value = json.loads(value) - value = bool(value) - return value - else: - return default diff --git a/mathesar/api/dj_filters.py b/mathesar/api/dj_filters.py index 70d2cddb4b..1a0f670763 100644 --- a/mathesar/api/dj_filters.py +++ b/mathesar/api/dj_filters.py @@ -1,28 +1,15 @@ -from django_filters import BooleanFilter, DateTimeFromToRangeFilter, OrderingFilter -from django_property_filter import PropertyFilterSet, PropertyBaseInFilter, PropertyCharFilter, PropertyOrderingFilter +from django_property_filter import ( + PropertyFilterSet, PropertyBaseInFilter, PropertyCharFilter, + PropertyOrderingFilter +) -from mathesar.models.deprecated import Schema, Table, Connection, DataFile -from mathesar.models.query import Exploration +from mathesar.models.deprecated import DataFile class CharInFilter(PropertyBaseInFilter, PropertyCharFilter): pass -class DatabaseFilter(PropertyFilterSet): - sort_by = OrderingFilter( - fields=( - ('id', 'id'), - ('name', 'name'), - ), - label="Sort By", - ) - - class Meta: - model = Connection - fields = ['deleted'] - - class DataFileFilter(PropertyFilterSet): database = CharInFilter(field_name='table_imported_to__schema__database__name', lookup_expr='in') name = CharInFilter(field_name='name', lookup_expr='in') @@ -38,57 +25,3 @@ class DataFileFilter(PropertyFilterSet): class Meta: model = DataFile fields = ['name'] - - -class SchemaFilter(PropertyFilterSet): - database = CharInFilter(field_name='database__name', lookup_expr='in') - name = CharInFilter(field_name='name', lookup_expr='in') - - sort_by = PropertyOrderingFilter( - fields=( - ('id', 'id'), - ('name', 'name'), - ), - label="Sort By", - ) - - class Meta: - model = Schema - fields = ['name'] - - -class TableFilter(PropertyFilterSet): - database = CharInFilter(field_name='schema__database__name', lookup_expr='in') - name = CharInFilter(field_name='name', lookup_expr='in') - created = DateTimeFromToRangeFilter(field_name='created_at') - updated = DateTimeFromToRangeFilter(field_name='updated_at') - not_imported = BooleanFilter(lookup_expr="isnull", field_name='import_verified') - - sort_by = PropertyOrderingFilter( - fields=( - ('id', 'id'), - ('name', 'name'), - ), - label="Sort By", - ) - - class Meta: - model = Table - fields = ['name', 'schema', 'created_at', 'updated_at', 'import_verified'] - - -class ExplorationFilter(PropertyFilterSet): - database = 
CharInFilter(field_name='base_table__schema__database__name', lookup_expr='in') - name = CharInFilter(field_name='name', lookup_expr='in') - - sort_by = PropertyOrderingFilter( - fields=( - ('id', 'id'), - ('name', 'name'), - ), - label="Sort By", - ) - - class Meta: - model = Exploration - fields = ['name'] diff --git a/mathesar/api/exceptions/data_import_exceptions/base_exceptions.py b/mathesar/api/exceptions/data_import_exceptions/base_exceptions.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/mathesar/api/exceptions/database_exceptions/exceptions.py b/mathesar/api/exceptions/database_exceptions/exceptions.py index 07e2039cf6..ed176fa07a 100644 --- a/mathesar/api/exceptions/database_exceptions/exceptions.py +++ b/mathesar/api/exceptions/database_exceptions/exceptions.py @@ -1,20 +1,15 @@ -import warnings from rest_framework import status -from db.columns.operations.select import get_column_attnum_from_name from db.constraints.operations.select import ( get_constraint_oid_by_name_and_table_oid, - get_fkey_constraint_oid_by_name_and_referent_table_oid, ) from db.columns.exceptions import InvalidTypeError -from mathesar.api.exceptions.database_exceptions.base_exceptions import ProgrammingAPIException from mathesar.api.exceptions.error_codes import ErrorCodes from mathesar.api.exceptions.generic_exceptions.base_exceptions import ( MathesarAPIException, get_default_exception_detail, ) -from mathesar.models.deprecated import Column, Constraint -from mathesar.state import get_cached_metadata +from mathesar.models.deprecated import Constraint class UniqueViolationAPIException(MathesarAPIException): @@ -65,83 +60,6 @@ def __init__( self.status_code = status_code -class CheckViolationAPIException(MathesarAPIException): - error_code = ErrorCodes.CheckViolation.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - if details is None and exception.orig.diag.constraint_name == 'email_check': - # TODO find a way to identify which column is actually causing the email violation - message = exception.orig.diag.message_primary - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class DuplicateTableAPIException(ProgrammingAPIException): - # Default message is not needed as the exception string provides enough details - error_code = ErrorCodes.DuplicateTableError.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class DuplicateColumnAPIException(ProgrammingAPIException): - # Default message is not needed as the exception string provides enough details - error_code = ErrorCodes.DuplicateColumnError.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class InvalidDefaultAPIException(MathesarAPIException): - # Default message is not needed as the exception string provides enough details - error_code = ErrorCodes.InvalidDefault.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class 
InvalidTypeOptionAPIException(MathesarAPIException): - # Default message is not needed as the exception string provides enough details - error_code = ErrorCodes.InvalidTypeOption.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - class InvalidTypeCastAPIException(MathesarAPIException): # Default message is not needed as the exception string provides enough details error_code = ErrorCodes.InvalidTypeCast.value @@ -163,41 +81,6 @@ def err_msg(exception): return 'Invalid type cast requested.' -class DynamicDefaultAPIException(MathesarAPIException): - # Default message is not needed as the exception string provides enough details - error_code = ErrorCodes.UndefinedFunction.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class StaticDefaultAssignmentToDynamicDefaultException(MathesarAPIException): - error_code = ErrorCodes.DynamicDefaultAlterationToStaticDefault.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, self.err_msg(exception, message), field, details, status_code) - - @staticmethod - def err_msg(exception, message): - if type(exception) is DynamicDefaultModificationError and exception.column: - return f'Dynamic Default of {exception.column.name} column can not be altered.' - return message - - class UnsupportedTypeAPIException(MathesarAPIException): # Default message is not needed as the exception string provides enough details error_code = ErrorCodes.UnsupportedType.value @@ -213,250 +96,6 @@ def __init__( super().__init__(exception, self.error_code, message, field, details, status_code) -class BadFilterAPIException(MathesarAPIException): - error_code = ErrorCodes.UnsupportedType.value - - def __init__( - self, - exception, - message="Filter arguments are not correct", - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class BadSortAPIException(MathesarAPIException): - # Default message is not needed as the exception string provides enough details - error_code = ErrorCodes.UnsupportedType.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class BadGroupAPIException(MathesarAPIException): - # Default message is not needed as the exception string provides enough details - error_code = ErrorCodes.UnsupportedType.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class RaiseExceptionAPIException(MathesarAPIException): - """ - Exception raised inside a postgres function - """ - error_code = ErrorCodes.RaiseException.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - 
super().__init__(exception, self.error_code, message, field, details, status_code) - - -class UndefinedFunctionAPIException(MathesarAPIException): - # Default message is not needed as the exception string provides enough details - error_code = ErrorCodes.UndefinedFunction.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class NotNullViolationAPIException(MathesarAPIException): - """ - Exception raised when trying to: - - - Add not null constraint to column with null value - or when trying to add non-null value to a column with not null constraint - - or - - - Import/insert a null value to a column with not null constraint - """ - error_code = ErrorCodes.NotNullViolation.value - - def __init__( - self, exception, - message=None, - field=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - table=None - ): - try: - exception_diagnostics = exception.orig.diag - except Exception: - exception_diagnostics = exception.diag - message_str = message if message is not None else exception_diagnostics.message_primary - column_attnum = get_column_attnum_from_name( - table.oid, - exception_diagnostics.column_name, - table.schema._sa_engine, - metadata=get_cached_metadata(), - ) - column = Column.objects.get(attnum=column_attnum, table=table) - details = { - 'record_detail': exception_diagnostics.message_detail, - 'column_id': column.id - } - super().__init__(exception, self.error_code, message_str, field, details, status_code) - - -class TypeMismatchViolationAPIException(MathesarAPIException): - """ Exception raised when trying to insert a non castable datatype value to a column with certain datatype""" - error_code = ErrorCodes.TypeMismatchViolation.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class ForeignKeyViolationAPIException(MathesarAPIException): - """ Exception raised when trying to add an invalid reference to a primary key """ - error_code = ErrorCodes.ForeignKeyViolation.value - - def __init__( - self, - exception, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - referent_table=None, - ): - try: - diagnostics = exception.orig.diag - message = diagnostics.message_detail if message is None else message - details = {} if details is None else details - constraint_oid = get_fkey_constraint_oid_by_name_and_referent_table_oid( - diagnostics.constraint_name, - referent_table.oid, - referent_table._sa_engine, - ) - constraint = Constraint.objects.get(table=referent_table, oid=constraint_oid) - details.update({ - "constraint": constraint.id, - "constraint_columns": [c.id for c in constraint.columns], - "constraint_referent_columns": [c.id for c in constraint.referent_columns], - "constraint_referent_table": constraint.referent_columns[0].table.id, - }) - except Exception: - warnings.warn("Could not enrich Exception") - super().__init__( - exception, self.error_code, message, field, details, status_code - ) - - -class UniqueImportViolationAPIException(MathesarAPIException): - """ Exception raised when trying to add duplicate values to a column with uniqueness constraint """ - error_code = ErrorCodes.UniqueImportViolation.value - - def __init__( - self, - exception, - 
message=None, - field=None, - details=None, - status_code=status.HTTP_400_BAD_REQUEST - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class ExclusionViolationAPIException(MathesarAPIException): - error_code = ErrorCodes.ExclusionViolation.value - - def __init__( - self, - exception, - message="The requested update violates an exclusion constraint", - field=None, - details=None, - table=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - if details is None and table is not None: - details = {} - try: - constraint_oid = get_constraint_oid_by_name_and_table_oid( - exception.orig.diag.constraint_name, - table.oid, - table._sa_engine - ) - constraint = Constraint.objects.get(table=table, oid=constraint_oid) - details = { - "constraint": constraint.id, - "constraint_columns": [c.id for c in constraint.columns], - } - except Exception: - warnings.warn("Could not enrich Exception") - details.update( - { - "original_details": exception.orig.diag.message_detail, - } - ) - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class InvalidDateAPIException(MathesarAPIException): - error_code = ErrorCodes.InvalidDateError.value - - def __init__( - self, - exception, - message="Invalid date", - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - -class InvalidDateFormatAPIException(MathesarAPIException): - error_code = ErrorCodes.InvalidDateFormatError.value - - def __init__( - self, - exception, - message="Invalid date format", - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, self.error_code, message, field, details, status_code) - - class ColumnMappingsNotFound(MathesarAPIException): error_code = ErrorCodes.MappingsNotFound.value @@ -471,22 +110,6 @@ def __init__( super().__init__(exception, self.error_code, message, field, details, status_code) -class IdentifierTooLong(MathesarAPIException): - error_code = ErrorCodes.IdentifierTooLong.value - - def __init__( - self, - exception=None, - message="Identifier is longer than Postgres' limit of 63 bytes.", - field=None, - details=None, - status_code=status.HTTP_400_BAD_REQUEST - ): - if exception is None: - exception = Exception(message) - super().__init__(exception, self.error_code, message, field, details, status_code) - - class InvalidJSONFormat(MathesarAPIException): error_code = ErrorCodes.InvalidJSONFormat.value @@ -533,8 +156,3 @@ def __init__( if exception is None: exception = Exception(message) super().__init__(exception, self.error_code, message, field, details, status_code) - - -class DynamicDefaultModificationError(Exception): - def __init__(self, column=None): - self.column = column diff --git a/mathesar/api/exceptions/error_codes.py b/mathesar/api/exceptions/error_codes.py index d4abf2b1ea..90dd217266 100644 --- a/mathesar/api/exceptions/error_codes.py +++ b/mathesar/api/exceptions/error_codes.py @@ -40,22 +40,16 @@ class ErrorCodes(Enum): UniqueImportViolation = 4303 # Validation Error - BadDBCredentials = 4428 - ColumnSizeMismatch = 4401 DistinctColumnNameRequired = 4402 MappingsNotFound = 4417 - MultipleDataFiles = 4400 MoneyDisplayOptionConflict = 4407 UnsupportedAlter = 4403 URLDownloadError = 4404 URLNotReachableError = 4405 URLInvalidContentType = 4406 UnknownDBType = 4408 - InvalidColumnOrder = 4430 InvalidDateError = 4413 InvalidDateFormatError = 4414 - 
InvalidLinkChoice = 4409 - InvalidTableName = 4420 IncompatibleFractionDigitValues = 4410 UnsupportedConstraint = 4411 ConstraintColumnEmpty = 4412 @@ -64,10 +58,7 @@ class ErrorCodes(Enum): DeletedColumnAccess = 4418 IncorrectOldPassword = 4419 EditingPublicSchema = 4421 - DuplicateExplorationInSchema = 4422 - IdentifierTooLong = 4423 DynamicDefaultAlterationToStaticDefault = 4424 InvalidJSONFormat = 4425 UnsupportedJSONFormat = 4426 UnsupportedFileFormat = 4427 - UnsupportedInstallationDatabase = 4429 diff --git a/mathesar/api/exceptions/exception_converters.py b/mathesar/api/exceptions/exception_converters.py deleted file mode 100644 index b6d4a3795a..0000000000 --- a/mathesar/api/exceptions/exception_converters.py +++ /dev/null @@ -1,25 +0,0 @@ -from django.utils.encoding import force_str -from rest_framework_friendly_errors.settings import FRIENDLY_EXCEPTION_DICT - -from mathesar.api.exceptions.error_codes import ErrorCodes - - -def default_api_exception_converter(exc, response): - if isinstance(response.data, list): - converted_data = [] - for data in response.data: - new_data = { - 'code': ErrorCodes.NonClassifiedError.value, - 'message': data['message'] if 'message' in data else force_str(exc), - 'details': data.get('detail', {}) - } - converted_data.append(new_data) - return converted_data - else: - error_code = FRIENDLY_EXCEPTION_DICT.get(exc.__class__.__name__) - error_data = { - 'code': error_code, - 'message': force_str(exc), - 'details': response.data.pop('detail', {}) - } - return [error_data] diff --git a/mathesar/api/exceptions/exceptions.py b/mathesar/api/exceptions/exceptions.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/mathesar/api/exceptions/generic_exceptions/base_exceptions.py b/mathesar/api/exceptions/generic_exceptions/base_exceptions.py index 3308f8a171..cb29b409aa 100644 --- a/mathesar/api/exceptions/generic_exceptions/base_exceptions.py +++ b/mathesar/api/exceptions/generic_exceptions/base_exceptions.py @@ -55,94 +55,3 @@ class GenericAPIException(MathesarAPIException): def __init__(self, error_body_list, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR): self.detail = [error_body._asdict() for error_body in error_body_list] self.status_code = status_code - - -class TypeErrorAPIException(MathesarAPIException): - # Default message is not needed as the exception string provides enough details - - def __init__( - self, - exception, - error_code=ErrorCodes.TypeError.value, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, error_code, message, field, details, status_code) - - -class NotFoundAPIException(MathesarAPIException): - - def __init__( - self, - exception, - error_code=ErrorCodes.NotFound.value, - message=None, - field=None, - details=None, - status_code=status.HTTP_404_NOT_FOUND - ): - exception_detail = get_default_exception_detail(exception, error_code, message, field, details)._asdict() - self.detail = [exception_detail] - self.status_code = status_code - - -class MethodNotAllowedAPIException(MathesarAPIException): - - def __init__( - self, - exception, - error_code=ErrorCodes.MethodNotAllowed.value, - message=None, - field=None, - details=None, - status_code=status.HTTP_405_METHOD_NOT_ALLOWED - ): - exception_detail = get_default_exception_detail(exception, error_code, message, field, details)._asdict() - self.detail = [exception_detail] - self.status_code = status_code - - -class ValueAPIException(MathesarAPIException): - # Default message is not 
needed as the exception string provides enough details - - def __init__( - self, - exception, - error_code=ErrorCodes.ValueError.value, - message=None, - field=None, - details=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - super().__init__(exception, error_code, message, field, details, status_code) - - -class NetworkException(MathesarAPIException): - # Default message is not needed as the exception string provides enough details - - def __init__( - self, - exception, - error_code=ErrorCodes.NetworkError.value, - message=None, - field=None, - details=None, - status_code=status.HTTP_503_SERVICE_UNAVAILABLE - ): - super().__init__(exception, error_code, message, field, details, status_code) - - -class BadDBCredentials(MathesarAPIException): - error_code = ErrorCodes.BadDBCredentials.value - - def __init__( - self, - exception, - field=None, - detail=None, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR - ): - message = f"Bad credentials for connecting to the requested database. The reported error is {exception.args[0]}" - super().__init__(exception, self.error_code, message, field, detail, status_code) diff --git a/mathesar/api/exceptions/generic_exceptions/exceptions.py b/mathesar/api/exceptions/generic_exceptions/exceptions.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/mathesar/api/exceptions/query_exceptions/exceptions.py b/mathesar/api/exceptions/query_exceptions/exceptions.py index e929df91c9..9b24583d16 100644 --- a/mathesar/api/exceptions/query_exceptions/exceptions.py +++ b/mathesar/api/exceptions/query_exceptions/exceptions.py @@ -1,34 +1,6 @@ -from rest_framework import status - -from mathesar.api.exceptions.error_codes import ErrorCodes -from mathesar.api.exceptions.generic_exceptions.base_exceptions import MathesarAPIException - - class DeletedColumnAccess(Exception): def __init__( self, column_id ): self.column_id = column_id - - -class DeletedColumnAccessAPIException(MathesarAPIException): - """ - - """ - error_code = ErrorCodes.DeletedColumnAccess.value - - def __init__( - self, - exception, - query, - message="Query contains an deleted column", - field=None, - status_code=status.HTTP_400_BAD_REQUEST, - ): - - details = { - 'query': query, - 'column_id': exception.column_id - } - super().__init__(exception, self.error_code, message, field, details, status_code) diff --git a/mathesar/api/exceptions/validation_exceptions/exceptions.py b/mathesar/api/exceptions/validation_exceptions/exceptions.py index 9187c717aa..4a4a54397a 100644 --- a/mathesar/api/exceptions/validation_exceptions/exceptions.py +++ b/mathesar/api/exceptions/validation_exceptions/exceptions.py @@ -2,91 +2,6 @@ from mathesar.api.exceptions.validation_exceptions.base_exceptions import MathesarValidationException -class DuplicateExplorationInSchemaAPIException(MathesarValidationException): - error_code = ErrorCodes.DuplicateExplorationInSchema.value - - def __init__( - self, - message="Exploration names must be unique per schema", - field=None, - details=None, - ): - super().__init__(None, self.error_code, message, field, details) - - -class DistinctColumnRequiredAPIException(MathesarValidationException): - error_code = ErrorCodes.DistinctColumnNameRequired.value - - def __init__( - self, - message="Column names must be distinct", - field=None, - details=None, - ): - super().__init__(None, self.error_code, message, field, details) - - -class ColumnSizeMismatchAPIException(MathesarValidationException): - error_code = ErrorCodes.ColumnSizeMismatch.value - - def __init__( 
- self, - message="Incorrect number of columns in request.", - field=None, - details=None, - ): - super().__init__(None, self.error_code, message, field, details) - - -class InvalidLinkChoiceAPIException(MathesarValidationException): - error_code = ErrorCodes.InvalidLinkChoice.value - - def __init__( - self, - message="Invalid Link type", - field=None, - details=None, - ): - super().__init__(None, self.error_code, message, field, details) - - -class MultipleDataFileAPIException(MathesarValidationException): - error_code = ErrorCodes.MultipleDataFiles.value - - def __init__( - self, - message="Multiple data files are unsupported.", - field=None, - details=None, - ): - super().__init__(None, self.error_code, message, field, details) - - -class UnknownDatabaseTypeIdentifier(MathesarValidationException): - error_code = ErrorCodes.UnknownDBType.value - - def __init__( - self, - db_type_id, - field=None, - details=None, - ): - message = f"Unknown database type identifier {db_type_id}." - super().__init__(None, self.error_code, message, field, details) - - -class MoneyDisplayOptionValueConflictAPIException(MathesarValidationException): - error_code = ErrorCodes.MoneyDisplayOptionConflict.value - - def __init__( - self, - message="Money type cannot specify a currency code display option as well as other display options.", - field=None, - details=None, - ): - super().__init__(None, self.error_code, message, field, details) - - class IncompatibleFractionDigitValuesAPIException(MathesarValidationException): error_code = ErrorCodes.IncompatibleFractionDigitValues.value @@ -99,29 +14,6 @@ def __init__( super().__init__(None, self.error_code, message, field, details) -class UnsupportedConstraintAPIException(MathesarValidationException): - error_code = ErrorCodes.UnsupportedConstraint.value - - def __init__( - self, - constraint_type, - field=None, - ): - message = f"Operations related to {constraint_type} constraint are currently not supported" - super().__init__(None, self.error_code, message, field, None) - - -class ConstraintColumnEmptyAPIException(MathesarValidationException): - error_code = ErrorCodes.ConstraintColumnEmpty.value - - def __init__( - self, - field=None, - ): - message = "Constraint column field cannot be empty" - super().__init__(None, self.error_code, message, field, None) - - class InvalidValueType(MathesarValidationException): error_code = ErrorCodes.InvalidValueType.value @@ -148,32 +40,6 @@ def __init__( super().__init__(None, self.error_code, message, field, None) -class UnsupportedInstallationDatabase(MathesarValidationException): - error_code = ErrorCodes.UnsupportedInstallationDatabase.value - - def __init__( - self, - message=None, - field=None, - ): - if message is None: - message = "Installing on the internal database isn't allowed." - super().__init__(None, self.error_code, message, field, None) - - -class InvalidTableName(MathesarValidationException): - error_code = ErrorCodes.InvalidTableName.value - - def __init__( - self, - table_name, - message=None, - field=None, - ): - message = f'Table name "{table_name}" is invalid.' - super().__init__(None, self.error_code, message, field, None) - - class IncorrectOldPassword(MathesarValidationException): error_code = ErrorCodes.IncorrectOldPassword.value @@ -183,25 +49,3 @@ def __init__( ): message = "Old password is not correct." 
super().__init__(None, self.error_code, message, field, None) - - -class EditingPublicSchemaIsDisallowed(MathesarValidationException): - error_code = ErrorCodes.EditingPublicSchema.value - - def __init__( - self, - message="Editing the public schema is disallowed.", - field=None, - ): - super().__init__(None, self.error_code, message, field) - - -class InvalidColumnOrder(MathesarValidationException): - error_code = ErrorCodes.InvalidColumnOrder.value - - def __init__( - self, - message="Invalid column order.", - field=None, - ): - super().__init__(None, self.error_code, message, field, None) diff --git a/mathesar/api/pagination.py b/mathesar/api/pagination.py index fad6397ea2..ec64669b37 100644 --- a/mathesar/api/pagination.py +++ b/mathesar/api/pagination.py @@ -3,12 +3,6 @@ from rest_framework.pagination import LimitOffsetPagination from rest_framework.response import Response -from db.records.operations.group import GroupBy -from mathesar.api.utils import get_table_or_404, process_annotated_records -from mathesar.models.deprecated import Column, Table -from mathesar.models.query import Exploration -from mathesar.utils.preview import get_preview_info - class DefaultLimitOffsetPagination(LimitOffsetPagination): default_limit = 50 @@ -23,126 +17,3 @@ def get_paginated_response(self, data): ] ) ) - - -class ColumnLimitOffsetPagination(DefaultLimitOffsetPagination): - - def paginate_queryset(self, queryset, request, table_id): - self.limit = self.get_limit(request) - if self.limit is None: - self.limit = self.default_limit - self.offset = self.get_offset(request) - table = get_table_or_404(pk=table_id) - self.count = len(table.sa_columns) - self.request = request - return list(table.sa_columns)[self.offset:self.offset + self.limit] - - -class TableLimitOffsetPagination(DefaultLimitOffsetPagination): - def get_paginated_response(self, data): - return Response( - self.get_wrapped_with_metadata(data) - ) - - def get_wrapped_with_metadata(self, data): - return OrderedDict( - [ - ('count', self.count), - ('grouping', self.grouping), - ('preview_data', self.preview_data), - ('results', data) - ] - ) - - def paginate_queryset( - self, - queryset, - request, - table, - column_name_id_bidirectional_map=None, - filters=None, - order_by=None, - grouping=None, - search=None, - duplicate_only=None, - ): - if order_by is None: - order_by = [] - if grouping is None: - grouping = {} - if search is None: - search = [] - group_by = GroupBy(**grouping) if grouping else None - self.limit = self.get_limit(request) - if self.limit is None: - self.limit = self.default_limit - self.offset = self.get_offset(request) - # TODO: Cache count value somewhere, since calculating it is expensive. - self.count = table.sa_num_records(filter=filters, search=search) - self.request = request - - preview_metadata = None - # Only tables have columns on the Service layer that hold data necessary for preview template. 
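The pagination class being deleted in this hunk wraps each page of records in extra metadata (`grouping`, `preview_data`) on top of DRF's usual `count`/`results`. A self-contained sketch of that wrapping pattern follows; it is not Mathesar's exact code, and the metadata slots are stubbed to `None`.

```python
# Generic DRF pattern: a LimitOffsetPagination subclass that emits
# metadata keys alongside the usual count/results envelope.
from django.conf import settings

if not settings.configured:
    settings.configure()  # minimal config so DRF imports standalone

from collections import OrderedDict

from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response


class MetadataLimitOffsetPagination(LimitOffsetPagination):
    default_limit = 50

    def paginate_queryset(self, queryset, request, view=None):
        # The real class computes grouping/preview metadata from request
        # arguments; this sketch only initializes the slots it would fill.
        self.grouping = None
        self.preview_data = None
        return super().paginate_queryset(queryset, request, view=view)

    def get_paginated_response(self, data):
        return Response(OrderedDict([
            ('count', self.count),
            ('grouping', self.grouping),
            ('preview_data', self.preview_data),
            ('results', data),
        ]))
```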
- if isinstance(table, Table): - columns_query = Column.objects.filter(table_id=table.id).select_related('table__schema__database').prefetch('name') - preview_metadata, preview_columns = get_preview_info(table.id) - table_columns = [{'id': column.id, 'alias': column.name} for column in columns_query] - columns_to_fetch = table_columns + preview_columns - - query = Exploration(name="preview", base_table=table, initial_columns=columns_to_fetch) - else: - query = table - records = query.get_records( - limit=self.limit, - offset=self.offset, - filter=filters, - order_by=order_by, - group_by=group_by, - search=search, - duplicate_only=duplicate_only - ) - - return self.process_records(records, column_name_id_bidirectional_map, group_by, preview_metadata) - - def process_records(self, records, column_name_id_bidirectional_map, group_by, preview_metadata): - if records: - processed_records, groups, preview_data = process_annotated_records( - records, - column_name_id_bidirectional_map, - preview_metadata - ) - else: - processed_records, groups, preview_data = None, None, None - - if group_by: - # NOTE when column name<->id map is None, we output column names. - # That's the case in query record listing. - if column_name_id_bidirectional_map: - columns = [ - column_name_id_bidirectional_map[n] - for n - in group_by.columns - ] - else: - columns = group_by.columns - self.grouping = { - 'columns': columns, - 'mode': group_by.mode, - 'num_groups': group_by.num_groups, - 'bound_tuples': group_by.bound_tuples, - 'count_by': group_by.count_by, - 'global_min': group_by.global_min, - 'global_max': group_by.global_max, - 'preproc': group_by.preproc, - 'prefix_length': group_by.prefix_length, - 'extract_field': group_by.extract_field, - 'ranged': group_by.ranged, - 'groups': groups, - } - else: - self.grouping = None - if preview_metadata: - self.preview_data = preview_data - else: - self.preview_data = None - return processed_records diff --git a/mathesar/api/permission_conditions.py b/mathesar/api/permission_conditions.py index d2936f2870..3521fa4c96 100644 --- a/mathesar/api/permission_conditions.py +++ b/mathesar/api/permission_conditions.py @@ -1,6 +1,3 @@ -from mathesar.api.utils import get_table_or_404, SHARED_LINK_UUID_QUERY_PARAM -from mathesar.api.permission_utils import TableAccessInspector - # These are available to all AccessPolicy instances # https://rsinger86.github.io/drf-access-policy/reusable_conditions/ @@ -12,22 +9,3 @@ def is_superuser(request, view, action): def is_self(request, view, action): user = view.get_object() return request.user == user - - -def is_atleast_manager_nested_table_resource(request, view, action): - table = get_table_or_404(view.kwargs['table_pk']) - return TableAccessInspector(request.user, table).is_atleast_manager() - - -def is_atleast_editor_nested_table_resource(request, view, action): - table = get_table_or_404(view.kwargs['table_pk']) - return TableAccessInspector(request.user, table).is_atleast_editor() - - -def is_atleast_viewer_nested_table_resource(request, view, action): - table = get_table_or_404(view.kwargs['table_pk']) - return TableAccessInspector( - request.user, - table, - token=request.query_params.get(SHARED_LINK_UUID_QUERY_PARAM) - ).is_atleast_viewer() diff --git a/mathesar/api/permission_utils.py b/mathesar/api/permission_utils.py deleted file mode 100644 index 0f9ec02410..0000000000 --- a/mathesar/api/permission_utils.py +++ /dev/null @@ -1,120 +0,0 @@ -from abc import ABC, abstractmethod - -from mathesar.api.utils import is_valid_uuid_v4 
-from mathesar.models.users import DatabaseRole, Role, SchemaRole -from mathesar.models.shares import SharedTable, SharedQuery - - -class AbstractAccessInspector(ABC): - @abstractmethod - def __init__(self, user): - self.user = user - - @abstractmethod - def is_role_present(self, allowed_roles): - pass - - def has_role(self, allowed_roles): - if self.user.is_superuser: - return True - - if self.user.is_anonymous: - return False - - return self.is_role_present(allowed_roles) - - def is_atleast_manager(self): - return self.has_role([Role.MANAGER.value]) - - def is_atleast_editor(self): - allowed_roles = [Role.MANAGER.value, Role.EDITOR.value] - return self.has_role(allowed_roles) - - def is_atleast_viewer(self): - allowed_roles = [Role.MANAGER.value, Role.EDITOR.value, Role.VIEWER.value] - return self.has_role(allowed_roles) - - -class DatabaseAccessInspector(AbstractAccessInspector): - def __init__(self, user, database): - super().__init__(user) - self.database = database - - def is_role_present(self, allowed_roles): - has_db_role = DatabaseRole.objects.filter( - user=self.user, - database=self.database, - role__in=allowed_roles - ).exists() - - return has_db_role - - -class SchemaAccessInspector(AbstractAccessInspector): - def __init__(self, user, schema): - super().__init__(user) - self.schema = schema - self.db_access_inspector = DatabaseAccessInspector(self.user, self.schema.database) - - def is_role_present(self, allowed_roles): - has_db_role = self.db_access_inspector.has_role(allowed_roles) - - has_schema_role = SchemaRole.objects.filter( - user=self.user, - schema=self.schema, - role__in=allowed_roles - ).exists() - - return has_db_role or has_schema_role - - -class TableAccessInspector(AbstractAccessInspector): - def __init__(self, user, table, token=None): - super().__init__(user) - self.table = table - self.token = token if is_valid_uuid_v4(token) else None - self.schema_access_inspector = SchemaAccessInspector(self.user, self.table.schema) - - # Currently, there's no access controls on individual tables. - # If users have access to db or schema, they have access to the tables within them. - def is_role_present(self, allowed_roles): - return self.schema_access_inspector.has_role(allowed_roles) - - def is_atleast_viewer(self): - if self.token is not None: - is_table_shared = SharedTable.objects.filter( - table=self.table, - slug=self.token, - enabled=True - ).exists() - - if is_table_shared: - return True - - return super().is_atleast_viewer() - - -class QueryAccessInspector(AbstractAccessInspector): - def __init__(self, user, query, token=None): - super().__init__(user) - self.query = query - self.token = token if is_valid_uuid_v4(token) else None - self.schema_access_inspector = SchemaAccessInspector(self.user, self.query.base_table.schema) - - # Currently, there's no access controls on individual queries. - # If users have access to db or schema, they have access to the queries within them. 
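The access inspectors deleted below all reduce to one idea: roles form a strict hierarchy (viewer < editor < manager), and a valid share-link token can stand in for Viewer access on the specific shared object. A standalone sketch of that reduction, with illustrative names rather than Mathesar's actual API:

```python
# Role hierarchy with a share-token override for the viewer level.
from enum import Enum


class Role(str, Enum):
    VIEWER = "viewer"
    EDITOR = "editor"
    MANAGER = "manager"


ROLE_ORDER = [Role.VIEWER, Role.EDITOR, Role.MANAGER]


def has_at_least(user_role, required, token_grants_view=False):
    """True if user_role meets required; a valid share token grants VIEWER."""
    if token_grants_view and required == Role.VIEWER:
        return True
    return ROLE_ORDER.index(user_role) >= ROLE_ORDER.index(required)


assert has_at_least(Role.EDITOR, Role.VIEWER)
assert not has_at_least(Role.VIEWER, Role.MANAGER)
assert has_at_least(Role.VIEWER, Role.VIEWER, token_grants_view=True)
```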
- def is_role_present(self, allowed_roles): - return self.schema_access_inspector.has_role(allowed_roles) - - def is_atleast_viewer(self): - if self.token is not None: - is_query_shared = SharedQuery.objects.filter( - query=self.query, - slug=self.token, - enabled=True - ).exists() - - if is_query_shared: - return True - - return super().is_atleast_viewer() diff --git a/mathesar/api/serializers/columns.py b/mathesar/api/serializers/columns.py deleted file mode 100644 index 3abaa7e1b1..0000000000 --- a/mathesar/api/serializers/columns.py +++ /dev/null @@ -1,264 +0,0 @@ -from rest_framework import serializers, status -from rest_framework.exceptions import ValidationError -from rest_framework.fields import empty, SerializerMethodField -from rest_framework.settings import api_settings - -from db.identifiers import is_identifier_too_long -from db.columns.exceptions import InvalidTypeError -from mathesar.api.exceptions.database_exceptions.exceptions import DynamicDefaultModificationError -from db.columns.exceptions import InvalidTypeOptionError -from db.types.base import PostgresType, MathesarCustomType -from db.types.operations.convert import get_db_type_enum_from_id -from mathesar.api.exceptions.database_exceptions import ( - exceptions as database_api_exceptions -) -from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin -from mathesar.api.serializers.shared_serializers import ( - DisplayOptionsMappingSerializer, - DISPLAY_OPTIONS_SERIALIZER_MAPPING_KEY, -) -from mathesar.models.deprecated import Column - - -class InputValueField(serializers.CharField): - """ - Takes in an arbitrary value. Emulates the record creation endpoint, - which takes in arbitrary values (un-validated and un-processed request.data). - This field replicates that behavior in a serializer. - """ - - def to_internal_value(self, data): - return data - - def to_representation(self, value): - return value - - -class TypeOptionSerializer(MathesarErrorMessageMixin, serializers.Serializer): - length = serializers.IntegerField(required=False) - precision = serializers.IntegerField(required=False) - scale = serializers.IntegerField(required=False) - fields = serializers.CharField(required=False) - - def validate(self, attrs): - db_type = self.context.get('db_type', None) - scale = attrs.get('scale', None) - precision = attrs.get('precision', None) - if ( - db_type == PostgresType.NUMERIC - and (scale is None) != (precision is None) - ): - raise database_api_exceptions.InvalidTypeOptionAPIException( - InvalidTypeOptionError, - message='Both scale and precision fields are required.', - status_code=status.HTTP_400_BAD_REQUEST, - ) - return super().validate(attrs) - - def run_validation(self, data=empty): - # Ensure that there are no unknown type options passed in. 
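The `TypeOptionSerializer.validate` method above enforces a both-or-neither rule for numeric `scale` and `precision`. Since that pairing is easy to get wrong, here is a tiny standalone demo of the same check in plain Python, with no Mathesar imports assumed:

```python
# Both-or-neither validation for numeric type options, mirroring the
# check in TypeOptionSerializer.validate above. Standalone sketch.
def validate_numeric_options(options):
    scale = options.get("scale")
    precision = options.get("precision")
    # XOR on presence: exactly one of the two being set is invalid.
    if (scale is None) != (precision is None):
        raise ValueError("Both scale and precision fields are required.")
    return options


validate_numeric_options({"precision": 10, "scale": 2})  # ok
validate_numeric_options({})                             # ok: neither given
try:
    validate_numeric_options({"precision": 10})          # scale missing
except ValueError as e:
    print(e)
```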
- if data is not empty and data is not None: - unknown = set(data) - set(self.fields) - if unknown: - errors = ['Unknown field: {}'.format(field) for field in unknown] - raise serializers.ValidationError({ - api_settings.NON_FIELD_ERRORS_KEY: errors, - }) - - return super(TypeOptionSerializer, self).run_validation(data) - - -TYPE_KEY = 'type' -DISPLAY_OPTIONS_KEY = 'display_options' - - -class SimpleColumnSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): - class Meta: - model = Column - fields = ('id', - 'name', - TYPE_KEY, - 'type_options', - DISPLAY_OPTIONS_KEY, - ) - id = serializers.IntegerField(required=False) - name = serializers.CharField() - # TODO consider renaming type and type_options to db_type and db_type_options - # The name of below attribute should match value of TYPE_KEY - type = serializers.CharField() - type_options = TypeOptionSerializer(required=False, allow_null=True) - # The name of below attribute should match value of DISPLAY_OPTIONS_KEY - display_options = DisplayOptionsMappingSerializer(required=False, allow_null=True) - - def to_representation(self, instance): - if isinstance(instance, dict): - db_type_id = instance.get(TYPE_KEY) - db_type = get_db_type_enum_from_id(db_type_id) - else: - db_type = instance.db_type - # TODO replace or remove this assert before production - assert db_type is not None - self.context[DISPLAY_OPTIONS_SERIALIZER_MAPPING_KEY] = db_type - representation = super().to_representation(instance) - _force_canonical_type(representation, db_type) - return representation - - def to_internal_value(self, data): - if self.partial and TYPE_KEY not in data: - db_type = getattr(self.instance, 'db_type', None) - else: - db_type_id = data.get(TYPE_KEY, None) - db_type = get_db_type_enum_from_id(db_type_id) if db_type_id else None - self.context[DISPLAY_OPTIONS_SERIALIZER_MAPPING_KEY] = db_type - return super().to_internal_value(data) - - def validate_name(self, name): - if is_identifier_too_long(name): - raise database_api_exceptions.IdentifierTooLong(field='name') - return name - - -def _force_canonical_type(representation, db_type): - """ - Sometimes the representation's TYPE_KEY attribute will also include type option information - (e.g. `numeric(3, 5)`). We override the attribute's value to a canonical type id. - - This might be better solved upstream, but since our Column model subclasses SA's Column, - overriding its TYPE_KEY attribute, might interfere with SA's workings. 
- """ - representation[TYPE_KEY] = db_type.id - return representation - - -class ColumnDefaultSerializer(MathesarErrorMessageMixin, serializers.Serializer): - value = InputValueField() - is_dynamic = serializers.BooleanField(read_only=True) - - -class ColumnSerializer(SimpleColumnSerializer): - class Meta(SimpleColumnSerializer.Meta): - fields = SimpleColumnSerializer.Meta.fields + ( - 'nullable', - 'primary_key', - 'source_column', - 'copy_source_data', - 'copy_source_constraints', - 'valid_target_types', - 'default', - 'has_dependents', - 'description', - ) - model_fields = (DISPLAY_OPTIONS_KEY,) - - name = serializers.CharField(required=False, allow_blank=True) - description = serializers.CharField( - required=False, allow_blank=True, default=None, allow_null=True - ) - - # From scratch fields - type = serializers.CharField(required=False) - nullable = serializers.BooleanField(default=True) - primary_key = serializers.BooleanField(default=False) - default = ColumnDefaultSerializer( - source='column_default_dict', required=False, allow_null=True, default=None - ) - - # From duplication fields - source_column = serializers.PrimaryKeyRelatedField(queryset=Column.current_objects.all(), required=False, write_only=True) - copy_source_data = serializers.BooleanField(default=True, write_only=True) - copy_source_constraints = serializers.BooleanField(default=True, write_only=True) - - # Read only fields - valid_target_types = SerializerMethodField(method_name='get_valid_target_types', read_only=True) - - def validate(self, data): - data = super().validate(data) - # Reevaluate column display options based on the new column type. - if self.partial and 'column_default_dict' in data: - if self.instance is not None and self.instance.column_default_dict: - if 'is_dynamic' in self.instance.column_default_dict: - if self.instance.column_default_dict['is_dynamic'] is True: - raise database_api_exceptions.StaticDefaultAssignmentToDynamicDefaultException( - DynamicDefaultModificationError(self.instance), - status_code=status.HTTP_400_BAD_REQUEST - ) - if TYPE_KEY in data and self.instance: - db_type = get_db_type_enum_from_id(data[TYPE_KEY].lower()) - target_types = self.instance.valid_target_types - if db_type not in target_types: - raise database_api_exceptions.InvalidTypeCastAPIException( - InvalidTypeError, - status_code=status.HTTP_400_BAD_REQUEST - ) - if DISPLAY_OPTIONS_KEY not in data: - db_type = getattr(self.instance, 'db_type', None) - # Invalidate display_options if type has been changed - if db_type is not None: - if str(db_type.id) != data[TYPE_KEY]: - data[DISPLAY_OPTIONS_KEY] = None - if not self.partial: - from_scratch_required_fields = [TYPE_KEY] - from_scratch_specific_fields = [TYPE_KEY, 'nullable', 'primary_key'] - from_dupe_required_fields = ['source_column'] - from_dupe_specific_fields = ['source_column', 'copy_source_data', - 'copy_source_constraints'] - - # Note that we run validation on self.initial_data, as `data` has defaults - # filled in for fields that weren't specified by the request - from_scratch_required_all = all([ - f in self.initial_data for f in from_scratch_required_fields - ]) - from_scratch_specific_in = [ - f for f in from_scratch_specific_fields if f in self.initial_data - ] - from_dupe_required_all = all([ - f in self.initial_data for f in from_dupe_required_fields - ]) - from_dupe_specific_in = [ - f for f in from_dupe_specific_fields if f in self.initial_data - ] - - if len(from_dupe_specific_in) and len(from_scratch_specific_in): - raise ValidationError( - 
f'{from_scratch_specific_in} cannot be passed in if ' - f'{from_dupe_specific_in} has also been passed in.' - ) - elif not from_dupe_required_all and not from_scratch_required_all: - # We default to from scratch required fields if no fields are passed - if len(from_dupe_specific_in) and not len(from_scratch_specific_in): - required_fields = from_dupe_required_fields - else: - required_fields = from_scratch_required_fields - raise ValidationError({ - f: ['This field is required.'] - for f in required_fields - if f not in self.initial_data - }) - return data - - def to_representation(self, instance): - # Set default display_options for mathesar_money type if none are provided. - if ( - instance.db_type == MathesarCustomType.MATHESAR_MONEY - and instance.display_options is None - ): - instance.display_options = { - 'use_grouping': 'true', - 'number_format': None, - 'currency_symbol': None, - 'maximum_fraction_digits': 2, - 'minimum_fraction_digits': 2, - 'currency_symbol_location': 'after-minus'} - return super().to_representation(instance) - - @property - def validated_model_fields(self): - return {key: self.validated_data[key] for key in self.validated_data if key in self.Meta.model_fields} - - def get_valid_target_types(self, column): - valid_target_types = column.valid_target_types - if valid_target_types: - valid_target_type_ids = tuple( - db_type.id for db_type in valid_target_types - ) - return valid_target_type_ids diff --git a/mathesar/api/serializers/constraints.py b/mathesar/api/serializers/constraints.py deleted file mode 100644 index 15401f170b..0000000000 --- a/mathesar/api/serializers/constraints.py +++ /dev/null @@ -1,177 +0,0 @@ -from psycopg.errors import DuplicateTable, UniqueViolation -from rest_framework import serializers, status - -from db.constraints import utils as constraint_utils -from db.identifiers import is_identifier_too_long -from db.constraints.base import ForeignKeyConstraint, UniqueConstraint - -import mathesar.api.exceptions.database_exceptions.exceptions as database_api_exceptions -from mathesar.api.exceptions.validation_exceptions.exceptions import ( - ConstraintColumnEmptyAPIException, UnsupportedConstraintAPIException, - InvalidTableName -) -from mathesar.api.serializers.shared_serializers import ( - MathesarPolymorphicErrorMixin, - ReadWritePolymorphicSerializerMappingMixin, -) -from mathesar.models.deprecated import Column, Constraint, Table - - -class TableFilteredPrimaryKeyRelatedField(serializers.PrimaryKeyRelatedField): - """ - Limits the accepted related primary key values to a specific table. - For example, if the PrimaryKeyRelatedField is instantiated with a - Column queryset, only columns in the "associated table" are - accepted. The "associated table" is defined by the context dict's - `table_id` value. 
- """ - def get_queryset(self): - table_id = self.context.get('table_id', None) - queryset = super(TableFilteredPrimaryKeyRelatedField, self).get_queryset() - if table_id is None or not queryset: - return None - return queryset.filter(table__id=table_id) - - -class BaseConstraintSerializer(serializers.ModelSerializer): - name = serializers.CharField(required=False) - type = serializers.CharField() - columns = TableFilteredPrimaryKeyRelatedField(queryset=Column.current_objects, many=True) - - class Meta: - model = Constraint - fields = ['id', 'name', 'type', 'columns'] - - def construct_constraint_obj(self, table, data): - columns_attnum = [column.attnum for column in data.get('columns')] - if data.get('type') == constraint_utils.ConstraintType.UNIQUE.value: - return UniqueConstraint(data.get('name', None), table.oid, columns_attnum) - return None - - def create(self, validated_data): - table = self.context['table'] - constraint_obj = self.construct_constraint_obj(table, validated_data) - # Additional check is needed because we support read operations for primary key constraint, - # but we don't support write operations - if constraint_obj is None: - constraint_type = validated_data.get('type', None) - raise UnsupportedConstraintAPIException(constraint_type=constraint_type, field='type') - try: - constraint = table.add_constraint(constraint_obj) - except DuplicateTable as e: - raise database_api_exceptions.DuplicateTableAPIException( - e, - message='Relation with the same name already exists', - status_code=status.HTTP_400_BAD_REQUEST - ) - except UniqueViolation as e: - raise database_api_exceptions.UniqueViolationAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST - ) - return constraint - - def validate_name(self, name): - if is_identifier_too_long(name): - raise database_api_exceptions.IdentifierTooLong(field='name') - return name - - -class ForeignKeyConstraintSerializer(BaseConstraintSerializer): - class Meta: - model = Constraint - fields = BaseConstraintSerializer.Meta.fields + [ - 'referent_columns', - 'referent_table', - 'onupdate', - 'ondelete', - 'deferrable', - 'match' - ] - - referent_columns = serializers.PrimaryKeyRelatedField(queryset=Column.current_objects.all(), many=True) - referent_table = serializers.SerializerMethodField() - onupdate = serializers.ChoiceField( - choices=['RESTRICT', 'CASCADE', 'SET NULL', 'NO ACTION', 'SET DEFAULT'], - required=False, - allow_null=True - ) - ondelete = serializers.ChoiceField( - choices=['RESTRICT', 'CASCADE', 'SET NULL', 'NO ACTION', 'SET DEFAULT'], - required=False, - allow_null=True - ) - deferrable = serializers.BooleanField(allow_null=True, required=False) - match = serializers.ChoiceField(choices=['SIMPLE', 'PARTIAL', 'FULL'], allow_null=True, required=False) - - def get_referent_table(self, obj): - return obj.referent_columns[0].table.id - - def construct_constraint_obj(self, table, data): - columns_attnum = [column.attnum for column in data.get('columns')] - referent_columns = data.get('referent_columns') - referent_columns_attnum = [column.attnum for column in referent_columns] - constraint_options_fields = ['onupdate', 'ondelete', 'deferrable'] - constraint_options = { - constraint_options_field: data[constraint_options_field] - for constraint_options_field in constraint_options_fields if constraint_options_field in data - } - return ForeignKeyConstraint( - data.get('name', None), - table.oid, - columns_attnum, - referent_columns[0].table.oid, - referent_columns_attnum, - constraint_options - ) - - -class 
ConstraintSerializer(
-    ReadWritePolymorphicSerializerMappingMixin,
-    MathesarPolymorphicErrorMixin,
-    serializers.ModelSerializer
-):
-    class Meta:
-        model = Constraint
-        fields = '__all__'
-
-    serializers_mapping = {
-        'foreignkey': ForeignKeyConstraintSerializer,
-        'primary': BaseConstraintSerializer,
-        'unique': BaseConstraintSerializer,
-        # Even though 'check' & 'exclude' constraints are currently unsupported,
-        # they're included here so that the app doesn't break in case these
-        # constraints are already present.
-        'check': BaseConstraintSerializer,
-        'exclude': BaseConstraintSerializer
-    }
-
-    def get_mapping_field(self, data):
-        if isinstance(data, Constraint):
-            constraint_type = data.type
-        else:
-            constraint_type = data.get('type', None)
-        assert constraint_type is not None
-        return constraint_type
-
-    def create(self, validated_data):
-        serializer = self.get_serializer_class(self.get_mapping_field(validated_data))
-        return serializer.create(validated_data)
-
-    def run_validation(self, data):
-        if referent_table := data.get('referent_table', None):
-            referent_table_name = Table.current_objects.get(id=referent_table).name
-            if any(
-                invalid_char in referent_table_name
-                for invalid_char in ('(', ')')
-            ):
-                raise InvalidTableName(
-                    referent_table_name,
-                    field='referent_table'
-                )
-        constraint_type = data.get('type', None)
-        if constraint_type not in ('foreignkey', 'primary', 'unique'):
-            raise UnsupportedConstraintAPIException(constraint_type=constraint_type)
-        columns = data.get('columns', None)
-        if columns == []:
-            raise ConstraintColumnEmptyAPIException(field='columns')
-        return super(ConstraintSerializer, self).run_validation(data)
diff --git a/mathesar/api/serializers/databases.py b/mathesar/api/serializers/databases.py
deleted file mode 100644
index 4ddb3e76e4..0000000000
--- a/mathesar/api/serializers/databases.py
+++ /dev/null
@@ -1,44 +0,0 @@
-from django.urls import reverse
-from rest_framework import serializers
-
-from mathesar.api.display_options import DISPLAY_OPTIONS_BY_UI_TYPE
-from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin
-from mathesar.models.deprecated import Connection
-
-
-class ConnectionSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer):
-    supported_types_url = serializers.SerializerMethodField()
-    nickname = serializers.CharField(source='name')
-    database = serializers.CharField(source='db_name')
-
-    class Meta:
-        model = Connection
-        fields = ['id', 'nickname', 'database', 'supported_types_url', 'username', 'password', 'host', 'port']
-        read_only_fields = ['id', 'supported_types_url']
-        extra_kwargs = {
-            'password': {'write_only': True}
-        }
-
-    def get_supported_types_url(self, obj):
-        if isinstance(obj, Connection) and not self.partial:
-            # Only build the URL if we are serializing an existing connection
-            request = self.context['request']
-            return request.build_absolute_uri(reverse('connection-types', kwargs={'pk': obj.pk}))
-        else:
-            return None
-
-
-class TypeSerializer(MathesarErrorMessageMixin, serializers.Serializer):
-    identifier = serializers.CharField()
-    name = serializers.CharField()
-    db_types = serializers.ListField(child=serializers.CharField())
-    display_options = serializers.DictField()
-
-    def to_representation(self, ui_type):
-        primitive = dict(
-            identifier=ui_type.id,
-            name=ui_type.display_name,
-            db_types=ui_type.db_types,
-            display_options=DISPLAY_OPTIONS_BY_UI_TYPE.get(ui_type, None),
-        )
-        return super().to_representation(primitive)
diff --git a/mathesar/api/serializers/db_types.py
b/mathesar/api/serializers/db_types.py deleted file mode 100644 index b96b701d5b..0000000000 --- a/mathesar/api/serializers/db_types.py +++ /dev/null @@ -1,17 +0,0 @@ -from rest_framework import serializers - -from db.types.hintsets import db_types_hinted - - -class DBTypeSerializer(serializers.Serializer): - id = serializers.CharField() - hints = serializers.ListField(child=serializers.DictField()) - - def to_representation(self, db_type): - # TODO solve db type casing holistically - # https://github.com/centerofci/mathesar/issues/1036 - uppercase_id = db_type.id.upper() - return { - "id": uppercase_id, - "hints": db_types_hinted.get(db_type, None), - } diff --git a/mathesar/api/serializers/dependents.py b/mathesar/api/serializers/dependents.py deleted file mode 100644 index 1b7751793b..0000000000 --- a/mathesar/api/serializers/dependents.py +++ /dev/null @@ -1,80 +0,0 @@ -from mathesar.api.serializers.shared_serializers import MathesarPolymorphicErrorMixin, ReadOnlyPolymorphicSerializerMappingMixin -from rest_framework import serializers - -from mathesar.models.deprecated import Constraint, Schema, Table - - -DATABASE_OBJECT_TYPES = [ - 'table', - 'table column', - 'table constraint', - 'view', - 'index', - 'trigger', - 'sequence', - 'type', - 'function' -] - - -class DependentMathesarObjectSerializer(serializers.Serializer): - id = serializers.CharField() - type = serializers.CharField() - attnum = serializers.CharField(required=False) - - def _get_object_type(self, instance): - return instance.get('type', None) - - # TODO: get ids of supported objects on a previous step in batches - def to_representation(self, instance): - object_oid = instance.get('objid', None) - object_type = self._get_object_type(instance) - object_id = 0 - - if object_type == 'table' or object_type == 'table column': - object_id = Table.objects.get(oid=object_oid).id - if object_type == 'table column': - instance['attnum'] = instance.get('objsubid', None) - elif object_type == 'table constraint': - object_id = Constraint.objects.get(oid=object_oid).id - elif object_type == 'schema': - object_id = Schema.objects.get(oid=object_oid).id - - instance['id'] = object_id - return super().to_representation(instance) - - -class DependentNonMathesarObjectSerializer(serializers.Serializer): - objid = serializers.CharField() - type = serializers.CharField() - name = serializers.CharField() - - -class BaseDependentObjectSerializer( - ReadOnlyPolymorphicSerializerMappingMixin, - MathesarPolymorphicErrorMixin, - serializers.Serializer -): - serializers_mapping = { - 'table': DependentMathesarObjectSerializer, - 'table constraint': DependentMathesarObjectSerializer, - 'schema': DependentMathesarObjectSerializer, - 'table column': DependentMathesarObjectSerializer - } - - def create(self, validated_data): - serializer = self.get_serializer_class(self.get_mapping_field(validated_data)) - return serializer.create(validated_data) - - def get_mapping_field(self, data): - return data.get('type', None) - - -class DependentSerializer(serializers.Serializer): - obj = BaseDependentObjectSerializer() - parent_obj = BaseDependentObjectSerializer() - level = serializers.IntegerField() - - -class DependentFilterSerializer(serializers.Serializer): - exclude = serializers.MultipleChoiceField(choices=DATABASE_OBJECT_TYPES, required=False) diff --git a/mathesar/api/serializers/filters.py b/mathesar/api/serializers/filters.py deleted file mode 100644 index 42cbaf68cf..0000000000 --- a/mathesar/api/serializers/filters.py +++ /dev/null @@ -1,17 +0,0 
@@ -from rest_framework import serializers - - -class ParameterSerializer(serializers.Serializer): - ui_types = serializers.ListField(child=serializers.CharField()) - - -class AliasSerializer(serializers.Serializer): - alias = serializers.CharField() - ui_types = serializers.ListField(child=serializers.CharField()) - - -class FilterSerializer(serializers.Serializer): - id = serializers.CharField() - name = serializers.CharField() - aliases = serializers.ListField(child=AliasSerializer(), required=False) - parameters = serializers.ListField(child=ParameterSerializer()) diff --git a/mathesar/api/serializers/functions.py b/mathesar/api/serializers/functions.py deleted file mode 100644 index 3685e4cae2..0000000000 --- a/mathesar/api/serializers/functions.py +++ /dev/null @@ -1,7 +0,0 @@ -from rest_framework import serializers - - -class DBFunctionSerializer(serializers.Serializer): - id = serializers.CharField() - name = serializers.CharField() - hints = serializers.ListField(child=serializers.DictField()) diff --git a/mathesar/api/serializers/links.py b/mathesar/api/serializers/links.py deleted file mode 100644 index fe36bf7326..0000000000 --- a/mathesar/api/serializers/links.py +++ /dev/null @@ -1,124 +0,0 @@ -from rest_access_policy import PermittedPkRelatedField -from rest_framework import serializers - -from db.links.operations.create import create_foreign_key_link, create_many_to_many_link -from mathesar.api.db.permissions.table import TableAccessPolicy -from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin -from mathesar.api.exceptions.validation_exceptions.exceptions import ( - InvalidLinkChoiceAPIException, InvalidTableName -) -from mathesar.api.serializers.shared_serializers import ( - MathesarPolymorphicErrorMixin, - ReadWritePolymorphicSerializerMappingMixin, -) -from mathesar.models.deprecated import Table -from mathesar.state import reset_reflection - - -class OneToOneSerializer(MathesarErrorMessageMixin, serializers.Serializer): - reference_column_name = serializers.CharField() - reference_table = PermittedPkRelatedField(access_policy=TableAccessPolicy, queryset=Table.current_objects.all()) - referent_table = PermittedPkRelatedField(access_policy=TableAccessPolicy, queryset=Table.current_objects.all()) - # TODO Fix hacky link_type detection by reflecting it correctly - link_type = serializers.CharField(default="one-to-one") - - def is_link_unique(self): - return True - - def validate(self, attrs): - if referent_table := attrs.get('referent_table', None): - referent_table_name = referent_table.name - if any( - invalid_char in referent_table_name - for invalid_char in ('(', ')') - ): - raise InvalidTableName( - referent_table_name, - field='referent_table' - ) - return super(OneToOneSerializer, self).validate(attrs) - - def create(self, validated_data): - reference_table = validated_data['reference_table'] - create_foreign_key_link( - reference_table.schema._sa_engine, - validated_data.get('reference_column_name'), - reference_table.oid, - validated_data.get('referent_table').oid, - unique_link=self.is_link_unique() - ) - reset_reflection(db_name=reference_table.schema.database.name) - return validated_data - - -class OneToManySerializer(OneToOneSerializer): - link_type = serializers.CharField(default="one-to-many") - - def is_link_unique(self): - return False - - -class MapColumnSerializer(MathesarErrorMessageMixin, serializers.Serializer): - column_name = serializers.CharField() - referent_table = PermittedPkRelatedField(access_policy=TableAccessPolicy, 
queryset=Table.current_objects.all()) - - -class ManyToManySerializer(MathesarErrorMessageMixin, serializers.Serializer): - referents = MapColumnSerializer(many=True) - mapping_table_name = serializers.CharField() - link_type = serializers.CharField(default="many-to-many") - - def validate(self, attrs): - if referents := attrs.get('referents', None): - referent_tables = ( - referent_table.get('referent_table').name - for referent_table in referents - ) - for referent_table_name in referent_tables: - if any( - invalid_char in referent_table_name - for invalid_char in ('(', ')') - ): - raise InvalidTableName( - referent_table_name, - field='referents' - ) - return super(ManyToManySerializer, self).validate(attrs) - - def create(self, validated_data): - referents = validated_data['referents'] - referents_dict = { - 'referent_table_oids': [i['referent_table'].oid for i in referents], - 'column_names': [i['column_name'] for i in referents] - } - create_many_to_many_link( - referents[0]['referent_table'].schema._sa_engine, - referents[0]['referent_table'].schema.oid, - validated_data.get('mapping_table_name'), - referents_dict, - ) - reset_reflection(db_name=referents[0]['referent_table'].schema.database.name) - return validated_data - - -class LinksMappingSerializer( - MathesarPolymorphicErrorMixin, - ReadWritePolymorphicSerializerMappingMixin, - serializers.Serializer -): - def create(self, validated_data): - serializer = self.serializers_mapping.get(self.get_mapping_field(validated_data)) - return serializer.create(validated_data) - - serializers_mapping = { - "one-to-one": OneToOneSerializer, - "one-to-many": OneToManySerializer, - "many-to-many": ManyToManySerializer - } - link_type = serializers.CharField(required=True) - - def get_mapping_field(self, data): - link_type = data.get('link_type', None) - if link_type is None: - raise InvalidLinkChoiceAPIException() - return link_type diff --git a/mathesar/api/serializers/queries.py b/mathesar/api/serializers/queries.py deleted file mode 100644 index e49be3cea7..0000000000 --- a/mathesar/api/serializers/queries.py +++ /dev/null @@ -1,99 +0,0 @@ -from django.core.exceptions import ValidationError -from django.urls import reverse -from django.db.models import Q - -from rest_access_policy import PermittedPkRelatedField -from rest_framework import serializers - -from mathesar.api.db.permissions.query_table import QueryTableAccessPolicy -from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin -from mathesar.api.exceptions.validation_exceptions.exceptions import DuplicateExplorationInSchemaAPIException -from mathesar.models.deprecated import Table -from mathesar.models.query import Exploration - - -class BaseQuerySerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): - schema = serializers.SerializerMethodField('get_schema') - base_table = PermittedPkRelatedField( - access_policy=QueryTableAccessPolicy, - queryset=Table.current_objects.all() - ) - - class Meta: - model = Exploration - fields = ['schema', 'initial_columns', 'transformations', 'base_table', 'display_names'] - - def get_schema(self, uiquery): - base_table = uiquery.base_table - if base_table: - return base_table.schema.id - - def validate(self, attrs): - unexpected_fields = set(self.initial_data) - set(self.fields) - if unexpected_fields: - raise ValidationError(f"Unexpected field(s): {unexpected_fields}") - self._validate_uniqueness(attrs) - return attrs - - def _validate_uniqueness(self, attrs): - """ - Uniqueness is only defined when both name and 
base_table are defined.
-
-        Would be nice to define this in terms of Django's UniqueConstraint, but that doesn't seem
-        possible, due to schema being a child property of base_table.
-        """
-        name = attrs.get('name')
-        if name:
-            base_table = attrs.get('base_table')
-            if base_table:
-                schema = base_table.schema
-                is_duplicate_q = self._get_is_duplicate_q(name, schema)
-                duplicates = Exploration.objects.filter(is_duplicate_q)
-                if duplicates.exists():
-                    raise DuplicateExplorationInSchemaAPIException(field='name')
-
-    def _get_is_duplicate_q(self, name, schema):
-        has_same_name_q = Q(name=name)
-        has_same_schema_q = Q(base_table__schema=schema)
-        is_duplicate_q = has_same_name_q & has_same_schema_q
-        is_update = self.instance is not None
-        if is_update:
-            # If this is an update, filter self out of found duplicates
-            id = self.instance.id
-            is_not_this_instance_q = ~Q(id=id)
-            is_duplicate_q = is_duplicate_q & is_not_this_instance_q
-        return is_duplicate_q
-
-
-class QuerySerializer(BaseQuerySerializer):
-    results_url = serializers.SerializerMethodField('get_results_url')
-    records_url = serializers.SerializerMethodField('get_records_url')
-    columns_url = serializers.SerializerMethodField('get_columns_url')
-
-    class Meta:
-        model = Exploration
-        fields = '__all__'
-
-    def get_records_url(self, obj):
-        if isinstance(obj, Exploration) and obj.pk is not None:
-            # Only get records_url if we are serializing an existing persisted Exploration
-            request = self.context['request']
-            return request.build_absolute_uri(reverse('query-records', kwargs={'pk': obj.pk}))
-        else:
-            return None
-
-    def get_columns_url(self, obj):
-        if isinstance(obj, Exploration) and obj.pk is not None:
-            # Only get columns_url if we are serializing an existing persisted Exploration
-            request = self.context['request']
-            return request.build_absolute_uri(reverse('query-columns', kwargs={'pk': obj.pk}))
-        else:
-            return None
-
-    def get_results_url(self, obj):
-        if isinstance(obj, Exploration) and obj.pk is not None:
-            # Only get results_url if we are serializing an existing persisted Exploration
-            request = self.context['request']
-            return request.build_absolute_uri(reverse('query-results', kwargs={'pk': obj.pk}))
-        else:
-            return None
diff --git a/mathesar/api/serializers/records.py b/mathesar/api/serializers/records.py
deleted file mode 100644
index 506b277c9c..0000000000
--- a/mathesar/api/serializers/records.py
+++ /dev/null
@@ -1,122 +0,0 @@
-from psycopg2.errors import NotNullViolation, UniqueViolation, CheckViolation, ExclusionViolation
-from rest_framework import serializers
-from rest_framework import status
-from sqlalchemy.exc import IntegrityError
-from db.records.exceptions import InvalidDate, InvalidDateFormat
-
-import mathesar.api.exceptions.database_exceptions.exceptions as database_api_exceptions
-from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin
-from mathesar.models.deprecated import Column
-from mathesar.api.utils import follows_json_number_spec
-from mathesar.database.types import UIType
-
-
-class RecordListParameterSerializer(MathesarErrorMessageMixin, serializers.Serializer):
-    filter = serializers.JSONField(required=False, default=None)
-    order_by = serializers.JSONField(required=False, default=[])
-    grouping = serializers.JSONField(required=False, default={})
-    duplicate_only = serializers.JSONField(required=False, default=None)
-    search_fuzzy = serializers.JSONField(required=False, default=[])
-
-
-class RecordSerializer(MathesarErrorMessageMixin, serializers.BaseSerializer):
-    def
update(self, instance, validated_data): - table = self.context['table'] - try: - record = table.update_record(instance['id'], validated_data) - except InvalidDate as e: - raise database_api_exceptions.InvalidDateAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST - ) - except InvalidDateFormat as e: - raise database_api_exceptions.InvalidDateFormatAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST - ) - except IntegrityError as e: - if type(e.orig) is NotNullViolation: - raise database_api_exceptions.NotNullViolationAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST, - table=table - ) - elif type(e.orig) is UniqueViolation: - raise database_api_exceptions.UniqueViolationAPIException( - e, - message="The requested update violates a uniqueness constraint", - table=table, - status_code=status.HTTP_400_BAD_REQUEST, - ) - elif type(e.orig) is CheckViolation: - raise database_api_exceptions.CheckViolationAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST - ) - elif type(e.orig) is ExclusionViolation: - raise database_api_exceptions.ExclusionViolationAPIException( - e, - table=table, - status_code=status.HTTP_400_BAD_REQUEST, - ) - else: - raise database_api_exceptions.MathesarAPIException(e, status_code=status.HTTP_400_BAD_REQUEST) - return record - - def create(self, validated_data): - table = self.context['table'] - try: - record = table.create_record_or_records(validated_data) - except IntegrityError as e: - if type(e.orig) is NotNullViolation: - raise database_api_exceptions.NotNullViolationAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST, - table=table - ) - elif type(e.orig) is UniqueViolation: - raise database_api_exceptions.UniqueViolationAPIException( - e, - message="The requested insert violates a uniqueness constraint", - table=table, - status_code=status.HTTP_400_BAD_REQUEST, - ) - elif type(e.orig) is CheckViolation: - raise database_api_exceptions.CheckViolationAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST - ) - elif type(e.orig) is ExclusionViolation: - raise database_api_exceptions.ExclusionViolationAPIException( - e, - table=table, - status_code=status.HTTP_400_BAD_REQUEST, - ) - else: - raise database_api_exceptions.MathesarAPIException(e, status_code=status.HTTP_400_BAD_REQUEST) - return record - - def to_representation(self, instance): - records = instance._asdict() if not isinstance(instance, dict) else instance - columns_map = self.context['columns_map'] - records = {columns_map[column_name]: column_value for column_name, column_value in records.items()} - return records - - def to_internal_value(self, data): - columns_map = self.context['columns_map'].inverse - data = {columns_map[int(column_id)]: value for column_id, value in data.items()} - # If the data type of the column is number then the value must be an integer - # or a string which follows JSON number spec. 
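For context, the check the comment above refers to can be sketched standalone. This condensed single-pattern variant is illustrative only; the original eight-pattern `follows_json_number_spec` helper is removed from `mathesar/api/utils.py` later in this diff:

```python
import re

# Optional minus, integer part without leading zeros, optional fraction,
# optional exponent -- the JSON number grammar in one pattern. Illustrative
# condensation of the original list of eight regexes.
_JSON_NUMBER = re.compile(r'^-?(0|[1-9][0-9]*)(\.[0-9]+)?([eE][+-]?[0-9]+)?$')


def follows_json_number_spec(number: str) -> bool:
    """Return True if the string parses as a JSON number."""
    return _JSON_NUMBER.match(number) is not None


assert follows_json_number_spec('-12.5e3')
assert not follows_json_number_spec('01')  # leading zeros are invalid JSON
```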
- # TODO consider moving below routine to a DRF validate function - for column_name, value in data.items(): - column = Column.objects.get(id=columns_map.inverse[column_name]) - is_number = column.ui_type == UIType.NUMBER - value_is_string = type(value) is str - if is_number and value_is_string and not follows_json_number_spec(value): - raise database_api_exceptions.MathesarAPIException( - IntegrityError, - status_code=status.HTTP_400_BAD_REQUEST, - message="Number strings should follow JSON number spec", - field=column_name - ) - return data diff --git a/mathesar/api/serializers/schemas.py b/mathesar/api/serializers/schemas.py deleted file mode 100644 index 5bdf290285..0000000000 --- a/mathesar/api/serializers/schemas.py +++ /dev/null @@ -1,48 +0,0 @@ -from rest_access_policy import PermittedPkRelatedField -from rest_framework import serializers - -from db.identifiers import is_identifier_too_long - -from mathesar.api.db.permissions.table import TableAccessPolicy -from mathesar.api.db.permissions.database import DatabaseAccessPolicy -from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin -from mathesar.models.deprecated import Connection, Schema, Table -from mathesar.api.exceptions.database_exceptions import ( - exceptions as database_api_exceptions -) - - -class SchemaSerializer(MathesarErrorMessageMixin, serializers.HyperlinkedModelSerializer): - name = serializers.CharField() - # Restrict access to databases with create access. - # Refer https://rsinger86.github.io/drf-access-policy/policy_reuse/ - connection_id = PermittedPkRelatedField( - source='database', - access_policy=DatabaseAccessPolicy, - queryset=Connection.current_objects.all() - ) - description = serializers.CharField( - required=False, allow_blank=True, default=None, allow_null=True - ) - num_tables = serializers.SerializerMethodField() - num_queries = serializers.SerializerMethodField() - - class Meta: - model = Schema - fields = [ - 'id', 'name', 'connection_id', 'has_dependents', 'description', - 'num_tables', 'num_queries' - ] - - def get_num_tables(self, obj): - qs = Table.objects.filter(schema=obj) - count = TableAccessPolicy.scope_queryset(self.context['request'], qs).count() - return count - - def get_num_queries(self, obj): - return sum(t.queries.count() for t in obj.tables.all()) - - def validate_name(self, name): - if is_identifier_too_long(name): - raise database_api_exceptions.IdentifierTooLong(field='name') - return name diff --git a/mathesar/api/serializers/table_settings.py b/mathesar/api/serializers/table_settings.py deleted file mode 100644 index d0b7ddc6e4..0000000000 --- a/mathesar/api/serializers/table_settings.py +++ /dev/null @@ -1,45 +0,0 @@ -from rest_framework import serializers - -from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin -from mathesar.api.exceptions.validation_exceptions.exceptions import InvalidColumnOrder -from mathesar.models.deprecated import PreviewColumnSettings, TableSettings, compute_default_preview_template, ValidationError - - -class PreviewColumnSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): - class Meta: - model = PreviewColumnSettings - fields = ['customized', 'template'] - - customized = serializers.BooleanField(default=True) - template = serializers.CharField() - - -class TableSettingsSerializer(MathesarErrorMessageMixin, serializers.HyperlinkedModelSerializer): - preview_settings = PreviewColumnSerializer() - column_order = serializers.ListField(child=serializers.IntegerField()) - - class Meta: - model = TableSettings 
- fields = ['id', 'preview_settings', 'column_order'] - - def update(self, instance, validated_data): - preview_settings_data = validated_data.pop('preview_settings', None) - if preview_settings_data is not None: - instance.preview_settings.delete() - # The preview is customised when the client modifies the default template - if preview_settings_data.get('template', None): - preview_settings_data['customized'] = True - if preview_settings_data['customized'] is False: - preview_settings_data['template'] = compute_default_preview_template(instance.table) - preview_settings = PreviewColumnSettings.objects.create(**preview_settings_data) - instance.preview_settings = preview_settings - instance.save() - - column_order_data = validated_data.pop('column_order', None) - if column_order_data is not None: - try: - instance.column_order = column_order_data - instance.save() - except ValidationError: - raise InvalidColumnOrder() - return instance diff --git a/mathesar/api/serializers/tables.py b/mathesar/api/serializers/tables.py deleted file mode 100644 index 3f8e1a871a..0000000000 --- a/mathesar/api/serializers/tables.py +++ /dev/null @@ -1,255 +0,0 @@ -from django.urls import reverse -from rest_access_policy import PermittedPkRelatedField -from rest_framework import serializers, status -from rest_framework.exceptions import ValidationError -from sqlalchemy.exc import ProgrammingError - -from db.types.operations.convert import get_db_type_enum_from_id -from psycopg.errors import DuplicateTable -from db.columns.exceptions import InvalidTypeError -from mathesar.api.db.permissions.schema import SchemaAccessPolicy -from mathesar.api.db.permissions.table import TableAccessPolicy -from db.identifiers import is_identifier_too_long -from mathesar.api.exceptions.database_exceptions import ( - exceptions as database_api_exceptions -) - -from mathesar.api.exceptions.validation_exceptions.exceptions import ( - ColumnSizeMismatchAPIException, DistinctColumnRequiredAPIException, - MultipleDataFileAPIException, UnknownDatabaseTypeIdentifier, - InvalidTableName, -) -from mathesar.api.exceptions.database_exceptions.exceptions import DuplicateTableAPIException, InvalidTypeCastAPIException -from mathesar.api.exceptions.database_exceptions.base_exceptions import ProgrammingAPIException -from mathesar.api.exceptions.validation_exceptions import base_exceptions as base_validation_exceptions -from mathesar.api.exceptions.generic_exceptions import base_exceptions as base_api_exceptions -from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin -from mathesar.api.serializers.columns import SimpleColumnSerializer -from mathesar.api.serializers.table_settings import TableSettingsSerializer -from mathesar.models.deprecated import Column, Schema, Table, DataFile -from mathesar.utils.tables import gen_table_name, create_table_from_datafile, create_empty_table - - -class TableSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): - columns = SimpleColumnSerializer(many=True, required=False) - settings = TableSettingsSerializer(read_only=True) - records_url = serializers.SerializerMethodField() - constraints_url = serializers.SerializerMethodField() - columns_url = serializers.SerializerMethodField() - joinable_tables_url = serializers.SerializerMethodField() - type_suggestions_url = serializers.SerializerMethodField() - previews_url = serializers.SerializerMethodField() - dependents_url = serializers.SerializerMethodField() - name = serializers.CharField(required=False, allow_blank=True, default='') - 
import_target = serializers.PrimaryKeyRelatedField(
-        required=False, allow_null=True, queryset=Table.current_objects.all()
-    )
-    data_files = serializers.PrimaryKeyRelatedField(
-        required=False, many=True, queryset=DataFile.objects.all()
-    )
-    description = serializers.CharField(
-        required=False, allow_blank=True, default=None, allow_null=True
-    )
-    schema = PermittedPkRelatedField(
-        access_policy=SchemaAccessPolicy,
-        queryset=Schema.current_objects.all()
-    )
-
-    class Meta:
-        model = Table
-        fields = [
-            'id', 'name', 'import_target', 'schema', 'created_at', 'updated_at',
-            'import_verified', 'columns', 'records_url', 'constraints_url',
-            'columns_url', 'joinable_tables_url', 'type_suggestions_url',
-            'previews_url', 'data_files', 'has_dependents', 'dependents_url',
-            'settings', 'description',
-        ]
-
-    def get_records_url(self, obj):
-        if isinstance(obj, Table):
-            # Only get records if we are serializing an existing table
-            request = self.context['request']
-            return request.build_absolute_uri(reverse('table-record-list', kwargs={'table_pk': obj.pk}))
-        else:
-            return None
-
-    def get_constraints_url(self, obj):
-        if isinstance(obj, Table):
-            # Only get constraints if we are serializing an existing table
-            request = self.context['request']
-            return request.build_absolute_uri(reverse('table-constraint-list', kwargs={'table_pk': obj.pk}))
-        else:
-            return None
-
-    def get_columns_url(self, obj):
-        if isinstance(obj, Table):
-            # Only get columns if we are serializing an existing table
-            request = self.context['request']
-            return request.build_absolute_uri(reverse('table-column-list', kwargs={'table_pk': obj.pk}))
-        else:
-            return None
-
-    def get_joinable_tables_url(self, obj):
-        if isinstance(obj, Table):
-            # Only get joinable tables if we are serializing an existing table
-            request = self.context['request']
-            return request.build_absolute_uri(reverse('table-joinable-tables', kwargs={'pk': obj.pk}))
-        else:
-            return None
-
-    def get_type_suggestions_url(self, obj):
-        if isinstance(obj, Table):
-            # Only get type suggestions if we are serializing an existing table
-            request = self.context['request']
-            return request.build_absolute_uri(reverse('table-type-suggestions', kwargs={'pk': obj.pk}))
-        else:
-            return None
-
-    def get_previews_url(self, obj):
-        if isinstance(obj, Table):
-            # Only get previews if we are serializing an existing table
-            request = self.context['request']
-            return request.build_absolute_uri(reverse('table-previews', kwargs={'pk': obj.pk}))
-        else:
-            return None
-
-    def get_dependents_url(self, obj):
-        if isinstance(obj, Table):
-            request = self.context['request']
-            return request.build_absolute_uri(reverse('table-dependents', kwargs={'pk': obj.pk}))
-        else:
-            return None
-
-    def validate_data_files(self, data_files):
-        if data_files and len(data_files) > 1:
-            raise MultipleDataFileAPIException()
-        return data_files
-
-    def create(self, validated_data):
-        schema = validated_data['schema']
-        data_files = validated_data.get('data_files')
-        name = validated_data.get('name') or gen_table_name(schema, data_files)
-        import_target = validated_data.get('import_target', None)
-        description = validated_data.get('description')
-
-        try:
-            if data_files:
-                table = create_table_from_datafile(
-                    data_files, name, schema, comment=description
-                )
-                if import_target:
-                    table.import_target = import_target
-                    table.is_temp = True
-                    table.save()
-            else:
-                table = create_empty_table(name, schema, comment=description)
-        except DuplicateTable as e:
-            raise DuplicateTableAPIException(
-                e,
-
message=f"Relation {validated_data['name']} already exists in schema {schema.id}", - field="name", - status_code=status.HTTP_400_BAD_REQUEST - ) - except ProgrammingError as e: - raise ProgrammingAPIException(e) - return table - - def update(self, instance, validated_data): - if self.partial: - # Save the fields that are stored in the model. - present_model_fields = [] - for model_field in instance.MODEL_FIELDS: - if model_field in validated_data: - setattr(instance, model_field, validated_data[model_field]) - present_model_fields.append(model_field) - instance.save(update_fields=present_model_fields) - for key in present_model_fields: - del validated_data[key] - # Save the fields that are stored in the underlying DB. - try: - instance.update_sa_table(validated_data) - except InvalidTypeError as e: - raise InvalidTypeCastAPIException( - e, - message=f'{e.column_name} cannot be casted to {e.new_type}.' - if e.column_name and e.new_type - else 'This type casting is invalid.', - status_code=status.HTTP_400_BAD_REQUEST - ) - except ValueError as e: - raise base_api_exceptions.ValueAPIException(e, status_code=status.HTTP_400_BAD_REQUEST) - return instance - - def validate_name(self, name): - if is_identifier_too_long(name): - raise database_api_exceptions.IdentifierTooLong(field='name') - return name - - def validate(self, data): - if self.partial: - if table_name := data.get('name', None): - if any( - invalid_char in table_name - for invalid_char in ('(', ')') - ): - raise InvalidTableName( - table_name, - field='name' - ) - columns = data.get('columns', None) - if columns is not None: - for col in columns: - id = col.get('id', None) - if id is None: - message = "'id' field is required while batch updating columns." - raise base_validation_exceptions.MathesarValidationException(ValidationError, message=message) - return data - - -class TablePreviewSerializer(MathesarErrorMessageMixin, serializers.Serializer): - name = serializers.CharField(required=False) - columns = SimpleColumnSerializer(many=True) - - def validate_columns(self, columns): - table = self.context['table'] - column_names = [col["name"] for col in columns] - if not len(column_names) == len(set(column_names)): - raise DistinctColumnRequiredAPIException() - if not len(columns) == len(table.sa_columns): - raise ColumnSizeMismatchAPIException() - for column in columns: - db_type_id = column['type'] - db_type = get_db_type_enum_from_id(db_type_id) - if db_type is None: - raise UnknownDatabaseTypeIdentifier(db_type_id=db_type_id) - return columns - - -class MoveTableRequestSerializer(MathesarErrorMessageMixin, serializers.Serializer): - move_columns = serializers.PrimaryKeyRelatedField(queryset=Column.current_objects.all(), many=True) - target_table = PermittedPkRelatedField(access_policy=TableAccessPolicy, queryset=Table.current_objects.all()) - - -class SplitTableRequestSerializer(MathesarErrorMessageMixin, serializers.Serializer): - extract_columns = serializers.PrimaryKeyRelatedField(queryset=Column.current_objects.all(), many=True) - extracted_table_name = serializers.CharField() - relationship_fk_column_name = serializers.CharField(allow_blank=False, allow_null=True, default=None) - - -class SplitTableResponseSerializer(MathesarErrorMessageMixin, serializers.Serializer): - extracted_table = serializers.PrimaryKeyRelatedField(queryset=Table.current_objects.all()) - remainder_table = serializers.PrimaryKeyRelatedField(queryset=Table.current_objects.all()) - fk_column = 
serializers.PrimaryKeyRelatedField(queryset=Column.current_objects.all()) - - -class MappingSerializer(MathesarErrorMessageMixin, serializers.Serializer): - def to_internal_value(self, data): - from_col = Column.current_objects.get(id=data[0]) - target_col = Column.current_objects.get(id=data[1]) - return [from_col, target_col] - - -class TableImportSerializer(MathesarErrorMessageMixin, serializers.Serializer): - import_target = PermittedPkRelatedField(access_policy=TableAccessPolicy, queryset=Table.current_objects.all(), required=True) - data_files = serializers.PrimaryKeyRelatedField(required=True, many=True, queryset=DataFile.objects.all()) - mappings = MappingSerializer(required=True, allow_null=True, many=True) diff --git a/mathesar/api/ui/permissions/database_role.py b/mathesar/api/ui/permissions/database_role.py deleted file mode 100644 index 090a4ded6e..0000000000 --- a/mathesar/api/ui/permissions/database_role.py +++ /dev/null @@ -1,47 +0,0 @@ -from django.db.models import Q -from rest_access_policy import AccessPolicy - -from mathesar.models.deprecated import Connection -from mathesar.models.users import DatabaseRole, Role - - -class DatabaseRoleAccessPolicy(AccessPolicy): - statements = [ - # Listing and retrieving a database role is allowed for everyone. - # We cannot restrict access for creating a `DatabaseRole` object here, - # because the database for which the role is created can be known only by inspecting the request body. - # So creating a database role API access permission is tied to - # the validation done on the database object (sent in the body) by the serializer - # when creating the database role. - { - 'action': ['list', 'retrieve', 'create'], - 'principal': 'authenticated', - 'effect': 'allow', - }, - # Only superuser or database manager can delete the database role - { - 'action': ['destroy', 'update', 'partial_update'], - 'principal': ['authenticated'], - 'effect': 'allow', - 'condition_expression': ['(is_superuser or is_db_manager)'] - }, - ] - - @classmethod - def scope_queryset(cls, request, qs): - if not (request.user.is_superuser or request.user.is_anonymous): - # TODO Consider moving to more reusable place - allowed_roles = (Role.MANAGER.value, Role.EDITOR.value, Role.VIEWER.value) - databases_with_view_access = Connection.objects.filter( - Q(database_role__role__in=allowed_roles) & Q(database_role__user=request.user) - ) - qs = qs.filter(database__in=databases_with_view_access) - return qs - - def is_db_manager(self, request, view, action): - database_role = view.get_object() - return DatabaseRole.objects.filter( - user=request.user, - database=database_role.database, - role=Role.MANAGER.value - ).exists() diff --git a/mathesar/api/ui/permissions/schema_role.py b/mathesar/api/ui/permissions/schema_role.py deleted file mode 100644 index b793fc98da..0000000000 --- a/mathesar/api/ui/permissions/schema_role.py +++ /dev/null @@ -1,54 +0,0 @@ -from django.db.models import Q -from rest_access_policy import AccessPolicy - -from mathesar.models.deprecated import Connection, Schema -from mathesar.models.users import DatabaseRole, Role, SchemaRole - - -class SchemaRoleAccessPolicy(AccessPolicy): - # Anyone can view schema role as long as they have at least Viewer access to that schema or its database - # Create access is restricted to superusers or managers of the schema or the database it belongs to. 
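Every access policy deleted in this diff follows the same `drf-access-policy` pattern described in the comment above: a `statements` list matched against the viewset action and principal, plus named condition methods that perform the role checks. A minimal self-contained sketch of that pattern (the class name and `is_manager` condition are illustrative, not Mathesar code):

```python
from rest_access_policy import AccessPolicy


class ExampleRoleAccessPolicy(AccessPolicy):
    # Statements are matched against the viewset action and the requesting
    # principal; 'condition' names a method defined on this class.
    statements = [
        {
            'action': ['list', 'retrieve'],
            'principal': 'authenticated',
            'effect': 'allow',
        },
        {
            'action': ['destroy', 'update', 'partial_update'],
            'principal': 'authenticated',
            'effect': 'allow',
            'condition': 'is_manager',
        },
    ]

    def is_manager(self, request, view, action):
        # Illustrative stand-in; the deleted Mathesar policies consult
        # DatabaseRole/SchemaRole rows here instead.
        return getattr(request.user, 'is_manager', False)
```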
- statements = [ - { - 'action': ['list', 'retrieve', 'create'], - 'principal': 'authenticated', - 'effect': 'allow', - }, - # Only superuser or schema/database manager can delete the role - { - 'action': ['destroy', 'update', 'partial_update'], - 'principal': ['authenticated'], - 'effect': 'allow', - 'condition_expression': ['(is_superuser or is_schema_manager)'] - }, - ] - - @classmethod - def scope_queryset(cls, request, qs): - if not (request.user.is_superuser or request.user.is_anonymous): - allowed_roles = (Role.MANAGER.value, Role.EDITOR.value, Role.VIEWER.value) - databases_with_view_access = Connection.objects.filter( - Q(database_role__role__in=allowed_roles) & Q(database_role__user=request.user) - ) - schema_with_view_access = Schema.objects.filter( - Q(schema_role__role__in=allowed_roles) & Q(schema_role__user=request.user) - ) - qs = qs.filter( - Q(schema__in=schema_with_view_access) - | Q(schema__database__in=databases_with_view_access) - ) - return qs - - def is_schema_manager(self, request, view, action): - schema_role = view.get_object() - is_schema_manager = SchemaRole.objects.filter( - user=request.user, - schema=schema_role.schema, - role=Role.MANAGER.value - ).exists() - is_db_manager = DatabaseRole.objects.filter( - user=request.user, - database=schema_role.schema.database, - role=Role.MANAGER.value - ).exists() - return is_db_manager or is_schema_manager diff --git a/mathesar/api/ui/permissions/shares.py b/mathesar/api/ui/permissions/shares.py deleted file mode 100644 index 620d70ca52..0000000000 --- a/mathesar/api/ui/permissions/shares.py +++ /dev/null @@ -1,46 +0,0 @@ -from rest_access_policy import AccessPolicy - -from mathesar.api.utils import get_query_or_404 -from mathesar.api.permission_utils import QueryAccessInspector - - -class SharedTableAccessPolicy(AccessPolicy): - statements = [ - { - 'action': ['list', 'retrieve'], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition_expression': 'is_atleast_viewer_nested_table_resource' - }, - { - 'action': ['create', 'destroy', 'update', 'partial_update', 'regenerate'], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition_expression': 'is_atleast_editor_nested_table_resource' - }, - ] - - -class SharedQueryAccessPolicy(AccessPolicy): - statements = [ - { - 'action': ['list', 'retrieve'], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition_expression': 'is_atleast_query_viewer' - }, - { - 'action': ['create', 'destroy', 'update', 'partial_update', 'regenerate'], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition_expression': 'is_atleast_query_editor' - }, - ] - - def is_atleast_query_viewer(self, request, view, action): - query = get_query_or_404(view.kwargs['query_pk']) - return QueryAccessInspector(request.user, query).is_atleast_viewer() - - def is_atleast_query_editor(self, request, view, action): - query = get_query_or_404(view.kwargs['query_pk']) - return QueryAccessInspector(request.user, query).is_atleast_editor() diff --git a/mathesar/api/ui/permissions/ui_database.py b/mathesar/api/ui/permissions/ui_database.py deleted file mode 100644 index b1664635b8..0000000000 --- a/mathesar/api/ui/permissions/ui_database.py +++ /dev/null @@ -1,43 +0,0 @@ -from django.db.models import Q -from rest_access_policy import AccessPolicy - -from mathesar.models.users import Role - - -class UIDatabaseAccessPolicy(AccessPolicy): - """ - Anyone can view Database objects and - Database properties like types and filters if they have a Viewer access - """ - statements = [ - { - 
'action': [ - 'list', 'retrieve', 'types', 'functions', 'filters' - ], - 'principal': 'authenticated', - 'effect': 'allow', - }, - { - 'action': [ - 'create', 'partial_update', 'destroy', - 'create_from_known_connection', - 'create_from_scratch', - 'create_with_new_user', - ], - 'principal': 'authenticated', - 'effect': 'allow', - 'condition': 'is_superuser' - } - ] - - @classmethod - def scope_queryset(cls, request, qs): - if not (request.user.is_superuser or request.user.is_anonymous): - allowed_roles = (Role.MANAGER.value,) - if request.method.lower() == 'get': - allowed_roles = allowed_roles + (Role.EDITOR.value, Role.VIEWER.value) - qs = qs.filter( - Q(database_role__role__in=allowed_roles) - & Q(database_role__user=request.user) - ) - return qs diff --git a/mathesar/api/ui/serializers/shares.py b/mathesar/api/ui/serializers/shares.py deleted file mode 100644 index fba3cfd8a1..0000000000 --- a/mathesar/api/ui/serializers/shares.py +++ /dev/null @@ -1,21 +0,0 @@ -from rest_framework import serializers - -from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin -from mathesar.models.shares import SharedTable, SharedQuery - - -class SharedEntitySerializer(MathesarErrorMessageMixin, serializers.Serializer): - class Meta: - fields = ['id', 'slug', 'enabled'] - - -class SharedTableSerializer(SharedEntitySerializer, serializers.ModelSerializer): - class Meta: - model = SharedTable - fields = SharedEntitySerializer.Meta.fields - - -class SharedQuerySerializer(SharedEntitySerializer, serializers.ModelSerializer): - class Meta: - model = SharedQuery - fields = SharedEntitySerializer.Meta.fields diff --git a/mathesar/api/ui/serializers/users.py b/mathesar/api/ui/serializers/users.py index 4ff9069b6a..57ed1a281b 100644 --- a/mathesar/api/ui/serializers/users.py +++ b/mathesar/api/ui/serializers/users.py @@ -1,32 +1,15 @@ from django.contrib.auth.password_validation import validate_password from django.core.exceptions import ValidationError as DjangoValidationError -from rest_access_policy import FieldAccessMixin, PermittedPkRelatedField +from rest_access_policy import FieldAccessMixin from rest_framework import serializers -from mathesar.api.db.permissions.database import DatabaseAccessPolicy -from mathesar.api.db.permissions.schema import SchemaAccessPolicy from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin from mathesar.api.exceptions.validation_exceptions.exceptions import IncorrectOldPassword from mathesar.api.ui.permissions.users import UserAccessPolicy -from mathesar.models.deprecated import Connection, Schema -from mathesar.models.users import User, DatabaseRole, SchemaRole - - -class NestedDatabaseRoleSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): - class Meta: - model = DatabaseRole - fields = ['id', 'database', 'role'] - - -class NestedSchemaRoleSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): - class Meta: - model = SchemaRole - fields = ['id', 'schema', 'role'] +from mathesar.models.users import User class UserSerializer(MathesarErrorMessageMixin, FieldAccessMixin, serializers.ModelSerializer): - database_roles = NestedDatabaseRoleSerializer(many=True, required=False) - schema_roles = NestedSchemaRoleSerializer(many=True, required=False) access_policy = UserAccessPolicy class Meta: @@ -39,14 +22,10 @@ class Meta: 'password', 'email', 'is_superuser', - 'database_roles', - 'schema_roles', 'display_language' ] extra_kwargs = { 'password': {'write_only': True}, - 'database_roles': {'read_only': True}, - 'schema_roles': 
{'read_only': True} } def get_fields(self): @@ -96,27 +75,3 @@ def update(self, instance, validated_data): class PasswordResetSerializer(MathesarErrorMessageMixin, serializers.Serializer): password = serializers.CharField(write_only=True, required=True) - - -class DatabaseRoleSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): - class Meta: - model = DatabaseRole - fields = ['id', 'user', 'database', 'role'] - - # Restrict the list of databases to which the user has access to create a database role - # Refer https://rsinger86.github.io/drf-access-policy/policy_reuse/ for the usage of `PermittedPkRelatedField` - database = PermittedPkRelatedField( - access_policy=DatabaseAccessPolicy, - queryset=Connection.current_objects.all() - ) - - -class SchemaRoleSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): - class Meta: - model = SchemaRole - fields = ['id', 'user', 'schema', 'role'] - - schema = PermittedPkRelatedField( - access_policy=SchemaAccessPolicy, - queryset=Schema.current_objects.all() - ) diff --git a/mathesar/api/ui/viewsets/__init__.py b/mathesar/api/ui/viewsets/__init__.py index 53da991a40..76344d7d0b 100644 --- a/mathesar/api/ui/viewsets/__init__.py +++ b/mathesar/api/ui/viewsets/__init__.py @@ -1,5 +1 @@ -from mathesar.api.ui.viewsets.databases import ConnectionViewSet # noqa from mathesar.api.ui.viewsets.users import * # noqa -from mathesar.api.ui.viewsets.version import VersionViewSet # noqa -from mathesar.api.ui.viewsets.records import RecordViewSet # noqa -from mathesar.api.ui.viewsets.shares import * # noqa diff --git a/mathesar/api/ui/viewsets/databases.py b/mathesar/api/ui/viewsets/databases.py deleted file mode 100644 index 01a9ab26ac..0000000000 --- a/mathesar/api/ui/viewsets/databases.py +++ /dev/null @@ -1,135 +0,0 @@ -from django.db.utils import IntegrityError -from django_filters import rest_framework as filters -from rest_access_policy import AccessViewSetMixin -from rest_framework import serializers, status, viewsets -from rest_framework.decorators import action -from rest_framework.mixins import ListModelMixin, RetrieveModelMixin -from rest_framework.response import Response - -from mathesar.api.ui.permissions.ui_database import UIDatabaseAccessPolicy -from mathesar.models.deprecated import Connection -from mathesar.api.dj_filters import DatabaseFilter -from mathesar.api.exceptions.validation_exceptions.exceptions import ( - DictHasBadKeys, UnsupportedInstallationDatabase -) -from mathesar.api.exceptions.database_exceptions.base_exceptions import IntegrityAPIException -from mathesar.api.pagination import DefaultLimitOffsetPagination - -from mathesar.api.serializers.databases import ConnectionSerializer, TypeSerializer -from mathesar.api.serializers.filters import FilterSerializer - -from mathesar.filters.base import get_available_filters -from mathesar.utils.connections import ( - copy_connection_from_preexisting, create_connection_from_scratch, - create_connection_with_new_user, BadInstallationTarget -) - - -class ConnectionViewSet( - AccessViewSetMixin, - ListModelMixin, RetrieveModelMixin, - viewsets.GenericViewSet, -): - serializer_class = ConnectionSerializer - pagination_class = DefaultLimitOffsetPagination - filter_backends = (filters.DjangoFilterBackend,) - filterset_class = DatabaseFilter - access_policy = UIDatabaseAccessPolicy - - def get_queryset(self): - return self.access_policy.scope_queryset( - self.request, - Connection.objects.all().order_by('-created_at') - ) - - @action(methods=['get'], detail=True) - def 
types(self, request, pk=None): - database = self.get_object() - supported_ui_types = database.supported_ui_types - serializer = TypeSerializer(supported_ui_types, many=True) - return Response(serializer.data) - - @action(methods=['get'], detail=True) - def filters(self, request, pk=None): - database = self.get_object() - engine = database._sa_engine - available_filters = get_available_filters(engine) - serializer = FilterSerializer(available_filters, many=True) - return Response(serializer.data) - - @action(methods=['post'], detail=False, serializer_class=serializers.Serializer) - def create_from_known_connection(self, request): - try: - created_connection = copy_connection_from_preexisting( - request.data['credentials']['connection'], - request.data['nickname'], - request.data['database_name'], - request.data.get('create_database', False), - request.data.get('sample_data', []), - ) - except KeyError as e: - raise DictHasBadKeys( - field=e.args[0] - ) - except BadInstallationTarget: - raise UnsupportedInstallationDatabase() - except IntegrityError as e: - raise IntegrityAPIException(e, status_code=status.HTTP_400_BAD_REQUEST) - serializer = ConnectionSerializer( - created_connection, context={'request': request}, many=False - ) - return Response(serializer.data) - - @action(methods=['post'], detail=False, serializer_class=serializers.Serializer) - def create_from_scratch(self, request): - try: - credentials = request.data['credentials'] - created_connection = create_connection_from_scratch( - credentials['user'], - credentials['password'], - credentials['host'], - credentials['port'], - request.data['nickname'], - request.data['database_name'], - request.data.get('sample_data', []), - ) - except KeyError as e: - raise DictHasBadKeys( - message="Required key missing", - field=e.args[0] - ) - except BadInstallationTarget: - raise UnsupportedInstallationDatabase() - except IntegrityError as e: - raise IntegrityAPIException(e, status_code=status.HTTP_400_BAD_REQUEST) - serializer = ConnectionSerializer( - created_connection, context={'request': request}, many=False - ) - return Response(serializer.data) - - @action(methods=['post'], detail=False, serializer_class=serializers.Serializer) - def create_with_new_user(self, request): - try: - credentials = request.data['credentials'] - created_connection = create_connection_with_new_user( - credentials['create_user_via'], - credentials['user'], - credentials['password'], - request.data['nickname'], - request.data['database_name'], - request.data.get('create_database', False), - request.data.get('sample_data', []), - ) - except KeyError as e: - raise DictHasBadKeys( - message="Required key missing", - field=e.args[0] - ) - except BadInstallationTarget: - raise UnsupportedInstallationDatabase() - except IntegrityError as e: - raise IntegrityAPIException(e, status_code=status.HTTP_400_BAD_REQUEST) - serializer = ConnectionSerializer( - created_connection, context={'request': request}, many=False - ) - return Response(serializer.data) diff --git a/mathesar/api/ui/viewsets/records.py b/mathesar/api/ui/viewsets/records.py deleted file mode 100644 index 981452e5a0..0000000000 --- a/mathesar/api/ui/viewsets/records.py +++ /dev/null @@ -1,35 +0,0 @@ -from psycopg2.errors import ForeignKeyViolation -from rest_access_policy import AccessViewSetMixin -from rest_framework import status, viewsets -from rest_framework.response import Response -from rest_framework.decorators import action -from sqlalchemy.exc import IntegrityError - -from 
mathesar.api.db.permissions.records import RecordAccessPolicy -import mathesar.api.exceptions.database_exceptions.exceptions as database_api_exceptions - -from mathesar.api.utils import get_table_or_404 -from mathesar.models.deprecated import Table - - -class RecordViewSet(AccessViewSetMixin, viewsets.GenericViewSet): - access_policy = RecordAccessPolicy - - def get_queryset(self): - return Table.objects.all().order_by('-created_at') - - @action(methods=['post'], detail=False) - def delete(self, request, table_pk=None): - table = get_table_or_404(table_pk) - pks = request.data.get("pks") - try: - table.bulk_delete_records(pks) - except IntegrityError as e: - if isinstance(e.orig, ForeignKeyViolation): - raise database_api_exceptions.ForeignKeyViolationAPIException( - e, - status_code=status.HTTP_400_BAD_REQUEST, - referent_table=table, - ) - - return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/mathesar/api/ui/viewsets/shares.py b/mathesar/api/ui/viewsets/shares.py deleted file mode 100644 index 564b6dbc97..0000000000 --- a/mathesar/api/ui/viewsets/shares.py +++ /dev/null @@ -1,44 +0,0 @@ -import uuid -from rest_framework import viewsets -from rest_access_policy import AccessViewSetMixin -from rest_framework.decorators import action -from rest_framework.response import Response - -from mathesar.api.pagination import DefaultLimitOffsetPagination -from mathesar.api.ui.serializers.shares import SharedTableSerializer, SharedQuerySerializer -from mathesar.api.ui.permissions.shares import SharedTableAccessPolicy, SharedQueryAccessPolicy -from mathesar.models.shares import SharedTable, SharedQuery - - -class RegenerateSlugMixin(viewsets.GenericViewSet): - @action(methods=['post'], detail=True) - def regenerate(self, *args, **kwargs): - share = self.get_object() - share.slug = uuid.uuid4() - share.save() - serializer = self.get_serializer(share) - return Response(serializer.data) - - -class SharedTableViewSet(AccessViewSetMixin, viewsets.ModelViewSet, RegenerateSlugMixin): - pagination_class = DefaultLimitOffsetPagination - serializer_class = SharedTableSerializer - access_policy = SharedTableAccessPolicy - - def get_queryset(self): - return SharedTable.objects.filter(table_id=self.kwargs['table_pk']).order_by('-created_at') - - def perform_create(self, serializer): - serializer.save(table_id=self.kwargs['table_pk']) - - -class SharedQueryViewSet(AccessViewSetMixin, viewsets.ModelViewSet, RegenerateSlugMixin): - pagination_class = DefaultLimitOffsetPagination - serializer_class = SharedQuerySerializer - access_policy = SharedQueryAccessPolicy - - def get_queryset(self): - return SharedQuery.objects.filter(query_id=self.kwargs['query_pk']).order_by('-created_at') - - def perform_create(self, serializer): - serializer.save(query_id=self.kwargs['query_pk']) diff --git a/mathesar/api/ui/viewsets/users.py b/mathesar/api/ui/viewsets/users.py index 03ae399a3d..8fa726afb4 100644 --- a/mathesar/api/ui/viewsets/users.py +++ b/mathesar/api/ui/viewsets/users.py @@ -2,19 +2,14 @@ from rest_access_policy import AccessViewSetMixin from rest_framework import status, viewsets from rest_framework.decorators import action -from rest_framework.exceptions import MethodNotAllowed from rest_framework.generics import get_object_or_404 from rest_framework.response import Response -from mathesar.api.ui.permissions.database_role import DatabaseRoleAccessPolicy -from mathesar.api.ui.permissions.schema_role import SchemaRoleAccessPolicy from mathesar.api.ui.serializers.users import ( - ChangePasswordSerializer, 
PasswordResetSerializer, UserSerializer, DatabaseRoleSerializer, - SchemaRoleSerializer, + ChangePasswordSerializer, PasswordResetSerializer, UserSerializer, ) from mathesar.api.pagination import DefaultLimitOffsetPagination from mathesar.api.ui.permissions.users import UserAccessPolicy -from mathesar.models.users import DatabaseRole, SchemaRole class UserViewSet(AccessViewSetMixin, viewsets.ModelViewSet): @@ -45,39 +40,3 @@ def password_change(self, request): serializer.is_valid(raise_exception=True) serializer.save() return Response(status=status.HTTP_200_OK) - - -class DatabaseRoleViewSet(AccessViewSetMixin, viewsets.ModelViewSet): - queryset = DatabaseRole.objects.all().order_by('id') - serializer_class = DatabaseRoleSerializer - pagination_class = DefaultLimitOffsetPagination - access_policy = DatabaseRoleAccessPolicy - - def get_queryset(self): - return self.access_policy.scope_queryset( - self.request, super().get_queryset() - ) - - def update(self, request, pk=None): - raise MethodNotAllowed(request.method) - - def partial_update(self, request, pk=None): - raise MethodNotAllowed(request.method) - - -class SchemaRoleViewSet(AccessViewSetMixin, viewsets.ModelViewSet): - queryset = SchemaRole.objects.all().order_by('id') - serializer_class = SchemaRoleSerializer - pagination_class = DefaultLimitOffsetPagination - access_policy = SchemaRoleAccessPolicy - - def get_queryset(self): - return self.access_policy.scope_queryset( - self.request, super().get_queryset() - ) - - def update(self, request, pk=None): - raise MethodNotAllowed(request.method) - - def partial_update(self, request, pk=None): - raise MethodNotAllowed(request.method) diff --git a/mathesar/api/ui/viewsets/version.py b/mathesar/api/ui/viewsets/version.py deleted file mode 100644 index 9f8f5ab766..0000000000 --- a/mathesar/api/ui/viewsets/version.py +++ /dev/null @@ -1,12 +0,0 @@ -from rest_framework import viewsets -from rest_framework.decorators import action -from rest_framework.response import Response - -from mathesar import __version__ - - -class VersionViewSet(viewsets.ViewSet): - - @action(methods=['get'], detail=False) - def current(self, _): - return Response(__version__) diff --git a/mathesar/api/utils.py b/mathesar/api/utils.py index be83a84173..74b27df815 100644 --- a/mathesar/api/utils.py +++ b/mathesar/api/utils.py @@ -1,53 +1,8 @@ -from uuid import UUID -from rest_framework.exceptions import NotFound -from rest_framework import status -import mathesar.api.exceptions.generic_exceptions.base_exceptions as generic_api_exceptions -import re - from db.records.operations import group -from mathesar.api.exceptions.error_codes import ErrorCodes -from mathesar.models.deprecated import Table -from mathesar.models.query import Exploration from mathesar.utils.preview import column_alias_from_preview_template -from mathesar.api.exceptions.generic_exceptions.base_exceptions import BadDBCredentials -import psycopg DATA_KEY = 'data' METADATA_KEY = 'metadata' -SHARED_LINK_UUID_QUERY_PARAM = 'shared-link-uuid' - - -def get_table_or_404(pk): - """ - Get table for which the user has correct permission if it exists, - otherwise throws a DRF NotFound error. 
- Args: - pk: id of table - Returns: - table: the table with the given id - """ - try: - table = Table.objects.get(id=pk) - except Table.DoesNotExist: - raise generic_api_exceptions.NotFoundAPIException( - NotFound, - error_code=ErrorCodes.TableNotFound.value, - message="Table doesn't exist" - ) - return table - - -def get_query_or_404(pk): - try: - query = Exploration.objects.get(id=pk) - except Exploration.DoesNotExist: - raise generic_api_exceptions.NotFoundAPIException( - NotFound, - error_code=ErrorCodes.QueryNotFound.value, - message="Query doesn't exist" - ) - return query - - -def process_annotated_records(record_list, column_name_id_map=None, preview_metadata=None): @@ -131,50 +86,3 @@ def _use_correct_column_identifier(group_metadata_item): output_groups = None return processed_records, output_groups, preview_metadata - - -def follows_json_number_spec(number): - """ - Check if a string follows the JSON number spec - Args: - number: number as string - """ - patterns = [ - r"^-?0$", - r"^-?0[\.][0-9]+$", - r"^-?0[eE][+-]?[0-9]*$", - r"^-?0[\.][0-9]+[eE][+-]?[0-9]+$", - r"^-?[1-9][0-9]*$", - r"^-?[1-9][0-9]*[\.][0-9]+$", - r"^-?[1-9][0-9]*[eE][+-]?[0-9]+$", - r"^-?[1-9][0-9]*[\.][0-9]+[eE][+-]?[0-9]+$", - ] - for pattern in patterns: - if re.search(pattern, number) is not None: - return True - return False - - -def is_valid_uuid_v4(value): - try: - UUID(str(value), version=4) - return True - except ValueError: - return False - - -def is_valid_pg_creds(credentials): - dbname = credentials["db_name"] - user = credentials["username"] - password = credentials["password"] - host = credentials["host"] - port = credentials["port"] - conn_str = f'dbname={dbname} user={user} password={password} host={host} port={port}' - try: - with psycopg.connect(conn_str): - return True - except psycopg.errors.OperationalError as e: - raise BadDBCredentials( - exception=e, - status_code=status.HTTP_400_BAD_REQUEST - ) diff --git a/mathesar/database/base.py b/mathesar/database/base.py index c07552be26..d6384d3d76 100644 --- a/mathesar/database/base.py +++ b/mathesar/database/base.py @@ -1,4 +1,3 @@ -import psycopg from db import engine @@ -19,19 +18,3 @@ def _get_credentials_for_db_model(db_model): database=db_model.db_name, port=db_model.port, ) - - -def get_psycopg_connection(db_model): - """ - Get a psycopg connection, given a Database model. - - Args: - db_model: The Django model corresponding to the Database. - """ - return psycopg.connect( - host=db_model.host, - port=db_model.port, - dbname=db_model.db_name, - user=db_model.username, - password=db_model.password, - ) diff --git a/mathesar/database/types.py b/mathesar/database/types.py index 46c827640f..388f54d474 100644 --- a/mathesar/database/types.py +++ b/mathesar/database/types.py @@ -6,7 +6,6 @@ from db.types.base import ( PostgresType, MathesarCustomType ) -from db.types.hintsets import db_types_hinted class UIType(Enum): @@ -158,57 +157,8 @@ def __str__(self): return self.id -def ui_types_that_satisfy_hintset(ui_types_mapped_to_hintsets, hintset): - """ - Provided a mapping of UI types to their hintsets and a hintset, tries to find UI - types whose hintsets satisfy the passed hintset, meaning the UI types whose hintsets are - supersets of the passed hintset. 
- """ - hintset = set(hintset) - return frozenset( - ui_type - for ui_type, ui_type_hintset - in ui_types_mapped_to_hintsets.items() - if set.issubset(hintset, ui_type_hintset) - ) - - -def get_ui_types_mapped_to_hintsets(): - """ - Returns a dict where the keys are UI types and the values their hintsets. - A UI type's hintset is defined as the intersection of the hintsets of its associated - database types. - """ - ui_types_mapped_to_hintsets = {} - for ui_type in UIType: - associated_db_types = ui_type.db_types - associated_db_type_hintsets = tuple( - set(db_types_hinted[associated_db_type]) - for associated_db_type in associated_db_types - if associated_db_type in db_types_hinted - ) - hintsets_intersection = _safe_set_intersection(associated_db_type_hintsets) - ui_types_mapped_to_hintsets[ui_type] = frozenset(hintsets_intersection) - return ui_types_mapped_to_hintsets - - -def _safe_set_intersection(sets): - # set.intersection fails if it is not passed anything. - if len(sets) > 0: - return set.intersection(*sets) - else: - return set() - - def get_ui_type_from_db_type(db_type_to_find): for ui_type in UIType: associated_db_types = ui_type.db_types if db_type_to_find in associated_db_types: return ui_type - - -def get_ui_type_from_id(ui_type_id): - try: - return UIType(ui_type_id) - except ValueError: - return None diff --git a/mathesar/filters/__init__.py b/mathesar/filters/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/mathesar/filters/base.py b/mathesar/filters/base.py deleted file mode 100644 index 11cefb4d7a..0000000000 --- a/mathesar/filters/base.py +++ /dev/null @@ -1,123 +0,0 @@ -from db.functions import hints - -from db.functions.operations.check_support import get_supported_db_functions -from mathesar.database.types import get_ui_types_mapped_to_hintsets -from mathesar.database.types import ui_types_that_satisfy_hintset - - -def get_available_filters(engine): - available_db_functions = get_supported_db_functions(engine) - db_functions_castable_to_filter = tuple( - db_function - for db_function in available_db_functions - if _is_db_function_subclass_castable_to_filter(db_function) - ) - ui_type_hints = get_ui_types_mapped_to_hintsets() - filters = tuple( - _filter_from_db_function( - ui_type_hints, - db_function_castable_to_filter, - ) - for db_function_castable_to_filter - in db_functions_castable_to_filter - ) - return filters - - -def _is_db_function_subclass_castable_to_filter(db_function_subclass): - # Provisionary implementation; ideally would examine parameter and output - # related hints. - db_function_hints = db_function_subclass.hints - if db_function_hints: - return hints.mathesar_filter in db_function_hints - else: - return False - - -def _filter_from_db_function(ui_type_hints, db_function_subclass): - aliases = _get_aliases(ui_type_hints, db_function_subclass) - return dict( - id=db_function_subclass.id, - name=db_function_subclass.name, - aliases=aliases, - parameters=_get_filter_parameters(ui_type_hints, db_function_subclass), - ) - - -# TODO are aliases still possible? might make sense to get rid of aliases-related logic. 
-def _get_aliases(ui_type_hints, db_function_subclass): - alias_hints = hints.get_hints_with_id(db_function_subclass, 'use_this_alias_when') - aliases = tuple( - _process_alias_hint(ui_type_hints, alias_hint) - for alias_hint in alias_hints - ) - return aliases - - -def _process_alias_hint(ui_type_hints, alias_hint): - alias_name = alias_hint.get("alias") - when_hintset = alias_hint.get("when") - when_ui_types = ui_types_that_satisfy_hintset( - ui_type_hints, - when_hintset - ) - return dict( - alias=alias_name, - ui_types=when_ui_types, - ) - - -def _get_filter_parameters(ui_type_hints, db_function_subclass): - """ - Describes filter parameters. Returns a sequence of dicts (one per parameter described) - containing at least the MA type of parameter at that index. - """ - parameter_count = hints.get_parameter_count(db_function_subclass) - if not parameter_count: - raise Exception("Parameter count must be declared on a DbFunction with the mathesar_filter hint.") - filter_params = [] - for parameter_index in range(parameter_count): - ui_types = _get_parameter_ui_types( - ui_type_hints=ui_type_hints, - db_function_subclass=db_function_subclass, - parameter_index=parameter_index, - ) - suggested_values = _get_parameter_suggested_values( - db_function_subclass=db_function_subclass, - parameter_index=parameter_index, - ) - filter_param = _make_filter_param( - ui_types=ui_types, - suggested_values=suggested_values, - ) - filter_params.append(filter_param) - return tuple(filter_params) - - -def _get_parameter_suggested_values(db_function_subclass, parameter_index): - parameter_hints = hints.get_parameter_hints(parameter_index, db_function_subclass) - for hint in parameter_hints: - if hint['id'] == 'suggested_values': - return hint['values'] - - -def _make_filter_param(ui_types, suggested_values): - filter_param = dict(ui_types=ui_types) - if suggested_values: - filter_param['suggested_values'] = suggested_values - return filter_param - - -def _get_parameter_ui_types(ui_type_hints, db_function_subclass, parameter_index): - parameter_type_hints = hints.get_parameter_type_hints(parameter_index, db_function_subclass) - parameter_ui_types = ui_types_that_satisfy_hintset( - ui_type_hints, - parameter_type_hints - ) - if len(parameter_ui_types) == 0: - raise Exception( - f"Hints of DB function {db_function_subclass.id}" - + f" parameter at index {parameter_index} must match" - + " at least one Mathesar type (it didn't)." - ) - return parameter_ui_types diff --git a/mathesar/functions/__init__.py b/mathesar/functions/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/mathesar/functions/operations/__init__.py b/mathesar/functions/operations/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/mathesar/functions/operations/convert.py b/mathesar/functions/operations/convert.py deleted file mode 100644 index cd72d11a90..0000000000 --- a/mathesar/functions/operations/convert.py +++ /dev/null @@ -1,44 +0,0 @@ -from db.functions.operations.deserialize import get_raw_spec_components -from db.functions.exceptions import ReferencedColumnsDontExist - - -def rewrite_db_function_spec_column_ids_to_names(column_ids_to_names, spec): - """ - Takes a DB function spec, looks for columns referenced via Django IDs (e.g. `{"column_id": 3}`) - and replaces those with column names. That's necessary since the DB module is unaware of column - IDs and can only accept columns referenced by name. 
- """ - # Using a private method to do the heavy lifting since it uses different parameters. - return _rewrite( - column_ids_to_names=column_ids_to_names, - spec_or_literal=spec - ) - - -def _rewrite(column_ids_to_names, spec_or_literal): - its_a_spec = isinstance(spec_or_literal, dict) - if its_a_spec: - spec = spec_or_literal - db_function_id, parameters = get_raw_spec_components(spec) - if db_function_id == "column_id": - db_function_id = "column_name" - column_id = parameters[0] - column_name = column_ids_to_names[column_id] - if column_name: - parameters[0] = column_name - else: - raise ReferencedColumnsDontExist( - f"Column ID {column_id} unknown." - + f" Known id-name mapping: {column_ids_to_names}" - ) - parameters = [ - _rewrite( - column_ids_to_names=column_ids_to_names, - spec_or_literal=parameter, - ) - for parameter in parameters - ] - return {db_function_id: parameters} - else: - literal = spec_or_literal - return literal diff --git a/mathesar/install.py b/mathesar/install.py index 74c2c8c482..c9ecc5c00d 100644 --- a/mathesar/install.py +++ b/mathesar/install.py @@ -1,27 +1,14 @@ """ -This script installs functions and types for Mathesar onto the configured DB. +This script installs the Mathesar Django tables onto the configured DB server. """ -import getopt import os -import sys import django from django.core import management from decouple import config as decouple_config -from django.conf import settings -from django.db.utils import IntegrityError -from sqlalchemy.exc import OperationalError -from db import install def main(skip_static_collection=False): - # skip_confirm is temporarily enabled by default as we don't have any use - # for interactive prompts with docker only deployments - skip_confirm = True - (opts, _) = getopt.getopt(sys.argv[1:], ":s", ["skip-confirm"]) - for (opt, value) in opts: - if (opt == "-s") or (opt == "--skip-confirm"): - skip_confirm = True os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production") django.setup() management.call_command('migrate') @@ -29,32 +16,6 @@ def main(skip_static_collection=False): # if not debug_mode and not skip_static_collection: management.call_command('collectstatic', '--noinput', '--clear') - print("------------Setting up User Databases------------") - django_db_key = decouple_config('DJANGO_DATABASE_KEY', default="default") - user_databases = [key for key in settings.DATABASES if key != django_db_key] - for database_key in user_databases: - try: - install_on_db_with_key(database_key, skip_confirm) - except IntegrityError: - continue - - -def install_on_db_with_key(database_key, skip_confirm): - from mathesar.models.deprecated import Connection - db_model = Connection.create_from_settings_key(database_key) - try: - install.install_mathesar( - database_name=db_model.db_name, - hostname=db_model.host, - username=db_model.username, - password=db_model.password, - port=db_model.port, - skip_confirm=skip_confirm - ) - except OperationalError as e: - db_model.delete() - raise e - db_model.save() if __name__ == "__main__": diff --git a/mathesar/models/__init__.py b/mathesar/models/__init__.py index 1f6b3b1e86..f180d82bfd 100644 --- a/mathesar/models/__init__.py +++ b/mathesar/models/__init__.py @@ -1,2 +1,3 @@ # We need to do this to register the model correctly in Django settings from .users import User # noqa +from .shares import SharedQuery, SharedTable # noqa diff --git a/mathesar/models/deprecated.py b/mathesar/models/deprecated.py index 68087f3fae..9b1b3dbd01 100644 --- 
a/mathesar/models/deprecated.py +++ b/mathesar/models/deprecated.py @@ -266,7 +266,7 @@ def decorator(self, column, name): class ColumnPrefetcher(Prefetcher): - def filter(self, table_ids, tables): + def filter(self, _, tables): if len(tables) < 1: return [] columns = reduce(lambda column_objs, table: column_objs + list(table.columns.all()), tables, []) diff --git a/mathesar/rpc/connections.py b/mathesar/rpc/connections.py deleted file mode 100644 index c0486b1498..0000000000 --- a/mathesar/rpc/connections.py +++ /dev/null @@ -1,146 +0,0 @@ -""" -Classes and functions exposed to the RPC endpoint for creating connections. -""" -from typing import TypedDict - -from modernrpc.core import rpc_method -from modernrpc.auth.basic import http_basic_auth_superuser_required - -from mathesar.utils import connections, permissions -from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions - - -class ConnectionReturn(TypedDict): - """ - Information about a connection model. - - Attributes: - id (int): The Django id of the Connection object added. - nickname (str): Used to identify the added connection. - database (str): The name of the database on the server. - username (str): The username of the role for the connection. - host (str): The hostname or IP address of the Postgres server. - port (int): The port of the Postgres server. - """ - id: int - nickname: str - database: str - username: str - host: str - port: int - - @classmethod - def from_model(cls, connection): - return cls( - id=connection.id, - nickname=connection.name, - database=connection.db_name, - username=connection.username, - host=connection.host, - port=connection.port - ) - - -@rpc_method(name='connections.add_from_known_connection') -@http_basic_auth_superuser_required -@handle_rpc_exceptions -def add_from_known_connection( - *, - nickname: str, - database: str, - create_db: bool = False, - connection_id: int = None, - sample_data: list[str] = [], -) -> ConnectionReturn: - """ - Add a new connection from an already existing one. - - If no `connection_id` is passed, the internal database connection - will be used. - - Args: - nickname: Identify the added connection. Should be unique. - database: The name of the database on the server. - create_db: Whether we should create the database `database` if it - doesn't already exist. - connection_id: Identifies the known connection when combined with - the user_database value for the connection_type parameter - sample_data: A list of strings requesting that some example data - sets be installed on the underlying database. Valid list - members are 'library_management' and 'movie_collection'. - - Returns: - Metadata about the Database associated with the connection. - """ - if connection_id is not None: - connection_type = 'user_database' - else: - connection_type = 'internal_database' - connection = { - 'connection_type': connection_type, - 'connection_id': connection_id - } - connection_model = connections.copy_connection_from_preexisting( - connection, nickname, database, create_db, sample_data - ) - return ConnectionReturn.from_model(connection_model) - - -@rpc_method(name='connections.add_from_scratch') -@http_basic_auth_superuser_required -@handle_rpc_exceptions -def add_from_scratch( - *, - nickname: str, - database: str, - user: str, - password: str, - host: str, - port: int, - sample_data: list[str] = [], -) -> ConnectionReturn: - """ - Add a new connection to a PostgreSQL server from scratch. - - This requires inputting valid credentials for the connection. 
When - setting up the connection, therefore, the `database` must already - exist on the PostgreSQL server. - - Args: - nickname: Identify the added connection. Should be unique. - database: The name of the database on the server. - user: A valid user (role) on the server, with `CONNECT` and - `CREATE` privileges on the database given by `database`. - password: The password for `user`. - host: The hostname or IP address of the PostgreSQL server. - port: The port of the PostgreSQL server. - sample_data: A list of strings requesting that some example data - sets be installed on the underlying database. Valid list - members are 'library_management' and 'movie_collection'. - - Returns: - Metadata about the Database associated with the connection. - """ - connection_model = connections.create_connection_from_scratch( - user, password, host, port, nickname, database, sample_data - ) - return ConnectionReturn.from_model(connection_model) - - -@rpc_method(name='connections.grant_access_to_user') -@http_basic_auth_superuser_required -@handle_rpc_exceptions -def grant_access_to_user(*, connection_id: int, user_id: int): - """ - Migrate a connection to new models and grant access to a user. - - This function is designed to be temporary, and should probably be - removed once we have completed the new users and permissions setup - for beta. You pass any connection id and user id. The function will - fill the required models as needed. - - Args: - connection_id: The Django id of an old-style connection. - user_id: The Django id of a user. - """ - permissions.migrate_connection_for_user(connection_id, user_id) diff --git a/mathesar/signals.py b/mathesar/signals.py index bcec182ab1..0997730ed7 100644 --- a/mathesar/signals.py +++ b/mathesar/signals.py @@ -3,7 +3,6 @@ from mathesar.models.deprecated import ( Column, Table, _set_default_preview_template, - _create_table_settings, ) from mathesar.state.django import reflect_new_table_constraints @@ -16,13 +15,6 @@ def sync_table_constraints(**kwargs): reflect_new_table_constraints(kwargs['instance']) -@receiver(post_save, sender=Table) -def create_table_settings(**kwargs): - if kwargs['created']: - instance = kwargs['instance'] - _create_table_settings([instance]) - - @receiver(post_save, sender=Column) def compute_preview_column_settings(**kwargs): instance = kwargs['instance'] diff --git a/mathesar/state/django.py b/mathesar/state/django.py index ebb5cfbcfb..4feef16dd0 100644 --- a/mathesar/state/django.py +++ b/mathesar/state/django.py @@ -109,7 +109,6 @@ def reflect_tables_from_schemas(schemas, metadata): tables.append(table) models_deprecated.Table.current_objects.bulk_create(tables, ignore_conflicts=True) # Calling signals manually because bulk create does not emit any signals - models_deprecated._create_table_settings(models_deprecated.Table.current_objects.filter(settings__isnull=True)) deleted_tables = [] for table in models_deprecated.Table.current_objects.filter(schema__in=schemas).select_related('schema'): if (table.oid, table.schema.oid) not in db_table_oids: diff --git a/mathesar/tests/api/conftest.py b/mathesar/tests/api/conftest.py index 5d0318d51b..bcb36e81dd 100644 --- a/mathesar/tests/api/conftest.py +++ b/mathesar/tests/api/conftest.py @@ -6,9 +6,7 @@ from sqlalchemy import Table as SATable from db.columns.operations.select import get_column_attnum_from_name -from db.constraints.base import ForeignKeyConstraint, UniqueConstraint from db.tables.operations.select import get_oid_from_table -from db.types.base import PostgresType from 
mathesar.models.deprecated import Table, DataFile, Column as ServiceLayerColumn from db.metadata import get_empty_metadata from mathesar.state import reset_reflection @@ -27,19 +25,6 @@ def _create_data_file(file_path, file_name): return _create_data_file -@pytest.fixture -def create_data_types_table(data_types_csv_filepath, create_table): - csv_filepath = data_types_csv_filepath - - def _create_table(table_name, schema_name='Data Types'): - return create_table( - table_name=table_name, - schema_name=schema_name, - csv_filepath=csv_filepath, - ) - return _create_table - - @pytest.fixture def self_referential_table(create_table, get_uid): return create_table( @@ -49,140 +34,6 @@ def self_referential_table(create_table, get_uid): ) -@pytest.fixture -def create_base_table(create_table): - def _create_table(table_name, schema_name='FK Test'): - return create_table( - table_name=table_name, - schema_name=schema_name, - csv_filepath='mathesar/tests/data/base_table.csv' - ) - return _create_table - - -@pytest.fixture -def create_referent_table(create_table): - def _create_table(table_name, schema_name='FK Test'): - return create_table( - table_name=table_name, - schema_name=schema_name, - csv_filepath='mathesar/tests/data/reference_table.csv' - ) - return _create_table - - -@pytest.fixture -def two_foreign_key_tables(_create_tables_from_files): - return _create_tables_from_files( - 'mathesar/tests/data/base_table.csv', - 'mathesar/tests/data/reference_table.csv', - ) - - -@pytest.fixture -def publication_tables(_create_tables_from_files, client): - author_table, publisher_table, publication_table, checkouts_table = _create_tables_from_files( - 'mathesar/tests/data/relation_tables/author.csv', - 'mathesar/tests/data/relation_tables/publisher.csv', - 'mathesar/tests/data/relation_tables/publication.csv', - 'mathesar/tests/data/relation_tables/items.csv', - ) - author_table_pk_column = author_table.get_column_by_name("id") - author_table.add_constraint(UniqueConstraint(None, author_table.oid, [author_table_pk_column.attnum])) - publisher_table_pk_column = publisher_table.get_column_by_name("id") - publisher_table.add_constraint(UniqueConstraint(None, publisher_table.oid, [publisher_table_pk_column.attnum])) - publication_table_columns = publication_table.get_columns_by_name(["id", "publisher", "author", "co_author"]) - publication_table_pk_column = publication_table_columns[0] - publication_table.add_constraint( - UniqueConstraint( - None, - publication_table.oid, - [publication_table_pk_column.attnum] - ) - ) - checkouts_table_columns = checkouts_table.get_columns_by_name(["id", "publication"]) - checkouts_table_pk_column = checkouts_table_columns[0] - checkouts_table_publication_column = checkouts_table_columns[1] - checkouts_table.add_constraint( - UniqueConstraint( - None, - checkouts_table.oid, - [checkouts_table_pk_column.attnum] - ) - ) - db_type = PostgresType.INTEGER - data = {"type": db_type.id} - # TODO Uncomment when DB query bug is fixed - publication_publisher_column = publication_table_columns[1] - publication_author_column = publication_table_columns[2] - publication_co_author_column = publication_table_columns[3] - client.patch( - f"/api/db/v0/tables/{publication_table.id}/columns/{publication_publisher_column.id}/", data=data - ) - publication_table.add_constraint( - ForeignKeyConstraint( - None, - publication_table.oid, - [publication_publisher_column.attnum], - publisher_table.oid, - [publisher_table_pk_column.attnum], {} - ) - ) - client.patch( - 
f"/api/db/v0/tables/{publication_table.id}/columns/{publication_author_column.id}/", data=data - ) - publication_table.add_constraint( - ForeignKeyConstraint( - None, - publication_table.oid, - [publication_author_column.attnum], - author_table.oid, - [author_table_pk_column.attnum], {} - ) - ) - client.patch( - f"/api/db/v0/tables/{publication_table.id}/columns/{publication_co_author_column.id}/", data=data - ) - publication_table.add_constraint( - ForeignKeyConstraint( - None, - publication_table.oid, - [publication_co_author_column.attnum], - author_table.oid, - [author_table_pk_column.attnum], {} - ) - ) - client.patch( - f"/api/db/v0/tables/{checkouts_table.id}/columns/{checkouts_table_publication_column.id}/", data=data - ) - checkouts_table.add_constraint( - ForeignKeyConstraint( - None, - checkouts_table.oid, - [checkouts_table_publication_column.attnum], - publication_table.oid, - [publication_table_pk_column.attnum], {} - ) - ) - return author_table, publisher_table, publication_table, checkouts_table - - -@pytest.fixture -def two_multi_column_foreign_key_tables(_create_tables_from_files): - return _create_tables_from_files( - 'mathesar/tests/data/multi_column_foreign_key_base_table.csv', - 'mathesar/tests/data/multi_column_reference_table.csv', - ) - - -@pytest.fixture -def two_invalid_related_data_foreign_key_tables(_create_tables_from_files): - return _create_tables_from_files( - 'mathesar/tests/data/invalid_reference_base_table.csv', - 'mathesar/tests/data/reference_table.csv', - ) - - @pytest.fixture def _create_tables_from_files(create_table, get_uid): def _create(*csv_files): diff --git a/mathesar/tests/api/query/__init__.py b/mathesar/tests/api/query/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/mathesar/tests/api/query/conftest.py b/mathesar/tests/api/query/conftest.py deleted file mode 100644 index 540680ca1e..0000000000 --- a/mathesar/tests/api/query/conftest.py +++ /dev/null @@ -1,58 +0,0 @@ -import pytest -from mathesar.models.query import Exploration - - -@pytest.fixture -def academics_ma_tables(db_table_to_dj_table, academics_db_tables): - return { - table_name: db_table_to_dj_table(db_table) - for table_name, db_table - in academics_db_tables.items() - } - - -@pytest.fixture -def create_minimal_patents_query(create_patents_table, get_uid, patent_schema): - schema_name = patent_schema.name - - def _create(schema_name=schema_name): - base_table = create_patents_table(table_name=get_uid(), schema_name=schema_name) - initial_columns = [ - { - 'id': base_table.get_column_by_name('Center').id, - 'alias': 'col1', - }, - { - 'id': base_table.get_column_by_name('Case Number').id, - 'alias': 'col2', - }, - ] - display_names = { - 'col1': 'Column 1', - 'col2': 'Column 2', - 'Checkout Month': 'Month', - 'Count': 'Number of Checkouts', - } - display_options = { - 'col1': dict(a=1), - 'col2': dict(b=2), - } - ui_query = Exploration.objects.create( - base_table=base_table, - initial_columns=initial_columns, - display_options=display_options, - display_names=display_names, - ) - return ui_query - - return _create - - -@pytest.fixture -def minimal_patents_query(create_minimal_patents_query): - query = create_minimal_patents_query() - yield query - - # cleanup - query.delete() - query.base_table.delete() diff --git a/mathesar/tests/api/query/test_aggregation_functions.py b/mathesar/tests/api/query/test_aggregation_functions.py deleted file mode 100644 index 869049ef21..0000000000 --- a/mathesar/tests/api/query/test_aggregation_functions.py +++ 
/dev/null @@ -1,2211 +0,0 @@ -display_option_origin = "display_option_origin" - - -def test_alias(library_ma_tables, get_uid, client): - _ = library_ma_tables - checkouts = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Checkouts"] - columns = { - c["name"]: c for c in checkouts["columns"] - } - request_data = { - "name": get_uid(), - "base_table": checkouts["id"], - "initial_columns": [ - {"id": columns["id"]["id"], "alias": "id"}, - {"id": columns["Checkout Time"]["id"], "alias": "Checkout Time"}, - {"id": columns["Patron"]["id"], "alias": "Patron"}, - ], - "display_names": { - "Checkout Month": "Month", - "Count": "Number of Checkouts", - "Sum": "Sum of patron", - }, - "display_options": { - "Checkout Time": { - display_option_origin: "Checkout Time", - }, - "id": { - display_option_origin: "id", - }, - "Patron": { - display_option_origin: "Patron", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Checkout Time", - "output_alias": "Checkout Month", - "preproc": "truncate_to_month", - } - ], - "aggregation_expressions": [ - { - "input_alias": "id", - "output_alias": "Count", - "function": "count", - }, - { - "input_alias": "Patron", - "output_alias": "Sum", - "function": "sum", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()["id"] - expect_repsonse_data = [ - { - 'alias': 'Checkout Month', - 'display_name': 'Month', - 'type': 'text', - 'type_options': None, - 'display_options': { - display_option_origin: "Checkout Time", - }, - 'is_initial_column': False, - 'input_table_name': None, - 'input_table_id': None, - 'input_column_name': None, - 'input_alias': 'Checkout Time', - }, { - 'alias': 'Count', - 'display_name': 'Number of Checkouts', - 'type': 'integer', - 'type_options': None, - 'display_options': { - display_option_origin: "id", - }, - 'is_initial_column': False, - 'input_table_name': None, - 'input_table_id': None, - 'input_column_name': None, - 'input_alias': 'id', - }, { - 'alias': 'Sum', - 'display_name': 'Sum of patron', - 'type': 'numeric', - 'type_options': None, - 'display_options': { - display_option_origin: "Patron", - }, - 'is_initial_column': False, - 'input_table_name': None, - 'input_table_id': None, - 'input_column_name': None, - 'input_alias': 'Patron', - } - ] - actual_response_data = client.get(f'/api/db/v0/queries/{query_id}/columns/').json() - assert sorted(actual_response_data, key=lambda x: x['alias']) == expect_repsonse_data - - -def test_count_aggregation(library_ma_tables, get_uid, client): - _ = library_ma_tables - checkouts = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Checkouts"] - columns = { - c["name"]: c for c in checkouts["columns"] - } - request_data = { - "name": get_uid(), - "base_table": checkouts["id"], - "initial_columns": [ - {"id": columns["id"]["id"], "alias": "id"}, - {"id": columns["Checkout Time"]["id"], "alias": "Checkout Time"}, - ], - "display_names": { - "Checkout Month": "Month", - "Count": "Number of Checkouts", - }, - "display_options": { - "Checkout Time": { - display_option_origin: "Checkout Time", - }, - "id": { - display_option_origin: "id", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Checkout Time", - "output_alias": "Checkout Month", - "preproc": "truncate_to_month", - } - ], - "aggregation_expressions": [ - { - 
"input_alias": "id", - "output_alias": "Count", - "function": "count", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - {'Checkout Month': '2022-05', 'Count': 39}, - {'Checkout Month': '2022-06', 'Count': 26}, - {'Checkout Month': '2022-07', 'Count': 29}, - {'Checkout Month': '2022-08', 'Count': 10}, - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['Checkout Month']) == expect_records - - -def test_mean_aggregation(library_ma_tables, get_uid, client): - _ = library_ma_tables - checkouts = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Checkouts"] - columns = { - c["name"]: c for c in checkouts["columns"] - } - request_data = { - "name": get_uid(), - "base_table": checkouts["id"], - "initial_columns": [ - {"id": columns["Checkout Time"]["id"], "alias": "Checkout Time"}, - {"id": columns["Patron"]["id"], "alias": "Patron"}, - ], - "display_names": { - "Checkout Month": "Month", - "Mean": "Mean of patron", - }, - "display_options": { - "Checkout Time": { - display_option_origin: "Checkout Time", - }, - "Patron": { - display_option_origin: "Patron", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Checkout Time", - "output_alias": "Checkout Month", - "preproc": "truncate_to_month", - } - ], - "aggregation_expressions": [ - { - "input_alias": "Patron", - "output_alias": "Mean", - "function": "mean", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - {'Checkout Month': '2022-05', 'Mean': 16.641025641025642}, - {'Checkout Month': '2022-06', 'Mean': 11.461538461538462}, - {'Checkout Month': '2022-07', 'Mean': 18.06896551724138}, - {'Checkout Month': '2022-08', 'Mean': 12.6}, - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['Checkout Month']) == expect_records - - -def test_sum_aggregation(library_ma_tables, get_uid, client): - _ = library_ma_tables - checkouts = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Checkouts"] - columns = { - c["name"]: c for c in checkouts["columns"] - } - request_data = { - "name": get_uid(), - "base_table": checkouts["id"], - "initial_columns": [ - {"id": columns["Checkout Time"]["id"], "alias": "Checkout Time"}, - {"id": columns["Patron"]["id"], "alias": "Patron"}, - ], - "display_names": { - "Checkout Month": "Month", - "Sum": "Sum of patron", - }, - "display_options": { - "Checkout Time": { - display_option_origin: "Checkout Time", - }, - "Patron": { - display_option_origin: "Patron", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Checkout Time", - "output_alias": "Checkout Month", - "preproc": "truncate_to_month", - } - ], - "aggregation_expressions": [ - { - "input_alias": "Patron", - "output_alias": "Sum", - "function": "sum", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - {'Checkout Month': '2022-05', 'Sum': 649}, - {'Checkout Month': 
'2022-06', 'Sum': 298}, - {'Checkout Month': '2022-07', 'Sum': 524}, - {'Checkout Month': '2022-08', 'Sum': 126}, - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['Checkout Month']) == expect_records - - -def test_median_aggregation(library_ma_tables, get_uid, client): - _ = library_ma_tables - checkouts = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Checkouts"] - columns = { - c["name"]: c for c in checkouts["columns"] - } - request_data = { - "name": get_uid(), - "base_table": checkouts["id"], - "initial_columns": [ - {"id": columns["Checkout Time"]["id"], "alias": "Checkout Time"}, - {"id": columns["Patron"]["id"], "alias": "Patron"}, - ], - "display_names": { - "Checkout Month": "Month", - "Median": "Median of patron", - }, - "display_options": { - "Checkout Time": { - display_option_origin: "Checkout Time", - }, - "Patron": { - display_option_origin: "Patron", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Checkout Time", - "output_alias": "Checkout Month", - "preproc": "truncate_to_month", - } - ], - "aggregation_expressions": [ - { - "input_alias": "Patron", - "output_alias": "Median", - "function": "median", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - {'Checkout Month': '2022-05', 'Median': 18}, - {'Checkout Month': '2022-06', 'Median': 8}, - {'Checkout Month': '2022-07', 'Median': 20}, - {'Checkout Month': '2022-08', 'Median': 11}, - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['Checkout Month']) == expect_records - - -def test_mode_aggregation(library_ma_tables, get_uid, client): - _ = library_ma_tables - checkouts = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Checkouts"] - columns = { - c["name"]: c for c in checkouts["columns"] - } - request_data = { - "name": get_uid(), - "base_table": checkouts["id"], - "initial_columns": [ - {"id": columns["Checkout Time"]["id"], "alias": "Checkout Time"}, - {"id": columns["Patron"]["id"], "alias": "Patron"}, - ], - "display_names": { - "Checkout Month": "Month", - "Mode": "Mode of patron", - }, - "display_options": { - "Checkout Time": { - display_option_origin: "Checkout Time", - }, - "Patron": { - display_option_origin: "Patron", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Checkout Time", - "output_alias": "Checkout Month", - "preproc": "truncate_to_month", - } - ], - "aggregation_expressions": [ - { - "input_alias": "Patron", - "output_alias": "Mode", - "function": "mode", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - {'Checkout Month': '2022-05', 'Mode': 11}, - {'Checkout Month': '2022-06', 'Mode': 2}, - {'Checkout Month': '2022-07', 'Mode': 22}, - {'Checkout Month': '2022-08', 'Mode': 3}, - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['Checkout Month']) == expect_records - - -def test_max_aggregation(library_ma_tables, get_uid, client): - _ = 
library_ma_tables - checkouts = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Checkouts"] - columns = { - c["name"]: c for c in checkouts["columns"] - } - request_data = { - "name": get_uid(), - "base_table": checkouts["id"], - "initial_columns": [ - {"id": columns["Checkout Time"]["id"], "alias": "Checkout Time"}, - {"id": columns["Patron"]["id"], "alias": "Patron"}, - ], - "display_names": { - "Checkout Month": "Month", - "Max": "Max of patron", - }, - "display_options": { - "Checkout Time": { - display_option_origin: "Checkout Time", - }, - "Patron": { - display_option_origin: "Patron", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Checkout Time", - "output_alias": "Checkout Month", - "preproc": "truncate_to_month", - } - ], - "aggregation_expressions": [ - { - "input_alias": "Patron", - "output_alias": "Max", - "function": "max", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - {'Checkout Month': '2022-05', 'Max': 29}, - {'Checkout Month': '2022-06', 'Max': 27}, - {'Checkout Month': '2022-07', 'Max': 29}, - {'Checkout Month': '2022-08', 'Max': 29}, - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['Checkout Month']) == expect_records - - -def test_peak_time_aggregation(library_ma_tables, get_uid, client): - _ = library_ma_tables - checkouts = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Checkouts"] - columns = { - c["name"]: c for c in checkouts["columns"] - } - request_data = { - "name": get_uid(), - "base_table": checkouts["id"], - "initial_columns": [ - {"id": columns["Checkout Time"]["id"], "alias": "Checkout Time"}, - {"id": columns["Patron"]["id"], "alias": "Patron"}, - ], - "display_names": { - "Checkout Time": "Checkout Time", - "Patron": "Patron", - }, - "display_options": { - "Checkout Time": { - display_option_origin: "Checkout Time", - }, - "Patron": { - display_option_origin: "Patron", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Patron", - "output_alias": "Patron", - } - ], - "aggregation_expressions": [ - { - "input_alias": "Checkout Time", - "output_alias": "Checkout Time", - "function": "peak_time", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - { - "Patron": 1, - "Checkout Time": "12:54:00.286569" - }, - { - "Patron": 2, - "Checkout Time": "12:33:56.911007" - }, - { - "Patron": 3, - "Checkout Time": "15:59:15.468421" - }, - { - "Patron": 4, - "Checkout Time": "19:32:29.142471" - }, - { - "Patron": 5, - "Checkout Time": "13:14:47.698064" - }, - { - "Patron": 6, - "Checkout Time": "14:03:59.331127" - }, - { - "Patron": 7, - "Checkout Time": "12:39:06.036969" - }, - { - "Patron": 8, - "Checkout Time": "13:11:37.322141" - }, - { - "Patron": 9, - "Checkout Time": "15:42:14.208165" - }, - { - "Patron": 10, - "Checkout Time": "15:34:02.558857" - }, - { - "Patron": 11, - "Checkout Time": "14:25:18.50151" - }, - { - "Patron": 12, - "Checkout Time": "19:38:12.268677" - }, - { - "Patron": 13, - "Checkout Time": "12:31:00.403794" - }, - { - "Patron": 14, - "Checkout Time": "13:26:25.293263" - 
}, - { - "Patron": 15, - "Checkout Time": "13:34:53.582087" - }, - { - "Patron": 16, - "Checkout Time": "15:23:23.148845" - }, - { - "Patron": 17, - "Checkout Time": "16:39:21.51814" - }, - { - "Patron": 18, - "Checkout Time": "15:56:47.170539" - }, - { - "Patron": 19, - "Checkout Time": "13:05:33.506587" - }, - { - "Patron": 20, - "Checkout Time": "15:45:13.753633" - }, - { - "Patron": 21, - "Checkout Time": "11:40:36.809586" - }, - { - "Patron": 22, - "Checkout Time": "13:25:09.374102" - }, - { - "Patron": 23, - "Checkout Time": "14:18:54.097847" - }, - { - "Patron": 24, - "Checkout Time": "15:30:34.310875" - }, - { - "Patron": 25, - "Checkout Time": "13:03:00.9767" - }, - { - "Patron": 26, - "Checkout Time": "17:14:35.469216" - }, - { - "Patron": 27, - "Checkout Time": "13:41:13.814894" - }, - { - "Patron": 28, - "Checkout Time": "15:51:15.074412" - }, - { - "Patron": 29, - "Checkout Time": "16:03:31.517422" - } - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert all( - actual["Patron"] == expect["Patron"] - and actual["Checkout Time"][:14] == expect["Checkout Time"][:14] - for actual, expect - in zip(sorted(actual_records, key=lambda x: x['Patron']), expect_records) - ) - - -def test_min_aggregation(library_ma_tables, get_uid, client): - _ = library_ma_tables - checkouts = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Checkouts"] - columns = { - c["name"]: c for c in checkouts["columns"] - } - request_data = { - "name": get_uid(), - "base_table": checkouts["id"], - "initial_columns": [ - {"id": columns["Checkout Time"]["id"], "alias": "Checkout Time"}, - {"id": columns["Patron"]["id"], "alias": "Patron"}, - ], - "display_names": { - "Checkout Month": "Month", - "Min": "Min of patron", - }, - "display_options": { - "Checkout Time": { - display_option_origin: "Checkout Time", - }, - "Patron": { - display_option_origin: "Patron", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Checkout Time", - "output_alias": "Checkout Month", - "preproc": "truncate_to_month", - } - ], - "aggregation_expressions": [ - { - "input_alias": "Patron", - "output_alias": "Min", - "function": "min", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - {'Checkout Month': '2022-05', 'Min': 1}, - {'Checkout Month': '2022-06', 'Min': 2}, - {'Checkout Month': '2022-07', 'Min': 3}, - {'Checkout Month': '2022-08', 'Min': 3}, - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['Checkout Month']) == expect_records - - -def test_peak_month_aggregation(library_ma_tables, get_uid, client): - _ = library_ma_tables - checkouts = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Checkouts"] - columns = { - c["name"]: c for c in checkouts["columns"] - } - request_data = { - "name": get_uid(), - "base_table": checkouts["id"], - "initial_columns": [ - {"id": columns["Checkout Time"]["id"], "alias": "Checkout Time"}, - {"id": columns["Patron"]["id"], "alias": "Patron"}, - ], - "display_names": { - "Checkout Time": "Checkout Time", - "Patron": "Patron", - }, - "display_options": { - "Checkout Time": { - display_option_origin: "Checkout Time", - }, - "Patron": { - display_option_origin: "Patron", - }, - }, - 
"transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Patron", - "output_alias": "Patron", - } - ], - "aggregation_expressions": [ - { - "input_alias": "Checkout Time", - "output_alias": "Checkout Time", - "function": "peak_month", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - { - "Patron": 1, - "Checkout Time": 5 - }, - { - "Patron": 2, - "Checkout Time": 6 - }, - { - "Patron": 3, - "Checkout Time": 7 - }, - { - "Patron": 4, - "Checkout Time": 5 - }, - { - "Patron": 5, - "Checkout Time": 6 - }, - { - "Patron": 6, - "Checkout Time": 7 - }, - { - "Patron": 7, - "Checkout Time": 5 - }, - { - "Patron": 8, - "Checkout Time": 7 - }, - { - "Patron": 9, - "Checkout Time": 5 - }, - { - "Patron": 10, - "Checkout Time": 6 - }, - { - "Patron": 11, - "Checkout Time": 6 - }, - { - "Patron": 12, - "Checkout Time": 7 - }, - { - "Patron": 13, - "Checkout Time": 7 - }, - { - "Patron": 14, - "Checkout Time": 5 - }, - { - "Patron": 15, - "Checkout Time": 6 - }, - { - "Patron": 16, - "Checkout Time": 6 - }, - { - "Patron": 17, - "Checkout Time": 7 - }, - { - "Patron": 18, - "Checkout Time": 5 - }, - { - "Patron": 19, - "Checkout Time": 6 - }, - { - "Patron": 20, - "Checkout Time": 7 - }, - { - "Patron": 21, - "Checkout Time": 5 - }, - { - "Patron": 22, - "Checkout Time": 7 - }, - { - "Patron": 23, - "Checkout Time": 6 - }, - { - "Patron": 24, - "Checkout Time": 6 - }, - { - "Patron": 25, - "Checkout Time": 5 - }, - { - "Patron": 26, - "Checkout Time": 7 - }, - { - "Patron": 27, - "Checkout Time": 6 - }, - { - "Patron": 28, - "Checkout Time": 6 - }, - { - "Patron": 29, - "Checkout Time": 7 - } - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['Patron']) == expect_records - - -def test_percentage_true_aggregation(payments_ma_table, get_uid, client): - _ = payments_ma_table - payments = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Payments"] - columns = { - c["name"]: c for c in payments["columns"] - } - request_data = { - "name": get_uid(), - "base_table": payments["id"], - "initial_columns": [ - {"id": columns["Payment Mode"]["id"], "alias": "Payment Mode"}, - {"id": columns["Is Fraudulent"]["id"], "alias": "Is Fraudulent"}, - ], - "display_names": { - "Payment Mode": "Payment Mode", - "Percentage Fraudulent": "Percentage Fraudulent", - }, - "display_options": { - "Payment Mode": { - display_option_origin: "Payment Mode", - }, - "Is Fraudulent": { - display_option_origin: "Is Fraudulent", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Payment Mode", - "output_alias": "Payment Mode", - } - ], - "aggregation_expressions": [ - { - "input_alias": "Is Fraudulent", - "output_alias": "Percentage Fraudulent", - "function": "percentage_true", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - { - 'Payment Mode': 'UPI', - 'Percentage Fraudulent': 16.666666666666668 - }, - { - 'Payment Mode': 'credit card', - 'Percentage Fraudulent': 10.0 - }, - { - 'Payment Mode': 'debit card', - 'Percentage Fraudulent': 10.81081081081081 - }, - { - 'Payment Mode': 'pay later', - 'Percentage Fraudulent': 
14.285714285714286 - }, - { - 'Payment Mode': 'wallet', - 'Percentage Fraudulent': 23.333333333333332 - } - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['Payment Mode']) == expect_records - - -def test_list_aggregation_intrval(athletes_ma_table, get_uid, client): - _ = athletes_ma_table - athletes = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Marathon"] - columns = { - c["name"]: c for c in athletes["columns"] - } - request_data = { - "name": get_uid(), - "base_table": athletes["id"], - "initial_columns": [ - {"id": columns["city"]["id"], "alias": "city"}, - {"id": columns["finish time"]["id"], "alias": "finish time"}, - ], - "display_names": { - "city": "city", - "finish time": "finish time", - }, - "display_options": { - "ciy": { - display_option_origin: "country", - }, - "finish time": { - display_option_origin: "finish time", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "city", - "output_alias": "city", - } - ], - "aggregation_expressions": [ - { - "input_alias": "finish time", - "output_alias": "finish time", - "function": "distinct_aggregate_to_array", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - { - "city": "Berlin", - "finish time": [ - "02:01:39", - "02:02:57", - "02:03:23", - "02:03:59", - "02:04:00" - ] - }, - { - "city": "Chicago", - "finish time": [ - "02:14:04", - "02:17:45" - ] - }, - { - "city": "London", - "finish time": [ - "02:15:25", - "02:17:56", - "02:18:35" - ] - } - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['city']) == expect_records - - -def test_list_aggregation_mathesar_json_array(players_ma_table, get_uid, client): - _ = players_ma_table - players = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Players"] - columns = { - c["name"]: c for c in players["columns"] - } - request_data = { - "name": get_uid(), - "base_table": players["id"], - "initial_columns": [ - {"id": columns["ballon_dor"]["id"], "alias": "ballon_dor"}, - {"id": columns["country"]["id"], "alias": "country"}, - {"id": columns["titles"]["id"], "alias": "titles"}, - ], - "display_names": { - "ballon_dor": "ballon_dor", - "country": "country", - "titles": "titles", - }, - "display_options": { - "country": { - display_option_origin: "country", - }, - "ballon_dor": { - display_option_origin: "ballon_dor", - }, - "titles": { - display_option_origin: "titles", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "country", - "output_alias": "country", - } - ], - "aggregation_expressions": [ - { - "input_alias": "ballon_dor", - "output_alias": "ballon_dor", - "function": "distinct_aggregate_to_array", - }, - { - "input_alias": "titles", - "output_alias": "titles", - "function": "distinct_aggregate_to_array", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - { - "country": "Argentina", - "ballon_dor": [ - [ - 1957, - 1959, - 1960 - ], - [ - 2009, - 2011, - 2012, - 2013, - 2016, - 2019, - 2021 - ] - ], - "titles": [ - [ - { - 
"ucl": 4, - "world_cup": 1 - } - ], - [ - { - "ucl": 5, - "world_cup": 0 - } - ] - ] - }, - { - "country": "Brazil", - "ballon_dor": [ - [ - 1962 - ], - [ - 1999 - ], - [ - 2005 - ], - [ - 2007 - ], - [ - 1997, - 2002 - ] - ], - "titles": [ - [ - { - "ucl": 0, - "world_cup": 2 - } - ], - [ - { - "ucl": 1, - "world_cup": 1 - } - ], - [ - { - "ucl": 3, - "world_cup": 2 - } - ] - ] - }, - { - "country": "Croatia", - "ballon_dor": [ - [ - 2018 - ] - ], - "titles": [ - [ - { - "ucl": 4, - "world_cup": 0 - } - ] - ] - }, - { - "country": "England", - "ballon_dor": [ - [ - 1966 - ] - ], - "titles": [ - [ - { - "ucl": 1, - "world_cup": 1 - } - ] - ] - }, - { - "country": "France", - "ballon_dor": [ - [ - 1958 - ], - [ - 1998 - ], - [ - 1983, - 1984, - 1985 - ] - ], - "titles": [ - [ - { - "ucl": 0, - "world_cup": 0 - } - ], - [ - { - "ucl": 1, - "world_cup": 0 - } - ], - [ - { - "ucl": 1, - "world_cup": 1 - } - ] - ] - }, - { - "country": "Germany", - "ballon_dor": [ - [ - 1970 - ], - [ - 1990 - ], - [ - 1972, - 1976 - ] - ], - "titles": [ - [ - { - "ucl": 0, - "world_cup": 1 - } - ], - [ - { - "ucl": 3, - "world_cup": 1 - } - ] - ] - }, - { - "country": "Hungary", - "ballon_dor": [ - [ - 1959 - ] - ], - "titles": [ - [ - { - "ucl": 3, - "world_cup": 0 - } - ] - ] - }, - { - "country": "Italy", - "ballon_dor": [ - [ - 1969 - ], - [ - 1993 - ], - [ - 2006 - ] - ], - "titles": [ - [ - { - "ucl": 0, - "world_cup": 0 - } - ], - [ - { - "ucl": 0, - "world_cup": 1 - } - ], - [ - { - "ucl": 2, - "world_cup": 1 - } - ] - ] - }, - { - "country": "Liberia", - "ballon_dor": [ - [ - 1995 - ] - ], - "titles": [ - [ - { - "ucl": 0, - "world_cup": 0 - } - ] - ] - }, - { - "country": "Netherlands", - "ballon_dor": [ - [ - 1971, - 1973, - 1974 - ], - [ - 1988, - 1992, - 1993 - ] - ], - "titles": [ - [ - { - "ucl": 3, - "world_cup": 0 - } - ], - [ - { - "ucl": 3, - "world_cup": 1 - } - ] - ] - }, - { - "country": "Portugal", - "ballon_dor": [ - [ - 2008, - 2010, - 2014, - 2015, - 2017 - ] - ], - "titles": [ - [ - { - "ucl": 5, - "world_cup": 0 - } - ] - ] - }, - { - "country": "Soviet Union", - "ballon_dor": [ - [ - 1963 - ], - [ - 1975 - ] - ], - "titles": [ - [ - { - "ucl": 0, - "world_cup": 0 - } - ] - ] - }, - { - "country": "Ukraine", - "ballon_dor": [ - [ - 2004 - ] - ], - "titles": [ - [ - { - "ucl": 1, - "world_cup": 0 - } - ] - ] - } - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['country']) == expect_records - - -def test_Mathesar_money_distinct_list_aggregation(library_ma_tables, get_uid, client): - _ = library_ma_tables - items = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Items"] - columns = { - c["name"]: c for c in items["columns"] - } - request_data = { - "name": get_uid(), - "base_table": items["id"], - "initial_columns": [ - {"id": columns["Publication"]["id"], "alias": "Publication"}, - {"id": columns["Acquisition Price"]["id"], "alias": "Acquisition Price"}, - ], - "display_names": { - "Acquisition Price": "Price", - "Publication": "Publication", - }, - "display_options": { - "Publication": { - display_option_origin: "Publication", - }, - "Acquisition Price": { - display_option_origin: "Acquisition Price", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Publication", - "output_alias": "Publication", - } - ], - "aggregation_expressions": [ - { - "input_alias": "Acquisition Price", - "output_alias": "Acquisition 
Price", - "function": "distinct_aggregate_to_array" - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - { - "Publication": 1, - "Acquisition Price": [ - 0.59 - ] - }, - { - "Publication": 2, - "Acquisition Price": [ - 6.09 - ] - }, - { - "Publication": 3, - "Acquisition Price": [ - 3.89 - ] - }, - { - "Publication": 4, - "Acquisition Price": [ - 11.42, - 13.55 - ] - }, - { - "Publication": 5, - "Acquisition Price": [ - 10.75 - ] - }, - { - "Publication": 6, - "Acquisition Price": [ - 12.08 - ] - }, - { - "Publication": 7, - "Acquisition Price": [ - 4.66 - ] - }, - { - "Publication": 8, - "Acquisition Price": [ - 0.1 - ] - }, - { - "Publication": 9, - "Acquisition Price": [ - 11.05, - 14.94 - ] - }, - { - "Publication": 10, - "Acquisition Price": [ - 1.75, - 3.88 - ] - }, - { - "Publication": 11, - "Acquisition Price": [ - 4.8 - ] - }, - { - "Publication": 12, - "Acquisition Price": [ - 1.31 - ] - }, - { - "Publication": 13, - "Acquisition Price": [ - 2.06, - 7.77 - ] - }, - { - "Publication": 14, - "Acquisition Price": [ - 8.26 - ] - }, - { - "Publication": 15, - "Acquisition Price": [ - 3.09, - 3.73, - 3.76, - 9.6, - 11.77, - 13.06 - ] - }, - { - "Publication": 16, - "Acquisition Price": [ - 4.28 - ] - }, - { - "Publication": 17, - "Acquisition Price": [ - 2.03, - 3.23 - ] - }, - { - "Publication": 18, - "Acquisition Price": [ - 3.62, - 5.45, - 9.77, - 10.78 - ] - }, - { - "Publication": 19, - "Acquisition Price": [ - 9.55 - ] - }, - { - "Publication": 20, - "Acquisition Price": [ - 0.16, - 5.28 - ] - }, - { - "Publication": 21, - "Acquisition Price": [ - 5.29 - ] - }, - { - "Publication": 22, - "Acquisition Price": [ - 8.91, - 12.06, - 14.76 - ] - }, - { - "Publication": 23, - "Acquisition Price": [ - 4.69, - 14.48 - ] - }, - { - "Publication": 24, - "Acquisition Price": [ - 2.08, - 4.52, - 12.53 - ] - }, - { - "Publication": 25, - "Acquisition Price": [ - 7.45, - 10.39 - ] - }, - { - "Publication": 26, - "Acquisition Price": [ - 3.36, - 14.59 - ] - }, - { - "Publication": 27, - "Acquisition Price": [ - 1.12 - ] - }, - { - "Publication": 28, - "Acquisition Price": [ - 3.18, - 12.24 - ] - }, - { - "Publication": 29, - "Acquisition Price": [ - 10.6 - ] - }, - { - "Publication": 30, - "Acquisition Price": [ - 6.38 - ] - }, - { - "Publication": 31, - "Acquisition Price": [ - 8.47 - ] - }, - { - "Publication": 32, - "Acquisition Price": [ - 2.11 - ] - }, - { - "Publication": 33, - "Acquisition Price": [ - 2.77 - ] - }, - { - "Publication": 34, - "Acquisition Price": [ - 9.23, - 10.27, - 10.82, - 12.35, - 12.78 - ] - }, - { - "Publication": 35, - "Acquisition Price": [ - 8.25 - ] - }, - { - "Publication": 36, - "Acquisition Price": [ - 12.79, - 12.98, - 13.96 - ] - }, - { - "Publication": 37, - "Acquisition Price": [ - 1.88, - 5.57, - 10.81, - 13.37 - ] - }, - { - "Publication": 38, - "Acquisition Price": [ - 12.01 - ] - }, - { - "Publication": 39, - "Acquisition Price": [ - 3.17 - ] - }, - { - "Publication": 40, - "Acquisition Price": [ - 2.73, - 10.1 - ] - }, - { - "Publication": 41, - "Acquisition Price": [ - 10.55, - 13.57 - ] - }, - { - "Publication": 42, - "Acquisition Price": [ - 8.31, - 9.27, - 11.83 - ] - }, - { - "Publication": 43, - "Acquisition Price": [ - 6.63, - 13.27 - ] - }, - { - "Publication": 44, - "Acquisition Price": [ - 5.14 - ] - }, - { - "Publication": 45, - "Acquisition Price": [ - 7.21 - ] - }, - { - 
"Publication": 46, - "Acquisition Price": [ - 13.85 - ] - }, - { - "Publication": 47, - "Acquisition Price": [ - 10.93, - 10.99 - ] - }, - { - "Publication": 48, - "Acquisition Price": [ - 4.02, - 6.41, - 9.6, - 10.83, - 14.32 - ] - }, - { - "Publication": 49, - "Acquisition Price": [ - 5.74, - 6.66, - 13.08 - ] - }, - { - "Publication": 50, - "Acquisition Price": [ - 6.97, - 13.75 - ] - } - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['Publication']) == expect_records - - -def test_Mathesar_URI_distinct_list_aggregation(library_ma_tables, get_uid, client): - _ = library_ma_tables - authors = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Authors"] - columns = { - c["name"]: c for c in authors["columns"] - } - request_data = { - "name": get_uid(), - "base_table": authors["id"], - "initial_columns": [ - {"id": columns["Author Last Name"]["id"], "alias": "Author Last Name"}, - {"id": columns["Author Website"]["id"], "alias": "Author Website"}, - ], - "display_names": { - "Author Last Name": "Author Last Name", - "Website": "Website", - }, - "display_options": { - "Author Last Name": { - display_option_origin: "Author Last Name", - }, - "Author Website": { - display_option_origin: "Author Website", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "Author Last Name", - "output_alias": "Author Last Name", - } - ], - "aggregation_expressions": [ - { - "input_alias": "Author Website", - "output_alias": "Website", - "function": "distinct_aggregate_to_array", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - - expect_records = [ - { - "Author Last Name": "Castillo", - "Website": [ - "https://jennifercastillo.com" - ] - }, - { - "Author Last Name": "Diaz", - "Website": [ - "https://diaz.net" - ] - }, - { - "Author Last Name": "Dunlap", - "Website": [ - "https://dunlap.com" - ] - }, - { - "Author Last Name": "Edwards", - "Website": [ - "https://catherineedwards.com", - "https://edwards.info" - ] - }, - { - "Author Last Name": "Evans", - "Website": [ - "https://bonnieevans.com" - ] - }, - { - "Author Last Name": "Harris", - "Website": [ - "http://harris.info" - ] - }, - { - "Author Last Name": "Herrera", - "Website": [ - None - ] - }, - { - "Author Last Name": "Jensen", - "Website": [ - "http://hannahjensen.org" - ] - }, - { - "Author Last Name": "Johnson", - "Website": [ - "https://kimberlyjohnson.net" - ] - }, - { - "Author Last Name": "Medina", - "Website": [ - None - ] - }, - { - "Author Last Name": "Munoz", - "Website": [ - "https://munoz.com" - ] - }, - { - "Author Last Name": "Newman", - "Website": [ - None - ] - }, - { - "Author Last Name": "Robinson", - "Website": [ - "https://seanrobinson.com" - ] - } - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['Author Last Name']) == expect_records - - -def test_Mathesar_Email_distinct_list_aggregation(library_ma_tables, get_uid, client): - _ = library_ma_tables - patrons = { - t["name"]: t for t in client.get("/api/db/v0/tables/").json()["results"] - }["Patrons"] - columns = { - c["name"]: c for c in patrons["columns"] - } - request_data = { - "name": get_uid(), - "base_table": patrons["id"], - "initial_columns": [ - {"id": columns["First 
Name"]["id"], "alias": "First Name"}, - {"id": columns["Email"]["id"], "alias": "Email"}, - ], - "display_names": { - "First Name": "First Name", - "Email": "Email", - }, - "display_options": { - "First Name": { - display_option_origin: "First Name", - }, - "Email": { - display_option_origin: "Email", - }, - }, - "transformations": [ - { - "spec": { - "grouping_expressions": [ - { - "input_alias": "First Name", - "output_alias": "First Name", - } - ], - "aggregation_expressions": [ - { - "input_alias": "Email", - "output_alias": "Email", - "function": "distinct_aggregate_to_array", - } - ] - }, - "type": "summarize", - } - ] - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - query_id = response.json()['id'] - expect_records = [ - { - "First Name": "Alexander", - "Email": [ - "alexander.phillips38@alvarez.com" - ] - }, - { - "First Name": "Andrew", - "Email": [ - "a.vaughan@roy.com" - ] - }, - { - "First Name": "Autumn", - "Email": [ - "autumn.h19@mathews.com" - ] - }, - { - "First Name": "Barry", - "Email": [ - "b.huff@haney.com" - ] - }, - { - "First Name": "Benjamin", - "Email": [ - "b.watson33@bell-beard.biz" - ] - }, - { - "First Name": "Calvin", - "Email": [ - "c.curtis12@brown.com" - ] - }, - { - "First Name": "Connor", - "Email": [ - "c.taylor@miller.org" - ] - }, - { - "First Name": "Deanna", - "Email": [ - "deanna.s54@cook.org" - ] - }, - { - "First Name": "Eduardo", - "Email": [ - "eduardorojas13@peterson-curry.com" - ] - }, - { - "First Name": "Harry", - "Email": [ - "harry.h5@beck.net" - ] - }, - { - "First Name": "Heather", - "Email": [ - "heatherwheeler@peterson-delgado.com" - ] - }, - { - "First Name": "Jason", - "Email": [ - "jasongriffin@wilkinson.com", - "jpeterson11@williams.com" - ] - }, - { - "First Name": "Jennifer", - "Email": [ - "jenniferw20@morrison-patton.com" - ] - }, - { - "First Name": "Jesse", - "Email": [ - "jessef88@stewart.com" - ] - }, - { - "First Name": "Joshua", - "Email": [ - "jhooper@bowers.com" - ] - }, - { - "First Name": "Kathy", - "Email": [ - "kathyb@le.org" - ] - }, - { - "First Name": "Kristen", - "Email": [ - "kwright@odonnell.com" - ] - }, - { - "First Name": "Laura", - "Email": [ - "lauras@hurley.com" - ] - }, - { - "First Name": "Lori", - "Email": [ - "l.stevens@lopez.com" - ] - }, - { - "First Name": "Luke", - "Email": [ - "luke.vang46@palmer.com" - ] - }, - { - "First Name": "Mary", - "Email": [ - "mknox45@fletcher-rodriguez.net" - ] - }, - { - "First Name": "Nicole", - "Email": [ - "nicole.jones66@dixon.org" - ] - }, - { - "First Name": "Patrick", - "Email": [ - "pshepherd13@white-bradford.info" - ] - }, - { - "First Name": "Rita", - "Email": [ - "ritab@powell.com" - ] - }, - { - "First Name": "Toni", - "Email": [ - "tevans46@thompson.net" - ] - }, - { - "First Name": "Traci", - "Email": [ - "thamilton76@smith.net" - ] - }, - { - "First Name": "Tyler", - "Email": [ - "t.gonzalez@washington.com" - ] - }, - { - "First Name": "Walter", - "Email": [ - "waltermanning@freeman.com" - ] - }, - { - "First Name": "Yvonne", - "Email": [ - "y.ho@johnson.info" - ] - } - ] - actual_records = client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'] - assert sorted(actual_records, key=lambda x: x['First Name']) == expect_records diff --git a/mathesar/tests/api/query/test_library_demo_reports.py b/mathesar/tests/api/query/test_library_demo_reports.py deleted file mode 100644 index 24f441ea0e..0000000000 --- a/mathesar/tests/api/query/test_library_demo_reports.py +++ /dev/null @@ 
-1,174 +0,0 @@ -import pytest - - -# Otherwise meaningless display option key meant to designate on which alias a given display -# option was defined, so that we can test if display option inheritance works as expected. -display_option_origin = "display_option_origin" - - -@pytest.fixture -def create_overdue_books_query(library_ma_tables, get_uid, client): - checkouts = library_ma_tables["Checkouts"] - patrons = library_ma_tables["Patrons"] - items = library_ma_tables["Items"] - publications = library_ma_tables["Publications"] - request_data = { - "name": get_uid(), - "base_table": checkouts.id, - "display_names": { - "email": "Patron Email", - "Title List": "Titles" - }, - "display_options": { - "email": { - display_option_origin: "email", - }, - "Book Title": { - display_option_origin: "Book Title", - }, - }, - "initial_columns": [ - { - "id": checkouts.get_column_by_name("id").id, - "alias": "Checkout id" - }, { - "id": checkouts.get_column_by_name("Due Date").id, - "alias": "Due Date" - }, { - "id": checkouts.get_column_by_name("Check In Time").id, - "alias": "Check In Time" - }, { - "id": patrons.get_column_by_name("Email").id, - "alias": "email", - "jp_path": [ - [ - checkouts.get_column_by_name("Patron").id, - patrons.get_column_by_name("id").id, - ] - ] - }, { - "id": publications.get_column_by_name("Title").id, - "alias": "Book Title", - "jp_path": [ - [ - checkouts.get_column_by_name("Item").id, - items.get_column_by_name("id").id, - ], [ - items.get_column_by_name("Publication").id, - publications.get_column_by_name("id").id, - ] - ] - }, - ], - "transformations": [ - { - "spec": { - "lesser": [ - {"column_name": ["Due Date"]}, - {"literal": ["2022-08-10"]} - ] - }, - "type": "filter" - }, - { - "spec": { - "null": [{"column_name": ["Check In Time"]}] - }, - "type": "filter" - }, - { - "spec": { - "grouping_expressions": [{ - "input_alias": "email", - "output_alias": "email", - }], - "aggregation_expressions": [ - { - "input_alias": "Book Title", - "output_alias": "Title List", - "function": "distinct_aggregate_to_array" - } - ] - }, - "type": "summarize", - } - ], - - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - return response - - -@pytest.fixture -def check_overdue_books_columns(library_ma_tables, create_overdue_books_query, client): - patrons = library_ma_tables["Patrons"] - query_id = create_overdue_books_query.json()['id'] - expect_response_data = [ - { - 'alias': 'Title List', - 'display_name': 'Titles', - 'type': '_array', - 'type_options': {'item_type': 'text'}, - 'display_options': { - display_option_origin: 'Book Title', - }, - 'is_initial_column': False, - 'input_table_name': None, - 'input_table_id': None, - 'input_column_name': None, - 'input_alias': 'Book Title', - }, - { - 'alias': 'email', - 'display_name': 'Patron Email', - 'type': 'mathesar_types.email', - 'type_options': None, - 'display_options': { - display_option_origin: 'email', - }, - 'is_initial_column': True, - 'input_table_name': 'Patrons', - 'input_table_id': patrons.id, - 'input_column_name': 'Email', - 'input_alias': None, - }, - ] - actual_response_data = client.get(f'/api/db/v0/queries/{query_id}/columns/').json() - assert sorted(actual_response_data, key=lambda x: x['alias']) == expect_response_data - return query_id - - -def test_run_overdue_books_scenario(check_overdue_books_columns, client): - query_id = check_overdue_books_columns - expect_records = [ - { - 'email': 'eduardorojas13@peterson-curry.com', - 'Title List': ['Bar Order 
Might Per', 'Hand Raise Son Probably Do'] - }, { - 'email': 'heatherwheeler@peterson-delgado.com', - 'Title List': ['Bar Order Might Per', 'I Worker Suffer Likely'] - }, { - 'email': 'jhooper@bowers.com', - 'Title List': ['Military Myself Sport Wrong'] - }, { - 'email': 'kathyb@le.org', - 'Title List': ['Pass Street Year'] - }, { - 'email': 'kwright@odonnell.com', - 'Title List': ['Day Beyond Property', 'On Letter Experience'] - }, { - 'email': 'tevans46@thompson.net', - 'Title List': ['Space Music Rest Crime'] - }, { - 'email': 'y.ho@johnson.info', - 'Title List': ['Mention Add Size City Kid', 'Economic Too Level'] - } - ] - actual_records = sorted( - client.get(f'/api/db/v0/queries/{query_id}/records/').json()['results'], - key=lambda x: x['email'] - ) - for rec_pair in zip(actual_records, expect_records): - assert rec_pair[0]['email'] == rec_pair[1]['email'] - assert sorted(rec_pair[0]['Title List']) == sorted(rec_pair[1]['Title List']) diff --git a/mathesar/tests/api/query/test_query_api.py b/mathesar/tests/api/query/test_query_api.py deleted file mode 100644 index 2c2d57acfa..0000000000 --- a/mathesar/tests/api/query/test_query_api.py +++ /dev/null @@ -1,485 +0,0 @@ -import pytest - -from mathesar.models.query import Exploration - - -@pytest.fixture -def post_minimal_query(_post_query, create_patents_table, get_uid): - base_table = create_patents_table(table_name=get_uid()) - request_data = { - "name": get_uid(), - "base_table": base_table.id, - # TODO use actual columns - "initial_columns": [ - { - "id": 1, - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3], [4, 5]], - "alias": "alias_x", - }, - { - "id": 2, - "alias": "alias_y", - }, - ], - } - return _post_query(request_data) - - -@pytest.fixture -def post_query_with_description(_post_query, create_patents_table, get_uid): - base_table = create_patents_table(table_name=get_uid()) - request_data = { - "name": get_uid(), - "description": "A generic description", - "base_table": base_table.id, - "initial_columns": [ - { - "id": 1, - "jp_path": [[1, 3], [4, 5]], - "alias": "alias_x", - }, - { - "id": 2, - "alias": "alias_y", - }, - ], - } - return _post_query(request_data) - - -@pytest.fixture -def _post_query(client): - def _f(request_data): - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 201 - return request_data, response - - return _f - - -@pytest.mark.parametrize( - "expected,actual,should_throw", - [ - [[1, 2, 3], [1, 2, 3], False], - [[1, 2, 4], [1, 2, 3], True], - [3, 3, False], - [3, 4, True], - [dict(a=1, b=2), dict(a=1, b=2), False], - [dict(a=1, c=2), dict(a=1, b=2), True], - [dict(a=[1, 2, 3], b=2, c=dict(a=[1])), dict(a=[1, 2, 3], b=2, c=dict(a=[1])), False], - [dict(a=[1, 2, 5], b=2, c=dict(a=[1])), dict(a=[1, 2, 3], b=2, c=dict(a=[1])), True], - ] -) -def test_deep_equality_assert(expected, actual, should_throw): - if should_throw: - with pytest.raises(Exception): - _deep_equality_assert(expected=expected, actual=actual) - else: - _deep_equality_assert(expected=expected, actual=actual) - - -def test_create(post_minimal_query): - request_data, response = post_minimal_query - response_json = response.json() - _deep_equality_assert(expected=request_data, actual=response_json) - - -write_clients_with_status_code = [ - ('superuser_client_factory', 201, 201), - ('db_manager_client_factory', 201, 201), - ('db_editor_client_factory', 201, 201), - ('schema_manager_client_factory', 201, 400), - ('schema_viewer_client_factory', 201, 400), - 
('db_viewer_schema_manager_client_factory', 201, 201) -] - - -@pytest.mark.parametrize( - 'client_name, expected_status_code, different_schema_expected_status_code', - write_clients_with_status_code -) -def test_create_based_on_permission( - create_patents_table, - request, - get_uid, - client_name, - expected_status_code, - different_schema_expected_status_code -): - base_table = create_patents_table(table_name=get_uid()) - different_schema_base_table = create_patents_table(table_name=get_uid(), schema_name='Private Schema') - request_data = { - "name": get_uid(), - "base_table": base_table.id, - # TODO use actual columns - "initial_columns": [ - { - "id": 1, - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3], [4, 5]], - "alias": "alias_x", - }, - { - "id": 2, - "alias": "alias_y", - }, - ], - } - client = request.getfixturevalue(client_name)(base_table.schema) - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == expected_status_code - request_data = { - "name": get_uid(), - "base_table": different_schema_base_table.id, - # TODO use actual columns - "initial_columns": [ - { - "id": 1, - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3], [4, 5]], - "alias": "alias_x", - }, - { - "id": 2, - "alias": "alias_y", - }, - ], - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == different_schema_expected_status_code - - -def test_query_with_bad_base_table(get_uid, client): - nonexistent_base_table_id = 16135 - request_data = { - "name": get_uid(), - "base_table": nonexistent_base_table_id, - "initial_columns": [ - { - "id": 1, - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3], [4, 5]], - "alias": "alias_x", - }, - { - "id": 2, - "alias": "alias_y", - }, - ], - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 400 - - -def test_query_with_initial_column_without_id(create_patents_table, get_uid, client): - base_table = create_patents_table(table_name=get_uid()) - request_data = { - "name": get_uid(), - "base_table": base_table.id, - "initial_columns": [ - { - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3], [4, 5]], - "alias": "alias_x", - }, - { - "id": 2, - "alias": "alias_y", - }, - ], - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 400 - - -def test_query_with_initial_column_with_bad_jp_path(create_patents_table, get_uid, client): - """ - A jp path that is not made up of a sequence of integer tuples.
- """ - base_table = create_patents_table(table_name=get_uid()) - request_data = { - "name": get_uid(), - "base_table": base_table.id, - "initial_columns": [ - { - "id": 1, - # Mock Django ID; its correctness is not checked - "jp_path": [1], - "alias": "alias_x", - }, - { - "id": 2, - "alias": "alias_y", - }, - ], - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 400 - - -def test_query_with_initial_column_without_alias(create_patents_table, get_uid, client): - base_table = create_patents_table(table_name=get_uid()) - request_data = { - "name": get_uid(), - "base_table": base_table.id, - "initial_columns": [ - { - "id": 1, - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3], [4, 5]], - }, - { - "id": 2, - "alias": "alias_y", - }, - ], - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 400 - - -def test_query_with_initial_column_with_unexpected_key(create_patents_table, get_uid, client): - base_table = create_patents_table(table_name=get_uid()) - request_data = { - "name": get_uid(), - "base_table": base_table.id, - "initial_columns": [ - { - "id": 1, - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3], [4, 5]], - "alias": "alias_x", - "bad_key": 1, - }, - { - "id": 2, - "alias": "alias_y", - }, - ], - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 400 - - -def test_query_with_with_unexpected_key(create_patents_table, get_uid, client): - base_table = create_patents_table(table_name=get_uid()) - request_data = { - "bad_key": 1, - "name": get_uid(), - "base_table": base_table.id, - "initial_columns": [ - { - "id": 1, - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3], [4, 5]], - "alias": "alias_x", - }, - { - "id": 2, - "alias": "alias_y", - }, - ], - } - response = client.post('/api/db/v0/queries/', data=request_data) - assert response.status_code == 400 - - -def test_update(post_minimal_query, client): - post_data, response = post_minimal_query - response_json = response.json() - query_id = response_json['id'] - patch_data = { - "initial_columns": [ - { - "id": 3, - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3]], - "alias": "alias_x", - } - ] - } - response = client.patch(f'/api/db/v0/queries/{query_id}/', data=patch_data) - response_json = response.json() - expected = {} - expected.update(post_data) - expected.update(patch_data) - _deep_equality_assert(expected=expected, actual=response_json) - - -update_client_with_status_code = [ - ('db_manager_client_factory', 200, 200), - ('db_editor_client_factory', 200, 200), - ('schema_manager_client_factory', 200, 404), - ('schema_viewer_client_factory', 200, 404), - ('db_viewer_schema_manager_client_factory', 200, 200) -] - - -@pytest.mark.parametrize( - 'client_name, expected_status_code, different_schema_expected_status_code', - update_client_with_status_code -) -def test_update_based_on_permission( - create_patents_table, - request, - get_uid, - client_name, - expected_status_code, - different_schema_expected_status_code -): - base_table = create_patents_table(table_name=get_uid()) - different_schema_base_table = create_patents_table(table_name=get_uid(), schema_name='Private Schema') - ui_query = Exploration.objects.create( - name="Query1", - base_table=base_table, - initial_columns=[ - { - "id": 3, - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3]], - "alias": 
"alias_x", - } - ] - ) - different_schema_ui_query = Exploration.objects.create( - name="Query2", - base_table=different_schema_base_table, - initial_columns=[ - { - "id": 3, - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3]], - "alias": "alias_x", - } - ] - ) - client = request.getfixturevalue(client_name)(base_table.schema) - patch_data = { - "initial_columns": [ - { - "id": 3, - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3]], - "alias": "alias_x", - } - ] - } - response = client.patch(f'/api/db/v0/queries/{ui_query.id}/', data=patch_data) - assert response.status_code == expected_status_code - patch_data = { - "initial_columns": [ - { - "id": 3, - # Mock Django IDs; their correctness is not checked - "jp_path": [[1, 3]], - "alias": "alias_x", - } - ] - } - response = client.patch(f'/api/db/v0/queries/{different_schema_ui_query.id}/', data=patch_data) - assert response.status_code == different_schema_expected_status_code - - -def test_list(post_minimal_query, client): - request_data, response = post_minimal_query - response = client.get('/api/db/v0/queries/') - response_json = response.json() - assert response.status_code == 200 - actual = response_json['results'] - expected = [ - request_data, - ] - _deep_equality_assert(expected=expected, actual=actual) - - -def test_filter(post_minimal_query, client): - request_data, response = post_minimal_query - - # check that filtering on the right schema_id works - query = response.json() - schema_id = query['schema'] # get schema_id from output_only field - response = client.get(f'/api/db/v0/queries/?schema={schema_id}') - assert response.status_code == 200 - expected = [ - request_data, - ] - actual = response.json()['results'] - _deep_equality_assert(expected=expected, actual=actual) - - # check that filtering on the wrong schema_id returns nothing - wrong_schema_id = schema_id + 1 - response = client.get(f'/api/db/v0/queries/?schema={wrong_schema_id}') - response_json = response.json() - assert response.status_code == 200 - assert not response_json['results'] - - -def test_get(post_minimal_query, client): - request_data, response = post_minimal_query - response_json = response.json() - query_id = response_json['id'] - response = client.get(f'/api/db/v0/queries/{query_id}/') - response_json = response.json() - assert response.status_code == 200 - expected = request_data - _deep_equality_assert(expected=expected, actual=response_json) - - -def test_delete(post_minimal_query, client): - _, response = post_minimal_query - response_json = response.json() - query_id = response_json['id'] - response = client.delete(f'/api/db/v0/queries/{query_id}/') - assert response.status_code == 204 - assert response.data is None - - -def _deep_equality_assert( - expected, - actual, - key_path=None, # for debugging -): - """ - Recursively walks dicts and lists, checking that values in `expected` are present in and equal - to those in `actual`. Note that dicts in `actual` can have keys not in `expected`, but can't - have list elements that are not in `expected`. 
- """ - if key_path is None: - key_path = [] - if isinstance(expected, dict): - for key in expected: - assert key in actual - _deep_equality_assert( - key_path=key_path + [key], - expected=expected[key], - actual=actual[key], - ) - elif isinstance(expected, list): - assert len(actual) == len(expected) - for i, _ in enumerate(expected): - _deep_equality_assert( - key_path=key_path + [i], - expected=expected[i], - actual=actual[i], - ) - else: - assert expected == actual - - -def test_create_with_description(post_query_with_description): - request_data, response = post_query_with_description - response_json = response.json() - assert response_json['description'] == "A generic description" - - -def test_update_description(post_query_with_description, client): - post_data, response = post_query_with_description - response_json = response.json() - query_id = response_json['id'] - patch_data = { - "description": "A new description" - } - response = client.patch(f'/api/db/v0/queries/{query_id}/', data=patch_data) - response_json = response.json() - assert response_json['description'] == "A new description" diff --git a/mathesar/tests/api/query/test_query_columns_api.py b/mathesar/tests/api/query/test_query_columns_api.py deleted file mode 100644 index 5802cfc703..0000000000 --- a/mathesar/tests/api/query/test_query_columns_api.py +++ /dev/null @@ -1,36 +0,0 @@ -from db.types.base import PostgresType - - -def test_columns(client, minimal_patents_query): - ui_query = minimal_patents_query - input_table_name = ui_query.base_table.name - input_table_id = ui_query.base_table.id - response = client.get(f'/api/db/v0/queries/{ui_query.id}/columns/') - response_json = response.json() - assert response.status_code == 200 - assert response_json == [ - { - 'alias': 'col1', - 'display_name': 'Column 1', - 'type': PostgresType.TEXT.id, - 'type_options': None, - 'display_options': dict(a=1), - 'is_initial_column': True, - 'input_alias': None, - 'input_column_name': 'Center', - 'input_table_name': input_table_name, - 'input_table_id': input_table_id, - }, - { - 'alias': 'col2', - 'display_name': 'Column 2', - 'type': PostgresType.TEXT.id, - 'type_options': None, - 'display_options': dict(b=2), - 'is_initial_column': True, - 'input_alias': None, - 'input_column_name': 'Case Number', - 'input_table_name': input_table_name, - 'input_table_id': input_table_id, - }, - ] diff --git a/mathesar/tests/api/query/test_query_records_api.py b/mathesar/tests/api/query/test_query_records_api.py deleted file mode 100644 index 67a0681efc..0000000000 --- a/mathesar/tests/api/query/test_query_records_api.py +++ /dev/null @@ -1,110 +0,0 @@ -import pytest -import json - -from mathesar.models.query import Exploration - - -@pytest.fixture -def joining_patents_query(academics_ma_tables): - academics_table = academics_ma_tables['academics'] - institutions_table = academics_ma_tables['universities'] - display_names = { - 'name': 'name', - 'institution_name': 'institution name', - } - initial_columns = [ - { - 'id': academics_table.get_column_by_name('name').id, - 'alias': 'name', - }, - { - 'id': institutions_table.get_column_by_name('name').id, - 'alias': 'institution_name', - 'jp_path': [[ - academics_table.get_column_by_name('institution').id, - institutions_table.get_column_by_name('id').id, - ]], - }, - ] - display_options = { - 'name': dict(a=1), - 'institution_name': dict(b=2), - } - ui_query = Exploration.objects.create( - base_table=academics_table, - initial_columns=initial_columns, - display_options=display_options, - 
display_names=display_names, - ) - return ui_query - - -@pytest.mark.parametrize("limit", [None, 100, 500]) -def test_basics(client, minimal_patents_query, limit): - ui_query = minimal_patents_query - total_rows_in_table = 1393 - default_limit = 50 - response = client.get(f'/api/db/v0/queries/{ui_query.id}/records/?limit={limit}') - response_json = response.json() - assert response.status_code == 200 - expected_result_count = limit or default_limit - _assert_well_formed_records( - response_json, - expected_result_count, - total_rows_in_table - ) - - -def test_query_with_joins(client, joining_patents_query): - ui_query = joining_patents_query - total_rows_in_table = 3 - response = client.get(f'/api/db/v0/queries/{ui_query.id}/records/') - response_json = response.json() - assert response.status_code == 200 - expected_result_count = 3 - _assert_well_formed_records( - response_json, - expected_result_count, - total_rows_in_table - ) - assert response_json['results'] == [ - {'name': 'academic1', 'institution_name': 'uni1'}, - {'name': 'academic2', 'institution_name': 'uni1'}, - {'name': 'academic3', 'institution_name': 'uni2'}, - ] - - -def test_grouping(client, minimal_patents_query): - ui_query = minimal_patents_query - total_rows_in_table = 1393 - alias_of_column_to_group_on = 'col1' - grouping = {'columns': [alias_of_column_to_group_on]} - grouping_json = json.dumps(grouping) - limit = 150 - response = client.get( - f'/api/db/v0/queries/{ui_query.id}/records/?grouping={grouping_json}&limit={limit}' - ) - response_json = response.json() - assert response.status_code == 200 - expected_result_count = limit - _assert_well_formed_records( - response_json, - expected_result_count, - total_rows_in_table - ) - assert len(response_json['grouping']['groups']) == 2 - assert response_json['grouping']['groups'][0]['count'] == 138 - assert response_json['grouping']['groups'][1]['count'] == 21 - - -def _assert_well_formed_records( - response_json, - expected_result_count, - total_rows_in_table -): - assert isinstance(response_json, dict) - assert response_json['count'] == total_rows_in_table - results = response_json.get('results') - assert results - assert isinstance(results, list) - assert len(results) == expected_result_count diff --git a/mathesar/tests/api/query/test_query_results.py b/mathesar/tests/api/query/test_query_results.py deleted file mode 100644 index a7191fe076..0000000000 --- a/mathesar/tests/api/query/test_query_results.py +++ /dev/null @@ -1,58 +0,0 @@ -import json - - -def test_query_results_minimal(client, minimal_patents_query): - ui_query = minimal_patents_query - input_table_name = ui_query.base_table.name - input_table_id = ui_query.base_table.id - order_by = json.dumps( - [ - {'field': 'col1', 'direction': 'asc'}, - {'field': 'col2', 'direction': 'desc'} - ] - ) - response = client.get( - f'/api/db/v0/queries/{ui_query.id}/results/?limit=2&offset=3&order_by={order_by}' - ) - assert response.status_code == 200 - - actual_response_json = response.json() - expect_response_json = { - 'records': { - 'count': 1393, - 'grouping': None, - 'preview_data': None, - 'results': [ - {'col1': 'NASA Ames Research Center', 'col2': 'ARC-16902-1'}, - {'col1': 'NASA Ames Research Center', 'col2': 'ARC-16892-1A'} - ] - }, - 'output_columns': ['col1', 'col2'], - 'column_metadata': { - 'col1': { - 'alias': 'col1', - 'display_name': 'Column 1', - 'type': 'text', - 'type_options': None, - 'display_options': {'a': 1}, - 'is_initial_column': True, - 'input_table_name': input_table_name, - 'input_table_id': 
input_table_id, - 'input_column_name': 'Center', - 'input_alias': None, - }, - 'col2': { - 'alias': 'col2', - 'display_name': 'Column 2', - 'type': 'text', - 'type_options': None, - 'display_options': {'b': 2}, - 'is_initial_column': True, - 'input_table_name': input_table_name, - 'input_table_id': input_table_id, - 'input_column_name': 'Case Number', - 'input_alias': None, - } - }, - } - assert actual_response_json == expect_response_json diff --git a/mathesar/tests/api/query/test_query_run.py b/mathesar/tests/api/query/test_query_run.py deleted file mode 100644 index 5e477506ef..0000000000 --- a/mathesar/tests/api/query/test_query_run.py +++ /dev/null @@ -1,289 +0,0 @@ -import pytest - -from mathesar.api.exceptions.error_codes import ErrorCodes -from mathesar.models.deprecated import Column - -run_client_with_status_code = [ - ('db_manager_client_factory', 200, 200), - ('db_editor_client_factory', 200, 200), - ('schema_manager_client_factory', 200, 400), - ('schema_viewer_client_factory', 200, 400), - ('db_viewer_schema_manager_client_factory', 200, 200) -] - - -@pytest.mark.parametrize( - 'client_name, expected_status_code, different_schema_expected_status_code', - run_client_with_status_code -) -def test_queries_run_minimal_based_on_permissions( - create_patents_table, - request, - client_name, - expected_status_code, - different_schema_expected_status_code -): - base_table = create_patents_table(table_name='Patent Table') - different_schema_base_table = create_patents_table(table_name='Patent Table', schema_name="Private Schema") - initial_columns = [ - { - 'id': base_table.get_column_by_name('Center').id, - 'alias': 'col1', - }, - { - 'id': base_table.get_column_by_name('Case Number').id, - 'alias': 'col2', - }, - ] - data = { - 'base_table': base_table.id, - 'initial_columns': initial_columns, - 'parameters': { - 'order_by': [ - {'field': 'col1', 'direction': 'asc'}, - {'field': 'col2', 'direction': 'desc'} - ], - 'limit': 2, - 'offset': 3 - } - } - client = request.getfixturevalue(client_name)(base_table.schema) - response = client.post('/api/db/v0/queries/run/', data, format='json') - assert response.status_code == expected_status_code - data = { - 'base_table': different_schema_base_table.id, - 'initial_columns': initial_columns, - 'parameters': { - 'order_by': [ - {'field': 'col1', 'direction': 'asc'}, - {'field': 'col2', 'direction': 'desc'} - ], - 'limit': 2, - 'offset': 3 - } - } - response = client.post('/api/db/v0/queries/run/', data, format='json') - assert response.status_code == different_schema_expected_status_code - - -def test_queries_run_minimal(create_patents_table, client): - base_table = create_patents_table(table_name='patent_query_run_minimal_table') - display_names = { - 'col1': 'Column 1', - 'col2': 'Column 2', - } - initial_columns = [ - { - 'id': base_table.get_column_by_name('Center').id, - 'alias': 'col1', - }, - { - 'id': base_table.get_column_by_name('Case Number').id, - 'alias': 'col2', - }, - ] - data = { - 'base_table': base_table.id, - 'initial_columns': initial_columns, - 'display_names': display_names, - 'parameters': { - 'order_by': [ - {'field': 'col1', 'direction': 'asc'}, - {'field': 'col2', 'direction': 'desc'} - ], - 'limit': 2, - 'offset': 3 - } - } - - expect_query = ( - {k: v for k, v in data.items() if k != 'parameters'} - | {'schema': base_table.schema.id, 'transformations': []} - ) - - expect_response_json = { - 'query': expect_query, - 'records': { - 'count': 1393, - 'grouping': None, - 'preview_data': None, - 'results': [ - {'col1': 
'NASA Ames Research Center', 'col2': 'ARC-16902-1'}, - {'col1': 'NASA Ames Research Center', 'col2': 'ARC-16892-1A'} - ] - }, - 'output_columns': ['col1', 'col2'], - 'column_metadata': { - 'col1': { - 'alias': 'col1', - 'display_name': 'Column 1', - 'type': 'text', - 'type_options': None, - 'display_options': None, - 'is_initial_column': True, - 'input_table_name': 'patent_query_run_minimal_table', - 'input_table_id': base_table.id, - 'input_column_name': 'Center', - 'input_alias': None, - }, - 'col2': { - 'alias': 'col2', - 'display_name': 'Column 2', - 'type': 'text', - 'type_options': None, - 'display_options': None, - 'is_initial_column': True, - 'input_table_name': 'patent_query_run_minimal_table', - 'input_table_id': base_table.id, - 'input_column_name': 'Case Number', - 'input_alias': None, - } - }, - 'parameters': { - 'order_by': [ - {'field': 'col1', 'direction': 'asc'}, - {'field': 'col2', 'direction': 'desc'} - ], - 'limit': 2, - 'offset': 3, - } - } - - response = client.post('/api/db/v0/queries/run/', data, format='json') - assert response.status_code == 200 - assert response.json() == expect_response_json - - -def test_queries_run_deleted_column(create_patents_table, client): - base_table = create_patents_table(table_name='patent_query_run_minimal_table') - to_be_deleted_column_id = base_table.get_column_by_name('Center').id - initial_columns = [ - { - 'id': to_be_deleted_column_id, - 'alias': 'col1', - }, - { - 'id': base_table.get_column_by_name('Case Number').id, - 'alias': 'col2', - }, - ] - data = { - 'base_table': base_table.id, - 'initial_columns': initial_columns, - 'parameters': { - 'order_by': [ - {'field': 'col1', 'direction': 'asc'}, - {'field': 'col2', 'direction': 'desc'} - ], - 'limit': 2, - 'offset': 3 - } - } - Column.objects.get(id=to_be_deleted_column_id).delete() - response = client.post('/api/db/v0/queries/run/', data, format='json') - response_data = response.json() - assert response.status_code == 400 - assert response_data[0]['code'] == ErrorCodes.DeletedColumnAccess.value - assert response_data[0]['detail']['column_id'] == to_be_deleted_column_id - - -def test_queries_run_with_transforms(create_patents_table, client): - base_table = create_patents_table(table_name='patent_query_run_minimal_table') - display_names = { - 'col1': 'Column 1', - 'col2': 'Column 2', - } - initial_columns = [ - { - 'id': base_table.get_column_by_name('Center').id, - 'alias': 'col1', - }, - { - 'id': base_table.get_column_by_name('Case Number').id, - 'alias': 'col2', - }, - ] - data = { - 'base_table': base_table.id, - 'initial_columns': initial_columns, - 'display_names': display_names, - 'transformations': [ - { - "type": "hide", - "spec": ['col1'] - }, - ], - 'parameters': { - 'order_by': [ - {'field': 'col2', 'direction': 'desc'} - ], - 'limit': 2, - 'offset': 3 - } - } - - expect_query = { - k: v - for k, v - in data.items() - if k != 'parameters' - } | { - 'schema': base_table.schema.id, - 'transformations': [ - { - "type": "hide", - "spec": ['col1'], - } - ] - } - - expect_response_json = { - 'query': expect_query, - 'records': { - 'count': 1393, - 'grouping': None, - 'preview_data': None, - 'results': [ - {'col2': 'SSC-00050'}, - {'col2': 'SSC-00040'}, - ] - }, - 'output_columns': ['col2'], - 'column_metadata': { - 'col1': { - 'alias': 'col1', - 'display_name': 'Column 1', - 'type': 'text', - 'type_options': None, - 'display_options': None, - 'is_initial_column': True, - 'input_table_name': 'patent_query_run_minimal_table', - 'input_table_id': base_table.id, - 
'input_column_name': 'Center', - 'input_alias': None, - }, - 'col2': { - 'alias': 'col2', - 'display_name': 'Column 2', - 'type': 'text', - 'type_options': None, - 'display_options': None, - 'is_initial_column': True, - 'input_table_name': 'patent_query_run_minimal_table', - 'input_table_id': base_table.id, - 'input_column_name': 'Case Number', - 'input_alias': None, - } - }, - 'parameters': { - 'order_by': [ - {'field': 'col2', 'direction': 'desc'} - ], - 'limit': 2, - 'offset': 3, - } - } - - response = client.post('/api/db/v0/queries/run/', data, format='json') - assert response.status_code == 200 - assert response.json() == expect_response_json diff --git a/mathesar/tests/api/query/test_query_run_partial_transforms.py b/mathesar/tests/api/query/test_query_run_partial_transforms.py deleted file mode 100644 index a51ff1dd4a..0000000000 --- a/mathesar/tests/api/query/test_query_run_partial_transforms.py +++ /dev/null @@ -1,208 +0,0 @@ -import pytest - -from db.transforms.base import Summarize, Limit -from db.transforms.operations.serialize import serialize_transformation - - -fully_speced_summarize = \ - Summarize( - dict( - aggregation_expressions=[ - dict( - function='distinct_aggregate_to_array', - input_alias='col2', - output_alias='col2_agged' - ) - ], - base_grouping_column='col1', - grouping_expressions=[ - dict( - input_alias='col1', - output_alias='col1_grouped', - ) - ] - ) - ) - - -@pytest.mark.parametrize( - 'input_summarize, expected_summarize', [ - [ - Summarize( - dict( - base_grouping_column='col1', - aggregation_expressions=[ - dict( - function='distinct_aggregate_to_array', - input_alias='col2', - output_alias='col2_agged' - ) - ], - ), - ), - fully_speced_summarize, - ], - [ - Summarize( - dict( - base_grouping_column='col1', - grouping_expressions=[ - dict( - input_alias='col1', - output_alias='col1_grouped', - ) - ] - ) - ), - fully_speced_summarize, - ], - [ - Summarize( - dict( - base_grouping_column='col1', - ) - ), - fully_speced_summarize, - ], - ] -) -def test_partial_summarize_transform( - create_patents_table, client, input_summarize, expected_summarize, -): - base_table = create_patents_table(table_name='patent_query_run_minimal_table') - initial_columns = [ - { - 'id': base_table.get_column_by_name('Center').id, - 'alias': 'col1', - }, - { - 'id': base_table.get_column_by_name('Case Number').id, - 'alias': 'col2', - }, - ] - input_summarize_transform_json = \ - serialize_transformation(input_summarize) - expected_summarize_transform_json = \ - serialize_transformation(expected_summarize) - limit_transform_json = serialize_transformation(Limit(5)) - input_transformations = [ - limit_transform_json, - input_summarize_transform_json, - ] - output_transformations = [ - limit_transform_json, - expected_summarize_transform_json, - ] - data = { - 'base_table': base_table.id, - 'initial_columns': initial_columns, - 'display_names': None, - 'parameters': { - 'order_by': [ - {'field': 'col1_grouped', 'direction': 'asc'}, - {'field': 'col2_agged', 'direction': 'desc'} - ], - 'limit': 2 - }, - 'transformations': input_transformations, - } - expected_query = ( - {k: v for k, v in data.items() if k not in {'parameters'}} - | { - 'schema': base_table.schema.id, - 'transformations': output_transformations, - 'display_names': { - 'col1': 'Center', - 'col1_grouped': 'Center', - 'col2': 'Case Number', - 'col2_agged': 'Case Number distinct list', - }, - } - ) - expect_response_json = { - 'column_metadata': { - 'col1': { - 'alias': 'col1', - 'display_name': 'Center', - 
'display_options': None, - 'input_alias': None, - 'input_column_name': 'Center', - 'input_table_name': 'patent_query_run_minimal_table', - 'input_table_id': base_table.id, - 'is_initial_column': True, - 'type': 'text', - 'type_options': None - }, - 'col1_grouped': { - 'alias': 'col1_grouped', - 'display_name': 'Center', - 'display_options': None, - 'input_alias': 'col1', - 'input_column_name': None, - 'input_table_name': None, - 'input_table_id': None, - 'is_initial_column': False, - 'type': 'text', - 'type_options': None - }, - 'col2': { - 'alias': 'col2', - 'display_name': 'Case Number', - 'display_options': None, - 'input_alias': None, - 'input_column_name': 'Case Number', - 'input_table_name': 'patent_query_run_minimal_table', - 'input_table_id': base_table.id, - 'is_initial_column': True, - 'type': 'text', - 'type_options': None - }, - 'col2_agged': { - 'alias': 'col2_agged', - 'display_name': 'Case Number distinct list', - 'display_options': None, - 'input_alias': 'col2', - 'input_column_name': None, - 'input_table_name': None, - 'input_table_id': None, - 'is_initial_column': False, - 'type': '_array', - 'type_options': {'item_type': 'text'} - } - }, - 'output_columns': [ - 'col1_grouped', - 'col2_agged', - ], - 'parameters': { - 'limit': 2, - 'order_by': [ - {'direction': 'asc', 'field': 'col1_grouped'}, - {'direction': 'desc', 'field': 'col2_agged'} - ] - }, - 'query': expected_query, - 'records': { - 'count': 2, - 'grouping': None, - 'preview_data': None, - 'results': [ - { - 'col1_grouped': 'NASA Ames Research Center', - 'col2_agged': [ - 'ARC-14048-1', - 'ARC-14231-1', - 'ARC-14231-2DIV', - 'ARC-14231-3' - ] - }, - { - 'col1_grouped': 'NASA Kennedy Space Center', - 'col2_agged': ['KSC-12871'] - } - ] - } - } - response = client.post('/api/db/v0/queries/run/', data, format='json') - assert response.status_code == 200 - assert response.json() == expect_response_json diff --git a/mathesar/tests/api/query/test_shared_queries.py b/mathesar/tests/api/query/test_shared_queries.py deleted file mode 100644 index 8d2da86159..0000000000 --- a/mathesar/tests/api/query/test_shared_queries.py +++ /dev/null @@ -1,243 +0,0 @@ -import pytest -import uuid - -from mathesar.models.shares import SharedQuery - - -@pytest.fixture -def schemas_with_shared_queries(create_minimal_patents_query): - query = create_minimal_patents_query() - share = SharedQuery.objects.create( - query=query, - enabled=True, - ) - different_schema_query = create_minimal_patents_query(schema_name="Different Schema") - different_schema_share = SharedQuery.objects.create( - query=different_schema_query, - enabled=True, - ) - yield { - 'patent_schema': query.base_table.schema, - 'patent_query': query, - 'patent_query_share': share, - 'different_schema': different_schema_query.base_table.schema, - 'different_schema_query': different_schema_query, - 'different_schema_query_share': different_schema_share, - } - - # cleanup - share.delete() - query.delete() - query.base_table.delete() - different_schema_share.delete() - different_schema_query.delete() - different_schema_query.base_table.delete() - - -read_client_with_different_roles = [ - # (client_name, different_schema_status_code) - ('superuser_client_factory', 200), - ('db_manager_client_factory', 200), - ('db_editor_client_factory', 200), - ('schema_manager_client_factory', 403), - ('schema_viewer_client_factory', 403), - ('db_viewer_schema_manager_client_factory', 200) -] - - -write_client_with_different_roles = [ - # client_name, is_allowed - ('superuser_client_factory', 
True), - ('db_manager_client_factory', True), - ('db_editor_client_factory', True), - ('schema_manager_client_factory', True), - ('schema_viewer_client_factory', False), - ('db_viewer_schema_manager_client_factory', True) -] - - -@pytest.mark.parametrize('client_name,different_schema_status_code', read_client_with_different_roles) -def test_shared_query_list( - schemas_with_shared_queries, - request, - client_name, - different_schema_status_code, -): - client = request.getfixturevalue(client_name)(schemas_with_shared_queries["patent_schema"]) - response = client.get(f'/api/ui/v0/queries/{schemas_with_shared_queries["patent_query"].id}/shares/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data['count'] == 1 - assert len(response_data['results']) == 1 - result = response_data['results'][0] - assert result['slug'] == str(schemas_with_shared_queries['patent_query_share'].slug) - assert result['enabled'] == schemas_with_shared_queries['patent_query_share'].enabled - - response = client.get(f'/api/ui/v0/queries/{schemas_with_shared_queries["different_schema_query"].id}/shares/') - assert response.status_code == different_schema_status_code - if different_schema_status_code == 200: - response_data = response.json() - assert len(response_data['results']) == 1 - result = response_data['results'][0] - assert result['slug'] == str(schemas_with_shared_queries['different_schema_query_share'].slug) - assert result['enabled'] == schemas_with_shared_queries['different_schema_query_share'].enabled - - -@pytest.mark.parametrize('client_name,different_schema_status_code', read_client_with_different_roles) -def test_shared_query_retrieve( - schemas_with_shared_queries, - request, - client_name, - different_schema_status_code, -): - client = request.getfixturevalue(client_name)(schemas_with_shared_queries["patent_schema"]) - response = client.get(f'/api/ui/v0/queries/{schemas_with_shared_queries["patent_query"].id}/shares/{schemas_with_shared_queries["patent_query_share"].id}/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data['slug'] == str(schemas_with_shared_queries['patent_query_share'].slug) - assert response_data['enabled'] == schemas_with_shared_queries['patent_query_share'].enabled - - response = client.get(f'/api/ui/v0/queries/{schemas_with_shared_queries["different_schema_query"].id}/shares/{schemas_with_shared_queries["different_schema_query_share"].id}/') - assert response.status_code == different_schema_status_code - if different_schema_status_code == 200: - response_data = response.json() - assert response_data['slug'] == str(schemas_with_shared_queries['different_schema_query_share'].slug) - assert response_data['enabled'] == schemas_with_shared_queries['different_schema_query_share'].enabled - - -@pytest.mark.parametrize('client_name,is_allowed', write_client_with_different_roles) -def test_shared_query_create( - minimal_patents_query, - request, - client_name, - is_allowed -): - client = request.getfixturevalue(client_name)(minimal_patents_query.base_table.schema) - data = {'enabled': True} - response = client.post(f'/api/ui/v0/queries/{minimal_patents_query.id}/shares/', data) - response_data = response.json() - - if is_allowed: - assert response.status_code == 201 - assert 'id' in response_data - assert response_data['enabled'] is True - created_share = SharedQuery.objects.get(id=response_data['id']) - assert created_share is not None - else: - assert response.status_code == 403 - - 
-@pytest.mark.parametrize('client_name,is_allowed', write_client_with_different_roles) -def test_shared_query_patch( - schemas_with_shared_queries, - request, - client_name, - is_allowed -): - client = request.getfixturevalue(client_name)(schemas_with_shared_queries["patent_schema"]) - data = {'enabled': False} - response = client.patch(f'/api/ui/v0/queries/{schemas_with_shared_queries["patent_query"].id}/shares/{schemas_with_shared_queries["patent_query_share"].id}/', data) - response_data = response.json() - - if is_allowed: - assert response.status_code == 200 - assert response_data['slug'] == str(schemas_with_shared_queries['patent_query_share'].slug) - assert response_data['enabled'] is False - else: - assert response.status_code == 403 - - -@pytest.mark.parametrize('client_name,is_allowed', write_client_with_different_roles) -def test_shared_query_delete( - schemas_with_shared_queries, - request, - client_name, - is_allowed -): - client = request.getfixturevalue(client_name)(schemas_with_shared_queries["patent_schema"]) - response = client.delete(f'/api/ui/v0/queries/{schemas_with_shared_queries["patent_query"].id}/shares/{schemas_with_shared_queries["patent_query_share"].id}/') - - if is_allowed: - assert response.status_code == 204 - assert SharedQuery.objects.filter(id=schemas_with_shared_queries['patent_query_share'].id).first() is None - else: - assert response.status_code == 403 - - -@pytest.mark.parametrize('client_name,is_allowed', write_client_with_different_roles) -def test_shared_query_regenerate_link( - schemas_with_shared_queries, - request, - client_name, - is_allowed -): - client = request.getfixturevalue(client_name)(schemas_with_shared_queries["patent_schema"]) - old_slug = str(schemas_with_shared_queries["patent_query_share"].slug) - response = client.post(f'/api/ui/v0/queries/{schemas_with_shared_queries["patent_query"].id}/shares/{schemas_with_shared_queries["patent_query_share"].id}/regenerate/') - response_data = response.json() - - if is_allowed: - assert response.status_code == 200 - assert response_data['slug'] != old_slug - else: - assert response.status_code == 403 - - -# Query endpoints with share-link-uuid token - -queries_request_client_with_different_roles = [ - # (client_name, same_schema_invalid_token_status, different_schema_invalid_token_status) - ('superuser_client_factory', 200, 200), - ('db_manager_client_factory', 200, 200), - ('db_editor_client_factory', 200, 200), - ('schema_manager_client_factory', 200, 403), - ('schema_viewer_client_factory', 200, 403), - ('db_viewer_schema_manager_client_factory', 200, 200), - ('anonymous_client_factory', 401, 401) -] - - -@pytest.mark.parametrize('client_name,same_schema_invalid_token_status,different_schema_invalid_token_status', queries_request_client_with_different_roles) -@pytest.mark.parametrize('endpoint', ['/', '/results/']) -def test_shared_query_view_requests( - schemas_with_shared_queries, - request, - endpoint, - client_name, - same_schema_invalid_token_status, - different_schema_invalid_token_status -): - client = request.getfixturevalue(client_name)(schemas_with_shared_queries["patent_schema"]) - - query_url = f'/api/db/v0/queries/{schemas_with_shared_queries["patent_query"].id}' - share_uuid_param = f'shared-link-uuid={schemas_with_shared_queries["patent_query_share"].slug}' - invalid_share_uuid_param = f'shared-link-uuid={uuid.uuid4()}' - different_schema_query_url = f'/api/db/v0/queries/{schemas_with_shared_queries["different_schema_query"].id}' - different_schema_query_uuid_param = 
f'shared-link-uuid={schemas_with_shared_queries["different_schema_query_share"].slug}' - is_result_endpoint = endpoint == '/results/' - - response = client.get(f'{query_url}{endpoint}?{share_uuid_param}') - response_data = response.json() - assert response.status_code == 200 - if is_result_endpoint: - assert response_data['records']['count'] == 1393 - - response = client.get(f'{query_url}{endpoint}?{invalid_share_uuid_param}') - response_data = response.json() - assert response.status_code == same_schema_invalid_token_status - if same_schema_invalid_token_status == 200 and is_result_endpoint: - assert response_data['records']['count'] == 1393 - - response = client.get(f'{different_schema_query_url}{endpoint}?{different_schema_query_uuid_param}') - response_data = response.json() - assert response.status_code == 200 - if is_result_endpoint: - assert response_data['records']['count'] == 1393 - - response = client.get(f'{different_schema_query_url}{endpoint}?{invalid_share_uuid_param}') - response_data = response.json() - assert response.status_code == different_schema_invalid_token_status - if different_schema_invalid_token_status == 200 and is_result_endpoint: - assert response_data['records']['count'] == 1393 diff --git a/mathesar/tests/api/test_column_api.py b/mathesar/tests/api/test_column_api.py deleted file mode 100644 index 48ff87602d..0000000000 --- a/mathesar/tests/api/test_column_api.py +++ /dev/null @@ -1,871 +0,0 @@ -import json - -import pytest - -from sqlalchemy import select - -from db.constants import COLUMN_NAME_TEMPLATE -from db.types.base import PostgresType, MathesarCustomType - -from mathesar.api.exceptions.error_codes import ErrorCodes -from mathesar.tests.api.test_table_api import check_columns_response - - -def test_column_list(column_test_table, client): - response = client.get(f"/api/db/v0/tables/{column_test_table.id}/columns/") - assert response.status_code == 200 - response_data = response.json() - assert response_data['count'] == len(column_test_table.sa_columns) - expect_results = [ - { - 'name': 'mycolumn0', - 'type': PostgresType.INTEGER.id, - 'type_options': None, - 'nullable': False, - 'primary_key': True, - 'display_options': None, - 'default': { - 'value': """nextval('"Patents".anewtable_mycolumn0_seq'::regclass)""", - 'is_dynamic': True - }, - 'valid_target_types': [ - 'bigint', 'boolean', 'character', 'character varying', - 'double precision', 'integer', 'mathesar_types.mathesar_money', - 'mathesar_types.multicurrency_money', 'money', 'numeric', - 'real', 'smallint', 'text', - ], - 'has_dependents': True, - 'description': None, - }, - { - 'name': 'mycolumn1', - 'type': PostgresType.INTEGER.id, - 'type_options': None, - 'nullable': False, - 'primary_key': False, - 'display_options': None, - 'default': None, - 'valid_target_types': [ - 'bigint', 'boolean', 'character', 'character varying', - 'double precision', 'integer', 'mathesar_types.mathesar_money', - 'mathesar_types.multicurrency_money', 'money', 'numeric', - 'real', 'smallint', 'text', - ], - 'has_dependents': False, - 'description': None, - }, - { - 'name': 'mycolumn2', - 'type': PostgresType.INTEGER.id, - 'type_options': None, - 'nullable': True, - 'primary_key': False, - 'display_options': None, - 'default': { - 'value': 5, - 'is_dynamic': False, - }, - 'valid_target_types': [ - 'bigint', 'boolean', 'character', 'character varying', - 'double precision', 'integer', 'mathesar_types.mathesar_money', - 'mathesar_types.multicurrency_money', 'money', 'numeric', - 'real', 'smallint', 'text', - ], - 
'has_dependents': True, - 'description': None, - }, - { - 'name': 'mycolumn3', - 'type': PostgresType.CHARACTER_VARYING.id, - 'type_options': None, - 'nullable': True, - 'primary_key': False, - 'display_options': None, - 'valid_target_types': [ - 'bigint', 'boolean', 'character', 'character varying', 'date', - 'double precision', 'integer', 'interval', 'json', 'jsonb', - 'mathesar_types.email', 'mathesar_types.mathesar_json_array', - 'mathesar_types.mathesar_json_object', 'mathesar_types.mathesar_money', - 'mathesar_types.multicurrency_money', 'mathesar_types.uri', - 'money', 'numeric', 'real', 'smallint', 'text', - 'time with time zone', 'time without time zone', - 'timestamp with time zone', 'timestamp without time zone', - ], - 'default': None, - 'has_dependents': False, - 'description': None, - } - ] - check_columns_response(response_data['results'], expect_results) - - -list_client_with_different_roles = [ - ('superuser_client_factory', 8, 200, 8), - ('db_manager_client_factory', 8, 200, 8), - ('db_editor_client_factory', 8, 200, 8), - ('schema_manager_client_factory', 8, 403, 0), - ('schema_viewer_client_factory', 8, 403, 0), - ('db_viewer_schema_manager_client_factory', 8, 200, 8) -] - -write_client_with_different_roles = [ - ('superuser_client_factory', 201), - ('db_manager_client_factory', 201), - ('db_editor_client_factory', 403), - ('schema_manager_client_factory', 201), - ('schema_viewer_client_factory', 403), - ('db_viewer_schema_manager_client_factory', 201) -] - - -@pytest.mark.parametrize('client_name,expected_count,different_schema_status_code,different_schema_expected_count', list_client_with_different_roles) -def test_column_list_based_on_permissions( - create_patents_table, - request, - client_name, - expected_count, - different_schema_status_code, - different_schema_expected_count -): - table_name = 'NASA Column List 1' - table = create_patents_table(table_name) - different_schema_table = create_patents_table(table_name, schema_name="Different Schema") - client = request.getfixturevalue(client_name)(table.schema) - response = client.get(f"/api/db/v0/tables/{table.id}/columns/") - response_data = response.json() - assert response_data['count'] == expected_count - response = client.get(f"/api/db/v0/tables/{different_schema_table.id}/columns/") - assert response.status_code == different_schema_status_code - if different_schema_status_code == 200: - response_data = response.json() - assert response_data['count'] == different_schema_expected_count - - -@pytest.mark.parametrize( - "table_fixture", - [ - "column_test_table", - "table_with_unknown_types", - ], -) -def test_column_create(table_fixture, client, request): - table = request.getfixturevalue(table_fixture) - name = "anewcolumn" - db_type = PostgresType.NUMERIC - num_columns = len(table.sa_columns) - data = { - "name": name, - "type": db_type.id, - "display_options": {"show_as_percentage": True}, - "nullable": True - } - response = client.post( - f"/api/db/v0/tables/{table.id}/columns/", - data=data, - ) - assert response.status_code == 201 - new_columns_response = client.get( - f"/api/db/v0/tables/{table.id}/columns/" - ) - assert new_columns_response.json()["count"] == num_columns + 1 - actual_new_col = new_columns_response.json()["results"][-1] - assert actual_new_col["name"] == name - assert actual_new_col["type"] == db_type.id - assert actual_new_col["default"] is None - - -@pytest.mark.parametrize('client_name, expected_status_code', write_client_with_different_roles) -def 
-def test_column_create_by_different_roles(create_patents_table, request, client_name, expected_status_code):
-    table_name = 'NASA Constraint List 1'
-    table = create_patents_table(table_name)
-    name = "anewcolumn"
-    db_type = PostgresType.NUMERIC
-    data = {
-        "name": name,
-        "type": db_type.id,
-        "display_options": {"show_as_percentage": True},
-        "nullable": True
-    }
-    client = request.getfixturevalue(client_name)(table.schema)
-
-    response = client.post(
-        f"/api/db/v0/tables/{table.id}/columns/",
-        data=data,
-    )
-    assert response.status_code == expected_status_code
-
-
-create_default_test_list = [
-    (PostgresType.BOOLEAN, True, True, True),
-    (PostgresType.INTERVAL, "00:42:00", "P0Y0M0DT0H42M0S", "P0Y0M0DT0H42M0S"),
-    (PostgresType.NUMERIC, 42, 42, 42),
-    (PostgresType.CHARACTER_VARYING, "test_string", "test_string", "test_string"),
-    (PostgresType.DATE, "2020-1-1", "2020-01-01 AD", "2020-01-01 AD"),
-    (MathesarCustomType.EMAIL, "test@test.com", "test@test.com", "test@test.com"),
-]
-
-
-@pytest.mark.parametrize(
-    "db_type,default,default_obj,expt_default", create_default_test_list
-)
-def test_column_create_default(
-    column_test_table, db_type, default, default_obj, expt_default, client, engine_with_schema
-):
-    engine, _ = engine_with_schema
-    name = "anewcolumn"
-    data = {"name": name, "type": db_type.id, "default": {"value": default}}
-    response = client.post(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/",
-        json.dumps(data), content_type='application/json'
-    )
-    assert response.status_code == 201
-
-    # Ensure the correct serialized default is returned by the API
-    new_columns_response = client.get(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/"
-    )
-    actual_new_col = new_columns_response.json()["results"][-1]
-    assert actual_new_col["default"]["value"] == expt_default
-
-    # Ensure the correct default value is generated when inserting a new record
-    sa_table = column_test_table._sa_table
-    with engine.begin() as conn:
-        conn.execute(sa_table.insert((1, 1, 1, 'str')))
-        created_default = conn.execute(select(sa_table)).fetchall()[0][-1]
-    assert created_default == default_obj
-
-
-def test_column_create_invalid_default(column_test_table, client):
-    name = "anewcolumn"
-    data = {
-        "name": name,
-        "type": PostgresType.BOOLEAN.id,
-        "default": {"value": "Not a boolean"},
-    }
-    response = client.post(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/",
-        json.dumps(data),
-        content_type="application/json",
-    )
-    assert response.status_code == 400
-    assert f'default "{data["default"]}" is invalid for type' in response.json()[0]['message']
-
-
-@pytest.mark.parametrize(
-    "db_type,type_options,expected_type_options",
-    [
-        (PostgresType.NUMERIC, {"precision": 5, "scale": 3}, {"precision": 5, "scale": 3}),
-        (PostgresType.CHARACTER_VARYING, {"length": 5}, {"length": 5}),
-        (PostgresType.CHARACTER, {"length": 5}, {"length": 5}),
-        (PostgresType.INTERVAL, {"precision": 5}, {"precision": 5}),
-        (PostgresType.INTERVAL, {"precision": 5, "fields": "second"}, {"precision": 5, "fields": "second"}),
-        (PostgresType.INTERVAL, {"fields": "day"}, {"fields": "day"}),
-    ]
-)
-def test_column_create_retrieve_options(column_test_table, client, db_type, type_options, expected_type_options):
-    name = "anewcolumn"
-    num_columns = len(column_test_table.sa_columns)
-    data = {
-        "name": name, "type": db_type.id, "type_options": type_options,
-    }
-    response = client.post(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/",
-        data=data,
-    )
-    assert response.status_code == 201
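-    # Fetch the columns again to verify the new column's type options survive an API round trip.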
-    new_columns_response = client.get(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/"
-    )
-    assert new_columns_response.json()["count"] == num_columns + 1
-    actual_new_col = new_columns_response.json()["results"][-1]
-    assert actual_new_col["name"] == name
-    assert actual_new_col["type"] == db_type.id
-    assert actual_new_col["type_options"] == expected_type_options
-
-
-invalid_type_options = [
-    {"precision": 1000},
-    {"scale": 5},
-    {"precision": "asd"},
-    {"nonoption": 34},
-    {"length": "two"},
-]
-
-
-@pytest.mark.parametrize("type_options", invalid_type_options)
-def test_column_create_bad_options(column_test_table, client, type_options):
-    name = "anewcolumn"
-    db_type = PostgresType.NUMERIC
-    data = {
-        "name": name, "type": db_type.id, "type_options": type_options,
-    }
-    response = client.post(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/",
-        data=data,
-    )
-    assert response.status_code == 400
-
-
-def test_column_create_duplicate(column_test_table, client):
-    column = column_test_table.sa_columns[0]
-    name = column.name
-    db_type = PostgresType.NUMERIC
-    data = {
-        "name": name, "type": db_type.id
-    }
-    response = client.post(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/", data=data
-    )
-    assert response.status_code == 400
-
-
-def test_column_create_some_parameters(column_test_table, client):
-    data = {
-        "name": "only name",
-    }
-    response = client.post(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/", data=data
-    )
-    response_data = response.json()[0]
-    assert response.status_code == 400
-    assert response_data['message'] == "This field is required."
-    assert response_data['field'] == "type"
-
-
-def test_column_create_no_name_parameter(column_test_table, client):
-    db_type = PostgresType.BOOLEAN
-    num_columns = len(column_test_table.sa_columns)
-    generated_name = f"{COLUMN_NAME_TEMPLATE}{num_columns + 1}"
-    data = {
-        "type": db_type.id
-    }
-    response = client.post(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/", data=data
-    )
-    assert response.status_code == 201
-    new_columns_response = client.get(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/"
-    )
-    assert new_columns_response.json()["count"] == num_columns + 1
-    actual_new_col = new_columns_response.json()["results"][-1]
-    assert actual_new_col["name"] == generated_name
-    assert actual_new_col["type"] == db_type.id
-
-
-def test_column_create_name_parameter_empty(column_test_table, client):
-    name = ""
-    db_type = PostgresType.BOOLEAN
-    num_columns = len(column_test_table.sa_columns)
-    generated_name = f"{COLUMN_NAME_TEMPLATE}{num_columns + 1}"
-    data = {
-        "name": name, "type": db_type.id
-    }
-    response = client.post(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/", data=data
-    )
-    assert response.status_code == 201
-    new_columns_response = client.get(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/"
-    )
-    assert new_columns_response.json()["count"] == num_columns + 1
-    actual_new_col = new_columns_response.json()["results"][-1]
-    assert actual_new_col["name"] == generated_name
-    assert actual_new_col["type"] == db_type.id
-
-
-@pytest.mark.parametrize(
-    "table_fixture",
-    [
-        "column_test_table",
-        "table_with_unknown_types",
-    ],
-)
-def test_column_update_name(table_fixture, request, client):
-    table = request.getfixturevalue(table_fixture)
-    name = "updatedname"
-    data = {"name": name}
-    column = table.columns.last()
-    response = client.patch(
-        f"/api/db/v0/tables/{table.id}/columns/{column.id}/", data=data
-    )
-    assert response.status_code == 200
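-    # The PATCH response should already reflect the rename; the follow-up GET confirms it persisted.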
response.json()["name"] == name - response = client.get( - f"/api/db/v0/tables/{table.id}/columns/{column.id}/" - ) - assert response.status_code == 200 - assert response.json()["name"] == name - - -@pytest.mark.parametrize( - "table_fixture,expected_status_code", - [ - ["column_test_table", 200], - # NOTE we don't cast from unknown types - ["table_with_unknown_types", 400], - ], -) -def test_column_update_type_get_all_columns(table_fixture, expected_status_code, request, client): - table = request.getfixturevalue(table_fixture) - column = table.columns.last() - column_id = column.id - display_options_data = {'type': 'BOOLEAN'} - response = client.patch( - f"/api/db/v0/tables/{table.id}/columns/{column_id}/", - display_options_data, - ) - assert response.status_code == expected_status_code - response = client.get( - f"/api/db/v0/tables/{table.id}/columns/" - ) - assert response.status_code == 200 - - -def test_column_with_dynamic_default_update_default(column_test_table, client): - expt_default = 5 - data = {"default": {"value": expt_default}} - column = column_test_table.get_columns_by_name(['mycolumn0'])[0] - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", - data=json.dumps(data), - content_type="application/json", - ) - assert response.json()[0]["code"] == 4424 - - -def test_column_with_dynamic_default_update_delete_default(column_test_table, client): - data = {"default": None} - column = column_test_table.get_columns_by_name(['mycolumn0'])[0] - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", - data=json.dumps(data), - content_type="application/json", - ) - assert response.json()[0]["code"] == 4424 - - -def test_column_update_default(column_test_table, client): - expt_default = 5 - data = {"default": {"value": expt_default}} # Ensure we pass a int and not a str - column = column_test_table.get_columns_by_name(['mycolumn1'])[0] - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", - data=json.dumps(data), - content_type="application/json", - ) - assert response.json()["default"]["value"] == expt_default - - -def test_column_update_delete_default(column_test_table, client): - expt_default = None - data = {"default": None} - column = column_test_table.get_columns_by_name(['mycolumn1'])[0] - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", - data=json.dumps(data), - content_type="application/json", - ) - assert response.json()["default"] == expt_default - - -def test_column_update_default_invalid_cast(column_test_table, client): - data = {"default": {"value": "not an integer"}} - column = column_test_table.get_columns_by_name(['mycolumn1'])[0] - - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", - data=json.dumps(data), - content_type="application/json" - ) - assert response.status_code == 400 - - -def test_column_update_type_dynamic_default(column_test_table, client): - db_type = PostgresType.NUMERIC - data = {"type": db_type.id} - column = column_test_table.get_columns_by_name(['mycolumn0'])[0] - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data - ) - assert response.status_code == 200 - - -def test_column_update_type(column_test_table, client): - db_type = PostgresType.BOOLEAN - data = {"type": db_type.id} - column = column_test_table.get_columns_by_name(['mycolumn3'])[0] - response = client.patch( - 
f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data - ) - assert response.json()["type"] == db_type.id - - -def test_column_update_name_and_type(column_test_table, client): - db_type = PostgresType.BOOLEAN - new_name = 'new name' - data = {"type": db_type.id, "name": new_name} - column = column_test_table.get_columns_by_name(['mycolumn3'])[0] - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data - ) - assert response.json()["type"] == db_type.id - assert response.json()["name"] == new_name - - -def test_column_update_name_type_nullable(column_test_table, client): - db_type = PostgresType.BOOLEAN - new_name = 'new name' - data = {"type": db_type.id, "name": new_name, "nullable": True} - column = column_test_table.get_columns_by_name(['mycolumn3'])[0] - - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data - ) - assert response.json()["type"] == db_type.id - assert response.json()["name"] == new_name - assert response.json()["nullable"] is True - - -def test_column_update_name_type_nullable_default(column_test_table, client): - db_type = PostgresType.BOOLEAN - new_name = 'new name' - data = { - "type": db_type.id, - "name": new_name, - "nullable": True, - "default": {"value": True}, - } - column = column_test_table.get_columns_by_name(['mycolumn3'])[0] - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", - data=json.dumps(data), - content_type='application/json' - ) - assert response.json()["type"] == db_type.id - assert response.json()["name"] == new_name - assert response.json()["nullable"] is True - assert response.json()["default"]["value"] is True - - -def test_column_update_type_options(column_test_table, client): - db_type = PostgresType.NUMERIC - type_options = {'precision': 1000, "scale": 1} - expected_type_options = {'precision': 1000, 'scale': 1} - data = {"type": db_type.id, "type_options": type_options} - column = column_test_table.get_columns_by_name(['mycolumn3'])[0] - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", - data, - ) - assert response.json()["type"] == db_type.id - assert response.json()["type_options"] == expected_type_options - - -def test_column_update_type_options_no_type(column_test_table, client): - db_type = PostgresType.NUMERIC - data = {"type": db_type.id} - column = column_test_table.get_columns_by_name(['mycolumn3'])[0] - client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", - data, - ) - type_options = {"precision": 3, "scale": 1} - type_option_data = {"type_options": type_options} - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", - type_option_data, - ) - assert response.json()["type"] == db_type.id - assert response.json()["type_options"] == type_options - - -def test_column_update_invalid_type(create_patents_table, client): - table = create_patents_table('Column Invalid Type') - body = {"type": PostgresType.BIGINT.id} - response = client.get( - f"/api/db/v0/tables/{table.id}/columns/" - ) - assert response.status_code == 200 - columns = response.json()['results'] - column_index = 3 - column_id = columns[column_index]['id'] - response = client.patch( - f"/api/db/v0/tables/{table.id}/columns/{column_id}/", - body - ) - assert response.status_code == 400 - response_json = response.json() - assert response_json[0]['code'] == ErrorCodes.InvalidTypeCast.value - assert 
-    assert response_json[0]['message'] == f"\"{columns[column_index]['name']}\" cannot be cast to bigint."
-
-
-def test_column_update_invalid_nullable(create_patents_table, client):
-    table = create_patents_table('Column Invalid Nullable')
-    body = {"nullable": False}
-    response = client.get(
-        f"/api/db/v0/tables/{table.id}/columns/"
-    )
-    assert response.status_code == 200
-    columns = response.json()['results']
-    column_index = 4
-    column_id = columns[column_index]['id']
-    response = client.patch(
-        f"/api/db/v0/tables/{table.id}/columns/{column_id}/",
-        body
-    )
-    assert response.status_code == 400
-    response_json = response.json()
-    assert response_json[0]['code'] == ErrorCodes.NotNullViolation.value
-    assert response_json[0]['message'] == (
-        'column "Patent Number" of relation'
-        ' "Column Invalid Nullable" contains null values'
-    )
-
-
-def test_column_update_returns_table_dependent_fields(column_test_table, client):
-    expt_default = 5
-    data = {"default": {"value": expt_default}}
-    column = column_test_table.get_columns_by_name(['mycolumn1'])[0]
-    response = client.patch(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/",
-        data=data,
-    )
-    assert response.json()["default"] is not None
-    assert response.json()["id"] is not None
-
-
-@pytest.mark.parametrize("type_options", invalid_type_options)
-def test_column_update_type_invalid_options(column_test_table, client, type_options):
-    db_type = PostgresType.NUMERIC
-    data = {"type": db_type.id, "type_options": type_options}
-    column = column_test_table.get_columns_by_name(['mycolumn3'])[0]
-    response = client.patch(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/",
-        data=data,
-    )
-    assert response.status_code == 400
-
-
-# This cast currently succeeds because the column is empty.
-# While we do have the facilities to not recommend a cast like this (and we don't recommend it),
-# this test checks whether we allow attempting the cast anyway. It is not clear to me (Dom) that we
-# should forbid it, and our code currently does not forbid it.
-@pytest.mark.skip(reason="unclear whether this is indeed an unsupported cast")
-def test_column_update_type_invalid_cast(column_test_table, client):
-    db_type = MathesarCustomType.EMAIL
-    data = {"type": db_type.id}
-    column = column_test_table.get_columns_by_name(['mycolumn1'])[0]
-    response = client.patch(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data
-    )
-    assert response.status_code == 400
-
-
-def test_column_update_when_missing(column_test_table, client):
-    name = "updatedname"
-    data = {"name": name}
-    response = client.patch(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/99999/", data=data
-    )
-    assert response.status_code == 404
-    response_data = response.json()[0]
-    assert response_data['message'] == "Not found."
-    assert response_data['code'] == ErrorCodes.NotFound.value
-
-
-def test_column_destroy(column_test_table, create_patents_table, client):
-    create_patents_table('Dummy Table')
-    num_columns = len(column_test_table.sa_columns)
-    col_one_name = column_test_table.sa_columns[1].name
-    column = column_test_table.get_columns_by_name(['mycolumn1'])[0]
-    response = client.delete(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/"
-    )
-    assert response.status_code == 204
-    new_columns_response = client.get(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/"
-    )
-    new_data = new_columns_response.json()
-    assert col_one_name not in [col["name"] for col in new_data["results"]]
-    assert new_data["count"] == num_columns - 1
-
-
-delete_client_with_different_roles = [
-    ('superuser_client_factory', 204, 204),
-    ('db_manager_client_factory', 204, 204),
-    ('db_editor_client_factory', 403, 403),
-    ('schema_manager_client_factory', 204, 403),
-    ('schema_viewer_client_factory', 403, 403),
-    ('db_viewer_schema_manager_client_factory', 204, 403)
-]
-
-
-@pytest.mark.parametrize('client_name, expected_status_code, different_schema_expected_status_code', delete_client_with_different_roles)
-def test_column_destroy_based_on_permissions(create_patents_table, request, client_name, expected_status_code, different_schema_expected_status_code):
-    table_name = 'NASA Column Table'
-    table = create_patents_table(table_name)
-    client = request.getfixturevalue(client_name)(table.schema)
-    different_schema_table = create_patents_table(table_name, schema_name="Different Schema")
-    col_one_name = table.sa_columns[1].name
-    column = table.get_columns_by_name([col_one_name])[0]
-    response = client.delete(
-        f"/api/db/v0/tables/{table.id}/columns/{column.id}/"
-    )
-    assert response.status_code == expected_status_code
-    column = different_schema_table.get_columns_by_name([col_one_name])[0]
-    response = client.delete(
-        f"/api/db/v0/tables/{different_schema_table.id}/columns/{column.id}/"
-    )
-    assert response.status_code == different_schema_expected_status_code
-
-
-def test_column_destroy_when_missing(column_test_table, client):
-    response = client.delete(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/99999/"
-    )
-    response_data = response.json()[0]
-    assert response_data['message'] == "Not found."
-    assert response_data['code'] == ErrorCodes.NotFound.value
-    assert response.status_code == 404
-
-
-# TODO Fix test case to use correct column name
-# def test_column_duplicate(column_test_table, client):
-#     column = column_test_table.get_columns_by_name(['mycolumn1'])[0]
-#     target_col = column_test_table.sa_columns[column.name]
-#     data = {
-#         "name": "new_col_name",
-#         "source_column": column.id,
-#         "copy_source_data": False,
-#         "copy_source_constraints": False,
-#     }
-#     with patch.object(models_base, "duplicate_column") as mock_infer:
-#         mock_infer.return_value = target_col
-#         response = client.post(
-#             f"/api/db/v0/tables/{column_test_table.id}/columns/",
-#             data=data
-#         )
-#         assert response.status_code == 201
-#         response_col = response.json()
-#         assert response_col["name"] == target_col.name
-#         assert response_col["type"] == target_col.db_type.id
-#
-#         assert mock_infer.call_args[0] == (
-#             column_test_table.oid,
-#             column,
-#             column_test_table.schema._sa_engine,
-#         )
-#         assert mock_infer.call_args[1] == {
-#             "new_column_name": data["name"],
-#             "copy_data": data["copy_source_data"],
-#             "copy_constraints": data["copy_source_constraints"]
-#         }
-
-
-def test_column_duplicate_when_missing(column_test_table, client):
-    data = {
-        "source_column": 3000,
-    }
-    response = client.post(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/", data=data
-    )
-    assert response.status_code == 400
-    response_data = response.json()[0]
-    assert 2151 == response_data['code']
-    assert "object does not exist" in response_data['message']
-
-
-def test_column_duplicate_some_parameters(column_test_table, client):
-    data = {
-        "copy_source_constraints": True,
-    }
-    response = client.post(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/", data=data
-    )
-    response_data = response.json()
-    assert response.status_code == 400
-    assert response_data[0]['message'] == "This field is required."
-    assert response_data[0]['field'] == "source_column"
-
-
-def test_column_duplicate_no_parameters(column_test_table, client):
-    response = client.post(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/", data={}
-    )
-    response_data = response.json()
-    assert response.status_code == 400
-    assert response_data[0]["message"] == "This field is required."
- assert response_data[0]["field"] == "type" - - -def test_list_columns_with_unknown_types(table_with_unknown_types, client): - table = table_with_unknown_types - response = client.get( - f"/api/db/v0/tables/{table.id}/columns/", data={} - ) - response_data = response.json() - assert response.status_code == 200 - was_col1_found = False - was_col2_found = False - for response_column in response_data['results']: - if response_column['name'] == 'text_column': - assert response_column['type'] == '__unknown__' - was_col1_found = True - if response_column['name'] == 'point_column': - assert response_column['type'] == '__unknown__' - was_col2_found = True - assert was_col1_found - assert was_col2_found - - -def test_column_description_set_and_unset(column_test_table, client): - expected_descriptions = [ - 'Some comment', - None, - ] - table = column_test_table - column = table.columns.first() - assert column is not None - for expected_description in expected_descriptions: - data = dict(description=expected_description) - response = client.patch( - f"/api/db/v0/tables/{table.id}/columns/{column.id}/", - data=data - ) - response_json = response.json() - assert response.status_code == 200 - response = client.get( - f"/api/db/v0/tables/{table.id}/columns/{column.id}/", - ) - response_json = response.json() - assert response.status_code == 200 - actual_description = response_json.get('description') - assert actual_description == expected_description - - -@pytest.mark.parametrize( - 'expected_description', - [ - None, - 'Some comment', - ] -) -def test_column_description_when_creating(column_test_table, client, expected_description): - table = column_test_table - column_id = None - data = dict(type='text') - if expected_description is not None: - data['description'] = expected_description - response = client.post( - f"/api/db/v0/tables/{table.id}/columns/", - data=data - ) - response_data = response.json() - assert response.status_code == 201 - column_id = response_data['id'] - response = client.get( - f"/api/db/v0/tables/{table.id}/columns/{column_id}/", - ) - response_data = response.json() - assert response.status_code == 200 - actual_description = response_data.get('description') - assert actual_description == expected_description diff --git a/mathesar/tests/api/test_column_api_display_options.py b/mathesar/tests/api/test_column_api_display_options.py deleted file mode 100644 index ae421e5ae1..0000000000 --- a/mathesar/tests/api/test_column_api_display_options.py +++ /dev/null @@ -1,448 +0,0 @@ -import pytest - -from sqlalchemy import INTEGER, BOOLEAN, TEXT, TIMESTAMP, Column as SAColumn, Table as SATable, MetaData, TIME, DATE - -from db.columns.operations.alter import alter_column_type -from db.columns.operations.select import get_column_attnum_from_name -from db.tables.operations.select import get_oid_from_table -from db.types.base import PostgresType, MathesarCustomType -from db.types.custom.money import MathesarMoney -from db.metadata import get_empty_metadata - -from mathesar.models.deprecated import Table, Column - - -@pytest.fixture -def column_test_table_with_service_layer_options(patent_schema): - engine = patent_schema._sa_engine - column_list_in = [ - SAColumn("mycolumn0", INTEGER, primary_key=True), - SAColumn("mycolumn1", BOOLEAN), - SAColumn("mycolumn2", INTEGER), - SAColumn("mycolumn3", TEXT), - SAColumn("mycolumn4", TEXT), - SAColumn("mycolumn5", MathesarMoney), - SAColumn("mycolumn6", TIMESTAMP), - SAColumn("mycolumn7", TIME), - SAColumn("mycolumn8", DATE), - ] - column_data_list = 
-        {},
-        {'display_options': {'input': "dropdown", "custom_labels": {"TRUE": "yes", "FALSE": "no"}}},
-        {
-            'display_options': {
-                'show_as_percentage': True,
-                'number_format': "english",
-                "use_grouping": 'true',
-                "minimum_fraction_digits": None,
-                "maximum_fraction_digits": None,
-            }
-        },
-        {'display_options': None},
-        {},
-        {
-            'display_options': {
-                'currency_details': {
-                    'currency_symbol': "HK $",
-                    'number_format': "english",
-                    'currency_symbol_location': 'after-minus'
-                }
-            }
-        },
-        {'display_options': {'time_format': 'hh:mm', 'date_format': 'YYYY-MM-DD'}},
-        {'display_options': {'format': 'hh:mm'}},
-        {'display_options': {'format': 'YYYY-MM-DD'}},
-    ]
-    db_table = SATable(
-        "anewtable",
-        MetaData(bind=engine),
-        *column_list_in,
-        schema=patent_schema.name
-    )
-    db_table.create()
-    db_table_oid = get_oid_from_table(db_table.name, db_table.schema, engine)
-    table = Table.current_objects.create(oid=db_table_oid, schema=patent_schema)
-    service_columns = []
-    for column_data in zip(column_list_in, column_data_list):
-        attnum = get_column_attnum_from_name(db_table_oid, column_data[0].name, engine, metadata=get_empty_metadata())
-        service_columns.append(
-            Column.current_objects.get_or_create(
-                table=table,
-                attnum=attnum,
-                display_options=column_data[1].get('display_options', None)
-            )[0]
-        )
-    return table, service_columns
-
-
-# NOTE: display option value types are checked on the backend, but that's it: e.g. a time format may be any string.
-_create_display_options_test_list = [
-    (
-        PostgresType.BOOLEAN,
-        {"input": "dropdown"},
-        {"input": "dropdown"}
-    ),
-    (
-        PostgresType.BOOLEAN,
-        {"input": "checkbox", "custom_labels": {"TRUE": "yes", "FALSE": "no"}},
-        {"input": "checkbox", "custom_labels": {"TRUE": "yes", "FALSE": "no"}}
-    ),
-    (
-        PostgresType.DATE,
-        {'format': 'YYYY-MM-DD'},
-        {'format': 'YYYY-MM-DD'}
-    ),
-    (
-        PostgresType.INTERVAL,
-        {'min': 's', 'max': 'h', 'show_units': True},
-        {'min': 's', 'max': 'h', 'show_units': True}
-    ),
-    (
-        PostgresType.MONEY,
-        {
-            'number_format': "english",
-            'currency_symbol': '$',
-            'currency_symbol_location': 'after-minus',
-            'use_grouping': 'true',
-            "minimum_fraction_digits": 2,
-            "maximum_fraction_digits": 2,
-        },
-        {
-            'currency_symbol': '$',
-            'currency_symbol_location': 'after-minus',
-            'number_format': "english",
-            'use_grouping': 'true',
-            "minimum_fraction_digits": 2,
-            "maximum_fraction_digits": 2,
-        },
-    ),
-    (
-        PostgresType.NUMERIC,
-        {},
-        {
-            "show_as_percentage": False,
-            'number_format': None,
-            'use_grouping': 'false',
-            "minimum_fraction_digits": None,
-            "maximum_fraction_digits": None,
-        },
-    ),
-    (
-        PostgresType.NUMERIC,
-        {
-            "show_as_percentage": True,
-            'number_format': None,
-            'use_grouping': 'false',
-            "minimum_fraction_digits": 2,
-            "maximum_fraction_digits": 20,
-        },
-        {
-            "show_as_percentage": True,
-            'number_format': None,
-            'use_grouping': 'false',
-            "minimum_fraction_digits": 2,
-            "maximum_fraction_digits": 20,
-        },
-    ),
-    (
-        PostgresType.NUMERIC,
-        {
-            "show_as_percentage": True,
-            'number_format': "english",
-            'use_grouping': 'true',
-            "minimum_fraction_digits": None,
-            "maximum_fraction_digits": None,
-        },
-        {
-            "show_as_percentage": True,
-            'number_format': "english",
-            'use_grouping': 'true',
-            "minimum_fraction_digits": None,
-            "maximum_fraction_digits": None,
-        },
-    ),
-    (
-        PostgresType.TIMESTAMP_WITH_TIME_ZONE,
-        {'date_format': 'x', 'time_format': 'x'},
-        {'date_format': 'x', 'time_format': 'x'}
-    ),
-    (
-        PostgresType.TIMESTAMP_WITHOUT_TIME_ZONE,
-        {'date_format': 'x', 'time_format': 'x'},
-        {'date_format': 'x', 'time_format': 'x'}
-    ),
-    (
-        PostgresType.TIME_WITHOUT_TIME_ZONE,
-        {'format': 'hh:mm'},
-        {'format': 'hh:mm'}
-    ),
-    (
-        PostgresType.TIME_WITH_TIME_ZONE,
-        {'format': 'hh:mm Z'},
-        {'format': 'hh:mm Z'}
-    ),
-]
-
-
-# TODO does it make sense to do two HTTP requests here?
-@pytest.mark.parametrize(
-    "db_type,display_options,expected_display_options",
-    _create_display_options_test_list
-)
-def test_column_create_display_options(
-    column_test_table, db_type, display_options, expected_display_options, client
-):
-    name = "anewcolumn"
-    data = {"name": name, "type": db_type.id, "display_options": display_options}
-
-    response = client.post(f"/api/db/v0/tables/{column_test_table.id}/columns/", data)
-    assert response.status_code == 201
-
-    # Ensure the correct serialized display options are returned by the API
-    new_columns_response = client.get(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/"
-    )
-    assert new_columns_response.status_code == 200
-    columns = new_columns_response.json()["results"]
-
-    # We have to find the new column.
-    new_column = None
-    for column in columns:
-        if column['name'] == name:
-            new_column = column
-
-    assert new_column is not None
-    assert new_column["display_options"] == expected_display_options
-
-
-_too_long_string = "x" * 256
-
-
-_create_display_options_invalid_test_list = [
-    (
-        PostgresType.BOOLEAN,
-        {"input": "invalid", "use_custom_columns": False}
-    ),
-    (
-        PostgresType.BOOLEAN,
-        {
-            "input": "checkbox",
-            "use_custom_columns": True,
-            "custom_labels": {"yes": "yes", "1": "no"}}
-    ),
-    (
-        PostgresType.DATE,
-        {'format': _too_long_string}
-    ),
-    (
-        PostgresType.MONEY,
-        {'currency_symbol': '$', 'currency_symbol_location': 'middle'}
-    ),
-    (
-        PostgresType.MONEY,
-        {'currency_symbol': None}
-    ),
-    (
-        PostgresType.NUMERIC,
-        {"show_as_percentage": "wrong value type"}
-    ),
-    (
-        PostgresType.NUMERIC,
-        {'number_format': "wrong"}
-    ),
-
-    # Out-of-range values
-    (PostgresType.NUMERIC, {'minimum_fraction_digits': -1}),
-    (PostgresType.NUMERIC, {'maximum_fraction_digits': -1}),
-    (PostgresType.NUMERIC, {'minimum_fraction_digits': 21}),
-    (PostgresType.NUMERIC, {'maximum_fraction_digits': 21}),
-
-    # Incorrect types
-    (PostgresType.NUMERIC, {'minimum_fraction_digits': 1.5}),
-    (PostgresType.NUMERIC, {'maximum_fraction_digits': 1.5}),
-    (PostgresType.NUMERIC, {'minimum_fraction_digits': "can't be a string"}),
-    (PostgresType.NUMERIC, {'maximum_fraction_digits': "can't be a string"}),
-
-    # Values in conflict: max must be greater than or equal to min.
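-    # (The entry below sets minimum_fraction_digits to 4 but maximum_fraction_digits to 3, so it should be rejected.)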
-    (
-        PostgresType.NUMERIC,
-        {
-            'minimum_fraction_digits': 4,
-            'maximum_fraction_digits': 3,
-        },
-    ),
-
-    (
-        PostgresType.TIMESTAMP_WITH_TIME_ZONE,
-        {'format': []}
-    ),
-    (
-        PostgresType.TIMESTAMP_WITHOUT_TIME_ZONE,
-        {'format': _too_long_string}
-    ),
-    (
-        PostgresType.TIME_WITH_TIME_ZONE,
-        {'format': _too_long_string}
-    ),
-    (
-        PostgresType.TIME_WITHOUT_TIME_ZONE,
-        {'format': {}}
-    ),
-]
-
-
-@pytest.mark.parametrize("db_type,display_options", _create_display_options_invalid_test_list)
-def test_column_create_wrong_display_options(
-    column_test_table, db_type, display_options, client
-):
-    name = "anewcolumn"
-    data = {"name": name, "type": db_type.id, "display_options": display_options}
-    response = client.post(f"/api/db/v0/tables/{column_test_table.id}/columns/", data)
-    assert response.status_code == 400
-
-
-def test_default_display_options_for_mathesar_money(
-    column_test_table, client
-):
-    name = "moneycolumn"
-    expected_display_options = {
-        'use_grouping': 'true',
-        'number_format': None,
-        'currency_symbol': None,
-        'maximum_fraction_digits': 2,
-        'minimum_fraction_digits': 2,
-        'currency_symbol_location': 'after-minus'}
-
-    data = {"name": name, "type": MathesarCustomType.MATHESAR_MONEY.id, "display_options": None}
-
-    response = client.post(f"/api/db/v0/tables/{column_test_table.id}/columns/", data)
-    response_data = response.json()
-    assert response.status_code == 201
-    assert response_data["display_options"] == expected_display_options
-
-
-def test_column_update_display_options(column_test_table_with_service_layer_options, client):
-    table, _ = column_test_table_with_service_layer_options
-    column_indexes = [2, 3, 4]
-    for column_index in column_indexes:
-        column_name = f"mycolumn{column_index}"
-        column = table.get_columns_by_name([column_name])[0]
-        column_id = column.id
-        display_options = {
-            "input": "dropdown",
-            "custom_labels": {"TRUE": "yes", "FALSE": "no"}
-        }
-        column_data = {
-            'type': PostgresType.BOOLEAN.id,
-            'type_options': {},
-            'display_options': display_options,
-        }
-        response = client.patch(
-            f"/api/db/v0/tables/{table.id}/columns/{column_id}/",
-            column_data,
-        )
-        assert response.status_code == 200
-        assert response.json()["display_options"] == display_options
-
-
-def test_column_update_type_with_existing_display_options(column_test_table_with_service_layer_options, client):
-    table, _ = column_test_table_with_service_layer_options
-    column_name = "mycolumn2"
-    column = table.get_columns_by_name([column_name])[0]
-    column_id = column.id
-    column_data = {'type': PostgresType.BOOLEAN.id}
-    response = client.patch(
-        f"/api/db/v0/tables/{table.id}/columns/{column_id}/",
-        column_data,
-    )
-    assert response.status_code == 200
-    assert response.json()["display_options"] is None
-
-
-def test_column_update_type_invalid_display_options(column_test_table_with_service_layer_options, client):
-    table, _ = column_test_table_with_service_layer_options
-    column_name = "mycolumn3"
-    column = table.get_columns_by_name([column_name])[0]
-    column_id = column.id
-    display_options_data = {'type': 'BOOLEAN', 'display_options': {}}
-    response = client.patch(
-        f"/api/db/v0/tables/{table.id}/columns/{column_id}/",
-        display_options_data,
-    )
-    assert response.status_code == 400
-
-
-def test_column_display_options_type_on_reflection(
-    column_test_table, client
-):
-    table = column_test_table
-    response = client.get(
-        f"/api/db/v0/tables/{table.id}/columns/",
-    )
-    columns = response.json()['results']
-    for column in columns:
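-        # column_test_table's columns are created without display options, so reflection should yield None.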
-        assert column["display_options"] is None
-
-
-def test_column_invalid_display_options_type_on_reflection(
-    column_test_table_with_service_layer_options, client, engine
-):
-    table, columns = column_test_table_with_service_layer_options
-    column_index = 2
-    column = columns[column_index]
-    column_attnum = get_column_attnum_from_name(
-        table.oid, column.name, engine, get_empty_metadata()
-    )
-    with engine.begin() as conn:
-        alter_column_type(table.oid, column_attnum, engine, conn, PostgresType.BOOLEAN)
-    column_id = column.id
-    response = client.get(
-        f"/api/db/v0/tables/{table.id}/columns/{column_id}/",
-    )
-    assert response.json()["display_options"] is None
-
-
-def test_column_alter_same_type_display_options(
-    column_test_table_with_service_layer_options,
-    client, engine
-):
-    table, columns = column_test_table_with_service_layer_options
-    column_index = 2
-    column = columns[column_index]
-    pre_alter_display_options = column.display_options
-    column_attnum = get_column_attnum_from_name(
-        table.oid, column.name, engine, get_empty_metadata()
-    )
-    with engine.begin() as conn:
-        alter_column_type(table.oid, column_attnum, engine, conn, PostgresType.NUMERIC)
-    column_id = column.id
-    response = client.get(
-        f"/api/db/v0/tables/{table.id}/columns/{column_id}/",
-    )
-    assert response.json()["display_options"] == pre_alter_display_options
-
-
-@pytest.mark.parametrize(
-    "display_options,type_options, expected_display_options, expected_type_options",
-    [[None, None, None, None], [{}, {}, {}, {}]]
-)
-def test_column_update_type_with_display_and_type_options_as_null_or_empty_obj(
-    column_test_table, client, display_options, type_options, expected_display_options, expected_type_options
-):
-    db_type_id = MathesarCustomType.URI.id
-    data = {
-        "type": db_type_id,
-        "display_options": display_options,
-        "type_options": type_options
-    }
-    column = column_test_table.get_columns_by_name(['mycolumn3'])[0]
-    response = client.patch(
-        f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/",
-        data=data,
-    )
-    assert response.status_code == 200
-    response_json = response.json()
-    assert response_json["type"] == db_type_id
-    assert response_json["display_options"] == expected_display_options
-    # For some reason, type_options will reflect None, whether it was updated to None or to {}.
-    assert response_json["type_options"] is None
diff --git a/mathesar/tests/api/test_constraint_api.py b/mathesar/tests/api/test_constraint_api.py
deleted file mode 100644
index 3258032116..0000000000
--- a/mathesar/tests/api/test_constraint_api.py
+++ /dev/null
@@ -1,634 +0,0 @@
-import json
-
-import pytest
-from sqlalchemy import Column as SAColumn, ForeignKey, Integer, MetaData, Table as SATable, select
-from sqlalchemy.sql import text
-
-from db.columns.operations.select import get_column_attnum_from_name
-from db.constraints.base import UniqueConstraint
-from db.tables.operations.select import get_oid_from_table
-from mathesar.models.deprecated import Constraint, Table, Column
-from mathesar.api.exceptions.error_codes import ErrorCodes
-from db.metadata import get_empty_metadata
-from mathesar.state import reset_reflection
-
-
-@pytest.fixture
-def multi_column_primary_key_table(create_schema, get_uid, engine):
-    prefix = "multi_col_pk"
-    schema_name = f"schema_{prefix}_{get_uid()}"
-    schema = create_schema(schema_name)
-    db_name = schema.database.name
-    table_name = f"table_{prefix}_{get_uid()}"
-    query = f"""
-    CREATE TABLE "{schema_name}"."{table_name}" (
-        column1 INT,
-        column2 INT,
-        column3 INT,
-        PRIMARY KEY (column1, column2)
-    );
-    """
-    with engine.connect() as conn:
-        conn.execute(text(query))
-        conn.commit()
-    reset_reflection(db_name=db_name)
-    # NOTE filtering by name is impossible here, because db object names are dynamic properties, not model fields
-    all_tables = Table.current_objects.all()
-    for table in all_tables:
-        if table.name == table_name:
-            return table
-    raise Exception("Should never happen.")
-
-
-def _verify_primary_and_unique_constraints(response):
-    response_data = response.json()
-    constraints_data = response_data['results']
-    assert response.status_code == 200
-    assert response_data['count'] == 2
-    assert set(['unique', 'primary']) == set([constraint_data['type'] for constraint_data in constraints_data])
-
-
-def _verify_foreign_key_constraint(
-    constraint_data,
-    columns,
-    name,
-    referent_columns,
-    referent_table_id,
-    onupdate='NO ACTION',
-    ondelete='NO ACTION',
-    deferrable=False,
-):
-    assert constraint_data['columns'] == columns
-    assert constraint_data['referent_columns'] == referent_columns
-    assert constraint_data['referent_table'] == referent_table_id
-    assert constraint_data['name'] == name
-    assert constraint_data['type'] == 'foreignkey'
-    assert constraint_data['onupdate'] == onupdate
-    assert constraint_data['ondelete'] == ondelete
-    assert constraint_data['deferrable'] == deferrable
-    assert 'id' in constraint_data and type(constraint_data['id']) is int
-
-
-def _verify_unique_constraint(constraint_data, columns, name):
-    assert constraint_data['columns'] == columns
-    assert constraint_data['name'] == name
-    assert constraint_data['type'] == 'unique'
-    assert 'id' in constraint_data and type(constraint_data['id']) is int
-
-
-write_client_with_different_roles = [
-    ('superuser_client_factory', 201),
-    ('db_manager_client_factory', 201),
-    ('db_editor_client_factory', 403),
-    ('schema_manager_client_factory', 201),
-    ('schema_viewer_client_factory', 403),
-    ('db_viewer_schema_manager_client_factory', 201)
-]
-
-
-list_client_with_different_roles = [
-    ('superuser_client_factory', 2, 200, 2),
-    ('db_manager_client_factory', 2, 200, 2),
-    ('db_editor_client_factory', 2, 200, 2),
-    ('schema_manager_client_factory', 2, 403, 0),
-    ('schema_viewer_client_factory', 2, 403, 0),
-    ('db_viewer_schema_manager_client_factory', 2, 200, 2)
-]
-
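-# Tuples are (client fixture, expected status for the test schema, expected status for a different schema).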
-
-delete_client_with_different_roles = [
-    ('superuser_client_factory', 204, 204),
-    ('db_manager_client_factory', 204, 204),
-    ('db_editor_client_factory', 403, 403),
-    ('schema_manager_client_factory', 204, 403),
-    ('schema_viewer_client_factory', 403, 403),
-    ('db_viewer_schema_manager_client_factory', 204, 403)
-]
-
-
-def test_default_constraint_list(create_patents_table, client):
-    table_name = 'NASA Constraint List 0'
-    table = create_patents_table(table_name)
-    constraint_column_id = table.get_columns_by_name(['id'])[0].id
-
-    response = client.get(f'/api/db/v0/tables/{table.id}/constraints/')
-    response_data = response.json()
-    constraint_data = response_data['results'][0]
-
-    assert response.status_code == 200
-    assert response_data['count'] == 1
-    assert constraint_data['columns'] == [constraint_column_id]
-    assert 'id' in constraint_data and type(constraint_data['id']) is int
-    assert constraint_data['name'] == 'NASA Constraint List 0_pkey'
-    assert constraint_data['type'] == 'primary'
-
-
-@pytest.mark.parametrize('client_name,expected_constraint_count,different_schema_status_code,different_schema_expected_constraint_count', list_client_with_different_roles)
-def test_constraint_list_based_on_permissions(
-    create_patents_table,
-    request,
-    client_name,
-    expected_constraint_count,
-    different_schema_status_code,
-    different_schema_expected_constraint_count
-):
-    table_name = 'NASA Constraint List 1'
-    table = create_patents_table(table_name)
-    constraint_column = table.get_columns_by_name(['Case Number'])[0]
-    table.add_constraint(UniqueConstraint(None, table.oid, [constraint_column.attnum]))
-    different_schema_table = create_patents_table(table_name, schema_name="Different Schema")
-    constraint_column = different_schema_table.get_columns_by_name(['Case Number'])[0]
-    different_schema_table.add_constraint(
-        UniqueConstraint(None, different_schema_table.oid, [constraint_column.attnum])
-    )
-    client = request.getfixturevalue(client_name)(table.schema)
-    response = client.get(f'/api/db/v0/tables/{table.id}/constraints/')
-    response_data = response.json()
-    assert response_data['count'] == expected_constraint_count
-    response = client.get(f'/api/db/v0/tables/{different_schema_table.id}/constraints/')
-    assert response.status_code == different_schema_status_code
-    if different_schema_status_code == 200:
-        response_data = response.json()
-        assert response_data['count'] == different_schema_expected_constraint_count
-
-
-def test_existing_foreign_key_constraint_list(patent_schema, client):
-    engine = patent_schema._sa_engine
-    referent_col_name = "referred_col"
-    metadata = MetaData(bind=engine, schema=patent_schema.name)
-    referent_table = SATable(
-        "referent",
-        metadata,
-        SAColumn(referent_col_name, Integer, primary_key=True),
-        schema=patent_schema.name
-    )
-    referent_table.create()
-    referent_table_oid = get_oid_from_table(referent_table.name, referent_table.schema, engine)
-    referent_table = Table.current_objects.create(oid=referent_table_oid, schema=patent_schema)
-    fk_column_name = "fk_col"
-    column_list_in = [
-        SAColumn("mycolumn0", Integer, primary_key=True),
-        SAColumn(
-            fk_column_name,
-            Integer,
-            ForeignKey(
-                "referent.referred_col",
-                onupdate="RESTRICT",
-                ondelete="CASCADE",
-                deferrable="NOT DEFERRABLE",
-                match="SIMPLE"
-            ),
-            nullable=False
-        ),
-    ]
-    db_table = SATable(
-        "referrer",
-        metadata,
-        *column_list_in,
-        schema=patent_schema.name
-    )
-    db_table.create()
-    db_table_oid = get_oid_from_table(db_table.name, db_table.schema, engine)
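-    # Register the reflected table as a service-layer model so the API can address it by Django id.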
-    table = Table.current_objects.create(oid=db_table_oid, schema=patent_schema)
-    response = client.get(f'/api/db/v0/tables/{table.id}/constraints/')
-    response_data = response.json()
-    column_attnum = get_column_attnum_from_name(db_table_oid, [fk_column_name], engine, metadata=get_empty_metadata())
-    columns = list(Column.objects.filter(table=table, attnum=column_attnum).values_list('id', flat=True))
-    referent_column_attnum = get_column_attnum_from_name(referent_table_oid, [referent_col_name], engine, metadata=get_empty_metadata())
-    referent_columns = list(Column.objects.filter(table=referent_table, attnum=referent_column_attnum).values_list('id', flat=True))
-    for constraint_data in response_data['results']:
-        if constraint_data['type'] == 'foreignkey':
-            _verify_foreign_key_constraint(
-                constraint_data,
-                columns,
-                'referrer_fk_col_fkey',
-                referent_columns,
-                referent_table.id,
-                onupdate="RESTRICT",
-                ondelete="CASCADE",
-                deferrable=True
-            )
-
-
-def test_multiple_column_constraint_list(create_patents_table, client):
-    table_name = 'NASA Constraint List 2'
-    table = create_patents_table(table_name)
-    constraint_columns = table.get_columns_by_name(['Center', 'Case Number'])
-    constraint_column_id_list = [constraint_columns[0].id, constraint_columns[1].id]
-    constraint_column_attnum_list = [constraint_columns[0].attnum, constraint_columns[1].attnum]
-    table.add_constraint(UniqueConstraint(None, table.oid, constraint_column_attnum_list))
-
-    response = client.get(f'/api/db/v0/tables/{table.id}/constraints/')
-    response_data = response.json()
-
-    _verify_primary_and_unique_constraints(response)
-    for constraint_data in response_data['results']:
-        if constraint_data['type'] == 'unique':
-            _verify_unique_constraint(
-                constraint_data, constraint_column_id_list,
-                'NASA Constraint List 2_Center_Case Number_key'
-            )
-
-
-def test_retrieve_constraint(create_patents_table, client):
-    table_name = 'NASA Constraint List 3'
-    table = create_patents_table(table_name)
-    constraint_column = table.get_columns_by_name(['Case Number'])[0]
-    table.add_constraint(UniqueConstraint(None, table.oid, [constraint_column.attnum]))
-    list_response = client.get(f'/api/db/v0/tables/{table.id}/constraints/')
-    list_response_data = list_response.json()
-    assert list_response_data['count'] == 2
-    for constraint_data in list_response_data['results']:
-        if constraint_data['type'] == 'unique':
-            constraint_id = constraint_data['id']
-            break
-
-    response = client.get(f'/api/db/v0/tables/{table.id}/constraints/{constraint_id}/')
-    assert response.status_code == 200
-    _verify_unique_constraint(response.json(), [constraint_column.id], 'NASA Constraint List 3_Case Number_key')
-
-
-def test_create_multiple_column_unique_constraint(create_patents_table, client):
-    table_name = 'NASA Constraint List 4'
-    table = create_patents_table(table_name)
-    constraint_columns = table.get_columns_by_name(['Center', 'Case Number'])
-    constraint_column_1 = constraint_columns[0]
-    constraint_column_2 = constraint_columns[1]
-    constraint_column_id_list = [constraint_column_1.id, constraint_column_2.id]
-    data = {
-        'type': 'unique',
-        'columns': constraint_column_id_list
-    }
-    response = client.post(
-        f'/api/db/v0/tables/{table.id}/constraints/', data
-    )
-    assert response.status_code == 201
-    _verify_unique_constraint(
-        response.json(), constraint_column_id_list,
-        'NASA Constraint List 4_Center_Case Number_key'
-    )
-
-
-def test_create_single_column_unique_constraint(create_patents_table, client):
-    table_name = 'NASA Constraint List 5'
-    table = create_patents_table(table_name)
-    constraint_column_id = table.get_columns_by_name(['Case Number'])[0].id
-    data = {
-        'type': 'unique',
-        'columns': [constraint_column_id]
-    }
-    response = client.post(
-        f'/api/db/v0/tables/{table.id}/constraints/',
-        data=json.dumps(data),
-        content_type='application/json'
-    )
-    assert response.status_code == 201
-    _verify_unique_constraint(response.json(), [constraint_column_id], 'NASA Constraint List 5_Case Number_key')
-
-
-@pytest.mark.parametrize('client_name, expected_status_code', write_client_with_different_roles)
-def test_create_unique_constraint_by_different_roles(create_patents_table, request, client_name, expected_status_code):
-    table_name = 'NASA Constraint List 5'
-    table = create_patents_table(table_name)
-    constraint_column_id = table.get_columns_by_name(['Case Number'])[0].id
-    data = {
-        'type': 'unique',
-        'columns': [constraint_column_id]
-    }
-    client = request.getfixturevalue(client_name)(table.schema)
-    response = client.post(
-        f'/api/db/v0/tables/{table.id}/constraints/',
-        data=json.dumps(data),
-        content_type='application/json'
-    )
-    assert response.status_code == expected_status_code
-
-
-def test_create_unique_constraint_with_name_specified(create_patents_table, client):
-    table_name = 'NASA Constraint List 6'
-    table = create_patents_table(table_name)
-    constraint_columns = table.get_columns_by_name(['Case Number'])
-    constraint_column_id_list = [constraint_columns[0].id]
-    data = {
-        'name': 'awesome_constraint',
-        'type': 'unique',
-        'columns': constraint_column_id_list
-    }
-    response = client.post(
-        f'/api/db/v0/tables/{table.id}/constraints/', data)
-    assert response.status_code == 201
-    _verify_unique_constraint(response.json(), constraint_column_id_list, 'awesome_constraint')
-
-
-def test_create_single_column_foreign_key_constraint(two_foreign_key_tables, client):
-    referrer_table, referent_table = two_foreign_key_tables
-    referent_column = referent_table.get_columns_by_name(["Id"])[0]
-    referrer_column = referrer_table.get_columns_by_name(["Center"])[0]
-    referent_table.add_constraint(
-        UniqueConstraint(None, referent_table.oid, [referent_column.attnum])
-    )
-    data = {
-        'type': 'foreignkey',
-        'columns': [referrer_column.id],
-        'referent_columns': [referent_column.id]
-    }
-    response = client.post(f'/api/db/v0/tables/{referrer_table.id}/constraints/', data)
-    assert response.status_code == 201
-    fk_name = referrer_table.name + '_Center_fkey'
-    _verify_foreign_key_constraint(
-        response.json(), [referrer_column.id], fk_name,
-        [referent_column.id], referent_table.id
-    )
-
-
-def test_foreign_key_constraint_on_invalid_table_name(
-    create_base_table,
-    create_referent_table,
-    client
-):
-    referrer_table = create_base_table(table_name="Base_table")
-    # Having round brackets in the referent_table name is invalid.
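-    # The API should reject it with an InvalidTableName error, as asserted below.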
-    referent_table = create_referent_table(table_name="Referent_table(alpha)")
-    referrer_column = referrer_table.get_columns_by_name(["Center"])[0]
-    referent_column = referent_table.get_columns_by_name(["Id"])[0]
-    referent_table.add_constraint(
-        UniqueConstraint(None, referent_table.oid, [referent_column.attnum])
-    )
-    data = {
-        'type': 'foreignkey',
-        'columns': [referrer_column.id],
-        'referent_columns': [referent_column.id],
-        'referent_table': referent_table.id
-    }
-    response = client.post(f'/api/db/v0/tables/{referrer_table.id}/constraints/', data)
-    response_data = response.json()[0]
-    assert response.status_code == 400
-    assert response_data['code'] == ErrorCodes.InvalidTableName.value
-    assert response_data['message'] == 'Table name "Referent_table(alpha)" is invalid.'
-    assert response_data['field'] == 'referent_table'
-
-
-def test_create_single_column_foreign_key_constraint_with_options(
-    two_foreign_key_tables, client
-):
-    referrer_table, referent_table = two_foreign_key_tables
-    referent_column = referent_table.get_columns_by_name(["Id"])[0]
-    referrer_column = referrer_table.get_columns_by_name(["Center"])[0]
-    referent_table.add_constraint(
-        UniqueConstraint(None, referent_table.oid, [referent_column.attnum])
-    )
-    data = {
-        'type': 'foreignkey',
-        'columns': [referrer_column.id],
-        'referent_columns': [referent_column.id],
-        'onupdate': "RESTRICT",
-        'ondelete': "CASCADE",
-        'deferrable': False,
-    }
-    response = client.post(f'/api/db/v0/tables/{referrer_table.id}/constraints/', data)
-    assert response.status_code == 201
-    fk_name = referrer_table.name + '_Center_fkey'
-    _verify_foreign_key_constraint(
-        response.json(), [referrer_column.id], fk_name,
-        [referent_column.id],
-        referent_table.id,
-        onupdate='RESTRICT',
-        ondelete='CASCADE',
-        deferrable=False,
-    )
-
-
-def test_create_self_referential_single_column_foreign_key_constraint(
-    self_referential_table, client, engine
-):
-    table = self_referential_table
-    column = table.get_columns_by_name(["Id"])[0]
-    parent_column = table.get_columns_by_name(["Parent"])[0]
-    table.add_constraint(UniqueConstraint(None, table.oid, [column.attnum]))
-
-    data = {
-        'type': 'foreignkey',
-        'columns': [parent_column.id],
-        'referent_columns': [column.id]
-    }
-    response = client.post(f'/api/db/v0/tables/{table.id}/constraints/', data)
-    assert response.status_code == 201
-    fk_name = table.name + '_Parent_fkey'
-    _verify_foreign_key_constraint(
-        response.json(), [parent_column.id], fk_name,
-        [column.id], table.id
-    )
-    # Recursively fetch children
-    with engine.begin() as conn:
-        sa_table = table._sa_table
-        head = select(sa_table).filter(sa_table.c.Id == "1").cte(recursive=True)
-        u = head.union_all(select(sa_table).join(head, sa_table.c.Parent == head.c.Id))
-        stmt = select(u.c.Id)
-        created_default = conn.execute(stmt).fetchall()
-        assert created_default == [("1",), ("2",), ("4",)]
-
-
-def test_create_single_column_foreign_key_constraint_invalid_related_data(
-    two_invalid_related_data_foreign_key_tables, client
-):
-    referrer_table, referent_table = two_invalid_related_data_foreign_key_tables
-    referent_column = referent_table.get_columns_by_name(["Id"])[0]
-    referrer_column = referrer_table.get_columns_by_name(["Center"])[0]
-    referent_table.add_constraint(UniqueConstraint(None, referent_table.oid, [referent_column.attnum]))
-
-    data = {
-        'type': 'foreignkey',
-        'columns': [referrer_column.id],
-        'referent': {'table': referent_table.id, 'columns': [referent_column.id]}
-    }
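-    # The fixture seeds referrer values that can't satisfy the foreign key, so the request should fail.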
-    response = client.post(f'/api/db/v0/tables/{referrer_table.id}/constraints/', data)
-    assert response.status_code == 400
-
-
-def test_create_multiple_column_foreign_key_constraint(
-    two_multi_column_foreign_key_tables, client
-):
-    referrer_table, referent_table = two_multi_column_foreign_key_tables
-    referent_columns = referent_table.get_columns_by_name(['Name', 'City'])
-    referrer_columns = referrer_table.get_columns_by_name(["Center", 'Center City'])
-    referent_columns_id = [referent_column.id for referent_column in referent_columns]
-    referrer_columns_id = [referrer_column.id for referrer_column in referrer_columns]
-    referent_table.add_constraint(
-        UniqueConstraint(
-            None, referent_table.oid, [referent_column.attnum for referent_column in referent_columns]
-        )
-    )
-
-    data = {
-        'type': 'foreignkey',
-        'columns': referrer_columns_id,
-        'referent_columns': referent_columns_id
-    }
-    response = client.post(f'/api/db/v0/tables/{referrer_table.id}/constraints/', data)
-    assert response.status_code == 201
-    fk_name = referrer_table.name + '_Center_Center City_fkey'
-    _verify_foreign_key_constraint(
-        response.json(), referrer_columns_id, fk_name, referent_columns_id, referent_table.id
-    )
-
-
-def test_drop_constraint(create_patents_table, client):
-    table_name = 'NASA Constraint List 7'
-    table = create_patents_table(table_name)
-
-    constraint_column = table.get_columns_by_name(['Case Number'])[0]
-    table.add_constraint(UniqueConstraint(None, table.oid, [constraint_column.attnum]))
-    list_response = client.get(f'/api/db/v0/tables/{table.id}/constraints/')
-    list_response_data = list_response.json()
-    assert list_response_data['count'] == 2
-    for constraint_data in list_response_data['results']:
-        if constraint_data['type'] == 'unique':
-            constraint_id = constraint_data['id']
-            break
-
-    response = client.delete(f'/api/db/v0/tables/{table.id}/constraints/{constraint_id}/')
-    assert response.status_code == 204
-    new_list_response = client.get(f'/api/db/v0/tables/{table.id}/constraints/')
-    assert new_list_response.json()['count'] == 1
-
-
-def _first_unique_constraint(table):
-    constraints = Constraint.objects.filter(table=table)
-    for constraint_data in constraints:
-        if constraint_data.type == 'unique':
-            constraint_id = constraint_data.id
-            break
-    return constraint_id
-
-
-def test_create_unique_constraint_with_duplicate_name(create_patents_table, client):
-    table_name = 'NASA Constraint List 8'
-    table = create_patents_table(table_name)
-    constraint_columns = table.get_columns_by_name(['Center', 'Case Number'])
-    constraint_column_id_list = [constraint_columns[0].id, constraint_columns[1].id]
-    constraint_column_attnum_list = [constraint_columns[0].attnum, constraint_columns[1].attnum]
-    constraint_name = 'unique_name'
-    table.add_constraint(UniqueConstraint(constraint_name, table.oid, constraint_column_attnum_list))
-    data = {
-        'name': constraint_name,
-        'type': 'unique',
-        'columns': constraint_column_id_list
-    }
-    response = client.post(
-        f'/api/db/v0/tables/{table.id}/constraints/',
-        data=json.dumps(data),
-        content_type='application/json'
-    )
-    assert response.status_code == 400
-    response_body = response.json()[0]
-    assert response_body['message'] == 'Relation with the same name already exists'
-    assert response_body['code'] == ErrorCodes.DuplicateTableError.value
-
-
-def test_create_unique_constraint_for_non_unique_column(create_patents_table, client):
-    table_name = 'NASA Constraint List 9'
-    table = create_patents_table(table_name)
-    constraint_column = table.get_columns_by_name(['Center'])[0]
-    data = {
-        'type': 'unique',
-        'columns': [constraint_column.id]
-    }
-    response = client.post(
-        f'/api/db/v0/tables/{table.id}/constraints/',
-        data=json.dumps(data),
-        content_type='application/json'
-    )
-    assert response.status_code == 400
-    response_body = response.json()[0]
-    assert response_body['message'] == 'This column has non-unique values so a unique constraint cannot be set'
-    assert response_body['code'] == ErrorCodes.UniqueViolation.value
-
-
-def test_drop_nonexistent_constraint(create_patents_table, client):
-    table_name = 'NASA Constraint List 10'
-    table = create_patents_table(table_name)
-
-    response = client.delete(f'/api/db/v0/tables/{table.id}/constraints/345/')
-    assert response.status_code == 404
-    response_data = response.json()[0]
-    assert response_data['message'] == "Not found."
- assert response_data['code'] == ErrorCodes.NotFound.value - - -@pytest.mark.parametrize('client_name, expected_status_code, different_schema_expected_status_code', delete_client_with_different_roles) -def test_drop_constraint_based_on_permission(create_patents_table, request, client_name, expected_status_code, different_schema_expected_status_code): - table_name = 'NASA Constraint List 1' - table = create_patents_table(table_name) - constraint_column = table.get_columns_by_name(['Case Number'])[0] - table.add_constraint(UniqueConstraint(None, table.oid, [constraint_column.attnum])) - different_schema_table = create_patents_table(table_name, schema_name="Different Schema") - constraint_column = different_schema_table.get_columns_by_name(['Case Number'])[0] - different_schema_table.add_constraint( - UniqueConstraint(None, different_schema_table.oid, [constraint_column.attnum]) - ) - client = request.getfixturevalue(client_name)(table.schema) - constraint_id = _first_unique_constraint(table) - response = client.delete(f'/api/db/v0/tables/{table.id}/constraints/{constraint_id}/') - assert response.status_code == expected_status_code - different_schema_table_constraint_id = _first_unique_constraint(different_schema_table) - response = client.delete(f'/api/db/v0/tables/{different_schema_table.id}/constraints/{different_schema_table_constraint_id}/') - assert response.status_code == different_schema_expected_status_code - - -def test_drop_nonexistent_table(client): - response = client.delete('/api/db/v0/tables/9387489/constraints/4234/') - assert response.status_code == 404 - response_data = response.json()[0] - assert response_data['message'] == "Table doesn't exist" - assert response_data['code'] == ErrorCodes.TableNotFound.value - - -def test_empty_column_list(create_patents_table, client): - table_name = 'NASA Constraint List 11' - table = create_patents_table(table_name) - data = { - 'type': 'unique', - 'columns': [] - } - response = client.post( - f'/api/db/v0/tables/{table.id}/constraints/', data - ) - response_data = response.json()[0] - assert response.status_code == 400 - assert response_data['code'] == ErrorCodes.ConstraintColumnEmpty.value - assert response_data['message'] == 'Constraint column field cannot be empty' - - -def test_invalid_constraint_type(create_patents_table, client): - table_name = 'NASA Constraint List 12' - table = create_patents_table(table_name) - invalid_constraint = 'foo' - data = { - 'type': invalid_constraint, - 'columns': [1] - } - response = client.post( - f'/api/db/v0/tables/{table.id}/constraints/', data - ) - response_data = response.json()[0] - assert response.status_code == 400 - assert response_data['code'] == ErrorCodes.UnsupportedConstraint.value - assert f'Operations related to {invalid_constraint} constraint are currently not supported' in response_data['message'] - - -def test_multi_column_primary_key_constraint_list(multi_column_primary_key_table, client): - table = multi_column_primary_key_table - response = client.get(f'/api/db/v0/tables/{table.id}/constraints/') - response_data = response.json() - constraints_data = response_data['results'] - assert len(constraints_data) == 1 - constraint_data = constraints_data[0] - assert constraint_data['type'] == 'primary' - expected_pk_col_names = set(['column1', 'column2']) - expected_pk_col_ids = set( - col.id - for col - in table.columns.all() - if col.name in expected_pk_col_names - ) - assert set(constraint_data['columns']) == expected_pk_col_ids diff --git 
a/mathesar/tests/api/test_database_api.py b/mathesar/tests/api/test_database_api.py deleted file mode 100644 index 6aeb80f931..0000000000 --- a/mathesar/tests/api/test_database_api.py +++ /dev/null @@ -1,191 +0,0 @@ -import pytest -from django.core.cache import cache -from sqlalchemy import text - -from db.metadata import get_empty_metadata -from mathesar.models.users import DatabaseRole -from mathesar.state.django import reflect_db_objects -from mathesar.models.deprecated import Table, Schema, Connection -from django.conf import settings -from django.core.exceptions import ObjectDoesNotExist -from db.install import install_mathesar -from db.engine import create_future_engine_with_custom_types - - -def _recreate_db(db_name): - credentials = settings.DATABASES['default'] - root_engine = create_future_engine_with_custom_types( - credentials['USER'], - credentials['PASSWORD'], - credentials['HOST'], - credentials['NAME'], - credentials['PORT'] - ) - with root_engine.connect() as conn: - conn.execution_options(isolation_level="AUTOCOMMIT") - conn.execute(text(f"DROP DATABASE IF EXISTS {db_name} WITH (FORCE);")) - conn.execute(text(f"CREATE DATABASE {db_name};")) - - -def _remove_db(db_name): - credentials = settings.DATABASES['default'] - root_engine = create_future_engine_with_custom_types( - credentials['USER'], - credentials['PASSWORD'], - credentials['HOST'], - credentials['NAME'], - credentials['PORT'] - ) - with root_engine.connect() as conn: - conn.execution_options(isolation_level="AUTOCOMMIT") - conn.execute(text(f"DROP DATABASE IF EXISTS {db_name} WITH (FORCE);")) - - -@pytest.fixture -def test_db_name(worker_id): - default_test_db_name = "mathesar_db_api_test" - return f"{default_test_db_name}_{worker_id}" - - -@pytest.fixture -def db_dj_model(test_db_name): - _recreate_db(test_db_name) - db = Connection.objects.get_or_create( - name=test_db_name, - defaults={ - 'db_name': test_db_name, - 'username': 'mathesar', - 'password': 'mathesar', - 'host': 'mathesar_dev_db', - 'port': 5432 - } - )[0] - reflect_db_objects(get_empty_metadata()) - yield db - _remove_db(test_db_name) - db.delete() - - -def test_database_reflection_delete(db_dj_model): - assert db_dj_model.deleted is False # check DB is not marked deleted inappropriately - _remove_db(db_dj_model.name) - reflect_db_objects(get_empty_metadata()) - fresh_db_model = Connection.objects.get(name=db_dj_model.name) - assert fresh_db_model.deleted is True # check DB is marked deleted appropriately - - -def test_database_reflection_delete_schema(db_dj_model): - Schema.objects.create(oid=1, database=db_dj_model) - # We expect the test schema + 'public' - assert Schema.objects.filter(database=db_dj_model).count() == 2 - _remove_db(db_dj_model.name) - reflect_db_objects(get_empty_metadata()) - assert Schema.objects.filter(database=db_dj_model).count() == 0 - - -def test_database_reflection_delete_table(db_dj_model): - schema = Schema.objects.create(oid=1, database=db_dj_model) - Table.objects.create(oid=2, schema=schema) - assert Table.objects.filter(schema__database=db_dj_model).count() == 1 - _remove_db(db_dj_model.name) - reflect_db_objects(get_empty_metadata()) - assert Table.objects.filter(schema__database=db_dj_model).count() == 0 - - -def check_database(database, response_database): - assert database.id == response_database['id'] - assert database.name == response_database['nickname'] - assert 'supported_types_url' in response_database - assert '/api/ui/v0/connections/' in response_database['supported_types_url'] - assert 
response_database['supported_types_url'].endswith('/types/') - - -def test_database_list(client, db_dj_model): - response = client.get('/api/db/v0/connections/') - response_data = response.json() - assert response.status_code == 200 - assert response_data['count'] == 1 - assert len(response_data['results']) == 1 - check_database(db_dj_model, response_data['results'][0]) - - -def test_database_list_permissions(FUN_create_dj_db, get_uid, client, client_bob, client_alice, user_bob, user_alice): - db1 = FUN_create_dj_db(get_uid()) - DatabaseRole.objects.create(user=user_bob, database=db1, role='viewer') - DatabaseRole.objects.create(user=user_alice, database=db1, role='viewer') - - db2 = FUN_create_dj_db(get_uid()) - DatabaseRole.objects.create(user=user_bob, database=db2, role='manager') - DatabaseRole.objects.create(user=user_alice, database=db2, role='editor') - - db3 = FUN_create_dj_db(get_uid()) - DatabaseRole.objects.create(user=user_bob, database=db3, role='editor') - - response = client_bob.get('/api/db/v0/connections/') - response_data = response.json() - assert response.status_code == 200 - assert response_data['count'] == 3 - - response = client_alice.get('/api/db/v0/connections/') - response_data = response.json() - assert response.status_code == 200 - assert response_data['count'] == 2 - - -def test_database_list_deleted(client, db_dj_model): - # Note that there is no longer a distinction between "deleted" and undeleted - # connections in the API. - _remove_db(db_dj_model.name) - cache.clear() - response = client.get('/api/db/v0/connections/') - response_data = response.json() - assert response.status_code == 200 - assert response_data['count'] == 1 - assert len(response_data['results']) == 1 - check_database(db_dj_model, response_data['results'][0]) - - -def test_delete_dbconn_with_msar_schemas(client, db_dj_model): - # install mathesar specific schemas - install_mathesar( - db_dj_model.name, - db_dj_model.username, - db_dj_model.password, - db_dj_model.host, - db_dj_model.port, - True - ) - engine = db_dj_model._sa_engine - check_schema_exists = text( - "SELECT schema_name FROM information_schema.schemata \ - WHERE schema_name LIKE '%msar' OR schema_name = 'mathesar_types';" - ) - with engine.connect() as conn: - before_deletion = conn.execute(check_schema_exists) - response = client.delete(f'/api/db/v0/connections/{db_dj_model.id}/?del_msar_schemas=true') - after_deletion = conn.execute(check_schema_exists) - - with pytest.raises(ObjectDoesNotExist): - Connection.objects.get(id=db_dj_model.id) - assert response.status_code == 204 - assert before_deletion.rowcount == 3 - assert after_deletion.rowcount == 0 - - -def test_database_detail(client, db_dj_model): - response = client.get(f'/api/db/v0/connections/{db_dj_model.id}/') - response_database = response.json() - - assert response.status_code == 200 - check_database(db_dj_model, response_database) - - -def test_database_detail_permissions(FUN_create_dj_db, get_uid, client_bob, client_alice, user_bob, user_alice): - db1 = FUN_create_dj_db(get_uid()) - DatabaseRole.objects.create(user=user_bob, database=db1, role='viewer') - - response = client_bob.get(f'/api/db/v0/connections/{db1.id}/') - assert response.status_code == 200 - - response = client_alice.get(f'/api/db/v0/connections/{db1.id}/') - assert response.status_code == 404 diff --git a/mathesar/tests/api/test_db_type_api.py b/mathesar/tests/api/test_db_type_api.py deleted file mode 100644 index b440e46cc6..0000000000 --- a/mathesar/tests/api/test_db_type_api.py +++ 
/dev/null @@ -1,11 +0,0 @@ -def test_db_type_list_well_formed(client, test_db_model): - database_id = test_db_model.id - response = client.get(f'/api/db/v0/connections/{database_id}/types/') - assert response.status_code == 200 - json_db_types = response.json() - assert isinstance(json_db_types, list) - assert len(json_db_types) > 0 - for json_db_type in json_db_types: - assert json_db_type.get('id') is not None - hints = json_db_type.get('hints') - assert hints is None or isinstance(hints, list) diff --git a/mathesar/tests/api/test_dependents_api.py b/mathesar/tests/api/test_dependents_api.py deleted file mode 100644 index 0eadaae222..0000000000 --- a/mathesar/tests/api/test_dependents_api.py +++ /dev/null @@ -1,94 +0,0 @@ -def _get_object_dependent_ids(dependents, object_id, type): - return [ - int(d['obj']['id']) - for d in dependents - if int(d['parent_obj']['id']) == object_id - and d['parent_obj']['type'] == type - and 'id' in d['obj'] - ] - - -def _get_constraint_ids(table_constraint_results): - return [r['id'] for r in table_constraint_results] - - -def test_dependents_response_attrs(library_ma_tables, client): - items_id = library_ma_tables["Items"].id - response = client.get(f'/api/db/v0/tables/{items_id}/dependents/') - assert response.status_code == 200 - response_data = response.json() - - dependent_expected_attrs = ['obj', 'parent_obj'] - assert len(response_data) == 4 - assert all( - [ - all(attr in dependent for attr in dependent_expected_attrs) - for dependent in response_data - ] - ) - assert all( - [ - dependent is not None - for dependent in response_data - ] - ) - - -def test_dependents_response(library_ma_tables, client): - items_id = library_ma_tables["Items"].id - checkouts_id = library_ma_tables["Checkouts"].id - - items_dependents = client.get(f'/api/db/v0/tables/{items_id}/dependents/').json() - items_dependent_ids = _get_object_dependent_ids(items_dependents, items_id, 'table') - - items_constraints = client.get(f'/api/db/v0/tables/{items_id}/constraints/').json()['results'] - checkouts_constraints = client.get(f'/api/db/v0/tables/{checkouts_id}/constraints/').json()['results'] - - items_constraints_ids = _get_constraint_ids(items_constraints) - checkouts_items_fkey_id = [c['id'] for c in checkouts_constraints if "Item" in c['name']] - - assert sorted(items_dependent_ids) == sorted(items_constraints_ids + checkouts_items_fkey_id) - - -def test_schema_dependents(library_ma_tables, client): - table_names = ['Authors', 'Checkouts', 'Items', 'Patrons', 'Publications', 'Publishers'] - table_ids = [library_ma_tables[name].id for name in table_names] - - schema_id = library_ma_tables['Authors'].schema.id - schema_dependents_graph = client.get(f'/api/db/v0/schemas/{schema_id}/dependents/').json() - - schema_dependents_ids = _get_object_dependent_ids(schema_dependents_graph, schema_id, 'schema') - - assert sorted(table_ids) == sorted(schema_dependents_ids) - - -def test_column_dependents(library_ma_tables, client): - patrons = library_ma_tables['Patrons'] - patronds_id_col = patrons.get_column_by_name('id') - - patrons_id_dependents_graph = client.get(f'/api/db/v0/tables/{patrons.id}/columns/{patronds_id_col.id}/dependents/').json() - patrons_id_dependents_ids = _get_object_dependent_ids(patrons_id_dependents_graph, patrons.id, 'table column') - - checkouts = library_ma_tables['Checkouts'] - patrons_constraints = client.get(f'/api/db/v0/tables/{patrons.id}/constraints/').json()['results'] - checkouts_constraints = 
client.get(f'/api/db/v0/tables/{checkouts.id}/constraints/').json()['results'] - - patrons_pk_id = [c['id'] for c in patrons_constraints if c['name'] == 'Patrons_pkey'] - checkouts_patrons_fk_id = [c['id'] for c in checkouts_constraints if c['name'] == 'Checkouts_Patron id_fkey'] - - assert sorted(patrons_id_dependents_ids) == sorted(patrons_pk_id + checkouts_patrons_fk_id) - - -def test_dependents_filters(library_ma_tables, client): - publishers_id = library_ma_tables['Publishers'].id - exclude_types = ['table constraint'] - query_params = {'exclude': exclude_types} - publishers_dependents_graph = client.get(f'/api/db/v0/tables/{publishers_id}/dependents/', data=query_params).json() - - dependents_types = [dependent['obj']['type'] for dependent in publishers_dependents_graph] - - assert all( - [ - type not in dependents_types for type in exclude_types - ] - ) diff --git a/mathesar/tests/api/test_function_api.py b/mathesar/tests/api/test_function_api.py deleted file mode 100644 index 4d65f82f45..0000000000 --- a/mathesar/tests/api/test_function_api.py +++ /dev/null @@ -1,24 +0,0 @@ -from mathesar.models.users import DatabaseRole - - -def test_function_list_well_formed(client, test_db_model): - database_id = test_db_model.id - response = client.get(f'/api/db/v0/connections/{database_id}/functions/') - assert response.status_code == 200 - json_db_functions = response.json() - assert isinstance(json_db_functions, list) - assert len(json_db_functions) > 0 - for json_db_function in json_db_functions: - assert json_db_function.get('id') is not None - hints = json_db_function.get('hints') - assert hints is None or isinstance(hints, list) - - -def test_function_list_permissions(FUN_create_dj_db, get_uid, client_bob, client_alice, user_bob, user_alice): - database = FUN_create_dj_db(get_uid()) - DatabaseRole.objects.create(user=user_bob, database=database, role='viewer') - response = client_bob.get(f'/api/db/v0/connections/{database.id}/functions/') - assert response.status_code == 200 - - response = client_alice.get(f'/api/db/v0/connections/{database.id}/functions/') - assert response.status_code == 404 diff --git a/mathesar/tests/api/test_links.py b/mathesar/tests/api/test_links.py deleted file mode 100644 index abd79f42af..0000000000 --- a/mathesar/tests/api/test_links.py +++ /dev/null @@ -1,319 +0,0 @@ -import pytest -from sqlalchemy import Column, Integer, MetaData, String -from sqlalchemy import Table as SATable - -from db.constraints.utils import ConstraintType -from db.tables.operations.select import get_oid_from_table -from db.tables.utils import get_primary_key_column - -from mathesar.models.deprecated import Constraint, Table -from mathesar.api.exceptions.error_codes import ErrorCodes - - -@pytest.fixture -def column_test_table(patent_schema): - engine = patent_schema._sa_engine - column_list_in = [ - Column("mycolumn0", Integer, primary_key=True), - Column("mycolumn1", Integer, nullable=False), - Column("mycolumn2", Integer, server_default="5"), - Column("mycolumn3", String), - ] - db_table = SATable( - "anewtable", - MetaData(bind=engine), - *column_list_in, - schema=patent_schema.name - ) - db_table.create() - db_table_oid = get_oid_from_table(db_table.name, db_table.schema, engine) - table = Table.current_objects.create(oid=db_table_oid, schema=patent_schema) - return table - - -write_clients_with_status_code = [ - ('superuser_client_factory', 201), - ('db_manager_client_factory', 201), - ('db_editor_client_factory', 400), - ('schema_manager_client_factory', 201), - 
('schema_viewer_client_factory', 400), - ('db_viewer_schema_manager_client_factory', 201) -] - - -@pytest.mark.parametrize('client_name, expected_status_code', write_clients_with_status_code) -def test_one_to_one_link_create_permissions( - column_test_table, - request, - create_patents_table, - client_name, - expected_status_code -): - table_2 = create_patents_table('Table 2') - client = request.getfixturevalue(client_name)(table_2.schema) - - data = { - "link_type": "one-to-one", - "reference_column_name": "col_1", - "reference_table": table_2.id, - "referent_table": column_test_table.id, - } - response = client.post( - "/api/db/v0/links/", - data=data, - ) - assert response.status_code == expected_status_code - - -def test_one_to_many_link_on_invalid_table_name( - create_base_table, - create_referent_table, - client -): - reference_table = create_base_table('Base_table') - # Having round brackets in the referent_table name is invalid. - referent_table = create_referent_table('Referent_table(alpha)') - data = { - "link_type": "one-to-many", - "reference_column_name": "col_1", - "reference_table": reference_table.id, - "referent_table": referent_table.id, - } - response = client.post( - "/api/db/v0/links/", - data=data, - ) - response_data = response.json()[0] - assert response.status_code == 400 - assert response_data['code'] == ErrorCodes.InvalidTableName.value - assert response_data['message'] == 'Table name "Referent_table(alpha)" is invalid.' - assert response_data['field'] == 'referent_table' - - -def test_one_to_one_link_on_invalid_table_name( - create_base_table, - create_referent_table, - client -): - reference_table = create_base_table('Base_table') - # Having round brackets in the referent_table name is invalid. - referent_table = create_referent_table('Referent_table(alpha)') - data = { - "link_type": "one-to-one", - "reference_column_name": "col_1", - "reference_table": reference_table.id, - "referent_table": referent_table.id, - } - response = client.post( - "/api/db/v0/links/", - data=data, - ) - response_data = response.json()[0] - assert response.status_code == 400 - assert response_data['code'] == ErrorCodes.InvalidTableName.value - assert response_data['message'] == 'Table name "Referent_table(alpha)" is invalid.' 
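
# A small sketch of the /api/db/v0/links/ payload shape driven by the
# one-to-one and one-to-many tests here; the table IDs are hypothetical, and
# the many-to-many variant (a "referents" list plus "mapping_table_name")
# appears in the tests further below.
def build_fk_link_payload(link_type, reference_table_id, referent_table_id,
                          reference_column_name="col_1"):
    # Only the two single-column link types take this flat shape.
    assert link_type in ("one-to-one", "one-to-many")
    return {
        "link_type": link_type,
        "reference_column_name": reference_column_name,
        "reference_table": reference_table_id,
        "referent_table": referent_table_id,
    }
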
- assert response_data['field'] == 'referent_table' - - -def test_one_to_one_link_create(column_test_table, client, create_patents_table): - table_2 = create_patents_table('Table 2') - data = { - "link_type": "one-to-one", - "reference_column_name": "col_1", - "reference_table": table_2.id, - "referent_table": column_test_table.id, - } - response = client.post( - "/api/db/v0/links/", - data=data, - ) - assert response.status_code == 201 - constraints = Constraint.objects.filter(table=table_2) - assert constraints.count() == 3 - - unique_constraint = next( - constraint - for constraint in constraints - if constraint.type == ConstraintType.UNIQUE.value - ) - fk_constraint = next( - constraint - for constraint in constraints - if constraint.type == ConstraintType.FOREIGN_KEY.value - ) - unique_constraint_columns = list(unique_constraint.columns.all()) - fk_constraint_columns = list(fk_constraint.columns.all()) - referent_columns = list(fk_constraint.referent_columns.all()) - assert unique_constraint_columns == table_2.get_columns_by_name(['col_1']) - assert fk_constraint_columns == table_2.get_columns_by_name(['col_1']) - referent_primary_key_column_name = get_primary_key_column(column_test_table._sa_table).name - assert referent_columns == column_test_table.get_columns_by_name([referent_primary_key_column_name]) - - -@pytest.mark.parametrize('client_name, expected_status_code', write_clients_with_status_code) -def test_one_to_many_link_create_permissions( - column_test_table, - request, - create_patents_table, - client_name, - expected_status_code -): - - table_2 = create_patents_table('Table 2') - client = request.getfixturevalue(client_name)(table_2.schema) - - data = { - "link_type": "one-to-many", - "reference_column_name": "col_1", - "reference_table": table_2.id, - "referent_table": column_test_table.id, - } - response = client.post( - "/api/db/v0/links/", - data=data, - ) - assert response.status_code == expected_status_code - - -def test_one_to_many_link_create(column_test_table, client, create_patents_table): - table_2 = create_patents_table('Table 2') - data = { - "link_type": "one-to-many", - "reference_column_name": "col_1", - "reference_table": table_2.id, - "referent_table": column_test_table.id, - } - response = client.post( - "/api/db/v0/links/", - data=data, - ) - assert response.status_code == 201 - constraints = Constraint.objects.filter(table=table_2) - assert constraints.count() == 2 - - fk_constraint = next( - constraint - for constraint in constraints - if constraint.type == ConstraintType.FOREIGN_KEY.value - ) - fk_constraint_columns = list(fk_constraint.columns.all()) - referent_columns = list(fk_constraint.referent_columns.all()) - assert fk_constraint_columns == table_2.get_columns_by_name(['col_1']) - referent_primary_key_column_name = get_primary_key_column(column_test_table._sa_table).name - assert referent_columns == column_test_table.get_columns_by_name([referent_primary_key_column_name]) - - -def test_one_to_many_self_referential_link_create(column_test_table, client): - data = { - "link_type": "one-to-many", - "reference_column_name": "col_1", - "reference_table": column_test_table.id, - "referent_table": column_test_table.id, - } - response = client.post( - "/api/db/v0/links/", - data=data, - ) - assert response.status_code == 201 - constraints = Constraint.objects.filter(table=column_test_table) - assert constraints.count() == 2 - - fk_constraint = next( - constraint - for constraint in constraints - if constraint.type == ConstraintType.FOREIGN_KEY.value - 
) - fk_constraint_columns = list(fk_constraint.columns.all()) - referent_columns = list(fk_constraint.referent_columns.all()) - assert fk_constraint_columns == column_test_table.get_columns_by_name(['col_1']) - referent_primary_key_column_name = get_primary_key_column(column_test_table._sa_table).name - assert referent_columns == column_test_table.get_columns_by_name([referent_primary_key_column_name]) - - -def test_many_to_many_self_referential_link_create(column_test_table, client): - schema = column_test_table.schema - engine = schema._sa_engine - data = { - "link_type": "many-to-many", - "mapping_table_name": "map_table", - "referents": [ - {'referent_table': column_test_table.id, 'column_name': "link_1"}, - {'referent_table': column_test_table.id, 'column_name': "link_2"} - ], - } - response = client.post( - "/api/db/v0/links/", - data=data, - ) - assert response.status_code == 201 - map_table_oid = get_oid_from_table("map_table", schema.name, engine) - map_table = Table.objects.get(oid=map_table_oid) - constraints = Constraint.objects.filter(table=map_table) - assert constraints.count() == 3 - - -def test_many_to_many_link_create(column_test_table, client, create_patents_table): - table_2 = create_patents_table('Table 2') - data = { - "link_type": "many-to-many", - "mapping_table_name": "map_table", - "referents": [ - {'referent_table': column_test_table.id, 'column_name': "link_1"}, - {'referent_table': table_2.id, 'column_name': "link_2"} - ], - } - response = client.post( - "/api/db/v0/links/", - data=data, - ) - assert response.status_code == 201 - - -def test_many_to_many_link_invalid_table_name( - column_test_table, - client, - create_patents_table -): - table_2 = create_patents_table('Referent_table(alpha)') - data = { - "link_type": "many-to-many", - "mapping_table_name": "map_table", - "referents": [ - {'referent_table': column_test_table.id, 'column_name': "link_1"}, - {'referent_table': table_2.id, 'column_name': "link_2"} - ], - } - response = client.post( - "/api/db/v0/links/", - data=data, - ) - response_data = response.json()[0] - assert response.status_code == 400 - assert response_data['code'] == ErrorCodes.InvalidTableName.value - assert response_data['message'] == 'Table name "Referent_table(alpha)" is invalid.' 
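
# A companion sketch for the many-to-many tests above: the payload carries a
# mapping-table name and a list of referent specs instead of a single
# referent_table; both specs may name the same table, as in the
# self-referential case. Table IDs here are hypothetical.
def build_m2m_link_payload(mapping_table_name, referent_specs):
    # referent_specs: iterable of (referent_table_id, column_name) pairs.
    return {
        "link_type": "many-to-many",
        "mapping_table_name": mapping_table_name,
        "referents": [
            {"referent_table": table_id, "column_name": column_name}
            for table_id, column_name in referent_specs
        ],
    }
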
- assert response_data['field'] == 'referents' - - -@pytest.mark.parametrize('client_name, expected_status_code', write_clients_with_status_code) -def test_many_to_many_link_create_permissions( - column_test_table, - request, - create_patents_table, - client_name, - expected_status_code -): - table_2 = create_patents_table('Table 2') - client = request.getfixturevalue(client_name)(table_2.schema) - data = { - "link_type": "many-to-many", - "mapping_table_name": "map_table", - "referents": [ - {'referent_table': column_test_table.id, 'column_name': "link_1"}, - {'referent_table': table_2.id, 'column_name': "link_2"} - ], - } - response = client.post( - "/api/db/v0/links/", - data=data, - ) - assert response.status_code == expected_status_code diff --git a/mathesar/tests/api/test_long_identifiers.py b/mathesar/tests/api/test_long_identifiers.py deleted file mode 100644 index eb6443f00d..0000000000 --- a/mathesar/tests/api/test_long_identifiers.py +++ /dev/null @@ -1,216 +0,0 @@ -import pytest - -from sqlalchemy.sql import text - -from django.core.files.base import File - -from db.types.base import PostgresType -from db.identifiers import truncate_if_necessary, POSTGRES_IDENTIFIER_SIZE_LIMIT - -from mathesar.api.exceptions.database_exceptions import ( - exceptions as database_api_exceptions -) -from mathesar.models.deprecated import DataFile - -from mathesar.tests.api.test_table_api import check_create_table_response, get_expected_name - - -def _get_string_of_length(n): - def return_a_character(_): - return 'x' - return ''.join(map(return_a_character, range(n))) - - -@pytest.fixture -def long_column_data_file(): - data_filepath = 'mathesar/tests/data/long_column_names.csv' - with open(data_filepath, "rb") as csv_file: - data_file = DataFile.objects.create( - file=File(csv_file), - created_from='file', - base_name='longdatafiled', - type='csv' - ) - return data_file - - -@pytest.fixture -def dj_model_of_preexisting_db(worker_id, FUN_create_dj_db, FUN_engine_cache): - db_name = f"preexisting_db_{worker_id}" - db_model = FUN_create_dj_db(db_name) - engine = FUN_engine_cache(db_name) - max_length_identifier = _get_string_of_length( - POSTGRES_IDENTIFIER_SIZE_LIMIT - ) - with engine.connect() as con: - statement = text(f""" - CREATE TABLE public.persons ( - {max_length_identifier} INT PRIMARY KEY - ); - """) - con.execute(statement) - con.commit() - return db_model - - -def test_long_identifier_in_prexisting_db(dj_model_of_preexisting_db, client): # noqa: F841 - """ - Checks that the table and column endpoints work for a third-party db with - an identifier that has maximum length supported by Postgres. 
- """ - response = client.get("/api/db/v0/tables/") - json = response.json() - assert response.status_code == 200 - assert json['count'] == 1 - table_json = json['results'][0] - assert table_json['name'] == 'persons' - table_id = table_json['id'] - response = client.get( - f"/api/db/v0/tables/{table_id}/columns/", - ) - json = response.json() - assert response.status_code == 200 - column_json = json['results'][0] - column_id = column_json['id'] - assert len(column_json['name']) == POSTGRES_IDENTIFIER_SIZE_LIMIT - db_type = PostgresType.BOOLEAN - data = {"type": db_type.id} - response = client.patch( - f"/api/db/v0/tables/{table_id}/columns/{column_id}/", data=data - ) - assert response.status_code == 200 - json = response.json() - column_json = json - assert len(column_json['name']) == POSTGRES_IDENTIFIER_SIZE_LIMIT - - -def test_column_create_with_long_column_name(column_test_table, client): - very_long_string = ''.join(map(str, range(50))) - name = 'very_long_identifier_' + very_long_string - db_type = PostgresType.NUMERIC - data = { - "name": name, - "type": db_type.id, - } - response = client.post( - f"/api/db/v0/tables/{column_test_table.id}/columns/", - data=data, - ) - assert response.status_code == 400 - assert response.json()[0]['code'] == database_api_exceptions.IdentifierTooLong.error_code - - -@pytest.mark.parametrize( - 'before_truncation, after_truncation', - [ - [ - "bbbbbbbbbbbbbb", - "bbbbbbbbbbbbbb", - ], - [ - "cccccccccccccccccccccc", - "cccccccccccccccccccccc", - ], - [ - "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - ], - [ - "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", - "fffffffffffffffffffffffffffffffffffffff-7e43d30e" - ], - [ - "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee", - "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-d0ccef3c", - ], - [ - "ggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg", - "ggggggggggggggggggggggggggggggggggggggg-2910cecf", - ], - ] -) -def test_truncate_if_necessary(before_truncation, after_truncation): - assert truncate_if_necessary(before_truncation) == after_truncation - - -def test_create_table_long_name_data_file(client, long_column_data_file, create_schema, uid): - table_name = 'My Long column name datafile' - # response, response_table, table = _create_table( - # ) - expt_name = get_expected_name(table_name, data_file=long_column_data_file) - first_row = ( - 1, 'NATION', '8.6', '4.5', '8.5', '4.3', '8.3', '4.6', '78.6', '2.22', - '0.88', '0.66', '1.53', '3.75', '3.26', '0.45', '0.07', '53.9', '52.3', - '0.8', '0.38487', '3.15796', '2.3', '33247', '14.842144', '6.172333', - '47.158545', '1.698662', '2.345577', '7.882694', '0.145406', '3.395302', - '92.085375', '14.447634', '78.873848', '1.738571', '16.161024', - '19.436701', '8.145643', '94.937079', '74.115131', '75.601680', - '22.073834', '11.791045', '1.585233', - '1.016932', '2023-02-01' - ) - column_names = [ - "State or Nation", - "Cycle 1 Total Number of Health Deficiencies", - "Cycle 1 Total Number of Fire Safety Deficiencies", - "Cycle 2 Total Number of Health Deficiencies", - "Cycle 2 Total Number of Fire Safety Deficiencies", - "Cycle 3 Total Number of Health Deficiencies", - "Cycle 3 Total Number of Fire Safety Deficiencies", - "Average Number of Residents per Day", - "Reported Nurse Aide Staffing Hours per Resident per Day", - "Reported LPN Staffing Hours per Resident per Day", - "Reported RN Staffing Hours per Resident per Day", - 
"Reported Licensed Staffing Hours per Resident per Day", - "Reported Total Nurse Staffing Hours per Resident per Day", - "Total number of nurse staff hours per resident per day on the weekend", - "Registered Nurse hours per resident per day on the weekend", - "Reported Physical Therapist Staffing Hours per Resident Per Day", - "Total nursing staff turnover", - "Registered Nurse turnover", - "Number of administrators who have left the nursing home", - "Case-Mix RN Staffing Hours per Resident per Day", - "Case-Mix Total Nurse Staffing Hours per Resident per Day", - "Number of Fines", - "Fine Amount in Dollars", - "Percentage of long stay residents whose need for help with daily activities has increased", - "Percentage of long stay residents who lose too much weight", - "Percentage of low risk long stay residents who lose control of their bowels or bladder", - "Percentage of long stay residents with a catheter inserted and left in their bladder", - "Percentage of long stay residents with a urinary tract infection", - "Percentage of long stay residents who have depressive symptoms", - "Percentage of long stay residents who were physically restrained", - "Percentage of long stay residents experiencing one or more falls with major injury", - "Percentage of long stay residents assessed and appropriately given the pneumococcal vaccine", - "Percentage of long stay residents who received an antipsychotic medication", - "Percentage of short stay residents assessed and appropriately given the pneumococcal vaccine", - "Percentage of short stay residents who newly received an antipsychotic medication", - "Percentage of long stay residents whose ability to move independently worsened", - "Percentage of long stay residents who received an antianxiety or hypnotic medication", - "Percentage of high risk long stay residents with pressure ulcers", - "Percentage of long stay residents assessed and appropriately given the seasonal influenza vaccine", - "Percentage of short stay residents who made improvements in function", - "Percentage of short stay residents who were assessed and appropriately given the seasonal influenza vaccine", - "Percentage of short stay residents who were rehospitalized after a nursing home admission", - "Percentage of short stay residents who had an outpatient emergency department visit", - "Number of hospitalizations per 1000 long-stay resident days", - "Number of outpatient emergency department visits per 1000 long-stay resident days", - "Processing Date" - ] - # Make sure at least some column names require truncation; - # 63 is the hard Postgres limit; we're also experiencing problems with ids - # as short as 58 characters, but I'll leave this at 63 so that it doesn't - # have to be updated once that's fixed. - assert any( - len(column_name) >= 63 - for column_name - in column_names - ) - processed_column_names = [truncate_if_necessary(col) for col in column_names] - schema = create_schema(uid) - table = check_create_table_response( - client, table_name, expt_name, long_column_data_file, schema, first_row, - processed_column_names, import_target_table=None - ) - # This just makes sure we can get records. This was a bug with long column names. 
- response = client.get(f'/api/db/v0/tables/{table.id}/records/') - assert response.status_code == 200 diff --git a/mathesar/tests/api/test_record_api.py b/mathesar/tests/api/test_record_api.py deleted file mode 100644 index 8fba30e10b..0000000000 --- a/mathesar/tests/api/test_record_api.py +++ /dev/null @@ -1,1566 +0,0 @@ -import json -import pytest -from sqlalchemy import text -from copy import deepcopy -from unittest.mock import patch - -from db.constraints.base import ForeignKeyConstraint, UniqueConstraint -from db.functions.exceptions import UnknownDBFunctionID -from db.records.exceptions import BadGroupFormat, GroupFieldNotFound -from db.records.operations.group import GroupBy -from db.records.operations.sort import BadSortFormat, SortFieldNotFound - -from mathesar.state import reset_reflection -from mathesar.api.exceptions.error_codes import ErrorCodes -from mathesar.api.utils import follows_json_number_spec -from mathesar.functions.operations.convert import rewrite_db_function_spec_column_ids_to_names -from mathesar.models import deprecated as models_deprecated -from mathesar.models.deprecated import compute_default_preview_template -from mathesar.models.query import DBQuery -from mathesar.utils.preview import compute_path_prefix, compute_path_str - - -@pytest.mark.parametrize( - "table_fixture,expected_count", - [ - ["patents_table", 1393], - ["table_with_unknown_types", 3], - ], -) -def test_record_list(table_fixture, expected_count, request, client): - """ - Desired format: - { - "count": 25, - "results": [ - { - "id": 1, - "Center": "NASA Kennedy Space Center", - "Status": "Application", - "Case Number": "KSC-12871", - "Patent Number": "0", - "Application SN": "13/033,085", - "Title": "Polyimide Wire Insulation Repair System", - "Patent Expiration Date": "" - }, - { - "id": 2, - "Center": "NASA Ames Research Center", - "Status": "Issued", - "Case Number": "ARC-14048-1", - "Patent Number": "5694939", - "Application SN": "08/543,093", - "Title": "Autogenic-Feedback Training Exercise Method & System", - "Patent Expiration Date": "10/03/2015" - }, - etc. 
- ] - } - """ - table = request.getfixturevalue(table_fixture) - response = client.get(f'/api/db/v0/tables/{table.id}/records/') - response_json = response.json() - assert response.status_code == 200 - response_first_record = response_json['results'][0] - assert response_json['count'] == expected_count - assert response_json['grouping'] is None - count_per_page = 50 - assert len(response_json['results']) == min(count_per_page, expected_count) - for column_id in table.columns.all().values_list('id', flat=True): - assert str(column_id) in response_first_record - - -list_client_with_different_roles = [ - ('superuser_client_factory', 200, 200), - ('db_manager_client_factory', 200, 200), - ('db_editor_client_factory', 200, 200), - ('schema_manager_client_factory', 200, 403), - ('schema_viewer_client_factory', 200, 403), - ('db_viewer_schema_manager_client_factory', 200, 200) -] - -write_clients_with_status_code = [ - ('superuser_client_factory', 201), - ('db_manager_client_factory', 201), - ('db_editor_client_factory', 400), - ('schema_manager_client_factory', 201), - ('schema_viewer_client_factory', 400), - ('db_viewer_schema_manager_client_factory', 201) -] - -update_client_with_status_code = [ - ('db_manager_client_factory', 200), - ('db_editor_client_factory', 200), - ('schema_manager_client_factory', 200), - ('schema_viewer_client_factory', 403), - ('db_viewer_schema_manager_client_factory', 200) -] - - -@pytest.mark.parametrize( - 'client_name,expected_status_code,different_schema_expected_status_code', - list_client_with_different_roles -) -def test_record_list_based_on_permission( - create_patents_table, - request, - client_name, - expected_status_code, - different_schema_expected_status_code -): - - table_name = 'NASA Record List' - table = create_patents_table(table_name) - different_schema_table = create_patents_table(table_name, schema_name="Different Schema") - client = request.getfixturevalue(client_name)(table.schema) - response = client.get(f'/api/db/v0/tables/{table.id}/records/') - assert response.status_code == expected_status_code - response = client.get(f'/api/db/v0/tables/{different_schema_table.id}/records/') - assert response.status_code == different_schema_expected_status_code - - -serialization_test_list = [ - ("TIME WITH TIME ZONE", "12:30:10.0+01:00"), - ("TIMESTAMP WITHOUT TIME ZONE", "2000-05-23T12:30:10.0 AD"), - ("MONEY", "$5.00"), -] - - -@pytest.mark.parametrize("type_, value", serialization_test_list) -def test_record_serialization(empty_nasa_table, create_column, client, type_, value): - col_name = "TEST COL" - column = create_column(empty_nasa_table, {"name": col_name, "type": type_}) - empty_nasa_table.create_record_or_records([{col_name: value}]) - - response = client.get(f'/api/db/v0/tables/{empty_nasa_table.id}/records/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data["results"][0][str(column.id)] == value - - -def test_record_list_filter(create_patents_table, client): - table_name = 'NASA Record List Filter' - table = create_patents_table(table_name) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - - filter = {"or": [ - {"and": [ - {"equal": [ - {"column_id": [columns_name_id_map['Center']]}, - {"literal": ["NASA Ames Research Center"]} - ]}, - {"equal": [ - {"column_id": [columns_name_id_map["Case Number"]]}, - {"literal": ["ARC-14048-1"]} - ]}, - ]}, - {"and": [ - {"equal": [ - {"column_id": [columns_name_id_map["Center"]]}, - {"literal": ["NASA Kennedy Space Center"]} - ]}, - 
{"equal": [ - {"column_id": [columns_name_id_map["Case Number"]]}, - {"literal": ["KSC-12871"]} - ]}, - ]}, - ]} - json_filter = json.dumps(filter) - - with patch.object( - DBQuery, "get_records", side_effect=DBQuery.get_records, autospec=True - ) as mock_get: - response = client.get( - f'/api/db/v0/tables/{table.id}/records/?filter={json_filter}' - ) - - assert response.status_code == 200 - response_data = response.json() - assert response_data['count'] == 2 - assert len(response_data['results']) == 2 - assert mock_get.call_args is not None - column_ids_to_names = table.get_column_name_id_bidirectional_map().inverse - processed_filter = rewrite_db_function_spec_column_ids_to_names( - column_ids_to_names=column_ids_to_names, - spec=filter, - ) - assert mock_get.call_args[1]['filter'] == processed_filter - - -def test_record_list_duplicate_rows_only(create_patents_table, client): - table_name = 'NASA Record List Filter Duplicates' - table = create_patents_table(table_name) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - duplicate_only = columns_name_id_map['Patent Expiration Date'] - json_duplicate_only = json.dumps(duplicate_only) - - with patch.object(DBQuery, "get_records", return_value=[]) as mock_get: - client.get(f'/api/db/v0/tables/{table.id}/records/?duplicate_only={json_duplicate_only}') - assert mock_get.call_args is not None - assert mock_get.call_args[1]['duplicate_only'] == duplicate_only - - -def test_filter_with_added_columns(create_patents_table, client): - table_name = 'NASA Record List Filter' - table = create_patents_table(table_name) - - columns_to_add = [ - { - 'name': 'Published', - 'type': 'BOOLEAN', - 'default_value': True, - 'row_values': {1: False, 2: False, 3: None} - } - ] - - operators_and_expected_values = [ - ( - lambda new_column_id, value: {"not": [{"equal": [{"column_id": [new_column_id]}, {"literal": [value]}]}]}, - True, 2), - ( - lambda new_column_id, value: {"equal": [{"column_id": [new_column_id]}, {"literal": [value]}]}, - False, 2), - ( - lambda new_column_id, _: {"null": [{"column_id": [new_column_id]}]}, - None, 1394), - ( - lambda new_column_id, _: {"not": [{"null": [{"column_id": [new_column_id]}]}]}, - None, 49), - ] - - for new_column in columns_to_add: - new_column_name = new_column.get("name") - new_column_type = new_column.get("type") - table.add_column({"name": new_column_name, "type": new_column_type}) - row_values_list = [] - # Get a new instance with clean cache, so that the new column is added to the _sa_column list - table = models_deprecated.Table.objects.get(oid=table.oid) - response_data = client.get(f'/api/db/v0/tables/{table.id}/records/').json() - existing_records = response_data['results'] - - for row_number, row in enumerate(existing_records, 1): - row_value = new_column.get("row_values").get(row_number, new_column.get("default_value")) - row_values_list.append({new_column_name: row_value}) - - table.create_record_or_records(row_values_list) - - column_names_to_ids = table.get_column_name_id_bidirectional_map() - new_column_id = column_names_to_ids[new_column_name] - - for filter_lambda, value, expected in operators_and_expected_values: - filter = filter_lambda(new_column_id, value) - json_filter = json.dumps(filter) - - with patch.object( - DBQuery, "get_records", side_effect=DBQuery.get_records, autospec=True - ) as mock_get: - response = client.get( - f'/api/db/v0/tables/{table.id}/records/?filter={json_filter}' - ) - response_data = response.json() - - num_results = expected - if expected > 50: - 
num_results = 50 - assert response.status_code == 200 - assert response_data['count'] == expected - assert len(response_data['results']) == num_results - assert mock_get.call_args is not None - processed_filter = rewrite_db_function_spec_column_ids_to_names( - column_ids_to_names=column_names_to_ids.inverse, - spec=filter, - ) - assert mock_get.call_args[1]['filter'] == processed_filter - - -def test_record_list_sort(create_patents_table, client): - table_name = 'NASA Record List Order' - table = create_patents_table(table_name) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - order_by = [ - {'field': 'Center', 'direction': 'desc'}, - {'field': 'Case Number', 'direction': 'asc'}, - ] - - id_converted_order_by = [{**column, 'field': columns_name_id_map[column['field']]} for column in order_by] - json_order_by = json.dumps(id_converted_order_by) - - with patch.object( - DBQuery, "get_records", side_effect=DBQuery.get_records, autospec=True - ) as mock_get: - response = client.get( - f'/api/db/v0/tables/{table.id}/records/?order_by={json_order_by}' - ) - response_data = response.json() - - assert response.status_code == 200 - assert response_data['count'] == 1393 - assert len(response_data['results']) == 50 - - assert mock_get.call_args is not None - assert mock_get.call_args[1]['order_by'][:len(order_by)] == order_by - - -def test_record_search(create_patents_table, client): - table_name = 'NASA Record List Search' - table = create_patents_table(table_name) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - search_columns = [ - {'field': columns_name_id_map['Title'], 'literal': 'A Direct-To Controller Tool'}, - ] - - json_search_fuzzy = json.dumps(search_columns) - - response = client.get( - f'/api/db/v0/tables/{table.id}/records/?search_fuzzy={json_search_fuzzy}' - ) - response_data = response.json() - - assert response.status_code == 200 - assert response_data['count'] == 1 - assert len(response_data['results']) == 1 - - -def test_record_search_invalid_date(create_patents_table, client): - table_name = 'NASA Record Invalid Date' - table = create_patents_table(table_name) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - column_id_with_date_type = table.get_column_name_id_bidirectional_map()['Patent Expiration Date'] - column_attnum = table.columns.get(id=column_id_with_date_type).attnum - table.alter_column(column_attnum, {'type': 'date'}) - search_columns = [ - {'field': columns_name_id_map['Patent Expiration Date'], 'literal': '99/99/9999'}, - ] - - json_search_fuzzy = json.dumps(search_columns) - - response = client.get( - f'/api/db/v0/tables/{table.id}/records/?search_fuzzy={json_search_fuzzy}' - ) - response_data = response.json() - assert response.status_code == 400 - assert response_data[0]['code'] == ErrorCodes.InvalidDateError.value - assert response_data[0]['message'] == 'Invalid date' - - -def test_record_search_invalid_date_format(create_patents_table, client): - table_name = 'NASA Record Invalid Date Format' - table = create_patents_table(table_name) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - column_id_with_date_type = table.get_column_name_id_bidirectional_map()['Patent Expiration Date'] - column_attnum = table.columns.get(id=column_id_with_date_type).attnum - table.alter_column(column_attnum, {'type': 'date'}) - search_columns = [ - {'field': columns_name_id_map['Patent Expiration Date'], 'literal': '12/31'}, - ] - - json_search_fuzzy = json.dumps(search_columns) - - response = 
client.get( - f'/api/db/v0/tables/{table.id}/records/?search_fuzzy={json_search_fuzzy}' - ) - response_data = response.json() - - assert response.status_code == 400 - assert response_data[0]['code'] == ErrorCodes.InvalidDateFormatError.value - assert response_data[0]['message'] == 'Invalid date format' - - -grouping_params = [ - ( - 'NASA Record List Group Single', - {'columns': ['Center']}, - [ - { - 'count': 87, - 'first_value': {'Center': 'NASA Kennedy Space Center'}, - 'last_value': {'Center': 'NASA Kennedy Space Center'}, - 'less_than_eq_value': None, - 'less_than_value': None, - 'greater_than_eq_value': None, - 'greater_than_value': None, - 'result_indices': [0] - }, { - 'count': 138, - 'first_value': {'Center': 'NASA Ames Research Center'}, - 'last_value': {'Center': 'NASA Ames Research Center'}, - 'less_than_eq_value': None, - 'less_than_value': None, - 'greater_than_eq_value': None, - 'greater_than_value': None, - 'result_indices': [ - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, - 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 31, 32, 33, - 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, - 94, 95, 96, 97, 98, 99, - ] - }, { - 'count': 21, - 'first_value': {'Center': 'NASA Armstrong Flight Research Center'}, - 'last_value': {'Center': 'NASA Armstrong Flight Research Center'}, - 'less_than_eq_value': None, - 'less_than_value': None, - 'greater_than_eq_value': None, - 'greater_than_value': None, - 'result_indices': [30] - }, - ], - ), - ( - 'NASA Record List Group Single Percentile', - {'columns': ['Center'], 'mode': 'percentile', 'num_groups': 5}, - [ - { - 'count': 87, - 'first_value': {'Center': 'NASA Kennedy Space Center'}, - 'last_value': {'Center': 'NASA Kennedy Space Center'}, - 'less_than_eq_value': None, - 'less_than_value': None, - 'greater_than_eq_value': None, - 'greater_than_value': None, - 'result_indices': [0] - }, { - 'count': 159, - 'first_value': {'Center': 'NASA Ames Research Center'}, - 'last_value': {'Center': 'NASA Armstrong Flight Research Center'}, - 'less_than_eq_value': None, - 'less_than_value': None, - 'greater_than_eq_value': None, - 'greater_than_value': None, - 'result_indices': [ - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, - 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, - 93, 94, 95, 96, 97, 98, 99 - ], - }, - ], - ), - ( - 'NASA Record List Group Multi', - {'columns': ['Center', 'Status']}, - [ - { - 'count': 29, - 'first_value': { - 'Center': 'NASA Kennedy Space Center', 'Status': 'Application' - }, - 'last_value': { - 'Center': 'NASA Kennedy Space Center', 'Status': 'Application' - }, - 'less_than_eq_value': None, - 'less_than_value': None, - 'greater_than_eq_value': None, - 'greater_than_value': None, - 'result_indices': [0] - }, { - 'count': 100, - 'first_value': { - 'Center': 'NASA Ames Research Center', 'Status': 'Issued' - }, - 'last_value': { - 'Center': 'NASA Ames Research Center', 'Status': 'Issued' - }, - 'less_than_eq_value': None, - 'less_than_value': None, - 'greater_than_eq_value': None, - 'greater_than_value': None, - 'result_indices': [ - 1, 2, 3, 4, 
5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, - 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 31, 32, 33, - 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 88, 90, 91, 92, 94, 96, 98, 99 - ] - }, { - 'count': 12, - 'first_value': { - 'Center': 'NASA Armstrong Flight Research Center', 'Status': 'Issued' - }, - 'last_value': { - 'Center': 'NASA Armstrong Flight Research Center', 'Status': 'Issued' - }, - 'less_than_eq_value': None, - 'less_than_value': None, - 'greater_than_eq_value': None, - 'greater_than_value': None, - 'result_indices': [30] - }, { - 'count': 38, - 'first_value': { - 'Center': 'NASA Ames Research Center', 'Status': 'Application' - }, - 'last_value': { - 'Center': 'NASA Ames Research Center', 'Status': 'Application' - }, - 'less_than_eq_value': None, - 'less_than_value': None, - 'greater_than_eq_value': None, - 'greater_than_value': None, - 'result_indices': [86, 87, 89, 93, 95, 97] - }, - ], - ), - ( - 'NASA Record List Group Multi Percentile', - {'columns': ['Center', 'Status'], 'mode': 'percentile', 'num_groups': 5}, - [ - { - 'count': 197, - 'first_value': { - 'Center': 'NASA Kennedy Space Center', 'Status': 'Application' - }, - 'last_value': { - 'Center': 'NASA Langley Research Center', 'Status': 'Application' - }, - 'less_than_eq_value': None, - 'less_than_value': None, - 'greater_than_eq_value': None, - 'greater_than_value': None, - 'result_indices': [0] - }, { - 'count': 159, - 'first_value': { - 'Center': 'NASA Ames Research Center', 'Status': 'Application' - }, - 'last_value': { - 'Center': 'NASA Armstrong Flight Research Center', 'Status': 'Issued' - }, - 'less_than_eq_value': None, - 'less_than_value': None, - 'greater_than_eq_value': None, - 'greater_than_value': None, - 'result_indices': [ - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, - 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, - 93, 94, 95, 96, 97, 98, 99 - ], - }, - ], - ), - ( - 'Magnitude Grouping', - {'columns': ['id'], 'mode': 'magnitude'}, - [ - { - 'count': 99, - 'first_value': {'id': 1}, - 'last_value': {'id': 99}, - 'less_than_eq_value': None, - 'greater_than_eq_value': {'id': 0}, - 'less_than_value': {'id': 100}, - 'greater_than_value': None, - 'result_indices': [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98 - ], - }, { - 'count': 100, - 'first_value': {'id': 100}, - 'last_value': {'id': 199}, - 'less_than_eq_value': None, - 'greater_than_eq_value': {'id': 100}, - 'less_than_value': {'id': 200}, - 'greater_than_value': None, - 'result_indices': [99], - }, - ], - ), - ( - 'Count By Grouping', - { - 'columns': ['id'], - 'mode': 'count_by', - 'global_min': 0, - 'global_max': 1000, - 'count_by': 50 - }, - [ - { - 'count': 49, - 'first_value': {'id': 1}, - 'last_value': {'id': 49}, - 'less_than_eq_value': None, - 
'greater_than_eq_value': {'id': 0}, - 'less_than_value': {'id': 50}, - 'greater_than_value': None, - 'result_indices': [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48 - ] - }, { - 'count': 50, - 'first_value': {'id': 50}, - 'last_value': {'id': 99}, - 'less_than_eq_value': None, - 'greater_than_eq_value': {'id': 50}, - 'less_than_value': {'id': 100}, - 'greater_than_value': None, - 'result_indices': [ - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, - 94, 95, 96, 97, 98 - ] - }, { - 'count': 50, - 'first_value': {'id': 100}, - 'last_value': {'id': 149}, - 'less_than_eq_value': None, - 'greater_than_eq_value': {'id': 100}, - 'less_than_value': {'id': 150}, - 'greater_than_value': None, - 'result_indices': [99] - } - ] - ), - ( - 'NASA Record List Group Prefix', - {'columns': ['Case Number'], 'mode': 'prefix', 'prefix_length': 3}, - [ - { - 'count': 87, - 'first_value': {'Case Number': 'KSC-11641'}, - 'last_value': {'Case Number': 'KSC-13689'}, - 'less_than_eq_value': None, - 'greater_than_eq_value': None, - 'less_than_value': None, - 'greater_than_value': None, - 'result_indices': [0] - }, { - 'count': 138, - 'first_value': {'Case Number': 'ARC-14048-1'}, - 'last_value': {'Case Number': 'ARC-16942-2'}, - 'less_than_eq_value': None, - 'greater_than_eq_value': None, - 'less_than_value': None, - 'greater_than_value': None, - 'result_indices': [ - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, - 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 31, 32, 33, - 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, - 94, 95, 96, 97, 98, 99 - ] - }, { - 'count': 21, - 'first_value': {'Case Number': 'DRC-001-049'}, - 'last_value': {'Case Number': 'DRC-098-001'}, - 'less_than_eq_value': None, - 'greater_than_eq_value': None, - 'less_than_value': None, - 'greater_than_value': None, - 'result_indices': [30] - }, - ], - ), -] - - -def test_null_error_record_create(create_patents_table, client): - table_name = 'NASA Record Create' - table = create_patents_table(table_name) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - column_id = columns_name_id_map['Case Number'] - data = {"nullable": False} - client.patch( - f"/api/db/v0/tables/{table.id}/columns/{column_id}/", data=data - ) - data = { - columns_name_id_map['Center']: 'NASA Example Space Center', - columns_name_id_map['Status']: 'Application', - columns_name_id_map['Case Number']: None, - columns_name_id_map['Patent Number']: '01234', - columns_name_id_map['Application SN']: '01/000,001', - columns_name_id_map['Title']: 'Example Patent Name', - columns_name_id_map['Patent Expiration Date']: '' - } - response = client.post(f'/api/db/v0/tables/{table.id}/records/', data=data) - record_data = response.json() - assert response.status_code == 400 - assert 'null value in column "Case Number"' in record_data[0]['message'] - assert ErrorCodes.NotNullViolation.value == record_data[0]['code'] - assert column_id == record_data[0]['detail']['column_id'] - - -@pytest.mark.parametrize('table_name,grouping,expected_groups', grouping_params) -def 
test_record_list_groups( - table_name, grouping, expected_groups, create_patents_table, client, -): - table = create_patents_table(table_name) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - - order_by = [ - {'field': columns_name_id_map['id'], 'direction': 'asc'}, - ] - json_order_by = json.dumps(order_by) - group_by_columns_ids = [columns_name_id_map[column_name] for column_name in grouping['columns']] - ids_converted_group_by = {**grouping, 'columns': group_by_columns_ids} - json_grouping = json.dumps(ids_converted_group_by) - limit = 100 - query_str = f'grouping={json_grouping}&order_by={json_order_by}&limit={limit}' - - response = client.get(f'/api/db/v0/tables/{table.id}/records/?{query_str}') - response_data = response.json() - - def _test_group_equality(actual_groups, expect_groups): - actual_groups = deepcopy(actual_groups) - expect_groups = deepcopy(expect_groups) - assert len(actual_groups) == len(expect_groups) - for i in range(len(actual_groups)): - assert actual_groups[i].pop('count') == expect_groups[i].pop('count') - assert ( - actual_groups[i].pop('result_indices') - == expect_groups[i].pop('result_indices') - ) - for k in expect_groups[i]: - actual_item = actual_groups[i][k] - expect_item = expect_groups[i][k] - if expect_item is not None: - for column_name in expect_item: - assert ( - expect_item[column_name] - == actual_item[str(columns_name_id_map[column_name])] - ) - else: - assert actual_item is None - - # JSON round-tripping turns bound tuples into lists; convert them back for comparison. - def _retuple_bound_tuples(bound_tuple_list): - if bound_tuple_list is not None: - return [tuple(t) for t in bound_tuple_list] - - assert response.status_code == 200 - assert response_data['count'] == 1393 - assert len(response_data['results']) == limit - - group_by = GroupBy(**grouping) - grouping_dict = response_data['grouping'] - assert grouping_dict['columns'] == [ - columns_name_id_map[colname] for colname in group_by.columns - ] - assert grouping_dict['mode'] == group_by.mode - assert grouping_dict['num_groups'] == group_by.num_groups - assert _retuple_bound_tuples(grouping_dict['bound_tuples']) == group_by.bound_tuples - assert grouping_dict['count_by'] == group_by.count_by - assert grouping_dict['global_min'] == group_by.global_min - assert grouping_dict['global_max'] == group_by.global_max - assert grouping_dict['preproc'] == group_by.preproc - assert grouping_dict['prefix_length'] == group_by.prefix_length - assert grouping_dict['extract_field'] == group_by.extract_field - assert grouping_dict['ranged'] == group_by.ranged - _test_group_equality(grouping_dict['groups'], expected_groups) - - -def test_group_filter_combo_order(create_patents_table, client): - table_name = 'NASA Record List Group Filter' - table = create_patents_table(table_name) - name_id_map = table.get_column_name_id_bidirectional_map() - - raw_grouping = {'columns': ['Center']} - raw_filter = { - "contains": [ - {"column_id": [name_id_map["Case Number"]]}, {"literal": ["11"]} - ] - } - raw_order_by = [{'field': name_id_map['id'], 'direction': 'asc'}] - group_by_col_ids = [name_id_map[col_name] for col_name in raw_grouping['columns']] - ids_converted_group_by = {**raw_grouping, 'columns': group_by_col_ids} - - grouping = json.dumps(ids_converted_group_by) - filter_ = json.dumps(raw_filter) - order_by = json.dumps(raw_order_by) - - limit = 10 - query_str = f'grouping={grouping}&order_by={order_by}&limit={limit}&filter={filter_}' - - response = client.get(f'/api/db/v0/tables/{table.id}/records/?{query_str}') - response_data = response.json() - - 
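# The filter above combines with grouping: groups are computed over the filtered rows only, so the expected per-Center counts below sum to 15 matched records rather than the full 1393-row table. - 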
expect_group_counts = [2, 3, 2, 1, 7] - actual_group_counts = [g['count'] for g in response_data['grouping']['groups']] - assert actual_group_counts == expect_group_counts - - -def test_record_list_pagination_limit(create_patents_table, client): - table_name = 'NASA Record List Pagination Limit' - table = create_patents_table(table_name) - - response = client.get(f'/api/db/v0/tables/{table.id}/records/?limit=5') - response_data = response.json() - record_data = response_data['results'][0] - - assert response.status_code == 200 - assert response_data['count'] == 1393 - assert len(response_data['results']) == 5 - for column_id in table.columns.all().values_list('id', flat=True): - assert str(column_id) in record_data - - -def test_record_list_pagination_offset(create_patents_table, client): - table_name = 'NASA Record List Pagination Offset' - table = create_patents_table(table_name) - columns_id = table.columns.all().order_by('id').values_list('id', flat=True) - - response_1 = client.get(f'/api/db/v0/tables/{table.id}/records/?limit=5&offset=5') - response_1_data = response_1.json() - record_1_data = response_1_data['results'][0] - response_2 = client.get(f'/api/db/v0/tables/{table.id}/records/?limit=5&offset=10') - response_2_data = response_2.json() - record_2_data = response_2_data['results'][0] - - assert response_1.status_code == 200 - assert response_2.status_code == 200 - assert response_1_data['count'] == 1393 - assert response_2_data['count'] == 1393 - assert len(response_1_data['results']) == 5 - assert len(response_2_data['results']) == 5 - - assert record_1_data[str(columns_id[0])] != record_2_data[str(columns_id[0])] - assert record_1_data[str(columns_id[3])] != record_2_data[str(columns_id[3])] - assert record_1_data[str(columns_id[4])] != record_2_data[str(columns_id[4])] - assert record_1_data[str(columns_id[5])] != record_2_data[str(columns_id[5])] - - -def test_self_referential_column_preview(self_referential_table, engine, client): - table = self_referential_table - pk_column = table.get_column_by_name("Id") - name_column = table.get_column_by_name("Name") - parent_column = table.get_column_by_name("Parent") - table.add_constraint(UniqueConstraint(None, table.oid, [pk_column.attnum])) - table.add_constraint( - ForeignKeyConstraint( - None, - table.oid, - [parent_column.attnum], - table.oid, - [pk_column.attnum], {} - ) - ) - recursive_preview_template = f'Name: {{{name_column.id}}} Parent: {{{parent_column.id}}}' - table_settings_id = table.settings.id - data = { - "preview_settings": { - 'template': recursive_preview_template - } - } - response = client.patch( - f"/api/db/v0/tables/{table.id}/settings/{table_settings_id}/", - data=data, - ) - assert response.status_code == 200 - response = client.get(f'/api/db/v0/tables/{table.id}/records/') - response_data = response.json() - preview_data = response_data['preview_data'] - self_referential_column_reference_path = [[parent_column.id, pk_column.id]] - self_referential_column_path_prefix = compute_path_prefix(self_referential_column_reference_path) - name_column_alias = compute_path_str(self_referential_column_path_prefix, name_column.id) - parent_column_alias = compute_path_str(self_referential_column_path_prefix, parent_column.id) - preview_column = next( - preview - for preview in preview_data - if preview['column'] == parent_column.id - ) - preview_data = preview_column['data']['2'] - assert all( - [ - key in preview_data - for key in [name_column_alias, parent_column_alias] - ] - ) - - expected_preview_data = { - 
name_column_alias: 'Child1', parent_column_alias: '1' - } - assert preview_data == expected_preview_data - - -def test_table_settings_set_customized_field_automatically(publication_tables, client): - author_table, publisher_table, publication_table, checkouts_table = publication_tables - author_template_columns = author_table.get_columns_by_name(["first_name", "last_name", "id"]) - author_table_settings_id = author_table.settings.id - response = client.get( - f"/api/db/v0/tables/{author_table.id}/settings/{author_table_settings_id}/", - ) - default_preview = response.json() - assert default_preview['preview_settings']['customized'] is False - assert default_preview['preview_settings']['template'] == compute_default_preview_template(author_table) - author_preview_template = f'Full Name: {{{ author_template_columns[0].id }}} {{{author_template_columns[1].id}}}' - data = { - "preview_settings": { - 'template': author_preview_template, - } - } - client.patch( - f"/api/db/v0/tables/{author_table.id}/settings/{author_table_settings_id}/", - data=data, - ) - response = client.get( - f"/api/db/v0/tables/{author_table.id}/settings/{author_table_settings_id}/", - ) - customised_preview = response.json() - assert customised_preview['preview_settings']['customized'] is True - assert customised_preview['preview_settings']['template'] == author_preview_template - - -def test_foreign_key_record_api_reset_column_preview_to_default(publication_tables, client): - author_table, publisher_table, publication_table, checkouts_table = publication_tables - author_template_columns = author_table.get_columns_by_name(["first_name", "last_name", "id"]) - author_table_settings_id = author_table.settings.id - response = client.get( - f"/api/db/v0/tables/{author_table.id}/settings/{author_table_settings_id}/", - ) - default_preview = response.json() - assert default_preview['preview_settings']['customized'] is False - default_preview_template = compute_default_preview_template(author_table) - assert default_preview['preview_settings']['template'] == default_preview_template - author_preview_template = f'Full Name: {{{ author_template_columns[0].id }}} {{{author_template_columns[1].id}}}' - data = { - "preview_settings": { - 'template': author_preview_template, - } - } - client.patch( - f"/api/db/v0/tables/{author_table.id}/settings/{author_table_settings_id}/", - data=data, - ) - response = client.get( - f"/api/db/v0/tables/{author_table.id}/settings/{author_table_settings_id}/", - ) - customised_preview = response.json() - assert customised_preview['preview_settings']['template'] == author_preview_template - data = { - "preview_settings": { - 'customized': False, - } - } - client.patch( - f"/api/db/v0/tables/{author_table.id}/settings/{author_table_settings_id}/", - data=data, - ) - response = client.get( - f"/api/db/v0/tables/{author_table.id}/settings/{author_table_settings_id}/", - ) - reset_preview = response.json() - assert reset_preview['preview_settings']['template'] == default_preview_template - assert reset_preview['preview_settings']['customized'] is False - - -def test_foreign_key_record_api_all_column_previews(publication_tables, client): - author_table, publisher_table, publication_table, checkouts_table = publication_tables - author_template_columns = author_table.get_columns_by_name(["first_name", "last_name", "id"]) - author_preview_template = f'Full Name: {{{ author_template_columns[0].id }}} {{{author_template_columns[1].id}}}' - author_table_settings_id = author_table.settings.id - data = { - 
"preview_settings": { - 'template': author_preview_template, - } - } - response = client.patch( - f"/api/db/v0/tables/{author_table.id}/settings/{author_table_settings_id}/", - data=data, - ) - assert response.status_code == 200 - publisher_template_columns = publisher_table.get_columns_by_name(["name", "id"]) - publisher_preview_template = f'{{{ publisher_template_columns[0].id }}}' - publisher_table_settings_id = publisher_table.settings.id - data = { - "preview_settings": { - 'customized': True, - 'template': publisher_preview_template, - } - } - response = client.patch( - f"/api/db/v0/tables/{publisher_table.id}/settings/{publisher_table_settings_id}/", - data=data, - ) - assert response.status_code == 200 - publication_template_columns = publication_table.get_columns_by_name(['publisher', 'author', 'co_author', 'title', 'id']) - publication_preview_template = f'{{{publication_template_columns[3].id}}} Published By: {{{ publication_template_columns[0].id}}} and Authored by {{{publication_template_columns[1].id}}} along with {{{publication_template_columns[2].id}}}' - publication_table_settings_id = publication_table.settings.id - data = { - "preview_settings": { - 'template': publication_preview_template, - } - } - response = client.patch( - f"/api/db/v0/tables/{publication_table.id}/settings/{publication_table_settings_id}/", - data=data, - ) - assert response.status_code == 200 - response = client.get(f'/api/db/v0/tables/{checkouts_table.id}/records/') - response_data = response.json() - preview_data = response_data['preview_data'] - checkouts_table_publication_fk_column = checkouts_table.get_column_by_name('publication') - preview_column = next( - preview - for preview in preview_data - if preview['column'] == checkouts_table_publication_fk_column.id - ) - publication_path = [[checkouts_table_publication_fk_column.id, publication_template_columns[-1].id]] - publisher_paths = publication_path + [[publication_template_columns[0].id, publisher_template_columns[-1].id]] - author_paths = publication_path + [[publication_template_columns[1].id, author_template_columns[-1].id]] - co_author_paths = publication_path + [[publication_template_columns[2].id, author_template_columns[-1].id]] - publication_path_prefix = compute_path_prefix(publication_path) - publisher_path_prefix = compute_path_prefix(publisher_paths) - co_author_path_path_prefix = compute_path_prefix(co_author_paths) - author_path_prefix = compute_path_prefix(author_paths) - publication_title_alias = compute_path_str(publication_path_prefix, publication_template_columns[3].id) - publisher_name_alias = compute_path_str(publisher_path_prefix, publisher_template_columns[0].id) - co_author_first_name_alias = compute_path_str(co_author_path_path_prefix, author_template_columns[0].id) - co_author_last_name_alias = compute_path_str(co_author_path_path_prefix, author_template_columns[1].id) - author_first_name_alias = compute_path_str(author_path_prefix, author_template_columns[0].id) - author_last_name_alias = compute_path_str(author_path_prefix, author_template_columns[1].id) - preview_column_alias = f'{{{publication_title_alias}}} Published By: {{{ publisher_name_alias}}} and Authored by Full Name: {{{author_first_name_alias}}} {{{author_last_name_alias}}} along with Full Name: {{{co_author_first_name_alias}}} {{{co_author_last_name_alias}}}' - - assert preview_column['template'] == preview_column_alias - preview_data = preview_column['data']['1'] - assert all([key in preview_data for key in [publication_title_alias, 
publisher_name_alias, author_first_name_alias, author_last_name_alias, co_author_first_name_alias, co_author_last_name_alias]]) - - expected_preview_data = {publication_title_alias: 'Pressure Should Old', publisher_name_alias: 'Ruiz', author_first_name_alias: 'Matthew', author_last_name_alias: 'Brown', co_author_first_name_alias: 'Mark', co_author_last_name_alias: 'Smith'} - assert preview_data == expected_preview_data - - -def test_record_detail(publication_tables, client): - author_table, publisher_table, publication_table, checkouts_table = publication_tables - record_id = 1 - record = checkouts_table.get_record(record_id) - - response = client.get(f'/api/db/v0/tables/{checkouts_table.id}/records/{record_id}/') - record_data = response.json()['results'][0] - preview_data = response.json()['preview_data'] - record_as_dict = record._asdict() - - assert response.status_code == 200 - columns_name_id_map = checkouts_table.get_column_name_id_bidirectional_map() - for column_name in checkouts_table.sa_column_names: - column_id_str = str(columns_name_id_map[column_name]) - assert column_id_str in record_data - assert record_as_dict[column_name] == record_data[column_id_str] - checkouts_table_publication_fk_column = checkouts_table.get_column_by_name('publication') - preview_column = next( - preview - for preview in preview_data - if preview['column'] == checkouts_table_publication_fk_column.id - ) - publication_template_columns = publication_table.get_columns_by_name(['title', 'id']) - publication_path = [[checkouts_table_publication_fk_column.id, publication_template_columns[-1].id]] - publication_title_alias = compute_path_str( - compute_path_prefix(publication_path), - publication_template_columns[0].id - ) - preview_column_alias = f'{{{publication_title_alias}}}' - - assert preview_column['template'] == preview_column_alias - - -def test_record_create(create_patents_table, client): - table_name = 'NASA Record Create' - table = create_patents_table(table_name) - records = table.get_records() - original_num_records = len(records) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - data = { - columns_name_id_map['Center']: 'NASA Example Space Center', - columns_name_id_map['Status']: 'Application', - columns_name_id_map['Case Number']: 'ESC-0000', - columns_name_id_map['Patent Number']: '01234', - columns_name_id_map['Application SN']: '01/000,001', - columns_name_id_map['Title']: 'Example Patent Name', - columns_name_id_map['Patent Expiration Date']: '' - } - response = client.post(f'/api/db/v0/tables/{table.id}/records/', data=data) - record_data = response.json()['results'][0] - assert response.status_code == 201 - assert len(table.get_records()) == original_num_records + 1 - columns_name_id_map = table.get_column_name_id_bidirectional_map() - - for column_name in table.sa_column_names: - column_id_str = str(columns_name_id_map[column_name]) - assert column_id_str in record_data - if column_name in data: - assert data[column_name] == record_data[column_id_str] - - -def test_record_create_without_primary_key(create_patents_table, client): - table_name = 'NASA Record Create Without PK' - table = create_patents_table(table_name) - column_test_table = table - num_columns = len(column_test_table.sa_columns) - col_one_name = column_test_table.sa_columns[0].name - column = column_test_table.get_columns_by_name(['id'])[0] - response = client.delete( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/" - ) - assert response.status_code == 204 - new_columns_response = 
client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - new_data = new_columns_response.json() - assert col_one_name not in [col["name"] for col in new_data["results"]] - assert new_data["count"] == num_columns - 1 - columns_name_id_map = table.get_column_name_id_bidirectional_map() - data = { - columns_name_id_map['Center']: 'NASA Example Space Center', - columns_name_id_map['Status']: 'Application', - columns_name_id_map['Case Number']: 'ESC-0000', - columns_name_id_map['Patent Number']: '01234', - columns_name_id_map['Application SN']: '01/000,001', - columns_name_id_map['Title']: 'Example Patent Name', - columns_name_id_map['Patent Expiration Date']: '' - } - response = client.post(f'/api/db/v0/tables/{table.id}/records/', data=data) - assert response.status_code == 405 - assert response.json()[0]['message'] == "You cannot insert into tables without a primary key" - - -@pytest.mark.parametrize('client_name, expected_status_code', update_client_with_status_code) -def test_record_partial_update_based_on_permission(create_patents_table, request, client_name, expected_status_code): - table_name = 'NASA Record Patch' - table = create_patents_table(table_name) - records = table.get_records() - record_id = records[0]['id'] - client = request.getfixturevalue(client_name)(table.schema) - client.get(f'/api/db/v0/tables/{table.id}/records/{record_id}/') - columns_name_id_map = table.get_column_name_id_bidirectional_map() - data = { - columns_name_id_map['Center']: 'NASA Example Space Center', - columns_name_id_map['Status']: 'Example', - } - response = client.patch(f'/api/db/v0/tables/{table.id}/records/{record_id}/', data=data) - assert response.status_code == expected_status_code - - -def test_record_partial_update(create_patents_table, client): - table_name = 'NASA Record Patch' - table = create_patents_table(table_name) - records = table.get_records() - record_id = records[0]['id'] - - original_response = client.get(f'/api/db/v0/tables/{table.id}/records/{record_id}/') - original_data = original_response.json()['results'][0] - columns_name_id_map = table.get_column_name_id_bidirectional_map() - data = { - columns_name_id_map['Center']: 'NASA Example Space Center', - columns_name_id_map['Status']: 'Example', - } - response = client.patch(f'/api/db/v0/tables/{table.id}/records/{record_id}/', data=data) - record_data = response.json()['results'][0] - assert response.status_code == 200 - for column_name in table.sa_column_names: - column_id_str = str(columns_name_id_map[column_name]) - assert column_id_str in record_data - if column_id_str in data and column_name not in ['Center', 'Status']: - assert original_data[column_id_str] == record_data[column_id_str] - elif column_name == 'Center': - assert original_data[column_id_str] != record_data[column_id_str] - assert record_data[column_id_str] == 'NASA Example Space Center' - elif column_name == 'Status': - assert original_data[column_id_str] != record_data[column_id_str] - assert record_data[column_id_str] == 'Example' - - -delete_clients_with_status_codes = [ - ('superuser_client_factory', 204, 204), - ('db_manager_client_factory', 204, 204), - ('db_editor_client_factory', 204, 204), - ('schema_manager_client_factory', 204, 403), - ('schema_viewer_client_factory', 403, 403), - ('db_viewer_schema_manager_client_factory', 204, 403) -] - - -@pytest.mark.parametrize('client_name, expected_status_code,different_schema_expected_status_code', delete_clients_with_status_codes) -def test_record_delete_based_on_permissions( - 
create_patents_table, - request, - client_name, - expected_status_code, - different_schema_expected_status_code -): - table_name = 'NASA Record Delete' - table = create_patents_table(table_name) - different_schema_table = create_patents_table('Private Table', schema_name='Private Schema') - records = table.get_records() - record_id = records[0]['id'] - client = request.getfixturevalue(client_name)(table.schema) - response = client.delete(f'/api/db/v0/tables/{table.id}/records/{record_id}/') - assert response.status_code == expected_status_code - response = client.delete(f'/api/db/v0/tables/{different_schema_table.id}/records/{record_id}/') - assert response.status_code == different_schema_expected_status_code - - -def test_record_delete(create_patents_table, client): - table_name = 'NASA Record Delete' - table = create_patents_table(table_name) - records = table.get_records() - original_num_records = len(records) - record_id = records[0]['id'] - - response = client.delete(f'/api/db/v0/tables/{table.id}/records/{record_id}/') - assert response.status_code == 204 - assert len(table.get_records()) == original_num_records - 1 - - -def test_record_delete_fkey_violation(library_ma_tables, client): - publications = library_ma_tables['Publications'] - - response = client.delete(f'/api/db/v0/tables/{publications.id}/records/1/') - assert response.status_code == 400 - response_exception = response.json()[0] - assert response_exception['code'] == ErrorCodes.ForeignKeyViolation.value - assert 'Items' in response_exception['message'] - - -def test_record_bulk_delete(create_patents_table, client): - table_name = 'NASA Record Delete' - table = create_patents_table(table_name) - records = table.get_records() - original_num_records = len(records) - record_ids = [records[i]['id'] for i in range(1, 4)] - data = { - 'pks': record_ids - } - - response = client.post(f'/api/ui/v0/tables/{table.id}/records/delete/', data=data) - assert response.status_code == 204 - assert len(table.get_records()) == original_num_records - len(record_ids) - - -def test_record_bulk_delete_fkey_violation(library_ma_tables, client): - publications = library_ma_tables['Publications'] - records = publications.get_records() - record_ids = [records[i]['id'] for i in range(1, 4)] - data = { - 'pks': record_ids - } - - response = client.post(f'/api/ui/v0/tables/{publications.id}/records/delete/', data=data) - assert response.status_code == 400 - response_exception = response.json()[0] - assert response_exception['code'] == ErrorCodes.ForeignKeyViolation.value - assert 'Items' in response_exception['message'] - - -def test_record_bulk_delete_atomicity(library_ma_tables, client): - publications = library_ma_tables['Publications'] - items = library_ma_tables['Items'] - item_records = items.get_records() - original_items_num_records = len(item_records) - record_id = item_records[0]['id'] - - response = client.delete(f'/api/db/v0/tables/{items.id}/records/{record_id}/') - assert response.status_code == 204 - assert len(items.get_records()) == original_items_num_records - 1 - - publication_records = publications.get_records() - original_publication_num_records = len(publication_records) - record_ids = [publication_records[i]['id'] for i in range(1, 4)] - data = { - 'pks': record_ids - } - - response = client.post(f'/api/ui/v0/tables/{publications.id}/records/delete/', data=data) - assert response.status_code == 400 - response_exception = response.json()[0] - assert response_exception['code'] == ErrorCodes.ForeignKeyViolation.value - assert 
'Items' in response_exception['message'] - # Re-fetch the records to confirm the failed bulk delete was rolled back atomically. - assert len(publications.get_records()) == original_publication_num_records - - -def test_record_update(create_patents_table, client): - table_name = 'NASA Record Put' - table = create_patents_table(table_name) - records = table.get_records() - record_id = records[0]['id'] - - data = { - 'Center': 'NASA Example Space Center', - 'Status': 'Example', - } - response = client.put(f'/api/db/v0/tables/{table.id}/records/{record_id}/', data=data) - assert response.status_code == 405 - assert response.json()[0]['message'] == 'Method "PUT" not allowed.' - assert response.json()[0]['code'] == ErrorCodes.MethodNotAllowed.value - - -def test_record_404(create_patents_table, client): - table_name = 'NASA Record 404' - table = create_patents_table(table_name) - records = table.get_records() - record_id = records[0]['id'] - - client.delete(f'/api/db/v0/tables/{table.id}/records/{record_id}/') - response = client.get(f'/api/db/v0/tables/{table.id}/records/{record_id}/') - assert response.status_code == 404 - assert response.json()[0]['message'] == 'Not found.' - assert response.json()[0]['code'] == ErrorCodes.NotFound.value - - -def test_record_list_filter_exceptions(create_patents_table, client): - exception = UnknownDBFunctionID - table_name = f"NASA Record List {exception.__name__}" - table = create_patents_table(table_name) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - filter_list = json.dumps({"null": [{"column_name": [columns_name_id_map['Center']]}]}) - with patch.object(DBQuery, "get_records", side_effect=exception): - response = client.get( - f'/api/db/v0/tables/{table.id}/records/?filters={filter_list}' - ) - response_data = response.json() - assert response.status_code == 400 - assert len(response_data) == 1 - assert "filters" in response_data[0]['field'] - assert response_data[0]['code'] == ErrorCodes.UnsupportedType.value - - -@pytest.mark.parametrize("exception", [BadSortFormat, SortFieldNotFound]) -def test_record_list_sort_exceptions(create_patents_table, client, exception): - table_name = f"NASA Record List {exception.__name__}" - table = create_patents_table(table_name) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - order_by = json.dumps([{"field": columns_name_id_map['id'], "direction": "desc"}]) - with patch.object(DBQuery, "get_records", side_effect=exception): - response = client.get( - f'/api/db/v0/tables/{table.id}/records/?order_by={order_by}' - ) - response_data = response.json() - assert response.status_code == 400 - assert len(response_data) == 1 - assert "order_by" in response_data[0]['field'] - assert response_data[0]['code'] == ErrorCodes.UnsupportedType.value - - -@pytest.mark.parametrize("exception", [BadGroupFormat, GroupFieldNotFound]) -def test_record_list_group_exceptions(create_patents_table, client, exception): - table_name = f"NASA Record List {exception.__name__}" - table = create_patents_table(table_name) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - group_by = json.dumps({"columns": [columns_name_id_map['Case Number']]}) - with patch.object(DBQuery, "get_records", side_effect=exception): - response = client.get( - f'/api/db/v0/tables/{table.id}/records/?grouping={group_by}' - ) - response_data = response.json() - assert response.status_code == 400 - assert len(response_data) == 1 - assert "grouping" in response_data[0]['field'] - assert response_data[0]['code'] == ErrorCodes.UnsupportedType.value - - -@pytest.mark.parametrize("test_input, expected", [ - 
("0", True), - ("-0", True), - ("0.314", True), - ("-0.00314", True), - ("0.0314e3", True), - ("0.0314e+3", True), - ("0.0314e-3", True), - ("0.314e01", True), - ("-314", True), - ("-0314", False), - ("314", True), - ("0314", False), - ("100.04", True), - ("100.", False), - ("314e3", True), - ("314E+3", True), - ("314e-3", True), - ("314.0e-3", True), - ("314.0E+3", True), - ("314.0E1", True), - ("~2324", False) -]) -def test_json_number_spec_validation(test_input, expected): - assert follows_json_number_spec(test_input) == expected - - -def test_number_input_api_validation(empty_nasa_table, client): - table = empty_nasa_table - column_name = 'Nonce' - table.add_column({"name": column_name, "type": 'REAL'}) - nonce_id = table.get_column_name_id_bidirectional_map()[column_name] - - for nonce, status_code in [ - ("0", 201), - ("-0.00314", 201), - ("-314", 201), - ("-0314", 400), - ("314.0e-3", 201), - ("~2324", 400), - (2132, 201), - ]: - data = { - nonce_id: nonce, - } - response = client.post(f'/api/db/v0/tables/{table.id}/records/', data=data) - assert response.status_code == status_code - - -def test_invalid_email_post_api_validation(empty_nasa_table, client): - table = empty_nasa_table - column_name = 'Email' - table.add_column({"name": column_name, "type": 'mathesar_types.email'}) - email_col_id = table.get_column_name_id_bidirectional_map()[column_name] - data = { - email_col_id: 'foobar' - } - response = client.post(f'/api/db/v0/tables/{table.id}/records/', data=data) - response_data = response.json() - assert response.status_code == 400 - assert response_data[0]['code'] == ErrorCodes.CheckViolation.value - assert 'value for domain mathesar_types.email violates check constraint' in response_data[0]['message'] - - -def test_invalid_email_patch_api_validation(empty_nasa_table, client): - table = empty_nasa_table - column_name = 'Email' - table.add_column({"name": column_name, "type": 'mathesar_types.email'}) - email_col_id = table.get_column_name_id_bidirectional_map()[column_name] - valid_data = { - email_col_id: 'foo@bar.org' - } - client.post(f'/api/db/v0/tables/{table.id}/records/', data=valid_data) - invalid_data = { - email_col_id: 'foobar' - } - response = client.patch(f'/api/db/v0/tables/{table.id}/records/1/', data=invalid_data) - response_data = response.json() - assert response.status_code == 400 - assert response_data[0]['code'] == ErrorCodes.CheckViolation.value - assert 'value for domain mathesar_types.email violates check constraint' in response_data[0]['message'] - - -def test_record_patch_invalid_date(create_patents_table, client): - table_name = 'NASA Invalid Date' - table = create_patents_table(table_name) - column_id_with_date_type = table.get_column_name_id_bidirectional_map()['Patent Expiration Date'] - column_attnum = table.columns.get(id=column_id_with_date_type).attnum - table.alter_column(column_attnum, {'type': 'date'}) - data = {f"{column_id_with_date_type}": "99/99/9999"} - response = client.patch(f'/api/db/v0/tables/{table.id}/records/17/', data=data) - response_data = response.json() - assert response.status_code == 400 - assert response_data[0]['code'] == ErrorCodes.InvalidDateError.value - assert response_data[0]['message'] == 'Invalid date' - - -def test_record_patch_invalid_date_format(create_patents_table, client): - table_name = 'NASA Invalid Date Format' - table = create_patents_table(table_name) - column_id_with_date_type = table.get_column_name_id_bidirectional_map()['Patent Expiration Date'] - column_attnum = 
table.columns.get(id=column_id_with_date_type).attnum - table.alter_column(column_attnum, {'type': 'date'}) - data = {f"{column_id_with_date_type}": "5555/5555"} - response = client.patch(f'/api/db/v0/tables/{table.id}/records/17/', data=data) - response_data = response.json() - assert response.status_code == 400 - assert response_data[0]['code'] == ErrorCodes.InvalidDateFormatError.value - assert response_data[0]['message'] == 'Invalid date format' - - -def test_record_post_unique_violation(create_patents_table, client): - table_name = 'NASA unique record POST' - table = create_patents_table(table_name) - id_column_id = table.get_column_name_id_bidirectional_map()['id'] - data = {str(id_column_id): 1} - response = client.post(f'/api/db/v0/tables/{table.id}/records/', data=data) - actual_exception = response.json()[0] - assert actual_exception['code'] == ErrorCodes.UniqueViolation.value - assert actual_exception['message'] == 'The requested insert violates a uniqueness constraint' - assert actual_exception['detail']['constraint_columns'] == [id_column_id] - constraint_id = actual_exception['detail']['constraint'] - actual_constraint_details = client.get( - f'/api/db/v0/tables/{table.id}/constraints/{constraint_id}/' - ).json() - assert actual_constraint_details['name'] == 'NASA unique record POST_pkey' - - -def test_record_patch_unique_violation(create_patents_table, client): - table_name = 'NASA unique record PATCH' - table = create_patents_table(table_name) - id_column_id = table.get_column_name_id_bidirectional_map()['id'] - data = {str(id_column_id): 1} - response = client.patch(f'/api/db/v0/tables/{table.id}/records/{2}/', data=data) - actual_exception = response.json()[0] - assert actual_exception['code'] == ErrorCodes.UniqueViolation.value - assert actual_exception['message'] == 'The requested update violates a uniqueness constraint' - assert actual_exception['detail']['constraint_columns'] == [id_column_id] - constraint_id = actual_exception['detail']['constraint'] - actual_constraint_details = client.get( - f'/api/db/v0/tables/{table.id}/constraints/{constraint_id}/' - ).json() - assert actual_constraint_details['name'] == 'NASA unique record PATCH_pkey' - - -def test_record_post_exclusion_violation(create_reservations_table, engine, client): - table = create_reservations_table - room_number_column_id = table.get_column_name_id_bidirectional_map()['room_number'] - columns_name_id_map = table.get_column_name_id_bidirectional_map() - query = text( - f"""CREATE EXTENSION IF NOT EXISTS btree_gist; - ALTER TABLE "Reservations"."{table.name}" DROP CONSTRAINT IF EXISTS room_overlap; - ALTER TABLE "Reservations"."{table.name}" - ADD CONSTRAINT room_overlap - EXCLUDE USING gist - ("room_number" WITH =, TSRANGE("check_in_date", "check_out_date", '[]') WITH &&);""" - ) - with engine.begin() as conn: - conn.execute(query) - reset_reflection(db_name=table.schema.database.name) - response = client.post(f'/api/db/v0/tables/{table.id}/records/', data={ - str(columns_name_id_map['id']): 3, - str(columns_name_id_map['room_number']): 1, - str(columns_name_id_map['check_in_date']): '11/12/2023', - str(columns_name_id_map['check_out_date']): '11/21/2023', - }) - actual_exception = response.json()[0] - assert actual_exception['code'] == ErrorCodes.ExclusionViolation.value - assert actual_exception['message'] == 'The requested update violates an exclusion constraint' - assert actual_exception['detail']['constraint_columns'] == [room_number_column_id] - - -def 
test_record_patch_exclusion_violation(create_reservations_table, engine, client): - table = create_reservations_table - room_number_column_id = table.get_column_name_id_bidirectional_map()['room_number'] - columns_name_id_map = table.get_column_name_id_bidirectional_map() - query = text( - f"""CREATE EXTENSION IF NOT EXISTS btree_gist; - ALTER TABLE "Reservations"."{table.name}" DROP CONSTRAINT IF EXISTS room_overlap; - ALTER TABLE "Reservations"."{table.name}" - ADD CONSTRAINT room_overlap - EXCLUDE USING gist - ("room_number" WITH =, TSRANGE("check_in_date", "check_out_date", '[]') WITH &&);""" - ) - with engine.begin() as conn: - conn.execute(query) - reset_reflection(db_name=table.schema.database.name) - response = client.patch(f'/api/db/v0/tables/{table.id}/records/{2}/', data={ - str(columns_name_id_map['room_number']): 1, - str(columns_name_id_map['check_in_date']): '11/12/2023', - str(columns_name_id_map['check_out_date']): '11/21/2023', - }) - actual_exception = response.json()[0] - assert actual_exception['code'] == ErrorCodes.ExclusionViolation.value - assert actual_exception['message'] == 'The requested update violates an exclusion constraint' - assert actual_exception['detail']['constraint_columns'] == [room_number_column_id] diff --git a/mathesar/tests/api/test_reflection_api.py b/mathesar/tests/api/test_reflection_api.py deleted file mode 100644 index 3e10725a40..0000000000 --- a/mathesar/tests/api/test_reflection_api.py +++ /dev/null @@ -1,3 +0,0 @@ -def test_reflect_endpoint(client): - response = client.post('/api/ui/v0/reflect/') - assert response.status_code == 200 diff --git a/mathesar/tests/api/test_schema_api.py b/mathesar/tests/api/test_schema_api.py deleted file mode 100644 index ac0f02c2f9..0000000000 --- a/mathesar/tests/api/test_schema_api.py +++ /dev/null @@ -1,592 +0,0 @@ -from django.core.cache import cache -import pytest -from sqlalchemy import text - -from db.schemas.utils import get_mathesar_schemas, get_schema_oid_from_name -from mathesar.models.deprecated import Schema -from mathesar.api.exceptions.error_codes import ErrorCodes -from mathesar.models.users import DatabaseRole - - -def check_schema_response( - MOD_engine_cache, - response_schema, - schema, - schema_name, - test_db_name, - connection_id, - schema_description=None, - check_schema_objects=True -): - assert response_schema['id'] == schema.id - assert response_schema['name'] == schema_name - assert response_schema['connection_id'] == connection_id - assert response_schema['description'] == schema_description - assert 'has_dependents' in response_schema - if check_schema_objects: - engine = MOD_engine_cache(test_db_name) - assert schema_name in get_mathesar_schemas(engine) - - -list_clients_with_results_count = [ - ('superuser_client_factory', 3), - ('db_manager_client_factory', 3), - ('db_editor_client_factory', 3), - ('schema_manager_client_factory', 1), - ('schema_viewer_client_factory', 1), - ('db_viewer_schema_manager_client_factory', 3) -] - - -@pytest.mark.parametrize('client_name, expected_schema_count', list_clients_with_results_count) -def test_schema_list(request, patent_schema, create_schema, MOD_engine_cache, client_name, expected_schema_count): - create_schema("Private Schema") - client = request.getfixturevalue(client_name)(patent_schema) - response = client.get('/api/db/v0/schemas/') - assert response.status_code == 200 - - response_data = response.json() - - assert response_data['count'] == expected_schema_count - results = response_data['results'] - assert len(results) == 
expected_schema_count - - response_schema = None - for some_schema in response_data['results']: - if some_schema['name'] == patent_schema.name: - response_schema = some_schema - assert response_schema is not None - check_schema_response( - MOD_engine_cache, - response_schema, - patent_schema, - patent_schema.name, - patent_schema.database.name, - patent_schema.database.id - ) - - -@pytest.mark.skip("Faulty DB handling assumptions; invalid") -def test_schema_list_filter(client, create_db_schema, FUN_create_dj_db, MOD_engine_cache): - schema_params = [("schema_1", "database_1"), ("schema_2", "database_2"), - ("schema_3", "database_3"), ("schema_1", "database_3")] - - dbs_to_create = set(param[1] for param in schema_params) - - for db_name in dbs_to_create: - FUN_create_dj_db(db_name) - - for schema_name, db_name in schema_params: - engine = MOD_engine_cache(db_name) - create_db_schema(schema_name, engine) - - cache.clear() - - schemas = { - schema_param: Schema.objects.get( - oid=get_schema_oid_from_name( - schema_param[0], - MOD_engine_cache(schema_param[1]) - ), - ) - for schema_param in schema_params - } - - names = ["schema_1", "schema_3"] - names_query = ",".join(names) - databases = ["database_2", "database_3"] - database_query = ",".join(databases) - query = f"name={names_query}&database={database_query}" - - response = client.get(f'/api/db/v0/schemas/?{query}') - response_data = response.json() - response_schemas = response_data['results'] - - assert response.status_code == 200 - assert response_data['count'] == 2 - assert len(response_data['results']) == 2 - - response_schemas = { - (schema["name"], schema["database"]): schema - for schema in response_schemas - } - - for name in names: - for database in databases: - query_tuple = (name, database) - if query_tuple not in schema_params: - continue - schema = schemas[query_tuple] - response_schema = response_schemas[query_tuple] - check_schema_response( - MOD_engine_cache, - response_schema, schema, schema.name, - schema.database.name, schema.database.id, check_schema_objects=False - ) - - -list_clients_with_num_tables_count = [ - ('superuser_client_factory', 3), - ('db_manager_client_factory', 3), - ('db_editor_client_factory', 2), - ('schema_manager_client_factory', 3), - ('schema_viewer_client_factory', 2), - ('db_viewer_schema_manager_client_factory', 3) -] - - -@pytest.mark.parametrize('client_name, expected_num_tables_count', list_clients_with_num_tables_count) -def test_schema_num_tables_with_multiple_roles( - request, - create_patents_table, - patent_schema, - client_name, - expected_num_tables_count -): - create_patents_table("Patent Table 1") - table_2 = create_patents_table("Patent Table 2") - table_3 = create_patents_table("Patent Table 3") - table_2.import_verified = True - table_3.import_verified = None - table_2.save() - table_3.save() - client = request.getfixturevalue(client_name)(patent_schema) - response = client.get('/api/db/v0/schemas/') - assert response.status_code == 200 - response_data = response.json() - response_schemas = [s for s in response_data['results'] if s['name'] != 'public'] - for someschema in response_schemas: - assert someschema['num_tables'] == expected_num_tables_count - - -def test_schema_detail(create_patents_table, client, test_db_name, MOD_engine_cache): - """ - Desired format: - One item in the results list in the schema list view, see above. 
- """ - table = create_patents_table('NASA Schema Detail') - - response = client.get(f'/api/db/v0/schemas/{table.schema.id}/') - response_schema = response.json() - assert response.status_code == 200 - check_schema_response( - MOD_engine_cache, - response_schema, table.schema, table.schema.name, test_db_name, table.schema.database.id - ) - - -def test_schema_sort_by_name(create_schema, client, MOD_engine_cache): - """ - Desired format: - One item in the results list in the schema list view, see above. - """ - schema_3 = create_schema("Schema 3") - schema_1 = create_schema("Schema 1") - schema_5 = create_schema("Schema 5") - schema_2 = create_schema("Schema 2") - schema_4 = create_schema("Schema 4") - unsorted_expected_schemas = [ - schema_4, - schema_2, - schema_5, - schema_1, - schema_3 - ] - expected_schemas = [ - schema_1, - schema_2, - schema_3, - schema_4, - schema_5 - ] - response = client.get('/api/db/v0/schemas/') - response_data = response.json() - response_schemas = [s for s in response_data['results'] if s['name'] != 'public'] - comparison_tuples = zip(response_schemas, unsorted_expected_schemas) - for comparison_tuple in comparison_tuples: - check_schema_response( - MOD_engine_cache, comparison_tuple[0], comparison_tuple[1], comparison_tuple[1].name, - comparison_tuple[1].database.name, comparison_tuple[1].database.id - ) - sort_field = "name" - response = client.get(f'/api/db/v0/schemas/?sort_by={sort_field}') - response_data = response.json() - response_schemas = [s for s in response_data['results'] if s['name'] != 'public'] - comparison_tuples = zip(response_schemas, expected_schemas) - for comparison_tuple in comparison_tuples: - check_schema_response( - MOD_engine_cache, comparison_tuple[0], comparison_tuple[1], comparison_tuple[1].name, - comparison_tuple[1].database.name, comparison_tuple[1].database.id - ) - - -def test_schema_sort_by_id(create_schema, client, MOD_engine_cache): - """ - Desired format: - One item in the results list in the schema list view, see above. 
- """ - schema_1 = create_schema("desiderium parma!") - schema_2 = create_schema("Cur bursa messis?") - schema_3 = create_schema("Historias") - schema_4 = create_schema("Confucius says") - schema_5 = create_schema("Nanomachines") - unsorted_expected_schemas = [ - schema_5, - schema_4, - schema_3, - schema_2, - schema_1 - ] - expected_schemas = [ - schema_1, - schema_2, - schema_3, - schema_4, - schema_5 - ] - response = client.get('/api/db/v0/schemas/') - response_data = response.json() - response_schemas = [s for s in response_data['results'] if s['name'] != 'public'] - comparison_tuples = zip(response_schemas, unsorted_expected_schemas) - for comparison_tuple in comparison_tuples: - check_schema_response( - MOD_engine_cache, - comparison_tuple[0], comparison_tuple[1], comparison_tuple[1].name, - comparison_tuple[1].database.name, comparison_tuple[1].database.id - ) - - response = client.get('/api/db/v0/schemas/?sort_by=id') - response_data = response.json() - response_schemas = [s for s in response_data['results'] if s['name'] != 'public'] - comparison_tuples = zip(response_schemas, expected_schemas) - for comparison_tuple in comparison_tuples: - check_schema_response( - MOD_engine_cache, - comparison_tuple[0], comparison_tuple[1], comparison_tuple[1].name, - comparison_tuple[1].database.name, comparison_tuple[1].database.id - ) - - -def test_schema_create_by_superuser(client, FUN_create_dj_db, MOD_engine_cache): - db_name = "some_db1" - database = FUN_create_dj_db(db_name) - - schema_count_before = Schema.objects.count() - - schema_name = 'Test Schema' - data = { - 'name': schema_name, - 'connection_id': database.id - } - response = client.post('/api/db/v0/schemas/', data=data) - response_schema = response.json() - - assert response.status_code == 201 - schema_count_after = Schema.objects.count() - assert schema_count_after == schema_count_before + 1 - schema = Schema.objects.get(id=response_schema['id']) - check_schema_response( - MOD_engine_cache, - response_schema, - schema, - schema_name, - db_name, - database.id, - check_schema_objects=0 - ) - - -def test_schema_create_by_superuser_too_long_name(client, FUN_create_dj_db): - db_name = "some_db1" - database = FUN_create_dj_db(db_name) - schema_count_before = Schema.objects.count() - very_long_string = ''.join(map(str, range(50))) - schema_name = 'very_long_identifier_' + very_long_string - data = { - 'name': schema_name, - 'connection_id': database.id - } - response = client.post('/api/db/v0/schemas/', data=data) - assert response.status_code == 400 - assert response.json()[0]['code'] == ErrorCodes.IdentifierTooLong.value - schema_count_after = Schema.objects.count() - assert schema_count_after == schema_count_before - - -def test_schema_create_by_db_manager(client_bob, user_bob, FUN_create_dj_db, get_uid): - db_name = get_uid() - role = "manager" - database = FUN_create_dj_db(db_name) - - schema_name = 'Test Schema' - data = { - 'name': schema_name, - 'connection_id': database.id - } - response = client_bob.post('/api/db/v0/schemas/', data=data) - assert response.status_code == 400 - - DatabaseRole.objects.create(database=database, user=user_bob, role=role) - response = client_bob.post('/api/db/v0/schemas/', data=data) - assert response.status_code == 201 - - -def test_schema_create_by_db_editor(client_bob, user_bob, FUN_create_dj_db, get_uid): - db_name = get_uid() - role = "editor" - database = FUN_create_dj_db(db_name) - DatabaseRole.objects.create(database=database, user=user_bob, role=role) - - schema_name = 'Test Schema' - 
data = { - 'name': schema_name, - 'connection_id': database.id - } - response = client_bob.post('/api/db/v0/schemas/', data=data) - assert response.status_code == 400 - - -def test_schema_create_multiple_existing_roles(client_bob, user_bob, FUN_create_dj_db, get_uid): - database_with_viewer_access = FUN_create_dj_db(get_uid()) - database_with_manager_access = FUN_create_dj_db(get_uid()) - FUN_create_dj_db(get_uid()) - DatabaseRole.objects.create(user=user_bob, database=database_with_viewer_access, role='viewer') - DatabaseRole.objects.create(user=user_bob, database=database_with_manager_access, role='manager') - - schema_name = 'Test Schema' - data = { - 'name': schema_name, - 'connection_id': database_with_viewer_access.id - } - response = client_bob.post('/api/db/v0/schemas/', data=data) - assert response.status_code == 400 - data['connection_id'] = database_with_manager_access.id - response = client_bob.post('/api/db/v0/schemas/', data=data) - assert response.status_code == 201 - - -@pytest.mark.skip("Faulty DB handling assumptions; invalid") -def test_schema_create_description(client, FUN_create_dj_db, MOD_engine_cache): - db_name = "some_db2" - database = FUN_create_dj_db(db_name) - - schema_count_before = Schema.objects.count() - - schema_name = 'Test Schema with description' - description = 'blah blah blah' - data = { - 'name': schema_name, - 'connection_id': database.id, - 'description': description, - } - response = client.post('/api/db/v0/schemas/', data=data) - response_schema = response.json() - - assert response.status_code == 201 - schema_count_after = Schema.objects.count() - assert schema_count_after == schema_count_before + 1 - schema = Schema.objects.get(id=response_schema['id']) - check_schema_response( - MOD_engine_cache, - response_schema, - schema, - schema_name, - db_name, - database.id, - schema_description=description, - check_schema_objects=0, - ) - - -def test_schema_update(client, create_schema): - schema = create_schema('foo') - data = { - 'name': 'blah' - } - response = client.put(f'/api/db/v0/schemas/{schema.id}/', data=data) - assert response.status_code == 405 - assert response.json()[0]['message'] == 'Method "PUT" not allowed.'
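- # Schemas expose no PUT handler; renames go through PATCH instead (see test_schema_partial_update below).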
- assert response.json()[0]['code'] == ErrorCodes.MethodNotAllowed.value - - -def test_schema_partial_update(create_schema, client, test_db_name, MOD_engine_cache): - schema_name = 'NASA Schema Partial Update' - new_schema_name = 'NASA Schema Partial Update New' - schema = create_schema(schema_name) - - body = {'name': new_schema_name} - response = client.patch(f'/api/db/v0/schemas/{schema.id}/', body) - - response_schema = response.json() - assert response.status_code == 200 - check_schema_response( - MOD_engine_cache, - response_schema, - schema, - new_schema_name, - test_db_name, - schema.database.id - ) - - schema = Schema.objects.get(oid=schema.oid) - assert schema.name == new_schema_name - - -update_clients_with_status_code = [ - ('superuser_client_factory', 200), - ('db_manager_client_factory', 200), - ('db_editor_client_factory', 403), - ('schema_manager_client_factory', 200), - ('schema_viewer_client_factory', 403), - ('db_viewer_schema_manager_client_factory', 200) -] - - -@pytest.mark.parametrize('client_name, expected_status_code', update_clients_with_status_code) -def test_schema_patch_same_name(create_schema, request, client_name, expected_status_code): - schema_name = 'Patents Schema Same Name' - schema = create_schema(schema_name) - client = request.getfixturevalue(client_name)(schema) - body = {'name': schema_name} - response = client.patch(f'/api/db/v0/schemas/{schema.id}/', body) - assert response.status_code == expected_status_code - - -def test_schema_delete(create_schema, client): - schema_name = 'NASA Schema Delete' - schema = create_schema(schema_name) - - response = client.delete(f'/api/db/v0/schemas/{schema.id}/') - assert response.status_code == 204 - - # Ensure the Django model was deleted - existing_oids = {schema.oid for schema in Schema.objects.all()} - assert schema.oid not in existing_oids - - -delete_clients_with_status_code = [ - ('superuser_client_factory', 204), - ('db_manager_client_factory', 204), - ('db_editor_client_factory', 403), - ('schema_manager_client_factory', 204), - ('schema_viewer_client_factory', 403), - ('db_viewer_schema_manager_client_factory', 204) -] - - -@pytest.mark.parametrize('client_name, expected_status_code', delete_clients_with_status_code) -def test_schema_delete_by_different_roles(create_schema, request, client_name, expected_status_code): - schema_name = 'NASA Schema Delete' - schema = create_schema(schema_name) - client = request.getfixturevalue(client_name)(schema) - response = client.delete(f'/api/db/v0/schemas/{schema.id}/') - assert response.status_code == expected_status_code - - -def test_schema_dependents(client, create_schema): - schema_name = 'NASA Schema Dependencies' - schema = create_schema(schema_name) - - response = client.get(f'/api/db/v0/schemas/{schema.id}/') - response_schema = response.json() - assert response.status_code == 200 - assert response_schema['has_dependents'] is False - - -def test_schema_detail_404(client): - response = client.get('/api/db/v0/schemas/3000/') - assert response.status_code == 404 - assert response.json()[0]['message'] == 'Not found.' - assert response.json()[0]['code'] == ErrorCodes.NotFound.value - - -def test_schema_partial_update_404(client): - response = client.patch('/api/db/v0/schemas/3000/', {}) - assert response.status_code == 404 - assert response.json()[0]['message'] == 'Not found.' 
- assert response.json()[0]['code'] == ErrorCodes.NotFound.value - - -def test_schema_delete_404(client): - response = client.delete('/api/db/v0/schemas/3000/') - assert response.status_code == 404 - assert response.json()[0]['message'] == 'Not found.' - assert response.json()[0]['code'] == ErrorCodes.NotFound.value - - -def test_schema_get_with_reflect_new(client, engine): - schema_name = 'a_new_schema' - with engine.begin() as conn: - conn.execute(text(f'CREATE SCHEMA {schema_name};')) - response = client.get('/api/db/v0/schemas/') - # The schema number should only change after the GET request - response_data = response.json() - actual_created = [ - schema for schema in response_data['results'] if schema['name'] == schema_name - ] - assert len(actual_created) == 1 - with engine.begin() as conn: - conn.execute(text(f'DROP SCHEMA {schema_name} CASCADE;')) - - -def test_schema_get_with_reflect_change(client, engine, create_db_schema): - schema_name = 'a_new_schema' - create_db_schema(schema_name, engine) - response = client.get('/api/db/v0/schemas/') - response_data = response.json() - orig_created = [ - schema for schema in response_data['results'] if schema['name'] == schema_name - ] - assert len(orig_created) == 1 - orig_id = orig_created[0]['id'] - new_schema_name = 'even_newer_schema' - with engine.begin() as conn: - conn.execute(text(f'ALTER SCHEMA {schema_name} RENAME TO {new_schema_name};')) - cache.clear() - response = client.get('/api/db/v0/schemas/') - response_data = response.json() - orig_created = [ - schema for schema in response_data['results'] if schema['name'] == schema_name - ] - assert len(orig_created) == 0 - modified = [ - schema for schema in response_data['results'] if schema['name'] == new_schema_name - ] - modified_id = modified[0]['id'] - assert len(modified) == 1 - assert orig_id == modified_id - - -@pytest.mark.skip("Faulty DB handling assumptions; invalid") -def test_schema_create_duplicate(client, FUN_create_dj_db): - db_name = "tmp_db1" - FUN_create_dj_db(db_name) - - data = { - 'name': 'Test Duplication Schema', - 'database': db_name - } - response = client.post('/api/db/v0/schemas/', data=data) - assert response.status_code == 201 - response = client.post('/api/db/v0/schemas/', data=data) - assert response.status_code == 400 - - -def test_schema_get_with_reflect_delete(client, engine, create_db_schema): - schema_name = 'a_new_schema' - create_db_schema(schema_name, engine) - - response = client.get('/api/db/v0/schemas/') - response_data = response.json() - orig_created = [ - schema for schema in response_data['results'] if schema['name'] == schema_name - ] - assert len(orig_created) == 1 - with engine.begin() as conn: - conn.execute(text(f'DROP SCHEMA {schema_name};')) - cache.clear() - response = client.get('/api/db/v0/schemas/') - response_data = response.json() - orig_created = [ - schema for schema in response_data['results'] if schema['name'] == schema_name - ] - assert len(orig_created) == 0 diff --git a/mathesar/tests/api/test_shared_tables.py b/mathesar/tests/api/test_shared_tables.py deleted file mode 100644 index 6dcf2a2322..0000000000 --- a/mathesar/tests/api/test_shared_tables.py +++ /dev/null @@ -1,240 +0,0 @@ -import pytest -import uuid - -from mathesar.models.shares import SharedTable - - -@pytest.fixture -def schemas_with_shared_tables(create_patents_table, uid): - table_name = f"schemas_with_shared_tables_{uid}" - table = create_patents_table(table_name) - share = SharedTable.objects.create( - table=table, - enabled=True, - ) - 
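# A second share in a different schema lets the role-parametrized tests below verify that share access is scoped per schema, not per database. - 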
different_schema_table = create_patents_table(table_name, schema_name="Different Schema") - different_schema_share = SharedTable.objects.create( - table=different_schema_table, - enabled=True, - ) - yield { - 'patents_schema': table.schema, - 'patents_table': table, - 'patents_table_share': share, - 'different_schema': different_schema_table.schema, - 'different_schema_table': different_schema_table, - 'different_schema_table_share': different_schema_share, - } - share.delete() - table.delete() - different_schema_share.delete() - different_schema_table.delete() - - -read_client_with_different_roles = [ - # (client_name, different_schema_status_code) - ('superuser_client_factory', 200), - ('db_manager_client_factory', 200), - ('db_editor_client_factory', 200), - ('schema_manager_client_factory', 403), - ('schema_viewer_client_factory', 403), - ('db_viewer_schema_manager_client_factory', 200) -] - - -write_client_with_different_roles = [ - # client_name, is_allowed - ('superuser_client_factory', True), - ('db_manager_client_factory', True), - ('db_editor_client_factory', True), - ('schema_manager_client_factory', True), - ('schema_viewer_client_factory', False), - ('db_viewer_schema_manager_client_factory', True) -] - - -@pytest.mark.parametrize('client_name,different_schema_status_code', read_client_with_different_roles) -def test_shared_table_list( - schemas_with_shared_tables, - request, - client_name, - different_schema_status_code, -): - client = request.getfixturevalue(client_name)(schemas_with_shared_tables["patents_schema"]) - response = client.get(f'/api/ui/v0/tables/{schemas_with_shared_tables["patents_table"].id}/shares/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data['count'] == 1 - assert len(response_data['results']) == 1 - result = response_data['results'][0] - assert result['slug'] == str(schemas_with_shared_tables['patents_table_share'].slug) - assert result['enabled'] == schemas_with_shared_tables['patents_table_share'].enabled - - response = client.get(f'/api/ui/v0/tables/{schemas_with_shared_tables["different_schema_table"].id}/shares/') - assert response.status_code == different_schema_status_code - if different_schema_status_code == 200: - response_data = response.json() - assert len(response_data['results']) == 1 - result = response_data['results'][0] - assert result['slug'] == str(schemas_with_shared_tables['different_schema_table_share'].slug) - assert result['enabled'] == schemas_with_shared_tables['different_schema_table_share'].enabled - - -@pytest.mark.parametrize('client_name,different_schema_status_code', read_client_with_different_roles) -def test_shared_table_retrieve( - schemas_with_shared_tables, - request, - client_name, - different_schema_status_code, -): - client = request.getfixturevalue(client_name)(schemas_with_shared_tables["patents_schema"]) - response = client.get(f'/api/ui/v0/tables/{schemas_with_shared_tables["patents_table"].id}/shares/{schemas_with_shared_tables["patents_table_share"].id}/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data['slug'] == str(schemas_with_shared_tables['patents_table_share'].slug) - assert response_data['enabled'] == schemas_with_shared_tables['patents_table_share'].enabled - - response = client.get(f'/api/ui/v0/tables/{schemas_with_shared_tables["different_schema_table"].id}/shares/{schemas_with_shared_tables["different_schema_table_share"].id}/') - assert response.status_code == different_schema_status_code - if 
different_schema_status_code == 200: - response_data = response.json() - assert response_data['slug'] == str(schemas_with_shared_tables['different_schema_table_share'].slug) - assert response_data['enabled'] == schemas_with_shared_tables['different_schema_table_share'].enabled - - -@pytest.mark.parametrize('client_name,is_allowed', write_client_with_different_roles) -def test_shared_table_create( - patents_table, - request, - client_name, - is_allowed -): - client = request.getfixturevalue(client_name)(patents_table.schema) - data = {'enabled': True} - response = client.post(f'/api/ui/v0/tables/{patents_table.id}/shares/', data) - response_data = response.json() - - if is_allowed: - assert response.status_code == 201 - assert 'id' in response_data - assert response_data['enabled'] is True - created_share = SharedTable.objects.get(id=response_data['id']) - assert created_share is not None - else: - assert response.status_code == 403 - - # clean up - patents_table.delete() - - -@pytest.mark.parametrize('client_name,is_allowed', write_client_with_different_roles) -def test_shared_table_patch( - schemas_with_shared_tables, - request, - client_name, - is_allowed -): - client = request.getfixturevalue(client_name)(schemas_with_shared_tables["patents_schema"]) - data = {'enabled': False} - response = client.patch(f'/api/ui/v0/tables/{schemas_with_shared_tables["patents_table"].id}/shares/{schemas_with_shared_tables["patents_table_share"].id}/', data) - response_data = response.json() - - if is_allowed: - assert response.status_code == 200 - assert response_data['slug'] == str(schemas_with_shared_tables['patents_table_share'].slug) - assert response_data['enabled'] is False - else: - assert response.status_code == 403 - - -@pytest.mark.parametrize('client_name,is_allowed', write_client_with_different_roles) -def test_shared_table_delete( - schemas_with_shared_tables, - request, - client_name, - is_allowed -): - client = request.getfixturevalue(client_name)(schemas_with_shared_tables["patents_schema"]) - response = client.delete(f'/api/ui/v0/tables/{schemas_with_shared_tables["patents_table"].id}/shares/{schemas_with_shared_tables["patents_table_share"].id}/') - - if is_allowed: - assert response.status_code == 204 - assert SharedTable.objects.filter(id=schemas_with_shared_tables['patents_table_share'].id).first() is None - else: - assert response.status_code == 403 - - -@pytest.mark.parametrize('client_name,is_allowed', write_client_with_different_roles) -def test_shared_table_regenerate_link( - schemas_with_shared_tables, - request, - client_name, - is_allowed -): - client = request.getfixturevalue(client_name)(schemas_with_shared_tables["patents_schema"]) - old_slug = str(schemas_with_shared_tables["patents_table_share"].slug) - response = client.post(f'/api/ui/v0/tables/{schemas_with_shared_tables["patents_table"].id}/shares/{schemas_with_shared_tables["patents_table_share"].id}/regenerate/') - response_data = response.json() - - if is_allowed: - assert response.status_code == 200 - assert response_data['slug'] != old_slug - else: - assert response.status_code == 403 - - -# Table endpoints with share-link-uuid token - -tables_request_client_with_different_roles = [ - # (client_name, same_schema_invalid_token_status, different_schema_invalid_token_status) - ('superuser_client_factory', 200, 200), - ('db_manager_client_factory', 200, 200), - ('db_editor_client_factory', 200, 200), - ('schema_manager_client_factory', 200, 403), - ('schema_viewer_client_factory', 200, 403), - 
('db_viewer_schema_manager_client_factory', 200, 200), - ('anonymous_client_factory', 401, 401) -] - - -@pytest.mark.parametrize('client_name,same_schema_invalid_token_status,different_schema_invalid_token_status', tables_request_client_with_different_roles) -@pytest.mark.parametrize('endpoint', ['columns', 'constraints', 'records']) -def test_shared_table_view_requests( - schemas_with_shared_tables, - request, - endpoint, - client_name, - same_schema_invalid_token_status, - different_schema_invalid_token_status -): - client = request.getfixturevalue(client_name)(schemas_with_shared_tables["patents_schema"]) - - table_url = f'/api/db/v0/tables/{schemas_with_shared_tables["patents_table"].id}' - share_uuid_param = f'shared-link-uuid={schemas_with_shared_tables["patents_table_share"].slug}' - invalid_share_uuid_param = f'shared-link-uuid={uuid.uuid4()}' - different_schema_table_url = f'/api/db/v0/tables/{schemas_with_shared_tables["different_schema_table"].id}' - different_schema_table_uuid_param = f'shared-link-uuid={schemas_with_shared_tables["different_schema_table_share"].slug}' - - response = client.get(f'{table_url}/{endpoint}/?{share_uuid_param}') - response_data = response.json() - assert response.status_code == 200 - assert len(response_data['results']) >= 1 - - response = client.get(f'{table_url}/{endpoint}/?{invalid_share_uuid_param}') - response_data = response.json() - assert response.status_code == same_schema_invalid_token_status - if same_schema_invalid_token_status == 200: - assert len(response_data['results']) >= 1 - - response = client.get(f'{different_schema_table_url}/{endpoint}/?{different_schema_table_uuid_param}') - response_data = response.json() - assert response.status_code == 200 - assert len(response_data['results']) >= 1 - - response = client.get(f'{different_schema_table_url}/{endpoint}/?{invalid_share_uuid_param}') - response_data = response.json() - assert response.status_code == different_schema_invalid_token_status - if different_schema_invalid_token_status == 200: - assert len(response_data['results']) >= 1 diff --git a/mathesar/tests/api/test_table_api.py b/mathesar/tests/api/test_table_api.py deleted file mode 100644 index 162b2ae796..0000000000 --- a/mathesar/tests/api/test_table_api.py +++ /dev/null @@ -1,2127 +0,0 @@ -import json -import pytest - -from django.core.cache import cache -from django.core.files.base import File, ContentFile -from sqlalchemy import text - -from db.columns.operations.select import get_column_attnum_from_name, get_column_attnum_from_names_as_map -from db.constants import ID, ID_ORIGINAL -from db.types.base import PostgresType, MathesarCustomType -from db.metadata import get_empty_metadata -from mathesar.models.users import DatabaseRole, SchemaRole -from mathesar.models.query import Exploration - -from mathesar.state import reset_reflection -from mathesar.api.exceptions.error_codes import ErrorCodes -from mathesar.models.deprecated import Column, Table, DataFile - - -@pytest.fixture -def missing_keys_json_data_file(): - data_filepath = 'mathesar/tests/data/json_parsing/missing_keys.json' - with open(data_filepath, "rb") as json_file: - data_file = DataFile.objects.create( - file=File(json_file), - created_from='file', - base_name='missing_keys', - type='json' - ) - return data_file - - -@pytest.fixture -def patents_excel_data_file(patents_excel_filepath): - with open(patents_excel_filepath, "rb") as excel_file: - data_file = DataFile.objects.create( - file=File(excel_file), - created_from='file', - base_name='patents', - 
type='excel' - ) - return data_file - - -@pytest.fixture -def misaligned_table_excel_data_file(): - data_filepath = 'mathesar/tests/data/excel_parsing/misaligned_table.xlsx' - with open(data_filepath, "rb") as excel_file: - data_file = DataFile.objects.create( - file=File(excel_file), - created_from='file', - base_name='misaligned_table', - type='excel' - ) - return data_file - - -@pytest.fixture -def duplicate_id_csv_data_file(duplicate_id_table_csv_filepath): - with open(duplicate_id_table_csv_filepath, "rb") as file: - data_file = DataFile.objects.create( - file=File(file), - created_from='file', - base_name='duplicate_id', - type='csv' - ) - return data_file - - -@pytest.fixture -def null_id_csv_data_file(null_id_table_csv_filepath): - with open(null_id_table_csv_filepath, "rb") as file: - data_file = DataFile.objects.create( - file=File(file), - created_from='file', - base_name='null_id', - type='csv' - ) - return data_file - - -@pytest.fixture -def duplicate_id_json_data_file(duplicate_id_table_json_filepath): - with open(duplicate_id_table_json_filepath, "rb") as file: - data_file = DataFile.objects.create( - file=File(file), - created_from='file', - base_name='duplicate_id', - type='json' - ) - return data_file - - -@pytest.fixture -def null_id_json_data_file(null_id_table_json_filepath): - with open(null_id_table_json_filepath, "rb") as file: - data_file = DataFile.objects.create( - file=File(file), - created_from='file', - base_name='null_id', - type='json' - ) - return data_file - - -@pytest.fixture -def duplicate_id_excel_data_file(duplicate_id_table_excel_filepath): - with open(duplicate_id_table_excel_filepath, "rb") as file: - data_file = DataFile.objects.create( - file=File(file), - created_from='file', - base_name='duplicate_id', - type='excel' - ) - return data_file - - -@pytest.fixture -def null_id_excel_data_file(null_id_table_excel_filepath): - with open(null_id_table_excel_filepath, "rb") as file: - data_file = DataFile.objects.create( - file=File(file), - created_from='file', - base_name='null_id', - type='excel' - ) - return data_file - - -@pytest.fixture -def schema_name(): - return 'table_tests' - - -@pytest.fixture -def schema(create_schema, schema_name): - return create_schema(schema_name) - - -@pytest.fixture -def data_file(patents_csv_filepath): - with open(patents_csv_filepath, 'rb') as csv_file: - data_file = DataFile.objects.create( - file=File(csv_file), - created_from='file', - base_name='patents', - type='csv' - ) - return data_file - - -@pytest.fixture -def existing_id_col_table_datafile(table_with_id_col_filepath): - with open(table_with_id_col_filepath, 'rb') as csv_file: - data_file = DataFile.objects.create( - file=File(csv_file), - created_from='file', - base_name='table_with_id', - type='csv' - ) - return data_file - - -@pytest.fixture -def paste_data_file(paste_filename): - with open(paste_filename, 'r') as paste_file: - paste_text = paste_file.read() - data_file = DataFile.objects.create( - file=ContentFile(paste_text, name='paste_file.txt'), - created_from='paste', - delimiter='\t', - quotechar='', - escapechar='', - type='csv' - ) - return data_file - - -@pytest.fixture -def url_data_file(patents_url, patents_url_filename): - base_name = patents_url.split('/')[-1].split('.')[0] - with open(patents_url_filename, 'rb') as file: - data_file = DataFile.objects.create( - file=File(file), - created_from='url', - base_name=base_name, - type='csv' - ) - return data_file - - -def check_table_response(response_table, table, table_name): - assert 
response_table['id'] == table.id - assert response_table['name'] == table_name - assert response_table['schema'] == table.schema.id - assert response_table['settings']['preview_settings']['template'] == table.settings.preview_settings.template - assert 'import_target' in response_table - assert 'created_at' in response_table - assert 'updated_at' in response_table - assert 'has_dependents' in response_table - assert 'import_verified' in response_table - assert len(response_table['columns']) == len(table.sa_column_names) - for column in response_table['columns']: - assert column['name'] in table.sa_column_names - assert 'type' in column - assert response_table['records_url'].startswith('http') - assert response_table['columns_url'].startswith('http') - assert response_table['constraints_url'].startswith('http') - assert response_table['type_suggestions_url'].startswith('http') - assert response_table['previews_url'].startswith('http') - assert '/api/db/v0/tables/' in response_table['records_url'] - assert '/api/db/v0/tables/' in response_table['columns_url'] - assert '/api/db/v0/tables/' in response_table['constraints_url'] - assert '/api/db/v0/tables/' in response_table['type_suggestions_url'] - assert '/api/db/v0/tables/' in response_table['previews_url'] - assert response_table['records_url'].endswith('/records/') - assert response_table['columns_url'].endswith('/columns/') - assert response_table['constraints_url'].endswith('/constraints/') - assert response_table['type_suggestions_url'].endswith('/type_suggestions/') - assert response_table['previews_url'].endswith('/previews/') - - -def check_table_filter_response(response, status_code=None, count=None): - response_data = response.json() - if status_code is not None: - assert response.status_code == status_code - if count is not None: - assert response_data['count'] == count - assert len(response_data['results']) == count - - -def _create_table(client, data_files, table_name, schema, import_target_table, description=None): - body = { - 'name': table_name, - 'schema': schema.id, - 'description': description - } - if data_files is not None: - body['data_files'] = [df.id for df in data_files] - if import_target_table is not None: - body['import_target'] = import_target_table.id - - response = client.post('/api/db/v0/tables/', body) - response_table = response.json() - table = Table.objects.get(id=response_table['id']) - - if data_files is not None: - for df in data_files: - df.refresh_from_db() - - return response, response_table, table - - -def get_expected_name(table_name, data_file=None): - if not table_name and data_file: - return data_file.base_name - elif not table_name and data_file is None: - return f'Table {Table.objects.count()}' - else: - return table_name - - -def check_create_table_response( - client, name, expt_name, data_file, schema, first_row, column_names, import_target_table -): - num_tables = Table.objects.count() - - response, response_table, table = _create_table(client, [data_file], name, schema, import_target_table) - - assert response.status_code == 201 - assert Table.objects.count() == num_tables + 1 - assert table.get_records()[0] == first_row - assert all([col in table.sa_column_names for col in column_names]) - assert data_file.table_imported_to.id == table.id - assert table.import_target == import_target_table - check_table_response(response_table, table, expt_name) - return table - - -list_clients_with_results_count = [ - ('superuser_client_factory', 5), - ('db_manager_client_factory', 5), - 
('db_editor_client_factory', 2), - ('schema_manager_client_factory', 4), - ('schema_viewer_client_factory', 2), - ('db_viewer_schema_manager_client_factory', 4) -] - -write_clients_with_status_code = [ - ('superuser_client_factory', 201), - ('db_manager_client_factory', 201), - ('db_editor_client_factory', 400), - ('schema_manager_client_factory', 201), - ('schema_viewer_client_factory', 400), - ('db_viewer_schema_manager_client_factory', 201) -] - -update_client_with_status_code = [ - ('db_manager_client_factory', 200), - ('db_editor_client_factory', 403), - ('schema_manager_client_factory', 200), - ('schema_viewer_client_factory', 403), - ('db_viewer_schema_manager_client_factory', 200) -] - - -def test_table_list(create_patents_table, client): - """ - Desired format: - { - 'count': 1, - 'results': [ - { - 'id': 1, - 'name': 'NASA Table List', - 'schema': 'http://testserver/api/db/v0/schemas/1/', - 'created_at': '2021-04-27T18:43:41.201851Z', - 'updated_at': '2021-04-27T18:43:41.201898Z', - 'columns': [ - { - 'name': 'id', - 'type': PostgresType.INTEGER.id - }, - { - 'name': 'Center', - 'type': PostgresType.CHARACTER_VARYING.id - }, - # etc. - ], - 'records_url': 'http://testserver/api/db/v0/tables/3/records/' - } - ] - } - """ - table_name = 'NASA Table List' - table = create_patents_table(table_name) - - response = client.get('/api/db/v0/tables/') - response_data = response.json() - response_table = None - for table_data in response_data['results']: - if table_data['name'] == table_name: - response_table = table_data - break - assert response.status_code == 200 - assert response_data['count'] >= 1 - assert len(response_data['results']) >= 1 - check_table_response(response_table, table, table_name) - - -@pytest.mark.parametrize('client_name,expected_table_count', list_clients_with_results_count) -def test_table_list_based_on_permissions( - create_patents_table, - patent_schema, - request, - client_name, - expected_table_count -): - create_patents_table('Private Table', schema_name='Private Schema') - create_patents_table("Patent Table 1") - create_patents_table("Patent Table 2") - table3 = create_patents_table("Patent Table 3") - table4 = create_patents_table("Patent Table 4") - table3.import_verified = True - table4.import_verified = None - table3.save() - table4.save() - - client = request.getfixturevalue(client_name)(patent_schema) - - response = client.get('/api/db/v0/tables/') - response_data = response.json() - assert response_data['count'] == expected_table_count - - -def test_table_list_filter_name(create_patents_table, client): - expected_tables = { - 'Filter Name 1': create_patents_table('Filter Name 1'), - 'Filter Name 2': create_patents_table('Filter Name 2'), - 'Filter Name 3': create_patents_table('Filter Name 3') - } - - filter_tables = ['Filter Name 1', 'Filter Name 2'] - query_str = ','.join(filter_tables) - response = client.get(f'/api/db/v0/tables/?name={query_str}') - response_data = response.json() - check_table_filter_response(response, status_code=200, count=2) - - response_tables = {res['name']: res for res in response_data['results']} - for table_name in filter_tables: - assert table_name in response_tables - table = expected_tables[table_name] - response_table = response_tables[table_name] - check_table_response(response_table, table, table_name) - - -def test_table_list_filter_schema(create_patents_table, client): - expected_tables = { - 'Schema 1': create_patents_table('Filter Schema 1', schema_name='Schema 1'), - 'Schema 2': create_patents_table('Filter 
Schema 2', schema_name='Schema 2'),
-        'Schema 3': create_patents_table('Filter Schema 3', schema_name='Schema 3')
-    }
-
-    schema_name = 'Schema 1'
-    schema_id = expected_tables[schema_name].schema.id
-    response = client.get(f'/api/db/v0/tables/?schema={schema_id}')
-    response_data = response.json()
-    check_table_filter_response(response, status_code=200, count=1)
-
-    response_tables = {res['schema']: res
-                       for res in response_data['results']}
-
-    assert schema_id in response_tables
-    table = expected_tables[schema_name]
-    response_table = response_tables[schema_id]
-    check_table_response(response_table, table, table.name)
-
-
-def test_table_list_order_by_name(create_patents_table, client):
-    table_2 = create_patents_table('Filter Name 2')
-    table_1 = create_patents_table('Filter Name 1')
-    table_4 = create_patents_table('Filter Name 4')
-    table_3 = create_patents_table('Filter Name 3')
-    table_5 = create_patents_table('Filter Name 5')
-    unsorted_expected_tables = [table_5, table_3, table_4, table_1, table_2]
-    expected_tables = [table_1, table_2, table_3, table_4, table_5]
-    response = client.get('/api/db/v0/tables/')
-    response_data = response.json()
-    response_tables = response_data['results']
-    comparison_tuples = zip(response_tables, unsorted_expected_tables)
-    for comparison_tuple in comparison_tuples:
-        check_table_response(comparison_tuple[0], comparison_tuple[1], comparison_tuple[1].name)
-    sort_field = 'name'
-    response = client.get(f'/api/db/v0/tables/?sort_by={sort_field}')
-    response_data = response.json()
-    response_tables = response_data['results']
-    comparison_tuples = zip(response_tables, expected_tables)
-    for comparison_tuple in comparison_tuples:
-        check_table_response(comparison_tuple[0], comparison_tuple[1], comparison_tuple[1].name)
-
-
-def test_table_list_order_by_id(create_patents_table, client):
-    table_1 = create_patents_table('Filter Name 1')
-    table_2 = create_patents_table('Filter Name 2')
-    table_3 = create_patents_table('Filter Name 3')
-    unsorted_expected_tables = [
-        table_3,
-        table_2,
-        table_1
-    ]
-    expected_tables = [
-        table_1,
-        table_2,
-        table_3
-    ]
-
-    response = client.get('/api/db/v0/tables/')
-    response_data = response.json()
-    response_tables = response_data['results']
-    comparison_tuples = zip(response_tables, unsorted_expected_tables)
-    for comparison_tuple in comparison_tuples:
-        check_table_response(comparison_tuple[0], comparison_tuple[1], comparison_tuple[1].name)
-
-    sort_field = 'id'
-    response = client.get(f'/api/db/v0/tables/?sort_by={sort_field}')
-    response_data = response.json()
-    response_tables = response_data['results']
-    comparison_tuples = zip(response_tables, expected_tables)
-    for comparison_tuple in comparison_tuples:
-        check_table_response(comparison_tuple[0], comparison_tuple[1], comparison_tuple[1].name)
-
-
-@pytest.mark.parametrize('timestamp_type', ['created', 'updated'])
-def test_table_list_filter_timestamps(create_patents_table, client, timestamp_type):
-    table_name = f'Filter {timestamp_type}'
-    table = create_patents_table(table_name)
-    query_str = '2020-01-01 8:00'
-
-    response = client.get(f'/api/db/v0/tables/?{timestamp_type}_before={query_str}')
-    response_data = response.json()
-    check_table_filter_response(response, status_code=200, count=0)
-
-    response = client.get(f'/api/db/v0/tables/?{timestamp_type}_after={query_str}')
-    response_data = response.json()
-    check_table_filter_response(response, status_code=200, count=1)
-    check_table_response(response_data['results'][0], table, table_name)
-
-    timestamp 
= table.created_at if timestamp_type == 'created' else table.updated_at - response = client.get(f'/api/db/v0/tables/?{timestamp_type}={timestamp}') - response_data = response.json() - check_table_filter_response(response, status_code=200, count=1) - check_table_response(response_data['results'][0], table, table_name) - - -def test_table_list_filter_import_verified(create_patents_table, client): - expected_tables = { - True: create_patents_table('Filter Verified 1'), - False: create_patents_table('Filter Verified 2'), - } - for verified, table in expected_tables.items(): - table.import_verified = verified - table.save() - - for verified, table in expected_tables.items(): - query_str = str(verified).lower() - response = client.get(f'/api/db/v0/tables/?import_verified={query_str}') - response_data = response.json() - check_table_filter_response(response, status_code=200, count=1) - check_table_response(response_data['results'][0], table, table.name) - - -def test_table_list_filter_imported(create_patents_table, client): - expected_tables = { - None: create_patents_table('Filter Imported 1'), - False: create_patents_table('Filter Imported 2'), - True: create_patents_table('Filter Imported 3'), - } - for verified, table in expected_tables.items(): - table.import_verified = verified - table.save() - - response = client.get('/api/db/v0/tables/?not_imported=false') - check_table_filter_response(response, status_code=200, count=2) - - table = expected_tables[None] - response = client.get('/api/db/v0/tables/?not_imported=true') - response_data = response.json() - check_table_filter_response(response, status_code=200, count=1) - check_table_response(response_data['results'][0], table, table.name) - - -def test_table_detail(create_patents_table, client): - """ - Desired format: - One item in the results list in the table list view, see above. 
- """ - table_name = 'NASA Table Detail' - table = create_patents_table(table_name) - - response = client.get(f'/api/db/v0/tables/{table.id}/') - response_table = response.json() - assert response.status_code == 200 - check_table_response(response_table, table, table_name) - - -@pytest.fixture -def type_inference_table(create_table, get_uid): - return create_table( - table_name=get_uid(), - schema_name=get_uid(), - csv_filepath='mathesar/tests/data/type_inference.csv', - ) - - -@pytest.fixture -def _type_inference_table_type_suggestions(): - return { - 'col_1': PostgresType.NUMERIC.id, - 'col_2': PostgresType.BOOLEAN.id, - 'col_3': PostgresType.BOOLEAN.id, - 'col_4': PostgresType.TEXT.id, - 'col_5': PostgresType.TEXT.id, - 'col_6': PostgresType.NUMERIC.id, - 'col_7': MathesarCustomType.MATHESAR_MONEY.id, - } - - -def test_table_type_suggestion(client, type_inference_table, _type_inference_table_type_suggestions): - table = type_inference_table - response = client.get(f'/api/db/v0/tables/{table.id}/type_suggestions/') - response_table = response.json() - assert response.status_code == 200 - expected_types = _type_inference_table_type_suggestions - assert response_table == expected_types - - -def _check_columns(actual_column_list, expected_column_list): - # Columns will return an extra type_options key in actual_dict - # so we need to check equality only for the keys in expect_dict - actual_column_list = [ - {k: v for k, v in actual_column.items() if k in expected_column} - for actual_column, expected_column - in zip(actual_column_list, expected_column_list) - ] - _assert_lists_of_dicts_are_equal(actual_column_list, expected_column_list) - - -def _assert_lists_of_dicts_are_equal(a, b): - assert len(a) == len(b) - for d in a: - assert d in b - - -@pytest.fixture -def _type_inference_table_previews_post_body(_type_inference_table_type_suggestions): - return { - 'columns': [ - {'name': 'id', 'type': PostgresType.INTEGER.id} - ] + [ - {'name': id, 'type': db_type_id} - for id, db_type_id - in _type_inference_table_type_suggestions.items() - ] - } - - -def test_table_previews(client, type_inference_table, _type_inference_table_previews_post_body): - table = type_inference_table - post_body = _type_inference_table_previews_post_body - response = client.post(f'/api/db/v0/tables/{table.id}/previews/', data=post_body) - assert response.status_code == 200 - expect_dict = { - 'name': table.name, - 'columns': post_body['columns'], - 'records': [ - {'id': 1, 'col_1': 0.0, 'col_2': False, 'col_3': True, 'col_4': 't', 'col_5': 'a', 'col_6': 2.0, 'col_7': 5}, - {'id': 2, 'col_1': 2.0, 'col_2': True, 'col_3': False, 'col_4': 'false', 'col_5': 'cat', 'col_6': 1.0, 'col_7': 1}, - {'id': 3, 'col_1': 1.0, 'col_2': True, 'col_3': True, 'col_4': '2', 'col_5': 'mat', 'col_6': 0.0, 'col_7': 2}, - {'id': 4, 'col_1': 0.0, 'col_2': False, 'col_3': False, 'col_4': '0', 'col_5': 'bat', 'col_6': 0.0, 'col_7': 3} - ], - } - actual_dict = response.json() - assert all([expect_dict[key] == actual_dict[key] for key in expect_dict if key in ['name', 'records']]) - _check_columns(actual_dict['columns'], expect_dict['columns']) - - -def _find_post_body_column_ix_by_name(post_body, name): - return tuple(column['name'] for column in post_body['columns']).index(name) - - -def test_table_previews_wrong_column_number(client, type_inference_table, _type_inference_table_previews_post_body): - table = type_inference_table - - post_body = _type_inference_table_previews_post_body - del 
post_body['columns'][_find_post_body_column_ix_by_name(post_body, 'col_1')] - - response = client.post(f'/api/db/v0/tables/{table.id}/previews/', data=post_body) - assert response.status_code == 400 - assert 'number' in response.json()[0]['message'] - assert ErrorCodes.ColumnSizeMismatch.value == response.json()[0]['code'] - - -def test_table_previews_invalid_type_cast_check(client, type_inference_table, _type_inference_table_previews_post_body): - table = type_inference_table - - post_body = _type_inference_table_previews_post_body - post_body['columns'][_find_post_body_column_ix_by_name(post_body, 'col_5')]['type'] = MathesarCustomType.EMAIL.id - - response = client.post(f'/api/db/v0/tables/{table.id}/previews/', data=post_body) - assert response.status_code == 400 - assert 'Invalid type' in response.json()[0]['message'] - - -def test_table_previews_unsupported_type(client, type_inference_table, _type_inference_table_previews_post_body): - table = type_inference_table - - post_body = _type_inference_table_previews_post_body - post_body['columns'][_find_post_body_column_ix_by_name(post_body, 'col_1')]['type'] = 'notatype' - - response = client.post(f'/api/db/v0/tables/{table.id}/previews/', data=post_body) - assert response.status_code == 400 - assert 'Unknown database type identifier' in response.json()[0]['message'] - assert 'columns' in response.json()[0]['field'] - - -def test_table_previews_missing_columns(client, type_inference_table): - table = type_inference_table - - post_body = {} - - response = client.post(f'/api/db/v0/tables/{table.id}/previews/', data=post_body) - assert response.status_code == 400 - assert 'required' in response.json()[0]['message'] - assert 'columns' in response.json()[0]['field'] - - -@pytest.mark.parametrize('table_name', ['Test Table Create From Datafile', '']) -def test_table_create_from_datafile(client, data_file, schema, table_name): - expt_name = get_expected_name(table_name, data_file=data_file) - first_row = (1, 'NASA Kennedy Space Center', 'Application', 'KSC-12871', '0', - '13/033,085', 'Polyimide Wire Insulation Repair System', None) - column_names = ['Center', 'Status', 'Case Number', 'Patent Number', - 'Application SN', 'Title', 'Patent Expiration Date'] - - check_create_table_response( - client, table_name, expt_name, data_file, schema, first_row, column_names, import_target_table=None - ) - - -@pytest.mark.parametrize('table_name', ['Test Table Create From Datafile', '']) -def test_table_create_from_datafile_with_import_target(client, data_file, schema, table_name): - _, _, import_target_table = _create_table(client, None, 'target_table', schema, import_target_table=None) - expt_name = get_expected_name(table_name, data_file=data_file) - first_row = (1, 'NASA Kennedy Space Center', 'Application', 'KSC-12871', '0', - '13/033,085', 'Polyimide Wire Insulation Repair System', None) - column_names = ['Center', 'Status', 'Case Number', 'Patent Number', - 'Application SN', 'Title', 'Patent Expiration Date'] - - check_create_table_response( - client, table_name, expt_name, data_file, schema, first_row, column_names, import_target_table - ) - - -@pytest.mark.parametrize('table_name', ['Test Table Create From Paste', '']) -def test_table_create_from_paste(client, schema, paste_data_file, table_name): - expt_name = get_expected_name(table_name) - first_row = (1, 'NASA Kennedy Space Center', 'Application', 'KSC-12871', '0', - '13/033,085', 'Polyimide Wire Insulation Repair System', None) - column_names = ['Center', 'Status', 'Case Number', 'Patent 
Number', - 'Application SN', 'Title', 'Patent Expiration Date'] - - check_create_table_response( - client, table_name, expt_name, paste_data_file, schema, first_row, column_names, import_target_table=None - ) - - -@pytest.mark.parametrize('table_name', ['Test Table Create From URL', '']) -def test_table_create_from_url(client, schema, url_data_file, table_name): - expt_name = get_expected_name(table_name, data_file=url_data_file) - first_row = (1, 'NASA Kennedy Space Center', 'Application', 'KSC-12871', '0', - '13/033,085', 'Polyimide Wire Insulation Repair System', None) - column_names = ['center', 'status', 'case_number', 'patent_number', - 'application_sn', 'title', 'patent_expiration_date'] - - check_create_table_response( - client, table_name, expt_name, url_data_file, schema, first_row, column_names, import_target_table=None - ) - - -@pytest.mark.parametrize('data_files', [None, []]) -@pytest.mark.parametrize('table_name', ['test_table_no_file', '']) -def test_table_create_without_datafile(client, schema, data_files, table_name): - num_tables = Table.objects.count() - expt_name = get_expected_name(table_name) - - expect_comment = 'test comment for table create' - response, response_table, table = _create_table( - client, data_files, table_name, schema, import_target_table=None, - description=expect_comment - ) - - assert response.status_code == 201 - assert Table.objects.count() == num_tables + 1 - assert len(table.sa_columns) == 1 # only the internal `id` column - assert len(table.get_records()) == 0 - assert table.description == expect_comment - assert response_table['description'] == expect_comment - check_table_response(response_table, table, expt_name) - - -def test_table_create_name_taken(client, paste_data_file, schema, create_patents_table, schema_name): - create_patents_table('Table 2', schema_name=schema_name) - create_patents_table('Table 3', schema_name=schema_name) - expt_name = 'Table 4' - - first_row = (1, 'NASA Kennedy Space Center', 'Application', 'KSC-12871', '0', - '13/033,085', 'Polyimide Wire Insulation Repair System', None) - column_names = ['Center', 'Status', 'Case Number', 'Patent Number', - 'Application SN', 'Title', 'Patent Expiration Date'] - - check_create_table_response( - client, '', expt_name, paste_data_file, schema, first_row, column_names, import_target_table=None - ) - - -def test_table_create_base_name_taken(client, data_file, schema, create_patents_table, schema_name): - create_patents_table('patents', schema_name=schema_name) - create_patents_table('patents 1', schema_name=schema_name) - expt_name = 'patents 2' - - first_row = (1, 'NASA Kennedy Space Center', 'Application', 'KSC-12871', '0', - '13/033,085', 'Polyimide Wire Insulation Repair System', None) - column_names = ['Center', 'Status', 'Case Number', 'Patent Number', - 'Application SN', 'Title', 'Patent Expiration Date'] - - check_create_table_response( - client, '', expt_name, data_file, schema, first_row, column_names, import_target_table=None - ) - - -def test_table_create_base_name_too_long(client, data_file, schema): - data_file.base_name = '0' * 100 - data_file.save() - expt_name = 'Table 0' - - first_row = (1, 'NASA Kennedy Space Center', 'Application', 'KSC-12871', '0', - '13/033,085', 'Polyimide Wire Insulation Repair System', None) - column_names = ['Center', 'Status', 'Case Number', 'Patent Number', - 'Application SN', 'Title', 'Patent Expiration Date'] - - check_create_table_response( - client, '', expt_name, data_file, schema, first_row, column_names, import_target_table=None 
- ) - - -get_encoding_test_list = [ - ('mathesar/tests/data/non_unicode_files/cp1250.csv', 'cp1250', (1, '2', - '1.7 Cubic Foot Compact "Cube" Office Refrigerators', - 'Barry French', - '293', '457.81', '208.16', '68.02', 'Nunavut', - 'Appliances', '0.58'), - ['1', 'Eldon Base for stackable storage shelf, platinum', 'Muhammed MacIntyre', - '3', '-213.25', '38.94', '35', 'Nunavut', 'Storage & Organization', '0.8']), - - ('mathesar/tests/data/non_unicode_files/utf_16_le.csv', 'utf_16_le', (1, 'Troy', '2004', 'English'), - ['Title', 'Year', 'Language']), -] - - -@pytest.mark.parametrize('non_unicode_file_path, filename, first_row, column_names', get_encoding_test_list) -def test_table_create_non_unicode(client, non_unicode_file_path, filename, first_row, column_names, - schema, create_data_file): - expt_name = filename - non_unicode_datafile = create_data_file(non_unicode_file_path, filename) - check_create_table_response( - client, '', expt_name, non_unicode_datafile, schema, first_row, column_names, import_target_table=None - ) - - -@pytest.mark.skip(reason="msar.add_mathesar_table no longer raises an exception if a table with the same name already exists in the database.") -def test_table_create_with_same_name(client, schema): - table_name = 'test_table_duplicate' - body = { - 'name': table_name, - 'schema': schema.id, - } - client.post('/api/db/v0/tables/', body) - response = client.post('/api/db/v0/tables/', body) - response_error = response.json() - assert response.status_code == 400 - assert response_error[0]['code'] == ErrorCodes.DuplicateTableError.value - assert response_error[0]['message'] == f'Relation {table_name} already exists in schema {schema.id}' - - -def test_table_create_with_too_long_name(client, schema): - very_long_string = ''.join(map(str, range(50))) - table_name = 'very_long_identifier_' + very_long_string - body = { - 'name': table_name, - 'schema': schema.id, - } - response = client.post('/api/db/v0/tables/', body) - assert response.status_code == 400 - assert response.json()[0]['code'] == ErrorCodes.IdentifierTooLong.value - - -def test_table_create_with_existing_id_col(client, existing_id_col_table_datafile, schema, engine): - table_name = "Table 1" - response, response_table, table = _create_table( - client, - [existing_id_col_table_datafile], - table_name, - schema, - import_target_table=None - ) - columns_name_id_map = table.get_column_name_id_bidirectional_map() - data = { - columns_name_id_map['Title']: 'Polyimide Wire Insulation Repair System', - } - response = client.post(f'/api/db/v0/tables/{table.id}/records/', data=data) - assert response.status_code == 201 - - -def test_table_create_multiple_users_different_roles(client_bob, client_alice, user_bob, user_alice, schema): - table_name = 'test_table' - body = { - 'name': table_name, - 'schema': schema.id, - } - - response = client_bob.post('/api/db/v0/tables/', body) - assert response.status_code == 400 - DatabaseRole.objects.create(database=schema.database, user=user_bob, role='manager') - response = client_bob.post('/api/db/v0/tables/', body) - assert response.status_code == 201 - - # Create different table by a different user - body['name'] = 'test_table_1' - response = client_alice.post('/api/db/v0/tables/', body) - assert response.status_code == 400 - alice_schema_role = SchemaRole.objects.create(schema=schema, user=user_alice, role='viewer') - response = client_alice.post('/api/db/v0/tables/', body) - assert response.status_code == 400 - alice_schema_role.delete() - alice_schema_role = 
SchemaRole.objects.create(schema=schema, user=user_alice, role='manager') - response = client_alice.post('/api/db/v0/tables/', body) - assert response.status_code == 201 - alice_schema_role.delete() - response = client_alice.post('/api/db/v0/tables/', body) - assert response.status_code == 400 - - -@pytest.mark.parametrize('client_name, expected_status_code', write_clients_with_status_code) -def test_table_create(schema, request, client_name, expected_status_code): - table_name = 'test_table' - body = { - 'name': table_name, - 'schema': schema.id, - } - client = request.getfixturevalue(client_name)(schema) - response = client.post('/api/db/v0/tables/', body) - assert response.status_code == expected_status_code - - -def test_table_partial_update_by_superuser(create_patents_table, client): - table_name = 'NASA Table Partial Update' - new_table_name = 'NASA Table Partial Update New' - table = create_patents_table(table_name) - - expect_comment = 'a super new test comment' - body = {'name': new_table_name, 'description': expect_comment} - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - - response_table = response.json() - assert response.status_code == 200 - assert response_table - assert response_table['description'] == expect_comment - check_table_response(response_table, table, new_table_name) - - table = Table.objects.get(oid=table.oid) - assert table.name == new_table_name - - -def test_table_partial_update_import_verified(create_patents_table, client): - table_name = 'NASA Table Import Verify' - table = create_patents_table(table_name) - - body = {'import_verified': True} - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - - response_table = response.json() - assert response.status_code == 200 - assert response_table['import_verified'] is True - - -def test_table_partial_update_schema(create_patents_table, client): - table_name = 'NASA Table Schema PATCH' - table = create_patents_table(table_name) - - body = {'schema': table.schema.id} - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - - response_error = response.json()[0] - assert response.status_code == 400 - assert response_error['message'] == 'Updating schema for tables is not supported.' 
- assert response_error['code'] == ErrorCodes.UnsupportedAlter.value - - -@pytest.mark.parametrize('client_name, expected_status_code', update_client_with_status_code) -def test_table_partial_update_by_different_roles(create_patents_table, request, client_name, expected_status_code): - table_name = 'NASA Table Partial Update' - new_table_name = 'NASA Table Partial Update New' - table = create_patents_table(table_name) - - # Editors and Viewers only have access to confirmed tables - table.import_verified = True - table.save() - - client = request.getfixturevalue(client_name)(table.schema) - expect_comment = 'a super new test comment' - body = {'name': new_table_name, 'description': expect_comment} - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - assert response.status_code == expected_status_code - - -def test_table_delete(create_patents_table, client): - table_name = 'NASA Table Delete' - table = create_patents_table(table_name) - table_count = len(Table.objects.all()) - - response = client.delete(f'/api/db/v0/tables/{table.id}/') - assert response.status_code == 204 - - # Ensure the Django model was deleted - new_table_count = len(Table.objects.all()) - assert table_count - 1 == new_table_count - assert Table.objects.filter(id=table.id).exists() is False - - -delete_clients_with_status_codes = [ - ('superuser_client_factory', 204, 204), - ('db_manager_client_factory', 204, 204), - ('db_editor_client_factory', 403, 403), - ('schema_manager_client_factory', 204, 404), - ('schema_viewer_client_factory', 403, 404), - ('db_viewer_schema_manager_client_factory', 204, 403) -] - - -@pytest.mark.parametrize('client_name, expected_status_code, different_schema_expected_status_code', delete_clients_with_status_codes) -def test_table_delete_by_different_roles( - create_patents_table, - request, - client_name, - expected_status_code, - different_schema_expected_status_code, -): - different_schema_table = create_patents_table('Private Table', schema_name='Private Schema') - table_name = 'NASA Table Delete' - table = create_patents_table(table_name) - - # Editors and Viewers only have access to confirmed tables - different_schema_table.import_verified = True - table.import_verified = True - different_schema_table.save() - table.save() - - client = request.getfixturevalue(client_name)(table.schema) - response = client.delete(f'/api/db/v0/tables/{table.id}/') - assert response.status_code == expected_status_code - response = client.delete(f'/api/db/v0/tables/{different_schema_table.id}/') - assert response.status_code == different_schema_expected_status_code - - -def test_table_dependencies(client, create_patents_table): - table_name = 'NASA Table Dependencies' - table = create_patents_table(table_name) - - response = client.get(f'/api/db/v0/tables/{table.id}/') - response_table = response.json() - assert response.status_code == 200 - assert response_table['has_dependents'] is True - - -def test_table_404(client): - response = client.get('/api/db/v0/tables/3000/') - assert response.status_code == 404 - assert response.json()[0]['message'] == 'Not found.' - assert response.json()[0]['code'] == ErrorCodes.NotFound.value - - -def test_table_type_suggestion_404(client): - response = client.get('/api/db/v0/tables/3000/type_suggestions/') - assert response.status_code == 404 - assert response.json()[0]['message'] == 'Not found.' 
- assert response.json()[0]['code'] == ErrorCodes.NotFound.value - - -def test_table_create_from_datafile_404(client): - body = { - 'data_files': [-999], - 'name': 'test_table', - 'schema': -999, - } - response = client.post('/api/db/v0/tables/', body) - response_table = response.json() - assert response.status_code == 400 - assert 'object does not exist' in response_table[0]['message'] - assert response_table[0]['field'] == 'schema' - assert 'object does not exist' in response_table[1]['message'] - assert response_table[1]['field'] == 'data_files' - - -def test_table_create_from_multiple_datafile(client, data_file, schema): - body = { - 'data_files': [data_file.id, data_file.id], - 'name': 'test_table', - 'schema': schema.id, - } - response = client.post('/api/db/v0/tables/', body) - response_table = response.json() - assert response.status_code == 400 - assert response_table[0]['message'] == 'Multiple data files are unsupported.' - assert response_table[0]['field'] == 'data_files' - - -def test_table_partial_update_invalid_field(create_patents_table, client): - table_name = 'NASA Table Partial Update' - table = create_patents_table(table_name) - - body = {'schema': table.schema.id} - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - - assert response.status_code == 400 - assert 'is not supported' in response.json()[0]['message'] - - -def test_table_partial_update_404(client): - response = client.patch('/api/db/v0/tables/3000/', {}) - assert response.status_code == 404 - assert response.json()[0]['message'] == 'Not found.' - assert response.json()[0]['code'] == ErrorCodes.NotFound.value - - -def test_table_delete_404(client): - response = client.delete('/api/db/v0/tables/3000/') - assert response.status_code == 404 - assert response.json()[0]['message'] == 'Not found.' - assert response.json()[0]['code'] == ErrorCodes.NotFound.value - - -def test_table_update(client, create_patents_table): - table = create_patents_table('update_table_test') - response = client.put(f'/api/db/v0/tables/{table.id}/') - assert response.status_code == 405 - assert response.json()[0]['message'] == 'Method "PUT" not allowed.' 
-    assert response.json()[0]['code'] == ErrorCodes.MethodNotAllowed.value
-
-
-def test_table_get_with_reflect_new(client, table_for_reflection):
-    _, table_name, _ = table_for_reflection
-    response = client.get('/api/db/v0/tables/')
-    # The table number should only change after the GET request
-    response_data = response.json()
-    actual_created = [
-        table for table in response_data['results'] if table['name'] == table_name
-    ]
-    assert len(actual_created) == 1
-    created_table = actual_created[0]
-    assert created_table['name'] == table_name
-    created_columns = created_table['columns']
-    check_columns_response(created_columns, [
-        {'name': 'id', 'type': PostgresType.INTEGER.id, 'type_options': None, 'display_options': None},
-        {'name': 'name', 'type': PostgresType.CHARACTER_VARYING.id, 'type_options': None, 'display_options': None}
-    ])
-
-
-def check_columns_response(created_columns, expected_response):
-    # IDs are auto-incrementing and vary depending on previous test cases, so it's better to remove them before comparing
-    created_columns_id = []
-    for created_column in created_columns:
-        created_columns_id.append(created_column.pop('id'))
-    assert len(created_columns_id) == len(expected_response)
-    assert created_columns == expected_response
-
-
-def test_table_get_with_reflect_column_change(client, table_for_reflection):
-    schema_name, table_name, engine = table_for_reflection
-    response = client.get('/api/db/v0/tables/')
-    response_data = response.json()
-    orig_created = [
-        table for table in response_data['results'] if table['name'] == table_name
-    ]
-    orig_id = orig_created[0]['id']
-    new_column_name = 'new_name'
-    with engine.begin() as conn:
-        conn.execute(
-            text(f'ALTER TABLE {schema_name}.{table_name} RENAME COLUMN name TO {new_column_name};')
-        )
-    cache.clear()
-    response = client.get('/api/db/v0/tables/')
-    response_data = response.json()
-    altered_table = [
-        table for table in response_data['results'] if table['name'] == table_name
-    ][0]
-    new_columns = altered_table['columns']
-    assert altered_table['id'] == orig_id
-    check_columns_response(new_columns, [
-        {'name': 'id', 'type': PostgresType.INTEGER.id, 'type_options': None, 'display_options': None},
-        {'name': new_column_name, 'type': PostgresType.CHARACTER_VARYING.id, 'type_options': None, 'display_options': None}
-    ])
-
-
-def test_table_get_with_reflect_name_change(client, table_for_reflection):
-    schema_name, table_name, engine = table_for_reflection
-    response = client.get('/api/db/v0/tables/')
-    response_data = response.json()
-    orig_created = [
-        table for table in response_data['results'] if table['name'] == table_name
-    ]
-    orig_id = orig_created[0]['id']
-    new_table_name = 'super_new_table_name'
-    with engine.begin() as conn:
-        conn.execute(
-            text(
-                f'ALTER TABLE {schema_name}.{table_name} RENAME TO {new_table_name};'
-            )
-        )
-    cache.clear()
-    response = client.get('/api/db/v0/tables/')
-    response_data = response.json()
-    orig_created_2 = [
-        table for table in response_data['results'] if table['name'] == table_name
-    ]
-    assert len(orig_created_2) == 0
-    modified = [
-        table for table in response_data['results'] if table['name'] == new_table_name
-    ]
-    modified_id = modified[0]['id']
-    assert len(modified) == 1
-    assert orig_id == modified_id
-
-
-def test_table_get_with_reflect_delete(client, table_for_reflection):
-    schema_name, table_name, engine = table_for_reflection
-    response = client.get('/api/db/v0/tables/')
-    response_data = response.json()
-    orig_created = [
-        table for table in response_data['results'] 
if table['name'] == table_name - ] - assert len(orig_created) == 1 - with engine.begin() as conn: - conn.execute(text(f'DROP TABLE {schema_name}.{table_name};')) - reset_reflection() - response = client.get('/api/db/v0/tables/') - response_data = response.json() - new_created = [ - table for table in response_data['results'] if table['name'] == table_name - ] - assert len(new_created) == 0 - - -def _get_patents_column_data(table): - column_data = [{ - 'name': 'id', - 'type': PostgresType.INTEGER.id, - }, { - 'name': 'Center', - 'type': PostgresType.TEXT.id, - }, { - 'name': 'Status', - 'type': PostgresType.TEXT.id, - }, { - 'name': 'Case Number', - 'type': PostgresType.TEXT.id, - }, { - 'name': 'Patent Number', - 'type': PostgresType.TEXT.id, - }, { - 'name': 'Application SN', - 'type': PostgresType.TEXT.id, - }, { - 'name': 'Title', - 'type': PostgresType.TEXT.id, - }, { - 'name': 'Patent Expiration Date', - 'type': PostgresType.TEXT.id, - }] - bidirectmap = table.get_column_name_id_bidirectional_map() - for data in column_data: - name = data['name'] - data['id'] = bidirectmap[name] - return column_data - - -def test_table_patch_invalid_table_name(create_patents_table, client): - table_name = 'NASA Table' - table = create_patents_table(table_name) - # Having round brackets in the table name is invalid. - invalid_table_name = 'NASA Table(alpha)' - - body = {'name': invalid_table_name} - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - - response_data = response.json()[0] - assert response.status_code == 400 - assert response_data['code'] == ErrorCodes.InvalidTableName.value - assert response_data['message'] == f'Table name "{invalid_table_name}" is invalid.' - assert response_data['field'] == 'name' - - -def test_table_patch_same_table_name(create_patents_table, client): - table_name = 'PATCH same name' - table = create_patents_table(table_name) - - body = {'name': table_name} - # Need to specify format here because otherwise the body gets sent - # as a multi-part form, which can't handle nested keys. - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - - assert response.status_code == 200 - assert response.json()['name'] == table_name - - -def test_table_patch_columns_and_table_name(create_patents_table, client): - table_name = 'PATCH columns 1' - table = create_patents_table(table_name) - - body = { - 'name': 'PATCH COLUMNS 1', - 'columns': _get_patents_column_data(table) - } - # Need to specify format here because otherwise the body gets sent - # as a multi-part form, which can't handle nested keys. 
- response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - - response_json = response.json() - assert response.status_code == 200 - assert response_json['name'] == 'PATCH COLUMNS 1' - - -def test_table_patch_columns_no_changes(create_patents_table, client): - table_name = 'PATCH columns 2' - table = create_patents_table(table_name) - column_data = _get_patents_column_data(table) - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - - assert response.status_code == 200 - _check_columns(response_json['columns'], column_data) - - -def test_table_patch_columns_one_name_change(create_patents_table, client): - table_name = 'PATCH columns 3' - table = create_patents_table(table_name) - column_data = _get_patents_column_data(table) - column_data[1]['name'] = 'NASA Center' - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - - assert response.status_code == 200 - _check_columns(response_json['columns'], column_data) - - -def test_table_patch_columns_two_name_changes(create_patents_table, client): - table_name = 'PATCH columns 4' - table = create_patents_table(table_name) - column_data = _get_patents_column_data(table) - column_data[1]['name'] = 'NASA Center' - column_data[2]['name'] = 'Patent Status' - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - - assert response.status_code == 200 - _check_columns(response_json['columns'], column_data) - - -def test_table_patch_columns_one_type_change(create_patents_table, client): - table_name = 'PATCH columns 5' - table = create_patents_table(table_name) - column_data = _get_patents_column_data(table) - column_data[7]['type'] = PostgresType.DATE.id - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - - assert response.status_code == 200 - _check_columns(response_json['columns'], column_data) - - -def _get_data_types_column_data(table): - column_data = [{ - 'name': 'id', - }, { - 'name': 'Integer', - }, { - 'name': 'Boolean', - }, { - 'name': 'Text', - }, { - 'name': 'Decimal', - }] - bidirectmap = table.get_column_name_id_bidirectional_map() - for data in column_data: - name = data['name'] - data['id'] = bidirectmap[name] - return column_data - - -def test_table_patch_columns_multiple_type_change(create_data_types_table, client): - table_name = 'PATCH columns 6' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data[1]['type'] = PostgresType.INTEGER.id - column_data[2]['type'] = PostgresType.BOOLEAN.id - column_data[4]['type'] = PostgresType.NUMERIC.id - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - assert response.status_code == 200 - _check_columns(response_json['columns'], column_data) - - -def test_table_patch_columns_one_drop(create_data_types_table, client): - table_name = 'PATCH columns 7' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data.pop(1) - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - - assert response.status_code == 200 - _check_columns(response_json['columns'], 
column_data) - - -def test_table_patch_columns_multiple_drop(create_data_types_table, client): - table_name = 'PATCH columns 8' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data.pop(1) - column_data.pop(1) - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - - assert response.status_code == 200 - _check_columns(response_json['columns'], column_data) - - -def test_table_patch_columns_diff_name_type_change(create_data_types_table, client): - table_name = 'PATCH columns 9' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data[1]['type'] = PostgresType.INTEGER.id - column_data[2]['name'] = 'Checkbox' - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - - assert response.status_code == 200 - _check_columns(response_json['columns'], column_data) - - -def test_table_patch_columns_same_name_type_change(create_data_types_table, client): - table_name = 'PATCH columns 10' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data[2]['type'] = PostgresType.BOOLEAN.id - column_data[2]['name'] = 'Checkbox' - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - - assert response.status_code == 200 - _check_columns(response_json['columns'], column_data) - - -def test_table_patch_columns_multiple_name_type_change(create_data_types_table, client): - table_name = 'PATCH columns 11' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data[1]['type'] = PostgresType.INTEGER.id - column_data[1]['name'] = 'Int.' 
- column_data[2]['type'] = PostgresType.BOOLEAN.id - column_data[2]['name'] = 'Checkbox' - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - - assert response.status_code == 200 - _check_columns(response_json['columns'], column_data) - - -def test_table_patch_columns_diff_name_type_drop(create_data_types_table, client): - table_name = 'PATCH columns 12' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data[1]['type'] = PostgresType.INTEGER.id - column_data[2]['name'] = 'Checkbox' - column_data.pop(3) - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - - assert response.status_code == 200 - _check_columns(response_json['columns'], column_data) - - -def test_table_patch_columns_display_options(create_data_types_table, client): - table_name = 'patch_cols_one' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - display_options = {"use_grouping": "false"} - column_data[0]['display_options'] = display_options - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - actual_id_col = [c for c in response.json()['columns'] if c['name'] == 'id'][0] - - assert response.status_code == 200 - actual_display_options = actual_id_col['display_options'] - for k in display_options: - assert actual_display_options[k] == display_options[k] - - -def test_table_patch_columns_invalid_display_options(create_data_types_table, client): - table_name = 'patch_cols_two' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - # despite its name, the last column is of type text - display_options = {"use_grouping": "false"} - - column_data[-1]['display_options'] = display_options - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - actual_col = [c for c in response.json()['columns'] if c['name'] == 'Decimal'][0] - - assert response.status_code == 200 - assert actual_col['display_options'] == {} - - -def test_table_patch_columns_type_plus_display_options(create_data_types_table, client): - table_name = 'patch_cols_three' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - # despite its name, the last column is of type text - display_options = {"use_grouping": "false"} - column_data[-1].update( - {'type': PostgresType.NUMERIC.id, 'display_options': display_options} - ) - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - actual_col = [c for c in response.json()['columns'] if c['name'] == 'Decimal'][0] - - assert response.status_code == 200 - assert actual_col['type'] == PostgresType.NUMERIC.id - for k, v in display_options.items(): - assert actual_col['display_options'][k] == v - - -def test_table_patch_columns_same_name_type_drop(create_data_types_table, client): - table_name = 'PATCH columns 13' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data[1] = {'id': column_data[1]['id']} - column_data[2]['type'] = PostgresType.BOOLEAN.id - column_data[2]['name'] = 'Checkbox' - column_data.pop(3) - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - - 
assert response.status_code == 200 - _check_columns(response_json['columns'], column_data) - - -def test_table_patch_columns_invalid_type(create_data_types_table, client): - table_name = 'PATCH columns 14' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data[3]['type'] = PostgresType.BOOLEAN.id - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - - assert response.status_code == 400 - assert response_json[0]['message'] == "Invalid type cast requested." - - -def test_table_patch_columns_invalid_type_with_name(create_data_types_table, client): - table_name = 'PATCH columns 15' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data[1]['name'] = 'hello' - column_data[3]['type'] = PostgresType.BOOLEAN.id - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - assert response.status_code == 400 - assert response_json[0]['message'] == "Invalid type cast requested." - - current_table_response = client.get(f'/api/db/v0/tables/{table.id}/') - # The table should not have changed - original_column_data = _get_data_types_column_data(table) - _check_columns(current_table_response.json()['columns'], original_column_data) - - -def test_table_patch_columns_invalid_type_with_type(create_data_types_table, client): - table_name = 'PATCH columns 16' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data[1]['type'] = PostgresType.INTEGER.id - column_data[3]['type'] = PostgresType.BOOLEAN.id - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - assert response.status_code == 400 - assert response_json[0]['message'] == "Invalid type cast requested." - - current_table_response = client.get(f'/api/db/v0/tables/{table.id}/') - # The table should not have changed - original_column_data = _get_data_types_column_data(table) - _check_columns(current_table_response.json()['columns'], original_column_data) - - -def test_table_patch_columns_invalid_type_with_drop(create_data_types_table, client): - table_name = 'PATCH columns 17' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data[1] = {'id': column_data[1]['id']} - column_data[3]['type'] = PostgresType.BOOLEAN.id - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - assert response.status_code == 400 - assert response_json[0]['message'] == "Invalid type cast requested." 
- - current_table_response = client.get(f'/api/db/v0/tables/{table.id}/') - # The table should not have changed - original_column_data = _get_data_types_column_data(table) - _check_columns(current_table_response.json()['columns'], original_column_data) - - -def test_table_patch_columns_invalid_type_with_multiple_changes(create_data_types_table, client): - table_name = 'PATCH columns 18' - table = create_data_types_table(table_name) - column_data = _get_data_types_column_data(table) - column_data[1] = {'id': column_data[1]['id']} - column_data[2]['name'] = 'Checkbox' - column_data[2]['type'] = PostgresType.BOOLEAN.id - column_data[3]['type'] = PostgresType.BOOLEAN.id - - body = { - 'columns': column_data - } - response = client.patch(f'/api/db/v0/tables/{table.id}/', body) - response_json = response.json() - assert response.status_code == 400 - assert response_json[0]['message'] == "Invalid type cast requested." - - current_table_response = client.get(f'/api/db/v0/tables/{table.id}/') - # The table should not have changed - original_column_data = _get_data_types_column_data(table) - _check_columns(current_table_response.json()['columns'], original_column_data) - - -def test_table_extract_columns_retain_original_table(create_patents_table, client): - table_name = 'Patents' - table = create_patents_table(table_name) - column_name_id_map = table.get_column_name_id_bidirectional_map() - existing_columns = table.columns.all() - existing_columns = [existing_column.name for existing_column in existing_columns] - column_names_to_extract = ['Patent Number', 'Title', 'Patent Expiration Date'] - column_ids_to_extract = [column_name_id_map[name] for name in column_names_to_extract] - - extract_table_name = "Patent Info" - split_data = { - 'extract_columns': column_ids_to_extract, - 'extracted_table_name': extract_table_name, - } - current_table_response = client.post(f'/api/db/v0/tables/{table.id}/split_table/', data=split_data) - assert current_table_response.status_code == 201 - response_data = current_table_response.json() - extracted_table_id = response_data['extracted_table'] - extracted_table = Table.objects.get(id=extracted_table_id) - assert extract_table_name == extracted_table.name - remainder_table_id = response_data['remainder_table'] - remainder_table = Table.objects.get(id=remainder_table_id) - assert Table.objects.filter(id=table.id).count() == 1 - extracted_columns = extracted_table.columns.all().order_by('attnum') - extracted_column_names = [extracted_column.name for extracted_column in extracted_columns] - expected_extracted_column_names = ['id'] + column_names_to_extract - assert expected_extracted_column_names == extracted_column_names - - remainder_columns = remainder_table.columns.all().order_by('attnum') - remainder_column_names = [remainder_column.name for remainder_column in remainder_columns] - expected_remainder_columns = (set(existing_columns) - set(column_names_to_extract)) | {'Patent Info_id'} - assert set(expected_remainder_columns) == set(remainder_column_names) - - -def test_table_extract_columns_drop_original_table(create_patents_table, client): - table_name = 'Patents' - table = create_patents_table(table_name) - column_name_id_map = table.get_column_name_id_bidirectional_map() - column_names_to_extract = ['Patent Number', 'Title', 'Patent Expiration Date'] - column_ids_to_extract = [column_name_id_map[name] for name in column_names_to_extract] - existing_columns = table.columns.all().order_by('attnum') - existing_columns = [existing_column.name for existing_column in 
existing_columns] - remainder_column_names = (set(existing_columns) - set(column_names_to_extract)) - - extract_table_name = "Patent Info" - split_data = { - 'extract_columns': column_ids_to_extract, - 'extracted_table_name': extract_table_name, - } - current_table_response = client.post(f'/api/db/v0/tables/{table.id}/split_table/', data=split_data) - assert current_table_response.status_code == 201 - response_data = current_table_response.json() - extracted_table_id = response_data['extracted_table'] - extracted_table = Table.objects.get(id=extracted_table_id) - remainder_table_id = response_data['remainder_table'] - remainder_table = Table.objects.get(id=remainder_table_id) - - remainder_columns = remainder_table.columns.all() - remainder_columns_map = {column.name: column for column in remainder_columns} - metadata = get_empty_metadata() - columns_with_attnum = get_column_attnum_from_names_as_map(remainder_table.oid, remainder_column_names, remainder_table._sa_engine, metadata=metadata) - for remainder_column_name in remainder_column_names: - remainder_column = remainder_columns_map[remainder_column_name] - assert remainder_column.attnum == columns_with_attnum[remainder_column.name] - assert remainder_column.id == column_name_id_map[remainder_column.name] - - extracted_columns = extracted_table.columns.all() - columns_with_attnum = get_column_attnum_from_names_as_map(extracted_table.oid, column_names_to_extract, extracted_table._sa_engine, metadata=metadata) - for extracted_column in extracted_columns: - if extracted_column.name != 'id': - assert extracted_column.attnum == columns_with_attnum[extracted_column.name] - assert extracted_column.id == column_name_id_map[extracted_column.name] - - -def test_table_extract_columns_specify_fk_column_name(create_patents_table, client): - table_name = 'Patents' - table = create_patents_table(table_name) - column_name_id_map = table.get_column_name_id_bidirectional_map() - column_names_to_extract = ['Patent Number', 'Title', 'Patent Expiration Date'] - column_ids_to_extract = [column_name_id_map[name] for name in column_names_to_extract] - relationship_fk_column_name = "Patent Number" - extract_table_name = "Patent Info" - split_data = { - 'extract_columns': column_ids_to_extract, - 'extracted_table_name': extract_table_name, - 'relationship_fk_column_name': relationship_fk_column_name - } - current_table_response = client.post(f'/api/db/v0/tables/{table.id}/split_table/', data=split_data) - assert current_table_response.status_code == 201 - response_data = current_table_response.json() - remainder_table_id = response_data['remainder_table'] - fk_column = response_data['fk_column'] - remainder_table = Table.objects.get(id=remainder_table_id) - metadata = get_empty_metadata() - relationship_fk_column_attnum = get_column_attnum_from_name(remainder_table.oid, relationship_fk_column_name, remainder_table._sa_engine, metadata=metadata) - assert relationship_fk_column_attnum is not None - assert fk_column == Column.objects.get(table_id=remainder_table_id, attnum=relationship_fk_column_attnum).id - - -def test_table_extract_columns_with_display_options(create_patents_table, client): - table_name = 'Patents' - table = create_patents_table(table_name) - column_name_id_map = table.get_column_name_id_bidirectional_map() - column_names_to_extract = ['Patent Number', 'Title', 'Patent Expiration Date'] - column_ids_to_extract = [column_name_id_map[name] for name in column_names_to_extract] - column_name_with_display_options = column_names_to_extract[0] - 
column_id_with_display_options = column_name_id_map[column_name_with_display_options] - - column_display_options = {'show_as_percentage': True, 'number_format': 'english'} - column_with_display_options = Column.objects.get(id=column_id_with_display_options) - column_with_display_options.display_options = column_display_options - column_with_display_options.save() - - extract_table_name = "Patent Info" - split_data = { - 'extract_columns': column_ids_to_extract, - 'extracted_table_name': extract_table_name, - } - current_table_response = client.post(f'/api/db/v0/tables/{table.id}/split_table/', data=split_data) - assert current_table_response.status_code == 201 - response_data = current_table_response.json() - extracted_table_id = response_data['extracted_table'] - extracted_table = Table.objects.get(id=extracted_table_id) - extracted_column_id = extracted_table.get_column_name_id_bidirectional_map()[column_name_with_display_options] - extracted_column = Column.objects.get(id=extracted_column_id) - assert extracted_column.id == extracted_column_id - assert extracted_column.display_options == column_with_display_options.display_options - - -def test_table_move_columns_after_extracting(create_patents_table, client): - table_name = 'Patents' - table = create_patents_table(table_name) - column_name_id_map = table.get_column_name_id_bidirectional_map() - column_names_to_extract = ['Title', 'Patent Expiration Date'] - column_ids_to_extract = [column_name_id_map[name] for name in column_names_to_extract] - - extract_table_name = "Patent Info" - split_data = { - 'extract_columns': column_ids_to_extract, - 'extracted_table_name': extract_table_name, - } - current_table_response = client.post(f'/api/db/v0/tables/{table.id}/split_table/', data=split_data) - assert current_table_response.status_code == 201 - remainder_table_id = current_table_response.json()['remainder_table'] - extracted_table_id = current_table_response.json()['extracted_table'] - column_names_to_move = ['Patent Number'] - column_ids_to_move = [column_name_id_map[name] for name in column_names_to_move] - column_display_options = {'show_as_percentage': True, 'number_format': 'english'} - column_name_with_display_options = column_names_to_move[0] - column_id_with_display_options = column_name_id_map[column_name_with_display_options] - column_with_display_options = Column.objects.get(id=column_id_with_display_options) - column_with_display_options.display_options = column_display_options - column_with_display_options.save() - move_data = { - 'move_columns': column_ids_to_move, - 'target_table': extracted_table_id, - } - current_table_response = client.post(f'/api/db/v0/tables/{remainder_table_id}/move_columns/', data=move_data) - assert current_table_response.status_code == 201 - extracted_table = Table.objects.get(id=extracted_table_id) - extracted_column_id = extracted_table.get_column_name_id_bidirectional_map()[column_name_with_display_options] - extracted_column = Column.objects.get(id=extracted_column_id) - assert extracted_column.id == extracted_column_id - assert extracted_column.display_options == column_with_display_options.display_options - - -split_table_client_with_different_roles = [ - ('superuser_client_factory', 201), - ('db_manager_client_factory', 201), - ('db_editor_client_factory', 403), - ('schema_manager_client_factory', 201), - ('schema_viewer_client_factory', 403), - ('db_viewer_schema_manager_client_factory', 201) -] - - -@pytest.mark.parametrize('client_name, expected_status_code', 
split_table_client_with_different_roles) -def test_table_extract_columns_by_different_roles(create_patents_table, request, client_name, expected_status_code): - table_name = 'Patents' - table = create_patents_table(table_name) - - # Editors and Viewers only have access to confirmed tables - table.import_verified = True - table.save() - - column_name_id_map = table.get_column_name_id_bidirectional_map() - column_names_to_extract = ['Patent Number', 'Title', 'Patent Expiration Date'] - column_ids_to_extract = [column_name_id_map[name] for name in column_names_to_extract] - - extract_table_name = "Patent Info" - split_data = { - 'extract_columns': column_ids_to_extract, - 'extracted_table_name': extract_table_name, - } - client = request.getfixturevalue(client_name)(table.schema) - current_table_response = client.post(f'/api/db/v0/tables/{table.id}/split_table/', data=split_data) - assert current_table_response.status_code == expected_status_code - - -def test_table_ui_dependency(client, create_patents_table, get_uid): - base_table = create_patents_table(table_name=get_uid()) - query_data = { - "name": get_uid(), - "base_table": base_table, - "initial_columns": [ - { - "id": 1, - "jp_path": [[1, 3], [4, 5]], - "alias": "alias_x", - }, - { - "id": 2, - "alias": "alias_y", - }, - ], - } - query = Exploration.objects.create(**query_data) - response = client.get(f'/api/db/v0/tables/{base_table.id}/ui_dependents/') - response_data = response.json() - expected_response = { - 'queries': [ - query.id - ] - } - assert response_data == expected_response - - -def test_create_table_and_normalize_json_data_file(client, missing_keys_json_data_file, schema): - table_name = 'Missing keys' - expt_name = get_expected_name(table_name, data_file=missing_keys_json_data_file) - first_row = (1, 'Matt', 'Murdock', 'Male', '["Stick", "Foggy"]', '{"street": "210", "city": "NY"}', None) - column_names = [ - "first_name", "last_name", "gender", "friends", "address", "email" - ] - - check_create_table_response( - client, table_name, expt_name, missing_keys_json_data_file, schema, first_row, - column_names, import_target_table=None - ) - - -def _create_json_datafile_using_max_level_param(json_filepath, max_level): - with open(json_filepath, "rb") as json_file: - data_file = DataFile.objects.create( - file=File(json_file), - created_from='file', - base_name='nested objects', - type='json', - max_level=max_level - ) - return data_file - - -def test_create_table_with_nested_json_objects(client, schema): - nested_json_objects_file_path = 'mathesar/tests/data/json_parsing/nested_objects.json' - test_datafile_objects = [ - _create_json_datafile_using_max_level_param(nested_json_objects_file_path, max_level) - for max_level in range(4) - ] - expected_data = [ - { - "first_row": ( - 1, "John Doe", "30", "john.doe@example.com", json.dumps({ - "name": "frontend", - "project": { - "name": "Project A", - "status": "In Progress", - "team": { - "lead": "John", - "members": ["Mary", "Mark"] - } - } - }) - ), - "column_names": ["name", "age", "email", "division"] - }, - { - "first_row": ( - 1, "John Doe", "30", "john.doe@example.com", "frontend", json.dumps({ - "name": "Project A", - "status": "In Progress", - "team": { - "lead": "John", - "members": ["Mary", "Mark"] - } - }) - ), - "column_names": ["name", "age", "email", "division.name", "division.project"] - }, - { - "first_row": ( - 1, "John Doe", "30", "john.doe@example.com", "frontend", "Project A", "In Progress", json.dumps({ - "lead": "John", - "members": ["Mary", "Mark"] - }) - 
), - "column_names": [ - "name", "age", "email", "division.name", "division.project.name", - "division.project.status", "division.project.team" - ] - }, - { - "first_row": ( - 1, "John Doe", "30", "john.doe@example.com", "frontend", "Project A", "In Progress", - "John", '["Mary", "Mark"]' - ), - "column_names": [ - "name", "age", "email", "division.name", "division.project.name", - "division.project.status", "division.project.team.lead", "division.project.team.members" - ] - } - ] - - for index, datafile in enumerate(test_datafile_objects): - table_name = f'Table {index}' - check_create_table_response( - client, table_name, table_name, datafile, schema, expected_data[index]["first_row"], - expected_data[index]["column_names"], import_target_table=None - ) - - -def test_create_table_using_excel_data_file(client, patents_excel_data_file, schema): - table_name = 'patents' - expt_name = get_expected_name(table_name, data_file=patents_excel_data_file) - first_row = ( - 1, - "NASA Kennedy Space Center", - "Application", - "KSC-12871", - "0", - "13/033,085", - "Polyimide Wire Insulation Repair System", - None, - ) - column_names = [ - "Center", - "Status", - "Case Number", - "Patent Number", - "Application SN", - "Title", - "Patent Expiration Date", - ] - - check_create_table_response( - client, table_name, expt_name, patents_excel_data_file, schema, first_row, - column_names, import_target_table=None - ) - - -def test_create_table_and_normalize_excel_data_file(client, misaligned_table_excel_data_file, schema): - table_name = 'misaligned_table' - expt_name = get_expected_name(table_name, data_file=misaligned_table_excel_data_file) - first_row = (1, 'John', '25', 'Male') - column_names = ["Name", "Age", "Gender"] - - check_create_table_response( - client, table_name, expt_name, misaligned_table_excel_data_file, schema, first_row, - column_names, import_target_table=None - ) - - -def test_create_table_using_duplicate_id_csv_data_file(client, duplicate_id_csv_data_file, schema): - table_name = 'duplicate_id' - expt_name = get_expected_name(table_name, data_file=duplicate_id_csv_data_file) - first_row = (1, '1', 'John', '25') - column_names = [ID, ID_ORIGINAL, "Name", "Age"] - - check_create_table_response( - client, table_name, expt_name, duplicate_id_csv_data_file, schema, first_row, - column_names, import_target_table=None - ) - - -def test_create_table_using_null_id_csv_data_file(client, null_id_csv_data_file, schema): - table_name = 'null_id' - expt_name = get_expected_name(table_name, data_file=null_id_csv_data_file) - first_row = (1, '1', 'John', '25') - column_names = [ID, ID_ORIGINAL, "Name", "Age"] - - check_create_table_response( - client, table_name, expt_name, null_id_csv_data_file, schema, first_row, - column_names, import_target_table=None - ) - - -def test_create_table_using_duplicate_id_json_data_file(client, duplicate_id_json_data_file, schema): - table_name = 'duplicate_id' - expt_name = get_expected_name(table_name, data_file=duplicate_id_json_data_file) - first_row = (1, '1', 'John', '25') - column_names = [ID, ID_ORIGINAL, "Name", "Age"] - - check_create_table_response( - client, table_name, expt_name, duplicate_id_json_data_file, schema, first_row, - column_names, import_target_table=None - ) - - -def test_create_table_using_null_id_json_data_file(client, null_id_json_data_file, schema): - table_name = 'null_id' - expt_name = get_expected_name(table_name, data_file=null_id_json_data_file) - first_row = (1, '1.0', 'John', '25') - column_names = [ID, ID_ORIGINAL, "Name", "Age"] - 
- check_create_table_response( - client, table_name, expt_name, null_id_json_data_file, schema, first_row, - column_names, import_target_table=None - ) - - -def test_create_table_using_duplicate_id_excel_data_file(client, duplicate_id_excel_data_file, schema): - table_name = 'duplicate_id' - expt_name = get_expected_name(table_name, data_file=duplicate_id_excel_data_file) - first_row = (1, '1', 'John', '25') - column_names = [ID, ID_ORIGINAL, "Name", "Age"] - - check_create_table_response( - client, table_name, expt_name, duplicate_id_excel_data_file, schema, first_row, - column_names, import_target_table=None - ) - - -def test_create_table_using_null_id_excel_data_file(client, null_id_excel_data_file, schema): - table_name = 'null_id' - expt_name = get_expected_name(table_name, data_file=null_id_excel_data_file) - first_row = (1, '1.0', 'John', '25') - column_names = [ID, ID_ORIGINAL, "Name", "Age"] - - check_create_table_response( - client, table_name, expt_name, null_id_excel_data_file, schema, first_row, - column_names, import_target_table=None - ) - - -def _create_excel_datafile_using_sheet_index_param(filepath, sheet_index): - with open(filepath, "rb") as file: - data_file = DataFile.objects.create( - file=File(file), - created_from='file', - base_name='multiple_sheets', - type='excel', - sheet_index=sheet_index - ) - return data_file - - -def test_create_table_with_multiple_sheets_excel_file(client, multiple_sheets_excel_filepath, schema): - column_names = ['Name', 'Age', 'Email'] - test_datafile_objects_with_sheet_index = [ - _create_excel_datafile_using_sheet_index_param(multiple_sheets_excel_filepath, sheet_index) - for sheet_index in range(3) - ] - expected_first_row_data = [ - (1, 'Jim', '25', 'jim@example.com'), - (1, 'John', '25', 'john@example.com'), - (1, 'Jake', '25', 'jake@example.com'), - ] - - for index, datafile in enumerate(test_datafile_objects_with_sheet_index): - table_name = f'Table {index}' - check_create_table_response( - client, table_name, table_name, datafile, schema, expected_first_row_data[index], - column_names, import_target_table=None - ) diff --git a/mathesar/tests/api/test_table_settings_api.py b/mathesar/tests/api/test_table_settings_api.py deleted file mode 100644 index 6f1f680a24..0000000000 --- a/mathesar/tests/api/test_table_settings_api.py +++ /dev/null @@ -1,151 +0,0 @@ -import pytest -from sqlalchemy import Column, Integer, MetaData -from sqlalchemy import Table as SATable - -from db.tables.operations.select import get_oid_from_table -from mathesar.models import deprecated as models_deprecated -from mathesar.api.exceptions.error_codes import ErrorCodes - - -@pytest.fixture -def schema_name(): - return 'table_tests' - - -@pytest.fixture -def schema(create_schema, schema_name): - return create_schema(schema_name) - - -@pytest.fixture -def column_test_table(patent_schema, engine): - column_list_in = [ - Column("mycolumn0", Integer, primary_key=True), - Column("mycolumn1", Integer, nullable=False), - ] - db_table = SATable( - "anewtable", - MetaData(bind=engine), - *column_list_in, - schema=patent_schema.name - ) - db_table.create() - db_table_oid = get_oid_from_table(db_table.name, db_table.schema, engine) - table = models_deprecated.Table.current_objects.create(oid=db_table_oid, schema=patent_schema) - return table - - -def test_create_non_empty_table_settings(client, schema, create_patents_table, schema_name): - table = create_patents_table('Table 2', schema_name=schema_name) - first_non_primary_column = table.columns.order_by('attnum')[1] - 
expected_preview_template = f'{{{first_non_primary_column.id}}}' - column_order = [1, 2, 3] - table.settings.column_order = [1, 2, 3] - table.settings.save() - response = client.get( - f"/api/db/v0/tables/{table.id}/settings/" - ) - response_data = response.json() - results = response_data['results'] - assert response.status_code == 200 - assert response_data['count'] == 1 - assert results[0]['preview_settings']['template'] == expected_preview_template - assert results[0]['preview_settings']['customized'] is False - assert results[0]['column_order'] == column_order - - -def test_create_empty_table_settings(client, schema, empty_nasa_table, schema_name): - table = empty_nasa_table - primary_key_column_id = table.get_column_name_id_bidirectional_map()['id'] - expected_preview_template = f'{{{primary_key_column_id}}}' - response = client.get( - f"/api/db/v0/tables/{table.id}/settings/" - ) - response_data = response.json() - results = response_data['results'] - assert response.status_code == 200 - assert response_data['count'] == 1 - assert results[0]['preview_settings']['template'] == expected_preview_template - assert results[0]['preview_settings']['customized'] is False - assert results[0]['column_order'] is None - - -update_clients_with_status_codes = [ - ('superuser_client_factory', 200), - ('db_manager_client_factory', 200), - ('db_editor_client_factory', 200), - ('schema_manager_client_factory', 200), - ('schema_viewer_client_factory', 403), - ('db_viewer_schema_manager_client_factory', 200) -] - - -@pytest.mark.parametrize('client_name,expected_status_code', update_clients_with_status_codes) -def test_update_table_settings_permission(create_patents_table, request, client_name, expected_status_code): - table_name = 'NASA Table' - table = create_patents_table(table_name) - settings_id = table.settings.id - client = request.getfixturevalue(client_name)(table.schema) - columns = models_deprecated.Column.objects.filter(table=table).values_list('id', flat=True) - preview_template = ','.join(f'{{{ column }}}' for column in columns) - data = { - "preview_settings": { - 'template': preview_template, - } - } - response = client.patch( - f"/api/db/v0/tables/{table.id}/settings/{settings_id}/", data - ) - assert response.status_code == expected_status_code - - -def test_update_table_settings(client, column_test_table): - columns = models_deprecated.Column.objects.filter(table=column_test_table).values_list('id', flat=True) - preview_template = ','.join(f'{{{ column }}}' for column in columns) - settings_id = column_test_table.settings.id - column_order = [4, 5, 6] - data = { - "preview_settings": { - 'template': preview_template, - }, - "column_order": column_order - } - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/settings/{settings_id}/", - data=data, - ) - assert response.status_code == 200 - response_data = response.json() - assert response_data['preview_settings']['template'] == preview_template - assert response_data['preview_settings']['customized'] is True - assert response_data['column_order'] == column_order - - -def test_update_table_settings_string_in_column_order(client, column_test_table): - column_order = ["4", "5", "6"] - column_order_as_ints = [4, 5, 6] - data = { - "column_order": column_order - } - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/settings/{column_test_table.settings.id}/", - data=data, - ) - assert response.status_code == 200 - response_data = response.json() - assert response_data['column_order'] == 
column_order_as_ints - - -def test_update_table_settings_negative_column_order(client, column_test_table): - column_order = [-4, 5, 6] - data = { - "column_order": column_order - } - response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/settings/{column_test_table.settings.id}/", - data=data, - ) - response_data = response.json()[0] - assert response.status_code == 400 - assert response_data['code'] == ErrorCodes.InvalidColumnOrder.value - assert response_data['message'] == 'Invalid column order.' diff --git a/mathesar/tests/api/test_ui_filters_api.py b/mathesar/tests/api/test_ui_filters_api.py deleted file mode 100644 index f49516d1d5..0000000000 --- a/mathesar/tests/api/test_ui_filters_api.py +++ /dev/null @@ -1,24 +0,0 @@ -from mathesar.models.deprecated import Connection -from mathesar.filters.base import get_available_filters -from mathesar.models.users import DatabaseRole - - -def test_filter_list(client, test_db_name): - database = Connection.objects.get(name=test_db_name) - - response = client.get(f'/api/ui/v0/connections/{database.id}/filters/') - response_data = response.json() - assert response.status_code == 200 - for available_filter in response_data: - assert all([key in available_filter for key in ['id', 'name', 'parameters']]) - assert len(response_data) == len(get_available_filters(database._sa_engine)) - - -def test_filter_list_permissions(FUN_create_dj_db, get_uid, client_bob, client_alice, user_bob, user_alice): - database = FUN_create_dj_db(get_uid()) - DatabaseRole.objects.create(user=user_bob, database=database, role='viewer') - response = client_bob.get(f'/api/ui/v0/connections/{database.id}/filters/') - assert response.status_code == 200 - - response = client_alice.get(f'/api/ui/v0/connections/{database.id}/filters/') - assert response.status_code == 404 diff --git a/mathesar/tests/api/test_ui_types_api.py b/mathesar/tests/api/test_ui_types_api.py deleted file mode 100644 index 79d8ab3c46..0000000000 --- a/mathesar/tests/api/test_ui_types_api.py +++ /dev/null @@ -1,76 +0,0 @@ -from mathesar.api.display_options import DISPLAY_OPTIONS_BY_UI_TYPE -from mathesar.models.deprecated import Connection -from mathesar.database.types import get_ui_type_from_id, UIType -from db.types.base import PostgresType, MathesarCustomType -from mathesar.models.users import DatabaseRole - - -def test_type_list(client, test_db_name): - database = Connection.objects.get(name=test_db_name) - - response = client.get(f'/api/ui/v0/connections/{database.id}/types/') - response_data = response.json() - assert response.status_code == 200 - assert len(response_data) == len(database.supported_ui_types) - for supported_type in response_data: - assert all([key in supported_type for key in ['identifier', 'name', 'db_types', 'display_options']]) - found_display_options = supported_type.get('display_options') - ui_type = get_ui_type_from_id(supported_type.get('identifier')) - assert ui_type is not None - expected_display_options = DISPLAY_OPTIONS_BY_UI_TYPE.get(ui_type) - assert found_display_options == expected_display_options - - -def test_type_list_permissions(FUN_create_dj_db, get_uid, client_bob, client_alice, user_bob, user_alice): - database = FUN_create_dj_db(get_uid()) - DatabaseRole.objects.create(user=user_bob, database=database, role='viewer') - response = client_bob.get(f'/api/ui/v0/connections/{database.id}/types/') - response_data = response.json() - assert response.status_code == 200 - assert len(response_data) == len(database.supported_ui_types) - - response = 
client_alice.get(f'/api/ui/v0/connections/{database.id}/types/') - assert response.status_code == 404 - - -def test_database_types_installed(client, test_db_name): - expected_custom_types = [ - { - "identifier": UIType.EMAIL.id, - "name": "Email", - "db_types": set([ - MathesarCustomType.EMAIL.id, - ]), - 'display_options': None - }, - { - "identifier": UIType.MONEY.id, - "name": "Money", - "db_types": set([ - PostgresType.MONEY.id, - MathesarCustomType.MULTICURRENCY_MONEY.id, - MathesarCustomType.MATHESAR_MONEY.id, - ]), - 'display_options': DISPLAY_OPTIONS_BY_UI_TYPE.get(UIType.MONEY) - }, - { - "identifier": UIType.URI.id, - "name": "URI", - "db_types": set([ - MathesarCustomType.URI.id, - ]), - 'display_options': None - }, - ] - default_database = Connection.objects.get(name=test_db_name) - - response = client.get(f'/api/ui/v0/connections/{default_database.id}/types/') - assert response.status_code == 200 - actual_custom_types = response.json() - - for actual_custom_type in actual_custom_types: - # Treat JSON lists as sets - actual_custom_type['db_types'] = set(actual_custom_type['db_types']) - - for custom_type in expected_custom_types: - assert custom_type in actual_custom_types diff --git a/mathesar/tests/api/test_user_api.py b/mathesar/tests/api/test_user_api.py index c135081a3f..e4dce349b0 100644 --- a/mathesar/tests/api/test_user_api.py +++ b/mathesar/tests/api/test_user_api.py @@ -1,8 +1,4 @@ -from django.db import transaction - -from db.schemas.utils import get_schema_oid_from_name -from mathesar.models.deprecated import Connection, Schema -from mathesar.models.users import User, DatabaseRole, SchemaRole +from mathesar.models.users import User def test_user_list(client): @@ -23,8 +19,6 @@ def test_user_detail(client, admin_user): assert 'password' not in response_data assert response_data['email'] == 'admin@example.com' assert response_data['is_superuser'] is True - assert response_data['database_roles'] == [] - assert response_data['schema_roles'] == [] def test_same_user_detail_as_non_superuser(client_bob, user_bob): @@ -36,8 +30,6 @@ def test_same_user_detail_as_non_superuser(client_bob, user_bob): assert 'password' not in response_data assert response_data['email'] == 'bob@example.com' assert response_data['is_superuser'] is False - assert response_data['database_roles'] == [] - assert response_data['schema_roles'] == [] def test_user_password_reset(client, user_bob): @@ -96,8 +88,6 @@ def test_diff_user_detail_as_non_superuser(client_bob, admin_user): # email should not be visible assert 'email' not in response_data assert response_data['is_superuser'] is True - assert response_data['database_roles'] == [] - assert response_data['schema_roles'] == [] def test_user_patch(client, admin_user): @@ -251,556 +241,6 @@ def test_user_delete_different_user(client_bob, user_alice): assert response.json()[0]['code'] == 4004 -def test_database_role_list_user_without_view_permission(client_bob, user_alice): - role = 'manager' - database = Connection.objects.all()[0] - DatabaseRole.objects.create(user=user_alice, database=database, role=role) - - response = client_bob.get('/api/ui/v0/database_roles/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data['count'] == 0 - - -def test_db_role_list_with_roles_on_multiple_database(FUN_create_dj_db, client_bob, user_bob, get_uid): - FUN_create_dj_db(get_uid()) - FUN_create_dj_db(get_uid()) - FUN_create_dj_db(get_uid()) - databases = Connection.objects.all() - database_with_viewer_access = databases[0] 
- DatabaseRole.objects.create(user=user_bob, database=database_with_viewer_access, role='viewer') - database_with_manager_access = databases[1] - DatabaseRole.objects.create(user=user_bob, database=database_with_manager_access, role='manager') - - response = client_bob.get('/api/ui/v0/database_roles/') - - assert response.status_code == 200 - - -def test_database_role_list_user_with_view_permission(client_bob, user_alice, user_bob): - role = 'manager' - database = Connection.objects.all()[0] - DatabaseRole.objects.create(user=user_alice, database=database, role=role) - DatabaseRole.objects.create(user=user_bob, database=database, role=role) - - response = client_bob.get('/api/ui/v0/database_roles/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data['count'] == 2 - - -def test_database_role_list_superuser(client, user_bob): - role = 'manager' - database = Connection.objects.all()[0] - DatabaseRole.objects.create(user=user_bob, database=database, role=role) - - response = client.get('/api/ui/v0/database_roles/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data['count'] == 1 - assert len(response_data['results']) == response_data['count'] - role_data = response_data['results'][0] - assert 'id' in role_data - assert role_data['user'] == user_bob.id - assert role_data['role'] == role - assert role_data['database'] == database.id - - -def test_schema_role_list(client, user_bob): - role = 'viewer' - schema = Schema.objects.all()[0] - SchemaRole.objects.create(user=user_bob, schema=schema, role=role) - - response = client.get('/api/ui/v0/schema_roles/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data['count'] == 1 - assert len(response_data['results']) == response_data['count'] - role_data = response_data['results'][0] - assert 'id' in role_data - assert role_data['user'] == user_bob.id - assert role_data['role'] == role - assert role_data['schema'] == schema.id - - -def test_schema_role_list_database_viewer(client, user_bob, user_alice): - role = 'viewer' - schema = Schema.objects.all()[0] - DatabaseRole.objects.create(user=user_bob, database=schema.database, role=role) - SchemaRole.objects.create(user=user_alice, schema=schema, role='editor') - response = client.get('/api/ui/v0/schema_roles/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data['count'] == 1 - - -def test_schema_role_list_schema_viewer(create_schema, client_bob, user_bob, user_alice, get_uid): - different_schema = create_schema(get_uid()) - SchemaRole.objects.create(user=user_bob, schema=different_schema, role='viewer') - SchemaRole.objects.create(user=user_alice, schema=different_schema, role='editor') - response = client_bob.get('/api/ui/v0/schema_roles/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data['count'] == 2 - - -def test_schema_role_list_no_roles(create_schema, client_bob, user_alice, get_uid): - schema = Schema.objects.all()[0] - different_schema = create_schema(get_uid()) - DatabaseRole.objects.create(user=user_alice, database=schema.database, role='viewer') - SchemaRole.objects.create(user=user_alice, schema=different_schema, role='manager') - response = client_bob.get('/api/ui/v0/schema_roles/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data['count'] == 0 - - -def test_schema_role_list_with_roles_on_multiple_database( - 
FUN_create_dj_db, - create_schema, - client_bob, - user_alice, - get_uid -): - FUN_create_dj_db(get_uid()) - FUN_create_dj_db(get_uid()) - schema = Schema.objects.all()[0] - different_schema = create_schema(get_uid()) - DatabaseRole.objects.create(user=user_alice, database=schema.database, role='viewer') - SchemaRole.objects.create(user=user_alice, schema=different_schema, role='manager') - response = client_bob.get('/api/ui/v0/schema_roles/') - response_data = response.json() - - assert response.status_code == 200 - assert response_data['count'] == 0 - - -def test_database_role_detail(client, user_bob): - role = 'editor' - database = Connection.objects.all()[0] - database_role = DatabaseRole.objects.create(user=user_bob, database=database, role=role) - - response = client.get(f'/api/ui/v0/database_roles/{database_role.id}/') - response_data = response.json() - - assert response.status_code == 200 - assert 'id' in response_data - assert response_data['user'] == user_bob.id - assert response_data['role'] == role - assert response_data['database'] == database.id - - -def test_schema_role_detail(client, user_bob): - role = 'editor' - schema = Schema.objects.all()[0] - schema_role = SchemaRole.objects.create(user=user_bob, schema=schema, role=role) - - response = client.get(f'/api/ui/v0/schema_roles/{schema_role.id}/') - response_data = response.json() - - assert response.status_code == 200 - assert 'id' in response_data - assert response_data['user'] == user_bob.id - assert response_data['role'] == role - assert response_data['schema'] == schema.id - - -def test_database_role_update(client, user_bob): - role = 'viewer' - database = Connection.objects.all()[0] - database_role = DatabaseRole.objects.create(user=user_bob, database=database, role=role) - data = {'user': user_bob.id, 'role': role, 'database': database.id} - - response = client.put(f'/api/ui/v0/database_roles/{database_role.id}/', data) - response_data = response.json() - - assert response.status_code == 405 - assert response_data[0]['code'] == 4006 - - -def test_schema_role_update(client, user_bob): - role = 'viewer' - schema = Schema.objects.all()[0] - schema_role = SchemaRole.objects.create(user=user_bob, schema=schema, role=role) - data = {'user': user_bob.id, 'role': role, 'schema': schema.id} - - response = client.put(f'/api/ui/v0/schema_roles/{schema_role.id}/', data) - response_data = response.json() - - assert response.status_code == 405 - assert response_data[0]['code'] == 4006 - - -def test_database_role_partial_update(client, user_bob): - role = 'viewer' - database = Connection.objects.all()[0] - database_role = DatabaseRole.objects.create(user=user_bob, database=database, role=role) - data = {'role': 'editor'} - - response = client.patch(f'/api/ui/v0/database_roles/{database_role.id}/', data) - response_data = response.json() - - assert response.status_code == 405 - assert response_data[0]['code'] == 4006 - - -def test_schema_role_partial_update(client, user_bob): - role = 'manager' - schema = Schema.objects.all()[0] - schema_role = SchemaRole.objects.create(user=user_bob, schema=schema, role=role) - data = {'role': 'editor'} - - response = client.patch(f'/api/ui/v0/schema_roles/{schema_role.id}/', data) - response_data = response.json() - - assert response.status_code == 405 - assert response_data[0]['code'] == 4006 - - -def test_database_role_create_by_superuser(client, user_bob): - role = 'editor' - database = Connection.objects.all()[0] - data = {'user': user_bob.id, 'role': role, 'database': database.id} - - 
response = client.post('/api/ui/v0/database_roles/', data) - response_data = response.json() - - assert response.status_code == 201 - assert 'id' in response_data - assert response_data['user'] == user_bob.id - assert response_data['role'] == role - assert response_data['database'] == database.id - - -def test_database_role_create_by_manager(client_bob, user_bob, user_alice): - database = Connection.objects.all()[0] - DatabaseRole.objects.create(user=user_bob, database=database, role='manager') - - role = 'viewer' - data = {'user': user_alice.id, 'role': role, 'database': database.id} - - response = client_bob.post('/api/ui/v0/database_roles/', data) - response_data = response.json() - - assert response.status_code == 201 - assert 'id' in response_data - assert response_data['user'] == user_alice.id - assert response_data['role'] == role - assert response_data['database'] == database.id - - -def test_db_role_create_with_roles_on_multiple_database(FUN_create_dj_db, client_bob, user_bob, user_alice, get_uid): - FUN_create_dj_db(get_uid()) - FUN_create_dj_db(get_uid()) - FUN_create_dj_db(get_uid()) - databases = Connection.objects.all() - database_with_viewer_access = databases[0] - DatabaseRole.objects.create(user=user_bob, database=database_with_viewer_access, role='viewer') - database_with_manager_access = databases[1] - DatabaseRole.objects.create(user=user_bob, database=database_with_manager_access, role='manager') - - role = 'viewer' - data = {'user': user_alice.id, 'role': role, 'database': database_with_viewer_access.id} - - response = client_bob.post('/api/ui/v0/database_roles/', data) - - assert response.status_code == 400 - - -def test_database_role_create_non_superuser(client_bob, user_bob): - role = 'editor' - database = Connection.objects.all()[0] - data = {'user': user_bob.id, 'role': role, 'database': database.id} - - response = client_bob.post('/api/ui/v0/database_roles/', data) - - assert response.status_code == 400 - assert response.json()[0]['code'] == 2151 - - -def test_schema_role_create_by_superuser(client, user_bob): - role = 'editor' - schema = Schema.objects.all()[0] - data = {'user': user_bob.id, 'role': role, 'schema': schema.id} - - response = client.post('/api/ui/v0/schema_roles/', data) - response_data = response.json() - - assert response.status_code == 201 - assert 'id' in response_data - assert response_data['user'] == user_bob.id - assert response_data['role'] == role - assert response_data['schema'] == schema.id - - -def test_schema_role_create_no_roles(create_schema, client_bob, user_alice, get_uid): - role = 'manager' - schema = create_schema(get_uid()) - data = {'user': user_alice.id, 'role': role, 'schema': schema.id} - response = client_bob.post('/api/ui/v0/schema_roles/', data=data) - assert response.status_code == 400 - - -def test_schema_role_create_without_permissible_role(create_schema, client_bob, user_bob, user_alice, get_uid): - schema = create_schema(get_uid()) - SchemaRole.objects.create(user=user_bob, schema=schema, role='editor') - role = 'editor' - data = {'user': user_alice.id, 'role': role, 'schema': schema.id} - response = client_bob.post('/api/ui/v0/schema_roles/', data=data) - assert response.status_code == 400 - - -def test_schema_role_create_by_schema_manager(create_schema, client_bob, user_bob, user_alice, get_uid): - role = 'manager' - schema = create_schema(get_uid()) - SchemaRole.objects.create(user=user_bob, schema=schema, role='manager') - data = {'user': user_alice.id, 'role': role, 'schema': schema.id} - response = 
client_bob.post('/api/ui/v0/schema_roles/', data=data) - assert response.status_code == 201 - response_data = response.json() - assert response_data['user'] == user_alice.id - assert response_data['role'] == role - assert response_data['schema'] == schema.id - - -def test_schema_role_create_by_db_manager(create_schema, client_bob, user_bob, user_alice, get_uid): - role = 'manager' - schema = create_schema(get_uid()) - DatabaseRole.objects.create(user=user_bob, database=schema.database, role='manager') - data = {'user': user_alice.id, 'role': role, 'schema': schema.id} - response = client_bob.post('/api/ui/v0/schema_roles/', data=data) - assert response.status_code == 201 - response_data = response.json() - assert response_data['user'] == user_alice.id - assert response_data['role'] == role - assert response_data['schema'] == schema.id - - -def test_schema_role_create_with_multiple_database( - FUN_create_dj_db, - MOD_engine_cache, - create_db_schema, - client_bob, - user_bob, - user_alice, - get_uid -): - schema_params = [ - ("schema_1", "database_1"), ("schema_2", "database_2"), - ("schema_3", "database_3"), ("schema_1", "database_3") - ] - - dbs_to_create = set(param[1] for param in schema_params) - - for db_name in dbs_to_create: - FUN_create_dj_db(db_name) - - for schema_name, db_name in schema_params: - engine = MOD_engine_cache(db_name) - create_db_schema(schema_name, engine) - - schemas = { - schema_param: Schema.objects.get( - oid=get_schema_oid_from_name( - schema_param[0], - MOD_engine_cache(schema_param[1]) - ), - ) - for schema_param in schema_params - } - db1_schema_with_manager_role = schemas[schema_params[0]] - db2_schema_with_no_schema_role_but_db_manager = schemas[schema_params[1]] - DatabaseRole.objects.create( - user=user_bob, - database=db2_schema_with_no_schema_role_but_db_manager.database, - role='manager' - ) - db3_schema_with_no_role = schemas[schema_params[2]] - db3_schema_with_editor_role = schemas[schema_params[2]] - SchemaRole.objects.create(user=user_bob, schema=db1_schema_with_manager_role, role='manager') - SchemaRole.objects.create(user=user_bob, schema=db3_schema_with_editor_role, role='editor') - data = {'user': user_alice.id, 'role': 'manager', 'schema': db1_schema_with_manager_role.id} - response = client_bob.post('/api/ui/v0/schema_roles/', data=data) - assert response.status_code == 201 - data = {'user': user_alice.id, 'role': 'manager', 'schema': db3_schema_with_no_role.id} - response = client_bob.post('/api/ui/v0/schema_roles/', data=data) - assert response.status_code == 400 - data = {'user': user_alice.id, 'role': 'manager', 'schema': db3_schema_with_editor_role.id} - response = client_bob.post('/api/ui/v0/schema_roles/', data=data) - assert response.status_code == 400 - data = {'user': user_alice.id, 'role': 'manager', 'schema': db2_schema_with_no_schema_role_but_db_manager.id} - response = client_bob.post('/api/ui/v0/schema_roles/', data=data) - assert response.status_code == 201 - - -def test_database_role_create_with_incorrect_role(client, user_bob): - role = 'nonsense' - database = Connection.objects.all()[0] - data = {'user': user_bob.id, 'role': role, 'database': database.id} - - response = client.post('/api/ui/v0/database_roles/', data) - response_data = response.json() - - assert response.status_code == 400 - assert response_data[0]['code'] == 2081 - - -def test_schema_role_create_with_incorrect_role(client, user_bob): - role = 'nonsense' - schema = Schema.objects.all()[0] - data = {'user': user_bob.id, 'role': role, 'schema': schema.id} - 
- response = client.post('/api/ui/v0/schema_roles/', data) - response_data = response.json() - - assert response.status_code == 400 - assert response_data[0]['code'] == 2081 - - -def test_database_role_create_with_incorrect_database(client, user_bob): - role = 'editor' - database = Connection.objects.order_by('-id')[0] - data = {'user': user_bob.id, 'role': role, 'database': database.id + 1} - - response = client.post('/api/ui/v0/database_roles/', data) - response_data = response.json() - - assert response.status_code == 400 - assert response_data[0]['code'] == 2151 - - -def test_schema_role_create_with_incorrect_schema(client, user_bob): - role = 'editor' - schema = Schema.objects.order_by('-id')[0] - data = {'user': user_bob.id, 'role': role, 'schema': schema.id + 1} - - response = client.post('/api/ui/v0/schema_roles/', data) - response_data = response.json() - - assert response.status_code == 400 - assert response_data[0]['code'] == 2151 - - -def test_database_role_destroy(client, user_bob): - role = 'viewer' - database = Connection.objects.all()[0] - database_role = DatabaseRole.objects.create(user=user_bob, database=database, role=role) - - response = client.delete(f'/api/ui/v0/database_roles/{database_role.id}/') - assert response.status_code == 204 - - -def test_database_role_destroy_by_manager(client_bob, user_bob, user_alice): - database = Connection.objects.all()[0] - DatabaseRole.objects.create(user=user_bob, database=database, role='manager') - - role = 'viewer' - database_role = DatabaseRole.objects.create(user=user_alice, database=database, role=role) - - response = client_bob.delete(f'/api/ui/v0/database_roles/{database_role.id}/') - assert response.status_code == 204 - - -def test_database_role_destroy_by_non_manager(client_bob, user_bob, user_alice): - database = Connection.objects.all()[0] - DatabaseRole.objects.create(user=user_bob, database=database, role='viewer') - - role = 'viewer' - database_role = DatabaseRole.objects.create(user=user_alice, database=database, role=role) - - response = client_bob.delete(f'/api/ui/v0/database_roles/{database_role.id}/') - assert response.status_code == 403 - - -def test_database_role_destroy_by_user_without_role(client_bob, user_alice): - database = Connection.objects.all()[0] - - role = 'viewer' - database_role = DatabaseRole.objects.create(user=user_alice, database=database, role=role) - - response = client_bob.delete(f'/api/ui/v0/database_roles/{database_role.id}/') - assert response.status_code == 404 - - -def test_schema_role_destroy_by_superuser(client, user_bob): - role = 'viewer' - schema = Schema.objects.all()[0] - schema_role = SchemaRole.objects.create(user=user_bob, schema=schema, role=role) - - response = client.delete(f'/api/ui/v0/schema_roles/{schema_role.id}/') - assert response.status_code == 204 - - -def test_schema_role_destroy_by_manager(client_bob, user_bob): - role = 'manager' - schema = Schema.objects.all()[0] - schema_role = SchemaRole.objects.create(user=user_bob, schema=schema, role=role) - - response = client_bob.delete(f'/api/ui/v0/schema_roles/{schema_role.id}/') - assert response.status_code == 204 - - -def test_schema_role_destroy_by_non_manager(client_bob, user_bob): - role = 'viewer' - schema = Schema.objects.all()[0] - schema_role = SchemaRole.objects.create(user=user_bob, schema=schema, role=role) - - response = client_bob.delete(f'/api/ui/v0/schema_roles/{schema_role.id}/') - assert response.status_code == 403 - - -def test_schema_role_destroy_by_db_manager(client_bob, user_bob, user_alice): - 
schema = Schema.objects.all()[0] - database = schema.database - DatabaseRole.objects.create(user=user_bob, database=database, role='manager') - - schema_role = SchemaRole.objects.create(user=user_alice, schema=schema, role='viewer') - - response = client_bob.delete(f'/api/ui/v0/schema_roles/{schema_role.id}/') - assert response.status_code == 204 - - -def test_database_role_create_multiple_roles_on_same_object(client, user_bob): - role = 'manager' - database = Connection.objects.all()[0] - DatabaseRole.objects.create(user=user_bob, database=database, role=role) - data = {'user': user_bob.id, 'role': 'editor', 'database': database.id} - - # The IntegrityError triggered here was causing issues with tearing down the - # pytest user fixture. This answer suggested this solution: - # https://stackoverflow.com/a/23326971/287415 - with transaction.atomic(): - response = client.post('/api/ui/v0/database_roles/', data) - response_data = response.json() - - assert response.status_code == 500 - assert response_data[0]['code'] == 4201 - - -def test_schema_role_create_multiple_roles_on_same_object(client, user_bob): - role = 'manager' - schema = Schema.objects.all()[0] - SchemaRole.objects.create(user=user_bob, schema=schema, role=role) - data = {'user': user_bob.id, 'role': 'editor', 'schema': schema.id} - - # The IntegrityError triggered here was causing issues with tearing down the - # pytest user fixture. This answer suggested this solution: - # https://stackoverflow.com/a/23326971/287415 - with transaction.atomic(): - response = client.post('/api/ui/v0/schema_roles/', data) - response_data = response.json() - - assert response.status_code == 500 - assert response_data[0]['code'] == 4201 - - def test_superuser_create_redirect_if_superuser_exists(client, admin_user): response = client.get('/auth/create_superuser/') assert response.status_code == 302 diff --git a/mathesar/tests/conftest.py b/mathesar/tests/conftest.py index b887648cbc..27fdbc0118 100644 --- a/mathesar/tests/conftest.py +++ b/mathesar/tests/conftest.py @@ -202,11 +202,6 @@ def patents_excel_filepath(): return 'mathesar/tests/data/patents.xlsx' -@pytest.fixture(scope='session') -def table_with_id_col_filepath(): - return 'mathesar/tests/data/table_with_id_col.csv' - - @pytest.fixture(scope='session') def paste_filename(): return 'mathesar/tests/data/patents.txt' @@ -227,11 +222,6 @@ def patents_url_filename(): return 'mathesar/tests/data/api_patents.csv' -@pytest.fixture(scope='session') -def data_types_csv_filepath(): - return 'mathesar/tests/data/data_types.csv' - - @pytest.fixture(scope='session') def col_names_with_spaces_csv_filepath(): return 'mathesar/tests/data/col_names_with_spaces.csv' diff --git a/mathesar/tests/data/base_table.csv b/mathesar/tests/data/base_table.csv deleted file mode 100644 index 414b8397dd..0000000000 --- a/mathesar/tests/data/base_table.csv +++ /dev/null @@ -1,5 +0,0 @@ -Id,Center,Affiliated Center,Original Patent,Status,Case Number,Patent Number,Application SN,Title,Patent Expiration Date -1,1,2,,Application,KSC-12871,0,"13/033,085",Polyimide Wire Insulation Repair System, -2,2,1,1,Issued,ARC-14048-1,5694939,"08/543,093",Autogenic-Feedback Training Exercise Method & System,10/03/2015 -3,3,,1,Issued,DRC-008-014,8047472,"12/45,970",IMPROVED RAM BOOSTER,03/11/2028 -4,2,,2,Issued,ARC-14569-2,8139892,"12/807,375",Spatial Standard Observer,01/24/2025 \ No newline at end of file diff --git a/mathesar/tests/data/data_types.csv b/mathesar/tests/data/data_types.csv deleted file mode 100644 index 
ac4d5a6fef..0000000000 --- a/mathesar/tests/data/data_types.csv +++ /dev/null @@ -1,4 +0,0 @@ -Integer,Boolean,Text,Decimal -5,true,Pizza,5.0 -6,false,Ice cream,6.0 -7,true,Spanakopita,7.0 diff --git a/mathesar/tests/data/data_types.json b/mathesar/tests/data/data_types.json deleted file mode 100644 index 24e572d4bb..0000000000 --- a/mathesar/tests/data/data_types.json +++ /dev/null @@ -1,34 +0,0 @@ -[ - { - "Integer": 5, - "Boolean": true, - "Decimal": "6.5", - "Text": "Pizza", - "Money": "$5", - "Duration": "10:10", - "Date & Time": "10/10/2000 10:10", - "Email": "ezio@example.com", - "URI": "https://mathesar.org/", - "List": ["Gwen", "Matt", "Bruce"], - "Dict": { - "Name": "Peter", - "Age": 25 - } - }, - { - "Integer": 6, - "Boolean": false, - "Decimal": "10.5", - "Text": "Noodles", - "Money": "$10", - "Duration": "10:00", - "Date & Time": "01/05/1900 10:10", - "Email": "dom@example.com", - "URI": "https://github.com/", - "List": ["Jason", "Jake", "Mary"], - "Dict": { - "Name": "John", - "Age": 25 - } - } -] \ No newline at end of file diff --git a/mathesar/tests/data/display_options_inference.csv b/mathesar/tests/data/display_options_inference.csv deleted file mode 100644 index d579c8d915..0000000000 --- a/mathesar/tests/data/display_options_inference.csv +++ /dev/null @@ -1,5 +0,0 @@ -col_1,col_2,col_3,col_4 -0,0,"$5","5.00" -2,1,"$1","₿2.00" -1,1,"$2","5.00" -0,0,"$3","₿5.00" diff --git a/mathesar/tests/data/invalid_reference_base_table.csv b/mathesar/tests/data/invalid_reference_base_table.csv deleted file mode 100644 index 24b8b244d6..0000000000 --- a/mathesar/tests/data/invalid_reference_base_table.csv +++ /dev/null @@ -1,12 +0,0 @@ -Center,Status,Case Number,Patent Number,Application SN,Title,Patent Expiration Date -1,Application,KSC-12871,0,"13/033,085",Polyimide Wire Insulation Repair System, -2,Issued,ARC-14048-1,5694939,"08/543,093",Autogenic-Feedback Training Exercise Method & System,10/03/2015 -3,Issued,ARC-14231-1,6109270,"09/017,519",Multimodality Instrument For Tissue Characterization,02/04/2017 -1,Issued,ARC-14231-2DIV,6976013,"10/874,003",Metrics For Body Sensing System,06/16/2024 -4,Issued,ARC-14231-3,6718196,"09/652,299",Multimodality Instrument For Tissue Characterization,02/04/2017 -1,Issued,ARC-14275-1,6445390,"09/226,673",Automated Triangle Geometry Processing For Surface Modeling And Cartesian Grid Generation (CART3D),12/24/2018 -5,Issued,ARC-14281-1,6606612,"09/374,491",Aerodynamic Design Using Neural Networks,08/13/2019 -3,Issued,ARC-14281-3,7191161,"10/637,087",Method For Constructing Composite Response Surfaces By Combining Neural Networks With Polynomial Interpolation Or Estimation Techniques,11/18/2020 -1,Issued,ARC-14359-1,6314362,"09/498,123",A Direct-To Controller Tool (A Component Of The CTAS Software Suite),02/02/2020 -2,Issued,ARC-14494-1,6720984,"09/606,107",Bio-Electric Keyboard/Mouse/Joystick Interface Software/Algorithm,06/13/2020 -1,Issued,ARC-14512-1,6823333,"09/800,309",Keyword-in-context Search Method And Software For Information Retrieval From Collections Of Text Documents (Quorum/Perilog),03/02/2021 \ No newline at end of file diff --git a/mathesar/tests/data/long_column_names.csv b/mathesar/tests/data/long_column_names.csv deleted file mode 100644 index 5536053791..0000000000 --- a/mathesar/tests/data/long_column_names.csv +++ /dev/null @@ -1,55 +0,0 @@ -"State or Nation","Cycle 1 Total Number of Health Deficiencies","Cycle 1 Total Number of Fire Safety Deficiencies","Cycle 2 Total Number of Health Deficiencies","Cycle 2 Total Number of 
Fire Safety Deficiencies","Cycle 3 Total Number of Health Deficiencies","Cycle 3 Total Number of Fire Safety Deficiencies","Average Number of Residents per Day","Reported Nurse Aide Staffing Hours per Resident per Day","Reported LPN Staffing Hours per Resident per Day","Reported RN Staffing Hours per Resident per Day","Reported Licensed Staffing Hours per Resident per Day","Reported Total Nurse Staffing Hours per Resident per Day","Total number of nurse staff hours per resident per day on the weekend","Registered Nurse hours per resident per day on the weekend","Reported Physical Therapist Staffing Hours per Resident Per Day","Total nursing staff turnover","Registered Nurse turnover","Number of administrators who have left the nursing home","Case-Mix RN Staffing Hours per Resident per Day","Case-Mix Total Nurse Staffing Hours per Resident per Day","Number of Fines","Fine Amount in Dollars","Percentage of long stay residents whose need for help with daily activities has increased","Percentage of long stay residents who lose too much weight","Percentage of low risk long stay residents who lose control of their bowels or bladder","Percentage of long stay residents with a catheter inserted and left in their bladder","Percentage of long stay residents with a urinary tract infection","Percentage of long stay residents who have depressive symptoms","Percentage of long stay residents who were physically restrained","Percentage of long stay residents experiencing one or more falls with major injury","Percentage of long stay residents assessed and appropriately given the pneumococcal vaccine","Percentage of long stay residents who received an antipsychotic medication","Percentage of short stay residents assessed and appropriately given the pneumococcal vaccine","Percentage of short stay residents who newly received an antipsychotic medication","Percentage of long stay residents whose ability to move independently worsened","Percentage of long stay residents who received an antianxiety or hypnotic medication","Percentage of high risk long stay residents with pressure ulcers","Percentage of long stay residents assessed and appropriately given the seasonal influenza vaccine","Percentage of short stay residents who made improvements in function","Percentage of short stay residents who were assessed and appropriately given the seasonal influenza vaccine","Percentage of short stay residents who were rehospitalized after a nursing home admission","Percentage of short stay residents who had an outpatient emergency department visit","Number of hospitalizations per 1000 long-stay resident days","Number of outpatient emergency department visits per 1000 long-stay resident days","Processing Date" -"NATION","8.6","4.5","8.5","4.3","8.3","4.6","78.6","2.22","0.88","0.66","1.53","3.75","3.26","0.45","0.07","53.9","52.3","0.8","0.38487","3.15796","2.3","33247","14.842144","6.172333","47.158545","1.698662","2.345577","7.882694","0.145406","3.395302","92.085375","14.447634","78.873848","1.738571","16.161024","19.436701","8.145643","94.937079","74.115131","75.601680","22.073834","11.791045","1.585233","1.016932","2023-02-01" 
-"AK","8.9","7.3","8.0","6.6","13.1","5.5","35.8","4.26","0.59","2.01","2.60","6.86","5.88","1.44","0.09","54.7","54.0","0.3","0.32843","3.07815","2.8","40982","11.399472","5.955443","42.734961","3.729863","3.092132","7.233612","0.214286","3.780119","94.381998","15.207048","74.994950","0.897709","16.474449","14.673754","9.103530","97.261368","77.508477","81.395239","14.130346","14.433861","1.002547","0.897472","2023-02-01" -"AL","2.9","2.9","3.1","3.3","3.9","3.4","91.3","2.34","0.87","0.58","1.45","3.79","3.15","0.32","0.03","52.2","46.9","0.7","0.30675","2.91289","1.0","9544","12.824604","6.869793","42.223463","2.067217","3.126881","1.119971","0.627098","3.500289","92.692985","19.916009","81.288737","2.412304","14.032358","23.573902","9.394910","93.702772","72.519689","75.399412","21.907646","11.153483","1.641877","0.983719","2023-02-01" -"AR","8.5","2.1","9.2","1.8","11.1","2.2","71.2","2.50","1.02","0.37","1.39","3.89","3.29","0.26","0.03","56.5","54.9","0.5","0.30829","2.91594","1.9","23087","12.018400","5.631477","40.901992","1.622935","2.248033","2.173623","0.229492","3.957339","94.849162","11.978126","78.050791","1.685996","11.258485","21.562726","8.438663","96.916093","77.938874","73.836436","23.566078","13.385911","1.830761","1.296204","2023-02-01" -"AZ","7.6","3.3","7.0","3.2","8.0","3.3","76.0","2.28","1.08","0.75","1.82","4.10","3.55","0.52","0.11","55.0","55.0","0.5","0.39392","3.15730","2.0","13117","12.840441","6.067455","56.507921","1.539109","1.507067","3.595873","0.310559","2.456865","95.641211","10.356776","86.951457","1.056651","18.691552","17.508812","9.226697","94.685482","74.158771","83.771004","23.490408","11.388180","1.275856","0.956408","2023-02-01" -"CA","15.1","7.1","14.1","6.8","13.1","6.8","80.9","2.54","1.20","0.58","1.79","4.33","3.88","0.44","0.08","47.8","51.5","0.8","0.40762","3.15304","2.2","19392","8.128644","5.245266","31.959431","1.618842","1.273445","5.524313","0.231156","1.768723","97.972113","10.004216","93.461148","1.289304","11.198966","13.731006","7.546983","98.394685","79.463056","92.036398","21.788146","10.829473","1.722919","0.832184","2023-02-01" -"CO","7.6","6.1","7.5","6.2","9.9","6.0","66.3","2.16","0.68","0.84","1.52","3.68","3.22","0.62","0.08","58.5","56.8","1.2","0.35462","3.01904","3.6","47574","15.465715","6.301084","47.560332","1.943715","2.062230","3.872010","0.088626","3.633371","86.832813","17.676144","71.128281","1.719544","16.945294","12.513949","5.861102","93.915574","75.203028","69.259789","19.332534","12.842127","1.183063","0.924672","2023-02-01" -"CT","7.9","3.3","8.7","2.1","9.8","2.8","96.1","2.13","0.81","0.72","1.52","3.65","3.20","0.48","0.09","43.6","45.6","0.7","0.37347","3.12293","2.1","26299","17.142969","6.602093","42.812501","1.285110","2.264424","7.452102","0.006252","3.541306","84.747229","15.665749","62.698501","1.471289","19.217666","17.268224","6.452903","93.046692","74.507476","64.082133","23.019671","11.607396","1.720229","0.869883","2023-02-01" -"DC","14.4","2.4","16.4","2.5","16.1","4.6","117.7","2.46","0.59","1.55","2.15","4.61","3.98","1.25","0.11","46.3","44.0","0.7","0.53050","3.50084","2.8","90079","17.149334","5.072200","64.161295","1.323201","1.687977","0.829892","0.169683","1.229977","90.680541","7.962449","65.172847","1.668955","22.407608","10.783063","13.164346","95.659753","65.283356","70.271605","19.374209","6.763558","0.929341","0.325031","2023-02-01" 
-"DE","11.4","0.9","13.3","0.8","15.5","1.4","84.6","2.30","0.94","1.06","2.00","4.30","3.75","0.73","0.10","46.9","44.6","0.6","0.34697","3.03371","1.5","24843","15.009389","4.850518","51.780924","0.761590","1.721781","4.803085","0.190354","4.056899","95.177121","11.092806","80.869836","1.009524","18.304275","18.201550","5.977196","97.253134","76.179021","81.259924","19.549109","12.075911","1.457310","0.730632","2023-02-01" -"FL","5.9","1.8","5.6","2.7","6.7","3.5","100.2","2.40","0.85","0.68","1.54","3.93","3.54","0.47","0.09","54.0","58.4","0.9","0.35502","3.09968","2.1","27981","12.816814","6.365373","52.264518","1.134168","1.796867","2.448652","0.017971","2.804979","97.076653","10.386811","90.147105","1.726172","14.941135","21.530673","9.270504","96.952495","78.078477","87.700518","25.296906","9.591155","1.915698","0.691537","2023-02-01" -"GA","4.1","1.3","4.1","2.8","3.9","3.3","80.5","1.90","1.04","0.45","1.49","3.39","2.90","0.28","0.05","54.0","52.6","0.8","0.36572","3.12253","2.1","25067","17.040645","6.798043","42.320798","1.686013","3.169320","5.132865","0.069017","3.323895","92.367287","18.235999","79.363199","2.826814","18.303896","20.802715","10.509768","95.279234","69.422210","75.692177","21.757265","11.722100","1.646802","1.041544","2023-02-01" -"GU","20.0","8.0","13.0","5.0","13.0","10.0","15.2","4.19","1.68","4.10","5.78","9.97","9.11","3.55","0.32",,,,"0.67516","3.34519","3.0","7150",,,,,,,,,,,"97.163122","8.333332",,,,"70.833333","0.000000","91.452991","24.816053","0.000000",,,"2023-02-01" -"HI","11.4","2.2","9.6","3.2","8.6","0.7","74.6","2.72","0.34","1.60","1.94","4.66","4.15","1.25","0.07","39.3","43.4","0.8","0.38766","3.23198","2.5","41427","12.348540","5.191199","47.941947","1.853915","2.481894","1.479999","0.345567","1.639112","96.267609","9.441430","78.896335","1.257852","19.499854","8.656161","4.854205","97.745290","77.616533","77.288302","15.798930","8.019240","1.013812","0.675099","2023-02-01" -"IA","7.6","6.2","6.4","5.1","6.1","5.6","47.7","2.36","0.59","0.74","1.34","3.69","3.19","0.50","0.04","56.1","50.7","0.8","0.32422","2.94033","2.5","31362","17.182500","5.503345","45.826977","2.905904","3.211425","4.620689","0.234481","3.661753","94.486830","17.036856","84.754686","1.696546","17.689218","19.334326","6.198473","95.693380","78.295246","74.139313","20.062687","12.770796","1.354242","1.165986","2023-02-01" -"ID","8.2","4.6","10.3","4.5","10.4","4.2","47.1","2.49","0.83","0.95","1.78","4.27","3.61","0.64","0.14","56.1","51.0","0.7","0.37520","3.14779","2.5","33492","12.924392","5.824486","46.843756","1.784242","2.388714","9.129729","0.366979","3.501341","96.634971","17.707444","86.341184","1.204967","16.319169","15.595508","6.164639","96.364004","74.936058","81.483404","14.573690","11.210045","0.901025","1.001252","2023-02-01" -"IL","11.7","6.5","11.0","8.0","9.4","8.0","84.8","1.98","0.65","0.72","1.37","3.35","2.91","0.53","0.06","52.1","49.8","0.9","0.41900","3.31127","3.3","82957","13.806874","6.641194","44.309725","1.776029","2.565415","32.133572","0.101770","3.463801","85.381931","17.796274","65.381111","2.063073","15.655724","18.842671","8.756588","91.656850","70.257559","62.457472","24.553063","12.930031","1.788210","1.163074","2023-02-01" 
-"IN","8.9","6.9","9.6","7.1","9.6","6.8","66.8","2.13","0.79","0.63","1.42","3.55","3.05","0.41","0.06","57.4","53.4","0.7","0.42895","3.39945","2.6","30277","16.282996","6.252093","56.243292","0.931282","1.805943","14.403894","0.076208","4.067471","90.536625","12.883658","76.996226","1.609374","13.844641","22.110261","7.300250","94.211093","73.988901","72.941627","21.401472","12.099129","1.484595","1.110387","2023-02-01" -"KS","8.0","9.8","6.3","9.3","7.1","11.3","47.4","2.57","0.67","0.72","1.39","3.97","3.46","0.50","0.05","57.3","52.1","0.7","0.33705","2.96194","2.4","23551","17.270619","5.726627","41.596896","2.485671","3.635786","5.957237","0.059274","5.083958","88.856452","17.844800","71.643663","2.252982","18.470156","22.772910","6.972764","94.759783","74.145134","67.158045","20.934086","11.560256","1.687413","1.241434","2023-02-01" -"KY","4.6","1.9","5.5","1.4","4.9","0.9","74.3","2.25","0.86","0.77","1.64","3.89","3.36","0.50","0.06","56.7","50.0","0.7","0.41963","3.30694","1.6","40177","16.053339","7.800501","45.536515","1.538687","3.040351","8.842789","0.227049","4.119532","91.810084","16.179459","79.047690","2.173845","18.002202","29.375386","9.274942","95.133746","71.989882","76.797540","22.417735","14.350174","1.708204","1.389017","2023-02-01" -"LA","6.1","1.3","4.1","0.9","5.0","1.0","85.4","2.20","1.14","0.27","1.41","3.61","3.06","0.18","0.05","55.4","53.6","0.9","0.38369","3.09516","2.0","16894","18.656230","5.663137","39.565836","1.731458","2.810300","1.498618","0.270675","3.423500","93.077418","16.621426","81.679050","2.581402","14.256986","22.552108","9.601170","94.265139","65.089414","73.692261","25.552806","13.440326","2.206644","1.537023","2023-02-01" -"MA","11.6","2.6","9.5","2.0","9.6","1.9","90.9","2.10","0.92","0.67","1.59","3.69","3.24","0.47","0.07","47.1","52.8","0.8","0.31907","3.07685","2.7","50457","15.723910","5.724830","57.595477","1.467799","2.403211","2.008722","0.170192","3.730504","89.843750","19.521781","73.777879","1.508874","16.115730","18.586347","6.565019","95.019709","71.546852","74.994018","23.636202","11.430013","1.546126","0.805229","2023-02-01" -"MD","14.4","5.1","14.7","3.6","11.6","3.8","99.0","2.08","0.87","0.83","1.70","3.78","3.30","0.59","0.09","49.3","50.9","0.9","0.42591","3.32068","1.8","21990","20.331978","6.173061","63.941173","1.413939","2.086239","11.160903","0.122735","2.582015","89.935951","13.134876","74.321669","1.542654","25.781949","14.155775","10.105588","95.268467","73.059782","76.285525","20.975714","9.484874","1.149602","0.709799","2023-02-01" -"ME","6.4","4.9","5.7","4.0","5.7","5.4","58.0","2.87","0.47","1.00","1.48","4.34","3.83","0.69","0.08","55.5","50.8","0.8","0.31218","3.16030","1.0","7382","13.638926","6.025590","65.142410","2.004786","3.395025","8.676855","0.052568","4.127018","90.498288","20.603068","67.179995","1.495056","20.860618","16.306428","6.147884","95.563935","70.159342","73.078954","15.817526","14.276760","1.006634","1.146582","2023-02-01" -"MI","12.7","6.1","13.8","5.4","13.1","4.8","76.8","2.23","0.89","0.75","1.64","3.87","3.34","0.47","0.06","53.1","48.4","0.9","0.32755","3.01248","2.6","69596","12.304438","6.631126","47.835367","1.610109","2.299351","3.402342","0.146837","2.924258","92.242691","13.632260","79.105072","1.332810","15.182324","18.966396","8.849317","92.961463","77.621248","74.864522","22.459666","11.431663","1.598690","0.918488","2023-02-01" 
-"MN","6.7","4.5","8.6","3.5","9.4","3.4","56.3","2.38","0.66","1.01","1.67","4.05","3.48","0.63","0.07","50.9","47.2","0.6","0.30696","2.97545","2.4","38927","13.739490","4.743802","49.695572","2.918130","2.862371","4.374829","0.076392","4.018921","96.032326","16.585568","88.092258","1.826142","16.459135","12.601455","6.810617","95.146963","77.097943","77.600065","20.614291","13.533464","1.359289","0.961614","2023-02-01" -"MO","10.7","7.0","9.6","4.6","8.4","5.3","67.1","2.12","0.70","0.46","1.16","3.28","2.86","0.32","0.04","60.3","58.5","0.8","0.31571","2.78326","3.0","35865","15.459187","5.413499","30.087970","2.183881","3.297052","4.646042","0.118577","3.827940","86.108015","20.687883","67.949211","2.549121","13.767195","23.985574","9.445319","92.278732","72.307599","64.118590","23.853437","12.073895","1.761408","1.209387","2023-02-01" -"MS","3.9","0.6","4.5","0.4","4.1","0.7","69.4","2.32","1.09","0.59","1.68","4.00","3.36","0.35","0.05","52.4","49.9","0.7","0.34129","3.05183","1.1","21152","19.214683","6.958171","48.472827","1.942004","2.993915","2.040307","0.467353","3.094010","96.072347","20.591187","84.781258","3.337471","19.151819","22.715151","10.231306","96.417677","68.900694","79.245312","23.818034","15.014332","2.066777","1.523809","2023-02-01" -"MT","6.9","7.8","6.5","10.8","7.8","12.2","46.6","2.28","0.57","0.83","1.41","3.69","3.27","0.61","0.06","63.2","55.9","0.9","0.30381","2.83834","4.3","64624","15.897640","7.377114","42.449845","2.945208","2.960927","5.103359","0.228091","4.921630","90.655136","17.775999","75.340530","1.699699","17.451727","14.567369","8.471330","94.336078","74.622658","69.289414","15.661370","12.570224","1.197952","1.253237","2023-02-01" -"NC","7.6","2.0","6.0","2.7","6.0","4.0","78.0","2.20","0.89","0.57","1.46","3.66","3.15","0.37","0.08","57.2","54.8","0.8","0.40680","3.23533","1.8","44631","19.368996","8.365626","53.842000","1.462936","3.298314","3.419275","0.056298","3.779915","88.411617","11.988600","76.066907","1.483103","22.918309","21.400159","10.276281","93.055159","72.619948","74.185894","20.928803","12.743005","1.427248","1.075012","2023-02-01" -"ND","5.9","1.5","5.6","1.3","5.8","2.0","59.5","2.87","0.61","0.86","1.47","4.34","3.65","0.52","0.03","53.1","40.8","0.5","0.29070","2.94563","1.2","12419","16.393833","5.473313","48.268205","1.997814","3.509871","5.421254","0.193185","4.570133","98.238410","20.217778","91.943488","2.721974","15.731838","18.752673","5.869866","98.301952","77.585936","84.642634","17.287770","9.944697","1.270855","0.954934","2023-02-01" -"NE","7.4","6.3","6.3","4.6","8.9","6.5","51.9","2.55","0.68","0.72","1.40","3.95","3.39","0.50","0.05","56.1","47.5","0.6","0.33541","3.02373","1.9","19653","14.762205","5.825405","48.168379","2.289872","3.784967","4.156021","0.174891","4.819080","91.735755","18.843531","79.508421","1.790478","17.085793","19.739101","5.876530","95.958044","78.644474","72.319867","19.872370","10.487467","1.565626","0.991274","2023-02-01" -"NH","4.8","3.1","3.8","2.6","3.4","2.7","76.6","2.30","0.81","0.72","1.53","3.83","3.32","0.47","0.06","51.7","45.6","0.5","0.35018","3.07170","1.3","10862","17.985373","6.453707","47.445124","1.694464","2.661148","5.833451","0.225576","4.927813","93.195146","16.636614","78.343891","1.443100","19.562980","16.461981","6.487035","96.852235","79.661243","76.564362","19.780502","13.562729","1.359127","1.056185","2023-02-01" 
-"NJ","4.3","2.8","3.7","1.2","4.8","1.6","110.4","2.11","0.92","0.75","1.67","3.78","3.30","0.52","0.10","48.8","49.8","1.0","0.37014","3.05687","2.2","31780","10.457140","6.003672","41.742823","1.258171","1.493034","6.499913","0.103787","2.594331","93.606364","10.538616","80.683178","1.195082","10.681691","17.429732","9.083879","96.519125","81.174676","80.212170","23.532855","8.990347","1.791158","0.720861","2023-02-01" -"NM","15.7","3.9","12.4","4.2","10.8","4.6","74.0","2.27","0.69","0.65","1.34","3.60","3.17","0.47","0.08","61.2","64.5","0.9","0.33007","2.92979","3.4","54788","16.880726","6.974591","42.691992","2.212342","1.689994","3.268364","0.047687","3.344050","95.710454","16.752933","78.284619","1.760241","17.587717","13.044343","8.225322","95.091577","67.042592","74.459302","18.994139","16.282381","1.341497","1.503519","2023-02-01" -"NV","13.5","12.5","14.9","11.5","15.8","14.0","82.6","2.33","1.01","0.84","1.84","4.17","3.63","0.60","0.11","53.7","52.5","0.9","0.44537","3.18521","3.9","19457","15.613568","5.810421","39.745229","2.348736","2.082659","2.638951","0.186568","2.681275","91.584943","13.595163","79.367319","1.786471","18.670537","18.029114","8.509120","91.560315","69.506874","75.616935","20.984294","9.430053","1.333119","0.654221","2023-02-01" -"NY","4.9","4.5","5.5","3.9","4.9","4.4","157.8","2.08","0.78","0.66","1.44","3.52","2.99","0.43","0.12","45.0","47.2","0.6","0.47563","3.43071","1.5","9742","15.066503","6.417714","54.453754","1.156857","2.109533","13.099350","0.203698","3.010254","89.287753","11.614134","70.707866","1.413465","15.399499","13.317674","9.528866","95.009824","75.573860","73.840557","19.143212","9.340678","1.396627","0.742529","2023-02-01" -"OH","10.1","6.0","10.5","5.4","7.9","4.4","68.2","1.96","0.94","0.60","1.54","3.50","3.07","0.39","0.06","58.1","54.5","0.8","0.46852","3.38465","2.3","31735","14.965475","6.857397","43.246040","0.965583","1.502662","21.593891","0.053769","3.667924","89.503534","13.075572","73.916728","1.695244","14.012745","22.485032","7.411900","92.505303","75.215028","69.647646","23.028039","12.755315","1.407392","1.061040","2023-02-01" -"OK","7.6","4.3","8.9","4.0","8.1","4.4","55.5","2.46","0.95","0.34","1.29","3.76","3.34","0.27","0.02","61.0","60.4","0.7","0.31539","2.81726","3.2","23013","13.895305","4.488022","36.933077","2.527743","3.529558","4.213974","0.097056","4.232689","91.466497","14.176484","75.929068","2.076984","13.214458","24.398642","8.977589","95.142884","72.537256","70.803339","24.843107","16.633618","1.921517","1.510100","2023-02-01" -"OR","9.7","5.4","13.9","5.2","9.8","4.3","47.9","3.17","0.93","0.74","1.67","4.83","4.21","0.45","0.07","54.6","57.9","0.9","0.32562","3.07921","2.3","32471","13.308442","6.389020","51.941615","2.617117","2.431452","5.745072","0.185699","2.685213","94.926349","15.629524","82.965184","1.415088","20.172390","11.943572","8.776540","93.718219","75.768330","77.416431","18.273032","15.092847","1.203002","1.385720","2023-02-01" -"PA","8.0","5.1","8.3","5.5","8.4","5.4","96.0","2.03","0.87","0.78","1.65","3.68","3.26","0.54","0.09","50.8","48.6","0.9","0.41560","3.32079","1.8","26519","15.197922","6.622871","55.267584","1.551957","1.933698","2.951130","0.212735","3.348926","87.934847","15.708643","70.684062","1.486137","19.322636","19.718950","7.379146","94.139844","72.521877","70.032500","20.988444","9.326240","1.397721","0.672073","2023-02-01" 
-"PR","4.4","6.7","5.8","5.8","3.0","10.6","23.2","0.00","1.39","2.85","4.24","4.24","3.35","2.05","0.46","46.3","46.6","0.0","0.40967","2.99292","5.8","45874",,,,,,,,,,,"92.917090","0.648318",,,,,"77.337036","97.667416",,,,,"2023-02-01" -"RI","9.4","0.3","7.2","0.5","4.2","0.5","90.3","2.35","0.43","0.82","1.25","3.59","3.13","0.58","0.06","49.8","48.8","1.0","0.35556","3.05791","2.9","52981","16.642184","6.047845","45.177640","1.240739","2.869224","2.185293","0.087767","3.832213","91.629133","18.238520","72.640066","1.492483","18.642817","14.973592","7.519538","95.740607","76.334540","75.367531","23.154435","13.624046","1.267121","0.841478","2023-02-01" -"SC","4.1","0.7","4.3","0.4","7.8","0.8","83.1","2.24","1.03","0.64","1.67","3.91","3.39","0.40","0.07","57.7","54.0","0.8","0.33566","3.03900","1.7","17843","14.164727","7.366458","56.441614","1.365320","2.875399","2.837575","0.241224","3.287655","92.143804","14.863037","80.608406","1.833134","18.000137","19.891573","9.662829","94.396477","72.465571","77.092520","23.314670","12.731835","1.765981","1.048664","2023-02-01" -"SD","4.6","1.5","5.0","1.8","4.5","1.2","48.8","2.33","0.44","0.79","1.23","3.56","3.03","0.51","0.05","53.8","41.5","0.7","0.30817","2.93111","1.5","18554","16.834104","5.976653","51.417000","2.720656","3.291942","5.264478","0.143154","5.459047","95.028219","19.889467","85.487463","1.752324","18.448584","15.008498","6.788145","96.995369","80.949700","77.533005","16.528802","11.616449","1.264200","0.841901","2023-02-01" -"TN","5.4","2.0","4.4","3.0","4.0","3.4","77.2","2.05","1.14","0.57","1.70","3.75","3.19","0.35","0.08","55.7","51.3","0.9","0.39973","3.25675","1.3","32227","16.810657","7.716838","52.765450","1.615227","3.073131","9.865683","0.185103","3.401994","88.845066","15.912567","75.656471","1.912190","21.131910","29.392660","8.691131","93.233403","73.199093","73.972533","20.181561","11.435387","1.326567","0.979845","2023-02-01" -"TX","6.2","2.2","6.4","3.3","7.6","4.1","69.5","1.96","1.05","0.37","1.42","3.38","2.93","0.28","0.06","59.6","63.2","0.8","0.41931","3.22391","2.9","40448","17.074427","5.029710","52.196518","1.567410","1.416943","5.039136","0.043473","3.349034","95.502194","10.582499","83.231536","1.884036","14.435950","22.055441","7.508460","96.323202","66.829074","78.081775","23.965753","12.226630","1.777216","1.126636","2023-02-01" -"UT","9.4","3.3","9.0","3.7","9.8","4.3","55.3","2.32","0.55","1.16","1.71","4.02","3.49","0.86","0.12","60.8","50.3","0.8","0.45973","3.32571","2.7","27251","13.928882","5.526550","45.600098","1.718026","2.895718","10.177801","0.060164","3.226548","95.637162","13.366906","89.303507","1.329467","16.885494","20.695680","6.323916","95.588038","71.460153","84.549547","18.248338","11.729645","0.984483","0.807143","2023-02-01" -"VA","11.3","4.5","11.5","4.5","11.5","5.1","92.5","1.95","1.02","0.62","1.63","3.59","3.09","0.40","0.08","56.5","55.5","1.1","0.40404","3.21630","1.6","18869","17.088182","7.173330","52.658001","1.136915","2.622878","4.366234","0.135625","3.526327","88.492761","13.836493","70.953070","1.637843","19.632030","20.288508","8.688855","94.426334","74.984786","72.172020","21.472724","12.399274","1.353924","1.005582","2023-02-01" 
-"VT","7.4","2.5","4.7","2.5","4.5","1.1","67.7","2.35","1.00","0.72","1.72","4.07","3.55","0.46","0.08","61.3","56.7","0.7","0.37203","3.19644","1.6","18110","19.695843","6.495890","48.690671","2.038359","2.892317","13.645847","0.109464","5.059416","91.166759","17.128111","80.112576","1.925287","22.700096","16.323132","7.682717","95.385608","79.195423","75.541526","17.119589","13.717848","1.197579","1.462406","2023-02-01" -"WA","18.0","8.1","20.0","6.0","17.6","7.6","64.8","2.52","0.79","0.90","1.69","4.21","3.56","0.62","0.09","54.9","53.1","1.0","0.39495","3.30419","2.5","66656","14.634771","6.167716","54.567819","1.994137","2.317844","13.320128","0.328683","2.767774","95.106071","13.810903","84.581613","1.364470","19.892669","12.209977","6.936666","94.905943","73.869151","79.524549","16.574808","11.553737","1.019159","0.836404","2023-02-01" -"WI","7.9","6.2","7.3","4.6","7.2","4.2","52.0","2.28","0.58","0.96","1.54","3.82","3.34","0.66","0.07","53.1","46.3","0.6","0.36456","3.15373","2.1","41912","13.690116","5.695565","50.198605","3.069005","3.102199","6.290265","0.079362","3.238508","95.575805","14.099089","87.490647","1.143070","18.218220","15.397054","7.371773","95.116789","75.984216","79.213494","19.847340","13.503943","1.294014","1.303517","2023-02-01" -"WV","11.3","2.4","10.3","2.4","10.2","2.9","73.5","2.11","0.96","0.69","1.65","3.76","3.22","0.36","0.05","52.6","44.1","0.5","0.40919","3.24531","1.2","20821","16.293913","8.265190","44.432807","1.870960","3.791073","4.469620","0.143698","4.827009","95.376320","16.726475","78.814057","1.676784","20.695138","24.155520","9.562136","95.960581","71.442064","74.423412","20.816512","13.386938","1.531578","1.202551","2023-02-01" -"WY","6.0","6.3","6.2","3.9","8.3","3.3","56.5","2.25","0.54","0.86","1.40","3.65","3.10","0.63","0.05","58.4","48.8","0.4","0.34348","2.98457","2.4","20675","15.041404","7.127500","45.861098","2.375188","3.778759","7.096262","0.253890","4.172703","96.198127","19.817071","79.418052","2.199333","15.663748","14.880130","6.360152","96.324929","79.122969","73.355561","14.338753","14.813702","1.098024","1.308557","2023-02-01" diff --git a/mathesar/tests/data/multi_column_foreign_key_base_table.csv b/mathesar/tests/data/multi_column_foreign_key_base_table.csv deleted file mode 100644 index 7cf6454407..0000000000 --- a/mathesar/tests/data/multi_column_foreign_key_base_table.csv +++ /dev/null @@ -1,3 +0,0 @@ -Center,Center City,Status,Case Number,Patent Number,Application SN,Title,Patent Expiration Date -NASA Kennedy Space Center,LA,Application,KSC-12871,0,"13/033,085",Polyimide Wire Insulation Repair System, -NASA Kennedy Space Center,Florida,Issued,ARC-14281-3,7191161,"10/637,087",Method For Constructing Composite Response Surfaces By Combining Neural Networks With Polynomial Interpolation Or Estimation Techniques,11/18/2020 \ No newline at end of file diff --git a/mathesar/tests/data/multi_column_reference_table.csv b/mathesar/tests/data/multi_column_reference_table.csv deleted file mode 100644 index c7c168569a..0000000000 --- a/mathesar/tests/data/multi_column_reference_table.csv +++ /dev/null @@ -1,4 +0,0 @@ -Name,City,PostalCode -NASA Kennedy Space Center,LA,90001 -NASA Kennedy Space Center,Florida,32118 -NASA Ames Research Center,LA,9001 \ No newline at end of file diff --git a/mathesar/tests/data/reference_table.csv b/mathesar/tests/data/reference_table.csv deleted file mode 100644 index 6a7f8e9b93..0000000000 --- a/mathesar/tests/data/reference_table.csv +++ /dev/null @@ -1,5 +0,0 @@ -Id,Center Name -1,NASA 
Kennedy Space Center -2,NASA Ames Research Center -3,NASA Armstrong Flight Research Center -4,NASA Goddard Space Flight Center \ No newline at end of file diff --git a/mathesar/tests/data/relation_tables/author.csv b/mathesar/tests/data/relation_tables/author.csv deleted file mode 100644 index b6748edc99..0000000000 --- a/mathesar/tests/data/relation_tables/author.csv +++ /dev/null @@ -1,3 +0,0 @@ -first_name,last_name -Matthew,Brown -Mark,Smith \ No newline at end of file diff --git a/mathesar/tests/data/relation_tables/items.csv b/mathesar/tests/data/relation_tables/items.csv deleted file mode 100644 index 64639631f3..0000000000 --- a/mathesar/tests/data/relation_tables/items.csv +++ /dev/null @@ -1,2 +0,0 @@ -publication,acquisition_date,acquisition_price -1,1973-06-23,5.32 \ No newline at end of file diff --git a/mathesar/tests/data/relation_tables/publication.csv b/mathesar/tests/data/relation_tables/publication.csv deleted file mode 100644 index 354b89fcc4..0000000000 --- a/mathesar/tests/data/relation_tables/publication.csv +++ /dev/null @@ -1,2 +0,0 @@ -title,publisher,author,co_author,isbn -Pressure Should Old,1,1,2,1-953782-67-1 \ No newline at end of file diff --git a/mathesar/tests/data/relation_tables/publisher.csv b/mathesar/tests/data/relation_tables/publisher.csv deleted file mode 100644 index ab085806f2..0000000000 --- a/mathesar/tests/data/relation_tables/publisher.csv +++ /dev/null @@ -1,2 +0,0 @@ -name -Ruiz \ No newline at end of file diff --git a/mathesar/tests/data/self_referential_table.csv b/mathesar/tests/data/self_referential_table.csv deleted file mode 100644 index e0a629978c..0000000000 --- a/mathesar/tests/data/self_referential_table.csv +++ /dev/null @@ -1,6 +0,0 @@ -Id,Name,Parent -1,Parent, -2,Child1,1 -3,Parent2, -4,Child3,2 -5,Child2_1,3 \ No newline at end of file diff --git a/mathesar/tests/data/table_with_id_col.csv b/mathesar/tests/data/table_with_id_col.csv deleted file mode 100644 index af7932415f..0000000000 --- a/mathesar/tests/data/table_with_id_col.csv +++ /dev/null @@ -1,5 +0,0 @@ -id,Title -1,Polyimide Wire Insulation Repair System -2,Autogenic-Feedback Training Exercise Method & System -3,IMPROVED RAM BOOSTER -5,Spatial Standard Observer \ No newline at end of file diff --git a/mathesar/tests/data/textfile.txt b/mathesar/tests/data/textfile.txt deleted file mode 100644 index 7459c660ce..0000000000 --- a/mathesar/tests/data/textfile.txt +++ /dev/null @@ -1 +0,0 @@ -I am a test file. 
diff --git a/mathesar/tests/data/type_inference.csv b/mathesar/tests/data/type_inference.csv deleted file mode 100644 index 48d76d1a17..0000000000 --- a/mathesar/tests/data/type_inference.csv +++ /dev/null @@ -1,5 +0,0 @@ -col_1,col_2,col_3,col_4,col_5,col_6,col_7 -0,0,t,t,a,2,"$5" -2,1,false,false,cat,1,"$1" -1,1,true,2,mat,0,"$2" -0,0,f,0,bat,0,"$3" diff --git a/mathesar/tests/display_options_inference/__init__.py b/mathesar/tests/display_options_inference/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/mathesar/tests/display_options_inference/test_money.py b/mathesar/tests/display_options_inference/test_money.py deleted file mode 100644 index 5698f4383b..0000000000 --- a/mathesar/tests/display_options_inference/test_money.py +++ /dev/null @@ -1,40 +0,0 @@ -import pytest -from django.core.files.base import File - -from db.columns.operations.select import get_column_attnum_from_name -from db.metadata import get_empty_metadata -from mathesar.models.deprecated import DataFile, Table -from mathesar.utils.display_options_inference import infer_mathesar_money_display_options - -create_display_options_test_list = [ - ('col_4', { - 'number_format': 'english', - 'currency_symbol': '₿', - 'symbol_location': 'after-minus' - }), - ('col_3', { - 'number_format': 'english', - 'currency_symbol': '$', - 'symbol_location': 'after-minus' - }) -] - - -@pytest.mark.parametrize("col_name, expected_display_options", create_display_options_test_list) -def test_display_options_inference(client, patent_schema, col_name, expected_display_options): - engine = patent_schema._sa_engine - table_name = 'Type Inference Table' - file = 'mathesar/tests/data/display_options_inference.csv' - with open(file, 'rb') as csv_file: - data_file = DataFile.objects.create(file=File(csv_file), type='csv') - - body = { - 'data_files': [data_file.id], - 'name': table_name, - 'schema': patent_schema.id, - } - response_table = client.post('/api/db/v0/tables/', body).json() - table = Table.objects.get(id=response_table['id']) - column_attnum = get_column_attnum_from_name(table.oid, col_name, engine, metadata=get_empty_metadata()) - inferred_display_options = infer_mathesar_money_display_options(table.oid, engine, column_attnum) - assert inferred_display_options == expected_display_options diff --git a/mathesar/tests/filters/test_filters.py b/mathesar/tests/filters/test_filters.py deleted file mode 100644 index fc19db6bf9..0000000000 --- a/mathesar/tests/filters/test_filters.py +++ /dev/null @@ -1,30 +0,0 @@ -from mathesar.filters.base import get_available_filters - - -def test_available_filters_structure(engine_with_schema): - engine, _ = engine_with_schema - available_filters = get_available_filters(engine) - assert len(available_filters) > 0 - available_filter_ids = tuple(filter['id'] for filter in available_filters) - some_filters_that_we_expect_to_be_there = [ - 'greater', - 'lesser', - 'null', - 'not_null', - 'equal', - 'greater_or_equal', - 'contains_case_insensitive', - 'starts_with_case_insensitive', - 'uri_authority_contains', - 'uri_scheme_equals', - 'email_domain_contains', - 'email_domain_equals', - 'json_array_length_equals', - ] - - for expected_filter in some_filters_that_we_expect_to_be_there: - assert expected_filter in available_filter_ids - - for filter in available_filters: - for parameter in filter['parameters']: - assert len(parameter['ui_types']) > 0 diff --git a/mathesar/tests/imports/test_csv.py b/mathesar/tests/imports/test_csv.py index 0f97a6d5ba..304f2b37ae 100644 --- 
a/mathesar/tests/imports/test_csv.py +++ b/mathesar/tests/imports/test_csv.py @@ -62,6 +62,7 @@ def check_csv_upload(table, table_name, schema, num_records, row, cols): assert col in table.sa_column_names +@pytest.mark.skip(reason="We removed models used in the `create_table_from_data_file` setup function") def test_csv_upload(data_file, schema): table_name = "NASA 1" table = create_table_from_data_file(data_file, table_name, schema) @@ -91,6 +92,7 @@ def test_csv_upload(data_file, schema): ) +@pytest.mark.skip(reason="We removed models used in the `create_table_from_data_file` setup function") def test_headerless_csv_upload(headerless_data_file, schema): table_name = "NASA no headers" table = create_table_from_data_file(headerless_data_file, table_name, schema) @@ -113,6 +115,7 @@ def test_headerless_csv_upload(headerless_data_file, schema): ) +@pytest.mark.skip(reason="We removed models used in the `create_table_from_data_file` setup function") def test_col_names_with_spaces_csv(col_names_with_spaces_data_file, schema): table_name = "Column names with spaces" table = create_table_from_data_file(col_names_with_spaces_data_file, table_name, schema) @@ -130,6 +133,7 @@ def test_col_names_with_spaces_csv(col_names_with_spaces_data_file, schema): ) +@pytest.mark.skip(reason="We removed models used in the `create_table_from_data_file` setup function") def test_col_headers_empty_csv(col_headers_empty_data_file, schema): table_name = "Empty column header" table = create_table_from_data_file(col_headers_empty_data_file, table_name, schema) @@ -144,6 +148,7 @@ def test_col_headers_empty_csv(col_headers_empty_data_file, schema): @pytest.mark.skip(reason="msar.add_mathesar_table no longer raises an exception if a table with the same name already exists in the database.") +@pytest.mark.skip(reason="We removed models used in the `create_table_from_data_file` setup function") def test_csv_upload_with_duplicate_table_name(data_file, schema): table_name = "NASA 2" @@ -157,6 +162,7 @@ def test_csv_upload_with_duplicate_table_name(data_file, schema): create_table_from_data_file(data_file, table_name, schema) +@pytest.mark.skip(reason="We removed models used in the `create_table_from_data_file` setup function") def test_csv_upload_table_imported_to(data_file, schema): table = create_table_from_data_file(data_file, "NASA", schema) data_file.refresh_from_db() diff --git a/mathesar/tests/imports/test_excel.py b/mathesar/tests/imports/test_excel.py index 017ffd9c8a..8009989144 100644 --- a/mathesar/tests/imports/test_excel.py +++ b/mathesar/tests/imports/test_excel.py @@ -25,6 +25,7 @@ def check_excel_upload(table, table_name, schema, num_records, row, cols): assert col in table.sa_column_names +@pytest.mark.skip(reason="We removed models used in the `create_table_from_data_file` setup function") def test_excel_upload(data_file, engine_with_schema): engine, schema_name = engine_with_schema schema_oid = get_schema_oid_from_name(schema_name, engine) @@ -58,6 +59,7 @@ def test_excel_upload(data_file, engine_with_schema): @pytest.mark.skip(reason="msar.add_mathesar_table no longer raises an exception if a table with the same name already exists in the database.") +@pytest.mark.skip(reason="We removed models used in the `create_table_from_data_file` setup function") def test_excel_upload_with_duplicate_table_name(data_file, engine_with_schema): table_name = "NASA 2" @@ -74,6 +76,7 @@ def test_excel_upload_with_duplicate_table_name(data_file, engine_with_schema): create_table_from_data_file(data_file, table_name, 
schema) +@pytest.mark.skip(reason="We removed models used in the `create_table_from_data_file` setup function") def test_excel_upload_table_imported_to(data_file, engine_with_schema): engine, schema_name = engine_with_schema schema_oid = get_schema_oid_from_name(schema_name, engine) diff --git a/mathesar/tests/imports/test_json.py b/mathesar/tests/imports/test_json.py index 514f1199a5..20e7fe2347 100644 --- a/mathesar/tests/imports/test_json.py +++ b/mathesar/tests/imports/test_json.py @@ -38,6 +38,7 @@ def check_json_upload(table, table_name, schema, num_records, row, cols): assert col in table.sa_column_names +@pytest.mark.skip(reason="We removed models used in the `create_table_from_data_file` setup function") def test_json_upload(data_file, schema): table_name = "NASA 1" table = create_table_from_data_file(data_file, table_name, schema) @@ -81,6 +82,7 @@ def test_json_upload_with_duplicate_table_name(data_file, schema): create_table_from_data_file(data_file, table_name, schema) +@pytest.mark.skip(reason="We removed models used in the `create_table_from_data_file` setup function") def test_json_upload_table_imported_to(data_file, schema): table = create_table_from_data_file(data_file, "NASA", schema) data_file.refresh_from_db() diff --git a/mathesar/tests/query/test_base.py b/mathesar/tests/query/test_base.py index c8611843ba..05005a7291 100644 --- a/mathesar/tests/query/test_base.py +++ b/mathesar/tests/query/test_base.py @@ -1,8 +1,11 @@ +import pytest + from mathesar.models.query import Exploration from db.queries.base import DBQuery, InitialColumn from db.transforms import base as transforms_base +@pytest.mark.skip(reason="We removed models used in the `create_table_from_data_file` setup function") def test_convert_to_db_query(create_patents_table, get_uid): base_table_dj = create_patents_table(table_name=get_uid()) col1_dj = base_table_dj.get_column_by_name('Center') diff --git a/mathesar/tests/rpc/exceptions/test_error_codes.py b/mathesar/tests/rpc/exceptions/test_error_codes.py index a50f01b521..dd170d36db 100644 --- a/mathesar/tests/rpc/exceptions/test_error_codes.py +++ b/mathesar/tests/rpc/exceptions/test_error_codes.py @@ -2,7 +2,7 @@ from psycopg.errors import BadCopyFileFormat from django.core.exceptions import FieldDoesNotExist -from mathesar.utils.connections import BadInstallationTarget +from mathesar.utils.permissions import BadInstallationTarget from db.functions.exceptions import UnknownDBFunctionID from sqlalchemy.exc import IntegrityError from http.client import CannotSendRequest diff --git a/mathesar/tests/rpc/exceptions/test_error_handler.py b/mathesar/tests/rpc/exceptions/test_error_handler.py index f701131343..253b7a3fc1 100644 --- a/mathesar/tests/rpc/exceptions/test_error_handler.py +++ b/mathesar/tests/rpc/exceptions/test_error_handler.py @@ -3,7 +3,7 @@ from modernrpc.exceptions import RPCException from psycopg.errors import BadCopyFileFormat from django.core.exceptions import FieldDoesNotExist -from mathesar.utils.connections import BadInstallationTarget +from mathesar.utils.permissions import BadInstallationTarget from db.functions.exceptions import UnknownDBFunctionID from http.client import CannotSendRequest diff --git a/mathesar/tests/rpc/test_connections.py b/mathesar/tests/rpc/test_connections.py deleted file mode 100644 index ea6cf14661..0000000000 --- a/mathesar/tests/rpc/test_connections.py +++ /dev/null @@ -1,80 +0,0 @@ -import pytest -from unittest.mock import patch -import mathesar.rpc.connections as rpc_conn - - -@pytest.mark.parametrize( - 
"create_db", [True, False] -) -@pytest.mark.parametrize( - "connection_id", [None, 3] -) -@pytest.mark.parametrize( - "sample_data", [ - [], - ['library_management'], - ['movie_collection'], - ['library_management', 'movie_collection'] - ] -) -def test_add_from_known_connection(create_db, connection_id, sample_data): - with patch.object(rpc_conn, 'ConnectionReturn'): - with patch.object( - rpc_conn.connections, - 'copy_connection_from_preexisting' - ) as mock_exec: - rpc_conn.add_from_known_connection( - nickname='mathesar_tables', - database='mathesar', - create_db=create_db, - connection_id=connection_id, - sample_data=sample_data - ) - call_args = mock_exec.call_args_list[0][0] - assert call_args[0] == { - 'connection_type': 'internal_database', - 'connection_id': connection_id - } or { - 'connection_type': 'user_database', - 'connection_id': connection_id - } - assert call_args[1] == 'mathesar_tables' - assert call_args[2] == 'mathesar' - assert call_args[3] == create_db - assert call_args[4] == sample_data - - -@pytest.mark.parametrize( - "port", ['5432', 5432] -) -@pytest.mark.parametrize( - "sample_data", [ - [], - ['library_management'], - ['movie_collection'], - ['library_management', 'movie_collection'] - ] -) -def test_add_from_scratch(port, sample_data): - with patch.object(rpc_conn, 'ConnectionReturn'): - with patch.object( - rpc_conn.connections, - 'create_connection_from_scratch' - ) as mock_exec: - rpc_conn.add_from_scratch( - nickname='mathesar_tables', - database='mathesar', - user='mathesar_user', - password='mathesar_password', - host='mathesar_dev_db', - port=port, - sample_data=sample_data - ) - call_args = mock_exec.call_args_list[0][0] - assert call_args[0] == 'mathesar_user' - assert call_args[1] == 'mathesar_password' - assert call_args[2] == 'mathesar_dev_db' - assert call_args[3] == port - assert call_args[4] == 'mathesar_tables' - assert call_args[5] == 'mathesar' - assert call_args[6] == sample_data diff --git a/mathesar/tests/rpc/test_endpoints.py b/mathesar/tests/rpc/test_endpoints.py index 0a169e3c53..bc9c612a8b 100644 --- a/mathesar/tests/rpc/test_endpoints.py +++ b/mathesar/tests/rpc/test_endpoints.py @@ -10,7 +10,6 @@ from mathesar.rpc import collaborators from mathesar.rpc import columns -from mathesar.rpc import connections from mathesar.rpc import constraints from mathesar.rpc import data_modeling from mathesar.rpc import databases @@ -81,22 +80,6 @@ [user_is_authenticated] ), - ( - connections.add_from_known_connection, - "connections.add_from_known_connection", - [user_is_superuser] - ), - ( - connections.add_from_scratch, - "connections.add_from_scratch", - [user_is_superuser] - ), - ( - connections.grant_access_to_user, - "connections.grant_access_to_user", - [user_is_superuser] - ), - ( constraints.list_, "constraints.list", diff --git a/mathesar/tests/test_models.py b/mathesar/tests/test_models.py deleted file mode 100644 index 2c4da0dddb..0000000000 --- a/mathesar/tests/test_models.py +++ /dev/null @@ -1,82 +0,0 @@ -import pytest -from unittest.mock import patch -from django.core.cache import cache - -from mathesar.models.deprecated import Connection, Schema, Table, schema_utils -from mathesar.utils.models import attempt_dumb_query - - -def test_schema_name_sets_cache(monkeypatch, test_db_model): - monkeypatch.setattr( - Schema, '_sa_engine', lambda x: None - ) - monkeypatch.setattr( - schema_utils, 'get_schema_name_from_oid', lambda *_: 'myname' - ) - cache.clear() - schema = Schema(oid=123, database=test_db_model) - name = schema.name - 
assert cache.get(f"{schema.database.name}_schema_name_{schema.oid}") == name - - -def test_schema_name_uses_cache(monkeypatch, test_db_model): - monkeypatch.setattr( - Schema, '_sa_engine', lambda _: None - ) - cache.clear() - with patch.object( - schema_utils, 'get_schema_name_from_oid', return_value='myname' - ) as mock_get_name: - schema = Schema(oid=123, database=test_db_model) - name_one = schema.name - name_two = schema.name - assert name_one == name_two - assert mock_get_name.call_count == 1 - - -def test_schema_name_handles_missing(monkeypatch, test_db_model): - monkeypatch.setattr( - Schema, '_sa_engine', lambda _: None - ) - cache.clear() - - def mock_name_getter(*_): - raise TypeError - monkeypatch.setattr( - schema_utils, 'get_schema_name_from_oid', mock_name_getter - ) - schema = Schema(oid=123, database=test_db_model) - name_ = schema.name - assert name_ == 'MISSING' - - -@pytest.mark.parametrize("model", [Connection, Schema, Table]) -def test_model_queryset_reflects_db_objects(model): - with patch('mathesar.state.base.reflect_db_objects') as mock_reflect: - model.objects.all() - mock_reflect.assert_called() - - -@pytest.mark.parametrize("model", [Connection, Schema, Table]) -def test_model_current_queryset_does_not_reflects_db_objects(model): - with patch('mathesar.state.base.reflect_db_objects') as mock_reflect: - model.current_objects.all() - mock_reflect.assert_not_called() - - -@pytest.mark.parametrize('iteration', range(2)) -def test_database_engine_cache_stability(FUN_create_dj_db, iteration, uid): - """ - We are using an engine cache to minimize new engine creations; however, a cached engine might - unexpectedly fail if its underlying database is dropped and then recreated. This test checks - that that is transparently handled. - - This test uses two iterations: first one creates a database, populates our engine cache, then - drops the database during cleanup; the second, recreates the database, fetches a cached engine, - and tests it. - """ - del iteration # An unused parameter - some_db_name = uid - FUN_create_dj_db(some_db_name) - db_model = Connection.objects.get(name=some_db_name) - attempt_dumb_query(db_model._sa_engine) diff --git a/mathesar/tests/test_multi_db.py b/mathesar/tests/test_multi_db.py deleted file mode 100644 index 96a3bd5df1..0000000000 --- a/mathesar/tests/test_multi_db.py +++ /dev/null @@ -1,85 +0,0 @@ -import pytest -from django.core.exceptions import ValidationError - -from mathesar.models.deprecated import Table, Schema, Connection - - -@pytest.fixture(autouse=True) -def multi_db_test_db(FUN_create_dj_db, uid): - db_name = f"mathesar_multi_db_test_{uid}" - FUN_create_dj_db(db_name) - return db_name - - -@pytest.fixture -def multi_db_engine(multi_db_test_db, MOD_engine_cache): - return MOD_engine_cache(multi_db_test_db) - - -# NOTE this test might result in false failures, because the way we define user -# databases might not support runtime changes in certain edge cases (and such changes are -# performed in this tests' fixtures). Talk to Brent or Dom in case of problems. 
-def test_multi_db_schema(engine, multi_db_engine, client, create_db_schema): - test_schemas = ["test_schema_1", "test_schema_2"] - for schema_name in test_schemas: - create_db_schema(schema_name, engine) - create_db_schema("multi_db_" + schema_name, multi_db_engine) - - response = client.get('/api/db/v0/schemas/') - response_data = response.json() - response_schemas = [ - s['name'] for s in response_data['results'] if s['name'] != 'public' - ] - - assert response.status_code == 200 - assert len(response_schemas) == 4 - - expected_schemas = test_schemas + ["multi_db_" + s for s in test_schemas] - assert set(response_schemas) == set(expected_schemas) - - -# NOTE this test might result in false failures, because the way we define user -# databases might not support runtime changes in certain edge cases (and such changes are -# performed in this test's fixtures). Talk to Brent or Dom in case of problems. -def test_multi_db_tables(engine, multi_db_engine, client, create_mathesar_table): - schema_name = "test_multi_db_tables_schema" - test_tables = ["test_table_1", "test_table_2"] - for table_name in test_tables: - create_mathesar_table(table_name, schema_name, [], engine) - create_mathesar_table( - "multi_db_" + table_name, schema_name, [], multi_db_engine - ) - - response = client.get('/api/db/v0/tables/') - - assert response.status_code == 200 - - response_tables = [s['name'] for s in response.json()['results']] - expected_tables = test_tables + ["multi_db_" + s for s in test_tables] - for table_name in expected_tables: - assert table_name in response_tables - - -def test_multi_db_oid_unique(): - """ - Ensure the same OID is allowed for different dbs - """ - schema_oid = 5000 - table_oid = 5001 - all_dbs = Connection.objects.all() - assert len(all_dbs) > 1 - for db in all_dbs: - schema = Schema.objects.create(oid=schema_oid, database=db) - Table.objects.create(oid=table_oid, schema=schema) - - -def test_single_db_oid_unique_exception(): - table_oid = 5001 - dbs = Connection.objects.all() - assert len(dbs) > 0 - db = dbs[0] - schema_1 = Schema.objects.create(oid=4000, database=db) - schema_2 = Schema.objects.create(oid=5000, database=db) - with pytest.raises(ValidationError): - Table.objects.create(oid=table_oid, schema=schema_1) - Table.objects.create(oid=table_oid, schema=schema_2) diff --git a/mathesar/urls.py b/mathesar/urls.py index 17f821d555..42cc7566dc 100644 --- a/mathesar/urls.py +++ b/mathesar/urls.py @@ -10,39 +10,15 @@ from mathesar.users.superuser_create import SuperuserFormView db_router = routers.DefaultRouter() -db_router.register(r'tables', db_viewsets.TableViewSet, basename='table') -db_router.register(r'queries', db_viewsets.QueryViewSet, basename='query') -db_router.register(r'links', db_viewsets.LinkViewSet, basename='links') -db_router.register(r'schemas', db_viewsets.SchemaViewSet, basename='schema') -db_router.register(r'connections', db_viewsets.ConnectionViewSet, basename='connection') db_router.register(r'data_files', db_viewsets.DataFileViewSet, basename='data-file') -db_table_router = routers.NestedSimpleRouter(db_router, r'tables', lookup='table') -db_table_router.register(r'records', db_viewsets.RecordViewSet, basename='table-record') -db_table_router.register(r'settings', db_viewsets.TableSettingsViewSet, basename='table-setting') -db_table_router.register(r'columns', db_viewsets.ColumnViewSet, basename='table-column') -db_table_router.register(r'constraints', db_viewsets.ConstraintViewSet, basename='table-constraint') - ui_router = routers.DefaultRouter()
-ui_router.register(r'version', ui_viewsets.VersionViewSet, basename='version')
-ui_router.register(r'connections', ui_viewsets.ConnectionViewSet, basename='connection')
 ui_router.register(r'users', ui_viewsets.UserViewSet, basename='user')
-ui_router.register(r'database_roles', ui_viewsets.DatabaseRoleViewSet, basename='database_role')
-ui_router.register(r'schema_roles', ui_viewsets.SchemaRoleViewSet, basename='schema_role')
-
-ui_table_router = routers.NestedSimpleRouter(db_router, r'tables', lookup='table')
-ui_table_router.register(r'records', ui_viewsets.RecordViewSet, basename='table-record')
-
-# Shares
-ui_router.register(r'tables/(?P<table_pk>[^/.]+)/shares', ui_viewsets.SharedTableViewSet, basename='shared-table')
-ui_router.register(r'queries/(?P<query_pk>[^/.]+)/shares', ui_viewsets.SharedQueryViewSet, basename='shared-query')
 
 urlpatterns = [
     path('api/rpc/v0/', views.MathesarRPCEntryPoint.as_view()),
     path('api/db/v0/', include(db_router.urls)),
-    path('api/db/v0/', include(db_table_router.urls)),
     path('api/ui/v0/', include(ui_router.urls)),
-    path('api/ui/v0/', include(ui_table_router.urls)),
     path('api/ui/v0/reflect/', views.reflect_all, name='reflect_all'),
     path('auth/password_reset_confirm', MathesarPasswordResetConfirmView.as_view(), name='password_reset_confirm'),
     path('auth/login/', superuser_exist(LoginView.as_view(redirect_authenticated_user=True)), name='login'),
diff --git a/mathesar/utils/connections.py b/mathesar/utils/connections.py
deleted file mode 100644
index b999e87789..0000000000
--- a/mathesar/utils/connections.py
+++ /dev/null
@@ -1,98 +0,0 @@
-"""Utilities to help with creating and managing connections in Mathesar."""
-from sqlalchemy.exc import OperationalError
-from mathesar.models.deprecated import Connection
-from db import install, connection as dbconn
-
-
-class BadInstallationTarget(Exception):
-    """Raise when an attempt is made to install on a disallowed target"""
-    pass
-
-
-def copy_connection_from_preexisting(
-    connection, nickname, db_name, create_db, sample_data
-):
-    if connection['connection_type'] == 'internal_database':
-        conn_model = Connection.create_from_settings_key('default')
-    elif connection['connection_type'] == 'user_database':
-        conn_model = Connection.current_objects.get(id=connection['id'])
-        conn_model.id = None
-    else:
-        raise KeyError("connection_type")
-    root_db = conn_model.db_name
-    return _save_and_install(
-        conn_model, db_name, root_db, nickname, create_db, sample_data
-    )
-
-
-def create_connection_from_scratch(
-    user, password, host, port, nickname, db_name, sample_data
-):
-    conn_model = Connection(username=user, password=password, host=host, port=port)
-    root_db = db_name
-    return _save_and_install(
-        conn_model, db_name, root_db, nickname, False, sample_data
-    )
-
-
-def create_connection_with_new_user(
-    connection, user, password, nickname, db_name, create_db, sample_data
-):
-    conn_model = copy_connection_from_preexisting(
-        connection, nickname, db_name, create_db, []
-    )
-    engine = conn_model._sa_engine
-    conn_model.username = user
-    conn_model.password = password
-    conn_model.save()
-    dbconn.execute_msar_func_with_engine(
-        engine,
-        'create_role',
-        conn_model.username,
-        conn_model.password,
-        True
-    )
-    _load_sample_data(conn_model._sa_engine, sample_data)
-    return conn_model
-
-
-def _save_and_install(
-    conn_model, db_name, root_db, nickname, create_db, sample_data
-):
-    conn_model.name = nickname
-    conn_model.db_name = db_name
-    _validate_conn_model(conn_model)
-    conn_model.save()
-    try:
-        install.install_mathesar(
-            database_name=conn_model.db_name,
-            username=conn_model.username,
-            password=conn_model.password,
-            hostname=conn_model.host,
-            port=conn_model.port,
-            skip_confirm=True,
-            create_db=create_db,
-            root_db=root_db,
-        )
-    except OperationalError as e:
-        conn_model.delete()
-        raise e
-    _load_sample_data(conn_model._sa_engine, sample_data)
-    return conn_model
-
-
-def _load_sample_data(engine, sample_data):
-    pass
-
-
-def _validate_conn_model(conn_model):
-    internal_conn_model = Connection.create_from_settings_key('default')
-    if (
-        internal_conn_model is not None
-        and conn_model.host == internal_conn_model.host
-        and conn_model.port == internal_conn_model.port
-        and conn_model.db_name == internal_conn_model.db_name
-    ):
-        raise BadInstallationTarget(
-            "Mathesar can't be installed in the internal DB namespace"
-        )
diff --git a/mathesar/utils/display_options_inference.py b/mathesar/utils/display_options_inference.py
deleted file mode 100644
index c5799f4104..0000000000
--- a/mathesar/utils/display_options_inference.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import json
-from importlib import resources as impresources
-
-from thefuzz import fuzz
-
-from db.columns.operations.select import get_column_attnum_from_name
-from db.types import base
-from db.types.base import get_qualified_name
-from db.types.custom.money import get_first_money_array_with_symbol
-from db.metadata import get_empty_metadata
-
-from mathesar import data
-
-MATHESAR_MONEY = get_qualified_name(base.MathesarCustomType.MATHESAR_MONEY.value)
-
-
-def infer_mathesar_money_display_options(table_oid, engine, column_attnum):
-    """
-    Display options are inferred from the values of the first valid row with a currency symbol.
-    """
-    money_array = get_first_money_array_with_symbol(table_oid, engine, column_attnum)
-    if money_array is None:
-        return None
-    else:
-        try:
-            inp_file = (impresources.files(data) / 'currency_info.json')
-            with inp_file.open("rb") as f:  # or "rt" as text file with universal newlines
-                currency_dict = json.load(f)
-        except AttributeError:
-            # if Python < 3.9, fall back to the method deprecated in 3.11.
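            # (importlib.resources.files() was added in Python 3.9 and
            # importlib.resources.open_text() was deprecated in Python 3.11,
            # hence this AttributeError fallback.)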
-            currency_dict = json.load(impresources.open_text(data, 'currency_info.json'))
-        greatest_currency_similarity_score = 10  # Threshold score
-        selected_currency_details = None
-        for currency_code, currency_details in currency_dict.items():
-            currency_similarity_score = fuzz.ratio(currency_details['currency_symbol'], money_array[3])
-            if currency_similarity_score == 100:
-                selected_currency_details = currency_details
-                break
-            elif currency_similarity_score > greatest_currency_similarity_score:
-                greatest_currency_similarity_score = currency_similarity_score
-                selected_currency_details = currency_details
-        if selected_currency_details is not None:
-            return {
-                'currency_symbol': selected_currency_details['currency_symbol'],
-                'symbol_location': 'after-minus',
-                'number_format': 'english',
-            }
-        else:
-            return {
-                'currency_symbol': money_array[3],
-                'symbol_location': 'after-minus',
-                'number_format': 'english',
-            }
-
-
-def infer_table_column_display_options(table, col_name_type_dict):
-    inferred_display_options = {}
-    for column_name, columnn_type in col_name_type_dict.items():
-        inference_fn = display_options_inference_map.get(columnn_type.lower())
-        if inference_fn is not None:
-            # TODO reuse metadata
-            column_attnum = get_column_attnum_from_name(table.oid, column_name, table.schema._sa_engine, metadata=get_empty_metadata())
-            inferred_display_options[column_name] = inference_fn(table.oid, table.schema._sa_engine, column_attnum)
-        else:
-            inferred_display_options[column_name] = None
-    return inferred_display_options
-
-
-display_options_inference_map = {
-    MATHESAR_MONEY: infer_mathesar_money_display_options
-}
diff --git a/mathesar/utils/joins.py b/mathesar/utils/joins.py
deleted file mode 100644
index cc67105dba..0000000000
--- a/mathesar/utils/joins.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from db.tables.operations import select as ma_sel
-from db.metadata import get_empty_metadata
-from mathesar.models.deprecated import Table, Column, Constraint
-
-TARGET = 'target'
-FK_PATH = 'fk_path'
-JP_PATH = 'jp_path'
-DEPTH = 'depth'
-MULTIPLE_RESULTS = 'multiple_results'
-NAME = 'name'
-COLUMNS = 'columns'
-TABLES = 'tables'
-JOINABLE_TABLES = 'joinable_tables'
-TYPE = 'type'
-
-
-def get_processed_joinable_tables(table, limit=None, offset=None, max_depth=2):
-    raw_joinable_tables = ma_sel.get_joinable_tables(
-        table.schema._sa_engine,
-        get_empty_metadata(),
-        base_table_oid=table.oid,
-        max_depth=max_depth,
-        limit=limit,
-        offset=offset
-    )
-    table_info = {}
-    column_info = {}
-
-    database = table.schema.database
-
-    def _prefetch_metadata_side_effector(table):
-        columns = table.columns.all()
-        table_info.update(
-            {
-                table.id: {
-                    NAME: table.name, COLUMNS: [col.id for col in columns]
-                }
-            }
-        )
-        column_info.update(
-            {
-                col.id: {NAME: col.name, TYPE: col.db_type.id}
-                for col in columns
-            }
-        )
-        return table.id
-
-    joinable_tables = [
-        {
-            TARGET: _prefetch_metadata_side_effector(
-                Table.objects.get(
-                    schema__database=database,
-                    oid=row[ma_sel.TARGET],
-                )
-            ),
-            JP_PATH: [
-                [
-                    Column.objects.get(
-                        table__schema__database=database,
-                        table__oid=oid,
-                        attnum=attnum,
-                    ).id
-                    for oid, attnum in edge
-                ]
-                for edge in row[ma_sel.JP_PATH]
-            ],
-            FK_PATH: [
-                [Constraint.objects.get(table__schema__database=database, oid=oid).id, reverse]
-                for oid, reverse in row[ma_sel.FK_PATH]
-            ],
-            DEPTH: row[ma_sel.DEPTH],
-            MULTIPLE_RESULTS: row[ma_sel.MULTIPLE_RESULTS],
-        }
-        for row in raw_joinable_tables
-    ]
-    return {
-        JOINABLE_TABLES: joinable_tables,
-        TABLES: table_info,
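        # Response shape (sketch): JOINABLE_TABLES holds one record per join
        # candidate, keyed by target/jp_path/fk_path/depth/multiple_results,
        # while TABLES and COLUMNS map Django ids to name and type details,
        # presumably so clients can render join paths without extra requests.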
-        COLUMNS: column_info,
-    }
diff --git a/mathesar/utils/json.py b/mathesar/utils/json.py
deleted file mode 100644
index 52e3dec49d..0000000000
--- a/mathesar/utils/json.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import datetime
-
-from rest_framework.utils.encoders import JSONEncoder
-from rest_framework.renderers import JSONRenderer
-
-
-class MathesarJSONEncoder(JSONEncoder):
-    def default(self, obj):
-        if isinstance(obj, datetime.time):
-            representation = obj.isoformat()
-            return representation
-        elif isinstance(obj, datetime.datetime):
-            return obj.isoformat()
-        else:
-            return super().default(obj)
-
-
-class MathesarJSONRenderer(JSONRenderer):
-    encoder_class = MathesarJSONEncoder
diff --git a/mathesar/utils/permissions.py b/mathesar/utils/permissions.py
index 34e2363be4..14a0be7243 100644
--- a/mathesar/utils/permissions.py
+++ b/mathesar/utils/permissions.py
@@ -6,20 +6,13 @@ from mathesar.examples.library_dataset import load_library_dataset
 from mathesar.examples.movies_dataset import load_movies_dataset
 from mathesar.models.base import Server, Database, ConfiguredRole, UserDatabaseRoleMap
-from mathesar.models.deprecated import Connection
-from mathesar.models.users import User
-from mathesar.utils.connections import BadInstallationTarget
 
 INTERNAL_DB_KEY = 'default'
 
 
-def migrate_connection_for_user(connection_id, user_id):
-    """Move data from old-style connection model to new models."""
-    conn = Connection.current_objects.get(id=connection_id)
-    user = User.objects.get(id=user_id)
-    return _setup_connection_models(
-        conn.host, conn.port, conn.db_name, conn.username, conn.password, user
-    )
+class BadInstallationTarget(Exception):
+    """Raise when an attempt is made to install on a disallowed target"""
+    pass
 
 
 @transaction.atomic
diff --git a/mathesar/utils/schemas.py b/mathesar/utils/schemas.py
deleted file mode 100644
index 833cfc33b9..0000000000
--- a/mathesar/utils/schemas.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from django.core.exceptions import ObjectDoesNotExist
-from rest_framework.exceptions import ValidationError
-
-from db.schemas.operations.create import create_schema_via_sql_alchemy
-from db.schemas.utils import get_schema_oid_from_name, get_mathesar_schemas
-from mathesar.database.base import create_mathesar_engine
-from mathesar.models.deprecated import Schema, Connection
-
-
-def create_schema_and_object(name, connection_id, comment=None):
-    try:
-        database_model = Connection.objects.get(id=connection_id)
-        database_name = database_model.name
-    except ObjectDoesNotExist:
-        raise ValidationError({"database": f"Database '{database_name}' not found"})
-
-    engine = create_mathesar_engine(database_model)
-
-    all_schemas = get_mathesar_schemas(engine)
-    if name in all_schemas:
-        raise ValidationError({"name": f"Schema name {name} is not unique"})
-    create_schema_via_sql_alchemy(name, engine, comment)
-    schema_oid = get_schema_oid_from_name(name, engine)
-
-    schema = Schema.objects.create(oid=schema_oid, database=database_model)
-    return schema
diff --git a/mathesar/utils/tables.py b/mathesar/utils/tables.py
index 5ad5ea9715..45d027866e 100644
--- a/mathesar/utils/tables.py
+++ b/mathesar/utils/tables.py
@@ -1,89 +1,4 @@
-from sqlalchemy import MetaData
-
-from db.tables.operations.create import create_mathesar_table
-from db.tables.operations.infer_types import infer_table_column_types
-from mathesar.database.base import create_mathesar_engine
-from mathesar.imports.base import create_table_from_data_file
-from mathesar.models.deprecated import Table
 from mathesar.models.base import Database, TableMetaData
-from mathesar.state.django import reflect_columns_from_tables
-from mathesar.state import get_cached_metadata
-
-TABLE_NAME_TEMPLATE = 'Table'
-
-POSTGRES_NAME_LEN_CAP = 63
-
-
-def get_table_column_types(table, columns_might_have_defaults=True):
-    schema = table.schema
-    db_types = infer_table_column_types(
-        schema.name,
-        table.name,
-        schema._sa_engine,
-        metadata=get_cached_metadata(),
-        columns_might_have_defaults=columns_might_have_defaults,
-    )
-    col_types = {
-        col.name: db_type.id
-        for col, db_type in zip(table.sa_columns, db_types)
-        if not col.is_default
-        and not col.primary_key
-        and not col.foreign_keys
-    }
-    return col_types
-
-
-def gen_table_name(schema, data_files=None):
-    if data_files:
-        data_file = data_files[0]
-        base_name = data_file.base_name
-    else:
-        base_name = None
-
-    if base_name and len(base_name) >= POSTGRES_NAME_LEN_CAP - 8:
-        # Ensures we have at least 7 digits to work with
-        base_name = None
-
-    if not base_name:
-        base_name = TABLE_NAME_TEMPLATE
-        table_num = Table.objects.count()
-        name = f'{TABLE_NAME_TEMPLATE} {table_num}'
-    else:
-        table_num = 0
-        name = base_name
-
-    metadata = MetaData(bind=schema._sa_engine, schema=schema.name)
-    metadata.reflect()
-    while '.'.join((schema.name, name)) in metadata.tables:
-        table_num += 1
-        name = f'{base_name} {table_num}'
-        if len(name) > POSTGRES_NAME_LEN_CAP:
-            base_name = base_name[:-1]
-            name = base_name + f' {table_num}'
-    return name
-
-
-def create_table_from_datafile(data_files, name, schema, comment=None):
-    data_file = data_files[0]
-    table = create_table_from_data_file(data_file, name, schema, comment)
-    return table
-
-
-def create_empty_table(name, schema, comment=None):
-    """
-    Create an empty table, with only Mathesar's internal columns.
-
-    :param name: the parsed and validated table name
-    :param schema: the parsed and validated schema model
-    :return: the newly created blank table
-    """
-    engine = create_mathesar_engine(schema.database)
-    db_table_oid = create_mathesar_table(engine, name, schema.oid, comment=comment)
-    # Use current_objects rather than objects to create the table: objects
-    # triggers re-reflection, which would cause a race condition when creating the table
-    table, _ = Table.current_objects.get_or_create(oid=db_table_oid, schema=schema)
-    reflect_columns_from_tables([table], metadata=get_cached_metadata())
-    return table
 
 
 def list_tables_meta_data(database_id):