diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a21d3e7908..2746b1cdb7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,6 +6,7 @@ on: branches: - "develop" - "main" + - "AH/develop" jobs: # Currently GH Actions provides no simple method for "sharing" diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index aff8ac5a14..b0c7c34578 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -4,6 +4,7 @@ on: push: branches: - develop + - AH/develop tags: - "*" workflow_dispatch: @@ -43,6 +44,8 @@ jobs: if [[ ${GITHUB_EVENT_NAME} == "push" ]]; then if [[ "${GITHUB_REF}" == "refs/heads/develop" ]]; then SEED_TAG=seedplatform/seed:develop + elif [[ "${GITHUB_REF}" == "refs/heads/AH/develop" ]]; then + SEED_TAG=seedplatform/seed:develop-ah elif [[ "${GITHUB_REF}" =~ "refs/tags/v" ]]; then # you can have multiple tags, separated by commas SEED_TAG=seedplatform/seed:${GITHUB_REF#refs/tags/v},seedplatform/seed:latest diff --git a/config/settings/common.py b/config/settings/common.py index 5e8d077b1e..fa105d9a6f 100644 --- a/config/settings/common.py +++ b/config/settings/common.py @@ -103,6 +103,7 @@ 'crispy_forms', # needed to squash warnings around collectstatic with rest_framework 'post_office', 'django_celery_beat', + 'treebeard', ) @@ -148,7 +149,6 @@ ], 'js': [ 'compressor.filters.jsmin.JSMinFilter', - 'compressor.filters.yuglify.YUglifyJSFilter', ] } STATICFILES_DIRS = [ diff --git a/locale/en_US/LC_MESSAGES/django.mo b/locale/en_US/LC_MESSAGES/django.mo index fab5a46cbf..969638ad05 100644 Binary files a/locale/en_US/LC_MESSAGES/django.mo and b/locale/en_US/LC_MESSAGES/django.mo differ diff --git a/locale/en_US/LC_MESSAGES/django.po b/locale/en_US/LC_MESSAGES/django.po index d84446e63a..a1842105be 100644 --- a/locale/en_US/LC_MESSAGES/django.po +++ b/locale/en_US/LC_MESSAGES/django.po @@ -56,9 +56,42 @@ msgstr "A list of your imported headers." msgid "A preview of your concatenated data will appear in the box below." msgstr "A preview of your concatenated data will appear in the box below." +msgid "ACCESS_LEVEL_DELETE_AREYOUSURE" +msgstr "Are you sure you want to delete this Access Level Instance? The operation will also delete all Access Level Instances below this one." + +msgid "ACCESS_LEVEL_DESCRIPTION" +msgstr " Define access levels to structure your organization's content. An organization can have one or more access levels. All organizations have at least one level (level 1), which is named with the organization name by default. All access level names are customizable." + msgid "ACCESS_LEVEL_FILENAME_UPLOADED" msgstr "Access Level Instances File has been uploaded" +msgid "ACCESS_LEVEL_INSTANCE_DESCRIPTION" +msgstr "Access level instances are custom subgroups that can be defined within each access level. These ALIs are used to associate content together and restrict user access to data. There is a single ALI in level 1 of the hierarchy, the root of the tree. This is named 'root' by default, but can be customized. All other levels can contain one or more ALIs." + +msgid "ACCESS_LEVEL_ORG_OWNER" +msgstr "As an organization owner, you can set the structure of your access level tree using the buttons below. This process should be performed prior to importing data and adding users to your organization." + +msgid "ACCESS_LEVEL_OWNER_END" +msgstr "Once you have configured your organization's access level tree, you are ready to add users and properties to different levels of the tree." 
+ +msgid "ACCESS_LEVEL_OWNER_STEP1" +msgstr "Configure the number and name of your access levels using the Edit/Add Access Levels button." + +msgid "ACCESS_LEVEL_OWNER_STEP2" +msgstr "Upload a spreadsheet of access level instances. The spreadsheet should have a column for each access level, with the header matching your customized level name. Access level instances make up the rows of the spreadsheet. Make sure to fully define the path of each access level instance. You can also select the level, parent access level instance, and configure the name of a single new access level instance using the Add Access Level Instance button." + +msgid "ACCESS_LEVEL_ROOT_TEXT" +msgstr "There is only one access level instance allowed at the root of the tree (level 1). Click on the pencil icon if you wish to rename it." + +msgid "ACCESS_LEVEL_STRUCTURE_UPLOAD_TEXT" +msgstr "Upload your Organizational Structure in spreadsheet form. The file should include a column for each access level with its column header equal to the custom access level name. Each row should contain an access level instance, with its ancestry fully defined (parent node name, grand-parent node name, etc.)" + +msgid "ACCESS_LEVEL_TREE_HELP_1" +msgstr "This page allows you to view the structure of your organization's content. The structure is made up of hierarchical levels that define a tree; these are called access levels. Each level contains one or more nodes, called access level instances. Permissions are managed by associating content and users with a particular access level instance, thereby grouping and restricting access to the content. The number of access levels in your structure, the names of the levels, and the names of the access level instances are customizable." + +msgid "ACCESS_LEVEL_UPLOAD_PROGRESS_MSG" +msgstr "Saving access level instances in progress... This process depends on the size of your file and may take several minutes." + msgid "ADD_FILES_TO" msgstr "Add files to {dataset_name}." @@ -125,6 +158,33 @@ msgstr "Accept" msgid "Accept Terms of Service?" msgstr "Accept Terms of Service?" 
+msgid "Access Level" +msgstr "Access Level" + +msgid "Access Level Instance" +msgstr "Access Level Instance" + +msgid "Access Level Instance Information" +msgstr "Access Level Instance Information" + +msgid "Access Level Instance Tree" +msgstr "Access Level Instance Tree" + +msgid "Access Level Instances Errors" +msgstr "Access Level Instances Errors" + +msgid "Access Level Tree" +msgstr "Access Level Tree" + +msgid "Access Levels" +msgstr "Access Levels" + +msgid "Access Levels (AL)" +msgstr "Access Levels (AL)" + +msgid "Access Levels Instances (ALI)" +msgstr "Access Levels Instances (ALI)" + msgid "Acknowledge" msgstr "Acknowledge" @@ -147,6 +207,9 @@ msgstr "Actual Field" msgid "Add" msgstr "Add" +msgid "Add Access Level" +msgstr "Add Access Level" + msgid "Add Column" msgstr "Add Column" @@ -156,6 +219,12 @@ msgstr "Add Data Files" msgid "Add Label" msgstr "Add Label" +msgid "Add Level Instance (Node) to the Organizational Structure" +msgstr "Add Level Instance (Node) to the Organizational Structure" + +msgid "Add Levels to the Organizational Structure Tree" +msgstr "Add Levels to the Organizational Structure Tree" + msgid "Add Mapping" msgstr "Add Mapping" @@ -276,6 +345,12 @@ msgstr "Are you sure you want to unmerge these tax lots and then merge with the msgid "Area" msgstr "Area" +msgid "Area Column" +msgstr "Area Column" + +msgid "Area Target Column" +msgstr "Area Target Column" + msgid "As the admin of your SEED instance you can control what data is shared throughout your organization and between your sub-organizations as well as what data is shared externally with the public-at-large. The subset of data you choose to share with the public can be different than the subset shared between your sub-organizations." msgstr "As the admin of your SEED instance you can control what data is shared throughout your organization and between your sub-organizations as well as what data is shared externally with the public-at-large. The subset of data you choose to share with the public can be different than the subset shared between your sub-organizations." @@ -336,6 +411,9 @@ msgstr "Back to List" msgid "Back to Mapping" msgstr "Back to Mapping" +msgid "Baseline Cycle" +msgstr "Baseline Cycle" + #: seed/models/models.py:135 msgid "Benchmarking" msgstr "Benchmarking" @@ -433,6 +511,9 @@ msgstr "Need to configure your Program?" msgid "CONFIRMING_DELETE_PROFILE" msgstr "Are you sure you want to delete the profile" +msgid "CONFIRM_ACCESS_LEVEL_DELETE" +msgstr "Are you sure you want to delete this access level? This action will also delete {num_access_levels} access level instances." + msgid "CONFIRM_AND_START_MATCHING" msgstr "Confirm mappings & start matching" @@ -467,6 +548,9 @@ msgstr "Cancel" msgid "Cannot delete protected objects while related objects still exist" msgstr "Cannot delete protected objects while related objects still exist" +msgid "Cannot delete this access level instance." +msgstr "Cannot delete this access level instance." 
+ msgid "Change Merged Property" msgstr "Change Merged Property" @@ -598,6 +682,9 @@ msgstr "Conditioned Floor Area" msgid "Configuration" msgstr "Configuration" +msgid "Configure Goals" +msgstr "Configure Goals" + #: seed/landing/templates/landing/password_reset_confirm.html:64 #: seed/landing/templates/landing/signup.html:63 msgid "Confirm" @@ -737,6 +824,12 @@ msgstr "Cross-Cycles" msgid "Current Column Mapping Profile" msgstr "Current Column Mapping Profile" +msgid "Current Cycle" +msgstr "Current Cycle" + +msgid "Current Cycle will be measured against Baseline Cycle" +msgstr "Current Cycle will be measured against Baseline Cycle" + msgid "Current Filters" msgstr "Current Filters" @@ -755,6 +848,9 @@ msgstr "Custom ID 1 (Property)" msgid "Custom ID 1 (Tax Lot)" msgstr "Custom ID 1 (Tax Lot)" +msgid "Custom Name for Level" +msgstr "Custom Name for Level" + msgid "Custom Reports" msgstr "Custom Reports" @@ -767,6 +863,9 @@ msgstr "Cycle" msgid "Cycle Name" msgstr "Cycle Name" +msgid "Cycle Selection" +msgstr "Cycle Selection" + msgid "Cycle updated." msgstr "Cycle updated." @@ -905,6 +1004,9 @@ msgstr "Default Display Fields" msgid "Delete" msgstr "Delete" +msgid "Delete Access Level Instance" +msgstr "Delete Access Level Instance" + msgid "Delete All Mappings" msgstr "Delete All Mappings" @@ -1045,6 +1147,9 @@ msgstr "Choose an EnergyStar Portfolio Manager (ESPM) data importing method belo msgid "EUI" msgstr "EUI" +msgid "EUI Target Columns" +msgstr "EUI Target Columns" + msgid "EXCLUDE" msgstr "EXCLUDE" @@ -1054,6 +1159,9 @@ msgstr "For “extra data” fields, this allows the user to set the type, such msgid "Edit" msgstr "Edit" +msgid "Edit Access Level Instance Name" +msgstr "Edit Access Level Instance Name" + msgid "Edit Name" msgstr "Edit Name" @@ -1105,6 +1213,9 @@ msgstr "English" msgid "Enter Email Address" msgstr "Enter Email Address" +msgid "Enter a Name for the new Access Level Instance" +msgstr "Enter a Name for the new Access Level Instance" + msgid "Enter a valid email address." msgstr "Enter a valid email address." @@ -1132,6 +1243,9 @@ msgstr "Error" msgid "Error Processing Data" msgstr "Error Processing Data" +msgid "Example Access Level Tree" +msgstr "Example Access Level Tree" + msgid "Example Usage" msgstr "Example Usage" @@ -1351,6 +1465,12 @@ msgstr "Geocoding now..." msgid "GJ/m²/year" msgstr "GJ/m²/year" +msgid "GOAL" +msgstr "GOAL" + +msgid "GOAL_SETUP_TEXT" +msgstr "Configure one or more portfolio Energy Use Intensity (EUI) reduction goals below. Select a baseline cycle and a current cycle for comparison, indicate the level in your access level tree that this goal applies to, specify a percentage EUI improvement target, and indicate which fields in your data should be used for EUI and square footage information." + msgid "GREENBUTTON_CONTENTS_TITLE" msgstr "Confirm GreenButton File Contents" @@ -1397,6 +1517,9 @@ msgstr "Go to Meters" msgid "Go to Notes" msgstr "Go to Notes" +msgid "Goal Setup" +msgstr "Goal Setup" + msgid "Gross Floor Area" msgstr "Gross Floor Area" @@ -1415,6 +1538,12 @@ msgstr "HTTP Error! Status Code: 404. The requested URL was not found." msgid "HTTP Error! Status Code: 500. Internal Server Error." msgstr "HTTP Error! Status Code: 500. Internal Server Error." 
+msgid "Have your organization owner update the column's data type to \"Area\"" +msgstr "Have your organization owner update the column's data type to \"Area\"" + +msgid "Have your organization owner update the data type of the column to \"EUI\"" +msgstr "Have your organization owner update the data type of the column to \"EUI\"" + #: seed/templates/seed/account_create_email.html:2 msgid "Hello %(first_name)s, " msgstr "Hello %(first_name)s, " @@ -1633,6 +1762,15 @@ msgstr "Left Axis" msgid "Left Half" msgstr "Left Half" +msgid "Level" +msgstr "Level" + +msgid "Level Instance" +msgstr "Level Instance" + +msgid "Loading Summary Data..." +msgstr "Loading Summary Data..." + msgid "Loading data..." msgstr "Loading data..." @@ -1752,6 +1890,9 @@ msgstr "Manage compliance" msgid "Managed by" msgstr "Managed by" +msgid "Managing the Access Level Tree" +msgstr "Managing the Access Level Tree" + msgid "Manual Matching" msgstr "Manual Matching" @@ -1975,6 +2116,9 @@ msgstr "National Renewable Energy Laboratory" msgid "New Analysis" msgstr "New Analysis" +msgid "New Goal" +msgstr "New Goal" + msgid "New Note" msgstr "New Note" @@ -2040,6 +2184,9 @@ msgstr "Not Null" msgid "Not all inventory items were successfully deleted" msgstr "Not all inventory items were successfully deleted" +msgid "Not seeing your column?" +msgstr "Not seeing your column?" + msgid "Note:" msgstr "Note:" @@ -2180,6 +2327,9 @@ msgstr "Portfolio Manager Meter Import Results" msgid "PM_PROPERTY_ID_MATCHING_CRITERIA_WARNING" msgstr "Removing PM Property ID from matching criteria can cause unexpected issues for Portfolio Manager Meter imports." +msgid "PORTFOLIO_SUMMARY_HEADER_TEXT" +msgstr "The portfolio summary page compares 2 cycles to calculate progress toward an Energy Use Intensity reduction goal. Cycle selection and goal details can be customized by clicking the Configure Goals button below." + msgid "POST_GEOCODING_COUNTS" msgstr "Updated counts after geocoding" @@ -2271,6 +2421,15 @@ msgstr "Please wait while your data is loaded..." msgid "Populate SEED Headers with best known matches" msgstr "Populate SEED Headers with best known matches" +msgid "Portfolio Summary" +msgstr "Portfolio Summary" + +msgid "Portfolio Summary will only include properties belonging to this Access Level Instance." +msgstr "Portfolio Summary will only include properties belonging to this Access Level Instance." + +msgid "Portfolio Target" +msgstr "Portfolio Target" + msgid "Postal Code" msgstr "Postal Code" @@ -2298,6 +2457,9 @@ msgstr "Preview Loading" msgid "Previous" msgstr "Previous" +msgid "Primary Column" +msgstr "Primary Column" + msgid "Primary Tax Lot ID" msgstr "Primary Tax Lot ID" @@ -2437,6 +2599,9 @@ msgstr "Rename allows users to change the name of the underlying field shown in msgid "REVIEW YOUR DATA MAPPINGS" msgstr "REVIEW YOUR DATA MAPPINGS" +msgid "REVIEW_MATCHING_CRITERIA" +msgstr "Review your matching criteria on the Column Settings page before importing data for the first time. You will not be able to remove fields from the matching criteria after data is added to your organization." + msgid "Read more here." msgstr "Read more here." 
@@ -2471,6 +2636,9 @@ msgstr "Related Tax Lots" msgid "Remove" msgstr "Remove" +msgid "Remove Access Level" +msgstr "Remove Access Level" + msgid "Remove Column Mappings" msgstr "Remove Column Mappings" @@ -2778,6 +2946,9 @@ msgstr "STEP 1: Map Your Data" msgid "STEP 2: Review Your Data Mappings" msgstr "STEP 2: Review Your Data Mappings" +msgid "SUB_ORG_DEPRECATION" +msgstr "The Sub-Organization functionality is no longer recommended and will be deprecated soon. Visit the Access Level Tree page to define access levels to structure your organization's content." + msgid "SUGGEST_TO_VERIFY" msgstr "Please verify before continuing." @@ -2838,6 +3009,9 @@ msgstr "Search field name" msgid "Search table name" msgstr "Search table name" +msgid "Secondary (optional)" +msgstr "Secondary (optional)" + msgid "Security" msgstr "Security" @@ -2885,6 +3059,12 @@ msgstr "Select columns from the lists below to make them appear on the left or r msgid "Select one or more filter groups to load your inventory." msgstr "Select one or more filter groups to load your inventory." +msgid "Select the Level at which you want to add the new Instance" +msgstr "Select the Level at which you want to add the new Instance" + +msgid "Select the new Instance's Parent Access Level Instance" +msgstr "Select the new Instance's Parent Access Level Instance" + msgid "Select the type of file(s) you would like to add to" msgstr "Select the type of file(s) you would like to add to" @@ -3013,6 +3193,12 @@ msgstr "State (Property)" msgid "State (Tax Lot)" msgstr "State (Tax Lot)" +msgid "Step 1: Add Access Levels" +msgstr "Step 1: Add Access Levels" + +msgid "Step 2: Upload Access Level Instances" +msgstr "Step 2: Upload Access Level Instances" + msgid "Sub-Org Name" msgstr "Sub-Org Name" @@ -3079,6 +3265,9 @@ msgstr "Target Column" msgid "Target Field" msgstr "Target Field" +msgid "Target to quantify Portfolio EUI improvement. Must be between 0 and 100." +msgstr "Target to quantify Portfolio EUI improvement. Must be between 0 and 100." + msgid "Tax Lot" msgstr "Tax Lot" @@ -3128,6 +3317,9 @@ msgstr "Terms of Service" msgid "Terms of Service as of %(tos.created|date:\"SHORT_DATE_FORMAT\")s" msgstr "Terms of Service as of %(tos.created|date:\"SHORT_DATE_FORMAT\")s" +msgid "Tertiary (optional)" +msgstr "Tertiary (optional)" + msgid "Test Connection" msgstr "Test Connection" @@ -3209,8 +3401,8 @@ msgstr "There is also a link to the SEED-Platform Users forum, where you can con msgid "There was an error loading the page" msgstr "There was an error loading the page" -msgid "This action replaces any of your current columns with the comma-delmited columns you provided. Would you like to continue?" -msgstr "This action replaces any of your current columns with the comma-delmited columns you provided. Would you like to continue?" +msgid "This action replaces any of your current columns with the comma-delimited columns you provided. Would you like to continue?" +msgstr "This action replaces any of your current columns with the comma-delimited columns you provided. Would you like to continue?" msgid "This action updates properties within the selected cycle with data from the Audit Template account associated with this organization. Only Properties with Audit Template Building IDs corresponding to those saved in Audit Template will be updated." msgstr "This action updates properties within the selected cycle with data from the Audit Template account associated with this organization. 
Only Properties with Audit Template Building IDs corresponding to those saved in Audit Template will be updated."
@@ -3409,6 +3601,12 @@ msgstr "Update Salesforce"
 msgid "Update UBID"
 msgstr "Update UBID"

+msgid "Update the data type of the column to \"Area\" in"
+msgstr "Update the column's data type to \"Area\" in"
+
+msgid "Update the data type of the column to \"EUI\" in"
+msgstr "Update the column's data type to \"EUI\" in"
+
 msgid "Update with Audit Template"
 msgstr "Update with Audit Template"

@@ -3849,6 +4047,9 @@ msgstr "first name"
 msgid "for your SEED Platform user account"
 msgstr "for your SEED Platform user account"

+msgid "goal"
+msgstr "Goal"
+
 #: seed/models/models.py:183
 msgid "gray"
 msgstr "gray"
diff --git a/locale/fr_CA/LC_MESSAGES/django.mo b/locale/fr_CA/LC_MESSAGES/django.mo
index e73f313759..581b1fb6a6 100644
Binary files a/locale/fr_CA/LC_MESSAGES/django.mo and b/locale/fr_CA/LC_MESSAGES/django.mo differ
diff --git a/locale/fr_CA/LC_MESSAGES/django.po b/locale/fr_CA/LC_MESSAGES/django.po
index 817d0acab1..085ccd3808 100644
--- a/locale/fr_CA/LC_MESSAGES/django.po
+++ b/locale/fr_CA/LC_MESSAGES/django.po
@@ -57,9 +57,42 @@ msgstr "Une liste de vos en-têtes importés."
 msgid "A preview of your concatenated data will appear in the box below."
 msgstr "Un aperçu de vos données concaténées apparaîtra dans la case ci-dessous."

+msgid "ACCESS_LEVEL_DELETE_AREYOUSURE"
+msgstr "Êtes-vous sûr de vouloir supprimer cette instance de niveau d'accès ? L'opération supprimera également toutes les instances de niveau d'accès inférieures à celle-ci."
+
+msgid "ACCESS_LEVEL_DESCRIPTION"
+msgstr "Définissez des niveaux d'accès pour structurer le contenu de votre organisation. Une organisation peut avoir un ou plusieurs niveaux d'accès. Toutes les organisations ont au moins un niveau (niveau 1), nommé par défaut avec le nom de l'organisation. Tous les noms de niveaux d'accès sont personnalisables."
+
 msgid "ACCESS_LEVEL_FILENAME_UPLOADED"
 msgstr "Le fichier d'instances de niveau d'accès a été téléchargé"

+msgid "ACCESS_LEVEL_INSTANCE_DESCRIPTION"
+msgstr "Les instances de niveau d'accès sont des sous-groupes personnalisés qui peuvent être définis au sein de chaque niveau d'accès. Ces ALI sont utilisées pour associer du contenu et restreindre l'accès des utilisateurs aux données. Il existe une seule ALI au niveau 1 de la hiérarchie, la racine du graphe en arbre. Celle-ci est nommée « racine » par défaut, mais peut être personnalisée. Tous les autres niveaux peuvent contenir une ou plusieurs ALI."
+
+msgid "ACCESS_LEVEL_ORG_OWNER"
+msgstr "En tant que propriétaire d'une organisation, vous pouvez définir la structure de votre graphe en arbre de niveaux d'accès à l'aide des boutons ci-dessous. Ce processus doit être effectué avant d'importer des données et d'ajouter des utilisateurs à votre organisation."
+
+msgid "ACCESS_LEVEL_OWNER_END"
+msgstr "Une fois que vous avez configuré le graphe en arbre des niveaux d'accès de votre organisation, vous êtes prêt à ajouter des utilisateurs et des propriétés aux différents niveaux de l'arbre."
+
+msgid "ACCESS_LEVEL_OWNER_STEP1"
+msgstr "Configurez le nombre et le nom de vos niveaux d'accès à l'aide du bouton Modifier/Ajouter des niveaux d'accès."
+
+msgid "ACCESS_LEVEL_OWNER_STEP2"
+msgstr "Téléchargez une feuille de calcul des instances de niveau d'accès. La feuille de calcul doit avoir une colonne pour chaque niveau d'accès, avec l'en-tête correspondant à votre nom de niveau personnalisé. Les instances de niveau d'accès constituent les lignes de la feuille de calcul. Assurez-vous de définir entièrement le chemin de chaque instance de niveau d'accès. Vous pouvez également sélectionner le niveau, l'instance de niveau d'accès parent et configurer le nom d'une seule nouvelle instance de niveau d'accès à l'aide du bouton Ajouter une instance de niveau d'accès."
+
+msgid "ACCESS_LEVEL_ROOT_TEXT"
+msgstr "Il n'y a qu'une seule instance de niveau d'accès autorisée à la racine du graphe en arbre (niveau 1). Cliquez sur l'icône en forme de crayon si vous souhaitez la renommer."
+
+msgid "ACCESS_LEVEL_STRUCTURE_UPLOAD_TEXT"
+msgstr "Téléchargez votre structure organisationnelle sous forme de feuille de calcul. Le fichier doit inclure une colonne pour chaque niveau d'accès avec son en-tête de colonne égal au nom du niveau d'accès personnalisé. Chaque ligne doit contenir une instance de niveau d'accès, avec son ascendance entièrement définie (nom du nœud parent, nom du nœud grand-parent, etc.)"
+
+msgid "ACCESS_LEVEL_TREE_HELP_1"
+msgstr "Cette page vous permet de visualiser la structure du contenu de votre organisation. La structure est constituée de niveaux hiérarchiques qui définissent une arborescence ; c'est ce qu'on appelle les niveaux d'accès. Chaque niveau contient un ou plusieurs nœuds, appelés instances de niveau d'accès. Les autorisations sont gérées en associant le contenu et les utilisateurs à une instance de niveau d'accès particulière, regroupant et restreignant ainsi l'accès au contenu. Le nombre de niveaux d'accès dans votre structure, les noms des niveaux et les noms des instances de niveau d'accès sont personnalisables."
+
+msgid "ACCESS_LEVEL_UPLOAD_PROGRESS_MSG"
+msgstr "Enregistrement des instances de niveau d'accès en cours... Ce processus dépend de la taille de votre fichier et peut prendre plusieurs minutes."
+
 msgid "ADD_FILES_TO"
 msgstr "Ajoutez des fichiers à {dataset_name}."

@@ -127,6 +160,33 @@ msgstr "Acceptez"
 msgid "Accept Terms of Service?"
 msgstr "Accepter les conditions d'utilisation?"
+msgid "Access Level" +msgstr "Niveau d'accès" + +msgid "Access Level Instance" +msgstr "Instance de niveau d'accès" + +msgid "Access Level Instance Information" +msgstr "Informations sur l'instance de niveau d'accès" + +msgid "Access Level Instance Tree" +msgstr "Graphe en arbre des instances de niveau d'accès" + +msgid "Access Level Instances Errors" +msgstr "Erreurs d’instances de niveau d’accès" + +msgid "Access Level Tree" +msgstr "Graphe en arbre des niveaux d'accès" + +msgid "Access Levels" +msgstr "Niveaux d'accès" + +msgid "Access Levels (AL)" +msgstr "Niveaux d'accès (AL)" + +msgid "Access Levels Instances (ALI)" +msgstr "Instances de niveaux d'accès (ALI)" + msgid "Acknowledge" msgstr "Reconnaître" @@ -149,6 +209,9 @@ msgstr "Champ réel" msgid "Add" msgstr "Ajouter" +msgid "Add Access Level" +msgstr "Ajouter un niveau d'accès" + msgid "Add Column" msgstr "Ajouter une colonne" @@ -158,6 +221,12 @@ msgstr "Ajouter des fichiers de données" msgid "Add Label" msgstr "Ajouter une étiquette" +msgid "Add Level Instance (Node) to the Organizational Structure" +msgstr "Ajouter une instance de niveau (nœud) à la structure organisationnelle" + +msgid "Add Levels to the Organizational Structure Tree" +msgstr "Ajouter des niveaux à l'arbre de la structure organisationnelle" + msgid "Add Mapping" msgstr "Ajouter une configuration" @@ -278,6 +347,12 @@ msgstr "Êtes-vous sûr de vouloir annuler la fusion de ces lots fiscaux pour en msgid "Area" msgstr "Superficie" +msgid "Area Column" +msgstr "Colonne de Superficie" + +msgid "Area Target Column" +msgstr "Colonne Cible de Superficie" + msgid "As the admin of your SEED instance you can control what data is shared throughout your organization and between your sub-organizations as well as what data is shared externally with the public-at-large. The subset of data you choose to share with the public can be different than the subset shared between your sub-organizations." msgstr "En tant qu'administrateur de votre instance SEED, vous pouvez contrôler les données partagées dans votre organisation et entre vos sous-organisations ainsi que les données partagées à l'externe avec le public. Le sous-ensemble de données que vous choisissez de partager avec le public peut être différent du sous-ensemble partagé entre vos sous-organisations." @@ -338,6 +413,9 @@ msgstr "Retour à la Liste" msgid "Back to Mapping" msgstr "Retournez à les mappages" +msgid "Baseline Cycle" +msgstr "Cycle de référence" + #: seed/models/models.py:135 #, fuzzy msgid "Benchmarking" @@ -439,6 +517,9 @@ msgstr "Besoin de configurer votre programme?" msgid "CONFIRMING_DELETE_PROFILE" msgstr "Êtes-vous sûr de vouloir supprimer le profil" +msgid "CONFIRM_ACCESS_LEVEL_DELETE" +msgstr "Êtes-vous sûr de vouloir supprimer ce niveau d'accès? Cette action supprimera également {num_access_levels} instances de niveau d'accès." + msgid "CONFIRM_AND_START_MATCHING" msgstr "Confirmer et commencer l'appariement" @@ -474,6 +555,9 @@ msgstr "Annuler" msgid "Cannot delete protected objects while related objects still exist" msgstr "Impossible de supprimer des objets protégés alors que des objets apparentés existent toujours" +msgid "Cannot delete this access level instance." +msgstr "Impossible de supprimer cette instance de niveau d'accès." 
+ msgid "Change Merged Property" msgstr "Modifier la propriété fusionnée" @@ -606,6 +690,9 @@ msgstr "Surface climatisé" msgid "Configuration" msgstr "Configuration" +msgid "Configure Goals" +msgstr "Configurer les Objectifs" + #: seed/landing/templates/landing/password_reset_confirm.html:64 #: seed/landing/templates/landing/signup.html:63 msgid "Confirm" @@ -746,6 +833,12 @@ msgstr "Entre les Cycles" msgid "Current Column Mapping Profile" msgstr "Profil de mappage de colonne actuel" +msgid "Current Cycle" +msgstr "Cycle actuel" + +msgid "Current Cycle will be measured against Baseline Cycle" +msgstr "Le cycle actuel sera mesuré par rapport au cycle de référence" + msgid "Current Filters" msgstr "Filtres Actuel" @@ -764,6 +857,9 @@ msgstr "ID personnalisé 1 (propriété)" msgid "Custom ID 1 (Tax Lot)" msgstr "ID personnalisé 1 (lot d'impôt)" +msgid "Custom Name for Level" +msgstr "Nom personnalisé pour le niveau" + msgid "Custom Reports" msgstr "Rapports" @@ -776,6 +872,9 @@ msgstr "Cycle" msgid "Cycle Name" msgstr "Nom du cycle" +msgid "Cycle Selection" +msgstr "Sélection des cycles" + msgid "Cycle updated." msgstr "Cycle mis à jour." @@ -914,6 +1013,9 @@ msgstr "Champs d'affichage par défaut" msgid "Delete" msgstr "Supprimer" +msgid "Delete Access Level Instance" +msgstr "Supprimer une instance de niveau d'accès" + msgid "Delete All Mappings" msgstr "Supprimer tous les mappages" @@ -1055,6 +1157,9 @@ msgstr "Choisissez une méthode d'importation de données EnergyStar Portfolio M msgid "EUI" msgstr "IUE" +msgid "EUI Target Columns" +msgstr "Colonnes cibles EUI" + msgid "EXCLUDE" msgstr "EXCLURE" @@ -1064,6 +1169,9 @@ msgstr "Pour les champs «données supplémentaires», cela permet à l'utilisat msgid "Edit" msgstr "Modifier" +msgid "Edit Access Level Instance Name" +msgstr "Modifier le nom de l'instance du niveau d'accès" + msgid "Edit Name" msgstr "Modifier le nom" @@ -1116,6 +1224,9 @@ msgstr "Anglais" msgid "Enter Email Address" msgstr "Entrer l'adresse e-mail" +msgid "Enter a Name for the new Access Level Instance" +msgstr "Entrez un nom pour la nouvelle instance de niveau d'accès" + msgid "Enter a valid email address." msgstr "Entrez une adresse mail valide." @@ -1143,6 +1254,9 @@ msgstr "Erreur" msgid "Error Processing Data" msgstr "Erreur lors du traitement des données" +msgid "Example Access Level Tree" +msgstr "Exemple de graphe en arbre de niveaux d'accès" + msgid "Example Usage" msgstr "Exemple d'utilisation" @@ -1364,6 +1478,12 @@ msgstr "Géocodage maintenant ..." msgid "GJ/m²/year" msgstr "GJ/m²/année" +msgid "GOAL" +msgstr "OBJECTIF" + +msgid "GOAL_SETUP_TEXT" +msgstr "Configurez un ou plusieurs objectifs de réduction de l'intensité de l'utilisation d'énergie (IUE) du portefeuille ci-dessous. Sélectionnez un cycle de référence et un cycle actuel à des fins de comparaison, identifiez le niveau dans votre arborescence de niveaux d'accès auquel cet objectif s'applique, spécifiez un objectif d'amélioration de l'IUE en pourcentage et indiquez quels champs de vos données doivent être utilisés pour les informations sur l'IUE et la superficie en pieds carrés." + msgid "GREENBUTTON_CONTENTS_TITLE" msgstr "Confirmer le contenu du fichier GreenButton" @@ -1410,6 +1530,9 @@ msgstr "Aller aux compteurs" msgid "Go to Notes" msgstr "Accéder aux Notes" +msgid "Goal Setup" +msgstr "Configuration des objectifs" + msgid "Gross Floor Area" msgstr "Surface brute" @@ -1428,6 +1551,12 @@ msgstr "Erreur HTTP! Code d'état : 404. L'URL demandée est introuvable." msgid "HTTP Error! Status Code: 500. 
Internal Server Error."
 msgstr "Erreur HTTP! Code d'état : 500. Erreur interne du serveur."

+msgid "Have your organization owner update the column's data type to \"Area\""
+msgstr "Demandez au propriétaire de votre organisation de mettre à jour le type de données de la colonne en \"Area\""
+
+msgid "Have your organization owner update the data type of the column to \"EUI\""
+msgstr "Demandez au propriétaire de votre organisation de mettre à jour le type de données de la colonne en \"EUI\""
+
 #: seed/templates/seed/account_create_email.html:2
 msgid "Hello %(first_name)s, "
 msgstr "Bonjour% (first_name) s,"

@@ -1648,6 +1777,15 @@ msgstr "Axe gauche"
 msgid "Left Half"
 msgstr "Moitié gauche"

+msgid "Level"
+msgstr "Niveau"
+
+msgid "Level Instance"
+msgstr "Instance de niveau"
+
+msgid "Loading Summary Data..."
+msgstr "Chargement des données récapitulatives..."
+
 msgid "Loading data..."
 msgstr "Chargeant les données ..."

@@ -1767,6 +1905,9 @@ msgstr "Gérer la conformité"
 msgid "Managed by"
 msgstr "Dirigé par"

+msgid "Managing the Access Level Tree"
+msgstr "Gestion du graphe en arbre des niveaux d'accès"
+
 msgid "Manual Matching"
 msgstr "Correspondance manuelle"

@@ -1983,7 +2124,7 @@ msgid "NUM_PROPERTY_TAXLOT_COLUMNS_AVAILABLE"
 msgstr "Il existe {num_property_columns} colonnes de propriétés et {num_taxlot_columns} colonnes de lot d'impôt qui vous sont accessibles."

 msgid "Name"
-msgstr "nom"
+msgstr "Nom"

 msgid "National Renewable Energy Laboratory"
 msgstr "Laboratoire National Des Énergies Renouvelables"

@@ -1991,6 +2132,9 @@ msgstr "Laboratoire National Des Énergies Renouvelables"
 msgid "New Analysis"
 msgstr "Nouvelle analyse"

+msgid "New Goal"
+msgstr "Nouvel Objectif"
+
 msgid "New Note"
 msgstr "Nouvelle note"

@@ -2056,6 +2200,9 @@ msgstr "Pas nul"
 msgid "Not all inventory items were successfully deleted"
 msgstr "Tous les éléments de l'inventaire n'ont pas été supprimés"

+msgid "Not seeing your column?"
+msgstr "Vous ne voyez pas votre colonne?"
+
 msgid "Note:"
 msgstr "Remarque:"

@@ -2196,6 +2343,9 @@ msgstr "Résultats d'importation de Portfolio Manager"
 msgid "PM_PROPERTY_ID_MATCHING_CRITERIA_WARNING"
 msgstr "La suppression de l'ID de propriété PM des critères de correspondance peut entraîner des problèmes inattendus pour les importations de compteurs Portfolio Manager."

+msgid "PORTFOLIO_SUMMARY_HEADER_TEXT"
+msgstr "La page de résumé du portefeuille compare 2 cycles pour calculer les progrès vers un objectif de réduction de l'intensité de l'utilisation d'énergie. La sélection du cycle et les détails des objectifs peuvent être personnalisés en cliquant sur le bouton Configurer les Objectifs ci-dessous."
+
 msgid "POST_GEOCODING_COUNTS"
 msgstr "Nombre mis à jour après le géocodage"

@@ -2287,6 +2437,15 @@ msgstr "Veuillez patienter pendant que vos données sont chargées ..."
 msgid "Populate SEED Headers with best known matches"
 msgstr "Remplissez les en-têtes SEED avec les correspondances les plus connues"

+msgid "Portfolio Summary"
+msgstr "Résumé du portefeuille"
+
+msgid "Portfolio Summary will only include properties belonging to this Access Level Instance."
+msgstr "Le résumé du portefeuille inclura uniquement les propriétés appartenant à cette instance de niveau d'accès."
+
+msgid "Portfolio Target"
+msgstr "Cible du portefeuille"
+
 msgid "Postal Code"
 msgstr "Code postal"

@@ -2314,6 +2473,9 @@ msgstr "Aperçu Chargement"
 msgid "Previous"
 msgstr "Précédente"

+msgid "Primary Column"
+msgstr "Colonne principale"
+
 msgid "Primary Tax Lot ID"
 msgstr "ID de lot d'impôt primaire"

@@ -2457,6 +2619,9 @@ msgstr "Renommer permet aux utilisateurs de modifier le nom du champ sous-jacent
 msgid "REVIEW YOUR DATA MAPPINGS"
 msgstr "EXAMINER VOS MAPPAGES DE DONNÉES"

+msgid "REVIEW_MATCHING_CRITERIA"
+msgstr "Vérifiez vos critères de correspondance sur la page Paramètres de colonne avant d'importer des données pour la première fois. Vous ne pourrez pas supprimer des champs des critères de correspondance une fois les données ajoutées à votre organisation."
+
 msgid "Read more here."
 msgstr "Lire la suite ici."

@@ -2491,6 +2656,9 @@ msgstr "Lots d'impôt connexes"
 msgid "Remove"
 msgstr "Supprimer"

+msgid "Remove Access Level"
+msgstr "Supprimer le niveau d'accès"
+
 msgid "Remove Column Mappings"
 msgstr "Supprimer les mappages de colonnes"

@@ -2802,6 +2970,9 @@ msgstr "ÉTAPE 1: Mappez vos données"
 msgid "STEP 2: Review Your Data Mappings"
 msgstr "ÉTAPE 2: Consultez vos mappages de données"

+msgid "SUB_ORG_DEPRECATION"
+msgstr "La fonctionnalité Sous-organisation n’est plus recommandée et sera bientôt obsolète. Visitez la page des niveaux d'accès pour définir les niveaux d'accès afin de structurer le contenu de votre organisation."
+
 #, fuzzy
 msgid "SUGGEST_TO_VERIFY"
 msgstr "Veuillez vérifier les points suivants avant de continuer."

@@ -2863,6 +3034,9 @@ msgstr "Rechercher le nom du champ"
 msgid "Search table name"
 msgstr "Rechercher le nom de la table"

+msgid "Secondary (optional)"
+msgstr "Secondaire (facultatif)"
+
 msgid "Security"
 msgstr "Sécurité"

@@ -2910,6 +3084,12 @@ msgstr "Sélectionnez les colonnes dans les listes ci-dessous pour les faire app
 msgid "Select one or more filter groups to load your inventory."
 msgstr "Sélectionnez un ou plusieurs groupes de filtres pour charger votre inventaire."

+msgid "Select the Level at which you want to add the new Instance"
+msgstr "Sélectionnez le niveau auquel vous souhaitez ajouter la nouvelle instance"
+
+msgid "Select the new Instance's Parent Access Level Instance"
+msgstr "Sélectionnez l'instance de niveau d'accès parent de la nouvelle instance"
+
 msgid "Select the type of file(s) you would like to add to"
 msgstr "Sélectionnez le type de fichier (s) que vous souhaitez ajouter à"

@@ -3038,6 +3218,12 @@ msgstr "Etat (Propriété)"
 msgid "State (Tax Lot)"
 msgstr "État (lot d'impôt)"

+msgid "Step 1: Add Access Levels"
+msgstr "Étape 1: Ajouter des niveaux d'accès"
+
+msgid "Step 2: Upload Access Level Instances"
+msgstr "Étape 2: Télécharger les instances de niveau d'accès"
+
 msgid "Sub-Org Name"
 msgstr "Nom de la sous-organisation"

@@ -3104,6 +3290,9 @@ msgstr "Colonne cible"
 msgid "Target Field"
 msgstr "Champ cible"

+msgid "Target to quantify Portfolio EUI improvement. Must be between 0 and 100."
+msgstr "Cible pour quantifier l’amélioration de l’IUE du portefeuille. Doit être comprise entre 0 et 100."
+ msgid "Tax Lot" msgstr "Lot d'impôt" @@ -3154,6 +3343,9 @@ msgstr "Conditions d'utilisation" msgid "Terms of Service as of %(tos.created|date:\"SHORT_DATE_FORMAT\")s" msgstr "Conditions de service à partir de% (tos.created | date: \"SHORT_DATE_FORMAT\") s" +msgid "Tertiary (optional)" +msgstr "Tertiaire (facultatif)" + msgid "Test Connection" msgstr "Tester la Connexion" @@ -3235,7 +3427,7 @@ msgstr "Il y a aussi un lien vers le forum SEED-Platform Users, où vous pouvez msgid "There was an error loading the page" msgstr "Une erreur s'est produite lors du chargement de la page" -msgid "This action replaces any of your current columns with the comma-delmited columns you provided. Would you like to continue?" +msgid "This action replaces any of your current columns with the comma-delimited columns you provided. Would you like to continue?" msgstr "Cette action remplace n'importe laquelle de vos colonnes actuelles par les colonnes délimitées par des virgules que vous avez fournies. Voulez-vous continuer?" msgid "This action updates properties within the selected cycle with data from the Audit Template account associated with this organization. Only Properties with Audit Template Building IDs corresponding to those saved in Audit Template will be updated." @@ -3436,6 +3628,12 @@ msgstr "Mettre à jour Salesforce" msgid "Update UBID" msgstr "Mettre à jour UBID" +msgid "Update the data type of the column to \"Area\" in" +msgstr "Assurez-vous que le type de données de la colonne est bien \"Area\" dans" + +msgid "Update the data type of the column to \"EUI\" in" +msgstr "Assurez-vous que le type de données de la colonne est \"EUI\" dans" + msgid "Update with Audit Template" msgstr "Mise à jour avec Audit Template" @@ -3877,6 +4075,9 @@ msgstr "Prénom" msgid "for your SEED Platform user account" msgstr "pour votre compte utilisateur SEED Platform" +msgid "goal" +msgstr "Objectif" + #: seed/models/models.py:183 msgid "gray" msgstr "gris" diff --git a/requirements/base.txt b/requirements/base.txt index a457d26bde..c779538962 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -79,3 +79,5 @@ django-braces==1.14.0 django-oauth-toolkit==1.2.0 future==0.18.3 + +django-treebeard==4.6.1 diff --git a/seed/analysis_pipelines/better/pipeline.py b/seed/analysis_pipelines/better/pipeline.py index 68cbf0ea5a..d19042de02 100644 --- a/seed/analysis_pipelines/better/pipeline.py +++ b/seed/analysis_pipelines/better/pipeline.py @@ -414,6 +414,11 @@ def _process_results(self, analysis_id): # gather all columns to store BETTER_VALID_MODEL_E_COL = 'better_valid_model_electricity' BETTER_VALID_MODEL_F_COL = 'better_valid_model_fuel' + + # if user is at root level and has role member or owner, columns can be created + # otherwise set the 'missing_columns' flag for later + missing_columns = False + column_data_paths = [ # Combined Savings ExtraDataColumnPath( @@ -523,19 +528,27 @@ def _process_results(self, analysis_id): # check if the column exists with the bare minimum required pieces of data. For example, # don't check column_description and display_name because they may be changed by # the user at a later time. - column, created = Column.objects.get_or_create( - is_extra_data=True, - column_name=column_data_path.column_name, - organization=analysis.organization, - table_name='PropertyState', - ) - - # add in the other fields of the columns only if it is a new column. 
-        if created:
-            column.display_name = column_data_path.column_display_name
-            column.column_description = column_data_path.column_display_name
-
-            column.save()
+        # if column doesn't exist, and user has permission to create, then create
+        try:
+            Column.objects.get(
+                is_extra_data=True,
+                column_name=column_data_path.column_name,
+                organization=analysis.organization,
+                table_name='PropertyState',
+            )
+        except Exception:
+            if analysis.can_create():
+                column = Column.objects.create(
+                    is_extra_data=True,
+                    column_name=column_data_path.column_name,
+                    organization=analysis.organization,
+                    table_name='PropertyState',
+                )
+                column.display_name = column_data_path.column_display_name
+                column.column_description = column_data_path.column_display_name
+                column.save()
+            else:
+                missing_columns = True

     # Update the original PropertyView's PropertyState with analysis results of interest
     analysis_property_views = analysis.analysispropertyview_set.prefetch_related('property', 'cycle').all()
@@ -549,7 +562,7 @@ def _process_results(self, analysis_id):
         for data_path in column_data_paths:
             value = get_json_path(data_path.json_path, raw_better_results)
             if value is not None:
-                # some of the ee_measures return an empty string, which should be falsey
+                # some of the ee_measures return an empty string, which should be falsy
                 if 'ee_measures' in data_path.json_path and value == '':
                     value = 0.0 * data_path.unit_multiplier  # to be consistent
                 else:
@@ -596,9 +609,11 @@ def _process_results(self, analysis_id):
             else:
                 cleaned_results[col_name] = value

-            original_property_state = property_view_by_apv_id[analysis_property_view.id].state
-            original_property_state.extra_data.update(cleaned_results)
-            original_property_state.save()
+            # if no columns are missing, save back to property
+            if not missing_columns:
+                original_property_state = property_view_by_apv_id[analysis_property_view.id].state
+                original_property_state.extra_data.update(cleaned_results)
+                original_property_state.save()


 @shared_task(bind=True)
diff --git a/seed/analysis_pipelines/bsyncr.py b/seed/analysis_pipelines/bsyncr.py
index 37965ecbb7..e367ae31d0 100644
--- a/seed/analysis_pipelines/bsyncr.py
+++ b/seed/analysis_pipelines/bsyncr.py
@@ -323,7 +323,7 @@ def _parse_analysis_property_view_id(filepath):
     analysis_property_view_id_elem = input_file_tree.xpath(id_xpath, namespaces=NAMESPACES)

     if len(analysis_property_view_id_elem) != 1:
-        raise AnalysisPipelineException(f'Expected BuildlingSync file to have exactly one "{PREMISES_ID_NAME}" PremisesIdentifier')
+        raise AnalysisPipelineException(f'Expected BuildingSync file to have exactly one "{PREMISES_ID_NAME}" PremisesIdentifier')
     return int(analysis_property_view_id_elem[0].text)

diff --git a/seed/analysis_pipelines/co2.py b/seed/analysis_pipelines/co2.py
index 4193367357..637b15fca7 100644
--- a/seed/analysis_pipelines/co2.py
+++ b/seed/analysis_pipelines/co2.py
@@ -350,27 +350,43 @@ def _run_analysis(self, meter_readings_by_analysis_property_view, analysis_id):
         # displayname and description if the column already exists because
         # the user might have changed them which would re-create new columns
         # here.
-        column, created = Column.objects.get_or_create(
-            is_extra_data=True,
-            column_name='analysis_co2',
-            organization=analysis.organization,
-            table_name='PropertyState',
-        )
-        if created:
-            column.display_name = 'Average Annual CO2 (kgCO2e)'
-            column.column_description = 'Average Annual CO2 (kgCO2e)'
-            column.save()
-
-        column, created = Column.objects.get_or_create(
-            is_extra_data=True,
-            column_name='analysis_co2_coverage',
-            organization=analysis.organization,
-            table_name='PropertyState',
-        )
-        if created:
-            column.display_name = 'Average Annual CO2 Coverage (% of the year)'
-            column.column_description = 'Average Annual CO2 Coverage (% of the year)'
-            column.save()
+
+        # if user is at root level and has role member or owner, columns can be created
+        # otherwise set the 'missing_columns' flag for later
+        missing_columns = False
+
+        column_meta = [
+            {
+                'column_name': 'analysis_co2',
+                'display_name': 'Average Annual CO2 (kgCO2e)',
+                'description': 'Average Annual CO2 (kgCO2e)'
+            }, {
+                'column_name': 'analysis_co2_coverage',
+                'display_name': 'Average Annual CO2 Coverage (% of the year)',
+                'description': 'Average Annual CO2 Coverage (% of the year)'
+            }
+        ]
+
+        for col in column_meta:
+            try:
+                Column.objects.get(
+                    column_name=col["column_name"],
+                    organization=analysis.organization,
+                    table_name='PropertyState',
+                )
+            except Exception:
+                if analysis.can_create():
+                    column = Column.objects.create(
+                        is_extra_data=True,
+                        column_name=col["column_name"],
+                        organization=analysis.organization,
+                        table_name='PropertyState',
+                    )
+                    column.display_name = col["display_name"]
+                    column.column_description = col["description"]
+                    column.save()
+                else:
+                    missing_columns = True

     # fix the meter readings dict b/c celery messes with it when serializing
     meter_readings_by_analysis_property_view = {
@@ -431,10 +447,17 @@ def _run_analysis(self, meter_readings_by_analysis_property_view, analysis_id):
             }
             analysis_property_view.save()
             if save_co2_results:
-                # Convert the analysis results which reports in kgCO2e to MtCO2e which is the canonical database field units
-                property_view.state.total_ghg_emissions = co2['average_annual_kgco2e'] / 1000
-                property_view.state.total_ghg_emissions_intensity = co2['average_annual_kgco2e'] / property_view.state.gross_floor_area.magnitude
-                property_view.state.save()
+                # only save to property view if columns exist
+                if not missing_columns:
+                    # store the extra_data columns from the analysis
+                    property_view.state.extra_data.update({
+                        'analysis_co2': co2['average_annual_kgco2e'],
+                        'analysis_co2_coverage': co2['annual_coverage_percent']
+                    })
+                    # Also convert the analysis results, which are reported in kgCO2e, to MtCO2e, the canonical database field units
+                    property_view.state.total_ghg_emissions = co2['average_annual_kgco2e'] / 1000
+                    property_view.state.total_ghg_emissions_intensity = co2['average_annual_kgco2e'] / property_view.state.gross_floor_area.magnitude
+                    property_view.state.save()

         # all done!
pipeline.set_analysis_status_to_completed() diff --git a/seed/analysis_pipelines/eeej.py b/seed/analysis_pipelines/eeej.py index 614375f959..581f7219c1 100644 --- a/seed/analysis_pipelines/eeej.py +++ b/seed/analysis_pipelines/eeej.py @@ -51,29 +51,38 @@ EJSCREEN_URL_STUB = 'https://ejscreen.epa.gov/mapper/EJscreen_SOE_report.aspx?namestr=&geometry={"spatialReference":{"wkid":4326},"x":LONG,"y":LAT}&distance=1&unit=9035&areatype=&areaid=&f=report' -def _get_data_for_census_tract_fetch(property_view_ids, organization): +def _get_data_for_census_tract_fetch(property_view_ids, organization, can_create_columns): """Performs basic validation of the properties for running EEEJ and returns any errors Fetches census tract information based on address if it doesn't exist already :param property_view_ids :param organization + :param can_create_columns - does the user have permission to create columns :returns: dictionary[id:str], dictionary of property_view_ids to error message """ # invalid_location = [] loc_data_by_property_view = {} errors_by_property_view_id = {} - # make sure the Census Tract column exists - column, created = Column.objects.get_or_create( - is_extra_data=True, - column_name=TRACT_FIELDNAME, - organization=organization, - table_name='PropertyState', - ) - if created: - column.display_name = 'Census Tract' - column.column_description = '2010 Census Tract' - column.save() + # check that Census Tract column exists. If not, create if you can + try: + Column.objects.get( + column_name=TRACT_FIELDNAME, + organization=organization, + table_name='PropertyState', + ) + except Exception: + # does user have permission to create? + if can_create_columns: + column = Column.objects.create( + column_name=TRACT_FIELDNAME, + organization=organization, + table_name='PropertyState', + is_extra_data=True, + ) + column.display_name = 'Census Tract' + column.column_description = '2010 Census Tract' + column.save() property_views = PropertyView.objects.filter(id__in=property_view_ids) for property_view in property_views: @@ -327,8 +336,8 @@ def _prepare_analysis(self, property_view_ids, start_analysis=True): # current implementation will *always* start the analysis immediately analysis = Analysis.objects.get(id=self._analysis_id) - # TODO: check that we have the data we need to retrieve census tract for each property - loc_data_by_property_view, errors_by_property_view_id = _get_data_for_census_tract_fetch(property_view_ids, analysis.organization) + # check that we have the data we need to retrieve census tract for each property + loc_data_by_property_view, errors_by_property_view_id = _get_data_for_census_tract_fetch(property_view_ids, analysis.organization, analysis.can_create()) if not loc_data_by_property_view: AnalysisMessage.log_and_create( @@ -393,76 +402,59 @@ def _run_analysis(self, loc_data_by_analysis_property_view, analysis_id): progress_data = pipeline.set_analysis_status_to_running() progress_data.step('Calculating EEEJ Indicators') analysis = Analysis.objects.get(id=analysis_id) + # if user is at root level and has role member or owner, columns can be created + # otherwise set the 'missing_columns' flag for later + missing_columns = False + + # make sure we have the extra data columns we need + column_meta = [ + { + 'column_name': 'analysis_dac', + 'display_name': 'Disadvantaged Community', + 'description': 'Property located in a Disadvantaged Community as defined by CEJST' + }, { + 'column_name': 'analysis_energy_burden_low_income', + 'display_name': 'Energy Burden and low Income?', + 
'description': 'Is this property located in an energy burdened census tract. Energy Burden defined by CEJST as greater than or equal to the 90th percentile for energy burden and is low income.' + }, { + 'column_name': 'analysis_energy_burden_percentile', + 'display_name': 'Energy Burden Percentile', + 'description': 'Energy Burden Percentile as identified by CEJST' + }, { + 'column_name': 'analysis_low_income', + 'display_name': 'Low Income?', + 'description': 'Is this property located in a census tract identified as Low Income by CEJST?' + }, { + 'column_name': 'analysis_share_neighbors_disadvantaged', + 'display_name': 'Share of Neighboring Tracts Identified as Disadvantaged', + 'description': 'The percentage of neighboring census tracts that have been identified as disadvantaged by CEJST' + }, { + 'column_name': 'analysis_number_affordable_housing', + 'display_name': 'Number of Affordable Housing Locations in Tract', + 'description': 'Number of affordable housing locations (both public housing developments and multi-family assisted housing) identified by HUD in census tract' + } + ] - # make sure we have the extra data columns we need, don't set the - # displayname and description if the column already exists because - # the user might have changed them which would re-create new columns - # here. - column, created = Column.objects.get_or_create( - is_extra_data=True, - column_name='analysis_dac', - organization=analysis.organization, - table_name='PropertyState', - ) - if created: - column.display_name = 'Disadvantaged Community' - column.column_description = 'Property located in a Disadvantaged Community as defined by CEJST' - column.save() - - column, created = Column.objects.get_or_create( - is_extra_data=True, - column_name='analysis_energy_burden_low_income', - organization=analysis.organization, - table_name='PropertyState', - ) - if created: - column.display_name = 'Energy Burden and low Income?' - column.column_description = 'Is this property located in an energy burdened census tract. Energy Burden defined by CEJST as greater than or equal to the 90th percentile for energy burden and is low income.' - column.save() - - column, created = Column.objects.get_or_create( - is_extra_data=True, - column_name='analysis_energy_burden_percentile', - organization=analysis.organization, - table_name='PropertyState', - ) - if created: - column.display_name = 'Energy Burden Percentile' - column.column_description = 'Energy Burden Percentile as identified by CEJST' - column.save() - - column, created = Column.objects.get_or_create( - is_extra_data=True, - column_name='analysis_low_income', - organization=analysis.organization, - table_name='PropertyState', - ) - if created: - column.display_name = 'Low Income?' - column.column_description = 'Is this property located in a census tract identified as Low Income by CEJST?' 
-            column.save()
-
-        column, created = Column.objects.get_or_create(
-            is_extra_data=True,
-            column_name='analysis_share_neighbors_disadvantaged',
-            organization=analysis.organization,
-            table_name='PropertyState',
-        )
-        if created:
-            column.display_name = 'Share of Neighboring Tracts Identified as Disadvantaged'
-            column.column_description = 'The percentage of neighboring census tracts that have been identified as disadvantaged by CEJST'
-            column.save()
-
-        column, created = Column.objects.get_or_create(
-            is_extra_data=True,
-            column_name='analysis_number_affordable_housing',
-            organization=analysis.organization,
-            table_name='PropertyState',
-        )
-        if created:
-            column.display_name = 'Number of Affordable Housing Locations in Tract'
-            column.column_description = 'Number of affordable housing locations (both public housing developments and multi-family assisted housing) identified by HUD in census tract'
-            column.save()
+        for col in column_meta:
+            try:
+                Column.objects.get(
+                    column_name=col["column_name"],
+                    organization=analysis.organization,
+                    table_name='PropertyState',
+                )
+            except Exception:
+                if analysis.can_create():
+                    column = Column.objects.create(
+                        is_extra_data=True,
+                        column_name=col["column_name"],
+                        organization=analysis.organization,
+                        table_name='PropertyState',
+                    )
+                    column.display_name = col["display_name"]
+                    column.column_description = col["description"]
+                    column.save()
+                else:
+                    missing_columns = True

     # fix the dict b/c celery messes with it when serializing
     analysis_property_view_ids = list(loc_data_by_analysis_property_view.keys())
@@ -505,17 +497,19 @@ def _run_analysis(self, loc_data_by_analysis_property_view, analysis_id):

         analysis_property_view.save()

-        # TODO: save each indicators back to property_view
+        # save each indicator back to property_view,
+        # but only if the required columns exist
         property_view = property_views_by_apv_id[analysis_property_view.id]
-        property_view.state.extra_data.update({
-            'analysis_census_tract': results[analysis_property_view.id]['census_tract'],
-            'analysis_dac': results[analysis_property_view.id]['dac'],
-            'analysis_energy_burden_low_income': results[analysis_property_view.id]['energy_burden_low_income'],
-            'analysis_energy_burden_percentile': results[analysis_property_view.id]['energy_burden_percentile'],
-            'analysis_low_income': results[analysis_property_view.id]['low_income'],
-            'analysis_share_neighbors_disadvantaged': results[analysis_property_view.id]['share_neighbors_disadvantaged'],
-            'analysis_number_affordable_housing': results[analysis_property_view.id]['number_affordable_housing'],
-        })
+        if not missing_columns:
+            property_view.state.extra_data.update({
+                'analysis_census_tract': results[analysis_property_view.id]['census_tract'],
+                'analysis_dac': results[analysis_property_view.id]['dac'],
+                'analysis_energy_burden_low_income': results[analysis_property_view.id]['energy_burden_low_income'],
+                'analysis_energy_burden_percentile': results[analysis_property_view.id]['energy_burden_percentile'],
+                'analysis_low_income': results[analysis_property_view.id]['low_income'],
+                'analysis_share_neighbors_disadvantaged': results[analysis_property_view.id]['share_neighbors_disadvantaged'],
+                'analysis_number_affordable_housing': results[analysis_property_view.id]['number_affordable_housing'],
+            })

         # store lat/lng (if blank) Census geocoder codes at the street address level (not Point level like mapquest)
         # store anyway but record as "Census Geocoder (L1AAA)" vs.
mapquest "High (P1AAA)" diff --git a/seed/analysis_pipelines/eui.py b/seed/analysis_pipelines/eui.py index 051d779f5b..0bd9be5996 100644 --- a/seed/analysis_pipelines/eui.py +++ b/seed/analysis_pipelines/eui.py @@ -231,27 +231,43 @@ def _run_analysis(self, meter_readings_by_analysis_property_view, analysis_id): # displayname and description if the column already exists because # the user might have changed them which would re-create new columns # here. - column, created = Column.objects.get_or_create( - is_extra_data=True, - column_name='analysis_eui', - organization=analysis.organization, - table_name='PropertyState', - ) - if created: - column.display_name = 'Fractional EUI (kBtu/sqft)' - column.column_description = 'Fractional EUI (kBtu/sqft)' - column.save() - - column, created = Column.objects.get_or_create( - is_extra_data=True, - column_name='analysis_eui_coverage', - organization=analysis.organization, - table_name='PropertyState', - ) - if created: - column.display_name = 'EUI Coverage (% of the year)' - column.column_description = 'EUI Coverage (% of the year)' - column.save() + + # if user is at root level and has role member or owner, columns can be created + # otherwise set the 'missing_columns' flag for later + missing_columns = False + + column_meta = [ + { + 'column_name': 'analysis_eui', + 'display_name': 'Fractional EUI (kBtu/sqft)', + 'description': 'Fractional EUI (kBtu/sqft)' + }, { + 'column_name': 'analysis_eui_coverage', + 'display_name': 'EUI Coverage (% of the year)', + 'description': 'EUI Coverage (% of the year)' + } + ] + + for col in column_meta: + try: + Column.objects.get( + column_name=col["column_name"], + organization=analysis.organization, + table_name='PropertyState', + ) + except Exception: + if analysis.can_create(): + column = Column.objects.create( + is_extra_data=True, + column_name=col["column_name"], + organization=analysis.organization, + table_name='PropertyState', + ) + column.display_name = col["display_name"] + column.column_description = col["description"] + column.save() + else: + missing_columns = True # fix the meter readings dict b/c celery messes with it when serializing meter_readings_by_analysis_property_view = { @@ -281,10 +297,12 @@ def _run_analysis(self, meter_readings_by_analysis_property_view, analysis_id): } analysis_property_view.save() - property_view = property_views_by_apv_id[analysis_property_view.id] - property_view.state.extra_data.update({'analysis_eui': eui['eui']}) - property_view.state.extra_data.update({'analysis_eui_coverage': eui['coverage']}) - property_view.state.save() + # only save to property view if columns exist + if not missing_columns: + property_view = property_views_by_apv_id[analysis_property_view.id] + property_view.state.extra_data.update({'analysis_eui': eui['eui']}) + property_view.state.extra_data.update({'analysis_eui_coverage': eui['coverage']}) + property_view.state.save() # all done! 
diff --git a/seed/api/v3/urls.py b/seed/api/v3/urls.py
index d98b3d6833..e0b0f55270 100644
--- a/seed/api/v3/urls.py
+++ b/seed/api/v3/urls.py
@@ -9,6 +9,7 @@
 from rest_framework_nested import routers as nested_routers

 from seed.views.main import celery_queue
+from seed.views.v3.access_levels import AccessLevelViewSet
 from seed.views.v3.analyses import AnalysisViewSet
 from seed.views.v3.analysis_messages import AnalysisMessageViewSet
 from seed.views.v3.analysis_views import AnalysisPropertyViewViewSet
@@ -30,11 +31,14 @@
 from seed.views.v3.filter_group import FilterGroupViewSet
 from seed.views.v3.gbr_properties import GBRPropertyViewSet
 from seed.views.v3.geocode import GeocodeViewSet
+from seed.views.v3.goal_notes import GoalNoteViewSet
+from seed.views.v3.goals import GoalViewSet
 from seed.views.v3.green_assessment_properties import (
     GreenAssessmentPropertyViewSet
 )
 from seed.views.v3.green_assessment_urls import GreenAssessmentURLViewSet
 from seed.views.v3.green_assessments import GreenAssessmentViewSet
+from seed.views.v3.historical_notes import HistoricalNoteViewSet
 from seed.views.v3.import_files import ImportFileViewSet
 from seed.views.v3.label_inventories import LabelInventoryViewSet
 from seed.views.v3.labels import LabelViewSet
@@ -77,7 +81,8 @@
 api_v3_router.register(r'derived_columns', DerivedColumnViewSet, basename='derived_columns')
 api_v3_router.register(r'eeej', EEEJViewSet, basename='eeej')
 api_v3_router.register(r'filter_groups', FilterGroupViewSet, basename='filter_groups')
-api_v3_router.register(r'gbr_properties', GBRPropertyViewSet, basename='properties')
+api_v3_router.register(r'gbr_properties', GBRPropertyViewSet, basename='gbr_properties')
+api_v3_router.register(r'goals', GoalViewSet, basename='goals')
 api_v3_router.register(r'geocode', GeocodeViewSet, basename='geocode')
 api_v3_router.register(r'green_assessment_properties', GreenAssessmentPropertyViewSet, basename='green_assessment_properties')
 api_v3_router.register(r'green_assessment_urls', GreenAssessmentURLViewSet, basename='green_assessment_urls')
@@ -106,6 +111,7 @@

 organizations_router = nested_routers.NestedSimpleRouter(api_v3_router, r'organizations', lookup='organization')
 organizations_router.register(r'users', OrganizationUserViewSet, basename='organization-users')
+organizations_router.register(r'access_levels', AccessLevelViewSet, basename='organization-access_levels')

 analysis_views_router = nested_routers.NestedSimpleRouter(api_v3_router, r'analyses', lookup='analysis')
 analysis_views_router.register(r'views', AnalysisPropertyViewViewSet, basename='analysis-views')
@@ -121,6 +127,8 @@
 properties_router.register(r'notes', NoteViewSet, basename='property-notes')
 properties_router.register(r'scenarios', PropertyScenarioViewSet, basename='property-scenarios')
 properties_router.register(r'events', EventViewSet, basename='property-events')
+properties_router.register(r'goal_notes', GoalNoteViewSet, basename='property-goal-notes')
+properties_router.register(r'historical_notes', HistoricalNoteViewSet, basename='property-historical-notes')

 # This is a third level router, so we need to register it with the second level router
 meters_router = nested_routers.NestedSimpleRouter(properties_router, r'meters', lookup='meter')
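[Editor's note] For orientation, the registrations above yield nested v3 endpoints along these lines. A sketch, not an exhaustive list: the path parameter names assume rest_framework_nested's default <lookup>_pk convention and that the properties router (registered earlier in this file) uses lookup='property'.

    # GET|POST /api/v3/goals/
    # GET|POST /api/v3/organizations/{organization_pk}/access_levels/
    # GET|POST /api/v3/properties/{property_pk}/goal_notes/
    # GET|POST /api/v3/properties/{property_pk}/historical_notes/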
diff --git a/seed/building_sync/building_sync.py b/seed/building_sync/building_sync.py
index 5ebe3ccf1f..be1d5b7fc9 100644
--- a/seed/building_sync/building_sync.py
+++ b/seed/building_sync/building_sync.py
@@ -485,7 +485,7 @@ def _parse_version(self):
         if not bsync_element.tag.endswith('BuildingSync'):
             raise ParsingError('Expected BuildingSync element as root element in xml')

-        # first check for a version attribute in the buldingsync tag
+        # first check for a version attribute in the buildingsync tag
         if "version" in bsync_element.attrib:
             return bsync_element.attrib["version"]

diff --git a/seed/building_sync/tests/test_mappings.py b/seed/building_sync/tests/test_mappings.py
index f18fc21256..aea71522f4 100644
--- a/seed/building_sync/tests/test_mappings.py
+++ b/seed/building_sync/tests/test_mappings.py
@@ -230,7 +230,7 @@ def test_update_tree_changes_text_on_existing_element(self):
         # tree should remain valid as well after inserting
         self.xmlschema.validate(etree.tostring(self.tree).decode())

-    def test_update_tree_changes_text_on_nonexisting_element(self):
+    def test_update_tree_changes_text_on_nonexistent_element(self):
         # -- Setup
         # auc:PremisesName does not exist in the tree
         xpath = '/'.join([
+ """ + + def __init__(self, org_id, access_level_instances_details, num_levels): + # defaulted to None to show it hasn't been cached yet + self.access_level_instances_details = access_level_instances_details + self._org_id = org_id + self.num_levels = num_levels + + @classmethod + def factory(cls, access_level_instances_file, org_id): + """Factory function for accessLevelInstancesParser + + :param access_level_instances_file: File + :param org_id: int + :return: AccessLevelInstancesParser + """ + parser = reader.MCMParser(access_level_instances_file) + raw_data = list(parser.data) + + try: + keys = list(raw_data[0].keys()) + except IndexError: + raise ValueError("File has no rows") + + level_names = keys + # already checked that headers match level names before saving file + # raise ValueError if not + AccessLevelInstancesParser._access_level_names(org_id) + + num_levels = len(level_names) + + return cls(org_id, raw_data, num_levels) + + @classmethod + def _access_level_names(cls, org_id): + access_level_names = [] + if org_id: + access_level_names = Organization.objects.get(pk=org_id).access_level_names + return access_level_names diff --git a/seed/data_importer/match.py b/seed/data_importer/match.py index 7caf5af2c0..adc64aeb79 100644 --- a/seed/data_importer/match.py +++ b/seed/data_importer/match.py @@ -6,7 +6,6 @@ """ import datetime as dt import math -from functools import reduce from celery import shared_task from celery.utils.log import get_task_logger @@ -37,6 +36,9 @@ ) from seed.models.auditlog import AUDIT_IMPORT from seed.utils.match import ( + MultipleALIError, + NoAccessError, + NoViewsError, empty_criteria_filter, match_merge_link, matching_criteria_column_names, @@ -136,22 +138,43 @@ def match_and_link_incoming_properties_and_taxlots_by_cycle(file_pk, progress_ke progress_data.step('Matching data') # Set defaults - property_duplicates_against_existing_count = 0 + # property - within file + property_initial_incoming_count = 0 property_duplicates_within_file_count = 0 - property_merges_against_existing_count = 0 - property_merges_between_existing_count = 0 + property_duplicates_within_file_errors = [] property_merges_within_file_count = 0 - property_new_count = 0 + property_merges_within_file_errors = [] + + # property - within existing records + property_merges_between_existing_count = 0 - tax_lot_duplicates_against_existing_count = 0 + # property - introduce file to existing + property_duplicates_against_existing_count = 0 + merged_property_views = [] + merged_property_state_errors = [] + linked_property_views = [] + linked_property_state_errors = [] + new_property_views = [] + new_property_state_errors = [] + + # taxlot - within file + tax_lot_initial_incoming_count = 0 tax_lot_duplicates_within_file_count = 0 - tax_lot_merges_against_existing_count = 0 - tax_lot_merges_between_existing_count = 0 + tax_lot_duplicates_within_file_errors = [] tax_lot_merges_within_file_count = 0 - tax_lot_new_count = 0 + tax_lot_merges_within_file_errors = [] + + # taxlot - within existing records + taxlot_merges_between_existing_count = 0 - merged_linked_property_views = [] - merged_linked_taxlot_views = [] + # taxlot - introduce file to existing + taxlot_duplicates_against_existing_count = 0 + merged_taxlot_views = [] + merged_taxlot_state_errors = [] + linked_taxlot_views = [] + linked_taxlot_state_errors = [] + new_taxlot_views = [] + new_taxlot_state_errors = [] # Get lists and counts of all the properties and tax lots based on the import file. 
diff --git a/seed/data_importer/match.py b/seed/data_importer/match.py
index 7caf5af2c0..adc64aeb79 100644
--- a/seed/data_importer/match.py
+++ b/seed/data_importer/match.py
@@ -6,7 +6,6 @@
 """
 import datetime as dt
 import math
-from functools import reduce

 from celery import shared_task
 from celery.utils.log import get_task_logger
@@ -37,6 +36,9 @@
 )
 from seed.models.auditlog import AUDIT_IMPORT
 from seed.utils.match import (
+    MultipleALIError,
+    NoAccessError,
+    NoViewsError,
     empty_criteria_filter,
     match_merge_link,
     matching_criteria_column_names,
@@ -136,22 +138,43 @@ def match_and_link_incoming_properties_and_taxlots_by_cycle(file_pk, progress_ke
     progress_data.step('Matching data')

     # Set defaults
-    property_duplicates_against_existing_count = 0
+    # property - within file
+    property_initial_incoming_count = 0
     property_duplicates_within_file_count = 0
-    property_merges_against_existing_count = 0
-    property_merges_between_existing_count = 0
+    property_duplicates_within_file_errors = []
     property_merges_within_file_count = 0
-    property_new_count = 0
+    property_merges_within_file_errors = []
+
+    # property - within existing records
+    property_merges_between_existing_count = 0

-    tax_lot_duplicates_against_existing_count = 0
+    # property - introduce file to existing
+    property_duplicates_against_existing_count = 0
+    merged_property_views = []
+    merged_property_state_errors = []
+    linked_property_views = []
+    linked_property_state_errors = []
+    new_property_views = []
+    new_property_state_errors = []
+
+    # taxlot - within file
+    tax_lot_initial_incoming_count = 0
     tax_lot_duplicates_within_file_count = 0
-    tax_lot_merges_against_existing_count = 0
-    tax_lot_merges_between_existing_count = 0
+    tax_lot_duplicates_within_file_errors = []
     tax_lot_merges_within_file_count = 0
-    tax_lot_new_count = 0
+    tax_lot_merges_within_file_errors = []
+
+    # taxlot - within existing records
+    taxlot_merges_between_existing_count = 0

-    merged_linked_property_views = []
-    merged_linked_taxlot_views = []
+    # taxlot - introduce file to existing
+    taxlot_duplicates_against_existing_count = 0
+    merged_taxlot_views = []
+    merged_taxlot_state_errors = []
+    linked_taxlot_views = []
+    linked_taxlot_state_errors = []
+    new_taxlot_views = []
+    new_taxlot_state_errors = []

     # Get lists and counts of all the properties and tax lots based on the import file.
     property_initial_incoming_count = incoming_properties.count()
@@ -170,14 +193,14 @@ def match_and_link_incoming_properties_and_taxlots_by_cycle(file_pk, progress_ke
         if merge_duplicates:
             promoted_property_ids, property_duplicates_within_file_count = incoming_properties.values_list('id', flat=True), 0
         else:
-            promoted_property_ids, property_duplicates_within_file_count = filter_duplicate_states(
+            promoted_property_ids, property_duplicates_within_file_errors, property_duplicates_within_file_count = filter_duplicate_states(
                 incoming_properties,
                 sub_progress_key,
             )

         # Within the ImportFile, merge -States together based on user defined matching_criteria
         log_debug('Start Properties inclusive_match_and_merge')
-        promoted_property_ids, property_merges_within_file_count = inclusive_match_and_merge(
+        promoted_property_ids, property_merges_within_file_count, property_merges_within_file_errors = inclusive_match_and_merge(
             promoted_property_ids,
             org,
             PropertyState,
@@ -186,9 +209,17 @@ def match_and_link_incoming_properties_and_taxlots_by_cycle(file_pk, progress_ke

         # Filter Cycle-wide duplicates then merge and/or assign -States to -Views
         log_debug('Start Properties states_to_views')
-        merged_property_views, property_duplicates_against_existing_count, property_new_count, property_merges_against_existing_count, property_merges_between_existing_count = states_to_views(
+        (
+            property_merges_between_existing_count,
+            property_duplicates_against_existing_count,
+            merged_property_views,
+            merged_property_state_errors,  # TODO: we don't do anything with these. they should probably be deleted.
+            new_property_views,
+            errored_new_property_states,
+        ) = states_to_views(
             promoted_property_ids,
             org,
+            import_file.import_record.access_level_instance,
             cycle,
             PropertyState,
             sub_progress_key,
@@ -197,23 +228,38 @@ def match_and_link_incoming_properties_and_taxlots_by_cycle(file_pk, progress_ke

         # Look for links across Cycles
         log_debug('Start Properties link_views')
-        merged_linked_property_views = link_views(
+        (
+            merged_property_views,
+            # no merged_property_state_errors; they got off the ride before linking
+            linked_property_views,  # note: views that merged _and_ linked are in merged_property_views, not linked_property_views
+            linked_property_state_errors,
+            new_property_views,
+            new_property_state_errors,
+        ) = link_views_and_states(
             merged_property_views,
+            new_property_views,
+            errored_new_property_states,
             PropertyView,
+            cycle,
+            import_file.import_record.access_level_instance,
             sub_progress_key,
         )

+        # TODO: the states and Property should probably be deleted too
+        errored_linked_property_views = PropertyView.objects.filter(state__in=linked_property_state_errors)
+        errored_linked_property_views.delete()
+
     if incoming_tax_lots.exists():
         # Within the ImportFile, filter out the duplicates.
         log_debug("Start TaxLots filter_duplicate_states")
-        promoted_tax_lot_ids, tax_lot_duplicates_within_file_count = filter_duplicate_states(
+        promoted_tax_lot_ids, tax_lot_duplicates_within_file_errors, tax_lot_duplicates_within_file_count = filter_duplicate_states(
            incoming_tax_lots,
            sub_progress_key,
         )

         # Within the ImportFile, merge -States together based on user defined matching_criteria
         log_debug('Start TaxLots inclusive_match_and_merge')
-        promoted_tax_lot_ids, tax_lot_merges_within_file_count = inclusive_match_and_merge(
+        promoted_tax_lot_ids, tax_lot_merges_within_file_count, tax_lot_merges_within_file_errors = inclusive_match_and_merge(
             promoted_tax_lot_ids,
             org,
             TaxLotState,
@@ -222,9 +268,18 @@ def match_and_link_incoming_properties_and_taxlots_by_cycle(file_pk, progress_ke

         # Filter Cycle-wide duplicates then merge and/or assign -States to -Views
         log_debug('Start TaxLots states_to_views')
-        merged_linked_taxlot_views, tax_lot_duplicates_against_existing_count, tax_lot_new_count, tax_lot_merges_against_existing_count, tax_lot_merges_between_existing_count = states_to_views(
+        (
+            taxlot_merges_between_existing_count,
+            taxlot_duplicates_against_existing_count,
+            merged_taxlot_views,
+            merged_taxlot_state_errors,  # TODO: we don't do anything with these. they should probably be deleted.
+            new_taxlot_views,
+            errored_new_taxlot_states,
+        ) = states_to_views(
             promoted_tax_lot_ids,
             org,
+            import_file.import_record.access_level_instance,
             cycle,
             TaxLotState,
             sub_progress_key,
@@ -232,38 +287,114 @@ def match_and_link_incoming_properties_and_taxlots_by_cycle(file_pk, progress_ke

         # Look for links across Cycles
         log_debug('Start TaxLots link_views')
-        merged_linked_taxlot_views = link_views(
-            merged_linked_taxlot_views,
+        (
+            merged_taxlot_views,
+            # no merged_taxlot_state_errors; they got off the ride before linking
+            linked_taxlot_views,  # note: views that merged _and_ linked are in merged_taxlot_views, not linked_taxlot_views
+            linked_taxlot_state_errors,
+            new_taxlot_views,
+            new_taxlot_state_errors,
+        ) = link_views_and_states(
+            merged_taxlot_views,
+            new_taxlot_views,
+            errored_new_taxlot_states,
             TaxLotView,
+            cycle,
+            import_file.import_record.access_level_instance,
             sub_progress_key,
         )

+        # TODO: the states and taxlot should probably be deleted too
+        errored_linked_taxlot_views = TaxLotView.objects.filter(state__in=linked_taxlot_state_errors)
+        errored_linked_taxlot_views.delete()
+
     log_debug('Start pair_new_states')
     progress_data.step('Pairing data')
     pair_new_states(
-        merged_linked_property_views,
-        merged_linked_taxlot_views,
+        linked_property_views + new_property_views + merged_property_views,
+        linked_taxlot_views + new_taxlot_views + merged_taxlot_views,
         sub_progress_key,
     )

     return {
+        # property - within file
         'property_initial_incoming': property_initial_incoming_count,
-        'property_duplicates_against_existing': property_duplicates_against_existing_count,
         'property_duplicates_within_file': property_duplicates_within_file_count,
-        'property_merges_against_existing': property_merges_against_existing_count,
-        'property_merges_between_existing': property_merges_between_existing_count,
+        'property_duplicates_within_file_errors': len(property_duplicates_within_file_errors),
         'property_merges_within_file': property_merges_within_file_count,
-        'property_new': property_new_count,
+        'property_merges_within_file_errors': len(property_merges_within_file_errors),
+
+        # property - within existing records
+        'property_merges_between_existing': property_merges_between_existing_count,
+
+        # property - introduce file to existing
+        'property_duplicates_against_existing': property_duplicates_against_existing_count,
+        'property_merges_against_existing': len(merged_property_views),
+        'property_merges_against_existing_errors': len(merged_property_state_errors),
+        'property_links_against_existing': len(linked_property_views),
+        'property_links_against_existing_errors': len(linked_property_state_errors),
+        'property_new': len(new_property_views),
+        'property_new_errors': len(new_property_state_errors),
+
+        # taxlot - within file
         'tax_lot_initial_incoming': tax_lot_initial_incoming_count,
-        'tax_lot_duplicates_against_existing': tax_lot_duplicates_against_existing_count,
         'tax_lot_duplicates_within_file': tax_lot_duplicates_within_file_count,
-        'tax_lot_merges_against_existing': tax_lot_merges_against_existing_count,
-        'tax_lot_merges_between_existing': tax_lot_merges_between_existing_count,
+        'tax_lot_duplicates_within_file_errors': len(tax_lot_duplicates_within_file_errors),
         'tax_lot_merges_within_file': tax_lot_merges_within_file_count,
-        'tax_lot_new': tax_lot_new_count,
+        'tax_lot_merges_within_file_errors': len(tax_lot_merges_within_file_errors),
+
+        # taxlot - within existing records
+        'tax_lot_merges_between_existing': taxlot_merges_between_existing_count,
+
+        # taxlot - introduce file to existing
+        'tax_lot_duplicates_against_existing': taxlot_duplicates_against_existing_count,
+        'tax_lot_merges_against_existing': len(merged_taxlot_views),
+        'tax_lot_merges_against_existing_errors': len(merged_taxlot_state_errors),
+        'tax_lot_links_against_existing': len(linked_taxlot_views),
+        'tax_lot_links_against_existing_errors': len(linked_taxlot_state_errors),
+        'tax_lot_new': len(new_taxlot_views),
+        'tax_lot_new_errored': len(new_taxlot_state_errors),
     }


+def link_views_and_states(merged_views, new_views, errored_new_states, ViewClass, cycle, ali, sub_progress_key):
+    shared_args = [ViewClass, cycle, ali, sub_progress_key]
+
+    # merged_property_views are attached to properties that existed in the db prior to import, so this
+    # REALLY should not fail.
+    (
+        merged_and_linked_views,
+        merged_views,
+        merge_and_linked_states_errors,
+        _,
+    ) = link_states([v.state for v in merged_views], *shared_args)
+
+    # new_views may try to link invalidly if an existing record has a different ali. In that case,
+    # the new record should never have been created.
+    (
+        linked_views_a,
+        new_views,
+        linked_state_errors_a,
+        _,
+    ) = link_states([v.state for v in new_views], *shared_args)
+
+    # errored_new_states are new states without alis that also didn't merge. If they don't link, or
+    # try to link invalidly, we throw them out. As they are not yet attached to a record, remaining
+    # unlinked is not an option.
+    (
+        linked_views_b,
+        _,
+        linked_state_errors_b,
+        new_state_errors,
+    ) = link_states(errored_new_states, *shared_args)
+
+    merged_views += merged_and_linked_views
+    linked_views = linked_views_a + linked_views_b
+    linked_state_errors = linked_state_errors_a + linked_state_errors_b
+
+    return merged_views, linked_views, linked_state_errors, new_views, new_state_errors
+
+
 def filter_duplicate_states(unmatched_states, sub_progress_key):
     """
     Takes a QuerySet of -States, then flags and separates exact duplicates. This
@@ -278,30 +409,54 @@ def filter_duplicate_states(unmatched_states, sub_progress_key):
     flag the corresponding -States with DATA_STATE_DELETE.

     :param unmatched_states: QS
-    :return: canonical_state_ids, duplicate_count
+    :return: canonical_state_ids, errors_state_ids, duplicate_count
     """
     sub_progress_data = update_sub_progress_total(4, sub_progress_key)
     sub_progress_data.step('Matching Data (1/6): Filtering Duplicate States')

-    ids_grouped_by_hash = unmatched_states.\
+    states_grouped_by_hash = unmatched_states.\
         values('hash_object').\
-        annotate(duplicate_sets=ArrayAgg('id')).\
-        values_list('duplicate_sets', flat=True)
+        annotate(duplicate_sets=ArrayAgg('id'), duplicate_sets_ali=ArrayAgg('raw_access_level_instance_id'))

     sub_progress_data.step('Matching Data (1/6): Filtering Duplicate States')
+
+    # For each group of states with the same ali, find and select the canonical_state
     # For consistency, take the first member of each of the duplicate sets
-    canonical_state_ids = [
-        ids.pop(ids.index(min(ids)))
-        for ids
-        in ids_grouped_by_hash
-    ]
+    canonical_state_ids = []
+    duplicate_state_ids = []
+    errors_state_ids = []
+    for states in states_grouped_by_hash:
+        states = [
+            {"id": id, "ali_id": ali_id} for id, ali_id
+            in zip(states["duplicate_sets"], states["duplicate_sets_ali"])
+        ]
+        states.sort(key=lambda x: x["id"])
+        states_with_ali = [s for s in states if s["ali_id"] is not None]
+        present_ali_ids = set(state["ali_id"] for state in states_with_ali)
+
+        # None have alis, just choose the first
+        if len(present_ali_ids) == 0:
+            canonical_state = states[0]
+
+        # One ali! choose the first non-null
+        elif len(present_ali_ids) == 1:
+            canonical_state = states_with_ali[0]
+
+        # More than one ali was specified! all of these duplicates are invalid
+        else:
+            errors_state_ids += [s["id"] for s in states]
+            continue
+
+        canonical_state_ids.append(canonical_state["id"])
+        states.remove(canonical_state)
+        duplicate_state_ids += [s["id"] for s in states]
+
     sub_progress_data.step('Matching Data (1/6): Filtering Duplicate States')
-    duplicate_state_ids = reduce(lambda x, y: x + y, ids_grouped_by_hash)
     duplicate_count = unmatched_states.filter(pk__in=duplicate_state_ids).update(data_state=DATA_STATE_DELETE)

     sub_progress_data.step('Matching Data (1/6): Filtering Duplicate States')
     sub_progress_data.finish_with_success()
-    return canonical_state_ids, duplicate_count
+    return canonical_state_ids, errors_state_ids, duplicate_count


 def inclusive_match_and_merge(unmatched_state_ids, org, StateClass, sub_progress_key):
@@ -340,6 +495,7 @@ def inclusive_match_and_merge(unmatched_state_ids, org, StateClass, sub_progress
     # Collapse groups of matches found in the previous step into 1 -State per group
     merges_within_file = 0
+    errored_states = []
     priorities = Column.retrieve_priorities(org)
     batch_size = math.ceil(len(matched_id_groups) / 100)
     for idx, ids in enumerate(matched_id_groups):
@@ -348,6 +504,11 @@ def inclusive_match_and_merge(unmatched_state_ids, org, StateClass, sub_progress
             promoted_ids += ids
         else:
             states = [s for s in StateClass.objects.filter(pk__in=ids).order_by('-id')]
+            raw_ali_ids = set(s.raw_access_level_instance for s in states if s.raw_access_level_instance is not None)
+            if len(raw_ali_ids) > 1:
+                errored_states += states
+                continue
+
             merge_state = states.pop()
             merges_within_file += len(states)
@@ -364,10 +525,11 @@ def inclusive_match_and_merge(unmatched_state_ids, org, StateClass, sub_progress
     # Flag the soon to be promoted ID -States as having gone through matching
     StateClass.objects.filter(pk__in=promoted_ids).update(data_state=DATA_STATE_MATCHING)

-    return promoted_ids, merges_within_file
+
+    return promoted_ids, merges_within_file, errored_states


-def states_to_views(unmatched_state_ids, org, cycle, StateClass, sub_progress_key, merge_duplicates=False):
+def states_to_views(unmatched_state_ids, org, access_level_instance, cycle, StateClass, sub_progress_key, merge_duplicates=False):
     """
     The purpose of this method is to take incoming -States and apply them to a
     -View. In the process of doing so, -States could be flagged for "deletion"
@@ -480,15 +642,31 @@ def states_to_views(unmatched_state_ids, org, cycle, StateClass, sub_progress_ke
     # Process -States into -Views either directly (promoted_ids) or post-merge (merge_state_pairs).
     _log.debug("There are %s merge_state_pairs and %s promote_states" % (len(merge_state_pairs), promote_states.count()))
     priorities = Column.retrieve_priorities(org.pk)
-    processed_views = []
-    promoted_ids = []
-    merged_state_ids = []
     try:
         with transaction.atomic():
+            # For each merge_state_pair, try to merge the new state into the existing property view
+            merged_views = []
+            merged_state_ids = []
+            errored_merged_states = []
             batch_size = math.ceil(len(merge_state_pairs) / 100)
             for idx, state_pair in enumerate(merge_state_pairs):
                 existing_state, newer_state = state_pair
                 existing_view = ViewClass.objects.get(state_id=existing_state.id)
+                existing_obj = getattr(existing_view, "property" if table_name == 'PropertyState' else "taxlot")
+
+                # ensure that the new ali and the existing ali match, and that we have access to the existing ali.
+                new_ali = newer_state.raw_access_level_instance
+                if new_ali is None:
+                    if not (
+                        existing_obj.access_level_instance == access_level_instance or
+                        existing_obj.access_level_instance.is_descendant_of(access_level_instance)
+                    ):
+                        errored_merged_states.append(newer_state)
+                        continue
+                else:
+                    if existing_obj.access_level_instance != new_ali:
+                        errored_merged_states.append(newer_state)
+                        continue

                 # Merge -States and assign new/merged -State to existing -View
                 merged_state = save_state_match(existing_state, newer_state, priorities)
@@ -496,18 +674,25 @@ def states_to_views(unmatched_state_ids, org, cycle, StateClass, sub_progress_ke
                 existing_view.state = merged_state
                 existing_view.save()

-                processed_views.append(existing_view)
+                merged_views.append(existing_view)
                 merged_state_ids.append(merged_state.id)

                 if batch_size > 0 and idx % batch_size == 0:
                     sub_progress_data.step('Matching Data (4/6): Merging State Pairs')
             sub_progress_data = update_sub_progress_total(100, sub_progress_key, finish=True)

+            # For each state that doesn't merge into an existing property, promote it, creating a new property
+            new_views = []
+            promoted_state_ids = []
+            errored_new_states = []
             batch_size = math.ceil(len(promote_states) / 100)
             for idx, state in enumerate(promote_states):
-                promoted_ids.append(state.id)
                 created_view = state.promote(cycle)
-                processed_views.append(created_view)
+                if created_view is None:
+                    errored_new_states.append(state)
+                else:
+                    promoted_state_ids.append(state.id)
+                    new_views.append(created_view)

                 if batch_size > 0 and idx % batch_size == 0:
                     sub_progress_data.step('Matching Data (5/6): Promoting States')
             sub_progress_data.finish_with_success()
@@ -515,22 +700,28 @@ def states_to_views(unmatched_state_ids, org, cycle, StateClass, sub_progress_ke
     except IntegrityError as e:
         raise IntegrityError("Could not merge results with error: %s" % (e))

-    new_count = len(promoted_ids)

     # update merge_state while excluding any states that were a product of a previous, file-inclusive merge
-    StateClass.objects.filter(pk__in=promoted_ids).exclude(merge_state=MERGE_STATE_MERGED).update(
+    StateClass.objects.filter(pk__in=promoted_state_ids).exclude(merge_state=MERGE_STATE_MERGED).update(
         merge_state=MERGE_STATE_NEW
     )

-    matched_count = StateClass.objects.filter(pk__in=merged_state_ids).update(
+    StateClass.objects.filter(pk__in=merged_state_ids).update(
         data_state=DATA_STATE_MATCHING,
         merge_state=MERGE_STATE_MERGED
     )

-    return list(set(processed_views)), duplicate_count, new_count, matched_count, merged_between_existing_count
+    return (
+        merged_between_existing_count,
+        duplicate_count,
+        list(set(merged_views)),  # cast through set to drop duplicate views
+        errored_merged_states,
+        new_views,
+        errored_new_states,
+    )


-def link_views(merged_views, ViewClass, sub_progress_key):
+def link_states(states, ViewClass, cycle, highest_ali, sub_progress_key):
     """
-    Run each of the given -Views through a linking round.
+    Run each of the given -States through a linking round.

     For details on the actual linking logic, please refer to the
     match_merge_link() method.
@@ -543,21 +734,34 @@ def link_states(states, ViewClass, cycle, highest_ali, sub_progress_key):
     else:
         state_class_name = "TaxLotState"

-    processed_views = []
-
-    batch_size = math.ceil(len(merged_views) / 100)
-    for idx, view in enumerate(merged_views):
-        _merge_count, _link_count, view_id = match_merge_link(view.id, state_class_name)
-
-        if view_id is not None:
-            processed_views.append(ViewClass.objects.get(pk=view_id))
+    linked_views = []
+    unlinked_views = []
+    invalid_link_states = []
+    unlinked_states = []
+
+    batch_size = math.ceil(len(states) / 100)
+    for idx, state in enumerate(states):
+        try:
+            _merge_count, _link_count, view_id = match_merge_link(state.id, state_class_name, highest_ali=highest_ali, cycle=cycle)
+        except (MultipleALIError, NoAccessError):
+            invalid_link_states.append(state.id)
+            continue
+        except NoViewsError:
+            unlinked_states.append(state.id)
+            continue
+
+        view = ViewClass.objects.get(pk=view_id)
+        if _link_count == 0:
+            unlinked_views.append(view)
         else:
-            processed_views.append(view)
+            linked_views.append(view)
+
         if batch_size > 0 and idx % batch_size == 0:
             sub_progress_data.step('Matching Data (6/6): Merging Views')
+
     sub_progress_data.finish_with_success()
-    return processed_views
+    return linked_views, unlinked_views, invalid_link_states, unlinked_states


 def save_state_match(state1, state2, priorities):
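[Editor's note] The ALI-aware selection rule inside filter_duplicate_states, shown in isolation. A minimal sketch, not part of the patch: each hash-duplicate group is reduced to (canonical, duplicates, errors) exactly as in the loop above; the function name pick_canonical is hypothetical.

    def pick_canonical(group):
        # group: list of {'id': int, 'ali_id': int or None} sharing one hash_object
        group = sorted(group, key=lambda s: s["id"])
        with_ali = [s for s in group if s["ali_id"] is not None]
        ali_ids = {s["ali_id"] for s in with_ali}
        if len(ali_ids) > 1:
            # conflicting alis: the whole group is invalid
            return None, [], [s["id"] for s in group]
        canonical = with_ali[0] if with_ali else group[0]
        duplicates = [s["id"] for s in group if s is not canonical]
        return canonical["id"], duplicates, []

    pick_canonical([{'id': 2, 'ali_id': None}, {'id': 1, 'ali_id': 7}])  # -> (1, [2], [])
    pick_canonical([{'id': 1, 'ali_id': 7}, {'id': 2, 'ali_id': 8}])     # -> (None, [], [1, 2])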
diff --git a/seed/data_importer/migrations/0019_accountability_hierarchy.py b/seed/data_importer/migrations/0019_accountability_hierarchy.py
new file mode 100644
index 0000000000..33813ccae7
--- /dev/null
+++ b/seed/data_importer/migrations/0019_accountability_hierarchy.py
@@ -0,0 +1,41 @@
+# Generated by Django 3.2.23 on 2024-03-08 06:05
+
+import django.db.models.deletion
+from django.db import migrations, models, transaction
+
+
+@transaction.atomic
+def set_import_records_ali(apps, schema_editor):
+    ImportRecord = apps.get_model('data_importer', 'ImportRecord')
+    AccessLevelInstance = apps.get_model('orgs', 'AccessLevelInstance')
+
+    if ImportRecord.objects.filter(super_organization=None).exists():
+        raise ValueError("Some ImportRecords have no super_organization, and are orphaned. This shouldn't have happened and these ImportRecords cannot be migrated. Please add a super_organization or delete the orphaned ImportRecords and try again.")
+
+    root_alis = {ali.organization_id: ali for ali in AccessLevelInstance.objects.filter(depth=1)}
+
+    for import_record in ImportRecord.objects.all().iterator():
+        import_record.access_level_instance = root_alis[import_record.super_organization_id]
+        import_record.save(update_fields=['access_level_instance'])
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('orgs', '0030_accountability_hierarchy'),
+        ('data_importer', '0018_importfile_multiple_cycle_upload'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='importrecord',
+            name='access_level_instance',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='import_record', to='orgs.accesslevelinstance'),
+        ),
+        migrations.RunPython(set_import_records_ali, reverse_code=migrations.RunPython.noop),
+        migrations.AlterField(
+            model_name='importrecord',
+            name='access_level_instance',
+            field=models.ForeignKey(null=False, on_delete=django.db.models.deletion.CASCADE, related_name='import_record', to='orgs.accesslevelinstance'),
+        ),
+    ]
diff --git a/seed/data_importer/models.py b/seed/data_importer/models.py
index 9e5584b9e4..4d0ea9d1b3 100644
--- a/seed/data_importer/models.py
+++ b/seed/data_importer/models.py
@@ -20,6 +20,7 @@
 from config.utils import de_camel_case
 from seed.data_importer.managers import NotDeletedManager
 from seed.lib.mcm.reader import ROW_DELIMITER
+from seed.lib.superperms.orgs.models import AccessLevelInstance
 from seed.lib.superperms.orgs.models import Organization as SuperOrganization
 from seed.utils.cache import (
     delete_cache,
@@ -92,6 +93,7 @@ class ImportRecord(NotDeletableModel):
                            help_text='The application (e.g., BPD or SEED) for this dataset',
                            default='seed')
     owner = models.ForeignKey('landing.SEEDUser', on_delete=models.CASCADE, blank=True, null=True)
+    access_level_instance = models.ForeignKey(AccessLevelInstance, on_delete=models.CASCADE, null=False, related_name="import_record")
     start_time = models.DateTimeField(blank=True, null=True)
     finish_time = models.DateTimeField(blank=True, null=True)
     created_at = models.DateTimeField(blank=True, null=True)
@@ -596,6 +598,10 @@ def __del__(self):
     def from_portfolio_manager(self):
         return self._strcmp(self.source_program, 'PortfolioManager')

+    @property
+    def access_level_instance(self):
+        return self.import_record.access_level_instance
+
     @property
     def from_buildingsync(self):
         source_type = self.source_type if self.source_type else ''
diff --git a/seed/data_importer/tasks.py b/seed/data_importer/tasks.py
index f75a9c603b..a6796d8ba8 100644
--- a/seed/data_importer/tasks.py
+++ b/seed/data_importer/tasks.py
@@ -12,6 +12,7 @@
 import json
 import os
 import tempfile
+import time
 import traceback
 import zipfile
 from bisect import bisect_left
@@ -27,8 +28,10 @@
 from celery.utils.log import get_task_logger
 from dateutil import parser
 from django.contrib.gis.geos import GEOSGeometry
+from django.core.exceptions import ObjectDoesNotExist
 from django.core.files.uploadedfile import SimpleUploadedFile
 from django.db import DataError, IntegrityError, connection, transaction
+from django.db.models import Q
 from django.db.utils import ProgrammingError
 from django.utils import timezone as tz
 from django.utils.timezone import make_naive
@@ -36,6 +39,9 @@

 from seed.building_sync import validation_client
 from seed.building_sync.building_sync import BuildingSync
+from seed.data_importer.access_level_instances_parser import (
+    AccessLevelInstancesParser
+)
 from seed.data_importer.equivalence_partitioner import EquivalencePartitioner
 from seed.data_importer.match import (
     match_and_link_incoming_properties_and_taxlots
@@ -53,7 +59,7 @@
 from seed.lib.mcm.mapper import expand_rows
 from seed.lib.mcm.utils import batch
 from seed.lib.progress_data.progress_data import ProgressData
-from seed.lib.superperms.orgs.models import Organization
+from seed.lib.superperms.orgs.models import AccessLevelInstance, Organization
 from seed.lib.xml_mapping import reader as xml_reader
 from seed.models import (
     ASSESSED_BS,
@@ -393,6 +399,9 @@ def map_row_chunk(ids, file_pk, source_type, prog_key, **kwargs):
                     map_model_obj.import_file = import_file
                     map_model_obj.source_type = save_type
                     map_model_obj.organization = import_file.import_record.super_organization
+                    _process_ali_data(map_model_obj, row, import_file.access_level_instance, table_mappings.get(""))
+
                     if hasattr(map_model_obj, 'data_state'):
                         map_model_obj.data_state = DATA_STATE_MAPPING
                     if hasattr(map_model_obj, 'clean'):
@@ -493,6 +502,59 @@ def map_row_chunk(ids, file_pk, source_type, prog_key, **kwargs):
     return True


+def _process_ali_data(model, raw_data, import_file_ali, ah_mappings):
+    org_alns = model.organization.access_level_names
+
+    # if the org only has root, just assign it to root; there won't be any ali info
+    if AccessLevelInstance.objects.filter(organization=model.organization).count() == 1:
+        model.raw_access_level_instance = model.organization.root
+        return
+
+    # if no mappings
+    if not ah_mappings:
+        model.raw_access_level_instance_error = "Missing Access Level mappings."
+        return
+
+    # clean ali_info
+    ali_info = {
+        to_col: raw_data.get(from_col)
+        for from_col, (_, to_col, _, _) in ah_mappings.items()
+        if raw_data.get(from_col) is not None and raw_data.get(from_col) != ""
+    }
+    if not ali_info:
+        model.raw_access_level_instance_error = "Missing Access Level Column data."
+        return
+
+    # ensure we have a valid set of keys, else error out
+    needed_keys = set(org_alns[:len(ali_info)])
+    if needed_keys != ali_info.keys():
+        model.raw_access_level_instance_error = "Missing/Incomplete Access Level Column."
+        return
+
+    # try to get an ali matching the ali info within the subtree
+    paths_match = Q(path=ali_info)
+    in_subtree = Q(lft__gte=import_file_ali.lft, rgt__lte=import_file_ali.rgt)
+    ali = AccessLevelInstance.objects.filter(organization=model.organization).filter(Q(paths_match & in_subtree)).first()
+
+    # if ali is None, we error
+    if ali is None:
+        is_ancestor = Q(lft__lt=import_file_ali.lft, rgt__gt=import_file_ali.rgt)
+        ancestor_ali = AccessLevelInstance.objects.filter(organization=model.organization).filter(Q(is_ancestor & paths_match)).first()
+
+        # differing errors if
+        # 1. the user can see the ali but cannot access it, or
+        # 2. the ali cannot be seen by the user and/or doesn't exist.
+        if ancestor_ali is not None:
+            model.raw_access_level_instance_error = "Access Level Instance cannot be accessed with the permissions of this import file."
+        else:
+            model.raw_access_level_instance_error = "Access Level Information does not match any existing Access Level Instance."
+
+        return
+
+    # success!
+    model.raw_access_level_instance = ali
+
+
 def _store_raw_footprint_and_create_rule(footprint_details, table, org, import_file, original_row, map_model_obj):
     column_name = footprint_details['raw_field'] + ' (Invalid Footprint)'
@@ -811,6 +873,22 @@ def finish_raw_save(results, file_pk, progress_key):
     return finished_progress_data


+@shared_task(ignore_result=True)
+def finish_raw_ali_save(results, progress_key):
+    """
+    Finish importing the raw Access Level Instances file.
+
+    :param results: List of results from the parent task
+    :param progress_key: string, Progress Key to append progress to
+    :return: results: results from the other tasks before the chord ran
+    """
+    progress_data = ProgressData.from_key(progress_key)
+
+    return progress_data.finish_with_success(results)
+
+
 def cache_first_rows(import_file, parser):
     """Cache headers, and rows 2-6 for validation/viewing.
@@ -1008,6 +1086,103 @@ def _save_sensor_readings_task(readings_tuples, data_logger_id, sensor_column_na
     return result


+@shared_task
+def _save_access_level_instances_task(rows, org_id, progress_key):
+    progress_data = ProgressData.from_key(progress_key)
+    result = {}
+
+    # get org
+    try:
+        org = Organization.objects.get(pk=org_id)
+    except ObjectDoesNotExist:
+        raise ValueError('Could not retrieve organization at pk = ' + str(org_id))
+
+    # get access level names (array of ordered names)
+    access_level_names = AccessLevelInstancesParser._access_level_names(org_id)
+    access_level_names = [x.lower() for x in access_level_names]
+    # determine whether the root level was provided in the file (if not, remove it from the list)
+    has_root = True
+    if rows:
+        headers = [x.lower() for x in rows[0].keys()]
+
+        if access_level_names[0] not in headers:
+            access_level_names.pop(0)
+            has_root = False
+    # save any instances that don't exist yet
+    for row in rows:
+        message = None
+        # process in order of access_level_names
+        # create the needed instances along the way
+        current_level = None
+        children = [org.root]
+        if not has_root:
+            children = org.root.get_children()
+            current_level = org.root
+        # lowercase keys just in case
+        row = {k.lower(): v for k, v in row.items()}
+
+        for key, val in row.items():
+            # check headers
+            if key not in access_level_names:
+                message = f"Error reading CSV data row: {row}...no access level for column {key} found"
+                break
+            # check for empty value (don't add blanks)
+            if not val:
+                message = f"Blank value for column {key} in CSV data row: {row}...skipping"
+                break
+
+            # does this level exist already?
+            looking_for = val
+
+            found = False
+            for child in children:
+                if child.name == looking_for:
+                    found = True
+                    current_level = child
+                    children = current_level.get_children()
+                    break
+
+            if not found:
+                if not current_level:
+                    # this would mean they are trying to add ROOT, and that can't be done
+                    message = f"Error attempting to add '{val}' as another root element. Root element already defined as: {org.root.name}"
+                    break
+                # add it
+                try:
+                    current_level = org.add_new_access_level_instance(current_level.id, looking_for)
+                    current_level.refresh_from_db()
+                    # get its children (should be empty)
+                    children = current_level.get_children()
+                except Exception as e:
+                    message = f"Error has occurred when adding element '{val}' for entry: {row}: {e}"
+                    break
+
+        # add a row but only for errors
+        if message:
+            result[row[access_level_names[-1]]] = {'message': message}
+
+        progress_data.step()
+
+    return result
+
+
+@shared_task
+def _save_access_level_instances_data_create_tasks(filename, org_id, progress_key):
+    progress_data = ProgressData.from_key(progress_key)
+
+    # open and read in the file
+    parser = AccessLevelInstancesParser.factory(
+        open(filename, 'r'),
+        org_id
+    )
+    access_level_instances_data = parser.access_level_instances_details
+
+    progress_data.total = len(access_level_instances_data)
+    progress_data.save()
+    results = _save_access_level_instances_task(access_level_instances_data, org_id, progress_data.key)
+    return finish_raw_ali_save(results, progress_data.key)
+
+
 @shared_task
 def _save_greenbutton_data_task(readings, meter_id, meter_usage_point_id, progress_key):
     """
@@ -1305,6 +1480,23 @@ def _save_raw_data_create_tasks(file_pk, progress_key):
     return chord(tasks, interval=15)(finish_raw_save.s(file_pk, progress_data.key))


+def save_raw_access_level_instances_data(filename, org_id):
+    """ save data and keep progress """
+    progress_data = ProgressData(func_name='save_raw_access_level_instances_data', unique_id=int(time.time()))
+    try:
+        # queue up the tasks and immediately return. This is needed in the case of large hierarchy files
+        # causing the website to time out due to inactivity.
+        _save_access_level_instances_data_create_tasks.s(filename, org_id, progress_data.key).delay()
+    except StopIteration:
+        progress_data.finish_with_error('StopIteration Exception', traceback.format_exc())
+    except KeyError as e:
+        progress_data.finish_with_error('Invalid Column Name: "' + str(e) + '"', traceback.format_exc())
+    except Exception as e:
+        progress_data.finish_with_error('Unhandled Error: ' + str(e), traceback.format_exc())
+
+    return progress_data.result()
+
+
 def save_raw_espm_data_synchronous(file_pk: int) -> dict:
     """This method is a one-off method for saving the raw ESPM data synchronously. This
     is needed for the ESPM update method that runs on a single property. The `save_raw_data`
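[Editor's note] The header checks in _process_ali_data above require the populated level columns to form a prefix of the org's access_level_names. A minimal mirror of just that check, for illustration; the function name ali_info_error and the sample level names are hypothetical.

    def ali_info_error(org_alns, ali_info):
        if not ali_info:
            return "Missing Access Level Column data."
        if set(org_alns[:len(ali_info)]) != ali_info.keys():
            return "Missing/Incomplete Access Level Column."
        return None

    ali_info_error(["Sector", "Sub Sector"], {"Sector": "Residential"})
    # -> None; "Sector" alone is a valid prefix
    ali_info_error(["Sector", "Sub Sector"], {"Sub Sector": "Multi Family"})
    # -> "Missing/Incomplete Access Level Column." (the prefix was skipped)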
diff --git a/seed/data_importer/tests/integration/test_data_import.py b/seed/data_importer/tests/integration/test_data_import.py
index e211765ae7..e0448f2824 100644
--- a/seed/data_importer/tests/integration/test_data_import.py
+++ b/seed/data_importer/tests/integration/test_data_import.py
@@ -104,7 +104,7 @@ def test_map_data(self):
         """Save mappings based on user specifications."""
         # Create new import file to test
         import_record = ImportRecord.objects.create(
-            owner=self.user, last_modified_by=self.user, super_organization=self.org
+            owner=self.user, last_modified_by=self.user, super_organization=self.org, access_level_instance=self.org.root
         )
         import_file = ImportFile.objects.create(
             import_record=import_record,
@@ -515,7 +515,9 @@ def test_promote_properties(self):

         # Promote the PropertyState to a PropertyView
         pv1 = ps.promote(self.cycle)
+        ps.raw_access_level_instance = self.org.root  # un-clear it
         pv2 = ps.promote(self.cycle)  # should just return the same object
+        ps.raw_access_level_instance = self.org.root  # un-clear it
         self.assertEqual(pv1, pv2)

         # promote the same state for a new cycle, same data
@@ -528,7 +530,7 @@

 # For some reason if you comment out the next two test cases (TestMappingPropertiesOnly and
 # TestMappingTaxLotsOnly), the test_views_matching.py file will fail. I cannot figure out
-# what is causing this and it is really annoying. Inherenting from DataMappingBaseTestCase
+# what is causing this and it is really annoying. Inheriting from DataMappingBaseTestCase
 # will delete all the model data upon completion, Maybe because FAKE_MAPPINGS
 # is not a copy, rather a pointer?
diff --git a/seed/data_importer/tests/integration/test_demo_v2.py b/seed/data_importer/tests/integration/test_demo_v2.py
index 44c12ca405..716b6c9839 100644
--- a/seed/data_importer/tests/integration/test_demo_v2.py
+++ b/seed/data_importer/tests/integration/test_demo_v2.py
@@ -54,13 +54,13 @@ def set_up(self, import_file_source_type):
         )

         import_record_1 = ImportRecord.objects.create(
-            owner=user, last_modified_by=user, super_organization=org
+            owner=user, last_modified_by=user, super_organization=org, access_level_instance=org.root
         )
         import_file_1 = ImportFile.objects.create(import_record=import_record_1, cycle=cycle)

         import_record_2 = ImportRecord.objects.create(
-            owner=user, last_modified_by=user, super_organization=org
+            owner=user, last_modified_by=user, super_organization=org, access_level_instance=org.root
         )
         import_file_2 = ImportFile.objects.create(import_record=import_record_2, cycle=cycle)
diff --git a/seed/data_importer/tests/integration/test_equivalence_partioner_with_file.py b/seed/data_importer/tests/integration/test_equivalence_partioner_with_file.py
index 5b5ed6a2ab..d27407dbd7 100644
--- a/seed/data_importer/tests/integration/test_equivalence_partioner_with_file.py
+++ b/seed/data_importer/tests/integration/test_equivalence_partioner_with_file.py
@@ -44,7 +44,7 @@ def test_equivalence(self):
         all_unmatched_properties = self.import_file.find_unmatched_property_states()
         sub_progress_data = ProgressData(func_name='match_sub_progress', unique_id=123)
         sub_progress_data.save()
-        unmatched_property_ids, duplicate_property_count = match.filter_duplicate_states(
+        unmatched_property_ids, _, duplicate_property_count = match.filter_duplicate_states(
             all_unmatched_properties,
             sub_progress_data.key,
         )
diff --git a/seed/data_importer/tests/integration/test_footprints_import.py b/seed/data_importer/tests/integration/test_footprints_import.py
index 56a0ff7a98..1a9585f779 100644
--- a/seed/data_importer/tests/integration/test_footprints_import.py
+++ b/seed/data_importer/tests/integration/test_footprints_import.py
@@ -56,13 +56,13 @@ def set_up(self, import_file_source_type):
         )

         import_record_1 = ImportRecord.objects.create(
-            owner=user, last_modified_by=user, super_organization=org
+            owner=user, last_modified_by=user, super_organization=org, access_level_instance=org.root
         )
         import_file_1 = ImportFile.objects.create(import_record=import_record_1, cycle=cycle)

         import_record_2 = ImportRecord.objects.create(
-            owner=user, last_modified_by=user, super_organization=org
+            owner=user, last_modified_by=user, super_organization=org, access_level_instance=org.root
         )
         import_file_2 = ImportFile.objects.create(import_record=import_record_2, cycle=cycle)
diff --git a/seed/data_importer/tests/integration/test_merge_duplicate_rows.py b/seed/data_importer/tests/integration/test_merge_duplicate_rows.py
index 5d38fc5a70..3d4b998063 100644
--- a/seed/data_importer/tests/integration/test_merge_duplicate_rows.py
+++ b/seed/data_importer/tests/integration/test_merge_duplicate_rows.py
@@ -204,7 +204,7 @@ def test_import_duplicates(self):
         sub_progress_data = ProgressData(func_name='match_sub_progress', unique_id=123)
         sub_progress_data.save()

-        unique_property_states, _ = match.filter_duplicate_states(ps, sub_progress_data.key)
+        unique_property_states, _, _ = match.filter_duplicate_states(ps, sub_progress_data.key)
         self.assertEqual(len(unique_property_states), 4)

         tasks.geocode_and_match_buildings_task(self.import_file.id)
diff --git a/seed/data_importer/tests/test_AH_import.py b/seed/data_importer/tests/test_AH_import.py
new file mode 100644
index 0000000000..0f529451ca
--- /dev/null
+++ b/seed/data_importer/tests/test_AH_import.py
@@ -0,0 +1,951 @@
+# !/usr/bin/env python
+# encoding: utf-8
+"""
+SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors.
+See also https://github.com/seed-platform/seed/main/LICENSE.md
+"""
+
+
+from datetime import datetime
+
+from seed.data_importer.match import (
+    match_and_link_incoming_properties_and_taxlots
+)
+from seed.lib.progress_data.progress_data import ProgressData
+from seed.models import (
+    ASSESSED_RAW,
+    DATA_STATE_MAPPING,
+    DATA_STATE_MATCHING,
+    Property,
+    PropertyState,
+    PropertyView
+)
+from seed.test_helpers.fake import (
+    FakeCycleFactory,
+    FakePropertyFactory,
+    FakePropertyStateFactory,
+    FakePropertyViewFactory
+)
+from seed.tests.util import DataMappingBaseTestCase
+
+
+class TestAHImportFile(DataMappingBaseTestCase):
+    def setUp(self):
+        selfvars = self.set_up(ASSESSED_RAW)
+        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
+        self.import_file.mapping_done = True
+        self.import_file.save()
+        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
+
+        # create tree
+        self.org.access_level_names = ["1st Gen", "2nd Gen", "3rd Gen"]
+        mom = self.org.add_new_access_level_instance(self.org.root.id, "mom")
+        self.me_ali = self.org.add_new_access_level_instance(mom.id, "me")
+        self.sister = self.org.add_new_access_level_instance(mom.id, "sister")
+        self.org.save()
+
+        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
+        self.property_factory = FakePropertyFactory(organization=self.org)
+
+        self.base_details = {
+            'import_file_id': self.import_file.id,
+            'data_state': DATA_STATE_MAPPING,
+        }
+
+        progress_data = ProgressData(func_name='match_buildings', unique_id=self.import_file)
+        sub_progress_data = ProgressData(func_name='match_sub_progress', unique_id=self.import_file)
+        self.action_args = [self.import_file.id, progress_data.key, sub_progress_data.key]
+
+        self.blank_result = {
+            'import_file_records': None,
+            'property_initial_incoming': 0,
+            'property_duplicates_against_existing': 0,
+            'property_duplicates_within_file': 0,
+            'property_duplicates_within_file_errors': 0,
+            'property_merges_against_existing': 0,
+            'property_merges_against_existing_errors': 0,
+            'property_merges_between_existing': 0,
+            'property_merges_within_file': 0,
+            'property_merges_within_file_errors': 0,
+            'property_new': 0,
+            'property_new_errors': 0,
+            'tax_lot_initial_incoming': 0,
+            'tax_lot_duplicates_against_existing': 0,
+            'tax_lot_duplicates_within_file': 0,
+            'tax_lot_duplicates_within_file_errors': 0,
+            'tax_lot_merges_against_existing': 0,
+            'tax_lot_merges_against_existing_errors': 0,
+            'tax_lot_merges_between_existing': 0,
+            'tax_lot_merges_within_file': 0,
+            'tax_lot_merges_within_file_errors': 0,
+            'tax_lot_new': 0,
+            'tax_lot_new_errored': 0,
+        }
+
+
+class TestAHImport(TestAHImportFile):
+    def test_AH_set(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 1
+        self.blank_result['property_new'] = 1
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # Assert - Property was created with the correct ali
+        assert Property.objects.count() == 1
+        p = Property.objects.first()
+        assert p.access_level_instance == self.me_ali
+
+    def test_no_AH(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_error"] = "uh oh"
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 1
+        self.blank_result['property_new_errors'] = 1
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # Assert - No property was created
+        assert Property.objects.count() == 0
+        assert PropertyState.objects.count() == 1
+
+
+class TestAHImportDuplicateIncoming(TestAHImportFile):
+    def setUp(self):
+        super().setUp()
+
+        # this causes all the states to be duplicates
+        self.base_details["ubid"] = '86HJPCWQ+2VV-1-3-2-3'
+        self.base_details["no_default_data"] = False
+
+    def test_duplicate_both_good(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 2
+        self.blank_result['property_duplicates_within_file'] = 1
+        self.blank_result['property_new'] = 1
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # Assert - 1 Property, 1 PropertyView, 2 PropertyStates
+        self.assertEqual(Property.objects.count(), 1)
+        p = Property.objects.first()
+        assert p.access_level_instance == self.me_ali
+        assert PropertyState.objects.count() == 2
+
+    def test_duplicate_both_good_but_different_a(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_id"] = self.org.root.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 2
+        self.blank_result['property_duplicates_within_file_errors'] = 2
+        self.blank_result['property_new'] = 0
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # Assert - No properties created
+        self.assertEqual(Property.objects.count(), 0)
+        self.assertEqual(PropertyState.objects.count(), 2)
+
+    def test_duplicate_both_good_but_different_b(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        self.base_details["raw_access_level_instance_id"] = self.org.root.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 2
+        self.blank_result['property_duplicates_within_file_errors'] = 2
+        self.blank_result['property_new'] = 0
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # No properties created, 2 PropertyStates remain
+        self.assertEqual(Property.objects.count(), 0)
+        self.assertEqual(PropertyState.objects.count(), 2)
+
+    def test_duplicate_one_bad_a(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_error"] = "uh oh"
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        self.base_details["raw_access_level_instance_error"] = None
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 2
+        self.blank_result['property_duplicates_within_file'] = 1
+        self.blank_result['property_new'] = 1
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # 1 Property, 1 PropertyView, 2 PropertyStates
+        self.assertEqual(Property.objects.count(), 1)
+        p = Property.objects.first()
+        assert p.access_level_instance == self.me_ali
+        assert PropertyState.objects.count() == 2
+
+    def test_duplicate_one_bad_b(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        self.base_details["raw_access_level_instance_error"] = "uh oh"
+        self.base_details["raw_access_level_instance_id"] = None
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 2
+        self.blank_result['property_duplicates_within_file'] = 1
+        self.blank_result['property_new'] = 1
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # 1 Property, 1 PropertyView, 2 PropertyStates
+        self.assertEqual(Property.objects.count(), 1)
+        p = Property.objects.first()
+        assert p.access_level_instance == self.me_ali
+        assert PropertyState.objects.count() == 2
+
+    def test_duplicate_both_bad(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_error"] = "uh oh"
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 2
+        self.blank_result['property_duplicates_within_file'] = 1
+        self.blank_result['property_new_errors'] = 1  # because it doesn't have an ali
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # 0 Properties, 0 PropertyViews, 2 PropertyStates
+        self.assertEqual(Property.objects.count(), 0)
+        self.assertEqual(PropertyState.objects.count(), 2)
+
+
+class TestAHImportMatchIncoming(TestAHImportFile):
+    def setUp(self):
+        super().setUp()
+
+        # this causes all the states to match
+        self.base_details["ubid"] = '86HJPCWQ+2VV-1-3-2-3'
+        self.base_details["no_default_data"] = False
+
+    def test_match_both_good(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.base_details['city'] = 'Denver'  # so not duplicate
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 2
+        self.blank_result['property_merges_within_file'] = 1
+        self.blank_result['property_new'] = 1
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # 1 Property, 1 PropertyView, 3 PropertyStates (2 imported, 1 merge result)
+        self.assertEqual(Property.objects.count(), 1)
+        p = Property.objects.first()
+        assert p.access_level_instance == self.me_ali
+        assert PropertyState.objects.count() == 3
+
+    def test_match_both_good_but_different_a(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_id"] = self.org.root.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.base_details['city'] = 'Denver'  # so not duplicate
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 2
+        self.blank_result['property_merges_within_file_errors'] = 2
+        self.blank_result['property_new'] = 0
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # No Property created, 2 PropertyStates remain
+        self.assertEqual(Property.objects.count(), 0)
+        self.assertEqual(PropertyState.objects.count(), 2)
+
+    def test_match_both_good_but_different_b(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        self.base_details["raw_access_level_instance_id"] = self.org.root.id
+        self.base_details['city'] = 'Denver'  # so not duplicate
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 2
+        self.blank_result['property_merges_within_file_errors'] = 2
+        self.blank_result['property_new'] = 0
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # No Property created, 2 PropertyStates remain
+        self.assertEqual(Property.objects.count(), 0)
+        self.assertEqual(PropertyState.objects.count(), 2)
+
+    def test_match_one_bad_a(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_error"] = "uh oh"
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        self.base_details["raw_access_level_instance_error"] = None
+        self.base_details["raw_access_level_instance_id"] = self.org.root.id
+        self.base_details['city'] = 'Denver'  # so not duplicate
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 2
+        self.blank_result['property_merges_within_file'] = 1
+        self.blank_result['property_new'] = 1
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # 1 Property, 1 PropertyView, 3 PropertyStates (2 imported, 1 merge result)
+        self.assertEqual(Property.objects.count(), 1)
+        p = Property.objects.first()
+        assert p.access_level_instance == self.org.root
+        assert PropertyState.objects.count() == 3
+
+    def test_match_one_bad_b(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        self.base_details["raw_access_level_instance_error"] = "uh oh"
+        self.base_details["raw_access_level_instance_id"] = None
+        self.base_details['city'] = 'Denver'  # so not duplicate
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 2
+        self.blank_result['property_merges_within_file'] = 1
+        self.blank_result['property_new'] = 1
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # 1 Property, 1 PropertyView, 3 PropertyStates (2 imported, 1 merge result)
+        self.assertEqual(Property.objects.count(), 1)
+        p = Property.objects.first()
+        assert p.access_level_instance == self.me_ali
+        assert PropertyState.objects.count() == 3
+
+    def test_match_both_bad(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_error"] = "uh oh"
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        self.base_details['city'] = 'Denver'  # so not duplicate
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 2
+        self.blank_result['property_merges_within_file'] = 1
+        self.blank_result['property_new_errors'] = 1
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # No Property created; 3 PropertyStates (2 imported, 1 merge result)
+        self.assertEqual(Property.objects.count(), 0)
+        self.assertEqual(PropertyState.objects.count(), 3)
+
+
+class TestAHImportDuplicateExisting(TestAHImportFile):
+    def setUp(self):
+        super().setUp()
+
+        # this causes all the states to be duplicates
+        self.base_details["no_default_data"] = False
+
+        self.state = self.property_state_factory.get_property_state(**self.base_details)
+        self.state.import_file = None
+        self.state.save()
+        self.existing_property = self.property_factory.get_property(access_level_instance=self.me_ali)
+        self.view = PropertyView.objects.create(property=self.existing_property, cycle=self.cycle, state=self.state)
+
+    def test_duplicate_both_good(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 1
+        self.blank_result['property_duplicates_against_existing'] = 1
+        self.blank_result['property_new'] = 0
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # 1 Property, 1 PropertyView, 2 PropertyStates
+        self.assertEqual(Property.objects.count(), 1)
+        p = Property.objects.first()
+        assert p.access_level_instance == self.me_ali
+        assert PropertyState.objects.count() == 2
+
+    def test_duplicate_both_good_but_different(self):
+        # Set Up
+        self.base_details["raw_access_level_instance_id"] = self.org.root.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 1
+        self.blank_result['property_duplicates_against_existing'] = 1
+        self.blank_result['property_new'] = 0
+        self.assertDictContainsSubset(self.blank_result, results)
+
+        # No new Property created, 2 PropertyStates remain
+        self.assertEqual(Property.objects.count(), 1)
+        self.assertEqual(PropertyState.objects.count(), 2)
+
+    def test_duplicate_both_good_but_no_access_to_existing(self):
+        # Set Up
+        self.existing_property.access_level_instance = self.sister
+        self.existing_property.save()
+        self.import_record.access_level_instance = self.me_ali
+        self.import_record.save()
+
+        self.base_details["raw_access_level_instance_id"] = self.me_ali.id
+        self.property_state_factory.get_property_state(**self.base_details)
+
+        # Action
+        results = match_and_link_incoming_properties_and_taxlots(*self.action_args)
+
+        # Assert - results
+        self.blank_result['property_initial_incoming'] = 1
+        self.blank_result['property_duplicates_against_existing'] = 1
+
self.blank_result['property_new'] = 0 + self.assertDictContainsSubset(self.blank_result, results) + + # Only one Property + self.assertEqual(Property.objects.count(), 1) + + # unmerged + self.assertEqual(PropertyState.objects.count(), 2) + + # city unchanged + assert PropertyView.objects.count() == 1 + pv = PropertyView.objects.first() + assert pv.state.city is None + + def test_duplicate_incoming_error(self): + # Set Up + self.base_details["raw_access_level_instance_error"] = "uh oh" + self.property_state_factory.get_property_state(**self.base_details) + + # Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_duplicates_against_existing'] = 1 + self.blank_result['property_new'] = 0 + self.assertDictContainsSubset(self.blank_result, results) + + # 1 Property, 1 PropertyViews, 2 PropertyStates + self.assertEqual(Property.objects.count(), 1) + p = Property.objects.first() + assert p.access_level_instance == self.me_ali + assert PropertyState.objects.count() == 2 + + def test_duplicate_incoming_error_and_no_access_to_existing(self): + # Set Up + self.existing_property.access_level_instance = self.sister + self.existing_property.save() + self.import_record.access_level_instance = self.me_ali + self.import_record.save() + + self.base_details["raw_access_level_instance_id"] = None + self.property_state_factory.get_property_state(**self.base_details) + + # Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_duplicates_against_existing'] = 1 + self.blank_result['property_new'] = 0 + self.assertDictContainsSubset(self.blank_result, results) + + # Only one Property + self.assertEqual(Property.objects.count(), 1) + + # unmerged + self.assertEqual(PropertyState.objects.count(), 2) + + # city unchanged + assert PropertyView.objects.count() == 1 + pv = PropertyView.objects.first() + assert pv.state.city is None + + +class TestAHImportMatchExisting(TestAHImportFile): + def setUp(self): + super().setUp() + + # this causes all the states to match + self.base_details["ubid"] = '86HJPCWQ+2VV-1-3-2-3' + self.base_details["no_default_data"] = False + + self.state = self.property_state_factory.get_property_state(**self.base_details) + self.state.data_state = DATA_STATE_MATCHING + self.state.save() + self.existing_property = self.property_factory.get_property(access_level_instance=self.me_ali) + self.view = PropertyView.objects.create(property=self.existing_property, cycle=self.cycle, state=self.state) + + def test_match_both_good(self): + # Set Up + self.base_details["raw_access_level_instance_id"] = self.me_ali.id + self.base_details['city'] = 'Denver' # so not duplicate + self.property_state_factory.get_property_state(**self.base_details) + + # Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_merges_against_existing'] = 1 + self.blank_result['property_new'] = 0 + self.assertDictContainsSubset(self.blank_result, results) + + # one Property in right ali + self.assertEqual(Property.objects.count(), 1) + p = Property.objects.first() + assert p.access_level_instance == self.me_ali + + # merged + assert PropertyState.objects.count() == 3 + + # city changed + assert PropertyView.objects.count() == 1 + pv 
= PropertyView.objects.first() + assert pv.state.city == "Denver" + + def test_match_both_good_but_different(self): + # Set Up + self.base_details["raw_access_level_instance_id"] = self.org.root.id + self.base_details['city'] = 'Denver' # so not duplicate + self.property_state_factory.get_property_state(**self.base_details) + + # Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_merges_against_existing_errors'] = 1 + self.blank_result['property_new'] = 0 + self.assertDictContainsSubset(self.blank_result, results) + + # Only one Property + self.assertEqual(Property.objects.count(), 1) + + # unmerged + self.assertEqual(PropertyState.objects.count(), 2) + + # city unchanged + assert PropertyView.objects.count() == 1 + pv = PropertyView.objects.first() + assert pv.state.city is None + + def test_match_both_good_but_no_access_to_existing(self): + # Set Up + self.existing_property.access_level_instance = self.sister + self.existing_property.save() + self.import_record.access_level_instance = self.me_ali + self.import_record.save() + + self.base_details["raw_access_level_instance_id"] = self.me_ali.id + self.base_details['city'] = 'Denver' # so not duplicate + self.property_state_factory.get_property_state(**self.base_details) + + # Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_merges_against_existing_errors'] = 1 + self.blank_result['property_new'] = 0 + self.assertDictContainsSubset(self.blank_result, results) + + # Only one Property + self.assertEqual(Property.objects.count(), 1) + + # unmerged + self.assertEqual(PropertyState.objects.count(), 2) + + # city unchanged + assert PropertyView.objects.count() == 1 + pv = PropertyView.objects.first() + assert pv.state.city is None + + def test_match_incoming_error(self): + # Set Up + self.base_details["raw_access_level_instance_error"] = "uh oh" + self.base_details['city'] = 'Denver' # so not duplicate + self.property_state_factory.get_property_state(**self.base_details) + + # Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_merges_against_existing'] = 1 + self.blank_result['property_new'] = 0 + self.assertDictContainsSubset(self.blank_result, results) + + # Only one Property + self.assertEqual(Property.objects.count(), 1) + + # city unchanged + assert PropertyView.objects.count() == 1 + pv = PropertyView.objects.first() + assert pv.state.city == "Denver" + + # merged + self.assertEqual(PropertyState.objects.count(), 3) + + def test_match_incoming_error_and_no_access_to_existing(self): + # Set Up + self.existing_property.access_level_instance = self.sister + self.existing_property.save() + self.import_record.access_level_instance = self.me_ali + self.import_record.save() + + self.base_details["raw_access_level_instance_id"] = None + self.base_details['city'] = 'Denver' # so not duplicate + self.property_state_factory.get_property_state(**self.base_details) + + # Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_merges_against_existing_errors'] = 1 + self.blank_result['property_new'] = 0 + 
self.assertDictContainsSubset(self.blank_result, results) + + # Only one Property + self.assertEqual(Property.objects.count(), 1) + + # unmerged + self.assertEqual(PropertyState.objects.count(), 2) + + # city unchanged + assert PropertyView.objects.count() == 1 + pv = PropertyView.objects.first() + assert pv.state.city is None + + +class TestAHImportMatchExistingDifferentCycle(TestAHImportFile): + def setUp(self): + super().setUp() + + self.property_view_factory = FakePropertyViewFactory(organization=self.org) + + # this causes all the states to match + self.base_details["ubid"] = '86HJPCWQ+2VV-1-3-2-3' + self.base_details["no_default_data"] = False + + self.state = self.property_state_factory.get_property_state(**self.base_details) + self.state.data_state = DATA_STATE_MATCHING + self.state.save() + self.existing_property = self.property_factory.get_property(access_level_instance=self.me_ali) + self.other_cycle = self.cycle_factory.get_cycle(start=datetime(2010, 10, 10)) + self.view = PropertyView.objects.create( + property=self.existing_property, + cycle=self.other_cycle, + state=self.state + ) + + def test_has_ali_merges_and_links(self): + # Set Up - create view for merge + self.property_view_factory.get_property_view(prprty=self.existing_property, cycle=self.cycle) + + # Set Up - update new state info + self.base_details["raw_access_level_instance_id"] = self.me_ali.id + self.base_details['city'] = 'Denver' # so not duplicate + self.property_state_factory.get_property_state(**self.base_details) + + # Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_merges_against_existing'] = 1 + self.blank_result['property_links_against_existing'] = 0 # not 1 cause the view it merged into was already linked + self.blank_result['property_new'] = 0 # not 1 cause the property already exists + self.assertDictContainsSubset(self.blank_result, results) + + # one Property in right ali + self.assertEqual(Property.objects.count(), 1) + p = Property.objects.first() + assert p.access_level_instance == self.me_ali + + # 3 in a little merge tree, 1 in the other cycle + assert PropertyState.objects.count() == 4 + + # city changed in the correct view + assert PropertyView.objects.count() == 2 + view_info = PropertyView.objects.values("property_id", "cycle_id", "state__city") + assert list(view_info) == [ + { + "property_id": self.existing_property.id, + "cycle_id": self.other_cycle.id, + "state__city": None, + }, + { + "property_id": self.existing_property.id, + "cycle_id": self.cycle.id, + "state__city": "Denver", + }, + ] + + def test_no_ali_merges_and_links(self): + # Set Up - create view for merge + self.property_view_factory.get_property_view(prprty=self.existing_property, cycle=self.cycle) + + # Set Up - update new state info + self.base_details["raw_access_level_instance_error"] = "uh oh" + self.base_details['city'] = 'Denver' # so not duplicate + self.property_state_factory.get_property_state(**self.base_details) + + # Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_merges_against_existing'] = 1 + self.blank_result['property_links_against_existing'] = 0 # not 1 cause the view it merged into was already linked + self.blank_result['property_new'] = 0 # not 1 cause the property already exists + 
self.assertDictContainsSubset(self.blank_result, results) + + # one Property in right ali + self.assertEqual(Property.objects.count(), 1) + p = Property.objects.first() + assert p.access_level_instance == self.me_ali + + # 3 in a little merge tree, 1 in the other cycle + assert PropertyState.objects.count() == 4 + + # city changed in the correct view + assert PropertyView.objects.count() == 2 + view_info = PropertyView.objects.values("property_id", "cycle_id", "state__city") + + assert list(view_info) == [ + { + "property_id": self.existing_property.id, + "cycle_id": self.other_cycle.id, + "state__city": None, + }, + { + "property_id": self.existing_property.id, + "cycle_id": self.cycle.id, + "state__city": "Denver", + }, + ] + + def test_has_ali_links(self): + # Set Up - update new state info + self.base_details["raw_access_level_instance_id"] = self.me_ali.id + self.base_details['city'] = 'Denver' # so not duplicate + self.property_state_factory.get_property_state(**self.base_details) + + # Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_links_against_existing'] = 1 + self.blank_result['property_new'] = 0 # not 1 cause the property already exists + self.assertDictContainsSubset(self.blank_result, results) + + # 1 in each cycle + assert PropertyState.objects.count() == 2 + + # city changed in the correct view + assert PropertyView.objects.count() == 2 + view_info = PropertyView.objects.values("property_id", "property__access_level_instance_id", "cycle_id", "state__city") + assert list(view_info) == [ + { + "property_id": self.existing_property.id, + "property__access_level_instance_id": self.me_ali.id, + "cycle_id": self.other_cycle.id, + "state__city": None, + }, + { + "property_id": self.existing_property.id, + "property__access_level_instance_id": self.me_ali.id, + "cycle_id": self.cycle.id, + "state__city": "Denver", + }, + ] + + def test_has_ali_links_different_ali(self): + # Set Up - update existing state ali + self.existing_property.access_level_instance = self.sister + self.existing_property.save() + + # Set Up - update new state info + self.base_details["raw_access_level_instance_id"] = self.me_ali.id # different ali + self.base_details['city'] = 'Denver' # so not duplicate + self.property_state_factory.get_property_state(**self.base_details) + + # Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_links_against_existing_errors'] = 1 + self.assertDictContainsSubset(self.blank_result, results) + + # theres two Properties and two PropertyStates, but one of each is abandoned + self.assertEqual(Property.objects.count(), 2) + assert PropertyState.objects.count() == 2 + + # no change + assert PropertyView.objects.count() == 1 + view_info = PropertyView.objects.values("property_id", "property__access_level_instance_id", "cycle_id", "state__city") + assert list(view_info) == [ + { + "property_id": self.existing_property.id, + "property__access_level_instance_id": self.sister.id, + "cycle_id": self.other_cycle.id, + "state__city": None, + }, + ] + + def test_no_ali_links(self): + # Set Up - update new state info + self.base_details["raw_access_level_instance_error"] = "uh oh" + self.base_details['city'] = 'Denver' # so not duplicate + self.property_state_factory.get_property_state(**self.base_details) + + # 
Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_links_against_existing'] = 1 + self.blank_result['property_new'] = 0 # not 1 cause the property already exists + self.assertDictContainsSubset(self.blank_result, results) + + # two properties, but one abandoned + self.assertEqual(Property.objects.count(), 2) + + # 1 in each cycle + assert PropertyState.objects.count() == 2 + + # city changed in the correct view + assert PropertyView.objects.count() == 2 + view_info = PropertyView.objects.values("property_id", "cycle_id", "state__city") + assert list(view_info) == [ + { + "property_id": self.existing_property.id, + "cycle_id": self.other_cycle.id, + "state__city": None, + }, + { + "property_id": self.existing_property.id, + "cycle_id": self.cycle.id, + "state__city": "Denver", + }, + ] + + def test_no_ali_links_no_access(self): + # Set Up - file and existing property have different alis + self.existing_property.access_level_instance = self.sister + self.existing_property.save() + self.import_record.access_level_instance = self.me_ali + self.import_record.save() + + # update new state info + self.base_details["raw_access_level_instance_error"] = "uh oh" + self.base_details['city'] = 'Denver' # so not duplicate + self.property_state_factory.get_property_state(**self.base_details) + + # Action + results = match_and_link_incoming_properties_and_taxlots(*self.action_args) + + # Assert - results + self.blank_result['property_initial_incoming'] = 1 + self.blank_result['property_links_against_existing_errors'] = 1 + self.assertDictContainsSubset(self.blank_result, results) + + # one Property in right ali + self.assertEqual(Property.objects.count(), 1) + p = Property.objects.first() + assert p.access_level_instance == self.sister + + # no change + assert PropertyView.objects.count() == 1 + view_info = PropertyView.objects.values("property_id", "cycle_id", "state__city") + assert list(view_info) == [ + { + "property_id": self.existing_property.id, + "cycle_id": self.other_cycle.id, + "state__city": None, + }, + ] diff --git a/seed/data_importer/tests/test_link_incoming.py b/seed/data_importer/tests/test_link_incoming.py index 8a4b6a0620..582a1d57f5 100644 --- a/seed/data_importer/tests/test_link_incoming.py +++ b/seed/data_importer/tests/test_link_incoming.py @@ -65,6 +65,7 @@ def test_match_merge_link_for_properties(self): 'import_file_id': self.import_file_1.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } self.property_state_factory.get_property_state(**base_state_details) @@ -163,6 +164,7 @@ def test_match_merge_link_for_taxlots(self): 'import_file_id': self.import_file_1.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } self.taxlot_state_factory.get_taxlot_state(**base_state_details) diff --git a/seed/data_importer/tests/test_mapping.py b/seed/data_importer/tests/test_mapping.py index 687a966bb6..8a9c5535c3 100644 --- a/seed/data_importer/tests/test_mapping.py +++ b/seed/data_importer/tests/test_mapping.py @@ -14,7 +14,13 @@ from seed.data_importer import tasks from seed.data_importer.tests.util import FAKE_MAPPINGS from seed.lib.mcm import mapper -from seed.models import ASSESSED_RAW, DATA_STATE_IMPORT, Column +from seed.models import ( + ASSESSED_RAW, + DATA_STATE_IMPORT, + DATA_STATE_MAPPING, + Column, + PropertyState +) 
 from seed.models.column_mappings import get_column_mapping
 from seed.test_helpers.fake import (
     FakePropertyFactory,
@@ -255,6 +261,178 @@ def test_mapping_takes_into_account_selected_units(self):
         self.assertAlmostEqual(state.gross_floor_area, (100 * ureg('m**2')).to('ft**2'))
 
+
+class TestMappingAccessLevelInstance(DataMappingBaseTestCase):
+    def setUp(self):
+        selfvars = self.set_up(ASSESSED_RAW)
+        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
+        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
+
+        # create tree
+        self.org.access_level_names = ["1st Gen", "2nd Gen", "3rd Gen"]
+        mom = self.org.add_new_access_level_instance(self.org.root.id, "mom")
+        self.me_ali = self.org.add_new_access_level_instance(mom.id, "me")
+        self.brother_ali = self.org.add_new_access_level_instance(mom.id, "brother")
+        self.org.save()
+
+        # create state
+        self.state = self.property_state_factory.get_property_state_as_extra_data(
+            import_file_id=self.import_file.id,
+            source_type=ASSESSED_RAW,
+            data_state=DATA_STATE_IMPORT,
+            random_extra=42,
+        )
+
+        # create mappings
+        suggested_mappings = mapper.build_column_mapping(
+            list(self.state.extra_data.keys()) + self.org.access_level_names,
+            Column.retrieve_all_by_tuple(self.org),
+            previous_mapping=get_column_mapping,
+            map_args=[self.org],
+            thresh=90
+        )
+        mappings = []
+        for raw_column, suggestion in suggested_mappings.items():
+            mapping = {
+                "from_field": raw_column,
+                "from_units": None,
+                "to_table_name": suggestion[0],
+                "to_field": suggestion[1],
+                "to_field_display_name": suggestion[1],
+            }
+            mappings.append(mapping)
+        Column.create_mappings(mappings, self.org, self.user, self.import_file.id)
+
+    def test_map_good_ah_data(self):
+        # state has good AH info
+        self.state.extra_data["1st Gen"] = "root"
+        self.state.extra_data["2nd Gen"] = "mom"
+        self.state.extra_data["3rd Gen"] = "me"
+        self.state.save()
+
+        # map state
+        tasks.map_data(self.import_file.id)
+        ps = PropertyState.objects.get(
+            data_state=DATA_STATE_MAPPING,
+            organization=self.org,
+            import_file=self.import_file,
+        )
+
+        # extra data gone and raw ali set
+        assert "2nd Gen" not in ps.extra_data
+        assert "3rd Gen" not in ps.extra_data
+        assert ps.raw_access_level_instance == self.me_ali
+        assert ps.raw_access_level_instance_error is None
+
+    def test_map_good_ah_data_no_permissions_ancestor(self):
+        self.import_record.access_level_instance = self.me_ali
+        self.import_record.save()
+
+        # state has good AH info
+        self.state.extra_data["1st Gen"] = "root"
+        self.state.extra_data["2nd Gen"] = "mom"
+        self.state.extra_data["3rd Gen"] = None
+        self.state.save()
+
+        # map state
+        tasks.map_data(self.import_file.id)
+        ps = PropertyState.objects.get(
+            data_state=DATA_STATE_MAPPING,
+            organization=self.org,
+            import_file=self.import_file,
+        )
+
+        # extra data gone and raw ali error is set
+        assert "2nd Gen" not in ps.extra_data
+        assert "3rd Gen" not in ps.extra_data
+        assert ps.raw_access_level_instance_error == "Access Level Instance cannot be accessed with the permissions of this import file."
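+        # Here "mom" exists, but it is an ancestor of the import file's ALI
+        # ("me"), so the lookup succeeds while the permission check fails.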
+
+    def test_map_good_ah_data_no_permissions(self):
+        self.import_record.access_level_instance = self.me_ali
+        self.import_record.save()
+
+        # state has good AH info
+        self.state.extra_data["1st Gen"] = "root"
+        self.state.extra_data["2nd Gen"] = "mom"
+        self.state.extra_data["3rd Gen"] = "brother"
+        self.state.save()
+
+        # map state
+        tasks.map_data(self.import_file.id)
+        ps = PropertyState.objects.get(
+            data_state=DATA_STATE_MAPPING,
+            organization=self.org,
+            import_file=self.import_file,
+        )
+
+        # extra data gone and raw ali error is set
+        assert "2nd Gen" not in ps.extra_data
+        assert "3rd Gen" not in ps.extra_data
+        assert ps.raw_access_level_instance_error == "Access Level Information does not match any existing Access Level Instance."
+
+    def test_map_ah_data_missing_columns(self):
+        # state has missing AH info
+        self.state.extra_data["1st Gen"] = "root"
+        # no 2nd Gen
+        self.state.extra_data["3rd Gen"] = "me"
+        self.state.save()
+
+        # map state
+        tasks.map_data(self.import_file.id)
+        ps = PropertyState.objects.get(
+            data_state=DATA_STATE_MAPPING,
+            organization=self.org,
+            import_file=self.import_file,
+        )
+
+        # extra data gone and raw ali error is set
+        assert "2nd Gen" not in ps.extra_data
+        assert "3rd Gen" not in ps.extra_data
+        assert ps.raw_access_level_instance is None
+        assert ps.raw_access_level_instance_error == "Missing/Incomplete Access Level Column."
+
+    def test_map_ah_data_missing_value(self):
+        # state has blank AH info
+        self.state.extra_data["1st Gen"] = "root"
+        self.state.extra_data["2nd Gen"] = None
+        self.state.extra_data["3rd Gen"] = "me"
+        self.state.save()
+
+        # map state
+        tasks.map_data(self.import_file.id)
+        ps = PropertyState.objects.get(
+            data_state=DATA_STATE_MAPPING,
+            organization=self.org,
+            import_file=self.import_file,
+        )
+
+        # extra data gone and raw ali error is set
+        assert "2nd Gen" not in ps.extra_data
+        assert "3rd Gen" not in ps.extra_data
+        assert ps.raw_access_level_instance is None
+        assert ps.raw_access_level_instance_error == "Missing/Incomplete Access Level Column."
+
+    def test_map_ah_data_bad_value(self):
+        # state has bad AH info
+        self.state.extra_data["1st Gen"] = "root"
+        self.state.extra_data["2nd Gen"] = "mom"
+        self.state.extra_data["3rd Gen"] = "I dont exist"
+        self.state.save()
+
+        # map state
+        tasks.map_data(self.import_file.id)
+        ps = PropertyState.objects.get(
+            data_state=DATA_STATE_MAPPING,
+            organization=self.org,
+            import_file=self.import_file,
+        )
+
+        # extra data gone and raw ali error is set
+        assert "2nd Gen" not in ps.extra_data
+        assert "3rd Gen" not in ps.extra_data
+        assert ps.raw_access_level_instance is None
+        assert ps.raw_access_level_instance_error == "Access Level Information does not match any existing Access Level Instance."
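+
+    # Taken together, these cases pin down the mapping contract for the
+    # access-level columns: the level values are removed from extra_data and
+    # combined into a path such as
+    #     {"1st Gen": "root", "2nd Gen": "mom", "3rd Gen": "me"}
+    # A complete path that resolves to an instance within the import file's
+    # subtree sets raw_access_level_instance; a missing or blank level
+    # produces the "Missing/Incomplete Access Level Column." error; an
+    # unrecognized instance produces the "does not match any existing Access
+    # Level Instance." error; and a recognized instance outside the import
+    # file's permissions produces the "cannot be accessed" error.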
+ + class TestDuplicateFileHeaders(DataMappingBaseTestCase): def setUp(self): filename = getattr(self, 'filename', 'example-data-properties-duplicate-headers.xlsx') diff --git a/seed/data_importer/tests/test_match_incoming.py b/seed/data_importer/tests/test_match_incoming.py index abe7841958..6e8b6f7faf 100644 --- a/seed/data_importer/tests/test_match_incoming.py +++ b/seed/data_importer/tests/test_match_incoming.py @@ -68,6 +68,7 @@ def test_duplicate_properties_identified(self): 'import_file_id': self.import_file.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create pair of properties that are exact duplicates self.property_state_factory.get_property_state(**base_details) @@ -99,6 +100,7 @@ def test_duplicate_taxlots_identified(self): 'import_file_id': self.import_file.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create pair of properties that are exact duplicates self.taxlot_state_factory.get_taxlot_state(**base_details) @@ -130,6 +132,7 @@ def test_match_properties_if_all_default_fields_match(self): 'import_file_id': self.import_file.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create first set of properties that match each other ps_1 = self.property_state_factory.get_property_state(**base_details) @@ -209,6 +212,7 @@ def test_match_taxlots_if_all_default_fields_match(self): 'import_file_id': self.import_file.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create first set of taxlots that match each other tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details) @@ -285,6 +289,7 @@ def test_match_properties_on_ubid(self): 'import_file_id': self.import_file.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create set of properties that match each other self.property_state_factory.get_property_state(**base_details) @@ -309,6 +314,7 @@ def test_match_properties_normalized_address_used_instead_of_address_line_1(self 'import_file_id': self.import_file.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create set of properties that have the same address_line_1 in slightly different format base_details['address_line_1'] = '123 Match Street' @@ -335,6 +341,7 @@ def test_match_taxlots_normalized_address_used_instead_of_address_line_1(self): 'import_file_id': self.import_file.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create set of taxlots that have the same address_line_1 in slightly different format base_details['address_line_1'] = '123 Match Street' @@ -366,6 +373,7 @@ def test_no_matches_if_all_matching_criteria_is_None(self): 'import_file_id': self.import_file.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create set of properties that won't match @@ -396,6 +404,7 @@ def test_match_properties_get_rolled_up_into_one_in_the_order_their_uploaded(sel 'import_file_id': self.import_file.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create first set of properties that match each other base_details['city'] = 'Philadelphia' @@ -438,6 +447,7 @@ 
def test_duplicate_properties_identified(self): 'import_file_id': self.import_file_1.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create property in first ImportFile ps_1 = self.property_state_factory.get_property_state(**base_details) @@ -469,6 +479,7 @@ def test_match_properties_if_all_default_fields_match(self): 'import_file_id': self.import_file_1.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create property in first ImportFile ps_1 = self.property_state_factory.get_property_state(**base_details) @@ -540,6 +551,7 @@ def test_match_properties_rolls_up_multiple_existing_matches_in_id_order_if_they 'import_file_id': self.import_file_1.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create 3 non-matching properties in first ImportFile ps_1 = self.property_state_factory.get_property_state(**base_details) @@ -601,6 +613,7 @@ def test_match_taxlots_if_all_default_fields_match(self): 'import_file_id': self.import_file_1.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create property in first ImportFile tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details) @@ -672,6 +685,7 @@ def test_match_taxlots_rolls_up_multiple_existing_matches_in_id_order_if_they_ex 'import_file_id': self.import_file_1.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create 3 non-matching taxlots in first ImportFile tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details) @@ -748,6 +762,7 @@ def test_properties(self): 'import_file_id': self.import_file_1.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # No matching_criteria values @@ -787,6 +802,7 @@ def test_properties(self): 'import_file_id': self.import_file_2.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create 1 duplicate of the 'No matching_criteria values' properties @@ -863,18 +879,30 @@ def test_properties(self): 'import_file_records': None, # This is calculated in a separate process 'property_duplicates_against_existing': 1, 'property_duplicates_within_file': 2, + 'property_duplicates_within_file_errors': 0, 'property_initial_incoming': 10, 'property_merges_against_existing': 1, + 'property_merges_against_existing_errors': 0, + 'property_links_against_existing': 0, + 'property_links_against_existing_errors': 0, 'property_merges_between_existing': 0, 'property_merges_within_file': 2, + 'property_merges_within_file_errors': 0, 'property_new': 4, + 'property_new_errors': 0, 'tax_lot_duplicates_against_existing': 0, 'tax_lot_duplicates_within_file': 0, + 'tax_lot_duplicates_within_file_errors': 0, 'tax_lot_initial_incoming': 0, 'tax_lot_merges_against_existing': 0, + 'tax_lot_merges_against_existing_errors': 0, + 'tax_lot_links_against_existing': 0, + 'tax_lot_links_against_existing_errors': 0, 'tax_lot_merges_between_existing': 0, 'tax_lot_merges_within_file': 0, + 'tax_lot_merges_within_file_errors': 0, 'tax_lot_new': 0, + 'tax_lot_new_errored': 0, } self.assertEqual(results, expected) @@ -893,6 +921,7 @@ def test_taxlots(self): 'import_file_id': self.import_file_1.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + 
"raw_access_level_instance_id": self.org.root.id, } # No matching_criteria values @@ -932,6 +961,7 @@ def test_taxlots(self): 'import_file_id': self.import_file_2.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create 2 duplicates of the 'No matching_criteria values' taxlots @@ -1003,18 +1033,30 @@ def test_taxlots(self): 'import_file_records': None, # This is calculated in a separate process 'property_duplicates_against_existing': 0, 'property_duplicates_within_file': 0, + 'property_duplicates_within_file_errors': 0, 'property_initial_incoming': 0, 'property_merges_against_existing': 0, + 'property_merges_against_existing_errors': 0, 'property_merges_between_existing': 0, 'property_merges_within_file': 0, + 'property_merges_within_file_errors': 0, + 'property_links_against_existing': 0, + 'property_links_against_existing_errors': 0, 'property_new': 0, + 'property_new_errors': 0, 'tax_lot_duplicates_against_existing': 1, 'tax_lot_duplicates_within_file': 3, + 'tax_lot_duplicates_within_file_errors': 0, 'tax_lot_initial_incoming': 10, 'tax_lot_merges_against_existing': 1, + 'tax_lot_merges_against_existing_errors': 0, + 'tax_lot_links_against_existing': 0, + 'tax_lot_links_against_existing_errors': 0, 'tax_lot_merges_between_existing': 0, 'tax_lot_merges_within_file': 2, + 'tax_lot_merges_within_file_errors': 0, 'tax_lot_new': 3, + 'tax_lot_new_errored': 0, } self.assertEqual(results, expected) @@ -1065,7 +1107,7 @@ def test_filter_duplicate_states(self): props = self.import_file.find_unmatched_property_states() sub_progress_data = ProgressData(func_name='match_sub_progress', unique_id=123) sub_progress_data.save() - uniq_state_ids, dup_state_count = filter_duplicate_states(props, sub_progress_data.key) + uniq_state_ids, _, dup_state_count = filter_duplicate_states(props, sub_progress_data.key) # There should be 6 uniq states. 5 from the second call, and one of 'The Same Address' self.assertEqual(len(uniq_state_ids), 6) @@ -1128,6 +1170,7 @@ def test_match_buildingsync_works_when_no_existing_scenarios_or_meters(self): 'import_file_id': self.import_file_2.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create a property which will match with the BuildingSync file self.property_state_factory.get_property_state(**base_details) @@ -1180,6 +1223,7 @@ def test_match_buildingsync_works_when_there_are_existing_different_scenarios_an 'import_file_id': self.import_file_2.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, + "raw_access_level_instance_id": self.org.root.id, } # Create a property which will match with the BuildingSync file ps_orig = self.property_state_factory.get_property_state(**base_details) @@ -1293,7 +1337,7 @@ def setUp(self): end=date(1999, 12, 31), ) - base_details = {'import_file_id': self.import_file.id} + base_details = {'import_file_id': self.import_file.id, "raw_access_level_instance_id": self.org.root.id} # Properties for cycle 2010_2014 base_details['property_name'] = 'p2010_2014a' base_details['year_ending'] = date(2012, 12, 12) diff --git a/seed/data_importer/tests/test_match_unicode.py b/seed/data_importer/tests/test_match_unicode.py index e14646e05e..b78b4814bb 100644 --- a/seed/data_importer/tests/test_match_unicode.py +++ b/seed/data_importer/tests/test_match_unicode.py @@ -33,7 +33,7 @@ class TestUnicodeNormalization(DataMappingBaseTestCase): def test_unicode_normalization(self): """Test a few cases. 
The unicodedata.normalize('NFC', text) method combines the
-        the letter and diacritics, which seems to provide the best compatibility."""
+        letter and diacritics, which seems to provide the best compatibility."""
         # Guillemets
         unicode_text = "Café «Déjà Vu»"
         expected_out = "Café \"Déjà Vu\""
@@ -137,6 +137,7 @@ def test_unicode_matching(self):
             'import_file_id': self.import_file_1.id,
             'data_state': DATA_STATE_MAPPING,
             'no_default_data': False,
+            'raw_access_level_instance_id': self.org.root.id,
         }
 
         self.property_state_factory.get_property_state(**base_state_details)
diff --git a/seed/docs/views.py b/seed/docs/views.py
index 7400d59c92..882578fb70 100644
--- a/seed/docs/views.py
+++ b/seed/docs/views.py
@@ -64,7 +64,7 @@ def faq_page(request):
         faq_data[category_name].append(parsed_faq._asdict())
 
     if not request.user.is_anonymous:
-        initial_org_id, initial_org_name, initial_org_user_role = _get_default_org(
+        initial_org_id, initial_org_name, initial_org_user_role, access_level_instance_name, access_level_instance_id, is_ali_root, is_ali_leaf = _get_default_org(
            request.user
        )
        debug = settings.DEBUG
diff --git a/seed/lib/mcm/reader.py b/seed/lib/mcm/reader.py
index e88395a3da..bf34833a28 100644
--- a/seed/lib/mcm/reader.py
+++ b/seed/lib/mcm/reader.py
@@ -400,7 +400,7 @@ def XLSDictReader(self, sheet, header_row=0):
         :param sheet: xlrd Sheet
         :param header_row: the row index to start with
-        :returns: Generator yeilding a row as Dict
+        :returns: Generator yielding a row as Dict
         """
 
         # save off the headers into a member variable. Only do this once. If XLSDictReader is
diff --git a/seed/lib/merging/merging.py b/seed/lib/merging/merging.py
index e544454ed6..834043b215 100644
--- a/seed/lib/merging/merging.py
+++ b/seed/lib/merging/merging.py
@@ -239,6 +239,9 @@ def merge_state(merged_state, state1, state2, priorities, ignore_merge_protectio
         state2_present_columns
     )
 
+    default_ali = state1.raw_access_level_instance
+    merged_state.raw_access_level_instance = default_ali if default_ali is not None else state2.raw_access_level_instance
+
     # merge measures, scenarios, simulations
     if isinstance(merged_state, PropertyState):
         PropertyState.merge_relationships(merged_state, state1, state2)
diff --git a/seed/lib/progress_data/progress_data.py b/seed/lib/progress_data/progress_data.py
index ae18746562..c2a8689bd1 100644
--- a/seed/lib/progress_data/progress_data.py
+++ b/seed/lib/progress_data/progress_data.py
@@ -180,8 +180,8 @@ def result(self):
 
     def increment_value(self):
         """
-        Return the value to increment the progress back. Currently this is always assume that that
-        size of the step is 1 to the self.total count.
+        Return the value used to increment the progress. Currently, this always assumes that the
+        size of each step is 1 / self.total.
:return: float, value to increment the step by """ diff --git a/seed/lib/superperms/orgs/decorators.py b/seed/lib/superperms/orgs/decorators.py index 85209e5f70..8de17e4162 100644 --- a/seed/lib/superperms/orgs/decorators.py +++ b/seed/lib/superperms/orgs/decorators.py @@ -9,16 +9,29 @@ from inspect import signature from django.conf import settings -from django.http import HttpResponseForbidden +from django.core.exceptions import ObjectDoesNotExist +from django.http import HttpResponseForbidden, JsonResponse +from django.utils.datastructures import MultiValueDictKeyError +from rest_framework import status +from seed.data_importer.models import ImportFile, ImportRecord from seed.lib.superperms.orgs.models import ( ROLE_MEMBER, ROLE_OWNER, ROLE_VIEWER, + AccessLevelInstance, Organization, OrganizationUser ) from seed.lib.superperms.orgs.permissions import get_org_id +from seed.models import ( + Analysis, + Goal, + Property, + PropertyView, + TaxLotView, + UbidModel +) # Allow Super Users to ignore permissions. ALLOW_SUPER_USER_PERMS = getattr(settings, 'ALLOW_SUPER_USER_PERMS', True) @@ -66,6 +79,16 @@ def requires_superuser(org_user): return org_user.user.is_superuser +def requires_root_member_access(org_user): + """ User must be an owner or member at the root access level""" + return org_user.access_level_instance.depth == 1 and org_user.role_level >= ROLE_MEMBER + + +def requires_non_leaf_access(org_user): + """ User must be a non leaf member. Exception when user is both root and leaf. """ + return org_user.access_level_instance.is_root() or not org_user.access_level_instance.is_leaf() + + def can_create_sub_org(org_user): return requires_parent_org_owner(org_user) @@ -130,6 +153,8 @@ def can_view_data(org_user): 'requires_owner': requires_owner, 'requires_member': requires_member, 'requires_viewer': requires_viewer, + 'requires_root_member_access': requires_root_member_access, + 'requires_non_leaf_access': requires_non_leaf_access, 'can_create_sub_org': can_create_sub_org, 'can_remove_org': can_remove_org, 'can_invite_member': can_invite_member, @@ -179,9 +204,9 @@ def _validate_permissions(perm_name, request, requires_org): org = Organization.objects.get(pk=org_id) except Organization.DoesNotExist: return _make_resp('org_dne') - # Skip perms checks if settings allow super_users to bypass. 
if request.user.is_superuser and ALLOW_SUPER_USER_PERMS: + request.access_level_instance_id = org.root.id return try: @@ -190,6 +215,8 @@ def _validate_permissions(perm_name, request, requires_org): ) except OrganizationUser.DoesNotExist: return _make_resp('user_dne') + else: + request.access_level_instance_id = org_user.access_level_instance.id if not PERMS.get(perm_name, lambda x: False)(org_user): return _make_resp('perm_denied') @@ -197,7 +224,6 @@ def _validate_permissions(perm_name, request, requires_org): def has_perm_class(perm_name: str, requires_org: bool = True): """Proceed if user from request has ``perm_name``.""" - def decorator(fn): params = list(signature(fn).parameters) if params and params[0] == 'self': @@ -212,3 +238,205 @@ def _wrapped(request, *args, **kwargs): return _wrapped return decorator + + +def assert_hierarchy_access( + request, + property_id_kwarg=None, + property_view_id_kwarg=None, + param_property_view_id=None, + taxlot_view_id_kwarg=None, + import_file_id_kwarg=None, + param_import_file_id=None, + import_record_id_kwarg=None, + body_ali_id=None, + body_import_file_id=None, + body_property_id=None, + analysis_id_kwarg=None, + ubid_id_kwarg=None, + body_import_record_id=None, + body_property_state_id=None, + body_taxlot_state_id=None, + param_import_record_id=None, + goal_id_kwarg=None, + *args, + **kwargs +): + + """Helper function to has_hierarchy_access""" + body = request.data + params = request.GET + try: + if property_id_kwarg and property_id_kwarg in kwargs: + property = Property.objects.get(pk=kwargs[property_id_kwarg]) + requests_ali = property.access_level_instance + + elif body_property_id and body_property_id in body: + property = Property.objects.get(pk=body[body_property_id]) + requests_ali = property.access_level_instance + + elif body_property_state_id and body_property_state_id in body: + # there should only be one property_view with a specific property state + property_view = PropertyView.objects.get(state_id=body[body_property_state_id]) + requests_ali = property_view.property.access_level_instance + + elif body_taxlot_state_id and body_taxlot_state_id in body: + taxlot_view = TaxLotView.objects.get(state_id=body[body_taxlot_state_id]) + requests_ali = taxlot_view.taxlot.access_level_instance + + elif property_view_id_kwarg and property_view_id_kwarg in kwargs: + property_view = PropertyView.objects.get(pk=kwargs[property_view_id_kwarg]) + requests_ali = property_view.property.access_level_instance + + elif param_property_view_id and param_property_view_id in params: + property_view = PropertyView.objects.get(pk=params[param_property_view_id]) + requests_ali = property_view.property.access_level_instance + + elif param_import_file_id and param_import_file_id in params: + import_file = ImportFile.objects.get(pk=params[param_import_file_id]) + requests_ali = import_file.access_level_instance + + elif taxlot_view_id_kwarg and taxlot_view_id_kwarg in kwargs: + taxlot_view = TaxLotView.objects.get(pk=kwargs[taxlot_view_id_kwarg]) + requests_ali = taxlot_view.taxlot.access_level_instance + + elif import_file_id_kwarg and import_file_id_kwarg in kwargs: + import_file = ImportFile.objects.get(pk=kwargs[import_file_id_kwarg]) + requests_ali = import_file.access_level_instance + + elif body_ali_id and body_ali_id in body: + requests_ali = AccessLevelInstance.objects.get(pk=body[body_ali_id]) + + elif import_record_id_kwarg and import_record_id_kwarg in kwargs: + import_record = ImportRecord.objects.get(pk=kwargs[import_record_id_kwarg]) + 
requests_ali = import_record.access_level_instance + + elif body_import_file_id and body_import_file_id in body: + import_file = ImportFile.objects.get(pk=body[body_import_file_id]) + requests_ali = import_file.access_level_instance + + elif body_import_record_id and body_import_record_id in body: + import_record = ImportRecord.objects.get(pk=body[body_import_record_id]) + requests_ali = import_record.access_level_instance + + elif param_import_record_id and (param_import_record_id in request.POST or param_import_record_id in request.GET): + import_record_pk = request.POST.get(param_import_record_id, request.GET.get(param_import_record_id)) + import_record = ImportRecord.objects.get(pk=import_record_pk) + requests_ali = import_record.access_level_instance + + elif analysis_id_kwarg and analysis_id_kwarg in kwargs: + if int(kwargs[analysis_id_kwarg]) < 1: + return + analysis = Analysis.objects.get(pk=kwargs[analysis_id_kwarg]) + requests_ali = analysis.access_level_instance + + elif ubid_id_kwarg and ubid_id_kwarg in kwargs: + ubid = UbidModel.objects.get(pk=kwargs[ubid_id_kwarg]) + if ubid.property: + requests_ali = ubid.property.propertyview_set.first().property.access_level_instance + else: + requests_ali = ubid.taxlot.taxlotview_set.first().taxlot.access_level_instance + + elif goal_id_kwarg and goal_id_kwarg in kwargs: + goal = Goal.objects.get(pk=kwargs[goal_id_kwarg]) + body_ali_id = body.get('access_level_instance') + if body_ali_id: + body_ali = AccessLevelInstance.objects.get(pk=body_ali_id) + requests_ali = body_ali if body_ali.depth < goal.access_level_instance.depth else goal.access_level_instance + else: + requests_ali = goal.access_level_instance + + else: + property_view = PropertyView.objects.get(pk=request.GET['property_view_id']) + requests_ali = property_view.property.access_level_instance + + except (ObjectDoesNotExist, MultiValueDictKeyError): + return JsonResponse({ + 'status': 'error', + 'message': 'No such resource.' + }, status=status.HTTP_404_NOT_FOUND) + + user_ali = AccessLevelInstance.objects.get(pk=request.access_level_instance_id) + if not (user_ali == requests_ali or requests_ali.is_descendant_of(user_ali)): + return JsonResponse({ + 'status': 'error', + 'message': 'No such resource.' 
+ }, status=status.HTTP_404_NOT_FOUND) + + +def has_hierarchy_access( + property_id_kwarg=None, + property_view_id_kwarg=None, + param_property_view_id=None, + taxlot_view_id_kwarg=None, + import_file_id_kwarg=None, + param_import_file_id=None, + import_record_id_kwarg=None, + body_ali_id=None, + body_import_file_id=None, + body_property_id=None, + analysis_id_kwarg=None, + ubid_id_kwarg=None, + body_import_record_id=None, + body_property_state_id=None, + body_taxlot_state_id=None, + param_import_record_id=None, + goal_id_kwarg=None +): + """Must be called after has_perm_class""" + def decorator(fn): + params = list(signature(fn).parameters) + if params and params[0] == 'self': + @wraps(fn) + def _wrapped(self, request, *args, **kwargs): + return assert_hierarchy_access( + request, + property_id_kwarg, + property_view_id_kwarg, + param_property_view_id, + taxlot_view_id_kwarg, + import_file_id_kwarg, + param_import_file_id, + import_record_id_kwarg, + body_ali_id, + body_import_file_id, + body_property_id, + analysis_id_kwarg, + ubid_id_kwarg, + body_import_record_id, + body_property_state_id, + body_taxlot_state_id, + param_import_record_id, + goal_id_kwarg, + *args, + **kwargs + ) or fn(self, request, *args, **kwargs) + else: + @wraps(fn) + def _wrapped(request, *args, **kwargs): + return assert_hierarchy_access( + request, + property_id_kwarg, + property_view_id_kwarg, + param_property_view_id, + taxlot_view_id_kwarg, + import_file_id_kwarg, + param_import_file_id, + import_record_id_kwarg, + body_ali_id, + body_import_file_id, + body_property_id, + analysis_id_kwarg, + ubid_id_kwarg, + body_import_record_id, + body_property_state_id, + body_taxlot_state_id, + param_import_record_id, + goal_id_kwarg, + *args, + **kwargs + ) or fn(request, *args, **kwargs) + + return _wrapped + + return decorator diff --git a/seed/lib/superperms/orgs/migrations/0030_accountability_hierarchy.py b/seed/lib/superperms/orgs/migrations/0030_accountability_hierarchy.py new file mode 100644 index 0000000000..181a4ed33b --- /dev/null +++ b/seed/lib/superperms/orgs/migrations/0030_accountability_hierarchy.py @@ -0,0 +1,79 @@ +# Generated by Django 3.2.23 on 2024-03-08 06:05 + +import django.db.models.deletion +from django.db import migrations, models, transaction + + +@transaction.atomic +def create_root_access_levels(apps, schema_editor): + Organization = apps.get_model('orgs', 'Organization') + AccessLevelInstance = apps.get_model('orgs', 'AccessLevelInstance') + + for i, org in enumerate(Organization.objects.all()): + org.access_level_names = [org.name] + org.save() + + AccessLevelInstance.objects.create( + tree_id=i, + organization=org, + name='root', + path={org.access_level_names[0]: 'root'}, + depth=1, + lft=1, + rgt=2, + ) + + +@transaction.atomic +def assign_users_to_root_access_level(apps, schema_editor): + OrganizationUser = apps.get_model('orgs', 'OrganizationUser') + AccessLevelInstance = apps.get_model('orgs', 'AccessLevelInstance') + + root_alis = {ali.organization_id: ali for ali in AccessLevelInstance.objects.filter(depth=1)} + + for user in OrganizationUser.objects.all(): + user.access_level_instance = root_alis[user.organization_id] + user.save(update_fields=['access_level_instance']) + + +class Migration(migrations.Migration): + + dependencies = [ + ('orgs', '0029_auto_20240105_1257'), + ] + + operations = [ + migrations.AddField( + model_name='organization', + name='access_level_names', + field=models.JSONField(default=list), + ), + migrations.CreateModel( + name='AccessLevelInstance', + 
fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('lft', models.PositiveIntegerField(db_index=True)),
+                ('rgt', models.PositiveIntegerField(db_index=True)),
+                ('tree_id', models.PositiveIntegerField(db_index=True)),
+                ('depth', models.PositiveIntegerField(db_index=True)),
+                ('name', models.CharField(max_length=100)),
+                ('path', models.JSONField()),
+                ('organization', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='orgs.organization')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.AddField(
+            model_name='organizationuser',
+            name='access_level_instance',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='users', to='orgs.accesslevelinstance'),
+        ),
+        migrations.RunPython(create_root_access_levels),
+        migrations.RunPython(assign_users_to_root_access_level, reverse_code=migrations.RunPython.noop),
+        migrations.AlterField(
+            model_name='organizationuser',
+            name='access_level_instance',
+            field=models.ForeignKey(null=False, on_delete=django.db.models.deletion.CASCADE, related_name='users', to='orgs.accesslevelinstance'),
+        )
+    ]
diff --git a/seed/lib/superperms/orgs/models.py b/seed/lib/superperms/orgs/models.py
index f42d5503a4..d0cc728b5b 100644
--- a/seed/lib/superperms/orgs/models.py
+++ b/seed/lib/superperms/orgs/models.py
@@ -8,8 +8,10 @@
 
 from django.conf import settings
 from django.contrib.auth.models import User
-from django.db import models
-from django.db.models.signals import pre_delete
+from django.db import IntegrityError, models, transaction
+from django.db.models.signals import post_save, pre_delete, pre_save
+from django.dispatch import receiver
+from treebeard.ns_tree import NS_Node
 
 from seed.lib.superperms.orgs.exceptions import TooManyNestedOrgs
 
@@ -64,6 +66,7 @@ class Meta:
     role_level = models.IntegerField(
         default=ROLE_OWNER, choices=ROLE_LEVEL_CHOICES
     )
+    access_level_instance = models.ForeignKey("AccessLevelInstance", on_delete=models.CASCADE, null=False, related_name="users")
 
     def delete(self, *args, **kwargs):
         """Ensure we preserve at least one Owner for this org."""
@@ -90,6 +93,62 @@ def __str__(self):
         )
 
 
+@receiver(pre_save, sender=OrganizationUser)
+def presave_organization_user(sender, instance, **kwargs):
+    if instance.role_level == ROLE_OWNER and instance.access_level_instance != instance.organization.root:
+        raise IntegrityError("Owners must be members of the organization's root.")
+
+
+class AccessLevelInstance(NS_Node):
+    """Node in the Accountability Hierarchy tree"""
+    name = models.CharField(max_length=100, null=False)
+    organization = models.ForeignKey('Organization', on_delete=models.CASCADE)
+    # path: an automatically maintained dict mapping access level names to the
+    # names of this instance's ancestors (and itself). See get_path and set_path.
+    path = models.JSONField(null=False)
+
+    node_order_by = ['name']
+
+    # TODO: Add constraint that siblings cannot have same name.
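+    # For example, with access_level_names == ["1st Gen", "2nd Gen", "3rd Gen"]
+    # (the fixture used in the mapping tests above), the instance "me" under
+    # "mom" under the root carries:
+    #     path == {"1st Gen": "root", "2nd Gen": "mom", "3rd Gen": "me"}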
+ + def get_path(self): + """get a dictionary detailing the ancestors of this Access Level Instance + """ + level_names = self.organization.access_level_names + ancestors = { + level_names[depth - 1]: name + for depth, name + in self.get_ancestors().values_list("depth", "name") + } + ancestors[level_names[self.depth - 1]] = self.name + + return ancestors + + def __str__(self): + access_level_name = self.organization.access_level_names[self.depth - 1] + return f"{self.name}: {self.organization.name} Access Level {access_level_name}" + + +@receiver(pre_save, sender=AccessLevelInstance) +def set_path(sender, instance, **kwargs): + # if instance is new, set path + if instance.id is None: + instance.path = instance.get_path() + + # else, if we updated the name... + else: + previous = AccessLevelInstance.objects.get(pk=instance.id) + if instance.name != previous.name: + level_name = instance.organization.access_level_names[instance.depth - 1] + with transaction.atomic(): + # update our path + instance.path[level_name] = instance.name + # update our children's path + for ali in instance.get_descendants(): + ali.path[level_name] = instance.name + ali.save() + + class Organization(models.Model): """A group of people that optionally contains another sub group.""" @@ -229,6 +288,8 @@ class Meta: # Salesforce Functionality salesforce_enabled = models.BooleanField(default=False) + access_level_names = models.JSONField(default=list) + # UBID Threshold ubid_threshold = models.FloatField(default=1.0) @@ -250,7 +311,7 @@ def is_member(self, user): """Return True if user object has a relation to this organization.""" return user in self.users.all() - def add_member(self, user, role=ROLE_OWNER): + def add_member(self, user, access_level_instance_id, role=ROLE_OWNER): """Add a user to an organization. Returns a boolean if a new OrganizationUser record was created""" if OrganizationUser.objects.filter(user=user, organization=self).exists(): return False @@ -260,7 +321,7 @@ def add_member(self, user, role=ROLE_OWNER): user.is_active = True user.save() - _, created = OrganizationUser.objects.get_or_create(user=user, organization=self, role_level=role) + _, created = OrganizationUser.objects.get_or_create(user=user, organization=self, access_level_instance_id=access_level_instance_id, role_level=role) return created @@ -283,6 +344,30 @@ def is_owner(self, user): user=user, role_level=ROLE_OWNER, organization=self, ).exists() + def has_role_member(self, user): + """ + Return True if the user has a relation to this org, with a role of + member. 
+ """ + return OrganizationUser.objects.filter( + user=user, role_level=ROLE_MEMBER, organization=self, + ).exists() + + def is_user_ali_root(self, user): + """ + Return True if the user's ali is at the root of the organization + """ + is_root = False + + ou = OrganizationUser.objects.filter( + user=user, organization=self, + ) + if ou.count() > 0: + ou = ou.first() + if ou.access_level_instance == self.root: + is_root = True + return is_root + def get_exportable_fields(self): """Default to parent definition of exportable fields.""" if self.parent_org: @@ -318,9 +403,29 @@ def parent_id(self): return self.id return self.parent_org.id + def add_new_access_level_instance(self, parent_id: int, name: str) -> AccessLevelInstance: + parent = AccessLevelInstance.objects.get(pk=parent_id) + + if len(self.access_level_names) < parent.depth + 1: + raise UserWarning('Cannot create child at an unnamed level') + + new_access_level_instance = parent.add_child(organization=self, name=name) + + return new_access_level_instance + + def get_access_tree(self, from_ali=None) -> list: + if from_ali is None: + from_ali = self.root + + return AccessLevelInstance.dump_bulk(from_ali) + def __str__(self): return 'Organization: {0}({1})'.format(self.name, self.pk) + @property + def root(self): + return AccessLevelInstance.objects.get(organization=self, depth=1) + def organization_pre_delete(sender, instance, **kwargs): from seed.data_importer.models import ImportFile, ImportRecord @@ -331,3 +436,70 @@ def organization_pre_delete(sender, instance, **kwargs): pre_delete.connect(organization_pre_delete, sender=Organization) + + +@receiver(pre_save, sender=Organization) +def presave_organization(sender, instance, **kwargs): + from seed.models import Column + + if instance.id is None: + return + + previous = Organization.objects.get(pk=instance.id) + previous_access_level_names = previous.access_level_names + + if previous_access_level_names != instance.access_level_names: + _assert_alns_are_valid(instance) + _update_alis_path_keys(instance, previous_access_level_names) + + taken_names = Column.objects.filter(organization=instance, display_name__in=instance.access_level_names).values_list("display_name", flat=True) + if len(taken_names) > 0: + raise ValueError(f"{taken_names} are column names.") + + +def _assert_alns_are_valid(org): + from seed.models import Column + alns = org.access_level_names + + if len(set(alns)) != len(alns): # if not unique + raise ValueError("Organization's access_level_names must be unique.") + + columns_with_same_names = Column.objects.filter(organization=org, display_name__in=alns) + if columns_with_same_names.count() > 0: + repeated_names = set(columns_with_same_names.values_list("display_name", flat=True)) + raise ValueError(f"Access level names cannot match SEED column names: {list(repeated_names)}") + + +def _update_alis_path_keys(org, previous_access_level_names): + """For each instance.access_level_names item changed, update the ali.paths + """ + alis = AccessLevelInstance.objects.filter(organization=org) + min_len = min(len(previous_access_level_names), len(org.access_level_names)) + + with transaction.atomic(): + # for each name in access_level_name... + for i in range(min_len): + previous_access_level_name = previous_access_level_names[i] + current_access_level_name = org.access_level_names[i] + + # If the name was changed, alter the paths of the ALIs. 
+ if previous_access_level_name != current_access_level_name:
+ for ali in alis:
+ if previous_access_level_name in ali.path:
+ ali.path[current_access_level_name] = ali.path[previous_access_level_name]
+ del ali.path[previous_access_level_name]
+ ali.save()
+
+
+@receiver(post_save, sender=Organization)
+def post_save_organization(sender, instance, created, **kwargs):
+ """
+ Give new Orgs an Accountability Hierarchy root.
+ """
+ if created:
+ if not instance.access_level_names:
+ instance.access_level_names = [instance.name]
+
+ root = AccessLevelInstance.add_root(organization=instance, name="root")
+ root.save()
+ instance.save()
diff --git a/seed/lib/superperms/tests/test_organization_access_levels.py b/seed/lib/superperms/tests/test_organization_access_levels.py
new file mode 100644
index 0000000000..dbb693a193
--- /dev/null
+++ b/seed/lib/superperms/tests/test_organization_access_levels.py
@@ -0,0 +1,117 @@
+# !/usr/bin/env python
+# encoding: utf-8
+"""
+SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors.
+See also https://github.com/seed-platform/seed/main/LICENSE.md
+"""
+from django.test import TestCase
+
+from seed.landing.models import SEEDUser as User
+from seed.lib.superperms.orgs.models import AccessLevelInstance
+from seed.utils.organizations import create_organization
+
+
+class TestOrganizationAccessLevels(TestCase):
+ def setUp(self):
+ user_details = {
+ 'username': 'test_user@demo.com',
+ 'password': 'test_pass',
+ 'email': 'test_user@demo.com'
+ }
+ self.fake_user = User.objects.create_user(**user_details)
+
+ def test_tree_on_create(self):
+ fake_org, _, _ = create_organization(self.fake_user, 'Organization A')
+
+ # has right access_level_names
+ assert fake_org.access_level_names == ["Organization A"]
+
+ # has right AccessLevelInstances
+ assert AccessLevelInstance.objects.filter(organization=fake_org).count() == 1
+ root = fake_org.root
+ assert root.name == "root"
+
+ # get right access_tree
+ assert fake_org.get_access_tree() == [
+ {
+ 'id': root.pk,
+ 'data': {
+ 'name': 'root',
+ 'organization': fake_org.id,
+ 'path': {'Organization A': 'root'},
+ },
+ }
+ ]
+
+ def test_create_level_instance_without_name(self):
+ fake_org, _, _ = create_organization(self.fake_user, 'Organization A')
+
+ # creating an access level instance at an unnamed level should fail
+ with self.assertRaises(Exception):
+ fake_org.add_new_access_level_instance(fake_org.root.id, "mom")
+
+ def test_build_out_tree(self):
+ fake_org, _, _ = create_organization(self.fake_user, 'Organization A')
+
+ # populate tree
+ fake_org.access_level_names += ["2nd gen", "3rd gen"]
+ fake_org.save()
+ aunt = fake_org.add_new_access_level_instance(fake_org.root.id, "aunt")
+ mom = fake_org.add_new_access_level_instance(fake_org.root.id, "mom")
+ me = fake_org.add_new_access_level_instance(mom.id, "me")
+
+ # get tree
+ assert fake_org.get_access_tree() == [
+ {
+ 'id': fake_org.root.pk,
+ 'data': {
+ 'name': 'root',
+ 'organization': fake_org.id,
+ 'path': {'Organization A': 'root'},
+ },
+ 'children': [
+ {
+ 'id': aunt.pk,
+ 'data': {
+ 'name': 'aunt',
+ 'organization': fake_org.id,
+ 'path': {'Organization A': 'root', '2nd gen': 'aunt'},
+ }
+ },
+ {
+ 'id': mom.pk,
+ 'data': {
+ 'name': 'mom',
+ 'organization': fake_org.id,
+ 'path': {'Organization A': 'root', '2nd gen': 'mom'},
+ },
+ 'children': [
+ {
+ 'id': me.pk,
+ 'data': {
+ 'name': 'me',
+ 'organization': fake_org.id,
+ 'path': {'Organization A': 'root', '2nd gen': 'mom', '3rd gen': 'me'},
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ] + + def test_get_path(self): + fake_org, _, _ = create_organization(self.fake_user, 'Organization A') + + # populate tree + fake_org.access_level_names += ["2nd gen", "3rd gen"] + fake_org.save() + fake_org.add_new_access_level_instance(fake_org.root.id, "aunt") + mom = fake_org.add_new_access_level_instance(fake_org.root.id, "mom") + me = fake_org.add_new_access_level_instance(mom.id, "me") + + assert me.get_path() == { + "Organization A": "root", + "2nd gen": "mom", + "3rd gen": "me", + } diff --git a/seed/management/commands/create_default_user.py b/seed/management/commands/create_default_user.py index eb8497f612..b60e56fd92 100644 --- a/seed/management/commands/create_default_user.py +++ b/seed/management/commands/create_default_user.py @@ -73,7 +73,7 @@ def handle(self, *args, **options): self.stdout.write( 'Org <%s> already exists, adding user' % options['organization'], ending='\n' ) - org.add_member(u, ROLE_OWNER) + org.add_member(u, org.root.id, ROLE_OWNER) else: self.stdout.write( 'Creating org <%s> ...' % options['organization'], diff --git a/seed/management/commands/make_superuser.py b/seed/management/commands/make_superuser.py index b81010cb84..0880b984f2 100644 --- a/seed/management/commands/make_superuser.py +++ b/seed/management/commands/make_superuser.py @@ -62,7 +62,7 @@ def handle(self, *args, **options): for org in organizations: print("Adding user to {}.".format(org)) - org.add_member(user) + org.add_member(user, access_level_instance_id=org.root.id) else: # NL added this but is not going to make it the default because it may cause # security issues for others. Not sure yet. Comment here if you think we should diff --git a/seed/migrations/0215_accountability_hierarchy.py b/seed/migrations/0215_accountability_hierarchy.py new file mode 100644 index 0000000000..5dffeef409 --- /dev/null +++ b/seed/migrations/0215_accountability_hierarchy.py @@ -0,0 +1,167 @@ +# Generated by Django 3.2.23 on 2024-03-08 06:05 + +import django.core.validators +import django.db.models.deletion +from django.db import migrations, models, transaction + + +@transaction.atomic +def assign_properties_to_root_access_level(apps, schema_editor): + Property = apps.get_model('seed', 'Property') + AccessLevelInstance = apps.get_model('orgs', 'AccessLevelInstance') + + root_alis = {ali.organization_id: ali for ali in AccessLevelInstance.objects.filter(depth=1)} + + for property in Property.objects.all().iterator(): + property.access_level_instance = root_alis[property.organization_id] + property.save(update_fields=['access_level_instance']) + + +@transaction.atomic +def assign_taxlots_to_root_access_level(apps, schema_editor): + TaxLot = apps.get_model('seed', 'TaxLot') + AccessLevelInstance = apps.get_model('orgs', 'AccessLevelInstance') + + root_alis = {ali.organization_id: ali for ali in AccessLevelInstance.objects.filter(depth=1)} + + for taxlot in TaxLot.objects.all().iterator(): + taxlot.access_level_instance = root_alis[taxlot.organization_id] + taxlot.save(update_fields=['access_level_instance']) + + +@transaction.atomic +def assign_analyses_to_root_access_level(apps, schema_editor): + Analysis = apps.get_model('seed', 'Analysis') + AccessLevelInstance = apps.get_model('orgs', 'AccessLevelInstance') + + if Analysis.objects.filter(organization=None).exists(): + raise ValueError("Some Analyses have no organization, and are orphaned. This shouldn't have happened and these Analyses cannot be migrated. 
Please add an organization or delete the orphaned analyses and try again.") + + root_alis = {ali.organization_id: ali for ali in AccessLevelInstance.objects.filter(depth=1)} + + for analysis in Analysis.objects.all().iterator(): + analysis.access_level_instance = root_alis[analysis.organization_id] + analysis.save(update_fields=['access_level_instance']) + + +@transaction.atomic +def backfill_historical_notes(apps, schema_editor): + Property = apps.get_model('seed', 'Property') + HistoricalNote = apps.get_model('seed', 'HistoricalNote') + + properties_ids = Property.objects.filter(historical_note__isnull=True).values_list('id', flat=True) + + historical_notes_to_create = [HistoricalNote(property_id=property_id, text='') for property_id in properties_ids] + HistoricalNote.objects.bulk_create(historical_notes_to_create) + + +class Migration(migrations.Migration): + + dependencies = [ + ('orgs', '0030_accountability_hierarchy'), + ('seed', '0214_delete_filtergroup_labels'), + ] + + operations = [ + migrations.CreateModel( + name='Goal', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('target_percentage', models.IntegerField(validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(100)])), + ('name', models.CharField(max_length=255, unique=True)), + ('access_level_instance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='orgs.accesslevelinstance')), + ('area_column', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='goal_area_columns', to='seed.column')), + ('baseline_cycle', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='goal_baseline_cycles', to='seed.cycle')), + ('current_cycle', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='goal_current_cycles', to='seed.cycle')), + ('eui_column1', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='goal_eui_column1s', to='seed.column')), + ('eui_column2', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='goal_eui_column2s', to='seed.column')), + ('eui_column3', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='goal_eui_column3s', to='seed.column')), + ('organization', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='orgs.organization')), + ], + ), + migrations.AddField( + model_name='analysis', + name='access_level_instance', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='analyses', to='orgs.accesslevelinstance'), + ), + migrations.AddField( + model_name='property', + name='access_level_instance', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='properties', to='orgs.accesslevelinstance'), + ), + migrations.AddField( + model_name='propertystate', + name='raw_access_level_instance', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='orgs.accesslevelinstance'), + ), + migrations.AddField( + model_name='propertystate', + name='raw_access_level_instance_error', + field=models.TextField(null=True), + ), + migrations.AddField( + model_name='taxlot', + name='access_level_instance', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='taxlots', to='orgs.accesslevelinstance'), + ), + migrations.AddField( + model_name='taxlotstate', + 
name='raw_access_level_instance', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='orgs.accesslevelinstance'), + ), + migrations.AddField( + model_name='taxlotstate', + name='raw_access_level_instance_error', + field=models.TextField(null=True), + ), + migrations.CreateModel( + name='HistoricalNote', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('text', models.TextField(blank=True)), + ('property', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='historical_note', to='seed.property')), + ], + ), + migrations.CreateModel( + name='GoalNote', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('question', models.CharField(blank=True, choices=[('Is this a new construction or acquisition?', 'Is this a new construction or acquisition?'), ('Do you have data to report?', 'Do you have data to report?'), ('Is this value correct?', 'Is this value correct?'), ('Are these values correct?', 'Are these values correct?'), ('Other or multiple flags; explain in Additional Notes field', 'Other or multiple flags; explain in Additional Notes field')], max_length=1024, null=True)), + ('resolution', models.CharField(blank=True, max_length=1024, null=True)), + ('passed_checks', models.BooleanField(default=False)), + ('new_or_acquired', models.BooleanField(default=False)), + ('goal', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='seed.goal')), + ('property', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='seed.property')), + ], + ), + migrations.RunPython( + code=assign_properties_to_root_access_level, + reverse_code=migrations.RunPython.noop, + ), + migrations.RunPython( + code=assign_taxlots_to_root_access_level, + reverse_code=migrations.RunPython.noop, + ), + migrations.RunPython( + code=assign_analyses_to_root_access_level, + reverse_code=migrations.RunPython.noop, + ), + migrations.RunPython( + code=backfill_historical_notes, + ), + migrations.AlterField( + model_name='property', + name='access_level_instance', + field=models.ForeignKey(null=False, on_delete=django.db.models.deletion.CASCADE, related_name='properties', to='orgs.accesslevelinstance'), + ), + migrations.AlterField( + model_name='taxlot', + name='access_level_instance', + field=models.ForeignKey(null=False, on_delete=django.db.models.deletion.CASCADE, related_name='taxlots', to='orgs.accesslevelinstance'), + ), + migrations.AlterField( + model_name='analysis', + name='access_level_instance', + field=models.ForeignKey(null=False, on_delete=django.db.models.deletion.CASCADE, related_name='analyses', to='orgs.accesslevelinstance'), + ), + ] diff --git a/seed/models/__init__.py b/seed/models/__init__.py index f59de0b4c7..24dfd4289e 100644 --- a/seed/models/__init__.py +++ b/seed/models/__init__.py @@ -44,6 +44,8 @@ from .events import * # noqa from .ubid_models import * # noqa from .uniformat import * # noqa +from .goals import * # noqa +from .goal_notes import * # noqa from .certification import ( # noqa GreenAssessment, diff --git a/seed/models/analyses.py b/seed/models/analyses.py index 567f18d4b7..4d08488b92 100644 --- a/seed/models/analyses.py +++ b/seed/models/analyses.py @@ -10,7 +10,7 @@ from seed.analysis_pipelines.utils import get_json_path from seed.landing.models import SEEDUser as User -from seed.lib.superperms.orgs.models import Organization +from seed.lib.superperms.orgs.models import 
AccessLevelInstance, Organization logger = logging.getLogger(__name__) @@ -61,6 +61,7 @@ class Analysis(models.Model): status = models.IntegerField(default=PENDING_CREATION, choices=STATUS_TYPES) user = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, blank=True) organization = models.ForeignKey(Organization, on_delete=models.CASCADE) + access_level_instance = models.ForeignKey(AccessLevelInstance, on_delete=models.CASCADE, null=False, related_name="analyses", blank=False) configuration = models.JSONField(default=dict, blank=True) # parsed_results can contain any results gathered from the resulting file(s) # that are applicable to the entire analysis (i.e., all properties involved). @@ -195,3 +196,6 @@ def in_terminal_state(self): :returns: bool """ return self.status in [self.FAILED, self.STOPPED, self.COMPLETED] + + def can_create(self): + return self.organization.is_user_ali_root(self.user.id) and (self.organization.is_owner(self.user.id) or self.organization.has_role_member(self.user.id)) diff --git a/seed/models/building_file.py b/seed/models/building_file.py index 483c1bb135..69ee92ba1a 100644 --- a/seed/models/building_file.py +++ b/seed/models/building_file.py @@ -137,7 +137,7 @@ def _kbtu_thermal_conversion_factors(self): return self._cache_kbtu_thermal_conversion_factors - def process(self, organization_id, cycle, property_view=None, promote_property_state=True): + def process(self, organization_id, cycle, property_view=None, promote_property_state=True, access_level_instance=None): """ Process the building file that was uploaded and create the correct models for the object @@ -385,7 +385,8 @@ def process(self, organization_id, cycle, property_view=None, promote_property_s # set the property_state to the new one property_state = merged_state - elif not property_view and promote_property_state: + elif not property_view and promote_property_state and access_level_instance: + property_state.raw_access_level_instance = access_level_instance property_view = property_state.promote(cycle) else: return True, property_state, None, messages diff --git a/seed/models/certification.py b/seed/models/certification.py index d3b3998f1d..d7f6bd37e0 100644 --- a/seed/models/certification.py +++ b/seed/models/certification.py @@ -26,12 +26,12 @@ ) from seed.utils.strings import titlecase -DEFAULT_GREEN_ASSESSEMENT_VALIDITY_DURATION = getattr( +DEFAULT_GREEN_ASSESSMENT_VALIDITY_DURATION = getattr( settings, 'GREEN_ASSESSMENT_DEFAULT_VALIDITY_DURATION', None ) -if DEFAULT_GREEN_ASSESSEMENT_VALIDITY_DURATION: - DEFAULT_GREEN_ASSESSEMENT_VALIDITY_DURATION = datetime.timedelta( - DEFAULT_GREEN_ASSESSEMENT_VALIDITY_DURATION +if DEFAULT_GREEN_ASSESSMENT_VALIDITY_DURATION: + DEFAULT_GREEN_ASSESSMENT_VALIDITY_DURATION = datetime.timedelta( + DEFAULT_GREEN_ASSESSMENT_VALIDITY_DURATION ) # logger = logging.getLogger(__name__) @@ -91,7 +91,7 @@ def __str__(self): is_integer_score = models.BooleanField(default=True) validity_duration = models.DurationField( null=True, blank=True, - default=DEFAULT_GREEN_ASSESSEMENT_VALIDITY_DURATION + default=DEFAULT_GREEN_ASSESSMENT_VALIDITY_DURATION ) class Meta: diff --git a/seed/models/columns.py b/seed/models/columns.py index c3fd8e09f3..d82f7612fe 100644 --- a/seed/models/columns.py +++ b/seed/models/columns.py @@ -21,6 +21,7 @@ from past.builtins import basestring from seed.lib.superperms.orgs.models import Organization as SuperOrganization +from seed.lib.superperms.orgs.models import OrganizationUser from seed.models.column_mappings import ColumnMapping from 
seed.models.models import Unit @@ -110,6 +111,8 @@ class Column(models.Model): 'import_file', 'long_lat', 'merge_state', + 'raw_access_level_instance_error', + 'raw_access_level_instance_id', 'source_type', 'updated', ] + EXCLUDED_COLUMN_RETURN_FIELDS @@ -950,7 +953,7 @@ def create_mappings(mappings, organization, user, import_file_id=None): # Take the existing object and return the same object with the db column objects added to # the dictionary (to_column_object and from_column_object) - mappings = Column._column_fields_to_columns(mappings, organization) + mappings = Column._column_fields_to_columns(mappings, organization, user) for mapping in mappings: if isinstance(mapping, dict): try: @@ -1007,7 +1010,7 @@ def create_mappings(mappings, organization, user, import_file_id=None): return True @staticmethod - def _column_fields_to_columns(fields, organization): + def _column_fields_to_columns(fields, organization, user): """ List of dictionaries to process into column objects. This method will create the columns if they did not previously exist. Note that fields are probably mutable, but the method @@ -1041,115 +1044,47 @@ def _column_fields_to_columns(fields, organization): Returns: dict with lists of columns to which is mappable. """ - - def select_col_obj(column_name, table_name, organization_column): - if organization_column: - return [organization_column] - else: - # Try for "global" column definitions, e.g., BEDES. - Note the BEDES are not - # loaded into the database as of 9/8/2016 so not sure if this code is ever - # exercised - obj = Column.objects.filter(organization=None, column_name=column_name).first() - - if obj: - # create organization mapped column - obj.pk = None - obj.id = None - obj.organization = organization - obj.save() - - return [obj] - else: - if table_name: - obj, _ = Column.objects.get_or_create( - organization=organization, - column_name=column_name, - table_name=table_name, - is_extra_data=is_extra_data, - ) - return [obj] - else: - obj, _ = Column.objects.get_or_create( - organization=organization, - column_name=column_name, - is_extra_data=is_extra_data, - ) - return [obj] - # Container to store the dicts with the Column object new_data = [] + org_user = OrganizationUser.objects.get(organization=organization, user=user) + is_root_user = org_user.access_level_instance == organization.root for field in fields: new_field = field - - # Check if the extra_data field in the model object is a database column - is_extra_data = True - for c in Column.DATABASE_COLUMNS: - if field['to_table_name'] == c['table_name'] and field['to_field'] == c[ - 'column_name']: - is_extra_data = False - break - - try: - to_org_col, _ = Column.objects.get_or_create( - organization=organization, - column_name=field['to_field'], - table_name=field['to_table_name'], - is_extra_data=is_extra_data - ) - except Column.MultipleObjectsReturned: - _log.debug("More than one to_column found for {}.{}".format(field['to_table_name'], - field['to_field'])) - # raise Exception("Cannot handle more than one to_column returned for {}.{}".format( - # field['to_field'], field['to_table_name'])) - - # TODO: write something to remove the duplicate columns - to_org_col = Column.objects.filter(organization=organization, - column_name=field['to_field'], - table_name=field['to_table_name'], - is_extra_data=is_extra_data).first() - _log.debug("Grabbing the first to_column") - - try: - # the from column is the field in the import file, thus the table_name needs to be - # blank. 
Eventually need to handle passing in import_file_id - from_org_col, _ = Column.objects.update_or_create( - organization=organization, - table_name='', - column_name=field['from_field'], - is_extra_data=False, # Column objects representing raw/header rows are NEVER extra data - defaults={'units_pint': field.get('from_units', None)} - ) - except Column.MultipleObjectsReturned: - # We want to avoid the ambiguity of having multiple Column objects for a specific raw column. - # To do that, delete all multiples along with any associated ColumnMapping objects. - _log.debug( - "More than one from_column found for {}.{}".format(field['to_table_name'], - field['to_field'])) - - all_from_cols = Column.objects.filter( - organization=organization, - table_name='', - column_name=field['from_field'], - is_extra_data=False - ) - - ColumnMapping.objects.filter(column_raw__id__in=models.Subquery(all_from_cols.values('id'))).delete() - all_from_cols.delete() - - from_org_col = Column.objects.create( - organization=organization, - table_name='', - units_pint=field.get('from_units', None), - column_name=field['from_field'], - column_description=field['from_field'], - is_extra_data=False # Column objects representing raw/header rows are NEVER extra data - ) - _log.debug("Creating a new from_column") - - new_field['to_column_object'] = select_col_obj(field['to_field'], - field['to_table_name'], to_org_col) - new_field['from_column_object'] = select_col_obj(field['from_field'], "", from_org_col) + is_ah_data = any([ + field['to_field'] == name for name in organization.access_level_names + ]) + is_extra_data = not any([ + field['to_table_name'] == c['table_name'] and field['to_field'] == c['column_name'] + for c in Column.DATABASE_COLUMNS + ]) + + to_col_params = { + "organization": organization, + "column_name": field['to_field'], + "table_name": '' if is_ah_data else field['to_table_name'], + "is_extra_data": is_extra_data, + } + if is_root_user or is_ah_data: + to_org_col, _ = Column.objects.get_or_create(**to_col_params) + else: + try: + to_org_col = Column.objects.get(**to_col_params) + except Column.DoesNotExist: + raise PermissionError(f"user does not have permission to create column {field['to_field']}") + + # the from column is the field in the import file, thus the table_name needs to be + # blank. 
Eventually need to handle passing in import_file_id + from_org_col, _ = Column.objects.update_or_create( + organization=organization, + table_name='', + column_name=field['from_field'], + is_extra_data=False, # Column objects representing raw/header rows are NEVER extra data + defaults={'units_pint': field.get('from_units', None)} + ) + + new_field['to_column_object'] = [to_org_col] + new_field['from_column_object'] = [from_org_col] new_data.append(new_field) return new_data @@ -1613,5 +1548,14 @@ def validate_model(sender, **kwargs): if 'raw' in kwargs and not kwargs['raw']: instance.full_clean() + if instance.display_name is not None and instance.organization is not None: + if instance.display_name in instance.organization.access_level_names: + raise IntegrityError("This display name is already an access level name and cannot be used.") + + if instance.organization_id: + org = SuperOrganization.objects.get(pk=instance.organization_id) + if instance.display_name in org.access_level_names: + raise ValidationError("This display name is an organization access level name.") + pre_save.connect(validate_model, sender=Column) diff --git a/seed/models/compliance_metrics.py b/seed/models/compliance_metrics.py index 223a3cf7a2..48fba60570 100644 --- a/seed/models/compliance_metrics.py +++ b/seed/models/compliance_metrics.py @@ -44,7 +44,7 @@ class ComplianceMetric(models.Model): def __str__(self): return 'Program Metric - %s' % self.name - def evaluate(self): + def evaluate(self, user_ali): response = { 'meta': { 'organization': self.organization.id, @@ -61,7 +61,6 @@ def evaluate(self): query_dict = QueryDict(mutable=True) if self.filter_group and self.filter_group.query_dict: query_dict.update(self.filter_group.query_dict) - # print(f"query dict: {query_dict}") # grab cycles cycle_ids = self.cycles.values_list('pk', flat=True).order_by('start') @@ -95,6 +94,7 @@ def evaluate(self): property_response = properties_across_cycles_with_filters( self.organization_id, + user_ali, cycle_ids, query_dict, column_ids diff --git a/seed/models/data_views.py b/seed/models/data_views.py index 5809d0e526..4b518f544b 100644 --- a/seed/models/data_views.py +++ b/seed/models/data_views.py @@ -24,11 +24,11 @@ class DataView(models.Model): cycles = models.ManyToManyField(Cycle) filter_groups = models.ManyToManyField(FilterGroup) - def get_inventory(self): - views_by_filter_group_id, _ = self.views_by_filter() + def get_inventory(self, user_ali): + views_by_filter_group_id, _ = self.views_by_filter(user_ali) return views_by_filter_group_id - def views_by_filter(self): + def views_by_filter(self, user_ali): filter_group_views = {} views_by_filter_group_id = {} for filter_group in self.filter_groups.all(): @@ -37,8 +37,8 @@ def views_by_filter(self): query_dict = QueryDict(mutable=True) query_dict.update(filter_group.query_dict) for cycle in self.cycles.all(): - filter_views = self._get_filter_group_views(cycle, query_dict) - label_views = self._get_label_views(cycle, filter_group) + filter_views = self._get_filter_group_views(cycle, query_dict, user_ali) + label_views = self._get_label_views(cycle, filter_group, user_ali) views = self._combine_views(filter_views, label_views) filter_group_views[filter_group.id][cycle.id] = views for view in views: @@ -47,7 +47,7 @@ def views_by_filter(self): return views_by_filter_group_id, filter_group_views - def evaluate(self, columns): + def evaluate(self, columns, user_ali): # RETURN VALUE STRUCTURE # meta: {data_view: data_view.id, organization: organization.id}, @@ -67,7 +67,7 
@@ def evaluate(self, columns): # 'Average': 123, # 'Count': 123, # 'Maximum': 123, - # 'Minumum': 123, + # 'Minimum': 123, # 'Sum': 123, # 'views_by_default_field: { # view.state.default_field || state.id: 123, @@ -102,7 +102,7 @@ def evaluate(self, columns): } } - response['views_by_filter_group_id'], views_by_filter = self.views_by_filter() + response['views_by_filter_group_id'], views_by_filter = self.views_by_filter(user_ali) # assign data based on source column id for column in columns: @@ -223,7 +223,7 @@ def _evaluate_extra_data(self, states, aggregation, column): return round(type_to_aggregate[aggregation], 2) def _evaluate_derived_column(self, states, aggregation, column): - # to evluate a derived_column: DerivedColumn.evaluate(propertyState) + # to evaluate a derived_column: DerivedColumn.evaluate(propertyState) values = [] for state in states: @@ -241,27 +241,33 @@ def _combine_views(self, filter_views, label_views): else: return list(filter_views) - def _get_label_views(self, cycle, filter_group): + def _get_label_views(self, cycle, filter_group, user_ali): if not (filter_group.and_labels.exists() or filter_group.or_labels.exists() or filter_group.exclude_labels.exists()): return None + permissions_filter = { + "property__access_level_instance__lft__gte": user_ali.lft, + "property__access_level_instance__rgt__lte": user_ali.rgt, + } + and_labels = filter_group.and_labels.all() or_labels = filter_group.or_labels.all() exclude_labels = filter_group.exclude_labels.all() views = None + if and_labels.exists(): # and - views = views or cycle.propertyview_set.all() + views = views or cycle.propertyview_set.filter(**permissions_filter) for label in and_labels: views = views.filter(labels=label) if or_labels.exists(): # or - views = views or cycle.propertyview_set.all() + views = views or cycle.propertyview_set.filter(**permissions_filter) views = views.filter(labels__in=or_labels) if exclude_labels.exists(): # exclude - views = views or cycle.propertyview_set.all() + views = views or cycle.propertyview_set.filter(**permissions_filter) views = views.exclude(labels__in=exclude_labels) return list(views) - def _get_filter_group_views(self, cycle, query_dict): + def _get_filter_group_views(self, cycle, query_dict, user_ali): org_id = self.organization.id columns = Column.retrieve_all( org_id=org_id, @@ -271,13 +277,18 @@ def _get_filter_group_views(self, cycle, query_dict): ) annotations = {} try: - filters, annotations, order_by = build_view_filters_and_sorts(query_dict, columns) + filters, annotations, order_by = build_view_filters_and_sorts(query_dict, columns, 'property') except Exception: logging.error('error with filter group') views_list = ( PropertyView.objects.select_related('property', 'state', 'cycle') - .filter(property__organization_id=org_id, cycle=cycle) + .filter( + property__organization_id=org_id, + cycle=cycle, + property__access_level_instance__lft__gte=user_ali.lft, + property__access_level_instance__rgt__lte=user_ali.rgt, + ) ) views_list = views_list.annotate(**annotations).filter(filters).order_by(*order_by) diff --git a/seed/models/goal_notes.py b/seed/models/goal_notes.py new file mode 100644 index 0000000000..cbd6139121 --- /dev/null +++ b/seed/models/goal_notes.py @@ -0,0 +1,30 @@ +""" +SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors. 
+See also https://github.com/seed-platform/seed/main/LICENSE.md +""" +from django.db import models + +from seed.models import Goal, Property + + +class GoalNote(models.Model): + QUESTION_CHOICES = ( + ('Is this a new construction or acquisition?', 'Is this a new construction or acquisition?'), + ('Do you have data to report?', 'Do you have data to report?'), + ('Is this value correct?', 'Is this value correct?'), + ('Are these values correct?', 'Are these values correct?'), + ('Other or multiple flags; explain in Additional Notes field', 'Other or multiple flags; explain in Additional Notes field'), + ) + + goal = models.ForeignKey(Goal, on_delete=models.CASCADE) + property = models.ForeignKey(Property, on_delete=models.CASCADE) + + question = models.CharField(max_length=1024, choices=QUESTION_CHOICES, blank=True, null=True) + resolution = models.CharField(max_length=1024, blank=True, null=True) + passed_checks = models.BooleanField(default=False) + new_or_acquired = models.BooleanField(default=False) + + def serialize(self): + from seed.serializers.goal_notes import GoalNoteSerializer + serializer = GoalNoteSerializer(self) + return serializer.data diff --git a/seed/models/goals.py b/seed/models/goals.py new file mode 100644 index 0000000000..f4fcf22718 --- /dev/null +++ b/seed/models/goals.py @@ -0,0 +1,73 @@ +""" +SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors. +See also https://github.com/seed-platform/seed/main/LICENSE.md +""" + +from django.core.validators import MaxValueValidator, MinValueValidator +from django.db import models +from django.db.models import Q +from django.db.models.signals import post_save +from django.dispatch import receiver + +from seed.models import ( + AccessLevelInstance, + Column, + Cycle, + Organization, + Property +) + + +class Goal(models.Model): + organization = models.ForeignKey(Organization, on_delete=models.CASCADE) + baseline_cycle = models.ForeignKey(Cycle, on_delete=models.CASCADE, related_name='goal_baseline_cycles') + current_cycle = models.ForeignKey(Cycle, on_delete=models.CASCADE, related_name='goal_current_cycles') + access_level_instance = models.ForeignKey(AccessLevelInstance, on_delete=models.CASCADE) + eui_column1 = models.ForeignKey(Column, on_delete=models.CASCADE, related_name='goal_eui_column1s') + # eui column 2 and 3 optional + eui_column2 = models.ForeignKey(Column, on_delete=models.CASCADE, related_name='goal_eui_column2s', blank=True, null=True) + eui_column3 = models.ForeignKey(Column, on_delete=models.CASCADE, related_name='goal_eui_column3s', blank=True, null=True) + area_column = models.ForeignKey(Column, on_delete=models.CASCADE, related_name='goal_area_columns') + target_percentage = models.IntegerField(validators=[MinValueValidator(0), MaxValueValidator(100)]) + name = models.CharField(max_length=255, unique=True) + + def __str__(self): + return f"Goal - {self.name}" + + def eui_columns(self): + """ Preferred column order """ + eui_columns = [self.eui_column1, self.eui_column2, self.eui_column3] + return [column for column in eui_columns if column] + + def properties(self): + properties = Property.objects.filter( + Q(views__cycle=self.baseline_cycle) | + Q(views__cycle=self.current_cycle), + access_level_instance__lft__gte=self.access_level_instance.lft, + access_level_instance__rgt__lte=self.access_level_instance.rgt + ).distinct() + + return properties + + +@receiver(post_save, sender=Goal) +def post_save_goal(sender, instance, **kwargs): + from seed.models import GoalNote + 
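+ # Keep GoalNotes in sync with the goal's property set: exactly one GoalNote
+ # per (goal, property) pair, created and deleted as that set changes below.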
+ # retrieve a flat set of all property ids associated with this goal + goal_property_ids = set(instance.properties().values_list('id', flat=True)) + + # retrieve a flat set of all property ids from the previous goal (through goal note which has not been created/updated yet) + previous_property_ids = set(instance.goalnote_set.values_list('property_id', flat=True)) + + # create, or update has added more properties to the goal + new_property_ids = goal_property_ids - previous_property_ids + # update has removed properties from the goal + removed_property_ids = previous_property_ids - goal_property_ids + + if new_property_ids: + new_goal_notes = [GoalNote(goal=instance, property_id=id) for id in new_property_ids] + GoalNote.objects.bulk_create(new_goal_notes) + + if removed_property_ids: + GoalNote.objects.filter(goal=instance, property_id__in=removed_property_ids).delete() diff --git a/seed/models/notes.py b/seed/models/notes.py index efe80b3b47..275cf1b4e9 100644 --- a/seed/models/notes.py +++ b/seed/models/notes.py @@ -9,7 +9,7 @@ from seed.landing.models import SEEDUser as User from seed.lib.superperms.orgs.models import Organization -from seed.models import MAX_NAME_LENGTH, PropertyView, TaxLotView +from seed.models import MAX_NAME_LENGTH, Property, PropertyView, TaxLotView from seed.utils.generic import obj_to_dict @@ -108,3 +108,13 @@ def create_from_edit(self, user_id, view, new_values, previous_values): def to_dict(self): return obj_to_dict(self) + + +class HistoricalNote(models.Model): + text = models.TextField(blank=True) + property = models.OneToOneField(Property, on_delete=models.CASCADE, related_name='historical_note') + + def serialize(self): + from seed.serializers.historical_notes import HistoricalNoteSerializer + serializer = HistoricalNoteSerializer(self) + return serializer.data diff --git a/seed/models/properties.py b/seed/models/properties.py index 62d25081a1..9135d7ad74 100644 --- a/seed/models/properties.py +++ b/seed/models/properties.py @@ -13,6 +13,7 @@ from django.conf import settings from django.contrib.gis.db import models as geomodels +from django.core.exceptions import ValidationError from django.db import IntegrityError, models, transaction from django.db.models import Case, Value, When from django.db.models.signals import ( @@ -30,7 +31,7 @@ from seed.data_importer.models import ImportFile # from seed.utils.cprofile import cprofile from seed.lib.mcm.cleaners import date_cleaner -from seed.lib.superperms.orgs.models import Organization +from seed.lib.superperms.orgs.models import AccessLevelInstance, Organization from seed.models.cycles import Cycle from seed.models.models import ( DATA_STATE, @@ -73,6 +74,7 @@ class Property(models.Model): The property can also reference a parent property. 
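+ Each property also belongs to exactly one AccessLevelInstance within its
+ organization's accountability hierarchy (see access_level_instance below).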
""" organization = models.ForeignKey(Organization, on_delete=models.CASCADE) + access_level_instance = models.ForeignKey(AccessLevelInstance, on_delete=models.CASCADE, null=False, related_name="properties") # Handle properties that may have multiple properties (e.g., buildings) parent_property = models.ForeignKey('Property', on_delete=models.CASCADE, blank=True, null=True) @@ -132,6 +134,28 @@ def copy_meters(self, source_property_id, source_persists=True): target_meter.copy_readings(source_meter, overlaps_possible=True) +@receiver(pre_save, sender=Property) +def set_default_access_level_instance(sender, instance, **kwargs): + """If ALI not set, put this Property as the root.""" + if instance.access_level_instance_id is None: + root = AccessLevelInstance.objects.get(organization_id=instance.organization_id, depth=1) + instance.access_level_instance_id = root.id + + bad_taxlotproperty = TaxLotProperty.objects \ + .filter(property_view__property=instance) \ + .exclude(taxlot_view__taxlot__access_level_instance=instance.access_level_instance) \ + .exists() + if bad_taxlotproperty: + raise ValidationError("cannot change property's ALI to AlI different than related taxlots.") + + +@receiver(post_save, sender=Property) +def post_save_property(sender, instance, created, **kwargs): + if created: + from seed.models import HistoricalNote + HistoricalNote.objects.get_or_create(property=instance) + + class PropertyState(models.Model): """Store a single property. This contains all the state information about the property @@ -159,6 +183,8 @@ class PropertyState(models.Model): organization = models.ForeignKey(Organization, on_delete=models.CASCADE) data_state = models.IntegerField(choices=DATA_STATE, default=DATA_STATE_UNKNOWN) merge_state = models.IntegerField(choices=MERGE_STATE, default=MERGE_STATE_UNKNOWN, null=True) + raw_access_level_instance = models.ForeignKey(AccessLevelInstance, null=True, on_delete=models.SET_NULL) + raw_access_level_instance_error = models.TextField(null=True) jurisdiction_property_id = models.TextField(null=True, blank=True, db_collation='natural_sort') @@ -360,7 +386,13 @@ def promote(self, cycle, property_id=None): _log.error("Could not promote this property") return None else: - prop = Property.objects.create(organization=self.organization) + if self.raw_access_level_instance is None: + _log.error("Could not promote this property: no raw_access_level_instance") + return None + + prop = Property.objects.create(organization=self.organization, access_level_instance=self.raw_access_level_instance) + self.raw_access_level_instance = None + self.raw_access_level_instance_error = None pv = PropertyView.objects.create(property=prop, cycle=cycle, state=self) diff --git a/seed/models/tax_lot_properties.py b/seed/models/tax_lot_properties.py index c9892f900b..d05f0d05c2 100644 --- a/seed/models/tax_lot_properties.py +++ b/seed/models/tax_lot_properties.py @@ -14,8 +14,11 @@ from django.apps import apps from django.contrib.gis.db.models import GeometryField from django.contrib.gis.geos import GEOSGeometry +from django.core.exceptions import ValidationError from django.db import models from django.db.models import Count +from django.db.models.signals import pre_save +from django.dispatch import receiver from django.utils.timezone import make_naive from quantityfield.units import ureg @@ -140,6 +143,7 @@ def serialize( show_columns: Optional[list[int]], columns_from_database: list[dict], include_related: bool = True, + goal_id: int = False, ) -> list[dict]: """ This method takes a list 
of TaxLotViews or PropertyViews and returns the data along
@@ -264,9 +268,11 @@ def serialize(
 obj_dict['merged_indicator'] = obj.state_id in merged_state_ids

- # This is only applicable to Properties since Tax Lots don't have meters
 if this_cls == 'Property':
+ obj_dict.update(obj.property.access_level_instance.get_path())
 obj_dict['meters_exist_indicator'] = len(obj.property.meters.all()) > 0
+ else:
+ obj_dict.update(obj.taxlot.access_level_instance.get_path())

 # bring in GIS data
 obj_dict[lookups['bounding_box']] = bounding_box_wkt(obj.state)
@@ -287,6 +293,12 @@ def serialize(
 if obj_dict.get('measures'):
 del obj_dict['measures']

+ # add goal note data
+ if goal_id:
+ goal_note = obj.property.goalnote_set.filter(goal=goal_id).first()
+ obj_dict['goal_note'] = goal_note.serialize() if goal_note else None
+ obj_dict['historical_note'] = obj.property.historical_note.serialize()
+
 results.append(obj_dict)

 return results
@@ -431,3 +443,12 @@ def get_related(
 join_map[getattr(join, lookups['obj_view_id'])] = [join_dict]

 return join_map
+
+
+@receiver(pre_save, sender=TaxLotProperty)
+def presave_taxlotproperty(sender, instance, **kwargs):
+ p_ali = instance.property_view.property.access_level_instance.pk
+ t_ali = instance.taxlot_view.taxlot.access_level_instance.pk
+
+ if p_ali != t_ali:
+ raise ValidationError("taxlot and property must have the same access level instance.")
diff --git a/seed/models/tax_lots.py b/seed/models/tax_lots.py
index 70be18b232..fe51acff13 100644
--- a/seed/models/tax_lots.py
+++ b/seed/models/tax_lots.py
@@ -11,13 +11,14 @@
 from os import path

 from django.contrib.gis.db import models as geomodels
+from django.core.exceptions import ValidationError
 from django.db import models
 from django.db.models import Case, Value, When
 from django.db.models.signals import m2m_changed, post_save, pre_save
 from django.dispatch import receiver

 from seed.data_importer.models import ImportFile
-from seed.lib.superperms.orgs.models import Organization
+from seed.lib.superperms.orgs.models import AccessLevelInstance, Organization
 from seed.models.cycles import Cycle
 from seed.models.models import (
 DATA_STATE,
@@ -46,6 +47,7 @@ class TaxLot(models.Model):
 # NOTE: we have been calling this the organization.
We
 # should stay consistent, although I prefer the name organization (!super_org)
 organization = models.ForeignKey(Organization, on_delete=models.CASCADE)
+ access_level_instance = models.ForeignKey(AccessLevelInstance, on_delete=models.CASCADE, null=False, related_name="taxlots")

 # Track when the entry was created and when it was updated
 created = models.DateTimeField(auto_now_add=True)
@@ -55,6 +57,21 @@ def __str__(self):
 return 'TaxLot - %s' % self.pk


+@receiver(pre_save, sender=TaxLot)
+def set_default_access_level_instance(sender, instance, **kwargs):
+ """If the ALI is not set, default this TaxLot to the organization's root; also ensure it matches related properties' ALIs."""
+ if instance.access_level_instance_id is None:
+ root = AccessLevelInstance.objects.get(organization_id=instance.organization_id, depth=1)
+ instance.access_level_instance_id = root.id
+
+ bad_taxlotproperty = TaxLotProperty.objects \
+ .filter(taxlot_view__taxlot=instance) \
+ .exclude(property_view__property__access_level_instance=instance.access_level_instance) \
+ .exists()
+ if bad_taxlotproperty:
+ raise ValidationError("cannot change taxlot's ALI to an ALI different from its related properties.")
+
+
 class TaxLotState(models.Model):
 # The state field names should match pretty close to the pdf, just
 # because these are the most 'public' fields in terms of
@@ -67,6 +84,8 @@ class TaxLotState(models.Model):
 organization = models.ForeignKey(Organization, on_delete=models.CASCADE)
 data_state = models.IntegerField(choices=DATA_STATE, default=DATA_STATE_UNKNOWN)
 merge_state = models.IntegerField(choices=MERGE_STATE, default=MERGE_STATE_UNKNOWN, null=True)
+ raw_access_level_instance = models.ForeignKey(AccessLevelInstance, null=True, on_delete=models.SET_NULL)
+ raw_access_level_instance_error = models.TextField(null=True)

 custom_id_1 = models.CharField(max_length=255, null=True, blank=True, db_collation='natural_sort')

@@ -135,7 +154,13 @@ def promote(self, cycle):
 if self.organization is None:
 _log.error("organization is None")

- taxlot = TaxLot.objects.create(organization=self.organization)
+ if self.raw_access_level_instance is None:
+ _log.error("Could not promote this taxlot: no raw_access_level_instance")
+ return None
+
+ taxlot = TaxLot.objects.create(organization=self.organization, access_level_instance=self.raw_access_level_instance)
+ self.raw_access_level_instance = None
+ self.raw_access_level_instance_error = None

 tlv = TaxLotView.objects.create(taxlot=taxlot, cycle=cycle, state=self)
diff --git a/seed/serializers/access_level_instances.py b/seed/serializers/access_level_instances.py
new file mode 100644
index 0000000000..1c3f07947e
--- /dev/null
+++ b/seed/serializers/access_level_instances.py
@@ -0,0 +1,16 @@
+# !/usr/bin/env python
+# encoding: utf-8
+"""
+SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors.
+See also https://github.com/seed-platform/seed/main/LICENSE.md +""" +from rest_framework import serializers + +from seed.models import AccessLevelInstance + + +class AccessLevelInstanceSerializer(serializers.ModelSerializer): + + class Meta: + model = AccessLevelInstance + fields = '__all__' diff --git a/seed/serializers/data_views.py b/seed/serializers/data_views.py index b5dc9873c5..78cf9c1406 100644 --- a/seed/serializers/data_views.py +++ b/seed/serializers/data_views.py @@ -53,11 +53,11 @@ def update(self, instance, validated_data): instance.save() # if new parameters are provided, delete previous ones so we can create the new params - paramters_data = validated_data.get('parameters') - if paramters_data: + parameters_data = validated_data.get('parameters') + if parameters_data: DataViewParameter.objects.filter(data_view=instance).delete() - for parameter in paramters_data: + for parameter in parameters_data: DataViewParameter.objects.create(data_view=instance, **parameter) return instance diff --git a/seed/serializers/goal_notes.py b/seed/serializers/goal_notes.py new file mode 100644 index 0000000000..bb78fc83ea --- /dev/null +++ b/seed/serializers/goal_notes.py @@ -0,0 +1,14 @@ +""" +SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors. +See also https://github.com/seed-platform/seed/main/LICENSE.md +""" +from rest_framework import serializers + +from seed.models import GoalNote + + +class GoalNoteSerializer(serializers.ModelSerializer): + + class Meta: + model = GoalNote + fields = '__all__' diff --git a/seed/serializers/goals.py b/seed/serializers/goals.py new file mode 100644 index 0000000000..15c4952289 --- /dev/null +++ b/seed/serializers/goals.py @@ -0,0 +1,48 @@ +""" +SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors. 
+See also https://github.com/seed-platform/seed/main/LICENSE.md
+"""
+from django.core.exceptions import ValidationError
+from rest_framework import serializers
+
+from seed.models import Goal
+
+
+class GoalSerializer(serializers.ModelSerializer):
+
+ class Meta:
+ model = Goal
+ fields = '__all__'
+
+ def to_representation(self, obj):
+ result = super().to_representation(obj)
+ result['level_name_index'] = obj.access_level_instance.depth - 1
+ return result
+
+ def validate(self, data):
+ # a partial update allows a cycle or ALI to be blank
+ baseline_cycle = data.get('baseline_cycle') or self.instance.baseline_cycle
+ current_cycle = data.get('current_cycle') or self.instance.current_cycle
+ organization = data.get('organization') or self.instance.organization
+ ali = data.get('access_level_instance') or self.instance.access_level_instance
+
+ if baseline_cycle == current_cycle:
+ raise ValidationError('Cycles must be unique.')
+
+ if baseline_cycle.end > current_cycle.end:
+ raise ValidationError('Baseline Cycle must precede Current Cycle.')
+
+ if not all([
+ getattr(baseline_cycle, 'organization', None) == organization,
+ getattr(current_cycle, 'organization', None) == organization,
+ getattr(ali, 'organization', None) == organization
+ ]):
+ raise ValidationError('Organization mismatch.')
+
+ # non-null EUI columns must be unique
+ eui_columns = [data.get('eui_column1'), data.get('eui_column2'), data.get('eui_column3')]
+ unique_columns = {column for column in eui_columns if column is not None}
+ if len(unique_columns) < len([column for column in eui_columns if column is not None]):
+ raise ValidationError('Columns must be unique.')
+
+ return data
diff --git a/seed/serializers/historical_notes.py b/seed/serializers/historical_notes.py
new file mode 100644
index 0000000000..743b08c31f
--- /dev/null
+++ b/seed/serializers/historical_notes.py
@@ -0,0 +1,14 @@
+"""
+SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors.
+See also https://github.com/seed-platform/seed/main/LICENSE.md +""" +from rest_framework import serializers + +from seed.models import HistoricalNote + + +class HistoricalNoteSerializer(serializers.ModelSerializer): + + class Meta: + model = HistoricalNote + fields = '__all__' diff --git a/seed/serializers/properties.py b/seed/serializers/properties.py index 985388f275..6377cf6a7f 100644 --- a/seed/serializers/properties.py +++ b/seed/serializers/properties.py @@ -27,6 +27,9 @@ TaxLotProperty, TaxLotView ) +from seed.serializers.access_level_instances import ( + AccessLevelInstanceSerializer +) from seed.serializers.building_file import BuildingFileSerializer from seed.serializers.certification import ( GreenAssessmentPropertyReadOnlySerializer @@ -109,6 +112,7 @@ class PropertySerializer(serializers.ModelSerializer): updated = serializers.DateTimeField("%Y-%m-%dT%H:%M:%S.%fZ", default_timezone=pytz.utc, read_only=True) inventory_documents = InventoryDocumentSerializer(many=True, read_only=True) + access_level_instance = AccessLevelInstanceSerializer(many=False, read_only=True) class Meta: model = Property @@ -123,6 +127,27 @@ def many_init(cls, *args, **kwargs): return PropertyListSerializer(*args, **kwargs) +class CreatePropertySerializer(serializers.ModelSerializer): + # The created and updated fields are in UTC time and need to be casted accordingly in this format + created = serializers.DateTimeField("%Y-%m-%dT%H:%M:%S.%fZ", default_timezone=pytz.utc, read_only=True) + updated = serializers.DateTimeField("%Y-%m-%dT%H:%M:%S.%fZ", default_timezone=pytz.utc, read_only=True) + + inventory_documents = InventoryDocumentSerializer(many=True, read_only=True) + access_level_instance_id = serializers.IntegerField(required=True) + organization_id = serializers.IntegerField(required=True) + + class Meta: + model = Property + fields = ( + 'id', + 'organization_id', + 'access_level_instance_id', + 'created', + 'updated', + 'inventory_documents', + ) + + class PropertyMinimalSerializer(serializers.ModelSerializer): """Define fields to avoid label lookup""" @@ -225,6 +250,7 @@ class PropertyStatePromoteWritableSerializer(serializers.ModelSerializer): # to support the old state serializer method with the PROPERTY_STATE_FIELDS variables import_file_id = serializers.IntegerField(allow_null=True, read_only=True) organization_id = serializers.IntegerField() + raw_access_level_instance_id = serializers.IntegerField() # read-only core fields id = serializers.IntegerField(read_only=True) diff --git a/seed/static/seed/images/ali-tree-example-lg.webp b/seed/static/seed/images/ali-tree-example-lg.webp new file mode 100644 index 0000000000..49118a5880 Binary files /dev/null and b/seed/static/seed/images/ali-tree-example-lg.webp differ diff --git a/seed/static/seed/js/controllers/admin_controller.js b/seed/static/seed/js/controllers/admin_controller.js index 4b88296374..959e68517f 100644 --- a/seed/static/seed/js/controllers/admin_controller.js +++ b/seed/static/seed/js/controllers/admin_controller.js @@ -10,12 +10,14 @@ angular.module('BE.seed.controller.admin', []).controller('admin_controller', [ 'organization_service', 'column_mappings_service', 'uploader_service', + 'ah_service', 'auth_payload', 'organizations_payload', 'user_profile_payload', 'users_payload', 'Notification', '$window', + '$translate', // eslint-disable-next-line func-names function ( $scope, @@ -25,12 +27,14 @@ angular.module('BE.seed.controller.admin', []).controller('admin_controller', [ organization_service, column_mappings_service, 
uploader_service, + ah_service, auth_payload, organizations_payload, user_profile_payload, users_payload, Notification, - $window + $window, + $translate ) { $scope.is_superuser = auth_payload.auth.requires_superuser; $scope.user = {}; @@ -51,6 +55,25 @@ angular.module('BE.seed.controller.admin', []).controller('admin_controller', [ css: 'alert-success' }; $scope.username = `${user_profile_payload.first_name} ${user_profile_payload.last_name}`; + $scope.level_names = []; + $scope.level_name_index = null; + $scope.roles = [{ + name: $translate.instant('Owner'), + value: 'owner' + }, { + name: $translate.instant('Member'), + value: 'member' + }, { + name: $translate.instant('Viewer'), + value: 'viewer' + }]; + + let access_level_instances_by_depth = {}; + + $scope.change_selected_level_index = () => { + const new_level_instance_depth = parseInt($scope.level_name_index, 10) + 1; + $scope.potential_level_instances = access_level_instances_by_depth[new_level_instance_depth]; + }; const update_alert = (is_ok, message) => { $scope.alert.show = true; @@ -59,23 +82,57 @@ angular.module('BE.seed.controller.admin', []).controller('admin_controller', [ }; $scope.update_alert = update_alert; + const get_users = () => { + user_service.get_users().then((data) => { + $scope.org.users = data.users; + }); + }; + + const process_organizations = (data) => { + $scope.org_user.organizations = data.organizations; + _.forEach($scope.org_user.organizations, (org) => { + org.total_inventory = _.reduce(org.cycles, (sum, cycle) => sum + cycle.num_properties + cycle.num_taxlots, 0); + }); + }; + + const get_organizations = () => organization_service.get_organizations().then(process_organizations).catch((response) => { + $log.log({ message: 'error from data call', status: response.status, data: response.data }); + update_alert(false, `error getting organizations: ${response.data.message}`); + }); + + $scope.org_form.new_org = () => { + $scope.user.organization = undefined; + $scope.user.access_level_instance_id = undefined; + $scope.level_name_index = undefined; + }; + $scope.org_form.existing_org = () => { + $scope.user.org_name = undefined; + $scope.user.access_level_instance_id = undefined; + $scope.level_name_index = undefined; + + organization_service.get_organization_access_level_tree($scope.user.organization.id).then((access_level_tree) => { + $scope.level_names = access_level_tree.access_level_names; + access_level_instances_by_depth = ah_service.calculate_access_level_instances_by_depth(access_level_tree.access_level_tree); + }); + }; $scope.org_form.reset = () => { + $scope.user = { role: $scope.roles[0].value }; $scope.org.user_email = ''; $scope.org.name = ''; }; $scope.org_form.add = (org) => { organization_service .add(org) - .then(() => { - get_organizations().then(() => { - $scope.$emit('organization_list_updated'); - }); + .then(async () => { + await get_organizations(); + $scope.$emit('organization_list_updated'); update_alert(true, `Organization ${org.name} created`); }) .catch((response) => { update_alert(false, `error creating organization: ${response.data.message}`); }); }; + $scope.user_form.add = (user) => { user_service .add(user) @@ -98,41 +155,24 @@ angular.module('BE.seed.controller.admin', []).controller('admin_controller', [ update_alert(false, `error creating user: ${response.data.message}`); }); }; - $scope.org_form.not_ready = () => _.isUndefined($scope.org.email) || organization_exists($scope.org.name); - - var organization_exists = (name) => { + const organization_exists = (name) => { 
const orgs = _.map($scope.org_user.organizations, (org) => org.name.toLowerCase()); return orgs.includes(name.toLowerCase()); }; + $scope.org_form.not_ready = () => $scope.org.email === undefined || organization_exists($scope.org.name); + $scope.user_form.not_ready = () => !$scope.user.organization && !$scope.user.org_name; $scope.user_form.reset = () => { - $scope.user = {}; + $scope.user = { role: $scope.roles[0].value }; + $scope.level_names = []; }; $scope.org_form.reset(); $scope.user_form.reset(); - var get_users = () => { - user_service.get_users().then((data) => { - $scope.org.users = data.users; - }); - }; - - const process_organizations = (data) => { - $scope.org_user.organizations = data.organizations; - _.forEach($scope.org_user.organizations, (org) => { - org.total_inventory = _.reduce(org.cycles, (sum, cycle) => sum + cycle.num_properties + cycle.num_taxlots, 0); - }); - }; - - var get_organizations = () => organization_service.get_organizations().then(process_organizations, (response) => { - $log.log({ message: 'error from data call', status: response.status, data: response.data }); - update_alert(false, `error getting organizations: ${response.data.message}`); - }); - $scope.get_organizations_users = (org) => { if (org) { organization_service @@ -180,6 +220,7 @@ angular.module('BE.seed.controller.admin', []).controller('admin_controller', [ }; $scope.confirm_column_mappings_delete = (org) => { + // eslint-disable-next-line no-restricted-globals,no-alert const yes = confirm(`Are you sure you want to delete the '${org.name}' column mappings? This will invalidate preexisting mapping review data`); if (yes) { $scope.delete_org_column_mappings(org); @@ -203,6 +244,7 @@ angular.module('BE.seed.controller.admin', []).controller('admin_controller', [ * for an org's inventory. */ $scope.confirm_inventory_delete = (org) => { + // eslint-disable-next-line no-restricted-globals,no-alert const yes = confirm(`Are you sure you want to PERMANENTLY delete '${org.name}'s properties and tax lots?`); if (yes) { $scope.delete_org_inventory(org); @@ -233,8 +275,10 @@ angular.module('BE.seed.controller.admin', []).controller('admin_controller', [ }; $scope.confirm_org_delete = (org) => { + // eslint-disable-next-line no-restricted-globals const yes = confirm(`Are you sure you want to PERMANENTLY delete the entire '${org.name}' organization?`); if (yes) { + // eslint-disable-next-line no-restricted-globals const again = confirm(`Deleting an organization is permanent. 
Confirm again to delete '${org.name}'`); if (again) { $scope.delete_org(org); diff --git a/seed/static/seed/js/controllers/analyses_controller.js b/seed/static/seed/js/controllers/analyses_controller.js index 8ddb4f93be..0057551d51 100644 --- a/seed/static/seed/js/controllers/analyses_controller.js +++ b/seed/static/seed/js/controllers/analyses_controller.js @@ -11,20 +11,32 @@ angular 'cycles_payload', 'organization_payload', 'organization_service', - 'users_payload', + // 'users_payload', 'auth_payload', 'messages_payload', 'urls', 'analyses_service', 'Notification', // eslint-disable-next-line func-names - function ($scope, analyses_payload, cycles_payload, organization_payload, organization_service, users_payload, auth_payload, messages_payload, urls, analyses_service, Notification) { + function ( + $scope, + analyses_payload, + cycles_payload, + organization_payload, + organization_service, + // users_payload, + auth_payload, + messages_payload, + urls, + analyses_service, + Notification + ) { $scope.org = organization_payload.organization; $scope.auth = auth_payload.auth; $scope.analyses = analyses_payload.analyses; $scope.views = analyses_payload.views; $scope.original_views = analyses_payload.original_views; - $scope.users = users_payload.users; + // $scope.users = users_payload.users; $scope.messages = messages_payload.messages; $scope.cycles = cycles_payload.cycles; diff --git a/seed/static/seed/js/controllers/column_mappings_controller.js b/seed/static/seed/js/controllers/column_mappings_controller.js index 770d5fe16c..c36e082b98 100644 --- a/seed/static/seed/js/controllers/column_mappings_controller.js +++ b/seed/static/seed/js/controllers/column_mappings_controller.js @@ -374,7 +374,7 @@ angular.module('BE.seed.controller.column_mappings', []).controller('column_mapp '' + - '' + + '' + '
-
+
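The admin_controller change above buckets access level instances by their depth in the tree, so selecting the level name at index i looks the instances up under depth i + 1 (mirroring GoalSerializer.to_representation, which maps depth back to level_name_index as depth - 1). A minimal Python sketch of that grouping; the flat node list and its field names are illustrative assumptions, not ah_service's actual contract:

from collections import defaultdict

# Hypothetical mirror of ah_service.calculate_access_level_instances_by_depth:
# bucket a flattened access level tree by depth so the UI can fill the
# instance dropdown with instances_by_depth[level_name_index + 1].
def instances_by_depth(nodes):
    grouped = defaultdict(list)
    for node in nodes:
        grouped[node['depth']].append({'id': node['id'], 'name': node['name']})
    return dict(grouped)

nodes = [
    {'id': 1, 'name': 'root', 'depth': 1},
    {'id': 2, 'name': 'East Campus', 'depth': 2},
    {'id': 3, 'name': 'West Campus', 'depth': 2},
]
# level_name_index 1 (the second level name) corresponds to depth 2
assert [n['name'] for n in instances_by_depth(nodes)[2]] == ['East Campus', 'West Campus']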
diff --git a/seed/static/seed/partials/inventory_detail_settings.html b/seed/static/seed/partials/inventory_detail_settings.html index b3ff83dee3..f64d370a70 100644 --- a/seed/static/seed/partials/inventory_detail_settings.html +++ b/seed/static/seed/partials/inventory_detail_settings.html @@ -28,18 +28,20 @@

Column Order/Visibility Detail Column List Profile: - - - - + + + + + +

diff --git a/seed/static/seed/partials/inventory_list.html b/seed/static/seed/partials/inventory_list.html index f5869a4a9f..b39785b387 100644 --- a/seed/static/seed/partials/inventory_list.html +++ b/seed/static/seed/partials/inventory_list.html @@ -41,23 +41,24 @@

{$:: (inventory_type === 'taxlots' ? 'Tax Lots' : 'Properties') | translate - - - - - - - - - - - + + + + + + + + + + + - - + + - + + @@ -127,7 +128,7 @@

{$:: (inventory_type === 'taxlots' ? 'Tax Lots' : 'Properties') | translate -
+
diff --git a/seed/static/seed/partials/inventory_reports.html b/seed/static/seed/partials/inventory_reports.html index e140b5c2fa..038afb6309 100644 --- a/seed/static/seed/partials/inventory_reports.html +++ b/seed/static/seed/partials/inventory_reports.html @@ -39,12 +39,12 @@

{$::
- +
- +
diff --git a/seed/static/seed/partials/inventory_settings.html b/seed/static/seed/partials/inventory_settings.html index 36e3104e06..1d43663a9d 100644 --- a/seed/static/seed/partials/inventory_settings.html +++ b/seed/static/seed/partials/inventory_settings.html @@ -23,25 +23,27 @@

Column Order/Visibility Column List Profile: - - - - + + + + + +

@@ -53,7 +55,7 @@

Add Shared Tax Lots

Add Shared Properties

diff --git a/seed/static/seed/partials/mapping.html b/seed/static/seed/partials/mapping.html index c51f2b7817..87e5e74dd6 100644 --- a/seed/static/seed/partials/mapping.html +++ b/seed/static/seed/partials/mapping.html @@ -155,6 +155,9 @@

Please review SEED column names or the column names of the file being imported. Duplicate values are not allowed in either case.

+
+ You do not have access to create columns. +
Please review SEED column names or the column names of the file being imported. Column names that match an existing Derived Column name are not allowed.
@@ -199,17 +202,17 @@

- + - + - + kgCO2e/m**2/year - + {$:: col.name $} {$:: cell_value $} @@ -276,7 +279,7 @@

+ + + diff --git a/seed/static/seed/partials/new_member_modal.html b/seed/static/seed/partials/new_member_modal.html index 99f83c9d8b..0194ef4fa7 100644 --- a/seed/static/seed/partials/new_member_modal.html +++ b/seed/static/seed/partials/new_member_modal.html @@ -13,23 +13,38 @@
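The mapping warnings above enforce two naming rules: a mapped-to SEED column name may not duplicate another mapping target, and may not collide with an existing Derived Column name. A compact sketch of those checks in Python; the inputs and helper name are illustrative, not the controller's real data structures:

# Two naming rules implied by the mapping warnings: no duplicate targets,
# and no collision with an existing derived column name.
def mapping_errors(target_names, derived_column_names):
    errors = []
    seen = set()
    for name in target_names:
        if name in seen:
            errors.append(f"duplicate mapping target: {name}")
        seen.add(name)
        if name in derived_column_names:
            errors.append(f"collides with derived column: {name}")
    return errors

assert mapping_errors(['site_eui', 'site_eui'], {'total_ghg'}) == ['duplicate mapping target: site_eui']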

- +

Last name is required.

- +

Enter a valid email address.

- + +
+ +
+
+
+ +
+ +
+
+
+
- +
+
+ {$ error_message $} +
- + @@ -40,12 +40,12 @@

diff --git a/seed/static/seed/partials/organization_access_level_tree.html b/seed/static/seed/partials/organization_access_level_tree.html new file mode 100644 index 0000000000..bae403b9e2 --- /dev/null +++ b/seed/static/seed/partials/organization_access_level_tree.html @@ -0,0 +1,149 @@ +
+ +
+
+
+
+
+
+
+
+

{$:: 'Access Level Tree' | translate $}

+
+
+
+
+
+ +
+

ACCESS_LEVEL_TREE_HELP_1

+

Access Levels (AL)

+

ACCESS_LEVEL_DESCRIPTION

+

Access Level Instances (ALI)

+

ACCESS_LEVEL_INSTANCE_DESCRIPTION

+

Example Access Level Tree

+ + +

Managing the Access Level Tree

+

ACCESS_LEVEL_ORG_OWNER

+
    +
  • {$:: 'Step 1: Add Access Levels' | translate $} - ACCESS_LEVEL_OWNER_STEP1
  • {$:: 'Step 2: Upload Access Level Instances' | translate $} - ACCESS_LEVEL_OWNER_STEP2
+

ACCESS_LEVEL_OWNER_END

+
+
+

Access Levels

+ +
+
- -
+ + + + + + +
Level {$:: ($index + 1) $} Name
+ {$:: level $} +
+
+ +
+

Access Level Instance Tree

+ + +
+
+ {$:: 'Level' | translate $} 1: {$ access_level_names[0] $} +
+
+

ACCESS_LEVEL_ROOT_TEXT

+ {$:: access_level_tree[0].data.name $} + + + + +
+
+
+
+
+
+ + +
+
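Step 2 on the page above has owners upload access level instances in bulk. Assuming the spreadsheet has one column per access level and each row spells out an instance's ancestry in full (an assumption for illustration; the importer's actual format is not shown in this diff), a rough Python sketch of expanding such rows into the unique tree paths to insert:

# Expand spreadsheet rows (one column per access level, ancestry fully
# defined left-to-right) into the unique paths a tree importer would create.
# Column names and row shape are hypothetical.
def rows_to_paths(rows, level_names):
    paths = set()
    for row in rows:
        names = [row[level] for level in level_names if row.get(level)]
        # every prefix of the row is itself an access level instance
        for i in range(1, len(names) + 1):
            paths.add(tuple(names[:i]))
    return sorted(paths)

rows = [
    {'Organization': 'root', 'Campus': 'East Campus', 'Building': 'Hall A'},
    {'Organization': 'root', 'Campus': 'East Campus', 'Building': 'Hall B'},
]
for path in rows_to_paths(rows, ['Organization', 'Campus', 'Building']):
    print(' / '.join(path))
# root
# root / East Campus
# root / East Campus / Hall A
# root / East Campus / Hall B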
+ + + diff --git a/seed/static/seed/partials/organization_add_access_level_instance_modal.html b/seed/static/seed/partials/organization_add_access_level_instance_modal.html new file mode 100644 index 0000000000..947a3336f9 --- /dev/null +++ b/seed/static/seed/partials/organization_add_access_level_instance_modal.html @@ -0,0 +1,38 @@ + + + diff --git a/seed/static/seed/partials/organization_add_access_level_modal.html b/seed/static/seed/partials/organization_add_access_level_modal.html new file mode 100644 index 0000000000..9099f6c54c --- /dev/null +++ b/seed/static/seed/partials/organization_add_access_level_modal.html @@ -0,0 +1,31 @@ + + + diff --git a/seed/static/seed/partials/organization_delete_access_level_instance_modal.html b/seed/static/seed/partials/organization_delete_access_level_instance_modal.html new file mode 100644 index 0000000000..6223327b58 --- /dev/null +++ b/seed/static/seed/partials/organization_delete_access_level_instance_modal.html @@ -0,0 +1,24 @@ + + + + + + diff --git a/seed/static/seed/partials/organization_edit_access_level_instance_modal.html b/seed/static/seed/partials/organization_edit_access_level_instance_modal.html new file mode 100644 index 0000000000..ad44728407 --- /dev/null +++ b/seed/static/seed/partials/organization_edit_access_level_instance_modal.html @@ -0,0 +1,15 @@ + + + diff --git a/seed/static/seed/partials/portfolio_summary.html b/seed/static/seed/partials/portfolio_summary.html new file mode 100644 index 0000000000..5378b13414 --- /dev/null +++ b/seed/static/seed/partials/portfolio_summary.html @@ -0,0 +1,89 @@ +
+ +
+ +
+
+
+
+
+ {$:: 'The portfolio summary page compares 2 cycles to calculate progress toward an Energy Use Intensity reduction goal.' | translate $} + {$:: 'Cycle selection and goal details can be customized by clicking the Configure Goals button below.' | translate $} +
+
+
+ +
+ +
+ +
+
+ +
+
+
+
{$ detail[0] $}:
+
{$ detail[1] $}
+
+
+
+
+
{$ detail[0] $}:
+
{$ detail[1] $}
+
+
+
+
+ +
+

Portfolio Summary

+
+
+

{$:: 'Loading Summary Data...' | translate $}

+ + + +
+
+
+ + +
+
+ +
+
+ {$ inventory_pagination.start $}-{$ inventory_pagination.end $} of {$ inventory_pagination.total $} + + ({$ selected_display $} - Select All - Select None) + + + +
+
+ +
+
+
+
+
+
+

Loading Data...

+ + + +
+
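The portfolio summary partial above reports progress toward an EUI reduction goal by comparing a baseline cycle to the current cycle. The implied arithmetic, sketched in Python with hypothetical field names (the actual aggregation happens server-side and is not part of this diff):

# Percent EUI reduction from baseline to current cycle; positive means
# energy use intensity dropped. Returns None when no baseline exists.
def eui_reduction_pct(baseline_eui, current_eui):
    if not baseline_eui:
        return None  # no baseline EUI recorded, so progress is undefined
    return round(100 * (baseline_eui - current_eui) / baseline_eui, 1)

assert eui_reduction_pct(80.0, 68.0) == 15.0  # 15% toward, e.g., a 20% goal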
diff --git a/seed/static/seed/partials/ubid_admin.html b/seed/static/seed/partials/ubid_admin.html index f1ba93882d..caa7eb5c7a 100644 --- a/seed/static/seed/partials/ubid_admin.html +++ b/seed/static/seed/partials/ubid_admin.html @@ -1,7 +1,7 @@
-
+
@@ -28,10 +28,10 @@
Preferred
- - diff --git a/seed/static/seed/partials/update_item_labels_modal.html b/seed/static/seed/partials/update_item_labels_modal.html index aab313af17..6cd5753fde 100644 --- a/seed/static/seed/partials/update_item_labels_modal.html +++ b/seed/static/seed/partials/update_item_labels_modal.html @@ -3,7 +3,7 @@