diff --git a/django_api/etools_prp/apps/core/admin.py b/django_api/etools_prp/apps/core/admin.py index f66a48382..8a070200f 100644 --- a/django_api/etools_prp/apps/core/admin.py +++ b/django_api/etools_prp/apps/core/admin.py @@ -3,7 +3,7 @@ from leaflet.admin import LeafletGeoAdmin from unicef_locations.models import CartoDBTable -from .cartodb import update_sites_from_cartodb +from .cartodb import import_locations from .forms import CartoDBTableForm from .models import Location, PRPRoleOld, Realm, ResponsePlan, Workspace @@ -56,7 +56,7 @@ def parent_table_name(self, obj): def import_sites(self, request, queryset): for table in queryset: - update_sites_from_cartodb.delay(table.pk) + import_locations.delay(table.pk) class WorkspaceAdmin(admin.ModelAdmin): diff --git a/django_api/etools_prp/apps/core/cartodb.py b/django_api/etools_prp/apps/core/cartodb.py index 249000c72..248c4044f 100644 --- a/django_api/etools_prp/apps/core/cartodb.py +++ b/django_api/etools_prp/apps/core/cartodb.py @@ -11,6 +11,7 @@ from pyrestcli.auth import BaseAuthClient from unicef_locations.models import CartoDBTable +from etools_prp.apps.core.locations_sync import PRPLocationSynchronizer from etools_prp.apps.core.models import Location logger = get_task_logger('core.cartodb') @@ -153,6 +154,11 @@ def create_location(pcode, return True, sites_not_added, sites_created, sites_updated +@shared_task +def import_locations(carto_table_pk): + PRPLocationSynchronizer(carto_table_pk).sync() + + @shared_task def update_sites_from_cartodb(carto_table_pk): """ diff --git a/django_api/etools_prp/apps/core/locations_sync.py b/django_api/etools_prp/apps/core/locations_sync.py new file mode 100644 index 000000000..27fff694c --- /dev/null +++ b/django_api/etools_prp/apps/core/locations_sync.py @@ -0,0 +1,198 @@ +import logging +from datetime import datetime + +from django.db import IntegrityError +from django.db.models import F, Value +from django.db.models.functions import Concat + +from carto.exceptions 
import CartoException +from celery.utils.log import get_task_logger +from unicef_locations.exceptions import InvalidRemap +from unicef_locations.synchronizers import LocationSynchronizer +from unicef_locations.utils import get_location_model + +from etools_prp.apps.core.models import Workspace +from etools_prp.apps.utils.query import has_related_records + +logger = get_task_logger(__name__) + + +class PRPLocationSynchronizer(LocationSynchronizer): + """eTools version of synchronizer with use the VisionSyncLog to store log execution""" + + def __init__(self, pk) -> None: + super().__init__(pk) + self.workspace = Workspace.objects.get(title__iexact=self.carto.display_name.partition("_")[0]) + self.qs = get_location_model().objects.filter(workspaces=self.workspace) + + def apply_remap(self, old2new): + """ + Use remap table to swap p-codes + """ + logging.info('Apply Remap') + for old, new in old2new.items(): + if old != new: + try: + old_location = self.qs.get(p_code=old, is_active=True) + except get_location_model().DoesNotExist: + raise InvalidRemap(f'Old location {old} does not exist or is not active') + except get_location_model().MultipleObjectsReturned: + locs = ', '.join([loc.name for loc in self.qs.filter(p_code=old)]) + raise InvalidRemap(f'Multiple active Location exist for pcode {old}: {locs}') + old_location.p_code = new + old_location.save() + logger.info(f'Update through remapping {old} -> {new}') + + def create_or_update_locations(self, batch_size=500): + """ + Create or update locations based on p-code (only active locations are considerate) + + """ + logging.info('Create/Update new locations') + rows = self.get_cartodb_locations() + new, updated, skipped, error = 0, 0, 0, 0 + logging.info(f'Total Rows {len(rows)}') + logging.info(f'Batch size {batch_size}') + for idx in range(0, len(rows), batch_size): + batch = rows[idx:idx + batch_size] + logging.info(f'processing batch {idx+1}') + batch = list(batch) + indexed_batch = 
{str(item[self.carto.pcode_col]): item for item in batch} + # first get all the pcodes intended to be updated from the rows + all_new_pcodes = [r[self.carto.pcode_col] for r in batch] + # get all records that exist for these pcodes that will need to be adjusted + existing_loc_qs = self.qs.filter(p_code__in=all_new_pcodes, is_active=True) + # from batch keep all rows that are new + rows_to_create = [row for row in batch if row[self.carto.pcode_col] + not in existing_loc_qs.values_list("p_code", flat=True)] + + # get_all_parents and map them by p_code: + parent_pcodes = [] + for row in batch: + parent_pcode = row[self.carto.parent_code_col] if self.carto.parent_code_col in row else None + if parent_pcode: + parent_pcodes.append(row[self.carto.parent_code_col]) + parents_qs = self.qs.filter(p_code__in=parent_pcodes, is_active=True) + # parent location dict {pcode: item} + parents = {r.p_code: r for r in parents_qs.all()} + + # make a list of tuples (row_from_carto, existing_location_object) to iterate over and update + update_tuples = [(indexed_batch[loc.p_code], loc) for loc in existing_loc_qs.all()] + locs_to_update = [] + for row, existing_loc in update_tuples: + pcode = row[self.carto.pcode_col] + name = row[self.carto.name_col] + geom = row['the_geom'] + parent_code = row[self.carto.parent_code_col] if self.carto.parent_code_col in row else None + if all([name, pcode, geom]): + geom_key = 'point' if 'Point' in geom else 'geom' + existing_loc.admin_level = self.carto.admin_level + existing_loc.admin_level_name = self.carto.admin_level_name + existing_loc.name = name + existing_loc.parent = parents.get(parent_code, None) if parent_code else None + setattr(existing_loc, geom_key, geom) + locs_to_update.append(existing_loc) + updated += 1 + else: + skipped += 1 + logger.info(f"Skipping row pcode {pcode}") + + locs_to_create = [] + for row in rows_to_create: + pcode = row[self.carto.pcode_col] + name = row[self.carto.name_col] + geom = row['the_geom'] + parent_code = 
row[self.carto.parent_code_col] if self.carto.parent_code_col in row else None + geom_key = 'point' if 'Point' in geom else 'geom' + if all([name, pcode, geom]): + values = { + 'p_code': pcode, + 'is_active': True, + 'admin_level': self.carto.admin_level, + 'admin_level_name': self.carto.admin_level_name, + 'name': name, + geom_key: geom, + 'parent': parents.get(parent_code, None) if parent_code else None + } + # set everything to 0 in the tree, we'll rebuild later + for key in ['lft', 'rght', 'level', 'tree_id']: + values[key] = 0 + new_rec = get_location_model()(**values) + locs_to_create.append(new_rec) + new += 1 + else: + skipped += 1 + logger.info(f"Skipping row pcode {pcode}") + + # update the records: + try: + get_location_model().objects.bulk_update(locs_to_update, fields=['p_code', 'is_active', 'admin_level', + 'admin_level_name', 'name', + 'geom', 'point', 'parent']) + except IntegrityError as e: + message = "Duplicates found on update" + logger.exception(e) + logger.exception(message) + raise CartoException(message) + + try: + newly_created = get_location_model().objects.bulk_create(locs_to_create) + except IntegrityError as e: + message = "Duplicates found on create" + logger.exception(e) + logger.exception(message) + raise CartoException(message) + else: + for loc in newly_created: + loc.workspaces.add(self.workspace) + + logger.info("Rebuilding the tree, have patience") + get_location_model().objects.rebuild() + logger.info("Rebuilt") + return new, updated, skipped, error + + def clean_upper_level(self): + """ + Check upper level active locations with no reference + - delete if is leaf + - deactivate if all children are inactive (doesn't exist an active child) + """ + logging.info('Clean upper level') + qs = self.qs.filter(admin_level=self.carto.admin_level - 1, is_active=False) + number_of_inactive_parents = qs.count() + # by default has_related_records excludes self. 
+ # so we need to make sure that the locations have no children (only leaves) + qs = qs.filter(children__isnull=True) + has_related, affected = has_related_records(qs, get_location_model(), model_relations_to_ignore=[Workspace]) + # exclude all locations in use + qs = qs.exclude(pk__in=affected) + logging.info(f'deleting {qs.count()} records out of {number_of_inactive_parents} inactive locs') + qs.delete() + logging.info('Parents are now gone!') + + def handle_obsolete_locations(self, to_deactivate): + """ + Handle obsolete locations: + - deactivate referenced locations + - delete non referenced locations + """ + logging.info('Clean Obsolete Locations') + loc_qs = self.qs.filter(p_code__in=to_deactivate) + + # then instead of using collector to chase cascading or other relations use helper + has_related, affected = has_related_records(loc_qs, get_location_model(), model_relations_to_ignore=[Workspace]) + logger.info(f'Deactivating {affected}') + if has_related: + loc_qs.filter(pk__in=affected).update(is_active=False, + name=Concat(F("name"), + Value(f" [{datetime.today().strftime('%Y-%m-%d')}]"))) + logger.info('Deleting the rest') + else: + logger.info('Deleting all') + # won't be able to delete the ones with children if the children are somehow related. our check does not + # look for nested relations to self. + # update all child locations for the "obsolete locations", leave them orphan; - with the understanding that + # all previous parent relationships are stored in a separate db for reference or remapped adequately + # on subsequent children updates. 
+        get_location_model().objects.filter(parent__in=loc_qs).update(parent=None) + loc_qs.exclude(pk__in=affected).delete() diff --git a/django_api/etools_prp/apps/unicef/views.py b/django_api/etools_prp/apps/unicef/views.py index 2f2f3185e..a01aa2884 100644 --- a/django_api/etools_prp/apps/unicef/views.py +++ b/django_api/etools_prp/apps/unicef/views.py @@ -911,8 +911,10 @@ def _calculate_report_location_totals_per_reports(self, indicator_report, hf_rep else: calculated[loc_id]['total']['d'] += ild.disaggregation['()']['d'] - - calculated[loc_id]['total']['c'] = convert_string_number_to_float(calculated[loc_id]['total']['v']) / calculated[loc_id]['total']['d'] + if calculated[loc_id]['total']['d'] == 0: + calculated[loc_id]['total']['c'] = 0 + else: + calculated[loc_id]['total']['c'] = convert_string_number_to_float(calculated[loc_id]['total']['v']) / calculated[loc_id]['total']['d'] if calculated[loc_id]['total']['c'] is None: calculated[loc_id]['total']['c'] = 0 @@ -936,9 +938,11 @@ def _calculate_report_location_totals_per_reports(self, indicator_report, hf_rep else: calculated[loc_id]['data'][key]['d'] += ild.disaggregation[key]['d'] - - calculated[loc_id]['data'][key]['c'] = convert_string_number_to_float(calculated[loc_id]['data'][key]['v']) \ / calculated[loc_id]['data'][key]['d'] + if calculated[loc_id]['data'][key]['d'] == 0: + calculated[loc_id]['data'][key]['c'] = 0 + else: + calculated[loc_id]['data'][key]['c'] = convert_string_number_to_float( + calculated[loc_id]['data'][key]['v']) / calculated[loc_id]['data'][key]['d'] if calculated[loc_id]['data'][key]['c'] is None: calculated[loc_id]['data'][key]['c'] = 0 diff --git a/django_api/etools_prp/apps/utils/query.py b/django_api/etools_prp/apps/utils/query.py new file mode 100644 index 000000000..9b17879ed --- /dev/null +++ b/django_api/etools_prp/apps/utils/query.py @@ -0,0 +1,53 @@ +from django.contrib.contenttypes.models import ContentType +from django.db import models + + +def 
has_related_records(queryset, model, avoid_self=True, model_relations_to_ignore=None): + ''' + Function that returns a tuple, whether the queryset has records that other models foreign key into (or m2m) + (Bool - whether it has any records, [list of pks] - all records that other model instances relating to them) + ''' + all_impacted_records = [] + # Get all related objects' content types + related_content_types = ContentType.objects.filter( + models.Q(model__in=[field.related_model._meta.object_name.lower() for field in model._meta.get_fields() + if field.one_to_many or field.many_to_many]) + ) + # filter out all deprecated models that no longer exist. rct.model_class would return None if they are no longer + related_content_types = [rct for rct in related_content_types if rct.model_class()] + if avoid_self: + related_content_types = [rct for rct in related_content_types if rct.model_class() != model] + if model_relations_to_ignore: + related_content_types = [rct for rct in related_content_types if rct.model_class() not in model_relations_to_ignore] + # Iterate over related content types and check for related records + for related_content_type in related_content_types: + related_model = related_content_type.model_class() + rel_model_fields = related_model._meta.get_fields(include_hidden=True) + rel_field_names = [f.name for f in rel_model_fields if f.remote_field and f.remote_field.model == model] + # Check if there are any related records for the current queryset + for related_field_name in rel_field_names: + # (f'model {related_model} field: {related_field_name}') - "model Intervention field flat_locations" + related_qs = related_model.objects.filter(**{f'{related_field_name}__in': queryset}) + if related_qs.exists(): + all_impacted_records += related_qs.values_list(f'{related_field_name}__pk', flat=True) + if all_impacted_records: + return True, list(set(all_impacted_records)) + return False, [] + + +def 
get_all_items_related(record): + results = [] + model = record._meta.model + related_content_types = ContentType.objects.filter( + models.Q(model__in=[field.related_model._meta.object_name.lower() for field in model._meta.get_fields() + if field.one_to_many or field.many_to_many]) + ) + related_content_types = [rct for rct in related_content_types if rct.model_class()] + for related_content_type in related_content_types: + related_model = related_content_type.model_class() + rel_model_fields = related_model._meta.get_fields(include_hidden=True) + rel_field_names = [f.name for f in rel_model_fields if f.remote_field and f.remote_field.model == model] + for related_field_name in rel_field_names: + if related_model.objects.filter(**{f'{related_field_name}__in': [record]}).exists(): + results.append((related_model, related_model.objects.filter(**{f'{related_field_name}__in': [record]}))) + return results diff --git a/django_api/etools_prp/config/settings.py b/django_api/etools_prp/config/settings.py index d44c0226f..df9215a20 100644 --- a/django_api/etools_prp/config/settings.py +++ b/django_api/etools_prp/config/settings.py @@ -4,6 +4,7 @@ import environ import sentry_sdk +from corsheaders.defaults import default_headers from cryptography.hazmat.backends import default_backend from cryptography.x509 import load_pem_x509_certificate # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
@@ -139,6 +140,11 @@ 'https://etools-dev.unicef.org', ] +CORS_ALLOW_HEADERS = ( + *default_headers, + "language", +) + ROOT_URLCONF = 'etools_prp.config.urls' TEMPLATES = [ diff --git a/frontend_ip/src_ts/elements/ip-reporting/js/pd-output-list-toolbar-functions.ts b/frontend_ip/src_ts/elements/ip-reporting/js/pd-output-list-toolbar-functions.ts index 9ed6668b6..4811c4bb2 100644 --- a/frontend_ip/src_ts/elements/ip-reporting/js/pd-output-list-toolbar-functions.ts +++ b/frontend_ip/src_ts/elements/ip-reporting/js/pd-output-list-toolbar-functions.ts @@ -34,7 +34,7 @@ export function computeCanRefresh(report: GenericObject, programmeDocument: Gene } export function computeShowRefresh(roles: any[]) { - return roles.every(function (role) { - return role.role !== 'IP_ADMIN' && role.role !== 'IP_VIEWER'; + return roles.some(function (role) { + return role.role === 'IP_AUTHORIZED_OFFICER' || role.role === 'IP_EDITOR'; }); } diff --git a/frontend_ip/src_ts/etools-prp-common b/frontend_ip/src_ts/etools-prp-common index e50015483..f3292e0bc 160000 --- a/frontend_ip/src_ts/etools-prp-common +++ b/frontend_ip/src_ts/etools-prp-common @@ -1 +1 @@ -Subproject commit e50015483ead60a26458195647f06abd30ad1b2b +Subproject commit f3292e0bc71d1d43dabda6fbe995020440df2bbe