diff --git a/search/admin.py b/search/admin.py index 36e2c54..086dd7f 100644 --- a/search/admin.py +++ b/search/admin.py @@ -1,7 +1,7 @@ from django.contrib import admin from django.urls import reverse from django.utils.html import format_html -from .models import Search, Field, Code, ChronologicCode, Setting, SearchLog +from .models import Search, Field, Code, ChronologicCode, Setting, Event # -- Searches --- @@ -114,11 +114,11 @@ class SettingsAdmin(admin.ModelAdmin): save_as = True -class SearchLogsAdmin(admin.ModelAdmin): +class EventAdmin(admin.ModelAdmin): - list_display = ['search_id', 'log_id', 'log_timestamp', 'category'] - list_filter = ['search_id', 'category'] - search_fields = ['search_id', 'log_id', 'category'] + list_display = ['search_id', 'component_id', 'category', 'title', 'event_timestamp'] + list_filter = ['search_id', 'category', 'component_id'] + search_fields = ['search_id', 'title', 'category'] admin.site.register(Search, SearchAdmin) @@ -126,4 +126,5 @@ class SearchLogsAdmin(admin.ModelAdmin): admin.site.register(Code, CodeAdmin) admin.site.register(ChronologicCode, ChronologicCodesAdmin) admin.site.register(Setting, SettingsAdmin) -admin.site.register(SearchLog, SearchLogsAdmin) +admin.site.register(Event, EventAdmin) + diff --git a/search/management/commands/import_data_csv.py b/search/management/commands/import_data_csv.py index 664b3c0..abcab93 100644 --- a/search/management/commands/import_data_csv.py +++ b/search/management/commands/import_data_csv.py @@ -132,7 +132,7 @@ def handle(self, *args, **options): solr_items = [] - # Clear out the Solr core when ioading default data + # Clear out the Solr core when loading default data if options['nothing_to_report']: solr.delete_doc_by_query(self.solr_core, "format:NTR") @@ -171,12 +171,14 @@ def handle(self, *args, **options): elif 'quarter' in csv_record: if 'fiscal_year' in csv_record: record_id = "{0}-{1}-{2}".format(csv_record['owner_org'], - csv_record['fiscal_year'], 
csv_record['quarter']) + csv_record['fiscal_year'], + csv_record['quarter']) elif 'year' in csv_record: record_id = "{0}-{1}-{2}".format(csv_record['owner_org'], csv_record['year'], csv_record['quarter']) else: - record_id = "{0}-{1}-{2}".format(csv_record['owner_org'], csv_record['quarter']) + record_id = "{0}-{1}-{2}".format( + csv_record['owner_org'], csv_record['quarter']) if options['report_duplicates']: if record_id in ids: @@ -191,7 +193,8 @@ def handle(self, *args, **options): search_type_plugin = 'search.plugins.{0}'.format(options['search']) if search_type_plugin in self.discovered_plugins: - include, filtered_record = self.discovered_plugins[search_type_plugin].filter_csv_record( + include, filtered_record = self.discovered_plugins[ + search_type_plugin].filter_csv_record( csv_record, self.search_target, self.csv_fields, self.field_codes, 'NTR' if options['nothing_to_report'] else '') if not include: @@ -206,9 +209,10 @@ def handle(self, *args, **options): # Verify that it is a known field fields_to_ignore = ( - 'owner_org_title', 'owner_org', 'record_created', 'record_modified', 'user_modified') + 'owner_org_title', 'owner_org', 'record_created', 'record_modified', + 'user_modified') fields_not_loaded = ( - 'owner_org_title', 'record_created', 'user_modified', 'record_modified',) + 'owner_org_title', 'record_created', 'user_modified', 'record_modified',) if csv_field not in self.csv_fields and csv_field not in fields_to_ignore: self.logger.error("CSV files contains unknown field: {0}".format(csv_field)) exit(-1) @@ -271,8 +275,10 @@ def handle(self, *args, **options): locale='fr_CA') else: csv_decimal = parse_decimal(solr_record[csv_field], locale='en_US') - solr_record[csv_field + '_en'] = format_decimal(csv_decimal, locale='en_CA') - solr_record[csv_field + '_fr'] = format_decimal(csv_decimal, locale='fr_CA') + solr_record[ + csv_field + '_en'] = format_decimal(csv_decimal, locale='en_CA') + solr_record[ + csv_field + '_fr'] = 
format_decimal(csv_decimal, locale='fr_CA') else: solr_record[csv_field + '_en'] = '' solr_record[csv_field + '_fr'] = '' @@ -368,7 +374,8 @@ def handle(self, *args, **options): self.search_target, self.csv_fields, self.field_codes, - 'NTR' if options[ + 'NTR' if + options[ 'nothing_to_report'] else '') if bd_writer is None: @@ -424,7 +431,6 @@ def handle(self, *args, **options): # Write and remaining records to Solr and commit - try: if len(solr_items) > 0: solr.index(self.solr_core, solr_items) diff --git a/search/management/commands/search_run_tests.py b/search/management/commands/search_run_tests.py index 9b3d321..cd4e247 100644 --- a/search/management/commands/search_run_tests.py +++ b/search/management/commands/search_run_tests.py @@ -4,7 +4,7 @@ import json import logging from os import path -from search.models import Search, Field, Code, Setting, SearchLog +from search.models import Search, Field, Code, Setting from search.query import get_query_fields import traceback import SolrClient diff --git a/search/models/models.py b/search/models/models.py index bc6175e..160c7d2 100644 --- a/search/models/models.py +++ b/search/models/models.py @@ -424,16 +424,24 @@ class Setting(models.Model): value = models.CharField(max_length=1024, verbose_name="Setting Value", blank=True, null=True) -class SearchLog(models.Model): - +class Event(models.Model): + EVENT_CATEGORY = [ + ('notset', 'Undefined / Indéterminé'), + ('error', 'Error / Erreur'), + ('success', 'Success / Succès'), + ('info', 'Information'), + ('warning', 'Warning / Notification'), + ('critical', 'Critical / Urgent'), + ] id = models.AutoField(primary_key=True) - search_id = models.CharField(max_length=32, verbose_name="Search ID") - log_id = models.CharField(max_length=128, verbose_name="Log entry identifier") - log_timestamp = models.DateTimeField() - category = models.CharField(max_length=128, verbose_name="Log entry category", default="", blank=True) - message = models.TextField() + search_id = 
models.CharField(max_length=32, verbose_name="Search ID", blank=False, default="None") + component_id = models.CharField(max_length=64, verbose_name="Search 2 Component", blank=False, default="None") + title = models.CharField(max_length=512, verbose_name="Log entry name or title", blank=False) + event_timestamp = models.DateTimeField() + category = models.CharField(max_length=12, verbose_name="Category", default="notset", blank=False, choices=EVENT_CATEGORY) + message = models.TextField(blank=True, default="", verbose_name="Detailed Event Message") def save(self, *args, **kwargs): - if not self.id and not self.log_timestamp: - self.log_timestamp = utimezone.now() + if not self.id and not self.event_timestamp: + self.event_timestamp = utimezone.now() super().save(*args, **kwargs) diff --git a/search/tasks.py b/search/tasks.py index 16356c4..eddab00 100644 --- a/search/tasks.py +++ b/search/tasks.py @@ -7,7 +7,7 @@ import hashlib import logging import os -from .models import SearchLog +from .models import Event from SolrClient import SolrClient, SolrResponse from SolrClient.exceptions import ConnectionError, SolrError import time @@ -67,23 +67,23 @@ def save_search_logs_to_file(): logger = logging.getLogger(__name__) one_week_ago = datetime.today() - timedelta(days=settings.SEARCH_LOGGING_ARCHIVE_AFTER_X_DAYS) - logger.info(f'Archiving Search Log entries older than {one_week_ago.strftime("%A, %B %d, %Y")} to {settings.SEARCH_LOGGING_ARCHIVE_FILE}') + logger.info(f'Archiving Search Event Log entries older than {one_week_ago.strftime("%A, %B %d, %Y")} to {settings.SEARCH_LOGGING_ARCHIVE_FILE}') # For new log files, write out the header if not os.path.exists('settings.SEARCH_LOGGING_ARCHIVE_FILE'): # Write out the header with the UTF8 byte-order marker for Excel first with open(settings.SEARCH_LOGGING_ARCHIVE_FILE, 'w', newline='', encoding='utf8') as csv_file: log_writer = csv.writer(csv_file, dialect='excel', quoting=csv.QUOTE_NONE) - headers = ['id', 'search_id', 
'log_id', 'log_timestamp', 'message', 'category'] + headers = ['id', 'search_id', 'component_id', 'title', 'event_timestamp', 'category', 'message'] headers[0] = u'\N{BOM}' + headers[0] log_writer.writerow(headers) # Use a CSV writer with forced quoting for the body of the file with open(settings.SEARCH_LOGGING_ARCHIVE_FILE, 'a', newline='', encoding='utf8') as csv_file: log_writer = csv.writer(csv_file, dialect='excel', quoting=csv.QUOTE_ALL) - older_logs = SearchLog.objects.order_by('log_timestamp').filter(log_timestamp__lte=one_week_ago) + older_logs = Event.objects.order_by('event_timestamp').filter(event_timestamp__lte=one_week_ago) for log in older_logs: - log_entry = [log.id, log.search_id, log.log_id, log.log_timestamp, log.message, log.category] + log_entry = [log.id, log.search_id, log.component_id, log.title, log.event_timestamp, log.category, log.message] log_writer.writerow(log_entry) log.delete() logger.info(f'{older_logs.count()} log entries purged.')