diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6c67c97d..32ac72b3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,13 +10,13 @@ repos: - id: check-yaml - repo: https://github.com/asottile/pyupgrade - rev: v3.15.2 + rev: v3.17.0 hooks: - id: pyupgrade args: [--py310-plus] - repo: https://github.com/psf/black - rev: 24.4.2 + rev: 24.8.0 hooks: - id: black @@ -27,7 +27,7 @@ repos: args: ["--profile", "black"] - repo: https://github.com/PyCQA/flake8 - rev: 7.0.0 + rev: 7.1.1 hooks: - id: flake8 args: ["--config=setup.cfg","--per-file-ignores=boranga/settings.py:F405,E402","--ignore=E203, W503"] diff --git a/Dockerfile b/Dockerfile index b0d10582..ecb7f918 100644 --- a/Dockerfile +++ b/Dockerfile @@ -85,16 +85,10 @@ RUN chmod 755 /startup.sh && \ mkdir /app && \ chown -R oim.oim /app && \ ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone && \ - wget https://raw.githubusercontent.com/dbca-wa/wagov_utils/main/wagov_utils/bin/health_check.sh -O /bin/health_check.sh && \ - chmod 755 /bin/health_check.sh && \ - wget https://raw.githubusercontent.com/dbca-wa/wagov_utils/main/wagov_utils/bin-python/scheduler/scheduler.py -O /bin/scheduler.py && \ - chmod 755 /bin/scheduler.py && \ - mkdir /tmp/azcopy/ && \ - wget https://aka.ms/downloadazcopy-v10-linux -O /tmp/azcopy/azcopy.tar.gz && \ - cd /tmp/azcopy/ ; tar -xzvf azcopy.tar.gz && \ - cp /tmp/azcopy/azcopy_linux_amd64_10.*/azcopy /bin/azcopy && \ - chmod 755 /bin/azcopy && \ - rm -rf /tmp/azcopy/ + wget https://raw.githubusercontent.com/dbca-wa/wagov_utils/main/wagov_utils/bin/default_script_installer.sh -O /tmp/default_script_installer.sh && \ + chmod 755 /tmp/default_script_installer.sh && \ + /tmp/default_script_installer.sh && \ + rm -rf /tmp/* FROM configure_boranga as python_dependencies_boranga diff --git a/boranga/components/main/api.py b/boranga/components/main/api.py index 66b84b33..9914b171 100755 --- a/boranga/components/main/api.py +++ 
b/boranga/components/main/api.py @@ -3,16 +3,21 @@ import pyproj from django.conf import settings +from django.contrib.contenttypes.models import ContentType from django.core.cache import cache +from django_filters import rest_framework as filters +from rest_framework import filters as rest_framework_filters from rest_framework import viewsets from boranga import helpers from boranga.components.main.models import GlobalSettings, HelpTextEntry from boranga.components.main.serializers import ( + ContentTypeSerializer, GlobalSettingsSerializer, HelpTextEntrySerializer, ) from boranga.components.occurrence.models import Datum +from boranga.permissions import IsInternal logger = logging.getLogger(__name__) @@ -41,6 +46,15 @@ def get_queryset(self): return qs +class ContentTypeViewSet(viewsets.ReadOnlyModelViewSet): + queryset = ContentType.objects.filter(app_label="boranga") + serializer_class = ContentTypeSerializer + permission_classes = [IsInternal] + filter_backends = [filters.DjangoFilterBackend, rest_framework_filters.SearchFilter] + filterset_fields = ["app_label", "model"] + search_fields = ["^model"] + + class RetrieveActionLoggingViewsetMixin: """Mixin to automatically log user actions when a user retrieves an instance. 
diff --git a/boranga/components/main/serializers.py b/boranga/components/main/serializers.py index 1dec4269..2bc335d1 100755 --- a/boranga/components/main/serializers.py +++ b/boranga/components/main/serializers.py @@ -1,3 +1,7 @@ +import logging + +from django.contrib.contenttypes.models import ContentType +from django.db.models.fields.related import ForeignKey, OneToOneField from ledger_api_client.ledger_models import EmailUserRO from ledger_api_client.ledger_models import EmailUserRO as EmailUser from rest_framework import serializers @@ -7,7 +11,12 @@ GlobalSettings, HelpTextEntry, ) -from boranga.helpers import is_django_admin +from boranga.helpers import ( + get_openpyxl_data_validation_type_for_django_field, + is_django_admin, +) + +logger = logging.getLogger(__name__) class CommunicationLogEntrySerializer(serializers.ModelSerializer): @@ -109,3 +118,60 @@ class Meta: def get_user_can_administer(self, obj): return is_django_admin(self.context["request"]) + + +class ContentTypeSerializer(serializers.ModelSerializer): + model_fields = serializers.SerializerMethodField() + model_verbose_name = serializers.SerializerMethodField() + + class Meta: + model = ContentType + fields = "__all__" + + def get_model_verbose_name(self, obj): + if not obj.model_class(): + return None + return obj.model_class()._meta.verbose_name.title() + + def get_model_fields(self, obj): + if not obj.model_class(): + return [] + fields = obj.model_class()._meta.get_fields() + + def filter_fields(field): + return not field.auto_created and not ( + field.is_relation + and type(field) + not in [ + ForeignKey, + OneToOneField, + ] + ) + + fields = list(filter(filter_fields, fields)) + model_fields = [] + for field in fields: + display_name = ( + field.verbose_name.title() + if hasattr(field, "verbose_name") + else field.name + ) + field_type = str(type(field)).split(".")[-1].replace("'>", "") + choices = field.choices if hasattr(field, "choices") else None + allow_null = field.null if 
hasattr(field, "null") else None + max_length = field.max_length if hasattr(field, "max_length") else None + xlsx_validation_type = get_openpyxl_data_validation_type_for_django_field( + field + ) + model_fields.append( + { + "name": field.name, + "display_name": display_name, + "type": field_type, + "choices": choices, + "allow_null": allow_null, + "max_length": max_length, + "xlsx_validation_type": xlsx_validation_type, + } + ) + return model_fields diff --git a/boranga/components/occurrence/admin.py b/boranga/components/occurrence/admin.py index 23ebc409..12cb685a 100644 --- a/boranga/components/occurrence/admin.py +++ b/boranga/components/occurrence/admin.py @@ -16,6 +16,9 @@ LocationAccuracy, ObservationMethod, OccurrenceGeometry, + OccurrenceReportBulkImportSchema, + OccurrenceReportBulkImportSchemaColumn, + OccurrenceReportBulkImportTask, OccurrenceReportGeometry, OccurrenceSite, OccurrenceTenure, @@ -466,6 +469,25 @@ class CountedSubjectAdmin(ArchivableModelAdminMixin, DeleteProtectedModelAdmin): list_display = ["name"] +class OccurrenceReportBulkImportTaskAdmin(DeleteProtectedModelAdmin): + list_display = ["id", "datetime_queued", "processing_status"] + list_filter = ["processing_status", "datetime_completed"] + readonly_fields = ["datetime_queued"] + + +class OccurrenceReportBulkImportSchemaColumnInline(admin.StackedInline): + model = OccurrenceReportBulkImportSchemaColumn + extra = 0 + + +class OccurrenceReportBulkImportSchemaAdmin(DeleteProtectedModelAdmin): + list_display = ["group_type", "version", "datetime_created", "datetime_updated"] + readonly_fields = ["datetime_created", "datetime_updated"] + list_filter = ["group_type"] + inlines = [OccurrenceReportBulkImportSchemaColumnInline] + ordering = ["group_type", "version"] + + # Each of the following models will be available to Django Admin. 
admin.site.register(LandForm, LandFormAdmin) admin.site.register(RockType, RockTypeAdmin) @@ -494,3 +516,7 @@ class CountedSubjectAdmin(ArchivableModelAdminMixin, DeleteProtectedModelAdmin): admin.site.register(LocationAccuracy, LocationAccuracyAdmin) admin.site.register(WildStatus, WildStatusAdmin) admin.site.register(OccurrenceSite) +admin.site.register(OccurrenceReportBulkImportTask, OccurrenceReportBulkImportTaskAdmin) +admin.site.register( + OccurrenceReportBulkImportSchema, OccurrenceReportBulkImportSchemaAdmin +) diff --git a/boranga/components/occurrence/api.py b/boranga/components/occurrence/api.py index 152a5e87..cbb62a3d 100644 --- a/boranga/components/occurrence/api.py +++ b/boranga/components/occurrence/api.py @@ -12,6 +12,7 @@ from django.http import HttpResponse from django.shortcuts import get_object_or_404, redirect from django.urls import reverse +from django_filters import rest_framework as filters from ledger_api_client.ledger_models import EmailUserRO as EmailUser from multiselectfield import MultiSelectField from openpyxl import Workbook @@ -21,6 +22,7 @@ from rest_framework.decorators import action as detail_route from rest_framework.decorators import action as list_route from rest_framework.decorators import renderer_classes +from rest_framework.pagination import LimitOffsetPagination from rest_framework.permissions import AllowAny from rest_framework.renderers import JSONRenderer from rest_framework.response import Response @@ -65,6 +67,8 @@ OccurrenceReport, OccurrenceReportAmendmentRequest, OccurrenceReportAmendmentRequestDocument, + OccurrenceReportBulkImportSchema, + OccurrenceReportBulkImportTask, OccurrenceReportDocument, OccurrenceReportGeometry, OccurrenceReportReferral, @@ -110,6 +114,7 @@ IsOccurrenceReportReferee, OccurrenceObjectPermission, OccurrencePermission, + OccurrenceReportBulkImportPermission, OccurrenceReportCopyPermission, OccurrenceReportObjectPermission, OccurrenceReportPermission, @@ -132,6 +137,9 @@ 
OccurrenceDocumentSerializer, OccurrenceLogEntrySerializer, OccurrenceReportAmendmentRequestSerializer, + OccurrenceReportBulkImportSchemaColumnSerializer, + OccurrenceReportBulkImportSchemaSerializer, + OccurrenceReportBulkImportTaskSerializer, OccurrenceReportDocumentSerializer, OccurrenceReportLogEntrySerializer, OccurrenceReportProposalReferralSerializer, @@ -6230,3 +6238,112 @@ def retract(self, request, *args, **kwargs): instance.occurrence_report, context={"request": request} ) return Response(serializer.data, status=status.HTTP_200_OK) + + +class OccurrenceReportBulkImportTaskViewSet( + viewsets.GenericViewSet, + mixins.CreateModelMixin, + mixins.ListModelMixin, +): + queryset = OccurrenceReportBulkImportTask.objects.all() + permission_classes = [OccurrenceReportBulkImportPermission] + serializer_class = OccurrenceReportBulkImportTaskSerializer + filter_backends = [filters.DjangoFilterBackend] + filterset_fields = ["processing_status"] + pagination_class = LimitOffsetPagination + + def perform_create(self, serializer): + serializer.save(email_user=self.request.user.id) + + @detail_route(methods=["patch"], detail=True) + def retry(self, request, *args, **kwargs): + instance = self.get_object() + instance.retry() + return Response(status=status.HTTP_200_OK) + + @detail_route(methods=["patch"], detail=True) + def revert(self, request, *args, **kwargs): + instance = self.get_object() + instance.revert() + return Response(status=status.HTTP_200_OK) + + +class OccurrenceReportBulkImportSchemaViewSet( + viewsets.GenericViewSet, + mixins.RetrieveModelMixin, + mixins.CreateModelMixin, + mixins.UpdateModelMixin, + mixins.ListModelMixin, +): + queryset = OccurrenceReportBulkImportSchema.objects.all() + serializer_class = OccurrenceReportBulkImportSchemaSerializer + permission_classes = [OccurrenceReportBulkImportPermission] + filter_backends = [filters.DjangoFilterBackend] + filterset_fields = ["group_type"] + + def get_queryset(self): + qs = self.queryset + if not 
response["Content-Disposition"] = f"attachment; filename={filename}"
not column_data: + raise serializers.ValidationError("Column data is required") + serializer = OccurrenceReportBulkImportSchemaColumnSerializer( + instance, data=column_data + ) + serializer.is_valid(raise_exception=True) + serializer.save() + + serializer = OccurrenceReportBulkImportSchemaSerializer(instance) + return Response(serializer.data, status=status.HTTP_201_CREATED) diff --git a/boranga/components/occurrence/models.py b/boranga/components/occurrence/models.py index 799e77c2..793a008d 100644 --- a/boranga/components/occurrence/models.py +++ b/boranga/components/occurrence/models.py @@ -1,10 +1,16 @@ +import hashlib import json import logging +import os from abc import abstractmethod from datetime import datetime +from decimal import Decimal +import dateutil +import openpyxl import pyproj import reversion +import xlrd from colorfield.fields import ColorField from django.conf import settings from django.contrib.contenttypes import fields @@ -19,9 +25,14 @@ from django.db import models, transaction from django.db.models import CharField, Count, Func, Q from django.db.models.functions import Cast +from django.utils import timezone from ledger_api_client.ledger_models import EmailUserRO as EmailUser from ledger_api_client.managed_models import SystemGroup from multiselectfield import MultiSelectField +from openpyxl.styles import NamedStyle +from openpyxl.styles.fonts import Font +from openpyxl.utils import get_column_letter +from openpyxl.worksheet.datavalidation import DataValidation from boranga import exceptions from boranga.components.conservation_status.models import ProposalAmendmentReason @@ -287,6 +298,17 @@ class OccurrenceReport(SubmitterInformationModelMixin, RevisionedMixin): internal_application = models.BooleanField(default=False) site = models.TextField(null=True, blank=True) + # If this OCR was created as part of a bulk import task, this field will be populated + bulk_import_task = models.ForeignKey( + "OccurrenceReportBulkImportTask", + 
return f"occurrence_report/bulk-imports/{timezone.now()}/{filename}"
PROCESSING_STATUS_CHOICES = ( + (PROCESSING_STATUS_QUEUED, "Queued"), + (PROCESSING_STATUS_STARTED, "Started"), + (PROCESSING_STATUS_FAILED, "Failed"), + (PROCESSING_STATUS_COMPLETED, "Completed"), + (PROCESSING_STATUS_ARCHIVED, "Archived"), + ) + + processing_status = models.CharField( + max_length=20, + choices=PROCESSING_STATUS_CHOICES, + default=PROCESSING_STATUS_QUEUED, + ) + + class Meta: + app_label = "boranga" + verbose_name = "Occurrence Report Bulk Import Task" + verbose_name_plural = "Occurrence Report Bulk Import Tasks" + + def save(self, *args, **kwargs): + if not self.file_hash and self._file: + self._file.seek(0) + self.file_hash = hashlib.sha256(self._file.read()).hexdigest() + super().save(*args, **kwargs) + + @property + def file_name(self): + return os.path.basename(self._file.name) + + @property + def percentage_complete(self): + if self.rows: + return round((self.rows_processed / self.rows) * 100, 2) + return 0 + + @property + def total_time_taken(self): + if self.datetime_started and self.datetime_completed: + delta = self.datetime_completed - self.datetime_started + return delta.total_seconds() + return None + + @property + def total_time_taken_seconds(self): + if self.datetime_started and self.datetime_completed: + delta = self.datetime_completed - self.datetime_started + return delta.seconds + return None + + @property + def total_time_taken_minues(self): + if self.total_time_taken: + return round(self.total_time_taken / 60, 2) + return None + + @property + def total_time_taken_human_readable(self): + if self.total_time_taken is None: + return None + + if self.total_time_taken < 1: + return "Less than a second" + + if self.total_time_taken < 60: + return f"{self.total_time_taken} seconds" + if self.total_time_taken: + whole_minutes = int(self.total_time_taken // 60) + remaining_seconds = round(self.total_time_taken - (whole_minutes * 60)) + if not remaining_seconds: + return f"{whole_minutes} minutes" + return f"{whole_minutes} minutes and 
{remaining_seconds} seconds" + return None + + @property + def time_taken_per_row(self): + if self.datetime_started and self.datetime_completed: + value = self.total_time_taken / self.rows_processed + return round(value, 6) + return None + + @property + def file_size_bytes(self): + if self._file: + return self._file.size + return None + + @property + def file_size_megabytes(self): + if self.file_size_bytes: + return round(self.file_size_bytes / 1024 / 1024, 2) + return None + + @classmethod + def average_time_taken_per_row(cls): + task_count = cls.objects.filter( + datetime_completed__isnull=False, rows_processed__gt=0 + ).count() + if task_count == 0: + return None + + total_time_taken = 0 + for task in cls.objects.filter( + datetime_completed__isnull=False, rows_processed__gt=0 + ): + total_time_taken += task.time_taken_per_row + + return total_time_taken / task_count + + @property + def estimated_processing_time_seconds(self): + average_time_taken_per_row = ( + OccurrenceReportBulkImportTask.average_time_taken_per_row() + ) + + if self.rows and self.datetime_queued and average_time_taken_per_row: + precisely = (self.rows - self.rows_processed) * average_time_taken_per_row + return round(precisely) + + return None + + @property + def estimated_processing_time_minutes(self): + seconds = self.estimated_processing_time_seconds + if seconds: + return round(seconds / 60) + return None + + @property + def estimated_processing_time_human_readable(self): + minutes = self.estimated_processing_time_minutes + + if not minutes: + return "No processing data available to estimate time" + + if minutes == 0: + return "Less than a minute" + + return f"~{minutes} minutes" + + def count_rows(self): + logger.info(f"Pre-queue processing bulk import task {self.id}") + try: + workbook = openpyxl.load_workbook(self._file) + except Exception as e: + logger.error(f"Error opening bulk import file {self._file.name}: {e}") + self.processing_status = ( + 
OccurrenceReportBulkImportTask.PROCESSING_STATUS_FAILED + ) + self.datetime_error = timezone.now() + self.error_message = f"Error opening bulk import file: {e}" + self.save() + return + + sheet = workbook.active + self.rows = sheet.max_row - 1 + logger.debug(f"Found {self.rows} rows in bulk import file {self._file.name}") + self.save() + + @classmethod + def validate_headers(self, _file, schema): + logger.info(f"Validating headers for bulk import task {self.id}") + workbook = xlrd.open_workbook(file_contents=_file.read()) + + sheet = workbook.active + + headers = [cell.value for cell in sheet[1]] + + if not headers: + raise ValidationError("No headers found in the file") + + # Check that the headers match the schema (group type and version headings) + + schema_headers = self.schema.columns.all().values_list( + "xlsx_column_header_name", flat=True + ) + if headers == schema_headers: + return True + + extra_headers = set(headers) - set(schema_headers) + missing_headers = set(schema_headers) - set(headers) + error_string = f"Headers do not match schema: {self.schema}." + if missing_headers: + error_string += f" Missing: {missing_headers}" + if extra_headers: + error_string += f" Extra: {extra_headers}" + raise ValidationError(error_string) + + def process(self): + if self.processing_status == self.PROCESSING_STATUS_COMPLETED: + logger.info(f"Bulk import task {self.id} has already been processed") + return + + if self.processing_status == self.PROCESSING_STATUS_FAILED: + logger.info( + f"Bulk import task {self.id} failed. 
Please correct the issues and try again" + ) + return + + if self.processing_status == self.PROCESSING_STATUS_STARTED: + logger.info(f"Bulk import task {self.id} is already in progress") + return + + self.processing_status = self.PROCESSING_STATUS_STARTED + self.datetime_started = timezone.now() + self.save() + + # Open the file + logger.info(f"Opening bulk import file {self._file.name}") + try: + workbook = openpyxl.load_workbook(self._file, read_only=True) + except Exception as e: + logger.error(f"Error opening bulk import file {self._file.name}: {e}") + self.processing_status = ( + OccurrenceReportBulkImportTask.PROCESSING_STATUS_FAILED + ) + self.datetime_error = timezone.now() + self.error_message = f"Error opening bulk import file: {e}" + self.save() + return + + # Get the first sheet + sheet = workbook.active + + # Get the headers + headers = [cell.value for cell in sheet[1]] + + # TODO: Check that the headers match the schema (group type and version headings) + + # Get the rows + rows = list(sheet.iter_rows(min_row=2, max_row=self.rows + 1, values_only=True)) + + # Occurrence reports to create + occurrence_reports = [] + + # Process the rows + for i, row in enumerate(rows): + self.rows_processed = i + 1 + if self.rows_processed > self.rows: + logger.warning( + f"Bulk import task {self.id} tried to process row {i + 1} " + "which is greater than the total number of rows" + ) + break + + self.save() + + try: + self.process_row(row, headers, occurrence_reports) + except Exception as e: + logger.error(f"Error processing row {i + 1}: {e}") + self.processing_status = ( + OccurrenceReportBulkImportTask.PROCESSING_STATUS_FAILED + ) + self.datetime_error = timezone.now() + self.error_row = i + 1 + self.error_message = f"Error processing row {i + 1}: {e}" + self.save() + return + + # Set the task to completed + self.processing_status = ( + OccurrenceReportBulkImportTask.PROCESSING_STATUS_COMPLETED + ) + self.datetime_completed = timezone.now() + self.save() + + return 
datetime_updated = models.DateTimeField(auto_now=True)
error=f"Text must be less than or equal to {model_field.max_length} characters",
for cell in worksheet["A1:ZZ1"][0]:
def copy(self): + new_schema = OccurrenceReportBulkImportSchema( + group_type=self.group_type, + version=self.version + 1, + ) + new_schema.save() + + for column in self.columns.all(): + new_column = OccurrenceReportBulkImportSchemaColumn.objects.get( + pk=column.pk + ) + new_column.pk = None + new_column.schema = new_schema + new_column.save() + + return new_schema + + +class OccurrenceReportBulkImportSchemaColumn(models.Model): + schema = models.ForeignKey( + OccurrenceReportBulkImportSchema, + related_name="columns", + on_delete=models.CASCADE, + ) + + # These two fields define where the data from the column will be imported to + django_import_content_type = models.ForeignKey( + ct_models.ContentType, + on_delete=models.PROTECT, + null=True, + blank=True, + related_name="import_columns", + ) + django_import_field_name = models.CharField(max_length=50, blank=False, null=False) + + # The name of the column header in the .xlsx file + xlsx_column_header_name = models.CharField(max_length=50, blank=False, null=False) + + # The following fields are used to embed data validation in the .xlsx file + # so that the users can do a quick check before uploading + xlsx_data_validation_type = models.CharField( + max_length=20, + choices=sorted( + [(x, x) for x in DataValidation.type.values], + key=lambda x: (x[0] is None, x), + ), + null=True, + blank=True, + ) + xlsx_data_validation_allow_blank = models.BooleanField(default=False) + xlsx_data_validation_operator = models.CharField( + max_length=20, + choices=sorted( + [(x, x) for x in DataValidation.operator.values], + key=lambda x: (x[0] is None, x), + ), + null=True, + blank=True, + ) + xlsx_data_validation_formula1 = models.CharField( + max_length=50, blank=True, null=True + ) + xlsx_data_validation_formula2 = models.CharField( + max_length=50, blank=True, null=True + ) + + # TODO: How are we going to do the list lookup validation for much larger datasets (mostly for species) + + class Meta: + app_label = "boranga" + 
verbose_name = "Occurrence Report Bulk Import Schema Column" + verbose_name_plural = "Occurrence Report Bulk Import Schema Columns" + + def __str__(self): + return f"{self.xlsx_column_header_name} - {self.schema}" + + def validate(self, value): + if self.data_validation_type == "whole": + if not isinstance(value, int): + raise ValidationError( + f"Default value for {self.column_header_name} must be an integer" + ) + + if self.data_validation_type == "decimal": + try: + value = Decimal(value) + except Exception: + raise ValidationError( + f"Default value for {self.column_header_name} must be a decimal" + ) + if self.data_validation_type == "date": + try: + value = dateutil.parser.parse(value) + except Exception: + raise ValidationError( + f"Default value for {self.column_header_name} must be a date" + ) + if self.data_validation_type == "time": + try: + value = dateutil.parser.parse(value) + except Exception: + raise ValidationError( + f"Default value for {self.column_header_name} must be a time" + ) + + # Occurrence Report Document reversion.register(OccurrenceReportDocument) @@ -5225,3 +5850,6 @@ def save(self, *args, **kwargs): "identification", ], ) + +reversion.register(OccurrenceReportGeometry) +reversion.register(OccurrenceGeometry) diff --git a/boranga/components/occurrence/permissions.py b/boranga/components/occurrence/permissions.py index 17174368..6d975ca2 100644 --- a/boranga/components/occurrence/permissions.py +++ b/boranga/components/occurrence/permissions.py @@ -425,6 +425,17 @@ def has_object_permission(self, request, view, obj): return obj.submitter == request.user.id or is_occurrence_assessor(request) +class OccurrenceReportBulkImportPermission(BasePermission): + def has_permission(self, request, view): + if not request.user.is_authenticated: + return False + + if request.user.is_superuser: + return True + + return is_occurrence_assessor(request) + + class OccurrencePermission(BasePermission): def has_permission(self, request, view): if not 
request.user.is_authenticated: diff --git a/boranga/components/occurrence/serializers.py b/boranga/components/occurrence/serializers.py index 77cc5633..93ab1a07 100644 --- a/boranga/components/occurrence/serializers.py +++ b/boranga/components/occurrence/serializers.py @@ -1,3 +1,4 @@ +import hashlib import logging from django.db import models @@ -36,6 +37,9 @@ OccurrenceReportAmendmentRequest, OccurrenceReportAmendmentRequestDocument, OccurrenceReportApprovalDetails, + OccurrenceReportBulkImportSchema, + OccurrenceReportBulkImportSchemaColumn, + OccurrenceReportBulkImportTask, OccurrenceReportDeclinedDetails, OccurrenceReportDocument, OccurrenceReportGeometry, @@ -3857,3 +3861,91 @@ def get_last_updated_by(self, obj): email_user = retrieve_email_user(obj.last_updated_by) return EmailUserSerializer(email_user).data.get("fullname", None) return None + + +class OccurrenceReportBulkImportTaskSerializer(serializers.ModelSerializer): + estimated_processing_time_human_readable = serializers.CharField(read_only=True) + total_time_taken_human_readable = serializers.CharField(read_only=True) + file_size_megabytes = serializers.CharField(read_only=True) + file_name = serializers.CharField(read_only=True) + percentage_complete = serializers.CharField(read_only=True) + + class Meta: + model = OccurrenceReportBulkImportTask + fields = "__all__" + read_only_fields = ( + "id", + "rows", + "rows_processed", + "datetime_queued", + "datetime_started", + "datetime_completed", + "datetime_error", + "error_row", + "error_message", + "processing_status", + "email_user", + "estimated_processing_time_human_readable", + "total_time_taken_human_readable", + "percentage_complete", + ) + + def create(self, validated_data): + _file = validated_data["_file"] + file_hash = hashlib.sha256(_file.read()).hexdigest() + _file.seek(0) + qs = OccurrenceReportBulkImportTask.objects.filter(file_hash=file_hash) + if qs.filter( + processing_status=OccurrenceReportBulkImportTask.PROCESSING_STATUS_QUEUED + 
).exists(): + raise serializers.ValidationError( + "An import task with exactly the same file contents has already been queued." + ) + if qs.filter( + processing_status=OccurrenceReportBulkImportTask.PROCESSING_STATUS_STARTED + ).exists(): + raise serializers.ValidationError( + "An import task with exactly the same file contents is already in progress." + ) + if qs.filter( + processing_status=OccurrenceReportBulkImportTask.PROCESSING_STATUS_COMPLETED + ).exists(): + raise serializers.ValidationError( + "An import task with exactly the same file contents has already been completed." + ) + return super().create(validated_data) + + +class OccurrenceReportBulkImportSchemaColumnSerializer(serializers.ModelSerializer): + + class Meta: + model = OccurrenceReportBulkImportSchemaColumn + fields = "__all__" + read_only_fields = ("id",) + + +class OccurrenceReportBulkImportSchemaSerializer(serializers.ModelSerializer): + columns = OccurrenceReportBulkImportSchemaColumnSerializer(many=True) + group_type_display = serializers.CharField(source="group_type.name", read_only=True) + version = serializers.CharField(read_only=True) + + class Meta: + model = OccurrenceReportBulkImportSchema + fields = "__all__" + read_only_fields = ("id",) + + def update(self, instance, validated_data): + columns_data = validated_data.pop("columns") + # Delete any columns that are not in the new data + instance.columns.exclude( + id__in=[ + column_data["id"] + for column_data in columns_data + if "id" in column_data + ] + ).delete() + for column_data in columns_data: + OccurrenceReportBulkImportSchemaColumn.objects.update_or_create( + **column_data + ) + return super().update(instance, validated_data) diff --git a/boranga/frontend/boranga/src/api.js b/boranga/frontend/boranga/src/api.js index edb076cf..5aed3129 100644 --- a/boranga/frontend/boranga/src/api.js +++ b/boranga/frontend/boranga/src/api.js @@ -15,6 +15,7 @@ module.exports = { compliances: "/api/compliances.json", 
conservation_status_documents: "/api/conservation_status_documents.json", conservation_status: "/api/conservation_status", + content_types: "/api/content_types/", countries: '/api/countries', cs_external_referee_invites: "/api/cs_external_referee_invites", cs_referrals: "/api/cs_referrals.json", @@ -24,6 +25,9 @@ module.exports = { filtered_organisations: '/api/filtered_organisations', help_text_entries: "/api/help_text_entries", marine_treeview: "/api/marine_treeview", + occurrence_report_bulk_imports: "/api/occurrence_report_bulk_imports/", + occurrence_report_bulk_import_schemas: "/api/occurrence_report_bulk_import_schemas/", + occurrence_report_bulk_import_schemas_by_group_type: "/api/occurrence_report_bulk_import_schemas/get_schema_list_by_group_type/", ocr_external_referee_invites: "/api/ocr_external_referee_invites", ocr_referrals: "/api/ocr_referrals.json", organisation_access_group_members: '/api/organisation_access_group_members', diff --git a/boranga/frontend/boranga/src/components/common/occurrence_report_community_dashboard.vue b/boranga/frontend/boranga/src/components/common/occurrence_report_community_dashboard.vue index bdd26288..dad2e66c 100644 --- a/boranga/frontend/boranga/src/components/common/occurrence_report_community_dashboard.vue +++ b/boranga/frontend/boranga/src/components/common/occurrence_report_community_dashboard.vue @@ -60,6 +60,8 @@
+ Bulk Import
diff --git a/boranga/frontend/boranga/src/components/common/occurrence_report_fauna_dashboard.vue b/boranga/frontend/boranga/src/components/common/occurrence_report_fauna_dashboard.vue index cc965c0f..196aca64 100644 --- a/boranga/frontend/boranga/src/components/common/occurrence_report_fauna_dashboard.vue +++ b/boranga/frontend/boranga/src/components/common/occurrence_report_fauna_dashboard.vue @@ -60,6 +60,8 @@
+ Bulk Import
diff --git a/boranga/frontend/boranga/src/components/common/occurrence_report_flora_dashboard.vue b/boranga/frontend/boranga/src/components/common/occurrence_report_flora_dashboard.vue index 52100e15..fb375c73 100644 --- a/boranga/frontend/boranga/src/components/common/occurrence_report_flora_dashboard.vue +++ b/boranga/frontend/boranga/src/components/common/occurrence_report_flora_dashboard.vue @@ -60,6 +60,9 @@
+ Bulk Import +
diff --git a/boranga/frontend/boranga/src/components/internal/occurrence/bulk_import.vue b/boranga/frontend/boranga/src/components/internal/occurrence/bulk_import.vue new file mode 100644 index 00000000..a848e6c4 --- /dev/null +++ b/boranga/frontend/boranga/src/components/internal/occurrence/bulk_import.vue @@ -0,0 +1,485 @@ + + + + diff --git a/boranga/frontend/boranga/src/components/internal/occurrence/bulk_import_schema.vue b/boranga/frontend/boranga/src/components/internal/occurrence/bulk_import_schema.vue new file mode 100644 index 00000000..d123533d --- /dev/null +++ b/boranga/frontend/boranga/src/components/internal/occurrence/bulk_import_schema.vue @@ -0,0 +1,614 @@ + + + + + diff --git a/boranga/frontend/boranga/src/components/internal/occurrence/bulk_import_schema_list.vue b/boranga/frontend/boranga/src/components/internal/occurrence/bulk_import_schema_list.vue new file mode 100644 index 00000000..62ac33d7 --- /dev/null +++ b/boranga/frontend/boranga/src/components/internal/occurrence/bulk_import_schema_list.vue @@ -0,0 +1,149 @@ + + + diff --git a/boranga/frontend/boranga/src/components/internal/routes/index.js b/boranga/frontend/boranga/src/components/internal/routes/index.js index ab3dc67b..f1c3d8c5 100755 --- a/boranga/frontend/boranga/src/components/internal/routes/index.js +++ b/boranga/frontend/boranga/src/components/internal/routes/index.js @@ -10,6 +10,9 @@ import OccurrenceDash from '../occurrence/dashboard.vue' import Occurrence from '../occurrence/occurrence.vue' import OccurrenceReport from '../occurrence/occurrence_report.vue' import OccurrenceReportReferral from '../occurrence/referral.vue' +import BulkImport from '../occurrence/bulk_import.vue' +import BulkImportSchemaList from '../occurrence/bulk_import_schema_list.vue' +import BulkImportSchema from '../occurrence/bulk_import_schema.vue' export default { @@ -63,6 +66,21 @@ export default }, }, children: [ + { + path: 'bulk_import_schema/:bulk_import_schema_id', + name: 
"occurrence-report-bulk-import-schema-details", + component: BulkImportSchema + }, + { + path: 'bulk_import_schema/', + name: "occurrence-report-bulk-import-schema-list", + component: BulkImportSchemaList + }, + { + path: 'bulk_import/', + name: "occurrence-report-bulk-import", + component: BulkImport + }, { path: ':occurrence_report_id', component: { diff --git a/boranga/helpers.py b/boranga/helpers.py index e885f7cd..439e4da0 100755 --- a/boranga/helpers.py +++ b/boranga/helpers.py @@ -401,6 +401,31 @@ def get_instance_identifier(instance): ) +def get_openpyxl_data_validation_type_for_django_field(field): + from openpyxl.worksheet.datavalidation import DataValidation + + dv_types = dict(zip(DataValidation.type.values, DataValidation.type.values)) + + field_type_map = { + models.CharField: "textLength", + models.IntegerField: "whole", + models.DecimalField: "decimal", + models.BooleanField: "list", + models.DateField: "date", + models.DateTimeField: "date", + } + + if isinstance(field, models.CharField) and field.choices: + return dv_types["list"] + + for django_field, dv_type in field_type_map.items(): + if isinstance(field, django_field): + return dv_types[dv_type] + + # Mainly covers TextField and other fields not explicitly handled + return None + + def clone_model( source_model_class: models.base.ModelBase, target_model_class: models.base.ModelBase, diff --git a/boranga/management/commands/ocr_pre_process_bulk_import_tasks.py b/boranga/management/commands/ocr_pre_process_bulk_import_tasks.py new file mode 100644 index 00000000..c885dca7 --- /dev/null +++ b/boranga/management/commands/ocr_pre_process_bulk_import_tasks.py @@ -0,0 +1,43 @@ +import logging + +from django.core.management.base import BaseCommand + +from boranga.components.occurrence.models import OccurrenceReportBulkImportTask + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + help = "Pre process the OCR bulk import tasks" + + def handle(self, *args, **options): + 
logger.info(f"Running command {__name__}") + + # Check if there are already any tasks running and return if so + if OccurrenceReportBulkImportTask.objects.filter( + processing_status=OccurrenceReportBulkImportTask.PROCESSING_STATUS_STARTED + ).exists(): + logger.info("There is already a task running, returning") + return + + # Get the next task to process + task = ( + OccurrenceReportBulkImportTask.objects.filter( + processing_status=OccurrenceReportBulkImportTask.PROCESSING_STATUS_QUEUED, + _file__isnull=False, + rows__isnull=True, + ) + .order_by("datetime_queued") + .first() + ) + + if task is None: + logger.info("No tasks to process, returning") + return + + # Process the task + task.count_rows() + + logger.info(f"OCR Bulk Import Task {task.id} has {task.rows} rows.") + + return diff --git a/boranga/management/commands/ocr_process_bulk_import_queue.py b/boranga/management/commands/ocr_process_bulk_import_queue.py new file mode 100644 index 00000000..ea434fa8 --- /dev/null +++ b/boranga/management/commands/ocr_process_bulk_import_queue.py @@ -0,0 +1,78 @@ +import logging + +from django.conf import settings +from django.core.management.base import BaseCommand +from django.utils import timezone + +from boranga.components.occurrence.models import OccurrenceReportBulkImportTask + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + help = "Process the OCR bulk import queue" + + def handle(self, *args, **options): + logger.info(f"Running command {__name__}") + + # Check if there are any tasks that have been processing for too long + qs = OccurrenceReportBulkImportTask.objects.filter( + processing_status=OccurrenceReportBulkImportTask.PROCESSING_STATUS_STARTED, + datetime_started__lt=timezone.now() + - timezone.timedelta(seconds=settings.OCR_BULK_IMPORT_TASK_TIMEOUT_SECONDS), + ) + if qs.exists(): + for task in qs: + logger.info( + f"Task {task.id} has been processing for too long. 
Adding back to the queue" + ) + task.processing_status = ( + OccurrenceReportBulkImportTask.PROCESSING_STATUS_QUEUED + ) + task.rows_processed = 0 + task.save() + + # Check if there are already any tasks running and return if so + if OccurrenceReportBulkImportTask.objects.filter( + processing_status=OccurrenceReportBulkImportTask.PROCESSING_STATUS_STARTED, + ).exists(): + logger.info("There is already a task running, returning") + return + + # Get the next task to process + task = ( + OccurrenceReportBulkImportTask.objects.filter( + processing_status=OccurrenceReportBulkImportTask.PROCESSING_STATUS_QUEUED, + _file__isnull=False, + ) + .order_by("datetime_queued") + .first() + ) + + if task is None: + logger.info("No tasks to process, returning") + return + + try: + # Process the task + task.process() + except KeyboardInterrupt: + logger.info(f"OCR Bulk Import Task {task.id} was interrupted") + task.processing_status = ( + OccurrenceReportBulkImportTask.PROCESSING_STATUS_FAILED + ) + task.error_message = "KeyboardInterrupt" + task.save() + return + except Exception as e: + logger.error(f"Error processing OCR Bulk Import Task {task.id}: {e}") + task.processing_status = ( + OccurrenceReportBulkImportTask.PROCESSING_STATUS_FAILED + ) + task.error_message = str(e) + task.save() + return + + logger.info(f"OCR Bulk Import Task {task.id} completed") + + return diff --git a/boranga/migrations/0428_occurrencereportbulkimporttask_and_more.py b/boranga/migrations/0428_occurrencereportbulkimporttask_and_more.py new file mode 100644 index 00000000..4a82e33b --- /dev/null +++ b/boranga/migrations/0428_occurrencereportbulkimporttask_and_more.py @@ -0,0 +1,70 @@ +# Generated by Django 5.0.8 on 2024-08-15 02:40 + +import boranga.components.occurrence.models +import django.core.files.storage +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("boranga", "0427_alter_community_renamed_from_and_more"), + ] + + operations = [ + 
migrations.CreateModel( + name="OccurrenceReportBulkImportTask", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "_file", + models.FileField( + max_length=512, + storage=django.core.files.storage.FileSystemStorage( + base_url="/private-media/", + location="/home/oak/dev/boranga/private-media/", + ), + upload_to=boranga.components.occurrence.models.get_occurrence_report_bulk_import_path, + ), + ), + ("rows", models.IntegerField(null=True)), + ("rows_processed", models.IntegerField(default=0)), + ("datetime_queued", models.DateTimeField(auto_now_add=True)), + ("datetime_started", models.DateTimeField(blank=True, null=True)), + ("datetime_completed", models.DateTimeField(blank=True, null=True)), + ("datetime_error", models.DateTimeField(blank=True, null=True)), + ("error_row", models.IntegerField(blank=True, null=True)), + ("error_message", models.TextField(blank=True, null=True)), + ("email_user", models.IntegerField()), + ( + "processing_status", + models.CharField( + choices=[ + ("queued", "Queued"), + ("started", "Started"), + ("failed", "Failed"), + ("completed", "Completed"), + ], + default="queued", + max_length=20, + ), + ), + ], + options={ + "verbose_name": "Occurrence Report Bulk Import Task", + "verbose_name_plural": "Occurrence Report Bulk Import Tasks", + }, + ), + migrations.AlterModelOptions( + name="community", + options={"verbose_name_plural": "communities"}, + ), + ] diff --git a/boranga/migrations/0429_occurrencereport_bulk_import_task_and_more.py b/boranga/migrations/0429_occurrencereport_bulk_import_task_and_more.py new file mode 100644 index 00000000..cd8329c4 --- /dev/null +++ b/boranga/migrations/0429_occurrencereport_bulk_import_task_and_more.py @@ -0,0 +1,30 @@ +# Generated by Django 5.0.8 on 2024-08-15 08:36 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + 
("boranga", "0428_occurrencereportbulkimporttask_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="occurrencereport", + name="bulk_import_task", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="occurrence_reports", + to="boranga.occurrencereportbulkimporttask", + ), + ), + migrations.AlterField( + model_name="occurrencereportbulkimporttask", + name="rows", + field=models.IntegerField(editable=False, null=True), + ), + ] diff --git a/boranga/migrations/0430_occurrencereport_import_hash_and_more.py b/boranga/migrations/0430_occurrencereport_import_hash_and_more.py new file mode 100644 index 00000000..af3f612e --- /dev/null +++ b/boranga/migrations/0430_occurrencereport_import_hash_and_more.py @@ -0,0 +1,66 @@ +# Generated by Django 5.0.8 on 2024-08-16 06:14 + +import boranga.components.conservation_status.models +import boranga.components.meetings.models +import boranga.components.occurrence.models +import boranga.components.species_and_communities.models +import boranga.components.users.models +import django.core.files.storage +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('boranga', '0429_occurrencereport_bulk_import_task_and_more'), + ('contenttypes', '0002_remove_content_type_name'), + ] + + operations = [ + migrations.AddField( + model_name='occurrencereport', + name='import_hash', + field=models.CharField(blank=True, max_length=64, null=True), + ), + migrations.AddField( + model_name='occurrencereportbulkimporttask', + name='file_hash', + field=models.CharField(blank=True, max_length=64, null=True), + ), + migrations.CreateModel( + name='OccurrenceReportBulkImportSchema', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=255)), + ('version', models.IntegerField(default=1)), + 
('group_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='boranga.grouptype')), + ], + options={ + 'verbose_name': 'Occurrence Report Bulk Import Schema', + 'verbose_name_plural': 'Occurrence Report Bulk Import Schemas', + }, + ), + migrations.CreateModel( + name='OccurrenceReportBulkImportSchemaColumn', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('import_field_name', models.CharField(max_length=50)), + ('column_header_name', models.CharField(max_length=50)), + ('data_validation_type', models.CharField(choices=[('textLength', 'textLength'), ('time', 'time'), ('list', 'list'), ('custom', 'custom'), (None, None), ('whole', 'whole'), ('decimal', 'decimal'), ('date', 'date')], default='string', max_length=20)), + ('required', models.BooleanField(default=False)), + ('default_value', models.CharField(blank=True, max_length=255, null=True)), + ('max_length', models.IntegerField(blank=True, null=True)), + ('min_value', models.IntegerField(blank=True, null=True)), + ('max_value', models.IntegerField(blank=True, null=True)), + ('list_lookup_field', models.CharField(blank=True, max_length=50, null=True)), + ('import_content_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='import_columns', to='contenttypes.contenttype')), + ('list_lookup_class', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='list_lookup_columns', to='contenttypes.contenttype')), + ('schema', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='columns', to='boranga.occurrencereportbulkimportschema')), + ], + options={ + 'verbose_name': 'Occurrence Report Bulk Import Schema Column', + 'verbose_name_plural': 'Occurrence Report Bulk Import Schema Columns', + }, + ), + ] diff --git a/boranga/migrations/0431_occurrencereportbulkimporttask_archived_and_more.py 
b/boranga/migrations/0431_occurrencereportbulkimporttask_archived_and_more.py new file mode 100644 index 00000000..c12bcbeb --- /dev/null +++ b/boranga/migrations/0431_occurrencereportbulkimporttask_archived_and_more.py @@ -0,0 +1,36 @@ +# Generated by Django 5.0.8 on 2024-08-19 02:02 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("boranga", "0430_occurrencereport_import_hash_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="occurrencereportbulkimporttask", + name="archived", + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name="occurrencereportbulkimportschemacolumn", + name="data_validation_type", + field=models.CharField( + choices=[ + ("textLength", "textLength"), + ("list", "list"), + ("time", "time"), + ("date", "date"), + ("whole", "whole"), + (None, None), + ("decimal", "decimal"), + ("custom", "custom"), + ], + default="string", + max_length=20, + ), + ), + ] diff --git a/boranga/migrations/0432_occurrencereportbulkimportschema_datetime_created_and_more.py b/boranga/migrations/0432_occurrencereportbulkimportschema_datetime_created_and_more.py new file mode 100644 index 00000000..f147efce --- /dev/null +++ b/boranga/migrations/0432_occurrencereportbulkimportschema_datetime_created_and_more.py @@ -0,0 +1,61 @@ +# Generated by Django 5.0.8 on 2024-08-19 03:25 + +import datetime +import django.utils.timezone +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("boranga", "0431_occurrencereportbulkimporttask_archived_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="occurrencereportbulkimportschema", + name="datetime_created", + field=models.DateTimeField( + auto_now_add=True, default=django.utils.timezone.now + ), + preserve_default=False, + ), + migrations.AddField( + model_name="occurrencereportbulkimportschema", + name="datetime_updated", + 
field=models.DateTimeField(default=datetime.datetime.now), + ), + migrations.AlterField( + model_name="occurrencereportbulkimportschemacolumn", + name="data_validation_type", + field=models.CharField( + choices=[ + ("whole", "whole"), + ("time", "time"), + ("decimal", "decimal"), + ("custom", "custom"), + ("list", "list"), + ("date", "date"), + ("textLength", "textLength"), + (None, None), + ], + default="string", + max_length=20, + ), + ), + migrations.AlterField( + model_name="occurrencereportbulkimporttask", + name="processing_status", + field=models.CharField( + choices=[ + ("queued", "Queued"), + ("started", "Started"), + ("failed", "Failed"), + ("completed", "Completed"), + ("archived", "Archived"), + ], + default="queued", + max_length=20, + ), + ), + ] diff --git a/boranga/migrations/0433_remove_occurrencereportbulkimportschemacolumn_schema_and_more.py b/boranga/migrations/0433_remove_occurrencereportbulkimportschemacolumn_schema_and_more.py new file mode 100644 index 00000000..26259714 --- /dev/null +++ b/boranga/migrations/0433_remove_occurrencereportbulkimportschemacolumn_schema_and_more.py @@ -0,0 +1,31 @@ +# Generated by Django 5.0.8 on 2024-08-19 03:41 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("boranga", "0432_occurrencereportbulkimportschema_datetime_created_and_more"), + ] + + operations = [ + migrations.RemoveField( + model_name="occurrencereportbulkimportschemacolumn", + name="schema", + ), + migrations.RemoveField( + model_name="occurrencereportbulkimportschemacolumn", + name="import_content_type", + ), + migrations.RemoveField( + model_name="occurrencereportbulkimportschemacolumn", + name="list_lookup_class", + ), + migrations.DeleteModel( + name="OccurrenceReportBulkImportSchema", + ), + migrations.DeleteModel( + name="OccurrenceReportBulkImportSchemaColumn", + ), + ] diff --git a/boranga/migrations/0434_alter_communitydocument__file_and_more.py 
b/boranga/migrations/0434_alter_communitydocument__file_and_more.py new file mode 100644 index 00000000..a28fcbee --- /dev/null +++ b/boranga/migrations/0434_alter_communitydocument__file_and_more.py @@ -0,0 +1,56 @@ +# Generated by Django 5.0.8 on 2024-08-19 03:42 + +import datetime +import django.core.files.storage +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ( + "boranga", + "0433_remove_occurrencereportbulkimportschemacolumn_schema_and_more", + ), + ] + + operations = [ + migrations.CreateModel( + name="OccurrenceReportBulkImportSchema", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("version", models.IntegerField(default=1)), + ("datetime_created", models.DateTimeField(auto_now_add=True)), + ( + "datetime_updated", + models.DateTimeField(default=datetime.datetime.now), + ), + ( + "group_type", + models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + to="boranga.grouptype", + ), + ), + ], + options={ + "verbose_name": "Occurrence Report Bulk Import Schema", + "verbose_name_plural": "Occurrence Report Bulk Import Schemas", + }, + ), + migrations.AddConstraint( + model_name="occurrencereportbulkimportschema", + constraint=models.UniqueConstraint( + fields=("group_type", "version"), name="unique_schema_version" + ), + ), + ] diff --git a/boranga/migrations/0435_alter_communitydocument__file_and_more.py b/boranga/migrations/0435_alter_communitydocument__file_and_more.py new file mode 100644 index 00000000..266a1646 --- /dev/null +++ b/boranga/migrations/0435_alter_communitydocument__file_and_more.py @@ -0,0 +1,101 @@ +# Generated by Django 5.0.8 on 2024-08-19 03:59 + +import django.core.files.storage +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("boranga", 
"0434_alter_communitydocument__file_and_more"), + ("contenttypes", "0002_remove_content_type_name"), + ] + + operations = [ + migrations.CreateModel( + name="OccurrenceReportBulkImportSchemaColumn", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("django_import_field_name", models.CharField(max_length=50)), + ("xlsx_column_header_name", models.CharField(max_length=50)), + ( + "xlsx_data_validation_type", + models.CharField( + choices=[ + ("custom", "custom"), + ("whole", "whole"), + (None, None), + ("list", "list"), + ("time", "time"), + ("textLength", "textLength"), + ("decimal", "decimal"), + ("date", "date"), + ], + default="string", + max_length=20, + ), + ), + ( + "xlsx_data_validation_allow_blank", + models.BooleanField(default=False), + ), + ( + "xlsx_data_validation_operator", + models.CharField( + choices=[ + ("lessThan", "lessThan"), + ("greaterThan", "greaterThan"), + ("greaterThanOrEqual", "greaterThanOrEqual"), + ("equal", "equal"), + ("notEqual", "notEqual"), + ("lessThanOrEqual", "lessThanOrEqual"), + ("notBetween", "notBetween"), + ("between", "between"), + (None, None), + ], + default="between", + max_length=20, + ), + ), + ( + "xlsx_data_validation_formula1", + models.CharField(blank=True, max_length=50, null=True), + ), + ( + "xlsx_data_validation_formula2", + models.CharField(blank=True, max_length=50, null=True), + ), + ( + "django_import_content_type", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="import_columns", + to="contenttypes.contenttype", + ), + ), + ( + "schema", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="columns", + to="boranga.occurrencereportbulkimportschema", + ), + ), + ], + options={ + "verbose_name": "Occurrence Report Bulk Import Schema Column", + "verbose_name_plural": "Occurrence Report Bulk Import Schema Columns", + }, + ), + ] diff 
--git a/boranga/migrations/0436_alter_occurrencereportbulkimportschema_options_and_more.py b/boranga/migrations/0436_alter_occurrencereportbulkimportschema_options_and_more.py new file mode 100644 index 00000000..e88a899c --- /dev/null +++ b/boranga/migrations/0436_alter_occurrencereportbulkimportschema_options_and_more.py @@ -0,0 +1,60 @@ +# Generated by Django 5.0.8 on 2024-08-19 07:11 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("boranga", "0435_alter_communitydocument__file_and_more"), + ] + + operations = [ + migrations.AlterModelOptions( + name="occurrencereportbulkimportschema", + options={ + "ordering": ["group_type", "-version"], + "verbose_name": "Occurrence Report Bulk Import Schema", + "verbose_name_plural": "Occurrence Report Bulk Import Schemas", + }, + ), + migrations.AlterField( + model_name="occurrencereportbulkimportschemacolumn", + name="xlsx_data_validation_operator", + field=models.CharField( + blank=True, + choices=[ + ("notEqual", "notEqual"), + ("between", "between"), + ("greaterThanOrEqual", "greaterThanOrEqual"), + ("lessThan", "lessThan"), + ("lessThanOrEqual", "lessThanOrEqual"), + ("equal", "equal"), + ("greaterThan", "greaterThan"), + ("notBetween", "notBetween"), + (None, None), + ], + max_length=20, + null=True, + ), + ), + migrations.AlterField( + model_name="occurrencereportbulkimportschemacolumn", + name="xlsx_data_validation_type", + field=models.CharField( + blank=True, + choices=[ + ("list", "list"), + ("whole", "whole"), + ("custom", "custom"), + ("textLength", "textLength"), + ("time", "time"), + ("date", "date"), + ("decimal", "decimal"), + (None, None), + ], + max_length=20, + null=True, + ), + ), + ] diff --git a/boranga/migrations/0437_occurrencereportbulkimporttask_schema_and_more.py b/boranga/migrations/0437_occurrencereportbulkimporttask_schema_and_more.py new file mode 100644 index 00000000..2ed60554 --- /dev/null +++ 
b/boranga/migrations/0437_occurrencereportbulkimporttask_schema_and_more.py @@ -0,0 +1,63 @@ +# Generated by Django 5.0.8 on 2024-08-19 07:40 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("boranga", "0436_alter_occurrencereportbulkimportschema_options_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="occurrencereportbulkimporttask", + name="schema", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + to="boranga.occurrencereportbulkimportschema", + ), + ), + migrations.AlterField( + model_name="occurrencereportbulkimportschemacolumn", + name="xlsx_data_validation_operator", + field=models.CharField( + blank=True, + choices=[ + ("notBetween", "notBetween"), + ("lessThan", "lessThan"), + ("greaterThan", "greaterThan"), + ("between", "between"), + ("notEqual", "notEqual"), + ("lessThanOrEqual", "lessThanOrEqual"), + ("equal", "equal"), + ("greaterThanOrEqual", "greaterThanOrEqual"), + (None, None), + ], + max_length=20, + null=True, + ), + ), + migrations.AlterField( + model_name="occurrencereportbulkimportschemacolumn", + name="xlsx_data_validation_type", + field=models.CharField( + blank=True, + choices=[ + ("textLength", "textLength"), + ("custom", "custom"), + ("decimal", "decimal"), + ("whole", "whole"), + ("date", "date"), + ("list", "list"), + ("time", "time"), + (None, None), + ], + max_length=20, + null=True, + ), + ), + ] diff --git a/boranga/migrations/0438_alter_communitydocument__file_and_more.py b/boranga/migrations/0438_alter_communitydocument__file_and_more.py new file mode 100644 index 00000000..1e8f98fc --- /dev/null +++ b/boranga/migrations/0438_alter_communitydocument__file_and_more.py @@ -0,0 +1,52 @@ +# Generated by Django 5.0.8 on 2024-08-19 07:53 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("boranga", 
"0437_occurrencereportbulkimporttask_schema_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="occurrencereportbulkimportschemacolumn", + name="xlsx_data_validation_operator", + field=models.CharField( + blank=True, + choices=[ + ("between", "between"), + ("equal", "equal"), + ("greaterThan", "greaterThan"), + ("greaterThanOrEqual", "greaterThanOrEqual"), + ("lessThan", "lessThan"), + ("lessThanOrEqual", "lessThanOrEqual"), + ("notBetween", "notBetween"), + ("notEqual", "notEqual"), + (None, None), + ], + max_length=20, + null=True, + ), + ), + migrations.AlterField( + model_name="occurrencereportbulkimportschemacolumn", + name="xlsx_data_validation_type", + field=models.CharField( + blank=True, + choices=[ + ("custom", "custom"), + ("date", "date"), + ("decimal", "decimal"), + ("list", "list"), + ("textLength", "textLength"), + ("time", "time"), + ("whole", "whole"), + (None, None), + ], + max_length=20, + null=True, + ), + ), + ] diff --git a/boranga/settings.py b/boranga/settings.py index abf4e1a6..f98d440f 100755 --- a/boranga/settings.py +++ b/boranga/settings.py @@ -128,6 +128,7 @@ def show_toolbar(request): "reversion_compare", "nested_admin", "colorfield", + "django_filters", ] ADD_REVERSION_ADMIN = True @@ -465,3 +466,7 @@ def show_toolbar(request): # (_save method of FileSystemStorage class) # As it causes a permission exception when using azure network drives FILE_UPLOAD_PERMISSIONS = None + +OCR_BULK_IMPORT_TASK_TIMEOUT_SECONDS = env( + "OCR_BULK_IMPORT_TASK_TIMEOUT_SECONDS", 60 * 5 +) # Default = 5 minutes diff --git a/boranga/static/boranga/css/base.css b/boranga/static/boranga/css/base.css index f7f3141c..4ae2181d 100755 --- a/boranga/static/boranga/css/base.css +++ b/boranga/static/boranga/css/base.css @@ -129,20 +129,6 @@ body { overflow-x: scroll; } -.modal-header { - background-color: #529b6b; - color: #fcb13f; -} - -.modal-body { - background-color: #529b6b; - color: #fff; -} - -.modal-footer { - background-color: #529b6b; 
-} - .popover { max-width: 100%; } diff --git a/boranga/templates/webtemplate_dbca/includes/staff_menu.html b/boranga/templates/webtemplate_dbca/includes/staff_menu.html index 07b1e69b..8d808e22 100644 --- a/boranga/templates/webtemplate_dbca/includes/staff_menu.html +++ b/boranga/templates/webtemplate_dbca/includes/staff_menu.html @@ -2,4 +2,7 @@ {% is_django_admin as is_django_admin_user %} {% if is_django_admin_user %}
  • Admin
  • -{% endif %} \ No newline at end of file +{% endif %} +{% if is_django_admin_user or request.user.is_superuser %} +
  • OCR Bulk Import Schema
  • +{% endif %} diff --git a/boranga/urls.py b/boranga/urls.py index 0a4e3132..816d2e38 100755 --- a/boranga/urls.py +++ b/boranga/urls.py @@ -150,6 +150,16 @@ def trigger_error(request): occurrence_api.OccurrenceReportPaginatedViewSet, "occurrence_report_paginated", ) +router.register( + r"occurrence_report_bulk_imports", + occurrence_api.OccurrenceReportBulkImportTaskViewSet, + "occurrence_report_bulk_imports", +) +router.register( + r"occurrence_report_bulk_import_schemas", + occurrence_api.OccurrenceReportBulkImportSchemaViewSet, + "occurrence_report_bulk_import_schemas", +) router.register(r"observer_detail", occurrence_api.ObserverDetailViewSet) router.register(r"contact_detail", occurrence_api.ContactDetailViewSet) router.register(r"occurrence_sites", occurrence_api.OccurrenceSiteViewSet) @@ -178,7 +188,7 @@ def trigger_error(request): router.register( r"help_text_entries", main_api.HelpTextEntryViewSet, "help_text_entries" ) - +router.register(r"content_types", main_api.ContentTypeViewSet, "content_types") router.registry.sort(key=lambda x: x[0]) api_patterns = [ diff --git a/boranga/views.py b/boranga/views.py index 888c0f46..84f8f80a 100644 --- a/boranga/views.py +++ b/boranga/views.py @@ -27,15 +27,11 @@ from boranga.components.species_and_communities.models import Community, Species from boranga.forms import LoginForm from boranga.helpers import ( - is_conservation_status_assessor, is_conservation_status_referee, is_contributor, is_django_admin, is_internal, - is_occurrence_approver, - is_occurrence_assessor, is_occurrence_report_referee, - is_species_communities_approver, ) logger = logging.getLogger(__name__) @@ -209,10 +205,7 @@ def post(self, request): def is_authorised_to_access_community_document(request, document_id): if is_internal(request): # check auth - return ( - request.user.is_superuser - or is_internal(request) - ) + return request.user.is_superuser or is_internal(request) else: return False @@ -220,10 +213,7 @@ def 
is_authorised_to_access_community_document(request, document_id): def is_authorised_to_access_species_document(request, document_id): if is_internal(request): # check auth - return ( - request.user.is_superuser - or is_internal(request) - ) + return request.user.is_superuser or is_internal(request) else: return False @@ -231,10 +221,7 @@ def is_authorised_to_access_species_document(request, document_id): def is_authorised_to_access_meeting_document(request, document_id): if is_internal(request): # check auth - return ( - request.user.is_superuser - or is_internal(request) - ) + return request.user.is_superuser or is_internal(request) else: return False @@ -257,10 +244,7 @@ def is_authorised_to_access_occurrence_report_document(request, document_id): if is_internal(request): # check auth - return ( - request.user.is_superuser - or is_internal(request) - ) + return request.user.is_superuser or is_internal(request) if is_occurrence_report_referee(request) and is_contributor(request): file_name = get_file_name_from_path(request.path) @@ -322,10 +306,7 @@ def is_authorised_to_access_occurrence_report_document(request, document_id): def is_authorised_to_access_occurrence_document(request, document_id): if is_internal(request): # check auth - return ( - request.user.is_superuser - or is_internal(request) - ) + return request.user.is_superuser or is_internal(request) else: return False @@ -336,10 +317,7 @@ def is_authorised_to_access_conservation_status_document(request, document_id): if is_internal(request): # check auth - return ( - request.user.is_superuser - or is_internal(request) - ) + return request.user.is_superuser or is_internal(request) if is_conservation_status_referee(request) and is_contributor(request): file_name = get_file_name_from_path(request.path) @@ -378,7 +356,7 @@ def is_authorised_to_access_conservation_status_document(request, document_id): document_id, request.path, referee_allowed_paths ) - if is_contributor(request): + if 
is_contributor(request): contributor_allowed_paths = ["documents", "amendment_request_documents"] file_name = get_file_name_from_path(request.path) return ( diff --git a/python-cron b/python-cron index 3df1cba4..46b7d72a 100644 --- a/python-cron +++ b/python-cron @@ -8,3 +8,4 @@ # * * * * * Command */5 * * * * /app/venv/bin/python3 /app/manage.py runcrons >> /app/logs/cronjob.log 2>&1 +* * * * * /app/venv/bin/python3 /app/manage.py runcrons >> /app/logs/cronjob.log 2>&1 diff --git a/requirements.txt b/requirements.txt index e4ed9174..337fbe1f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,6 +15,7 @@ git+https://github.com/xzzy/django-preserialize.git#egg=django-preserialize django-countries~=7.5 django-cron==0.6.0 # This project is no longer maintained django-dynamic-fixture==3.1.3 +django-filter~=24.3 gdal==3.8.4 openpyxl~=3.1 datapackage~=1.15