Skip to content
Open
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion projects/management/commands/update_project_deadlines.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ def handle(self, *args, **options):
projects = Project.objects.all()

for idx, project in enumerate(projects):
log.info(f'Updating project "{project.name}" deadlines ({idx+1}/{len(projects)})')
with transaction.atomic():
project.update_deadlines(initial=True)
project.save()
18 changes: 15 additions & 3 deletions projects/models/project.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import itertools
import logging
from datetime import timedelta
Comment thread
github-code-quality[bot] marked this conversation as resolved.
Fixed

from actstream import action
from actstream.models import Action as ActStreamAction
Expand All @@ -16,6 +17,7 @@
from django.utils.translation import gettext_lazy as _
from django.utils.functional import cached_property
from private_storage.fields import PrivateFileField
from rest_framework.exceptions import ValidationError
Comment thread
github-code-quality[bot] marked this conversation as resolved.
Fixed
Comment thread
github-code-quality[bot] marked this conversation as resolved.
Fixed
from PIL import Image, ImageOps

from projects.actions import verbs
Expand Down Expand Up @@ -267,8 +269,10 @@ def set_attribute_data(self, data):
self.attribute_data = {}
self.update_attribute_data(data)

def update_attribute_data(self, data, confirmed_fields=None, fake=False):
def update_attribute_data(self, data, confirmed_fields=None, fake=False, locked_attributes_data=None):
from datetime import datetime
confirmed_fields = confirmed_fields or []
locked_attributes_data = locked_attributes_data or {}

if not isinstance(self.attribute_data, dict):
self.attribute_data = {}
Expand All @@ -286,6 +290,10 @@ def update_attribute_data(self, data, confirmed_fields=None, fake=False):
if identifier in confirmed_fields:
continue # Skip silently a value that is in confirmed_fields they should not move because already confirmed

# Skip writes to locked attributes in preview/validation mode
if fake and locked_attributes_data and identifier in locked_attributes_data:
continue

self.attribute_data[identifier] = value
if attribute.value_type == Attribute.TYPE_GEOMETRY:
geometry_query_params = {"attribute": attribute, "project": self}
Expand Down Expand Up @@ -547,15 +555,21 @@ def update_deadlines(self, user=None, initial=False, preview_attributes={}, conf

# Update attribute-based deadlines
dls_to_update = []

for dl in self.deadlines.all().select_related("deadline__attribute"):
if not dl.deadline.attribute:
continue

# Skip locked/confirmed fields - they should not be updated
if dl.deadline.attribute.identifier in confirmed_fields:
continue

value = self.attribute_data.get(dl.deadline.attribute.identifier)
value = value if value != 'null' else None
if dl.date != value:
dl.date = value
dls_to_update.append(dl)

self.deadlines.bulk_update(dls_to_update, ['date'])
# Calculate automatic values for newly added deadlines
self._set_calculated_deadlines(
Expand Down Expand Up @@ -703,11 +717,9 @@ def clear_data_by_data_retention_plan(self, data_retention_plan):
self.attribute_data[attribute.identifier] = None
updated = True
if updated:
log.info(f"Clearing data by data_retention_plan '{data_retention_plan}' from project '{self}'")
self.save()

def clear_audit_log_data(self):
log.info(f"Clearing audit log data from project '{self}'")
LogEntry.objects.filter(object_id=str(self.pk)).delete() # Clears django-admin logs from django_admin_log table
ActStreamAction.objects.filter(target_object_id=str(self.pk)).delete() # Clear audit logs from actstream_action table

Expand Down
21 changes: 18 additions & 3 deletions projects/serializers/project.py
Original file line number Diff line number Diff line change
Expand Up @@ -1784,8 +1784,15 @@ def set_initial_data(self, attribute_data, validated_data):
pass

def update(self, instance: Project, validated_data: dict) -> Project:



attribute_data = validated_data.pop("attribute_data", {})
confirmed_fields = self.context["confirmed_fields"]
locked_fields = self.context.get("locked_fields", [])
# Combine confirmed and locked fields into a single list of protected fields
protected_fields = list(set(confirmed_fields + locked_fields))

subtype = validated_data.get("subtype")
subtype_changed = subtype is not None and subtype != instance.subtype
phase = validated_data.get("phase")
Expand Down Expand Up @@ -1826,7 +1833,15 @@ def update(self, instance: Project, validated_data: dict) -> Project:

self.update_initial_data(validated_data)
if attribute_data:
instance.update_attribute_data(attribute_data, confirmed_fields=confirmed_fields)
# Check if this is a fake/preview request
is_fake = hasattr(self.context.get("request"), "_fake") and self.context["request"]._fake
locked_attrs_data = self.context.get("locked_attributes_data", {})
instance.update_attribute_data(
attribute_data,
confirmed_fields=protected_fields,
fake=is_fake,
locked_attributes_data=locked_attrs_data
)

project = super(ProjectSerializer, self).update(instance, validated_data)

Expand All @@ -1843,9 +1858,9 @@ def update(self, instance: Project, validated_data: dict) -> Project:
project.update_attribute_data(cleared_attributes)
self.log_updates_attribute_data(cleared_attributes)
project.deadlines.all().delete()
project.update_deadlines(user=user, preview_attributes=attribute_data, confirmed_fields=confirmed_fields)
project.update_deadlines(user=user, preview_attributes=attribute_data, confirmed_fields=protected_fields)
elif should_update_deadlines:
project.update_deadlines(user=user, preview_attributes=attribute_data, confirmed_fields=confirmed_fields)
project.update_deadlines(user=user, preview_attributes=attribute_data, confirmed_fields=protected_fields)
project.deadlines.filter(deadline__attribute__identifier__in=attribute_data.keys())\
.update(edited=timezone.now())

Expand Down
96 changes: 83 additions & 13 deletions projects/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
from rest_framework.parsers import MultiPartParser
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.response import Response
from rest_framework.exceptions import ValidationError
from rest_framework.views import APIView
from rest_framework.viewsets import ReadOnlyModelViewSet
from rest_framework_extensions.mixins import NestedViewSetMixin
Expand Down Expand Up @@ -324,6 +325,10 @@ def get_serializer_context(self):
context = super().get_serializer_context()
context["action"] = self.action
context["confirmed_fields"] = self.request.data.get('confirmed_fields', [])
# Extract locked attributes (temporary, not saved to DB) and add field names and VALUES to context
locked_attributes = self.request.data.get('lockedAttributes', {})
context["locked_fields"] = list(locked_attributes.keys()) if isinstance(locked_attributes, dict) else []
context["locked_attributes_data"] = locked_attributes if isinstance(locked_attributes, dict) else {}

if self.action == "list":
context["project_schedule_cache"] = \
Expand Down Expand Up @@ -937,12 +942,20 @@ def attribute_data(self, request):

def update(self, request, *args, **kwargs):
fake = request.query_params.get('fake', False)
# Normalize fake flag to boolean
is_fake = str(fake).lower() in ['1', 'true', 't', 'yes']
# Store the original confirmed_fields before calling update
# This should prevent confirmed fields from being moved during an update or validation
confirmed_fields = request.data.get('confirmed_fields', [])
original_attribute_data = request.data.get('attribute_data', {})
# Capture the persisted snapshot before any mutation
try:
project_instance = self.get_object()
original_attribute_data = dict(project_instance.attribute_data or {})
except Exception:
original_attribute_data = {}
locked_attributes = request.data.get('lockedAttributes', {})

if not fake:
if not is_fake:
# Actual update logic that saves to db
result = super().update(request, *args, **kwargs)

Expand All @@ -954,17 +967,74 @@ def update(self, request, *args, **kwargs):
# Validation logic ?fake
# Run update in 'ghost' mode where no changes are applied to database but result is returned
with transaction.atomic():
result = super().update(request, *args, **kwargs)

# Before returning, check if we need to restore original values for confirmed fields
if hasattr(result, 'data') and confirmed_fields and 'attribute_data' in result.data:
# Restore original values for confirmed fields
for field in confirmed_fields:
if field in original_attribute_data and field in result.data['attribute_data']:
result.data['attribute_data'][field] = original_attribute_data[field]
# Prevents saving anything to the database, but still returns the values changed by validation to the frontend
transaction.set_rollback(True)
return result
# Only use new locking logic if lockedAttributes is not empty
if locked_attributes and isinstance(locked_attributes, dict) and len(locked_attributes) > 0:
try:
result = super().update(request, *args, **kwargs)

# Before returning, check if we need to restore original values for confirmed fields
if hasattr(result, 'data') and confirmed_fields and 'attribute_data' in result.data:
# Restore original values for confirmed fields
for field in confirmed_fields:
if field in original_attribute_data and field in result.data['attribute_data']:
result.data['attribute_data'][field] = original_attribute_data[field]

# If an attempt was made to change locked fields during the preview, return a structured error and echo the original payload
try:
resp_attr = result.data.get('attribute_data', {}) if hasattr(result, 'data') else {}
locked_conflicts = []
if isinstance(locked_attributes, dict) and isinstance(resp_attr, dict):
for k, v in locked_attributes.items():
if k in resp_attr and resp_attr.get(k) != v:
locked_conflicts.append(k)
if locked_conflicts:
transaction.set_rollback(True)
return Response({
'locked_fields': locked_conflicts,
'attribute_data': original_attribute_data
}, status=status.HTTP_400_BAD_REQUEST)
except Exception as exc:
log.error(f"[LOCK_DEBUG] Error while evaluating locked field conflicts: {exc}")

# Prevent saving anything to the database, but return the values for a normal successful preview
transaction.set_rollback(True)
return result

except ValidationError as ve:
# ValidationError during preview - extract affected fields and return locked_fields format
transaction.set_rollback(True)

# Extract field names from ValidationError detail
affected_fields = []
if hasattr(ve, 'detail') and isinstance(ve.detail, dict):
# DRF ValidationError with field-level errors
if 'attribute_data' in ve.detail and isinstance(ve.detail['attribute_data'], dict):
affected_fields = list(ve.detail['attribute_data'].keys())
else:
affected_fields = list(ve.detail.keys())

# Return locked_fields response format for frontend
if affected_fields:
return Response({
'locked_fields': affected_fields,
'attribute_data': original_attribute_data
}, status=status.HTTP_400_BAD_REQUEST)
else:
# Re-raise if we can't determine affected fields
raise
else:
# No locked attributes - use original validation logic
result = super().update(request, *args, **kwargs)

# Before returning, check if we need to restore original values for confirmed fields
if hasattr(result, 'data') and confirmed_fields and 'attribute_data' in result.data:
# Restore original values for confirmed fields
for field in confirmed_fields:
if field in original_attribute_data and field in result.data['attribute_data']:
result.data['attribute_data'][field] = original_attribute_data[field]
# Prevents saving anything to database but returns values that have been changed by validation to frontend
transaction.set_rollback(True)
return result


class ProjectPhaseViewSet(viewsets.ReadOnlyModelViewSet):
Expand Down