Continued migration to the new structure
This commit is contained in:
parent 98517da474
commit b8236f8816
@@ -1,6 +1,6 @@
 from datetime import datetime, timedelta, date, time
 import calendar
-from django.db.models import Q
+from django.db.models import Q, F
 from django.utils import timezone
 from django.utils.translation import gettext_lazy as _
 from django.template.defaultfilters import date as _date
@@ -234,7 +234,7 @@ class CalendarList:
                     | Q(recurrence_dtend__lt=startdatetime)
                 )
             )
-        ).filter(masked=False).order_by("start_time").prefetch_related("exact_location").prefetch_related("category")
+        ).filter(Q(possibly_duplicated__isnull=True)|~Q(possibly_duplicated__representative=F('pk'))).order_by("start_time").prefetch_related("exact_location").prefetch_related("category")

         firstdate = datetime.fromordinal(self.c_firstdate.toordinal())
         if firstdate.tzinfo is None or firstdate.tzinfo.utcoffset(firstdate) is None:
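The added F import is what lets this filter compare a related column (possibly_duplicated__representative) with the row's own primary key. A minimal sketch of the same predicate factored into a reusable helper, assuming the Event/DuplicatedEvents models of this repository; the helper name is illustrative and not part of the codebase:

from django.db.models import F, Q


def exclude_group_representatives(qs):
    # Mirrors the expression that replaces .filter(masked=False) above:
    # keep events that have no duplicate group, or whose group's
    # representative is a different event than the row itself.
    return qs.filter(
        Q(possibly_duplicated__isnull=True)
        | ~Q(possibly_duplicated__representative=F("pk"))
    )

The same expression reappears further down in get_qs_events_with_unkwnon_place.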
@@ -12,7 +12,7 @@ import os
 from django.core.files import File
 from django.utils import timezone
 from django.contrib.postgres.search import TrigramSimilarity
-from django.db.models import Q, Count
+from django.db.models import Q, Count, F
 import recurrence.fields
 import recurrence
 import copy
@@ -228,7 +228,7 @@ class DuplicatedEvents(models.Model):
         return reverse("view_duplicate", kwargs={"pk": self.pk})

     def get_one_event(self):
-        return self.event_set.filter(masked=False).first()
+        return self.event_set.representative

     def merge_into(self, other):
         # for all objects associated to this group
@@ -241,6 +241,9 @@ class DuplicatedEvents(models.Model):
         # then delete the empty group
         self.delete()

+    # this method fixes the duplicated events by using the given event
+    # as the representative one.
+    # if no event is given, the last one (by creation date) is selected.
     def fix(self, event=None):
         events = self.event_set.all()
         if event is None:
@@ -248,12 +251,12 @@ class DuplicatedEvents(models.Model):
         for e in events:
             if event is None:
                 event = e
-            e.masked = e != event
             if e != event and e.same_uuid(event):
                 e.status = Event.STATUS.TRASH
         if not event is None:
             event.status = Event.STATUS.PUBLISHED
-        Event.objects.bulk_update(events, fields=["masked", "status"])
+        self.representative = event
+        Event.objects.bulk_update(events, fields=["status"])
+        self.save()
         return len(events)

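The rendered diff drops indentation and change markers, so here is a sketch of how fix() plausibly reads after this commit; structure and indentation are inferred from the visible lines, and one line between the two hunks (old line 247) is not shown:

    # this method fixes the duplicated events by using the given event
    # as the representative one.
    # if no event is given, the last one (by creation date) is selected.
    def fix(self, event=None):
        events = self.event_set.all()
        if event is None:
            pass  # one line (old 247) is hidden between the two hunks and not reproduced here
        for e in events:
            if event is None:
                event = e
            if e != event and e.same_uuid(event):
                e.status = Event.STATUS.TRASH
        if not event is None:
            event.status = Event.STATUS.PUBLISHED
        self.representative = event
        Event.objects.bulk_update(events, fields=["status"])
        self.save()
        return len(events)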
@@ -298,6 +301,12 @@ class DuplicatedEvents(models.Model):

         return nb

+    def save(self, *args, **kwargs):
+        if self.representative and not self.representative in self.event_set.all():
+            self.representative = None
+
+        super().save(*args, **kwargs)
+

 class ReferenceLocation(models.Model):
     name = models.CharField(verbose_name=_("Name"), help_text=_("Name of the location"), unique=True, null=False)
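The new save() override keeps the representative pointer consistent with group membership: if the referenced event is not (or no longer) part of event_set, the pointer is cleared before the row is written. A small usage sketch under that assumption; names are illustrative:

def demo_representative_guard(group, stray_event):
    # group: a saved DuplicatedEvents instance.
    # stray_event: a saved Event that is not a member of group.event_set.
    group.representative = stray_event
    group.save()  # the override above resets the pointer before writing
    assert group.representative is None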
@@ -496,14 +505,6 @@ class Event(models.Model):
         max_length=1024,
     )

-    masked = models.BooleanField(
-        verbose_name=_("Masked"),
-        help_text=_("This event is masked by a duplicated version."),
-        default=False,
-        blank=True,
-        null=True,
-    )
-
     import_sources = ArrayField(
         models.CharField(max_length=512),
         verbose_name=_("Importation source"),
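Removing the masked field implies a schema migration that is not part of this diff. A hypothetical sketch of the corresponding operation as makemigrations would generate it; the dependency and any companion AddField for DuplicatedEvents.representative are omitted because they are not visible here:

from django.db import migrations


class Migration(migrations.Migration):

    # The real dependency name is not visible in this diff.
    dependencies = []

    operations = [
        migrations.RemoveField(
            model_name="event",
            name="masked",
        ),
    ]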
@@ -635,7 +636,7 @@ class Event(models.Model):
         return Event.objects.filter(status=Event.STATUS.DRAFT).count()

     def get_qs_events_with_unkwnon_place():
-        return Event.objects.filter(exact_location__isnull=True).filter(~Q(status=Event.STATUS.TRASH)).filter(Q(possibly_duplicated=None)|Q(masked=False))
+        return Event.objects.filter(exact_location__isnull=True).filter(~Q(status=Event.STATUS.TRASH)).filter(Q(possibly_duplicated=None)|~Q(possibly_duplicated__representative=F('pk')))

     def download_image(self):
         # first download file
@@ -996,6 +997,9 @@ class Event(models.Model):
             possibly_duplicated=self.possibly_duplicated
         ).exclude(pk=self.pk)

+    def masked(self):
+        return self.possibly_duplicated and self.possibly_duplicated.representative == self
+
     def get_comparison(events, all=True):
         result = []
         for attr in Event.data_fields(all=all, local_img=False, exact_location=False):
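Because masked is now a method on Event instead of a model field, Python call sites must invoke it, and querysets can no longer filter on it directly, which is why the F()-based filters appear elsewhere in this commit; Django templates keep working unchanged, since the template engine calls zero-argument callables. A one-line illustration, with an illustrative function name:

def event_is_masked(event):
    # Former attribute access `event.masked` becomes a call after this commit.
    return event.masked()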
@@ -1461,11 +1461,8 @@ def merge_duplicate(request, pk):

        # create a new event that merge the selected events
        new_event = Event(**new_event_data)
-       new_event.masked = False
-       new_event.status = Event.STATUS.PUBLISHED
-       new_event.possibly_duplicated = edup
        new_event.set_skip_duplicate_check()

        edup.fix(new_event)

        messages.info(request, _("Creation of a merged event has been successfully completed."))
@@ -1492,9 +1489,7 @@ def fix_duplicate(request, pk):
    if form.is_valid():
        if form.is_action_no_duplicates():
            events = edup.get_duplicated()
-           for e in events:
-               e.masked = False
-               e.save()

            if len(events) == 0:
                date = None
            else:
@@ -1535,7 +1530,8 @@ def fix_duplicate(request, pk):
        elif form.is_action_remove():
            event = form.get_selected_event(edup)
            event.possibly_duplicated = None
-           event.masked = False
+           if edup.representative == event:
+               edup.representative = None
            event.save()
            edup.save()
            messages.success(