Optimisation of database access

Jean-Marie Favreau 2024-11-09 11:19:14 +01:00
parent 4c5decd682
commit 7a8efb8ed7
2 changed files with 17 additions and 12 deletions

View File

@@ -114,7 +114,6 @@ class EventForm(ModelForm):
         self.fields['category'].queryset = self.fields['category'].queryset.order_by('name')
         self.fields['category'].empty_label = None
         self.fields['category'].initial = Category.get_default_category()
-        logger.warning("ça se passe là")

     def is_clone_from_url(self):
         return self.cloning
@@ -160,8 +159,8 @@ class FixDuplicates(Form):
     def __init__(self, *args, **kwargs):
         edup = kwargs.pop("edup", None)

-        events = edup.event_set.all()
-        nb_events = edup.event_set.count()
+        events = edup.get_duplicated()
+        nb_events = len(events)

         super().__init__(*args, **kwargs)
         choices = []

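In FixDuplicates above, the old code paired `edup.event_set.all()` with `edup.event_set.count()`, which costs an extra COUNT query on top of the SELECT that fetches the events. A minimal sketch of the difference, assuming only a Django reverse relation named `event_set` as in the diff (the helper names are illustrative, not part of the project):

def count_with_two_queries(edup):
    # event_set.all() builds a lazy queryset; iterating it later runs one SELECT,
    # and event_set.count() runs a separate SELECT COUNT(*).
    events = edup.event_set.all()
    nb_events = edup.event_set.count()
    return events, nb_events

def count_with_one_query(edup):
    # Evaluating the queryset once keeps the rows in memory, so len() just
    # counts the cached objects instead of asking the database again.
    events = list(edup.event_set.all())
    nb_events = len(events)
    return events, nb_events

The new form code gets the same effect through `edup.get_duplicated()`, whose result is cached on the DuplicatedEvents instance (see the second file below).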
View File

@@ -216,13 +216,17 @@ class DuplicatedEvents(models.Model):
         verbose_name = _("Duplicated events")
         verbose_name_plural = _("Duplicated events")

+    def __init__(self, *args, **kwargs):
+        self.events = None
+        super().__init__(*args, **kwargs)
+
     def nb_duplicated(self):
         return self.event_set.count()

     def get_duplicated(self):
-        return self.event_set.order_by(
-            "created_date"
-        )
+        if self.events is None:
+            self.events = self.event_set.order_by("created_date").all()
+        return self.events

     def get_absolute_url(self):
         return reverse("view_duplicate", kwargs={"pk": self.pk})
@@ -235,7 +239,7 @@ class DuplicatedEvents(models.Model):

     def merge_into(self, other):
         # for all objects associated to this group
-        for e in self.event_set.all():
+        for e in self.get_duplicated():
             # change their group membership
             e.other_versions = other
             # save them
@@ -248,7 +252,7 @@ class DuplicatedEvents(models.Model):
     # as the representative one.
     # if no event is given, the last one (by creation date) is selected.
     def fix(self, event=None):
-        events = self.event_set.all()
+        events = self.get_duplicated()
         if event is None:
             events = events.order_by("-created_date")
         for e in events:
@@ -289,7 +293,7 @@ class DuplicatedEvents(models.Model):

         to_be_fixed = []
         for d in DuplicatedEvents.not_fixed_qs().prefetch_related('event_set'):
-            comp = Event.get_comparison(d.event_set.all())
+            comp = Event.get_comparison(d.get_duplicated())
             similar = len([c for c in comp if not c["similar"]]) == 0
             if similar:
                 to_be_fixed.append(d)
@@ -297,8 +301,11 @@ class DuplicatedEvents(models.Model):
         nb = len(to_be_fixed)
         if nb > 0:
             logger.warning("Removing: " + str(nb) + " similar duplicated")
-            for e in to_be_fixed:
-                logger.warning(" " + e.event_set.first().title)
+            for d in to_be_fixed:
+                if len(d.get_duplicated()) == 0:
+                    logger.warning(" empty")
+                else:
+                    logger.warning(" " + d.get_duplicated()[0].title)

         for s in to_be_fixed:
             s.fix()
@@ -864,7 +871,6 @@ class Event(models.Model):
             self.other_versions is not None
             and self.other_versions.nb_duplicated() == 1
         ):
-            logger.warning("le other est juste dans ", self.other_versions.event_set.all())
             self.other_versions.delete()
             self.other_versions = None
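
For reference, here is the caching pattern introduced in DuplicatedEvents.__init__() and get_duplicated() above, condensed into a standalone sketch with hypothetical Group/Item models standing in for DuplicatedEvents/Event (not the project's code). Each call to `self.event_set.order_by(...)` builds a new queryset that runs its own SELECT when evaluated; memoizing the evaluated queryset on the instance lets merge_into(), fix(), the similarity check and the form share one query per instance.

from django.db import models


class Group(models.Model):
    def __init__(self, *args, **kwargs):
        # Per-instance cache, filled the first time get_items() is called.
        self.items = None
        super().__init__(*args, **kwargs)

    def get_items(self):
        # item_set.order_by(...) returns a fresh queryset on every call, and
        # each fresh queryset hits the database again when it is evaluated.
        # Storing the queryset on the instance makes later calls reuse the
        # same object, whose result cache is filled after the first evaluation.
        if self.items is None:
            self.items = self.item_set.order_by("created_date").all()
        return self.items


class Item(models.Model):
    group = models.ForeignKey(Group, on_delete=models.CASCADE)
    title = models.CharField(max_length=200)
    created_date = models.DateTimeField(auto_now_add=True)

With this in place, `len(group.get_items())` followed by iteration or indexing over `group.get_items()` triggers a single SELECT; the trade-off is that the cache can go stale if related rows change during the instance's lifetime.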