If an import goes wrong, we still create an event so that it can be handled manually

Fix #219
Jean-Marie Favreau 2024-11-27 18:25:10 +01:00
parent d119f1fa45
commit 8ef620c8e1
4 changed files with 35 additions and 6 deletions
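
In short, the default extractor chain now ends with a catch-all extractor that never fails, so a broken import still produces an unpublished draft event to complete by hand. A minimal sketch of that pattern, with simplified names and signatures that are not the project's real API:

    from dataclasses import dataclass

    @dataclass
    class DraftEvent:
        title: str
        url: str
        published: bool = False
        notes: str = ""

    class NotFoundFallback:
        # Terminal extractor: always returns one unpublished placeholder event.
        def extract(self, content, url):
            return [DraftEvent(
                title="untitled event",
                url=url,
                published=False,
                notes="import failed, fill in manually from the source url",
            )]

    def run_chain(extractors, content, url):
        # The first extractor that yields a non-empty list wins; with the
        # fallback placed last, the chain never comes back empty-handed.
        for e in extractors:
            events = e.extract(content, url)
            if events:  # mirrors `events is not None and len(events) > 0`
                return events
        return None

Placed last in the list returned by get_default_extractors, such a fallback only runs when every real extractor has returned nothing.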

View File

@@ -2,7 +2,7 @@ from abc import ABC, abstractmethod
 from datetime import datetime, time, date, timedelta
 import re
 import unicodedata
+from django.utils import timezone
@@ -240,6 +240,28 @@ class Extractor(ABC):
         from .extractor_ggcal_link import GoogleCalendarLinkEventExtractor
         if single_event:
-            return [FacebookEventExtractor(), GoogleCalendarLinkEventExtractor()]
+            return [FacebookEventExtractor(), GoogleCalendarLinkEventExtractor(), EventNotFoundExtractor()]
         else:
-            return [ICALExtractor(), FacebookEventExtractor(), GoogleCalendarLinkEventExtractor()]
+            return [ICALExtractor(), FacebookEventExtractor(), GoogleCalendarLinkEventExtractor(), EventNotFoundExtractor()]
+# A class that only produces a "not found" event
+class EventNotFoundExtractor(Extractor):
+    def extract(
+        self, content, url, url_human=None, default_values=None, published=False
+    ):
+        self.set_header(url)
+        self.clear_events()
+        self.add_event(default_values, "événement sans titre",
+                       None, timezone.now().date(), None,
+                       "l'import a échoué, la saisie doit se faire manuellement à partir de l'url source",
+                       [], [url], published=False, url_human=url)
+        return self.get_structure()
+
+    def clean_url(url):
+        return url

View File

@@ -269,4 +269,5 @@ class TwoStepsExtractorNoPause(TwoStepsExtractor):
         result = super().extract(content, url, url_human, default_values, published, only_future, ignore_404)
         self.downloader.pause = pause
-        return result
+        return result

View File

@@ -1,6 +1,11 @@
 from .downloader import *
 from .extractor import *
+import logging
+logger = logging.getLogger(__name__)
 class URL2Events:
     def __init__(
@@ -29,8 +34,9 @@ class URL2Events:
         else:
             # if the extractor is not defined, use a list of default extractors
             for e in Extractor.get_default_extractors(self.single_event):
+                logger.warning('Extractor::' + type(e).__name__)
                 e.set_downloader(self.downloader)
                 events = e.extract(content, url, url_human, default_values, published)
-                if events is not None:
+                if events is not None and len(events) > 0:
                     return events
         return None

View File

@@ -1490,7 +1490,7 @@ class Event(models.Model):
         imported = Event.objects.bulk_create(to_import)
         # update organisers (m2m relation)
         for i, ti in zip(imported, to_import):
-            if ti.has_pending_organisers():
+            if ti.has_pending_organisers() and ti.pending_organisers is not None:
                 i.organisers.set(ti.pending_organisers)
         nb_updated = Event.objects.bulk_update(
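
A note on this last hunk: Django's bulk_create saves the rows but ignores many-to-many data, so organisers have to be attached to each instance afterwards, and calling .set(None) on an m2m manager raises an error, hence the extra guard. A rough sketch of that two-pass pattern with hypothetical models, not the project's actual schema:

    from django.db import models

    class Organiser(models.Model):  # hypothetical model, for illustration only
        name = models.CharField(max_length=100)

    class Happening(models.Model):  # hypothetical model, for illustration only
        title = models.CharField(max_length=200)
        organisers = models.ManyToManyField(Organiser)

    def bulk_import(drafts, pending_organisers):
        # Pass 1: create the rows; m2m fields cannot be filled by bulk_create.
        created = Happening.objects.bulk_create(drafts)
        # Pass 2: attach organisers instance by instance, skipping None
        # because .set(None) would raise instead of clearing the relation.
        for obj, pending in zip(created, pending_organisers):
            if pending is not None:
                obj.organisers.set(pending)
        return created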