Compare commits

..

10 Commits

Author SHA1 Message Date
de61752726 cleanup 2024-05-19 17:17:15 +02:00
ce5ff2df63 make format 2024-05-19 17:11:00 +02:00
018b7a052f 13 improved file names 2024-05-19 17:09:25 +02:00
663c4cff71 reorganized the order of the date URLs 2024-05-19 17:08:55 +02:00
8bd4fafb17 13 month export 2024-05-19 16:59:13 +02:00
6795a221da 13 week export 2024-05-19 16:30:50 +02:00
1db30afc1a WIP week export - not working 2024-05-12 18:19:35 +02:00
fc3b0f8320 fixed URLs 2024-05-08 11:41:43 +02:00
bd310c7b44 added iCal export for the day view 2024-05-07 11:58:13 +02:00
7b3aca21f4 PoC day view 2024-05-06 17:05:54 +02:00
277 changed files with 3735 additions and 17725 deletions

View File

@@ -20,7 +20,6 @@ make build-dev Build and run dev environment
 make stop-dev Stop dev environment
 make stop-prod Stop prod environment
 make build-prod Build and run prod environment
-make restar-prod Restart prod environment
 make all Show help
 endef
@@ -55,10 +54,6 @@ create-categories:
 	docker exec -it $(BACKEND_APP_NAME) $(SHELL) "-c" \
 	"python3 manage.py runscript create_categories"
-create-reference-locations:
-	docker exec -it $(BACKEND_APP_NAME) $(SHELL) "-c" \
-	"python3 manage.py runscript create_reference_locations"
 build-dev:
 	DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 docker-compose -f docker-compose.yml up --build -d
@@ -80,9 +75,6 @@ stop-prod:
 restart-backend-prod:
 	docker-compose -f docker-compose.prod.yml restart backend
-prod-restart:
-	DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 docker-compose -f docker-compose.prod.yml restart
 all: help
 .PHONY: help lint format test super-user make-migrations migrate build-dev build-prod stop-dev stop-prod all

View File

@@ -15,12 +15,6 @@ The categories can also be populated with a basic set of categories:
 * ```make create-categories```
-The reference locations used for geographic searches can also be populated with the command below, after optionally editing the file [communes.json](./src/scripts/communes.json), which as an example contains all the municipalities retrieved from [public.opendatasoft.com](https://public.opendatasoft.com/explore/dataset/georef-france-commune/export/?flg=fr-fr&disjunctive.reg_name&disjunctive.dep_name&disjunctive.arrdep_name&disjunctive.ze2020_name&disjunctive.bv2022_name&disjunctive.epci_name&disjunctive.ept_name&disjunctive.com_name&disjunctive.ze2010_name&disjunctive.com_is_mountain_area&sort=year&refine.dep_name=Puy-de-D%C3%B4me&location=9,45.51597,3.05969&basemap=jawg.light):
-* ```make create-reference-locations```
 ## Notes for developers
 ### Adding a new *custom* source
@@ -32,13 +26,3 @@ To add a new custom source:
 - add an entry for this source to the ```RecurrentImport.PROCESSOR``` class in ```src/agenda_culturel/models.py``` so that it is offered to users
 - add the corresponding test to the ```run_recurrent_import``` function in ```src/agenda_culturel/celery.py``` so that the right extractor is launched (see the sketch after this diff excerpt)
 - go to the site's administration page and add a recurrent import for this new source
-### Retrieving a production dump on a dev server
-* on the production server:
-  * ```docker exec -i agenda_culturel-backend python3 manage.py dumpdata --natural-foreign --natural-primary --format=json --exclude=admin.logentry --indent=2 > fixtures/postgres-backup-20241101.json``` (note that accounts are left out here and will have to be recreated)
-* on the dev server:
-  * retrieve the JSON dump: ```scp $SERVEUR:$PATH/fixtures/postgres-backup-20241101.json src/fixtures/```
-  * ```scripts/reset-database.sh FIXTURE COMMIT```, where ```FIXTURE``` is the timestamp in the fixture name and ```COMMIT``` is the ID of the git commit matching the one running in production when the fixture was created
-Note that images are not retrieved.
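The three bullet points above describe code changes without showing them. Below is a minimal, self-contained sketch of the dispatch pattern they refer to, modelled on the ```RecurrentImport.PROCESSOR``` / ```run_recurrent_import``` hunks visible further down in this compare. The ```MYNEWSOURCE``` entry, ```MyNewSourceExtractor``` class and ```build_extractor``` helper are hypothetical placeholders, not code from the repository.

```python
# Hedged sketch: how a new custom source slots into the existing dispatch.
# PROCESSOR stands in for RecurrentImport.PROCESSOR (defined in
# src/agenda_culturel/models.py); MyNewSourceExtractor stands in for a
# custom_extractors.<module>.CExtractor class. All names below are placeholders.
from enum import Enum


class PROCESSOR(Enum):
    ICAL = "ical"
    LACOOPE = "lacoope.org"
    MYNEWSOURCE = "my new source"  # the entry the first bullet point adds


class MyNewSourceExtractor:
    """Placeholder for the new source's CExtractor."""

    def extract(self, content, url, url_human):
        return {"header": {"url": url}, "events": []}


def build_extractor(processor):
    # mirrors the elif chain of run_recurrent_import in celery.py:
    # each PROCESSOR value maps to one extractor; unknown values yield None
    if processor == PROCESSOR.ICAL:
        return None  # the real code returns ICALExtractor() here
    elif processor == PROCESSOR.MYNEWSOURCE:
        return MyNewSourceExtractor()
    return None


if __name__ == "__main__":
    print(type(build_extractor(PROCESSOR.MYNEWSOURCE)).__name__)
```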

View File

@@ -5,11 +5,10 @@ WORKDIR /usr/src/app
 RUN --mount=type=cache,target=/var/cache/apt \
     apt-get update && \
-    apt-get install --no-install-recommends -y build-essential libpq-dev gettext chromium-driver gdal-bin fonts-symbola \
+    apt-get install --no-install-recommends -y build-essential libpq-dev gettext chromium-driver \
     && rm -rf /var/lib/apt/lists/*
 COPY src/requirements.txt ./requirements.txt
 RUN --mount=type=cache,target=/root/.cache/pip \

View File

@@ -32,9 +32,5 @@ http {
         error_page 502 /static/html/500.html;
         error_page 503 /static/html/500.html;
-        if ($http_user_agent ~* (Amazonbot|meta-externalagent|ClaudeBot)) {
-            return 444;
-        }
     }
 }

View File

@@ -23,7 +23,7 @@ services:
     command: [ "/bin/bash", "/app/deployment/scripts/wait-db.sh", "/app/deployment/scripts/backend/start.sh" ]
   db:
-    image: postgis/postgis:15-3.4-alpine
+    image: postgres:15.2-alpine
     container_name: "${APP_NAME}-db"
     hostname: "${POSTGRES_HOST:-db}"
    volumes:

View File

@@ -23,7 +23,7 @@ services:
     command: [ "/bin/bash", "/app/deployment/scripts/backend/start.sh" ]
   db:
-    image: postgis/postgis:15-3.4-alpine
+    image: postgres:15.2-alpine
     container_name: "${APP_NAME}-db"
     hostname: "${POSTGRES_HOST:-db}"
    volumes:

View File

@ -1,40 +0,0 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import sys
# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
if __name__ == "__main__":
    u2e = URL2Events(ChromiumHeadlessDownloader(), arachnee.CExtractor())
    url = "https://www.arachnee-concerts.com/wp-admin/admin-ajax.php?action=movies-filter&per_page=9999&date=NaN.NaN.NaN&theatres=Clermont-Fd&cat=&sorting=&list_all_events=&current_page="
    url_human = "https://www.arachnee-concerts.com/agenda-des-concerts/Clermont-Fd/"
    try:
        events = u2e.process(url, url_human, cache = "cache-arachnee.html", default_values = {}, published = True)
        exportfile = "events-arachnee.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))

View File

@ -1,43 +0,0 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import sys
# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
if __name__ == "__main__":
    u2e = URL2Events(ChromiumHeadlessDownloader(), c3c.CExtractor())
    url = "https://billetterie-c3c.clermont-ferrand.fr/"
    url_human = "https://billetterie-c3c.clermont-ferrand.fr/"
    try:
        events = u2e.process(url, url_human, cache = "cache-c3c.html", default_values = {"location": "La Cour des 3 Coquins"}, published = True)
        exportfile = "events-c3c.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))

View File

@@ -28,8 +28,8 @@ from src.agenda_culturel.import_tasks.extractor_facebook import *
 if __name__ == "__main__":
-    u2e = URL2Events(ChromiumHeadlessDownloader(), FacebookEventExtractor())
-    url="https://www.facebook.com/events/s/tour-du-sud-invite-koum/430014373384441/"
+    u2e = URL2Events(ChromiumHeadlessDownloader(), FacebookEventExtractor(single_event=True))
+    url="https://www.facebook.com/events/872781744074648"
     events = u2e.process(url, cache = "fb.html", published = True)

View File

@ -1,43 +0,0 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import sys
# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
if __name__ == "__main__":
    u2e = URL2Events(ChromiumHeadlessDownloader(), fbevents.CExtractor())
    url = "https://www.facebook.com/laJeteeClermont/upcoming_hosted_events"
    url_human = "https://www.facebook.com/laJeteeClermont/upcoming_hosted_events"
    try:
        events = u2e.process(url, url_human, cache = "cache-lajetee-fb.html", default_values = {"location": "La Jetée"}, published = True)
        exportfile = "events-lajetee-fb.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))

View File

@@ -32,7 +32,7 @@ if __name__ == "__main__":
     url = "https://calendar.google.com/calendar/ical/programmation.lesaugustes%40gmail.com/public/basic.ics"
     url_human = "https://www.cafelesaugustes.fr/la-programmation/"
-    events = u2e.process(url, url_human, cache = "cache-augustes.ical", default_values = {"category": "Sans catégorie", "location": "Café lecture les Augustes"}, published = True)
+    events = u2e.process(url, url_human, cache = "cache-augustes.ical", default_values = {"category": "Autre", "location": "Café lecture les Augustes"}, published = True)
     exportfile = "events-augustes.json"
     print("Saving events to file {}".format(exportfile))

View File

@@ -29,8 +29,8 @@ from src.agenda_culturel.import_tasks.custom_extractors import *
 if __name__ == "__main__":
     u2e = URL2Events(SimpleDownloader(), lacomedie.CExtractor())
-    url = "https://lacomediedeclermont.com/saison24-25/wp-admin/admin-ajax.php?action=load_dates_existantes"
-    url_human = "https://lacomediedeclermont.com/saison24-25/"
+    url = "https://lacomediedeclermont.com/saison23-24/wp-admin/admin-ajax.php?action=load_dates_existantes"
+    url_human = "https://lacomediedeclermont.com/saison23-24/"
     try:
         events = u2e.process(url, url_human, cache = "cache-lacomedie.html", default_values = {"location": "La Comédie de Clermont"}, published = True)

View File

@@ -33,7 +33,7 @@ if __name__ == "__main__":
     url_human = "https://www.lacoope.org/concerts-calendrier/"
     try:
-        events = u2e.process(url, url_human, cache = "cache-lacoope.html", default_values = {"category": "Fêtes & Concerts", "location": "La Coopérative"}, published = True)
+        events = u2e.process(url, url_human, cache = "cache-lacoope.html", default_values = {"category": "Concert", "location": "La Coopérative"}, published = True)
         exportfile = "events-lacoope.json"
         print("Saving events to file {}".format(exportfile))

View File

@@ -29,8 +29,8 @@ from src.agenda_culturel.import_tasks.custom_extractors import *
 if __name__ == "__main__":
     u2e = URL2Events(SimpleDownloader(), lapucealoreille.CExtractor())
-    url = "https://www.lapucealoreille63.fr/agenda"
-    url_human = "https://www.lapucealoreille63.fr/agenda"
+    url = "https://www.lapucealoreille63.fr/programmation/"
+    url_human = "https://www.lapucealoreille63.fr/programmation/"
     try:
         events = u2e.process(url, url_human, cache = "cache-lapucealoreille.xml", default_values = {}, published = True)

View File

@ -1,43 +0,0 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import sys
# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
if __name__ == "__main__":
    u2e = URL2Events(SimpleDownloader(), wordpress_mec.CExtractor())
    url = "https://www.cabaretlepoulailler.fr/agenda/tout-lagenda/"
    url_human = "https://www.cabaretlepoulailler.fr/agenda/tout-lagenda/"
    try:
        events = u2e.process(url, url_human, cache = "cache-le-poulailler.html", default_values = {"location": "Le Poulailler"}, published = True)
        exportfile = "events-le-poulailler.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))

View File

@ -1,43 +0,0 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import sys
# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
if __name__ == "__main__":
    u2e = URL2Events(SimpleDownloader(), lerio.CExtractor())
    url = "https://www.cinemalerio.com/evenements/"
    url_human = "https://www.cinemalerio.com/evenements/"
    try:
        events = u2e.process(url, url_human, cache = "cache-le-rio.html", default_values = {"location": "Cinéma le Rio", "category": "Cinéma"}, published = True)
        exportfile = "events-le-roi.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))

View File

@ -1,43 +0,0 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import sys
# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
if __name__ == "__main__":
    u2e = URL2Events(SimpleDownloader(), wordpress_mec.CExtractor())
    url = "https://www.lesvinzelles.com/index.php/programme/"
    url_human = "https://www.lesvinzelles.com/index.php/programme/"
    try:
        events = u2e.process(url, url_human, cache = "cache-les-vinzelles.html", default_values = {"location": "Les Vinzelles"}, published = True)
        exportfile = "events-les-vinzelles.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))

View File

@ -1,92 +0,0 @@
#!/bin/sh
FIXTURE=$1
COMMIT=$2
FORCE=$3
help() {
echo "USAGE: scripts/reset-database.sh [FIXTURE] [COMMIT]"
echo " "
echo "Parameters:"
echo " FIXTURE A timestamp used in fixture name"
echo " COMMIT A commit ID used by git checkout"
echo " "
echo "Example:"
echo " scripts/reset-database.sh 20241110 cb69ece6ca5ba04e94dcc2758f53869c70224592"
}
bold=$(tput bold)
normal=$(tput sgr0)
echobold() {
echo "${bold}$1${normal}"
}
if ! [ -n "$FORCE" ]; then
nginx=`docker ps|grep nginx`
if [ -n "$nginx" ]; then
echo "WARNING: this script is probably run on a production server. Use a third parameter if you really want to run it."
exit 3
fi
fi
if ! [ -n "$FIXTURE" ]; then
echo "No fixture defined. Abort."
help
exit 1
fi
if ! [ -n "$COMMIT" ]; then
echo "No commit version defined. Abort."
help
exit 1
fi
FFILE=fixtures/postgres-backup-$FIXTURE.json
if ! [ -f "src/$FFILE" ]; then
echo "ERROR: missing fixture file ($FFILE)"
exit 2
fi
echo " "
echobold "WARNING: use Ctrl+C to stop the reset process since a 'no' answer cannot be detected."
echo " "
# remove all elements in database
echobold "Flush database"
docker exec -i agenda_culturel-backend python3 manage.py flush
# move back database structure to the original
echobold "Setup database structure to zero"
docker exec -i agenda_culturel-backend python3 manage.py migrate agenda_culturel zero
# reset code depending on a specific commit
echobold "Move back to the desired commit"
git checkout $COMMIT
# change database to reach this specific version
echobold "Setup database stucture according to the selected commit"
docker exec -i agenda_culturel-backend python3 manage.py migrate agenda_culturel
# remove all elements in database
echobold "Flush database"
docker exec -i agenda_culturel-backend python3 manage.py flush --no-input
# import data
echobold "Import data"
docker exec -i agenda_culturel-backend python3 manage.py loaddata --format=json $FFILE
# reset code to uptodate version
echobold "Move back to last commit"
git checkout main
# update database structure
echobold "Update database"
docker exec -i agenda_culturel-backend python3 manage.py migrate agenda_culturel

View File

@@ -3,15 +3,11 @@ from django import forms
 from .models import (
     Event,
     Category,
-    Tag,
     StaticContent,
     DuplicatedEvents,
     BatchImportation,
     RecurrentImport,
     Place,
-    Message,
-    ReferenceLocation,
-    Organisation
 )
 from django_better_admin_arrayfield.admin.mixins import DynamicArrayMixin
 from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget
@@ -19,15 +15,11 @@ from django_better_admin_arrayfield.models.fields import DynamicArrayField
 admin.site.register(Category)
-admin.site.register(Tag)
 admin.site.register(StaticContent)
 admin.site.register(DuplicatedEvents)
 admin.site.register(BatchImportation)
 admin.site.register(RecurrentImport)
 admin.site.register(Place)
-admin.site.register(Message)
-admin.site.register(ReferenceLocation)
-admin.site.register(Organisation)
 class URLWidget(DynamicArrayWidget):

View File

@@ -1,14 +1,8 @@
 from datetime import datetime, timedelta, date, time
 import calendar
-from django.db.models import Q, F
+from django.db.models import Q
 from django.utils import timezone
-from django.utils.translation import gettext_lazy as _
-from django.template.defaultfilters import date as _date
-from django.db.models import CharField
-from django.db.models.functions import Lower
-CharField.register_lookup(Lower)
 import logging
@@ -26,7 +20,7 @@ def daterange(start, end, step=timedelta(1)):
 class DayInCalendar:
-    midnight = time(0, 0, 0)
+    midnight = time(23, 59, 59)
     def __init__(self, d, on_requested_interval=True):
         self.date = d
@@ -35,13 +29,10 @@ class DayInCalendar:
         self.in_past = d < now
         self.today = d == now
-        self.tomorrow = d == now + timedelta(days=+1)
         self.events = []
         self.on_requested_interval = on_requested_interval
         self.events_by_category = {}
-        self.time_intervals = None
-        self.id = d.strftime('%Y-%m-%d')
     def is_in_past(self):
         return self.in_past
@@ -49,9 +40,6 @@
     def is_today(self):
         return self.today
-    def is_tomorrow(self):
-        return self.tomorrow
     def is_ancestor_uuid_event_from_other(self, event):
         for e in self.events:
             if event.is_ancestor_by_uuid(e):
@@ -86,30 +74,15 @@
             self._add_event_internal(event)
     def _add_event_internal(self, event):
-        from .models import Category
-        from copy import deepcopy
-        # copy event
-        local_event = deepcopy(event)
-        # set values
-        if local_event.start_day != self.date:
-            local_event.start_day = self.date
-            local_event.start_time = None
-        if local_event.end_day != self.date:
-            local_event.end_day = None
-            local_event.end_time = None
-        # add event to the day
-        self.events.append(local_event)
-        # add in its category
-        if local_event.category is None:
-            cat = Category.default_name
+        self.events.append(event)
+        if event.category is None:
+            if "" not in self.events_by_category:
+                self.events_by_category[""] = []
+            self.events_by_category[""].append(event)
         else:
-            cat = local_event.category.name
-        if cat not in self.events_by_category:
-            self.events_by_category[cat] = []
-        self.events_by_category[cat].append(local_event)
+            if event.category.name not in self.events_by_category:
+                self.events_by_category[event.category.name] = []
+            self.events_by_category[event.category.name].append(event)
     def filter_events(self):
         self.events.sort(
@@ -117,88 +90,14 @@
             if e.start_time is None
             else e.start_time
         )
-        self.today_night = False
-        if self.is_today():
-            self.today_night = True
-            now = timezone.now()
-            nday = now.date()
-            ntime = now.time()
-            found = False
-            for idx,e in enumerate(self.events):
-                if (nday < e.start_day) or (nday == e.start_day and e.start_time and ntime <= e.start_time):
-                    self.events[idx].is_first_after_now = True
-                    found = True
-                    break
-            if found:
-                self.today_night = False
-    def is_today_after_events(self):
-        return self.is_today() and self.today_night
-    def events_by_category_ordered(self):
-        from .models import Category
-        cats = Category.objects.order_by('position')
-        result = []
-        for c in cats:
-            if c.name in self.events_by_category:
-                result.append((c.name, self.events_by_category[c.name]))
-        return result
-    def build_time_intervals(self, all_day_name, all_day_short_name, interval_names, interval_short_names, interval_markers):
-        self.time_intervals = [IntervalInDay(self.date, i, n[0], n[1]) for i, n in
-            enumerate(zip([all_day_name] + interval_names, [all_day_short_name] + interval_short_names))]
-        for e in self.events:
-            if e.start_time is None:
-                self.time_intervals[0].add_event(e)
-            else:
-                dt = datetime.combine(e.start_day, e.start_time)
-                ok = False
-                for i in range(len(interval_markers)):
-                    if dt < interval_markers[i]:
-                        self.time_intervals[i + 1].add_event(e)
-                        ok = True
-                        break
-                if not ok:
-                    self.time_intervals[-1].add_event(e)
-    def get_time_intervals(self):
-        if self.time_intervals is None:
-            if self.is_today():
-                all_day_name = _('All day today')
-                interval_names = [_('This morning'), _('This noon'), _('This afternoon'), _('This evening')]
-            elif self.is_tomorrow():
-                name = _("Tomorrow")
-                all_day_name = _('All day tomorrow')
-                interval_names = [_('%s morning') % name, _('%s noon') % name, _('%s afternoon') % name, _('%s evening') % name]
-            else:
-                name = _date(self.date, "l")
-                all_day_name = _('All day %s') % name
-                interval_names = [_('%s morning') % name, _('%s noon') % name, _('%s afternoon') % name, _('%s evening') % name]
-            all_day_short_name = _('All day')
-            interval_short_names = [_('Morning'), _('Noon'), _('Afternoon'), _('Evening')]
-            interval_markers = [datetime.combine(self.date, time(h, m)) for h, m in [(11, 30), (13, 0), (18, 0)]]
-            self.build_time_intervals(all_day_name, all_day_short_name, interval_names, interval_short_names, interval_markers)
-        return self.time_intervals
-class IntervalInDay(DayInCalendar):
-    def __init__(self, d, id, name, short_name):
-        self.name = name
-        self.short_name = short_name
-        super().__init__(d)
-        self.id = self.id + '-' + str(id)
 class CalendarList:
-    def __init__(self, firstdate, lastdate, filter=None, exact=False, ignore_dup=None, qs=None):
+    def __init__(self, firstdate, lastdate, filter=None, exact=False):
         self.firstdate = firstdate
         self.lastdate = lastdate
         self.now = date.today()
         self.filter = filter
-        self.ignore_dup = ignore_dup
-        self.qs = qs
         if exact:
             self.c_firstdate = self.firstdate
@@ -209,10 +108,6 @@ class CalendarList:
         # end the last day of the last week
         self.c_lastdate = lastdate + timedelta(days=6 - lastdate.weekday())
-        self.calendar_days = None
-    def build_internal(self):
         # create a list of DayInCalendars
         self.create_calendar_days()
@@ -223,12 +118,6 @@
         for i, c in self.calendar_days.items():
             c.filter_events()
-    def get_calendar_days(self):
-        if self.calendar_days is None:
-            self.build_internal()
-        return self.calendar_days
     def today_in_calendar(self):
         return self.firstdate <= self.now and self.lastdate >= self.now
@@ -237,20 +126,14 @@
     def fill_calendar_days(self):
         if self.filter is None:
-            if self.qs is None:
             from .models import Event
             qs = Event.objects.all()
-            else:
-                qs = self.qs
         else:
             qs = self.filter.qs
-        if self.ignore_dup:
-            qs = qs.exclude(other_versions=self.ignore_dup)
-        startdatetime = timezone.make_aware(datetime.combine(self.c_firstdate, time.min), timezone.get_default_timezone())
-        lastdatetime = timezone.make_aware(datetime.combine(self.c_lastdate, time.max), timezone.get_default_timezone())
-        qs = qs.filter(
+        startdatetime = datetime.combine(self.c_firstdate, time.min)
+        lastdatetime = datetime.combine(self.c_lastdate, time.max)
+        self.events = qs.filter(
             (Q(recurrence_dtend__isnull=True) & Q(recurrence_dtstart__lte=lastdatetime))
             | (
                 Q(recurrence_dtend__isnull=False)
@@ -259,15 +142,7 @@
                     | Q(recurrence_dtend__lt=startdatetime)
                 )
             )
-            | (Q(start_day__lte=self.c_firstdate) & (Q(end_day__isnull=True) | Q(end_day__gte=self.c_firstdate)))
-        ).filter(
-            Q(other_versions__isnull=True) |
-            Q(other_versions__representative=F('pk')) |
-            Q(other_versions__representative__isnull=True)
-        ).order_by("start_time", "title__unaccent__lower")
-        qs = qs.select_related("exact_location").select_related("category").select_related("other_versions").select_related("other_versions__representative")
-        self.events = qs
+        ).order_by("start_time")
         firstdate = datetime.fromordinal(self.c_firstdate.toordinal())
         if firstdate.tzinfo is None or firstdate.tzinfo.utcoffset(firstdate) is None:
@@ -298,31 +173,14 @@
         return hasattr(self, "month")
     def calendar_days_list(self):
-        return list(self.get_calendar_days().values())
+        return list(self.calendar_days.values())
-    def time_intervals_list(self, onlyfirst=False):
-        ds = self.calendar_days_list()
-        result = []
-        for d in ds:
-            tis = d.get_time_intervals()
-            for t in tis:
-                if len(t.events) > 0:
-                    result.append(t)
-                    if onlyfirst:
-                        break
-        return result
-    def time_intervals_list_first(self):
-        return self.time_intervals_list(True)
-    def export_to_ics(self, request):
-        from .models import Event
-        events = [event for day in self.get_calendar_days().values() for event in day.events]
-        return Event.export_to_ics(events, request)
+    def get_events(self):
+        return [event for jour in self.calendar_days_list() for event in jour.events]
 class CalendarMonth(CalendarList):
-    def __init__(self, year, month, filter, qs=None):
+    def __init__(self, year, month, filter):
         self.year = year
         self.month = month
         r = calendar.monthrange(year, month)
@@ -330,7 +188,7 @@
         first = date(year, month, 1)
         last = date(year, month, r[1])
-        super().__init__(first, last, filter, qs)
+        super().__init__(first, last, filter)
     def get_month_name(self):
         return self.firstdate.strftime("%B")
@@ -343,14 +201,14 @@
 class CalendarWeek(CalendarList):
-    def __init__(self, year, week, filter, qs=None):
+    def __init__(self, year, week, filter):
         self.year = year
         self.week = week
         first = date.fromisocalendar(self.year, self.week, 1)
         last = date.fromisocalendar(self.year, self.week, 7)
-        super().__init__(first, last, filter, qs)
+        super().__init__(first, last, filter)
     def next_week(self):
         return self.firstdate + timedelta(days=7)
@@ -360,8 +218,8 @@
 class CalendarDay(CalendarList):
-    def __init__(self, date, filter=None, qs=None):
-        super().__init__(date, date, filter=filter, qs=qs, exact=True)
+    def __init__(self, date, filter=None):
+        super().__init__(date, date, filter, exact=True)
     def get_events(self):
         return self.calendar_days_list()[0].events

View File

@@ -1,15 +1,9 @@
 import os
 import json
-from celery import Celery, Task, chain
+from celery import Celery
 from celery.schedules import crontab
 from celery.utils.log import get_task_logger
-from celery.exceptions import MaxRetriesExceededError
-import time as time_
-from django.conf import settings
-from celery.signals import worker_ready
-from contextlib import contextmanager
 from .import_tasks.downloader import *
 from .import_tasks.extractor import *
@@ -24,8 +18,6 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", f"agenda_culturel.settings.{APP_
 app = Celery("agenda_culturel")
-from django.core.cache import cache
 logger = get_task_logger(__name__)
@@ -38,26 +30,6 @@ app.config_from_object("django.conf:settings", namespace="CELERY")
 # Load task modules from all registered Django apps.
 app.autodiscover_tasks()
-LOCK_EXPIRE = 60 * 10 # Lock expires in 10 minutes
-@contextmanager
-def memcache_chromium_lock(oid):
-    lock_id = "chromium-lock"
-    timeout_at = time_.monotonic() + LOCK_EXPIRE - 3
-    # cache.add fails if the key already exists
-    status = cache.add(lock_id, oid, LOCK_EXPIRE)
-    try:
-        yield status
-    finally:
-        # memcache delete is very slow, but we have to use it to take
-        # advantage of using add() for atomic locking
-        if time_.monotonic() < timeout_at and status:
-            # don't release the lock if we exceeded the timeout
-            # to lessen the chance of releasing an expired lock
-            # owned by someone else
-            # also don't release the lock if we didn't acquire it
-            cache.delete(lock_id)
 def close_import_task(taskid, success, error_message, importer):
     from agenda_culturel.models import BatchImportation
@@ -97,35 +69,35 @@ def import_events_from_json(self, json):
     # finally, close task
     close_import_task(self.request.id, success, error_message, importer)
-    """except Exception as e:
-        logger.error(e)
-        close_import_task(self.request.id, False, e, importer)"""
-class ChromiumTask(Task):
-    _chm = None
-    @property
-    def chromiumDownloader(self):
-        if self._chm is None:
-            self._chm = ChromiumHeadlessDownloader()
-        return self._chm
-def run_recurrent_import_internal(rimport, downloader, req_id):
+@app.task(bind=True)
+def run_recurrent_import(self, pk):
     from agenda_culturel.models import RecurrentImport, BatchImportation
     from .db_importer import DBImporterEvents
-    logger.info("Run recurrent import: {}".format(req_id))
+    logger.info("Run recurrent import: {}".format(self.request.id))
+    # get the recurrent import
+    rimport = RecurrentImport.objects.get(pk=pk)
     # create a batch importation
-    importation = BatchImportation(recurrentImport=rimport, celery_id=req_id)
+    importation = BatchImportation(recurrentImport=rimport, celery_id=self.request.id)
     # save batch importation
     importation.save()
     # create an importer
-    importer = DBImporterEvents(req_id)
+    importer = DBImporterEvents(self.request.id)
+    # prepare downloading and extracting processes
+    downloader = (
+        SimpleDownloader()
+        if rimport.downloader == RecurrentImport.DOWNLOADER.SIMPLE
+        else ChromiumHeadlessDownloader()
+    )
     if rimport.processor == RecurrentImport.PROCESSOR.ICAL:
         extractor = ICALExtractor()
     elif rimport.processor == RecurrentImport.PROCESSOR.ICALNOBUSY:
@@ -140,16 +112,6 @@ def run_recurrent_import_internal(rimport, downloader, req_id):
         extractor = lefotomat.CExtractor()
     elif rimport.processor == RecurrentImport.PROCESSOR.LAPUCEALOREILLE:
         extractor = lapucealoreille.CExtractor()
-    elif rimport.processor == RecurrentImport.PROCESSOR.MECWORDPRESS:
-        extractor = wordpress_mec.CExtractor()
-    elif rimport.processor == RecurrentImport.PROCESSOR.FBEVENTS:
-        extractor = fbevents.CExtractor()
-    elif rimport.processor == RecurrentImport.PROCESSOR.C3C:
-        extractor = c3c.CExtractor()
-    elif rimport.processor == RecurrentImport.PROCESSOR.ARACHNEE:
-        extractor = arachnee.CExtractor()
-    elif rimport.processor == RecurrentImport.PROCESSOR.LERIO:
-        extractor = lerio.CExtractor()
     else:
         extractor = None
@@ -165,14 +127,13 @@ def run_recurrent_import_internal(rimport, downloader, req_id):
     location = rimport.defaultLocation
     tags = rimport.defaultTags
     published = rimport.defaultPublished
-    organisers = [] if rimport.defaultOrganiser is None else [rimport.defaultOrganiser.pk]
     try:
         # get events from website
         events = u2e.process(
             url,
             browsable_url,
-            default_values={"category": category, "location": location, "tags": tags, "organisers": organisers},
+            default_values={"category": category, "location": location, "tags": tags},
             published=published,
         )
@@ -183,203 +144,47 @@ def run_recurrent_import_internal(rimport, downloader, req_id):
         success, error_message = importer.import_events(json_events)
         # finally, close task
-        close_import_task(req_id, success, error_message, importer)
+        close_import_task(self.request.id, success, error_message, importer)
     except Exception as e:
-        logger.error(e)
-        close_import_task(req_id, False, e, importer)
-@app.task(base=ChromiumTask, bind=True)
-def run_recurrent_import(self, pklist):
-    from agenda_culturel.models import RecurrentImport
-    if isinstance(pklist, list):
-        pk = pklist[0]
-        is_list = True
-    else:
-        is_list = False
-        pk = pklist
-    # get the recurrent import
-    rimport = RecurrentImport.objects.get(pk=pk)
-    # prepare downloading and extracting processes
-    if rimport.downloader == RecurrentImport.DOWNLOADER.SIMPLE:
-        downloader = SimpleDownloader()
-    elif rimport.downloader == RecurrentImport.DOWNLOADER.CHROMIUMHEADLESS:
-        downloader = self.chromiumDownloader
-        downloader.pause = False
-    else:
-        downloader = self.chromiumDownloader
-        downloader.pause = True
-    # only one thread using Chromium can run at a time,
-    # to prevent from errors (including strange Facebook errors)
-    if rimport.downloader in [RecurrentImport.DOWNLOADER.CHROMIUMHEADLESS, RecurrentImport.DOWNLOADER.CHROMIUMHEADLESSPAUSE]:
-        with memcache_chromium_lock(self.app.oid) as acquired:
-            if acquired:
-                run_recurrent_import_internal(rimport, downloader, self.request.id)
-                return pklist[1:] if is_list else True
-    else:
-        run_recurrent_import_internal(rimport, downloader, self.request.id)
-        return pklist[1:] if is_list else True
-    try:
-        # if chromium is locked, we wait before retrying
-        raise self.retry(countdown=120)
-    except MaxRetriesExceededError as e:
         logger.error(e)
         close_import_task(self.request.id, False, e, importer)
-        return pklist[1:] if is_list else False
-def run_recurrent_imports_from_list(pklist):
-    tasks = chain(run_recurrent_import.s(pklist) if i == 0 else run_recurrent_import.s() for i in range(len(pklist)))
-    tasks.delay()
 @app.task(bind=True)
 def daily_imports(self):
     from agenda_culturel.models import RecurrentImport
-    logger.info("Everyday imports")
+    logger.info("Imports quotidiens")
     imports = RecurrentImport.objects.filter(
         recurrence=RecurrentImport.RECURRENCE.DAILY
-    ).order_by("pk")
-    run_recurrent_imports_from_list([imp.pk for imp in imports])
+    )
+    for imp in imports:
+        run_recurrent_import.delay(imp.pk)
-SCREENSHOT_FILE = settings.MEDIA_ROOT + '/screenshot.png'
-@app.task(bind=True)
-def screenshot(self):
-    downloader = ChromiumHeadlessDownloader(noimage=False)
-    downloader.screenshot("https://pommesdelune.fr", SCREENSHOT_FILE)
-@worker_ready.connect
-def at_start(sender, **k):
-    if not os.path.isfile(SCREENSHOT_FILE):
-        logger.info("Init screenshot file")
-        with sender.app.connection() as conn:
-            sender.app.send_task('agenda_culturel.celery.screenshot', None, connection=conn)
-    else:
-        logger.info("Screenshot file already exists")
 @app.task(bind=True)
 def run_all_recurrent_imports(self):
     from agenda_culturel.models import RecurrentImport
-    logger.info("Run all imports")
-    imports = RecurrentImport.objects.all().order_by("pk")
-    run_recurrent_imports_from_list([imp.pk for imp in imports])
+    logger.info("Imports complets")
+    imports = RecurrentImport.objects.all()
+    for imp in imports:
+        run_recurrent_import.delay(imp.pk)
-@app.task(bind=True)
-def run_all_recurrent_imports_failed(self):
-    from agenda_culturel.models import RecurrentImport, BatchImportation
-    logger.info("Run only failed imports")
-    imports = RecurrentImport.objects.all().order_by("pk")
-    run_recurrent_imports_from_list([imp.pk for imp in imports if imp.last_import().status == BatchImportation.STATUS.FAILED])
-@app.task(bind=True)
-def run_all_recurrent_imports_canceled(self):
-    from agenda_culturel.models import RecurrentImport, BatchImportation
-    logger.info("Run only canceled imports")
-    imports = RecurrentImport.objects.all().order_by("pk")
-    run_recurrent_imports_from_list([imp.pk for imp in imports if imp.last_import().status == BatchImportation.STATUS.CANCELED])
 @app.task(bind=True)
 def weekly_imports(self):
     from agenda_culturel.models import RecurrentImport
-    logger.info("Weekly imports")
+    logger.info("Imports hebdomadaires")
     imports = RecurrentImport.objects.filter(
         recurrence=RecurrentImport.RECURRENCE.WEEKLY
-    ).order_by("pk")
-    run_recurrent_imports_from_list([imp.pk for imp in imports])
-@app.task(base=ChromiumTask, bind=True)
-def import_events_from_url(self, url, cat, tags, force=False, user_id=None):
-    from .db_importer import DBImporterEvents
-    from agenda_culturel.models import RecurrentImport, BatchImportation
-    from agenda_culturel.models import Event, Category
-    with memcache_chromium_lock(self.app.oid) as acquired:
-        if acquired:
-            logger.info("URL import: {}".format(self.request.id))
-            # clean url
-            url = Extractor.clean_url(url)
-            # we check if the url is known
-            existing = None if force else Event.objects.filter(uuids__contains=[url])
-            # if it's unknown
-            if force or len(existing) == 0:
-                # create an importer
-                importer = DBImporterEvents(self.request.id)
-                # create a batch importation
-                importation = BatchImportation(url_source=url, celery_id=self.request.id)
-                # save batch importation
-                importation.save()
-                try:
-                    ## create loader
-                    u2e = URL2Events(ChromiumHeadlessDownloader(), single_event=True)
-                    # set default values
-                    values = {}
-                    if cat is not None:
-                        values = {"category": cat, "tags": tags}
-                    # get event
-                    events = u2e.process(
-                        url, published=False, default_values=values
-                    )
-                    if events:
-                        # convert it to json
-                        json_events = json.dumps(events, default=str)
-                        # import events (from json)
-                        success, error_message = importer.import_events(json_events, user_id)
-                        # finally, close task
-                        close_import_task(self.request.id, success, error_message, importer)
-                    else:
-                        close_import_task(self.request.id, False, "Cannot find any event", importer)
-                except Exception as e:
-                    logger.error(e)
-                    close_import_task(self.request.id, False, e, importer)
-            return
-    # if chromium is locked, we wait 30 seconds before retrying
-    raise self.retry(countdown=30)
-@app.task(base=ChromiumTask, bind=True)
-def import_events_from_urls(self, urls_cat_tags, user_id=None):
-    for ucat in urls_cat_tags:
-        if ucat is not None:
-            url = ucat[0]
-            cat = ucat[1]
-            tags = ucat[2]
-            import_events_from_url.delay(url, cat, tags, user_id=user_id)
+    )
+    for imp in imports:
+        run_recurrent_import.delay(imp.pk)
 app.conf.beat_schedule = {
@@ -388,10 +193,6 @@ app.conf.beat_schedule = {
         # Daily imports at 3:14 a.m.
         "schedule": crontab(hour=3, minute=14),
     },
-    "daily_screenshot": {
-        "task": "agenda_culturel.celery.screenshot",
-        "schedule": crontab(hour=3, minute=3),
-    },
     "weekly_imports": {
         "task": "agenda_culturel.celery.weekly_imports",
         # Daily imports on Mondays at 2:22 a.m.

View File

@@ -11,7 +11,6 @@ class DBImporterEvents:
     def __init__(self, celery_id):
         self.celery_id = celery_id
         self.error_message = ""
-        self.user_id = None
         self.init_result_properties()
         self.today = timezone.now().date().isoformat()
@@ -35,19 +34,15 @@
     def get_nb_removed_events(self):
         return self.nb_removed
-    def import_events(self, json_structure, user_id=None):
+    def import_events(self, json_structure):
         print(json_structure)
         self.init_result_properties()
-        self.user_id = user_id
         try:
             structure = json.loads(json_structure)
         except:
             return (False, "JSON file is not correctly structured")
-        if len(structure) == 0:
-            return (True, "")
         if "header" not in structure:
             return (False, "JSON is not correctly structured: missing header")
         if "events" not in structure:
@@ -97,7 +92,7 @@
     def save_imported(self):
         self.db_event_objects, self.nb_updated, self.nb_removed = Event.import_events(
-            self.event_objects, remove_missing_from_source=self.url, user_id=self.user_id
+            self.event_objects, remove_missing_from_source=self.url
         )
     def is_valid_event_structure(self, event):

View File

@ -1,505 +0,0 @@
import django_filters
from django.utils.translation import gettext_lazy as _
from django import forms
from django.contrib.postgres.search import SearchQuery, SearchHeadline
from django.db.models import Count, Q
from django.http import QueryDict
from django.contrib.gis.measure import D
from django.forms import (
ModelForm,
ValidationError,
TextInput,
Form,
URLField,
MultipleHiddenInput,
Textarea,
CharField,
ChoiceField,
RadioSelect,
MultipleChoiceField,
BooleanField,
HiddenInput,
ModelChoiceField,
)
from .forms import (
URLSubmissionForm,
EventForm,
BatchImportationForm,
FixDuplicates,
SelectEventInList,
MergeDuplicates,
RecurrentImportForm,
CategorisationRuleImportForm,
CategorisationForm,
EventAddPlaceForm,
PlaceForm,
)
from .models import (
ReferenceLocation,
RecurrentImport,
Tag,
Event,
Category,
Message,
DuplicatedEvents
)
class EventFilter(django_filters.FilterSet):
RECURRENT_CHOICES = [
("remove_recurrent", "Masquer les événements récurrents"),
("only_recurrent", "Montrer uniquement les événements récurrents"),
]
DISTANCE_CHOICES = [5, 10, 15, 30]
position = django_filters.ModelChoiceFilter(
label="À proximité de",
method="no_filter",
empty_label=_("Select a location"),
queryset=ReferenceLocation.objects.all().order_by("-main", "name__unaccent")
)
radius = django_filters.ChoiceFilter(
label="Dans un rayon de",
method="no_filter",
choices=[(x, str(x) + " km") for x in DISTANCE_CHOICES],
null_label=None,
empty_label=None
)
exclude_tags = django_filters.MultipleChoiceFilter(
label="Exclure les étiquettes",
choices=[],
lookup_expr="icontains",
field_name="tags",
exclude=True,
widget=forms.SelectMultiple,
)
tags = django_filters.MultipleChoiceFilter(
label="Inclure les étiquettes",
choices=[],
lookup_expr="icontains",
conjoined=True,
field_name="tags",
widget=forms.SelectMultiple,
)
recurrences = django_filters.ChoiceFilter(
label="Inclure la récurrence",
choices=RECURRENT_CHOICES,
method="filter_recurrences",
)
category = django_filters.ModelMultipleChoiceFilter(
label="Filtrer par catégories",
field_name="category__id",
to_field_name="id",
queryset=Category.objects.all(),
widget=MultipleHiddenInput,
)
status = django_filters.MultipleChoiceFilter(
label="Filtrer par status",
choices=Event.STATUS.choices,
field_name="status",
widget=forms.CheckboxSelectMultiple,
)
class Meta:
model = Event
fields = ["category", "tags", "exclude_tags", "status", "recurrences"]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not kwargs["request"].user.is_authenticated:
self.form.fields.pop("status")
self.form.fields["exclude_tags"].choices = Tag.get_tag_groups(exclude=True, nb_suggestions=0)
self.form.fields["tags"].choices = Tag.get_tag_groups(include=True)
def filter_recurrences(self, queryset, name, value):
# construct the full lookup expression
lookup = "__".join([name, "isnull"])
return queryset.filter(**{lookup: value == "remove_recurrent"})
def no_filter(self, queryset, name, value):
return queryset
@property
def qs(self):
parent = super().qs
if self.get_cleaned_data("position") is None or self.get_cleaned_data("radius") is None:
return parent
d = self.get_cleaned_data("radius")
p = self.get_cleaned_data("position")
if not isinstance(d, str) or not isinstance(p, ReferenceLocation):
return parent
p = p.location
return parent.exclude(exact_location=False).filter(exact_location__location__distance_lt=(p, D(km=d)))
def get_url(self):
if isinstance(self.form.data, QueryDict):
return self.form.data.urlencode()
else:
return ""
def get_full_url(self):
return self.request.get_full_path()
def get_url_remove_categories(self, catpks, full_path = None):
if full_path is None:
full_path = self.request.get_full_path()
result = full_path
for catpk in catpks:
result = result.replace('category=' + str(catpk), '')
result = result.replace('?&', '?')
result = result.replace('&&', '&')
return result
def get_url_add_categories(self, catpks, full_path = None):
if full_path is None:
full_path = self.request.get_full_path()
result = full_path
for catpk in catpks:
result = result + ('&' if '?' in full_path else '?') + 'category=' + str(catpk)
return result
def get_url_without_filters_only_cats(self):
return self.get_url_without_filters(True)
def get_url_without_filters(self, only_categories=False):
if only_categories:
# on repart d'une url sans option
result = self.request.get_full_path().split("?")[0]
# on ajoute toutes les catégories
result = self.get_url_add_categories([c.pk for c in self.get_categories()], result)
else:
# on supprime toutes les catégories
result = self.get_url_remove_categories([c.pk for c in self.get_categories()])
return result
def get_cleaned_data(self, name):
try:
return self.form.cleaned_data[name]
except AttributeError:
return {}
except KeyError:
return {}
def get_categories(self):
return self.get_cleaned_data("category")
def has_category(self):
return "category" in self.form.cleaned_data and len(self.get_cleaned_data("category")) > 0
def get_tags(self):
return self.get_cleaned_data("tags")
def get_exclude_tags(self):
return self.get_cleaned_data("exclude_tags")
def get_status(self):
return self.get_cleaned_data("status")
def get_position(self):
return self.get_cleaned_data("position")
def get_radius(self):
return self.get_cleaned_data("radius")
def to_str(self, prefix=''):
self.form.full_clean()
result = ' '.join([c.name for c in self.get_categories()] + [t for t in self.get_tags()] + ["~" + t for t in self.get_exclude_tags()] + [str(self.get_position()), str(self.get_radius())])
if len(result) > 0:
result = prefix + result
return result
def get_status_names(self):
if "status" in self.form.cleaned_data:
return [
dict(Event.STATUS.choices)[s] for s in self.get_cleaned_data("status")
]
else:
return []
def get_recurrence_filtering(self):
if "recurrences" in self.form.cleaned_data:
d = dict(self.RECURRENT_CHOICES)
v = self.form.cleaned_data["recurrences"]
if v in d:
return d[v]
else:
return ""
else:
return ""
def is_resetable(self, only_categories=False):
if only_categories:
return len(self.get_cleaned_data("category")) != 0
else:
if self.request.user.is_authenticated:
if (
len(self.get_cleaned_data("status")) != 1
or
self.get_cleaned_data("status")[0] != Event.STATUS.PUBLISHED
):
return True
else:
if (
len(self.get_cleaned_data("status")) != 0
):
return True
return (
len(self.get_cleaned_data("tags")) != 0
or len(self.get_cleaned_data("exclude_tags")) != 0
or len(self.get_cleaned_data("recurrences")) != 0
or ((not self.get_cleaned_data("position") is None) and (not self.get_cleaned_data("radius") is None))
)
def is_active(self, only_categories=False):
if only_categories:
return len(self.get_cleaned_data("category")) != 0
else:
return (
len(self.get_cleaned_data("status")) != 0
or len(self.get_cleaned_data("tags")) != 0
or len(self.get_cleaned_data("exclude_tags")) != 0
or len(self.get_cleaned_data("recurrences")) != 0
or ((not self.get_cleaned_data("position") is None) and (not self.get_cleaned_data("radius") is None))
)
def is_selected(self, cat):
return "category" in self.form.cleaned_data and cat in self.form.cleaned_data["category"]
def is_selected_tag(self, tag):
return "tags" in self.form.cleaned_data and tag in self.form.cleaned_data["tags"]
def get_url_add_tag(self, tag):
full_path = self.request.get_full_path()
result = full_path + ('&' if '?' in full_path else '?') + 'tags=' + str(tag)
return result
def tag_exists(self, tag):
return tag in [t[0] for g in self.form.fields["tags"].choices for t in g[1]]
def set_default_values(request):
if request.user.is_authenticated:
if request.GET.get('status', None) == None:
tempdict = request.GET.copy()
tempdict['status'] = 'published'
request.GET = tempdict
return request
return request
def get_position_radius(self):
if self.get_cleaned_data("position") is None or self.get_cleaned_data("radius") is None:
return ""
else:
return str(self.get_cleaned_data("position")) + ' (' + str(self.get_cleaned_data("radius")) + ' km)'
def is_filtered_by_position_radius(self):
return not self.get_cleaned_data("position") is None and not self.get_cleaned_data("radius") is None
def get_url_add_suggested_position(self, location):
result = self.request.get_full_path()
return result + ('&' if '?' in result else '?') + 'position=' + str(location.pk) + "&radius=" + str(location.suggested_distance)
class EventFilterAdmin(django_filters.FilterSet):
status = django_filters.MultipleChoiceFilter(
choices=Event.STATUS.choices, widget=forms.CheckboxSelectMultiple
)
representative = django_filters.MultipleChoiceFilter(
label=_("Representative version"),
choices=[(True, _("Yes")), (False, _("Non"))],
method="filter_by_representative",
widget=forms.CheckboxSelectMultiple)
import_sources = django_filters.ModelChoiceFilter(
label=_("Imported from"),
method="filter_by_source",
queryset=RecurrentImport.objects.all().order_by("name__unaccent")
)
def filter_by_source(self, queryset, name, value):
src = RecurrentImport.objects.get(pk=value.pk).source
return queryset.filter(import_sources__contains=[src])
def filter_by_representative(self, queryset, name, value):
if value is None or len(value) != 1:
return queryset
else:
q = (Q(other_versions__isnull=True) |
Q(other_versions__representative=F('pk')) |
Q(other_versions__representative__isnull=True))
if value[0] == True:
return queryset.filter(q)
else:
return queryset.exclude(q)
class Meta:
model = Event
fields = ["status"]
class MessagesFilterAdmin(django_filters.FilterSet):
closed = django_filters.MultipleChoiceFilter(
label="Status",
choices=((True, _("Closed")), (False, _("Open"))),
widget=forms.CheckboxSelectMultiple,
)
spam = django_filters.MultipleChoiceFilter(
label="Spam",
choices=((True, _("Spam")), (False, _("Non spam"))),
widget=forms.CheckboxSelectMultiple,
)
class Meta:
model = Message
fields = ["closed", "spam"]
class SimpleSearchEventFilter(django_filters.FilterSet):
q = django_filters.CharFilter(method="custom_filter",
label=_("Search"),
widget=forms.TextInput(attrs={"type": "search"})
)
status = django_filters.MultipleChoiceFilter(
label="Filtrer par status",
choices=Event.STATUS.choices,
field_name="status",
widget=forms.CheckboxSelectMultiple,
)
def custom_filter(self, queryset, name, value):
search_query = SearchQuery(value, config="french")
qs = queryset.filter(
Q(title__icontains=value)
| Q(category__name__icontains=value)
| Q(tags__icontains=[value])
| Q(exact_location__name__icontains=value)
| Q(description__icontains=value)
)
for f in ["title", "category__name", "exact_location__name", "description"]:
params = {
f
+ "_hl": SearchHeadline(
f,
search_query,
start_sel='<span class="highlight">',
stop_sel="</span>",
config="french",
)
}
qs = qs.annotate(**params)
return qs
class Meta:
model = Event
fields = ["q"]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not kwargs["request"].user.is_authenticated:
self.form.fields.pop("status")
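The SearchHeadline annotations built in custom_filter add a <field>_hl attribute to every result, holding the field text with the matched terms wrapped in <span class="highlight">. A short sketch of how those annotations could be consumed; the view name is an assumption, only the filter and annotation names come from the code above:

# Sketch: consuming the *_hl annotations produced by SimpleSearchEventFilter
# inside a view (the view name is illustrative, not part of the project).
def search_events(request):
    f = SimpleSearchEventFilter(
        request.GET,
        queryset=Event.objects.all(),
        request=request,  # __init__ above hides "status" for anonymous users
    )
    for event in f.qs:
        # title_hl, description_hl, ... hold the field text with the matched
        # terms wrapped in <span class="highlight">; fall back to the plain
        # field when the annotation is empty.
        print(getattr(event, "title_hl", None) or event.title)
    return f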
class SearchEventFilter(django_filters.FilterSet):
tags = django_filters.CharFilter(lookup_expr="icontains")
title = django_filters.CharFilter(method="hl_filter_contains")
location = django_filters.CharFilter(method="hl_filter_contains")
description = django_filters.CharFilter(method="hl_filter_contains")
start_day = django_filters.DateFromToRangeFilter(
widget=django_filters.widgets.RangeWidget(attrs={"type": "date"})
)
status = django_filters.MultipleChoiceFilter(
label="Filtrer par status",
choices=Event.STATUS.choices,
field_name="status",
widget=forms.CheckboxSelectMultiple,
)
o = django_filters.OrderingFilter(
# tuple-mapping retains order
fields=(
("title", "title"),
("description", "description"),
("start_day", "start_day"),
),
)
def hl_filter_contains(self, queryset, name, value):
# first check if it contains
filter_contains = {name + "__contains": value}
queryset = queryset.filter(**filter_contains)
# then highlight the result
search_query = SearchQuery(value, config="french")
params = {
name
+ "_hl": SearchHeadline(
name,
search_query,
start_sel='<span class="highlight">',
stop_sel="</span>",
config="french",
)
}
return queryset.annotate(**params)
class Meta:
model = Event
fields = ["title", "location", "description", "category", "tags", "start_day"]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not kwargs["request"].user.is_authenticated:
self.form.fields.pop("status")
class DuplicatedEventsFilter(django_filters.FilterSet):
fixed = django_filters.BooleanFilter(
label="Résolu",
field_name='representative', method="fixed_qs")
class Meta:
model = DuplicatedEvents
fields = []
def fixed_qs(self, queryset, name, value):
return DuplicatedEvents.not_fixed_qs(queryset, value)
class RecurrentImportFilter(django_filters.FilterSet):
name = django_filters.ModelMultipleChoiceFilter(
label="Filtrer par nom",
field_name="name",
queryset=RecurrentImport.objects.all().order_by("name__unaccent")
)
class Meta:
model = RecurrentImport
fields = ["name"]

View File

@ -16,137 +16,29 @@ from django.forms import (
) )
from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget
from .utils import PlaceGuesser
from .models import ( from .models import (
Event, Event,
RecurrentImport, RecurrentImport,
CategorisationRule, CategorisationRule,
ModerationAnswer,
ModerationQuestion,
Place, Place,
Category,
Tag,
Message
) )
from django.conf import settings
from django.core.files import File
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from string import ascii_uppercase as auc from string import ascii_uppercase as auc
from .templatetags.utils_extra import int_to_abc from .templatetags.utils_extra import int_to_abc
from django.utils.safestring import mark_safe from django.utils.safestring import mark_safe
from django.utils.timezone import localtime
from django.utils.formats import localize from django.utils.formats import localize
from .templatetags.event_extra import event_field_verbose_name, field_to_html from .templatetags.event_extra import event_field_verbose_name, field_to_html
import os
import logging import logging
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class GroupFormMixin:
template_name = 'agenda_culturel/forms/div_group.html'
class FieldGroup:
def __init__(self, id, label, display_label=False, maskable=False, default_masked=True):
self.id = id
self.label = label
self.display_label = display_label
self.maskable = maskable
self.default_masked = default_masked
def toggle_field_name(self):
return 'group_' + self.id
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.groups = []
def add_group(self, *args, **kwargs):
self.groups.append(GroupFormMixin.FieldGroup(*args, **kwargs))
if self.groups[-1].maskable:
self.fields[self.groups[-1].toggle_field_name()] = BooleanField(required=False)
self.fields[self.groups[-1].toggle_field_name()].toggle_group = True
def get_fields_in_group(self, g):
return [f for f in self.visible_fields() if not hasattr(f.field, "toggle_group") and hasattr(f.field, "group_id") and f.field.group_id == g.id]
def get_no_group_fields(self):
return [f for f in self.visible_fields() if not hasattr(f.field, "toggle_group") and (not hasattr(f.field, "group_id") or f.field.group_id == None)]
def fields_by_group(self):
return [(g, self.get_fields_in_group(g)) for g in self.groups] + [(GroupFormMixin.FieldGroup("other", _("Other")), self.get_no_group_fields())]
def clean(self):
result = super().clean()
if result:
data = dict(self.data)
# for each masked group, we remove data
for g in self.groups:
if g.maskable and not g.toggle_field_name() in data:
fields = self.get_fields_in_group(g)
for f in fields:
self.cleaned_data[f.name] = None
return result
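A form opts into this grouping by mixing GroupFormMixin in before Form/ModelForm, declaring its groups in __init__ and tagging each field with a group_id, exactly as EventForm and PlaceForm do further down. A reduced sketch under that assumption; the form and field names are illustrative:

# Reduced sketch of using GroupFormMixin (form and field names are illustrative).
from django.forms import Form, CharField, DateField
from django.utils.translation import gettext_lazy as _


class ExampleGroupedForm(GroupFormMixin, Form):
    name = CharField(label=_("Name"))
    start_day = DateField(label=_("Start day"))
    notes = CharField(label=_("Notes"), required=False)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.add_group("main", _("Main fields"))
        self.fields["name"].group_id = "main"
        # A maskable group adds a synthetic BooleanField named group_<id>;
        # when it is left unchecked, clean() blanks the fields of that group.
        self.add_group("schedule", _("Schedule"), maskable=True)
        self.fields["start_day"].group_id = "schedule"
        # Fields without a group_id end up in the trailing "Other" group
        # returned by fields_by_group().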
class TagForm(ModelForm):
required_css_class = 'required'
class Meta:
model = Tag
fields = ["name", "description", "in_included_suggestions", "in_excluded_suggestions", "principal"]
widgets = {
"name": HiddenInput()
}
class TagRenameForm(Form):
required_css_class = 'required'
name = CharField(
label=_('Name of new tag'),
required=True
)
force = BooleanField(
label=_('Force renaming despite the existence of events already using the chosen tag.'),
)
def __init__(self, *args, **kwargs):
force = kwargs.pop("force", False)
name = kwargs.pop("name", None)
super().__init__(*args, **kwargs)
if not (force or (len(args) > 0 and 'force' in args[0])):
del self.fields["force"]
if name is not None and self.fields["name"].initial is None:
self.fields["name"].initial = name
def is_force(self):
return "force" in self.fields and self.cleaned_data["force"] == True
class URLSubmissionForm(Form):
required_css_class = 'required'
class EventSubmissionForm(Form):
url = URLField(max_length=512) url = URLField(max_length=512)
category = ModelChoiceField(
label=_("Category"),
queryset=Category.objects.all().order_by("name"),
initial=None,
required=False,
)
tags = MultipleChoiceField(
label=_("Tags"),
initial=None,
choices=[],
required=False
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields["tags"].choices = Tag.get_tag_groups(all=True)
class DynamicArrayWidgetURLs(DynamicArrayWidget): class DynamicArrayWidgetURLs(DynamicArrayWidget):
@ -158,58 +50,28 @@ class DynamicArrayWidgetTags(DynamicArrayWidget):
class RecurrentImportForm(ModelForm): class RecurrentImportForm(ModelForm):
required_css_class = 'required'
defaultTags = MultipleChoiceField(
label=_("Tags"),
initial=None,
choices=[],
required=False
)
class Meta: class Meta:
model = RecurrentImport model = RecurrentImport
fields = "__all__" fields = "__all__"
widgets = {
def __init__(self, *args, **kwargs): "defaultTags": DynamicArrayWidgetTags(),
super().__init__(*args, **kwargs) }
self.fields["defaultTags"].choices = Tag.get_tag_groups(all=True)
class CategorisationRuleImportForm(ModelForm): class CategorisationRuleImportForm(ModelForm):
required_css_class = 'required'
class Meta: class Meta:
model = CategorisationRule model = CategorisationRule
fields = "__all__" fields = "__all__"
class EventForm(GroupFormMixin, ModelForm): class EventForm(ModelForm):
required_css_class = 'required'
old_local_image = CharField(widget=HiddenInput(), required=False)
simple_cloning = CharField(widget=HiddenInput(), required=False)
tags = MultipleChoiceField(
label=_("Tags"),
initial=None,
choices=[],
required=False
)
class Meta: class Meta:
model = Event model = Event
exclude = [ exclude = [
"possibly_duplicated",
"imported_date", "imported_date",
"modified_date", "modified_date",
"moderated_date", "moderated_date",
"import_sources",
"image",
"moderated_by_user",
"modified_by_user",
"created_by_user",
"imported_by_user"
] ]
widgets = { widgets = {
"start_day": TextInput( "start_day": TextInput(
@ -228,75 +90,17 @@ class EventForm(GroupFormMixin, ModelForm):
), ),
"end_day": TextInput(attrs={"type": "date"}), "end_day": TextInput(attrs={"type": "date"}),
"end_time": TextInput(attrs={"type": "time"}), "end_time": TextInput(attrs={"type": "time"}),
"other_versions": HiddenInput(),
"uuids": MultipleHiddenInput(), "uuids": MultipleHiddenInput(),
"import_sources": MultipleHiddenInput(),
"reference_urls": DynamicArrayWidgetURLs(), "reference_urls": DynamicArrayWidgetURLs(),
"tags": DynamicArrayWidgetTags(),
} }
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
is_authenticated = kwargs.pop("is_authenticated", False) is_authenticated = kwargs.pop("is_authenticated", False)
self.cloning = kwargs.pop("is_cloning", False)
self.simple_cloning = kwargs.pop("is_simple_cloning", False)
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
if not is_authenticated: if not is_authenticated:
del self.fields["status"] del self.fields["status"]
del self.fields["organisers"]
self.fields['category'].queryset = self.fields['category'].queryset.order_by('name')
self.fields['category'].empty_label = None
self.fields['category'].initial = Category.get_default_category()
self.fields['tags'].choices = Tag.get_tag_groups(all=True)
# set groups
self.add_group('main', _('Main fields'))
self.fields['title'].group_id = 'main'
self.add_group('start', _('Start of event'))
self.fields['start_day'].group_id = 'start'
self.fields['start_time'].group_id = 'start'
self.add_group('end', _('End of event'))
self.fields['end_day'].group_id = 'end'
self.fields['end_time'].group_id = 'end'
self.add_group('recurrences',
_('This is a recurring event'),
maskable=True,
default_masked=not (self.instance and
self.instance.recurrences and
self.instance.recurrences.rrules and
len(self.instance.recurrences.rrules) > 0))
self.fields['recurrences'].group_id = 'recurrences'
self.add_group('details', _('Details'))
self.fields['description'].group_id = 'details'
if is_authenticated:
self.fields['organisers'].group_id = 'details'
self.add_group('location', _('Location'))
self.fields['location'].group_id = 'location'
self.fields['exact_location'].group_id = 'location'
self.add_group('illustration', _('Illustration'))
self.fields['local_image'].group_id = 'illustration'
self.fields['image_alt'].group_id = 'illustration'
if is_authenticated:
self.add_group('meta-admin', _('Meta information'))
self.fields['category'].group_id = 'meta-admin'
self.fields['tags'].group_id = 'meta-admin'
self.fields['status'].group_id = 'meta-admin'
else:
self.add_group('meta', _('Meta information'))
self.fields['category'].group_id = 'meta'
self.fields['tags'].group_id = 'meta'
def is_clone_from_url(self):
return self.cloning
def is_simple_clone_from_url(self):
return self.simple_cloning
def clean_end_day(self): def clean_end_day(self):
start_day = self.cleaned_data.get("start_day") start_day = self.cleaned_data.get("start_day")
@ -324,78 +128,8 @@ class EventForm(GroupFormMixin, ModelForm):
return end_time return end_time
def clean(self):
super().clean()
# when cloning an existing event, we need to copy the local image
if self.cleaned_data['local_image'] is None and \
not self.cleaned_data['old_local_image'] is None and \
self.cleaned_data['old_local_image'] != "":
basename = self.cleaned_data['old_local_image']
old = settings.MEDIA_ROOT + "/" + basename
if os.path.isfile(old):
self.cleaned_data['local_image'] = File(name=basename, file=open(old, "rb"))
class MultipleChoiceFieldAcceptAll(MultipleChoiceField):
def validate(self, value):
pass
class EventModerateForm(ModelForm):
required_css_class = 'required'
tags = MultipleChoiceField(
label=_("Tags"),
help_text=_('Select tags from existing ones.'),
required=False
)
new_tags = MultipleChoiceFieldAcceptAll(
label=_("New tags"),
help_text=_('Create new labels (sparingly). Note: by starting your tag with the characters “TW:”, you\'ll create a “trigger warning” tag, and the associated events will be announced as such.'),
widget=DynamicArrayWidget(),
required=False
)
class Meta:
model = Event
fields = [
"status",
"category",
"organisers",
"exact_location",
"tags"
]
widgets = {
"status": RadioSelect
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['category'].queryset = self.fields['category'].queryset.order_by('name')
self.fields['category'].empty_label = None
self.fields['category'].initial = Category.get_default_category()
self.fields['tags'].choices = Tag.get_tag_groups(all=True)
def clean_new_tags(self):
return list(set(self.cleaned_data.get("new_tags")))
def clean(self):
super().clean()
if self.cleaned_data['tags'] is None:
self.cleaned_data['tags'] = []
if not self.cleaned_data.get('new_tags') is None:
self.cleaned_data['tags'] += self.cleaned_data.get('new_tags')
self.cleaned_data['tags'] = list(set(self.cleaned_data['tags']))
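The clean() above simply merges the free-text new_tags into tags and removes duplicates; order is not preserved because of the set() round-trip. A tiny illustration of the resulting behaviour, with made-up tag values taken from elsewhere in this diff:

# Illustration of the tag merging done in EventModerateForm.clean():
# existing tags and newly created ones are concatenated, then de-duplicated.
tags = ["🎵 concert", "💶 gratuit"]
new_tags = ["💶 gratuit", "TW: violence"]
merged = list(set(tags + new_tags))
# merged contains each tag once; note that set() does not keep the order.
assert sorted(merged) == sorted(["🎵 concert", "💶 gratuit", "TW: violence"])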
class BatchImportationForm(Form): class BatchImportationForm(Form):
required_css_class = 'required'
json = CharField( json = CharField(
label="JSON", label="JSON",
widget=Textarea(attrs={"rows": "10"}), widget=Textarea(attrs={"rows": "10"}),
@ -405,64 +139,54 @@ class BatchImportationForm(Form):
class FixDuplicates(Form): class FixDuplicates(Form):
required_css_class = 'required'
action = ChoiceField() action = ChoiceField()
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
edup = kwargs.pop("edup", None) nb_events = kwargs.pop("nb_events", None)
events = edup.get_duplicated()
nb_events = len(events)
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
choices = [] if nb_events == 2:
initial = None choices = [("NotDuplicates", "Ces événements sont différents")]
for i, e in enumerate(events):
if e.status != Event.STATUS.TRASH or e.modified():
msg = ""
if e.local_version():
msg = _(" (locally modified version)")
if e.status != Event.STATUS.TRASH:
initial = "Select-" + str(e.pk)
if e.pure_import():
msg = _(" (synchronized on import version)")
choices += [ choices += [
( (
"Select-" + str(e.pk), "SelectA",
_("Select {} as representative version.").format(auc[i] + msg) "Ces événements sont identiques, on garde A et on met B à la corbeille",
) )
] ]
for i, e in enumerate(events):
if e.status != Event.STATUS.TRASH and e.local_version():
choices += [ choices += [
( (
"Update-" + str(e.pk), "SelectB",
_("Update {} using some fields from other versions (interactive mode).").format(auc[i]) "Ces événements sont identiques, on garde B et on met A à la corbeille",
) )
] ]
extra = ""
if edup.has_local_version():
extra = _(" Warning: a version is already locally modified.")
if initial is None:
initial = "Merge"
choices += [ choices += [
("Merge", _("Create a new version by merging (interactive mode).") + extra) ("Merge", "Ces événements sont identiques, on fusionne à la main")
] ]
for i, e in enumerate(events): else:
if e.status != Event.STATUS.TRASH: choices = [("NotDuplicates", "Ces événements sont tous différents")]
for i in auc[0:nb_events]:
choices += [ choices += [
( (
"Remove-" + str(e.pk), "Remove" + i,
_("Make {} independent.").format(auc[i])) "L'événement "
+ i
+ " n'est pas identique aux autres, on le rend indépendant",
)
]
for i in auc[0:nb_events]:
choices += [
(
"Select" + i,
"Ces événements sont identiques, on garde "
+ i
+ " et on met les autres à la corbeille",
)
]
choices += [
("Merge", "Ces événements sont identiques, on fusionne à la main")
] ]
choices += [("NotDuplicates", _("Make all versions independent."))]
self.fields["action"].choices = choices self.fields["action"].choices = choices
self.fields["action"].initial = initial
def is_action_no_duplicates(self): def is_action_no_duplicates(self):
return self.cleaned_data["action"] == "NotDuplicates" return self.cleaned_data["action"] == "NotDuplicates"
@ -470,61 +194,49 @@ class FixDuplicates(Form):
def is_action_select(self): def is_action_select(self):
return self.cleaned_data["action"].startswith("Select") return self.cleaned_data["action"].startswith("Select")
def is_action_update(self):
return self.cleaned_data["action"].startswith("Update")
def is_action_remove(self): def is_action_remove(self):
return self.cleaned_data["action"].startswith("Remove") return self.cleaned_data["action"].startswith("Remove")
def get_selected_event_code(self): def get_selected_event_code(self):
if self.is_action_select() or self.is_action_remove() or self.is_action_update(): if self.is_action_select() or self.is_action_remove():
return int(self.cleaned_data["action"].split("-")[-1]) return self.cleaned_data["action"][-1]
else: else:
return None return None
def get_selected_event(self, edup): def get_selected_event_id(self):
selected = self.get_selected_event_code() selected = self.get_selected_event_code()
for e in edup.get_duplicated(): if selected is None:
if e.pk == selected:
return e
return None return None
else:
return auc.rfind(selected)
def get_selected_event(self, edup):
selected = self.get_selected_event_id()
return edup.get_duplicated()[selected]
class SelectEventInList(Form): class SelectEventInList(Form):
required_css_class = 'required' event = ChoiceField()
event = ChoiceField(label=_('Event'))
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
events = kwargs.pop("events", None) events = kwargs.pop("events", None)
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.fields["event"].choices = [ self.fields["event"].choices = [
(e.pk, str(e.start_day) + " " + e.title + ((", " + e.location) if e.location else "")) for e in events (e.pk, str(e.start_day) + " " + e.title + ", " + e.location) for e in events
] ]
class MergeDuplicates(Form): class MergeDuplicates(Form):
required_css_class = 'required' checkboxes_fields = ["reference_urls", "description"]
checkboxes_fields = ["reference_urls", "description", "tags"]
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self.duplicates = kwargs.pop("duplicates", None) self.duplicates = kwargs.pop("duplicates", None)
self.event = kwargs.pop("event", None) nb_events = self.duplicates.nb_duplicated()
self.events = list(self.duplicates.get_duplicated())
nb_events = len(self.events)
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
if self.event:
choices = [("event_" + str(self.event.pk), _("Value of the selected version"))] + \
[
("event_" + str(e.pk), _("Value of version {}").format(e.pk)) for e in self.events if e != self.event
]
else:
choices = [ choices = [
("event_" + str(e.pk), _("Value of version {}").format(e.pk)) for e in self.events ("event" + i, "Valeur de l'évenement " + i) for i in auc[0:nb_events]
] ]
for f in self.duplicates.get_items_comparison(): for f in self.duplicates.get_items_comparison():
@ -540,7 +252,7 @@ class MergeDuplicates(Form):
def as_grid(self): def as_grid(self):
result = '<div class="grid">' result = '<div class="grid">'
for i, e in enumerate(self.events): for i, e in enumerate(self.duplicates.get_duplicated()):
result += '<div class="grid entete-badge">' result += '<div class="grid entete-badge">'
result += '<div class="badge-large">' + int_to_abc(i) + "</div>" result += '<div class="badge-large">' + int_to_abc(i) + "</div>"
result += "<ul>" result += "<ul>"
@ -548,17 +260,17 @@ class MergeDuplicates(Form):
'<li><a href="' + e.get_absolute_url() + '">' + e.title + "</a></li>" '<li><a href="' + e.get_absolute_url() + '">' + e.title + "</a></li>"
) )
result += ( result += (
"<li>Création&nbsp;: " + localize(e.created_date) + "</li>" "<li>Création&nbsp;: " + localize(localtime(e.created_date)) + "</li>"
) )
result += ( result += (
"<li>Dernière modification&nbsp;: " "<li>Dernière modification&nbsp;: "
+ localize(e.modified_date) + localize(localtime(e.modified_date))
+ "</li>" + "</li>"
) )
if e.imported_date: if e.imported_date:
result += ( result += (
"<li>Dernière importation&nbsp;: " "<li>Dernière importation&nbsp;: "
+ localize(e.imported_date) + localize(localtime(e.imported_date))
+ "</li>" + "</li>"
) )
result += "</ul>" result += "</ul>"
@ -576,40 +288,23 @@ class MergeDuplicates(Form):
) )
else: else:
result += "<fieldset>" result += "<fieldset>"
if key in self.errors:
result += '<div class="message error"><ul>'
for err in self.errors[key]:
result += "<li>" + err + "</li>"
result += "</ul></div>"
result += '<div class="grid comparison-item">' result += '<div class="grid comparison-item">'
if hasattr(self, "cleaned_data"): if hasattr(self, "cleaned_data"):
checked = self.cleaned_data.get(key) checked = self.cleaned_data.get(key)
else: else:
checked = self.fields[key].initial checked = self.fields[key].initial
i = 0 for i, (v, radio) in enumerate(
if self.event: zip(e["values"], self.fields[e["key"]].choices)
idx = self.events.index(self.event) ):
result += self.comparison_item(key, i, e["values"][idx], self.fields[e["key"]].choices[idx], self.event, checked) result += '<div class="duplicated">'
i += 1 id = "id_" + key + "_" + str(i)
value = "event" + auc[i]
for (v, radio, ev) in zip(e["values"], self.fields[e["key"]].choices, self.events):
if self.event is None or ev != self.event:
result += self.comparison_item(key, i, v, radio, ev, checked)
i += 1
result += "</div></fieldset>"
return mark_safe(result)
def comparison_item(self, key, i, v, radio, ev, checked):
result = '<div class="duplicated">'
id = "id_" + key + "_" + str(ev.pk)
value = "event_" + str(ev.pk)
result += '<input id="' + id + '" name="' + key + '"' result += '<input id="' + id + '" name="' + key + '"'
if key in MergeDuplicates.checkboxes_fields: if key in MergeDuplicates.checkboxes_fields:
result += ' type="checkbox"' result += ' type="checkbox"'
if checked and value in checked: if value in checked:
result += " checked" result += " checked"
else: else:
result += ' type="radio"' result += ' type="radio"'
@ -620,49 +315,73 @@ class MergeDuplicates(Form):
result += ( result += (
'<div class="badge-small">' '<div class="badge-small">'
+ int_to_abc(i) + int_to_abc(i)
+ "</div>") + "</div>"
result += "<div>" + str(field_to_html(v, e["key"]))
if key == "image": + "</div>"
result += str(field_to_html(ev.local_image, "local_image")) + "</div>" )
result += "<div>Lien d'import&nbsp;: " result += "</div></fieldset>"
result += (str(field_to_html(v, key)) + "</div>") return mark_safe(result)
result += "</div>"
return result
def get_selected_events_id(self, key):
def get_selected_events(self, key):
value = self.cleaned_data.get(key) value = self.cleaned_data.get(key)
if key not in self.fields: if key not in self.fields:
return None return None
else: else:
if isinstance(value, list): if isinstance(value, list):
selected = [int(v.split("_")[-1]) for v in value] return [auc.rfind(v[-1]) for v in value]
result = []
for s in selected:
for e in self.duplicates.get_duplicated():
if e.pk == s:
result.append(e)
break
return result
else: else:
selected = int(value.split("_")[-1]) return auc.rfind(value[-1])
for e in self.duplicates.get_duplicated():
if e.pk == selected:
return e
return None
class ModerationQuestionForm(ModelForm):
class Meta:
model = ModerationQuestion
fields = "__all__"
class ModerationAnswerForm(ModelForm):
class Meta:
model = ModerationAnswer
exclude = ["question"]
widgets = {
"adds_tags": DynamicArrayWidgetTags(),
"removes_tags": DynamicArrayWidgetTags(),
}
class ModerateForm(ModelForm):
class Meta:
model = Event
fields = []
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
mqs = ModerationQuestion.objects.all()
mas = ModerationAnswer.objects.all()
for q in mqs:
self.fields[q.complete_id()] = ChoiceField(
widget=RadioSelect,
label=q.question,
choices=[(a.pk, a.html_description()) for a in mas if a.question == q],
required=True,
)
for a in mas:
if a.question == q and a.valid_event(self.instance):
self.fields[q.complete_id()].initial = a.pk
break
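ModerateForm builds its fields dynamically: one radio ChoiceField per ModerationQuestion, whose choices are the answers attached to that question, with the first answer that validates the current event preselected. The same pattern in isolation, a sketch where plain tuples stand in for the model objects:

# Stand-alone sketch of the dynamic-field pattern used by ModerateForm.
# Questions and answers are plain tuples here instead of model instances.
from django.forms import Form, ChoiceField, RadioSelect


class DynamicQuestionForm(Form):
    def __init__(self, questions, *args, **kwargs):
        # questions: iterable of (field_id, label, [(value, text), ...])
        super().__init__(*args, **kwargs)
        for field_id, label, answers in questions:
            self.fields[field_id] = ChoiceField(
                widget=RadioSelect,
                label=label,
                choices=answers,
                required=True,
            )
            if answers:
                # preselect the first proposed answer, as valid_event() does above
                self.fields[field_id].initial = answers[0][0]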
class CategorisationForm(Form): class CategorisationForm(Form):
required_css_class = 'required'
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
if "events" in kwargs: if "events" in kwargs:
events = kwargs.pop("events", None) events = kwargs.pop("events", None)
else: else:
events = [] events = []
for f in args[0]: for f in args[0]:
logger.warning("fff: " + f)
if "_" not in f: if "_" not in f:
if f + "_cat" in args[0]: if f + "_cat" in args[0]:
events.append( events.append(
@ -687,8 +406,6 @@ class CategorisationForm(Form):
class EventAddPlaceForm(Form): class EventAddPlaceForm(Form):
required_css_class = 'required'
place = ModelChoiceField( place = ModelChoiceField(
label=_("Place"), label=_("Place"),
queryset=Place.objects.all().order_by("name"), queryset=Place.objects.all().order_by("name"),
@ -714,20 +431,15 @@ class EventAddPlaceForm(Form):
if self.cleaned_data.get("place"): if self.cleaned_data.get("place"):
place = self.cleaned_data.get("place") place = self.cleaned_data.get("place")
self.instance.exact_location = place self.instance.exact_location = place
self.instance.save(update_fields=["exact_location"]) self.instance.save()
if self.cleaned_data.get("add_alias"): if self.cleaned_data.get("add_alias"):
if place.aliases: place.aliases.append(self.instance.location)
place.aliases.append(self.instance.location.strip())
else:
place.aliases = [self.instance.location.strip()]
place.save() place.save()
return self.instance return self.instance
class PlaceForm(GroupFormMixin, ModelForm): class PlaceForm(ModelForm):
required_css_class = 'required'
apply_to_all = BooleanField( apply_to_all = BooleanField(
initial=True, initial=True,
label=_( label=_(
@ -741,70 +453,13 @@ class PlaceForm(GroupFormMixin, ModelForm):
fields = "__all__" fields = "__all__"
widgets = {"location": TextInput()} widgets = {"location": TextInput()}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.add_group('header', _('Header'))
self.fields['name'].group_id = 'header'
self.add_group('address', _('Address'))
self.fields['address'].group_id = 'address'
self.fields['postcode'].group_id = 'address'
self.fields['city'].group_id = 'address'
self.fields['location'].group_id = 'address'
self.add_group('meta', _('Meta'))
self.fields['aliases'].group_id = 'meta'
self.add_group('information', _('Information'))
self.fields['description'].group_id = 'information'
def as_grid(self): def as_grid(self):
result = ('<div class="grid"><div>' return mark_safe(
'<div class="grid"><div>'
+ super().as_p() + super().as_p()
+ '''</div><div><div class="map-widget"> + '</div><div><div class="map-widget">'
<div id="map_location" style="width: 100%; aspect-ratio: 16/9"></div> + '<div id="map_location" style="width: 100%; aspect-ratio: 16/9"></div><p>Cliquez pour ajuster la position GPS</p></div></div></div>'
<p>Cliquez pour ajuster la position GPS</p></div> )
<input type="checkbox" role="switch" id="lock_position">Verrouiller la position</lock>
<script>
document.getElementById("lock_position").onclick = function() {
const field = document.getElementById("id_location");
if (this.checked)
field.setAttribute("readonly", true);
else
field.removeAttribute("readonly");
}
</script>
</div></div>''')
return mark_safe(result)
def apply(self): def apply(self):
return self.cleaned_data.get("apply_to_all") return self.cleaned_data.get("apply_to_all")
class MessageForm(ModelForm):
class Meta:
model = Message
fields = ["subject", "name", "email", "message", "related_event"]
widgets = {"related_event": HiddenInput(), "user": HiddenInput() }
def __init__(self, *args, **kwargs):
self.event = kwargs.pop("event", False)
self.internal = kwargs.pop("internal", False)
super().__init__(*args, **kwargs)
self.fields['related_event'].required = False
if self.internal:
self.fields.pop("name")
self.fields.pop("email")
class MessageEventForm(ModelForm):
class Meta:
model = Message
fields = ["message"]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields["message"].label = _("Add a comment")

View File

@ -1,113 +0,0 @@
from ..generic_extractors import *
from bs4 import BeautifulSoup
# A class dedicated to get events from Arachnée Concert
# URL: https://www.arachnee-concerts.com/agenda-des-concerts/
class CExtractor(TwoStepsExtractorNoPause):
def __init__(self):
super().__init__()
self.possible_dates = {}
self.theater = None
def extract(
self,
content,
url,
url_human=None,
default_values=None,
published=False,
only_future=True,
ignore_404=True
):
match = re.match(r".*\&theatres=([^&]*)&.*", url)
if match:
self.theater = match[1]
return super().extract(content, url, url_human, default_values, published, only_future, ignore_404)
def build_event_url_list(self, content, infuture_days=180):
soup = BeautifulSoup(content, "html.parser")
containers = soup.select("ul.event_container>li")
if containers:
for c in containers:
d = Extractor.parse_french_date(c.select_one(".date").text)
l = c.select_one(".event_auditory").text
if (self.theater is None or (l.startswith(self.theater))) and d < datetime.date.today() + timedelta(days=infuture_days):
t = Extractor.parse_french_time(c.select_one(".time").text)
e_url = c.select_one(".info a")["href"]
if not e_url in self.possible_dates:
self.possible_dates[e_url] = []
self.possible_dates[e_url].append((str(d) + " " + str(t)))
self.add_event_url(e_url)
def add_event_from_content(
self,
event_content,
event_url,
url_human=None,
default_values=None,
published=False,
):
soup = BeautifulSoup(event_content, "html.parser")
title = ", ".join([x.text for x in [soup.select_one(y) for y in [".page_title", ".artiste-subtitle"]] if x])
image = soup.select_one(".entry-image .image_wrapper img")
if not image is None:
image = image["src"]
descs = soup.select(".entry-content p")
if descs:
description = "\n".join([d.text for d in descs])
else:
description = None
category = soup.select_one(".event_category").text
first_cat = Extractor.remove_accents(category.split(",")[0].lower())
tags = []
if first_cat in ["grand spectacle"]:
category = "Spectacles"
tags.append("💃 danse")
elif first_cat in ["theatre", "humour / one man show"]:
category = "Spectacles"
tags.append("🎭 théâtre")
elif first_cat in ["chanson francaise", "musique du monde", "pop / rock", "rap", "rnb", "raggae", "variete"]:
category = "Fêtes & Concerts"
tags.append("🎵 concert")
elif first_cat in ["comedie musicale", "humour / one man show", "spectacle equestre"]:
category = "Spectacles"
elif first_cat in ["spectacle pour enfant"]:
tags = ["🎈 jeune public"]
category = None
else:
category = None
dates = soup.select("#event_ticket_content>ul>li")
for d in dates:
dt = datetime.datetime.fromisoformat(d.select_one(".date")["content"])
date = dt.date()
time = dt.time()
if str(date) + " " + str(time) in self.possible_dates[event_url]:
location = d.select_one(".event_auditory").text
self.add_event_with_props(
default_values,
event_url,
title,
category,
date,
location,
description,
tags,
recurrences=None,
uuids=[event_url + "?d=" + str(date) + "&t=" + str(time)],
url_human=event_url,
start_time=time,
end_day=None,
end_time=None,
published=published,
image=image,
)
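The extractor above works in two passes: build_event_url_list records, for every event URL, the "date time" strings seen on the agenda page in self.possible_dates, and add_event_from_content only keeps the occurrences whose ISO date and time match one of those strings. A compact illustration of that matching step; the URL and timestamps are made up:

# Illustration of the possible_dates filtering used above (made-up values).
import datetime

possible_dates = {
    "https://www.arachnee-concerts.com/event/example": [
        "2024-06-01 20:30:00",
    ]
}

event_url = "https://www.arachnee-concerts.com/event/example"
dt = datetime.datetime.fromisoformat("2024-06-01T20:30:00")
key = str(dt.date()) + " " + str(dt.time())
# Only dates/times that were listed on the agenda page are imported.
if key in possible_dates[event_url]:
    print("keep occurrence", key)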

View File

@ -1,136 +0,0 @@
from ..generic_extractors import *
from bs4 import BeautifulSoup
from datetime import timedelta
# A class dedicated to get events from La Cour des 3 Coquins
# URL: https://billetterie-c3c.clermont-ferrand.fr//
class CExtractor(TwoStepsExtractor):
nom_lieu = "La Cour des 3 Coquins"
def category_c3c2agenda(self, category):
if not category:
return None
mapping = {"Théâtre": "Spectacles", "Concert": "Fêtes & Concerts", "Projection": "Cinéma"}
mapping_tag = {"Théâtre": "🎭 théâtre", "Concert": "🎵 concert", "Projection": None}
if category in mapping:
return mapping[category], mapping_tag[category]
else:
return None, None
def build_event_url_list(self, content):
soup = BeautifulSoup(content, "html.parser")
events = soup.select("div.fiche-info")
for e in events:
e_url = e.select_one("a.btn.lien_savoir_plus")["href"]
if e_url != "":
e_url = self.url + "/" + e_url
self.add_event_url(e_url)
def add_event_from_content(
self,
event_content,
event_url,
url_human=None,
default_values=None,
published=False,
):
soup = BeautifulSoup(event_content, "html.parser")
title = soup.select_one("h1")
if title:
title = title.text
image = soup.select_one("#media .swiper-slide img")
if image:
image = image["src"]
else:
image = None
description = soup.select_one(".presentation").get_text()
duration = soup.select_one("#criteres .DUREE-V .valeur-critere li")
if not duration is None:
duration = Extractor.parse_french_time(duration.text)
location = self.nom_lieu
categories = []
tags = []
for t in soup.select(".sous-titre span"):
classes = t.get("class")
if classes and len(classes) > 0:
if classes[0].startswith("LIEU-"):
location = t.text
elif classes[0].startswith("THEMATIQUE-"):
cat, tag = self.category_c3c2agenda(t.text)
if cat:
categories.append(cat)
if tag:
tags.append(tag)
# TODO: parse the dates and retrieve the times
dates = [o.get("value") for o in soup.select("select.datedleb_resa option")]
patternCodeSite = re.compile(r'.*gsw_vars\["CODEPRESTATAIRE"\] = "(.*?)";.*', flags=re.DOTALL)
patternCodeObject = re.compile(r'.*gsw_vars\["CODEPRESTATION"\] = "(.*?)";.*', flags=re.DOTALL)
scripts = soup.find_all('script')
codeSite = ""
idObject = ""
for script in scripts:
if(patternCodeSite.match(str(script.string))):
data = patternCodeSite.match(script.string)
codeSite = data.groups()[0]
if(patternCodeObject.match(str(script.string))):
data = patternCodeObject.match(script.string)
idObject = data.groups()[0]
pause = self.downloader.pause
self.downloader.pause = False
# get exact schedule need two supplementary requests
datetimes = []
if codeSite != "" and idObject != "":
for date in dates:
# the first page is required such that the server knows the selected date
page1 = self.downloader.get_content("https://billetterie-c3c.clermont-ferrand.fr/booking?action=searchAjax&cid=2&afficheDirectDispo=" + date + "&type_prestataire=V&cle_fiche=PRESTATION-V-" + codeSite + "-" + idObject + "&datedeb=" + date)
# then we get the form with hours
page2 = self.downloader.get_content("https://billetterie-c3c.clermont-ferrand.fr/booking?action=detailTarifsPrestationAjax&prestation=V-" + codeSite + "-" + idObject)
soup2 = BeautifulSoup(page2, "html.parser")
times = [o.text for o in soup2.select("#quart_en_cours_spec option")]
for t in times:
startdate = Extractor.parse_french_date(date)
starttime = Extractor.parse_french_time(t)
start = datetime.datetime.combine(startdate, starttime)
enddate = None
endtime = None
if duration is not None:
end = start + timedelta(hours=duration.hour, minutes=duration.minute, seconds=duration.second)
enddate = end.date()
endtime = end.time()
datetimes.append((startdate, starttime, enddate, endtime))
self.downloader.pause = pause
category = None
if len(categories) > 0:
category = categories[0]
for dt in datetimes:
self.add_event_with_props(
default_values,
event_url,
title,
category,
dt[0],
location,
description,
tags,
recurrences=None,
uuids=[event_url],
url_human=url_human,
start_time=dt[1],
end_day=dt[2],
end_time=dt[3],
published=published,
image=image,
)
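When a duration is available, the extractor derives the end date and time by adding the duration (parsed as a datetime.time) to the combined start. The same computation in isolation, with example values:

# Stand-alone version of the end-time computation used above (example values).
import datetime
from datetime import timedelta

startdate = datetime.date(2024, 6, 1)
starttime = datetime.time(20, 30)
duration = datetime.time(1, 45)  # 1 h 45 parsed from the page

start = datetime.datetime.combine(startdate, starttime)
end = start + timedelta(
    hours=duration.hour, minutes=duration.minute, seconds=duration.second
)
enddate, endtime = end.date(), end.time()
assert (enddate, endtime) == (datetime.date(2024, 6, 1), datetime.time(22, 15))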

View File

@ -1,71 +0,0 @@
from ..generic_extractors import *
from ..extractor_facebook import FacebookEvent
import json5
from bs4 import BeautifulSoup
import json
import os
from datetime import datetime
import logging
logger = logging.getLogger(__name__)
# A class dedicated to get events from a facebook events page
# such as https://www.facebook.com/laJeteeClermont/events
class CExtractor(TwoStepsExtractor):
def build_event_url_list(self, content):
soup = BeautifulSoup(content, "html.parser")
debug = False
found = False
links = soup.find_all("a")
for link in links:
if link.get("href").startswith('https://www.facebook.com/events/'):
self.add_event_url(link.get('href').split('?')[0])
found = True
if not found and debug:
directory = "errors/"
if not os.path.exists(directory):
os.makedirs(directory)
now = datetime.now()
filename = directory + now.strftime("%Y%m%d_%H%M%S") + ".html"
logger.warning("cannot find any event link in events page. Save content page in " + filename)
with open(filename, "w") as text_file:
text_file.write("<!-- " + self.url + " -->\n\n")
text_file.write(content)
def add_event_from_content(
self,
event_content,
event_url,
url_human=None,
default_values=None,
published=False,
):
fevent = None
soup = BeautifulSoup(event_content, "html.parser")
for json_script in soup.find_all("script", type="application/json"):
json_txt = json_script.get_text()
json_struct = json.loads(json_txt)
fevent = FacebookEvent.find_event_fragment_in_array(
json_struct, fevent
)
if fevent is not None:
for event in fevent.build_events(event_url):
event["published"] = published
self.add_event(default_values, **event)
else:
logger.warning("cannot find any event in page")

View File

@ -1,39 +1,25 @@
from ..generic_extractors import * from ..generic_extractors import *
import json5 import json5
from bs4 import BeautifulSoup
# A class dedicated to get events from La Coopérative de Mai: # A class dedicated to get events from La Coopérative de Mai:
# URL: https://lacomediedeclermont.com/saison23-24/wp-admin/admin-ajax.php?action=load_dates_existantes # URL: https://lacomediedeclermont.com/saison23-24/wp-admin/admin-ajax.php?action=load_dates_existantes
# URL pour les humains: https://lacomediedeclermont.com/saison24-25/ # URL pour les humains: https://lacomediedeclermont.com/saison23-24/
class CExtractor(TwoStepsExtractor): class CExtractor(TwoStepsExtractor):
nom_lieu = "La Comédie de Clermont" nom_lieu = "La Comédie de Clermont"
url_referer = "https://lacomediedeclermont.com/saison24-25/"
def is_to_import_from_url(self, url):
if any(keyword in url for keyword in ["podcast", "on-debriefe", "popcorn", "rencontreautour","rencontre-autour"]):
return False
else:
return True
def category_comedie2agenda(self, category): def category_comedie2agenda(self, category):
mapping = { mapping = {
"Théâtre": "Spectacles", "Théâtre": "Théâtre",
"Danse": "Spectacles", "Danse": "Danse",
"Rencontre": "Rencontres & Débats", "Rencontre": "Autre",
"Sortie de résidence": "Sans catégorie", "Sortie de résidence": "Autre",
"PopCorn Live": "Sans catégorie", "PopCorn Live": "Autre",
}
mapping_tag = {
"Théâtre": "🎭 théâtre",
"Danse": "💃 danse",
"Rencontre": None,
"Sortie de résidence": "sortie de résidence",
"PopCorn Live": None,
} }
if category in mapping: if category in mapping:
return mapping[category], mapping_tag[category] return mapping[category]
else: else:
return None, None return None
def build_event_url_list(self, content): def build_event_url_list(self, content):
dates = json5.loads(content)["data"][0] dates = json5.loads(content)["data"][0]
@ -42,9 +28,7 @@ class CExtractor(TwoStepsExtractor):
for d in list(set(dates)): for d in list(set(dates)):
if not self.only_future or self.now <= datetime.date.fromisoformat(d): if not self.only_future or self.now <= datetime.date.fromisoformat(d):
events = self.downloader.get_content( events = self.downloader.get_content(
url, url, post={"action": "load_evenements_jour", "jour": d}
post={"action": "load_evenements_jour", "jour": d},
referer="https://lacomediedeclermont.com/saison24-25/"
) )
if events: if events:
events = json5.loads(events) events = json5.loads(events)
@ -56,8 +40,6 @@ class CExtractor(TwoStepsExtractor):
e_url = ( e_url = (
e.select("a")[0]["href"] + "#" + d e.select("a")[0]["href"] + "#" + d
) # a "fake" url specific for each day of this show ) # a "fake" url specific for each day of this show
if self.is_to_import_from_url(e_url):
self.add_event_url(e_url) self.add_event_url(e_url)
self.add_event_start_day(e_url, d) self.add_event_start_day(e_url, d)
t = ( t = (
@ -70,20 +52,16 @@ class CExtractor(TwoStepsExtractor):
self.add_event_title(e_url, title) self.add_event_title(e_url, title)
category = e.select("div#lieuevtcal span") category = e.select("div#lieuevtcal span")
if len(category) > 0: if len(category) > 0:
category, tag = self.category_comedie2agenda( category = self.category_comedie2agenda(
category[-1].contents[0] category[-1].contents[0]
) )
if category: if category is not None:
self.add_event_category(e_url, category) self.add_event_category(e_url, category)
if tag:
self.add_event_tag(e_url, tag)
location = ( location = (
e.select("div#lieuevtcal")[0] e.select("div#lieuevtcal")[0]
.contents[-1] .contents[-1]
.split("")[-1] .split("")[-1]
) )
if location.replace(" ", "") == "":
location = self.nom_lieu
self.add_event_location(e_url, location) self.add_event_location(e_url, location)
def add_event_from_content( def add_event_from_content(
@ -97,31 +75,16 @@ class CExtractor(TwoStepsExtractor):
soup = BeautifulSoup(event_content, "html.parser") soup = BeautifulSoup(event_content, "html.parser")
image = soup.select("#imgspec img") image = soup.select("#imgspec img")
if image and len(image) > 0: if image:
image = image[0]["src"] image = image[0]["src"]
else: else:
image = None image = None
description = soup.select("#descspec")[0].get_text().replace("Lire plus...", "")
description = soup.select("#descspec")
if description and len(description) > 0:
description = description[0].get_text().replace("Lire plus...", "")
# optionally append the additional information
d_suite = ""
for d in ["typedesc", "dureedesc", "lieuspec"]:
comp_desc = soup.select("#" + d)
if comp_desc and len(comp_desc) > 0:
d_suite += "\n\n" + comp_desc[0].get_text()
if d_suite != "":
description += "\n\n> Informations complémentaires:" + d_suite
else:
description = None
url_human = event_url url_human = event_url
self.add_event_with_props( self.add_event_with_props(
default_values,
event_url, event_url,
None, None,
None, None,

View File

@ -1,7 +1,7 @@
from ..generic_extractors import * from ..generic_extractors import *
import re import re
import json5 import json5
from bs4 import BeautifulSoup
# A class dedicated to get events from La Coopérative de Mai: # A class dedicated to get events from La Coopérative de Mai:
# URL: https://www.lacoope.org/concerts-calendrier/ # URL: https://www.lacoope.org/concerts-calendrier/
@ -22,7 +22,7 @@ class CExtractor(TwoStepsExtractor):
for e in data["events"]: for e in data["events"]:
self.add_event_url(e["url"]) self.add_event_url(e["url"])
if e["tag"] == "Gratuit": if e["tag"] == "Gratuit":
self.add_event_tag(e["url"], "💶 gratuit") self.add_event_tag(e["url"], "gratuit")
else: else:
raise Exception("Cannot extract events from javascript") raise Exception("Cannot extract events from javascript")
@ -38,7 +38,7 @@ class CExtractor(TwoStepsExtractor):
soup = BeautifulSoup(event_content, "html.parser") soup = BeautifulSoup(event_content, "html.parser")
title = soup.find("h1").contents[0] title = soup.find("h1").contents[0]
category = "Fêtes & Concerts" category = "Concert"
image = soup.find("meta", property="og:image") image = soup.find("meta", property="og:image")
if image: if image:
image = image["content"] image = image["content"]
@ -53,7 +53,7 @@ class CExtractor(TwoStepsExtractor):
if description is None: if description is None:
description = "" description = ""
tags = ["🎵 concert"] tags = []
link_calendar = soup.select('a[href^="https://calendar.google.com/calendar/"]') link_calendar = soup.select('a[href^="https://calendar.google.com/calendar/"]')
if len(link_calendar) == 0: if len(link_calendar) == 0:
@ -68,7 +68,6 @@ class CExtractor(TwoStepsExtractor):
url_human = event_url url_human = event_url
self.add_event_with_props( self.add_event_with_props(
default_values,
event_url, event_url,
title, title,
category, category,

View File

@ -1,6 +1,6 @@
from ..generic_extractors import * from ..generic_extractors import *
import re import re
from bs4 import BeautifulSoup
# A class dedicated to get events from La puce à l'oreille # A class dedicated to get events from La puce à l'oreille
# URL: https://www.lapucealoreille63.fr/ # URL: https://www.lapucealoreille63.fr/
@ -14,7 +14,12 @@ class CExtractor(TwoStepsExtractor):
for e in events: for e in events:
e_url = e.find("a") e_url = e.find("a")
if e_url: if e_url:
self.add_event_url(e_url["href"]) if self.add_event_url(e_url["href"]):
title = e.select("div[data-testid=richTextElement] h1.font_0 span")
if title:
title = title[0].contents[0].get_text().replace("\n", " ")
title = re.sub(" +", " ", title)
self.add_event_title(e_url["href"], title)
def add_event_from_content( def add_event_from_content(
self, self,
@ -26,12 +31,9 @@ class CExtractor(TwoStepsExtractor):
): ):
soup = BeautifulSoup(event_content, "html.parser") soup = BeautifulSoup(event_content, "html.parser")
title = soup.select("h2")[0].get_text() start_day = self.parse_french_date(
soup.find("h2").get_text()
start_day = Extractor.parse_french_date(
soup.select("h2")[1].get_text()
) # pas parfait, mais bordel que ce site est mal construit ) # pas parfait, mais bordel que ce site est mal construit
print(soup.select("h2")[1].get_text())
spans = soup.select("div[data-testid=richTextElement] span") spans = soup.select("div[data-testid=richTextElement] span")
start_time = None start_time = None
@ -41,13 +43,13 @@ class CExtractor(TwoStepsExtractor):
for span in spans: for span in spans:
txt = span.get_text() txt = span.get_text()
if txt.lstrip().startswith("DÉBUT"): if txt.lstrip().startswith("DÉBUT"):
start_time = Extractor.parse_french_time(txt.split(":")[-1]) start_time = self.parse_french_time(txt.split(":")[-1])
end_time = None end_time = None
elif txt.lstrip().startswith("HORAIRES :"): elif txt.lstrip().startswith("HORAIRES :"):
hs = txt.split(":")[-1].split("-") hs = txt.split(":")[-1].split("-")
start_time = Extractor.parse_french_time(hs[0]) start_time = self.parse_french_time(hs[0])
if len(hs) > 1: if len(hs) > 1:
end_time = Extractor.parse_french_time(hs[1]) end_time = self.parse_french_time(hs[1])
else: else:
end_time = None end_time = None
elif txt.lstrip().startswith("LIEU :") and not location: elif txt.lstrip().startswith("LIEU :") and not location:
@ -55,10 +57,10 @@ class CExtractor(TwoStepsExtractor):
if not location: if not location:
location = self.nom_lieu location = self.nom_lieu
end_day = Extractor.guess_end_day(start_day, start_time, end_time) end_day = self.guess_end_day(start_day, start_time, end_time)
url_human = event_url url_human = event_url
tags = ["🎵 concert"] tags = []
image = soup.select("wow-image img[fetchpriority=high]") image = soup.select("wow-image img[fetchpriority=high]")
if image: if image:
@ -76,10 +78,9 @@ class CExtractor(TwoStepsExtractor):
description = None description = None
self.add_event_with_props( self.add_event_with_props(
default_values,
event_url, event_url,
title, None,
"Fêtes & Concerts", "Concert",
start_day, start_day,
location, location,
description, description,

View File

@ -1,5 +1,5 @@
from ..generic_extractors import * from ..generic_extractors import *
from bs4 import BeautifulSoup
# A class dedicated to get events from Le Fotomat' # A class dedicated to get events from Le Fotomat'
# URL: https://www.lefotomat.com/ # URL: https://www.lefotomat.com/
@ -9,12 +9,11 @@ class CExtractor(TwoStepsExtractor):
def category_fotomat2agenda(self, category): def category_fotomat2agenda(self, category):
if not category: if not category:
return None return None
mapping = {"Concerts": "Fêtes & Concerts"} mapping = {"Concerts": "Concert"}
mapping_tag = {"Concerts": "🎵 concert"}
if category in mapping: if category in mapping:
return mapping[category], mapping_tag[category] return mapping[category]
else: else:
return None, None return None
def build_event_url_list(self, content): def build_event_url_list(self, content):
soup = BeautifulSoup(content, "xml") soup = BeautifulSoup(content, "xml")
@ -27,11 +26,9 @@ class CExtractor(TwoStepsExtractor):
title = e.find("title").contents[0] title = e.find("title").contents[0]
self.add_event_title(e_url, title) self.add_event_title(e_url, title)
category, tag = self.category_fotomat2agenda(e.find("category").contents[0]) category = self.category_fotomat2agenda(e.find("category").contents[0])
if category: if category:
self.add_event_category(e_url, category) self.add_event_category(e_url, category)
if tag:
self.add_event_tag(e_url, tag)
def add_event_from_content( def add_event_from_content(
self, self,
@ -48,10 +45,10 @@ class CExtractor(TwoStepsExtractor):
else: else:
image = None image = None
desc = soup.select("head meta[name=description]")[0]["content"] desc = soup.select("head meta[name=description]")[0]["content"]
start_day = Extractor.parse_french_date(desc.split("-")[0]) start_day = self.parse_french_date(desc.split("-")[0])
start_time = Extractor.parse_french_time(desc.split("-")[1]) start_time = self.parse_french_time(desc.split("-")[1])
end_time = Extractor.parse_french_time(desc.split("-")[2]) end_time = self.parse_french_time(desc.split("-")[2])
end_day = Extractor.guess_end_day(start_day, start_time, end_time) end_day = self.guess_end_day(start_day, start_time, end_time)
location = self.nom_lieu location = self.nom_lieu
descriptions = soup.select("div.vce-col-content") descriptions = soup.select("div.vce-col-content")
@ -72,7 +69,6 @@ class CExtractor(TwoStepsExtractor):
url_human = event_url url_human = event_url
self.add_event_with_props( self.add_event_with_props(
default_values,
event_url, event_url,
None, None,
None, None,

View File

@ -1,91 +0,0 @@
from ..generic_extractors import *
from bs4 import BeautifulSoup
from datetime import datetime
# A class dedicated to get events from Cinéma Le Rio (Clermont-Ferrand)
# URL: https://www.cinemalerio.com/evenements/
class CExtractor(TwoStepsExtractorNoPause):
def __init__(self):
super().__init__()
self.possible_dates = {}
self.theater = None
def build_event_url_list(self, content, infuture_days=180):
soup = BeautifulSoup(content, "html.parser")
links = soup.select("td.seance_link a")
if links:
for l in links:
print(l["href"])
self.add_event_url(l["href"])
def to_text_select_one(soup, filter):
e = soup.select_one(filter)
if e is None:
return None
else:
return e.text
def add_event_from_content(
self,
event_content,
event_url,
url_human=None,
default_values=None,
published=False,
):
soup = BeautifulSoup(event_content, "html.parser")
title = soup.select_one("h1").text
alerte_date = CExtractor.to_text_select_one(soup, ".alerte_date")
if alerte_date is None:
return
dh = alerte_date.split("à")
# if date is not found, we skip
if len(dh) != 2:
return
date = Extractor.parse_french_date(dh[0], default_year=datetime.now().year)
time = Extractor.parse_french_time(dh[1])
synopsis = CExtractor.to_text_select_one(soup, ".synopsis_bloc")
special_titre = CExtractor.to_text_select_one(soup, ".alerte_titre")
special = CExtractor.to_text_select_one(soup, ".alerte_text")
# it's not a specific event: we skip it
special_lines = None if special is None else special.split('\n')
if special is None or len(special_lines) == 0 or \
(len(special_lines) == 1 and special_lines[0].strip().startswith('En partenariat')):
return
description = "\n\n".join([x for x in [synopsis, special_titre, special] if not x is None])
image = soup.select_one(".col1 img")
image_alt = None
if not image is None:
image_alt = image["alt"]
image = image["src"]
self.add_event_with_props(
default_values,
event_url,
title,
None,
date,
None,
description,
[],
recurrences=None,
uuids=[event_url],
url_human=event_url,
start_time=time,
end_day=None,
end_time=None,
published=published,
image=image,
image_alt=image_alt
)

View File

@ -1,112 +0,0 @@
from ..generic_extractors import *
from bs4 import BeautifulSoup
# A class dedicated to get events from MEC Wordpress plugin
# URL: https://webnus.net/modern-events-calendar/
class CExtractor(TwoStepsExtractor):
def local2agendaCategory(self, category):
mapping = {
"Musique": "Fêtes & Concerts",
"CONCERT": "Fêtes & Concerts",
"VISITE": "Visites & Expositions",
"Spectacle": "Spectacles",
"Rencontre": "Rencontres & Débats",
"Atelier": "Animations & Ateliers",
"Projection": "Cinéma",
}
mapping_tag = {
"Musique": "concert",
"CONCERT": "concert",
"VISITE": None,
"Spectacle": "rhéâtre",
"Rencontre": None,
"Atelier": "atelier",
"Projection": None,
}
if category in mapping:
return mapping[category], mapping_tag[category]
else:
return None, None
def build_event_url_list(self, content):
soup = BeautifulSoup(content, "xml")
events = soup.select("div.mec-tile-event-content")
for e in events:
link = e.select("h4.mec-event-title a")
if len(link) == 1:
url = link[0]["href"]
title = link[0].get_text()
if self.add_event_url(url):
print(url, title)
self.add_event_title(url, title)
categories = e.select(".mec-label-normal")
if len(categories) == 0:
categories = e.select(".mec-category")
if len(categories) > 0:
category, tag = self.local2agendaCategory(categories[0].get_text())
if category:
self.add_event_category(url, category)
if tag:
self.add_event_category(url, tag)
def add_event_from_content(
self,
event_content,
event_url,
url_human=None,
default_values=None,
published=False,
):
soup = BeautifulSoup(event_content, "xml")
start_day = soup.select(".mec-start-date-label")
if start_day and len(start_day) > 0:
start_day = Extractor.parse_french_date(start_day[0].get_text())
else:
start_day = None
t = soup.select(".mec-single-event-time .mec-events-abbr")
if t:
t = t[0].get_text().split("-")
start_time = Extractor.parse_french_time(t[0])
if len(t) > 1:
end_time = Extractor.parse_french_time(t[1])
else:
end_time = None
else:
start_time = None
end_time = None
image = soup.select(".mec-events-event-image img")
if image:
image = image[0]["src"]
else:
image = None
description = soup.select(".mec-event-content")[0].get_text(separator=" ")
url_human = event_url
self.add_event_with_props(
default_values,
event_url,
None,
None,
start_day,
None,
description,
[],
recurrences=None,
uuids=[event_url],
url_human=url_human,
start_time=start_time,
end_day=None,
end_time=end_time,
published=published,
image=image,
)

View File

@ -1,13 +1,11 @@
from urllib.parse import urlencode from urllib.parse import urlencode
import urllib.request import urllib.request
from urllib.request import Request
import os import os
from selenium import webdriver from selenium import webdriver
from selenium.webdriver.chrome.service import Service from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options from selenium.webdriver.chrome.options import Options
from selenium.common.exceptions import *
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
import time
class Downloader(ABC): class Downloader(ABC):
def __init__(self): def __init__(self):
@ -17,13 +15,13 @@ class Downloader(ABC):
def download(self, url, post=None): def download(self, url, post=None):
pass pass
def get_content(self, url, cache=None, referer=None, post=None): def get_content(self, url, cache=None, post=None):
if cache and os.path.exists(cache): if cache and os.path.exists(cache):
print("Loading cache ({})".format(cache)) print("Loading cache ({})".format(cache))
with open(cache) as f: with open(cache) as f:
content = "\n".join(f.readlines()) content = "\n".join(f.readlines())
else: else:
content = self.download(url, referer=referer, post=post) content = self.download(url, post)
if cache: if cache:
print("Saving cache ({})".format(cache)) print("Saving cache ({})".format(cache))
@ -39,102 +37,38 @@ class SimpleDownloader(Downloader):
def __init__(self): def __init__(self):
super().__init__() super().__init__()
def download(self, url, referer=None, post=None): def download(self, url, post=None):
print("Downloading {} referer: {} post: {}".format(url, referer, post)) print("Downloading {}".format(url))
try:
headers = {
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:126.0) Gecko/20100101 Firefox/126.0",
}
if referer is not None:
headers["Referer"] = referer
req = Request(url, headers=headers)
if post:
post_args = urlencode(post).encode("utf-8")
resource = urllib.request.urlopen(req, post_args)
else:
resource = urllib.request.urlopen(req)
charset = resource.headers.get_content_charset()
if charset:
data = resource.read().decode(charset)
else:
data = resource.read().decode()
return data
try:
if post:
post_args = urlencode(post).encode()
resource = urllib.request.urlopen(url, post_args)
else:
resource = urllib.request.urlopen(url)
data = resource.read().decode(resource.headers.get_content_charset())
return data
except Exception as e: except Exception as e:
print(e) print(e)
return None return None
class ChromiumHeadlessDownloader(Downloader): class ChromiumHeadlessDownloader(Downloader):
def __init__(self, pause=True, noimage=True): def __init__(self):
super().__init__() super().__init__()
self.pause = pause
self.options = Options() self.options = Options()
self.options.add_argument("--headless=new") self.options.add_argument("--headless=new")
self.options.add_argument("--disable-dev-shm-usage") self.options.add_argument("--disable-dev-shm-usage")
self.options.add_argument("--no-sandbox") self.options.add_argument("--no-sandbox")
self.options.add_argument("start-maximized")
self.options.add_argument("enable-automation")
self.options.add_argument("--disable-dev-shm-usage")
self.options.add_argument("--disable-browser-side-navigation")
self.options.add_argument("--disable-gpu")
if noimage:
self.options.add_experimental_option(
"prefs", {
# block image loading
"profile.managed_default_content_settings.images": 2,
}
)
self.service = Service("/usr/bin/chromedriver") self.service = Service("/usr/bin/chromedriver")
self.driver = webdriver.Chrome(service=self.service, options=self.options)
def download(self, url, post=None):
def screenshot(self, url, path_image):
print("Screenshot {}".format(url))
try:
self.driver.get(url)
if self.pause:
time.sleep(2)
self.driver.save_screenshot(path_image)
except:
print(f">> Exception: {URL}")
return False
return True
def download(self, url, referer=None, post=None):
if post: if post:
raise Exception("POST method with Chromium headless not yet implemented") raise Exception("POST method with Chromium headless not yet implemented")
print("Download {}".format(url)) print("Download {}".format(url))
self.driver = webdriver.Chrome(service=self.service, options=self.options)
try:
self.driver.get(url) self.driver.get(url)
if self.pause:
time.sleep(2)
doc = self.driver.page_source doc = self.driver.page_source
self.driver.quit()
except StaleElementReferenceException as e:
print(f">> {type(e).__name__}: {e.args}")
return None
except NoSuchElementException as e:
print(f">> {type(e).__name__}: {e.args}")
return None
except TimeoutException as e:
print(f">> {type(e).__name__}: {e.args}")
return None
except WebDriverException as e:
print(f">> {type(e).__name__}: {e.args}")
return None
except SessionNotCreatedException as e:
print(f">> {type(e).__name__}: {e.args}")
return None
except Exception as e:
print(f">> {type(e).__name__} line {e.__traceback__.tb_lineno} of {__file__}: {e.args}")
return None
except:
print(f">> General Exception: {URL}")
return None
return doc return doc


@ -2,24 +2,20 @@ from abc import ABC, abstractmethod
from datetime import datetime, time, date, timedelta from datetime import datetime, time, date, timedelta
import re import re
import unicodedata import unicodedata
from django.utils import timezone
def remove_accents(input_str):
nfkd_form = unicodedata.normalize("NFKD", input_str)
return "".join([c for c in nfkd_form if not unicodedata.combining(c)])
class Extractor(ABC): class Extractor(ABC):
url_referer=None
def __init__(self): def __init__(self):
self.header = {} self.header = {}
self.events = [] self.events = []
self.downloader = None self.downloader = None
self.referer = ""
def remove_accents(input_str): def guess_end_day(self, start_day, start_time, end_time):
nfkd_form = unicodedata.normalize("NFKD", input_str)
return "".join([c for c in nfkd_form if not unicodedata.combining(c)])
def guess_end_day(start_day, start_time, end_time):
if end_time: if end_time:
if end_time > start_time: if end_time > start_time:
return start_day return start_day
@ -28,7 +24,7 @@ class Extractor(ABC):
else: else:
return start_day return start_day
def guess_month(text): def guess_month(self, text):
mths = [ mths = [
"jan", "jan",
"fe", "fe",
@ -43,42 +39,28 @@ class Extractor(ABC):
"nov", "nov",
"dec", "dec",
] ]
t = Extractor.remove_accents(text).lower() t = remove_accents(text).lower()
for i, m in enumerate(mths): for i, m in enumerate(mths):
if t.startswith(m): if t.startswith(m):
return i + 1 return i + 1
return None return None
def parse_french_date(text, default_year=None): def parse_french_date(self, text):
# format NomJour Numero Mois Année # format NomJour Numero Mois Année
m = re.search( m = re.search(
"[a-zA-ZéÉûÛ:.]+[ ]*([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)", text "[a-zA-ZéÉûÛ:.]+[ ]*([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)", text
) )
if m: if m:
day = m.group(1) day = m.group(1)
month = Extractor.guess_month(m.group(2)) month = self.guess_month(m.group(2))
year = m.group(3) year = m.group(3)
else: else:
# format Numero Mois Annee # format Numero Mois Annee
m = re.search("([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)", text) m = re.search("([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)", text)
if m: if m:
day = m.group(1) day = m.group(1)
month = Extractor.guess_month(m.group(2)) month = self.guess_month(m.group(2))
year = m.group(3) year = m.group(3)
else:
# format Numero/Mois/Annee
m = re.search("([0-9]+)/([0-9]+)/([0-9]+)", text)
if m:
day = m.group(1)
month = int(m.group(2))
year = m.group(3)
else:
# format Numero Mois (annee par defaut)
m = re.search("([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)", text)
if m:
day = m.group(1)
month = Extractor.guess_month(m.group(2))
year = default_year
else: else:
# TODO: consolider les cas non satisfaits # TODO: consolider les cas non satisfaits
return None return None
@ -96,7 +78,7 @@ class Extractor(ABC):
return None return None
return date(year, month, day) return date(year, month, day)
def parse_french_time(text): def parse_french_time(self, text):
# format heures minutes secondes # format heures minutes secondes
m = re.search("([0-9]+)[ a-zA-Z:.]+([0-9]+)[ a-zA-Z:.]+([0-9]+)", text) m = re.search("([0-9]+)[ a-zA-Z:.]+([0-9]+)[ a-zA-Z:.]+([0-9]+)", text)
if m: if m:
@ -112,18 +94,11 @@ class Extractor(ABC):
s = "0" s = "0"
else: else:
# format heures # format heures
m = re.search("([0-9]+)[ ]*[Hh:.]", text) m = re.search("([0-9]+)[ Hh:.]", text)
if m: if m:
h = m.group(1) h = m.group(1)
m = "0" m = "0"
s = "0" s = "0"
else:
# format minutes
m = re.search("([0-9]+)[ ]*(?:mn|min|Min|Mn)", text)
if m:
h = "0"
m = m.group(1)
s = "0"
else: else:
return None return None
@ -159,7 +134,6 @@ class Extractor(ABC):
def add_event( def add_event(
self, self,
default_values,
title, title,
category, category,
start_day, start_day,
@ -184,19 +158,14 @@ class Extractor(ABC):
print("ERROR: cannot import an event without start day") print("ERROR: cannot import an event without start day")
return return
tags_default = self.default_value_if_exists(default_values, "tags")
if not tags_default:
tags_default = []
event = { event = {
"title": title, "title": title,
"category": category if category else self.default_value_if_exists(default_values, "category"), "category": category,
"start_day": start_day, "start_day": start_day,
"uuids": uuids, "uuids": uuids,
"location": location if location else self.default_value_if_exists(default_values, "location"), "location": location,
"organisers": self.default_value_if_exists(default_values, "organisers"),
"description": description, "description": description,
"tags": tags + tags_default, "tags": tags,
"published": published, "published": published,
"image": image, "image": image,
"image_alt": image_alt, "image_alt": image_alt,
@ -227,9 +196,6 @@ class Extractor(ABC):
) )
def get_structure(self): def get_structure(self):
if len(self.events) == 0:
return {}
else:
return {"header": self.header, "events": self.events} return {"header": self.header, "events": self.events}
def clean_url(url): def clean_url(url):
@ -244,31 +210,8 @@ class Extractor(ABC):
def get_default_extractors(single_event=False): def get_default_extractors(single_event=False):
from .extractor_ical import ICALExtractor from .extractor_ical import ICALExtractor
from .extractor_facebook import FacebookEventExtractor from .extractor_facebook import FacebookEventExtractor
from .extractor_ggcal_link import GoogleCalendarLinkEventExtractor
if single_event: if single_event:
return [FacebookEventExtractor(), GoogleCalendarLinkEventExtractor(), EventNotFoundExtractor()] return [FacebookEventExtractor(single_event=True)]
else: else:
return [ICALExtractor(), FacebookEventExtractor(), GoogleCalendarLinkEventExtractor(), EventNotFoundExtractor()] return [ICALExtractor(), FacebookEventExtractor(single_event=False)]
# A class that only produce a not found event
class EventNotFoundExtractor(Extractor):
def extract(
self, content, url, url_human=None, default_values=None, published=False
):
self.set_header(url)
self.clear_events()
self.add_event(default_values, "événement sans titre depuis " + url,
None, timezone.now().date(), None,
"l'import a échoué, la saisie doit se faire manuellement à partir de l'url source " + url,
[], [url], published=False, url_human=url)
return self.get_structure()
def clean_url(url):
return url
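The date parsing above boils down to stripping accents, guessing the month from its prefix, then applying the "NomJour Numero Mois Année" pattern. A rough standalone sketch of that strategy; the month-prefix list is abridged and partly assumed, since the hunk above only shows part of it:

import re
import unicodedata
from datetime import date

# assumed month prefixes (only "jan", "fe", ..., "nov", "dec" are visible above)
MONTH_PREFIXES = ["jan", "fe", "mar", "av", "mai", "juin", "juil", "ao", "sep", "oct", "nov", "dec"]

def remove_accents(text):
    nfkd = unicodedata.normalize("NFKD", text)
    return "".join(c for c in nfkd if not unicodedata.combining(c))

def guess_month(text):
    t = remove_accents(text).lower()
    for i, prefix in enumerate(MONTH_PREFIXES):
        if t.startswith(prefix):
            return i + 1
    return None

def parse_french_date(text):
    # format "NomJour Numero Mois Année", e.g. "dimanche 19 mai 2024"
    m = re.search(r"[a-zA-ZéÉûÛ:.]+[ ]*([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)", text)
    if m is None:
        return None
    month = guess_month(m.group(2))
    if month is None:
        return None
    return date(int(m.group(3)), month, int(m.group(1)))

print(parse_french_date("dimanche 19 mai 2024"))  # 2024-05-19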


@ -9,7 +9,9 @@ import logging
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class SimpleFacebookEvent:
class FacebookEventExtractor(Extractor):
class SimpleFacebookEvent:
def __init__(self, data): def __init__(self, data):
self.elements = {} self.elements = {}
@ -17,13 +19,12 @@ class SimpleFacebookEvent:
self.elements[key] = data[key] if key in data else None self.elements[key] = data[key] if key in data else None
if "parent_event" in data: if "parent_event" in data:
self.parent = SimpleFacebookEvent( self.parent = FacebookEventExtractor.SimpleFacebookEvent(
data["parent_event"] data["parent_event"]
) )
class FacebookEvent: class FacebookEvent:
name = "event" name = "event"
# keys correspond to javascript elements that contain interesting data
keys = [ keys = [
[ [
"start_time_formatted", "start_time_formatted",
@ -32,8 +33,8 @@ class FacebookEvent:
"name", "name",
"price_info", "price_info",
"cover_media_renderer", "cover_media_renderer",
"event_creator",
"id", "id",
"parent_if_exists_or_self",
"day_time_sentence", "day_time_sentence",
"event_place", "event_place",
"comet_neighboring_siblings", "comet_neighboring_siblings",
@ -41,33 +42,20 @@ class FacebookEvent:
["event_description"], ["event_description"],
["start_timestamp", "end_timestamp"], ["start_timestamp", "end_timestamp"],
] ]
# rules are defined by a sub-key within interesting data where elements will be found
# each pair in the associated list is a key of our model and a path within FB data to
# get the corresponding field
rules = { rules = {
"event_description": [("description", ["text"])], "event_description": {"description": ["text"]},
"cover_media_renderer": [ "cover_media_renderer": {
("image_alt", ["cover_photo", "photo", "accessibility_caption"]), "image_alt": ["cover_photo", "photo", "accessibility_caption"],
("image", ["cover_photo", "photo", "full_image", "uri"]), "image": ["cover_photo", "photo", "full_image", "uri"],
("image", ["cover_media", 0, "full_image", "uri"]), },
("image_alt", ["cover_media", 0, "accessibility_caption"]), "event_creator": {
], "event_creator_name": ["name"],
"event_creator": "event_creator_url": ["url"],
[("event_creator_name", ["name"]), },
("event_creator_url", ["url"]), "event_place": {"event_place_name": ["name"]},
],
"event_place": [("event_place_name", ["name"])],
} }
def __init__(self, *args): def __init__(self, i, event):
if len(args) == 1:
other = args[0]
self.fragments = other.fragments
self.elements = other.elements
self.neighbor_events = None
else:
i = args[0]
event = args[1]
self.fragments = {} self.fragments = {}
self.elements = {} self.elements = {}
self.neighbor_events = None self.neighbor_events = None
@ -94,25 +82,26 @@ class FacebookEvent:
def add_fragment(self, i, event): def add_fragment(self, i, event):
self.fragments[i] = event self.fragments[i] = event
if FacebookEvent.keys[i] == [ if FacebookEventExtractor.FacebookEvent.keys[i] == [
"start_timestamp", "start_timestamp",
"end_timestamp", "end_timestamp",
]: ]:
self.get_possible_end_timestamp(i, event) self.get_possible_end_timestamp(i, event)
else: else:
for k in FacebookEvent.keys[i]: for k in FacebookEventExtractor.FacebookEvent.keys[i]:
if k == "comet_neighboring_siblings": if k == "comet_neighboring_siblings":
self.get_neighbor_events(event[k]) self.get_neighbor_events(event[k])
elif k in FacebookEvent.rules: elif k in FacebookEventExtractor.FacebookEvent.rules:
for nk, rule in FacebookEvent.rules[k]: for nk, rule in FacebookEventExtractor.FacebookEvent.rules[
k
].items():
error = False error = False
c = event[k] c = event[k]
for ki in rule: for ki in rule:
if c is not None and ki in c or (isinstance(c, list) and ki < len(c)): if c is not None:
c = c[ki] c = c[ki]
else: else:
error = True error = True
break
if not error: if not error:
self.elements[nk] = c self.elements[nk] = c
else: else:
@ -120,12 +109,12 @@ class FacebookEvent:
def get_possible_end_timestamp(self, i, data): def get_possible_end_timestamp(self, i, data):
self.possible_end_timestamp.append( self.possible_end_timestamp.append(
dict((k, data[k]) for k in FacebookEvent.keys[i]) dict((k, data[k]) for k in FacebookEventExtractor.FacebookEvent.keys[i])
) )
def get_neighbor_events(self, data): def get_neighbor_events(self, data):
self.neighbor_events = [ self.neighbor_events = [
SimpleFacebookEvent(d) for d in data FacebookEventExtractor.SimpleFacebookEvent(d) for d in data
] ]
def __str__(self): def __str__(self):
@ -165,24 +154,23 @@ class FacebookEvent:
def find_event_fragment_in_array(array, event, first=True): def find_event_fragment_in_array(array, event, first=True):
if isinstance(array, dict): if isinstance(array, dict):
seen = False seen = False
for i, ks in enumerate(FacebookEvent.keys): for i, ks in enumerate(FacebookEventExtractor.FacebookEvent.keys):
# DEBUG: print([k for k in ks if k in array], "il manque", [k for k in ks if k not in array])
if len(ks) == len([k for k in ks if k in array]): if len(ks) == len([k for k in ks if k in array]):
seen = True seen = True
if event is None: if event is None:
event = FacebookEvent(i, array) event = FacebookEventExtractor.FacebookEvent(i, array)
else: else:
event.add_fragment(i, array) event.add_fragment(i, array)
# only consider the first of FacebookEvent.keys # only consider the first of FacebookEvent.keys
break break
if not seen: if not seen:
for k in array: for k in array:
event = FacebookEvent.find_event_fragment_in_array( event = FacebookEventExtractor.FacebookEvent.find_event_fragment_in_array(
array[k], event, False array[k], event, False
) )
elif isinstance(array, list): elif isinstance(array, list):
for e in array: for e in array:
event = FacebookEvent.find_event_fragment_in_array( event = FacebookEventExtractor.FacebookEvent.find_event_fragment_in_array(
e, event, False e, event, False
) )
@ -191,6 +179,8 @@ class FacebookEvent:
return event return event
def build_event(self, url): def build_event(self, url):
self.get_element("image")
return { return {
"title": self.get_element("name"), "title": self.get_element("name"),
"category": None, "category": None,
@ -204,53 +194,20 @@ class FacebookEvent:
"end_day": self.get_element_date("end_timestamp"), "end_day": self.get_element_date("end_timestamp"),
"end_time": self.get_element_time("end_timestamp"), "end_time": self.get_element_time("end_timestamp"),
"image": self.get_element("image"), "image": self.get_element("image"),
"image_alt": self.get_element("image_alt"), "image_alt": self.get_element("image"),
} }
def get_parent_id(self): def __init__(self, single_event=False):
return self.get_element("parent_if_exists_or_self")["id"] self.single_event = single_event
def build_events(self, url):
if self.neighbor_events is None or len(self.neighbor_events) == 0:
return [self.build_event(url)]
else:
url_base = "https://www.facebook.com/events/" + self.get_parent_id() + "/"
result = []
for nb_e in self.neighbor_events:
# we create a copy of the event
clone = FacebookEvent(self)
# we set start and end timestamp according to the neighbor
clone.elements["start_timestamp"] = nb_e.elements["start_timestamp"]
clone.elements["end_timestamp"] = nb_e.elements["end_timestamp"]
## we generate the event
result.append(clone.build_event(url_base + nb_e.elements["id"] + "/"))
return result
class FacebookEventExtractor(Extractor):
def __init__(self):
super().__init__() super().__init__()
def clean_url(url): def clean_url(url):
if FacebookEventExtractor.is_known_url(url): if FacebookEventExtractor.is_known_url(url):
u = urlparse(url) u = urlparse(url)
result = "https://www.facebook.com" + u.path return "https://www.facebook.com" + u.path
# remove name in the url
match = re.match(r"(.*/events)/s/([a-zA-Z-][a-zA-Z-0-9-]+)/([0-9/]*)", result)
if match:
result = match[1] + "/" + match[3]
if result[-1] == "/":
return result
else:
return result + "/"
else: else:
return url return url
def is_known_url(url): def is_known_url(url):
u = urlparse(url) u = urlparse(url)
return u.netloc in ["facebook.com", "www.facebook.com", "m.facebook.com"] return u.netloc in ["facebook.com", "www.facebook.com", "m.facebook.com"]
@ -266,22 +223,16 @@ class FacebookEventExtractor(Extractor):
for json_script in soup.find_all("script", type="application/json"): for json_script in soup.find_all("script", type="application/json"):
json_txt = json_script.get_text() json_txt = json_script.get_text()
json_struct = json.loads(json_txt) json_struct = json.loads(json_txt)
fevent = FacebookEvent.find_event_fragment_in_array( fevent = FacebookEventExtractor.FacebookEvent.find_event_fragment_in_array(
json_struct, fevent json_struct, fevent
) )
if fevent is not None: if fevent is not None:
self.set_header(url) self.set_header(url)
for event in fevent.build_events(url): event = fevent.build_event(url)
logger.warning("published: " + str(published)) logger.warning("published: " + str(published))
event["published"] = published event["published"] = published
self.add_event(**event)
if default_values and "category" in default_values:
event["category"] = default_values["category"]
self.add_event(default_values, **event)
return self.get_structure() return self.get_structure()
else:
logger.warning("cannot find any event in page")
return None return None
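The rules mechanism described in the comments above maps each of our fields to a path (dict keys and list indices) inside the JSON fragments found in the page. A standalone sketch of that path walking, on made-up data:

def walk_path(fragment, path):
    # follow a path of dict keys / list indices, as the rules above do
    current = fragment
    for key in path:
        if isinstance(current, dict) and key in current:
            current = current[key]
        elif isinstance(current, list) and isinstance(key, int) and key < len(current):
            current = current[key]
        else:
            return None
    return current

data = {"cover_photo": {"photo": {"full_image": {"uri": "https://example.org/cover.jpg"}}}}
print(walk_path(data, ["cover_photo", "photo", "full_image", "uri"]))  # the image URL
print(walk_path(data, ["cover_media", 0, "full_image", "uri"]))        # None: path absent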


@ -1,66 +0,0 @@
from datetime import datetime
from bs4 import BeautifulSoup
from urllib.parse import urlparse
from .extractor import *
from .generic_extractors import *
import json
import logging
logger = logging.getLogger(__name__)
class GoogleCalendarLinkEventExtractor(Extractor):
def __init__(self):
super().__init__()
self.possible_urls = ["https://calendar.google.com/calendar/", "https://addtocalendar.com/"]
def extract(
self, content, url, url_human=None, default_values=None, published=False
):
soup = BeautifulSoup(content, "html.parser")
for ggu in self.possible_urls:
link_calendar = soup.select('a[href^="' + ggu + '"]')
if len(link_calendar) != 0:
gg_cal = GGCalendar(link_calendar[0]["href"])
if gg_cal.is_valid_event():
start_day = gg_cal.start_day
start_time = gg_cal.start_time
description = gg_cal.description.replace('&nbsp;', '')
end_day = gg_cal.end_day
end_time = gg_cal.end_time
location = gg_cal.location
title = gg_cal.title
url_human = url
self.set_header(url)
category = None
self.add_event(
default_values,
title=title,
category=category,
start_day=start_day,
location=location,
description=description,
tags=[],
uuids=[url],
recurrences=None,
url_human=url_human,
start_time=start_time,
end_day=end_day,
end_time=end_time,
published=published,
image=None,
)
break
return self.get_structure()


@ -27,21 +27,6 @@ class ICALExtractor(Extractor):
except: except:
return None return None
def guess_image_from_vevent(self, event):
item = self.get_item_from_vevent(event, 'ATTACH', raw=True)
if item is None:
return None
# it seems that FMTTYPE is not available through python-icalendar
if isinstance(item, list):
for i in item:
if str(i).lower().endswith('.jpg'):
return str(i)
else:
if str(item).lower().endswith('.jpg'):
return str(item)
return None
def get_dt_item_from_vevent(self, event, name): def get_dt_item_from_vevent(self, event, name):
item = self.get_item_from_vevent(event, name, raw=True) item = self.get_item_from_vevent(event, name, raw=True)
@ -78,7 +63,7 @@ class ICALExtractor(Extractor):
for event in calendar.walk("VEVENT"): for event in calendar.walk("VEVENT"):
title = self.get_item_from_vevent(event, "SUMMARY") title = self.get_item_from_vevent(event, "SUMMARY")
category = None category = self.default_value_if_exists(default_values, "category")
start_day, start_time = self.get_dt_item_from_vevent(event, "DTSTART") start_day, start_time = self.get_dt_item_from_vevent(event, "DTSTART")
@ -91,8 +76,8 @@ class ICALExtractor(Extractor):
end_day = end_day + timedelta(days=-1) end_day = end_day + timedelta(days=-1)
location = self.get_item_from_vevent(event, "LOCATION") location = self.get_item_from_vevent(event, "LOCATION")
if (not location is None) and location.replace(" ", "") == "": if location is None:
location = None location = self.default_value_if_exists(default_values, "location")
description = self.get_item_from_vevent(event, "DESCRIPTION") description = self.get_item_from_vevent(event, "DESCRIPTION")
if description is not None: if description is not None:
@ -127,16 +112,10 @@ class ICALExtractor(Extractor):
) )
# possible limitation: if the ordering is not original then related # possible limitation: if the ordering is not original then related
tags = [] tags = self.default_value_if_exists(default_values, "tags")
last_modified = self.get_item_from_vevent(event, "LAST-MODIFIED", raw=True) last_modified = self.get_item_from_vevent(event, "LAST-MODIFIED", raw=True)
image = self.guess_image_from_vevent(event)
url_event = self.get_item_from_vevent(event, "URL", True)
if url_event:
url_human = url_event
recurrence_entries = {} recurrence_entries = {}
for e in ["RRULE", "EXRULE", "EXDATE", "RDATE"]: for e in ["RRULE", "EXRULE", "EXDATE", "RDATE"]:
i = self.get_item_from_vevent(event, e, raw=True) i = self.get_item_from_vevent(event, e, raw=True)
@ -162,7 +141,6 @@ class ICALExtractor(Extractor):
if uuidrel is not None: if uuidrel is not None:
luuids += [uuidrel] luuids += [uuidrel]
self.add_event( self.add_event(
default_values,
title, title,
category, category,
start_day, start_day,
@ -177,7 +155,6 @@ class ICALExtractor(Extractor):
end_time=end_time, end_time=end_time,
last_modified=last_modified, last_modified=last_modified,
published=published, published=published,
image=image
) )
return self.get_structure() return self.get_structure()
@ -187,7 +164,6 @@ class ICALExtractor(Extractor):
class ICALNoBusyExtractor(ICALExtractor): class ICALNoBusyExtractor(ICALExtractor):
def add_event( def add_event(
self, self,
default_values,
title, title,
category, category,
start_day, start_day,
@ -205,9 +181,8 @@ class ICALNoBusyExtractor(ICALExtractor):
image=None, image=None,
image_alt=None, image_alt=None,
): ):
if title != "Busy" and title != "Accueils bénévoles" and title != "Occupé": if title != "Busy":
super().add_event( super().add_event(
default_values,
title, title,
category, category,
start_day, start_day,
@ -248,7 +223,6 @@ class ICALNoVCExtractor(ICALExtractor):
def add_event( def add_event(
self, self,
default_values,
title, title,
category, category,
start_day, start_day,
@ -267,7 +241,6 @@ class ICALNoVCExtractor(ICALExtractor):
image_alt=None, image_alt=None,
): ):
super().add_event( super().add_event(
default_values,
title, title,
category, category,
start_day, start_day,


@ -2,10 +2,6 @@ from abc import abstractmethod
from urllib.parse import urlparse from urllib.parse import urlparse
from urllib.parse import parse_qs from urllib.parse import parse_qs
import logging
logger = logging.getLogger(__name__)
from .extractor import * from .extractor import *
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
@ -18,30 +14,12 @@ class GGCalendar:
self.url = url self.url = url
self.extract_info() self.extract_info()
def filter_keys(params):
result = {}
for k, v in params.items():
if k.startswith('e[0]'):
result[k.replace('e[0][', '')[:-1]] = v
else:
result[k] = v
return result
def is_valid_event(self):
return self.start_day is not None and self.title is not None
def extract_info(self): def extract_info(self):
parsed_url = urlparse(self.url.replace("#", "%23")) parsed_url = urlparse(self.url.replace("#", "%23"))
params = parse_qs(parsed_url.query) params = parse_qs(parsed_url.query)
params = GGCalendar.filter_keys(params)
self.location = params["location"][0] if "location" in params else None
self.title = params["text"][0] if "text" in params else params["title"][0] if "title" in params else None
self.description = params["description"][0] if "description" in params else None
self.location = params["location"][0] if "location" in params else None self.location = params["location"][0] if "location" in params else None
self.title = params["text"][0] if "text" in params else None
if "dates" in params: if "dates" in params:
dates = [x.replace(" ", "+") for x in params["dates"][0].split("/")] dates = [x.replace(" ", "+") for x in params["dates"][0].split("/")]
if len(dates) > 0: if len(dates) > 0:
@ -55,24 +33,7 @@ class GGCalendar:
else: else:
self.end_day = None self.end_day = None
self.end_time = None self.end_time = None
elif "date_start" in params:
date = parser.parse(params["date_start"][0])
self.start_day = date.date()
self.start_time = date.time()
if "date_end" in params:
dateend = parser.parse(params["date_end"][0])
if dateend != date:
self.end_day = dateend.date()
self.end_time = dateend.time()
else:
self.end_day = None
self.end_time = None
if self.start_time == datetime.time(0):
self.start_time = None
else:
self.end_day = None
self.end_time = None
else: else:
raise Exception("Unable to find a date in google calendar URL") raise Exception("Unable to find a date in google calendar URL")
self.start_day = None self.start_day = None
@ -86,7 +47,6 @@ class GGCalendar:
# - then for each document downloaded from these urls, build the events # - then for each document downloaded from these urls, build the events
# This class is an abstract class # This class is an abstract class
class TwoStepsExtractor(Extractor): class TwoStepsExtractor(Extractor):
def __init__(self): def __init__(self):
super().__init__() super().__init__()
self.event_urls = None self.event_urls = None
@ -136,7 +96,6 @@ class TwoStepsExtractor(Extractor):
def add_event_with_props( def add_event_with_props(
self, self,
default_values,
event_url, event_url,
title, title,
category, category,
@ -170,7 +129,6 @@ class TwoStepsExtractor(Extractor):
location = self.event_properties[event_url]["location"] location = self.event_properties[event_url]["location"]
self.add_event( self.add_event(
default_values,
title, title,
category, category,
start_day, start_day,
@ -212,9 +170,7 @@ class TwoStepsExtractor(Extractor):
default_values=None, default_values=None,
published=False, published=False,
only_future=True, only_future=True,
ignore_404=True
): ):
self.only_future = only_future self.only_future = only_future
self.now = datetime.datetime.now().date() self.now = datetime.datetime.now().date()
self.set_header(url) self.set_header(url)
@ -238,39 +194,10 @@ class TwoStepsExtractor(Extractor):
# first download the content associated with this link # first download the content associated with this link
content_event = self.downloader.get_content(event_url) content_event = self.downloader.get_content(event_url)
if content_event is None: if content_event is None:
msg = "Cannot extract event from url {}".format(event_url) raise Exception(_("Cannot extract event from url {}").format(event_url))
if ignore_404:
logger.error(msg)
else:
raise Exception(msg)
else:
# then extract event information from this html document # then extract event information from this html document
self.add_event_from_content( self.add_event_from_content(
content_event, event_url, url_human, default_values, published content_event, event_url, url_human, default_values, published
) )
return self.get_structure() return self.get_structure()
class TwoStepsExtractorNoPause(TwoStepsExtractor):
def extract(
self,
content,
url,
url_human=None,
default_values=None,
published=False,
only_future=True,
ignore_404=True
):
if hasattr(self.downloader, "pause"):
pause = self.downloader.pause
else:
pause = False
self.downloader.pause = False
result = super().extract(content, url, url_human, default_values, published, only_future, ignore_404)
self.downloader.pause = pause
return result
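GGCalendar above reads everything from the query string of an "add to calendar" link. A standalone sketch on a made-up URL; the real class also handles the date_start/date_end and e[0][...] variants and uses a full date parser rather than strptime:

from urllib.parse import urlparse, parse_qs
from datetime import datetime

url = ("https://calendar.google.com/calendar/render?action=TEMPLATE"
       "&text=Concert&location=Clermont-Ferrand"
       "&dates=20240519T170000/20240519T190000")
params = parse_qs(urlparse(url).query)
title = params["text"][0] if "text" in params else None
location = params["location"][0] if "location" in params else None
# the "dates" parameter holds start/end separated by a slash
start, end = [datetime.strptime(d, "%Y%m%dT%H%M%S") for d in params["dates"][0].split("/")]
print(title, location, start.date(), start.time(), end.time())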


@ -1,11 +1,6 @@
from .downloader import * from .downloader import *
from .extractor import * from .extractor import *
import logging
logger = logging.getLogger(__name__)
class URL2Events: class URL2Events:
def __init__( def __init__(
@ -18,10 +13,7 @@ class URL2Events:
def process( def process(
self, url, url_human=None, cache=None, default_values=None, published=False self, url, url_human=None, cache=None, default_values=None, published=False
): ):
referer = "" content = self.downloader.get_content(url, cache)
if self.extractor:
referer = self.extractor.url_referer
content = self.downloader.get_content(url, cache, referer=referer)
if content is None: if content is None:
return None return None
@ -34,9 +26,8 @@ class URL2Events:
else: else:
# if the extractor is not defined, use a list of default extractors # if the extractor is not defined, use a list of default extractors
for e in Extractor.get_default_extractors(self.single_event): for e in Extractor.get_default_extractors(self.single_event):
logger.warning('Extractor::' + type(e).__name__)
e.set_downloader(self.downloader) e.set_downloader(self.downloader)
events = e.extract(content, url, url_human, default_values, published) events = e.extract(content, url, url_human, default_values, published)
if events is not None and len(events) > 0: if events is not None:
return events return events
return None return None

File diff suppressed because it is too large


@ -20,5 +20,5 @@ class Migration(migrations.Migration):
operations = [ operations = [
migrations.RunPython(forwards_func, reverse_code=forwards_func), migrations.RunPython(forwards_func),
] ]


@ -10,11 +10,6 @@ def groups_permissions_creation(apps, schema_editor):
for name in user_roles: for name in user_roles:
Group.objects.create(name=name) Group.objects.create(name=name)
def groups_permissions_deletion(apps, schema_editor):
user_roles = ["Automation Manager", "Q&A Manager", "Receptionist"]
for name in user_roles:
Group.objects.filter(name=name).delete()
@ -26,5 +21,5 @@ class Migration(migrations.Migration):
] ]
operations = [ operations = [
migrations.RunPython(groups_permissions_creation, reverse_code=groups_permissions_deletion), migrations.RunPython(groups_permissions_creation),
] ]


@ -31,12 +31,6 @@ def update_groups_permissions(apps, schema_editor):
Group.objects.get(name="Receptionist").permissions.add(*receptionist_perms) Group.objects.get(name="Receptionist").permissions.add(*receptionist_perms)
Group.objects.get(name="Receptionist").permissions.add(*read_mod_perms) Group.objects.get(name="Receptionist").permissions.add(*read_mod_perms)
def update_groups_delete(apps, schema_editor):
user_roles = ["Moderator"]
for name in user_roles:
Group.objects.filter(name=name).delete()
class Migration(migrations.Migration): class Migration(migrations.Migration):
@ -46,5 +40,5 @@ class Migration(migrations.Migration):
] ]
operations = [ operations = [
migrations.RunPython(update_groups_permissions, reverse_code=update_groups_delete), migrations.RunPython(update_groups_permissions),
] ]


@ -15,9 +15,6 @@ def update_groups_permissions(apps, schema_editor):
Group.objects.get(name="Q&A Manager").permissions.add(*qanda_perms) Group.objects.get(name="Q&A Manager").permissions.add(*qanda_perms)
Group.objects.get(name="Q&A Manager").permissions.add(*read_mod_perms) Group.objects.get(name="Q&A Manager").permissions.add(*read_mod_perms)
def no_permission_change(apps, schema_editor):
pass
class Migration(migrations.Migration): class Migration(migrations.Migration):
@ -26,5 +23,5 @@ class Migration(migrations.Migration):
] ]
operations = [ operations = [
migrations.RunPython(update_groups_permissions, reverse_code=no_permission_change), migrations.RunPython(update_groups_permissions),
] ]


@ -11,8 +11,7 @@ def update_groups_permissions(apps, schema_editor):
mod_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model == 'moderationquestion' and i.codename.startswith('use_')] mod_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model == 'moderationquestion' and i.codename.startswith('use_')]
Group.objects.get(name="Moderator").permissions.add(*mod_perms) Group.objects.get(name="Moderator").permissions.add(*mod_perms)
def no_permission_change(apps, schema_editor):
pass
class Migration(migrations.Migration): class Migration(migrations.Migration):
@ -21,5 +20,5 @@ class Migration(migrations.Migration):
] ]
operations = [ operations = [
migrations.RunPython(update_groups_permissions, reverse_code=no_permission_change), migrations.RunPython(update_groups_permissions),
] ]


@ -16,11 +16,6 @@ def update_groups_permissions(apps, schema_editor):
editor_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model == 'staticcontent'] editor_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model == 'staticcontent']
Group.objects.get(name="Static content editor").permissions.add(*editor_perms) Group.objects.get(name="Static content editor").permissions.add(*editor_perms)
def update_groups_delete(apps, schema_editor):
user_roles = ["Static content editor"]
for name in user_roles:
Group.objects.filter(name=name).delete()
class Migration(migrations.Migration): class Migration(migrations.Migration):
@ -29,5 +24,5 @@ class Migration(migrations.Migration):
] ]
operations = [ operations = [
migrations.RunPython(update_groups_permissions, reverse_code=update_groups_delete), migrations.RunPython(update_groups_permissions),
] ]


@ -1,10 +1,9 @@
# Generated by Django 4.2.7 on 2024-04-27 16:29 # Generated by Django 4.2.7 on 2024-04-27 16:29
from django.db import migrations from django.db import migrations
from django.contrib.auth.models import Group, Permission
def update_groups_permissions(apps, schema_editor): def update_groups_permissions(apps, schema_editor):
Group = apps.get_model("auth", "Group")
Permission = apps.get_model("auth", "Permission")
all_perms = Permission.objects.all() all_perms = Permission.objects.all()
@ -12,9 +11,6 @@ def update_groups_permissions(apps, schema_editor):
moderator_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model in ['place']] moderator_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model in ['place']]
Group.objects.get(name="Moderator").permissions.add(*moderator_perms) Group.objects.get(name="Moderator").permissions.add(*moderator_perms)
def no_permission_change(apps, schema_editor):
pass
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
@ -22,5 +18,5 @@ class Migration(migrations.Migration):
] ]
operations = [ operations = [
migrations.RunPython(update_groups_permissions, reverse_code=no_permission_change), migrations.RunPython(update_groups_permissions),
] ]


@ -1,18 +0,0 @@
# Generated by Django 4.2.7 on 2024-06-02 12:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0063_alter_event_exact_location'),
]
operations = [
migrations.AlterField(
model_name='recurrentimport',
name='processor',
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', 'la puce à loreille'), ('Plugin wordpress MEC', 'Plugin wordpress MEC')], default='ical', max_length=20, verbose_name='Processor'),
),
]


@ -1,17 +0,0 @@
# Generated by Django 4.2.7 on 2024-08-13 13:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0064_alter_recurrentimport_processor'),
]
operations = [
migrations.AlterModelOptions(
name='place',
options={'ordering': ['name'], 'verbose_name': 'Place', 'verbose_name_plural': 'Places'},
),
]


@ -1,18 +0,0 @@
# Generated by Django 4.2.7 on 2024-08-17 09:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0065_alter_place_options'),
]
operations = [
migrations.AddField(
model_name='batchimportation',
name='url_source',
field=models.URLField(blank=True, editable=False, help_text='Source URL if no RecurrentImport is associated.', max_length=1024, null=True, verbose_name='URL (if not recurrent import)'),
),
]


@ -1,19 +0,0 @@
# Generated by Django 4.2.7 on 2024-08-17 11:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0066_batchimportation_url_source'),
]
operations = [
migrations.AddField(
model_name='categorisationrule',
name='place',
field=models.ForeignKey(blank=True, help_text='Location from place', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.place', verbose_name='Place'),
),
]


@ -1,18 +0,0 @@
# Generated by Django 4.2.7 on 2024-08-28 21:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0067_categorisationrule_place'),
]
operations = [
migrations.AlterField(
model_name='recurrentimport',
name='processor',
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page")], default='ical', max_length=20, verbose_name='Processor'),
),
]


@ -1,18 +0,0 @@
# Generated by Django 4.2.7 on 2024-08-28 23:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0068_alter_recurrentimport_processor'),
]
operations = [
migrations.AlterField(
model_name='recurrentimport',
name='downloader',
field=models.CharField(choices=[('simple', 'simple'), ('chromium headless', 'Headless Chromium'), ('chromium (pause)', 'Headless Chromium (pause)')], default='simple', max_length=20, verbose_name='Downloader'),
),
]


@ -1,29 +0,0 @@
# Generated by Django 4.2.9 on 2024-08-29 19:16
from django.db import migrations
import django_ckeditor_5.fields
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0069_alter_recurrentimport_downloader'),
]
operations = [
migrations.AlterField(
model_name='contactmessage',
name='comments',
field=django_ckeditor_5.fields.CKEditor5Field(blank=True, default='', help_text='Comments on the message from the moderation team', null=True, verbose_name='Comments'),
),
migrations.AlterField(
model_name='contactmessage',
name='message',
field=django_ckeditor_5.fields.CKEditor5Field(help_text='Your message', verbose_name='Message'),
),
migrations.AlterField(
model_name='staticcontent',
name='text',
field=django_ckeditor_5.fields.CKEditor5Field(help_text='Text as shown to the visitors', verbose_name='Content'),
),
]

View File

@ -1,24 +0,0 @@
# Generated by Django 4.2.9 on 2024-08-29 19:58
from django.db import migrations
import django_ckeditor_5.fields
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0070_alter_contactmessage_comments_and_more'),
]
operations = [
migrations.AlterField(
model_name='contactmessage',
name='message',
field=django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Your message', verbose_name='Message'),
),
migrations.AlterField(
model_name='staticcontent',
name='text',
field=django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Text as shown to the visitors', verbose_name='Content'),
),
]

View File

@ -1,18 +0,0 @@
# Generated by Django 4.2.9 on 2024-09-04 21:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0071_alter_contactmessage_message_and_more'),
]
operations = [
migrations.AlterField(
model_name='recurrentimport',
name='processor',
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page"), ('cour3coquins', 'la cour des 3 coquins')], default='ical', max_length=20, verbose_name='Processor'),
),
]


@ -1,18 +0,0 @@
# Generated by Django 4.2.9 on 2024-09-04 21:57
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0072_alter_recurrentimport_processor'),
]
operations = [
migrations.AlterField(
model_name='event',
name='location',
field=models.CharField(blank=True, default='', help_text='Address of the event in case its not available in the already known places (free form)', max_length=512, null=True, verbose_name='Location (free form)'),
),
]


@ -1,29 +0,0 @@
# Generated by Django 4.2.9 on 2024-09-14 12:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0073_alter_event_location'),
]
operations = [
migrations.AddField(
model_name='category',
name='pictogram',
field=models.ImageField(blank=True, help_text='Pictogram of the category', max_length=1024, null=True, upload_to='', verbose_name='Pictogram'),
),
migrations.AlterField(
model_name='event',
name='category',
field=models.ForeignKey(default=None, help_text='Category of the event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
),
migrations.AlterField(
model_name='recurrentimport',
name='defaultCategory',
field=models.ForeignKey(default=None, help_text='Category of each imported event', on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
),
]


@ -1,24 +0,0 @@
# Generated by Django 4.2.9 on 2024-09-14 13:18
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0074_category_pictogram_alter_event_category_and_more'),
]
operations = [
migrations.AlterField(
model_name='event',
name='category',
field=models.ForeignKey(default=1, help_text='Category of the event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
),
migrations.AlterField(
model_name='recurrentimport',
name='defaultCategory',
field=models.ForeignKey(default=1, help_text='Category of each imported event', on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
),
]


@ -1,18 +0,0 @@
# Generated by Django 4.2.9 on 2024-09-14 17:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0075_alter_event_category_and_more'),
]
operations = [
migrations.AlterField(
model_name='category',
name='pictogram',
field=models.FileField(blank=True, help_text='Pictogram of the category (svg format)', max_length=1024, null=True, upload_to='', verbose_name='Pictogram'),
),
]


@ -1,19 +0,0 @@
# Generated by Django 4.2.9 on 2024-09-14 20:05
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0076_alter_category_pictogram'),
]
operations = [
migrations.AddField(
model_name='category',
name='position',
field=models.IntegerField(default=0, verbose_name='Position for ordering categories'),
),
]

View File

@ -1,24 +0,0 @@
# Generated by Django 4.2.9 on 2024-09-14 13:18
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0077_category_position_alter_event_category_and_more'),
]
operations = [
migrations.AlterField(
model_name='event',
name='category',
field=models.ForeignKey(default=1, help_text='Category of the event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
),
migrations.AlterField(
model_name='recurrentimport',
name='defaultCategory',
field=models.ForeignKey(default=1, help_text='Category of each imported event', on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
),
]

View File

@ -1,18 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-09 16:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0078_alter_event_category_and_more'),
]
operations = [
migrations.AddField(
model_name='contactmessage',
name='spam',
field=models.BooleanField(default=False, help_text='This message is a spam.', verbose_name='Spam'),
),
]


@ -1,20 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-10 20:34
import django.contrib.gis.geos.point
from django.db import migrations
import location_field.models.spatial
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0079_contactmessage_spam'),
]
operations = [
migrations.AddField(
model_name='place',
name='location_pt',
field=location_field.models.spatial.LocationField(default=django.contrib.gis.geos.point.Point(45.783329, 3.08333), srid=4326),
),
]


@ -1,38 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-10 20:35
from django.db import migrations
from django.contrib.gis.geos import Point
def change_coord_format(apps, schema_editor):
Place = apps.get_model("agenda_culturel", "Place")
places = Place.objects.values("location", "location_pt").all()
for p in places:
l = p.location.split(',')
if len(l) == 2:
p.location_pt = Point(float(l[1]), float(l[0]))
else:
p.location_pt = Point(3.08333, 45.783329)
p.save(update_fields=["location_pt"])
def reverse_coord_format(apps, schema_editor):
Place = apps.get_model("agenda_culturel", "Place")
places = Place.objects.values("location", "location_pt").all()
for p in places:
p.location = ','.join([p.location_pt[1], p.location_pt[0]])
p.save(update_fields=["location"])
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0080_place_location_pt'),
]
operations = [
migrations.RunPython(change_coord_format, reverse_code=reverse_coord_format),
]
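The data migration above turns the old free-text "lat,lon" value into a point stored as (x=longitude, y=latitude), which is the argument order GeoDjango's Point expects. A plain-Python sketch of that conversion, with the same Clermont-Ferrand fallback:

def location_to_xy(location):
    parts = location.split(",")
    if len(parts) == 2:
        # old format was "latitude,longitude"; Point(x, y) expects (longitude, latitude)
        return float(parts[1]), float(parts[0])
    return 3.08333, 45.783329  # fallback used by the migration

print(location_to_xy("45.783329,3.08333"))  # (3.08333, 45.783329)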


@ -1,20 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-10 21:15
import django.contrib.gis.geos.point
from django.db import migrations
import location_field.models.spatial
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0081_auto_20241010_2235'),
]
operations = [
migrations.AlterField(
model_name='place',
name='location_pt',
field=location_field.models.spatial.LocationField(default=django.contrib.gis.geos.point.Point(3.08333, 45.783329), srid=4326),
),
]


@ -1,17 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-10 21:15
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0082_alter_place_location_pt'),
]
operations = [
migrations.RemoveField(
model_name='place',
name='location',
),
]


@ -1,18 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-10 21:15
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0083_remove_place_location'),
]
operations = [
migrations.RenameField(
model_name='place',
old_name='location_pt',
new_name='location',
),
]


@ -1,24 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-12 14:45
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0084_rename_location_pt_place_location'),
]
operations = [
migrations.AlterField(
model_name='event',
name='category',
field=models.ForeignKey(default=None, help_text='Category of the event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
),
migrations.AlterField(
model_name='recurrentimport',
name='defaultCategory',
field=models.ForeignKey(default=None, help_text='Category of each imported event', on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
),
]


@ -1,18 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-16 09:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0085_alter_event_category_and_more'),
]
operations = [
migrations.AlterField(
model_name='recurrentimport',
name='processor',
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page FB"), ('cour3coquins', 'la cour des 3 coquins')], default='ical', max_length=20, verbose_name='Processor'),
),
]


@ -1,24 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-16 12:55
import django.contrib.gis.geos.point
from django.db import migrations, models
import location_field.models.spatial
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0086_alter_recurrentimport_processor'),
]
operations = [
migrations.CreateModel(
name='ReferenceLocation',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(help_text='Name of the location', verbose_name='Name')),
('location', location_field.models.spatial.LocationField(default=django.contrib.gis.geos.point.Point(3.08333, 45.783329), srid=4326)),
('main', models.BooleanField(default=False, help_text='This location is one of the main locations (shown first).', verbose_name='Main')),
],
),
]


@ -1,22 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-16 18:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0087_referencelocation'),
]
operations = [
migrations.AlterModelOptions(
name='referencelocation',
options={'verbose_name': 'Reference location', 'verbose_name_plural': 'Reference locations'},
),
migrations.AlterField(
model_name='referencelocation',
name='name',
field=models.CharField(help_text='Name of the location', unique=True, verbose_name='Name'),
),
]


@ -1,19 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-17 08:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0088_alter_referencelocation_options_and_more'),
]
operations = [
migrations.AlterField(
model_name='recurrentimport',
name='defaultCategory',
field=models.ForeignKey(blank=True, default=None, help_text='Category of each imported event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
),
]


@ -1,18 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-19 13:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0089_alter_recurrentimport_defaultcategory'),
]
operations = [
migrations.AlterField(
model_name='recurrentimport',
name='processor',
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page FB"), ('cour3coquins', 'la cour des 3 coquins'), ('arachnee', 'Arachnée concert')], default='ical', max_length=20, verbose_name='Processor'),
),
]


@ -1,23 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-20 11:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0090_alter_recurrentimport_processor'),
]
operations = [
migrations.AddField(
model_name='duplicatedevents',
name='fixed',
field=models.BooleanField(blank=True, default=False, help_text='This duplicated events is fixed, ie exactly one of the listed events is not masked.', null=True, verbose_name='Fixed'),
),
migrations.AddField(
model_name='event',
name='masked',
field=models.BooleanField(blank=True, default=False, help_text='This event is masked by a duplicated version.', null=True, verbose_name='Masked'),
),
]


@ -1,18 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-30 14:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0091_duplicatedevents_fixed_event_masked'),
]
operations = [
migrations.AlterField(
model_name='categorisationrule',
name='weight',
field=models.IntegerField(default=1, help_text='The lower is the weight, the earlier the filter is applied', verbose_name='Weight'),
),
]


@ -1,22 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-30 17:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0092_alter_categorisationrule_weight'),
]
operations = [
migrations.CreateModel(
name='Tag',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(help_text='Tag name', max_length=512, verbose_name='Name')),
('description', models.TextField(blank=True, help_text='Description of the tag', null=True, verbose_name='Description')),
('principal', models.BooleanField(default=True, help_text='This tag is highlighted as a main tag for visitors, particularly in the filter.', verbose_name='Principal')),
],
),
]


@ -1,33 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-30 19:02
from django.db import migrations
from django.contrib.auth.models import Group, Permission
def update_groups_permissions(apps, schema_editor):
# first add a missing role
user_roles = ["Tag editor"]
for name in user_roles:
Group.objects.create(name=name)
all_perms = Permission.objects.all()
# set permissions for moderators
editor_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model == 'tag']
Group.objects.get(name="Tag editor").permissions.add(*editor_perms)
def update_groups_delete(apps, schema_editor):
user_roles = ["Tag editor"]
for name in user_roles:
Group.objects.filter(name=name).delete()
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0093_tag'),
]
operations = [
migrations.RunPython(update_groups_permissions, reverse_code=update_groups_delete),
]


@ -1,19 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-30 19:11

from django.db import migrations
import django_ckeditor_5.fields


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0094_auto_20241030_2002'),
    ]

    operations = [
        migrations.AlterField(
            model_name='tag',
            name='description',
            field=django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Description of the tag', null=True, verbose_name='Description'),
        ),
    ]

View File

@ -1,18 +0,0 @@
# Generated by Django 4.2.9 on 2024-10-30 20:42

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0095_alter_tag_description'),
    ]

    operations = [
        migrations.AlterField(
            model_name='tag',
            name='name',
            field=models.CharField(help_text='Tag name', max_length=512, unique=True, verbose_name='Name'),
        ),
    ]

View File

@ -1,23 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-01 22:07

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0096_alter_tag_name'),
    ]

    operations = [
        migrations.AlterField(
            model_name='category',
            name='alt_name',
            field=models.CharField(blank=True, help_text='Alternative name used with a time period', max_length=512, null=True, verbose_name='Alternative Name'),
        ),
        migrations.AlterField(
            model_name='category',
            name='codename',
            field=models.CharField(blank=True, help_text='Short name of the category', max_length=3, null=True, verbose_name='Short name'),
        ),
    ]

View File

@ -1,21 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-01 22:08

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0097_alter_category_alt_name_alter_category_codename'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='category',
            name='alt_name',
        ),
        migrations.RemoveField(
            model_name='category',
            name='codename',
        ),
    ]

View File

@ -1,202 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-01 14:22

from django.db import migrations
import os.path


class SimpleCat:

    def __init__(self=None,
                 name=None, color=None,
                 pictogram=None, position=None,
                 transfered_to=None,
                 transtag=None):
        self.name = name
        self.color = color
        self.pictogram = pictogram
        self.position = position
        self.transfered_to = transfered_to
        self.transfered_to_object = {}
        self.transtag = transtag

    def get_transfered_category(self, e):
        # we check if the given event has a corresponding tag (except empty string)
        if not e is None:
            for t, c in self.transfered_to.items():
                if t != "" and t in e.tags:
                    return c
        return self.transfered_to[""] if "" in self.transfered_to else None

    def get_transfered_to_object(self, apps, e=None):
        if self.transfered_to is None:
            return None, None
        Category = apps.get_model("agenda_culturel", "Category")
        if isinstance(self.transfered_to, dict):
            cname = self.get_transfered_category(e)
        else:
            cname = self.transfered_to
        if not cname in self.transfered_to_object.keys():
            self.transfered_to_object[cname] = Category.objects.filter(name=cname).first()
        return self.transfered_to_object[cname], self.transtag

    def get_pictogram_file(self):
        from django.core.files import File
        f = open(os.path.dirname(__file__) + "/images/" + self.pictogram, "rb")
        return File(name=self.pictogram, file=f)


# Color selection
# https://colorkit.co/color-palette-generator/4cae4f-ff9900-2094f3-9b27b0-ffec3d-ff5724-795649-4051b5-009485/
# #4cae4f, #ff9900, #2094f3, #9b27b0, #ffec3d, #ff5724, #795649, #4051b5, #009485

preserved = {
    "Nature": {
        "old": SimpleCat("Nature", color="#27AEEF", pictogram="leaf.svg", position=8),
        "new": SimpleCat("Nature", color="#4cae4f", pictogram="leaf.svg", position=8)
    },
    "Cinéma": {
        "old": SimpleCat("Cinéma", color="#EDE15B", pictogram="theater.svg", position=5),
        "new": SimpleCat("Cinéma", color="#ff9900", pictogram="theater.svg", position=4),
    },
    "Sans catégorie": {
        "old": SimpleCat("Sans catégorie", color="#AAAAAA", pictogram="calendar.svg", position=100),
        "new": SimpleCat("Sans catégorie", color="#AAAAAA", pictogram="calendar.svg", position=100),
    }
}

old_cats = [
    SimpleCat("Conférence", "#87BC45", "school-outline.svg", 7, "Rencontres & Débats", "conférence"),
    SimpleCat("Exposition", "#BDCF32", "warehouse.svg", 6, "Visites & Expositions", "exposition"),
    SimpleCat("Arts du spectacle", "#EDBF33", "track-light.svg", 4, "Spectacles"),
    SimpleCat("Danse", "#EF9B20", "dance-ballroom.svg", 3, "Spectacles", "danse"),
    SimpleCat("Concert", "#F46A9B", "account-music-outline.svg", 2, "Fêtes & Concerts", "concert"),
    SimpleCat("Théâtre", "#EA5545", "drama-masks.svg", 1, "Spectacles", "théâtre")
]

new_cats = [
    SimpleCat("Fêtes & Concerts", "#ff5724", "party-popper.svg", 1, {"concert": "Concert", "": "Sans catégorie"}),
    SimpleCat("Spectacles", "#edbf33", "track-light.svg", 2, {"théâtre": "Théâtre", "danse": "Danse", "": "Arts du spectacle"}),
    SimpleCat("Rencontres & Débats", "#9b27b0", "workshop.svg", 3, {"conférence": "Conférence", "": "Sans catégorie"}),
    SimpleCat("Animations & Ateliers", "#4051b5", "tools.svg", 5, "Sans catégorie"),
    SimpleCat("Rendez-vous locaux", "#2094f3", "ferris-wheel.svg", 6, "Sans catégorie"),
    SimpleCat("Visites & Expositions", "#795649", "compass-outline.svg", 7, {"exposition": "Exposition", "": "Sans catégorie"}),
]


def create_categories(apps, catlist):
    Category = apps.get_model("agenda_culturel", "Category")

    # only create new categories if old ones are present to avoid filling
    # an empty database with ghost categories
    if Category.objects.count() > 1:
        cats = [Category(name=c.name, color=c.color, position=c.position, pictogram=c.get_pictogram_file()) for c in catlist]
        Category.objects.bulk_create(cats)


def delete_categories(apps, catlist):
    Category = apps.get_model("agenda_culturel", "Category")
    Category.objects.filter(name__in=[c.name for c in catlist]).delete()


def create_new_categories(apps, schema_editor):
    create_categories(apps, new_cats)


def delete_new_categories(apps, schema_editor):
    delete_categories(apps, new_cats)


def create_old_categories(apps, schema_editor):
    create_categories(apps, old_cats)


def delete_old_categories(apps, schema_editor):
    delete_categories(apps, old_cats)


def update_preserved_categories(apps, dest):
    other = "old" if dest == "new" else "new"
    Category = apps.get_model("agenda_culturel", "Category")
    cats = Category.objects.filter(name__in=preserved.keys())
    ucats = []
    for c in cats:
        c.color = preserved[c.name][dest].color
        c.position = preserved[c.name][dest].position
        if preserved[c.name][dest].pictogram != preserved[c.name][other].pictogram:
            c.pictogram = preserved[c.name][dest].get_pictogram_file()
        ucats.append(c)
    Category.objects.bulk_update(ucats, fields=["color", "position", "pictogram"])


def update_preserved_categories_new(apps, schema_editor):
    update_preserved_categories(apps, "new")


def update_preserved_categories_old(apps, schema_editor):
    update_preserved_categories(apps, "old")


def update_database(apps, cats):
    convert = dict([(c.name, c) for c in cats])

    # update events
    Event = apps.get_model("agenda_culturel", "Event")
    events = Event.objects.all()
    uevents = []
    for e in events:
        if e.category and e.category.name in convert.keys():
            cat, tag = convert[e.category.name].get_transfered_to_object(apps, e)
            e.category = cat
            if tag:
                if e.tags is None:
                    e.tags = [tag]
                else:
                    if not tag in e.tags:
                        e.tags.append(tag)
            uevents.append(e)
    Event.objects.bulk_update(uevents, fields=["category", "tags"])

    # update categorisation rules
    CategorisationRule = apps.get_model("agenda_culturel", "CategorisationRule")
    crules = CategorisationRule.objects.all()
    ucrules = []
    for r in crules:
        if r.category and r.category.name in convert.keys():
            r.category, tag = convert[r.category.name].get_transfered_to_object(apps)
            ucrules.append(r)
    CategorisationRule.objects.bulk_update(ucrules, fields=["category"])

    # update recurrent import
    RecurrentImport = apps.get_model("agenda_culturel", "RecurrentImport")
    rimports = RecurrentImport.objects.all()
    urimports = []
    for ri in rimports:
        if ri.defaultCategory and ri.defaultCategory.name in convert.keys():
            ri.defaultCategory, tag = convert[ri.defaultCategory.name].get_transfered_to_object(apps)
            urimports.append(ri)
    RecurrentImport.objects.bulk_update(urimports, fields=["defaultCategory"])


def update_database_new(apps, schema_editor):
    update_database(apps, old_cats)


def update_database_old(apps, schema_editor):
    update_database(apps, new_cats)


def do_nothing(apps, schema_editor):
    pass


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0098_remove_category_alt_name_remove_category_codename'),
    ]

    operations = [
        migrations.RunPython(create_new_categories, reverse_code=delete_new_categories),
        migrations.RunPython(update_preserved_categories_new, reverse_code=update_preserved_categories_old),
        migrations.RunPython(update_database_new, reverse_code=update_database_old),
        migrations.RunPython(delete_old_categories, reverse_code=create_old_categories)
    ]
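
To make the remapping above concrete: each entry of `old_cats` moves an old category into a broader new one, keeping the old name as a tag when `transtag` is set, and the reverse path in `new_cats` uses that tag to route events back. A minimal stand-alone sketch of what `update_database_new` does to a single event, with invented event data (the helper itself is hypothetical, not part of the migration):

```python
# Plain-data restatement of old_cats: old category -> (new category, added tag).
OLD_TO_NEW = {
    "Conférence": ("Rencontres & Débats", "conférence"),
    "Exposition": ("Visites & Expositions", "exposition"),
    "Arts du spectacle": ("Spectacles", None),
    "Danse": ("Spectacles", "danse"),
    "Concert": ("Fêtes & Concerts", "concert"),
    "Théâtre": ("Spectacles", "théâtre"),
}

def migrate_event(category, tags):
    """Hypothetical helper mimicking update_database_new for one event."""
    new_category, tag = OLD_TO_NEW.get(category, (category, None))
    if tag and tag not in tags:
        tags = tags + [tag]
    return new_category, tags

assert migrate_event("Concert", []) == ("Fêtes & Concerts", ["concert"])
assert migrate_event("Danse", ["danse"]) == ("Spectacles", ["danse"])
```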

View File

@ -1,19 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-02 10:54

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0099_update_categories'),
    ]

    operations = [
        migrations.AddField(
            model_name='tag',
            name='category',
            field=models.ForeignKey(default=None, help_text='This tags corresponds to a sub-category of the given category', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.category', verbose_name='Category'),
        ),
    ]

View File

@ -1,19 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-02 14:13

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0100_tag_category'),
    ]

    operations = [
        migrations.AlterField(
            model_name='tag',
            name='category',
            field=models.ForeignKey(blank=True, default=None, help_text='This tags corresponds to a sub-category of the given category', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.category', verbose_name='Category'),
        ),
    ]

View File

@ -1,19 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-07 20:53

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0101_alter_tag_category'),
    ]

    operations = [
        migrations.AddField(
            model_name='duplicatedevents',
            name='representative',
            field=models.ForeignKey(default=None, help_text='This event is the representative event of the duplicated events group', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.event', verbose_name='Representative event'),
        ),
    ]

View File

@ -1,59 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-07 20:53

from django.db import migrations


def set_representative_from_fixed_masked(apps, cats):
    # get all duplicated events
    DuplicatedEvents = apps.get_model("agenda_culturel", "DuplicatedEvents")
    duplicated = DuplicatedEvents.objects.all().prefetch_related('event_set')

    to_update = []
    for d in duplicated:
        # there is no representative
        d.representative = None
        # except if d is fixed
        if d.fixed:
            # and if there is at least one non masked (should be the case)
            e_not_masked = [e for e in d.event_set.all() if not e.masked]
            # keep the first one
            if len(e_not_masked) >= 1:
                d.representative = e_not_masked[0]
        to_update.append(d)

    DuplicatedEvents.objects.bulk_update(to_update, fields=["representative"])


def set_fixed_masked_from_representative(apps, cats):
    Event = apps.get_model("agenda_culturel", "Event")
    events = Event.objects.all().prefetch_related("possibly_duplicated")

    to_update = []
    for e in events:
        if not e.possibly_duplicated:
            e.masked = False
        else:
            e.masked = e.possibly_duplicated.representative and e.possibly_duplicated.representative == e
        to_update.append(e)
    Event.objects.bulk_update(to_update, fields=["masked"])

    # get all duplicated events
    DuplicatedEvents = apps.get_model("agenda_culturel", "DuplicatedEvents")
    duplicated = DuplicatedEvents.objects.all().prefetch_related('event_set')

    # for each event
    to_update = []
    for d in duplicated:
        d.fixed = not d.representative is None
        to_update.append(d)
    DuplicatedEvents.objects.bulk_update(to_update, fields=["fixed"])


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0102_duplicatedevents_representative'),
    ]

    operations = [
        migrations.RunPython(set_representative_from_fixed_masked, reverse_code=set_fixed_masked_from_representative),
    ]
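
The forward rule is easy to lose in the ORM plumbing: every group first has its representative reset, and only groups that were `fixed` receive one again, namely their first non-masked event. A toy, ORM-free sketch of that rule (the class names are invented for illustration):

```python
from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class Ev:
    name: str
    masked: bool = False

@dataclass
class Group:
    fixed: bool
    events: List[Ev] = field(default_factory=list)
    representative: Optional[Ev] = None

def pick_representative(group: Group) -> None:
    group.representative = None          # by default, no representative
    if group.fixed:                      # only fixed groups keep one
        not_masked = [e for e in group.events if not e.masked]
        if not_masked:
            group.representative = not_masked[0]

g = Group(fixed=True, events=[Ev("a", masked=True), Ev("b"), Ev("c")])
pick_representative(g)
assert g.representative.name == "b"
```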

View File

@ -1,17 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-07 21:24

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0103_update_duplicatedevents_datastructure'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='duplicatedevents',
            name='fixed',
        ),
    ]

View File

@ -1,23 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-08 08:30

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0104_remove_duplicatedevents_fixed'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='event',
            name='masked',
        ),
        migrations.RenameField(
            model_name='event',
            old_name='possibly_duplicated',
            new_name='other_versions',
        ),
    ]

View File

@ -1,19 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-09 10:43

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0105_remove_event_masked_remove_event_possibly_duplicated_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='other_versions',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.duplicatedevents', verbose_name='Other versions'),
        ),
    ]

View File

@ -1,30 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-10 21:25

from django.db import migrations


def strip_place_aliases(apps, schema_editor):
    Place = apps.get_model("agenda_culturel", "Place")
    places = Place.objects.all()

    for p in places:
        if not p.aliases is None:
            p.aliases = [a.strip() for a in p.aliases]

    Place.objects.bulk_update(places, fields=["aliases"])


def do_nothing(apps, schema_editor):
    pass


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0106_alter_event_other_versions'),
    ]

    operations = [
        migrations.RunPython(strip_place_aliases, reverse_code=do_nothing)
    ]
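
This is a one-way clean-up: place aliases are presumably compared verbatim against location strings coming from imports, so a stray space or newline would silently break the match. The effect on one row, with invented alias values:

```python
# What strip_place_aliases does to a single Place.aliases value (illustrative).
aliases = [" La Coopérative de Mai ", "La Coopé\n"]
assert [a.strip() for a in aliases] == ["La Coopérative de Mai", "La Coopé"]
```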

View File

@ -1,44 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-11 10:15

from django.db import migrations


def remove_duplicated_categories(apps, schema_editor):
    Category = apps.get_model("agenda_culturel", "Category")
    CategorisationRule = apps.get_model("agenda_culturel", "CategorisationRule")
    Event = apps.get_model("agenda_culturel", "Event")

    catnames = list(set([c.name for c in Category.objects.all()]))

    # for each category name
    for cname in catnames:
        # check if it exists more than one category
        if Category.objects.filter(name=cname).count() > 1:
            cats = Category.objects.filter(name=cname).order_by("pk")
            nbs = [Event.objects.filter(category=c).count() + CategorisationRule.objects.filter(category=c).count() for c in cats]
            # if only one category with this name has elements
            if len([n for n in nbs if n != 0]) == 1:
                # remove all categories without elements
                for n, c in zip(nbs, cats):
                    if n == 0:
                        c.delete()
            else:
                # otherwise, remove all but the last one (by ID)
                for c in cats[0:-1]:
                    c.delete()


def do_nothing(apps, schema_editor):
    pass


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0107_strip_aliases'),
    ]

    operations = [
        migrations.RunPython(remove_duplicated_categories, reverse_code=do_nothing)
    ]
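
The pruning rule reads more clearly on plain data. For one duplicated name, count how many events plus categorisation rules point at each row: if exactly one row is referenced, only the unreferenced rows are dropped; otherwise everything but the highest pk is dropped. A small ORM-free sketch (the pk/count values are invented):

```python
def pks_to_delete(refs):
    """refs maps pk -> number of events + rules referencing that duplicate."""
    pks = sorted(refs)                                  # rows ordered by pk
    referenced = [pk for pk in pks if refs[pk] > 0]
    if len(referenced) == 1:
        return [pk for pk in pks if refs[pk] == 0]      # drop only empty rows
    return pks[:-1]                                     # otherwise keep last pk

assert pks_to_delete({3: 0, 7: 12, 9: 0}) == [3, 9]
assert pks_to_delete({3: 2, 7: 12, 9: 0}) == [3, 7]
```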

View File

@ -1,19 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-13 09:56

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0108_remove_duplicated_categories'),
    ]

    operations = [
        migrations.DeleteModel(
            name='ModerationAnswer',
        ),
        migrations.DeleteModel(
            name='ModerationQuestion',
        ),
    ]

View File

@ -1,23 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-13 17:27

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0109_delete_moderationanswer_delete_moderationquestion'),
    ]

    operations = [
        migrations.AddField(
            model_name='tag',
            name='in_excluded_suggestions',
            field=models.BooleanField(default=False, help_text='This tag will be part of the excluded suggestions.', verbose_name='In excluded suggestions'),
        ),
        migrations.AddField(
            model_name='tag',
            name='in_included_suggestions',
            field=models.BooleanField(default=False, help_text='This tag will be part of the included suggestions.', verbose_name='In included suggestions'),
        ),
    ]

View File

@ -1,18 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-17 12:48

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0110_tag_in_excluded_suggestions_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='referencelocation',
            name='main',
            field=models.IntegerField(default=0, help_text='This location is one of the main locations (shown first higher values).', verbose_name='Main'),
        ),
    ]

View File

@ -1,19 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-20 15:42

from django.db import migrations
import django_ckeditor_5.fields


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0111_alter_referencelocation_main'),
    ]

    operations = [
        migrations.AddField(
            model_name='place',
            name='description',
            field=django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Description of the place, including accessibility.', null=True, verbose_name='Description'),
        ),
    ]

View File

@ -1,17 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-20 21:40

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0112_place_description'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='tag',
            name='category',
        ),
    ]

View File

@ -1,35 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-22 10:12

from django.db import migrations, models
import django.db.models.deletion
import django_ckeditor_5.fields


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0113_remove_tag_category'),
    ]

    operations = [
        migrations.CreateModel(
            name='Organisation',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='Organisation name', max_length=512, unique=True, verbose_name='Name')),
                ('website', models.URLField(blank=True, help_text='Website of the organisation', max_length=1024, null=True, verbose_name='Website')),
                ('description', django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Description of the organisation.', null=True, verbose_name='Description')),
                ('principal_place', models.ForeignKey(blank=True, help_text='Place mainly associated with this organizer. Mainly used if there is a similarity in the name, to avoid redundant displays.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.place', verbose_name='Principal place')),
            ],
        ),
        migrations.AddField(
            model_name='event',
            name='organisers',
            field=models.ManyToManyField(blank=True, help_text='list of event organisers. Organizers will only be displayed if one of them does not normally use the venue.', related_name='organised_events', to='agenda_culturel.organisation', verbose_name='Location (free form)'),
        ),
        migrations.AddField(
            model_name='recurrentimport',
            name='defaultOrganiser',
            field=models.ForeignKey(blank=True, default=None, help_text='Organiser of each imported event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.organisation', verbose_name='Organiser'),
        ),
    ]
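
Since the ManyToMany above declares `related_name='organised_events'`, the relation can be walked from either side once the migration is applied. A short sketch, assuming the models are importable from `agenda_culturel.models` (the organisation name is invented):

```python
from agenda_culturel.models import Event, Organisation

orga = Organisation.objects.get(name="La Coopérative de Mai")
upcoming = orga.organised_events.all()   # reverse side, via related_name

event = Event.objects.first()
event.organisers.add(orga)               # forward side of the M2M
```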

View File

@ -1,22 +0,0 @@
# Generated by Django 4.2.9 on 2024-11-22 10:30

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0114_organisation_event_organisers_and_more'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='organisation',
            options={'verbose_name': 'Organisation', 'verbose_name_plural': 'Organisations'},
        ),
        migrations.AlterField(
            model_name='event',
            name='organisers',
            field=models.ManyToManyField(blank=True, help_text='list of event organisers. Organizers will only be displayed if one of them does not normally use the venue.', related_name='organised_events', to='agenda_culturel.organisation', verbose_name='Organisers'),
        ),
    ]

Some files were not shown because too many files have changed in this diff.