Compare commits
527 Commits
13_export_
...
main
Author | SHA1 | Date | |
---|---|---|---|
|
430c7b47a2 | ||
|
63d3cb76ea | ||
|
d770cf23f0 | ||
|
cc0c798f5a | ||
|
6ceec954d8 | ||
|
2c22d62302 | ||
|
f79b1f0f89 | ||
|
3c1d51dda1 | ||
|
141949991c | ||
|
290faf0b8f | ||
|
f9f690cac7 | ||
|
5e8d9766ee | ||
|
6589c1b0c3 | ||
|
1055a36084 | ||
|
2cca0322d1 | ||
|
e5c075656c | ||
|
5ef9358b28 | ||
|
0526854d6b | ||
|
bc06b6205d | ||
|
504198b14f | ||
|
526b83ec20 | ||
|
08e66918ab | ||
|
c34fb666b2 | ||
|
a94b9a53f2 | ||
|
c1f7bfd8c4 | ||
|
b1e5414519 | ||
|
3da9a5239a | ||
|
c91cdf0c99 | ||
|
6e8f00ccbe | ||
|
a1984f60f5 | ||
|
ce95fe6504 | ||
|
dd0c037929 | ||
|
41d6b39988 | ||
|
3316d28e09 | ||
|
f7f8d9cb0c | ||
|
ced15d5113 | ||
|
70ae92854f | ||
|
02448cf4d4 | ||
|
14e25b660c | ||
|
92da6585c6 | ||
|
cd52ae0286 | ||
|
e050ce5eda | ||
|
b0b828392a | ||
|
c34abe9158 | ||
|
f52caf9855 | ||
|
bd1330cd2f | ||
|
a31bcc2764 | ||
|
91907be984 | ||
|
27ceac1e46 | ||
|
b3cba9293c | ||
|
c857294345 | ||
|
5a7cc080c7 | ||
|
37ed7c45db | ||
|
bda14c6ccb | ||
|
3d70de9c1b | ||
|
874c1779f8 | ||
|
084b3dfb25 | ||
|
ec707bf272 | ||
|
21b42e4fee | ||
|
d55d029fc7 | ||
|
1d9251946c | ||
|
e875ae626b | ||
|
63aad60260 | ||
|
27bce22670 | ||
|
1fc1fc13e1 | ||
|
252fb8c27b | ||
|
d70eca6493 | ||
|
7f1bbabebf | ||
|
c55ed5c4dc | ||
|
ac3d6796cf | ||
|
bf773686f9 | ||
|
1256adcb8a | ||
|
7120da3e28 | ||
|
4e9ac573ac | ||
|
42fb85af48 | ||
|
256fed1e2e | ||
|
d46ebeae3b | ||
|
3be7d901c8 | ||
|
5549d2172c | ||
|
674bba4a98 | ||
|
34008625d2 | ||
|
65430a2a8f | ||
|
8ef620c8e1 | ||
|
d119f1fa45 | ||
|
41f6dbc352 | ||
ce602c10bd | |||
|
c9275c5ea0 | ||
|
1287d9ee06 | ||
|
d7ec80ff01 | ||
|
555bae8dc8 | ||
|
ac8ddc5123 | ||
|
f9678bbf81 | ||
2680622dfc | |||
|
3c5b5a9fd6 | ||
|
db7604623c | ||
|
afa1844d21 | ||
|
b0b653c1b1 | ||
|
3001685937 | ||
|
a3e13429eb | ||
|
ea5372cae5 | ||
|
5e65ecdb5c | ||
|
ed7944aaa9 | ||
|
5a2dea6989 | ||
|
98092de1f0 | ||
|
03e10e91e2 | ||
|
7a9e74b057 | ||
|
0872af5144 | ||
|
720a187116 | ||
|
3d8fd1cfdf | ||
|
524d178055 | ||
|
2da854545f | ||
|
5a66caae55 | ||
|
ecc347219c | ||
|
918e19fa4f | ||
|
70260fcb4f | ||
|
0190d91268 | ||
|
769c607550 | ||
|
7f029ae541 | ||
|
386eca261a | ||
|
37817cc8f5 | ||
|
96401b6519 | ||
|
4e1441a92f | ||
|
b569464894 | ||
|
507670ebde | ||
|
c5c68bcfef | ||
|
d39ea43efb | ||
|
11bd53cbeb | ||
|
4cc6db84e2 | ||
|
463dd6b3b9 | ||
|
09c2c2117c | ||
|
c4bb86dab4 | ||
|
283ffc4348 | ||
|
2a0abf8e5a | ||
|
62b73dd836 | ||
|
1e278581ed | ||
|
0924d5d36c | ||
|
be62272487 | ||
|
bf5db35e57 | ||
|
af2948827d | ||
|
182208a6f8 | ||
|
9ad3e9e972 | ||
|
fe97c4cb32 | ||
|
956ec7210c | ||
|
5a6f33f8e2 | ||
|
c3f6d6920e | ||
|
47aedc706b | ||
|
1e9698da91 | ||
|
4a0f5b3b14 | ||
|
33a68ee7eb | ||
|
9cab07cb6f | ||
|
6efd6f18c8 | ||
|
493b42c457 | ||
|
0be3c30489 | ||
|
44a04deb26 | ||
|
43e1d3fd26 | ||
|
ae542f76c8 | ||
|
5cfb53de23 | ||
|
11d5cf9aa4 | ||
|
e3c14437ac | ||
|
28f5b2a01b | ||
|
0263976573 | ||
|
79a73d6459 | ||
|
3bd4ef5771 | ||
|
637b976442 | ||
|
d47991d1e0 | ||
|
350a555bea | ||
|
dbf62f3b4a | ||
|
4af14c523c | ||
|
0ae9c399dd | ||
|
e767babd8e | ||
|
df27949036 | ||
|
1f60bf0c39 | ||
|
bb6d83f6fb | ||
|
f93a6164ca | ||
|
fe1061e638 | ||
|
ed2f530f0c | ||
|
0bdd8693ec | ||
|
2ce8f30275 | ||
|
4c2dd9e98c | ||
|
4936365488 | ||
|
cf268523d8 | ||
|
c1a5f92af7 | ||
|
5b6c17fd6a | ||
|
ab347d5656 | ||
|
936f6c1b6b | ||
|
743b393366 | ||
|
84123e8bb9 | ||
|
d4a12cadcd | ||
|
3cd6dd8682 | ||
|
1dead2a695 | ||
|
dafadecd23 | ||
|
d0195612f0 | ||
|
f3664007f7 | ||
|
54e3af00cd | ||
|
4541366af1 | ||
|
decfce4247 | ||
|
e42ac94318 | ||
|
35832485e3 | ||
|
f6ec66c33d | ||
|
2196083894 | ||
|
67c65f14d1 | ||
|
ddb20befe6 | ||
|
fbd138998c | ||
|
45ed0d8828 | ||
|
305136d963 | ||
|
8cd891ad3a | ||
|
6e37828f90 | ||
|
e3c88165c7 | ||
|
f4016e6593 | ||
|
a3255ff460 | ||
|
53e5b52711 | ||
|
b1dcd55ebc | ||
|
daf4ab1eeb | ||
|
4c739422cd | ||
|
0ab30fd317 | ||
|
ca205c5ccd | ||
|
18ca7200a0 | ||
|
0a66a858c5 | ||
|
ce140764cc | ||
|
489d2e2f0f | ||
|
11790f0200 | ||
|
defb6ccfad | ||
|
7f79b7797a | ||
|
1b59ce34f2 | ||
|
4733bb3eec | ||
|
b66f428a0e | ||
|
30c8811b05 | ||
|
7a8efb8ed7 | ||
|
4c5decd682 | ||
|
28ca7b1b03 | ||
|
d756de6993 | ||
|
72242713eb | ||
|
b8236f8816 | ||
|
98517da474 | ||
|
cb69ece6ca | ||
|
3cdb6cdaf9 | ||
|
41196cd32d | ||
|
1f12e8b3fb | ||
|
67f7ed9287 | ||
|
3a01b1caf6 | ||
|
d685f7e63a | ||
|
57344ff5b9 | ||
|
2d9a3d42d2 | ||
|
9bbc8499e5 | ||
|
4186b70e7e | ||
|
40ce9a9cba | ||
|
9345e1b12c | ||
|
e90b5add2a | ||
|
0234f27b4b | ||
|
3c6c1f7963 | ||
|
8e552f2574 | ||
|
af297b5d25 | ||
|
cece41b084 | ||
|
9933d87c04 | ||
|
44b40bcbf1 | ||
|
eeae6f11e4 | ||
|
9e7842f198 | ||
|
e129abee6f | ||
|
597ada73da | ||
|
a7a529c776 | ||
|
05a5aa52d2 | ||
|
5b33777670 | ||
|
d0aae68dd5 | ||
|
e34edc2e7c | ||
|
d267642268 | ||
|
17bc54685d | ||
|
fae4dbbbf2 | ||
|
4f2af09464 | ||
|
5f13e91267 | ||
|
8b6627087b | ||
|
b80f1c038f | ||
|
9c0e895c16 | ||
|
deaef7b650 | ||
|
b9fad56e4e | ||
|
2478a671bf | ||
|
ac98b4c845 | ||
|
6195b0f4bc | ||
|
db20f4a4de | ||
|
82bbbb20b1 | ||
|
0ebb29a759 | ||
|
c47a4eaba0 | ||
|
88bd0e9e6d | ||
|
a3b16482cc | ||
|
302b4c66a7 | ||
|
a09f6751e3 | ||
|
cb214c0926 | ||
|
5a76fc8aea | ||
|
27d44f6918 | ||
|
14efffe6db | ||
|
d5865bb65d | ||
|
47b91b20fd | ||
|
4b97f8c222 | ||
|
760ba7b75e | ||
|
97be0db3d1 | ||
|
6704d30ef1 | ||
|
0dafda30e4 | ||
|
44eeac19c2 | ||
|
4c9494cd42 | ||
|
9f0a1a33cf | ||
|
30aafd4979 | ||
|
c767067f23 | ||
|
90b69af95a | ||
|
58ca1a7f85 | ||
|
7a46bf4733 | ||
|
727f505307 | ||
|
67433f2b72 | ||
|
33e2d1a90a | ||
|
83f176d1cb | ||
|
ef9d0b6024 | ||
|
ef778cdcb5 | ||
|
b38717e52b | ||
|
54cbf8e0eb | ||
|
31c9f79d3f | ||
|
e1721db311 | ||
|
499f90e88c | ||
|
a8841b34d5 | ||
|
3931b4dac1 | ||
|
19617d2427 | ||
|
107c55863c | ||
|
fba52afbb0 | ||
|
e3648f703b | ||
|
cd68d0039c | ||
|
81ae863b45 | ||
|
2253fc50b4 | ||
|
83abaec4f4 | ||
|
521f904778 | ||
|
0ab8ddf404 | ||
|
5f1f5fd003 | ||
|
de4b54baa4 | ||
|
654314a61b | ||
|
4e9336cdb4 | ||
|
ea100e6038 | ||
|
5932ad7170 | ||
|
4c35df7f63 | ||
|
7d98fe6020 | ||
|
80a7b4e57f | ||
|
3ffffd8c4e | ||
|
360f7649ab | ||
|
a84631b28e | ||
|
ed48c1fef3 | ||
|
2d25ccbb6a | ||
|
822d3d66c0 | ||
|
8d90efaa2b | ||
|
427272fde1 | ||
|
ea483c9c37 | ||
|
8e40faa889 | ||
|
41de08c283 | ||
|
13111b2a60 | ||
|
fbed65adcc | ||
|
8eaee2b1a6 | ||
|
670961e6d0 | ||
|
bb272a658e | ||
|
36be200157 | ||
|
a2d4e59e5d | ||
|
0e39ab9a3d | ||
|
a50fc75b01 | ||
|
4cb60d1968 | ||
|
93a478bacf | ||
|
2bc1f36139 | ||
|
e04d94f97a | ||
|
ebfcf8823c | ||
|
3dd4b02e5b | ||
|
67469cf339 | ||
|
38a7fd7e73 | ||
|
bb4bd60b1b | ||
|
a29c9ae6af | ||
|
7341c01e6e | ||
|
7556b39b43 | ||
|
13daec625b | ||
|
99ebc698ab | ||
|
bcb60fe0c0 | ||
|
59f42c1f7d | ||
|
a2bc92fcfc | ||
|
d65887bc8a | ||
|
a9eb2c43d0 | ||
|
492e70d070 | ||
|
89189695ad | ||
|
9961b92913 | ||
|
25dbf0c1ad | ||
|
24c7160e70 | ||
|
fe55c23b74 | ||
|
4405cdfc98 | ||
|
94bba86310 | ||
|
2a6697b28d | ||
|
956374b647 | ||
|
a4fcaf952d | ||
|
a1c5819de0 | ||
|
6f92cdf3c1 | ||
|
062864b6e3 | ||
|
0d8366e57b | ||
|
7756198a39 | ||
|
01387ff5b9 | ||
|
e2bc278ca9 | ||
|
ff4d8283fa | ||
|
2244ab2b39 | ||
|
6c86a8fc18 | ||
|
62060925cd | ||
|
9fdd3edd0e | ||
|
9a0b62aa16 | ||
|
bf4bac340d | ||
|
204092c6fc | ||
|
0c6ea62671 | ||
|
8666f51e76 | ||
|
3a28a5ce30 | ||
|
b329be94c3 | ||
|
c0fc2c97f5 | ||
|
2637132a28 | ||
|
30bc5f49e8 | ||
|
c78411872b | ||
|
a180534be2 | ||
|
c0b1369f56 | ||
|
bc106e2250 | ||
|
736f8c4bae | ||
|
ce318e61db | ||
|
049aa89a3d | ||
|
3c39b16acd | ||
|
d5262c80e8 | ||
|
0e224c1667 | ||
|
a2b6f8bf62 | ||
|
f9038a03f4 | ||
|
f38d4bee97 | ||
|
b6821436e9 | ||
|
8497dff045 | ||
|
c5b98a63bc | ||
|
9d672b3f40 | ||
|
14694a6d85 | ||
|
a0e8291a9c | ||
|
7478542970 | ||
|
2f1c5162f1 | ||
|
e6e19cbcba | ||
|
ff681ed93a | ||
|
1c812a3d65 | ||
|
9bb3373f99 | ||
|
0a5470e73d | ||
|
1a06c45acc | ||
|
f401c611a3 | ||
|
38bfd536fd | ||
|
9b429f6951 | ||
|
2dde114ad8 | ||
|
6d3033ba37 | ||
|
355eb6c5f7 | ||
|
792d057db1 | ||
|
c2410233b1 | ||
|
5df45794c1 | ||
|
f2f35e8fc6 | ||
|
a515d26475 | ||
|
29e4d74f94 | ||
|
5a54e34ed6 | ||
|
753ef3e29d | ||
|
714d7c3835 | ||
|
1533385260 | ||
|
1bfd314196 | ||
|
325a72e6a4 | ||
|
a26d81a74a | ||
|
bc19358ed3 | ||
|
4da9b68a7c | ||
7af73621bb | |||
|
a32d7217c9 | ||
|
12e1f6dfb2 | ||
|
6ca4fe7964 | ||
|
81601ec5da | ||
|
cbb34190cf | ||
|
6902ba22f4 | ||
|
a4d7754b3c | ||
|
72b52ba743 | ||
|
6d80cd0d03 | ||
|
81c6800338 | ||
|
a09fecfd11 | ||
|
cb889df131 | ||
|
3fa8ac2b14 | ||
|
09fe207e5e | ||
|
b765b03a20 | ||
|
3ba24a6cb6 | ||
|
ead08164d5 | ||
|
775374903f | ||
|
c80c1ca231 | ||
|
5fba3dabef | ||
|
8f9415748d | ||
|
0d0e27912e | ||
|
69ff4e7079 | ||
|
c93805aa64 | ||
|
4d4bf65c0c | ||
|
e9c017b38b | ||
|
ec8f0696bb | ||
|
537b62c654 | ||
|
610b99efae | ||
|
b0950d3e1d | ||
|
970c6d7a3c | ||
|
e71d65f04f | ||
|
dd4eb5221c | ||
|
ac641ce702 | ||
|
ca1db6890d | ||
|
b9c63219bb | ||
|
d4cf8b256b | ||
|
ea46fb5501 | ||
|
923c7a0d77 | ||
|
be8b6b8f48 | ||
|
15a562f390 | ||
|
c0c459a213 | ||
|
2b63a79c5d | ||
|
6d91f5aea9 | ||
|
e4627a964a | ||
|
5d095af291 | ||
|
371e6cc9a1 | ||
|
9c6fa7d5ce | ||
|
8e4995558f | ||
|
39aa4a8d3c | ||
|
963d82066e | ||
|
9a97f12337 | ||
|
5797dc98bb | ||
|
533d52a24e | ||
|
4db0db9ec3 | ||
|
ba53394952 | ||
|
d36dbe17a0 | ||
|
4c431e515d | ||
|
ab4bd28607 | ||
|
623274b1a6 | ||
|
7227fd45ae | ||
|
57a514e7b2 | ||
|
7efae63920 | ||
|
8547a671c4 | ||
|
3c1368153c | ||
|
becce291af | ||
|
9c9abd27dd | ||
|
4e41efb75a | ||
ce1269827e | |||
|
bf043f954d |
8
Makefile
8
Makefile
@ -20,6 +20,7 @@ make build-dev Build and run dev environment
|
||||
make stop-dev Stop dev environment
|
||||
make stop-prod Stop prod environment
|
||||
make build-prod Build and run prod environment
|
||||
make restar-prod Restart prod environment
|
||||
make all Show help
|
||||
|
||||
endef
|
||||
@ -54,6 +55,10 @@ create-categories:
|
||||
docker exec -it $(BACKEND_APP_NAME) $(SHELL) "-c" \
|
||||
"python3 manage.py runscript create_categories"
|
||||
|
||||
create-reference-locations:
|
||||
docker exec -it $(BACKEND_APP_NAME) $(SHELL) "-c" \
|
||||
"python3 manage.py runscript create_reference_locations"
|
||||
|
||||
build-dev:
|
||||
DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 docker-compose -f docker-compose.yml up --build -d
|
||||
|
||||
@ -75,6 +80,9 @@ stop-prod:
|
||||
restart-backend-prod:
|
||||
docker-compose -f docker-compose.prod.yml restart backend
|
||||
|
||||
prod-restart:
|
||||
DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 docker-compose -f docker-compose.prod.yml restart
|
||||
|
||||
all: help
|
||||
|
||||
.PHONY: help lint format test super-user make-migrations migrate build-dev build-prod stop-dev stop-prod all
|
||||
|
18
README.md
18
README.md
@ -15,6 +15,12 @@ On peut aussi peupler les catégories avec un choix de catégories élémentaire
|
||||
|
||||
* ```make create-categories```
|
||||
|
||||
On peut aussi peupler les positions de référence qui serviront aux recherches géographiques avec la commande, après avoir éventuellement modifié le fichier [communes.json](./src/scripts/communes.json) qui contient pour l'exemple toutes les communes récupérées depuis [public.opendatasoft.com](https://public.opendatasoft.com/explore/dataset/georef-france-commune/export/?flg=fr-fr&disjunctive.reg_name&disjunctive.dep_name&disjunctive.arrdep_name&disjunctive.ze2020_name&disjunctive.bv2022_name&disjunctive.epci_name&disjunctive.ept_name&disjunctive.com_name&disjunctive.ze2010_name&disjunctive.com_is_mountain_area&sort=year&refine.dep_name=Puy-de-D%C3%B4me&location=9,45.51597,3.05969&basemap=jawg.light):
|
||||
|
||||
* ```make create-reference-locations```
|
||||
|
||||
|
||||
|
||||
## Notes aux développeurs
|
||||
|
||||
### Ajout d'une nouvelle source *custom*
|
||||
@ -25,4 +31,14 @@ Pour ajouter une nouvelle source custom:
|
||||
- quand l'import fonctionne de manière indépendante dans ces expérimentations, il est tant de l'ajouter au site internet:
|
||||
- ajouter à la classe ```RecurrentImport.PROCESSOR``` présente dans le fichier ```src/agenda_culturel/models.py``` une entrée correspondant à cette source pour qu'elle soit proposée aux utilisateurs
|
||||
- ajouter à la fonction ```run_recurrent_import``` présente dans le fichier ```src/agenda_culturel/celery.py``` le test correspondant à cet ajout, pour lancer le bon extracteur
|
||||
- se rendre sur le site, page administration, et ajouter un import récurrent correspondant à cette nouvelle source
|
||||
- se rendre sur le site, page administration, et ajouter un import récurrent correspondant à cette nouvelle source
|
||||
|
||||
### Récupérer un dump du prod sur un serveur dev
|
||||
|
||||
* sur le serveur de dev:
|
||||
* ```docker exec -i agenda_culturel-backend python3 manage.py dumpdata --natural-foreign --natural-primary --format=json --exclude=admin.logentry --indent=2 > fixtures/postgres-backup-20241101.json``` (à noter qu'ici on oublie les comptes, qu'il faudra recréer)
|
||||
* sur le serveur de prod:
|
||||
* On récupère le dump json ```scp $SERVEUR:$PATH/fixtures/postgres-backup-20241101.json src/fixtures/```
|
||||
* ```scripts/reset-database.sh FIXTURE COMMIT``` où ```FIXTURE``` est le timestamp dans le nom de la fixture, et ```COMMIT``` est l'ID du commit git correspondant à celle en prod sur le serveur au moment de la création de la fixture
|
||||
|
||||
À noter que les images ne sont pas récupérées.
|
@ -5,10 +5,11 @@ WORKDIR /usr/src/app
|
||||
|
||||
RUN --mount=type=cache,target=/var/cache/apt \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends -y build-essential libpq-dev gettext chromium-driver \
|
||||
apt-get install --no-install-recommends -y build-essential libpq-dev gettext chromium-driver gdal-bin fonts-symbola \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
|
||||
|
||||
COPY src/requirements.txt ./requirements.txt
|
||||
|
||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||
|
@ -32,5 +32,9 @@ http {
|
||||
error_page 502 /static/html/500.html;
|
||||
error_page 503 /static/html/500.html;
|
||||
|
||||
if ($http_user_agent ~* (Amazonbot|meta-externalagent|ClaudeBot)) {
|
||||
return 444;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -23,7 +23,7 @@ services:
|
||||
command: [ "/bin/bash", "/app/deployment/scripts/wait-db.sh", "/app/deployment/scripts/backend/start.sh" ]
|
||||
|
||||
db:
|
||||
image: postgres:15.2-alpine
|
||||
image: postgis/postgis:15-3.4-alpine
|
||||
container_name: "${APP_NAME}-db"
|
||||
hostname: "${POSTGRES_HOST:-db}"
|
||||
volumes:
|
||||
|
@ -23,7 +23,7 @@ services:
|
||||
command: [ "/bin/bash", "/app/deployment/scripts/backend/start.sh" ]
|
||||
|
||||
db:
|
||||
image: postgres:15.2-alpine
|
||||
image: postgis/postgis:15-3.4-alpine
|
||||
container_name: "${APP_NAME}-db"
|
||||
hostname: "${POSTGRES_HOST:-db}"
|
||||
volumes:
|
||||
|
40
experimentations/get_arachnee_events.py
Executable file
40
experimentations/get_arachnee_events.py
Executable file
@ -0,0 +1,40 @@
|
||||
#!/usr/bin/python3
|
||||
# coding: utf-8
|
||||
|
||||
import os
|
||||
import json
|
||||
import sys
|
||||
|
||||
# getting the name of the directory
|
||||
# where the this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.agenda_culturel.import_tasks.downloader import *
|
||||
from src.agenda_culturel.import_tasks.extractor import *
|
||||
from src.agenda_culturel.import_tasks.importer import *
|
||||
from src.agenda_culturel.import_tasks.custom_extractors import *
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
u2e = URL2Events(ChromiumHeadlessDownloader(), arachnee.CExtractor())
|
||||
url = "https://www.arachnee-concerts.com/wp-admin/admin-ajax.php?action=movies-filter&per_page=9999&date=NaN.NaN.NaN&theatres=Clermont-Fd&cat=&sorting=&list_all_events=¤t_page="
|
||||
url_human = "https://www.arachnee-concerts.com/agenda-des-concerts/Clermont-Fd/"
|
||||
|
||||
try:
|
||||
events = u2e.process(url, url_human, cache = "cache-arachnee.html", default_values = {}, published = True)
|
||||
|
||||
exportfile = "events-arachnee.json"
|
||||
print("Saving events to file {}".format(exportfile))
|
||||
with open(exportfile, "w") as f:
|
||||
json.dump(events, f, indent=4, default=str)
|
||||
except Exception as e:
|
||||
print("Exception: " + str(e))
|
43
experimentations/get_c3c_events.py
Executable file
43
experimentations/get_c3c_events.py
Executable file
@ -0,0 +1,43 @@
|
||||
#!/usr/bin/python3
|
||||
# coding: utf-8
|
||||
|
||||
import os
|
||||
import json
|
||||
import sys
|
||||
|
||||
# getting the name of the directory
|
||||
# where the this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.agenda_culturel.import_tasks.downloader import *
|
||||
from src.agenda_culturel.import_tasks.extractor import *
|
||||
from src.agenda_culturel.import_tasks.importer import *
|
||||
from src.agenda_culturel.import_tasks.custom_extractors import *
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
u2e = URL2Events(ChromiumHeadlessDownloader(), c3c.CExtractor())
|
||||
url = "https://billetterie-c3c.clermont-ferrand.fr/"
|
||||
url_human = "https://billetterie-c3c.clermont-ferrand.fr/"
|
||||
|
||||
try:
|
||||
events = u2e.process(url, url_human, cache = "cache-c3c.html", default_values = {"location": "La Cour des 3 Coquins"}, published = True)
|
||||
|
||||
exportfile = "events-c3c.json"
|
||||
print("Saving events to file {}".format(exportfile))
|
||||
with open(exportfile, "w") as f:
|
||||
json.dump(events, f, indent=4, default=str)
|
||||
except Exception as e:
|
||||
print("Exception: " + str(e))
|
@ -28,8 +28,8 @@ from src.agenda_culturel.import_tasks.extractor_facebook import *
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
u2e = URL2Events(ChromiumHeadlessDownloader(), FacebookEventExtractor(single_event=True))
|
||||
url="https://www.facebook.com/events/872781744074648"
|
||||
u2e = URL2Events(ChromiumHeadlessDownloader(), FacebookEventExtractor())
|
||||
url="https://www.facebook.com/events/s/tour-du-sud-invite-koum/430014373384441/"
|
||||
|
||||
events = u2e.process(url, cache = "fb.html", published = True)
|
||||
|
||||
|
43
experimentations/get_facebook_events.py
Executable file
43
experimentations/get_facebook_events.py
Executable file
@ -0,0 +1,43 @@
|
||||
#!/usr/bin/python3
|
||||
# coding: utf-8
|
||||
|
||||
import os
|
||||
import json
|
||||
import sys
|
||||
|
||||
# getting the name of the directory
|
||||
# where the this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.agenda_culturel.import_tasks.downloader import *
|
||||
from src.agenda_culturel.import_tasks.extractor import *
|
||||
from src.agenda_culturel.import_tasks.importer import *
|
||||
from src.agenda_culturel.import_tasks.custom_extractors import *
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
u2e = URL2Events(ChromiumHeadlessDownloader(), fbevents.CExtractor())
|
||||
url = "https://www.facebook.com/laJeteeClermont/upcoming_hosted_events"
|
||||
url_human = "https://www.facebook.com/laJeteeClermont/upcoming_hosted_events"
|
||||
|
||||
try:
|
||||
events = u2e.process(url, url_human, cache = "cache-lajetee-fb.html", default_values = {"location": "La Jetée"}, published = True)
|
||||
|
||||
exportfile = "events-lajetee-fb.json"
|
||||
print("Saving events to file {}".format(exportfile))
|
||||
with open(exportfile, "w") as f:
|
||||
json.dump(events, f, indent=4, default=str)
|
||||
except Exception as e:
|
||||
print("Exception: " + str(e))
|
@ -32,7 +32,7 @@ if __name__ == "__main__":
|
||||
url = "https://calendar.google.com/calendar/ical/programmation.lesaugustes%40gmail.com/public/basic.ics"
|
||||
url_human = "https://www.cafelesaugustes.fr/la-programmation/"
|
||||
|
||||
events = u2e.process(url, url_human, cache = "cache-augustes.ical", default_values = {"category": "Autre", "location": "Café lecture les Augustes"}, published = True)
|
||||
events = u2e.process(url, url_human, cache = "cache-augustes.ical", default_values = {"category": "Sans catégorie", "location": "Café lecture les Augustes"}, published = True)
|
||||
|
||||
exportfile = "events-augustes.json"
|
||||
print("Saving events to file {}".format(exportfile))
|
||||
|
@ -29,8 +29,8 @@ from src.agenda_culturel.import_tasks.custom_extractors import *
|
||||
if __name__ == "__main__":
|
||||
|
||||
u2e = URL2Events(SimpleDownloader(), lacomedie.CExtractor())
|
||||
url = "https://lacomediedeclermont.com/saison23-24/wp-admin/admin-ajax.php?action=load_dates_existantes"
|
||||
url_human = "https://lacomediedeclermont.com/saison23-24/"
|
||||
url = "https://lacomediedeclermont.com/saison24-25/wp-admin/admin-ajax.php?action=load_dates_existantes"
|
||||
url_human = "https://lacomediedeclermont.com/saison24-25/"
|
||||
|
||||
try:
|
||||
events = u2e.process(url, url_human, cache = "cache-lacomedie.html", default_values = {"location": "La Comédie de Clermont"}, published = True)
|
||||
|
@ -33,7 +33,7 @@ if __name__ == "__main__":
|
||||
url_human = "https://www.lacoope.org/concerts-calendrier/"
|
||||
|
||||
try:
|
||||
events = u2e.process(url, url_human, cache = "cache-lacoope.html", default_values = {"category": "Concert", "location": "La Coopérative"}, published = True)
|
||||
events = u2e.process(url, url_human, cache = "cache-lacoope.html", default_values = {"category": "Fêtes & Concerts", "location": "La Coopérative"}, published = True)
|
||||
|
||||
exportfile = "events-lacoope.json"
|
||||
print("Saving events to file {}".format(exportfile))
|
||||
|
@ -29,8 +29,8 @@ from src.agenda_culturel.import_tasks.custom_extractors import *
|
||||
if __name__ == "__main__":
|
||||
|
||||
u2e = URL2Events(SimpleDownloader(), lapucealoreille.CExtractor())
|
||||
url = "https://www.lapucealoreille63.fr/programmation/"
|
||||
url_human = "https://www.lapucealoreille63.fr/programmation/"
|
||||
url = "https://www.lapucealoreille63.fr/agenda"
|
||||
url_human = "https://www.lapucealoreille63.fr/agenda"
|
||||
|
||||
try:
|
||||
events = u2e.process(url, url_human, cache = "cache-lapucealoreille.xml", default_values = {}, published = True)
|
||||
|
43
experimentations/get_le_poulailler.py
Executable file
43
experimentations/get_le_poulailler.py
Executable file
@ -0,0 +1,43 @@
|
||||
#!/usr/bin/python3
|
||||
# coding: utf-8
|
||||
|
||||
import os
|
||||
import json
|
||||
import sys
|
||||
|
||||
# getting the name of the directory
|
||||
# where the this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.agenda_culturel.import_tasks.downloader import *
|
||||
from src.agenda_culturel.import_tasks.extractor import *
|
||||
from src.agenda_culturel.import_tasks.importer import *
|
||||
from src.agenda_culturel.import_tasks.custom_extractors import *
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
u2e = URL2Events(SimpleDownloader(), wordpress_mec.CExtractor())
|
||||
url = "https://www.cabaretlepoulailler.fr/agenda/tout-lagenda/"
|
||||
url_human = "https://www.cabaretlepoulailler.fr/agenda/tout-lagenda/"
|
||||
|
||||
try:
|
||||
events = u2e.process(url, url_human, cache = "cache-le-poulailler.html", default_values = {"location": "Le Poulailler"}, published = True)
|
||||
|
||||
exportfile = "events-le-poulailler.json"
|
||||
print("Saving events to file {}".format(exportfile))
|
||||
with open(exportfile, "w") as f:
|
||||
json.dump(events, f, indent=4, default=str)
|
||||
except Exception as e:
|
||||
print("Exception: " + str(e))
|
43
experimentations/get_le_rio.py
Executable file
43
experimentations/get_le_rio.py
Executable file
@ -0,0 +1,43 @@
|
||||
#!/usr/bin/python3
|
||||
# coding: utf-8
|
||||
|
||||
import os
|
||||
import json
|
||||
import sys
|
||||
|
||||
# getting the name of the directory
|
||||
# where the this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.agenda_culturel.import_tasks.downloader import *
|
||||
from src.agenda_culturel.import_tasks.extractor import *
|
||||
from src.agenda_culturel.import_tasks.importer import *
|
||||
from src.agenda_culturel.import_tasks.custom_extractors import *
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
u2e = URL2Events(SimpleDownloader(), lerio.CExtractor())
|
||||
url = "https://www.cinemalerio.com/evenements/"
|
||||
url_human = "https://www.cinemalerio.com/evenements/"
|
||||
|
||||
try:
|
||||
events = u2e.process(url, url_human, cache = "cache-le-rio.html", default_values = {"location": "Cinéma le Rio", "category": "Cinéma"}, published = True)
|
||||
|
||||
exportfile = "events-le-roi.json"
|
||||
print("Saving events to file {}".format(exportfile))
|
||||
with open(exportfile, "w") as f:
|
||||
json.dump(events, f, indent=4, default=str)
|
||||
except Exception as e:
|
||||
print("Exception: " + str(e))
|
43
experimentations/get_les_vinzelles.py
Executable file
43
experimentations/get_les_vinzelles.py
Executable file
@ -0,0 +1,43 @@
|
||||
#!/usr/bin/python3
|
||||
# coding: utf-8
|
||||
|
||||
import os
|
||||
import json
|
||||
import sys
|
||||
|
||||
# getting the name of the directory
|
||||
# where the this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.agenda_culturel.import_tasks.downloader import *
|
||||
from src.agenda_culturel.import_tasks.extractor import *
|
||||
from src.agenda_culturel.import_tasks.importer import *
|
||||
from src.agenda_culturel.import_tasks.custom_extractors import *
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
u2e = URL2Events(SimpleDownloader(), wordpress_mec.CExtractor())
|
||||
url = "https://www.lesvinzelles.com/index.php/programme/"
|
||||
url_human = "https://www.lesvinzelles.com/index.php/programme/"
|
||||
|
||||
try:
|
||||
events = u2e.process(url, url_human, cache = "cache-les-vinzelles.html", default_values = {"location": "Les Vinzelles"}, published = True)
|
||||
|
||||
exportfile = "events-les-vinzelles.json"
|
||||
print("Saving events to file {}".format(exportfile))
|
||||
with open(exportfile, "w") as f:
|
||||
json.dump(events, f, indent=4, default=str)
|
||||
except Exception as e:
|
||||
print("Exception: " + str(e))
|
92
scripts/reset-database.sh
Executable file
92
scripts/reset-database.sh
Executable file
@ -0,0 +1,92 @@
|
||||
#!/bin/sh
|
||||
|
||||
|
||||
FIXTURE=$1
|
||||
COMMIT=$2
|
||||
FORCE=$3
|
||||
|
||||
help() {
|
||||
echo "USAGE: scripts/reset-database.sh [FIXTURE] [COMMIT]"
|
||||
echo " "
|
||||
echo "Parameters:"
|
||||
echo " FIXTURE A timestamp used in fixture name"
|
||||
echo " COMMIT A commit ID used by git checkout"
|
||||
echo " "
|
||||
echo "Example:"
|
||||
echo " scripts/reset-database.sh 20241110 cb69ece6ca5ba04e94dcc2758f53869c70224592"
|
||||
}
|
||||
|
||||
bold=$(tput bold)
|
||||
normal=$(tput sgr0)
|
||||
echobold() {
|
||||
echo "${bold}$1${normal}"
|
||||
}
|
||||
|
||||
if ! [ -n "$FORCE" ]; then
|
||||
nginx=`docker ps|grep nginx`
|
||||
if [ -n "$nginx" ]; then
|
||||
echo "WARNING: this script is probably run on a production server. Use a third parameter if you really want to run it."
|
||||
exit 3
|
||||
fi
|
||||
fi
|
||||
|
||||
if ! [ -n "$FIXTURE" ]; then
|
||||
echo "No fixture defined. Abort."
|
||||
help
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [ -n "$COMMIT" ]; then
|
||||
echo "No commit version defined. Abort."
|
||||
help
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
FFILE=fixtures/postgres-backup-$FIXTURE.json
|
||||
|
||||
if ! [ -f "src/$FFILE" ]; then
|
||||
echo "ERROR: missing fixture file ($FFILE)"
|
||||
exit 2
|
||||
fi
|
||||
|
||||
|
||||
|
||||
|
||||
echo " "
|
||||
echobold "WARNING: use Ctrl+C to stop the reset process since a 'no' answer cannot be detected."
|
||||
echo " "
|
||||
|
||||
# remove all elements in database
|
||||
echobold "Flush database"
|
||||
docker exec -i agenda_culturel-backend python3 manage.py flush
|
||||
|
||||
# move back database structure to the original
|
||||
echobold "Setup database structure to zero"
|
||||
docker exec -i agenda_culturel-backend python3 manage.py migrate agenda_culturel zero
|
||||
|
||||
# reset code depending on a specific commit
|
||||
echobold "Move back to the desired commit"
|
||||
git checkout $COMMIT
|
||||
|
||||
# change database to reach this specific version
|
||||
echobold "Setup database stucture according to the selected commit"
|
||||
docker exec -i agenda_culturel-backend python3 manage.py migrate agenda_culturel
|
||||
|
||||
# remove all elements in database
|
||||
echobold "Flush database"
|
||||
docker exec -i agenda_culturel-backend python3 manage.py flush --no-input
|
||||
|
||||
# import data
|
||||
echobold "Import data"
|
||||
docker exec -i agenda_culturel-backend python3 manage.py loaddata --format=json $FFILE
|
||||
|
||||
# reset code to uptodate version
|
||||
echobold "Move back to last commit"
|
||||
git checkout main
|
||||
|
||||
# update database structure
|
||||
echobold "Update database"
|
||||
docker exec -i agenda_culturel-backend python3 manage.py migrate agenda_culturel
|
||||
|
||||
|
@ -3,11 +3,15 @@ from django import forms
|
||||
from .models import (
|
||||
Event,
|
||||
Category,
|
||||
Tag,
|
||||
StaticContent,
|
||||
DuplicatedEvents,
|
||||
BatchImportation,
|
||||
RecurrentImport,
|
||||
Place,
|
||||
Message,
|
||||
ReferenceLocation,
|
||||
Organisation
|
||||
)
|
||||
from django_better_admin_arrayfield.admin.mixins import DynamicArrayMixin
|
||||
from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget
|
||||
@ -15,11 +19,15 @@ from django_better_admin_arrayfield.models.fields import DynamicArrayField
|
||||
|
||||
|
||||
admin.site.register(Category)
|
||||
admin.site.register(Tag)
|
||||
admin.site.register(StaticContent)
|
||||
admin.site.register(DuplicatedEvents)
|
||||
admin.site.register(BatchImportation)
|
||||
admin.site.register(RecurrentImport)
|
||||
admin.site.register(Place)
|
||||
admin.site.register(Message)
|
||||
admin.site.register(ReferenceLocation)
|
||||
admin.site.register(Organisation)
|
||||
|
||||
|
||||
class URLWidget(DynamicArrayWidget):
|
||||
|
@ -1,8 +1,14 @@
|
||||
from datetime import datetime, timedelta, date, time
|
||||
import calendar
|
||||
from django.db.models import Q
|
||||
from django.db.models import Q, F
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.template.defaultfilters import date as _date
|
||||
|
||||
from django.db.models import CharField
|
||||
from django.db.models.functions import Lower
|
||||
|
||||
CharField.register_lookup(Lower)
|
||||
|
||||
import logging
|
||||
|
||||
@ -20,7 +26,7 @@ def daterange(start, end, step=timedelta(1)):
|
||||
|
||||
|
||||
class DayInCalendar:
|
||||
midnight = time(23, 59, 59)
|
||||
midnight = time(0, 0, 0)
|
||||
|
||||
def __init__(self, d, on_requested_interval=True):
|
||||
self.date = d
|
||||
@ -29,10 +35,13 @@ class DayInCalendar:
|
||||
|
||||
self.in_past = d < now
|
||||
self.today = d == now
|
||||
self.tomorrow = d == now + timedelta(days=+1)
|
||||
self.events = []
|
||||
self.on_requested_interval = on_requested_interval
|
||||
|
||||
self.events_by_category = {}
|
||||
self.time_intervals = None
|
||||
self.id = d.strftime('%Y-%m-%d')
|
||||
|
||||
def is_in_past(self):
|
||||
return self.in_past
|
||||
@ -40,6 +49,9 @@ class DayInCalendar:
|
||||
def is_today(self):
|
||||
return self.today
|
||||
|
||||
def is_tomorrow(self):
|
||||
return self.tomorrow
|
||||
|
||||
def is_ancestor_uuid_event_from_other(self, event):
|
||||
for e in self.events:
|
||||
if event.is_ancestor_by_uuid(e):
|
||||
@ -74,15 +86,30 @@ class DayInCalendar:
|
||||
self._add_event_internal(event)
|
||||
|
||||
def _add_event_internal(self, event):
|
||||
self.events.append(event)
|
||||
if event.category is None:
|
||||
if "" not in self.events_by_category:
|
||||
self.events_by_category[""] = []
|
||||
self.events_by_category[""].append(event)
|
||||
from .models import Category
|
||||
from copy import deepcopy
|
||||
# copy event
|
||||
local_event = deepcopy(event)
|
||||
|
||||
# set values
|
||||
if local_event.start_day != self.date:
|
||||
local_event.start_day = self.date
|
||||
local_event.start_time = None
|
||||
if local_event.end_day != self.date:
|
||||
local_event.end_day = None
|
||||
local_event.end_time = None
|
||||
|
||||
# add event to the day
|
||||
self.events.append(local_event)
|
||||
|
||||
# add in its category
|
||||
if local_event.category is None:
|
||||
cat = Category.default_name
|
||||
else:
|
||||
if event.category.name not in self.events_by_category:
|
||||
self.events_by_category[event.category.name] = []
|
||||
self.events_by_category[event.category.name].append(event)
|
||||
cat = local_event.category.name
|
||||
if cat not in self.events_by_category:
|
||||
self.events_by_category[cat] = []
|
||||
self.events_by_category[cat].append(local_event)
|
||||
|
||||
def filter_events(self):
|
||||
self.events.sort(
|
||||
@ -90,14 +117,88 @@ class DayInCalendar:
|
||||
if e.start_time is None
|
||||
else e.start_time
|
||||
)
|
||||
self.today_night = False
|
||||
if self.is_today():
|
||||
self.today_night = True
|
||||
now = timezone.now()
|
||||
nday = now.date()
|
||||
ntime = now.time()
|
||||
found = False
|
||||
for idx,e in enumerate(self.events):
|
||||
if (nday < e.start_day) or (nday == e.start_day and e.start_time and ntime <= e.start_time):
|
||||
self.events[idx].is_first_after_now = True
|
||||
found = True
|
||||
break
|
||||
if found:
|
||||
self.today_night = False
|
||||
|
||||
def is_today_after_events(self):
|
||||
return self.is_today() and self.today_night
|
||||
|
||||
def events_by_category_ordered(self):
|
||||
from .models import Category
|
||||
cats = Category.objects.order_by('position')
|
||||
result = []
|
||||
for c in cats:
|
||||
if c.name in self.events_by_category:
|
||||
result.append((c.name, self.events_by_category[c.name]))
|
||||
return result
|
||||
|
||||
def build_time_intervals(self, all_day_name, all_day_short_name, interval_names, interval_short_names, interval_markers):
|
||||
self.time_intervals = [IntervalInDay(self.date, i, n[0], n[1]) for i, n in
|
||||
enumerate(zip([all_day_name] + interval_names, [all_day_short_name] + interval_short_names))]
|
||||
|
||||
for e in self.events:
|
||||
if e.start_time is None:
|
||||
self.time_intervals[0].add_event(e)
|
||||
else:
|
||||
dt = datetime.combine(e.start_day, e.start_time)
|
||||
ok = False
|
||||
for i in range(len(interval_markers)):
|
||||
if dt < interval_markers[i]:
|
||||
self.time_intervals[i + 1].add_event(e)
|
||||
ok = True
|
||||
break
|
||||
if not ok:
|
||||
self.time_intervals[-1].add_event(e)
|
||||
|
||||
def get_time_intervals(self):
|
||||
if self.time_intervals is None:
|
||||
if self.is_today():
|
||||
all_day_name = _('All day today')
|
||||
interval_names = [_('This morning'), _('This noon'), _('This afternoon'), _('This evening')]
|
||||
elif self.is_tomorrow():
|
||||
name = _("Tomorrow")
|
||||
all_day_name = _('All day tomorrow')
|
||||
interval_names = [_('%s morning') % name, _('%s noon') % name, _('%s afternoon') % name, _('%s evening') % name]
|
||||
else:
|
||||
name = _date(self.date, "l")
|
||||
all_day_name = _('All day %s') % name
|
||||
interval_names = [_('%s morning') % name, _('%s noon') % name, _('%s afternoon') % name, _('%s evening') % name]
|
||||
all_day_short_name = _('All day')
|
||||
interval_short_names = [_('Morning'), _('Noon'), _('Afternoon'), _('Evening')]
|
||||
interval_markers = [datetime.combine(self.date, time(h, m)) for h, m in [(11, 30), (13, 0), (18, 0)]]
|
||||
self.build_time_intervals(all_day_name, all_day_short_name, interval_names, interval_short_names, interval_markers)
|
||||
|
||||
return self.time_intervals
|
||||
|
||||
|
||||
class IntervalInDay(DayInCalendar):
|
||||
|
||||
def __init__(self, d, id, name, short_name):
|
||||
self.name = name
|
||||
self.short_name = short_name
|
||||
super().__init__(d)
|
||||
self.id = self.id + '-' + str(id)
|
||||
|
||||
class CalendarList:
|
||||
def __init__(self, firstdate, lastdate, filter=None, exact=False):
|
||||
def __init__(self, firstdate, lastdate, filter=None, exact=False, ignore_dup=None, qs=None):
|
||||
self.firstdate = firstdate
|
||||
self.lastdate = lastdate
|
||||
self.now = date.today()
|
||||
self.filter = filter
|
||||
self.ignore_dup = ignore_dup
|
||||
self.qs = qs
|
||||
|
||||
if exact:
|
||||
self.c_firstdate = self.firstdate
|
||||
@ -108,6 +209,10 @@ class CalendarList:
|
||||
# end the last day of the last week
|
||||
self.c_lastdate = lastdate + timedelta(days=6 - lastdate.weekday())
|
||||
|
||||
self.calendar_days = None
|
||||
|
||||
|
||||
def build_internal(self):
|
||||
# create a list of DayInCalendars
|
||||
self.create_calendar_days()
|
||||
|
||||
@ -118,6 +223,12 @@ class CalendarList:
|
||||
for i, c in self.calendar_days.items():
|
||||
c.filter_events()
|
||||
|
||||
def get_calendar_days(self):
|
||||
if self.calendar_days is None:
|
||||
self.build_internal()
|
||||
|
||||
return self.calendar_days
|
||||
|
||||
def today_in_calendar(self):
|
||||
return self.firstdate <= self.now and self.lastdate >= self.now
|
||||
|
||||
@ -126,14 +237,20 @@ class CalendarList:
|
||||
|
||||
def fill_calendar_days(self):
|
||||
if self.filter is None:
|
||||
from .models import Event
|
||||
if self.qs is None:
|
||||
from .models import Event
|
||||
|
||||
qs = Event.objects.all()
|
||||
qs = Event.objects.all()
|
||||
else:
|
||||
qs = self.qs
|
||||
else:
|
||||
qs = self.filter.qs
|
||||
startdatetime = datetime.combine(self.c_firstdate, time.min)
|
||||
lastdatetime = datetime.combine(self.c_lastdate, time.max)
|
||||
self.events = qs.filter(
|
||||
|
||||
if self.ignore_dup:
|
||||
qs = qs.exclude(other_versions=self.ignore_dup)
|
||||
startdatetime = timezone.make_aware(datetime.combine(self.c_firstdate, time.min), timezone.get_default_timezone())
|
||||
lastdatetime = timezone.make_aware(datetime.combine(self.c_lastdate, time.max), timezone.get_default_timezone())
|
||||
qs = qs.filter(
|
||||
(Q(recurrence_dtend__isnull=True) & Q(recurrence_dtstart__lte=lastdatetime))
|
||||
| (
|
||||
Q(recurrence_dtend__isnull=False)
|
||||
@ -142,7 +259,15 @@ class CalendarList:
|
||||
| Q(recurrence_dtend__lt=startdatetime)
|
||||
)
|
||||
)
|
||||
).order_by("start_time")
|
||||
| (Q(start_day__lte=self.c_firstdate) & (Q(end_day__isnull=True) | Q(end_day__gte=self.c_firstdate)))
|
||||
).filter(
|
||||
Q(other_versions__isnull=True) |
|
||||
Q(other_versions__representative=F('pk')) |
|
||||
Q(other_versions__representative__isnull=True)
|
||||
).order_by("start_time", "title__unaccent__lower")
|
||||
|
||||
qs = qs.select_related("exact_location").select_related("category").select_related("other_versions").select_related("other_versions__representative")
|
||||
self.events = qs
|
||||
|
||||
firstdate = datetime.fromordinal(self.c_firstdate.toordinal())
|
||||
if firstdate.tzinfo is None or firstdate.tzinfo.utcoffset(firstdate) is None:
|
||||
@ -173,14 +298,31 @@ class CalendarList:
|
||||
return hasattr(self, "month")
|
||||
|
||||
def calendar_days_list(self):
|
||||
return list(self.calendar_days.values())
|
||||
return list(self.get_calendar_days().values())
|
||||
|
||||
def get_events(self):
|
||||
return [event for jour in self.calendar_days_list() for event in jour.events]
|
||||
def time_intervals_list(self, onlyfirst=False):
|
||||
ds = self.calendar_days_list()
|
||||
result = []
|
||||
for d in ds:
|
||||
tis = d.get_time_intervals()
|
||||
for t in tis:
|
||||
if len(t.events) > 0:
|
||||
result.append(t)
|
||||
if onlyfirst:
|
||||
break
|
||||
return result
|
||||
|
||||
def time_intervals_list_first(self):
|
||||
return self.time_intervals_list(True)
|
||||
|
||||
def export_to_ics(self, request):
|
||||
from .models import Event
|
||||
events = [event for day in self.get_calendar_days().values() for event in day.events]
|
||||
return Event.export_to_ics(events, request)
|
||||
|
||||
|
||||
class CalendarMonth(CalendarList):
|
||||
def __init__(self, year, month, filter):
|
||||
def __init__(self, year, month, filter, qs=None):
|
||||
self.year = year
|
||||
self.month = month
|
||||
r = calendar.monthrange(year, month)
|
||||
@ -188,7 +330,7 @@ class CalendarMonth(CalendarList):
|
||||
first = date(year, month, 1)
|
||||
last = date(year, month, r[1])
|
||||
|
||||
super().__init__(first, last, filter)
|
||||
super().__init__(first, last, filter, qs)
|
||||
|
||||
def get_month_name(self):
|
||||
return self.firstdate.strftime("%B")
|
||||
@ -201,14 +343,14 @@ class CalendarMonth(CalendarList):
|
||||
|
||||
|
||||
class CalendarWeek(CalendarList):
|
||||
def __init__(self, year, week, filter):
|
||||
def __init__(self, year, week, filter, qs=None):
|
||||
self.year = year
|
||||
self.week = week
|
||||
|
||||
first = date.fromisocalendar(self.year, self.week, 1)
|
||||
last = date.fromisocalendar(self.year, self.week, 7)
|
||||
|
||||
super().__init__(first, last, filter)
|
||||
super().__init__(first, last, filter, qs)
|
||||
|
||||
def next_week(self):
|
||||
return self.firstdate + timedelta(days=7)
|
||||
@ -218,8 +360,8 @@ class CalendarWeek(CalendarList):
|
||||
|
||||
|
||||
class CalendarDay(CalendarList):
|
||||
def __init__(self, date, filter=None):
|
||||
super().__init__(date, date, filter, exact=True)
|
||||
def __init__(self, date, filter=None, qs=None):
|
||||
super().__init__(date, date, filter=filter, qs=qs, exact=True)
|
||||
|
||||
def get_events(self):
|
||||
return self.calendar_days_list()[0].events
|
||||
|
@ -1,9 +1,15 @@
|
||||
import os
|
||||
import json
|
||||
|
||||
from celery import Celery
|
||||
from celery import Celery, Task, chain
|
||||
from celery.schedules import crontab
|
||||
from celery.utils.log import get_task_logger
|
||||
from celery.exceptions import MaxRetriesExceededError
|
||||
import time as time_
|
||||
from django.conf import settings
|
||||
from celery.signals import worker_ready
|
||||
|
||||
from contextlib import contextmanager
|
||||
|
||||
from .import_tasks.downloader import *
|
||||
from .import_tasks.extractor import *
|
||||
@ -18,6 +24,8 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", f"agenda_culturel.settings.{APP_
|
||||
|
||||
app = Celery("agenda_culturel")
|
||||
|
||||
from django.core.cache import cache
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
|
||||
@ -30,6 +38,26 @@ app.config_from_object("django.conf:settings", namespace="CELERY")
|
||||
# Load task modules from all registered Django apps.
|
||||
app.autodiscover_tasks()
|
||||
|
||||
LOCK_EXPIRE = 60 * 10 # Lock expires in 10 minutes
|
||||
|
||||
@contextmanager
|
||||
def memcache_chromium_lock(oid):
|
||||
lock_id = "chromium-lock"
|
||||
timeout_at = time_.monotonic() + LOCK_EXPIRE - 3
|
||||
# cache.add fails if the key already exists
|
||||
status = cache.add(lock_id, oid, LOCK_EXPIRE)
|
||||
try:
|
||||
yield status
|
||||
finally:
|
||||
# memcache delete is very slow, but we have to use it to take
|
||||
# advantage of using add() for atomic locking
|
||||
if time_.monotonic() < timeout_at and status:
|
||||
# don't release the lock if we exceeded the timeout
|
||||
# to lessen the chance of releasing an expired lock
|
||||
# owned by someone else
|
||||
# also don't release the lock if we didn't acquire it
|
||||
cache.delete(lock_id)
|
||||
|
||||
|
||||
def close_import_task(taskid, success, error_message, importer):
|
||||
from agenda_culturel.models import BatchImportation
|
||||
@ -69,35 +97,35 @@ def import_events_from_json(self, json):
|
||||
|
||||
# finally, close task
|
||||
close_import_task(self.request.id, success, error_message, importer)
|
||||
"""except Exception as e:
|
||||
logger.error(e)
|
||||
close_import_task(self.request.id, False, e, importer)"""
|
||||
|
||||
|
||||
@app.task(bind=True)
|
||||
def run_recurrent_import(self, pk):
|
||||
|
||||
class ChromiumTask(Task):
|
||||
_chm = None
|
||||
|
||||
@property
|
||||
def chromiumDownloader(self):
|
||||
if self._chm is None:
|
||||
self._chm = ChromiumHeadlessDownloader()
|
||||
return self._chm
|
||||
|
||||
|
||||
def run_recurrent_import_internal(rimport, downloader, req_id):
|
||||
from agenda_culturel.models import RecurrentImport, BatchImportation
|
||||
from .db_importer import DBImporterEvents
|
||||
|
||||
logger.info("Run recurrent import: {}".format(self.request.id))
|
||||
logger.info("Run recurrent import: {}".format(req_id))
|
||||
|
||||
# get the recurrent import
|
||||
rimport = RecurrentImport.objects.get(pk=pk)
|
||||
|
||||
# create a batch importation
|
||||
importation = BatchImportation(recurrentImport=rimport, celery_id=self.request.id)
|
||||
importation = BatchImportation(recurrentImport=rimport, celery_id=req_id)
|
||||
# save batch importation
|
||||
importation.save()
|
||||
|
||||
# create an importer
|
||||
importer = DBImporterEvents(self.request.id)
|
||||
importer = DBImporterEvents(req_id)
|
||||
|
||||
# prepare downloading and extracting processes
|
||||
downloader = (
|
||||
SimpleDownloader()
|
||||
if rimport.downloader == RecurrentImport.DOWNLOADER.SIMPLE
|
||||
else ChromiumHeadlessDownloader()
|
||||
)
|
||||
|
||||
if rimport.processor == RecurrentImport.PROCESSOR.ICAL:
|
||||
extractor = ICALExtractor()
|
||||
elif rimport.processor == RecurrentImport.PROCESSOR.ICALNOBUSY:
|
||||
@ -112,6 +140,16 @@ def run_recurrent_import(self, pk):
|
||||
extractor = lefotomat.CExtractor()
|
||||
elif rimport.processor == RecurrentImport.PROCESSOR.LAPUCEALOREILLE:
|
||||
extractor = lapucealoreille.CExtractor()
|
||||
elif rimport.processor == RecurrentImport.PROCESSOR.MECWORDPRESS:
|
||||
extractor = wordpress_mec.CExtractor()
|
||||
elif rimport.processor == RecurrentImport.PROCESSOR.FBEVENTS:
|
||||
extractor = fbevents.CExtractor()
|
||||
elif rimport.processor == RecurrentImport.PROCESSOR.C3C:
|
||||
extractor = c3c.CExtractor()
|
||||
elif rimport.processor == RecurrentImport.PROCESSOR.ARACHNEE:
|
||||
extractor = arachnee.CExtractor()
|
||||
elif rimport.processor == RecurrentImport.PROCESSOR.LERIO:
|
||||
extractor = lerio.CExtractor()
|
||||
else:
|
||||
extractor = None
|
||||
|
||||
@ -127,13 +165,14 @@ def run_recurrent_import(self, pk):
|
||||
location = rimport.defaultLocation
|
||||
tags = rimport.defaultTags
|
||||
published = rimport.defaultPublished
|
||||
organisers = [] if rimport.defaultOrganiser is None else [rimport.defaultOrganiser.pk]
|
||||
|
||||
try:
|
||||
# get events from website
|
||||
events = u2e.process(
|
||||
url,
|
||||
browsable_url,
|
||||
default_values={"category": category, "location": location, "tags": tags},
|
||||
default_values={"category": category, "location": location, "tags": tags, "organisers": organisers},
|
||||
published=published,
|
||||
)
|
||||
|
||||
@ -144,47 +183,203 @@ def run_recurrent_import(self, pk):
|
||||
success, error_message = importer.import_events(json_events)
|
||||
|
||||
# finally, close task
|
||||
close_import_task(self.request.id, success, error_message, importer)
|
||||
close_import_task(req_id, success, error_message, importer)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
close_import_task(self.request.id, False, e, importer)
|
||||
close_import_task(req_id, False, e, importer)
|
||||
|
||||
|
||||
|
||||
@app.task(base=ChromiumTask, bind=True)
|
||||
def run_recurrent_import(self, pklist):
|
||||
from agenda_culturel.models import RecurrentImport
|
||||
|
||||
if isinstance(pklist, list):
|
||||
pk = pklist[0]
|
||||
is_list = True
|
||||
else:
|
||||
is_list = False
|
||||
pk = pklist
|
||||
|
||||
# get the recurrent import
|
||||
rimport = RecurrentImport.objects.get(pk=pk)
|
||||
|
||||
# prepare downloading and extracting processes
|
||||
if rimport.downloader == RecurrentImport.DOWNLOADER.SIMPLE:
|
||||
downloader = SimpleDownloader()
|
||||
elif rimport.downloader == RecurrentImport.DOWNLOADER.CHROMIUMHEADLESS:
|
||||
downloader = self.chromiumDownloader
|
||||
downloader.pause = False
|
||||
else:
|
||||
downloader = self.chromiumDownloader
|
||||
downloader.pause = True
|
||||
|
||||
# only one thread using Chromium can run at a time,
|
||||
# to prevent from errors (including strange Facebook errors)
|
||||
if rimport.downloader in [RecurrentImport.DOWNLOADER.CHROMIUMHEADLESS, RecurrentImport.DOWNLOADER.CHROMIUMHEADLESSPAUSE]:
|
||||
with memcache_chromium_lock(self.app.oid) as acquired:
|
||||
if acquired:
|
||||
run_recurrent_import_internal(rimport, downloader, self.request.id)
|
||||
return pklist[1:] if is_list else True
|
||||
else:
|
||||
run_recurrent_import_internal(rimport, downloader, self.request.id)
|
||||
return pklist[1:] if is_list else True
|
||||
|
||||
try:
|
||||
# if chromium is locked, we wait before retrying
|
||||
raise self.retry(countdown=120)
|
||||
except MaxRetriesExceededError as e:
|
||||
logger.error(e)
|
||||
close_import_task(self.request.id, False, e, importer)
|
||||
return pklist[1:] if is_list else False
|
||||
|
||||
|
||||
def run_recurrent_imports_from_list(pklist):
|
||||
|
||||
tasks = chain(run_recurrent_import.s(pklist) if i == 0 else run_recurrent_import.s() for i in range(len(pklist)))
|
||||
tasks.delay()
|
||||
|
||||
@app.task(bind=True)
|
||||
def daily_imports(self):
|
||||
from agenda_culturel.models import RecurrentImport
|
||||
|
||||
logger.info("Imports quotidiens")
|
||||
logger.info("Everyday imports")
|
||||
imports = RecurrentImport.objects.filter(
|
||||
recurrence=RecurrentImport.RECURRENCE.DAILY
|
||||
)
|
||||
).order_by("pk")
|
||||
|
||||
for imp in imports:
|
||||
run_recurrent_import.delay(imp.pk)
|
||||
run_recurrent_imports_from_list([imp.pk for imp in imports])
|
||||
|
||||
|
||||
SCREENSHOT_FILE = settings.MEDIA_ROOT + '/screenshot.png'
|
||||
|
||||
@app.task(bind=True)
|
||||
def screenshot(self):
|
||||
downloader = ChromiumHeadlessDownloader(noimage=False)
|
||||
downloader.screenshot("https://pommesdelune.fr", SCREENSHOT_FILE)
|
||||
|
||||
@worker_ready.connect
|
||||
def at_start(sender, **k):
|
||||
if not os.path.isfile(SCREENSHOT_FILE):
|
||||
logger.info("Init screenshot file")
|
||||
with sender.app.connection() as conn:
|
||||
sender.app.send_task('agenda_culturel.celery.screenshot', None, connection=conn)
|
||||
else:
|
||||
logger.info("Screenshot file already exists")
|
||||
|
||||
|
||||
@app.task(bind=True)
|
||||
def run_all_recurrent_imports(self):
|
||||
from agenda_culturel.models import RecurrentImport
|
||||
|
||||
logger.info("Imports complets")
|
||||
imports = RecurrentImport.objects.all()
|
||||
logger.info("Run all imports")
|
||||
imports = RecurrentImport.objects.all().order_by("pk")
|
||||
|
||||
for imp in imports:
|
||||
run_recurrent_import.delay(imp.pk)
|
||||
run_recurrent_imports_from_list([imp.pk for imp in imports])
|
||||
|
||||
|
||||
@app.task(bind=True)
|
||||
def run_all_recurrent_imports_failed(self):
|
||||
from agenda_culturel.models import RecurrentImport, BatchImportation
|
||||
|
||||
logger.info("Run only failed imports")
|
||||
imports = RecurrentImport.objects.all().order_by("pk")
|
||||
|
||||
run_recurrent_imports_from_list([imp.pk for imp in imports if imp.last_import().status == BatchImportation.STATUS.FAILED])
|
||||
|
||||
@app.task(bind=True)
|
||||
def run_all_recurrent_imports_canceled(self):
|
||||
from agenda_culturel.models import RecurrentImport, BatchImportation
|
||||
|
||||
logger.info("Run only canceled imports")
|
||||
imports = RecurrentImport.objects.all().order_by("pk")
|
||||
|
||||
run_recurrent_imports_from_list([imp.pk for imp in imports if imp.last_import().status == BatchImportation.STATUS.CANCELED])
|
||||
|
||||
|
||||
@app.task(bind=True)
|
||||
def weekly_imports(self):
|
||||
from agenda_culturel.models import RecurrentImport
|
||||
|
||||
logger.info("Imports hebdomadaires")
|
||||
logger.info("Weekly imports")
|
||||
imports = RecurrentImport.objects.filter(
|
||||
recurrence=RecurrentImport.RECURRENCE.WEEKLY
|
||||
)
|
||||
).order_by("pk")
|
||||
|
||||
for imp in imports:
|
||||
run_recurrent_import.delay(imp.pk)
|
||||
run_recurrent_imports_from_list([imp.pk for imp in imports])
|
||||
|
||||
@app.task(base=ChromiumTask, bind=True)
|
||||
def import_events_from_url(self, url, cat, tags, force=False, user_id=None):
|
||||
from .db_importer import DBImporterEvents
|
||||
from agenda_culturel.models import RecurrentImport, BatchImportation
|
||||
from agenda_culturel.models import Event, Category
|
||||
|
||||
with memcache_chromium_lock(self.app.oid) as acquired:
|
||||
if acquired:
|
||||
|
||||
|
||||
logger.info("URL import: {}".format(self.request.id))
|
||||
|
||||
|
||||
# clean url
|
||||
url = Extractor.clean_url(url)
|
||||
|
||||
# we check if the url is known
|
||||
existing = None if force else Event.objects.filter(uuids__contains=[url])
|
||||
# if it's unknown
|
||||
if force or len(existing) == 0:
|
||||
|
||||
# create an importer
|
||||
importer = DBImporterEvents(self.request.id)
|
||||
|
||||
# create a batch importation
|
||||
importation = BatchImportation(url_source=url, celery_id=self.request.id)
|
||||
# save batch importation
|
||||
importation.save()
|
||||
|
||||
try:
|
||||
## create loader
|
||||
u2e = URL2Events(ChromiumHeadlessDownloader(), single_event=True)
|
||||
# set default values
|
||||
values = {}
|
||||
if cat is not None:
|
||||
values = {"category": cat, "tags": tags}
|
||||
|
||||
# get event
|
||||
events = u2e.process(
|
||||
url, published=False, default_values=values
|
||||
)
|
||||
|
||||
if events:
|
||||
# convert it to json
|
||||
json_events = json.dumps(events, default=str)
|
||||
|
||||
# import events (from json)
|
||||
success, error_message = importer.import_events(json_events, user_id)
|
||||
|
||||
# finally, close task
|
||||
close_import_task(self.request.id, success, error_message, importer)
|
||||
else:
|
||||
close_import_task(self.request.id, False, "Cannot find any event", importer)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
close_import_task(self.request.id, False, e, importer)
|
||||
|
||||
return
|
||||
|
||||
# if chromium is locked, we wait 30 seconds before retrying
|
||||
raise self.retry(countdown=30)
|
||||
|
||||
|
||||
@app.task(base=ChromiumTask, bind=True)
|
||||
def import_events_from_urls(self, urls_cat_tags, user_id=None):
|
||||
for ucat in urls_cat_tags:
|
||||
if ucat is not None:
|
||||
url = ucat[0]
|
||||
cat = ucat[1]
|
||||
tags = ucat[2]
|
||||
|
||||
import_events_from_url.delay(url, cat, tags, user_id=user_id)
|
||||
|
||||
|
||||
app.conf.beat_schedule = {
|
||||
@ -193,6 +388,10 @@ app.conf.beat_schedule = {
|
||||
# Daily imports at 3:14 a.m.
|
||||
"schedule": crontab(hour=3, minute=14),
|
||||
},
|
||||
"daily_screenshot": {
|
||||
"task": "agenda_culturel.celery.screenshot",
|
||||
"schedule": crontab(hour=3, minute=3),
|
||||
},
|
||||
"weekly_imports": {
|
||||
"task": "agenda_culturel.celery.weekly_imports",
|
||||
# Daily imports on Mondays at 2:22 a.m.
|
||||
|
@ -11,6 +11,7 @@ class DBImporterEvents:
|
||||
def __init__(self, celery_id):
|
||||
self.celery_id = celery_id
|
||||
self.error_message = ""
|
||||
self.user_id = None
|
||||
self.init_result_properties()
|
||||
self.today = timezone.now().date().isoformat()
|
||||
|
||||
@ -34,15 +35,19 @@ class DBImporterEvents:
|
||||
def get_nb_removed_events(self):
|
||||
return self.nb_removed
|
||||
|
||||
def import_events(self, json_structure):
|
||||
def import_events(self, json_structure, user_id=None):
|
||||
print(json_structure)
|
||||
self.init_result_properties()
|
||||
self.user_id = user_id
|
||||
|
||||
try:
|
||||
structure = json.loads(json_structure)
|
||||
except:
|
||||
return (False, "JSON file is not correctly structured")
|
||||
|
||||
if len(structure) == 0:
|
||||
return (True, "")
|
||||
|
||||
if "header" not in structure:
|
||||
return (False, "JSON is not correctly structured: missing header")
|
||||
if "events" not in structure:
|
||||
@ -92,7 +97,7 @@ class DBImporterEvents:
|
||||
|
||||
def save_imported(self):
|
||||
self.db_event_objects, self.nb_updated, self.nb_removed = Event.import_events(
|
||||
self.event_objects, remove_missing_from_source=self.url
|
||||
self.event_objects, remove_missing_from_source=self.url, user_id=self.user_id
|
||||
)
|
||||
|
||||
def is_valid_event_structure(self, event):
|
||||
|
505
src/agenda_culturel/filters.py
Normal file
505
src/agenda_culturel/filters.py
Normal file
@ -0,0 +1,505 @@
|
||||
import django_filters
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django import forms
|
||||
from django.contrib.postgres.search import SearchQuery, SearchHeadline
|
||||
from django.db.models import Count, Q
|
||||
|
||||
from django.http import QueryDict
|
||||
from django.contrib.gis.measure import D
|
||||
|
||||
from django.forms import (
|
||||
ModelForm,
|
||||
ValidationError,
|
||||
TextInput,
|
||||
Form,
|
||||
URLField,
|
||||
MultipleHiddenInput,
|
||||
Textarea,
|
||||
CharField,
|
||||
ChoiceField,
|
||||
RadioSelect,
|
||||
MultipleChoiceField,
|
||||
BooleanField,
|
||||
HiddenInput,
|
||||
ModelChoiceField,
|
||||
)
|
||||
|
||||
from .forms import (
|
||||
URLSubmissionForm,
|
||||
EventForm,
|
||||
BatchImportationForm,
|
||||
FixDuplicates,
|
||||
SelectEventInList,
|
||||
MergeDuplicates,
|
||||
RecurrentImportForm,
|
||||
CategorisationRuleImportForm,
|
||||
CategorisationForm,
|
||||
EventAddPlaceForm,
|
||||
PlaceForm,
|
||||
)
|
||||
|
||||
from .models import (
|
||||
ReferenceLocation,
|
||||
RecurrentImport,
|
||||
Tag,
|
||||
Event,
|
||||
Category,
|
||||
Message,
|
||||
DuplicatedEvents
|
||||
)
|
||||
|
||||
|
||||
class EventFilter(django_filters.FilterSet):
|
||||
RECURRENT_CHOICES = [
|
||||
("remove_recurrent", "Masquer les événements récurrents"),
|
||||
("only_recurrent", "Montrer uniquement les événements récurrents"),
|
||||
]
|
||||
|
||||
DISTANCE_CHOICES = [5, 10, 15, 30]
|
||||
|
||||
position = django_filters.ModelChoiceFilter(
|
||||
label="À proximité de",
|
||||
method="no_filter",
|
||||
empty_label=_("Select a location"),
|
||||
queryset=ReferenceLocation.objects.all().order_by("-main", "name__unaccent")
|
||||
)
|
||||
|
||||
radius = django_filters.ChoiceFilter(
|
||||
label="Dans un rayon de",
|
||||
method="no_filter",
|
||||
choices=[(x, str(x) + " km") for x in DISTANCE_CHOICES],
|
||||
null_label=None,
|
||||
empty_label=None
|
||||
)
|
||||
|
||||
exclude_tags = django_filters.MultipleChoiceFilter(
|
||||
label="Exclure les étiquettes",
|
||||
choices=[],
|
||||
lookup_expr="icontains",
|
||||
field_name="tags",
|
||||
exclude=True,
|
||||
widget=forms.SelectMultiple,
|
||||
)
|
||||
|
||||
tags = django_filters.MultipleChoiceFilter(
|
||||
label="Inclure les étiquettes",
|
||||
choices=[],
|
||||
lookup_expr="icontains",
|
||||
conjoined=True,
|
||||
field_name="tags",
|
||||
widget=forms.SelectMultiple,
|
||||
)
|
||||
|
||||
recurrences = django_filters.ChoiceFilter(
|
||||
label="Inclure la récurrence",
|
||||
choices=RECURRENT_CHOICES,
|
||||
method="filter_recurrences",
|
||||
)
|
||||
|
||||
category = django_filters.ModelMultipleChoiceFilter(
|
||||
label="Filtrer par catégories",
|
||||
field_name="category__id",
|
||||
to_field_name="id",
|
||||
queryset=Category.objects.all(),
|
||||
widget=MultipleHiddenInput,
|
||||
)
|
||||
|
||||
|
||||
status = django_filters.MultipleChoiceFilter(
|
||||
label="Filtrer par status",
|
||||
choices=Event.STATUS.choices,
|
||||
field_name="status",
|
||||
widget=forms.CheckboxSelectMultiple,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Event
|
||||
fields = ["category", "tags", "exclude_tags", "status", "recurrences"]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
if not kwargs["request"].user.is_authenticated:
|
||||
self.form.fields.pop("status")
|
||||
self.form.fields["exclude_tags"].choices = Tag.get_tag_groups(exclude=True, nb_suggestions=0)
|
||||
self.form.fields["tags"].choices = Tag.get_tag_groups(include=True)
|
||||
|
||||
def filter_recurrences(self, queryset, name, value):
|
||||
# construct the full lookup expression
|
||||
lookup = "__".join([name, "isnull"])
|
||||
return queryset.filter(**{lookup: value == "remove_recurrent"})
|
||||
|
||||
def no_filter(self, queryset, name, value):
|
||||
return queryset
|
||||
|
||||
@property
|
||||
def qs(self):
|
||||
parent = super().qs
|
||||
if self.get_cleaned_data("position") is None or self.get_cleaned_data("radius") is None:
|
||||
return parent
|
||||
d = self.get_cleaned_data("radius")
|
||||
p = self.get_cleaned_data("position")
|
||||
if not isinstance(d, str) or not isinstance(p, ReferenceLocation):
|
||||
return parent
|
||||
p = p.location
|
||||
|
||||
return parent.exclude(exact_location=False).filter(exact_location__location__distance_lt=(p, D(km=d)))
|
||||
|
||||
def get_url(self):
|
||||
if isinstance(self.form.data, QueryDict):
|
||||
return self.form.data.urlencode()
|
||||
else:
|
||||
return ""
|
||||
|
||||
def get_full_url(self):
|
||||
return self.request.get_full_path()
|
||||
|
||||
def get_url_remove_categories(self, catpks, full_path = None):
|
||||
if full_path is None:
|
||||
full_path = self.request.get_full_path()
|
||||
|
||||
result = full_path
|
||||
for catpk in catpks:
|
||||
result = result.replace('category=' + str(catpk), '')
|
||||
result = result.replace('?&', '?')
|
||||
result = result.replace('&&', '&')
|
||||
return result
|
||||
|
||||
def get_url_add_categories(self, catpks, full_path = None):
|
||||
if full_path is None:
|
||||
full_path = self.request.get_full_path()
|
||||
|
||||
result = full_path
|
||||
for catpk in catpks:
|
||||
result = result + ('&' if '?' in full_path else '?') + 'category=' + str(catpk)
|
||||
return result
|
||||
|
||||
def get_url_without_filters_only_cats(self):
|
||||
return self.get_url_without_filters(True)
|
||||
|
||||
|
||||
def get_url_without_filters(self, only_categories=False):
|
||||
|
||||
if only_categories:
|
||||
# on repart d'une url sans option
|
||||
result = self.request.get_full_path().split("?")[0]
|
||||
# on ajoute toutes les catégories
|
||||
result = self.get_url_add_categories([c.pk for c in self.get_categories()], result)
|
||||
else:
|
||||
# on supprime toutes les catégories
|
||||
result = self.get_url_remove_categories([c.pk for c in self.get_categories()])
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def get_cleaned_data(self, name):
|
||||
|
||||
try:
|
||||
return self.form.cleaned_data[name]
|
||||
except AttributeError:
|
||||
return {}
|
||||
except KeyError:
|
||||
return {}
|
||||
|
||||
def get_categories(self):
|
||||
return self.get_cleaned_data("category")
|
||||
|
||||
def has_category(self):
|
||||
return "category" in self.form.cleaned_data and len(self.get_cleaned_data("category")) > 0
|
||||
|
||||
def get_tags(self):
|
||||
return self.get_cleaned_data("tags")
|
||||
|
||||
def get_exclude_tags(self):
|
||||
return self.get_cleaned_data("exclude_tags")
|
||||
|
||||
def get_status(self):
|
||||
return self.get_cleaned_data("status")
|
||||
|
||||
def get_position(self):
|
||||
return self.get_cleaned_data("position")
|
||||
|
||||
def get_radius(self):
|
||||
return self.get_cleaned_data("radius")
|
||||
|
||||
def to_str(self, prefix=''):
|
||||
self.form.full_clean()
|
||||
result = ' '.join([c.name for c in self.get_categories()] + [t for t in self.get_tags()] + ["~" + t for t in self.get_exclude_tags()] + [str(self.get_position()), str(self.get_radius())])
|
||||
if len(result) > 0:
|
||||
result = prefix + result
|
||||
return result
|
||||
|
||||
def get_status_names(self):
|
||||
if "status" in self.form.cleaned_data:
|
||||
return [
|
||||
dict(Event.STATUS.choices)[s] for s in self.get_cleaned_data("status")
|
||||
]
|
||||
else:
|
||||
return []
|
||||
|
||||
def get_recurrence_filtering(self):
|
||||
if "recurrences" in self.form.cleaned_data:
|
||||
d = dict(self.RECURRENT_CHOICES)
|
||||
v = self.form.cleaned_data["recurrences"]
|
||||
if v in d:
|
||||
return d[v]
|
||||
else:
|
||||
return ""
|
||||
else:
|
||||
return ""
|
||||
|
||||
|
||||
def is_resetable(self, only_categories=False):
|
||||
if only_categories:
|
||||
return len(self.get_cleaned_data("category")) != 0
|
||||
else:
|
||||
if self.request.user.is_authenticated:
|
||||
if (
|
||||
len(self.get_cleaned_data("status")) != 1
|
||||
or
|
||||
self.get_cleaned_data("status")[0] != Event.STATUS.PUBLISHED
|
||||
):
|
||||
return True
|
||||
else:
|
||||
if (
|
||||
len(self.get_cleaned_data("status")) != 0
|
||||
):
|
||||
return True
|
||||
return (
|
||||
len(self.get_cleaned_data("tags")) != 0
|
||||
or len(self.get_cleaned_data("exclude_tags")) != 0
|
||||
or len(self.get_cleaned_data("recurrences")) != 0
|
||||
or ((not self.get_cleaned_data("position") is None) and (not self.get_cleaned_data("radius") is None))
|
||||
)
|
||||
|
||||
def is_active(self, only_categories=False):
|
||||
if only_categories:
|
||||
return len(self.get_cleaned_data("category")) != 0
|
||||
else:
|
||||
return (
|
||||
len(self.get_cleaned_data("status")) != 0
|
||||
or len(self.get_cleaned_data("tags")) != 0
|
||||
or len(self.get_cleaned_data("exclude_tags")) != 0
|
||||
or len(self.get_cleaned_data("recurrences")) != 0
|
||||
or ((not self.get_cleaned_data("position") is None) and (not self.get_cleaned_data("radius") is None))
|
||||
)
|
||||
|
||||
def is_selected(self, cat):
|
||||
return "category" in self.form.cleaned_data and cat in self.form.cleaned_data["category"]
|
||||
|
||||
def is_selected_tag(self, tag):
|
||||
return "tags" in self.form.cleaned_data and tag in self.form.cleaned_data["tags"]
|
||||
|
||||
def get_url_add_tag(self, tag):
|
||||
full_path = self.request.get_full_path()
|
||||
|
||||
result = full_path + ('&' if '?' in full_path else '?') + 'tags=' + str(tag)
|
||||
|
||||
return result
|
||||
|
||||
def tag_exists(self, tag):
|
||||
return tag in [t[0] for g in self.form.fields["tags"].choices for t in g[1]]
|
||||
|
||||
def set_default_values(request):
|
||||
if request.user.is_authenticated:
|
||||
if request.GET.get('status', None) == None:
|
||||
tempdict = request.GET.copy()
|
||||
tempdict['status'] = 'published'
|
||||
request.GET = tempdict
|
||||
return request
|
||||
return request
|
||||
|
||||
def get_position_radius(self):
|
||||
if self.get_cleaned_data("position") is None or self.get_cleaned_data("radius") is None:
|
||||
return ""
|
||||
else:
|
||||
return str(self.get_cleaned_data("position")) + ' (' + str(self.get_cleaned_data("radius")) + ' km)'
|
||||
|
||||
def is_filtered_by_position_radius(self):
|
||||
return not self.get_cleaned_data("position") is None and not self.get_cleaned_data("radius") is None
|
||||
|
||||
def get_url_add_suggested_position(self, location):
|
||||
result = self.request.get_full_path()
|
||||
return result + ('&' if '?' in result else '?') + 'position=' + str(location.pk) + "&radius=" + str(location.suggested_distance)
|
||||
|
||||
|
||||
class EventFilterAdmin(django_filters.FilterSet):
|
||||
status = django_filters.MultipleChoiceFilter(
|
||||
choices=Event.STATUS.choices, widget=forms.CheckboxSelectMultiple
|
||||
)
|
||||
|
||||
representative = django_filters.MultipleChoiceFilter(
|
||||
label=_("Representative version"),
|
||||
choices=[(True, _("Yes")), (False, _("Non"))],
|
||||
method="filter_by_representative",
|
||||
widget=forms.CheckboxSelectMultiple)
|
||||
|
||||
import_sources = django_filters.ModelChoiceFilter(
|
||||
label=_("Imported from"),
|
||||
method="filter_by_source",
|
||||
queryset=RecurrentImport.objects.all().order_by("name__unaccent")
|
||||
)
|
||||
|
||||
def filter_by_source(self, queryset, name, value):
|
||||
src = RecurrentImport.objects.get(pk=value.pk).source
|
||||
return queryset.filter(import_sources__contains=[src])
|
||||
|
||||
def filter_by_representative(self, queryset, name, value):
|
||||
if value is None or len(value) != 1:
|
||||
return queryset
|
||||
else:
|
||||
q = (Q(other_versions__isnull=True) |
|
||||
Q(other_versions__representative=F('pk')) |
|
||||
Q(other_versions__representative__isnull=True))
|
||||
if value[0] == True:
|
||||
return queryset.filter(q)
|
||||
else:
|
||||
return queryset.exclude(q)
|
||||
|
||||
|
||||
class Meta:
|
||||
model = Event
|
||||
fields = ["status"]
|
||||
|
||||
|
||||
class MessagesFilterAdmin(django_filters.FilterSet):
|
||||
closed = django_filters.MultipleChoiceFilter(
|
||||
label="Status",
|
||||
choices=((True, _("Closed")), (False, _("Open"))),
|
||||
widget=forms.CheckboxSelectMultiple,
|
||||
)
|
||||
spam = django_filters.MultipleChoiceFilter(
|
||||
label="Spam",
|
||||
choices=((True, _("Spam")), (False, _("Non spam"))),
|
||||
widget=forms.CheckboxSelectMultiple,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Message
|
||||
fields = ["closed", "spam"]
|
||||
|
||||
|
||||
class SimpleSearchEventFilter(django_filters.FilterSet):
|
||||
q = django_filters.CharFilter(method="custom_filter",
|
||||
label=_("Search"),
|
||||
widget=forms.TextInput(attrs={"type": "search"})
|
||||
)
|
||||
|
||||
status = django_filters.MultipleChoiceFilter(
|
||||
label="Filtrer par status",
|
||||
choices=Event.STATUS.choices,
|
||||
field_name="status",
|
||||
widget=forms.CheckboxSelectMultiple,
|
||||
)
|
||||
|
||||
def custom_filter(self, queryset, name, value):
|
||||
search_query = SearchQuery(value, config="french")
|
||||
qs = queryset.filter(
|
||||
Q(title__icontains=value)
|
||||
| Q(category__name__icontains=value)
|
||||
| Q(tags__icontains=[value])
|
||||
| Q(exact_location__name__icontains=value)
|
||||
| Q(description__icontains=value)
|
||||
)
|
||||
for f in ["title", "category__name", "exact_location__name", "description"]:
|
||||
params = {
|
||||
f
|
||||
+ "_hl": SearchHeadline(
|
||||
f,
|
||||
search_query,
|
||||
start_sel='<span class="highlight">',
|
||||
stop_sel="</span>",
|
||||
config="french",
|
||||
)
|
||||
}
|
||||
qs = qs.annotate(**params)
|
||||
return qs
|
||||
|
||||
class Meta:
|
||||
model = Event
|
||||
fields = ["q"]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
if not kwargs["request"].user.is_authenticated:
|
||||
self.form.fields.pop("status")
|
||||
|
||||
|
||||
class SearchEventFilter(django_filters.FilterSet):
|
||||
tags = django_filters.CharFilter(lookup_expr="icontains")
|
||||
title = django_filters.CharFilter(method="hl_filter_contains")
|
||||
location = django_filters.CharFilter(method="hl_filter_contains")
|
||||
description = django_filters.CharFilter(method="hl_filter_contains")
|
||||
start_day = django_filters.DateFromToRangeFilter(
|
||||
widget=django_filters.widgets.RangeWidget(attrs={"type": "date"})
|
||||
)
|
||||
status = django_filters.MultipleChoiceFilter(
|
||||
label="Filtrer par status",
|
||||
choices=Event.STATUS.choices,
|
||||
field_name="status",
|
||||
widget=forms.CheckboxSelectMultiple,
|
||||
)
|
||||
|
||||
o = django_filters.OrderingFilter(
|
||||
# tuple-mapping retains order
|
||||
fields=(
|
||||
("title", "title"),
|
||||
("description", "description"),
|
||||
("start_day", "start_day"),
|
||||
),
|
||||
)
|
||||
|
||||
def hl_filter_contains(self, queryset, name, value):
|
||||
# first check if it contains
|
||||
filter_contains = {name + "__contains": value}
|
||||
queryset = queryset.filter(**filter_contains)
|
||||
|
||||
# then hightlight the result
|
||||
search_query = SearchQuery(value, config="french")
|
||||
params = {
|
||||
name
|
||||
+ "_hl": SearchHeadline(
|
||||
name,
|
||||
search_query,
|
||||
start_sel='<span class="highlight">',
|
||||
stop_sel="</span>",
|
||||
config="french",
|
||||
)
|
||||
}
|
||||
return queryset.annotate(**params)
|
||||
|
||||
class Meta:
|
||||
model = Event
|
||||
fields = ["title", "location", "description", "category", "tags", "start_day"]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
if not kwargs["request"].user.is_authenticated:
|
||||
self.form.fields.pop("status")
|
||||
|
||||
|
||||
class DuplicatedEventsFilter(django_filters.FilterSet):
|
||||
fixed = django_filters.BooleanFilter(
|
||||
label="Résolu",
|
||||
field_name='representative', method="fixed_qs")
|
||||
|
||||
class Meta:
|
||||
model = DuplicatedEvents
|
||||
fields = []
|
||||
|
||||
|
||||
def fixed_qs(self, queryset, name, value):
|
||||
return DuplicatedEvents.not_fixed_qs(queryset, value)
|
||||
|
||||
|
||||
class RecurrentImportFilter(django_filters.FilterSet):
|
||||
|
||||
name = django_filters.ModelMultipleChoiceFilter(
|
||||
label="Filtrer par nom",
|
||||
field_name="name",
|
||||
queryset=RecurrentImport.objects.all().order_by("name__unaccent")
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = RecurrentImport
|
||||
fields = ["name"]
|
||||
|
@ -16,29 +16,137 @@ from django.forms import (
|
||||
)
|
||||
from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget
|
||||
|
||||
from .utils import PlaceGuesser
|
||||
from .models import (
|
||||
Event,
|
||||
RecurrentImport,
|
||||
CategorisationRule,
|
||||
ModerationAnswer,
|
||||
ModerationQuestion,
|
||||
Place,
|
||||
Category,
|
||||
Tag,
|
||||
Message
|
||||
)
|
||||
from django.conf import settings
|
||||
from django.core.files import File
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from string import ascii_uppercase as auc
|
||||
from .templatetags.utils_extra import int_to_abc
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.utils.timezone import localtime
|
||||
from django.utils.formats import localize
|
||||
from .templatetags.event_extra import event_field_verbose_name, field_to_html
|
||||
import os
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class GroupFormMixin:
|
||||
|
||||
template_name = 'agenda_culturel/forms/div_group.html'
|
||||
|
||||
class FieldGroup:
|
||||
|
||||
def __init__(self, id, label, display_label=False, maskable=False, default_masked=True):
|
||||
self.id = id
|
||||
self.label = label
|
||||
self.display_label = display_label
|
||||
self.maskable = maskable
|
||||
self.default_masked = default_masked
|
||||
|
||||
def toggle_field_name(self):
|
||||
return 'group_' + self.id
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.groups = []
|
||||
|
||||
def add_group(self, *args, **kwargs):
|
||||
self.groups.append(GroupFormMixin.FieldGroup(*args, **kwargs))
|
||||
if self.groups[-1].maskable:
|
||||
self.fields[self.groups[-1].toggle_field_name()] = BooleanField(required=False)
|
||||
self.fields[self.groups[-1].toggle_field_name()].toggle_group = True
|
||||
|
||||
def get_fields_in_group(self, g):
|
||||
return [f for f in self.visible_fields() if not hasattr(f.field, "toggle_group") and hasattr(f.field, "group_id") and f.field.group_id == g.id]
|
||||
|
||||
def get_no_group_fields(self):
|
||||
return [f for f in self.visible_fields() if not hasattr(f.field, "toggle_group") and (not hasattr(f.field, "group_id") or f.field.group_id == None)]
|
||||
|
||||
def fields_by_group(self):
|
||||
return [(g, self.get_fields_in_group(g)) for g in self.groups] + [(GroupFormMixin.FieldGroup("other", _("Other")), self.get_no_group_fields())]
|
||||
|
||||
def clean(self):
|
||||
result = super().clean()
|
||||
|
||||
if result:
|
||||
data = dict(self.data)
|
||||
# for each masked group, we remove data
|
||||
for g in self.groups:
|
||||
if g.maskable and not g.toggle_field_name() in data:
|
||||
fields = self.get_fields_in_group(g)
|
||||
for f in fields:
|
||||
self.cleaned_data[f.name] = None
|
||||
|
||||
return result
|
||||
|
||||
class TagForm(ModelForm):
|
||||
required_css_class = 'required'
|
||||
|
||||
class Meta:
|
||||
model = Tag
|
||||
fields = ["name", "description", "in_included_suggestions", "in_excluded_suggestions", "principal"]
|
||||
widgets = {
|
||||
"name": HiddenInput()
|
||||
}
|
||||
|
||||
class TagRenameForm(Form):
|
||||
required_css_class = 'required'
|
||||
|
||||
name = CharField(
|
||||
label=_('Name of new tag'),
|
||||
required=True
|
||||
)
|
||||
|
||||
force = BooleanField(
|
||||
label=_('Force renaming despite the existence of events already using the chosen tag.'),
|
||||
)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
force = kwargs.pop("force", False)
|
||||
name = kwargs.pop("name", None)
|
||||
super().__init__(*args, **kwargs)
|
||||
if not (force or (not len(args) == 0 and 'force' in args[0])):
|
||||
del self.fields["force"]
|
||||
if not name is None and self.fields["name"].initial is None:
|
||||
self.fields["name"].initial = name
|
||||
|
||||
|
||||
def is_force(self):
|
||||
return "force" in self.fields and self.cleaned_data["force"] == True
|
||||
|
||||
class URLSubmissionForm(Form):
|
||||
required_css_class = 'required'
|
||||
|
||||
class EventSubmissionForm(Form):
|
||||
url = URLField(max_length=512)
|
||||
category = ModelChoiceField(
|
||||
label=_("Category"),
|
||||
queryset=Category.objects.all().order_by("name"),
|
||||
initial=None,
|
||||
required=False,
|
||||
)
|
||||
tags = MultipleChoiceField(
|
||||
label=_("Tags"),
|
||||
initial=None,
|
||||
choices=[],
|
||||
required=False
|
||||
)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields["tags"].choices = Tag.get_tag_groups(all=True)
|
||||
|
||||
|
||||
|
||||
|
||||
class DynamicArrayWidgetURLs(DynamicArrayWidget):
|
||||
@ -50,28 +158,58 @@ class DynamicArrayWidgetTags(DynamicArrayWidget):
|
||||
|
||||
|
||||
class RecurrentImportForm(ModelForm):
|
||||
required_css_class = 'required'
|
||||
|
||||
defaultTags = MultipleChoiceField(
|
||||
label=_("Tags"),
|
||||
initial=None,
|
||||
choices=[],
|
||||
required=False
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = RecurrentImport
|
||||
fields = "__all__"
|
||||
widgets = {
|
||||
"defaultTags": DynamicArrayWidgetTags(),
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields["defaultTags"].choices = Tag.get_tag_groups(all=True)
|
||||
|
||||
|
||||
class CategorisationRuleImportForm(ModelForm):
|
||||
required_css_class = 'required'
|
||||
|
||||
class Meta:
|
||||
model = CategorisationRule
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class EventForm(ModelForm):
|
||||
class EventForm(GroupFormMixin, ModelForm):
|
||||
required_css_class = 'required'
|
||||
|
||||
old_local_image = CharField(widget=HiddenInput(), required=False)
|
||||
simple_cloning = CharField(widget=HiddenInput(), required=False)
|
||||
|
||||
tags = MultipleChoiceField(
|
||||
label=_("Tags"),
|
||||
initial=None,
|
||||
choices=[],
|
||||
required=False
|
||||
)
|
||||
|
||||
|
||||
class Meta:
|
||||
model = Event
|
||||
exclude = [
|
||||
"possibly_duplicated",
|
||||
"imported_date",
|
||||
"modified_date",
|
||||
"moderated_date",
|
||||
"import_sources",
|
||||
"image",
|
||||
"moderated_by_user",
|
||||
"modified_by_user",
|
||||
"created_by_user",
|
||||
"imported_by_user"
|
||||
]
|
||||
widgets = {
|
||||
"start_day": TextInput(
|
||||
@ -90,17 +228,75 @@ class EventForm(ModelForm):
|
||||
),
|
||||
"end_day": TextInput(attrs={"type": "date"}),
|
||||
"end_time": TextInput(attrs={"type": "time"}),
|
||||
"other_versions": HiddenInput(),
|
||||
"uuids": MultipleHiddenInput(),
|
||||
"import_sources": MultipleHiddenInput(),
|
||||
"reference_urls": DynamicArrayWidgetURLs(),
|
||||
"tags": DynamicArrayWidgetTags(),
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
is_authenticated = kwargs.pop("is_authenticated", False)
|
||||
self.cloning = kwargs.pop("is_cloning", False)
|
||||
self.simple_cloning = kwargs.pop("is_simple_cloning", False)
|
||||
super().__init__(*args, **kwargs)
|
||||
if not is_authenticated:
|
||||
del self.fields["status"]
|
||||
del self.fields["organisers"]
|
||||
self.fields['category'].queryset = self.fields['category'].queryset.order_by('name')
|
||||
self.fields['category'].empty_label = None
|
||||
self.fields['category'].initial = Category.get_default_category()
|
||||
self.fields['tags'].choices = Tag.get_tag_groups(all=True)
|
||||
|
||||
# set groups
|
||||
self.add_group('main', _('Main fields'))
|
||||
self.fields['title'].group_id = 'main'
|
||||
|
||||
self.add_group('start', _('Start of event'))
|
||||
self.fields['start_day'].group_id = 'start'
|
||||
self.fields['start_time'].group_id = 'start'
|
||||
|
||||
self.add_group('end', _('End of event'))
|
||||
self.fields['end_day'].group_id = 'end'
|
||||
self.fields['end_time'].group_id = 'end'
|
||||
|
||||
self.add_group('recurrences',
|
||||
_('This is a recurring event'),
|
||||
maskable=True,
|
||||
default_masked=not (self.instance and
|
||||
self.instance.recurrences and
|
||||
self.instance.recurrences.rrules and
|
||||
len(self.instance.recurrences.rrules) > 0))
|
||||
|
||||
self.fields['recurrences'].group_id = 'recurrences'
|
||||
|
||||
self.add_group('details', _('Details'))
|
||||
self.fields['description'].group_id = 'details'
|
||||
if is_authenticated:
|
||||
self.fields['organisers'].group_id = 'details'
|
||||
|
||||
self.add_group('location', _('Location'))
|
||||
self.fields['location'].group_id = 'location'
|
||||
self.fields['exact_location'].group_id = 'location'
|
||||
|
||||
self.add_group('illustration', _('Illustration'))
|
||||
self.fields['local_image'].group_id = 'illustration'
|
||||
self.fields['image_alt'].group_id = 'illustration'
|
||||
|
||||
|
||||
if is_authenticated:
|
||||
self.add_group('meta-admin', _('Meta information'))
|
||||
self.fields['category'].group_id = 'meta-admin'
|
||||
self.fields['tags'].group_id = 'meta-admin'
|
||||
self.fields['status'].group_id = 'meta-admin'
|
||||
else:
|
||||
self.add_group('meta', _('Meta information'))
|
||||
self.fields['category'].group_id = 'meta'
|
||||
self.fields['tags'].group_id = 'meta'
|
||||
|
||||
def is_clone_from_url(self):
|
||||
return self.cloning
|
||||
|
||||
def is_simple_clone_from_url(self):
|
||||
return self.simple_cloning
|
||||
|
||||
def clean_end_day(self):
|
||||
start_day = self.cleaned_data.get("start_day")
|
||||
@ -128,8 +324,78 @@ class EventForm(ModelForm):
|
||||
|
||||
return end_time
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
|
||||
# when cloning an existing event, we need to copy the local image
|
||||
if self.cleaned_data['local_image'] is None and \
|
||||
not self.cleaned_data['old_local_image'] is None and \
|
||||
self.cleaned_data['old_local_image'] != "":
|
||||
basename = self.cleaned_data['old_local_image']
|
||||
old = settings.MEDIA_ROOT + "/" + basename
|
||||
if os.path.isfile(old):
|
||||
self.cleaned_data['local_image'] = File(name=basename, file=open(old, "rb"))
|
||||
|
||||
|
||||
class MultipleChoiceFieldAcceptAll(MultipleChoiceField):
|
||||
def validate(self, value):
|
||||
pass
|
||||
|
||||
|
||||
class EventModerateForm(ModelForm):
|
||||
required_css_class = 'required'
|
||||
|
||||
tags = MultipleChoiceField(
|
||||
label=_("Tags"),
|
||||
help_text=_('Select tags from existing ones.'),
|
||||
required=False
|
||||
)
|
||||
|
||||
new_tags = MultipleChoiceFieldAcceptAll(
|
||||
label=_("New tags"),
|
||||
help_text=_('Create new labels (sparingly). Note: by starting your tag with the characters “TW:”, you''ll create a “trigger warning” tag, and the associated events will be announced as such.'),
|
||||
widget=DynamicArrayWidget(),
|
||||
required=False
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Event
|
||||
fields = [
|
||||
"status",
|
||||
"category",
|
||||
"organisers",
|
||||
"exact_location",
|
||||
"tags"
|
||||
]
|
||||
widgets = {
|
||||
"status": RadioSelect
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['category'].queryset = self.fields['category'].queryset.order_by('name')
|
||||
self.fields['category'].empty_label = None
|
||||
self.fields['category'].initial = Category.get_default_category()
|
||||
self.fields['tags'].choices = Tag.get_tag_groups(all=True)
|
||||
|
||||
def clean_new_tags(self):
|
||||
return list(set(self.cleaned_data.get("new_tags")))
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
|
||||
if self.cleaned_data['tags'] is None:
|
||||
self.cleaned_data['tags'] = []
|
||||
|
||||
if not self.cleaned_data.get('new_tags') is None:
|
||||
self.cleaned_data['tags'] += self.cleaned_data.get('new_tags')
|
||||
|
||||
self.cleaned_data['tags'] = list(set(self.cleaned_data['tags']))
|
||||
|
||||
|
||||
class BatchImportationForm(Form):
|
||||
required_css_class = 'required'
|
||||
|
||||
json = CharField(
|
||||
label="JSON",
|
||||
widget=Textarea(attrs={"rows": "10"}),
|
||||
@ -139,54 +405,64 @@ class BatchImportationForm(Form):
|
||||
|
||||
|
||||
class FixDuplicates(Form):
|
||||
required_css_class = 'required'
|
||||
|
||||
action = ChoiceField()
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
nb_events = kwargs.pop("nb_events", None)
|
||||
edup = kwargs.pop("edup", None)
|
||||
events = edup.get_duplicated()
|
||||
nb_events = len(events)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
if nb_events == 2:
|
||||
choices = [("NotDuplicates", "Ces événements sont différents")]
|
||||
choices += [
|
||||
(
|
||||
"SelectA",
|
||||
"Ces événements sont identiques, on garde A et on met B à la corbeille",
|
||||
)
|
||||
]
|
||||
choices += [
|
||||
(
|
||||
"SelectB",
|
||||
"Ces événements sont identiques, on garde B et on met A à la corbeille",
|
||||
)
|
||||
]
|
||||
choices += [
|
||||
("Merge", "Ces événements sont identiques, on fusionne à la main")
|
||||
]
|
||||
else:
|
||||
choices = [("NotDuplicates", "Ces événements sont tous différents")]
|
||||
for i in auc[0:nb_events]:
|
||||
choices = []
|
||||
initial = None
|
||||
for i, e in enumerate(events):
|
||||
if e.status != Event.STATUS.TRASH or e.modified():
|
||||
msg = ""
|
||||
if e.local_version():
|
||||
msg = _(" (locally modified version)")
|
||||
if e.status != Event.STATUS.TRASH:
|
||||
initial = "Select-" + str(e.pk)
|
||||
if e.pure_import():
|
||||
msg = _(" (synchronized on import version)")
|
||||
choices += [
|
||||
(
|
||||
"Remove" + i,
|
||||
"L'événement "
|
||||
+ i
|
||||
+ " n'est pas identique aux autres, on le rend indépendant",
|
||||
"Select-" + str(e.pk),
|
||||
_("Select {} as representative version.").format(auc[i] + msg)
|
||||
)
|
||||
]
|
||||
for i in auc[0:nb_events]:
|
||||
|
||||
for i, e in enumerate(events):
|
||||
if e.status != Event.STATUS.TRASH and e.local_version():
|
||||
choices += [
|
||||
(
|
||||
"Select" + i,
|
||||
"Ces événements sont identiques, on garde "
|
||||
+ i
|
||||
+ " et on met les autres à la corbeille",
|
||||
"Update-" + str(e.pk),
|
||||
_("Update {} using some fields from other versions (interactive mode).").format(auc[i])
|
||||
)
|
||||
]
|
||||
choices += [
|
||||
("Merge", "Ces événements sont identiques, on fusionne à la main")
|
||||
]
|
||||
|
||||
|
||||
extra = ""
|
||||
if edup.has_local_version():
|
||||
extra = _(" Warning: a version is already locally modified.")
|
||||
|
||||
if initial is None:
|
||||
initial = "Merge"
|
||||
choices += [
|
||||
("Merge", _("Create a new version by merging (interactive mode).") + extra)
|
||||
]
|
||||
for i, e in enumerate(events):
|
||||
if e.status != Event.STATUS.TRASH:
|
||||
choices += [
|
||||
(
|
||||
"Remove-" + str(e.pk),
|
||||
_("Make {} independent.").format(auc[i]))
|
||||
]
|
||||
choices += [("NotDuplicates", _("Make all versions independent."))]
|
||||
|
||||
self.fields["action"].choices = choices
|
||||
self.fields["action"].initial = initial
|
||||
|
||||
def is_action_no_duplicates(self):
|
||||
return self.cleaned_data["action"] == "NotDuplicates"
|
||||
@ -194,50 +470,62 @@ class FixDuplicates(Form):
|
||||
def is_action_select(self):
|
||||
return self.cleaned_data["action"].startswith("Select")
|
||||
|
||||
def is_action_update(self):
|
||||
return self.cleaned_data["action"].startswith("Update")
|
||||
|
||||
def is_action_remove(self):
|
||||
return self.cleaned_data["action"].startswith("Remove")
|
||||
|
||||
def get_selected_event_code(self):
|
||||
if self.is_action_select() or self.is_action_remove():
|
||||
return self.cleaned_data["action"][-1]
|
||||
if self.is_action_select() or self.is_action_remove() or self.is_action_update():
|
||||
return int(self.cleaned_data["action"].split("-")[-1])
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_selected_event_id(self):
|
||||
selected = self.get_selected_event_code()
|
||||
if selected is None:
|
||||
return None
|
||||
else:
|
||||
return auc.rfind(selected)
|
||||
|
||||
def get_selected_event(self, edup):
|
||||
selected = self.get_selected_event_id()
|
||||
return edup.get_duplicated()[selected]
|
||||
selected = self.get_selected_event_code()
|
||||
for e in edup.get_duplicated():
|
||||
if e.pk == selected:
|
||||
return e
|
||||
return None
|
||||
|
||||
|
||||
class SelectEventInList(Form):
|
||||
event = ChoiceField()
|
||||
required_css_class = 'required'
|
||||
|
||||
event = ChoiceField(label=_('Event'))
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
events = kwargs.pop("events", None)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
self.fields["event"].choices = [
|
||||
(e.pk, str(e.start_day) + " " + e.title + ", " + e.location) for e in events
|
||||
(e.pk, str(e.start_day) + " " + e.title + ((", " + e.location) if e.location else "")) for e in events
|
||||
]
|
||||
|
||||
|
||||
class MergeDuplicates(Form):
|
||||
checkboxes_fields = ["reference_urls", "description"]
|
||||
required_css_class = 'required'
|
||||
|
||||
checkboxes_fields = ["reference_urls", "description", "tags"]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.duplicates = kwargs.pop("duplicates", None)
|
||||
nb_events = self.duplicates.nb_duplicated()
|
||||
self.event = kwargs.pop("event", None)
|
||||
self.events = list(self.duplicates.get_duplicated())
|
||||
nb_events = len(self.events)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
choices = [
|
||||
("event" + i, "Valeur de l'évenement " + i) for i in auc[0:nb_events]
|
||||
]
|
||||
|
||||
if self.event:
|
||||
choices = [("event_" + str(self.event.pk), _("Value of the selected version"))] + \
|
||||
[
|
||||
("event_" + str(e.pk), _("Value of version {}").format(e.pk)) for e in self.events if e != self.event
|
||||
]
|
||||
else:
|
||||
choices = [
|
||||
("event_" + str(e.pk), _("Value of version {}").format(e.pk)) for e in self.events
|
||||
]
|
||||
|
||||
for f in self.duplicates.get_items_comparison():
|
||||
if not f["similar"]:
|
||||
@ -252,7 +540,7 @@ class MergeDuplicates(Form):
|
||||
|
||||
def as_grid(self):
|
||||
result = '<div class="grid">'
|
||||
for i, e in enumerate(self.duplicates.get_duplicated()):
|
||||
for i, e in enumerate(self.events):
|
||||
result += '<div class="grid entete-badge">'
|
||||
result += '<div class="badge-large">' + int_to_abc(i) + "</div>"
|
||||
result += "<ul>"
|
||||
@ -260,17 +548,17 @@ class MergeDuplicates(Form):
|
||||
'<li><a href="' + e.get_absolute_url() + '">' + e.title + "</a></li>"
|
||||
)
|
||||
result += (
|
||||
"<li>Création : " + localize(localtime(e.created_date)) + "</li>"
|
||||
"<li>Création : " + localize(e.created_date) + "</li>"
|
||||
)
|
||||
result += (
|
||||
"<li>Dernière modification : "
|
||||
+ localize(localtime(e.modified_date))
|
||||
+ localize(e.modified_date)
|
||||
+ "</li>"
|
||||
)
|
||||
if e.imported_date:
|
||||
result += (
|
||||
"<li>Dernière importation : "
|
||||
+ localize(localtime(e.imported_date))
|
||||
+ localize(e.imported_date)
|
||||
+ "</li>"
|
||||
)
|
||||
result += "</ul>"
|
||||
@ -288,100 +576,93 @@ class MergeDuplicates(Form):
|
||||
)
|
||||
else:
|
||||
result += "<fieldset>"
|
||||
if key in self.errors:
|
||||
result += '<div class="message error"><ul>'
|
||||
for err in self.errors[key]:
|
||||
result += "<li>" + err + "</li>"
|
||||
result += "</ul></div>"
|
||||
result += '<div class="grid comparison-item">'
|
||||
if hasattr(self, "cleaned_data"):
|
||||
checked = self.cleaned_data.get(key)
|
||||
else:
|
||||
checked = self.fields[key].initial
|
||||
|
||||
for i, (v, radio) in enumerate(
|
||||
zip(e["values"], self.fields[e["key"]].choices)
|
||||
):
|
||||
result += '<div class="duplicated">'
|
||||
id = "id_" + key + "_" + str(i)
|
||||
value = "event" + auc[i]
|
||||
|
||||
result += '<input id="' + id + '" name="' + key + '"'
|
||||
if key in MergeDuplicates.checkboxes_fields:
|
||||
result += ' type="checkbox"'
|
||||
if value in checked:
|
||||
result += " checked"
|
||||
else:
|
||||
result += ' type="radio"'
|
||||
if checked == value:
|
||||
result += " checked"
|
||||
result += ' value="' + value + '"'
|
||||
result += ">"
|
||||
result += (
|
||||
'<div class="badge-small">'
|
||||
+ int_to_abc(i)
|
||||
+ "</div>"
|
||||
+ str(field_to_html(v, e["key"]))
|
||||
+ "</div>"
|
||||
)
|
||||
i = 0
|
||||
if self.event:
|
||||
idx = self.events.index(self.event)
|
||||
result += self.comparison_item(key, i, e["values"][idx], self.fields[e["key"]].choices[idx], self.event, checked)
|
||||
i += 1
|
||||
|
||||
for (v, radio, ev) in zip(e["values"], self.fields[e["key"]].choices, self.events):
|
||||
if self.event is None or ev != self.event:
|
||||
result += self.comparison_item(key, i, v, radio, ev, checked)
|
||||
i += 1
|
||||
result += "</div></fieldset>"
|
||||
|
||||
return mark_safe(result)
|
||||
|
||||
def get_selected_events_id(self, key):
|
||||
def comparison_item(self, key, i, v, radio, ev, checked):
|
||||
result = '<div class="duplicated">'
|
||||
id = "id_" + key + "_" + str(ev.pk)
|
||||
value = "event_" + str(ev.pk)
|
||||
|
||||
result += '<input id="' + id + '" name="' + key + '"'
|
||||
if key in MergeDuplicates.checkboxes_fields:
|
||||
result += ' type="checkbox"'
|
||||
if checked and value in checked:
|
||||
result += " checked"
|
||||
else:
|
||||
result += ' type="radio"'
|
||||
if checked == value:
|
||||
result += " checked"
|
||||
result += ' value="' + value + '"'
|
||||
result += ">"
|
||||
result += (
|
||||
'<div class="badge-small">'
|
||||
+ int_to_abc(i)
|
||||
+ "</div>")
|
||||
result += "<div>"
|
||||
if key == "image":
|
||||
result += str(field_to_html(ev.local_image, "local_image")) + "</div>"
|
||||
result += "<div>Lien d'import : "
|
||||
|
||||
result += (str(field_to_html(v, key)) + "</div>")
|
||||
result += "</div>"
|
||||
return result
|
||||
|
||||
|
||||
def get_selected_events(self, key):
|
||||
value = self.cleaned_data.get(key)
|
||||
if key not in self.fields:
|
||||
return None
|
||||
else:
|
||||
if isinstance(value, list):
|
||||
return [auc.rfind(v[-1]) for v in value]
|
||||
selected = [int(v.split("_")[-1]) for v in value]
|
||||
result = []
|
||||
for s in selected:
|
||||
for e in self.duplicates.get_duplicated():
|
||||
if e.pk == s:
|
||||
result.append(e)
|
||||
break
|
||||
return result
|
||||
else:
|
||||
return auc.rfind(value[-1])
|
||||
selected = int(value.split("_")[-1])
|
||||
for e in self.duplicates.get_duplicated():
|
||||
if e.pk == selected:
|
||||
return e
|
||||
|
||||
|
||||
class ModerationQuestionForm(ModelForm):
|
||||
class Meta:
|
||||
model = ModerationQuestion
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class ModerationAnswerForm(ModelForm):
|
||||
class Meta:
|
||||
model = ModerationAnswer
|
||||
exclude = ["question"]
|
||||
widgets = {
|
||||
"adds_tags": DynamicArrayWidgetTags(),
|
||||
"removes_tags": DynamicArrayWidgetTags(),
|
||||
}
|
||||
|
||||
|
||||
class ModerateForm(ModelForm):
|
||||
class Meta:
|
||||
model = Event
|
||||
fields = []
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
mqs = ModerationQuestion.objects.all()
|
||||
mas = ModerationAnswer.objects.all()
|
||||
|
||||
for q in mqs:
|
||||
self.fields[q.complete_id()] = ChoiceField(
|
||||
widget=RadioSelect,
|
||||
label=q.question,
|
||||
choices=[(a.pk, a.html_description()) for a in mas if a.question == q],
|
||||
required=True,
|
||||
)
|
||||
for a in mas:
|
||||
if a.question == q and a.valid_event(self.instance):
|
||||
self.fields[q.complete_id()].initial = a.pk
|
||||
break
|
||||
return None
|
||||
|
||||
|
||||
class CategorisationForm(Form):
|
||||
required_css_class = 'required'
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "events" in kwargs:
|
||||
events = kwargs.pop("events", None)
|
||||
else:
|
||||
events = []
|
||||
for f in args[0]:
|
||||
logger.warning("fff: " + f)
|
||||
if "_" not in f:
|
||||
if f + "_cat" in args[0]:
|
||||
events.append(
|
||||
@ -406,6 +687,8 @@ class CategorisationForm(Form):
|
||||
|
||||
|
||||
class EventAddPlaceForm(Form):
|
||||
required_css_class = 'required'
|
||||
|
||||
place = ModelChoiceField(
|
||||
label=_("Place"),
|
||||
queryset=Place.objects.all().order_by("name"),
|
||||
@ -431,15 +714,20 @@ class EventAddPlaceForm(Form):
|
||||
if self.cleaned_data.get("place"):
|
||||
place = self.cleaned_data.get("place")
|
||||
self.instance.exact_location = place
|
||||
self.instance.save()
|
||||
self.instance.save(update_fields=["exact_location"])
|
||||
if self.cleaned_data.get("add_alias"):
|
||||
place.aliases.append(self.instance.location)
|
||||
if place.aliases:
|
||||
place.aliases.append(self.instance.location.strip())
|
||||
else:
|
||||
place.aliases = [self.instance.location.strip()]
|
||||
place.save()
|
||||
|
||||
return self.instance
|
||||
|
||||
|
||||
class PlaceForm(ModelForm):
|
||||
class PlaceForm(GroupFormMixin, ModelForm):
|
||||
required_css_class = 'required'
|
||||
|
||||
apply_to_all = BooleanField(
|
||||
initial=True,
|
||||
label=_(
|
||||
@ -453,13 +741,70 @@ class PlaceForm(ModelForm):
|
||||
fields = "__all__"
|
||||
widgets = {"location": TextInput()}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
self.add_group('header', _('Header'))
|
||||
self.fields['name'].group_id = 'header'
|
||||
|
||||
|
||||
self.add_group('address', _('Address'))
|
||||
self.fields['address'].group_id = 'address'
|
||||
self.fields['postcode'].group_id = 'address'
|
||||
self.fields['city'].group_id = 'address'
|
||||
self.fields['location'].group_id = 'address'
|
||||
|
||||
self.add_group('meta', _('Meta'))
|
||||
self.fields['aliases'].group_id = 'meta'
|
||||
|
||||
self.add_group('information', _('Information'))
|
||||
self.fields['description'].group_id = 'information'
|
||||
|
||||
def as_grid(self):
|
||||
return mark_safe(
|
||||
'<div class="grid"><div>'
|
||||
result = ('<div class="grid"><div>'
|
||||
+ super().as_p()
|
||||
+ '</div><div><div class="map-widget">'
|
||||
+ '<div id="map_location" style="width: 100%; aspect-ratio: 16/9"></div><p>Cliquez pour ajuster la position GPS</p></div></div></div>'
|
||||
)
|
||||
+ '''</div><div><div class="map-widget">
|
||||
<div id="map_location" style="width: 100%; aspect-ratio: 16/9"></div>
|
||||
<p>Cliquez pour ajuster la position GPS</p></div>
|
||||
<input type="checkbox" role="switch" id="lock_position">Verrouiller la position</lock>
|
||||
<script>
|
||||
document.getElementById("lock_position").onclick = function() {
|
||||
const field = document.getElementById("id_location");
|
||||
if (this.checked)
|
||||
field.setAttribute("readonly", true);
|
||||
else
|
||||
field.removeAttribute("readonly");
|
||||
}
|
||||
</script>
|
||||
</div></div>''')
|
||||
|
||||
return mark_safe(result)
|
||||
|
||||
def apply(self):
|
||||
return self.cleaned_data.get("apply_to_all")
|
||||
|
||||
class MessageForm(ModelForm):
|
||||
|
||||
class Meta:
|
||||
model = Message
|
||||
fields = ["subject", "name", "email", "message", "related_event"]
|
||||
widgets = {"related_event": HiddenInput(), "user": HiddenInput() }
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.event = kwargs.pop("event", False)
|
||||
self.internal = kwargs.pop("internal", False)
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['related_event'].required = False
|
||||
if self.internal:
|
||||
self.fields.pop("name")
|
||||
self.fields.pop("email")
|
||||
|
||||
class MessageEventForm(ModelForm):
|
||||
|
||||
class Meta:
|
||||
model = Message
|
||||
fields = ["message"]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields["message"].label = _("Add a comment")
|
113
src/agenda_culturel/import_tasks/custom_extractors/arachnee.py
Normal file
113
src/agenda_culturel/import_tasks/custom_extractors/arachnee.py
Normal file
@ -0,0 +1,113 @@
|
||||
from ..generic_extractors import *
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
# A class dedicated to get events from Arachnée Concert
|
||||
# URL: https://www.arachnee-concerts.com/agenda-des-concerts/
|
||||
class CExtractor(TwoStepsExtractorNoPause):
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.possible_dates = {}
|
||||
self.theater = None
|
||||
|
||||
def extract(
|
||||
self,
|
||||
content,
|
||||
url,
|
||||
url_human=None,
|
||||
default_values=None,
|
||||
published=False,
|
||||
only_future=True,
|
||||
ignore_404=True
|
||||
):
|
||||
match = re.match(r".*\&theatres=([^&]*)&.*", url)
|
||||
if match:
|
||||
self.theater = match[1]
|
||||
|
||||
return super().extract(content, url, url_human, default_values, published, only_future, ignore_404)
|
||||
|
||||
def build_event_url_list(self, content, infuture_days=180):
|
||||
|
||||
soup = BeautifulSoup(content, "html.parser")
|
||||
|
||||
containers = soup.select("ul.event_container>li")
|
||||
if containers:
|
||||
for c in containers:
|
||||
d = Extractor.parse_french_date(c.select_one(".date").text)
|
||||
l = c.select_one(".event_auditory").text
|
||||
if (self.theater is None or (l.startswith(self.theater))) and d < datetime.date.today() + timedelta(days=infuture_days):
|
||||
t = Extractor.parse_french_time(c.select_one(".time").text)
|
||||
e_url = c.select_one(".info a")["href"]
|
||||
if not e_url in self.possible_dates:
|
||||
self.possible_dates[e_url] = []
|
||||
self.possible_dates[e_url].append((str(d) + " " + str(t)))
|
||||
self.add_event_url(e_url)
|
||||
|
||||
def add_event_from_content(
|
||||
self,
|
||||
event_content,
|
||||
event_url,
|
||||
url_human=None,
|
||||
default_values=None,
|
||||
published=False,
|
||||
):
|
||||
|
||||
soup = BeautifulSoup(event_content, "html.parser")
|
||||
title = ", ".join([x.text for x in [soup.select_one(y) for y in [".page_title", ".artiste-subtitle"]] if x])
|
||||
|
||||
image = soup.select_one(".entry-image .image_wrapper img")
|
||||
if not image is None:
|
||||
image = image["src"]
|
||||
|
||||
descs = soup.select(".entry-content p")
|
||||
if descs:
|
||||
description = "\n".join([d.text for d in descs])
|
||||
else:
|
||||
description = None
|
||||
|
||||
category = soup.select_one(".event_category").text
|
||||
first_cat = Extractor.remove_accents(category.split(",")[0].lower())
|
||||
tags = []
|
||||
if first_cat in ["grand spectacle"]:
|
||||
category = "Spectacles"
|
||||
tags.append("💃 danse")
|
||||
elif first_cat in ["theatre", "humour / one man show"]:
|
||||
category = "Spectacles"
|
||||
tags.append("🎭 théâtre")
|
||||
elif first_cat in ["chanson francaise", "musique du monde", "pop / rock", "rap", "rnb", "raggae", "variete"]:
|
||||
category = "Fêtes & Concerts"
|
||||
tags.append("🎵 concert")
|
||||
elif first_cat in ["comedie musicale", "humour / one man show", "spectacle equestre"]:
|
||||
category = "Spectacles"
|
||||
elif first_cat in ["spectacle pour enfant"]:
|
||||
tags = ["🎈 jeune public"]
|
||||
category = None
|
||||
else:
|
||||
category = None
|
||||
|
||||
dates = soup.select("#event_ticket_content>ul>li")
|
||||
for d in dates:
|
||||
dt = datetime.datetime.fromisoformat(d.select_one(".date")["content"])
|
||||
date = dt.date()
|
||||
time = dt.time()
|
||||
if str(date) + " " + str(time) in self.possible_dates[event_url]:
|
||||
location = d.select_one(".event_auditory").text
|
||||
|
||||
self.add_event_with_props(
|
||||
default_values,
|
||||
event_url,
|
||||
title,
|
||||
category,
|
||||
date,
|
||||
location,
|
||||
description,
|
||||
tags,
|
||||
recurrences=None,
|
||||
uuids=[event_url + "?d=" + str(date) + "&t=" + str(time)],
|
||||
url_human=event_url,
|
||||
start_time=time,
|
||||
end_day=None,
|
||||
end_time=None,
|
||||
published=published,
|
||||
image=image,
|
||||
)
|
136
src/agenda_culturel/import_tasks/custom_extractors/c3c.py
Normal file
136
src/agenda_culturel/import_tasks/custom_extractors/c3c.py
Normal file
@ -0,0 +1,136 @@
|
||||
from ..generic_extractors import *
|
||||
from bs4 import BeautifulSoup
|
||||
from datetime import timedelta
|
||||
|
||||
# A class dedicated to get events from La Cour des 3 Coquins
|
||||
# URL: https://billetterie-c3c.clermont-ferrand.fr//
|
||||
class CExtractor(TwoStepsExtractor):
|
||||
nom_lieu = "La Cour des 3 Coquins"
|
||||
|
||||
def category_c3c2agenda(self, category):
|
||||
if not category:
|
||||
return None
|
||||
mapping = {"Théâtre": "Spectacles", "Concert": "Fêtes & Concerts", "Projection": "Cinéma"}
|
||||
mapping_tag = {"Théâtre": "🎭 théâtre", "Concert": "🎵 concert", "Projection": None}
|
||||
if category in mapping:
|
||||
return mapping[category], mapping_tag[category]
|
||||
else:
|
||||
return None, None
|
||||
|
||||
def build_event_url_list(self, content):
|
||||
soup = BeautifulSoup(content, "html.parser")
|
||||
|
||||
events = soup.select("div.fiche-info")
|
||||
|
||||
for e in events:
|
||||
e_url = e.select_one("a.btn.lien_savoir_plus")["href"]
|
||||
if e_url != "":
|
||||
e_url = self.url + "/" + e_url
|
||||
self.add_event_url(e_url)
|
||||
|
||||
def add_event_from_content(
|
||||
self,
|
||||
event_content,
|
||||
event_url,
|
||||
url_human=None,
|
||||
default_values=None,
|
||||
published=False,
|
||||
):
|
||||
soup = BeautifulSoup(event_content, "html.parser")
|
||||
|
||||
title = soup.select_one("h1")
|
||||
if title:
|
||||
title = title.text
|
||||
|
||||
image = soup.select_one("#media .swiper-slide img")
|
||||
if image:
|
||||
image = image["src"]
|
||||
else:
|
||||
image = None
|
||||
|
||||
description = soup.select_one(".presentation").get_text()
|
||||
duration = soup.select_one("#criteres .DUREE-V .valeur-critere li")
|
||||
if not duration is None:
|
||||
duration = Extractor.parse_french_time(duration.text)
|
||||
|
||||
location = self.nom_lieu
|
||||
categories = []
|
||||
tags = []
|
||||
for t in soup.select(".sous-titre span"):
|
||||
classes = t.get("class")
|
||||
if classes and len(classes) > 0:
|
||||
if classes[0].startswith("LIEU-"):
|
||||
location = t.text
|
||||
elif classes[0].startswith("THEMATIQUE-"):
|
||||
cat, tag = self.category_c3c2agenda(t.text)
|
||||
if cat:
|
||||
categories.append(cat)
|
||||
if tag:
|
||||
tags.append(tag)
|
||||
|
||||
# TODO: parser les dates, récupérer les heures ()
|
||||
dates = [o.get("value") for o in soup.select("select.datedleb_resa option")]
|
||||
|
||||
patternCodeSite = re.compile(r'.*gsw_vars\["CODEPRESTATAIRE"\] = "(.*?)";.*', flags=re.DOTALL)
|
||||
patternCodeObject = re.compile(r'.*gsw_vars\["CODEPRESTATION"\] = "(.*?)";.*', flags=re.DOTALL)
|
||||
scripts = soup.find_all('script')
|
||||
codeSite = ""
|
||||
idObject = ""
|
||||
for script in scripts:
|
||||
if(patternCodeSite.match(str(script.string))):
|
||||
data = patternCodeSite.match(script.string)
|
||||
codeSite = data.groups()[0]
|
||||
if(patternCodeObject.match(str(script.string))):
|
||||
data = patternCodeObject.match(script.string)
|
||||
idObject = data.groups()[0]
|
||||
|
||||
|
||||
pause = self.downloader.pause
|
||||
self.downloader.pause = False
|
||||
# get exact schedule need two supplementary requests
|
||||
datetimes = []
|
||||
if codeSite != "" and idObject != "":
|
||||
for date in dates:
|
||||
# the first page is required such that the server knows the selected date
|
||||
page1 = self.downloader.get_content("https://billetterie-c3c.clermont-ferrand.fr/booking?action=searchAjax&cid=2&afficheDirectDispo=" + date + "&type_prestataire=V&cle_fiche=PRESTATION-V-" + codeSite + "-" + idObject + "&datedeb=" + date)
|
||||
# then we get the form with hours
|
||||
page2 = self.downloader.get_content("https://billetterie-c3c.clermont-ferrand.fr/booking?action=detailTarifsPrestationAjax&prestation=V-" + codeSite + "-" + idObject)
|
||||
soup2 = BeautifulSoup(page2, "html.parser")
|
||||
times = [o.text for o in soup2.select("#quart_en_cours_spec option")]
|
||||
for t in times:
|
||||
startdate = Extractor.parse_french_date(date)
|
||||
starttime = Extractor.parse_french_time(t)
|
||||
start = datetime.datetime.combine(startdate, starttime)
|
||||
enddate = None
|
||||
endtime = None
|
||||
if duration is not None:
|
||||
end = start + timedelta(hours=duration.hour, minutes=duration.minute, seconds=duration.second)
|
||||
enddate = end.date()
|
||||
endtime = end.time()
|
||||
datetimes.append((startdate, starttime, enddate, endtime))
|
||||
self.downloader.pause = pause
|
||||
|
||||
category = None
|
||||
if len(categories) > 0:
|
||||
category = categories[0]
|
||||
|
||||
for dt in datetimes:
|
||||
|
||||
self.add_event_with_props(
|
||||
default_values,
|
||||
event_url,
|
||||
title,
|
||||
category,
|
||||
dt[0],
|
||||
location,
|
||||
description,
|
||||
tags,
|
||||
recurrences=None,
|
||||
uuids=[event_url],
|
||||
url_human=url_human,
|
||||
start_time=dt[1],
|
||||
end_day=dt[2],
|
||||
end_time=dt[3],
|
||||
published=published,
|
||||
image=image,
|
||||
)
|
@ -0,0 +1,71 @@
|
||||
from ..generic_extractors import *
|
||||
from ..extractor_facebook import FacebookEvent
|
||||
import json5
|
||||
from bs4 import BeautifulSoup
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# A class dedicated to get events from a facebook events page
|
||||
# such as https://www.facebook.com/laJeteeClermont/events
|
||||
class CExtractor(TwoStepsExtractor):
|
||||
|
||||
|
||||
def build_event_url_list(self, content):
|
||||
soup = BeautifulSoup(content, "html.parser")
|
||||
|
||||
debug = False
|
||||
|
||||
found = False
|
||||
links = soup.find_all("a")
|
||||
for link in links:
|
||||
if link.get("href").startswith('https://www.facebook.com/events/'):
|
||||
self.add_event_url(link.get('href').split('?')[0])
|
||||
found = True
|
||||
|
||||
if not found and debug:
|
||||
directory = "errors/"
|
||||
if not os.path.exists(directory):
|
||||
os.makedirs(directory)
|
||||
now = datetime.now()
|
||||
filename = directory + now.strftime("%Y%m%d_%H%M%S") + ".html"
|
||||
logger.warning("cannot find any event link in events page. Save content page in " + filename)
|
||||
with open(filename, "w") as text_file:
|
||||
text_file.write("<!-- " + self.url + " -->\n\n")
|
||||
text_file.write(content)
|
||||
|
||||
|
||||
|
||||
|
||||
def add_event_from_content(
|
||||
self,
|
||||
event_content,
|
||||
event_url,
|
||||
url_human=None,
|
||||
default_values=None,
|
||||
published=False,
|
||||
):
|
||||
|
||||
fevent = None
|
||||
soup = BeautifulSoup(event_content, "html.parser")
|
||||
for json_script in soup.find_all("script", type="application/json"):
|
||||
json_txt = json_script.get_text()
|
||||
json_struct = json.loads(json_txt)
|
||||
fevent = FacebookEvent.find_event_fragment_in_array(
|
||||
json_struct, fevent
|
||||
)
|
||||
|
||||
if fevent is not None:
|
||||
for event in fevent.build_events(event_url):
|
||||
event["published"] = published
|
||||
|
||||
self.add_event(default_values, **event)
|
||||
else:
|
||||
logger.warning("cannot find any event in page")
|
||||
|
||||
|
@ -1,25 +1,39 @@
|
||||
from ..generic_extractors import *
|
||||
import json5
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
# A class dedicated to get events from La Coopérative de Mai:
|
||||
# URL: https://lacomediedeclermont.com/saison23-24/wp-admin/admin-ajax.php?action=load_dates_existantes
|
||||
# URL pour les humains: https://lacomediedeclermont.com/saison23-24/
|
||||
# URL pour les humains: https://lacomediedeclermont.com/saison24-25/
|
||||
class CExtractor(TwoStepsExtractor):
|
||||
nom_lieu = "La Comédie de Clermont"
|
||||
url_referer = "https://lacomediedeclermont.com/saison24-25/"
|
||||
|
||||
def is_to_import_from_url(self, url):
|
||||
if any(keyword in url for keyword in ["podcast", "on-debriefe", "popcorn", "rencontreautour","rencontre-autour"]):
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def category_comedie2agenda(self, category):
|
||||
mapping = {
|
||||
"Théâtre": "Théâtre",
|
||||
"Danse": "Danse",
|
||||
"Rencontre": "Autre",
|
||||
"Sortie de résidence": "Autre",
|
||||
"PopCorn Live": "Autre",
|
||||
"Théâtre": "Spectacles",
|
||||
"Danse": "Spectacles",
|
||||
"Rencontre": "Rencontres & Débats",
|
||||
"Sortie de résidence": "Sans catégorie",
|
||||
"PopCorn Live": "Sans catégorie",
|
||||
}
|
||||
mapping_tag = {
|
||||
"Théâtre": "🎭 théâtre",
|
||||
"Danse": "💃 danse",
|
||||
"Rencontre": None,
|
||||
"Sortie de résidence": "sortie de résidence",
|
||||
"PopCorn Live": None,
|
||||
}
|
||||
if category in mapping:
|
||||
return mapping[category]
|
||||
return mapping[category], mapping_tag[category]
|
||||
else:
|
||||
return None
|
||||
return None, None
|
||||
|
||||
def build_event_url_list(self, content):
|
||||
dates = json5.loads(content)["data"][0]
|
||||
@ -28,7 +42,9 @@ class CExtractor(TwoStepsExtractor):
|
||||
for d in list(set(dates)):
|
||||
if not self.only_future or self.now <= datetime.date.fromisoformat(d):
|
||||
events = self.downloader.get_content(
|
||||
url, post={"action": "load_evenements_jour", "jour": d}
|
||||
url,
|
||||
post={"action": "load_evenements_jour", "jour": d},
|
||||
referer="https://lacomediedeclermont.com/saison24-25/"
|
||||
)
|
||||
if events:
|
||||
events = json5.loads(events)
|
||||
@ -40,29 +56,35 @@ class CExtractor(TwoStepsExtractor):
|
||||
e_url = (
|
||||
e.select("a")[0]["href"] + "#" + d
|
||||
) # a "fake" url specific for each day of this show
|
||||
self.add_event_url(e_url)
|
||||
self.add_event_start_day(e_url, d)
|
||||
t = (
|
||||
str(e.select("div#datecal")[0])
|
||||
.split(" ")[-1]
|
||||
.split("<")[0]
|
||||
)
|
||||
self.add_event_start_time(e_url, t)
|
||||
title = e.select("a")[0].contents[0]
|
||||
self.add_event_title(e_url, title)
|
||||
category = e.select("div#lieuevtcal span")
|
||||
if len(category) > 0:
|
||||
category = self.category_comedie2agenda(
|
||||
category[-1].contents[0]
|
||||
|
||||
if self.is_to_import_from_url(e_url):
|
||||
self.add_event_url(e_url)
|
||||
self.add_event_start_day(e_url, d)
|
||||
t = (
|
||||
str(e.select("div#datecal")[0])
|
||||
.split(" ")[-1]
|
||||
.split("<")[0]
|
||||
)
|
||||
if category is not None:
|
||||
self.add_event_category(e_url, category)
|
||||
location = (
|
||||
e.select("div#lieuevtcal")[0]
|
||||
.contents[-1]
|
||||
.split("•")[-1]
|
||||
)
|
||||
self.add_event_location(e_url, location)
|
||||
self.add_event_start_time(e_url, t)
|
||||
title = e.select("a")[0].contents[0]
|
||||
self.add_event_title(e_url, title)
|
||||
category = e.select("div#lieuevtcal span")
|
||||
if len(category) > 0:
|
||||
category, tag = self.category_comedie2agenda(
|
||||
category[-1].contents[0]
|
||||
)
|
||||
if category:
|
||||
self.add_event_category(e_url, category)
|
||||
if tag:
|
||||
self.add_event_tag(e_url, tag)
|
||||
location = (
|
||||
e.select("div#lieuevtcal")[0]
|
||||
.contents[-1]
|
||||
.split("•")[-1]
|
||||
)
|
||||
if location.replace(" ", "") == "":
|
||||
location = self.nom_lieu
|
||||
self.add_event_location(e_url, location)
|
||||
|
||||
def add_event_from_content(
|
||||
self,
|
||||
@ -75,16 +97,31 @@ class CExtractor(TwoStepsExtractor):
|
||||
soup = BeautifulSoup(event_content, "html.parser")
|
||||
|
||||
image = soup.select("#imgspec img")
|
||||
if image:
|
||||
if image and len(image) > 0:
|
||||
image = image[0]["src"]
|
||||
else:
|
||||
image = None
|
||||
|
||||
description = soup.select("#descspec")[0].get_text().replace("Lire plus...", "")
|
||||
|
||||
description = soup.select("#descspec")
|
||||
if description and len(description) > 0:
|
||||
description = description[0].get_text().replace("Lire plus...", "")
|
||||
# on ajoute éventuellement les informations complémentaires
|
||||
|
||||
d_suite = ""
|
||||
for d in ["typedesc", "dureedesc", "lieuspec"]:
|
||||
comp_desc = soup.select("#" + d)
|
||||
if comp_desc and len(comp_desc) > 0:
|
||||
d_suite += "\n\n" + comp_desc[0].get_text()
|
||||
if d_suite != "":
|
||||
description += "\n\n> Informations complémentaires:" + d_suite
|
||||
else:
|
||||
description = None
|
||||
|
||||
url_human = event_url
|
||||
|
||||
self.add_event_with_props(
|
||||
default_values,
|
||||
event_url,
|
||||
None,
|
||||
None,
|
||||
|
@ -1,7 +1,7 @@
|
||||
from ..generic_extractors import *
|
||||
import re
|
||||
import json5
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
# A class dedicated to get events from La Coopérative de Mai:
|
||||
# URL: https://www.lacoope.org/concerts-calendrier/
|
||||
@ -22,7 +22,7 @@ class CExtractor(TwoStepsExtractor):
|
||||
for e in data["events"]:
|
||||
self.add_event_url(e["url"])
|
||||
if e["tag"] == "Gratuit":
|
||||
self.add_event_tag(e["url"], "gratuit")
|
||||
self.add_event_tag(e["url"], "💶 gratuit")
|
||||
|
||||
else:
|
||||
raise Exception("Cannot extract events from javascript")
|
||||
@ -38,7 +38,7 @@ class CExtractor(TwoStepsExtractor):
|
||||
soup = BeautifulSoup(event_content, "html.parser")
|
||||
|
||||
title = soup.find("h1").contents[0]
|
||||
category = "Concert"
|
||||
category = "Fêtes & Concerts"
|
||||
image = soup.find("meta", property="og:image")
|
||||
if image:
|
||||
image = image["content"]
|
||||
@ -53,7 +53,7 @@ class CExtractor(TwoStepsExtractor):
|
||||
if description is None:
|
||||
description = ""
|
||||
|
||||
tags = []
|
||||
tags = ["🎵 concert"]
|
||||
|
||||
link_calendar = soup.select('a[href^="https://calendar.google.com/calendar/"]')
|
||||
if len(link_calendar) == 0:
|
||||
@ -68,6 +68,7 @@ class CExtractor(TwoStepsExtractor):
|
||||
url_human = event_url
|
||||
|
||||
self.add_event_with_props(
|
||||
default_values,
|
||||
event_url,
|
||||
title,
|
||||
category,
|
||||
|
@ -1,6 +1,6 @@
|
||||
from ..generic_extractors import *
|
||||
import re
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
# A class dedicated to get events from La puce à l'oreille
|
||||
# URL: https://www.lapucealoreille63.fr/
|
||||
@ -14,12 +14,7 @@ class CExtractor(TwoStepsExtractor):
|
||||
for e in events:
|
||||
e_url = e.find("a")
|
||||
if e_url:
|
||||
if self.add_event_url(e_url["href"]):
|
||||
title = e.select("div[data-testid=richTextElement] h1.font_0 span")
|
||||
if title:
|
||||
title = title[0].contents[0].get_text().replace("\n", " ")
|
||||
title = re.sub(" +", " ", title)
|
||||
self.add_event_title(e_url["href"], title)
|
||||
self.add_event_url(e_url["href"])
|
||||
|
||||
def add_event_from_content(
|
||||
self,
|
||||
@ -31,9 +26,12 @@ class CExtractor(TwoStepsExtractor):
|
||||
):
|
||||
soup = BeautifulSoup(event_content, "html.parser")
|
||||
|
||||
start_day = self.parse_french_date(
|
||||
soup.find("h2").get_text()
|
||||
title = soup.select("h2")[0].get_text()
|
||||
|
||||
start_day = Extractor.parse_french_date(
|
||||
soup.select("h2")[1].get_text()
|
||||
) # pas parfait, mais bordel que ce site est mal construit
|
||||
print(soup.select("h2")[1].get_text())
|
||||
|
||||
spans = soup.select("div[data-testid=richTextElement] span")
|
||||
start_time = None
|
||||
@ -43,13 +41,13 @@ class CExtractor(TwoStepsExtractor):
|
||||
for span in spans:
|
||||
txt = span.get_text()
|
||||
if txt.lstrip().startswith("DÉBUT"):
|
||||
start_time = self.parse_french_time(txt.split(":")[-1])
|
||||
start_time = Extractor.parse_french_time(txt.split(":")[-1])
|
||||
end_time = None
|
||||
elif txt.lstrip().startswith("HORAIRES :"):
|
||||
hs = txt.split(":")[-1].split("-")
|
||||
start_time = self.parse_french_time(hs[0])
|
||||
start_time = Extractor.parse_french_time(hs[0])
|
||||
if len(hs) > 1:
|
||||
end_time = self.parse_french_time(hs[1])
|
||||
end_time = Extractor.parse_french_time(hs[1])
|
||||
else:
|
||||
end_time = None
|
||||
elif txt.lstrip().startswith("LIEU :") and not location:
|
||||
@ -57,10 +55,10 @@ class CExtractor(TwoStepsExtractor):
|
||||
|
||||
if not location:
|
||||
location = self.nom_lieu
|
||||
end_day = self.guess_end_day(start_day, start_time, end_time)
|
||||
end_day = Extractor.guess_end_day(start_day, start_time, end_time)
|
||||
|
||||
url_human = event_url
|
||||
tags = []
|
||||
tags = ["🎵 concert"]
|
||||
|
||||
image = soup.select("wow-image img[fetchpriority=high]")
|
||||
if image:
|
||||
@ -78,9 +76,10 @@ class CExtractor(TwoStepsExtractor):
|
||||
description = None
|
||||
|
||||
self.add_event_with_props(
|
||||
default_values,
|
||||
event_url,
|
||||
None,
|
||||
"Concert",
|
||||
title,
|
||||
"Fêtes & Concerts",
|
||||
start_day,
|
||||
location,
|
||||
description,
|
||||
|
@ -1,5 +1,5 @@
|
||||
from ..generic_extractors import *
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
# A class dedicated to get events from Le Fotomat'
|
||||
# URL: https://www.lefotomat.com/
|
||||
@ -9,11 +9,12 @@ class CExtractor(TwoStepsExtractor):
|
||||
def category_fotomat2agenda(self, category):
|
||||
if not category:
|
||||
return None
|
||||
mapping = {"Concerts": "Concert"}
|
||||
mapping = {"Concerts": "Fêtes & Concerts"}
|
||||
mapping_tag = {"Concerts": "🎵 concert"}
|
||||
if category in mapping:
|
||||
return mapping[category]
|
||||
return mapping[category], mapping_tag[category]
|
||||
else:
|
||||
return None
|
||||
return None, None
|
||||
|
||||
def build_event_url_list(self, content):
|
||||
soup = BeautifulSoup(content, "xml")
|
||||
@ -26,9 +27,11 @@ class CExtractor(TwoStepsExtractor):
|
||||
title = e.find("title").contents[0]
|
||||
self.add_event_title(e_url, title)
|
||||
|
||||
category = self.category_fotomat2agenda(e.find("category").contents[0])
|
||||
category, tag = self.category_fotomat2agenda(e.find("category").contents[0])
|
||||
if category:
|
||||
self.add_event_category(e_url, category)
|
||||
if tag:
|
||||
self.add_event_tag(e_url, tag)
|
||||
|
||||
def add_event_from_content(
|
||||
self,
|
||||
@ -45,10 +48,10 @@ class CExtractor(TwoStepsExtractor):
|
||||
else:
|
||||
image = None
|
||||
desc = soup.select("head meta[name=description]")[0]["content"]
|
||||
start_day = self.parse_french_date(desc.split("-")[0])
|
||||
start_time = self.parse_french_time(desc.split("-")[1])
|
||||
end_time = self.parse_french_time(desc.split("-")[2])
|
||||
end_day = self.guess_end_day(start_day, start_time, end_time)
|
||||
start_day = Extractor.parse_french_date(desc.split("-")[0])
|
||||
start_time = Extractor.parse_french_time(desc.split("-")[1])
|
||||
end_time = Extractor.parse_french_time(desc.split("-")[2])
|
||||
end_day = Extractor.guess_end_day(start_day, start_time, end_time)
|
||||
|
||||
location = self.nom_lieu
|
||||
descriptions = soup.select("div.vce-col-content")
|
||||
@ -69,6 +72,7 @@ class CExtractor(TwoStepsExtractor):
|
||||
url_human = event_url
|
||||
|
||||
self.add_event_with_props(
|
||||
default_values,
|
||||
event_url,
|
||||
None,
|
||||
None,
|
||||
|
91
src/agenda_culturel/import_tasks/custom_extractors/lerio.py
Normal file
91
src/agenda_culturel/import_tasks/custom_extractors/lerio.py
Normal file
@ -0,0 +1,91 @@
|
||||
from ..generic_extractors import *
|
||||
from bs4 import BeautifulSoup
|
||||
from datetime import datetime
|
||||
|
||||
# A class dedicated to get events from Cinéma Le Rio (Clermont-Ferrand)
|
||||
# URL: https://www.cinemalerio.com/evenements/
|
||||
class CExtractor(TwoStepsExtractorNoPause):
    """Extractor for events from Cinéma Le Rio (Clermont-Ferrand).

    Source listing page: https://www.cinemalerio.com/evenements/

    Only "special" screenings (those carrying an alert box with a date and
    a specific description) are imported; plain screenings are skipped.
    """

    def __init__(self):
        super().__init__()
        # cache of candidate dates per event (populated by the base workflow)
        self.possible_dates = {}
        # theater metadata, filled lazily — None until known
        self.theater = None

    def build_event_url_list(self, content, infuture_days=180):
        """Collect event page URLs from the listing page.

        ``infuture_days`` bounds how far in the future events are considered
        (kept for interface compatibility with the base class).
        """
        soup = BeautifulSoup(content, "html.parser")

        links = soup.select("td.seance_link a")
        if links:
            for link in links:
                print(link["href"])
                self.add_event_url(link["href"])

    @staticmethod
    def to_text_select_one(soup, selector):
        """Return the text of the first element matching ``selector``, or None.

        Declared static (it was already called as ``CExtractor.to_text_select_one``);
        the parameter is renamed from ``filter`` to avoid shadowing the builtin.
        """
        element = soup.select_one(selector)
        if element is None:
            return None
        return element.text

    def add_event_from_content(
        self,
        event_content,
        event_url,
        url_human=None,
        default_values=None,
        published=False,
    ):
        """Parse a single event page and register the event.

        Skips pages without a date alert, without a parseable "date à heure"
        pair, or whose alert text marks an ordinary (non-special) screening.
        """
        soup = BeautifulSoup(event_content, "html.parser")

        title = soup.select_one("h1").text

        alerte_date = CExtractor.to_text_select_one(soup, ".alerte_date")
        if alerte_date is None:
            return
        dh = alerte_date.split("à")
        # if date is not found, we skip
        if len(dh) != 2:
            return

        # the page omits the year, so default to the current one
        date = Extractor.parse_french_date(dh[0], default_year=datetime.now().year)
        time = Extractor.parse_french_time(dh[1])

        synopsis = CExtractor.to_text_select_one(soup, ".synopsis_bloc")
        special_titre = CExtractor.to_text_select_one(soup, ".alerte_titre")
        special = CExtractor.to_text_select_one(soup, ".alerte_text")

        # it's not a specific event: we skip it
        special_lines = None if special is None else special.split('\n')
        if special is None or len(special_lines) == 0 or \
            (len(special_lines) == 1 and special_lines[0].strip().startswith('En partenariat')):
            return

        description = "\n\n".join([x for x in [synopsis, special_titre, special] if x is not None])

        image = soup.select_one(".col1 img")
        image_alt = None
        if image is not None:
            image_alt = image["alt"]
            image = image["src"]

        self.add_event_with_props(
            default_values,
            event_url,
            title,
            None,
            date,
            None,
            description,
            [],
            recurrences=None,
            uuids=[event_url],
            url_human=event_url,
            start_time=time,
            end_day=None,
            end_time=None,
            published=published,
            image=image,
            image_alt=image_alt
        )
|
@ -0,0 +1,112 @@
|
||||
from ..generic_extractors import *
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
|
||||
# A class dedicated to get events from MEC Wordpress plugin
|
||||
# URL: https://webnus.net/modern-events-calendar/
|
||||
class CExtractor(TwoStepsExtractor):
    """Extractor for events published with the MEC WordPress plugin.

    Plugin: https://webnus.net/modern-events-calendar/
    """

    def local2agendaCategory(self, category):
        """Map a local MEC category name to an (agenda category, tag) pair.

        Returns ``(None, None)`` when the category is unknown.
        """
        mapping = {
            "Musique": "Fêtes & Concerts",
            "CONCERT": "Fêtes & Concerts",
            "VISITE": "Visites & Expositions",
            "Spectacle": "Spectacles",
            "Rencontre": "Rencontres & Débats",
            "Atelier": "Animations & Ateliers",
            "Projection": "Cinéma",
        }
        mapping_tag = {
            "Musique": "concert",
            "CONCERT": "concert",
            "VISITE": None,
            # fix: was "rhéâtre", an obvious typo for the intended tag
            "Spectacle": "théâtre",
            "Rencontre": None,
            "Atelier": "atelier",
            "Projection": None,
        }

        if category in mapping:
            return mapping[category], mapping_tag[category]
        else:
            return None, None

    def build_event_url_list(self, content):
        """Collect event URLs, titles and categories from the listing page."""
        soup = BeautifulSoup(content, "xml")

        events = soup.select("div.mec-tile-event-content")
        for e in events:
            link = e.select("h4.mec-event-title a")
            if len(link) == 1:
                url = link[0]["href"]
                title = link[0].get_text()

                if self.add_event_url(url):
                    print(url, title)
                    self.add_event_title(url, title)

                    # prefer the "normal" label; fall back to the category badge
                    categories = e.select(".mec-label-normal")
                    if len(categories) == 0:
                        categories = e.select(".mec-category")
                    if len(categories) > 0:
                        category, tag = self.local2agendaCategory(categories[0].get_text())
                        if category:
                            self.add_event_category(url, category)
                        if tag:
                            # NOTE(review): the tag is registered through
                            # add_event_category — confirm this is intended
                            # rather than a dedicated tag registration
                            self.add_event_category(url, tag)

    def add_event_from_content(
        self,
        event_content,
        event_url,
        url_human=None,
        default_values=None,
        published=False,
    ):
        """Parse a single MEC event page and register the event."""
        soup = BeautifulSoup(event_content, "xml")

        start_day = soup.select(".mec-start-date-label")
        if start_day:
            start_day = Extractor.parse_french_date(start_day[0].get_text())
        else:
            start_day = None
        # the time label holds "start - end" (the end part is optional)
        t = soup.select(".mec-single-event-time .mec-events-abbr")
        if t:
            t = t[0].get_text().split("-")
            start_time = Extractor.parse_french_time(t[0])
            if len(t) > 1:
                end_time = Extractor.parse_french_time(t[1])
            else:
                end_time = None
        else:
            start_time = None
            end_time = None

        image = soup.select(".mec-events-event-image img")
        if image:
            image = image[0]["src"]
        else:
            image = None
        description = soup.select(".mec-event-content")[0].get_text(separator=" ")

        url_human = event_url

        self.add_event_with_props(
            default_values,
            event_url,
            None,
            None,
            start_day,
            None,
            description,
            [],
            recurrences=None,
            uuids=[event_url],
            url_human=url_human,
            start_time=start_time,
            end_day=None,
            end_time=end_time,
            published=published,
            image=image,
        )
|
@ -1,11 +1,13 @@
|
||||
from urllib.parse import urlencode
|
||||
import urllib.request
|
||||
from urllib.request import Request
|
||||
import os
|
||||
from selenium import webdriver
|
||||
from selenium.webdriver.chrome.service import Service
|
||||
from selenium.webdriver.chrome.options import Options
|
||||
from selenium.common.exceptions import *
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
import time
|
||||
|
||||
class Downloader(ABC):
|
||||
def __init__(self):
|
||||
@ -15,13 +17,13 @@ class Downloader(ABC):
|
||||
def download(self, url, post=None):
|
||||
pass
|
||||
|
||||
def get_content(self, url, cache=None, post=None):
|
||||
def get_content(self, url, cache=None, referer=None, post=None):
|
||||
if cache and os.path.exists(cache):
|
||||
print("Loading cache ({})".format(cache))
|
||||
with open(cache) as f:
|
||||
content = "\n".join(f.readlines())
|
||||
else:
|
||||
content = self.download(url, post)
|
||||
content = self.download(url, referer=referer, post=post)
|
||||
|
||||
if cache:
|
||||
print("Saving cache ({})".format(cache))
|
||||
@ -37,38 +39,102 @@ class SimpleDownloader(Downloader):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
||||
def download(self, url, post=None):
|
||||
print("Downloading {}".format(url))
|
||||
|
||||
def download(self, url, referer=None, post=None):
|
||||
print("Downloading {} referer: {} post: {}".format(url, referer, post))
|
||||
try:
|
||||
headers = {
|
||||
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:126.0) Gecko/20100101 Firefox/126.0",
|
||||
}
|
||||
if referer is not None:
|
||||
headers["Referer"] = referer
|
||||
req = Request(url, headers=headers)
|
||||
if post:
|
||||
post_args = urlencode(post).encode()
|
||||
resource = urllib.request.urlopen(url, post_args)
|
||||
post_args = urlencode(post).encode("utf-8")
|
||||
resource = urllib.request.urlopen(req, post_args)
|
||||
else:
|
||||
resource = urllib.request.urlopen(url)
|
||||
data = resource.read().decode(resource.headers.get_content_charset())
|
||||
resource = urllib.request.urlopen(req)
|
||||
charset = resource.headers.get_content_charset()
|
||||
if charset:
|
||||
data = resource.read().decode(charset)
|
||||
else:
|
||||
data = resource.read().decode()
|
||||
return data
|
||||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return None
|
||||
|
||||
|
||||
class ChromiumHeadlessDownloader(Downloader):
|
||||
def __init__(self):
|
||||
def __init__(self, pause=True, noimage=True):
|
||||
super().__init__()
|
||||
self.pause = pause
|
||||
self.options = Options()
|
||||
self.options.add_argument("--headless=new")
|
||||
self.options.add_argument("--disable-dev-shm-usage")
|
||||
self.options.add_argument("--no-sandbox")
|
||||
self.service = Service("/usr/bin/chromedriver")
|
||||
self.options.add_argument("start-maximized")
|
||||
self.options.add_argument("enable-automation")
|
||||
self.options.add_argument("--disable-dev-shm-usage")
|
||||
self.options.add_argument("--disable-browser-side-navigation")
|
||||
self.options.add_argument("--disable-gpu")
|
||||
if noimage:
|
||||
self.options.add_experimental_option(
|
||||
"prefs", {
|
||||
# block image loading
|
||||
"profile.managed_default_content_settings.images": 2,
|
||||
}
|
||||
)
|
||||
|
||||
def download(self, url, post=None):
|
||||
self.service = Service("/usr/bin/chromedriver")
|
||||
self.driver = webdriver.Chrome(service=self.service, options=self.options)
|
||||
|
||||
|
||||
def screenshot(self, url, path_image):
|
||||
print("Screenshot {}".format(url))
|
||||
try:
|
||||
self.driver.get(url)
|
||||
if self.pause:
|
||||
time.sleep(2)
|
||||
self.driver.save_screenshot(path_image)
|
||||
except:
|
||||
print(f">> Exception: {URL}")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def download(self, url, referer=None, post=None):
|
||||
if post:
|
||||
raise Exception("POST method with Chromium headless not yet implemented")
|
||||
print("Download {}".format(url))
|
||||
self.driver = webdriver.Chrome(service=self.service, options=self.options)
|
||||
|
||||
self.driver.get(url)
|
||||
doc = self.driver.page_source
|
||||
self.driver.quit()
|
||||
try:
|
||||
self.driver.get(url)
|
||||
if self.pause:
|
||||
time.sleep(2)
|
||||
doc = self.driver.page_source
|
||||
|
||||
|
||||
except StaleElementReferenceException as e:
|
||||
print(f">> {type(e).__name__}: {e.args}")
|
||||
return None
|
||||
except NoSuchElementException as e:
|
||||
print(f">> {type(e).__name__}: {e.args}")
|
||||
return None
|
||||
except TimeoutException as e:
|
||||
print(f">> {type(e).__name__}: {e.args}")
|
||||
return None
|
||||
except WebDriverException as e:
|
||||
print(f">> {type(e).__name__}: {e.args}")
|
||||
return None
|
||||
except SessionNotCreatedException as e:
|
||||
print(f">> {type(e).__name__}: {e.args}")
|
||||
return None
|
||||
except Exception as e:
|
||||
print(f">> {type(e).__name__} line {e.__traceback__.tb_lineno} of {__file__}: {e.args}")
|
||||
return None
|
||||
except:
|
||||
print(f">> General Exception: {URL}")
|
||||
return None
|
||||
|
||||
return doc
|
||||
|
@ -2,20 +2,24 @@ from abc import ABC, abstractmethod
|
||||
from datetime import datetime, time, date, timedelta
|
||||
import re
|
||||
import unicodedata
|
||||
from django.utils import timezone
|
||||
|
||||
|
||||
def remove_accents(input_str):
|
||||
nfkd_form = unicodedata.normalize("NFKD", input_str)
|
||||
return "".join([c for c in nfkd_form if not unicodedata.combining(c)])
|
||||
|
||||
|
||||
class Extractor(ABC):
|
||||
url_referer=None
|
||||
|
||||
def __init__(self):
|
||||
self.header = {}
|
||||
self.events = []
|
||||
self.downloader = None
|
||||
self.referer = ""
|
||||
|
||||
def guess_end_day(self, start_day, start_time, end_time):
|
||||
def remove_accents(input_str):
|
||||
nfkd_form = unicodedata.normalize("NFKD", input_str)
|
||||
return "".join([c for c in nfkd_form if not unicodedata.combining(c)])
|
||||
|
||||
def guess_end_day(start_day, start_time, end_time):
|
||||
if end_time:
|
||||
if end_time > start_time:
|
||||
return start_day
|
||||
@ -24,7 +28,7 @@ class Extractor(ABC):
|
||||
else:
|
||||
return start_day
|
||||
|
||||
def guess_month(self, text):
|
||||
def guess_month(text):
|
||||
mths = [
|
||||
"jan",
|
||||
"fe",
|
||||
@ -39,31 +43,45 @@ class Extractor(ABC):
|
||||
"nov",
|
||||
"dec",
|
||||
]
|
||||
t = remove_accents(text).lower()
|
||||
t = Extractor.remove_accents(text).lower()
|
||||
for i, m in enumerate(mths):
|
||||
if t.startswith(m):
|
||||
return i + 1
|
||||
return None
|
||||
|
||||
def parse_french_date(self, text):
|
||||
def parse_french_date(text, default_year=None):
|
||||
# format NomJour Numero Mois Année
|
||||
m = re.search(
|
||||
"[a-zA-ZéÉûÛ:.]+[ ]*([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)", text
|
||||
)
|
||||
if m:
|
||||
day = m.group(1)
|
||||
month = self.guess_month(m.group(2))
|
||||
month = Extractor.guess_month(m.group(2))
|
||||
year = m.group(3)
|
||||
else:
|
||||
# format Numero Mois Annee
|
||||
m = re.search("([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)", text)
|
||||
if m:
|
||||
day = m.group(1)
|
||||
month = self.guess_month(m.group(2))
|
||||
month = Extractor.guess_month(m.group(2))
|
||||
year = m.group(3)
|
||||
else:
|
||||
# TODO: consolider les cas non satisfaits
|
||||
return None
|
||||
# format Numero Mois Annee
|
||||
m = re.search("([0-9]+)/([0-9]+)/([0-9]+)", text)
|
||||
if m:
|
||||
day = m.group(1)
|
||||
month = int(m.group(2))
|
||||
year = m.group(3)
|
||||
else:
|
||||
# format Numero Mois Annee
|
||||
m = re.search("([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)", text)
|
||||
if m:
|
||||
day = m.group(1)
|
||||
month = Extractor.guess_month(m.group(2))
|
||||
year = default_year
|
||||
else:
|
||||
# TODO: consolider les cas non satisfaits
|
||||
return None
|
||||
|
||||
if month is None:
|
||||
return None
|
||||
@ -78,7 +96,7 @@ class Extractor(ABC):
|
||||
return None
|
||||
return date(year, month, day)
|
||||
|
||||
def parse_french_time(self, text):
|
||||
def parse_french_time(text):
|
||||
# format heures minutes secondes
|
||||
m = re.search("([0-9]+)[ a-zA-Z:.]+([0-9]+)[ a-zA-Z:.]+([0-9]+)", text)
|
||||
if m:
|
||||
@ -94,13 +112,20 @@ class Extractor(ABC):
|
||||
s = "0"
|
||||
else:
|
||||
# format heures
|
||||
m = re.search("([0-9]+)[ Hh:.]", text)
|
||||
m = re.search("([0-9]+)[ ]*[Hh:.]", text)
|
||||
if m:
|
||||
h = m.group(1)
|
||||
m = "0"
|
||||
s = "0"
|
||||
else:
|
||||
return None
|
||||
# format minutes
|
||||
m = re.search("([0-9]+)[ ]*(?:mn|min|Min|Mn)", text)
|
||||
if m:
|
||||
h = "0"
|
||||
m = m.group(1)
|
||||
s = "0"
|
||||
else:
|
||||
return None
|
||||
|
||||
try:
|
||||
h = int(h)
|
||||
@ -134,6 +159,7 @@ class Extractor(ABC):
|
||||
|
||||
def add_event(
|
||||
self,
|
||||
default_values,
|
||||
title,
|
||||
category,
|
||||
start_day,
|
||||
@ -158,14 +184,19 @@ class Extractor(ABC):
|
||||
print("ERROR: cannot import an event without start day")
|
||||
return
|
||||
|
||||
tags_default = self.default_value_if_exists(default_values, "tags")
|
||||
if not tags_default:
|
||||
tags_default = []
|
||||
|
||||
event = {
|
||||
"title": title,
|
||||
"category": category,
|
||||
"category": category if category else self.default_value_if_exists(default_values, "category"),
|
||||
"start_day": start_day,
|
||||
"uuids": uuids,
|
||||
"location": location,
|
||||
"location": location if location else self.default_value_if_exists(default_values, "location"),
|
||||
"organisers": self.default_value_if_exists(default_values, "organisers"),
|
||||
"description": description,
|
||||
"tags": tags,
|
||||
"tags": tags + tags_default,
|
||||
"published": published,
|
||||
"image": image,
|
||||
"image_alt": image_alt,
|
||||
@ -196,7 +227,10 @@ class Extractor(ABC):
|
||||
)
|
||||
|
||||
def get_structure(self):
|
||||
return {"header": self.header, "events": self.events}
|
||||
if len(self.events) == 0:
|
||||
return {}
|
||||
else:
|
||||
return {"header": self.header, "events": self.events}
|
||||
|
||||
def clean_url(url):
|
||||
from .extractor_ical import ICALExtractor
|
||||
@ -210,8 +244,31 @@ class Extractor(ABC):
|
||||
def get_default_extractors(single_event=False):
|
||||
from .extractor_ical import ICALExtractor
|
||||
from .extractor_facebook import FacebookEventExtractor
|
||||
from .extractor_ggcal_link import GoogleCalendarLinkEventExtractor
|
||||
|
||||
if single_event:
|
||||
return [FacebookEventExtractor(single_event=True)]
|
||||
return [FacebookEventExtractor(), GoogleCalendarLinkEventExtractor(), EventNotFoundExtractor()]
|
||||
else:
|
||||
return [ICALExtractor(), FacebookEventExtractor(single_event=False)]
|
||||
return [ICALExtractor(), FacebookEventExtractor(), GoogleCalendarLinkEventExtractor(), EventNotFoundExtractor()]
|
||||
|
||||
|
||||
# A class that only produce a not found event
|
||||
class EventNotFoundExtractor(Extractor):
|
||||
|
||||
def extract(
|
||||
self, content, url, url_human=None, default_values=None, published=False
|
||||
):
|
||||
self.set_header(url)
|
||||
self.clear_events()
|
||||
|
||||
self.add_event(default_values, "événement sans titre depuis " + url,
|
||||
None, timezone.now().date(), None,
|
||||
"l'import a échoué, la saisie doit se faire manuellement à partir de l'url source " + url,
|
||||
[], [url], published=False, url_human=url)
|
||||
|
||||
return self.get_structure()
|
||||
|
||||
|
||||
def clean_url(url):
|
||||
return url
|
||||
|
||||
|
@ -9,205 +9,248 @@ import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class SimpleFacebookEvent:
|
||||
def __init__(self, data):
|
||||
self.elements = {}
|
||||
|
||||
class FacebookEventExtractor(Extractor):
|
||||
class SimpleFacebookEvent:
|
||||
def __init__(self, data):
|
||||
self.elements = {}
|
||||
for key in ["id", "start_timestamp", "end_timestamp"]:
|
||||
self.elements[key] = data[key] if key in data else None
|
||||
|
||||
for key in ["id", "start_timestamp", "end_timestamp"]:
|
||||
self.elements[key] = data[key] if key in data else None
|
||||
if "parent_event" in data:
|
||||
self.parent = SimpleFacebookEvent(
|
||||
data["parent_event"]
|
||||
)
|
||||
|
||||
if "parent_event" in data:
|
||||
self.parent = FacebookEventExtractor.SimpleFacebookEvent(
|
||||
data["parent_event"]
|
||||
)
|
||||
|
||||
class FacebookEvent:
|
||||
name = "event"
|
||||
keys = [
|
||||
[
|
||||
"start_time_formatted",
|
||||
"start_timestamp",
|
||||
"is_past",
|
||||
"name",
|
||||
"price_info",
|
||||
"cover_media_renderer",
|
||||
"event_creator",
|
||||
"id",
|
||||
"day_time_sentence",
|
||||
"event_place",
|
||||
"comet_neighboring_siblings",
|
||||
class FacebookEvent:
|
||||
name = "event"
|
||||
# keys corresponds to javascript elements that are containing interesting data
|
||||
keys = [
|
||||
[
|
||||
"start_time_formatted",
|
||||
"start_timestamp",
|
||||
"is_past",
|
||||
"name",
|
||||
"price_info",
|
||||
"cover_media_renderer",
|
||||
"id",
|
||||
"parent_if_exists_or_self",
|
||||
"day_time_sentence",
|
||||
"event_place",
|
||||
"comet_neighboring_siblings",
|
||||
],
|
||||
["event_description"],
|
||||
["start_timestamp", "end_timestamp"],
|
||||
]
|
||||
# rules are defined by a sub-key within intersesting data where elements will be find
|
||||
# each pair in the associated list is a key of our model and a path within FB data to
|
||||
# get the corresponding field
|
||||
rules = {
|
||||
"event_description": [("description", ["text"])],
|
||||
"cover_media_renderer": [
|
||||
("image_alt", ["cover_photo", "photo", "accessibility_caption"]),
|
||||
("image", ["cover_photo", "photo", "full_image", "uri"]),
|
||||
("image", ["cover_media", 0, "full_image", "uri"]),
|
||||
("image_alt", ["cover_media", 0, "accessibility_caption"]),
|
||||
],
|
||||
"event_creator":
|
||||
[("event_creator_name", ["name"]),
|
||||
("event_creator_url", ["url"]),
|
||||
],
|
||||
["event_description"],
|
||||
["start_timestamp", "end_timestamp"],
|
||||
]
|
||||
rules = {
|
||||
"event_description": {"description": ["text"]},
|
||||
"cover_media_renderer": {
|
||||
"image_alt": ["cover_photo", "photo", "accessibility_caption"],
|
||||
"image": ["cover_photo", "photo", "full_image", "uri"],
|
||||
},
|
||||
"event_creator": {
|
||||
"event_creator_name": ["name"],
|
||||
"event_creator_url": ["url"],
|
||||
},
|
||||
"event_place": {"event_place_name": ["name"]},
|
||||
}
|
||||
"event_place": [("event_place_name", ["name"])],
|
||||
}
|
||||
|
||||
def __init__(self, i, event):
|
||||
def __init__(self, *args):
|
||||
if len(args) == 1:
|
||||
other = args[0]
|
||||
self.fragments = other.fragments
|
||||
self.elements = other.elements
|
||||
self.neighbor_events = None
|
||||
else:
|
||||
i = args[0]
|
||||
event = args[1]
|
||||
self.fragments = {}
|
||||
self.elements = {}
|
||||
self.neighbor_events = None
|
||||
self.possible_end_timestamp = []
|
||||
self.add_fragment(i, event)
|
||||
|
||||
def get_element(self, key):
|
||||
return self.elements[key] if key in self.elements else None
|
||||
def get_element(self, key):
|
||||
return self.elements[key] if key in self.elements else None
|
||||
|
||||
def get_element_date(self, key):
|
||||
v = self.get_element(key)
|
||||
return (
|
||||
datetime.fromtimestamp(v).date() if v is not None and v != 0 else None
|
||||
)
|
||||
def get_element_date(self, key):
|
||||
v = self.get_element(key)
|
||||
return (
|
||||
datetime.fromtimestamp(v).date() if v is not None and v != 0 else None
|
||||
)
|
||||
|
||||
def get_element_time(self, key):
|
||||
v = self.get_element(key)
|
||||
return (
|
||||
datetime.fromtimestamp(v).strftime("%H:%M")
|
||||
if v is not None and v != 0
|
||||
else None
|
||||
)
|
||||
def get_element_time(self, key):
|
||||
v = self.get_element(key)
|
||||
return (
|
||||
datetime.fromtimestamp(v).strftime("%H:%M")
|
||||
if v is not None and v != 0
|
||||
else None
|
||||
)
|
||||
|
||||
def add_fragment(self, i, event):
|
||||
self.fragments[i] = event
|
||||
def add_fragment(self, i, event):
|
||||
self.fragments[i] = event
|
||||
|
||||
if FacebookEventExtractor.FacebookEvent.keys[i] == [
|
||||
"start_timestamp",
|
||||
"end_timestamp",
|
||||
]:
|
||||
self.get_possible_end_timestamp(i, event)
|
||||
else:
|
||||
for k in FacebookEventExtractor.FacebookEvent.keys[i]:
|
||||
if k == "comet_neighboring_siblings":
|
||||
self.get_neighbor_events(event[k])
|
||||
elif k in FacebookEventExtractor.FacebookEvent.rules:
|
||||
for nk, rule in FacebookEventExtractor.FacebookEvent.rules[
|
||||
k
|
||||
].items():
|
||||
error = False
|
||||
c = event[k]
|
||||
for ki in rule:
|
||||
if c is not None:
|
||||
c = c[ki]
|
||||
else:
|
||||
error = True
|
||||
if not error:
|
||||
self.elements[nk] = c
|
||||
if FacebookEvent.keys[i] == [
|
||||
"start_timestamp",
|
||||
"end_timestamp",
|
||||
]:
|
||||
self.get_possible_end_timestamp(i, event)
|
||||
else:
|
||||
for k in FacebookEvent.keys[i]:
|
||||
if k == "comet_neighboring_siblings":
|
||||
self.get_neighbor_events(event[k])
|
||||
elif k in FacebookEvent.rules:
|
||||
for nk, rule in FacebookEvent.rules[k]:
|
||||
error = False
|
||||
c = event[k]
|
||||
for ki in rule:
|
||||
if c is not None and ki in c or (isinstance(c, list) and ki < len(c)):
|
||||
c = c[ki]
|
||||
else:
|
||||
error = True
|
||||
break
|
||||
if not error:
|
||||
self.elements[nk] = c
|
||||
else:
|
||||
self.elements[k] = event[k]
|
||||
|
||||
def get_possible_end_timestamp(self, i, data):
|
||||
self.possible_end_timestamp.append(
|
||||
dict((k, data[k]) for k in FacebookEvent.keys[i])
|
||||
)
|
||||
|
||||
def get_neighbor_events(self, data):
|
||||
self.neighbor_events = [
|
||||
SimpleFacebookEvent(d) for d in data
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return (
|
||||
str(self.elements)
|
||||
+ "\n Neighbors: "
|
||||
+ ", ".join([ne.elements["id"] for ne in self.neighbor_events])
|
||||
)
|
||||
|
||||
def consolidate_current_event(self):
|
||||
if (
|
||||
self.neighbor_events is not None
|
||||
and "id" in self.elements
|
||||
and "end_timestamp" not in self.elements
|
||||
):
|
||||
if self.neighbor_events is not None and "id" in self.elements:
|
||||
id = self.elements["id"]
|
||||
for ne in self.neighbor_events:
|
||||
if ne.elements["id"] == id:
|
||||
self.elements["end_timestamp"] = ne.elements[
|
||||
"end_timestamp"
|
||||
]
|
||||
|
||||
if (
|
||||
"end_timestamp" not in self.elements
|
||||
and len(self.possible_end_timestamp) != 0
|
||||
):
|
||||
for s in self.possible_end_timestamp:
|
||||
if (
|
||||
"start_timestamp" in s
|
||||
and "start_timestamp" in self.elements
|
||||
and s["start_timestamp"] == self.elements["start_timestamp"]
|
||||
):
|
||||
self.elements["end_timestamp"] = s["end_timestamp"]
|
||||
break
|
||||
|
||||
def find_event_fragment_in_array(array, event, first=True):
|
||||
if isinstance(array, dict):
|
||||
seen = False
|
||||
for i, ks in enumerate(FacebookEvent.keys):
|
||||
# DEBUG: print([k for k in ks if k in array], "il manque", [k for k in ks if k not in array])
|
||||
if len(ks) == len([k for k in ks if k in array]):
|
||||
seen = True
|
||||
if event is None:
|
||||
event = FacebookEvent(i, array)
|
||||
else:
|
||||
self.elements[k] = event[k]
|
||||
|
||||
def get_possible_end_timestamp(self, i, data):
|
||||
self.possible_end_timestamp.append(
|
||||
dict((k, data[k]) for k in FacebookEventExtractor.FacebookEvent.keys[i])
|
||||
)
|
||||
|
||||
def get_neighbor_events(self, data):
|
||||
self.neighbor_events = [
|
||||
FacebookEventExtractor.SimpleFacebookEvent(d) for d in data
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return (
|
||||
str(self.elements)
|
||||
+ "\n Neighbors: "
|
||||
+ ", ".join([ne.elements["id"] for ne in self.neighbor_events])
|
||||
)
|
||||
|
||||
def consolidate_current_event(self):
|
||||
if (
|
||||
self.neighbor_events is not None
|
||||
and "id" in self.elements
|
||||
and "end_timestamp" not in self.elements
|
||||
):
|
||||
if self.neighbor_events is not None and "id" in self.elements:
|
||||
id = self.elements["id"]
|
||||
for ne in self.neighbor_events:
|
||||
if ne.elements["id"] == id:
|
||||
self.elements["end_timestamp"] = ne.elements[
|
||||
"end_timestamp"
|
||||
]
|
||||
|
||||
if (
|
||||
"end_timestamp" not in self.elements
|
||||
and len(self.possible_end_timestamp) != 0
|
||||
):
|
||||
for s in self.possible_end_timestamp:
|
||||
if (
|
||||
"start_timestamp" in s
|
||||
and "start_timestamp" in self.elements
|
||||
and s["start_timestamp"] == self.elements["start_timestamp"]
|
||||
):
|
||||
self.elements["end_timestamp"] = s["end_timestamp"]
|
||||
break
|
||||
|
||||
def find_event_fragment_in_array(array, event, first=True):
|
||||
if isinstance(array, dict):
|
||||
seen = False
|
||||
for i, ks in enumerate(FacebookEventExtractor.FacebookEvent.keys):
|
||||
if len(ks) == len([k for k in ks if k in array]):
|
||||
seen = True
|
||||
if event is None:
|
||||
event = FacebookEventExtractor.FacebookEvent(i, array)
|
||||
else:
|
||||
event.add_fragment(i, array)
|
||||
# only consider the first of FacebookEvent.keys
|
||||
break
|
||||
if not seen:
|
||||
for k in array:
|
||||
event = FacebookEventExtractor.FacebookEvent.find_event_fragment_in_array(
|
||||
array[k], event, False
|
||||
)
|
||||
elif isinstance(array, list):
|
||||
for e in array:
|
||||
event = FacebookEventExtractor.FacebookEvent.find_event_fragment_in_array(
|
||||
e, event, False
|
||||
event.add_fragment(i, array)
|
||||
# only consider the first of FacebookEvent.keys
|
||||
break
|
||||
if not seen:
|
||||
for k in array:
|
||||
event = FacebookEvent.find_event_fragment_in_array(
|
||||
array[k], event, False
|
||||
)
|
||||
elif isinstance(array, list):
|
||||
for e in array:
|
||||
event = FacebookEvent.find_event_fragment_in_array(
|
||||
e, event, False
|
||||
)
|
||||
|
||||
if event is not None and first:
|
||||
event.consolidate_current_event()
|
||||
return event
|
||||
if event is not None and first:
|
||||
event.consolidate_current_event()
|
||||
return event
|
||||
|
||||
def build_event(self, url):
|
||||
self.get_element("image")
|
||||
def build_event(self, url):
|
||||
return {
|
||||
"title": self.get_element("name"),
|
||||
"category": None,
|
||||
"start_day": self.get_element_date("start_timestamp"),
|
||||
"location": self.get_element("event_place_name"),
|
||||
"description": self.get_element("description"),
|
||||
"tags": [],
|
||||
"uuids": [url],
|
||||
"url_human": url,
|
||||
"start_time": self.get_element_time("start_timestamp"),
|
||||
"end_day": self.get_element_date("end_timestamp"),
|
||||
"end_time": self.get_element_time("end_timestamp"),
|
||||
"image": self.get_element("image"),
|
||||
"image_alt": self.get_element("image_alt"),
|
||||
}
|
||||
|
||||
return {
|
||||
"title": self.get_element("name"),
|
||||
"category": None,
|
||||
"start_day": self.get_element_date("start_timestamp"),
|
||||
"location": self.get_element("event_place_name"),
|
||||
"description": self.get_element("description"),
|
||||
"tags": [],
|
||||
"uuids": [url],
|
||||
"url_human": url,
|
||||
"start_time": self.get_element_time("start_timestamp"),
|
||||
"end_day": self.get_element_date("end_timestamp"),
|
||||
"end_time": self.get_element_time("end_timestamp"),
|
||||
"image": self.get_element("image"),
|
||||
"image_alt": self.get_element("image"),
|
||||
}
|
||||
def get_parent_id(self):
|
||||
return self.get_element("parent_if_exists_or_self")["id"]
|
||||
|
||||
def __init__(self, single_event=False):
|
||||
self.single_event = single_event
|
||||
|
||||
def build_events(self, url):
|
||||
|
||||
if self.neighbor_events is None or len(self.neighbor_events) == 0:
|
||||
return [self.build_event(url)]
|
||||
else:
|
||||
url_base = "https://www.facebook.com/events/" + self.get_parent_id() + "/"
|
||||
result = []
|
||||
for nb_e in self.neighbor_events:
|
||||
# we create a copy of the event
|
||||
clone = FacebookEvent(self)
|
||||
# we set start and end timestamp accordnig to the neighbor
|
||||
clone.elements["start_timestamp"] = nb_e.elements["start_timestamp"]
|
||||
clone.elements["end_timestamp"] = nb_e.elements["end_timestamp"]
|
||||
## we generate the event
|
||||
result.append(clone.build_event(url_base + nb_e.elements["id"] + "/"))
|
||||
return result
|
||||
|
||||
class FacebookEventExtractor(Extractor):
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
||||
def clean_url(url):
|
||||
if FacebookEventExtractor.is_known_url(url):
|
||||
u = urlparse(url)
|
||||
return "https://www.facebook.com" + u.path
|
||||
result = "https://www.facebook.com" + u.path
|
||||
|
||||
# remove name in the url
|
||||
match = re.match(r"(.*/events)/s/([a-zA-Z-][a-zA-Z-0-9-]+)/([0-9/]*)", result)
|
||||
if match:
|
||||
result = match[1] + "/" + match[3]
|
||||
|
||||
if result[-1] == "/":
|
||||
return result
|
||||
else:
|
||||
return result + "/"
|
||||
else:
|
||||
return url
|
||||
|
||||
|
||||
def is_known_url(url):
|
||||
u = urlparse(url)
|
||||
return u.netloc in ["facebook.com", "www.facebook.com", "m.facebook.com"]
|
||||
@ -223,16 +266,22 @@ class FacebookEventExtractor(Extractor):
|
||||
for json_script in soup.find_all("script", type="application/json"):
|
||||
json_txt = json_script.get_text()
|
||||
json_struct = json.loads(json_txt)
|
||||
fevent = FacebookEventExtractor.FacebookEvent.find_event_fragment_in_array(
|
||||
fevent = FacebookEvent.find_event_fragment_in_array(
|
||||
json_struct, fevent
|
||||
)
|
||||
|
||||
if fevent is not None:
|
||||
self.set_header(url)
|
||||
event = fevent.build_event(url)
|
||||
logger.warning("published: " + str(published))
|
||||
event["published"] = published
|
||||
self.add_event(**event)
|
||||
for event in fevent.build_events(url):
|
||||
logger.warning("published: " + str(published))
|
||||
event["published"] = published
|
||||
|
||||
if default_values and "category" in default_values:
|
||||
event["category"] = default_values["category"]
|
||||
self.add_event(default_values, **event)
|
||||
return self.get_structure()
|
||||
else:
|
||||
logger.warning("cannot find any event in page")
|
||||
|
||||
|
||||
return None
|
||||
|
66
src/agenda_culturel/import_tasks/extractor_ggcal_link.py
Normal file
66
src/agenda_culturel/import_tasks/extractor_ggcal_link.py
Normal file
@ -0,0 +1,66 @@
|
||||
from datetime import datetime
|
||||
from bs4 import BeautifulSoup
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from .extractor import *
|
||||
from .generic_extractors import *
|
||||
|
||||
import json
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class GoogleCalendarLinkEventExtractor(Extractor):
    """Extractor for pages embedding an "add to calendar" link.

    Looks for an anchor whose href starts with a known calendar-service
    prefix, then parses the event fields out of that link via GGCalendar.
    """

    def __init__(self):
        super().__init__()
        # href prefixes recognized as calendar links
        self.possible_urls = ["https://calendar.google.com/calendar/", "https://addtocalendar.com/"]

    def extract(
        self, content, url, url_human=None, default_values=None, published=False
    ):
        """Extract at most one event from ``content``; returns the structure
        built by get_structure() (empty when no valid calendar link is found).
        """
        soup = BeautifulSoup(content, "html.parser")

        for ggu in self.possible_urls:

            # anchors whose href begins with the candidate prefix
            link_calendar = soup.select('a[href^="' + ggu + '"]')
            if len(link_calendar) != 0:

                gg_cal = GGCalendar(link_calendar[0]["href"])

                if gg_cal.is_valid_event():
                    start_day = gg_cal.start_day
                    start_time = gg_cal.start_time
                    # NOTE(review): this strips a (possibly non-breaking)
                    # space character from the description — confirm the
                    # intended character survived copy/paste
                    description = gg_cal.description.replace(' ', '')
                    end_day = gg_cal.end_day
                    end_time = gg_cal.end_time
                    location = gg_cal.location
                    title = gg_cal.title
                    url_human = url

                    self.set_header(url)

                    # category is not derivable from the calendar link
                    category = None

                    self.add_event(
                        default_values,
                        title=title,
                        category=category,
                        start_day=start_day,
                        location=location,
                        description=description,
                        tags=[],
                        uuids=[url],
                        recurrences=None,
                        url_human=url_human,
                        start_time=start_time,
                        end_day=end_day,
                        end_time=end_time,
                        published=published,
                        image=None,
                    )

                # only the first matching calendar service is used
                break

        return self.get_structure()
|
@ -27,6 +27,21 @@ class ICALExtractor(Extractor):
|
||||
except:
|
||||
return None
|
||||
|
||||
def guess_image_from_vevent(self, event):
|
||||
item = self.get_item_from_vevent(event, 'ATTACH', raw=True)
|
||||
if item is None:
|
||||
return None
|
||||
|
||||
# it seems that FMTTYPE is not available through python-icalendar
|
||||
if isinstance(item, list):
|
||||
for i in item:
|
||||
if str(i).lower().endswith('.jpg'):
|
||||
return str(i)
|
||||
else:
|
||||
if str(item).lower().endswith('.jpg'):
|
||||
return str(item)
|
||||
return None
|
||||
|
||||
def get_dt_item_from_vevent(self, event, name):
|
||||
item = self.get_item_from_vevent(event, name, raw=True)
|
||||
|
||||
@ -63,7 +78,7 @@ class ICALExtractor(Extractor):
|
||||
|
||||
for event in calendar.walk("VEVENT"):
|
||||
title = self.get_item_from_vevent(event, "SUMMARY")
|
||||
category = self.default_value_if_exists(default_values, "category")
|
||||
category = None
|
||||
|
||||
start_day, start_time = self.get_dt_item_from_vevent(event, "DTSTART")
|
||||
|
||||
@ -76,8 +91,8 @@ class ICALExtractor(Extractor):
|
||||
end_day = end_day + timedelta(days=-1)
|
||||
|
||||
location = self.get_item_from_vevent(event, "LOCATION")
|
||||
if location is None:
|
||||
location = self.default_value_if_exists(default_values, "location")
|
||||
if (not location is None) and location.replace(" ", "") == "":
|
||||
location = None
|
||||
|
||||
description = self.get_item_from_vevent(event, "DESCRIPTION")
|
||||
if description is not None:
|
||||
@ -112,10 +127,16 @@ class ICALExtractor(Extractor):
|
||||
)
|
||||
# possible limitation: if the ordering is not original then related
|
||||
|
||||
tags = self.default_value_if_exists(default_values, "tags")
|
||||
tags = []
|
||||
|
||||
last_modified = self.get_item_from_vevent(event, "LAST-MODIFIED", raw=True)
|
||||
|
||||
image = self.guess_image_from_vevent(event)
|
||||
|
||||
url_event = self.get_item_from_vevent(event, "URL", True)
|
||||
if url_event:
|
||||
url_human = url_event
|
||||
|
||||
recurrence_entries = {}
|
||||
for e in ["RRULE", "EXRULE", "EXDATE", "RDATE"]:
|
||||
i = self.get_item_from_vevent(event, e, raw=True)
|
||||
@ -141,6 +162,7 @@ class ICALExtractor(Extractor):
|
||||
if uuidrel is not None:
|
||||
luuids += [uuidrel]
|
||||
self.add_event(
|
||||
default_values,
|
||||
title,
|
||||
category,
|
||||
start_day,
|
||||
@ -155,6 +177,7 @@ class ICALExtractor(Extractor):
|
||||
end_time=end_time,
|
||||
last_modified=last_modified,
|
||||
published=published,
|
||||
image=image
|
||||
)
|
||||
|
||||
return self.get_structure()
|
||||
@ -164,6 +187,7 @@ class ICALExtractor(Extractor):
|
||||
class ICALNoBusyExtractor(ICALExtractor):
|
||||
def add_event(
|
||||
self,
|
||||
default_values,
|
||||
title,
|
||||
category,
|
||||
start_day,
|
||||
@ -181,8 +205,9 @@ class ICALNoBusyExtractor(ICALExtractor):
|
||||
image=None,
|
||||
image_alt=None,
|
||||
):
|
||||
if title != "Busy":
|
||||
if title != "Busy" and title != "Accueils bénévoles" and title != "Occupé":
|
||||
super().add_event(
|
||||
default_values,
|
||||
title,
|
||||
category,
|
||||
start_day,
|
||||
@ -223,6 +248,7 @@ class ICALNoVCExtractor(ICALExtractor):
|
||||
|
||||
def add_event(
|
||||
self,
|
||||
default_values,
|
||||
title,
|
||||
category,
|
||||
start_day,
|
||||
@ -241,6 +267,7 @@ class ICALNoVCExtractor(ICALExtractor):
|
||||
image_alt=None,
|
||||
):
|
||||
super().add_event(
|
||||
default_values,
|
||||
title,
|
||||
category,
|
||||
start_day,
|
||||
|
@ -2,6 +2,10 @@ from abc import abstractmethod
|
||||
from urllib.parse import urlparse
|
||||
from urllib.parse import parse_qs
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
from .extractor import *
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
@ -14,12 +18,30 @@ class GGCalendar:
|
||||
self.url = url
|
||||
self.extract_info()
|
||||
|
||||
def filter_keys(params):
|
||||
result = {}
|
||||
|
||||
for k, v in params.items():
|
||||
if k.startswith('e[0]'):
|
||||
result[k.replace('e[0][', '')[:-1]] = v
|
||||
else:
|
||||
result[k] = v
|
||||
|
||||
return result
|
||||
|
||||
def is_valid_event(self):
|
||||
return self.start_day is not None and self.title is not None
|
||||
|
||||
def extract_info(self):
|
||||
parsed_url = urlparse(self.url.replace("#", "%23"))
|
||||
params = parse_qs(parsed_url.query)
|
||||
|
||||
params = GGCalendar.filter_keys(params)
|
||||
|
||||
self.location = params["location"][0] if "location" in params else None
|
||||
self.title = params["text"][0] if "text" in params else params["title"][0] if "title" in params else None
|
||||
self.description = params["description"][0] if "description" in params else None
|
||||
self.location = params["location"][0] if "location" in params else None
|
||||
self.title = params["text"][0] if "text" in params else None
|
||||
if "dates" in params:
|
||||
dates = [x.replace(" ", "+") for x in params["dates"][0].split("/")]
|
||||
if len(dates) > 0:
|
||||
@ -33,7 +55,24 @@ class GGCalendar:
|
||||
else:
|
||||
self.end_day = None
|
||||
self.end_time = None
|
||||
elif "date_start" in params:
|
||||
date = parser.parse(params["date_start"][0])
|
||||
self.start_day = date.date()
|
||||
self.start_time = date.time()
|
||||
if "date_end" in params:
|
||||
dateend = parser.parse(params["date_end"][0])
|
||||
if dateend != date:
|
||||
self.end_day = dateend.date()
|
||||
self.end_time = dateend.time()
|
||||
else:
|
||||
self.end_day = None
|
||||
self.end_time = None
|
||||
if self.start_time == datetime.time(0):
|
||||
self.start_time = None
|
||||
|
||||
else:
|
||||
self.end_day = None
|
||||
self.end_time = None
|
||||
else:
|
||||
raise Exception("Unable to find a date in google calendar URL")
|
||||
self.start_day = None
|
||||
@ -47,6 +86,7 @@ class GGCalendar:
|
||||
# - then for each document downloaded from these urls, build the events
|
||||
# This class is an abstract class
|
||||
class TwoStepsExtractor(Extractor):
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.event_urls = None
|
||||
@ -96,6 +136,7 @@ class TwoStepsExtractor(Extractor):
|
||||
|
||||
def add_event_with_props(
|
||||
self,
|
||||
default_values,
|
||||
event_url,
|
||||
title,
|
||||
category,
|
||||
@ -129,6 +170,7 @@ class TwoStepsExtractor(Extractor):
|
||||
location = self.event_properties[event_url]["location"]
|
||||
|
||||
self.add_event(
|
||||
default_values,
|
||||
title,
|
||||
category,
|
||||
start_day,
|
||||
@ -170,7 +212,9 @@ class TwoStepsExtractor(Extractor):
|
||||
default_values=None,
|
||||
published=False,
|
||||
only_future=True,
|
||||
ignore_404=True
|
||||
):
|
||||
|
||||
self.only_future = only_future
|
||||
self.now = datetime.datetime.now().date()
|
||||
self.set_header(url)
|
||||
@ -194,10 +238,39 @@ class TwoStepsExtractor(Extractor):
|
||||
# first download the content associated with this link
|
||||
content_event = self.downloader.get_content(event_url)
|
||||
if content_event is None:
|
||||
raise Exception(_("Cannot extract event from url {}").format(event_url))
|
||||
# then extract event information from this html document
|
||||
self.add_event_from_content(
|
||||
content_event, event_url, url_human, default_values, published
|
||||
)
|
||||
msg = "Cannot extract event from url {}".format(event_url)
|
||||
if ignore_404:
|
||||
logger.error(msg)
|
||||
else:
|
||||
raise Exception(msg)
|
||||
else:
|
||||
# then extract event information from this html document
|
||||
self.add_event_from_content(
|
||||
content_event, event_url, url_human, default_values, published
|
||||
)
|
||||
|
||||
return self.get_structure()
|
||||
|
||||
|
||||
class TwoStepsExtractorNoPause(TwoStepsExtractor):
|
||||
|
||||
def extract(
|
||||
self,
|
||||
content,
|
||||
url,
|
||||
url_human=None,
|
||||
default_values=None,
|
||||
published=False,
|
||||
only_future=True,
|
||||
ignore_404=True
|
||||
):
|
||||
if hasattr(self.downloader, "pause"):
|
||||
pause = self.downloader.pause
|
||||
else:
|
||||
pause = False
|
||||
self.downloader.pause = False
|
||||
result = super().extract(content, url, url_human, default_values, published, only_future, ignore_404)
|
||||
self.downloader.pause = pause
|
||||
|
||||
return result
|
||||
|
||||
|
@ -1,6 +1,11 @@
|
||||
from .downloader import *
|
||||
from .extractor import *
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
||||
class URL2Events:
|
||||
def __init__(
|
||||
@ -13,7 +18,10 @@ class URL2Events:
|
||||
def process(
|
||||
self, url, url_human=None, cache=None, default_values=None, published=False
|
||||
):
|
||||
content = self.downloader.get_content(url, cache)
|
||||
referer = ""
|
||||
if self.extractor:
|
||||
referer = self.extractor.url_referer
|
||||
content = self.downloader.get_content(url, cache, referer=referer)
|
||||
|
||||
if content is None:
|
||||
return None
|
||||
@ -26,8 +34,9 @@ class URL2Events:
|
||||
else:
|
||||
# if the extractor is not defined, use a list of default extractors
|
||||
for e in Extractor.get_default_extractors(self.single_event):
|
||||
logger.warning('Extractor::' + type(e).__name__)
|
||||
e.set_downloader(self.downloader)
|
||||
events = e.extract(content, url, url_human, default_values, published)
|
||||
if events is not None:
|
||||
if events is not None and len(events) > 0:
|
||||
return events
|
||||
return None
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -20,5 +20,5 @@ class Migration(migrations.Migration):
|
||||
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(forwards_func),
|
||||
migrations.RunPython(forwards_func, reverse_code=forwards_func),
|
||||
]
|
@ -10,6 +10,11 @@ def groups_permissions_creation(apps, schema_editor):
|
||||
for name in user_roles:
|
||||
Group.objects.create(name=name)
|
||||
|
||||
def groups_permissions_deletion(apps, schema_editor):
|
||||
user_roles = ["Automation Manager", "Q&A Manager", "Receptionist"]
|
||||
|
||||
for name in user_roles:
|
||||
Group.objects.filter(name=name).delete()
|
||||
|
||||
|
||||
|
||||
@ -21,5 +26,5 @@ class Migration(migrations.Migration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(groups_permissions_creation),
|
||||
migrations.RunPython(groups_permissions_creation, reverse_code=groups_permissions_deletion),
|
||||
]
|
||||
|
@ -31,6 +31,12 @@ def update_groups_permissions(apps, schema_editor):
|
||||
Group.objects.get(name="Receptionist").permissions.add(*receptionist_perms)
|
||||
Group.objects.get(name="Receptionist").permissions.add(*read_mod_perms)
|
||||
|
||||
def update_groups_delete(apps, schema_editor):
|
||||
user_roles = ["Moderator"]
|
||||
|
||||
for name in user_roles:
|
||||
Group.objects.filter(name=name).delete()
|
||||
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
@ -40,5 +46,5 @@ class Migration(migrations.Migration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(update_groups_permissions),
|
||||
migrations.RunPython(update_groups_permissions, reverse_code=update_groups_delete),
|
||||
]
|
||||
|
@ -15,6 +15,9 @@ def update_groups_permissions(apps, schema_editor):
|
||||
Group.objects.get(name="Q&A Manager").permissions.add(*qanda_perms)
|
||||
Group.objects.get(name="Q&A Manager").permissions.add(*read_mod_perms)
|
||||
|
||||
def no_permission_change(apps, schema_editor):
|
||||
pass
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@ -23,5 +26,5 @@ class Migration(migrations.Migration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(update_groups_permissions),
|
||||
migrations.RunPython(update_groups_permissions, reverse_code=no_permission_change),
|
||||
]
|
||||
|
@ -11,7 +11,8 @@ def update_groups_permissions(apps, schema_editor):
|
||||
mod_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model == 'moderationquestion' and i.codename.startswith('use_')]
|
||||
Group.objects.get(name="Moderator").permissions.add(*mod_perms)
|
||||
|
||||
|
||||
def no_permission_change(apps, schema_editor):
|
||||
pass
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@ -20,5 +21,5 @@ class Migration(migrations.Migration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(update_groups_permissions),
|
||||
migrations.RunPython(update_groups_permissions, reverse_code=no_permission_change),
|
||||
]
|
||||
|
@ -16,6 +16,11 @@ def update_groups_permissions(apps, schema_editor):
|
||||
editor_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model == 'staticcontent']
|
||||
Group.objects.get(name="Static content editor").permissions.add(*editor_perms)
|
||||
|
||||
def update_groups_delete(apps, schema_editor):
|
||||
user_roles = ["Static content editor"]
|
||||
|
||||
for name in user_roles:
|
||||
Group.objects.filter(name=name).delete()
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@ -24,5 +29,5 @@ class Migration(migrations.Migration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(update_groups_permissions),
|
||||
migrations.RunPython(update_groups_permissions, reverse_code=update_groups_delete),
|
||||
]
|
||||
|
@ -1,9 +1,10 @@
|
||||
# Generated by Django 4.2.7 on 2024-04-27 16:29
|
||||
|
||||
from django.db import migrations
|
||||
from django.contrib.auth.models import Group, Permission
|
||||
|
||||
def update_groups_permissions(apps, schema_editor):
|
||||
Group = apps.get_model("auth", "Group")
|
||||
Permission = apps.get_model("auth", "Permission")
|
||||
|
||||
all_perms = Permission.objects.all()
|
||||
|
||||
@ -11,6 +12,9 @@ def update_groups_permissions(apps, schema_editor):
|
||||
moderator_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model in ['place']]
|
||||
Group.objects.get(name="Moderator").permissions.add(*moderator_perms)
|
||||
|
||||
def no_permission_change(apps, schema_editor):
|
||||
pass
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
@ -18,5 +22,5 @@ class Migration(migrations.Migration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(update_groups_permissions),
|
||||
migrations.RunPython(update_groups_permissions, reverse_code=no_permission_change),
|
||||
]
|
||||
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.7 on 2024-06-02 12:01
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0063_alter_event_exact_location'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', 'la puce à loreille'), ('Plugin wordpress MEC', 'Plugin wordpress MEC')], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
]
|
17
src/agenda_culturel/migrations/0065_alter_place_options.py
Normal file
17
src/agenda_culturel/migrations/0065_alter_place_options.py
Normal file
@ -0,0 +1,17 @@
|
||||
# Generated by Django 4.2.7 on 2024-08-13 13:08
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0064_alter_recurrentimport_processor'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='place',
|
||||
options={'ordering': ['name'], 'verbose_name': 'Place', 'verbose_name_plural': 'Places'},
|
||||
),
|
||||
]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.7 on 2024-08-17 09:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0065_alter_place_options'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='batchimportation',
|
||||
name='url_source',
|
||||
field=models.URLField(blank=True, editable=False, help_text='Source URL if no RecurrentImport is associated.', max_length=1024, null=True, verbose_name='URL (if not recurrent import)'),
|
||||
),
|
||||
]
|
@ -0,0 +1,19 @@
|
||||
# Generated by Django 4.2.7 on 2024-08-17 11:23
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0066_batchimportation_url_source'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='categorisationrule',
|
||||
name='place',
|
||||
field=models.ForeignKey(blank=True, help_text='Location from place', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.place', verbose_name='Place'),
|
||||
),
|
||||
]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.7 on 2024-08-28 21:42
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0067_categorisationrule_place'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page")], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.7 on 2024-08-28 23:34
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0068_alter_recurrentimport_processor'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='downloader',
|
||||
field=models.CharField(choices=[('simple', 'simple'), ('chromium headless', 'Headless Chromium'), ('chromium (pause)', 'Headless Chromium (pause)')], default='simple', max_length=20, verbose_name='Downloader'),
|
||||
),
|
||||
]
|
@ -0,0 +1,29 @@
|
||||
# Generated by Django 4.2.9 on 2024-08-29 19:16
|
||||
|
||||
from django.db import migrations
|
||||
import django_ckeditor_5.fields
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0069_alter_recurrentimport_downloader'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='contactmessage',
|
||||
name='comments',
|
||||
field=django_ckeditor_5.fields.CKEditor5Field(blank=True, default='', help_text='Comments on the message from the moderation team', null=True, verbose_name='Comments'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='contactmessage',
|
||||
name='message',
|
||||
field=django_ckeditor_5.fields.CKEditor5Field(help_text='Your message', verbose_name='Message'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='staticcontent',
|
||||
name='text',
|
||||
field=django_ckeditor_5.fields.CKEditor5Field(help_text='Text as shown to the visitors', verbose_name='Content'),
|
||||
),
|
||||
]
|
@ -0,0 +1,24 @@
|
||||
# Generated by Django 4.2.9 on 2024-08-29 19:58
|
||||
|
||||
from django.db import migrations
|
||||
import django_ckeditor_5.fields
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0070_alter_contactmessage_comments_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='contactmessage',
|
||||
name='message',
|
||||
field=django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Your message', verbose_name='Message'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='staticcontent',
|
||||
name='text',
|
||||
field=django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Text as shown to the visitors', verbose_name='Content'),
|
||||
),
|
||||
]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.9 on 2024-09-04 21:39
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0071_alter_contactmessage_message_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page"), ('cour3coquins', 'la cour des 3 coquins')], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
]
|
18
src/agenda_culturel/migrations/0073_alter_event_location.py
Normal file
18
src/agenda_culturel/migrations/0073_alter_event_location.py
Normal file
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.9 on 2024-09-04 21:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0072_alter_recurrentimport_processor'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='event',
|
||||
name='location',
|
||||
field=models.CharField(blank=True, default='', help_text='Address of the event in case its not available in the already known places (free form)', max_length=512, null=True, verbose_name='Location (free form)'),
|
||||
),
|
||||
]
|
@ -0,0 +1,29 @@
|
||||
# Generated by Django 4.2.9 on 2024-09-14 12:57
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0073_alter_event_location'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='category',
|
||||
name='pictogram',
|
||||
field=models.ImageField(blank=True, help_text='Pictogram of the category', max_length=1024, null=True, upload_to='', verbose_name='Pictogram'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='event',
|
||||
name='category',
|
||||
field=models.ForeignKey(default=None, help_text='Category of the event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='defaultCategory',
|
||||
field=models.ForeignKey(default=None, help_text='Category of each imported event', on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
|
||||
),
|
||||
]
|
@ -0,0 +1,24 @@
|
||||
# Generated by Django 4.2.9 on 2024-09-14 13:18
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0074_category_pictogram_alter_event_category_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='event',
|
||||
name='category',
|
||||
field=models.ForeignKey(default=1, help_text='Category of the event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='defaultCategory',
|
||||
field=models.ForeignKey(default=1, help_text='Category of each imported event', on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
|
||||
),
|
||||
]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.9 on 2024-09-14 17:47
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0075_alter_event_category_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='category',
|
||||
name='pictogram',
|
||||
field=models.FileField(blank=True, help_text='Pictogram of the category (svg format)', max_length=1024, null=True, upload_to='', verbose_name='Pictogram'),
|
||||
),
|
||||
]
|
@ -0,0 +1,19 @@
|
||||
# Generated by Django 4.2.9 on 2024-09-14 20:05
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0076_alter_category_pictogram'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='category',
|
||||
name='position',
|
||||
field=models.IntegerField(default=0, verbose_name='Position for ordering categories'),
|
||||
),
|
||||
]
|
@ -0,0 +1,24 @@
|
||||
# Generated by Django 4.2.9 on 2024-09-14 13:18
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0077_category_position_alter_event_category_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='event',
|
||||
name='category',
|
||||
field=models.ForeignKey(default=1, help_text='Category of the event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='defaultCategory',
|
||||
field=models.ForeignKey(default=1, help_text='Category of each imported event', on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
|
||||
),
|
||||
]
|
18
src/agenda_culturel/migrations/0079_contactmessage_spam.py
Normal file
18
src/agenda_culturel/migrations/0079_contactmessage_spam.py
Normal file
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-09 16:31
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0078_alter_event_category_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='contactmessage',
|
||||
name='spam',
|
||||
field=models.BooleanField(default=False, help_text='This message is a spam.', verbose_name='Spam'),
|
||||
),
|
||||
]
|
20
src/agenda_culturel/migrations/0080_place_location_pt.py
Normal file
20
src/agenda_culturel/migrations/0080_place_location_pt.py
Normal file
@ -0,0 +1,20 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-10 20:34
|
||||
|
||||
import django.contrib.gis.geos.point
|
||||
from django.db import migrations
|
||||
import location_field.models.spatial
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0079_contactmessage_spam'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='place',
|
||||
name='location_pt',
|
||||
field=location_field.models.spatial.LocationField(default=django.contrib.gis.geos.point.Point(45.783329, 3.08333), srid=4326),
|
||||
),
|
||||
]
|
38
src/agenda_culturel/migrations/0081_auto_20241010_2235.py
Normal file
38
src/agenda_culturel/migrations/0081_auto_20241010_2235.py
Normal file
@ -0,0 +1,38 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-10 20:35
|
||||
|
||||
from django.db import migrations
|
||||
from django.contrib.gis.geos import Point
|
||||
|
||||
def change_coord_format(apps, schema_editor):
|
||||
Place = apps.get_model("agenda_culturel", "Place")
|
||||
places = Place.objects.values("location", "location_pt").all()
|
||||
|
||||
for p in places:
|
||||
l = p.location.split(',')
|
||||
if len(l) == 2:
|
||||
p.location_pt = Point(float(l[1]), float(l[0]))
|
||||
else:
|
||||
p.location_pt = Point(3.08333, 45.783329)
|
||||
p.save(update_fields=["location_pt"])
|
||||
|
||||
def reverse_coord_format(apps, schema_editor):
|
||||
Place = apps.get_model("agenda_culturel", "Place")
|
||||
places = Place.objects.values("location", "location_pt").all()
|
||||
|
||||
for p in places:
|
||||
p.location = ','.join([p.location_pt[1], p.location_pt[0]])
|
||||
p.save(update_fields=["location"])
|
||||
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0080_place_location_pt'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(change_coord_format, reverse_code=reverse_coord_format),
|
||||
]
|
||||
|
||||
|
@ -0,0 +1,20 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-10 21:15
|
||||
|
||||
import django.contrib.gis.geos.point
|
||||
from django.db import migrations
|
||||
import location_field.models.spatial
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0081_auto_20241010_2235'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='place',
|
||||
name='location_pt',
|
||||
field=location_field.models.spatial.LocationField(default=django.contrib.gis.geos.point.Point(3.08333, 45.783329), srid=4326),
|
||||
),
|
||||
]
|
17
src/agenda_culturel/migrations/0083_remove_place_location.py
Normal file
17
src/agenda_culturel/migrations/0083_remove_place_location.py
Normal file
@ -0,0 +1,17 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-10 21:15
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0082_alter_place_location_pt'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='place',
|
||||
name='location',
|
||||
),
|
||||
]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-10 21:15

from django.db import migrations


class Migration(migrations.Migration):
    """Rename ``location_pt`` to ``location`` now that the old textual
    field is gone."""

    dependencies = [
        ("agenda_culturel", "0083_remove_place_location"),
    ]

    operations = [
        migrations.RenameField(
            model_name="place",
            old_name="location_pt",
            new_name="location",
        ),
    ]
|
@ -0,0 +1,24 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-12 14:45

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Switch category foreign keys to ``SET_DEFAULT`` on deletion."""

    dependencies = [
        ("agenda_culturel", "0084_rename_location_pt_place_location"),
    ]

    operations = [
        migrations.AlterField(
            model_name="event",
            name="category",
            field=models.ForeignKey(
                default=None,
                help_text="Category of the event",
                null=True,
                on_delete=django.db.models.deletion.SET_DEFAULT,
                to="agenda_culturel.category",
                verbose_name="Category",
            ),
        ),
        migrations.AlterField(
            model_name="recurrentimport",
            name="defaultCategory",
            field=models.ForeignKey(
                default=None,
                help_text="Category of each imported event",
                on_delete=django.db.models.deletion.SET_DEFAULT,
                to="agenda_culturel.category",
                verbose_name="Category",
            ),
        ),
    ]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-16 09:04

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add the 'cour3coquins' processor to the recurrent import choices."""

    dependencies = [
        ("agenda_culturel", "0085_alter_event_category_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="recurrentimport",
            name="processor",
            field=models.CharField(
                choices=[
                    ("ical", "ical"),
                    ("icalnobusy", "ical no busy"),
                    ("icalnovc", "ical no VC"),
                    ("lacoope", "lacoope.org"),
                    ("lacomedie", "la comédie"),
                    ("lefotomat", "le fotomat"),
                    ("lapucealoreille", "la puce à l'oreille"),
                    ("Plugin wordpress MEC", "Plugin wordpress MEC"),
                    ("Facebook events", "Événements d'une page FB"),
                    ("cour3coquins", "la cour des 3 coquins"),
                ],
                default="ical",
                max_length=20,
                verbose_name="Processor",
            ),
        ),
    ]
|
24
src/agenda_culturel/migrations/0087_referencelocation.py
Normal file
24
src/agenda_culturel/migrations/0087_referencelocation.py
Normal file
@ -0,0 +1,24 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-16 12:55

import django.contrib.gis.geos.point
from django.db import migrations, models
import location_field.models.spatial


class Migration(migrations.Migration):
    """Create the ``ReferenceLocation`` model (named map locations)."""

    dependencies = [
        ("agenda_culturel", "0086_alter_recurrentimport_processor"),
    ]

    operations = [
        migrations.CreateModel(
            name="ReferenceLocation",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "name",
                    models.CharField(
                        help_text="Name of the location", verbose_name="Name"
                    ),
                ),
                (
                    "location",
                    location_field.models.spatial.LocationField(
                        default=django.contrib.gis.geos.point.Point(
                            3.08333, 45.783329
                        ),
                        srid=4326,
                    ),
                ),
                (
                    "main",
                    models.BooleanField(
                        default=False,
                        help_text="This location is one of the main locations (shown first).",
                        verbose_name="Main",
                    ),
                ),
            ],
        ),
    ]
|
@ -0,0 +1,22 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-16 18:11

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add verbose names to ReferenceLocation and make its name unique."""

    dependencies = [
        ("agenda_culturel", "0087_referencelocation"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="referencelocation",
            options={
                "verbose_name": "Reference location",
                "verbose_name_plural": "Reference locations",
            },
        ),
        migrations.AlterField(
            model_name="referencelocation",
            name="name",
            field=models.CharField(
                help_text="Name of the location",
                unique=True,
                verbose_name="Name",
            ),
        ),
    ]
|
@ -0,0 +1,19 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-17 08:27

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Allow the recurrent import's default category to be blank/null."""

    dependencies = [
        ("agenda_culturel", "0088_alter_referencelocation_options_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="recurrentimport",
            name="defaultCategory",
            field=models.ForeignKey(
                blank=True,
                default=None,
                help_text="Category of each imported event",
                null=True,
                on_delete=django.db.models.deletion.SET_DEFAULT,
                to="agenda_culturel.category",
                verbose_name="Category",
            ),
        ),
    ]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-19 13:24

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add the 'arachnee' processor to the recurrent import choices."""

    dependencies = [
        ("agenda_culturel", "0089_alter_recurrentimport_defaultcategory"),
    ]

    operations = [
        migrations.AlterField(
            model_name="recurrentimport",
            name="processor",
            field=models.CharField(
                choices=[
                    ("ical", "ical"),
                    ("icalnobusy", "ical no busy"),
                    ("icalnovc", "ical no VC"),
                    ("lacoope", "lacoope.org"),
                    ("lacomedie", "la comédie"),
                    ("lefotomat", "le fotomat"),
                    ("lapucealoreille", "la puce à l'oreille"),
                    ("Plugin wordpress MEC", "Plugin wordpress MEC"),
                    ("Facebook events", "Événements d'une page FB"),
                    ("cour3coquins", "la cour des 3 coquins"),
                    ("arachnee", "Arachnée concert"),
                ],
                default="ical",
                max_length=20,
                verbose_name="Processor",
            ),
        ),
    ]
|
@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-20 11:38

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add the legacy duplicated-events flags: group ``fixed`` and event
    ``masked`` (later replaced by ``representative`` in 0102/0103)."""

    dependencies = [
        ("agenda_culturel", "0090_alter_recurrentimport_processor"),
    ]

    operations = [
        migrations.AddField(
            model_name="duplicatedevents",
            name="fixed",
            field=models.BooleanField(
                blank=True,
                default=False,
                help_text="This duplicated events is fixed, ie exactly one of the listed events is not masked.",
                null=True,
                verbose_name="Fixed",
            ),
        ),
        migrations.AddField(
            model_name="event",
            name="masked",
            field=models.BooleanField(
                blank=True,
                default=False,
                help_text="This event is masked by a duplicated version.",
                null=True,
                verbose_name="Masked",
            ),
        ),
    ]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-30 14:31

from django.db import migrations, models


class Migration(migrations.Migration):
    """Document the semantics of the categorisation rule weight."""

    dependencies = [
        ("agenda_culturel", "0091_duplicatedevents_fixed_event_masked"),
    ]

    operations = [
        migrations.AlterField(
            model_name="categorisationrule",
            name="weight",
            field=models.IntegerField(
                default=1,
                help_text="The lower is the weight, the earlier the filter is applied",
                verbose_name="Weight",
            ),
        ),
    ]
|
22
src/agenda_culturel/migrations/0093_tag.py
Normal file
22
src/agenda_culturel/migrations/0093_tag.py
Normal file
@ -0,0 +1,22 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-30 17:52

from django.db import migrations, models


class Migration(migrations.Migration):
    """Create the ``Tag`` model."""

    dependencies = [
        ("agenda_culturel", "0092_alter_categorisationrule_weight"),
    ]

    operations = [
        migrations.CreateModel(
            name="Tag",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "name",
                    models.CharField(
                        help_text="Tag name", max_length=512, verbose_name="Name"
                    ),
                ),
                (
                    "description",
                    models.TextField(
                        blank=True,
                        help_text="Description of the tag",
                        null=True,
                        verbose_name="Description",
                    ),
                ),
                (
                    "principal",
                    models.BooleanField(
                        default=True,
                        help_text="This tag is highlighted as a main tag for visitors, particularly in the filter.",
                        verbose_name="Principal",
                    ),
                ),
            ],
        ),
    ]
|
33
src/agenda_culturel/migrations/0094_auto_20241030_2002.py
Normal file
33
src/agenda_culturel/migrations/0094_auto_20241030_2002.py
Normal file
@ -0,0 +1,33 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-30 19:02

from django.db import migrations
# NOTE(review): this migration imports the live auth models instead of
# using apps.get_model("auth", ...); Django recommends historical models
# inside migrations — confirm this was intentional before changing it.
from django.contrib.auth.models import Group, Permission


def update_groups_permissions(apps, schema_editor):
    """Create the "Tag editor" group and grant it every permission on the
    Tag model of this app."""
    # first add a missing role
    user_roles = ["Tag editor"]

    for name in user_roles:
        Group.objects.create(name=name)

    all_perms = Permission.objects.all()

    # set permissions for moderators
    # keep only permissions attached to the agenda_culturel Tag model
    editor_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model == 'tag']
    Group.objects.get(name="Tag editor").permissions.add(*editor_perms)


def update_groups_delete(apps, schema_editor):
    """Reverse step: remove the "Tag editor" group (permissions are
    detached automatically with the group)."""
    user_roles = ["Tag editor"]

    for name in user_roles:
        Group.objects.filter(name=name).delete()


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0093_tag'),
    ]

    operations = [
        migrations.RunPython(update_groups_permissions, reverse_code=update_groups_delete),
    ]
|
19
src/agenda_culturel/migrations/0095_alter_tag_description.py
Normal file
19
src/agenda_culturel/migrations/0095_alter_tag_description.py
Normal file
@ -0,0 +1,19 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-30 19:11

from django.db import migrations
import django_ckeditor_5.fields


class Migration(migrations.Migration):
    """Upgrade the tag description to a rich-text (CKEditor5) field."""

    dependencies = [
        ("agenda_culturel", "0094_auto_20241030_2002"),
    ]

    operations = [
        migrations.AlterField(
            model_name="tag",
            name="description",
            field=django_ckeditor_5.fields.CKEditor5Field(
                blank=True,
                help_text="Description of the tag",
                null=True,
                verbose_name="Description",
            ),
        ),
    ]
|
18
src/agenda_culturel/migrations/0096_alter_tag_name.py
Normal file
18
src/agenda_culturel/migrations/0096_alter_tag_name.py
Normal file
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.9 on 2024-10-30 20:42

from django.db import migrations, models


class Migration(migrations.Migration):
    """Enforce uniqueness of tag names."""

    dependencies = [
        ("agenda_culturel", "0095_alter_tag_description"),
    ]

    operations = [
        migrations.AlterField(
            model_name="tag",
            name="name",
            field=models.CharField(
                help_text="Tag name",
                max_length=512,
                unique=True,
                verbose_name="Name",
            ),
        ),
    ]
|
@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-01 22:07

from django.db import migrations, models


class Migration(migrations.Migration):
    """Make category ``alt_name`` and ``codename`` optional before their
    removal in 0098."""

    dependencies = [
        ("agenda_culturel", "0096_alter_tag_name"),
    ]

    operations = [
        migrations.AlterField(
            model_name="category",
            name="alt_name",
            field=models.CharField(
                blank=True,
                help_text="Alternative name used with a time period",
                max_length=512,
                null=True,
                verbose_name="Alternative Name",
            ),
        ),
        migrations.AlterField(
            model_name="category",
            name="codename",
            field=models.CharField(
                blank=True,
                help_text="Short name of the category",
                max_length=3,
                null=True,
                verbose_name="Short name",
            ),
        ),
    ]
|
@ -0,0 +1,21 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-01 22:08

from django.db import migrations


class Migration(migrations.Migration):
    """Drop the unused ``alt_name`` and ``codename`` category fields."""

    dependencies = [
        ("agenda_culturel", "0097_alter_category_alt_name_alter_category_codename"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="category",
            name="alt_name",
        ),
        migrations.RemoveField(
            model_name="category",
            name="codename",
        ),
    ]
|
202
src/agenda_culturel/migrations/0099_update_categories.py
Normal file
202
src/agenda_culturel/migrations/0099_update_categories.py
Normal file
@ -0,0 +1,202 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-01 14:22
|
||||
|
||||
from django.db import migrations
|
||||
import os.path
|
||||
|
||||
class SimpleCat:
    """Lightweight, in-migration description of a category.

    Used by the 0099 data migration to describe both the old and the new
    category sets and how old categories map onto new ones.

    Attributes:
        name: category name.
        color: hex color string.
        pictogram: file name of the pictogram (under ./images/).
        position: display position.
        transfered_to: destination category; either a single category name
            or a dict mapping an event tag to a category name ("" is the
            fallback entry).
        transtag: tag to add on events moved into this category.
    """

    def __init__(self,
                 name=None, color=None,
                 pictogram=None, position=None,
                 transfered_to=None,
                 transtag=None):
        # fix: the original declared ``self=None``, giving the instance
        # parameter a meaningless default
        self.name = name
        self.color = color
        self.pictogram = pictogram
        self.position = position
        self.transfered_to = transfered_to
        # cache of resolved Category objects, keyed by category name
        self.transfered_to_object = {}
        self.transtag = transtag

    def get_transfered_category(self, e):
        """Return the destination category name for event ``e``.

        If ``e`` carries one of the mapped tags (the empty-string key is
        excluded), the matching category name is returned; otherwise the
        fallback mapped to "" is returned, or None if there is none.
        """
        # we check if the given event has a corresponding tag (except empty string)
        if e is not None:
            for t, c in self.transfered_to.items():
                # guard against events whose tags field is None
                if t != "" and e.tags and t in e.tags:
                    return c

        return self.transfered_to.get("")

    def get_transfered_to_object(self, apps, e=None):
        """Resolve the destination as a (Category object, tag) pair.

        Category lookups are cached in ``transfered_to_object``. Returns
        (None, None) when no destination is configured.
        """
        if self.transfered_to is None:
            return None, None
        Category = apps.get_model("agenda_culturel", "Category")

        if isinstance(self.transfered_to, dict):
            cname = self.get_transfered_category(e)
        else:
            cname = self.transfered_to

        if cname not in self.transfered_to_object:
            self.transfered_to_object[cname] = Category.objects.filter(name=cname).first()

        return self.transfered_to_object[cname], self.transtag

    def get_pictogram_file(self):
        """Open the bundled pictogram and wrap it in a Django File.

        The handle is intentionally left open: Django reads from it when
        the File is saved to the model field.
        """
        from django.core.files import File
        f = open(os.path.dirname(__file__) + "/images/" + self.pictogram, "rb")
        return File(name=self.pictogram, file=f)
|
||||
|
||||
# Color selection
# https://colorkit.co/color-palette-generator/4cae4f-ff9900-2094f3-9b27b0-ffec3d-ff5724-795649-4051b5-009485/
# #4cae4f, #ff9900, #2094f3, #9b27b0, #ffec3d, #ff5724, #795649, #4051b5, #009485

# Categories that exist in both the old and the new sets; only their
# color/position/pictogram change between the two variants.
preserved = {
    "Nature": {
        "old": SimpleCat("Nature", color="#27AEEF", pictogram="leaf.svg", position=8),
        "new": SimpleCat("Nature", color="#4cae4f", pictogram="leaf.svg", position=8)
    },
    "Cinéma": {
        "old": SimpleCat("Cinéma", color="#EDE15B", pictogram="theater.svg", position=5),
        "new": SimpleCat("Cinéma", color="#ff9900", pictogram="theater.svg", position=4),
    },
    "Sans catégorie": {
        "old": SimpleCat("Sans catégorie", color="#AAAAAA", pictogram="calendar.svg", position=100),
        "new": SimpleCat("Sans catégorie", color="#AAAAAA", pictogram="calendar.svg", position=100),
    }
}

# Positional arguments below map to:
# SimpleCat(name, color, pictogram, position, transfered_to, transtag)

# Categories removed by this migration; transfered_to names the new
# category that absorbs their events, transtag the tag added to them.
old_cats = [
    SimpleCat("Conférence", "#87BC45", "school-outline.svg", 7, "Rencontres & Débats", "conférence"),
    SimpleCat("Exposition", "#BDCF32", "warehouse.svg", 6, "Visites & Expositions", "exposition"),
    SimpleCat("Arts du spectacle", "#EDBF33", "track-light.svg", 4, "Spectacles"),
    SimpleCat("Danse", "#EF9B20", "dance-ballroom.svg", 3, "Spectacles", "danse"),
    SimpleCat("Concert", "#F46A9B", "account-music-outline.svg", 2, "Fêtes & Concerts", "concert"),
    SimpleCat("Théâtre", "#EA5545", "drama-masks.svg", 1, "Spectacles", "théâtre")
]

# Categories introduced by this migration; when transfered_to is a dict it
# maps an event tag back to the old category for the reverse migration
# ("" is the fallback).
new_cats = [
    SimpleCat("Fêtes & Concerts", "#ff5724", "party-popper.svg", 1, {"concert": "Concert", "": "Sans catégorie"}),
    SimpleCat("Spectacles", "#edbf33", "track-light.svg", 2, {"théâtre": "Théâtre", "danse": "Danse", "": "Arts du spectacle"}),
    SimpleCat("Rencontres & Débats", "#9b27b0", "workshop.svg", 3, {"conférence": "Conférence", "": "Sans catégorie"}),
    SimpleCat("Animations & Ateliers", "#4051b5", "tools.svg", 5, "Sans catégorie"),
    SimpleCat("Rendez-vous locaux", "#2094f3", "ferris-wheel.svg", 6, "Sans catégorie"),
    SimpleCat("Visites & Expositions", "#795649", "compass-outline.svg", 7, {"exposition": "Exposition", "": "Sans catégorie"}),
]
|
||||
|
||||
def create_categories(apps, catlist):
    """Bulk-create a Category row for every SimpleCat in ``catlist``."""
    Category = apps.get_model("agenda_culturel", "Category")

    # only create new categories if old ones are present to avoid filling
    # an empty database with ghost categories
    if Category.objects.count() > 1:
        Category.objects.bulk_create(
            [
                Category(
                    name=c.name,
                    color=c.color,
                    position=c.position,
                    pictogram=c.get_pictogram_file(),
                )
                for c in catlist
            ]
        )


def delete_categories(apps, catlist):
    """Delete every Category whose name appears in ``catlist``."""
    Category = apps.get_model("agenda_culturel", "Category")
    names = [c.name for c in catlist]
    Category.objects.filter(name__in=names).delete()


def create_new_categories(apps, schema_editor):
    """RunPython forward: create the new category set."""
    create_categories(apps, new_cats)


def delete_new_categories(apps, schema_editor):
    """RunPython reverse: delete the new category set."""
    delete_categories(apps, new_cats)


def create_old_categories(apps, schema_editor):
    """RunPython reverse: re-create the old category set."""
    create_categories(apps, old_cats)


def delete_old_categories(apps, schema_editor):
    """RunPython forward: delete the old category set."""
    delete_categories(apps, old_cats)
|
||||
|
||||
|
||||
|
||||
def update_preserved_categories(apps, dest):
    """Switch color/position/pictogram of the preserved categories to
    their ``dest`` variant ("new" or "old")."""
    other = "new" if dest == "old" else "old"
    Category = apps.get_model("agenda_culturel", "Category")

    updated = []
    for cat in Category.objects.filter(name__in=preserved.keys()):
        variant = preserved[cat.name][dest]
        cat.color = variant.color
        cat.position = variant.position
        # only reopen the pictogram file when it actually differs
        if variant.pictogram != preserved[cat.name][other].pictogram:
            cat.pictogram = variant.get_pictogram_file()
        updated.append(cat)

    Category.objects.bulk_update(updated, fields=["color", "position", "pictogram"])


def update_preserved_categories_new(apps, schema_editor):
    """RunPython forward: apply the "new" variant."""
    update_preserved_categories(apps, "new")


def update_preserved_categories_old(apps, schema_editor):
    """RunPython reverse: apply the "old" variant."""
    update_preserved_categories(apps, "old")
|
||||
|
||||
|
||||
def update_database(apps, cats):
    """Re-point every Event, CategorisationRule and RecurrentImport whose
    category is in ``cats`` to that category's transfer destination,
    tagging events along the way."""
    # map category name -> SimpleCat describing its destination
    convert = dict([(c.name, c) for c in cats])

    # update events
    Event = apps.get_model("agenda_culturel", "Event")
    events = Event.objects.all()
    uevents = []
    for e in events:
        if e.category and e.category.name in convert.keys():
            # destination may depend on the event's tags (dict mapping)
            cat, tag = convert[e.category.name].get_transfered_to_object(apps, e)
            e.category = cat
            if tag:
                # record the old category as a tag on the event
                if e.tags is None:
                    e.tags = [tag]
                else:
                    if not tag in e.tags:
                        e.tags.append(tag)
            uevents.append(e)
    Event.objects.bulk_update(uevents, fields=["category", "tags"])

    # update categorisation rules
    CategorisationRule = apps.get_model("agenda_culturel", "CategorisationRule")
    crules = CategorisationRule.objects.all()
    ucrules = []
    for r in crules:
        if r.category and r.category.name in convert.keys():
            # no event context here, so the "" fallback destination is used
            r.category, tag = convert[r.category.name].get_transfered_to_object(apps)
            ucrules.append(r)
    CategorisationRule.objects.bulk_update(ucrules, fields=["category"])

    # update recurrent import
    RecurrentImport = apps.get_model("agenda_culturel", "RecurrentImport")
    rimports = RecurrentImport.objects.all()
    urimports = []
    for ri in rimports:
        if ri.defaultCategory and ri.defaultCategory.name in convert.keys():
            ri.defaultCategory, tag = convert[ri.defaultCategory.name].get_transfered_to_object(apps)
            urimports.append(ri)
    RecurrentImport.objects.bulk_update(urimports, fields=["defaultCategory"])


def update_database_new(apps, schema_editor):
    # forward: move data off the old categories
    update_database(apps, old_cats)


def update_database_old(apps, schema_editor):
    # reverse: move data off the new categories
    update_database(apps, new_cats)


def do_nothing(apps, schema_editor):
    # placeholder for RunPython steps with no reverse action
    pass
|
||||
|
||||
class Migration(migrations.Migration):
    """Data migration replacing the old category set with the new one:
    create new categories, restyle preserved ones, re-point existing data,
    then drop the old categories."""

    dependencies = [
        ("agenda_culturel", "0098_remove_category_alt_name_remove_category_codename"),
    ]

    operations = [
        migrations.RunPython(
            create_new_categories, reverse_code=delete_new_categories
        ),
        migrations.RunPython(
            update_preserved_categories_new,
            reverse_code=update_preserved_categories_old,
        ),
        migrations.RunPython(
            update_database_new, reverse_code=update_database_old
        ),
        migrations.RunPython(
            delete_old_categories, reverse_code=create_old_categories
        ),
    ]
|
||||
|
19
src/agenda_culturel/migrations/0100_tag_category.py
Normal file
19
src/agenda_culturel/migrations/0100_tag_category.py
Normal file
@ -0,0 +1,19 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-02 10:54

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Attach tags to an optional parent category."""

    dependencies = [
        ("agenda_culturel", "0099_update_categories"),
    ]

    operations = [
        migrations.AddField(
            model_name="tag",
            name="category",
            field=models.ForeignKey(
                default=None,
                help_text="This tags corresponds to a sub-category of the given category",
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="agenda_culturel.category",
                verbose_name="Category",
            ),
        ),
    ]
|
19
src/agenda_culturel/migrations/0101_alter_tag_category.py
Normal file
19
src/agenda_culturel/migrations/0101_alter_tag_category.py
Normal file
@ -0,0 +1,19 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-02 14:13

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Allow the tag's parent category to be left blank in forms."""

    dependencies = [
        ("agenda_culturel", "0100_tag_category"),
    ]

    operations = [
        migrations.AlterField(
            model_name="tag",
            name="category",
            field=models.ForeignKey(
                blank=True,
                default=None,
                help_text="This tags corresponds to a sub-category of the given category",
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="agenda_culturel.category",
                verbose_name="Category",
            ),
        ),
    ]
|
@ -0,0 +1,19 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-07 20:53

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Add a ``representative`` event to each duplicated-events group."""

    dependencies = [
        ("agenda_culturel", "0101_alter_tag_category"),
    ]

    operations = [
        migrations.AddField(
            model_name="duplicatedevents",
            name="representative",
            field=models.ForeignKey(
                default=None,
                help_text="This event is the representative event of the duplicated events group",
                null=True,
                on_delete=django.db.models.deletion.SET_DEFAULT,
                to="agenda_culturel.event",
                verbose_name="Representative event",
            ),
        ),
    ]
|
@ -0,0 +1,59 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-07 20:53
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
def set_representative_from_fixed_masked(apps, cats):
    """Forward step: derive each group's ``representative`` from the
    legacy ``fixed``/``masked`` flags."""
    # get all duplicated events
    DuplicatedEvents = apps.get_model("agenda_culturel", "DuplicatedEvents")

    to_update = []
    for dup in DuplicatedEvents.objects.all().prefetch_related("event_set"):
        # by default a group has no representative...
        dup.representative = None
        # ...except when it is fixed:
        if dup.fixed:
            # pick the first non-masked event, if any (there should be one)
            visible = [e for e in dup.event_set.all() if not e.masked]
            if visible:
                dup.representative = visible[0]
        to_update.append(dup)

    DuplicatedEvents.objects.bulk_update(to_update, fields=["representative"])
|
||||
|
||||
def set_fixed_masked_from_representative(apps, cats):
    """Reverse step: rebuild the legacy ``masked`` and ``fixed`` flags
    from ``representative``.

    The forward step (set_representative_from_fixed_masked) picks a
    NON-masked event as representative, so here an event is masked when
    its group has a representative and that representative is a different
    event. The original code did the opposite (``rep == e``), masking the
    representative itself, and could assign a non-boolean value.
    """
    Event = apps.get_model("agenda_culturel", "Event")

    to_update = []
    for e in Event.objects.all().prefetch_related("possibly_duplicated"):
        if not e.possibly_duplicated:
            # not part of a duplication group: never masked
            e.masked = False
        else:
            rep = e.possibly_duplicated.representative
            # masked iff another event represents the group
            # (fix: was ``rep and rep == e``, inverting the forward logic)
            e.masked = rep is not None and rep != e
        to_update.append(e)

    Event.objects.bulk_update(to_update, fields=["masked"])

    # get all duplicated events
    DuplicatedEvents = apps.get_model("agenda_culturel", "DuplicatedEvents")
    duplicated = DuplicatedEvents.objects.all().prefetch_related("event_set")

    # a group is "fixed" exactly when it has a representative
    to_update = []
    for d in duplicated:
        d.fixed = d.representative is not None
        to_update.append(d)

    DuplicatedEvents.objects.bulk_update(to_update, fields=["fixed"])
|
||||
|
||||
class Migration(migrations.Migration):
    """Data migration converting the fixed/masked duplicated-events flags
    into the new ``representative`` field (and back)."""

    dependencies = [
        ("agenda_culturel", "0102_duplicatedevents_representative"),
    ]

    operations = [
        migrations.RunPython(
            set_representative_from_fixed_masked,
            reverse_code=set_fixed_masked_from_representative,
        ),
    ]
|
@ -0,0 +1,17 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-07 21:24

from django.db import migrations


class Migration(migrations.Migration):
    """Drop the legacy ``fixed`` flag, superseded by ``representative``."""

    dependencies = [
        ("agenda_culturel", "0103_update_duplicatedevents_datastructure"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="duplicatedevents",
            name="fixed",
        ),
    ]
|
@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-08 08:30

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Drop the legacy ``masked`` flag and rename the duplication link to
    ``other_versions``."""

    dependencies = [
        ("agenda_culturel", "0104_remove_duplicatedevents_fixed"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="event",
            name="masked",
        ),
        migrations.RenameField(
            model_name="event",
            old_name="possibly_duplicated",
            new_name="other_versions",
        ),
    ]
|
@ -0,0 +1,19 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-09 10:43

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Adjust the ``other_versions`` link after its rename."""

    dependencies = [
        ("agenda_culturel", "0105_remove_event_masked_remove_event_possibly_duplicated_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="event",
            name="other_versions",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="agenda_culturel.duplicatedevents",
                verbose_name="Other versions",
            ),
        ),
    ]
|
30
src/agenda_culturel/migrations/0107_strip_aliases.py
Normal file
30
src/agenda_culturel/migrations/0107_strip_aliases.py
Normal file
@ -0,0 +1,30 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-10 21:25

from django.db import migrations


def strip_place_aliases(apps, schema_editor):
    """Trim surrounding whitespace from every alias of every Place."""
    Place = apps.get_model("agenda_culturel", "Place")

    places = Place.objects.all()

    for place in places:
        if place.aliases is not None:
            place.aliases = [alias.strip() for alias in place.aliases]

    # the queryset's result cache holds the mutated instances
    Place.objects.bulk_update(places, fields=["aliases"])


def do_nothing(apps, schema_editor):
    """No-op reverse: the stripped whitespace cannot be restored."""
    pass


class Migration(migrations.Migration):
    """Data migration cleaning whitespace in Place aliases."""

    dependencies = [
        ("agenda_culturel", "0106_alter_event_other_versions"),
    ]

    operations = [
        migrations.RunPython(strip_place_aliases, reverse_code=do_nothing)
    ]
|
@ -0,0 +1,44 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-11 10:15

from django.db import migrations


def remove_duplicated_categories(apps, schema_editor):
    """Deduplicate categories sharing the same name.

    For each duplicated name: if exactly one of the homonyms is referenced
    by events or categorisation rules, the unreferenced ones are deleted;
    otherwise all but the highest-pk one are deleted.
    """
    Category = apps.get_model("agenda_culturel", "Category")
    CategorisationRule = apps.get_model("agenda_culturel", "CategorisationRule")
    Event = apps.get_model("agenda_culturel", "Event")

    catnames = list(set([c.name for c in Category.objects.all()]))

    # for each category name
    for cname in catnames:
        # check if it exists more than one category
        if Category.objects.filter(name=cname).count() > 1:
            cats = Category.objects.filter(name=cname).order_by("pk")
            # usage count (events + rules) for each homonym
            nbs = [
                Event.objects.filter(category=c).count()
                + CategorisationRule.objects.filter(category=c).count()
                for c in cats
            ]

            # if only one category with this name has elements
            if len([n for n in nbs if n != 0]) == 1:
                # remove all categories without elements
                for n, c in zip(nbs, cats):
                    if n == 0:
                        c.delete()
            else:
                # otherwise, remove all but the last one (by ID)
                for c in cats[0:-1]:
                    c.delete()


def do_nothing(apps, schema_editor):
    """No-op reverse: deleted duplicates cannot be recreated."""
    pass


class Migration(migrations.Migration):
    """Data migration removing duplicated category rows."""

    dependencies = [
        ("agenda_culturel", "0107_strip_aliases"),
    ]

    operations = [
        migrations.RunPython(remove_duplicated_categories, reverse_code=do_nothing)
    ]
|
@ -0,0 +1,19 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-13 09:56

from django.db import migrations


class Migration(migrations.Migration):
    """Remove the obsolete moderation question/answer models."""

    dependencies = [
        ("agenda_culturel", "0108_remove_duplicated_categories"),
    ]

    operations = [
        migrations.DeleteModel(
            name="ModerationAnswer",
        ),
        migrations.DeleteModel(
            name="ModerationQuestion",
        ),
    ]
|
@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-13 17:27

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add tag flags controlling inclusion/exclusion suggestions."""

    dependencies = [
        ("agenda_culturel", "0109_delete_moderationanswer_delete_moderationquestion"),
    ]

    operations = [
        migrations.AddField(
            model_name="tag",
            name="in_excluded_suggestions",
            field=models.BooleanField(
                default=False,
                help_text="This tag will be part of the excluded suggestions.",
                verbose_name="In excluded suggestions",
            ),
        ),
        migrations.AddField(
            model_name="tag",
            name="in_included_suggestions",
            field=models.BooleanField(
                default=False,
                help_text="This tag will be part of the included suggestions.",
                verbose_name="In included suggestions",
            ),
        ),
    ]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-17 12:48

from django.db import migrations, models


class Migration(migrations.Migration):
    """Turn ReferenceLocation.main from a boolean into an ordering int."""

    dependencies = [
        ("agenda_culturel", "0110_tag_in_excluded_suggestions_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="referencelocation",
            name="main",
            field=models.IntegerField(
                default=0,
                help_text="This location is one of the main locations (shown first higher values).",
                verbose_name="Main",
            ),
        ),
    ]
|
19
src/agenda_culturel/migrations/0112_place_description.py
Normal file
19
src/agenda_culturel/migrations/0112_place_description.py
Normal file
@ -0,0 +1,19 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-20 15:42

from django.db import migrations
import django_ckeditor_5.fields


class Migration(migrations.Migration):
    """Add a rich-text description to Place."""

    dependencies = [
        ("agenda_culturel", "0111_alter_referencelocation_main"),
    ]

    operations = [
        migrations.AddField(
            model_name="place",
            name="description",
            field=django_ckeditor_5.fields.CKEditor5Field(
                blank=True,
                help_text="Description of the place, including accessibility.",
                null=True,
                verbose_name="Description",
            ),
        ),
    ]
|
17
src/agenda_culturel/migrations/0113_remove_tag_category.py
Normal file
17
src/agenda_culturel/migrations/0113_remove_tag_category.py
Normal file
@ -0,0 +1,17 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-20 21:40
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0112_place_description'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='tag',
|
||||
name='category',
|
||||
),
|
||||
]
|
@ -0,0 +1,35 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-22 10:12
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django_ckeditor_5.fields
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0113_remove_tag_category'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Organisation',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(help_text='Organisation name', max_length=512, unique=True, verbose_name='Name')),
|
||||
('website', models.URLField(blank=True, help_text='Website of the organisation', max_length=1024, null=True, verbose_name='Website')),
|
||||
('description', django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Description of the organisation.', null=True, verbose_name='Description')),
|
||||
('principal_place', models.ForeignKey(blank=True, help_text='Place mainly associated with this organizer. Mainly used if there is a similarity in the name, to avoid redundant displays.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.place', verbose_name='Principal place')),
|
||||
],
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='event',
|
||||
name='organisers',
|
||||
field=models.ManyToManyField(blank=True, help_text='list of event organisers. Organizers will only be displayed if one of them does not normally use the venue.', related_name='organised_events', to='agenda_culturel.organisation', verbose_name='Location (free form)'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='recurrentimport',
|
||||
name='defaultOrganiser',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Organiser of each imported event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.organisation', verbose_name='Organiser'),
|
||||
),
|
||||
]
|
@ -0,0 +1,22 @@
|
||||
# Generated by Django 4.2.9 on 2024-11-22 10:30
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agenda_culturel', '0114_organisation_event_organisers_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='organisation',
|
||||
options={'verbose_name': 'Organisation', 'verbose_name_plural': 'Organisations'},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='event',
|
||||
name='organisers',
|
||||
field=models.ManyToManyField(blank=True, help_text='list of event organisers. Organizers will only be displayed if one of them does not normally use the venue.', related_name='organised_events', to='agenda_culturel.organisation', verbose_name='Organisers'),
|
||||
),
|
||||
]
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user