Compare commits: master...externalis (12 commits)

Commits: 2df8c63df9, be787f7108, 525900fe84, 5f1cd67cc0, 7612eb0b7f, 6ae2729652, a9362bba1d, e795acb5a8, 1c0421f87d, 37273e4953, 4960e78fa8, ee7aebf5fb
README.md (16 lines changed)

@@ -45,10 +45,20 @@ These paths are relative to `rdoo.py`; it is possible to pass absolute…

 Archiving can be disabled by passing the `-na, --no-archive` argument.

-## Umap
-
-The generated json files can be used directly in [umap](https://umap.openstreetmap.fr/fr/) by [importing](https://wiki.openstreetmap.org/wiki/FR:UMap/Guide/Importer_un_fichier_de_donn%C3%A9es) them and choosing the `osm` data format.
-
-If you have a server to host the script or its results, the file link can be used directly in umap as a layer's `remote data`, ticking `dynamic` and `with proxy`.
+### Tag concatenation in the json export
+
+`-nc, --no-concatenation`, to keep the tags from being concatenated into the description field of the exported json.
+
+### Timeout
+
+`-t, --timeout`, to set the timeout, in seconds, of the Overpass request (default 25s).
+
+## Translations
+
+Tags can be translated through the `traductions.json` file, which holds the configuration as `"key" : "array_of_values"`, where the key is the OSM tag value to translate and the array of values has the form `"value" : "translation"`. The default file contains the translation of the `"bicycle_parking"` tag as an example.
+
+## Umap
+
+[Using with Umap](docs/umap.md)
+
+## Custom queries
+
+[Custom queries](docs/requetes_perso.md)

 ## Inspirations / resources:

 ### resource urls
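As a worked illustration of how the translation and concatenation options above interact, here is a minimal, self-contained sketch. The FR label (`"type"`) and the translation (`"stands"` → `"Arceaux"`) come from the configuration files added in this branch; the `element_tags` dict and the variable names are made up for the example and are not part of rdoo.

```python
# Minimal sketch: how one OSM tag ends up in the exported "description" field.
champ = {"bicycle_parking": {"export_json": "Oui", "FR": "type"}}
traductions = {"bicycle_parking": {"stands": "Arceaux"}}
element_tags = {"bicycle_parking": "stands"}  # made-up element

# translation pass (same idea as Utils.traduction); unknown values are kept as-is
valeur = traductions["bicycle_parking"].get(
    element_tags["bicycle_parking"], element_tags["bicycle_parking"]
)

# with concatenation (default): one "description" line per exported tag
description_line = champ["bicycle_parking"]["FR"] + " : " + valeur
print(description_line)  # -> type : Arceaux

# with -nc / --no-concatenation: the tag is exported as its own field instead
print({champ["bicycle_parking"]["FR"]: valeur})  # -> {'type': 'Arceaux'}
```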
configuration/champs_generiques.json — new file, 148 lines (@@ -0,0 +1,148 @@)

{
    "CHAMPS_STATIONNEMENT": {
        "amenity": { "export_json": "Non", "FR": "aménagement" },
        "capacity": { "export_json": "Oui", "FR": "nombre d'emplacements" },
        "access": { "export_json": "Oui", "FR": "accès" },
        "bicycle_parking": { "export_json": "Oui", "FR": "type" },
        "covered": { "export_json": "Oui", "FR": "couvert" },
        "operator": { "export_json": "Oui", "FR": "opérateur" },
        "operator:type": { "export_json": "Oui", "FR": "type d'opérateur" },
        "fee": { "export_json": "Oui", "FR": "frais" },
        "check_date:capacity": { "export_json": "Non", "FR": "date_vérification" },
        "source": { "export_json": "Non", "FR": "source" }
    },
    "CHAMPS_POI": {
        "name": { "export_json": "Oui", "FR": "" },
        "description": { "export_json": "Oui", "FR": "" },
        "website": { "export_json": "Oui", "FR": "" },
        "addr:housenumber": { "export_json": "Oui", "FR": "" },
        "addr:street": { "export_json": "Oui", "FR": "" },
        "addr:postcode": { "export_json": "Oui", "FR": "" },
        "addr:city": { "export_json": "Oui", "FR": "" },
        "contact:email": { "export_json": "Oui", "FR": "email" },
        "contact:twitter": { "export_json": "Oui", "FR": "Twitter" },
        "contact:facebook": { "export_json": "Oui", "FR": "Facebook" },
        "contact:phone": { "export_json": "Oui", "FR": "Téléphone" },
        "network": { "export_json": "Oui", "FR": "Réseau" },
        "office": { "export_json": "Oui", "FR": "Bureau" },
        "opening_hours": { "export_json": "Oui", "FR": "Horaires" }
    },
    "CHAMPS_ADRESSE": {
        "api_adresse:geometry:coordinates:lon": { "export_json": "Non", "FR": "lon_adresse_etalab" },
        "api_adresse:geometry:coordinates:lat": { "export_json": "Non", "FR": "lat_adresse_etalab" },
        "api_adresse:properties:label": { "export_json": "Non", "FR": "adresse_etalab" },
        "api_adresse:properties:score": { "export_json": "Non", "FR": "score_etalab" },
        "api_adresse:properties:housenumber": { "export_json": "Non", "FR": "numero_etalab" },
        "api_adresse:properties:type": { "export_json": "Non", "FR": "type_etalab" },
        "api_adresse:properties:name": { "export_json": "Non", "FR": "numero_et_voie_etalab" },
        "api_adresse:properties:postcode": { "export_json": "Non", "FR": "code_postal_etalab" },
        "api_adresse:properties:citycode": { "export_json": "Non", "FR": "code_INSEE_etalab" },
        "api_adresse:properties:city": { "export_json": "Non", "FR": "ville_etalab" },
        "api_adresse:properties:street": { "export_json": "Non", "FR": "rue_etalab" }
    }
}
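To show how these generic blocks are consumed, here is a minimal sketch of the merge performed by `Utils.lecture_requetes()` further down in this diff. The literal dicts are shortened excerpts of the two configuration files; the variable names and the `or {}` guard for an empty `champ_local` are the sketch's own choices.

```python
# Shortened excerpts of champs_generiques.json and of one requetes.json entry.
champs_generiques = {
    "CHAMPS_STATIONNEMENT": {
        "capacity": {"export_json": "Oui", "FR": "nombre d'emplacements"},
    },
    "CHAMPS_ADRESSE": {
        "api_adresse:properties:city": {"export_json": "Non", "FR": "ville_etalab"},
    },
}
requete = {
    "champs_generiques": ["CHAMPS_STATIONNEMENT", "CHAMPS_ADRESSE"],
    "champ_local": "",  # may also be a dict of extra per-query fields
}

# Same merge as lecture_requetes(): start from the local fields, then add each
# referenced generic block.
champs = dict(requete["champ_local"] or {})
for bloc in requete["champs_generiques"]:
    champs.update(champs_generiques[bloc])

print(sorted(champs))  # ['api_adresse:properties:city', 'capacity']
```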
configuration/requetes.json — new file, 96 lines (@@ -0,0 +1,96 @@)

{
    "stationnements_velos_non_publics": {
        "overpass": "nwr[\"amenity\"=\"bicycle_parking\"][\"access\"~\"(no|permit|private|customers|permissive)\"](area:aire_de_recherche);",
        "champs_generiques": [ "CHAMPS_STATIONNEMENT", "CHAMPS_ADRESSE" ],
        "champ_local": ""
    },
    "stationnements_velos_publics": {
        "overpass": "nwr[\"amenity\"=\"bicycle_parking\"](area:aire_de_recherche); - nwr[\"amenity\"=\"bicycle_parking\"][\"access\"~\"(no|permit|private|customers|permissive)\"](area:aire_de_recherche);",
        "champs_generiques": [ "CHAMPS_STATIONNEMENT", "CHAMPS_ADRESSE" ],
        "champ_local": ""
    },
    "ateliers_autoreparation": {
        "overpass": "nwr[\"service:bicycle:diy\"=\"yes\"](area:aire_de_recherche);",
        "champs_generiques": [ "CHAMPS_POI", "CHAMPS_ADRESSE" ],
        "champ_local": { "service:bicycle:diy": { "export_json": "Non", "FR": "" } }
    },
    "associations_velo": {
        "overpass": "nwr[\"association\"=\"bicycle\"](area:aire_de_recherche);",
        "champs_generiques": [ "CHAMPS_POI", "CHAMPS_ADRESSE" ],
        "champ_local": { "association": { "export_json": "Non", "FR": "" } }
    },
    "fabriquants_velo": {
        "overpass": "nwr[\"craft\"=\"bicycle\"](area:aire_de_recherche);",
        "champs_generiques": [ "CHAMPS_POI", "CHAMPS_ADRESSE" ],
        "champ_local": { "craft": { "export_json": "Non", "FR": "" } }
    },
    "vendeurs_velo": {
        "overpass": "nwr[\"shop\"=\"bicycle\"](area:aire_de_recherche); nwr[\"service:bicycle:retail\"=\"yes\"](area:aire_de_recherche);",
        "champs_generiques": [ "CHAMPS_POI", "CHAMPS_ADRESSE" ],
        "champ_local": { "shop": { "export_json": "Non", "FR": "" } }
    },
    "velos_libre_service": {
        "overpass": "nwr[\"amenity\"=\"bicycle_rental\"](area:aire_de_recherche);",
        "champs_generiques": [ "CHAMPS_POI", "CHAMPS_ADRESSE" ],
        "champ_local": { "amenity": { "export_json": "Non", "FR": "" } }
    },
    "location_velo": {
        "overpass": "nwr[\"service:bicycle:rental\"=\"yes\"](area:aire_de_recherche);",
        "champs_generiques": [ "CHAMPS_POI", "CHAMPS_ADRESSE" ],
        "champ_local": { "service:bicycle:rental": { "export_json": "Non", "FR": "" } }
    }
}
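Each `overpass` string keeps the `aire_de_recherche` placeholder. The sketch below reproduces how `Utils.run_overpass_query()` (shown later in this diff) wraps it with the timeout, adds `out center;`, and substitutes the search area; the relation id used here is only an example.

```python
# Build the full Overpass request from one "overpass" entry of requetes.json.
critere = 'nwr["amenity"="bicycle_parking"](area:aire_de_recherche);'
timeout = 25  # default of the new -t/--timeout option
aire_de_recherche = str(3_600_000_000 + 110866)  # 3600000000 + OSM relation id (example)

# Same wrapping as run_overpass_query() after this change.
overpass_query = f"[out:json][timeout: {timeout}];({critere});out center;"
overpass_query = overpass_query.replace("aire_de_recherche", aire_de_recherche)

print(overpass_query)
# [out:json][timeout: 25];(nwr["amenity"="bicycle_parking"](area:3600110866););out center;
```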
configuration/traductions.json — new file, 23 lines (@@ -0,0 +1,23 @@)

{
    "bicycle_parking": {
        "stands": "Arceaux",
        "wall_loops": "Pince roues",
        "rack": "Râteliers",
        "anchors": "Ancrage",
        "shed": "Abri collectif",
        "bollard": "Potelet",
        "lockers": "Abris individuels",
        "wide_stands": "Arceaux espacés",
        "ground_slots": "Fente dans le sol",
        "building": "Bâtiment",
        "informal": "Informel",
        "wave": "Râteliers",
        "streetpod": "Arceaux",
        "tree": "Arbre à bicyclettes",
        "crossbar": "Barre",
        "rope": "Câble",
        "two-tier": "Deux étages",
        "floor": "Sol",
        "handlebar_holder": "Accroche-guidons"
    }
}
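The main loop now applies this file with `for key in utils.traductions: data = utils.traduction(key, utils.traductions[key], data)`. The sketch below replays that pass on a made-up two-element payload; note that the membership test on the value is the sketch's own guard against values missing from the table, not a verbatim copy of `Utils.traduction()`.

```python
import json

# traductions.json maps an OSM tag to a value -> label table (excerpt).
traductions = json.loads("""
{
    "bicycle_parking": {
        "stands": "Arceaux",
        "wall_loops": "Pince roues"
    }
}
""")

# Made-up Overpass-style payload.
data = {"elements": [
    {"tags": {"bicycle_parking": "stands", "capacity": "10"}},
    {"tags": {"amenity": "bicycle_parking"}},
]}

# Apply every translated tag to every element, replacing raw values by labels.
for tag, table in traductions.items():
    for element in data["elements"]:
        if tag in element["tags"] and element["tags"][tag] in table:
            element["tags"][tag] = table[element["tags"][tag]]

print(data["elements"][0]["tags"])  # {'bicycle_parking': 'Arceaux', 'capacity': '10'}
```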
docs/requetes_perso.md — new file, 32 lines (@@ -0,0 +1,32 @@)

## Custom queries

The queries are stored in the [requetes.json](../configuration/requetes.json) file, which contains an array of queries.

### Queries

```json
"fontaines": {
    "commentaire": "champ de texte libre",
    "overpass": "nwr[\"amenity\"=\"fountain\"](area:aire_de_recherche);",
    "champs_generiques": [
        "CHAMPS_POI",
        "CHAMPS_ADRESSE"
    ],
    "champ_local": ""
}
```

A query block starts with its name and must contain:

- the `overpass` search string (remember to escape `"` with `\`) ([overpass turbo](https://overpass-turbo.eu/), [Overpass_QL wiki](https://wiki.openstreetmap.org/wiki/FR:Overpass_API/Overpass_QL))
- the `champs_generiques` used, which hold the OSM tag or geocoding information
- the `champ_local` used.

It can also contain a `commentaire`, which is not interpreted by rdoo.

### Fields

The generic fields are stored in the [champs_generiques.json](../configuration/champs_generiques.json) file and are split into 3 blocks:

- `CHAMPS_STATIONNEMENT` for tags related to bicycle parking
- `CHAMPS_POI` for tags covering common information (name, description, contact details, address)
- `CHAMPS_ADRESSE` for tags produced by reverse geocoding via Etalab.

### Translation

Translations are stored in the [traductions.json](../configuration/traductions.json) file; the tag to translate is followed by a mapping table of the values to translate.
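As a rough illustration of the structure this page describes, the snippet below loads the `fontaines` example as it would sit inside requetes.json and checks that the required keys are present. The validation helper is hypothetical; rdoo itself simply reads the file in `Utils.lecture_requetes()`.

```python
import json

# The "fontaines" example from this page, wrapped as a requetes.json document.
requetes = json.loads(r"""
{
    "fontaines": {
        "commentaire": "champ de texte libre",
        "overpass": "nwr[\"amenity\"=\"fountain\"](area:aire_de_recherche);",
        "champs_generiques": ["CHAMPS_POI", "CHAMPS_ADRESSE"],
        "champ_local": ""
    }
}
""")

# Hypothetical sanity check: every query block needs these three keys.
REQUIRED = ("overpass", "champs_generiques", "champ_local")
for nom, bloc in requetes.items():
    missing = [cle for cle in REQUIRED if cle not in bloc]
    if missing:
        raise ValueError(f"query {nom}: missing keys {missing}")
    print(f"query {nom}: OK ({len(bloc['champs_generiques'])} generic blocks)")
```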
docs/umap.md — new file, 11 lines (@@ -0,0 +1,11 @@)

## Using with Umap

### Import

The generated json files can be used directly in [umap](https://umap.openstreetmap.fr/fr/) by [importing](https://wiki.openstreetmap.org/wiki/FR:UMap/Guide/Importer_un_fichier_de_donn%C3%A9es) them and choosing the `osm` data format.

![import configuration](umap_import.png)

### Remote

If you have a server to host the script or its results, the file link can be used directly in umap as a layer's `remote data`, ticking `dynamic` and `with proxy`, with the `osm` data format.

![remote configuration](umap_distant.png)

docs/umap_distant.png — new binary file, 113 KiB
docs/umap_import.png — new binary file, 83 KiB
Deleted file — @@ -1,166 +0,0 @@ (the whole module is removed; its queries and field tables move to the JSON configuration files above)

#!/usr/bin/env python3

# Copyright 2021 Olav63, SebF
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""Module des requêtes"""


class Requete:
    """Objet requête"""

    nom: str
    critere: str
    champs: dict

    def __init__(self, nom, critere, champs):
        self.nom = nom
        self.critere = critere
        self.champs = champs


REQS = []
CHAMPS_STATIONNEMENT = {
    "amenity": {"export_json": "Non", "FR": "aménagement"},
    "capacity": {"export_json": "Oui", "FR": "nombre d'emplacements"},
    "access": {"export_json": "Oui", "FR": "accès"},
    "bicycle_parking": {"export_json": "Oui", "FR": "type"},
    "covered": {"export_json": "Oui", "FR": "couvert"},
    "operator": {"export_json": "Oui", "FR": "opérateur"},
    "operator:type": {"export_json": "Oui", "FR": "type d'opérateur"},
    "fee": {"export_json": "Oui", "FR": "frais"},
    "check_date:capacity": {"export_json": "Non", "FR": "date_vérification"},
    "source": {"export_json": "Non", "FR": "source"},
}
CHAMPS_POI = {
    "name": {"export_json": "Oui", "FR": ""},
    "description": {"export_json": "Oui", "FR": ""},
    "website": {"export_json": "Oui", "FR": ""},
    "addr:housenumber": {"export_json": "Oui", "FR": ""},
    "addr:street": {"export_json": "Oui", "FR": ""},
    "addr:postcode": {"export_json": "Oui", "FR": ""},
    "addr:city": {"export_json": "Oui", "FR": ""},
    "contact:email": {"export_json": "Oui", "FR": "email"},
    "contact:twitter": {"export_json": "Oui", "FR": "Twitter"},
    "contact:facebook": {"export_json": "Oui", "FR": "Facebook"},
    "contact:phone": {"export_json": "Oui", "FR": "Téléphone"},
    "network": {"export_json": "Oui", "FR": "Réseau"},
    "office": {"export_json": "Oui", "FR": "Bureau"},
    "opening_hours": {"export_json": "Oui", "FR": "Horaires"},
}
CHAMPS_ADRESSE = {
    "api_adresse:geometry:coordinates:lon": {
        "export_json": "Non",
        "FR": "lon_adresse_etalab",
    },
    "api_adresse:geometry:coordinates:lat": {
        "export_json": "Non",
        "FR": "lat_adresse_etalab",
    },
    "api_adresse:properties:label": {"export_json": "Non", "FR": "adresse_etalab"},
    "api_adresse:properties:score": {"export_json": "Non", "FR": "score_etalab"},
    "api_adresse:properties:housenumber": {"export_json": "Non", "FR": "numero_etalab"},
    "api_adresse:properties:type": {"export_json": "Non", "FR": "type_etalab"},
    "api_adresse:properties:name": {
        "export_json": "Non",
        "FR": "numero_et_voie_etalab",
    },
    "api_adresse:properties:postcode": {
        "export_json": "Non",
        "FR": "code_postal_etalab",
    },
    "api_adresse:properties:citycode": {
        "export_json": "Non",
        "FR": "code_INSEE_etalab",
    },
    "api_adresse:properties:city": {"export_json": "Non", "FR": "ville_etalab"},
    "api_adresse:properties:street": {"export_json": "Non", "FR": "rue_etalab"},
}

# pylint: disable=C0301
STATIONNEMENT_NON_PUBLIC_REQ = r'nwr["amenity"="bicycle_parking"]["access"~"(no|permit|private|customers|permissive)"](area:aire_de_recherche);'
REQS.append(
    Requete(
        "stationnements_velos_non_publics",
        STATIONNEMENT_NON_PUBLIC_REQ,
        dict(CHAMPS_STATIONNEMENT, **CHAMPS_ADRESSE),
    )
)

REQS.append(
    Requete(
        "stationnements_velos_publics",
        # pylint: disable=C0301
        rf'nwr["amenity"="bicycle_parking"](area:aire_de_recherche); - {STATIONNEMENT_NON_PUBLIC_REQ}',
        dict(CHAMPS_STATIONNEMENT, **CHAMPS_ADRESSE),
    )
)

CHAMP_LOCAL = {"service:bicycle:diy": {"export_json": "Non", "FR": ""}}
REQS.append(
    Requete(
        "ateliers_autoreparation",
        r'nwr["service:bicycle:diy"="yes"](area:aire_de_recherche);',
        dict(CHAMP_LOCAL, **CHAMPS_POI, **CHAMPS_ADRESSE),
    )
)

CHAMP_LOCAL = {"association": {"export_json": "Non", "FR": ""}}
REQS.append(
    Requete(
        "associations_velo",
        r'nwr["association"="bicycle"](area:aire_de_recherche);',
        dict(CHAMP_LOCAL, **CHAMPS_POI, **CHAMPS_ADRESSE),
    )
)

CHAMP_LOCAL = {"craft": {"export_json": "Non", "FR": ""}}
REQS.append(
    Requete(
        "fabriquants_velo",
        r'nwr["craft"="bicycle"](area:aire_de_recherche);',
        dict(CHAMP_LOCAL, **CHAMPS_POI, **CHAMPS_ADRESSE),
    )
)

CHAMP_LOCAL = {"shop": {"export_json": "Non", "FR": ""}}
REQS.append(
    Requete(
        "vendeurs_velo",
        # pylint: disable=C0301
        r'nwr["shop"="bicycle"](area:aire_de_recherche); nwr["service:bicycle:retail"="yes"](area:aire_de_recherche);',
        dict(CHAMP_LOCAL, **CHAMPS_POI, **CHAMPS_ADRESSE),
    )
)

CHAMP_LOCAL = {"amenity": {"export_json": "Non", "FR": ""}}
REQS.append(
    Requete(
        "velos_libre_service",
        r'nwr["amenity"="bicycle_rental"](area:aire_de_recherche);',
        dict(CHAMP_LOCAL, **CHAMPS_POI, **CHAMPS_ADRESSE),
    )
)

CHAMP_LOCAL = {"service:bicycle:rental": {"export_json": "Non", "FR": ""}}
REQS.append(
    Requete(
        "location_velo",
        r'nwr["service:bicycle:rental"="yes"](area:aire_de_recherche);',
        dict(CHAMP_LOCAL, **CHAMPS_POI, **CHAMPS_ADRESSE),
    )
)
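For context, the deleted module above was consumed roughly as sketched below: a `Requete` object bundled a name, an Overpass criterion and a field table, and the old main loop iterated over `requetes.REQS` (visible in the old rdoo.py lines further down). The sketch is trimmed and self-contained; the excerpted field table and the print are illustrative only.

```python
# Self-contained sketch of how the removed module organised its queries.
class Requete:
    """Same three attributes as the removed class."""

    def __init__(self, nom, critere, champs):
        self.nom = nom
        self.critere = critere
        self.champs = champs


CHAMPS_STATIONNEMENT = {
    "capacity": {"export_json": "Oui", "FR": "nombre d'emplacements"},  # excerpt only
}

REQS = [
    Requete(
        "stationnements_velos_non_publics",
        r'nwr["amenity"="bicycle_parking"]["access"~"(no|permit|private|customers|permissive)"](area:aire_de_recherche);',
        CHAMPS_STATIONNEMENT,
    )
]

for req in REQS:
    # the old main loop passed req.critere to Overpass, req.champs to the export,
    # and req.nom to the save functions
    print(req.nom, "->", sorted(req.champs))
```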
rdoo.py (72 lines changed)

@@ -18,7 +18,7 @@
 """
 Module principal :
 - récupération de données par appel à Overpass
 - géocodage inverse
 - export des données en JSON pour utilisation avec umap

@@ -31,9 +31,8 @@ import os
 import argparse
 import logging
 import sys
-from osm_vc63 import errors
-from osm_vc63 import requetes
-from osm_vc63.utils import Utils
+from rdoopy import errors
+from rdoopy.utils import Utils
 
 OVERPASS_URL = "http://overpass-api.de/api/interpreter"
 GEO_API_URL = "https://api-adresse.data.gouv.fr"

@@ -45,30 +44,6 @@ MAX_RETRY = 4
 RETRY_DELAY = 120
 
 
-# traductions des tags bicycle_parking
-TRAD_BICYCLE_PARKING = {
-    "stands": "Arceaux",
-    "wall_loops": "Pince roues",
-    "rack": "Râteliers",
-    "anchors": "Ancrage",
-    "shed": "Abri collectif",
-    "bollard": "Potelet",
-    "lockers": "Abris individuels",
-    "wide_stands": "Arceaux espacés",
-    "ground_slots": "Fente dans le sol",
-    "building": "Bâtiment",
-    "informal": "Informel",
-    "wave": "Râteliers",
-    "streetpod": "Arceaux",
-    "tree": "Arbre à bicyclettes",
-    "crossbar": "Barre",
-    "rope": "Câble",
-    "two-tier": "Deux étages",
-    "floor": "Sol",
-    "handlebar_holder": "Accroche-guidons",
-}
-
 
 def init_argparse() -> argparse.ArgumentParser:
     """Définition des arguments possibles."""

@@ -130,6 +105,19 @@ def init_argparse() -> argparse.ArgumentParser:
     )
     parser.set_defaults(archivage=True)
 
+    parser.add_argument(
+        "-nc",
+        "--no-concatenation",
+        dest="concatenation",
+        action="store_false",
+        help="Désactiver la concaténation des tags dans l'export json",
+    )
+    parser.set_defaults(concatenation=True)
+
+    parser.add_argument(
+        "-t", "--timeout", type=int, help="Définir le temps de timeout.", default=25
+    )
+
     return parser

@@ -142,10 +130,13 @@ def main():
     logging.basicConfig(
         format="%(asctime)s [%(levelname)s] %(message)s",
         level=getattr(logging, args.log_level.upper()),
-        handlers=[logging.FileHandler("rdoo.log"), logging.StreamHandler(sys.stdout)],
+        handlers=[
+            logging.FileHandler("rdoo.log", encoding="utf-8"),
+            logging.StreamHandler(sys.stdout),
+        ],
     )
 
-    utils = Utils(OVERPASS_URL, GEO_API_URL, args.dossier_resultats)
+    utils = Utils(OVERPASS_URL, GEO_API_URL, args.dossier_resultats, args.timeout)
 
     if args.archivage:
         utils.archivage(args.dossier_archive)

@@ -155,14 +146,16 @@
     # l'id de l'area se calcule en ajoutant 3600000000 au numéro de l'objet OSM
     aire_de_recherche = str(3_600_000_000 + args.zone)
 
-    for req in requetes.REQS:
+    for req in utils.json_reqs:
         for nb_essai in range(MAX_RETRY):  # on tente max_retry fois
             try:
 
-                logging.info(f"# Requête en cours : {req.nom}")
+                logging.info(f"# Requête en cours : {req}")
 
                 # appel overpass
-                data = utils.run_overpass_query(req.critere, aire_de_recherche)
+                data = utils.run_overpass_query(
+                    utils.json_reqs[req]["overpass"], aire_de_recherche
+                )
                 nb_resultats = len(data["elements"])
                 logging.info(f"{nb_resultats} résultats")

@@ -172,16 +165,17 @@
                 data = utils.geocodage_csv(data)
 
                 # traduction
-                data = utils.traduction(
-                    "bicycle_parking", TRAD_BICYCLE_PARKING, data
-                )
+                for key in utils.traductions:
+                    data = utils.traduction(key, utils.traductions[key], data)
 
                 # Sauvegarde
                 os.makedirs(args.dossier_resultats, exist_ok=True)
-                export_json = utils.nettoyage_json_pour_umap(data, req.champs)
+                export_json = utils.export_json_pour_umap(
+                    data, utils.json_reqs[req]["champs"], args.concatenation
+                )
 
-                utils.save_as_json(export_json, req.nom)
-                utils.save_as_ods(req.champs, data, req.nom)
+                utils.save_as_json(export_json, req)
+                utils.save_as_ods(utils.json_reqs[req]["champs"], data, req)
 
                 # doucement sur overpass
                 time.sleep(30)
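A quick usage sketch of the two options added to `init_argparse()` above. The parser below is a stand-in that reproduces only those two `add_argument` calls from the diff; the example command-line values are arbitrary.

```python
import argparse

# Stand-in parser reproducing just the two options added in this diff.
parser = argparse.ArgumentParser()
parser.add_argument(
    "-nc",
    "--no-concatenation",
    dest="concatenation",
    action="store_false",
    help="Désactiver la concaténation des tags dans l'export json",
)
parser.set_defaults(concatenation=True)
parser.add_argument(
    "-t", "--timeout", type=int, help="Définir le temps de timeout.", default=25
)

print(parser.parse_args([]))                   # Namespace(concatenation=True, timeout=25)
print(parser.parse_args(["-nc", "-t", "60"]))  # Namespace(concatenation=False, timeout=60)
```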
Utils module (class `Utils`)

@@ -29,7 +29,7 @@ import csv
 from collections import OrderedDict
 import requests
 from pyexcel_ods3 import save_data
-from osm_vc63 import errors
+from rdoopy import errors
 
 
 class Utils:

@@ -39,13 +39,23 @@ class Utils:
     geo_api_url: str
     dossier_resultats: str
 
-    def __init__(self, overpass_url, geo_api_url, dossier_resultats):
+    def __init__(self, overpass_url, geo_api_url, dossier_resultats, timeout):
         self.overpass_url = overpass_url
         self.geo_api_url = geo_api_url
         self.dossier_resultats = dossier_resultats
+        self.timeout = timeout
+
+        with open("configuration/traductions.json", encoding="utf-8") as trads:
+            self.traductions = json.load(trads)
+
+        self.lecture_requetes()
 
     def save_as_ods(self, fields, data, nom_req, ods_data_sheet=OrderedDict()):
-        """Sauvegarde de data dans un classeur ods"""
+        """Sauvegarde de data dans un classeur ods.
+
+        Le paramètre ods_data_sheet est évalué une seule fois à la définition de la fonction,
+        ce qui enregistre les data de chaque appel dans une nouvelle feuille.
+        """
 
         # ods_data_sheet = OrderedDict()
         ods_data = []

@@ -79,13 +89,15 @@ class Utils:
     def save_as_json(self, export_json, nom_req):
         """Enregistrement du JSON"""
 
-        json_file = open(self.dossier_resultats + nom_req + ".json", "w")
+        json_file = open(
+            self.dossier_resultats + nom_req + ".json", "w", encoding="utf-8"
+        )
         json_file.write(json.dumps(export_json))
         json_file.close()
 
         logging.info("Sauvegarde résultat format JSON/OSM")
 
-    def nettoyage_json_pour_umap(self, data, overpass_query_fields):
+    def export_json_pour_umap(self, data, overpass_query_fields, concatenation):
         """Sélection uniquement des champs export_json == oui"""
 
         export_json = {

@@ -99,7 +111,7 @@ class Utils:
 
         for element in data["elements"]:
             export_json["elements"].append(
-                {"type": element["type"], "id": element["id"]}
+                {"type": element["type"], "id": element["id"], "tags": {}}
             )
 
             # positionnement des éléments

@@ -108,20 +120,35 @@ class Utils:
                 export_json["elements"][index_line]["lon"] = element["lon"]
             else:  # ways et relations
                 export_json["elements"][index_line]["center"] = element["center"]
-                export_json["elements"][index_line]["nodes"] = element["nodes"]
 
             # filtrage des tags
             description = ""
             for tag in overpass_query_fields.keys():
-                if overpass_query_fields[tag]["export_json"] == "Oui":
-                    if tag in element["tags"]:
-                        if overpass_query_fields[tag]["FR"] != "":
-                            description = (
-                                description + overpass_query_fields[tag]["FR"] + " : "
-                            )
-
-                        description = description + str(element["tags"][tag]) + "\n"
-
-            export_json["elements"][index_line]["tags"] = {"description": description}
+                if (
+                    overpass_query_fields[tag]["export_json"] == "Oui"
+                    and tag in element["tags"]
+                ):
+                    if concatenation:
+                        ajout = (
+                            str(element["tags"][tag])
+                            if overpass_query_fields[tag]["FR"] == ""
+                            else overpass_query_fields[tag]["FR"]
+                            + " : "
+                            + str(element["tags"][tag])
+                        )
+                        description = description + ajout + "\n"
+                        export_json["elements"][index_line]["tags"] = {
+                            "description": description[:-1]
+                        }
+                    else:
+                        tagname = (
+                            tag
+                            if overpass_query_fields[tag]["FR"] == ""
+                            else overpass_query_fields[tag]["FR"]
+                        )
+                        export_json["elements"][index_line]["tags"].update(
+                            {tagname: element["tags"][tag]}
+                        )
 
             index_line = index_line + 1
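To make the two branches above concrete, here is a reduced, self-contained re-implementation of the tag filtering for a single element, run once with `concatenation=True` and once with `False`. The field table is a shortened excerpt; `filtre_tags` is the sketch's own helper, not a call into the real class.

```python
# Reduced re-implementation of the tag filtering above, for one element only.
def filtre_tags(tags, champs, concatenation):
    resultat = {}
    description = ""
    for tag, conf in champs.items():
        if conf["export_json"] == "Oui" and tag in tags:
            if concatenation:
                ajout = (
                    str(tags[tag])
                    if conf["FR"] == ""
                    else conf["FR"] + " : " + str(tags[tag])
                )
                description += ajout + "\n"
                resultat = {"description": description[:-1]}
            else:
                resultat[tag if conf["FR"] == "" else conf["FR"]] = tags[tag]
    return resultat


champs = {
    "capacity": {"export_json": "Oui", "FR": "nombre d'emplacements"},
    "covered": {"export_json": "Oui", "FR": "couvert"},
    "source": {"export_json": "Non", "FR": "source"},
}
tags = {"capacity": "8", "covered": "yes", "source": "survey"}

print(filtre_tags(tags, champs, True))
# {'description': "nombre d'emplacements : 8\ncouvert : yes"}
print(filtre_tags(tags, champs, False))
# {"nombre d'emplacements": '8', 'couvert': 'yes'}
```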
@@ -131,15 +158,9 @@
         """Envoie la requête Overpass et retourne la réponse JSON."""
 
         overpass_query = (
-            """[out:json];
-            (
-            """
-            + critere
-            + """
-            );
-            out center;
-            """
+            f"[out:json][timeout: {str(self.timeout)}];({critere});out center;"
         )
 
         overpass_query = overpass_query.replace("aire_de_recherche", aire_de_recherche)
 
         response = requests.get(self.overpass_url, params={"data": overpass_query})

@@ -158,7 +179,9 @@
         url = self.geo_api_url + "/reverse/csv/"
 
         # création du fichier à envoyer à l'API
-        with open("tmp_geocodage.csv", "w", newline="") as tmp_csv_file:
+        with open(
+            "tmp_geocodage.csv", "w", newline="", encoding="utf-8"
+        ) as tmp_csv_file:
             csv_writer = csv.writer(tmp_csv_file)
             csv_writer.writerow(["lat", "lon"])
 

@@ -226,7 +249,7 @@
         """Traduit le champ tag des éléments de data avec dict"""
 
         for element in data["elements"]:
-            if tag in element["tags"]:
+            if tag in element["tags"] and tag in dictionnaire.keys():
                 element["tags"][tag] = dictionnaire[element["tags"][tag]]
 
         return data

@@ -254,3 +277,23 @@
             for file in os.listdir(self.dossier_resultats)
             if not os.path.isdir(self.dossier_resultats + file)
         ]
+
+    def lecture_requetes(self):
+        """Lecture des requêtes dans les fichiers de configuration"""
+
+        with open("configuration/requetes.json", encoding="utf-8") as reqs:
+            self.json_reqs = json.load(reqs)
+
+        with open(
+            "configuration/champs_generiques.json", encoding="utf-8"
+        ) as champs_generiques:
+            self.json_champs_generiques = json.load(champs_generiques)
+
+        for req in self.json_reqs:
+            self.json_reqs[req]["champs"] = dict(self.json_reqs[req]["champ_local"])
+            for champ in self.json_reqs[req]["champs_generiques"]:
+                self.json_reqs[req]["champs"].update(self.json_champs_generiques[champ])
+
+            # nettoyage
+            self.json_reqs[req].pop("champ_local", None)
+            self.json_reqs[req].pop("champs_generiques", None)
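End to end, the constructor change above means callers now pass the timeout and the configuration files are read at construction time. A hypothetical driver is sketched below; it only runs inside a checkout of this branch, with the `rdoopy` package importable and the `configuration/` files present, and the result folder name is arbitrary (the URLs are the constants from rdoo.py).

```python
# Hypothetical driver mirroring what main() now does with the reworked Utils class.
# Requires the configuration/*.json files from this branch next to the script.
from rdoopy.utils import Utils

utils = Utils(
    "http://overpass-api.de/api/interpreter",
    "https://api-adresse.data.gouv.fr",
    "resultats/",
    25,  # new timeout argument
)

for req in utils.json_reqs:  # query names from configuration/requetes.json
    print(req, "->", len(utils.json_reqs[req]["champs"]), "exportable fields")
```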