#!/usr/bin/env python3
# Copyright 2021 Olav63, SebF
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""Collections de méthodes utilitaires"""

import json
import logging
import os
import datetime
import shutil
import pathlib
import csv
from collections import OrderedDict

import requests
from pyexcel_ods3 import save_data

from osm_vc63 import errors


class Utils:
    """Utility helpers: Overpass queries, reverse geocoding, result export.

    Attributes:
        overpass_url: base URL of the Overpass API endpoint.
        geo_api_url: base URL of the address (ETALAB) API.
        dossier_resultats: directory (with trailing separator) where result
            files are written.
    """

    overpass_url: str
    geo_api_url: str
    dossier_resultats: str

    # Class-level accumulator for ODS sheets. This makes explicit what the
    # previous mutable default argument (`ods_data_sheet=OrderedDict()`) did
    # implicitly: sheets from successive save_as_ods() calls accumulate into
    # the same workbook.
    _ods_sheets: OrderedDict = OrderedDict()

    def __init__(self, overpass_url, geo_api_url, dossier_resultats):
        self.overpass_url = overpass_url
        self.geo_api_url = geo_api_url
        self.dossier_resultats = dossier_resultats

        # Tag-value translation tables (see traduction()).
        with open("traductions.json", encoding="utf-8") as trads:
            self.traductions = json.load(trads)

        self.lecture_requetes()

    def save_as_ods(self, fields, data, nom_req, ods_data_sheet=None):
        """Append *data* as a sheet named *nom_req* and save the ODS workbook.

        Args:
            fields: mapping of OSM tag -> column metadata; its keys give the
                header row and the column order.
            data: Overpass JSON response (dict with an "elements" list).
            nom_req: sheet name for this request's results.
            ods_data_sheet: optional sheets dict to update in place. Defaults
                to the shared class-level accumulator, preserving the
                historical behaviour where successive calls add sheets to the
                same workbook.
        """
        if ods_data_sheet is None:
            ods_data_sheet = self._ods_sheets

        # First row is the header (the tag names themselves).
        ods_data = [fields.keys()]

        for element in data["elements"]:
            line = []
            for field in fields.keys():
                if field in element["tags"]:
                    value = element["tags"][field]
                    # Store numeric capacities as integers so the spreadsheet
                    # can sort/aggregate them.
                    if field == "capacity" and value.isdigit():
                        line.append(int(value))
                    else:
                        line.append(value)
                else:
                    # Missing tag -> empty cell.
                    line.append("")
            ods_data.append(line)

        ods_data_sheet.update({f"{nom_req}": ods_data})

        save_data(self.dossier_resultats + "resultats.ods", ods_data_sheet)

        logging.info("Sauvegarde résultats format ODS")

    def save_as_json(self, export_json, nom_req):
        """Write *export_json* to <dossier_resultats>/<nom_req>.json."""
        # Fix: context manager guarantees the handle is closed even if the
        # serialization or the write fails (previously leaked on error).
        path = self.dossier_resultats + nom_req + ".json"
        with open(path, "w", encoding="utf-8") as json_file:
            json_file.write(json.dumps(export_json))

        logging.info("Sauvegarde résultat format JSON/OSM")

    def nettoyage_json_pour_umap(self, data, overpass_query_fields):
        """Keep only the fields flagged export_json == "Oui".

        Builds a slimmed-down copy of the Overpass response where each
        element keeps its position (lat/lon for nodes, center/nodes for ways
        and relations) and a single "description" tag concatenating the
        exported values, one "Label : value" line per tag.

        Returns:
            The filtered JSON structure, ready for uMap.
        """
        export_json = {
            "version": data["version"],
            "generator": data["generator"] + " and ETALAB API",
            "osm3s": data["osm3s"],
            "elements": [],
        }

        for element in data["elements"]:
            item = {"type": element["type"], "id": element["id"]}

            # Position: nodes carry lat/lon directly, ways/relations a center.
            if element["type"] == "node":
                item["lat"] = element["lat"]
                item["lon"] = element["lon"]
            else:
                item["center"] = element["center"]
                item["nodes"] = element["nodes"]

            # Collapse exported tags into one description blob.
            description = ""
            for tag, props in overpass_query_fields.items():
                if props["export_json"] == "Oui" and tag in element["tags"]:
                    # Prefix with the French label when one is defined.
                    if props["FR"] != "":
                        description += props["FR"] + " : "
                    description += str(element["tags"][tag]) + "\n"

            item["tags"] = {"description": description}
            export_json["elements"].append(item)

        return export_json

    def run_overpass_query(self, critere, aire_de_recherche):
        """Send the Overpass query for *critere* and return the JSON response.

        The query template contains the literal placeholder
        "aire_de_recherche", substituted with the actual search-area id.

        Raises:
            errors.OverpassError: when the HTTP status is not 200.
        """
        overpass_query = (
            """[out:json];
            (
            """
            + critere
            + """
            );
            out center;
            """
        )
        overpass_query = overpass_query.replace("aire_de_recherche", aire_de_recherche)

        response = requests.get(self.overpass_url, params={"data": overpass_query})

        if response.status_code != 200:
            raise errors.OverpassError(response.status_code)

        return response.json()

    def geocodage_csv(self, data):
        """Reverse-geocode every element of *data* in one batch CSV call.

        Sends all coordinates to the address API's /reverse/csv/ endpoint,
        then copies the returned address fields into each element's tags.

        Raises:
            errors.GeoApiError: when the API response status is not 200.

        Returns:
            *data*, enriched in place.
        """
        url = self.geo_api_url + "/reverse/csv/"

        # Build the CSV payload expected by the API: one lat/lon per element.
        with open("tmp_geocodage.csv", "w", newline="") as tmp_csv_file:
            csv_writer = csv.writer(tmp_csv_file)
            csv_writer.writerow(["lat", "lon"])
            for element in data["elements"]:
                if element["type"] == "node":
                    csv_writer.writerow([element["lat"], element["lon"]])
                else:
                    csv_writer.writerow(
                        [element["center"]["lat"], element["center"]["lon"]]
                    )

        # Fix: close the upload handle via a context manager (it was leaked).
        with open("tmp_geocodage.csv", "rb") as tmp_csv_file:
            payload = {"data": ("tmp_geocodage.csv", tmp_csv_file.read())}
        response = requests.post(url, files=payload)

        # Cleanup the temp file before checking the status.
        os.remove("tmp_geocodage.csv")

        if response.status_code != 200:
            raise errors.GeoApiError(response.status_code)

        # Fix: index response rows by (lat, lon) once instead of re-parsing
        # the whole CSV for every element (was O(n^2)). Last row wins for
        # duplicate coordinates, matching the previous per-element scan.
        rows_by_coord = {}
        for row in csv.DictReader(response.text.splitlines()):
            rows_by_coord[(row["lat"], row["lon"])] = row

        # Copy address fields onto each element's tags.
        for element in data["elements"]:
            if element["type"] == "node":
                key = (str(element["lat"]), str(element["lon"]))
            else:
                key = (str(element["center"]["lat"]), str(element["center"]["lon"]))

            row = rows_by_coord.get(key)
            if row is None:
                continue

            tags = element["tags"]
            tags["api_adresse:geometry:coordinates:lon"] = row["result_longitude"]
            tags["api_adresse:geometry:coordinates:lat"] = row["result_latitude"]
            tags["api_adresse:properties:label"] = row["result_label"]
            tags["api_adresse:properties:housenumber"] = row["result_housenumber"]
            tags["api_adresse:properties:type"] = row["result_type"]
            tags["api_adresse:properties:name"] = row["result_name"]
            tags["api_adresse:properties:postcode"] = row["result_postcode"]
            tags["api_adresse:properties:citycode"] = row["result_citycode"]
            tags["api_adresse:properties:city"] = row["result_city"]
            tags["api_adresse:properties:street"] = row["result_street"]

        logging.info("Géocodage inversé terminé")

        return data

    def traduction(self, tag, dictionnaire, data):
        """Translate the value of *tag* in every element of *data*.

        NOTE(review): raises KeyError when a tag value is missing from
        *dictionnaire* — kept as-is so unexpected OSM values fail loudly
        instead of silently passing through untranslated.
        """
        for element in data["elements"]:
            if tag in element["tags"]:
                element["tags"][tag] = dictionnaire[element["tags"][tag]]

        return data

    def archivage(self, dossier_archive):
        """Move previous result files into a dated archive folder.

        The folder is named after the date of the existing resultats.ods;
        an already-existing archive for that date is replaced (one archive
        per date). Does nothing when there is no previous resultats.ods.
        """
        fichier = pathlib.Path(self.dossier_resultats + "resultats.ods")

        if not fichier.exists():
            return

        # NOTE(review): st_ctime is inode-change time on Unix, not creation
        # time — assumed close enough for daily archiving; confirm if exact
        # creation dates matter.
        date_fichier = datetime.date.fromtimestamp(fichier.stat().st_ctime)

        archive = dossier_archive + str(date_fichier)

        # Only one archive per date: replace any existing one.
        if os.path.isdir(archive):
            shutil.rmtree(archive)
        os.makedirs(archive)

        # Fix: plain loop instead of a list comprehension used only for its
        # side effects (removes the need for `# pylint: disable=W0106`).
        for file in os.listdir(self.dossier_resultats):
            if not os.path.isdir(self.dossier_resultats + file):
                shutil.move(self.dossier_resultats + file, archive)

    def lecture_requetes(self):
        """Load the request definitions from the JSON configuration files.

        Builds, for each request, a "champs" dict merging its local fields
        ("champ_local") with the referenced generic field groups
        ("champs_generiques"), then drops the merged source keys.
        """
        with open("requetes.json", encoding="utf-8") as reqs:
            self.json_reqs = json.load(reqs)

        with open("champs_generiques.json", encoding="utf-8") as champs_generiques:
            self.json_champs_generiques = json.load(champs_generiques)

        for req in self.json_reqs:
            self.json_reqs[req]["champs"] = dict(self.json_reqs[req]["champ_local"])
            for champ in self.json_reqs[req]["champs_generiques"]:
                self.json_reqs[req]["champs"].update(self.json_champs_generiques[champ])

            # Drop the now-merged source keys.
            self.json_reqs[req].pop("champ_local", None)
            self.json_reqs[req].pop("champs_generiques", None)