#!/usr/bin/env python3
# Copyright 2021 Olav63, SebF
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""
|
|
Module principal :
|
|
- récupération de données par appel à Overpass
|
|
- géocodage inverse
|
|
- export des données en JSON pour utilisation avec umap
|
|
- sauvegarde des données en ods
|
|
"""

import time
import os
import argparse
import logging
import sys

from rdoopy import errors
from rdoopy.utils import Utils

OVERPASS_URL = "http://overpass-api.de/api/interpreter"
GEO_API_URL = "https://api-adresse.data.gouv.fr"
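# GEO_API_URL is the French national address API ("API Adresse" / Base Adresse
# Nationale), used here for the reverse-geocoding step.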

# maximum number of retries after an API failure
MAX_RETRY = 4

# delay in seconds between retry attempts
RETRY_DELAY = 120

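# Note: each Overpass query is attempted up to MAX_RETRY times in main(), with a
# pause of RETRY_DELAY seconds between attempts.
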
def init_argparse() -> argparse.ArgumentParser:
    """Define the accepted command-line arguments."""

    parser = argparse.ArgumentParser(
        usage="%(prog)s [OPTIONS] ...",
        description="Exporte les données de la Cyclosphère d'une zone géographique.",
    )

    parser.add_argument(
        "-z",
        "--zone",
        type=int,
        help="Choisir la zone géographique à inspecter.",
        default=7406,
    )

    parser.add_argument(
        "-gi",
        "--geocoding-inverse",
        dest="geocoding_inverse",
        action="store_true",
        help="Activer le géocodage inverse",
    )
    parser.add_argument(
        "-ngi",
        "--no-geocoding-inverse",
        dest="geocoding_inverse",
        action="store_false",
        help="Désactiver le géocodage inverse",
    )
    parser.set_defaults(geocoding_inverse=True)

    parser.add_argument(
        "-l", "--log-level", type=str, help="Définir le niveau de log.", default="INFO"
    )

    parser.add_argument(
        "-r",
        "--dossier-resultats",
        type=str,
        help="Définir le dossier des résultats",
        default="resultats/",
    )

    parser.add_argument(
        "-a",
        "--dossier-archive",
        type=str,
        help="Définir le dossier d'archive",
        default="resultats/archives/",
    )

    parser.add_argument(
        "-na",
        "--no-archivage",
        dest="archivage",
        action="store_false",
        help="Désactiver l'archivage",
    )
    parser.set_defaults(archivage=True)

    parser.add_argument(
        "-nc",
        "--no-concatenation",
        dest="concatenation",
        action="store_false",
        help="Désactiver la concaténation des tags dans l'export json",
    )
    parser.set_defaults(concatenation=True)

    parser.add_argument(
        "-t", "--timeout", type=int, help="Définir le temps de timeout.", default=25
    )

    return parser

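
# A quick way to exercise the parser programmatically (argument values are
# illustrative only):
#   args = init_argparse().parse_args(["--zone", "7406", "--no-archivage"])
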
def main():
    """Main routine."""

    parser = init_argparse()
    args = parser.parse_args()

    logging.basicConfig(
        format="%(asctime)s [%(levelname)s] %(message)s",
        level=getattr(logging, args.log_level.upper()),
        handlers=[
            logging.FileHandler("rdoo.log", encoding="utf-8"),
            logging.StreamHandler(sys.stdout),
        ],
    )

    utils = Utils(OVERPASS_URL, GEO_API_URL, args.dossier_resultats, args.timeout)

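    # optionally move the previous run's output files into the archive folder
    # (behaviour assumed from Utils.archivage)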
    if args.archivage:
        utils.archivage(args.dossier_archive)

    logging.info("##### Nouvelle récupération ######")

    # the Overpass area id is obtained by adding 3600000000 to the OSM object id
    aire_de_recherche = str(3_600_000_000 + args.zone)
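    # e.g. with the default zone 7406, the resulting area id is "3600007406"

    # utils.json_reqs is assumed to map each query name to its Overpass request
    # and the list of fields ("champs") to export, as used in the loop below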
    for req in utils.json_reqs:
        for nb_essai in range(MAX_RETRY):  # try at most MAX_RETRY times
            try:
                logging.info(f"# Requête en cours : {req}")

                # Overpass call
                data = utils.run_overpass_query(
                    utils.json_reqs[req]["overpass"], aire_de_recherche
                )
                nb_resultats = len(data["elements"])
                logging.info(f"{nb_resultats} résultats")

                if nb_resultats > 0:
                    if args.geocoding_inverse:
                        # reverse geocoding
                        data = utils.geocodage_csv(data)

                    # translation
                    for key in utils.traductions:
                        data = utils.traduction(key, utils.traductions[key], data)

                    # save the results
                    os.makedirs(args.dossier_resultats, exist_ok=True)
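                    # export_json_pour_umap is assumed to build the uMap-ready
                    # JSON mentioned in the module docstring from the "champs" fields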
                    export_json = utils.export_json_pour_umap(
                        data, utils.json_reqs[req]["champs"], args.concatenation
                    )

                    utils.save_as_json(export_json, req)
                    utils.save_as_ods(utils.json_reqs[req]["champs"], data, req)

                # go easy on the Overpass servers
                time.sleep(30)
                break
            except errors.ApiError:
                if nb_essai >= MAX_RETRY - 1:
                    logging.error("Trop d'erreurs d'API - abandon")
                    sys.exit()

                logging.error(f"Erreur API - on retente dans {RETRY_DELAY}s")
            except Exception:
                logging.error("Exception", stack_info=True, exc_info=True)

            time.sleep(RETRY_DELAY)

    logging.info("##### Terminé #####")


if __name__ == "__main__":
    main()