Compare commits

...

23 Commits

Author SHA1 Message Date
daf711de2c temporisation sur Overpass 2021-10-10 19:53:43 +02:00
60078c9284 inutile de poursuivre s'il n'y a pas de résultats 2021-10-10 19:24:18 +02:00
4d05328ea3 pylint et print de log 2021-10-10 18:54:19 +02:00
e6b432af4e ajout d'une barre de progression 2021-10-10 18:53:37 +02:00
6d288c1672 extraction du géocodage et traduction vers utils 2021-10-10 17:55:26 +02:00
b5ab062b3e pylint sur requetes 2021-10-10 16:53:12 +02:00
af09edea2b pylint sur fichier principal 2021-10-10 16:50:27 +02:00
1c535dd9b9 pylint sur utils 2021-10-10 16:35:27 +02:00
29d2e60299 renommage des méthodes de sauvegarde 2021-10-10 16:32:16 +02:00
5dfa477ca7 pylint sur les erreurs 2021-10-10 16:31:54 +02:00
7cfe3d3327 formattage avec Black 2021-10-10 10:56:13 +02:00
16ed9aa07b déplacement des méthodes de requête dans la classe Save renommée en Utils 2021-10-10 10:55:15 +02:00
85eb60333a déplacement de la méthode de nettoyage json avec les méthodes de sauvegarde 2021-10-09 20:47:09 +02:00
bcb9b7e9b2 méthodes de sauvegarde déplacées dans une classe 2021-10-09 20:40:50 +02:00
27aea631bf déplacement de logique dans la méthode de requêtage overpass 2021-10-09 15:28:41 +02:00
f97ffdb6d9 extraction du nettoyage du JSON pour export 2021-10-09 15:23:03 +02:00
a78342fe73 extraction de méthode sauvegarde json 2021-10-09 15:10:36 +02:00
6ba6577661 commentaire 2021-10-09 14:56:40 +02:00
feeb17d592 géocodage inversé commenté pour alléger les tests 2021-10-09 14:51:13 +02:00
72135e0f36 déplacement et commentaire des méthodes 2021-10-09 14:49:13 +02:00
eeac2f26aa création du main 2021-10-09 14:31:01 +02:00
3a786f0f68 correction requête 2021-10-09 11:55:00 +02:00
e84075e017 extraction de la méthode de sauvegarde ods 2021-10-09 11:35:06 +02:00
5 changed files with 426 additions and 326 deletions

View File

@ -1,7 +1,10 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
"""Errors module"""
class Api_error(Exception): class ApiError(Exception):
"""Api exception"""
def __init__(self, http_code, message="erreur appel API"): def __init__(self, http_code, message="erreur appel API"):
self.http_code = http_code self.http_code = http_code
self.message = message self.message = message
@ -11,9 +14,13 @@ class Api_error(Exception):
return f"{self.http_code} -> {self.message}" return f"{self.http_code} -> {self.message}"
class Overpass_error(Api_error): class OverpassError(ApiError):
"""Overpass exception"""
pass pass
class Geo_api_error(Api_error): class GeoApiError(ApiError):
"""GeoApi exception"""
pass pass

View File

@ -1,7 +1,10 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
"""Module des requêtes"""
class requete: class Requete:
"""Objet requête"""
nom: str nom: str
critere: str critere: str
champs: dict champs: dict
@ -12,8 +15,8 @@ class requete:
self.champs = champs self.champs = champs
reqs = [] REQS = []
champs_stationnement = { CHAMPS_STATIONNEMENT = {
"amenity": {"export_json": "Non", "FR": "aménagement"}, "amenity": {"export_json": "Non", "FR": "aménagement"},
"capacity": {"export_json": "Oui", "FR": "nombre d'emplacements"}, "capacity": {"export_json": "Oui", "FR": "nombre d'emplacements"},
"access": {"export_json": "Oui", "FR": "accès"}, "access": {"export_json": "Oui", "FR": "accès"},
@ -25,7 +28,7 @@ champs_stationnement = {
"check_date:capacity": {"export_json": "Non", "FR": "date_vérification"}, "check_date:capacity": {"export_json": "Non", "FR": "date_vérification"},
"source": {"export_json": "Non", "FR": "source"}, "source": {"export_json": "Non", "FR": "source"},
} }
champs_poi = { CHAMPS_POI = {
"name": {"export_json": "Oui", "FR": ""}, "name": {"export_json": "Oui", "FR": ""},
"description": {"export_json": "Oui", "FR": ""}, "description": {"export_json": "Oui", "FR": ""},
"website": {"export_json": "Oui", "FR": ""}, "website": {"export_json": "Oui", "FR": ""},
@ -41,8 +44,7 @@ champs_poi = {
"office": {"export_json": "Oui", "FR": "Bureau"}, "office": {"export_json": "Oui", "FR": "Bureau"},
"opening_hours": {"export_json": "Oui", "FR": "Horaires"}, "opening_hours": {"export_json": "Oui", "FR": "Horaires"},
} }
# fields api_adresse (issus du géocodage inversé) CHAMPS_ADRESSE = {
champs_adresse = {
"api_adresse:geometry:coordinates:lon": { "api_adresse:geometry:coordinates:lon": {
"export_json": "Non", "export_json": "Non",
"FR": "lon_adresse_etalab", "FR": "lon_adresse_etalab",
@ -71,73 +73,75 @@ champs_adresse = {
"api_adresse:properties:street": {"export_json": "Non", "FR": "rue_etalab"}, "api_adresse:properties:street": {"export_json": "Non", "FR": "rue_etalab"},
} }
reqs.append( REQS.append(
requete( Requete(
"stationnements_velos_publics", "stationnements_velos_publics",
# pylint: disable=C0301
r'nwr["amenity"="bicycle_parking"](area:aire_de_recherche); - nwr["amenity"="bicycle_parking"]["access"~"(no|permit|private|customers)"](area:aire_de_recherche);', r'nwr["amenity"="bicycle_parking"](area:aire_de_recherche); - nwr["amenity"="bicycle_parking"]["access"~"(no|permit|private|customers)"](area:aire_de_recherche);',
dict(champs_stationnement, **champs_adresse), dict(CHAMPS_STATIONNEMENT, **CHAMPS_ADRESSE),
) )
) )
reqs.append( REQS.append(
requete( Requete(
"stationnements_velos_non_publics", "stationnements_velos_non_publics",
# pylint: disable=C0301
r'nwr["amenity"="bicycle_parking"]["access"~"(no|permit|private|customers)"](area:aire_de_recherche);', r'nwr["amenity"="bicycle_parking"]["access"~"(no|permit|private|customers)"](area:aire_de_recherche);',
dict(champs_stationnement, **champs_adresse), dict(CHAMPS_STATIONNEMENT, **CHAMPS_ADRESSE),
) )
) )
champ_local = {"service:bicycle:diy": {"export_json": "Non", "FR": ""}} CHAMP_LOCAL = {"service:bicycle:diy": {"export_json": "Non", "FR": ""}}
reqs.append( REQS.append(
requete( Requete(
"ateliers_autoreparation", "ateliers_autoreparation",
r'nwr["service:bicycle:diy"="yes"](area:aire_de_recherche);', r'nwr["service:bicycle:diy"="yes"](area:aire_de_recherche);',
dict(champ_local, **champs_poi, **champs_adresse), dict(CHAMP_LOCAL, **CHAMPS_POI, **CHAMPS_ADRESSE),
) )
) )
champ_local = {"association": {"export_json": "Non", "FR": ""}} CHAMP_LOCAL = {"association": {"export_json": "Non", "FR": ""}}
reqs.append( REQS.append(
requete( Requete(
"associations_velo", "associations_velo",
r'nwr["association"="bicycle"](area:aire_de_recherche);', r'nwr["association"="bicycle"](area:aire_de_recherche);',
dict(champ_local, **champs_poi, **champs_adresse), dict(CHAMP_LOCAL, **CHAMPS_POI, **CHAMPS_ADRESSE),
) )
) )
champ_local = {"craft": {"export_json": "Non", "FR": ""}} CHAMP_LOCAL = {"craft": {"export_json": "Non", "FR": ""}}
reqs.append( REQS.append(
requete( Requete(
"fabriquants_velo", "fabriquants_velo",
r'nwr["craft"="bicycle"](area:aire_de_recherche);', r'nwr["craft"="bicycle"](area:aire_de_recherche);',
dict(champ_local, **champs_poi, **champs_adresse), dict(CHAMP_LOCAL, **CHAMPS_POI, **CHAMPS_ADRESSE),
) )
) )
champ_local = {"shop": {"export_json": "Non", "FR": ""}} CHAMP_LOCAL = {"shop": {"export_json": "Non", "FR": ""}}
reqs.append( REQS.append(
requete( Requete(
"vendeurs_velo", "vendeurs_velo",
r'nwr["shop"="bicycle"](area:aire_de_recherche); + nwr["service:bicycle:retail"="yes"](area:aire_de_recherche)', # pylint: disable=C0301
dict(champ_local, **champs_poi, **champs_adresse), r'nwr["shop"="bicycle"](area:aire_de_recherche); nwr["service:bicycle:retail"="yes"](area:aire_de_recherche);',
dict(CHAMP_LOCAL, **CHAMPS_POI, **CHAMPS_ADRESSE),
) )
) )
champ_local = {"amenity": {"export_json": "Non", "FR": ""}} CHAMP_LOCAL = {"amenity": {"export_json": "Non", "FR": ""}}
reqs.append( REQS.append(
requete( Requete(
"velos_libre_service", "velos_libre_service",
r'nwr["amenity"="bicycle_rental"](area:aire_de_recherche);', r'nwr["amenity"="bicycle_rental"](area:aire_de_recherche);',
dict(champ_local, **champs_poi, **champs_adresse), dict(CHAMP_LOCAL, **CHAMPS_POI, **CHAMPS_ADRESSE),
) )
) )
champ_local = {"service:bicycle:rental": {"export_json": "Non", "FR": ""}} CHAMP_LOCAL = {"service:bicycle:rental": {"export_json": "Non", "FR": ""}}
reqs.append( REQS.append(
requete( Requete(
"location_velo", "location_velo",
r'nwr["service:bicycle:rental"="yes"](area:aire_de_recherche);', r'nwr["service:bicycle:rental"="yes"](area:aire_de_recherche);',
dict(champ_local, **champs_poi, **champs_adresse), dict(CHAMP_LOCAL, **CHAMPS_POI, **CHAMPS_ADRESSE),
) )
) )

251
osm_vc63/utils.py Normal file
View File

@ -0,0 +1,251 @@
#!/usr/bin/env python3
"""Collections de méthodes utilitaires"""
import json
from collections import OrderedDict
import requests
from pyexcel_ods3 import save_data
from osm_vc63 import errors
class Utils:
    """Utility helpers: Overpass querying, reverse geocoding, tag
    translation, and export of the results as Umap-ready JSON and ODS."""

    overpass_url: str
    geo_api_url: str
    dossier_sauvegarde: str

    def __init__(self, overpass_url, geo_api_url, dossier_sauvegarde):
        self.overpass_url = overpass_url
        self.geo_api_url = geo_api_url
        self.dossier_sauvegarde = dossier_sauvegarde

    def save_as_ods(self, fields, data, nom_req):
        """Save the tags of every element of *data* in an ODS workbook.

        One column per key of *fields*, one row per element of
        data["elements"]; tags missing on an element yield empty cells.
        The file is written to <dossier_sauvegarde><nom_req>.ods.
        """
        ods_data_sheet = OrderedDict()
        ods_data = [fields.keys()]  # header row

        for element in data["elements"]:
            line = []
            for field in fields.keys():
                if field in element["tags"]:
                    if field == "capacity":
                        # keep numeric capacities as integers so the
                        # spreadsheet can sort and sum them
                        val = element["tags"][field]
                        line.append(int(val) if val.isdigit() else val)
                    else:
                        line.append(element["tags"][field])
                else:
                    line.append("")
            ods_data.append(line)

        ods_data_sheet.update({"resultats": ods_data})

        save_data(self.dossier_sauvegarde + nom_req + ".ods", ods_data_sheet)

        print("Sauvegarde résultats format ODS")

    def save_as_json(self, export_json, nom_req):
        """Write *export_json* to <dossier_sauvegarde><nom_req>.json."""
        # context manager guarantees the handle is closed even if the
        # serialization or the write fails (original leaked it on error)
        with open(self.dossier_sauvegarde + nom_req + ".json", "w") as json_file:
            json_file.write(json.dumps(export_json))

        print("Sauvegarde résultat format JSON/OSM")

    def nettoyage_json_pour_umap(self, data, overpass_query_fields):
        """Build a lightweight copy of *data* suitable for a Umap layer.

        Keeps only the position information plus a single "description"
        tag assembled from the fields flagged export_json == "Oui" in
        *overpass_query_fields*.
        """
        export_json = {
            "version": data["version"],
            "generator": data["generator"] + " and ETALAB API",
            "osm3s": data["osm3s"],
            "elements": [],
        }

        for element in data["elements"]:
            export_element = {"type": element["type"], "id": element["id"]}

            # element position
            if element["type"] == "node":
                export_element["lat"] = element["lat"]
                export_element["lon"] = element["lon"]
            else:  # ways and relations carry a computed center
                export_element["center"] = element["center"]
                export_element["nodes"] = element["nodes"]

            # tag filtering: one human-readable description per element
            description = ""
            for tag in overpass_query_fields.keys():
                if overpass_query_fields[tag]["export_json"] == "Oui":
                    if tag in element["tags"]:
                        if overpass_query_fields[tag]["FR"] != "":
                            description = (
                                description + overpass_query_fields[tag]["FR"] + " : "
                            )
                        description = description + str(element["tags"][tag]) + "\n"

            export_element["tags"] = {"description": description}
            export_json["elements"].append(export_element)

        return export_json

    def run_overpass_query(self, critere, aire_de_recherche):
        """Send the Overpass request built from *critere* and return the
        decoded JSON response.

        Raises errors.OverpassError on any non-200 HTTP status.
        """
        overpass_query = (
            """[out:json];
            (
            """
            + critere
            + """
            );
            out center;
            """
        )
        # the criteria contain the literal placeholder "aire_de_recherche"
        overpass_query = overpass_query.replace("aire_de_recherche", aire_de_recherche)

        response = requests.get(self.overpass_url, params={"data": overpass_query})

        if response.status_code != 200:
            raise errors.OverpassError(response.status_code)

        return response.json()

    def run_reverse_geocoding(self, lat, lon):
        """Return the JSON address of a GPS position via the geo API.

        Raises errors.GeoApiError on any non-200 HTTP status.
        """
        url = self.geo_api_url + "/reverse/"

        response = requests.get(url, params={"lon": str(lon), "lat": str(lat)})

        if response.status_code != 200:
            raise errors.GeoApiError(response.status_code)

        return response.json()

    # TODO: optimise by calling the bulk /reverse/csv/ service instead of
    # the unitary /reverse/ service
    def geocodage(self, data):
        """Attach an address (api_adresse:* tags) to every element of *data*.

        Uses the node coordinates, or the center for ways and relations.
        """
        for element in self.progress_bar(data["elements"], prefix="Géocodage"):
            if element["type"] == "node":
                rev_geocode = self.run_reverse_geocoding(element["lat"], element["lon"])
            else:
                rev_geocode = self.run_reverse_geocoding(
                    element["center"]["lat"], element["center"]["lon"]
                )

            # NOTE(review): assumes the API always returns at least one
            # feature — an empty "features" list would raise IndexError
            api_adresse = rev_geocode["features"][0]

            tags = element["tags"]
            tags["api_adresse:geometry:coordinates:lon"] = api_adresse["geometry"][
                "coordinates"
            ][0]
            tags["api_adresse:geometry:coordinates:lat"] = api_adresse["geometry"][
                "coordinates"
            ][1]

            proprietes = api_adresse["properties"]
            # properties always present in the API answer
            for prop in ("label", "score", "type", "name", "postcode", "citycode", "city"):
                tags["api_adresse:properties:" + prop] = proprietes[prop]
            # optional properties
            for prop in ("housenumber", "street"):
                if prop in proprietes:
                    tags["api_adresse:properties:" + prop] = proprietes[prop]

            tags["api_adresse:properties:attribution"] = rev_geocode["attribution"]
            tags["api_adresse:properties:licence"] = rev_geocode["licence"]

        return data

    def traduction(self, tag, dictionnaire, data):
        """Translate the value of *tag* on every element of *data*.

        Values absent from *dictionnaire* are now left unchanged instead
        of raising a KeyError (OSM tag values are free-form text).
        """
        for element in data["elements"]:
            if tag in element["tags"]:
                valeur = element["tags"][tag]
                element["tags"][tag] = dictionnaire.get(valeur, valeur)

        return data

    def progress_bar(
        self,
        iterable,
        decimals=1,
        length=50,
        prefix="",
        fill="",
        print_end="\r",
    ):
        """Yield the items of *iterable* while drawing a terminal progress bar.

        iterable  -- collection to iterate (must support len())
        decimals  -- number of decimals of the percentage
        length    -- bar width in characters
        prefix    -- text printed before the bar
        fill      -- bar fill character.  NOTE(review): an empty default
                     renders only dashes; a glyph such as "█" may have been
                     lost in transit — confirm intent
        print_end -- line terminator (e.g. "\\r" to redraw in place)
        """
        total = len(iterable)

        if total == 0:
            # nothing to iterate: draw no bar at all
            return

        # initial, empty bar
        print(f"\r{prefix} |{'-' * length}| {0}%", end=print_end)

        for i, item in enumerate(iterable):
            yield item
            percent = ("{0:." + str(decimals) + "f}").format(
                100 * ((i + 1) / float(total))
            )
            filled = int(length * (i + 1) // total)
            progress = fill * filled + "-" * (length - filled)
            print(f"\r{prefix} |{progress}| {percent}%", end=print_end)

        # newline once complete
        print()

View File

@ -1,285 +0,0 @@
#!/usr/bin/env python3
# inspiration :
# https://towardsdatascience.com/loading-data-from-openstreetmap-with-python-and-the-overpass-api-513882a27fd0
# https://geo.api.gouv.fr/adresse
# https://wiki.cartocite.fr/doku.php?id=umap:10_-_je_valorise_les_donnees_openstreetmap_avec_umap
# https://sites-formations.univ-rennes2.fr/mastersigat/Cours/Intro_Overpass.pdf
# usage des tags : https://taginfo.openstreetmap.org/tags/?key=amenity&value=bicycle_parking#combinations
# exemple URL données pour umap : https://www.velocite63.fr/velocite63/OSM/stationnements_velos_publics.json
# penser à cocher "proxy" dans la rubrique "données distantes" du calque
# export ODS :
# https://pythonhosted.org/pyexcel-ods/
# pip3 install pyexcel-ods3
import requests
import json
import time
from pyexcel_ods3 import save_data
from collections import OrderedDict
import os
from osm_vc63 import errors
from osm_vc63 import requetes
overpass_url="http://overpass-api.de/api/interpreter"
geo_api_url = "https://api-adresse.data.gouv.fr"

dossier_sauvegarde = "resultats/"

# maximum number of retries after an API failure
max_retry = 4

# delay in seconds between two attempts
retry_delay = 120

# OSM object ids:
# "Puy de Dôme" département: 7406
# Riom: 1693144
# Clermont: 110866
# Romagnat: 138269
# an Overpass area id is the OSM object id plus 3600000000
aire_de_recherche = str(3600000000+110866)

# ----------------------------------------------

# French display names for the values of the OSM "bicycle_parking" tag
trad_bicycle_parking = {
    "stands": "Arceaux",
    "wall_loops": "Pince roues",
    "rack": "Râteliers",
    "anchors": "Ancrage",
    "shed": "Abri collectif",
    "bollard": "Potelet",
    "lockers": "Abris individuels",
    "wide_stands": "Arceaux espacés",
    "ground_slots": "Fente dans le sol",
    "building": "Bâtiment",
    "informal": "Informel",
    "wave": "Râteliers",
    "streetpod": "Arceaux",
    "tree": "Arbre à bicyclettes",
    "crossbar": "Barre",
    "rope": "Câble",
    "two-tier": "Deux étages",
    "floor": "Sol",
    "handlebar_holder": "Accroche-guidons"}
# ----------------------------------------------
def run_overpass_query(query):
    """Send *query* to the Overpass endpoint and return the decoded JSON.

    Raises errors.Overpass_error on any non-200 HTTP status.
    """
    reponse = requests.get(overpass_url, params={"data": query})
    if reponse.status_code != 200:
        raise errors.Overpass_error(reponse.status_code)
    return reponse.json()
def run_reverse_geocoding(lat, lon):
    """Reverse-geocode a GPS position through the address API.

    Raises errors.Geo_api_error on any non-200 HTTP status.
    """
    parametres = {"lon": str(lon), "lat": str(lat)}
    reponse = requests.get(geo_api_url + "/reverse/", params=parametres)
    if reponse.status_code != 200:
        raise errors.Geo_api_error(reponse.status_code)
    return reponse.json()
# ----------------------------------------------
def executer_requete_et_exporter_resultats(nom_req, critere, aire_de_recherche, overpass_query_fields) :
    """Run one Overpass request, reverse-geocode and translate the results,
    then export them as Umap-ready JSON and as an ODS workbook.

    nom_req               -- request name, used as the output file basename
    critere               -- Overpass criterion containing the literal
                             placeholder "aire_de_recherche"
    aire_de_recherche     -- Overpass area id substituted into the query
    overpass_query_fields -- dict describing each tag to export
    """

    print ("Nom requête : "+nom_req)

    overpass_query = """[out:json];
(
"""+critere+"""
);
out center;
"""

    # the criteria contain the literal placeholder "aire_de_recherche"
    overpass_query = overpass_query.replace("aire_de_recherche", aire_de_recherche)

    print("Execution requete overpass : \n"+overpass_query)

    data = run_overpass_query(overpass_query)

    nb_elements = len(data["elements"])

    print("Nombre d'elements : "+str(nb_elements))

    print("Géocodage inversé : ", end="", flush=True)

    # TODO : optimise with one call to the bulk /reverse/csv/ service
    # instead of the unitary /reverse/ service
    for element in data["elements"]:

        # reverse geocoding from the node position (center for ways/relations)
        if (element["type"] == "node") :
            rev_geocode = run_reverse_geocoding(element["lat"], element["lon"])
        else :
            rev_geocode = run_reverse_geocoding(element["center"]["lat"], element["center"]["lon"])

        # NOTE(review): assumes the API always returns at least one feature
        api_adresse = rev_geocode["features"][0]

        element["tags"]["api_adresse:geometry:coordinates:lon"] = api_adresse["geometry"]["coordinates"][0]
        element["tags"]["api_adresse:geometry:coordinates:lat"] = api_adresse["geometry"]["coordinates"][1]
        element["tags"]["api_adresse:properties:label"] = api_adresse["properties"]["label"]
        element["tags"]["api_adresse:properties:score"] = api_adresse["properties"]["score"]
        if ("housenumber" in api_adresse["properties"]) :
            element["tags"]["api_adresse:properties:housenumber"] = api_adresse["properties"]["housenumber"]
        element["tags"]["api_adresse:properties:type"] = api_adresse["properties"]["type"]
        element["tags"]["api_adresse:properties:name"] = api_adresse["properties"]["name"]
        element["tags"]["api_adresse:properties:postcode"] = api_adresse["properties"]["postcode"]
        element["tags"]["api_adresse:properties:citycode"] = api_adresse["properties"]["citycode"]
        element["tags"]["api_adresse:properties:city"] = api_adresse["properties"]["city"]
        if ("street" in api_adresse["properties"]) :
            element["tags"]["api_adresse:properties:street"] = api_adresse["properties"]["street"]
        element["tags"]["api_adresse:properties:attribution"] = rev_geocode["attribution"]
        element["tags"]["api_adresse:properties:licence"] = rev_geocode["licence"]

        # translation of bicycle_parking values; prints one "X" per hit
        # as a crude progress indicator
        if "bicycle_parking" in element["tags"]:
            element["tags"]["bicycle_parking"] = trad_bicycle_parking[element["tags"]["bicycle_parking"]]
            print("X", end="", flush=True)
        #else :
        #    print("-", end="", flush=True)

    print()

    print("Sauvegarde résultat format JSON/OSM")

    export_json = {"version": data["version"],
        "generator" : data["generator"] + " and ETALAB API",
        "osm3s" : data["osm3s"],
        "elements": []
        }

    index_line = 0

    # rebuild a lightweight JSON with only the data shown on the UMAP map
    for element in data["elements"]:

        export_json["elements"].append({"type" : element["type"],
            "id" : element["id"]})

        # element position: nodes carry lat/lon, ways/relations a center
        if (element["type"] == "node") :
            export_json["elements"][index_line]["lat"] = element["lat"]
            export_json["elements"][index_line]["lon"] = element["lon"]
        else :
            export_json["elements"][index_line]["center"] = element["center"]
            export_json["elements"][index_line]["nodes"] = element["nodes"]

        #export_json["elements"][index_line]["tags"] = element["tags"]

        # single "description" tag built from fields flagged export_json "Oui"
        description = ""
        for tag in overpass_query_fields.keys() :
            if overpass_query_fields[tag]["export_json"] == "Oui" :
                if tag in element["tags"] :
                    if overpass_query_fields[tag]["FR"] != "" :
                        description = description + overpass_query_fields[tag]["FR"] + " : "

                    description = description + str(element["tags"][tag]) + "\n"

        export_json["elements"][index_line]["tags"] = {"description": description}

        index_line = index_line + 1

    # print (json.dumps(export_json))

    os.makedirs(dossier_sauvegarde, exist_ok = True)

    jsonFile = open(dossier_sauvegarde + nom_req+".json", "w")
    jsonFile.write(json.dumps(export_json))
    jsonFile.close()

    # ===========================================

    print("Sauvegarde résultats format ODS")

    ODSdataSheet = OrderedDict()

    ODSdata = []

    # header row: one column per exported field
    ODSdata.append(overpass_query_fields.keys())

    index_line = 2

    for element in data["elements"]:

        line = []
        index_col = 0

        # if (element["type"] == "node") :

        for field in overpass_query_fields.keys() :
            if (field in element["tags"]) :
                # keep numeric capacities as integers for the spreadsheet
                if field == "capacity":
                    val = element["tags"][field]
                    line.append(int(val) if val.isdigit() else val)
                else :
                    line.append(element["tags"][field])
            else :
                line.append("")

            index_col = index_col + 1

        ODSdata.append(line)

        index_line = index_line + 1

    ODSdataSheet.update({"resultats": ODSdata})

    save_data(dossier_sauvegarde + nom_req+".ods", ODSdataSheet)
# run every configured request, retrying each at most max_retry times
for req in requetes.reqs :
    for nb_essai in range(max_retry):
        try:
            executer_requete_et_exporter_resultats(req.nom, req.critere, aire_de_recherche, req.champs)
            break
        except errors.Api_error:
            # bug fix: range(max_retry) stops at max_retry - 1, so the
            # original `nb_essai == max_retry` test could never be true and
            # the script silently moved on after exhausting the retries
            if nb_essai == max_retry - 1:
                print("trop d'erreurs d'API - abandon")
                # nonzero exit code so callers can detect the failure
                raise SystemExit(1)
            print("erreur API - on retente dans "+str(retry_delay)+"s")
            time.sleep(retry_delay)

print("Fini")

View File

@ -0,0 +1,123 @@
#!/usr/bin/env python3
"""
Module principal : 
- récupération de données par appel à Overpass
- géocodage inverse
- export des données en JSON pour utilisation avec umap
- sauvegarde des données en ods
"""
# inspiration :
# https://towardsdatascience.com/loading-data-from-openstreetmap-with-python-and-the-overpass-api-513882a27fd0
# https://geo.api.gouv.fr/adresse
# https://wiki.cartocite.fr/doku.php?id=umap:10_-_je_valorise_les_donnees_openstreetmap_avec_umap
# https://sites-formations.univ-rennes2.fr/mastersigat/Cours/Intro_Overpass.pdf
# usage des tags :
# https://taginfo.openstreetmap.org/tags/?key=amenity&value=bicycle_parking#combinations
# exemple URL données pour umap :
# https://www.velocite63.fr/velocite63/OSM/stationnements_velos_publics.json
# penser à cocher "proxy" dans la rubrique "données distantes" du calque
# export ODS :
# https://pythonhosted.org/pyexcel-ods/
# pip3 install pyexcel-ods3
import time
import os
from osm_vc63 import errors
from osm_vc63 import requetes
from osm_vc63.utils import Utils
OVERPASS_URL = "http://overpass-api.de/api/interpreter"
GEO_API_URL = "https://api-adresse.data.gouv.fr"
DOSSIER_SAUVEGARDE = "resultats/"

# maximum number of attempts per request when the API fails
MAX_RETRY = 4

# delay in seconds between two attempts
RETRY_DELAY = 120

# OSM object ids:
# "Puy de Dôme" département: 7406
# Riom: 1693144
# Clermont: 110866
# Romagnat: 138269
# an Overpass area id is the OSM object id plus 3 600 000 000
AIRE_DE_RECHERCHE = str(3_600_000_000 + 110_866)

# French display names for the values of the OSM "bicycle_parking" tag
TRAD_BICYCLE_PARKING = {
    "stands": "Arceaux",
    "wall_loops": "Pince roues",
    "rack": "Râteliers",
    "anchors": "Ancrage",
    "shed": "Abri collectif",
    "bollard": "Potelet",
    "lockers": "Abris individuels",
    "wide_stands": "Arceaux espacés",
    "ground_slots": "Fente dans le sol",
    "building": "Bâtiment",
    "informal": "Informel",
    "wave": "Râteliers",
    "streetpod": "Arceaux",
    "tree": "Arbre à bicyclettes",
    "crossbar": "Barre",
    "rope": "Câble",
    "two-tier": "Deux étages",
    "floor": "Sol",
    "handlebar_holder": "Accroche-guidons",
}
def main():
    """Main routine: run every configured Overpass request, geocode and
    translate the results, save them as JSON and ODS.

    Each request is attempted up to MAX_RETRY times; after the last
    failure the whole run is aborted with a nonzero exit code.
    """
    # loop-invariant: one Utils instance serves every request and attempt
    utils = Utils(OVERPASS_URL, GEO_API_URL, DOSSIER_SAUVEGARDE)

    for req in requetes.REQS:
        for nb_essai in range(MAX_RETRY):
            try:
                print(f"{75*'#'}\r\nRequête en cours : {req.nom}")

                # Overpass call
                data = utils.run_overpass_query(req.critere, AIRE_DE_RECHERCHE)
                nb_resultats = len(data["elements"])
                print(f"{nb_resultats} résultats")

                if nb_resultats > 0:
                    # reverse geocoding
                    data = utils.geocodage(data)

                    # translation
                    data = utils.traduction("bicycle_parking", TRAD_BICYCLE_PARKING, data)

                    # save results
                    os.makedirs(DOSSIER_SAUVEGARDE, exist_ok=True)
                    export_json = utils.nettoyage_json_pour_umap(data, req.champs)
                    utils.save_as_json(export_json, req.nom)
                    utils.save_as_ods(req.champs, data, req.nom)

                # be gentle with the Overpass server
                time.sleep(30)
                break
            except errors.ApiError:
                # bug fix: range(MAX_RETRY) stops at MAX_RETRY - 1, so the
                # original `nb_essai == MAX_RETRY` test never matched and
                # the abandon branch was unreachable
                if nb_essai == MAX_RETRY - 1:
                    print("trop d'erreurs d'API - abandon")
                    # nonzero exit code so callers can detect the failure
                    raise SystemExit(1)
                print(f"erreur API - on retente dans {RETRY_DELAY}s")
                time.sleep(RETRY_DELAY)

    print("\r\n ### Terminé ###")


if __name__ == "__main__":
    main()