Extract geocoding and translation into utils

parent b5ab062b3e
commit 6d288c1672
@@ -118,7 +118,7 @@ class Utils:
         )
         overpass_query = overpass_query.replace("aire_de_recherche", aire_de_recherche)

-        print("Execution requete overpass : \n" + overpass_query)
+        # print("Execution requete overpass : \n" + overpass_query)
         response = requests.get(self.overpass_url, params={"data": overpass_query})

         if response.status_code != 200:
@@ -137,3 +137,74 @@ class Utils:
             raise errors.GeoApiError(response.status_code)

         return response.json()
+
+    # TODO : optimiser en faisant un appel au service /reverse/csv/ plutot que le service unitaire /reverse/
+    def geocodage(self, data):
+        """Renseigne une adresse pour chaque élément de data"""
+
+        for element in data["elements"]:
+            if element["type"] == "node":
+                rev_geocode = self.run_reverse_geocoding(element["lat"], element["lon"])
+            else:
+                rev_geocode = self.run_reverse_geocoding(
+                    element["center"]["lat"], element["center"]["lon"]
+                )
+
+            api_adresse = rev_geocode["features"][0]
+
+            element["tags"]["api_adresse:geometry:coordinates:lon"] = api_adresse[
+                "geometry"
+            ]["coordinates"][0]
+            element["tags"]["api_adresse:geometry:coordinates:lat"] = api_adresse[
+                "geometry"
+            ]["coordinates"][1]
+
+            element["tags"]["api_adresse:properties:label"] = api_adresse["properties"][
+                "label"
+            ]
+            element["tags"]["api_adresse:properties:score"] = api_adresse["properties"][
+                "score"
+            ]
+
+            if "housenumber" in api_adresse["properties"]:
+                element["tags"]["api_adresse:properties:housenumber"] = api_adresse[
+                    "properties"
+                ]["housenumber"]
+
+            element["tags"]["api_adresse:properties:type"] = api_adresse["properties"][
+                "type"
+            ]
+
+            element["tags"]["api_adresse:properties:name"] = api_adresse["properties"][
+                "name"
+            ]
+            element["tags"]["api_adresse:properties:postcode"] = api_adresse[
+                "properties"
+            ]["postcode"]
+            element["tags"]["api_adresse:properties:citycode"] = api_adresse[
+                "properties"
+            ]["citycode"]
+            element["tags"]["api_adresse:properties:city"] = api_adresse["properties"][
+                "city"
+            ]
+
+            if "street" in api_adresse["properties"]:
+                element["tags"]["api_adresse:properties:street"] = api_adresse[
+                    "properties"
+                ]["street"]
+
+            element["tags"]["api_adresse:properties:attribution"] = rev_geocode[
+                "attribution"
+            ]
+            element["tags"]["api_adresse:properties:licence"] = rev_geocode["licence"]
+
+        return data
+
+    def traduction(self, tag, dictionnaire, data):
+        """Traduit le champ tag des éléments de data avec dict"""
+
+        for element in data["elements"]:
+            if tag in element["tags"]:
+                element["tags"][tag] = dictionnaire[element["tags"][tag]]
+
+        return data
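Side note, for illustration only (not part of this commit): the new traduction() method is a plain tag rewrite over the Overpass payload, and geocodage() walks the same "elements" list (nodes carry lat/lon, other elements carry a center). A self-contained sketch of the tag-rewrite logic on made-up sample data:

    # Minimal sketch mirroring the logic of Utils.traduction on sample data
    # (coordinates and tag values below are invented for the example).
    def traduire(tag, dictionnaire, data):
        """Rewrite data["elements"][i]["tags"][tag] through dictionnaire."""
        for element in data["elements"]:
            if tag in element["tags"]:
                element["tags"][tag] = dictionnaire[element["tags"][tag]]
        return data

    data = {
        "elements": [
            {"type": "node", "lat": 45.78, "lon": 3.08, "tags": {"bicycle_parking": "stands"}},
            {"type": "way", "center": {"lat": 45.77, "lon": 3.09}, "tags": {}},
        ]
    }
    data = traduire("bicycle_parking", {"stands": "Arceaux"}, data)
    print(data["elements"][0]["tags"]["bicycle_parking"])  # -> Arceaux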
@@ -43,14 +43,11 @@ RETRY_DELAY = 120
 # id Riom : 1693144
 # id Clermont : 110866
 # id Romagnat : 138269

 # l'id de l'area se calcule en ajoutant 3600000000 au numéro de l'objet OSM
 AIRE_DE_RECHERCHE = str(3600000000 + 110866)

-# ----------------------------------------------
-trad_bicycle_parking = {
+# traductions des tags bicycle_parking
+TRAD_BICYCLE_PARKING = {
     "stands": "Arceaux",
     "wall_loops": "Pince roues",
     "rack": "Râteliers",
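Side note (not part of the diff): a quick check of the area-id rule stated in the comment above, i.e. Overpass area id = 3600000000 + OSM object id:

    # Worked example: Overpass area id for the Clermont relation (id 110866).
    print(3600000000 + 110866)  # 3600110866 — the value str()'d into AIRE_DE_RECHERCHE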
@@ -73,91 +70,30 @@ trad_bicycle_parking = {
 }


-def executer_requete_et_exporter_resultats(
-    nom_req, critere, aire_de_recherche, overpass_query_fields
-):
-    """
-    Appelle Overpass et exporte les résultats
-
-    nom_req : nom de la requête (type d'informations recherchées)
-    critere : requête passée à Overpass
-    aire_de_recherche : zone géographique d'intérêt
-    overpass_query_fields : champs récupérés pour la réponse
-    """
-
-    utils = Utils(OVERPASS_URL, GEO_API_URL, DOSSIER_SAUVEGARDE)
-    data = utils.run_overpass_query(critere, aire_de_recherche)
-
-    nb_elements = len(data["elements"])
-
-    print("Nombre d'elements : " + str(nb_elements))
-    """
-    print("Géocodage inversé : ", end="", flush=True)
-
-    # @TODO : optimiser en faisant un appel au service /reverse/csv/ plutot que le service unitaire /reverse/
-
-    for element in data["elements"]:
-
-        if (element["type"] == "node") :
-            rev_geocode = run_reverse_geocoding(element["lat"], element["lon"])
-        else :
-            rev_geocode = run_reverse_geocoding(element["center"]["lat"], element["center"]["lon"])
-
-        api_adresse = rev_geocode["features"][0]
-
-        element["tags"]["api_adresse:geometry:coordinates:lon"] = api_adresse["geometry"]["coordinates"][0]
-        element["tags"]["api_adresse:geometry:coordinates:lat"] = api_adresse["geometry"]["coordinates"][1]
-
-        element["tags"]["api_adresse:properties:label"] = api_adresse["properties"]["label"]
-        element["tags"]["api_adresse:properties:score"] = api_adresse["properties"]["score"]
-
-        if ("housenumber" in api_adresse["properties"]) :
-            element["tags"]["api_adresse:properties:housenumber"] = api_adresse["properties"]["housenumber"]
-
-        element["tags"]["api_adresse:properties:type"] = api_adresse["properties"]["type"]
-
-        element["tags"]["api_adresse:properties:name"] = api_adresse["properties"]["name"]
-        element["tags"]["api_adresse:properties:postcode"] = api_adresse["properties"]["postcode"]
-        element["tags"]["api_adresse:properties:citycode"] = api_adresse["properties"]["citycode"]
-        element["tags"]["api_adresse:properties:city"] = api_adresse["properties"]["city"]
-
-        if ("street" in api_adresse["properties"]) :
-            element["tags"]["api_adresse:properties:street"] = api_adresse["properties"]["street"]
-
-        element["tags"]["api_adresse:properties:attribution"] = rev_geocode["attribution"]
-        element["tags"]["api_adresse:properties:licence"] = rev_geocode["licence"]
-
-
-        # traduction
-        if "bicycle_parking" in element["tags"]:
-            element["tags"]["bicycle_parking"] = trad_bicycle_parking[element["tags"]["bicycle_parking"]]
-
-        print("X", end="", flush=True)
-
-        #else :
-        # print("-", end="", flush=True)
-
-    print()
-    """
-
-    export_json = utils.nettoyage_json_pour_umap(data, overpass_query_fields)
-
-    # Sauvegarde
-    os.makedirs(DOSSIER_SAUVEGARDE, exist_ok=True)
-
-    utils.save_as_json(export_json, nom_req)
-    utils.save_as_ods(overpass_query_fields, data, nom_req)
-
-
 def main():
     """Routine principale"""

     for req in requetes.REQS:
         for nb_essai in range(MAX_RETRY): # on tente max_retry fois
             try:
-                executer_requete_et_exporter_resultats(
-                    req.nom, req.critere, AIRE_DE_RECHERCHE, req.champs
-                )
+                utils = Utils(OVERPASS_URL, GEO_API_URL, DOSSIER_SAUVEGARDE)
+
+                # appel overpass
+                data = utils.run_overpass_query(req.critere, AIRE_DE_RECHERCHE)
+
+                # géocodage inverse
+                data = utils.geocodage(data)
+
+                # traduction
+                data = utils.traduction("bicycle_parking", TRAD_BICYCLE_PARKING, data)
+
+                # Sauvegarde
+                os.makedirs(DOSSIER_SAUVEGARDE, exist_ok=True)
+                export_json = utils.nettoyage_json_pour_umap(data, req.champs)
+
+                utils.save_as_json(export_json, req.nom)
+                utils.save_as_ods(req.champs, data, req.nom)
+
                 break
             except errors.ApiError:
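Side note (an assumption, since requetes.py is not shown in this commit): main() only relies on each entry of requetes.REQS exposing nom, critere and champs, so a minimal stand-in could look like this; names and values below are hypothetical:

    # Hypothetical sketch of a requetes.REQS entry; only the attribute names
    # (nom, critere, champs) come from the loop above, the values are invented.
    from collections import namedtuple

    Requete = namedtuple("Requete", ["nom", "critere", "champs"])

    REQS = [
        Requete(
            nom="parking_velo",  # hypothetical query name
            critere='node["amenity"="bicycle_parking"]',  # hypothetical Overpass criterion
            champs=["bicycle_parking", "capacity"],  # hypothetical exported fields
        )
    ]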