add files from the Scripts repo https://forge.chapril.org/tykayn/scripts

parent cbe2ec28d2
commit a5ebecee00

.gitignore (vendored) | 3 lines | new file
@@ -0,0 +1,3 @@
**/*/node_modules
/node_modules/
/coverage
README.md | 39 lines | new file
@@ -0,0 +1,39 @@
# Converting GeoJSON datasets to OSM tags

## Installation

```bash
npm i
npm i -g geojsontoosm
```

## Convert everything in one go

To fetch the dataset for electric charging stations (IRVE) and convert the GeoJSON into an OSM file, run the bash script:

```bash
bash refresh_data.sh
```

Export from Overpass Turbo into OSM:
https://overpass-turbo.eu/s/1yhY

## Config

Configure the department filter and the other options in convert_to_osm_tags.ts.
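To filter on a department, the relevant variables look like this (a condensed excerpt from convert_to_osm_tags.ts, which is added later in this commit; the annotations are added here):

```ts
// condensed from convert_to_osm_tags.ts: pick the source file, enable the department filter, choose the department
let sourceFilePathGeoJson = './etalab_data/all.json'
let enable_filter_on_department = true
let filterDepartment = 91
```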
## Usage

```bash
node convert_to_osm_tags.ts
```
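The department filter can also be set from the command line; build_departments.ts, added in this commit, drives the conversion per department this way (shown here with department 91 substituted in):

```bash
ts-node convert_to_osm_tags.ts --department=91
geojsontoosm output/my_converted_data_set_filtered_zipcode_91.json > osm_output/bornes-irve-filetered-from-etalab-opendata_dep_91.osm
```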

The result is written as JSON into the output folder.
The output is filtered according to the given mapping and to the postal-code area set in the config.
Built for the import of electric charging stations (IRVE).

# Resources

Conversion table for the open data:
https://wiki.openstreetmap.org/wiki/France/data.gouv.fr/Bornes_de_Recharge_pour_V%C3%A9hicules_%C3%89lectriques

# Roadmap

## IRVE electric charging stations

* do not set ref:EU:EVSE when the value is "non concerné"
* convert False values to no
* remove the source:date tags
* remove the name tags
babel.config.js | 6 lines | new file
@@ -0,0 +1,6 @@
module.exports = {
  presets: [
    ['@babel/preset-env', {targets: {node: 'current'}}],
    '@babel/preset-typescript',
  ],
};
build_departments.ts | 183 lines | new file
@@ -0,0 +1,183 @@
/**
 build all files for departements from the dataset and the mapping engine
 **/

const {exec} = require('child_process');
const execSync = require("child_process").execSync;

let min_department = 1;
const max_departement = 1
let enable_domtom = true;

// const max_departement = 95


function exec_command(command: string) {

    console.log('=> command', command)
    const result = execSync(command);

    // convert and show the output.
    console.log(result.toString("utf8"));
}

function build_department_geojson(department: string) {
    let command = `ts-node convert_to_osm_tags.ts --department=${department}`;
    console.log('command', command)
    exec_command(command)
}

function convert_geojson_to_osm_for_department(department: string) {
    let command = `geojsontoosm output/my_converted_data_set_filtered_zipcode_${department}.json > osm_output/bornes-irve-filetered-from-etalab-opendata_dep_${department}.osm`;
    console.log('command', command)
    exec_command(command)
}

for (let ii = min_department; ii <= max_departement; ii++) {
    let departement = '' + ii
    build_department_geojson(departement)
}
for (let ii = min_department; ii <= max_departement; ii++) {
    let departement = '' + ii
    convert_geojson_to_osm_for_department(departement)
}

// build_department_geojson('2A')
// convert_geojson_to_osm_for_department('2A')
// build_department_geojson('2B')
// convert_geojson_to_osm_for_department('2B')

if (enable_domtom) {
    // départements corse et dom tom
    let more_departements = ['2A', '2B', '971', '972', '973', '974', '975', '976']

    more_departements.forEach((department => {
        console.log(' mapping data for department ' + department)
        let departement_string = '' + department
        build_department_geojson(departement_string)
    }))
    more_departements.forEach((department => {
        console.log('convert to osm ' + department)
        let departement_string = '' + department
        convert_geojson_to_osm_for_department(departement_string)
    }))
}

/**
 Numéro Départements Chef-lieu

 01 Ain Bourg-en-Bresse
 02 Aisne Laon
 03 Allier Moulin
 04 Alpes-de-Haute-Provence Digne
 05 Hautes-Alpes Gap
 06 Alpes-Maritimes Nice
 07 Ardèche Privas
 08 Ardennes Charleville-Mézières
 09 Ariège Foix
 10 Aube Troyes
 11 Aude Carcassonne
 12 Aveyron Rodez
 13 Bouches-du-Rhône Marseille
 14 Calvados Caen
 15 Cantal Aurillac
 16 Charente Angoulême
 17 Charente-Maritime La Rochelle
 18 Cher Bourges
 19 Corrèze Tulle
 21 Côte-d'Or Dijon
 22 Côtes d'Armor Saint-Brieuc
 23 Creuse Guéret
 24 Dordogne Périgueux
 25 Doubs Besançon
 26 Drôme Valence
 27 Eure Evreux
 28 Eure-et-Loir Chartres
 29 Finistère Quimper
 30 Gard Nîmes
 31 Haute-Garonne Toulouse
 32 Gers Auch
 33 Gironde Bordeaux
 34 Hérault Montpellier
 35 Ille-et-Vilaine Rennes
 36 Indre Châteauroux
 37 Indre-et-Loire Tours
 38 Isère Grenoble
 39 Jura Lons-le-Saunier
 40 Landes Mont-de-Marsan
 41 Loir-et-Cher Blois
 42 Loire St-Étienne
 43 Haute-Loire Le Puy
 44 Loire-Atlantique Nantes
 45 Loiret Orléans
 46 Lot Cahors
 47 Lot-et-Garonne Agen
 48 Lozère Mende
 49 Maine-et-Loire Angers
 50 Manche St-Lô
 51 Marne Châlons-sur-Marne
 52 Haute-Marne Chaumont
 53 Mayenne Laval
 54 Meurthe-et-Moselle Nancy
 55 Meuse Bar-le-Duc
 56 Morbihan Vannes
 57 Moselle Metz
 58 Nièvre Nevers
 59 Nord Lille
 60 Oise Beauvais
 61 Orne Alençon
 62 Pas-de-Calais Arras
 63 Puy-de-Dôme Clermont-Ferrand
 64 Pyrénées-Atlantiques Pau
 65 Hautes-Pyrénées Tarbes
 66 Pyrénées-Orientales Perpignan
 67 Bas-Rhin Strasbourg
 68 Haut-Rhin Colmar
 69 Rhône Lyon
 70 Haute-Saône Vesoul
 71 Saône-et-Loire Mâcon
 72 Sarthe Le Mans
 73 Savoie Chambéry
 74 Haute-Savoie Annecy
 75 Paris Paris
 76 Seine-Maritime Rouen
 77 Seine-et-Marne Melun
 78 Yvelines Versailles
 79 Deux-Sèvres Niort
 80 Somme Amiens
 81 Tarn Albi
 82 Tarn-et-Garonne Montauban
 83 Var Toulon
 84 Vaucluse Avignon
 85 Vendée La Roche-sur-Yon
 86 Vienne Poitiers
 87 Haute-Vienne Limoges
 88 Vosges Épinal
 89 Yonne Auxerre
 90 Territoire de Belfort Belfort
 91 Essonne Evry
 92 Hauts-de-Seine Nanterre
 93 Seine-St-Denis Bobigny
 94 Val-de-Marne Créteil
 95 Val-D'Oise Pontoise
 2A Corse-du-Sud Ajaccio
 2B Haute-Corse Bastia
 971 Guadeloupe Basse-Terre
 972 Martinique Fort-de-France
 973 Guyane Cayenne
 974 La Réunion Saint-Denis
 976 Mayotte Mamoudzou
 **/
convert_to_osm_tags.ts | 299 lines | new file
@@ -0,0 +1,299 @@
/**
 * convertisseur de données de bornes de recharge électrique à partir de données Chargemap et open data Etalab
 */
import * as fs from 'fs'

import mappingConfigIRVE from './mappings/converters/configIRVE'
import mappingConfigIRVE_simple from './mappings/converters/mappingConfigIRVE_simple'
import mapping_engine from './mappings/engine'
import {BoundingBoxCoordinatesType, FeatureCollection} from "./mappings/mapping-config.type";
import utils from './mappings/utils'

const minimist = require('minimist')

const debugLog = utils.debugLog;

let use_mapping_engine = false
use_mapping_engine = true

let Mapping_engine = new mapping_engine(mappingConfigIRVE)

let mini_arguments: any = minimist(process.argv.slice(2))

// let sourceFilePathGeoJson = './etalab_data/small.json'
let sourceFilePathGeoJson = './etalab_data/all.json'
// let sourceFilePathGeoJson = './output/my_converted_data_set_filtered_zipcode_91.json'
// let sourceFilePathGeoJson = './output/my_converted_data_set_filtered_zipcode_91_small.json'

// wip filter
let filterOnBoundingBox = true
filterOnBoundingBox = false

let boundingBoxCoordinates: BoundingBoxCoordinatesType = {
    xMin: 1.91,
    xMax: 2.38,
    yMin: 48.7,
    yMax: 48.4,
}

let filterCoordinates = true
filterCoordinates = false

let enable_filter_on_department = true
enable_filter_on_department = false

let filterDepartment = 91
if (mini_arguments['department']) {
    filterDepartment = mini_arguments['department']
    enable_filter_on_department = true
}
if (mini_arguments['source']) {
    sourceFilePathGeoJson = mini_arguments['source']
}
if (mini_arguments['engine']) {
    use_mapping_engine = mini_arguments['engine']
}

let filterZipCode = new RegExp(`^${filterDepartment}`)
let filterZipCodeAdresse = new RegExp(` ${filterDepartment}`)
let filteredName = ''

if (enable_filter_on_department) {
    filteredName = '_filtered_zipcode_' + filterDepartment
} else if (filterOnBoundingBox) {
    filteredName = '_filtered_bbox_' + boundingBoxCoordinates.xMin + '-' + boundingBoxCoordinates.xMax + '_' + boundingBoxCoordinates.yMin + '-' + boundingBoxCoordinates.yMax
}
let pointCounterMax: number = 1000000

let limitConversionToFirstPoint: boolean = false
// limitConversionToFirstPoint = true

if (limitConversionToFirstPoint) {
    pointCounterMax = 1
}
let defaultPropertiesOfPoint: any = {
    'amenity': 'charging_station'
}

let converted_geo_json: any = {
    type: 'FeatureCollection',
    features: []
}

let output_folder = 'output';

/**
 * crée un fichier dans le dossier par défaut, output
 * @param fileName
 * @param fileContent
 */
function writeFile(fileName: string, fileContent: any) {
    debugLog('write file ', fileName)

    return fs.writeFile(
        `./${output_folder}/${fileName}`,
        fileContent,
        'utf8',
        (err) => {
            if (err) {
                debugLog(`Error writing file: ${err}`)
            } else {
                debugLog(`File ${fileName} is written successfully!`)
            }
        }
    )
}

/**
 *
 * @param sourceFilePath
 * @param mapping
 * @param pointCounterMax
 * @param boundingBoxCoordinates
 */
function convertDataForIRVE(sourceFilePath: string, mapping: any, pointCounterMax: number, boundingBoxCoordinates: any) {
    debugLog('convertDataForIRVE from ', sourceFilePath)

    fs.readFile(sourceFilePath, 'utf8', function (err, data) {
        let point_counter = 0
        let feature_points_after_filter: any = []

        if (err) {
            return debugLog(err)
        }
        let data_transformed: FeatureCollection = JSON.parse(data)
        // debug('data keys ', Object.keys(dataTransformed))
        // debugLog('properties of point 0', data_transformed.features[0])

        if (data_transformed.features) {

            debugLog('data found, features:', data_transformed.features.length)

            // find interesting list of points to use
            let list_of_points: any = data_transformed.features
            debugLog('listOfPoints.length', list_of_points.length)

            /**
             * filtering
             * TODO: do it in the engine
             */
            // for each point from the data source, filter if we take it or not
            list_of_points.forEach((feature_point: any) => {

                let regex_filter_test_result = true

                if (enable_filter_on_department) {
                    debugLog('filtre sur les départements activé')
                    regex_filter_test_result = (
                        filterZipCode.test(feature_point.properties.consolidated_code_postal)
                        ||
                        filterZipCodeAdresse.test(feature_point.properties.adresse_station)
                    )
                } else {
                    debugLog('pas de filtre sur les départements')
                }

                if (filterOnBoundingBox) {
                    debugLog('filtre sur les coordonnées bounding box activé')

                    let x = feature_point.properties.coordonneesXY[0]
                    let xMin = boundingBoxCoordinates.xMin
                    let xMax = boundingBoxCoordinates.xMax
                    let yMin = boundingBoxCoordinates.yMin
                    let yMax = boundingBoxCoordinates.yMax

                    let y = feature_point.properties.coordonneesXY[1]
                    regex_filter_test_result = (
                        (x >= xMin && x <= xMax)
                        &&
                        (y >= yMin && y <= yMax)
                    )

                } else {
                    debugLog('pas de filtre sur les coordonnées bounding box')
                }

                // TODO add filter offset max
                // filter points depending on zipcode
                if (regex_filter_test_result) {
                    feature_points_after_filter.push(feature_point)
                    debugLog(' +1 point', point_counter)
                    point_counter++
                }

            })

            /**
             * conversion
             */
            debugLog(' after filtering, feature_points_after_filter number of points: ', feature_points_after_filter.length)
            feature_points_after_filter.forEach((feature_point: any) => {
                // debugLog('featurePoint.properties.consolidated_code_postal', feature_point.properties.consolidated_code_postal)
                debugLog('convert : work on 1 point')
                // debugLog('convert :featurePoint', feature_point)
                let mapped_point: any = {}

                if (use_mapping_engine) {
                    // debugLog('convert :using mapping engine on feature point'
                    // , feature_point
                    // )
                    mapped_point = Mapping_engine.mapElementFromConf(feature_point)
                    debugLog('mapped_point', mapped_point)
                } else {
                    debugLog('convert :using simple converter on feature point', feature_point)
                    mapped_point = mapElementFromConfSimple(feature_point, mapping)
                }
                // debugLog('mapped_point one point', mapped_point)
                if (mapped_point) {
                    converted_geo_json.features.push(mapped_point)
                    debugLog('convert : added one point to converted_geo_json')
                } else {
                    debugLog('convert : !!! there is no map one point')
                }
            })
            // output new geojson

            debugLog('convert : convertedGeoJson.features.length', converted_geo_json.features.length)
            // write file on disk
            if (converted_geo_json.features.length) {

                let fileNameToWrite = 'my_converted_data_set' + filteredName + '.json'
                console.log('features: ', converted_geo_json.features.length)
                debugLog('convert : write file ', fileNameToWrite)
                writeFile(fileNameToWrite, JSON.stringify(converted_geo_json, null, 2))

            } else {
                console.log('convert : no writing of file, because there is no converted feature')
            }
            // console.log('convert : converted_geo_json output:', converted_geo_json.features)

            return converted_geo_json
        }
    })
}

/**
 * returns the converted element from mapping config if present, null otherwise
 */
function mapElementFromConfSimple(featurePoint: any, mappingConfig: any) {
    let mappingKeys = Object.keys(mappingConfig)
    let featurePointPropertiesKeys = Object.keys(featurePoint.properties)

    debugLog('keys', mappingKeys, featurePointPropertiesKeys)

    let newProperties: any = defaultPropertiesOfPoint

    // reinit properties of current point
    let basePoint = Object.create(featurePoint)
    basePoint.type = featurePoint.type
    basePoint.geometry = featurePoint.geometry
    basePoint.properties = newProperties

    // apply new properties if found in mapping config
    featurePointPropertiesKeys.forEach((pointKeyName: string) => {

        if (mappingKeys.indexOf(pointKeyName) !== -1) {
            // debugLog('found element', pointKeyName, '=>', mappingConfig[pointKeyName], 'value : ', featurePoint.properties[pointKeyName])
            let convertedValue: any = ''
            if (utils.isBooleanKey(pointKeyName)) {

                let copyOfValue: any = '' + featurePoint.properties[pointKeyName]
                if (typeof copyOfValue === typeof Object && copyOfValue.key_converted) {
                    copyOfValue = copyOfValue.key_converted
                }
                convertedValue = copyOfValue.toLowerCase() == 'true' ? 'yes' : 'no'
            } else {
                convertedValue = featurePoint.properties[pointKeyName]
            }

            if (convertedValue) {
                let convertedKey: any = mappingConfig[pointKeyName]
                newProperties[convertedKey] = convertedValue
            }
        }
    })

    debugLog('basePoint', basePoint)
    return basePoint
}

if (use_mapping_engine) {
    debugLog(' - using mapping engine')
    debugLog(' - pointCounterMax', pointCounterMax)
    Mapping_engine.setConfig(mappingConfigIRVE)
    convertDataForIRVE(sourceFilePathGeoJson, mappingConfigIRVE, pointCounterMax, boundingBoxCoordinates)

} else {

    let mappingConfigIRVE = mappingConfigIRVE_simple
    convertDataForIRVE(sourceFilePathGeoJson, mappingConfigIRVE, pointCounterMax, boundingBoxCoordinates)
}
data_other/bornes_elec_présentes_en_essonne_dans_osm.json | 3624 lines | new file (diff not shown: too large)
data_other/export.geojson | 4929 lines | new file (diff not shown: too large)
data_other/export.json | 204423 lines | new file (diff not shown: too large)
data_other/issues.kml | 1121 lines | new file (diff not shown: too large)
data_other/testing/mappings_to_test.ts | 34 lines | new file
@@ -0,0 +1,34 @@
import MappingConfigType from "../../mappings/mapping-config.type";

/**
 * configurations de mapping pour les cas de tests
 */
export const mappingRemoveAll: MappingConfigType = {
    config_name: 'testing config',
    config_author: 'tykayn <contact@cipherbliss.com>',
    default_properties_of_point: {
        'amenity': 'charging_station'
    },
    tags: {
        nom_amenageur: {
            key_converted: 'autre_nom_amenageur',
            conditional_values: {
                'Accessibilité inconnue': {
                    ignore_this_data: true, // ne pas ajouter de tag si la valeur est égale à Accessibilité inconnue.
                },
            }
        }
    }
}
export const mappingIgnore: MappingConfigType = {
    config_name: 'testing config',
    config_author: 'tykayn <contact@cipherbliss.com>',
    default_properties_of_point: {
        'amenity': 'charging_station'
    },
    tags: {
        nom_amenageur: {
            ignore_this_data: true,
        }
    }
}
data_other/testing/testing.json | 21 lines | new file
@@ -0,0 +1,21 @@
{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "geometry": {
        "type": "Point",
        "coordinates": [
          4.822159,
          45.635079
        ]
      },
      "properties": {
        "nom_amenageur": "ELECTRA",
        "siren_amenageur": "891624884",
        "consolidated_commune": "S\u00e9r\u00e9zin-du-Rh\u00f4ne",
        "consolidated_is_lon_lat_correct": true
      }
    }
  ]
}
dataset_info.md | 315 lines | new file
@@ -0,0 +1,315 @@
# Dataset information

Based on the Etalab dataset.

## List of the 310 operators
270 AGENCY
|
||||||
|
2Ed Coutances
|
||||||
|
2F Production
|
||||||
|
ABSOLUT CONCEPT
|
||||||
|
Acelec Charge
|
||||||
|
advenir@zeborne.com
|
||||||
|
Aeroports de lyon
|
||||||
|
Allego
|
||||||
|
ALLENERGIES
|
||||||
|
alterna energie
|
||||||
|
ALU GRANON
|
||||||
|
Ambroise Avocat
|
||||||
|
ANYOS
|
||||||
|
AQUA LOISIRS
|
||||||
|
AREA Tech
|
||||||
|
ART DECO FINITION
|
||||||
|
arteco44
|
||||||
|
ATELIERS PROVENCE LOISIRS
|
||||||
|
Atlante | FR*ATL
|
||||||
|
ATRACHATA
|
||||||
|
Autel Netherlands B.V. | FR*AUT
|
||||||
|
Automobile de l'Est
|
||||||
|
AUTORECHARGE
|
||||||
|
AUTORECHARGE
|
||||||
|
AUTORECHARGE SAS
|
||||||
|
Avomarks
|
||||||
|
Bastide beaudinard & tours (BBT)
|
||||||
|
BAUDUCEL
|
||||||
|
Bénédictines du Sacré-Coeur de Montmartre
|
||||||
|
BH RESTAURATION
|
||||||
|
BIARS DISTRIBUTION
|
||||||
|
BORNECO
|
||||||
|
Borneco | FR*BHM
|
||||||
|
Bornevo
|
||||||
|
Bornevo Connect
|
||||||
|
Bouygues Energies et Services
|
||||||
|
BOUYGUES ENERGIES ET SERVICES
|
||||||
|
box
|
||||||
|
Camping des graniers
|
||||||
|
CAP EMPLOI
|
||||||
|
CAR2PLUG
|
||||||
|
CHAPITEAUX SERVICE
|
||||||
|
ChargePoint
|
||||||
|
Chargepoint
|
||||||
|
CHARGEPOLY
|
||||||
|
CHARRIERE DISTRIBUTION
|
||||||
|
CHATEAU DE RONZIERE
|
||||||
|
COMMUNAUTE DE COMMUNE LE DOURDANNAIS EN HUREPOIX (CCDH)
|
||||||
|
contact@autorecharge.fr
|
||||||
|
COPRODEX
|
||||||
|
Daltoner Avranches
|
||||||
|
Daltoner Caen
|
||||||
|
DALTONER CHERBOURG
|
||||||
|
Daltoner Granville
|
||||||
|
Daltoner Vire
|
||||||
|
DIDIER CHARTON-VACHET
|
||||||
|
Dropnplug
|
||||||
|
e-nergyze
|
||||||
|
E-TOTEM
|
||||||
|
E-totem
|
||||||
|
Easycharge services
|
||||||
|
Ecophi mobility
|
||||||
|
EcoPhi mobility
|
||||||
|
EDF
|
||||||
|
EDFRAY
|
||||||
|
EGE NOEL BERANGER
|
||||||
|
EGSM
|
||||||
|
EIZMENDI TRAITEUR EVENEMENTS
|
||||||
|
Ekoplug
|
||||||
|
ELEC eCONNECT
|
||||||
|
ELECTRA
|
||||||
|
ELECTRIC 55 CHARGING
|
||||||
|
Electriox Groupe
|
||||||
|
electromaps
|
||||||
|
ELECTROMAPS
|
||||||
|
Electromaps
|
||||||
|
ELEK BORDEAUX
|
||||||
|
ENERGEM
|
||||||
|
ENERGIE Eure-et-Loir
|
||||||
|
ENERGIE EURE-ET-LOIR
|
||||||
|
ENNESSER ET FILS
|
||||||
|
ENTREPRISE ADAM
|
||||||
|
ESPACE AUTO COURNON
|
||||||
|
Est Auto
|
||||||
|
EURO TAXI LINE
|
||||||
|
EV MAP SAS
|
||||||
|
EVBOX
|
||||||
|
EVERON
|
||||||
|
EVzen (SMEG Développement)
|
||||||
|
evzen (SMEG Développement)
|
||||||
|
evZen | FR*EVZ
|
||||||
|
F.B. AUTOMOBILES
|
||||||
|
Festilight
|
||||||
|
Freshmile
|
||||||
|
FRESHMILE
|
||||||
|
freshmile
|
||||||
|
FReshmile
|
||||||
|
Freshmile SAS
|
||||||
|
GABMAG17
|
||||||
|
GABORIT BOCAGE SERVICES
|
||||||
|
Gamba et Rota
|
||||||
|
GARAGE HENRY
|
||||||
|
garage Henry herve
|
||||||
|
Garage lefebvre
|
||||||
|
GEDIBOIS BATI COLMAN
|
||||||
|
GENE ELEC 35
|
||||||
|
GF3e
|
||||||
|
Golf de La Wantzenau
|
||||||
|
Green Diffusion
|
||||||
|
Green Technologie
|
||||||
|
GREEN TECHNOLOGIE
|
||||||
|
Green To Wheel
|
||||||
|
GreenYellow | FR*GYM
|
||||||
|
GROFF SAS
|
||||||
|
GROUPE COURTOIS SOCOHY
|
||||||
|
GROUPE LGE
|
||||||
|
GROUPE LGE SOCIETE AUTOPLUG
|
||||||
|
Grunzke Beate
|
||||||
|
Guerin
|
||||||
|
H alu concept
|
||||||
|
HABT
|
||||||
|
HAEFELI
|
||||||
|
HERVE THERMIQUE
|
||||||
|
HFOOD PESSAC
|
||||||
|
HFOOD VILLENAVE
|
||||||
|
Hostellerie
|
||||||
|
Hotel Le Moulin
|
||||||
|
Hôtel MACCHI
|
||||||
|
Hôtel Restaurant Hubert Kieffer
|
||||||
|
ICS Scgilthigheim
|
||||||
|
IFERRO
|
||||||
|
INOUID
|
||||||
|
IONITY
|
||||||
|
Ispo france
|
||||||
|
IXINA Le Cres
|
||||||
|
IXINA Saint Jean de Védas
|
||||||
|
ixina Villeneuve-lès-Béziers
|
||||||
|
Izivia
|
||||||
|
JAV INVESTISSEMENT
|
||||||
|
jmd
|
||||||
|
JONNARD LES ISSAMBRES
|
||||||
|
Kallista BDR
|
||||||
|
KIEHL
|
||||||
|
Kotelon
|
||||||
|
LA CONCIERGERIE D'ISA.
|
||||||
|
La dilettante
|
||||||
|
La jabotte
|
||||||
|
La jardinerie d'aveze sarl
|
||||||
|
LA PRAIRIE
|
||||||
|
Lacotte
|
||||||
|
Lacotte Pierre
|
||||||
|
LANGON DISTRIBUTION
|
||||||
|
Last Mile Solutions
|
||||||
|
LE BISTRO D'HUGO
|
||||||
|
Le Capelan
|
||||||
|
Le Grand Large
|
||||||
|
Le Manoir de l'Isle
|
||||||
|
le Relais
|
||||||
|
LE REVE DE JANINE
|
||||||
|
Leclerc Millau
|
||||||
|
LEGA
|
||||||
|
LEGELEUX
|
||||||
|
Leroux Damien
|
||||||
|
Les jardins maraichers des bords de rance
|
||||||
|
LIDL, Nicolas Barbarin
|
||||||
|
LMS Informatique
|
||||||
|
LUMI'IN
|
||||||
|
M. Yannick PIERRE
|
||||||
|
M.A.J.U.
|
||||||
|
ma borne auto
|
||||||
|
ma Borne Auto
|
||||||
|
MABORNEAUTO
|
||||||
|
MAS DES OLIVES
|
||||||
|
MAS DU TERME
|
||||||
|
MAS GALOFFRE
|
||||||
|
Masseria
|
||||||
|
MEA ENERGIES
|
||||||
|
MEDIODENT
|
||||||
|
Mickael Auto
|
||||||
|
MOBELEC
|
||||||
|
Modulo
|
||||||
|
MODULO
|
||||||
|
Modulo énergies
|
||||||
|
MONTA
|
||||||
|
MOVIVE_Izivia
|
||||||
|
NEXTENEO
|
||||||
|
NM SECURELEC
|
||||||
|
NON CONCERNE
|
||||||
|
Non concerné
|
||||||
|
non concerné
|
||||||
|
Normatech
|
||||||
|
Normatech Lodmi
|
||||||
|
NVH
|
||||||
|
NW IECharge
|
||||||
|
OCR MAINTENANCE ELECTRONIQUE
|
||||||
|
PAS DITINERANCE
|
||||||
|
Pascal Chene
|
||||||
|
Perrin
|
||||||
|
perrin
|
||||||
|
PHARMACIE DE HUNDLING
|
||||||
|
Pilotage Maritime
|
||||||
|
PLAGECO DISTRIBUTION
|
||||||
|
Polybati
|
||||||
|
Prodici
|
||||||
|
ProperPhi
|
||||||
|
Provibat
|
||||||
|
PROVIRIDIS
|
||||||
|
Proviridis | FR*PVD
|
||||||
|
PTBG et associés
|
||||||
|
QoWatt
|
||||||
|
R3
|
||||||
|
Ramsay Pole Lille métropole
|
||||||
|
RechargerMonAuto
|
||||||
|
REGIE MUNICIPALE D'ELECTRICITE DE LOOS
|
||||||
|
Rencontre-handi
|
||||||
|
REVE
|
||||||
|
RICOME ET SADOUL AXA FRANCE
|
||||||
|
RONALEV
|
||||||
|
Royal Champagne
|
||||||
|
RSDA mobility
|
||||||
|
SA FOOTBALL CLUB DES GIRONDINS DE BORDEAUX
|
||||||
|
sa les broyers
|
||||||
|
Sanou électricité
|
||||||
|
SAP LABS FRANCE
|
||||||
|
SARL BEAUDRE BAUDOT
|
||||||
|
SARL JUMO
|
||||||
|
SARL LAFOURCADE
|
||||||
|
SARL LES BAINS DE ROYAN
|
||||||
|
SARL VAHE
|
||||||
|
SAS CHATEAU DE MEMANAT
|
||||||
|
SAS DE L'AILETTE
|
||||||
|
sas e-motum
|
||||||
|
SAS Lujasy
|
||||||
|
SAS Miodis
|
||||||
|
SAS Sabo
|
||||||
|
SATUJO
|
||||||
|
SCI LA COLLINE
|
||||||
|
SCI LA GRANGE DESSOUS
|
||||||
|
SCI LES RUISSEAUX
|
||||||
|
SCI Lounapiou
|
||||||
|
SCI NOKI
|
||||||
|
SCI OLYMPE
|
||||||
|
SCP ACANTHE DRIMARACCI
|
||||||
|
SDC CENTRE MEDICAL ARTZAMENDI
|
||||||
|
SDC ORDINAL
|
||||||
|
SECAL
|
||||||
|
Securecharge
|
||||||
|
See You Sun
|
||||||
|
SELARL PHARMACIE CEVENOLE
|
||||||
|
Séolis
|
||||||
|
SGA Industries
|
||||||
|
SIEGE 27
|
||||||
|
SIPECC
|
||||||
|
SNAM GROUPE
|
||||||
|
SNER RHONE-ALPES
|
||||||
|
société La Clérine
|
||||||
|
Société LEVAROY, Monsieur LEROY
|
||||||
|
Societe moderne d'isolation
|
||||||
|
Société Sigma Tec
|
||||||
|
Société SIPECC
|
||||||
|
Société Y
|
||||||
|
Sodetrel
|
||||||
|
SOLIDARAUTO 49
|
||||||
|
SOREGIES
|
||||||
|
SPBR1
|
||||||
|
SPIE CITYNETWORKS
|
||||||
|
STATIONS-E
|
||||||
|
Sud Camargue
|
||||||
|
Sud Hotel
|
||||||
|
TANAY Electricité
|
||||||
|
Technic Elec
|
||||||
|
themis
|
||||||
|
Thibal Distribution
|
||||||
|
TISSERANT
|
||||||
|
total énergie
|
||||||
|
Total marketing france
|
||||||
|
TotalEnergies Charging Services
|
||||||
|
TotalEnergies Marketing France
|
||||||
|
ubitricity
|
||||||
|
UBITRICITY GMBH
|
||||||
|
Vegetalis
|
||||||
|
Vigot
|
||||||
|
Ville de Riquewihr
|
||||||
|
Vincent
|
||||||
|
Virta
|
||||||
|
VIRTA
|
||||||
|
Volta Charging
|
||||||
|
WAAT
|
||||||
|
WAAT SAS | FR*W10
|
||||||
|
WAAT SAS | FR*W11
|
||||||
|
WAAT SAS | FR*WA1
|
||||||
|
WAAT SAS | FR*WA3
|
||||||
|
WAAT SAS | FR*WA4
|
||||||
|
WAAT SAS | FR*WA5
|
||||||
|
WAAT SAS | FR*WA6
|
||||||
|
WAAT SAS | FR*WA8
|
||||||
|
WAAT SAS | FR*WA9
|
||||||
|
WAAT SAS | FR*WAT
|
||||||
|
WattzHub | FR*SMI
|
||||||
|
web services advenir
|
||||||
|
Wedom
|
||||||
|
WeDoM
|
||||||
|
WeDoM.io
|
||||||
|
WeeCharge
|
||||||
|
WICKER TP
|
||||||
|
ZEBORNE
|
||||||
|
ZEborne
|
||||||
|
ZEENCO
|
||||||
|
Zen Construction
|
||||||
|
|
doc/carte_numéros_départements_fr.gif | BIN | new file (binary image, 90 KiB, not shown)
doc/irve_puissances.md | 125 lines | new file
@@ -0,0 +1,125 @@
# IRVE data domains

## puissance_nominale

What a mess: the raw values mix orders of magnitude (kW next to watts) and number formats (a normalisation sketch follows the list below):

225
150
50
300
22
24
18
350
25
21
7
48
38
63
320
160
125
43
2
11
3
200
175
6
20
23
45
17
44
56
325
90
75
120
4
36
180
54
7.4
42
12
15
60
100
30
210
126
40
9
70
80
360
124
64
115
62
22.00
22.0
0.0
50.00
12
47
24.00
7.00
7.0
5.9
71708
131524
49521
0
19
300000
22080
50000
63000
90000
60000
3.4
5
122
72
18.00
43.00
3.22
3.00
11.00
230
400
110
22
2.3
27
26
3.7
62500
22.08
7.36
11.04
1.7
14
22000
7000
3000
4.6
32
2859660
60973
5.5
12
175.00
22
22
100.00
6.9
240
149
16
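The repository does not normalise these values yet (see the TODO in mappings/converters/configIRVE.ts). A minimal sketch of one possible clean-up, not part of this commit and under the assumption that bare values above 1000 are watts while everything else is already in kW, could look like this:

```ts
// Hypothetical helper, not part of this commit: normalise puissance_nominale to kilowatts.
// Assumption: values > 1000 are watts (e.g. "22000"); smaller values are already kW (e.g. "22.00").
function normalizePowerKw(raw: string): number | null {
    const value = parseFloat(raw)
    if (isNaN(value) || value <= 0) {
        return null // "0.0", empty or garbage values: emit no power tag at all
    }
    const kw = value > 1000 ? value / 1000 : value
    return Math.round(kw * 100) / 100
}

// normalizePowerKw("22.00") === 22, normalizePowerKw("22000") === 22, normalizePowerKw("0.0") === null
```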
etalab_data/all.csv | 44877 lines | new file (diff not shown: too large)
etalab_data/all.json | 2782317 lines | new file (diff not shown: too large)
etalab_data/etalab.json | 2089090 lines | new file (diff not shown: too large)
etalab_data/latest.json | 67 lines | new file
@@ -0,0 +1,67 @@
{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "geometry": {
        "type": "Point",
        "coordinates": [
          4.822159,
          45.635079
        ]
      },
      "properties": {
        "nom_amenageur": "ELECTRA",
        "siren_amenageur": "891624884",
        "contact_amenageur": "help@electra.com",
        "nom_operateur": "ELECTRA",
        "contact_operateur": "help@electra.com",
        "telephone_operateur": "",
        "nom_enseigne": "ELECTRA",
        "id_station_itinerance": "FRELCPSDRPG",
        "id_station_local": "",
        "nom_station": "S\u00e9r\u00e9zin-du-Rh\u00f4ne - Peugeot",
        "implantation_station": "Station d\u00e9di\u00e9e \u00e0 la recharge rapide",
        "adresse_station": "12 chemin d\u00e9partemental 12, Peugeot 69360 S\u00e9r\u00e9zin-du-Rh\u00f4ne",
        "code_insee_commune": "69294",
        "coordonneesXY": "[4.82215900,45.63507900]",
        "nbre_pdc": "8",
        "id_pdc_itinerance": "FRELCECURS",
        "id_pdc_local": "",
        "puissance_nominale": "225",
        "prise_type_ef": "false",
        "prise_type_2": "false",
        "prise_type_combo_ccs": "true",
        "prise_type_chademo": "false",
        "prise_type_autre": "false",
        "gratuit": "false",
        "paiement_acte": "true",
        "paiement_cb": "true",
        "paiement_autre": "true",
        "tarification": "",
        "condition_acces": "Acc\u00e8s libre",
        "reservation": "true",
        "horaires": "24/7",
        "accessibilite_pmr": "Accessibilit\u00e9 inconnue",
        "restriction_gabarit": "Inconnu",
        "station_deux_roues": "false",
        "raccordement": "Direct",
        "num_pdl": "N/A",
        "date_mise_en_service": "2022-10-27",
        "observations": "T\u00e9l\u00e9charger l'application ELECTRA pour r\u00e9server et payer sur go-electra.com",
        "date_maj": "2023-07-29",
        "cable_t2_attache": "",
        "last_modified": "2023-07-29T03:05:24.360000+00:00",
        "datagouv_dataset_id": "623ca46c13130c3228abd018",
        "datagouv_resource_id": "e9bb3424-77cd-40ba-8bbd-5a19362d0365",
        "datagouv_organization_or_owner": "electra",
        "consolidated_longitude": 4.822159,
        "consolidated_latitude": 45.635079,
        "consolidated_code_postal": "69360",
        "consolidated_commune": "S\u00e9r\u00e9zin-du-Rh\u00f4ne",
        "consolidated_is_lon_lat_correct": true,
        "consolidated_is_code_insee_verified": true
      }
    }
  ]
}
etalab_data/small.csv | 4 lines | new file
@@ -0,0 +1,4 @@
nom_amenageur,siren_amenageur,contact_amenageur,nom_operateur,contact_operateur,telephone_operateur,nom_enseigne,id_station_itinerance,id_station_local,nom_station,implantation_station,adresse_station,code_insee_commune,coordonneesXY,nbre_pdc,id_pdc_itinerance,id_pdc_local,puissance_nominale,prise_type_ef,prise_type_2,prise_type_combo_ccs,prise_type_chademo,prise_type_autre,gratuit,paiement_acte,paiement_cb,paiement_autre,tarification,condition_acces,reservation,horaires,accessibilite_pmr,restriction_gabarit,station_deux_roues,raccordement,num_pdl,date_mise_en_service,observations,date_maj,cable_t2_attache,last_modified,datagouv_dataset_id,datagouv_resource_id,datagouv_organization_or_owner,consolidated_longitude,consolidated_latitude,consolidated_code_postal,consolidated_commune,consolidated_is_lon_lat_correct,consolidated_is_code_insee_verified
ELECTRA,891624884,help@electra.com,ELECTRA,help@electra.com,,ELECTRA,FRELCPBROHI,,Bron - Hôtel Ibis Lyon Est,Station dédiée à la recharge rapide,36 avenue du Doyen Jean Lépine 69500 Bron,69029,"[4.90415400,45.74800500]",4,FRELCE2JW9,,225,false,false,true,false,false,false,true,true,true,,Accès libre,true,24/7,Accessibilité inconnue,Inconnu,false,Direct,N/A,2023-05-04,Télécharger l'application ELECTRA pour réserver et payer sur go-electra.com,2023-08-06,,2023-08-06T03:05:25.841000+00:00,623ca46c13130c3228abd018,e9bb3424-77cd-40ba-8bbd-5a19362d0365,electra,4.904154,45.748005,69500,Bron,True,True
ELECTRA,891624884,help@electra.com,ELECTRA,help@electra.com,,ELECTRA,FRELCPBLOHM,,Blotzheim - Hôtel Mercure Bâle Mulhouse Aéroport,Station dédiée à la recharge rapide,3 rue de l'industrie 68730 Blotzheim,68042,"[7.50290400,47.60821400]",8,FRELCEXY9P,,225,false,false,true,false,false,false,true,true,true,,Accès libre,true,24/7,Accessibilité inconnue,Inconnu,false,Direct,N/A,2022-09-22,Télécharger l'application ELECTRA pour réserver et payer sur go-electra.com,2023-08-06,,2023-08-06T03:05:25.841000+00:00,623ca46c13130c3228abd018,e9bb3424-77cd-40ba-8bbd-5a19362d0365,electra,7.502904,47.608214,68730,Blotzheim,True,True
ELECTRA,891624884,help@electra.com,ELECTRA,help@electra.com,,ELECTRA,FRELCPSMCLE,,Saint-Magne-de-Castillon - E.Leclerc,Station dédiée à la recharge rapide,2 chemin de Perrin 33350 Saint-Magne-de-Castillon,33437,"[-0.06340700,44.85401900]",4,FRELCEEEAW,,150,false,false,true,false,false,false,true,true,true,,Accès libre,true,24/7,Accessibilité inconnue,Inconnu,false,Direct,N/A,2023-04-28,Télécharger l'application ELECTRA pour réserver et payer sur go-electra.com,2023-08-06,,2023-08-06T03:05:25.841000+00:00,623ca46c13130c3228abd018,e9bb3424-77cd-40ba-8bbd-5a19362d0365,electra,-0.063407,44.854019,33350,Saint-Magne-de-Castillon,True,True
etalab_data/small.json | 17 lines | new file
@@ -0,0 +1,17 @@
{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "geometry": {
        "type": "Point",
        "coordinates": [
          2.2335179, 48.6973801
        ]
      },
      "properties": {
        "accessibilite_pmr": "Mo-Fr 08:30-12:00,Mo-Fr 14:00-19:00,Sat 09:00-18:30"
      }
    }
  ]
}
get_datasets.sh | 8 lines | new file
@@ -0,0 +1,8 @@
#!/bin/bash

# get the updated geojson
wget https://www.data.gouv.fr/fr/datasets/r/7eee8f09-5d1b-4f48-a304-5e99e8da1e26 -P ./etalab_data -O ./etalab_data/all.json
wget https://www.data.gouv.fr/fr/datasets/r/8d9398ae-3037-48b2-be19-412c24561fbb -P ./etalab_data -O ./etalab_data/all.csv
# https://www.data.gouv.fr/fr/datasets/r/8d9398ae-3037-48b2-be19-412c24561fbb pour le jeu de données irve schema v2, non utilisé dans ce script
# filter its tags
echo "refresh de la data"
jest.config.ts | 199 lines | new file
@@ -0,0 +1,199 @@
|
|||||||
|
/**
|
||||||
|
* For a detailed explanation regarding each configuration property, visit:
|
||||||
|
* https://jestjs.io/docs/configuration
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type {Config} from 'jest';
|
||||||
|
|
||||||
|
const config: Config = {
|
||||||
|
// All imported modules in your tests should be mocked automatically
|
||||||
|
// automock: false,
|
||||||
|
|
||||||
|
// Stop running tests after `n` failures
|
||||||
|
// bail: 0,
|
||||||
|
|
||||||
|
// The directory where Jest should store its cached dependency information
|
||||||
|
// cacheDirectory: "/tmp/jest_rs",
|
||||||
|
|
||||||
|
// Automatically clear mock calls, instances, contexts and results before every test
|
||||||
|
clearMocks: true,
|
||||||
|
|
||||||
|
// Indicates whether the coverage information should be collected while executing the test
|
||||||
|
collectCoverage: true,
|
||||||
|
|
||||||
|
// An array of glob patterns indicating a set of files for which coverage information should be collected
|
||||||
|
// collectCoverageFrom: undefined,
|
||||||
|
|
||||||
|
// The directory where Jest should output its coverage files
|
||||||
|
coverageDirectory: "coverage",
|
||||||
|
|
||||||
|
// An array of regexp pattern strings used to skip coverage collection
|
||||||
|
// coveragePathIgnorePatterns: [
|
||||||
|
// "/node_modules/"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// Indicates which provider should be used to instrument code for coverage
|
||||||
|
coverageProvider: "v8",
|
||||||
|
|
||||||
|
// A list of reporter names that Jest uses when writing coverage reports
|
||||||
|
// coverageReporters: [
|
||||||
|
// "json",
|
||||||
|
// "text",
|
||||||
|
// "lcov",
|
||||||
|
// "clover"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// An object that configures minimum threshold enforcement for coverage results
|
||||||
|
// coverageThreshold: undefined,
|
||||||
|
|
||||||
|
// A path to a custom dependency extractor
|
||||||
|
// dependencyExtractor: undefined,
|
||||||
|
|
||||||
|
// Make calling deprecated APIs throw helpful error messages
|
||||||
|
// errorOnDeprecated: false,
|
||||||
|
|
||||||
|
// The default configuration for fake timers
|
||||||
|
// fakeTimers: {
|
||||||
|
// "enableGlobally": false
|
||||||
|
// },
|
||||||
|
|
||||||
|
// Force coverage collection from ignored files using an array of glob patterns
|
||||||
|
// forceCoverageMatch: [],
|
||||||
|
|
||||||
|
// A path to a module which exports an async function that is triggered once before all test suites
|
||||||
|
// globalSetup: undefined,
|
||||||
|
|
||||||
|
// A path to a module which exports an async function that is triggered once after all test suites
|
||||||
|
// globalTeardown: undefined,
|
||||||
|
|
||||||
|
// A set of global variables that need to be available in all test environments
|
||||||
|
// globals: {},
|
||||||
|
|
||||||
|
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
|
||||||
|
// maxWorkers: "50%",
|
||||||
|
|
||||||
|
// An array of directory names to be searched recursively up from the requiring module's location
|
||||||
|
// moduleDirectories: [
|
||||||
|
// "node_modules"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// An array of file extensions your modules use
|
||||||
|
// moduleFileExtensions: [
|
||||||
|
// "js",
|
||||||
|
// "mjs",
|
||||||
|
// "cjs",
|
||||||
|
// "jsx",
|
||||||
|
// "ts",
|
||||||
|
// "tsx",
|
||||||
|
// "json",
|
||||||
|
// "node"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
|
||||||
|
// moduleNameMapper: {},
|
||||||
|
|
||||||
|
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
|
||||||
|
// modulePathIgnorePatterns: [],
|
||||||
|
|
||||||
|
// Activates notifications for test results
|
||||||
|
// notify: false,
|
||||||
|
|
||||||
|
// An enum that specifies notification mode. Requires { notify: true }
|
||||||
|
// notifyMode: "failure-change",
|
||||||
|
|
||||||
|
// A preset that is used as a base for Jest's configuration
|
||||||
|
// preset: undefined,
|
||||||
|
|
||||||
|
// Run tests from one or more projects
|
||||||
|
// projects: undefined,
|
||||||
|
|
||||||
|
// Use this configuration option to add custom reporters to Jest
|
||||||
|
// reporters: undefined,
|
||||||
|
|
||||||
|
// Automatically reset mock state before every test
|
||||||
|
// resetMocks: false,
|
||||||
|
|
||||||
|
// Reset the module registry before running each individual test
|
||||||
|
// resetModules: false,
|
||||||
|
|
||||||
|
// A path to a custom resolver
|
||||||
|
// resolver: undefined,
|
||||||
|
|
||||||
|
// Automatically restore mock state and implementation before every test
|
||||||
|
// restoreMocks: false,
|
||||||
|
|
||||||
|
// The root directory that Jest should scan for tests and modules within
|
||||||
|
// rootDir: undefined,
|
||||||
|
|
||||||
|
// A list of paths to directories that Jest should use to search for files in
|
||||||
|
// roots: [
|
||||||
|
// "<rootDir>"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// Allows you to use a custom runner instead of Jest's default test runner
|
||||||
|
// runner: "jest-runner",
|
||||||
|
|
||||||
|
// The paths to modules that run some code to configure or set up the testing environment before each test
|
||||||
|
// setupFiles: [],
|
||||||
|
|
||||||
|
// A list of paths to modules that run some code to configure or set up the testing framework before each test
|
||||||
|
// setupFilesAfterEnv: [],
|
||||||
|
|
||||||
|
// The number of seconds after which a test is considered as slow and reported as such in the results.
|
||||||
|
// slowTestThreshold: 5,
|
||||||
|
|
||||||
|
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
|
||||||
|
// snapshotSerializers: [],
|
||||||
|
|
||||||
|
// The test environment that will be used for testing
|
||||||
|
// testEnvironment: "jest-environment-node",
|
||||||
|
|
||||||
|
// Options that will be passed to the testEnvironment
|
||||||
|
// testEnvironmentOptions: {},
|
||||||
|
|
||||||
|
// Adds a location field to test results
|
||||||
|
// testLocationInResults: false,
|
||||||
|
|
||||||
|
// The glob patterns Jest uses to detect test files
|
||||||
|
// testMatch: [
|
||||||
|
// "**/__tests__/**/*.[jt]s?(x)",
|
||||||
|
// "**/?(*.)+(spec|test).[tj]s?(x)"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
|
||||||
|
// testPathIgnorePatterns: [
|
||||||
|
// "/node_modules/"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// The regexp pattern or array of patterns that Jest uses to detect test files
|
||||||
|
// testRegex: [],
|
||||||
|
|
||||||
|
// This option allows the use of a custom results processor
|
||||||
|
// testResultsProcessor: undefined,
|
||||||
|
|
||||||
|
// This option allows use of a custom test runner
|
||||||
|
// testRunner: "jest-circus/runner",
|
||||||
|
|
||||||
|
// A map from regular expressions to paths to transformers
|
||||||
|
// transform: undefined,
|
||||||
|
|
||||||
|
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
|
||||||
|
// transformIgnorePatterns: [
|
||||||
|
// "/node_modules/",
|
||||||
|
// "\\.pnp\\.[^\\/]+$"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
|
||||||
|
// unmockedModulePathPatterns: undefined,
|
||||||
|
|
||||||
|
// Indicates whether each individual test should be reported during the run
|
||||||
|
// verbose: undefined,
|
||||||
|
|
||||||
|
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
|
||||||
|
// watchPathIgnorePatterns: [],
|
||||||
|
|
||||||
|
// Whether to use watchman for file crawling
|
||||||
|
// watchman: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
export default config;
|
latest.json | 2780767 lines | new file (diff not shown: too large)
make_variance_from_csv.ts | 126 lines | new file
@@ -0,0 +1,126 @@
/**
 prendre un CSV,
 examiner toutes les colonnes et leurs valeurs,
 garder en mémoire les valeurs uniques de chaque colonne
 faire un nouveau csv qui ne montre que les valeurs uniques pour chacune des colonnes
 et qui compte le nombre de valeurs
 **/
import utils from './mappings/utils'
import {parse} from 'csv'

const fs = require('fs')

interface VarianceType {
    [key: string]: Array<string>
}

let csv_content = 'variance de dataset\n';
let separator = ';';
// let columns_headings = [];
let data_variance: VarianceType = {};

const inputPath = './etalab_data/all.csv'
// const inputPath = './etalab_data/small.csv'
let columns_headings: Array<string> = [];
let lines_count = 0;
let longest_variance_count = 0;

console.log('open file ', inputPath)
fs.readFile(inputPath, function (err: any, fileData: any) {

    if (err) {
        throw new Error(err)
    } else {
        parse(fileData, {columns: false, trim: true}, function (err: any, rows: any) {
            // Your CSV data is in an array of arrays passed to this callback as rows.

            console.log('line ', lines_count)

            rows.forEach((row: Array<any>) => {

                if (lines_count === 0) {
                    // console.log('elem', row)
                    row.forEach((value: string) => {
                        // console.log('value', value)
                        columns_headings.push(value)
                        data_variance[value] = []
                    })
                    console.log('columns_headings.length', columns_headings.length)
                    lines_count++
                } else {
                    // lignes suivantes

                    let column_index = 0;

                    row.forEach((value: string) => {
                        value = value.trim()
                        // dans chaque colonne, vérifier que la valeur n'est pas déjà présente dans les index de variance
                        // si la valeur est nouvelle, l'ajouter
                        if (data_variance[columns_headings[column_index]].indexOf(value) < 0) {
                            data_variance[columns_headings[column_index]].push(value)
                            if (
                                data_variance[columns_headings[column_index]].length > longest_variance_count
                            ) {
                                longest_variance_count = data_variance[columns_headings[column_index]].length
                            }

                        }
                        column_index++
                    })
                    lines_count++
                }
            })

            console.log('longest_variance_count', longest_variance_count)

            utils.writeFile('variance.csv', writeCSVVariance())
            // console.log('data_variance', data_variance)

        })
    }

    console.log('parsing done')
    // console.log('data_variance', data_variance)
})

/**
 * écrit un csv avec les données de variance du dataset donné
 */
function writeCSVVariance() {

    let csv_content = ';variance de ' + inputPath + ';' + new Date() + '\n'
    let columns = Object.keys(data_variance);

    // add headings
    columns_headings.forEach((heading: string) => {
        csv_content = csv_content + separator + heading
    })
    csv_content = csv_content + '\n'
    // add max length of variance for each column
    let ii = 0
    columns.forEach((column: string) => {
        // console.log('column', column, data_variance[column].length)
        csv_content = csv_content + separator + data_variance[column].length
        ii++
    })

    csv_content = csv_content + '\n\n'
    // add content of values
    for (let ii = 0; ii < longest_variance_count; ii++) {
        csv_content = csv_content + '\n'
        columns.forEach((column: any) => {
            if (ii < data_variance[column].length) {

                let currentValue = data_variance[column][ii]
                csv_content = csv_content + separator + currentValue
            } else {
                csv_content = csv_content + separator
            }
        })
    }

    // console.log('csv_content', csv_content)
    return csv_content;
}
174
mappings/converters/configIRVE.ts
Normal file
@ -0,0 +1,174 @@
/**
 * conversion plan from the dataset keys to OSM tags
 * details in the table
 * https://wiki.openstreetmap.org/wiki/France/data.gouv.fr/Bornes_de_Recharge_pour_V%C3%A9hicules_%C3%89lectriques
 */
import MappingConfigType from "../mapping-config.type";

const MappingIRVE: MappingConfigType = {
    config_name: "IRVE config",
    config_author: "tykayn <contact@cipherbliss.com>",
    default_properties_of_point: {
        'amenity': 'charging_station'
    },
    source: {
        geojson_path: "etalab_data/all.json",
        url: 'https://www.data.gouv.fr/fr/datasets/r/7eee8f09-5d1b-4f48-a304-5e99e8da1e26'
    },
    /**
     * select only certain points from the source
     */
    filters: {
        enable_coordinates_filter: false,
        enable_properties_filter: true,
        // only keep geojson points whose zipcode property matches this regex
        properties: {
            consolidated_code_postal: '^[76|27]'
        },
        bounding_box: [
            {}
        ]
    },
    add_not_mapped_tags_too: false,
    tags: {
        // ******* numbers
        nbre_pdc: 'capacity',
        // ******* texts
        amenity: 'amenity', // keep the base tag
        capacity: 'capacity', // keep the base tag
        nom_amenageur: 'operator',
        siren_amenageur: 'owner:ref:FR:SIREN',
        nom_operateur: 'operator',
        telephone_operateur: 'phone',
        contact_operateur: 'email', // here we want to convert the key contact_operateur=something into email=something

        // id_station_itinerance: 'ref:EU:EVSE',
        id_station_local: 'ref',

        gratuit: {
            key_converted: 'fee',
            convert_to_boolean_value: true,
        },
        paiement_acte: {
            key_converted: 'authentication:none',
            convert_to_boolean_value: true, // converts to yes or no
        },
        reservation: {
            convert_to_boolean_value: true, // converts to yes or no
        },
        // observations: 'note',
        // nom_station: 'name',
        nom_enseigne: 'network',

        // ******* dates
        date_mise_en_service: 'start_date',
        // date_maj: 'source:date',


        // ******** boolean fields
        cable_t2_attache: {
            key_converted: 'socket:type2_cable',
            // socket:type2_cable is 1 in OSM when true
            truthy_value: '1',
            ignore_if_falsy: true,
        },
        prise_type_ef: {
            key_converted: 'socket:typee',
            ignore_if_falsy: true,
            convert_to_boolean_value: true,
        },
        prise_type_2: {
            key_converted: 'socket:type2',
            ignore_if_falsy: true,
            convert_to_boolean_value: true,
        },
        prise_type_combo_ccs: {
            key_converted: 'socket:type2_combo',
            ignore_if_falsy: true,
            convert_to_boolean_value: true,
        },
        prise_type_chademo: {
            key_converted: 'socket:chademo',
            ignore_if_falsy: true,
            convert_to_boolean_value: true,
        },
        // ******** more complex fields
        horaires: 'opening_hours', // already in the right format

        // accessibilite_pmr: 'wheelchair',
        paiement_cb: {
            key_converted: 'payment:credit_cards',
            // ignore_if_falsy: true,
            convert_to_boolean_value: true,
        },

        // accessibilite_pmr: {
        //     key_converted: "wheelchair",
        //     conditional_values: {
        //         "Accessibilité inconnue": {
        //             // value_converted: "",
        //             ignore_this_data: true, // do not add a tag if the value equals Accessibilité inconnue.
        //             // transform_function : (original_value) => original_value.toLowerCase(),
        //         },
        //         "Accessible mais non réservé PMR": {
        //             value_converted: "yes"
        //         },
        //         "Réservé PMR": {
        //             value_converted: "yes"
        //         },
        //         "Non accessible": {
        //             value_converted: "no"
        //         },
        //         "Mo-Fr 08:30-12:00,Mo-Fr 14:00-19:00,Sat 09:00-18:30": {
        //             value_converted: "Mo-Fr 08:30-12:00,Mo-Fr 14:00-19:00,Sat 09:00-18:30"
        //         },
        //         "24/7": {
        //             value_converted: ""
        //         }
        station_deux_roues: {
            remove_original_key: true,
            conditional_values: {
                // add three tags if the value is yes
                "yes": {
                    tags_to_add: [
                        {bicycle: "yes"},
                        {scooter: "yes"},
                        {motorcar: "no"},
                    ]
                }
            }
        }
        // TODO handle charging point power ratings
        // with a value transformation function
        // over the domain of the dataset
        // requires a key conditional on the true value of other converted keys.
        // it's complicated.

    },
}


// "nom_amenageur": "ELECTRA",
// "siren_amenageur": "891624884",
// "consolidated_commune": "S\u00e9r\u00e9zin-du-Rh\u00f4ne",
// "consolidated_is_lon_lat_correct": true,
// "cable_t2_attache": "True"
// "goal": "jeu de données pour tester la mapping engine",
// "prise_type_2": "yes",
// "station_deux_roues": "yes",
// "accessibilite_pmr": "Non accessible",
// "amenity": "charging_station",
// "capacity": 12,
// "reservation": "False",
// "nom_amenageur": "Bob Lenon",
// "siren_amenageur": "12345678",
// "socket:typee": "False",
// "prise_type_combo_ccs": "no",
// "fee": "no",
// "authentication:none": "yes"


export default MappingIRVE;
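For context, here is a minimal sketch (not part of this commit) of how such a config might be fed to the mapping engine added below in mappings/engine.ts. The import paths and the sample feature are assumptions made for illustration only.

```ts
// Hypothetical usage sketch; paths and sample data are assumptions, not repo files.
import MappingEngine from '../engine'
import MappingIRVE from './configIRVE'

const engine = new MappingEngine(MappingIRVE)

// a made-up feature shaped like one point of the Etalab dataset
const feature = {
    type: 'Feature',
    geometry: {type: 'Point', coordinates: [4.822159, 45.635079]},
    properties: {
        nom_operateur: 'ELECTRA',
        gratuit: 'False',
        prise_type_2: 'True',
        consolidated_code_postal: '76000'
    }
}

const converted = engine.mapElementFromConf(feature)
console.log(converted.properties)
// should look roughly like:
// { amenity: 'charging_station', operator: 'ELECTRA', fee: 'no', 'socket:type2': 'yes' }
```

Keys that are absent from the `tags` mapping (such as consolidated_code_postal here) are dropped as long as add_not_mapped_tags_too stays false.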
45
mappings/converters/mappingConfigIRVE_simple.ts
Normal file
@ -0,0 +1,45 @@
/**
 * conversion plan from the dataset keys to OSM tags
 * details in the table
 * https://wiki.openstreetmap.org/wiki/France/data.gouv.fr/Bornes_de_Recharge_pour_V%C3%A9hicules_%C3%89lectriques
 */

const mappingIRVE: any = {

    // ******* numbers
    nbre_pdc: 'capacity',
    amenity: 'amenity', // keep the base tag
    capacity: 'capacity', // keep the base tag
    nom_amenageur: 'operator',
    siren_amenageur: 'owner:ref:FR:SIREN',
    nom_operateur: 'operator',
    telephone_operateur: 'phone',
    // here we want to convert the key contact_operateur=something into email=something
    contact_operateur: 'email',

    id_station_itinerance: 'ref:EU:EVSE',
    id_station_local: 'ref',

    gratuit: 'fee',
    paiement_acte: 'authentication:none',

    reservation: 'reservation',
    observations: 'note',
    nom_station: 'name',
    nom_enseigne: 'network',

    // ******* dates
    date_mise_en_service: 'start_date',
    date_maj: 'source:date',
    // ******** boolean fields
    prise_type_ef: 'socket:typee',
    prise_type_2: 'socket:type2',
    prise_type_combo_ccs: 'socket:type2_combo',
    prise_type_chademo: 'socket:chademo',

    // ******** more complex fields
    horaires: 'opening_hours', // already in the right format

}

export default mappingIRVE;
326
mappings/engine.ts
Normal file
@ -0,0 +1,326 @@
import custom_utils from './utils'
import MappingConfigType from "./mapping-config.type";

const {debugLog} = custom_utils

let listOfBooleanKeys = [
    "prise_type_ef",
    "prise_type_2",
    "prise_type_combo_ccs",
    "prise_type_chademo",
    "gratuit",
    "paiement_acte",
    "paiement_cb",
    "cable_t2_attache"
]

export default class {
    mapping_config: any = {}

    constructor(mappingConfig: MappingConfigType) {
        this.setConfig(mappingConfig)
    }

    setConfig(mappingConfig: MappingConfigType) {
        this.mapping_config = mappingConfig
    }

    mapFeaturePoint(featurePointGeoJson: any) {

        let geoJSONConvertedPoint: any = {}
        geoJSONConvertedPoint.properties = {...this.mapping_config.default_properties_of_point}
        geoJSONConvertedPoint.type = featurePointGeoJson.type
        geoJSONConvertedPoint.geometry = featurePointGeoJson.geometry

        // let props = featurePointGeoJson.properties
        // props.forEach((key, value) => {
        //
        // })

        return geoJSONConvertedPoint
    }

    /**
     * TODO convert to a mapping config property transform_truthy
     * @param pointKeyName
     * @returns {boolean}
     */
    isBooleanKey(pointKeyName: string): boolean {

        return listOfBooleanKeys.indexOf(pointKeyName) !== -1
    }

    truthyValues = ['true', 'True', 'TRUE', '1', 'yes', 1]
    falsyValues = ['false', 'False', 'FALSE', '0', 'no', 0]

    /**
     * reduce the number of features
     * @param offsetCount
     * @param listOfFeatures
     */
    filterFeaturesByOffset(offsetCount: number, listOfFeatures: any): Array<any> {
        let filteredList = listOfFeatures
        // TODO
        return filteredList
    }

    // filterFeaturesByPropertyRegex(bboxConfig:any, listOfFeatures:any) {
    //     debugLog('bboxConfig', bboxConfig)
    //     let filteredList = listOfFeatures
    //     // TODO
    //     return filteredList
    // }

    filterFeaturesByPropertyRegex(propertyName: string, criteriaRegex: any, listOfFeatures: any) {
        let filteredList = listOfFeatures.filter((feature: any) => {
            return criteriaRegex.test(feature?.properties[propertyName])
        })
        return filteredList
    }

    /**
     * returns the converted element from the mapping config if present, null otherwise
     */
    mapElementFromConf(featurePoint: any): any {
        // debugLog('mapElementFromConf: mapElementFromConf', featurePoint)
        if (!this.mapping_config) {
            throw new Error('no config was loaded in the mapping engine. use setConfig(my_mapping_config) on this instance of mapping engine before using this.')
        }

        debugLog('mapElementFromConf: config_name', this.mapping_config.config_name)
        let mappingKeys = Object.keys(this.mapping_config.tags)
        let featurePointPropertiesKeys = Object.keys(featurePoint.properties)

        debugLog('mapElementFromConf: ============= keys mappingKeys:', this.mapping_config.tags.length, mappingKeys.length)
        debugLog('mapElementFromConf: ============= keys featurePointPropertiesKeys :', featurePoint.properties.length, featurePointPropertiesKeys.length)

        let newProperties = {...this.mapping_config.default_properties_of_point}


        // reinit properties of the current point
        let basePoint = Object.create(featurePoint)
        basePoint.type = featurePoint.type
        basePoint.geometry = featurePoint.geometry
        basePoint.properties = {...this.mapping_config.default_properties_of_point}

        // apply new properties if found in the mapping config
        featurePointPropertiesKeys.forEach(pointKeyName => {

            debugLog('mapElementFromConf: convert', pointKeyName)
            this.convertProperty(pointKeyName, mappingKeys, featurePoint, newProperties)

        })

        basePoint.properties = newProperties

        // debugLog('mapElementFromConf: basePoint', basePoint)
        return basePoint
    }

    /**
     * converts one property into another according to the mapping config
     * @param pointKeyName
     * @param mappingKeys
     * @param featurePoint
     * @param newProperties
     */
    convertProperty(pointKeyName: string, mappingKeys: any, featurePoint: any, newProperties: any) {
        let originalValue = featurePoint.properties[pointKeyName]
        let mappingValueObject: any = '';

        if (mappingKeys.indexOf(pointKeyName) > 0) {
            mappingValueObject = this.mapping_config.tags[pointKeyName]
            debugLog('convertProperty: mappingValueObject ', mappingValueObject)
        }

        debugLog(' ------ convertProperty: pointKeyName', pointKeyName)
        // debugLog('convertProperty: mappingKeys', mappingKeys)

        if (this.mapping_config.add_not_mapped_tags_too && (mappingKeys.indexOf(pointKeyName) === -1)) {
            /**
             * adding all unmapped tags is enabled
             */
            newProperties[pointKeyName] = originalValue;

        } else {
            /**
             * only use existing keys
             */
            if (mappingKeys.indexOf(pointKeyName) > 0) {
                let valueConvertedFromMapping = featurePoint.properties[pointKeyName]
                let keyConvertedFromMapping = mappingKeys[mappingKeys.indexOf(pointKeyName)]
                let mappingConfigOfTag = this.mapping_config.tags[pointKeyName]

                debugLog('========== mappingConfigOfTag', mappingConfigOfTag)
                debugLog('convertProperty: found element', pointKeyName, '=>', keyConvertedFromMapping, 'value : ', valueConvertedFromMapping)
                let convertedValue = originalValue

                let typeOfConfigForKey = typeof mappingConfigOfTag
                let isStringValue = typeOfConfigForKey === 'string'
                let isConfigMappingObject = typeOfConfigForKey === 'object'

                debugLog('convertProperty: - typeofValue', typeOfConfigForKey)
                debugLog('convertProperty: - pointKeyName', pointKeyName)
                debugLog('convertProperty: - valueConvertedFromMapping', valueConvertedFromMapping)
                debugLog('typeof valueConvertedFromMapping === \'string\'', typeOfConfigForKey)


                debugLog('convertProperty: isStringValue?', valueConvertedFromMapping, isStringValue)
                debugLog('convertProperty: isStringValue?', valueConvertedFromMapping, isStringValue)

                debugLog('mappingConfigOfTag', mappingConfigOfTag)
                debugLog('typeOfConfigForKey', typeOfConfigForKey)


                /**
                 * conversion when the key config is a string: only the key changes, not the value
                 */
                if (isStringValue) {
                    debugLog('convertProperty: -- string value')
                    debugLog('convertProperty: -- string value')
                    debugLog('convertProperty: -- simple conversion : ', pointKeyName, '=> ', mappingConfigOfTag, '_', originalValue, '=>', valueConvertedFromMapping)
                    debugLog('convertProperty: -- convertedValue', convertedValue)

                    convertedValue = valueConvertedFromMapping

                    if (convertedValue) {
                        newProperties[mappingConfigOfTag] = convertedValue
                    }
                } else {
                    debugLog('convertProperty: no string value')
                }

                if (isConfigMappingObject) {
                    let configObject = mappingConfigOfTag

                    debugLog('convertProperty: is config object', configObject)
                    let newKey: any = '' + pointKeyName
                    let remove_original_key = false;

                    if (configObject.key_converted) {
                        newKey = configObject.key_converted
                    }

                    if (configObject.truthy_value) {
                        // convert the value: if it is truthy, turn it into whatever the truthy_value property gives
                        // example: the dataset says the cable_t2_attache column is "True", but we want to convert it to "1".
                        // so we set truthy_value: '1'

                        debugLog('truthy_value', originalValue)
                        if (this.truthyValues.indexOf(originalValue) !== -1) {
                            convertedValue = configObject.truthy_value
                        }
                    }
                    if (configObject.falsy_value) {
                        if (this.falsyValues.indexOf(originalValue) !== -1) {
                            convertedValue = configObject.falsy_value
                        }
                    }

                    /**
                     * boolean conversion
                     */
                    if (mappingValueObject.convert_to_boolean_value) {
                        debugLog('convertProperty: is boolean_value_conversion')
                        debugLog('convertProperty: ==========> original value', originalValue)
                        if (this.truthyValues.indexOf(originalValue) !== -1) {
                            convertedValue = 'yes'
                        }
                        if (this.falsyValues.indexOf(originalValue) !== -1) {
                            convertedValue = 'no'
                        }
                    } else {
                        debugLog('convertProperty: is NOT having boolean_value_conversion', mappingValueObject)
                    }
                    if (configObject.remove_original_key) {
                        remove_original_key = true
                    }
                    if (configObject.ignore_if_falsy && this.falsyValues.indexOf(originalValue) !== -1) {
                        remove_original_key = true
                    }
                    /**
                     * config for a key:
                     * we can provide either a string or an object describing the transformations to apply
                     */
                    if (!remove_original_key && configObject.conditional_values) {
                        debugLog('convertProperty: conditional_values__________',
                            configObject.conditional_values)

                        let keysConditionnalValues: any = Object.keys(configObject.conditional_values)

                        let isFoundValue = keysConditionnalValues.indexOf(originalValue)
                        debugLog('isFoundValue', isFoundValue, originalValue)
                        debugLog('keysConditionnalValues', keysConditionnalValues)


                        if (isFoundValue > -1) {
                            let conditionnalConfig: any = configObject.conditional_values[keysConditionnalValues[isFoundValue]]
                            /** ----------------------
                             * handling of conditional values
                             * ---------------------- */
                            debugLog('conditionnalConfig', conditionnalConfig)

                            if (conditionnalConfig.ignore_this_data) {
                                debugLog(`on ignore cette clé car sa valeur "${originalValue}" est à exclure: `, pointKeyName, '=>', newKey)
                                remove_original_key = true;
                            }
                            if (conditionnalConfig.tags_to_add) {
                                // a set of tags to add can be defined
                                let tagKeys = Object.keys(conditionnalConfig.tags_to_add)
                                debugLog('conditionnalConfig.tags_to_add', conditionnalConfig.tags_to_add)
                                conditionnalConfig.tags_to_add.forEach((object: any, pair: any) => {
                                    debugLog('object', object)
                                    debugLog('pair', pair)
                                    let key: any = Object.keys(object)
                                    key = key[0]
                                    let value = object[key]

                                    debugLog('key', key)
                                    debugLog('value', value)
                                    newProperties[key] = value
                                })
                            }
                            if (conditionnalConfig.truthy_value) {
                                // convert the value: if it is truthy, turn it into whatever the truthy_value property gives
                                // example: the dataset says the cable_t2_attache column is "True", but we want to convert it to "1".
                                // so we set truthy_value: '1'
                                if (this.truthyValues.indexOf(originalValue) !== -1) {
                                    convertedValue = conditionnalConfig.truthy_value
                                }
                            }
                            if (conditionnalConfig.falsy_value) {
                                if (this.falsyValues.indexOf(originalValue) !== -1) {
                                    convertedValue = conditionnalConfig.falsy_value
                                }
                            }
                            if (conditionnalConfig.transform_function) {
                                // a transformation of the value
                                // apply transformation to value
                                convertedValue = conditionnalConfig.transform_function(originalValue)
                            }
                            // use the value converted
                            else if (conditionnalConfig.value_converted) {
                                convertedValue = conditionnalConfig.value_converted
                            }


                        }

                    }

                    debugLog('convertProperty: convertedValue ==========> {', newKey, ':', convertedValue, '}')
                    debugLog(' =============== remove_original_key', newKey, remove_original_key)
                    if (!remove_original_key && newKey && convertedValue && !configObject.ignore_this_data) {

                        debugLog('convertProperty: added')
                        newProperties[newKey] = convertedValue.trim()
                    }
                }
            }
        }

    }

}
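A brief, hypothetical illustration of how convertProperty treats the truthy_value and ignore_if_falsy options, using the cable_t2_attache entry of configIRVE. The import paths and features below are assumptions for illustration, not files from this commit.

```ts
// Hypothetical sketch assumed to live next to mappings/; paths are assumptions.
import MappingEngine from './engine'
import MappingIRVE from './converters/configIRVE'

const engine = new MappingEngine(MappingIRVE)

// small helper building a fake feature with only the cable_t2_attache column
const point = (cable: string) => ({
    type: 'Feature',
    geometry: {type: 'Point', coordinates: [2.35, 48.85]},
    properties: {cable_t2_attache: cable}
})

// "True" is in truthyValues, so the configured truthy_value '1' is written:
console.log(engine.mapElementFromConf(point('True')).properties)
// roughly {amenity: 'charging_station', 'socket:type2_cable': '1'}

// "False" is in falsyValues and ignore_if_falsy is set, so no socket tag is written:
console.log(engine.mapElementFromConf(point('False')).properties)
// roughly {amenity: 'charging_station'}
```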
74
mappings/mapping-config.type.ts
Normal file
@ -0,0 +1,74 @@
interface GeoJsonGeometry {
    type: string,
    coordinates: Array<number>,
}

interface GeoJsonProperties {
    [key: string]: any,
}

interface GeoJsonFeature {
    type: string,
    geometry: GeoJsonGeometry,
    properties: GeoJsonProperties,
}

export interface FeatureCollection {
    type: string,
    features: Array<GeoJsonFeature>,
}

export interface BoundingBoxCoordinatesType {
    xMin: number,
    xMax: number,
    yMin: number,
    yMax: number,
}

export default interface MappingConfigType {
    config_name: string,
    config_author: string,
    add_not_mapped_tags_too: boolean,
    default_properties_of_point: object,
    source: object,
    filters: object,
    tags: FeaturePropertyMappingConfigType
}


/**
 * configuration applying to all values
 */
export interface FeaturePropertyMappingConfigType {
    [key: string]: any,
    convert_to_boolean_value?: boolean,
    remove_original_key?: boolean,
    conditionnal_values?: ConditionnalValuesType,
    transform_function?: Function,
}

/**
 * how to convert the original value according to given criteria
 */
export interface ConditionnalValuesConfigType {
    key_converted?: string,
    value_converted?: string,
    truthy_value?: any,
    falsy_value?: any, // if the original value is falsy, convert it to the value given here
    ignore_this_data?: boolean,
    tags_to_add?: TagsToAddConfig,
    transform_function?: Function,
}

export interface ConditionnalValuesType {
    [key: string]: ConditionnalValuesConfigType,
}

interface OneOSMTag {
    [key: string]: string,
}

export interface TagsToAddConfig {
    tags_to_add: Array<OneOSMTag>
}
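As a reading aid, a minimal config satisfying MappingConfigType might look like the sketch below. Every field name comes from the interfaces above; the values and the file name are invented for illustration.

```ts
import MappingConfigType from './mapping-config.type'

// minimal, invented example of a MappingConfigType object
const minimalConfig: MappingConfigType = {
    config_name: 'minimal example',
    config_author: 'someone <someone@example.org>',
    add_not_mapped_tags_too: false,
    default_properties_of_point: {amenity: 'charging_station'},
    source: {geojson_path: 'etalab_data/all.json'},
    filters: {},
    tags: {
        // a plain string only renames the key
        nom_operateur: 'operator',
        // an object can also transform the value
        gratuit: {key_converted: 'fee', convert_to_boolean_value: true}
    }
}

export default minimalConfig
```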
69
mappings/utils.ts
Normal file
@ -0,0 +1,69 @@
import fs from 'fs'

let show_debug = 0
// show_debug = 1
let output_folder = 'output';

// console.log('----------------------show_debug', show_debug)

/**
 * log wrapper that only prints if show_debug has been enabled
 * @param args
 */
function debugLog(...args: any[]) {
    if (show_debug) {
        console.log('### debug: ', ...args)
    }
}


let listOfBooleanKeys = [
    "prise_type_ef",
    "prise_type_2",
    "prise_type_combo_ccs",
    "prise_type_chademo",
    "gratuit",
    "paiement_acte",
    "paiement_cb",
    "cable_t2_attache"
]


/**
 * tells whether a dataset key holds a boolean value
 * @param pointKeyName
 * @returns {boolean}
 */
function isBooleanKey(pointKeyName: string): boolean {

    return listOfBooleanKeys.indexOf(pointKeyName) !== -1
}

/**
 * creates a file in the default folder, output
 * @param fileName
 * @param fileContent
 */
function writeFile(fileName: string, fileContent: any) {
    debugLog('write file ', fileName)


    return fs.writeFile(
        `./${output_folder}/${fileName}`,
        fileContent,
        'utf8',
        (err) => {
            if (err) {
                console.log(`Error writing file: ${err}`)
            } else {
                console.log(`File ${fileName} is written successfully!`)
            }
        }
    )
}

export default {
    debugLog,
    isBooleanKey,
    writeFile
}
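A short, hypothetical example of how these helpers are meant to be consumed from another module; the file name and values are made up.

```ts
import custom_utils from './utils'

const {debugLog, writeFile} = custom_utils

// silent unless show_debug is switched on inside utils.ts
debugLog('number of features kept after filtering:', 42)

// writes ./output/example.geojson; the output folder must already exist
writeFile('example.geojson', JSON.stringify({type: 'FeatureCollection', features: []}))
```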
1
osm_output/all_from_etalab.osm
Normal file
File diff suppressed because one or more lines are too long

2308
osm_output/borne-présentes-dans-osm.osm
Normal file
File diff suppressed because it is too large

File diff suppressed because one or more lines are too long (2 files)

@ -0,0 +1 @@
<osm version="0.6" generator="geojsontoosm"><node id="-1" lat="45.635079000000005" lon="4.822158999999999"><tag k="amenity" v="charging_station"/><tag k="operator" v="ELECTRA"/><tag k="owner:ref:FR:SIREN" v="891624884"/><tag k="email" v="help@electra.com"/><tag k="network" v="ELECTRA"/><tag k="socket:type2_combo" v="yes"/><tag k="fee" v="false"/><tag k="authentication:none" v="true"/><tag k="reservation" v="yes"/><tag k="opening_hours" v="24/7"/><tag k="start_date" v="2022-10-27"/></node></osm>

File diff suppressed because one or more lines are too long (11 files)

@ -0,0 +1 @@
<osm version="0.6" generator="geojsontoosm"><node id="-1" lat="45.635079000000005" lon="4.822158999999999"><tag k="amenity" v="charging_station"/><tag k="operator" v="ELECTRA"/><tag k="owner:ref:FR:SIREN" v="891624884"/><tag k="email" v="help@electra.com"/><tag k="network" v="ELECTRA"/><tag k="socket:type2_combo" v="yes"/><tag k="fee" v="false"/><tag k="authentication:none" v="true"/><tag k="reservation" v="yes"/><tag k="opening_hours" v="24/7"/><tag k="start_date" v="2022-10-27"/></node></osm>

File diff suppressed because one or more lines are too long (2 files)

Some files were not shown because too many files have changed in this diff