add a one-second delay between each fetch

This commit is contained in:
Tykayn 2022-01-13 14:52:27 +01:00 committed by tykayn
parent 3f44754e94
commit d09767dd4e
3 changed files with 42 additions and 26 deletions

View File

@@ -23,9 +23,9 @@ const parserConfig = {
// enableFetch: false,
dev_mode: true, // dev mode uses localhost instance of mobilizon running on port 4000
limit_persistence_of_new_events:true,
max_new_events_in_scrapping: 20,
max_new_events: 2,
bearer_token: "eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJtb2JpbGl6b24iLCJleHAiOjE2NDIwODE1MTksImlhdCI6MTY0MjA4MDYxOSwiaXNzIjoibW9iaWxpem9uIiwianRpIjoiOWNiNTU2MDUtZDJhNy00NjUxLWFiNjEtYmJiYmE2NTY5N2E3IiwibmJmIjoxNjQyMDgwNjE4LCJzdWIiOiJVc2VyOjEiLCJ0eXAiOiJhY2Nlc3MifQ.pykTIwQrzl6-qEcZowysmT9NnV71SBPZEv5Utbdny_AaU_ng7vspcdWxgZca0Q16i2lcclTJbuggtxAo8kWeIw",
max_new_events_in_scrapping: 400,
max_new_events: 5,
bearer_token: "eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJtb2JpbGl6b24iLCJleHAiOjE2NDIwODIzOTgsImlhdCI6MTY0MjA4MTQ5OCwiaXNzIjoibW9iaWxpem9uIiwianRpIjoiYWQyZjMxY2YtMDUxZi00MmY0LWI1ZDYtMzc4ZjZiOTgxY2EwIiwibmJmIjoxNjQyMDgxNDk3LCJzdWIiOiJVc2VyOjEiLCJ0eXAiOiJhY2Nlc3MifQ.U0JnKQFkklxDcWPE4utXwj65mR0N1LyT3qwC2RzOiWjLWyCS_0KMKJPm5uvylZ-lhiPigojfZsuQCekYL0qTHg",
ccpl: "https://www.cc-paysdelimours.fr/agenda"

View File

@@ -38,7 +38,7 @@ async function runImportEvents() {
return console.log(err);
}
filecontent = JSON.parse(data)
filecontent = filecontent.slice(0,parserConfig.max_new_events_in_scrapping)
filecontent = filecontent.slice(0, parserConfig.max_new_events_in_scrapping)
console.log('events in the scrapped json', filecontent.length);
let ii = 0;
@@ -89,10 +89,12 @@ async function runImportEvents() {
"authorization": "Bearer " + parserConfig.bearer_token,
}
}
if(!utilsTools.agendadulibre.doesEventExistsFromJsonScrap(pair.event)){
if (!utilsTools.agendadulibre.doesEventExistsFromJsonScrap(pair.event)) {
console.log('ajouter');
fetchEvent(url, options)
}else{
// add a little delay between creations
fetchEvent(url, options, counter, pair.event)
} else {
console.log('nope');
}
} else {
@@ -107,22 +109,31 @@ async function runImportEvents() {
}
const fetchEvent = (theUrl, theOptions) => {
fetch(theUrl, theOptions)
.then((res: any) => {
let status = res.status;
console.log('status', status);
if (status === 401) {
console.error(' /!\\ ------------------ ERROR: Bearer token invalid ------------------')
const fetchEvent = (theUrl, theOptions, counter, event) => {
let timeout = setTimeout(
function () {
} else if (status === 200) {
console.log('succès');
fetch(theUrl, theOptions)
.then((res: any) => {
let status = res.status;
console.log('status', status);
if (status === 401) {
console.error(' /!\\ ------------------ ERROR: Bearer token invalid ------------------')
clearTimeout(timeout);
} else if (status === 200) {
console.log('succès - ' + event.title + ' ' + event.start_time);
}
res.json()
})
.then((json: any) => console.log(json))
.catch((err: any) => console.log(err))
}
,
1000 * counter
)
}
res.json()
})
.then((json: any) => console.log(json))
.catch((err: any) => console.log(err))
}
// fs.stat(filepath, function (err, stat) {

View File

@@ -222,10 +222,8 @@ class utils {
.format("YYYY-MM-DD") + ' ' + event.title
},
doesEventExistsFromJsonScrap: (event: any): boolean => {
console.log('this.agendadulibre.uniqTitle(event)', this.agendadulibre.uniqTitle(event));
const eventAlreadyExists =
-1 !== this.localMobilizonEventsByTitle.indexOf(this.agendadulibre.uniqTitle(event));
console.log('eventAlreadyExists', eventAlreadyExists);
return eventAlreadyExists;
},
addQueryFromJsonScrap: (event: any) => {
@@ -235,15 +233,22 @@ class utils {
})
tags.push("imported")
console.log('tags', tags);
let newQuery = {
operationName: "createEvent",
query: createEventQueryMobilizon,
variables: {
attributedToId: null,
beginsOn: event.start_time,
contacts: [],
description: "<p>" + event.description + "</p>",
contacts: [event.contact],
description:
"<address>" + event.city
+ "<br/>"+
event.address +
"<br/>"+
event.place_name +
"</address>"+
"<p>" + event.description + "</p>",
draft: false,
endsOn: event.end_time,
joinOptions: "FREE",