daemon: Remove birdnet_miner and call birdnet_output_to_sql at each new model execution
parent 39233fe937
commit 4f09a2dd4e

@@ -3,8 +3,9 @@
 
 - Add docker compose port
 - Improve install script
+- Add base uninstall script (needs deeper work)
 - Add ttyd for systemd logging
-## v0.0.1-rc
+## v0.0.1-rc (2022-08-18)
 
 - Integrate BirdNET-Analyzer as submodule
 - Add birdnet_recording service
@@ -129,7 +129,7 @@ sudo mv /composer.phar /usr/local/bin/composer
 
 ```bash
 cd www
-composer install
+composer install --no-dev --prefer-dist --optimize-autoloader
 ```
 
 ### Install nodejs and npm
@@ -147,7 +147,7 @@ nvm use 16
 ```
 
 ```bash
-sudo dnf install npm
+sudo apt-get install npm
 ```
 
 ```bash
@@ -3,9 +3,7 @@ set -e
 
 DEBUG=${DEBUG:-1}
 debug() {
-if [ $DEBUG -eq 1 ]; then
-echo "$1"
-fi
+[[ $DEBUG -eq 1 ]] && echo "$@"
 }
 
 config_filepath="./config/birdnet.conf"
@@ -64,7 +62,9 @@ check_prerequisites() {
 
 # Get array of audio chunks to be processed
 get_chunk_list() {
-find "${CHUNK_FOLDER}/in" -type f -name '*.wav' -exec basename {} \; ! -size 0 | sort
+chunk_list=($(ls ${CHUNK_FOLDER}/in))
+echo "${chunk_list}"
+# find "${CHUNK_FOLDER}/in" -type f -name '*.wav' -exec basename {} \; ! -size 0 | sort
 }
 
 # Perform audio chunk analysis on one chunk
@@ -75,13 +75,22 @@ analyze_chunk() {
 mkdir -p "$output_dir"
 date=$(echo $chunk_name | cut -d'_' -f2)
 week=$(./daemon/weekof.sh $date)
-$PYTHON_EXECUTABLE ./analyzer/analyze.py --i $chunk_path --o "$output_dir/model.out.csv" --lat $LATITUDE --lon $LONGITUDE --week $week --min_conf $CONFIDENCE --threads 4 --rtype csv
+if [[ ! -z "${THREADS}" ]]; then
+threads="--threads ${THREADS}"
+else
+threads=""
+fi
+$PYTHON_EXECUTABLE ./analyzer/analyze.py --i $chunk_path --o "$output_dir/model.out.csv" --lat $LATITUDE --lon $LONGITUDE --week $week --min_conf $CONFIDENCE $threads --rtype csv
 debug "Model output written to $output_dir/model.out.csv"
+bash ./daemon/birdnet_output_to_sql.sh "$output_dir/model.out.csv"
+debug "Dumped to SQL database"
 }
 
 # Perform audio chunk analysis on all recorded chunks
 analyze_chunks() {
-for chunk_name in $(get_chunk_list); do
+local chunks
+chunks="${1}"
+for chunk_name in "${chunks}"; do
 if [[ -f "${CHUNK_FOLDER}/out/$chunk_name.d/model.out.csv" ]]; then
 debug "Skipping $chunk_name, as it has already been analyzed"
 else
@@ -98,4 +107,4 @@ check_prerequisites
 chunks=$(get_chunk_list)
 
 # Analyze all chunks in working directory
-analyze_chunks $chunks
+analyze_chunks "$chunks"
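The change above also makes the analyzer's thread count configurable: `--threads` is only passed when `THREADS` is set, instead of being hard-coded to 4. A minimal sketch of the setting, assuming `THREADS` is read from `config/birdnet.conf` like the other variables used here (the value is only an example):

```bash
# config/birdnet.conf (excerpt, example value)
THREADS=4   # forwarded to analyze.py as "--threads 4"; leave unset to use the analyzer's default
```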
@@ -1,15 +1,13 @@
 #! /usr/bin/env bash
-# Extract observations from a model output folder
+# Extract observations from a model output file into SQL database
 #
 
 DEBUG=${DEBUG:-1}
 set -e
 # set -x
-
+DEBUG=${DEBUG:-1}
 debug() {
-if [ $DEBUG -eq 1 ]; then
-echo "$1"
-fi
+[[ $DEBUG -eq 1 ]] && echo "$@"
 }
 
 # Load bash library to deal with BirdNET-stream database
@@ -18,16 +16,6 @@ source ./daemon/database/scripts/database.sh
 # Load config
 source ./config/birdnet.conf
 # Check config
-if [[ -z ${CHUNK_FOLDER} ]]; then
-echo "CHUNK_FOLDER is not set"
-exit 1
-else
-if [[ ! -d ${CHUNK_FOLDER}/out ]]; then
-echo "CHUNK_FOLDER does not exist: ${CHUNK_FOLDER}/out"
-echo "Cannot extract observations."
-exit 1
-fi
-fi
 
 if [[ -z ${LATITUDE} ]]; then
 echo "LATITUDE is not set"
@@ -39,10 +27,6 @@ if [[ -z ${LONGITUDE} ]]; then
 exit 1
 fi
 
-model_outputs() {
-ls ${CHUNK_FOLDER}/out/*/model.out.csv
-}
-
 source_wav() {
 model_output_path=$1
 model_output_dir=$(dirname $model_output_path)
@@ -107,13 +91,6 @@ save_observations() {
 done
 }
 
-main() {
-# # Remove all junk observations
-# ./daemon/birdnet_clean.sh
-# Get model outputs
-for model_output in $(model_outputs); do
-save_observations $model_output
-done
-}
+model_output_path="$1"
 
-main
+save_observations $model_output_path
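With its internal loop removed, this script now imports a single model output file, passed as its first argument, which matches how `analyze_chunk()` invokes it after every model run. A usage sketch (the chunk directory name is a placeholder):

```bash
# Import one chunk's detections into the SQLite database
bash ./daemon/birdnet_output_to_sql.sh "${CHUNK_FOLDER}/out/<chunk_name>.d/model.out.csv"
```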
@@ -6,31 +6,44 @@ import matplotlib.pyplot as plt
 from matplotlib.colors import LogNorm
 import seaborn as sns
 from datetime import datetime
+import os
+import glob
 
 CONFIG = {
 "readings": 10,
 "palette": "Greens",
 "db": "./var/db.sqlite",
-"date": datetime.now().strftime("%Y-%m-%d")
-# "date": "2022-08-15"
+"date": datetime.now().strftime("%Y-%m-%d"),
+"charts_dir": "./var/charts"
 }
 
-db = sqlite3.connect(CONFIG['db'])
+db = None
 
-df = pd.read_sql_query("""SELECT common_name, date, location_id, confidence
+def get_database():
+global db
+if db is None:
+db = sqlite3.connect(CONFIG["db"])
+return db
+
+def chart(date):
+db = get_database()
+df = pd.read_sql_query(f"""SELECT common_name, date, location_id, confidence
 FROM observation
 INNER JOIN taxon
-ON observation.taxon_id = taxon.taxon_id""", db)
+ON observation.taxon_id = taxon.taxon_id
+WHERE STRFTIME("%Y-%m-%d", `date`) = '{date}'""", db)
 df['date'] = pd.to_datetime(df['date'])
 df['hour'] = df['date'].dt.hour
 df['date'] = df['date'].dt.date
 df['date'] = df['date'].astype(str)
-df_on_date = df[df['date'] == CONFIG['date']]
+df_on_date = df[df['date'] == date]
 
 top_on_date = (df_on_date['common_name'].value_counts()[:CONFIG['readings']])
 if top_on_date.empty:
-print("No observations on {}".format(CONFIG['date']))
-exit()
+print("No observations on {}".format(date))
+return
+else:
+print(f"Found observations on {date}")
 
 df_top_on_date = df_on_date[df_on_date['common_name'].isin(top_on_date.index)]
 
@@ -90,6 +103,22 @@ plot.set(xlabel="Hour of day")
 plt.suptitle(f"Top {CONFIG['readings']} species on {CONFIG['date']}", fontsize=14)
 plt.text(15, 11, f"(Updated on {datetime.now().strftime('%Y/%m-%d %H:%M')})")
 plt.savefig(f"./var/charts/chart_{CONFIG['date']}.png", dpi=300)
+print(f"Plot for {date} saved.")
 plt.close()
 
+def main():
+done_charts = glob.glob(f"{CONFIG['charts_dir']}/*.png")
+last_modified = max(done_charts, key=os.path.getctime)
+last_modified_date = last_modified.split("_")[-1].split(".")[0]
+missing_dates = pd.date_range(start=last_modified_date, end=CONFIG['date'], freq='D')
+print(missing_dates)
+for missing_date in missing_dates:
+date = missing_date.strftime("%Y-%m-%d")
+chart(date)
+chart(CONFIG['date'])
+if db is not None:
 db.close()
+print("Done.")
+
+if __name__ == "__main__":
+main()
@@ -1,13 +0,0 @@
-[Unit]
-Description=BirdNET-stream miner service
-
-[Service]
-Type=simple
-User=<USER>
-Group=<GROUP>
-WorkingDirectory=<DIR>
-ExecStart=bash ./daemon/birdnet_miner.sh
-RemainAfterExit=yes
-
-[Install]
-WantedBy=multi-user.target
@@ -1,9 +0,0 @@
-[Unit]
-Description=BirdNET-stream miner Timer
-
-[Timer]
-OnCalendar=*:0/15
-Unit=birdnet_miner.service
-
-[Install]
-WantedBy=timers.target
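Since the `birdnet_miner` service and timer units are deleted (their work now happens inline after each analysis), an existing installation would presumably need to stop and remove the old units by hand; a sketch, assuming they were installed under `/etc/systemd/system` as the install script does:

```bash
# Stop, disable and remove the obsolete miner units
sudo systemctl disable --now birdnet_miner.timer birdnet_miner.service
sudo rm -f /etc/systemd/system/birdnet_miner.timer /etc/systemd/system/birdnet_miner.service
sudo systemctl daemon-reload
```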
@@ -34,10 +34,13 @@ Then, create your dotenv file and populate it with your own configuration (for i
 cp .env.example .env
 ```
 
-Then, run docker-compose:
+You may need to adapt the listening ports of the services or other configuration parameters.
+In general, all variables stated as ${VARIABLE:-default} inside [../docker-compose.yml](../docker-compose.yml) can be overridden in the .env file using `VARIABLE=value`.
+
+Once that is done, you can build and start the docker services:
 
 ```bash
-# Build image (first time only)
+# Build images (first time only)
 docker compose build
 # Run
 docker compose up # add `-d`, to run in background
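As a concrete illustration of the override mechanism described above, a hypothetical `.env` entry could look like the following (the variable name is an example; the real names are whatever `docker-compose.yml` declares with `${VARIABLE:-default}`):

```bash
# .env (example value only)
HTTP_PORT=8080   # overrides a ${HTTP_PORT:-80}-style default in docker-compose.yml
```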
@@ -67,7 +67,7 @@ install_birdnetstream_services() {
 DIR="$WORKDIR"
 cd "$WORKDIR"
 debug "Setting up BirdNET stream systemd services"
-services="birdnet_recording.service birdnet_analyzis.service birdnet_miner.timer birdnet_miner.service birdnet_plotter.service birdnet_plotter.timer"
+services="birdnet_recording.service birdnet_analyzis.service birdnet_plotter.service birdnet_plotter.timer"
 read -r -a services_array <<<"$services"
 for service in ${services_array[@]}; do
 sudo cp "daemon/systemd/templates/$service" "/etc/systemd/system/"
@@ -78,7 +78,7 @@ install_birdnetstream_services() {
 done
 sudo sed -i "s|<VENV>|$WORKDIR/$PYTHON_VENV|g" "/etc/systemd/system/birdnet_plotter.service"
 sudo systemctl daemon-reload
-enabled_services="birdnet_recording.service birdnet_analyzis.service birdnet_miner.timer birdnet_plotter.timer"
+enabled_services="birdnet_recording.service birdnet_analyzis.service birdnet_plotter.timer"
 read -r -a services_array <<<"$services"
 for service in ${services_array[@]}; do
 debug "Enabling $service"
@@ -163,7 +163,7 @@ setup_http_server() {
 fi
 debug "Enable birdnet.lan domain"
 sudo ln -s /etc/nginx/sites-available/birdnet-stream.conf /etc/nginx/sites-enabled/birdnet-stream.conf
-debug "Info: Please edit /etc/nginx/sites-available/birdnet-stream.conf to set the correct server name and paths"
+debug "INFO: Please edit /etc/nginx/sites-available/birdnet-stream.conf to set the correct server name and paths"
 debug "Setup nginx variables the best way possible"
 sudo sed -i "s|<SYMFONY_PUBLIC>|$WORKDIR/www/public/|g" /etc/nginx/sites-available/birdnet-stream.conf
 sudo sed -i "s|<RECORDS_DIR>|$CHUNK_FOLDER/out|g" /etc/nginx/sites-available/birdnet-stream.conf
@@ -38,3 +38,12 @@ uninstall_webapp() {
 sudo unlink /etc/nginx/sites-enabled/birdnet-stream.conf
 sudo systemctl restart nginx
 }
+
+main() {
+echo "WARNING: This will remove all BirdNET-stream related files and services. \
+Note that it may forget some special configuration."
+uninstall_webapp
+uninstall_birdnet_services
+}
+
+main
@@ -25,11 +25,15 @@ class HomeController extends AbstractController
 * @Route("", name="home")
 * @Route("/{_locale<%app.supported_locales%>}/", name="home_i18n")
 */
-public function index()
+public function index(Request $request)
 {
+$date = $request->get("on");
+if ($date == null) {
+$date = date("Y-m-d");
+}
 return $this->render('index.html.twig', [
-"stats" => $this->get_stats(),
-"charts" => $this->last_chart_generated(),
+"stats" => $this->get_stats($date),
+"charts" => $this->last_chart_generated($date),
 ]);
 }
 
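The new `Request` parameter lets the home page be filtered by date through an `on` query parameter, falling back to today when it is absent. A hedged example request (the `birdnet.lan` host comes from the nginx setup elsewhere in this repository; the date is arbitrary):

```bash
# Render the dashboard for a given day; omit "?on=..." to default to today
curl 'http://birdnet.lan/?on=2022-08-15'
```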
@@ -42,11 +46,12 @@ class HomeController extends AbstractController
 return $this->render('about/index.html.twig', []);
 }
 
-private function get_stats()
+private function get_stats($date)
 {
 $stats = array();
 $stats["most-recorded-species"] = $this->get_most_recorded_species();
 $stats["last-detected-species"] = $this->get_last_recorded_species();
+$stats["number-of-species-detected"] = $this->get_number_of_species_detected($date);
 return $stats;
 }
 
@@ -86,6 +91,27 @@ class HomeController extends AbstractController
 return $species;
 }
 
+private function get_number_of_species_detected($date)
+{
+$count = 0;
+$sql = "SELECT COUNT(`taxon_id`) AS contact_count
+FROM `observation`
+WHERE STRFTIME('%Y-%m-%d', `date`) = :date
+GROUP BY `taxon_id`";
+try {
+$stmt = $this->connection->prepare($sql);
+$stmt->bindValue(":date", $date);
+$result = $stmt->executeQuery();
+$output = $result->fetchAllAssociative();
+if ($output != null) {
+$count = $output[0]["contact_count"];
+}
+} catch (\Exception $e) {
+$this->logger->error($e->getMessage());
+}
+return $count;
+}
+
 private function last_chart_generated()
 {
 $files = glob($this->getParameter('kernel.project_dir') . '/../var/charts/*.png');
@@ -1,49 +1,77 @@
 <div id="stats">
-<h2>{{ "Quick Stats" | trans }}</h2>
+<h2>
+{{ 'Quick Stats'|trans }}
+</h2>
 <ul>
-<li class="most-recorded-species">
-{{ "Most recorded species" | trans }}:
-{% if stats["most-recorded-species"] is defined and stats["most-recorded-species"]|length > 0 %}
+<li class="stat">
+{{ 'Most recorded species'|trans }}:{% if
+stats['most-recorded-species'] is defined
+and (stats['most-recorded-species']|length) > 0 %}
 <span class="scientific-name">
-{{ stats["most-recorded-species"]["scientific_name"] }}
+{{ stats['most-recorded-species']['scientific_name'] }}
 </span>
-(<span class="common_name">{{ stats["most-recorded-species"]["common_name"] }}</span>)
-{{ "with" | trans }}
+(<span class="common_name">
+{{ stats['most-recorded-species']['common_name'] }}
+</span>)
+{{ 'with'|trans }}
 <span class="observation-count">
-{{ stats["most-recorded-species"]["contact_count"] }}
+{{ stats['most-recorded-species']['contact_count'] }}
 </span>
-{{ "contacts" | trans }}.
+{{ 'contacts'|trans }}.
 {% else %}
-{{ "No species in database." | trans }}
+{{ 'No species in database.'|trans }}
 {% endif %}
 </li>
-<li class="last-recorded-species">
-{{ "Last detected species" | trans }}:
-{% if stats["last-detected-species"] is defined and stats["last-detected-species"]|length > 0 %}
+<li class="stat">
+{{ 'Last detected species'|trans }}:{% if
+stats['last-detected-species'] is defined
+and (stats['last-detected-species']|length) > 0 %}
 <span class="scientific-name">
-{{ stats["last-detected-species"]["scientific_name"] }}
+{{ stats['last-detected-species']['scientific_name'] }}
 </span>
-(<span class="common_name">{{ stats["last-detected-species"]["common_name"] }}</span>)
-{{ "with" | trans }}
+(<span class="common_name">
+{{ stats['last-detected-species']['common_name'] }}
+</span>)
+{{ 'with'|trans }}
 <span class="confidence">
-{{ stats["last-detected-species"]["confidence"] }}
+{{ stats['last-detected-species']['confidence'] }}
 </span>
-{{ "AI confidence" | trans }}
+{{ 'AI confidence'|trans }}
 <span class="datetime">
-{% set date = stats["last-detected-species"]["date"] %}
-{% if date | date("Y-m-d") == "now" | date("Y-m-d") %}
-{{ "today" | trans }}
+{% set date = stats['last-detected-species']['date'] %}
+{% if (date|date('Y-m-d')) == ('now'|date('Y-m-d')) %}
+{{ 'today'|trans }}
 {% else %}
-{{ "on" | trans }}
-{{ date | format_datetime("full", "none") }}
-{% endif %}
-at
-<span class="time">
-{{ date | date("H:i") }}
-</span>
+{{ 'on'|trans }}
+{{ date|format_datetime('full', 'none') }}
+{% endif %}at
+<span class="time">{{ date|date('H:i') }}</span>
 </span>.
 {% else %}
-{{ "No species in database" | trans }}
+{{ 'No species in database'|trans }}
+{% endif %}
+</li>
+<li class="stat">
+{% set today = 'now'|date('Y-m-d') %}
+{% set date = app.request.get('on') %}
+{% if
+stats['number-of-species-detected'] is defined
+and stats['number-of-species-detected'] > 0 %}
+{% if today == date %}
+{{ 'Number of species detected today: '|trans }}
+{% else %}
+{{ 'Number of species detected on '|trans }}
+{{ date|format_datetime('full', 'none') }}:
+{% endif %}
+<span>{{ stats['number-of-species-detected'] }}</span>.
+{% else %}
+{# {{ 'No species detected today'|trans }} #}
+{% if today == date %}
+{{ 'No species detected today.'|trans }}
+{% else %}
+{{ 'No species detected on '|trans }}
+{{ date|format_datetime('full', 'none') }}
+{% endif %}
 {% endif %}
 </li>
 </ul>