Installation des outils pour filtrer les robots

This commit is contained in:
Jean-Marie Favreau 2024-08-28 21:52:22 +02:00
parent 6902ba22f4
commit cbb34190cf
4 changed files with 15 additions and 1 deletion

View File

@@ -33,6 +33,8 @@ else:
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.sitemaps",
"django.contrib.sites",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
@@ -49,8 +51,11 @@ INSTALLED_APPS = [
"recurrence",
"location_field.apps.DefaultConfig",
"django.contrib.postgres",
"robots",
]
SITE_ID = 1
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"whitenoise.middleware.WhiteNoiseMiddleware",
@@ -229,3 +234,8 @@ LOCATION_FIELD = {
"provider.openstreetmap.max_zoom": 18,
"search.provider": "addok",
}
# stop robots
ROBOTS_USE_SITEMAP = False

View File

@@ -4,6 +4,7 @@
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Pommes de lune — {% block title %}{% endblock %}</title>
<meta name="robots" content="noindex, nofollow">
{% load static %}
<meta property="og:title" content="Pommes de lune — {% block og_title %}{% endblock %}" />

View File

@@ -2,7 +2,7 @@ from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.urls import path, include
from django.urls import path, include, re_path
from django.views.i18n import JavaScriptCatalog
@@ -171,6 +171,8 @@ urlpatterns = [
"ical",
export_ical,
name="export_ical"),
re_path(r'^robots\.txt', include('robots.urls')),
]
if settings.DEBUG:

View File

@@ -38,3 +38,4 @@ lxml==5.1.0
bbcode==1.1.0
json5==0.9.25
django-location-field==2.7.3
django-robots==6.1