From 5aa4eed816e4170f431c26b31d63227d5417fedd Mon Sep 17 00:00:00 2001
From: Danielle McLean
Date: Tue, 31 Oct 2017 15:10:13 +1100
Subject: [PATCH] Add simple /robots.txt support

Serve a plain-text /robots.txt that points crawlers at the sitemap.
The sitemap index route gains a name so the robots view can reverse()
it, and the dot in the robots.txt route regex is escaped so the
pattern matches the literal path only (consistent with sitemap\.xml).
---
 home/urls.py       |  1 +
 home/views.py      | 13 ++++++++++++-
 lemoncurry/urls.py |  2 +-
 3 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/home/urls.py b/home/urls.py
index a2c599e..f8f9663 100644
--- a/home/urls.py
+++ b/home/urls.py
@@ -5,4 +5,5 @@ from . import views
 app_name = 'home'
 urlpatterns = [
     url(r'^$', views.index, name='index'),
+    url(r'^robots\.txt$', views.robots, name='robots.txt'),
 ]
diff --git a/home/views.py b/home/views.py
index cfc0842..70e1a5e 100644
--- a/home/views.py
+++ b/home/views.py
@@ -1,7 +1,10 @@
 from annoying.decorators import render_to
+from django.http import HttpResponse
 from django.shortcuts import get_object_or_404
+from django.urls import reverse
 from users.models import User
 from lemoncurry import breadcrumbs, utils
+from urllib.parse import urljoin
 
 breadcrumbs.add('home:index', 'home')
 
@@ -10,10 +13,18 @@ breadcrumbs.add('home:index', 'home')
 def index(request):
     query = User.objects.prefetch_related('entries', 'profiles', 'keys')
     user = get_object_or_404(query, pk=1)
-    uri = utils.uri(request)
     return {
         'user': user,
         'entries': user.entries.all(),
         'meta': user.as_meta(request),
     }
+
+
+def robots(request):
+    base = utils.origin(request)
+    lines = (
+        'User-agent: *',
+        'Sitemap: {0}'.format(urljoin(base, reverse('sitemap')))
+    )
+    return HttpResponse("\n".join(lines) + "\n", content_type='text/plain')
diff --git a/lemoncurry/urls.py b/lemoncurry/urls.py
index 55e1b4a..27ccc89 100644
--- a/lemoncurry/urls.py
+++ b/lemoncurry/urls.py
@@ -41,7 +41,7 @@ urlpatterns = [
     url('^auth/', include('lemonauth.urls')),
     url('^s/', include('shorturls.urls')),
 
-    url(r'^sitemap\.xml$', sitemap.index, maps),
+    url(r'^sitemap\.xml$', sitemap.index, maps, name='sitemap'),
     url(r'^sitemaps/(?P<section>.+)\.xml$', sitemap.sitemap, maps,
         name='django.contrib.sitemaps.views.sitemap'),
 ]