Add simple /robots.txt support

Danielle McLean 2017-10-31 15:10:13 +11:00
parent 7090db3c37
commit 5aa4eed816
Signed by untrusted user: 00dani (GPG key ID: 5A5D2D1AFF12EEC5)
3 changed files with 14 additions and 2 deletions

home/urls.py

@@ -5,4 +5,5 @@ from . import views
 app_name = 'home'
 urlpatterns = [
     url(r'^$', views.index, name='index'),
+    url(r'^robots.txt$', views.robots, name='robots.txt'),
 ]
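
Note: the dot in the new pattern is unescaped, so r'^robots.txt$' actually matches any character in that position (e.g. /robotsXtxt would also resolve). A stricter variant of the same route would escape it:

    url(r'^robots\.txt$', views.robots, name='robots.txt'),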

home/views.py

@@ -1,7 +1,10 @@
 from annoying.decorators import render_to
+from django.http import HttpResponse
 from django.shortcuts import get_object_or_404
+from django.urls import reverse
 from users.models import User
 
 from lemoncurry import breadcrumbs, utils
+from urllib.parse import urljoin
 
 breadcrumbs.add('home:index', 'home')
@@ -10,10 +13,18 @@ breadcrumbs.add('home:index', 'home')
 @render_to('home/index.html')
 def index(request):
     query = User.objects.prefetch_related('entries', 'profiles', 'keys')
     user = get_object_or_404(query, pk=1)
-    uri = utils.uri(request)
     return {
         'user': user,
         'entries': user.entries.all(),
         'meta': user.as_meta(request),
     }
+
+
+def robots(request):
+    base = utils.origin(request)
+    lines = (
+        'User-agent: *',
+        'Sitemap: {0}'.format(urljoin(base, reverse('sitemap')))
+    )
+    return HttpResponse("\n".join(lines) + "\n", content_type='text/plain')
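
The new robots view joins the request's origin (via the project's own utils.origin helper) with the reversed sitemap URL. A minimal sketch of how the route could be verified with Django's test client; the test class and assertions below are illustrative, assuming the home URLconf is mounted at the site root:

    from django.test import TestCase


    class RobotsTxtTests(TestCase):
        def test_robots_txt_lists_sitemap(self):
            # Fetch the new route and check the plain-text response.
            response = self.client.get('/robots.txt')
            self.assertEqual(response.status_code, 200)
            self.assertEqual(response['Content-Type'], 'text/plain')
            body = response.content.decode()
            # The body should allow all agents and point at the sitemap index.
            self.assertIn('User-agent: *', body)
            self.assertIn('Sitemap: ', body)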

lemoncurry/urls.py

@@ -41,7 +41,7 @@ urlpatterns = [
     url('^auth/', include('lemonauth.urls')),
     url('^s/', include('shorturls.urls')),
-    url(r'^sitemap\.xml$', sitemap.index, maps),
+    url(r'^sitemap\.xml$', sitemap.index, maps, name='sitemap'),
     url(r'^sitemaps/(?P<section>.+)\.xml$', sitemap.sitemap, maps,
         name='django.contrib.sitemaps.views.sitemap'),
 ]
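
Naming this route is what lets the robots view call reverse('sitemap'); without name='sitemap' that call would raise NoReverseMatch. Roughly, once the URLconf is loaded (example.com is illustrative, not part of the commit):

    from urllib.parse import urljoin
    from django.urls import reverse

    # reverse('sitemap') returns '/sitemap.xml'; urljoin anchors it to the site origin.
    urljoin('https://example.com/', reverse('sitemap'))
    # -> 'https://example.com/sitemap.xml'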