forked from 00dani/lemoncurry
Run Black over the whole codebase
parent cd990e4e2f
commit 2e7d12b3e6
109 changed files with 1539 additions and 1209 deletions
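Black is a mechanical reformatter (it verifies the reformatted code parses to an equivalent AST), so the 109-file diff below is style-only; the most visible change is that string literals are normalized to double quotes. As a rough sketch, assuming Black is installed in the project's environment, the whole tree can be reformatted with:

    black .

(equivalently `python -m black .`), which is presumably all this commit did.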
@@ -3,10 +3,10 @@ from django.urls import reverse


 class HomeSitemap(sitemaps.Sitemap):
-    changefreq = 'daily'
+    changefreq = "daily"

     def items(self):
-        return ('home:index',)
+        return ("home:index",)

     def location(self, item):
         return reverse(item)

@@ -2,9 +2,9 @@ from django.urls import path

 from . import views

-app_name = 'home'
+app_name = "home"
 urlpatterns = [
-    path('', views.index, name='index'),
-    path('page/<int:page>', views.index, name='index'),
-    path('robots.txt', views.robots, name='robots.txt'),
+    path("", views.index, name="index"),
+    path("page/<int:page>", views.index, name="index"),
+    path("robots.txt", views.robots, name="robots.txt"),
 ]

@@ -8,34 +8,31 @@ from urllib.parse import urljoin
 from entries import kinds, pagination
 from lemoncurry import breadcrumbs, utils

-breadcrumbs.add('home:index', 'home')
+breadcrumbs.add("home:index", "home")


-@render_to('home/index.html')
+@render_to("home/index.html")
 def index(request, page=None):
     def url(page):
-        kwargs = {'page': page} if page != 1 else {}
-        return reverse('home:index', kwargs=kwargs)
+        kwargs = {"page": page} if page != 1 else {}
+        return reverse("home:index", kwargs=kwargs)

     user = request.user
-    if not hasattr(user, 'entries'):
+    if not hasattr(user, "entries"):
         user = get_object_or_404(User, pk=1)

     entries = user.entries.filter(kind__in=kinds.on_home)
     entries = pagination.paginate(queryset=entries, reverse=url, page=page)

     return {
-        'user': user,
-        'entries': entries,
-        'atom': reverse('entries:atom'),
-        'rss': reverse('entries:rss'),
+        "user": user,
+        "entries": entries,
+        "atom": reverse("entries:atom"),
+        "rss": reverse("entries:rss"),
     }


 def robots(request):
     base = utils.origin(request)
-    lines = (
-        'User-agent: *',
-        'Sitemap: {0}'.format(urljoin(base, reverse('sitemap')))
-    )
-    return HttpResponse("\n".join(lines) + "\n", content_type='text/plain')
+    lines = ("User-agent: *", "Sitemap: {0}".format(urljoin(base, reverse("sitemap"))))
+    return HttpResponse("\n".join(lines) + "\n", content_type="text/plain")