Implement request caching in Redis so that we don't have to fetch remote pages every time we want their mf2 items

This commit is contained in:
Danielle McLean 2017-11-10 09:17:32 +11:00
parent b8a8cd62cf
commit a7f6824334
Signed by untrusted user: 00dani
GPG key ID: 5A5D2D1AFF12EEC5
5 changed files with 82 additions and 5 deletions

lemoncurry/requests.py (new file, 44 lines)

@@ -0,0 +1,44 @@
import requests
from cachecontrol.wrapper import CacheControl
from cachecontrol.cache import BaseCache
from cachecontrol.heuristics import LastModified
from datetime import datetime
from django.core.cache import cache as django_cache
from hashlib import sha256
from mf2py import Parser


class DjangoCache(BaseCache):
    @classmethod
    def key(cls, url):
        return 'req:' + sha256(url.encode('utf-8')).hexdigest()

    def get(self, url):
        key = self.key(url)
        return django_cache.get(key)

    def set(self, url, value, expires=None):
        key = self.key(url)
        if expires:
            lifetime = (expires - datetime.utcnow()).total_seconds()
            django_cache.set(key, value, lifetime)
        else:
            django_cache.set(key, value)


req = CacheControl(
    requests.Session(),
    cache=DjangoCache(),
    heuristic=LastModified(),
)


def get(url):
    r = req.get(url)
    r.raise_for_status()
    return r


def mf2(url):
    r = get(url)
    return Parser(doc=r.text, url=url, html_parser='html5lib')
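
As a rough usage sketch (not part of this commit), other code could call the new mf2() helper and get cached responses transparently; the URL and the h-entry lookup below are illustrative assumptions:

from lemoncurry.requests import mf2

def first_entry_name(url):
    # mf2() fetches the page through the CacheControl-wrapped session, so a
    # repeat call for the same URL can be answered from Redis (when the
    # response is cacheable) instead of hitting the network again.
    parsed = mf2(url).to_dict()
    for item in parsed.get('items', []):
        if 'h-entry' in item.get('type', []):
            names = item['properties'].get('name')
            return names[0] if names else None
    return None

# e.g. first_entry_name('https://example.com/notes/1')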

@@ -134,6 +134,11 @@ CACHES = {
    'default': {
        'BACKEND': 'redis_cache.RedisCache',
        'LOCATION': '127.0.0.1:6380',
        'KEY_PREFIX': 'lemoncurry',
        'OPTIONS': {
            'DB': 0,
            'PARSER_CLASS': 'redis.connection.HiredisParser',
        },
    }
}
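
A hedged sanity check, assuming the app and Redis are both running: after one fetch through the new module, a cacheable response should appear under the sha256-derived key that DjangoCache builds. The URL below is illustrative, and nothing is stored if the remote response isn't cacheable:

from hashlib import sha256
from django.core.cache import cache
from lemoncurry.requests import get

url = 'https://example.com/'  # illustrative URL, not part of the commit
get(url)  # first request goes over the network
key = 'req:' + sha256(url.encode('utf-8')).hexdigest()
print(cache.get(key) is not None)  # True once CacheControl has stored the response in Redis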