Ensure news and events archive doesn't generate thousands of queries
author Magnus Hagander <magnus@hagander.net>
Sun, 30 Aug 2020 15:31:57 +0000 (17:31 +0200)
committer Magnus Hagander <magnus@hagander.net>
Sun, 30 Aug 2020 15:34:20 +0000 (17:34 +0200)
While there is Varnish to take the edge off it, this is just too
inefficient to leave around :)
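
For context, a minimal sketch of the N+1 query pattern this change avoids. It assumes the archive template touches the foreign keys (e.g. event.country) while rendering each row; the attribute names below are illustrative, taken from the diff, not from the templates themselves:

    # Without select_related: one query for the event list, plus one
    # extra query per row as soon as the template dereferences the
    # foreign key -- 100 rows means ~100 additional queries.
    events = Event.objects.filter(approved=True)[:100]
    for e in events:
        print(e.country.name)   # each access here hits the database again

    # With select_related the related rows are pulled in via a JOIN in
    # the same query, so rendering all 100 rows stays at a single query.
    events = Event.objects.select_related('country', 'language').filter(approved=True)[:100]
    for e in events:
        print(e.country.name)   # already loaded, no extra query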

pgweb/events/views.py
pgweb/news/views.py

index fe331e904a79e55f1c0f2408110554f74c6f855f..e7b8d703858de4ad7453deda8b9a5a865381871c 100644 (file)
@@ -20,7 +20,7 @@ def main(request):
 
 def _eventarchive(request, title):
     # Hardcode to the latest 100 events. Do we need paging too?
-    events = Event.objects.select_related('country').filter(approved=True).filter(enddate__lte=date.today()).order_by('-enddate', '-startdate',)[:100]
+    events = Event.objects.select_related('country', 'language').filter(approved=True).filter(enddate__lte=date.today()).order_by('-enddate', '-startdate',)[:100]
     return render_pgweb(request, 'about', 'events/archive.html', {
         'title': '%s Archive' % title,
         'archive': True,
index adcc785903aa501bcc89b9d9929b7329476ee9f2..0a3dbe553600e5f1a904a249706952d4fd05e52c 100644 (file)
@@ -14,10 +14,10 @@ import json
 def archive(request, tag=None, paging=None):
     if tag:
         tag = get_object_or_404(NewsTag, urlname=tag.strip('/'))
-        news = NewsArticle.objects.filter(approved=True, tags=tag)
+        news = NewsArticle.objects.select_related('org').filter(approved=True, tags=tag)
     else:
         tag = None
-        news = NewsArticle.objects.filter(approved=True)
+        news = NewsArticle.objects.select_related('org').filter(approved=True)
     return render_pgweb(request, 'about', 'news/newsarchive.html', {
         'news': news,
         'tag': tag,
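
A sketch of how one might verify the query count after a change like this; it is not part of the commit, and it assumes the model is importable as pgweb.news.models.NewsArticle and that the test fixtures create a few approved articles with organisations:

    from django.test import TestCase
    from pgweb.news.models import NewsArticle

    class NewsArchiveQueryTest(TestCase):
        fixtures = ['approved_news']  # hypothetical fixture name

        def test_archive_is_one_query(self):
            # With select_related('org'), touching article.org must not
            # trigger any per-row queries; without it, this assertion
            # fails once for every article in the fixture.
            with self.assertNumQueries(1):
                for article in NewsArticle.objects.select_related('org').filter(approved=True):
                    article.org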