Fix robots.txt in https-only scenario
author	Magnus Hagander <magnus@hagander.net>
Sat, 21 May 2016 17:15:11 +0000 (13:15 -0400)
committer	Magnus Hagander <magnus@hagander.net>
Tue, 24 May 2016 19:14:33 +0000 (21:14 +0200)
We now allow all searching, regardless of whether the request is served through one of our Varnish caches.

pgweb/core/views.py

index acaf4c758f29ecf6730ca01f7f8483733c689c7f..a34f9139adcbe1c653e9a9f8a3bf8aeaf280491f 100644
@@ -118,12 +118,7 @@ def organisationform(request, itemid):
 
 # robots.txt
 def robots(request):
-       if not is_behind_cache(request):
-               # If we're not serving this through one of our Varnish caches, we allow *nothing* to be indexed
-               return HttpResponse("User-agent: *\nDisallow: /\n", content_type='text/plain')
-       else:
-               # Regular website
-               return HttpResponse("""User-agent: *
+       return HttpResponse("""User-agent: *
 Disallow: /admin/
 Disallow: /account/
 Disallow: /docs/devel/
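
For reference, this is roughly how the simplified view reads after the change. A minimal sketch only: the Disallow entries are just the ones visible in this hunk, and the closing of the triple-quoted string plus the content_type argument (carried over from the removed branch) are assumptions about the rest of the file.

from django.http import HttpResponse

def robots(request):
    # Serve the same rules unconditionally; the old is_behind_cache()
    # branch, which disallowed all indexing when the request was not
    # served through one of the Varnish caches, is gone.
    # Sketch only -- the Disallow list is truncated to the entries
    # visible in the hunk above.
    return HttpResponse("""User-agent: *
Disallow: /admin/
Disallow: /account/
Disallow: /docs/devel/
""", content_type='text/plain')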