Update sitemap URLs for https
author Magnus Hagander <magnus@hagander.net>
Thu, 19 May 2016 17:26:32 +0000 (13:26 -0400)
committer Magnus Hagander <magnus@hagander.net>
Tue, 24 May 2016 19:13:00 +0000 (21:13 +0200)
pgweb/core/views.py
tools/search/crawler/lib/sitemapsite.py

index 5ed72206ecd7a272d9c9b91301ee6d14812cec60..4cc2e53a87d0a3296e0dad59d751adede1c54020 100644 (file)
@@ -133,7 +133,7 @@ Disallow: /search/
 Disallow: /message-id/raw/
 Disallow: /message-id/flat/
 
-Sitemap: http://www.postgresql.org/sitemap.xml
+Sitemap: https://www.postgresql.org/sitemap.xml
 """, content_type='text/plain')
 
 
@@ -148,7 +148,7 @@ def sitemap(request):
        for p in get_all_pages_struct():
                pages+=1
                x.startElement('url', {})
-               x.add_xml_element('loc', 'http://www.postgresql.org/%s' % urllib.quote(p[0]))
+               x.add_xml_element('loc', 'https://www.postgresql.org/%s' % urllib.quote(p[0]))
                if len(p) > 1 and p[1]:
                        x.add_xml_element('priority', unicode(p[1]))
                if len(p) > 2 and p[2]:
index cd41bfc0a9ab187d0b2313b2268d9bb1074afb34..a6f5ae83139ebaf6f8526300657c8e5c3bc7893b 100644 (file)
@@ -63,7 +63,7 @@ class SitemapSiteCrawler(BaseSiteCrawler):
        def init_crawl(self):
                # Fetch the sitemap. We ignore robots.txt in this case, and
                # assume it's always under /sitemap.xml
-               u = urllib.urlopen("http://%s/sitemap.xml" % self.hostname)
+               u = urllib.urlopen("https://%s/sitemap.xml" % self.hostname)
                p = SitemapParser()
                p.parse(u)
                u.close()