Remove dead code
author: Magnus Hagander <magnus@hagander.net>
Tue, 7 Feb 2012 12:04:19 +0000 (13:04 +0100)
committer: Magnus Hagander <magnus@hagander.net>
Tue, 7 Feb 2012 12:04:19 +0000 (13:04 +0100)
tools/search/crawler/lib/sitemapsite.py

index 9dbc670320db2bd2fec1de6a68501d88942384c3..cd41bfc0a9ab187d0b2313b2268d9bb1074afb34 100644 (file)
@@ -61,10 +61,6 @@ class SitemapSiteCrawler(BaseSiteCrawler):
                super(SitemapSiteCrawler, self).__init__(hostname, dbconn, siteid, serverip)
 
        def init_crawl(self):
-               # We need to seed the crawler with every URL we've already seen, since
-               # we don't recrawl the contents if they haven't changed.
-               allpages = self.scantimes.keys()
-
                # Fetch the sitemap. We ignore robots.txt in this case, and
                # assume it's always under /sitemap.xml
                u = urllib.urlopen("http://%s/sitemap.xml" % self.hostname)