From: Michael Tremer
Date: Sat, 13 Jan 2018 13:21:38 +0000 (+0000)
Subject: Stop crawlers from indexing package content
X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=242dd957d481ac005652e5c0c8b092420fd1ece2;p=pbs.git

Stop crawlers from indexing package content

Signed-off-by: Michael Tremer
---

diff --git a/src/static/robots.txt b/src/static/robots.txt
index 6ffbc308..1f53798b 100644
--- a/src/static/robots.txt
+++ b/src/static/robots.txt
@@ -1,3 +1,2 @@
 User-agent: *
 Disallow: /
-
diff --git a/src/web/packages.py b/src/web/packages.py
index cc038f71..9f820b38 100644
--- a/src/web/packages.py
+++ b/src/web/packages.py
@@ -200,6 +200,9 @@ class PackageFileDownloadHandler(base.BaseHandler):
 		self.set_header("Content-Disposition", "attachment; filename=%s" % os.path.basename(filename))
 		self.set_header("Content-Type", mimetype)
 
+		# These pages should not be indexed
+		self.add_header("X-Robots-Tag", "noindex")
+
 		# Transfer the content chunk by chunk.
 		while True:
 			buf = f.read(BUFFER_SIZE)
@@ -222,6 +225,9 @@ class PackageFileViewHandler(PackageFileDownloadHandler):
 		content = f.read()
 		f.close()
 
+		# These pages should not be indexed
+		self.add_header("X-Robots-Tag", "noindex")
+
 		self.render("packages/view-file.html", pkg=pkg, filename=filename,
 			mimetype=mimetype, content=content, filesize=f.size)
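
For illustration only (not part of this commit), here is a minimal, self-contained Tornado sketch of the same two measures the diff above adds to the pbs handlers: a catch-all robots.txt and an X-Robots-Tag: noindex response header on file downloads. The handler names, URL patterns and placeholder content below are hypothetical and not taken from the pbs codebase.

    # Illustrative sketch, assuming a plain Tornado app; names and URLs are made up.
    import tornado.ioloop
    import tornado.web


    class RobotsTxtHandler(tornado.web.RequestHandler):
        def get(self):
            # Ask compliant crawlers not to crawl anything on this site.
            self.set_header("Content-Type", "text/plain")
            self.write("User-agent: *\nDisallow: /\n")


    class FileDownloadHandler(tornado.web.RequestHandler):
        def get(self, filename):
            self.set_header("Content-Disposition",
                "attachment; filename=%s" % filename)
            self.set_header("Content-Type", "application/octet-stream")

            # Second layer of defence: if the response is fetched anyway,
            # this header tells search engines not to index it.
            self.add_header("X-Robots-Tag", "noindex")

            # Placeholder body; a real handler would stream the file here.
            self.write(b"dummy file content\n")


    def make_app():
        return tornado.web.Application([
            (r"/robots.txt", RobotsTxtHandler),
            (r"/download/(.+)", FileDownloadHandler),
        ])


    if __name__ == "__main__":
        make_app().listen(8888)
        tornado.ioloop.IOLoop.current().start()

The header complements robots.txt: the Disallow rule keeps well-behaved crawlers from fetching package files at all, while X-Robots-Tag marks any response that does get fetched as not to be indexed.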