From c0f0ded2aa98ed534ac55e3a301e5a7b91a6b9c7 Mon Sep 17 00:00:00 2001
From: Unit 193
Date: Fri, 20 Mar 2020 18:31:26 -0400
Subject: Disallow '/packages/*' and '/posts/*' in robots.txt

---
 conf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/conf.py b/conf.py
index 6043f94..a45834c 100644
--- a/conf.py
+++ b/conf.py
@@ -739,7 +739,7 @@ COMMENT_SYSTEM_ID = ""
 # from indexing and other robotic spidering. * is supported. Will only be effective
 # if SITE_URL points to server root. The list is used to exclude resources from
 # /robots.txt and /sitemap.xml, and to inform search engines about /sitemapindex.xml.
-ROBOTS_EXCLUSIONS = ["/archive.html", "/category/*.html", "/debian/*"]
+ROBOTS_EXCLUSIONS = ["/archive.html", "/category/*.html", "/debian/*", "/packages/*", "/posts/*"]

 # Instead of putting files in .html, put them in /index.html.
 # No web server configuration is required. Also enables STRIP_INDEXES.
--
cgit v1.2.3