Disallow certain pages in robots.txt that would mislead crawlers.
author	Art Cancro <ajc@citadel.org>
Thu, 20 Jan 2011 03:37:30 +0000 (22:37 -0500)
committer	Wilfried Goesgens <dothebart@citadel.org>
Sun, 4 Sep 2011 14:03:21 +0000 (14:03 +0000)
The following paths are now disallowed:

/printmsg
/msgheaders
/groupdav
/do_template
/static

webcit/static.c

index 971def8905503f61557354f83cdf5274d9f7597c..71ca5641ab2d40de3e2271adb4d0ab042ba71d59 100644 (file)
@@ -283,7 +283,11 @@ void robots_txt(void) {
        begin_burst();
 
        wc_printf("User-agent: *\r\n"
-               "Disallow:\r\n"
+               "Disallow: /printmsg\r\n"
+               "Disallow: /msgheaders\r\n"
+               "Disallow: /groupdav\r\n"
+               "Disallow: /do_template\r\n"
+               "Disallow: /static\r\n"
                "Sitemap: %s/sitemap.xml\r\n"
                "\r\n"
                ,
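For reference, this is roughly the response the patched robots_txt() would emit. It is a sketch, not output captured from the change: the hunk above is truncated after the format string, so the assumption that the %s in the Sitemap line expands to the server's base URL (https://example.com below is a placeholder) is inferred, not shown in the diff.

	User-agent: *
	Disallow: /printmsg
	Disallow: /msgheaders
	Disallow: /groupdav
	Disallow: /do_template
	Disallow: /static
	Sitemap: https://example.com/sitemap.xml

The removed line emitted an empty "Disallow:" rule, which permits crawling of everything; the new rules block only the five listed prefixes while leaving the rest of the site crawlable. Since robots.txt Disallow rules are prefix matches, each entry also covers any URL beginning with that prefix.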