# $Id: robots.txt,v 1.7.2.1 2007/03/23 18:57:07 drumm Exp $
#
# robots.txt
#
# This file is to prevent the crawling and indexing of certain parts
# of your site by web crawlers and spiders run by sites like Yahoo!
# and Google. By telling these "robots" where not to go on your site,
# you save bandwidth and server resources.
#
# This file will be ignored unless it is at the root of your host:
# Used:    http://example.com/robots.txt
# Ignored: http://example.com/site/robots.txt
#
# For more information about the robots.txt standard, see:
# http://www.robotstxt.org/wc/robots.html
#
# For syntax checking, see:
# http://www.sxw.org.uk/computing/robots/check.html
#
# Greg June 25, 2009
# temporarily prohibit crawling to investigate performance problems
#
# IMPORTANT. I'm moving this to the bottom of the file to prohibit anyone
# not listed in the user-agent line below
#User-agent: *
#Disallow: /
#User-agent: msnbot msnbot-media Googlebot Slurp mcgill-gsa-crawler
User-agent: *
Crawl-delay: 30
# Directories
Disallow: /database/
Disallow: /includes/
Disallow: /misc/
Disallow: /modules/
Disallow: /sites/
Disallow: /themes/
Disallow: /scripts/
Disallow: /updates/
Disallow: /profiles/
#
# Disallow: /local/
Disallow: /files/
# gdl March 6, 2008
Disallow: /event/
#
# Not sure if this will work
# trying to block: GET /user/login?destination=comment/reply/2175%2523comment-form
Disallow: /user/
#
# These are mysql intensive but not sure if they will match
# Added at 13:35 June 25 - see what the logs say...
# Sept 21, 2009, GDL the mcgill-gsa-crawler is killing the server with biblio searches.
# Explicitly prohibit ALL biblio indexing while
# figuring out what is going on
Disallow: /biblio
#Disallow: /biblio/filter/
#Disallow: /biblio/Author/
#Disallow: /biblio?sort=year&order=asc
#Disallow: /biblio?sort=title&order=asc
#Disallow: /biblio?sort=type&order=asc
#Disallow: /biblio?sort=author&order=asc
# Files
Disallow: /xmlrpc.php
Disallow: /cron.php
Disallow: /update.php
Disallow: /install.php
Disallow: /INSTALL.txt
Disallow: /INSTALL.mysql.txt
Disallow: /INSTALL.pgsql.txt
Disallow: /CHANGELOG.txt
Disallow: /MAINTAINERS.txt
Disallow: /LICENSE.txt
Disallow: /UPGRADE.txt
# Paths (clean URLs)
Disallow: /admin/
Disallow: /aggregator/
Disallow: /comment/reply/
Disallow: /contact/
Disallow: /logout/
Disallow: /node/add/
Disallow: /search/
# mcgill-gsa-crawler is grabbing /user/login?... so block all of /user/
Disallow: /user/
#Disallow: /user/register/
#Disallow: /user/password/
#Disallow: /user/login/
# Paths (no clean URLs)
Disallow: /?q=admin/
Disallow: /?q=aggregator/
Disallow: /?q=comment/reply/
Disallow: /?q=contact/
Disallow: /?q=logout/
Disallow: /?q=node/add/
Disallow: /?q=search/
Disallow: /?q=user/password/
Disallow: /?q=user/register/
Disallow: /?q=user/login/
#
# database intensive searches
Disallow: /people/students/user/