# robots.txt for http://www.wrath.com/
# $Id: robots.txt,v 1.0 2000/04/09 02:53:04 brian Exp $
#
# for more robots info visit: http://info.webcrawler.com/mak/projects/robots/norobots.html
# for list of robots visit: http://info.webcrawler.com/mak/projects/robots/active/html/index.html
#
# to include all robots:      User-agent: *
# to include one robot:       User-agent: robot name
#
# to disallow everything:     Disallow: /
# to disallow one directory:  Disallow: /directory name/
# to disallow one document:   Disallow: /directory name/document.name
#
# to allow a single robot:
#User-agent: AltaVista Intranet V2.0 W3C Webreq
#Disallow:
#
#User-agent: *
#Disallow: /

User-agent: *
Disallow: /cgi-bin/