# robots.txt for http://www.newcopasetics.com/
## 7-8-07 upped slurp to 15, added msnbot limit to 10, upped voila to 15, added google delay of 6
# last out of memory error was june 18, 2007
#Additional Symbols
#Additional symbols allowed in the robots.txt directives include:
#
#'*' - matches a sequence of characters
#'$' - anchors at the end of the URL string
#
#Using Wildcard Match: '*'
#A '*' in robots directives is used to wildcard match a sequence of characters in your URL.
#You can use this symbol in any part of the URL string that you provide in the robots directive.
#
#Example of '*':
# User-agent: Slurp
# Allow: /public*/
# Disallow: /*_print*.html
# Disallow: /*?sessionid
User-agent: *
Disallow: /cgi-bin/
Disallow: /images/
Disallow: /library/
Disallow: /pdfs/
# Yahoo
User-agent: Slurp
Crawl-delay: 40
# TheFind
User-agent: Fatbot
Crawl-delay: 40
User-agent: msnbot
Crawl-delay: 40
User-agent: Twiceler
Crawl-delay: 15
User-agent: Googlebot
Crawl-delay: 20
User-agent: ShopWiki
Crawl-delay: 40
User-agent: ia_archiver
#Disallow: /cgi-bin
Disallow: /test/ # test area
Disallow: /demo/ # client demos
Disallow: /includes/
Disallow: /tda/ #
Disallow: /scripts/ #
User-agent: Teoma
#Disallow: /cgi-bin
Disallow: /test/ # test area
Disallow: /demo/ # client demos
Disallow: /includes/
Disallow: /tda/ #
Disallow: /scripts/ #
User-agent: NuSearch Spider
Disallow: /cgi-bin
Disallow: /test/ # test area
Disallow: /demo/ # client demos
Disallow: /includes/
Disallow: /tda/ #
Disallow: /scripts/ #
User-agent: VoilaBot
Crawl-delay: 30
Disallow: /cgi-bin
Disallow: /test/ # test area
Disallow: /demo/ # client demos
Disallow: /includes/
Disallow: /tda/ #
Disallow: /scripts/ #
User-agent: Gigabot
Crawl-delay: 30
#Disallow: /cgi-bin maybe have to deactivate this
Disallow: /test/ # test area
Disallow: /demo/ # client demos
Disallow: /includes/
Disallow: /tda/ #
Disallow: /scripts/ #