#****************************************************************************
# robots.txt
# : Robots, spiders, and search engines use this file to determine which
# content they should *not* crawl while indexing your website.
# : This system is called "The Robots Exclusion Standard."
# : It is strongly encouraged to use a robots.txt validator to check
# for valid syntax before any robots read it!
#
# Examples:
#
# Instruct all robots to stay out of the admin area.
# : User-agent: *
# : Disallow: /admin/
#
# Restrict Google and MSN from indexing your images.
# : User-agent: Googlebot
# : Disallow: /images/
# : User-agent: MSNBot
# : Disallow: /images/
#****************************************************************************
User-agent: Browsershots
Disallow:
User-agent: *
Disallow: /cgi-bin/
Disallow: /sandbox/
Disallow: /store/personal/
Disallow: /store/reset.php
Disallow: /store/account*.php
Disallow: /store/address*.php
Disallow: /store/checkout*.php
Disallow: /store/create_account*.php
Disallow: /store/saico/
Allow: /store/saico/sapublist.pdf
Disallow: /store/src/
Disallow: /store/sys/
Crawl-Delay: 10
Sitemap: https://www.sexaholics.org/sitemap.xml