# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-agent: *
# Disallow: /
User-agent: SemrushBot
Disallow: /
User-agent: SemrushBot-SA
Disallow: /
User-agent: SemrushBot-BA
Disallow: /
User-agent: SemrushBot-SI
Disallow: /
User-agent: SemrushBot-SWA
Disallow: /
User-agent: SemrushBot-CT
Disallow: /
User-agent: SemrushBot-BM
Disallow: /
User-agent: AhrefsBot
Disallow: /
User-agent: Baiduspider
Disallow: /
User-agent: HTTrack
Disallow: /
User-agent: Yandex
Disallow: /
User-agent: exabot
Disallow: /
User-agent: MJ12bot
Disallow: /
User-agent: dotbot
Disallow: /
User-agent: gigabot
Disallow: /
User-agent: Visbot
Disallow: /
User-agent: SpammerRobot
Disallow: /
User-agent: SecurityHoleRobot
Disallow: /
User-agent: AwarioRssBot
User-agent: AwarioSmartBot
Disallow: /
User-agent: Googlebot
Disallow: /nogooglebot/
User-agent: BLEXBot
Disallow: /
User-agent: barkrowler
Disallow: /