# Generic robots.txt to stop the worst offenders:
# (1) stops big Chinese/Russian crawlers
# (2) stops email scanners
# (3) stops web copiers
#
# This list is not complete or exhaustive at all; the whole idea is just to stop the worst offenders.
User-agent: ahrefsbot
User-agent: baiduspider
User-agent: blexbot
User-agent: emailcollector
User-agent: emailsiphon
User-agent: emailwolf
User-agent: ezooms
User-agent: ia_archiver
User-agent: linkedinbot
User-agent: mj12bot
User-agent: msiecrawler
User-agent: msnbot
User-agent: netvibes
User-agent: nutch
User-agent: offline explorer
User-agent: offline.explorer
User-agent: pgbot
User-agent: pingdom
User-agent: psbot
User-agent: relcybot
User-agent: scoutjet
User-agent: seznambot
User-agent: sitesnagger
User-agent: slurp
User-agent: sogou
User-agent: sosobot
User-agent: sougou
User-agent: teleport
User-agent: teleport pro
User-agent: teoma
User-agent: twitterbot
User-agent: webcopier
User-agent: webstripper
User-agent: yandex
User-agent: yandexantivirus
User-agent: yandexblogs
User-agent: yandexbot
User-agent: yandexcatalog
User-agent: yandexdirect
User-agent: yandexfavicons
User-agent: yandeximages
User-agent: yandexmedia
User-agent: yandexnews
User-agent: yandexpagechecker
User-agent: yandexvideo
User-agent: yandexwebmaster
User-agent: yandexzakladki
Disallow: /