bifocal/robots.txt

# Blocks AI web scrapers
# Crawlers sourced from Dark Visitors and https://github.com/ai-robots-txt/ai.robots.txt
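# Each "User-agent" / "Disallow: /" pair below blocks that crawler from the entire
# site under the Robots Exclusion Protocol; compliance is voluntary on the crawler's part.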
# Used by Google for the Gemini assistant and Vertex AI
User-agent: AdsBot-Google
Disallow: /
User-agent: Google-Extended
Disallow: /
User-agent: GoogleOther
Disallow: /
# Used by Anthropic for Claude
User-agent: anthropic-ai
Disallow: /
User-agent: Claude-Web
Disallow: /
User-agent: ClaudeBot
Disallow: /
# Used by ByteDance for Doubao
User-agent: Bytespider
Disallow: /
# Used by Common Crawl to maintain an open repository of web crawl data
User-agent: CCBot
Disallow: /
# Used by OpenAI: ChatGPT-User for browsing and ChatGPT plug-ins, GPTBot for model training
User-agent: ChatGPT-User
Disallow: /
User-agent: GPTBot
Disallow: /
# Sells crawled data to LLM companies
User-agent: Diffbot
Disallow: /
# Used by Meta to train language models for its speech recognition technology
User-agent: FacebookBot
Disallow: /
# Used by Omgili, which sells crawled data to LLM companies
User-agent: omgilibot
Disallow: /
User-agent: omgili
Disallow: /
# Undocumented
User-agent: Amazonbot
Disallow: /
User-agent: Applebot
Disallow: /
User-agent: AwarioRssBot
Disallow: /
User-agent: AwarioSmartBot
Disallow: /
User-agent: cohere-ai
Disallow: /
User-agent: DataForSeoBot
Disallow: /
User-agent: FriendlyCrawler
Disallow: /
User-agent: img2dataset
Disallow: /
User-agent: ImagesiftBot
Disallow: /
User-agent: magpie-crawler
Disallow: /
User-agent: Meltwater
Disallow: /
User-agent: peer39_crawler
Disallow: /
User-agent: peer39_crawler/1.0
Disallow: /
User-agent: PerplexityBot
Disallow: /
User-agent: PiplBot
Disallow: /
User-agent: Seekr
Disallow: /
User-agent: YouBot
Disallow: /