# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
User-agent: *
Disallow: /checkout
Disallow: /cart
Disallow: /users
Disallow: /account
Disallow: /api
Disallow: /password
Disallow: /vouchers
Disallow: /admin
# OpenAI's web crawler, used to train generative AI models (GPT-3.5, GPT-4, ChatGPT)
# https://platform.openai.com/docs/bots
User-agent: GPTBot
Allow: /
# ChatGPT plugins
# https://platform.openai.com/docs/bots
User-agent: ChatGPT-User
Allow: /
# OpenAI Search bot
# https://platform.openai.com/docs/bots
User-agent: OAI-SearchBot
Allow: /
# Google-Extended: controls use of content for Gemini (formerly Bard) and Vertex AI
# https://blog.google/technology/ai/an-update-on-web-publisher-controls/
User-agent: Google-Extended
Allow: /
# Applebot-Extended: controls use of Applebot-crawled content for Apple's generative AI models
# https://support.apple.com/en-us/119829
User-agent: Applebot-Extended
Allow: /
# Amazonbot
# https://developer.amazon.com/amazonbot
User-agent: Amazonbot
Allow: /