# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-Agent: *
# Disallow: /
# robots.txt for https://www.harrys.com/
User-agent: *
Sitemap: https://www.harrys.com/sitemap-us.xml
Sitemap: https://www.harrys.com/sitemap-ca.xml
Sitemap: https://www.harrys.com/sitemap-gb.xml
Disallow: /api/
Disallow: /auth/
Disallow: /*/checkout
Disallow: /*/invite
Disallow: /*/profile