# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
User-agent: *
Crawl-delay: 0.1
Disallow: /search
Disallow: /products/search
Disallow: /*/refresh_prices
Disallow: /*/add_to_cart
Disallow: /*/get_prices
Disallow: /lists/add
Disallow: /*/add$
Disallow: /api/
Disallow: /users/bits
Disallow: /users/create
Disallow: /prices/
Disallow: /widgets/issue
#User-Agent: MJ12bot
#Crawl-Delay: 10
# NOTE(review): per RFC 9309, a crawler matching this specific group ignores the
# "User-agent: *" group entirely, so YandexBot gets NONE of the Disallow rules
# above — confirm this is intended, or duplicate the Disallow lines here.
User-Agent: YandexBot
Crawl-Delay: 300
Sitemap: https://booko.com.au/sitemap_au.xml.gz