# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file

# Allow good bots
User-agent: Googlebot
User-agent: Bingbot
User-agent: Slurp
User-agent: DuckDuckBot
User-agent: Baiduspider
User-agent: YandexBot
User-agent: facebookexternalhit
User-agent: Twitterbot
User-agent: LinkedInBot
User-agent: WhatsApp
User-agent: Applebot
Crawl-delay: 1
Disallow: /admin
Disallow: /sidekiq
Disallow: /users/sign_in
Disallow: /users/sign_up
Disallow: /users/password
Allow: /
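
# Note: a crawler obeys only the single most specific group that matches its
# user-agent, so the bots named above do not also inherit the "User-agent: *"
# rules further down. Googlebot ignores Crawl-delay (its crawl rate is managed
# in Search Console); Bingbot does honor it.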

# Block known scrapers and bad bots
User-agent: HTTrack
User-agent: wget
User-agent: curl
User-agent: scrapy
User-agent: python-requests
User-agent: libwww-perl
User-agent: mechanize
User-agent: go-http-client
User-agent: okhttp
User-agent: apache-httpclient
User-agent: node-fetch
User-agent: axios
User-agent: postman
User-agent: insomnia
User-agent: nikto
User-agent: sqlmap
User-agent: nmap
User-agent: masscan
User-agent: zap
User-agent: burp
User-agent: w3af
User-agent: skipfish
User-agent: dirb
User-agent: dirbuster
User-agent: gobuster
User-agent: wfuzz
User-agent: ffuf
User-agent: hydra
User-agent: medusa
User-agent: metasploit
User-agent: nessus
User-agent: openvas
User-agent: nuclei
User-agent: sqlninja
User-agent: havij
User-agent: pangolin
User-agent: bbqsql
User-agent: bsqlbf
User-agent: mole
User-agent: paros
User-agent: webscarab
User-agent: webinspect
User-agent: appscan
User-agent: acunetix
User-agent: qualys
User-agent: rapid7
User-agent: veracode
User-agent: checkmarx
User-agent: fortify
User-agent: sonarqube
User-agent: snyk
Disallow: /
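
# Note: the scanners and generic HTTP clients listed above rarely read
# robots.txt, so this group is advisory at best. Enforce blocking at the web
# server or WAF instead; a minimal nginx sketch (illustrative only, adjust
# the pattern to your needs):
#
#   if ($http_user_agent ~* "(sqlmap|nikto|masscan|dirbuster|hydra)") {
#       return 403;
#   }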

# Default rules for other bots
User-agent: *
Crawl-delay: 2
Disallow: /admin
Disallow: /sidekiq
Disallow: /users/sign_in
Disallow: /users/sign_up
Disallow: /users/password
Disallow: /search?
Disallow: /*?
Disallow: /tmp/
Disallow: /cache/
Disallow: /log/
Disallow: /*.json$
Disallow: /*.xml$
Disallow: /*_xmlhttprequest
Disallow: /*ajax*
Disallow: /api/
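
# Note: the "*" wildcard and "$" end-of-URL anchor used above are extensions
# honored by Google, Bing and Yandex; crawlers implementing only the original
# spec treat those characters literally.

# "Host" below is a Yandex-only directive (since deprecated by Yandex in
# favor of 301 redirects); other crawlers ignore it.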
Host: boatflow.jp
Sitemap: http://cdn41.s3.amazonaws.com/sitemaps/sitemap.xml.gz
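# Sitemap URLs must be absolute; a sitemap hosted on another domain, as
# above, is accepted when it is referenced from robots.txt like this.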