# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
User-agent: *
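# This group applies to any crawler without a more specific User-agent group below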
# cruel but efficient
Disallow: /works?
Disallow: /autocomplete/
Disallow: /downloads/
Disallow: /external_works/
# disallow crawling of search results
Disallow: /bookmarks/search?
Disallow: /people/search?
Disallow: /tags/search?
Disallow: /works/search?

User-agent: Googlebot
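# Googlebot obeys only its most specific matching group and ignores the * group above,
# so the shared rules must be repeated here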
Disallow: /autocomplete/
Disallow: /downloads/
Disallow: /external_works/
# Googlebot is smart and supports * wildcards in Disallow patterns
Disallow: /works/*?
Disallow: /*search?
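# block any URL whose query string contains one of these parameters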
Disallow: /*?*query=
Disallow: /*?*sort_
Disallow: /*?*selected_tags
Disallow: /*?*selected_pseuds
Disallow: /*?*use_openid
Disallow: /*?*view_adult
Disallow: /*?*tag_id
Disallow: /*?*pseud_id
Disallow: /*?*user_id
Disallow: /*?*pseud=
Disallow: /people?*show=

User-agent: Slurp
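# ask Yahoo's Slurp to wait 30 seconds between requests (Crawl-delay is non-standard)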
Crawl-delay: 30