add robots.txt to specify which doc versions appear in search engines

Dan Halbert 2020-08-17 09:48:17 -04:00
parent dcbd3b4994
commit e1a843878e
2 changed files with 7 additions and 1 deletion

conf.py (1 addition, 1 deletion)

@@ -272,7 +272,7 @@ html_static_path = ['docs/static']
 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
 # directly to the root of the documentation.
-#html_extra_path = []
+html_extra_path = ["docs/robots.txt"]
 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
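Per the comment above, Sphinx copies any file listed in html_extra_path straight to the root of the built HTML, so robots.txt ends up next to the generated pages. The snippet below is a small sanity-check sketch, not part of the commit; the "_build/html" output directory is an assumption about a typical local build (e.g. "sphinx-build . _build/html") and should be adjusted to the real build setup.

# Sketch only (not part of this commit): verify that Sphinx copied the
# file listed in html_extra_path to the root of the built docs.
from pathlib import Path

build_root = Path("_build/html")  # assumed local Sphinx output directory
robots = build_root / "robots.txt"

if robots.exists():
    print(robots.read_text())
else:
    print("robots.txt was not copied to the docs root; check html_extra_path")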

docs/robots.txt (new file, 6 additions)

@@ -0,0 +1,6 @@
+User-agent: *
+Allow: /*/latest/
+Allow: /en/latest/ # Fallback for bots that don't understand wildcards
+Allow: /*/5.3.x/
+Allow: /en/5.3.x/ # Fallback for bots that don't understand wildcards
+Disallow: /
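To illustrate the effect of these rules, here is a small sketch, not part of the commit, that feeds the file to Python's urllib.robotparser. That parser implements the classic robots.txt rules without "*" wildcard support, which is exactly the kind of crawler the explicit /en/... fallback lines are there for; a wildcard-aware crawler such as Googlebot would also allow the other-language /latest/ and /5.3.x/ paths. The example URLs are hypothetical paths in the /<language>/<version>/ layout the rules target.

# Sketch only (not part of this commit): check how a crawler that follows
# the classic robots.txt rules (no "*" wildcards in paths), such as
# Python's urllib.robotparser, interprets the new file.
from urllib.robotparser import RobotFileParser

ROBOTS_TXT = """\
User-agent: *
Allow: /*/latest/
Allow: /en/latest/ # Fallback for bots that don't understand wildcards
Allow: /*/5.3.x/
Allow: /en/5.3.x/ # Fallback for bots that don't understand wildcards
Disallow: /
"""

rp = RobotFileParser()
rp.parse(ROBOTS_TXT.splitlines())

# The explicit /en/... rules keep the English latest and 5.3.x docs
# crawlable even for this wildcard-unaware parser.
print(rp.can_fetch("*", "/en/latest/index.html"))   # True
print(rp.can_fetch("*", "/en/5.3.x/index.html"))    # True

# Other doc versions fall through to "Disallow: /".
print(rp.can_fetch("*", "/en/4.1.x/index.html"))    # False

# A wildcard-aware crawler would allow a translated /latest/ build via
# "Allow: /*/latest/", but this simple parser does not understand "*".
print(rp.can_fetch("*", "/ja/latest/index.html"))   # False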