Merge pull request #3291 from dhalbert/robots.txt

Add robots.txt to specify which doc versions should appear in search engines
Scott Shawcroft, 2021-01-11 15:44:21 -08:00 (committed via GitHub)
commit b669b622a3
2 changed files with 7 additions and 1 deletion


conf.py
@@ -276,7 +276,7 @@ html_static_path = ['docs/static']
 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
 # directly to the root of the documentation.
-#html_extra_path = []
+html_extra_path = ["docs/robots.txt"]

 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
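
For context: Sphinx's html_extra_path copies each listed file verbatim into the root of the built HTML output, so docs/robots.txt lands at the top level of the published docs, where crawlers expect it. A minimal post-build sanity check, sketched under the assumption of a local _build/html output directory (that path is illustrative, not something this change defines):

from pathlib import Path

# After e.g. "sphinx-build -b html . _build/html", the extra file should
# sit at the output root alongside the generated pages.
robots = Path("_build/html") / "robots.txt"
assert robots.is_file(), "html_extra_path did not copy docs/robots.txt"
print(robots.read_text())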

docs/robots.txt (new file)

@@ -0,0 +1,6 @@
+User-agent: *
+Allow: /*/latest/
+Allow: /en/latest/ # Fallback for bots that don't understand wildcards
+Allow: /*/6.0.x/
+Allow: /en/6.0.x/ # Fallback for bots that don't understand wildcards
+Disallow: /
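
These rules let crawlers index only the latest and 6.0.x doc trees and block everything else; the /en/ lines duplicate the wildcard rules for crawlers that do literal prefix matching only. A quick way to sanity-check the logic with Python's standard-library parser, which itself ignores wildcards and therefore exercises exactly the fallback case (the readthedocs host name is illustrative):

from urllib.robotparser import RobotFileParser

# The rules above, minus the inline comments. urllib.robotparser does
# literal prefix matching only (it is one of the "bots that don't
# understand wildcards"), so the /en/... fallback lines are what make
# the positive checks below pass.
RULES = """\
User-agent: *
Allow: /*/latest/
Allow: /en/latest/
Allow: /*/6.0.x/
Allow: /en/6.0.x/
Disallow: /
"""

rp = RobotFileParser()
rp.parse(RULES.splitlines())

base = "https://circuitpython.readthedocs.io"  # illustrative host
assert rp.can_fetch("*", base + "/en/latest/index.html")     # allowed
assert rp.can_fetch("*", base + "/en/6.0.x/index.html")      # allowed
assert not rp.can_fetch("*", base + "/en/5.3.x/index.html")  # old version: blocked
assert not rp.can_fetch("*", base + "/")                     # everything else: blocked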