add robots.txt to specify doc versions to appear in search engines
parent dcbd3b4994
commit e1a843878e
conf.py (2 lines changed)
@@ -272,7 +272,7 @@ html_static_path = ['docs/static']
 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
 # directly to the root of the documentation.
-#html_extra_path = []
+html_extra_path = ["docs/robots.txt"]

 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
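The comment block above describes the mechanism: anything listed in html_extra_path is copied verbatim to the root of the built site, so docs/robots.txt is served as /robots.txt. A quick post-build sanity check (a sketch, not part of the commit; the "_build/html" output directory is an assumption based on a typical sphinx-build invocation):

# Sketch: verify that html_extra_path copied robots.txt to the site root.
# Assumes the docs were built with e.g. `sphinx-build -b html . _build/html`;
# adjust the output path to match your build.
from pathlib import Path

out_root = Path("_build/html")
robots = out_root / "robots.txt"

if robots.is_file():
    print(robots.read_text())
else:
    print("robots.txt was not copied -- check html_extra_path in conf.py")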
docs/robots.txt (new file, 6 lines)
@@ -0,0 +1,6 @@
+User-agent: *
+Allow: /*/latest/
+Allow: /en/latest/ # Fallback for bots that don't understand wildcards
+Allow: /*/5.3.x/
+Allow: /en/5.3.x/ # Fallback for bots that don't understand wildcards
+Disallow: /
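Rule order matters here: Python's stdlib parser applies the first matching rule, while wildcard-aware crawlers such as Googlebot pick the most specific match, so in both cases the Allow entries must sit above the catch-all Disallow. A minimal sketch exercising the new rules with urllib.robotparser (not part of the commit; the docs.example.org host is a placeholder). Note that the stdlib parser implements the original robots.txt spec and does not expand "*" path wildcards, which is exactly the kind of bot the /en/... fallback lines are there for:

# Sketch: feed the new robots.txt to Python's stdlib parser and probe a few
# URLs. Host "docs.example.org" is a placeholder, not from the commit.
from urllib.robotparser import RobotFileParser

robots_txt = """\
User-agent: *
Allow: /*/latest/
Allow: /en/latest/ # Fallback for bots that don't understand wildcards
Allow: /*/5.3.x/
Allow: /en/5.3.x/ # Fallback for bots that don't understand wildcards
Disallow: /
"""

parser = RobotFileParser()
parser.parse(robots_txt.splitlines())

# Matched by the explicit fallback rule -> crawlable even without wildcards:
print(parser.can_fetch("*", "https://docs.example.org/en/latest/index.html"))  # True
# Only the wildcard rule would allow this; the stdlib parser can't expand
# "/*/latest/", so it falls through to "Disallow: /" (wildcard-aware
# crawlers such as Googlebot would allow it):
print(parser.can_fetch("*", "https://docs.example.org/fr/latest/index.html"))  # False
# Old version trees are blocked by the catch-all Disallow:
print(parser.can_fetch("*", "https://docs.example.org/en/5.2.x/index.html"))   # False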