Revert "add robots.txt to specify doc versions to appear in search engines"
This commit is contained in:
parent 8a86f9dff6
commit ddca91020e
conf.py (2 changed lines)
@@ -284,7 +284,7 @@ html_static_path = ['docs/static']
 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
 # directly to the root of the documentation.
-html_extra_path = ["docs/robots.txt"]
+#html_extra_path = []
 
 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
docs/robots.txt (deleted)

@@ -1,6 +0,0 @@
-User-agent: *
-Allow: /*/latest/
-Allow: /en/latest/ # Fallback for bots that don't understand wildcards
-Allow: /*/6.0.x/
-Allow: /en/6.0.x/ # Fallback for bots that don't understand wildcards
-Disallow: /
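For context, the configuration this commit removes relied on Sphinx's html_extra_path option, which copies every listed file or directory verbatim into the root of the built HTML output, so docs/robots.txt would have been published as /robots.txt on the docs site. A minimal sketch of that setup, with the path taken from the diff above (html_extra_path is standard Sphinx; reading this as the project's exact intent is an assumption):

    # conf.py -- sketch of the reverted setting (see the first hunk above).
    # Sphinx copies each html_extra_path entry into the HTML output root,
    # so "docs/robots.txt" ends up served as /robots.txt next to index.html.
    html_extra_path = ["docs/robots.txt"]

The deleted robots.txt paired a blanket "Disallow: /" with Allow rules for the /latest/ and /6.0.x/ version paths, so only those doc versions stayed visible to search engines; the duplicate /en/ lines were a fallback for bots that do not understand "*" wildcards in Allow directives.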