A quick robots.txt to tell bots to stop crawling tags. (#321)
parent d247baa307
commit 7eff751224
@@ -36,6 +36,11 @@ http {
     proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
     proxy_http_version 1.1;

+    # Serve robots.txt from the non-collected dir as a special case.
+    location /robots.txt {
+        alias /takahe/static/robots.txt;
+    }
+
     # Serves static files from the collected dir
     location /static/ {
         # Files in static have cache-busting hashes in the name, thus can be cached forever
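The alias directive makes nginx answer /robots.txt directly from disk instead of proxying the request through to the Django app. A minimal smoke test, once a deployment is up, is to fetch the path and check the body; this is only a sketch, and the host takahe.example is a made-up placeholder, not something from the commit:

import urllib.request

# Made-up deployment host, used only for illustration.
BASE = "https://takahe.example"

with urllib.request.urlopen(BASE + "/robots.txt") as resp:
    body = resp.read().decode("utf-8")
    # nginx should serve the aliased static file directly with a 200.
    assert resp.status == 200
    assert "Disallow: /tags/" in body
print(body)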
@@ -0,0 +1,5 @@
+User-Agent: *
+
+# Don't allow any bot to crawl tags.
+Disallow: /tags/
+Disallow: /tags/*
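Disallow matching is by path prefix, so the /tags/ rule alone already covers every tag page; the extra /tags/* line is presumably there for crawlers that expect an explicit wildcard. The rules can be sanity-checked with Python's standard-library parser; the bot name and URLs below are made up for illustration, and the committed file's blank line is omitted because urllib.robotparser treats a blank line as a record separator:

import urllib.robotparser

# Same rules as the committed file, minus the blank line that
# urllib.robotparser would treat as a record separator.
rules = [
    "User-Agent: *",
    "# Don't allow any bot to crawl tags.",
    "Disallow: /tags/",
    "Disallow: /tags/*",
]

rp = urllib.robotparser.RobotFileParser()
# Feed the rules directly instead of fetching them over the network.
rp.parse(rules)

# Tag pages are blocked for any user agent...
assert not rp.can_fetch("SomeBot", "https://takahe.example/tags/cats/")
# ...while other pages remain crawlable.
assert rp.can_fetch("SomeBot", "https://takahe.example/@user/")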