A quick robots.txt to tell bots to stop crawling tags. (#321)

This commit is contained in:
Tyler Kennedy 2022-12-30 12:14:12 -05:00 committed by GitHub
parent d247baa307
commit 7eff751224
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 10 additions and 0 deletions

View File

@ -36,6 +36,11 @@ http {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_http_version 1.1;
# Serve robots.txt from the non-collected dir as a special case.
location /robots.txt {
alias /takahe/static/robots.txt;
}
# Serves static files from the collected dir
location /static/ {
# Files in static have cache-busting hashes in the name, thus can be cached forever

5
static/robots.txt Normal file
View File

@ -0,0 +1,5 @@
User-Agent: *
# Don't allow any bot to crawl tags.
Disallow: /tags/
Disallow: /tags/*