enabled caching, gzip and robots.txt

master
Alexander Graf 3 years ago
parent 103918ba57
commit f4e7ce0990

@@ -33,6 +17 @@ http {
default $http_x_forwarded_proto;
'' $scheme;
}
# Map the request URI to a value for the "expires" directive (used further
# down in the server blocks): static-asset extensions get a 97-day browser
# cache lifetime; everything else gets "off" (no caching headers emitted).
map $uri $expires {
default off;
~*\.(ico|css|js|gif|jpeg|jpg|png|woff|ttf|otf|svg|woff2|eot)$ 97d;
}
# compression
gzip on;
# Prefer serving a pre-built .gz file next to the asset over compressing on the fly
gzip_static on;
# Compress common text-based MIME types (text/html is always compressed by default)
gzip_types text/plain text/css application/xml application/javascript;
# Don't bother compressing tiny responses; gzip overhead outweighs the savings
gzip_min_length 1024;
# TODO: figure out how to serve pre-compressed assets from admin container
{% if KUBERNETES_INGRESS != 'true' and TLS_FLAVOR in [ 'letsencrypt', 'cert' ] %}
# Enable the proxy for certbot if the flavor is letsencrypt and not on kubernetes
@@ -46,10 +57,16 @@ http {
proxy_pass http://127.0.0.1:8008;
}
{% endif %}
# robots.txt — answer directly over plain HTTP so crawlers are told to stay
# out without being bounced through the HTTPS redirect below.
# NOTE(review): "add_header Content-Type" appends a header rather than setting
# the response's type (that comes from default_type) — presumably fine here,
# but "default_type text/plain;" would be the canonical form; verify.
location = /robots.txt {
add_header Content-Type text/plain;
return 200 "User-agent: *\nDisallow: /\n";
}
# redirect to https — catch-all: every other plain-HTTP request gets a
# permanent (301) redirect to the same host and URI over HTTPS.
location / {
return 301 https://$host$request_uri;
}
}
{% endif %}
@@ -95,6 +112,8 @@ http {
proxy_hide_header X-XSS-Protection;
proxy_hide_header X-Powered-By;
expires $expires;
add_header X-Frame-Options 'SAMEORIGIN';
add_header X-Content-Type-Options 'nosniff';
add_header X-Permitted-Cross-Domain-Policies 'none';
@@ -114,6 +133,12 @@ http {
}
{% else %}
# robots.txt — serve an inline disallow-all policy for this deployment.
# NOTE(review): "add_header Content-Type" appends a header rather than
# replacing the one derived from default_type — likely intended to be
# "default_type text/plain;"; confirm before changing.
location = /robots.txt {
add_header Content-Type text/plain;
return 200 "User-agent: *\nDisallow: /\n";
}
# Site-local overrides take precedence; dropped into /overrides by the operator.
include /overrides/*.conf;
# Actual logic

Loading…
Cancel
Save