enabled caching, gzip and robots.txt

master
Alexander Graf · 3 years ago
parent 103918ba57 · commit f4e7ce0990

@@ -33,6 +33,17 @@ http {
         default $http_x_forwarded_proto;
         ''      $scheme;
     }
 
+    map $uri $expires {
+        default off;
+        ~*\.(ico|css|js|gif|jpeg|jpg|png|woff|ttf|otf|svg|woff2|eot)$ 97d;
+    }
+
+    # compression
+    gzip on;
+    gzip_static on;
+    gzip_types text/plain text/css application/xml application/javascript;
+    gzip_min_length 1024;
+    # TODO: figure out how to serve pre-compressed assets from admin container
 {% if KUBERNETES_INGRESS != 'true' and TLS_FLAVOR in [ 'letsencrypt', 'cert' ] %}
     # Enable the proxy for certbot if the flavor is letsencrypt and not on kubernetes
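
A note on the TODO above: nginx's gzip_static (from ngx_http_gzip_static_module) only looks for a precompressed .gz sibling of a file it serves from disk, so it does nothing for responses proxied from the admin container. A minimal sketch of the behavior for locally served files, with hypothetical paths that are not part of this commit:

    location /static/ {
        root /var/www;
        gzip_static on;   # a request for /static/app.js serves /var/www/static/app.js.gz
                          # when that file exists and the client sends Accept-Encoding: gzip
    }

Closing the TODO would mean getting the admin container's precompressed assets onto this container's filesystem; the commit leaves that open.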
@@ -46,10 +57,16 @@ http {
         proxy_pass http://127.0.0.1:8008;
     }
 {% endif %}
 
+    # robots.txt
+    location = /robots.txt {
+        add_header Content-Type text/plain;
+        return 200 "User-agent: *\nDisallow: /\n";
+    }
+
     # redirect to https
     location / {
         return 301 https://$host$request_uri;
     }
 }
 {% endif %}
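
The robots.txt handler above is answered entirely by nginx, so crawlers hitting the plain-HTTP server get a disallow-everything policy instead of the HTTPS redirect; roughly, the response looks like:

    HTTP/1.1 200 OK
    Content-Type: text/plain

    User-agent: *
    Disallow: /

Because location = /robots.txt is an exact match, it takes precedence over the catch-all location / redirect that follows it.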
@@ -95,6 +112,8 @@ http {
         proxy_hide_header X-XSS-Protection;
         proxy_hide_header X-Powered-By;
 
+        expires $expires;
+
         add_header X-Frame-Options 'SAMEORIGIN';
         add_header X-Content-Type-Options 'nosniff';
         add_header X-Permitted-Cross-Domain-Policies 'none';
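
The expires $expires; line added here picks up the map $uri $expires block from the first hunk: URIs ending in one of the listed asset extensions get a 97-day lifetime, while the default of "off" adds no caching headers at all. For a matching URI, nginx emits both an Expires and a Cache-Control header, roughly:

    Expires: <now + 97 days>
    Cache-Control: max-age=8380800    # 97 * 24 * 3600 seconds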
@@ -114,6 +133,12 @@ http {
     }
 {% else %}
+    # robots.txt
+    location = /robots.txt {
+        add_header Content-Type text/plain;
+        return 200 "User-agent: *\nDisallow: /\n";
+    }
+
     include /overrides/*.conf;
 
     # Actual logic
