1. Caddy version (caddy version):
latest
2. How I run Caddy:
docker
a. System environment:
ubuntu
b. Command:
paste command here
c. Service/unit/compose file:
paste full file contents here
d. My complete Caddyfile or JSON config:
# GLOBAL
{
    # Global options block. Entirely optional; HTTPS is on by default.
    # Optional email for Let's Encrypt
    email mail@example.com
    # Optional Let's Encrypt staging CA for testing
    acme_ca https://acme-staging-v02.api.letsencrypt.org/directory
    servers {
        timeouts {
            read_body 10s
            read_header 10s
            write 10s
            idle 2m
        }
        max_header_size 16384
    }
}
# SNIPPETS
(mustheaders) {
    header {
        Strict-Transport-Security "max-age=31536000; includeSubDomains; preload"
        Content-Security-Policy "default-src https: 'unsafe-inline' 'unsafe-eval'"
        X-Content-Type-Options "nosniff"
        X-Frame-Options "SAMEORIGIN"
        Referrer-Policy "strict-origin-when-cross-origin"
        X-XSS-Protection "1; mode=block"
        Feature-Policy "accelerometer 'none'; ambient-light-sensor 'none'; autoplay 'none'; camera 'none'; encrypted-media 'none'; fullscreen 'self'; geolocation 'none'; gyroscope 'none'; magnetometer 'none'; microphone 'none'; midi 'none'; payment 'none'; picture-in-picture *; speaker 'none'; sync-xhr 'none'; usb 'none'; vr 'none'"
        Expect-CT "max-age=604800"
        -Server
    }
}
(offlinewebsite) {
    header {
        X-Robots-Tag "noindex, nofollow, noarchive, nosnippet, notranslate, noimageindex"
    }
    basicauth * {
        admin hashed-password
    }
}
(onlinewebsite) {
    header {
        X-Robots-Tag "noarchive, notranslate"
    }
}
(compression) {
    encode zstd gzip
}
(caching) {
    header {
        Cache-Control "public, max-age=604800, must-revalidate"
    }
}
(security) {
    # Unusual URL rewrite
    try_files {path} {path}/ /index.*
    # deny all access to these folders
    @denied_folders path_regexp /(\.github|cache|bin|logs|backup.*|test.*|content|core|image.*|js|css|php|config|lib|assets|rel|priv|tracker)/.*$
    respond @denied_folders "Access denied" 403
    # deny running scripts inside core system folders
    @denied_system_scripts path_regexp /(core|content|test|system|vendor)/.*\.(txt|xml|md|html|yaml|php|pl|py|cgi|twig|sh|bat|yml|js)$
    respond @denied_system_scripts "Access denied" 403
    # deny running scripts inside user folder
    @denied_user_folder path_regexp /user/.*\.(txt|md|yaml|php|pl|py|cgi|twig|sh|bat|yml|js)$
    respond @denied_user_folder "Access denied" 403
    # deny access to specific files in the root folder
    @denied_root_folder path_regexp /(index.php.*|wp-admin.php|wp-login.php|wp-config.php.*|xmlrpc.php|config.production.json|config.development.json|index.js|package.json|renovate.json|.*lock|mix.*|ghost.js|startup.js|\.editorconfig|\.eslintignore|\.eslintrc.json|\.gitattributes|\.gitignore|\.gitmodules|\.npmignore|Gruntfile.js|LICENSE|MigratorConfig.js|LICENSE.txt|composer.lock|composer.json|nginx.conf|web.config|htaccess.txt|\.htaccess)
    respond @denied_root_folder "Access denied" 403
    # block bad crawlers
    @badbots header User-Agent "aesop_com_spiderman, alexibot, backweb, batchftp, bigfoot, blackwidow, blowfish, botalot, buddy, builtbottough, bullseye, cheesebot, chinaclaw, cosmos, crescent, curl, custo, da, diibot, disco, dittospyder, dragonfly, drip, easydl, ebingbong, erocrawler, exabot, eyenetie, filehound, flashget, flunky, frontpage, getright, getweb, go-ahead-got-it, gotit, grabnet, grafula, harvest, hloader, hmview, httplib, humanlinks, ilsebot, infonavirobot, infotekies, intelliseek, interget, iria, jennybot, jetcar, joc, justview, jyxobot, kenjin, keyword, larbin, leechftp, lexibot, lftp, libweb, likse, linkscan, linkwalker, lnspiderguy, lwp, magnet, mag-net, markwatch, memo, miixpc, mirror, missigua, moget, nameprotect, navroad, backdoorbot, nearsite, netants, netcraft, netmechanic, netspider, nextgensearchbot, attach, nicerspro, nimblecrawler, npbot, openfind, outfoxbot, pagegrabber, papa, pavuk, pcbrowser, pockey, propowerbot, prowebwalker, psbot, pump, queryn, recorder, realdownload, reaper, reget, true_robot, repomonkey, rma, internetseer, sitesnagger, siphon, slysearch, smartdownload, snake, snapbot, snoopy, sogou, spacebison, spankbot, spanner, sqworm, superbot, superhttp, surfbot, asterias, suzuran, szukacz, takeout, teleport, telesoft, thenomad, tighttwatbot, titan, urldispatcher, turingos, turnitinbot, *vacuum*, vci, voideye, libwww-perl, widow, wisenutbot, wwwoffle, xaldon, xenu, zeus, zyborg, anonymouse, *zip*, *mail*, *enhanc*, *fetch*, *auto*, *bandit*, *clip*, *copier*, *master*, *reaper*, *sauger*, *quester*, *whack*, *picker*, *catch*, *vampire*, *hari*, *offline*, *track*, *craftbot*, *download*, *extract*, *stripper*, *sucker*, *ninja*, *clshttp*, *webspider*, *leacher*, *collector*, *grabber*, *webpictures*, *seo*, *hole*, *copyright*, *check*"
    respond @badbots "Access denied" 403
}
(proxy) {
    header_up X-Forwarded-Proto {scheme}
    # {remote_host} is the client IP only; {remote} would include the port
    header_up X-Forwarded-For {remote_host}
    header_up X-Real-IP {remote_host}
    header_down X-Powered-By "the Holy Spirit"
    header_down Server "CERN httpd"
}
(logs) {
    log {
        output file /var/log/caddy/caddy.log
        format single_field common_log
    }
}
# STRIP WWW PREFIX
www.example.com {
    # {uri} (rather than {path}) keeps any query string in the redirect
    redir * https://{http.request.host.labels.1}.{http.request.host.labels.0}{uri} permanent
}
# WEBSITES
example.com {
    import mustheaders
    import offlinewebsite
    import security
    import caching
    # no caching for these two directories and their subdirectories;
    # header takes a single matcher, so a named matcher covers both paths
    @nocache path /wp-admin/* /folder/*
    header @nocache Cache-Control "no-cache, no-store, must-revalidate"
    reverse_proxy internal_IP:2351 {
        import proxy
    }
    import logs
}
3. The problem I’m having:
Can I do it this way: import the caching snippet, then add another cache rule on top of it (no caching for two directories and their subdirectories)? Please check the example.com section above.
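Alternatively, I wonder whether it would be cleaner to keep the exception next to the general rule inside the snippet itself. A rough sketch (the /wp-admin/ and /folder/ paths are just stand-ins for my real directories, and I am assuming the later header directive overwrites the Cache-Control value set by the line above it for matching requests):

(caching) {
    header Cache-Control "public, max-age=604800, must-revalidate"
    # exception: never cache these two directories or their subdirectories
    @nocache path /wp-admin/* /folder/*
    header @nocache Cache-Control "no-cache, no-store, must-revalidate"
}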
Also, I am thinking about modifying the caching snippet into the following. Does it make sense?
(caching) {
    header {
        Cache-Control "public, max-age=604800, must-revalidate"
    }
    header / {
        Cache-Control "public, max-age=3600, must-revalidate"
    }
    header /sitemap* {
        Cache-Control "no-cache, no-store, must-revalidate"
    }
}
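One thing I am unsure about in this version: does a path matcher of / match every request, or only the site root exactly? And in which order are overlapping header blocks applied? If / only matches the root, I assume the middle rule would need a wildcard, roughly:

header /* {
    Cache-Control "public, max-age=3600, must-revalidate"
}

but then /* overlaps with /sitemap*, so the ordering question still stands.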