[SOLVED, user error] Intermittent HTTP 525 errors with Cloudflare

1. Caddy version (caddy version):

2.2.1, custom built with Caddy Docker Proxy and Cloudflare support

2. How I run Caddy:

a. System environment:

OS: Ubuntu 20.04.1 LTS
Docker: 19.03.8
Docker Compose: 1.23.2

b. Command:

`docker-compose up -d caddy`

c. Service/unit/compose file:

---
version: "3.4"

services:
  # Reverse proxy: custom-built image with Caddy Docker Proxy + Cloudflare DNS support.
  caddy:
    build: .
    env_file:
      - .caddy-env
    container_name: caddy
    volumes:
      - /var/www/files:/var/www
      - /var/run/docker.sock:/var/run/docker.sock
      - caddy_data:/data
    ports:
      # Port mappings quoted so YAML never applies implicit (sexagesimal) typing.
      - "80:80"
      - "443:443"
    labels:
      caddy_1.email: "i.am@chrisrees.dev"
      caddy_1.admin: "off"
      caddy_2: "(cloudflare-tls)"
      caddy_2.tls.dns: "cloudflare {env.CLOUDFLARE_API_KEY}"
      caddy_3: files.chrisrees.dev
      caddy_3.file_server: "browse"
      caddy_3.root: "* /var/www"
      caddy_3.import: "cloudflare-tls"
    restart: unless-stopped

  # Codex comic server, proxied at codex.chrisrees.dev.
  codex:
    image: ajslater/codex
    container_name: codex
    environment:
      - PUID=992 # ubooquity
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/codex:/config
      - /data/media/Comics:/comics
    ports:
      - "9810:9810"
    depends_on:
      - caddy
    labels:
      caddy: codex.chrisrees.dev
      # caddy: comics.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 9810}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # Dynamic DNS updater; config is mounted from /data/config/ddclient.
  ddclient:
    image: ghcr.io/linuxserver/ddclient
    container_name: ddclient
    environment:
      - PUID=988 # ddclient
      - PGID=988 # ddclient
      - TZ=America/New_York
    volumes:
      - /data/config/ddclient:/config
    restart: unless-stopped

  # Docker log viewer, proxied at dozzle.chrisrees.dev.
  dozzle:
    image: amir20/dozzle
    container_name: dozzle
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    ports:
      - "9999:8080"
    depends_on:
      - caddy
    labels:
      caddy: dozzle.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 8080}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # Grocy grocery/household manager, proxied at grocy.chrisrees.dev.
  grocy:
    image: ghcr.io/linuxserver/grocy
    container_name: grocy
    environment:
      - PUID=982 # grocy
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/grocy:/config
    ports:
      - "9283:80"
    depends_on:
      - caddy
    labels:
      caddy: grocy.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 80}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # Lidarr music manager, proxied at lidarr.chrisrees.dev.
  lidarr:
    image: ghcr.io/linuxserver/lidarr
    container_name: lidarr
    environment:
      - PUID=994 # lidarr
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/lidarr:/config
      - /data:/data
    ports:
      - "8686:8686"
    depends_on:
      - caddy
      - sabnzbd
    labels:
      caddy: lidarr.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 8686}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # Lychee photo gallery, proxied at lychee.chrisrees.dev; uses mariadb.
  lychee:
    image: ghcr.io/linuxserver/lychee
    container_name: lychee
    env_file: .lychee-env
    environment:
      - PUID=981 # lychee
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/lychee:/config
      - /data/media/sync/Serneum/files/Pictures/lychee:/pictures
    ports:
      - "9284:80"
    depends_on:
      - caddy
      - mariadb
    labels:
      caddy: lychee.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 80}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # Shared MariaDB instance (used by lychee and nextcloud via depends_on).
  mariadb:
    image: ghcr.io/linuxserver/mariadb
    container_name: mariadb
    environment:
      - PUID=118 # mysql
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    ports:
      - "3306:3306" # quoted: port mappings should always be strings in Compose
    volumes:
      - /data/config/mariadb:/config
    restart: unless-stopped

  # Mylar comic manager (nightly build), proxied at mylar.chrisrees.dev.
  mylar:
    image: ghcr.io/linuxserver/mylar3:nightly
    container_name: mylar
    environment:
      - PUID=991 # mylar
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/mylar:/config
      - /data:/data
    ports:
      - "8090:8090"
    depends_on:
      - caddy
      - sabnzbd
    labels:
      caddy: mylar.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 8090}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # Nextcloud, proxied at nextcloud.chrisrees.dev; upstream serves HTTPS itself,
  # so the proxy transport skips certificate verification for the internal hop.
  nextcloud:
    image: ghcr.io/linuxserver/nextcloud
    container_name: nextcloud
    environment:
      - PUID=989 # nextcloud
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/nextcloud:/config
      - /data/media/sync:/data
    ports:
      - "9443:443"
    depends_on:
      - caddy
      - mariadb
    labels:
      caddy: nextcloud.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 443}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.reverse_proxy.transport: http
      caddy.reverse_proxy.transport.tls_insecure_skip_verify: "{{\"\"}}"
      caddy.encode: "gzip"
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # NZBHydra2 indexer aggregator, proxied at hydra.chrisrees.dev.
  nzbhydra:
    image: ghcr.io/linuxserver/nzbhydra2
    container_name: nzbhydra
    environment:
      - PUID=987 # nzbhydra
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/nzbhydra:/config
      - /data:/data
    ports:
      - "5076:5076"
    depends_on:
      - caddy
      - sabnzbd
    labels:
      caddy: hydra.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 5076}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # Organizr dashboard, proxied at organizr.chrisrees.dev.
  organizr:
    image: organizr/organizr
    container_name: organizr
    environment:
      - PUID=985 # organizr
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/organizr:/config
      - /data:/data
    ports:
      - "8001:80"
    depends_on:
      - caddy
    labels:
      caddy: organizr.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 80}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # Plex media server, proxied at plex.chrisrees.dev; /dev/dri passed through
  # (hardware device access).
  plex:
    image: ghcr.io/linuxserver/plex
    container_name: plex
    environment:
      - PUID=117 # plex
      - PGID=995 # media
      - TZ=America/New_York
      - VERSION=docker
      - UMASK_SET=002
    volumes:
      - /data/config/plex:/config
      - /data/media:/data
    ports:
      - "32400:32400"
    depends_on:
      - caddy
    labels:
      caddy: plex.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 32400}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    devices:
      - /dev/dri:/dev/dri
    restart: unless-stopped

  # Radarr movie manager, proxied at radarr.chrisrees.dev.
  radarr:
    image: ghcr.io/linuxserver/radarr
    container_name: radarr
    environment:
      - PUID=995 # radarr
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/radarr:/config
      - /data:/data
    ports:
      - "7878:7878"
    depends_on:
      - caddy
      - sabnzbd
    labels:
      caddy: radarr.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 7878}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # SABnzbd downloader, proxied at sabnzbd.chrisrees.dev; other *arr services
  # depend on it.
  sabnzbd:
    image: ghcr.io/linuxserver/sabnzbd
    container_name: sabnzbd
    environment:
      - PUID=993 # sabnzbd
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/sabnzbd:/config
      - /data/usenet:/data/usenet
    ports:
      - "8080:8080"
    depends_on:
      - caddy
    labels:
      caddy: sabnzbd.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 8080}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # signald daemon; its runtime directory is bind-mounted at /signald.
  # Not proxied through Caddy (no caddy labels).
  signald:
    image: finn/signald
    container_name: signald
    volumes:
      - /data/config/signald/run:/signald
    restart: unless-stopped

  # Sonarr TV manager (preview tag), proxied at sonarr.chrisrees.dev.
  sonarr:
    image: ghcr.io/linuxserver/sonarr:preview
    container_name: sonarr
    environment:
      - PUID=997 # sonarr
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/sonarr:/config
      - /data:/data
    ports:
      - "8989:8989"
    depends_on:
      - caddy
      - sabnzbd
    labels:
      caddy: sonarr.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 8989}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # Speedtest tracker, proxied at speedtest.chrisrees.dev; log output capped
  # via the json-file driver options.
  speedtest:
    image: henrywhitaker3/speedtest-tracker
    container_name: speedtest
    environment:
      - PUID=984 # speedtest
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK=002
      - OOKLA_EULA_GDPR=true
      - AUTH=true
    volumes:
      - /data/config/speedtest:/config
    ports:
      - "8765:80"
    depends_on:
      - caddy
    logging:
      driver: "json-file"
      options:
        max-file: "10"
        max-size: "200k"
    labels:
      caddy: speedtest.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 80}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # Syncthing, GUI proxied at sync.chrisrees.dev; 22000/21027 are sync/discovery ports.
  syncthing:
    image: ghcr.io/linuxserver/syncthing
    container_name: syncthing
    environment:
      - PUID=990 # syncthing
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/syncthing:/config
      - /data/media/sync:/data
    ports:
      - "8384:8384"
      - "22000:22000"
      - "21027:21027/udp"
    depends_on:
      - caddy
    labels:
      caddy: sync.chrisrees.dev
      caddy.reverse_proxy: "/* {{upstreams 8384}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

  # The Lounge IRC client, proxied at lounge.chrisrees.dev.
  thelounge:
    image: ghcr.io/linuxserver/thelounge
    container_name: thelounge
    environment:
      - PUID=983 # thelounge
      - PGID=995 # media
      - TZ=America/New_York
      - UMASK_SET=002
    volumes:
      - /data/config/thelounge:/config
    ports:
      - "9000:9000"
    depends_on:
      - caddy
    labels:
      caddy: lounge.chrisrees.dev
      caddy.reverse_proxy: "{{upstreams 9000}}"
      caddy.reverse_proxy.flush_interval: "-1" # quoted: label values should be strings, not YAML ints
      caddy.import: "cloudflare-tls"
    restart: unless-stopped

# Named volume backing the caddy container's /data mount (persists across restarts).
volumes:
  caddy_data: {}

d. My complete Caddyfile or JSON config:

See reply, as I seem to have hit a character limit

3. The problem I’m having:

My server usually works, but sometimes I will come back to an app (usually Codex) and get a 525: SSL Handshake failed error. As an example, the site was working around 4am last night but was not working at 10am this morning. Other services continue to function, so it could be an issue with Codex. I find that sometimes restarting Caddy works, but I am hesitant to do so, as I seem to have started hitting rate limits with Cloudflare.

4. Error messages and/or full log output:

today at 3:32 AM  2020/12/31 08:32:00 [INFO] Sending configuration to localhost
today at 3:32 AM  {"level":"info","ts":1609403520.178887,"logger":"admin.api","msg":"received request","method":"POST","host":"localhost:2019","uri":"/load","remote_addr":"127.0.0.1:49174","headers":{"Accept-Encoding":["gzip"],"Content-Length":["4527"],"Content-Type":["application/json"],"User-Agent":["Go-http-client/1.1"]}}
today at 3:32 AM  {"level":"info","ts":1609403520.179935,"logger":"admin","msg":"admin endpoint started","address":"tcp/localhost:2019","enforce_origin":false,"origins":["[::1]:2019","127.0.0.1:2019","localhost:2019"]}
today at 3:32 AM  {"level":"info","ts":1609403520.180245,"logger":"tls.cache.maintenance","msg":"started background certificate maintenance","cache":"0xc0000fa2a0"}
today at 3:32 AM  {"level":"info","ts":1609403520.1804156,"logger":"http","msg":"server is listening only on the HTTPS port but has no TLS connection policies; adding one to enable TLS","server_name":"srv0","https_port":443}
today at 3:32 AM  {"level":"info","ts":1609403520.1804454,"logger":"http","msg":"enabling automatic HTTP->HTTPS redirects","server_name":"srv0"}
today at 3:32 AM  {"level":"info","ts":1609403520.1820664,"logger":"http","msg":"enabling automatic TLS certificate management","domains":["plex.chrisrees.dev","lychee.chrisrees.dev","lidarr.chrisrees.dev","speedtest.chrisrees.dev","mylar.chrisrees.dev","nextcloud.chrisrees.dev","dozzle.chrisrees.dev","sonarr.chrisrees.dev","organizr.chrisrees.dev","sabnzbd.chrisrees.dev","lounge.chrisrees.dev","files.chrisrees.dev","radarr.chrisrees.dev","codex.chrisrees.dev","hydra.chrisrees.dev","sync.chrisrees.dev","grocy.chrisrees.dev"]}
today at 3:32 AM  {"level":"info","ts":1609403520.1832397,"logger":"tls.obtain","msg":"acquiring lock","identifier":"lidarr.chrisrees.dev"}
today at 3:32 AM  {"level":"error","ts":1609403522.7708068,"logger":"tls.obtain","msg":"will retry","error":"[lidarr.chrisrees.dev] Obtain: [lidarr.chrisrees.dev] creating new order: request to https://acme-v02.api.letsencrypt.org/acme/new-order failed after 1 attempts: HTTP 429 urn:ietf:params:acme:error:rateLimited - Error creating new order :: too many certificates already issued for exact set of domains: lidarr.chrisrees.dev: see https://letsencrypt.org/docs/rate-limits/ (ca=https://acme-v02.api.letsencrypt.org/directory)","attempt":1,"retrying_in":60,"elapsed":4.751438741,"max_duration":2592000}
today at 3:32 AM  {"level":"error","ts":1609403530.1801753,"logger":"admin","msg":"stopping current admin endpoint","error":"shutting down admin server: context deadline exceeded"}
today at 3:32 AM  {"level":"error","ts":1609403563.7391703,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:32 AM  {"level":"info","ts":1609403564.709628,"logger":"tls.obtain","msg":"releasing lock","identifier":"lidarr.chrisrees.dev"}
today at 3:32 AM  {"level":"info","ts":1609403564.7096689,"logger":"tls.cache.maintenance","msg":"stopped background certificate maintenance","cache":"0xc0001862a0"}
today at 3:32 AM  {"level":"error","ts":1609403564.7099679,"logger":"tls","msg":"job failed","error":"lidarr.chrisrees.dev: obtaining certificate: context canceled"}
today at 3:32 AM  {"level":"info","ts":1609403564.7100244,"msg":"autosaved config","file":"/config/caddy/autosave.json"}
today at 3:32 AM  {"level":"info","ts":1609403564.7100406,"logger":"admin.api","msg":"load complete"}
today at 3:32 AM  2020/12/31 08:32:44 [INFO] Successfully configured localhost
today at 3:32 AM  {"level":"error","ts":1609403564.7374127,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:32 AM  {"level":"info","ts":1609403565.195896,"logger":"tls.obtain","msg":"lock acquired","identifier":"lidarr.chrisrees.dev"}
today at 3:32 AM  {"level":"info","ts":1609403565.1969838,"logger":"tls.issuance.acme","msg":"waiting on internal rate limiter","identifiers":["lidarr.chrisrees.dev"]}
today at 3:32 AM  {"level":"info","ts":1609403565.197028,"logger":"tls.issuance.acme","msg":"done waiting on internal rate limiter","identifiers":["lidarr.chrisrees.dev"]}
today at 3:32 AM  {"level":"error","ts":1609403569.6222637,"logger":"tls.obtain","msg":"will retry","error":"[lidarr.chrisrees.dev] Obtain: [lidarr.chrisrees.dev] creating new order: request to https://acme-v02.api.letsencrypt.org/acme/new-order failed after 1 attempts: HTTP 429 urn:ietf:params:acme:error:rateLimited - Error creating new order :: too many certificates already issued for exact set of domains: lidarr.chrisrees.dev: see https://letsencrypt.org/docs/rate-limits/ (ca=https://acme-v02.api.letsencrypt.org/directory)","attempt":1,"retrying_in":60,"elapsed":4.426327143,"max_duration":2592000}
today at 3:33 AM  {"level":"info","ts":1609403634.254799,"logger":"tls.issuance.acme.acme_client","msg":"validations succeeded; finalizing order","order":"https://acme-staging-v02.api.letsencrypt.org/acme/order/17325381/210697232"}
today at 3:33 AM  {"level":"info","ts":1609403634.9067106,"logger":"tls.issuance.acme.acme_client","msg":"successfully downloaded available certificate chains","count":2,"first_url":"https://acme-staging-v02.api.letsencrypt.org/acme/cert/fae535f8a8647a1d535bb60e2b5a8fcaccb7"}
today at 3:33 AM  {"level":"info","ts":1609403634.9071007,"logger":"tls.issuance.acme","msg":"waiting on internal rate limiter","identifiers":["lidarr.chrisrees.dev"]}
today at 3:33 AM  {"level":"info","ts":1609403634.9071317,"logger":"tls.issuance.acme","msg":"done waiting on internal rate limiter","identifiers":["lidarr.chrisrees.dev"]}
today at 3:33 AM  {"level":"info","ts":1609403639.2443113,"logger":"tls.obtain","msg":"releasing lock","identifier":"lidarr.chrisrees.dev"}
today at 3:33 AM  {"level":"error","ts":1609403639.24461,"logger":"tls","msg":"job failed","error":"lidarr.chrisrees.dev: obtaining certificate: [lidarr.chrisrees.dev] Obtain: [lidarr.chrisrees.dev] creating new order: request to https://acme-v02.api.letsencrypt.org/acme/new-order failed after 1 attempts: HTTP 429 urn:ietf:params:acme:error:rateLimited - Error creating new order :: too many certificates already issued for exact set of domains: lidarr.chrisrees.dev: see https://letsencrypt.org/docs/rate-limits/ (ca=https://acme-v02.api.letsencrypt.org/directory)"}
today at 3:35 AM  {"level":"error","ts":1609403700.296945,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:36 AM  {"level":"error","ts":1609403803.4472125,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:38 AM  {"level":"error","ts":1609403906.8651874,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:40 AM  {"level":"error","ts":1609404010.735137,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:41 AM  {"level":"error","ts":1609404114.2361896,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:43 AM  {"level":"error","ts":1609404217.694259,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:45 AM  {"level":"error","ts":1609404321.673235,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:47 AM  {"level":"error","ts":1609404425.078202,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:48 AM  {"level":"error","ts":1609404528.734216,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:50 AM  {"level":"error","ts":1609404631.861046,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:51 AM  {"level":"error","ts":1609404680.384811,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:52 AM  {"level":"error","ts":1609404779.6926677,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 3:52 AM  {"level":"error","ts":1609404779.6926677,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:29 AM  {"level":"error","ts":1609428542.659997,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:29 AM  {"level":"error","ts":1609428542.659997,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:30 AM  {"level":"error","ts":1609428645.9888477,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:30 AM  {"level":"error","ts":1609428645.9888477,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:32 AM  {"level":"error","ts":1609428749.9139576,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:32 AM  {"level":"error","ts":1609428749.9139576,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:34 AM  {"level":"error","ts":1609428853.09779,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:34 AM  {"level":"error","ts":1609428853.09779,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:35 AM  {"level":"error","ts":1609428957.150645,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:35 AM  {"level":"error","ts":1609428957.150645,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:37 AM  {"level":"error","ts":1609429061.150932,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:37 AM  {"level":"error","ts":1609429061.150932,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:39 AM  {"level":"error","ts":1609429165.253752,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:39 AM  {"level":"error","ts":1609429165.253752,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:41 AM  {"level":"error","ts":1609429269.079028,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:41 AM  {"level":"error","ts":1609429269.079028,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:42 AM  {"level":"error","ts":1609429373.098621,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:42 AM  {"level":"error","ts":1609429373.098621,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:44 AM  {"level":"error","ts":1609429476.6597562,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:44 AM  {"level":"error","ts":1609429476.6597562,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:46 AM  {"level":"error","ts":1609429580.5916452,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:46 AM  {"level":"error","ts":1609429580.5916452,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:48 AM  {"level":"error","ts":1609429684.6039712,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:48 AM  {"level":"error","ts":1609429684.6039712,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:49 AM  {"level":"error","ts":1609429788.7203343,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:49 AM  {"level":"error","ts":1609429788.7203343,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}
today at 10:51 AM  {"level":"error","ts":1609429892.6537583,"logger":"http.handlers.reverse_proxy","msg":"aborting with incomplete response","error":"context canceled"}

5. What I already tried:

Usually I’ve restarted Caddy to solve my problem. I originally had all of my services in their own docker-compose.yml files, but restarting Caddy and all apps was too difficult that way, and just restarting Caddy would often fail because of how my systemd services were tied together.

I also looked up the incomplete response errors yesterday when hitting this problem earlier in the day, which is how I ended up with all of the flush_interval -1 lines in my Caddyfile, though they do not seem to have solved the problem.

I also just tried running a simple curl to Caddy from my local machine:

curl https://localhost:443
curl: (35) error:14094438:SSL routines:ssl3_read_bytes:tlsv1 alert internal error

6. Links to relevant resources:

I’ve looked at the following but haven’t seen anything relevant:

Caddyfile

# Global options: disable the admin API and set the ACME account email.
{
  admin off
  email i.am@chrisrees.dev
}
# Reusable snippet: obtain certificates via the Cloudflare DNS challenge.
(cloudflare-tls) {
  tls {
      dns cloudflare {env.CLOUDFLARE_API_KEY}
  }
}
codex.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.16:9810 {
      flush_interval -1
  }
}
dozzle.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.8:8080 {
      flush_interval -1
  }
}
files.chrisrees.dev {
  file_server browse
  import cloudflare-tls
  root * /var/www
}
grocy.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.7:80 {
      flush_interval -1
  }
}
hydra.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.17:5076 {
      flush_interval -1
  }
}
lidarr.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.18:8686 {
      flush_interval -1
  }
}
lounge.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.14:9000 {
      flush_interval -1
  }
}
lychee.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.13:80 {
      flush_interval -1
  }
}
mylar.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.19:8090 {
      flush_interval -1
  }
}
nextcloud.chrisrees.dev {
  encode gzip
  import cloudflare-tls
  # Upstream serves HTTPS itself; skip verification for the internal hop.
  reverse_proxy 192.168.128.6:443 {
      flush_interval -1
      transport http {
          tls_insecure_skip_verify
      }
  }
}
organizr.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.15:80 {
      flush_interval -1
  }
}
plex.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.10:32400 {
      flush_interval -1
  }
}
radarr.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.20:7878 {
      flush_interval -1
  }
}
sabnzbd.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.9:8080 {
      flush_interval -1
  }
}
sonarr.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.21:8989 {
      flush_interval -1
  }
}
speedtest.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy 192.168.128.12:80 {
      flush_interval -1
  }
}
sync.chrisrees.dev {
  import cloudflare-tls
  reverse_proxy /* 192.168.128.11:8384 {
      flush_interval -1
  }
}

JSON

{
  "admin": {
    "disabled": true
  },
  "apps": {
    "http": {
      "servers": {
        "srv0": {
          "listen": [
            ":443"
          ],
          "routes": [
            {
              "match": [
                {
                  "host": [
                    "nextcloud.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "encodings": {
                            "gzip": {}
                          },
                          "handler": "encode"
                        },
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "transport": {
                            "protocol": "http",
                            "tls": {
                              "insecure_skip_verify": true
                            }
                          },
                          "upstreams": [
                            {
                              "dial": "192.168.128.6:443"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "speedtest.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.12:80"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "organizr.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.15:80"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "sabnzbd.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.9:8080"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "dozzle.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.8:8080"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "lidarr.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.18:8686"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "lounge.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.14:9000"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "lychee.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.13:80"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "radarr.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.20:7878"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "sonarr.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.21:8989"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "codex.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.16:9810"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "files.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "handler": "vars",
                          "root": "/var/www"
                        },
                        {
                          "browse": {},
                          "handler": "file_server",
                          "hide": [
                            "Caddyfile"
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "grocy.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.7:80"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "hydra.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.17:5076"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "mylar.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.19:8090"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "plex.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.10:32400"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            },
            {
              "match": [
                {
                  "host": [
                    "sync.chrisrees.dev"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "flush_interval": -1,
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "192.168.128.11:8384"
                            }
                          ]
                        }
                      ],
                      "match": [
                        {
                          "path": [
                            "/*"
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            }
          ]
        }
      }
    },
    "tls": {
      "automation": {
        "policies": [
          {
            "subjects": [
              "nextcloud.chrisrees.dev",
              "speedtest.chrisrees.dev",
              "organizr.chrisrees.dev",
              "sabnzbd.chrisrees.dev",
              "dozzle.chrisrees.dev",
              "lidarr.chrisrees.dev",
              "lounge.chrisrees.dev",
              "lychee.chrisrees.dev",
              "radarr.chrisrees.dev",
              "sonarr.chrisrees.dev",
              "codex.chrisrees.dev",
              "files.chrisrees.dev",
              "grocy.chrisrees.dev",
              "hydra.chrisrees.dev",
              "mylar.chrisrees.dev",
              "plex.chrisrees.dev",
              "sync.chrisrees.dev"
            ],
            "issuer": {
              "challenges": {
                "dns": {
                  "provider": {
                    "api_token": "<TOKEN>",
                    "name": "cloudflare"
                  }
                }
              },
              "email": "i.am@chrisrees.dev",
              "module": "acme"
            }
          },
          {
            "issuer": {
              "email": "i.am@chrisrees.dev",
              "module": "acme"
            }
          }
        ]
      }
    }
  }
}
1 Like

Sorry — I was hitting an old subdomain that I had to switch away from because of the Let's Encrypt rate limits. I tried to delete the thread, but it will not let me.

This topic was automatically closed after 30 days. New replies are no longer allowed.