Redirect Loop with caddy-security

1. Caddy version (caddy version):

$ caddy_container_id=$(docker ps | grep caddy | awk '{print $1;}')
$ docker exec -w /etc/caddy $caddy_container_id caddy version
v2.5.1 h1:bAWwslD1jNeCzDa+jDCNwb8M3UJ2tPa8UZFFzPVmGKs=

2. How I run Caddy:

$ cat Dockerfile 
FROM caddy:builder AS builder

LABEL org.opencontainers.image.title=authp
LABEL org.opencontainers.image.description="Authentication Portal"
LABEL org.opencontainers.image.url=https://github.com/greenpau/caddy-security
LABEL org.opencontainers.image.source=https://github.com/greenpau/caddy-security
LABEL org.opencontainers.image.version=1.0.1
LABEL maintainer="greenpau"

RUN xcaddy build \
    --with github.com/greenpau/caddy-security \
    --with github.com/greenpau/caddy-trace \
    --with github.com/caddy-dns/cloudflare

FROM caddy:latest

COPY --from=builder /usr/bin/caddy /usr/bin/caddy

a. System environment:

$ uname -a
Linux Catacomb 4.4.180+ #42661 SMP Fri Apr 1 15:33:03 CST 2022 x86_64 GNU/Linux synology_geminilake_1520+
$ docker version
Client:
 Version:           20.10.3
 API version:       1.41
 Go version:        go1.17.1
 Git commit:        55f0773
 Built:             Thu Mar 17 08:33:54 2022
 OS/Arch:           linux/amd64
 Context:           default
 Experimental:      true

Server:
 Engine:
  Version:          20.10.3
  API version:      1.41 (minimum version 1.12)
  Go version:       go1.17.1
  Git commit:       b487c8f
  Built:            Thu Mar 17 08:32:12 2022
  OS/Arch:          linux/amd64
  Experimental:     false
 containerd:
  Version:          v1.4.3
  GitCommit:        3fa00912415f3e9c6f82dd72119179d599efd13b
 runc:
  Version:          v1.0.0-rc93
  GitCommit:        31cc25f16f5eba4d0f53e35374532873744f4b31
 docker-init:
  Version:          0.19.0
  GitCommit:        ed96d00

b. Command:

$ docker-compose -f docker-compose.yml up -d --build caddy prometheus alertmanager grafana

c. Service/unit/compose file:

$ cat docker-compose.yml 
version: "3.7"
##############################
# NETWORKS
#
networks:
  default:
    name: monitor-net
    driver: bridge
    ipam:
      config:
        - subnet: 172.16.0.64/27
#
##############################

##############################
# VOLUMES
#
volumes:
  prometheus_data: {}
  grafana_data: {}
#
##############################

##############################
# SECRETS (Non-Swarm)
#
secrets:
  caddy_admin:
    file: ./secrets/caddy_admin
  caddy_admin_pw:
    file: ./secrets/caddy_admin_pw
  caddy_admin_pw_hash:
    file: ./secrets/caddy_admin_pw_hash
  caddy_jwt_token_key:
    file: ./secrets/caddy_jwt_token_key
  grafana_admin:
    file: ./secrets/grafana_admin
  grafana_admin_pw:
    file: ./secrets/grafana_admin_pw
#
##############################

##############################
# SERVICES
#
services:
  ######################
  # MONITORING
  #
  ##########
  # Caddy 2 - a powerful, enterprise-ready, open source web server with automatic HTTPS written in Go
  #
  caddy:
    #image: caddy:${CADDY_TAG}
    build: ./caddy
    container_name: caddy
    depends_on:
      - prometheus
      - alertmanager
      - grafana
    restart: unless-stopped
    security_opt:
      - no-new-privileges:true
    networks:
      - default
    ports:
      - ${CADDY_HTTP_PORT}:18080
      - ${CADDY_HTTPS_PORT}:18443
      - ${CADDY_METRICS_PORT}:2019
      - ${GRAFANA_PORT}:3000
      - ${PROMETHEUS_PORT}:9090
      - ${ALERTMANAGER_PORT}:9093
    volumes:
      - ./caddy:/etc/caddy
    secrets:
      - caddy_admin
      - caddy_admin_pw
      - caddy_admin_pw_hash
      - caddy_jwt_token_key
    environment:
      TZ: ${TZ}
      GRAFANA_PORT: ${GRAFANA_PORT}
      PROMETHEUS_PORT: ${PROMETHEUS_PORT}
      ALERTMANAGER_PORT: ${ALERTMANAGER_PORT}
      ADMIN_USER: admin-caddy
      # ADMIN_PASSWORD: /run/secrets/caddy_admin_pw
      ADMIN_PASSWORD_HASH: JDJhJDE0JFFPeHh1Mm1YdjdIdVl1LkYvVlJTRC5zVFlPQ09SR1dRaTZmR1NkczBScE1ZREpOSGJRL3Rx
      JWT_SHARED_KEY: /run/secrets/caddy_jwt_token_key
      GOOGLE_CLIENT_ID: ${GOOGLE_CLIENT_ID}
      GOOGLE_CLIENT_SECRET: ${GOOGLE_CLIENT_SECRET}
    labels:
      org.label-schema.group: "monitoring"
  #
  ##########
  
  ##########
  # Prometheus - open-source systems monitoring and alerting toolkit
  #
  prometheus:
    image: prom/prometheus:${PROMETHEUS_TAG}
    container_name: prometheus
    depends_on:
      - alertmanager
    restart: unless-stopped
    security_opt:
      - no-new-privileges:true
    networks:
      - default
    volumes:
      - ./prometheus:/etc/prometheus
      - prometheus_data:/prometheus
    command:
      - '--config.file=/etc/prometheus/prometheus.yml'
      - '--storage.tsdb.path=/prometheus'
      - '--web.console.libraries=/etc/prometheus/console_libraries'
      - '--web.console.templates=/etc/prometheus/consoles'
      - '--storage.tsdb.retention.time=200h'
      - '--web.enable-lifecycle'
      - '--web.enable-admin-api'
    environment:
      TZ: ${TZ}
    labels:
      org.label-schema.group: "monitoring"
  #
  ##########
  
  ##########
  # Alertmanager - handles alerts sent by client applications such as the Prometheus server.
  #
  alertmanager:
    image: prom/alertmanager:${ALERTMANAGER_TAG}
    container_name: alertmanager
    restart: unless-stopped
    security_opt:
      - no-new-privileges:true
    networks:
      - default
    volumes:
      - ./alertmanager:/etc/alertmanager
    command:
      - '--config.file=/etc/alertmanager/config.yml'
      - '--storage.path=/alertmanager'
      - '--cluster.listen-address='
    environment:
      TZ: ${TZ}
    labels:
      org.label-schema.group: "monitoring"
  #
  ##########
  
  ##########
  # Grafana - is the open source analytics & monitoring solution for every database.
  #
  grafana:
    image: grafana/grafana:${GRAFANA_TAG}
    container_name: grafana
    depends_on:
      - prometheus
    restart: unless-stopped
    security_opt:
      - no-new-privileges:true
    networks:
      - default
    volumes:
      - grafana_data:/var/lib/grafana
      - ./grafana/provisioning/dashboards:/etc/grafana/provisioning/dashboards
      - ./grafana/provisioning/datasources:/etc/grafana/provisioning/datasources
    secrets:
      - grafana_admin
      - grafana_admin_pw
    environment:
      TZ: ${TZ}
      GF_SECURITY_ADMIN_USER: admin-grafana
      GF_SECURITY_ADMIN_PASSWORD: g3x5DTbQ&geqsps
      GF_USERS_ALLOW_SIGN_UP: "false"
    labels:
      org.label-schema.group: "monitoring"
  #
  ##########
  #
  ####################
#
##############################

d. My complete Caddyfile or JSON config:

$ docker exec -w /etc/caddy $caddy_container_id caddy fmt
{
	##############################
	# General Options
	#
	http_port 18080
	https_port 18443
	debug
	#
	##############################

	##############################
	# TLS Options
	#

	#Disable HTTPS/TLS
	#auto_https off
	local_certs
	#
	##############################

	##############################
	# caddy-security
	#

	# Set Order
	order authenticate before respond
	order authorize before basicauth

	security {
		# Local Identity Store
		local identity store localdb {
			realm local
			path /etc/caddy/users.json
		}
		authentication portal myportal {
			crypto default token lifetime 3600
			crypto key sign-verify {env.JWT_SHARED_KEY}
			#cookie domain .local
			enable identity store localdb
			ui {
				links {
					"My Identity" "/whoami" icon "las la-user"
					"Prometheus" "http://catacomb.local:9090" icon "las la-fire"
					"Alert Manager" "http://catacomb.local:9093" icon "las la-exclamation-circle"
					"Grafana" "http://catacomb.local:3000" icon "las la-tachometer-alt"
				}
				#password_recovery_enabled yes
			}
			transform user {
				match origin local
				action add role authp/admin
				action add role authp/user
				ui link "Portal Settings" /settings icon "las la-cog"
			}
		}

		authorization policy guests_policy {
			# disable auth redirect
			set auth url https://catacomb.local:18443/
			allow roles authp/admin authp/user
			crypto key verify {env.JWT_SHARED_KEY}
			acl rule {
				comment allow guests only
				match role guest authp/guest
				allow stop log info
			}
			acl rule {
				comment default deny
				match any
				deny log warn
			}
		}

		authorization policy users_policy {
			set auth url https://catacomb.local:18443/
			allow roles authp/admin authp/user
			crypto key verify {env.JWT_SHARED_KEY}
			acl rule {
				comment allow users
				match role authp/user
				allow stop log info
			}
			acl rule {
				comment default deny
				match any
				deny log warn
			}
		}

		authorization policy admins_policy {
			set auth url https://catacomb.local:18443/
			allow roles authp/admin authp/user
			crypto key verify {env.JWT_SHARED_KEY}
			acl rule {
				comment allow users
				match role authp/admin
				allow stop log info
			}
			acl rule {
				comment default deny
				match any
				deny log warn
			}
		}
	}
	#
	##############################
}

##############################
# Snippets
#
(basic-auth) {
	basicauth /* {
		#pw = hiccup
		admin JDJhJDEwJEVCNmdaNEg2Ti5iejRMYkF3MFZhZ3VtV3E1SzBWZEZ5Q3VWc0tzOEJwZE9TaFlZdEVkZDhX
	}
}
#
##############################

##############################
# Sites
#

# Authentication Portal
catacomb.local:18443 {
	route {
		authenticate with myportal
	}
}

:{$PROMETHEUS_PORT} {
	route {
		authorize with admins_policy
		reverse_proxy prometheus:{$PROMETHEUS_PORT}
	}
}
:{$ALERTMANAGER_PORT} {
	route {
		import basic-auth
		reverse_proxy alertmanager:{$ALERTMANAGER_PORT}
	}
}
:{$GRAFANA_PORT} {
	route {
		trace tag="grafana" response_debug=yes
		reverse_proxy grafana:{$GRAFANA_PORT}
	}
}
#
##############################

3. The problem I’m having:

Attempting to protect an internally hosted set of Docker services (Prometheus/Alert Manager/Grafana) with Caddy.

The reverse proxy works correctly when the site does not look for authorization and/or when using basic-auth. There seems to be a token issue when trying to use the authentication/authorization mechanics.

4. Error messages and/or full log output:

Log is too big to fit within character limits

https://gist.github.com/jshessen/fbc84d3de170efb4a90c3dd49e6afc64

5. What I already tried:

  1. I started with an existing Caddyfile which included a dummy basic-auth loop
  2. I attempted to use my public DNS and domains, before moving to local
  3. I have tried to disable TLS
  4. I have tried to enable Google/GitHub OAUTH
  5. I have tried to include a specific domain cookie and without
  6. I have created a file to test no-auth/basic-auth/authp simultaneously

6. Links to relevant resources:

Original Caddyfile setup
https://josheli.com/knob/2021/02/24/single-sign-on-in-caddy-server-using-only-the-caddyfile-and-basic-authentication/

FYI @greenpau

1 Like

@jshessen , please open an issue in caddy-security repo and add a reference to this issue. Please share your config in the issue.

Additional information:

This seems to work if I push the access token via cURL:
can’t post cookie in this post – but the cookie below is “access_token”

curl -vk --cookie "access_token=<REAL_TOKEN_REDACTED>" http://catacomb.local:9090
*   Trying 192.168.1.20:9090...
* Connected to catacomb.local (192.168.1.20) port 9090 (#0)
> GET / HTTP/1.1
> Host: catacomb.local:9090
> User-Agent: curl/7.82.0
> Accept: */*
> Cookie: access_token=<REAL_TOKEN_REDACTED>
> 
* Mark bundle as not supporting multiuse
< HTTP/1.1 302 Found
< Content-Length: 29
< Content-Type: text/html; charset=utf-8
< Date: Thu, 30 Jun 2022 20:40:12 GMT
< Location: /graph
< Server: Caddy
< 
**<a href="/graph">Found</a>.**

Authentication Portal

*   Trying 192.168.1.20:18443...
* Connected to catacomb.local (192.168.1.20) port 18443 (#0)
* ALPN, offering h2
* ALPN, offering http/1.1
* TLSv1.0 (OUT), TLS header, Certificate Status (22):
* TLSv1.3 (OUT), TLS handshake, Client hello (1):
* TLSv1.2 (IN), TLS header, Certificate Status (22):
* TLSv1.3 (IN), TLS handshake, Server hello (2):
* TLSv1.2 (IN), TLS header, Finished (20):
* TLSv1.2 (IN), TLS header, Supplemental data (23):
* TLSv1.3 (IN), TLS handshake, Encrypted Extensions (8):
* TLSv1.2 (IN), TLS header, Supplemental data (23):
* TLSv1.3 (IN), TLS handshake, Certificate (11):
* TLSv1.2 (IN), TLS header, Supplemental data (23):
* TLSv1.3 (IN), TLS handshake, CERT verify (15):
* TLSv1.2 (IN), TLS header, Supplemental data (23):
* TLSv1.3 (IN), TLS handshake, Finished (20):
* TLSv1.2 (OUT), TLS header, Finished (20):
* TLSv1.3 (OUT), TLS change cipher, Change cipher spec (1):
* TLSv1.2 (OUT), TLS header, Supplemental data (23):
* TLSv1.3 (OUT), TLS handshake, Finished (20):
* SSL connection using TLSv1.3 / TLS_AES_128_GCM_SHA256
* ALPN, server accepted to use h2
* Server certificate:
*  subject: [NONE]
*  start date: Jun 30 17:13:07 2022 GMT
*  expire date: Jul  1 05:13:07 2022 GMT
*  issuer: CN=Caddy Local Authority - ECC Intermediate
*  SSL certificate verify result: unable to get local issuer certificate (20), continuing anyway.
* Using HTTP2, server supports multiplexing
* Copying HTTP/2 data in stream buffer to connection buffer after upgrade: len=0
* TLSv1.2 (OUT), TLS header, Supplemental data (23):
* TLSv1.2 (OUT), TLS header, Supplemental data (23):
* TLSv1.2 (OUT), TLS header, Supplemental data (23):
* h2h3 [:method: GET]
* h2h3 [:path: /]
* h2h3 [:scheme: https]
* h2h3 [:authority: catacomb.local:18443]
* h2h3 [user-agent: curl/7.82.0]
* h2h3 [accept: */*]
* Using Stream ID: 1 (easy handle 0x55dc09972390)
* TLSv1.2 (OUT), TLS header, Supplemental data (23):
> GET / HTTP/2
> Host: catacomb.local:18443
> user-agent: curl/7.82.0
> accept: */*
> 
* TLSv1.2 (IN), TLS header, Supplemental data (23):
* TLSv1.3 (IN), TLS handshake, Newsession Ticket (4):
* TLSv1.2 (IN), TLS header, Supplemental data (23):
* Connection state changed (MAX_CONCURRENT_STREAMS == 250)!
* TLSv1.2 (OUT), TLS header, Supplemental data (23):
* TLSv1.2 (IN), TLS header, Supplemental data (23):
* TLSv1.2 (IN), TLS header, Supplemental data (23):
< HTTP/2 302 
< cache-control: no-store
< location: https://catacomb.local:18443/login
< pragma: no-cache
< server: Caddy
< set-cookie: AUTHP_SESSION_ID=en9TwID2PpodbLSeSMdNbllQY73Q6tWrugQPmB; Domain=catacomb.local; Path=/; Secure; HttpOnly;
< content-length: 0
< date: Thu, 30 Jun 2022 20:34:14 GMT
< 
* Connection #0 to host catacomb.local left intact

@greenpau – submitted
https://github.com/greenpau/caddy-security/issues/131

The issue was sending a secure cookie to an HTTP endpoint.

3 Likes

This topic was automatically closed after 30 days. New replies are no longer allowed.