Compare commits
118 Commits
trustchain...sources/ld
Author | SHA1 | Date |
---|---|---|
Jens Langhammer | 3835734ed4 | |
Jens Langhammer | e0355b13cd | |
Jens Langhammer | d092093e94 | |
Jens L | f728bbb14b | |
Jens L | 4080080acd | |
authentik-automation[bot] | 0a0f87b9ca | |
dependabot[bot] | 7699a119a3 | |
dependabot[bot] | 73fbcde924 | |
dependabot[bot] | a1efcc4da9 | |
dependabot[bot] | d594574ffa | |
dependabot[bot] | dbbb5e75cf | |
dependabot[bot] | ddb73db287 | |
dependabot[bot] | 143f092153 | |
dependabot[bot] | d89adef963 | |
dependabot[bot] | 5f3cbf6f7f | |
transifex-integration[bot] | a9fdacc60b | |
Philipp Kolberg | 9db9ad3d66 | |
dependabot[bot] | 11dcda77fa | |
dependabot[bot] | 4ce5f0931b | |
dependabot[bot] | f8e2cd5639 | |
dependabot[bot] | 8b4f66e457 | |
dependabot[bot] | 939631c94e | |
dependabot[bot] | 467a149c06 | |
Tana M Berry | f62f720c55 | |
authentik-automation[bot] | ba8fd9fcb2 | |
dependabot[bot] | fdc323af62 | |
dependabot[bot] | 44bac0d67b | |
dependabot[bot] | 191514864e | |
dependabot[bot] | 258a4d5283 | |
dependabot[bot] | 62a85fb888 | |
dependabot[bot] | 7685320466 | |
Jens Langhammer | c30a2406a9 | |
Jens L | 9232042c55 | |
Marc 'risson' Schmitt | d8b1a59dad | |
Jens L | 1e05d38059 | |
Marc 'risson' Schmitt | d5871fef4e | |
Andrey | 7f4fa70a41 | |
Jens Langhammer | fa0c4d8410 | |
Jens Langhammer | aeb24889fd | |
Jens Langhammer | 8ac9042501 | |
Marc 'risson' Schmitt | 2d821a07c6 | |
Andrey | 9680106b45 | |
dependabot[bot] | 709358615c | |
dependabot[bot] | 0ad1b42706 | |
dependabot[bot] | 2333e1f434 | |
dependabot[bot] | 4444db9e6d | |
dependabot[bot] | c5d483a238 | |
Ken Sternberg | cc1c66aa13 | |
Ken Sternberg | 67d6c0e8af | |
dependabot[bot] | b9afac5008 | |
François-Xavier Payet | aadda1f314 | |
dependabot[bot] | 293fa2e375 | |
dependabot[bot] | ddb1597501 | |
dependabot[bot] | 96f8e961ea | |
dependabot[bot] | f699dba2ae | |
dependabot[bot] | 250e8ee4a1 | |
dependabot[bot] | ce47755049 | |
transifex-integration[bot] | 8125a790a9 | |
transifex-integration[bot] | b7e653db6a | |
transifex-integration[bot] | 74958693a1 | |
dependabot[bot] | cadc311703 | |
dependabot[bot] | 924f3c9075 | |
Jens L | a7933c84c1 | |
Jens L | fe1a06ebf2 | |
dependabot[bot] | 823e7dbe1a | |
macmoritz | 90b8217eb2 | |
dependabot[bot] | c897271756 | |
dependabot[bot] | d1c9d41954 | |
dependabot[bot] | 1906a10b1a | |
dependabot[bot] | a03cc57473 | |
dependabot[bot] | e00799b314 | |
dependabot[bot] | faa5ce3e83 | |
dependabot[bot] | 937d025ef6 | |
dependabot[bot] | a748a61cd6 | |
dependabot[bot] | b24420598c | |
dependabot[bot] | b005ec7684 | |
dependabot[bot] | 6f6ee29738 | |
dependabot[bot] | ff3fef6d09 | |
dependabot[bot] | 515958157c | |
Jens L | dd4e9030b4 | |
Jens L | f94670cad7 | |
dependabot[bot] | b4dd74f2ff | |
dependabot[bot] | 9a2b548bf6 | |
dependabot[bot] | d6e3de4f48 | |
dependabot[bot] | 30ccaaf97c | |
Jens L | 3d9f7ee27e | |
Tana M Berry | 211dcf3272 | |
transifex-integration[bot] | 1d0b8a065b | |
dependabot[bot] | 7f82b555c8 | |
dependabot[bot] | f7aec3cf28 | |
dependabot[bot] | c6c133f67d | |
dependabot[bot] | 73db23f21f | |
dependabot[bot] | 4744f5c6c6 | |
Philipp Kolberg | e92bda2659 | |
gc4g40u6 | a10392efcc | |
dependabot[bot] | e52f13afae | |
dependabot[bot] | 07c50a43ae | |
dependabot[bot] | 0cd2f68bf3 | |
dependabot[bot] | 4ef10f1cec | |
dependabot[bot] | 43151c09e2 | |
dependabot[bot] | 871b5f3246 | |
dependabot[bot] | ed66bdaec4 | |
dependabot[bot] | 345022f1aa | |
dependabot[bot] | f296862d3c | |
dependabot[bot] | 5aca310d10 | |
dependabot[bot] | 7dab5dc03f | |
dependabot[bot] | 2d6e0984d1 | |
Jens L | 028c7af00f | |
Ken Sternberg | 6df83e4259 | |
dependabot[bot] | afdca418e1 | |
senare | d8728c1749 | |
dependabot[bot] | e5afabb221 | |
dependabot[bot] | a0a6ee0769 | |
dependabot[bot] | a65bb0b29f | |
dependabot[bot] | 3df7b5504e | |
authentik-automation[bot] | 99f44ea805 | |
Jens Langhammer | 97ccc84796 | |
Jens Langhammer | a43b2fb17c | |
```diff
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2023.10.6
+current_version = 2023.10.3
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
```
```diff
@@ -2,39 +2,36 @@ name: "Setup authentik testing environment"
 description: "Setup authentik testing environment"
 
 inputs:
-  postgresql_version:
+  postgresql_tag:
     description: "Optional postgresql image tag"
     default: "12"
 
 runs:
   using: "composite"
   steps:
-    - name: Install poetry & deps
+    - name: Install poetry
      shell: bash
      run: |
        pipx install poetry || true
-        sudo apt-get update
-        sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
+        sudo apt update
+        sudo apt install -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
     - name: Setup python and restore poetry
-      uses: actions/setup-python@v4
+      uses: actions/setup-python@v3
       with:
-        python-version-file: 'pyproject.toml'
+        python-version: "3.11"
         cache: "poetry"
     - name: Setup node
       uses: actions/setup-node@v3
       with:
-        node-version-file: web/package.json
+        node-version: "20"
         cache: "npm"
         cache-dependency-path: web/package-lock.json
     - name: Setup go
       uses: actions/setup-go@v4
       with:
        go-version-file: "go.mod"
     - name: Setup dependencies
       shell: bash
       run: |
-        export PSQL_TAG=${{ inputs.postgresql_version }}
+        export PSQL_TAG=${{ inputs.postgresql_tag }}
         docker-compose -f .github/actions/setup/docker-compose.yml up -d
+        poetry env use python3.11
         poetry install
         cd web && npm ci
     - name: Generate config
```
```diff
@@ -48,38 +48,25 @@ jobs:
       - name: run migrations
         run: poetry run python -m lifecycle.migrate
   test-migrations-from-stable:
     name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
     runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        psql:
-          - 12-alpine
-          - 15-alpine
-          - 16-alpine
     continue-on-error: true
     steps:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 0
       - name: Setup authentik env
         uses: ./.github/actions/setup
-        with:
-          postgresql_version: ${{ matrix.psql }}
       - name: checkout stable
         run: |
-          # Delete all poetry envs
-          rm -rf /home/runner/.cache/pypoetry
           # Copy current, latest config to local
           cp authentik/lib/default.yml local.env.yml
           cp -R .github ..
           cp -R scripts ..
-          git checkout version/$(python -c "from authentik import __version__; print(__version__)")
+          git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
           rm -rf .github/ scripts/
           mv ../.github ../scripts .
       - name: Setup authentik env (ensure stable deps are installed)
         uses: ./.github/actions/setup
-        with:
-          postgresql_version: ${{ matrix.psql }}
       - name: run migrations to stable
         run: poetry run python -m lifecycle.migrate
       - name: checkout current code
@@ -89,13 +76,9 @@ jobs:
           git reset --hard HEAD
           git clean -d -fx .
           git checkout $GITHUB_SHA
-          # Delete previous poetry env
-          rm -rf $(poetry env info --path)
           poetry install
       - name: Setup authentik env (ensure latest deps are installed)
         uses: ./.github/actions/setup
-        with:
-          postgresql_version: ${{ matrix.psql }}
       - name: migrate to latest
         run: poetry run python -m lifecycle.migrate
   test-unittest:
@@ -114,7 +97,7 @@ jobs:
       - name: Setup authentik env
         uses: ./.github/actions/setup
         with:
-          postgresql_version: ${{ matrix.psql }}
+          postgresql_tag: ${{ matrix.psql }}
       - name: run unittest
         run: |
           poetry run make test
```
```diff
@@ -130,7 +130,7 @@ jobs:
           go-version-file: "go.mod"
       - uses: actions/setup-node@v4
         with:
-          node-version-file: web/package.json
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: web/package-lock.json
       - name: Generate API
```
```diff
@@ -24,7 +24,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version-file: ${{ matrix.project }}/package.json
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: ${{ matrix.project }}/package-lock.json
       - working-directory: ${{ matrix.project }}/
@@ -40,7 +40,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version-file: web/package.json
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: web/package-lock.json
       - working-directory: web/
@@ -62,7 +62,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version-file: ${{ matrix.project }}/package.json
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: ${{ matrix.project }}/package-lock.json
       - working-directory: ${{ matrix.project }}/
@@ -78,7 +78,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version-file: web/package.json
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: web/package-lock.json
       - working-directory: web/
@@ -110,7 +110,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version-file: web/package.json
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: web/package-lock.json
       - working-directory: web/
```
```diff
@@ -18,7 +18,7 @@ jobs:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
-          node-version-file: website/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/
@@ -32,7 +32,7 @@ jobs:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
-          node-version-file: website/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/
@@ -53,7 +53,7 @@ jobs:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
-          node-version-file: website/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/
```
```diff
@@ -131,7 +131,7 @@ jobs:
           go-version-file: "go.mod"
       - uses: actions/setup-node@v4
         with:
-          node-version-file: web/package.json
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: web/package-lock.json
       - name: Build web
```
```diff
@@ -19,7 +19,7 @@ jobs:
           token: ${{ steps.generate_token.outputs.token }}
       - uses: actions/setup-node@v4
         with:
-          node-version-file: web/package.json
+          node-version: "20"
           registry-url: "https://registry.npmjs.org"
       - name: Generate API Client
         run: make gen-client-ts
```
Dockerfile (14 changed lines)
```diff
@@ -1,5 +1,3 @@
-# syntax=docker/dockerfile:1
-
 # Stage 1: Build website
 FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder
 
@@ -9,7 +7,7 @@ WORKDIR /work/website
 
 RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
     --mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
-    --mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \
+    --mount=type=cache,target=/root/.npm \
     npm ci --include=dev
 
 COPY ./website /work/website/
@@ -27,7 +25,7 @@ WORKDIR /work/web
 
 RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
-    --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
+    --mount=type=cache,target=/root/.npm \
     npm ci --include=dev
 
 COPY ./web /work/web/
@@ -64,8 +62,8 @@ COPY ./go.sum /go/src/goauthentik.io/go.sum
 
 ENV CGO_ENABLED=0
 
-RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
-    --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
+RUN --mount=type=cache,target=/go/pkg/mod \
+    --mount=type=cache,target=/root/.cache/go-build \
     GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server
 
 # Stage 4: MaxMind GeoIP
@@ -91,9 +89,7 @@ ENV VENV_PATH="/ak-root/venv" \
     POETRY_VIRTUALENVS_CREATE=false \
     PATH="/ak-root/venv/bin:$PATH"
 
-RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
-
-RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
+RUN --mount=type=cache,target=/var/cache/apt \
     apt-get update && \
     # Required for installing pip packages
     apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev
```
Makefile (2 changed lines)
```diff
@@ -110,6 +110,8 @@ gen-diff: ## (Release) generate the changelog diff between the current schema a
 		--markdown /local/diff.md \
 		/local/old_schema.yml /local/schema.yml
 	rm old_schema.yml
+	sed -i 's/{/\&#123;/g' diff.md
+	sed -i 's/}/\&#125;/g' diff.md
 	npx prettier --write diff.md
 
 gen-clean:
```
```diff
@@ -2,7 +2,7 @@
 from os import environ
 from typing import Optional
 
-__version__ = "2023.10.6"
+__version__ = "2023.10.3"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
 
 
```
```diff
@@ -93,10 +93,10 @@ class ConfigView(APIView):
                     "traces_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.4)),
                 },
                 "capabilities": self.get_capabilities(),
-                "cache_timeout": CONFIG.get_int("redis.cache_timeout"),
-                "cache_timeout_flows": CONFIG.get_int("redis.cache_timeout_flows"),
-                "cache_timeout_policies": CONFIG.get_int("redis.cache_timeout_policies"),
-                "cache_timeout_reputation": CONFIG.get_int("redis.cache_timeout_reputation"),
+                "cache_timeout": CONFIG.get_int("cache.timeout"),
+                "cache_timeout_flows": CONFIG.get_int("cache.timeout_flows"),
+                "cache_timeout_policies": CONFIG.get_int("cache.timeout_policies"),
+                "cache_timeout_reputation": CONFIG.get_int("cache.timeout_reputation"),
             }
         )
 
```
```diff
@@ -21,9 +21,7 @@ _other_urls = []
 for _authentik_app in get_apps():
     try:
         api_urls = import_module(f"{_authentik_app.name}.urls")
-    except ModuleNotFoundError:
-        continue
-    except ImportError as exc:
+    except (ModuleNotFoundError, ImportError) as exc:
         LOGGER.warning("Could not import app's URLs", app_name=_authentik_app.name, exc=exc)
         continue
     if not hasattr(api_urls, "api_urlpatterns"):
```
```diff
@@ -40,7 +40,7 @@ class ManagedAppConfig(AppConfig):
                 meth()
                 self._logger.debug("Successfully reconciled", name=name)
             except (DatabaseError, ProgrammingError, InternalError) as exc:
-                self._logger.warning("Failed to run reconcile", name=name, exc=exc)
+                self._logger.debug("Failed to run reconcile", name=name, exc=exc)
 
 
 class AuthentikBlueprintsConfig(ManagedAppConfig):
```
```diff
@@ -75,13 +75,13 @@ class BlueprintEventHandler(FileSystemEventHandler):
             return
         if event.is_directory:
             return
-        root = Path(CONFIG.get("blueprints_dir")).absolute()
-        path = Path(event.src_path).absolute()
-        rel_path = str(path.relative_to(root))
         if isinstance(event, FileCreatedEvent):
-            LOGGER.debug("new blueprint file created, starting discovery", path=rel_path)
-            blueprints_discovery.delay(rel_path)
+            LOGGER.debug("new blueprint file created, starting discovery")
+            blueprints_discovery.delay()
         if isinstance(event, FileModifiedEvent):
+            path = Path(event.src_path)
+            root = Path(CONFIG.get("blueprints_dir")).absolute()
+            rel_path = str(path.relative_to(root))
             for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True):
                 LOGGER.debug("modified blueprint file, starting apply", instance=instance)
                 apply_blueprint.delay(instance.pk.hex)
```
```diff
@@ -98,32 +98,39 @@ def blueprints_find_dict():
     return blueprints
 
 
-def blueprints_find() -> list[BlueprintFile]:
+def blueprints_find():
     """Find blueprints and return valid ones"""
     blueprints = []
     root = Path(CONFIG.get("blueprints_dir"))
     for path in root.rglob("**/*.yaml"):
-        rel_path = path.relative_to(root)
         # Check if any part in the path starts with a dot and assume a hidden file
         if any(part for part in path.parts if part.startswith(".")):
             continue
+        LOGGER.debug("found blueprint", path=str(path))
         with open(path, "r", encoding="utf-8") as blueprint_file:
             try:
                 raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
             except YAMLError as exc:
                 raw_blueprint = None
-                LOGGER.warning("failed to parse blueprint", exc=exc, path=str(rel_path))
+                LOGGER.warning("failed to parse blueprint", exc=exc, path=str(path))
         if not raw_blueprint:
             continue
         metadata = raw_blueprint.get("metadata", None)
         version = raw_blueprint.get("version", 1)
         if version != 1:
-            LOGGER.warning("invalid blueprint version", version=version, path=str(rel_path))
+            LOGGER.warning("invalid blueprint version", version=version, path=str(path))
             continue
         file_hash = sha512(path.read_bytes()).hexdigest()
-        blueprint = BlueprintFile(str(rel_path), version, file_hash, int(path.stat().st_mtime))
+        blueprint = BlueprintFile(
+            str(path.relative_to(root)), version, file_hash, int(path.stat().st_mtime)
+        )
         blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
         blueprints.append(blueprint)
+        LOGGER.debug(
+            "parsed & loaded blueprint",
+            hash=file_hash,
+            path=str(path),
+        )
     return blueprints
 
 
```
```diff
@@ -131,12 +138,10 @@ def blueprints_find() -> list[BlueprintFile]:
     throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
 )
 @prefill_task
-def blueprints_discovery(self: MonitoredTask, path: Optional[str] = None):
+def blueprints_discovery(self: MonitoredTask):
     """Find blueprints and check if they need to be created in the database"""
     count = 0
     for blueprint in blueprints_find():
-        if path and blueprint.path != path:
-            continue
         check_blueprint_v1_file(blueprint)
         count += 1
     self.set_status(
```
```diff
@@ -166,11 +171,7 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):
             metadata={},
         )
         instance.save()
-        LOGGER.info(
-            "Creating new blueprint instance from file", instance=instance, path=instance.path
-        )
     if instance.last_applied_hash != blueprint.hash:
-        LOGGER.info("Applying blueprint due to changed file", instance=instance, path=instance.path)
         apply_blueprint.delay(str(instance.pk))
 
 
```
```diff
@@ -38,7 +38,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
 
     managed = ReadOnlyField()
     component = SerializerMethodField()
-    icon = ReadOnlyField(source="icon_url")
+    icon = ReadOnlyField(source="get_icon")
 
     def get_component(self, obj: Source) -> str:
         """Get object component so that we know how to edit the object"""
```
```diff
@@ -171,11 +171,6 @@ class UserSerializer(ModelSerializer):
             raise ValidationError("Setting a user to internal service account is not allowed.")
         return user_type
 
-    def validate(self, attrs: dict) -> dict:
-        if self.instance and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT:
-            raise ValidationError("Can't modify internal service account users")
-        return super().validate(attrs)
-
     class Meta:
         model = User
         fields = [
```
```diff
@@ -44,7 +44,6 @@ class PropertyMappingEvaluator(BaseEvaluator):
         if request:
             req.http_request = request
         self._context["request"] = req
-        req.context.update(**kwargs)
         self._context.update(**kwargs)
         self.dry_run = dry_run
 
```
```diff
@@ -13,6 +13,7 @@
 {% block head_before %}
 {% endblock %}
 <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
 <link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
+<link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject>
 <script src="{% static 'dist/poly.js' %}?version={{ version }}" type="module"></script>
 <script src="{% static 'dist/standalone/loading/index.js' %}?version={{ version }}" type="module"></script>
```
```diff
@@ -6,7 +6,6 @@
 {% block head_before %}
-<link rel="prefetch" href="/static/dist/assets/images/flow_background.jpg" />
 <link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}">
 <link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
 {% include "base/header_js.html" %}
 {% endblock %}
 
```
```diff
@@ -27,7 +27,6 @@ from authentik.lib.sentry import before_send
 from authentik.lib.utils.errors import exception_to_string
 from authentik.outposts.models import OutpostServiceConnection
 from authentik.policies.models import Policy, PolicyBindingModel
-from authentik.policies.reputation.models import Reputation
 from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
 from authentik.providers.scim.models import SCIMGroup, SCIMUser
 from authentik.stages.authenticator_static.models import StaticToken
@@ -53,13 +52,11 @@ IGNORED_MODELS = (
     RefreshToken,
     SCIMUser,
     SCIMGroup,
-    Reputation,
 )
 
 
 def should_log_model(model: Model) -> bool:
     """Return true if operation on `model` should be logged"""
-    # Check for silk by string so this comparison doesn't fail when silk isn't installed
     if model.__module__.startswith("silk"):
         return False
     return model.__class__ not in IGNORED_MODELS
```
```diff
@@ -96,30 +93,21 @@ class AuditMiddleware:
     of models"""
 
     get_response: Callable[[HttpRequest], HttpResponse]
-    anonymous_user: User = None
 
     def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
         self.get_response = get_response
 
-    def _ensure_fallback_user(self):
-        """Defer fetching anonymous user until we have to"""
-        if self.anonymous_user:
-            return
-        from guardian.shortcuts import get_anonymous_user
-
-        self.anonymous_user = get_anonymous_user()
-
     def connect(self, request: HttpRequest):
         """Connect signal for automatic logging"""
-        self._ensure_fallback_user()
-        user = getattr(request, "user", self.anonymous_user)
-        if not user.is_authenticated:
-            user = self.anonymous_user
+        if not hasattr(request, "user"):
+            return
+        if not getattr(request.user, "is_authenticated", False):
+            return
         if not hasattr(request, "request_id"):
             return
-        post_save_handler = partial(self.post_save_handler, user=user, request=request)
-        pre_delete_handler = partial(self.pre_delete_handler, user=user, request=request)
-        m2m_changed_handler = partial(self.m2m_changed_handler, user=user, request=request)
+        post_save_handler = partial(self.post_save_handler, user=request.user, request=request)
+        pre_delete_handler = partial(self.pre_delete_handler, user=request.user, request=request)
+        m2m_changed_handler = partial(self.m2m_changed_handler, user=request.user, request=request)
         post_save.connect(
             post_save_handler,
             dispatch_uid=request.request_id,
```
```diff
@@ -217,7 +217,6 @@ class Event(SerializerModel, ExpiringModel):
             "path": request.path,
             "method": request.method,
             "args": cleanse_dict(QueryDict(request.META.get("QUERY_STRING", ""))),
-            "user_agent": request.META.get("HTTP_USER_AGENT", ""),
         }
         # Special case for events created during flow execution
         # since they keep the http query within a wrapped query
```
```diff
@@ -53,15 +53,7 @@ class TestEvents(TestCase):
         """Test plain from_http"""
         event = Event.new("unittest").from_http(self.factory.get("/"))
         self.assertEqual(
-            event.context,
-            {
-                "http_request": {
-                    "args": {},
-                    "method": "GET",
-                    "path": "/",
-                    "user_agent": "",
-                }
-            },
+            event.context, {"http_request": {"args": {}, "method": "GET", "path": "/"}}
         )
 
     def test_from_http_clean_querystring(self):
@@ -75,7 +67,6 @@ class TestEvents(TestCase):
                     "args": {"token": SafeExceptionReporterFilter.cleansed_substitute},
                     "method": "GET",
                     "path": "/",
-                    "user_agent": "",
                 }
             },
         )
@@ -92,7 +83,6 @@ class TestEvents(TestCase):
                     "args": {"token": SafeExceptionReporterFilter.cleansed_substitute},
                     "method": "GET",
                     "path": "/",
-                    "user_agent": "",
                 }
             },
         )
```
```diff
@@ -5,13 +5,12 @@ from dataclasses import asdict, is_dataclass
 from datetime import date, datetime, time, timedelta
 from enum import Enum
 from pathlib import Path
-from types import GeneratorType, NoneType
+from types import GeneratorType
 from typing import Any, Optional
 from uuid import UUID
 
 from django.contrib.auth.models import AnonymousUser
 from django.core.handlers.wsgi import WSGIRequest
 from django.core.serializers.json import DjangoJSONEncoder
 from django.db import models
 from django.db.models.base import Model
 from django.http.request import HttpRequest
@@ -154,20 +153,7 @@ def sanitize_item(value: Any) -> Any:
         return value.isoformat()
     if isinstance(value, timedelta):
         return str(value.total_seconds())
-    if callable(value):
-        return {
-            "type": "callable",
-            "name": value.__name__,
-            "module": value.__module__,
-        }
-    # List taken from the stdlib's JSON encoder (_make_iterencode, encoder.py:415)
-    if isinstance(value, (bool, int, float, NoneType, list, tuple, dict)):
-        return value
-    try:
-        return DjangoJSONEncoder().default(value)
-    except TypeError:
-        return str(value)
+    return str(value)
 
 
 def sanitize_dict(source: dict[Any, Any]) -> dict[Any, Any]:
```
```diff
@@ -33,7 +33,7 @@ PLAN_CONTEXT_SOURCE = "source"
 # Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan
 # was restored.
 PLAN_CONTEXT_IS_RESTORED = "is_restored"
-CACHE_TIMEOUT = CONFIG.get_int("redis.cache_timeout_flows")
+CACHE_TIMEOUT = CONFIG.get_int("cache.timeout_flows")
 CACHE_PREFIX = "goauthentik.io/flows/planner/"
 
 
```
```diff
@@ -167,11 +167,7 @@ class ChallengeStageView(StageView):
                 stage_type=self.__class__.__name__, method="get_challenge"
             ).time(),
         ):
-            try:
-                challenge = self.get_challenge(*args, **kwargs)
-            except StageInvalidException as exc:
-                self.logger.debug("Got StageInvalidException", exc=exc)
-                return self.executor.stage_invalid()
+            challenge = self.get_challenge(*args, **kwargs)
         with Hub.current.start_span(
             op="authentik.flow.stage._get_challenge",
             description=self.__class__.__name__,
```
```diff
@@ -1,4 +1,6 @@
 """authentik core config loader"""
+import base64
+import json
 import os
 from collections.abc import Mapping
 from contextlib import contextmanager
```
```diff
@@ -22,6 +24,25 @@ SEARCH_PATHS = ["authentik/lib/default.yml", "/etc/authentik/config.yml", ""] +
 ENV_PREFIX = "AUTHENTIK"
 ENVIRONMENT = os.getenv(f"{ENV_PREFIX}_ENV", "local")
 
+REDIS_ENV_KEYS = [
+    f"{ENV_PREFIX}_REDIS__HOST",
+    f"{ENV_PREFIX}_REDIS__PORT",
+    f"{ENV_PREFIX}_REDIS__DB",
+    f"{ENV_PREFIX}_REDIS__USERNAME",
+    f"{ENV_PREFIX}_REDIS__PASSWORD",
+    f"{ENV_PREFIX}_REDIS__TLS",
+    f"{ENV_PREFIX}_REDIS__TLS_REQS",
+]
+
+DEPRECATIONS = {
+    "redis.broker_url": "broker.url",
+    "redis.broker_transport_options": "broker.transport_options",
+    "redis.cache_timeout": "cache.timeout",
+    "redis.cache_timeout_flows": "cache.timeout_flows",
+    "redis.cache_timeout_policies": "cache.timeout_policies",
+    "redis.cache_timeout_reputation": "cache.timeout_reputation",
+}
+
 
 def get_path_from_dict(root: dict, path: str, sep=".", default=None) -> Any:
     """Recursively walk through `root`, checking each part of `path` separated by `sep`.
```
```diff
@@ -81,6 +102,10 @@ class AttrEncoder(JSONEncoder):
         return super().default(o)
 
 
+class UNSET:
+    """Used to test whether configuration key has not been set."""
+
+
 class ConfigLoader:
     """Search through SEARCH_PATHS and load configuration. Environment variables starting with
     `ENV_PREFIX` are also applied.
```
```diff
@@ -113,6 +138,40 @@ class ConfigLoader:
                     self.update_from_file(env_file)
         self.update_from_env()
         self.update(self.__config, kwargs)
+        self.check_deprecations()
+
+    def check_deprecations(self):
+        """Warn if any deprecated configuration options are used"""
+
+        def _pop_deprecated_key(current_obj, dot_parts, index):
+            """Recursive function to remove deprecated keys in configuration"""
+            dot_part = dot_parts[index]
+            if index == len(dot_parts) - 1:
+                return current_obj.pop(dot_part)
+            value = _pop_deprecated_key(current_obj[dot_part], dot_parts, index + 1)
+            if not current_obj[dot_part]:
+                current_obj.pop(dot_part)
+            return value
+
+        for deprecation, replacement in DEPRECATIONS.items():
+            if self.get(deprecation, default=UNSET) is not UNSET:
+                message = (
+                    f"'{deprecation}' has been deprecated in favor of '{replacement}'! "
+                    + "Please update your configuration."
+                )
+                self.log(
+                    "warning",
+                    message,
+                )
+                try:
+                    from authentik.events.models import Event, EventAction
+
+                    Event.new(EventAction.CONFIGURATION_ERROR, message=message).save()
+                except ImportError:
+                    continue
+
+                deprecated_attr = _pop_deprecated_key(self.__config, deprecation.split("."), 0)
+                self.set(replacement, deprecated_attr.value)
 
     def log(self, level: str, message: str, **kwargs):
         """Custom Log method, we want to ensure ConfigLoader always logs JSON even when
```
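A note on the `check_deprecations` block added on the right-hand side above: each old `redis.*` key is popped out of the parsed config tree (pruning any dict left empty) and its value is re-set under the replacement path from `DEPRECATIONS`. A minimal standalone sketch of that remapping, using plain dicts instead of authentik's `Attr` wrapper and skipping the warning/event plumbing:

```python
# Sketch of the deprecation remap shown in the diff above; simplified
# (plain values instead of Attr objects, no warning/event logging).
DEPRECATIONS = {
    "redis.cache_timeout": "cache.timeout",
    "redis.cache_timeout_flows": "cache.timeout_flows",
}


def pop_path(root: dict, parts: list[str]):
    """Remove the value at the dotted path, pruning dicts that become empty."""
    if len(parts) == 1:
        return root.pop(parts[0])
    value = pop_path(root[parts[0]], parts[1:])
    if not root[parts[0]]:
        root.pop(parts[0])
    return value


def set_path(root: dict, parts: list[str], value) -> None:
    """Create intermediate dicts as needed and set the leaf value."""
    for part in parts[:-1]:
        root = root.setdefault(part, {})
    root[parts[-1]] = value


config = {"redis": {"cache_timeout": 300, "cache_timeout_flows": 300}}
for old, new in DEPRECATIONS.items():
    set_path(config, new.split("."), pop_path(config, old.split(".")))
print(config)  # {'cache': {'timeout': 300, 'timeout_flows': 300}}
```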
```diff
@@ -180,6 +239,10 @@ class ConfigLoader:
                 error=str(exc),
             )
 
+    def update_from_dict(self, update: dict):
+        """Update config from dict"""
+        self.__config.update(update)
+
     def update_from_env(self):
         """Check environment variables"""
         outer = {}
```
```diff
@@ -188,19 +251,13 @@ class ConfigLoader:
             if not key.startswith(ENV_PREFIX):
                 continue
             relative_key = key.replace(f"{ENV_PREFIX}_", "", 1).replace("__", ".").lower()
-            # Recursively convert path from a.b.c into outer[a][b][c]
-            current_obj = outer
-            dot_parts = relative_key.split(".")
-            for dot_part in dot_parts[:-1]:
-                if dot_part not in current_obj:
-                    current_obj[dot_part] = {}
-                current_obj = current_obj[dot_part]
             # Check if the value is json, and try to load it
             try:
                 value = loads(value)
             except JSONDecodeError:
                 pass
-            current_obj[dot_parts[-1]] = Attr(value, Attr.Source.ENV, key)
+            attr_value = Attr(value, Attr.Source.ENV, relative_key)
+            set_path_in_dict(outer, relative_key, attr_value)
             idx += 1
         if idx > 0:
             self.log("debug", "Loaded environment variables", count=idx)
```
```diff
@@ -241,6 +298,23 @@ class ConfigLoader:
         """Wrapper for get that converts value into boolean"""
         return str(self.get(path, default)).lower() == "true"
 
+    def get_dict_from_b64_json(self, path: str, default=None) -> dict:
+        """Wrapper for get that converts value from Base64 encoded string into dictionary"""
+        config_value = self.get(path)
+        if config_value is None:
+            return {}
+        try:
+            b64decoded_str = base64.b64decode(config_value).decode("utf-8")
+            b64decoded_str = b64decoded_str.strip().lstrip("{").rstrip("}")
+            b64decoded_str = "{" + b64decoded_str + "}"
+            return json.loads(b64decoded_str)
+        except (JSONDecodeError, TypeError, ValueError) as exc:
+            self.log(
+                "warning",
+                f"Ignored invalid configuration for '{path}' due to exception: {str(exc)}",
+            )
+            return default if isinstance(default, dict) else {}
+
     def set(self, path: str, value: Any, sep="."):
         """Set value using same syntax as get()"""
         set_path_in_dict(self.raw, path, Attr(value), sep=sep)
```
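The `get_dict_from_b64_json` helper added above is deliberately lenient about the outer braces: it strips any leading `{` and trailing `}` and re-adds them before parsing, so both a full JSON object and a brace-less key/value body decode to the same dict (matching the tests later in this comparison). A rough standalone equivalent, with `dict_from_b64_json` as a hypothetical free-function name:

```python
import base64
import json


def dict_from_b64_json(config_value, default=None) -> dict:
    """Decode a Base64-encoded JSON object, tolerating missing outer braces."""
    if config_value is None:
        return {}
    try:
        decoded = base64.b64decode(config_value).decode("utf-8")
        decoded = decoded.strip().lstrip("{").rstrip("}")
        return json.loads("{" + decoded + "}")
    except (json.JSONDecodeError, TypeError, ValueError):
        return default if isinstance(default, dict) else {}


print(dict_from_b64_json(base64.b64encode(b' { "foo": "bar" } ')))  # {'foo': 'bar'}
print(dict_from_b64_json(base64.b64encode(b' "foo": "bar" ')))      # {'foo': 'bar'}
print(dict_from_b64_json("not base64"))                             # {}
```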
```diff
@@ -28,14 +28,28 @@ listen:
 redis:
   host: localhost
   port: 6379
+  db: 0
   username: ""
   password: ""
   tls: false
   tls_reqs: "none"
-  db: 0
-  cache_timeout: 300
-  cache_timeout_flows: 300
-  cache_timeout_policies: 300
-  cache_timeout_reputation: 300
+
+# broker:
+#   url: ""
+#   transport_options: ""
+
+cache:
+  # url: ""
+  timeout: 300
+  timeout_flows: 300
+  timeout_policies: 300
+  timeout_reputation: 300
+
+# channel:
+#   url: ""
+
+# result_backend:
+#   url: ""
 
 paths:
   media: ./media
```
```diff
@@ -1,20 +1,32 @@
 """Test config loader"""
+import base64
+from json import dumps
 from os import chmod, environ, unlink, write
 from tempfile import mkstemp
+from unittest import mock
 
 from django.conf import ImproperlyConfigured
 from django.test import TestCase
 
-from authentik.lib.config import ENV_PREFIX, ConfigLoader
+from authentik.lib.config import ENV_PREFIX, UNSET, Attr, AttrEncoder, ConfigLoader
 
 
 class TestConfig(TestCase):
     """Test config loader"""
 
+    check_deprecations_env_vars = {
+        ENV_PREFIX + "_REDIS__BROKER_URL": "redis://myredis:8327/43",
+        ENV_PREFIX + "_REDIS__BROKER_TRANSPORT_OPTIONS": "bWFzdGVybmFtZT1teW1hc3Rlcg==",
+        ENV_PREFIX + "_REDIS__CACHE_TIMEOUT": "124s",
+        ENV_PREFIX + "_REDIS__CACHE_TIMEOUT_FLOWS": "32m",
+        ENV_PREFIX + "_REDIS__CACHE_TIMEOUT_POLICIES": "3920ns",
+        ENV_PREFIX + "_REDIS__CACHE_TIMEOUT_REPUTATION": "298382us",
+    }
+
+    @mock.patch.dict(environ, {ENV_PREFIX + "_test__test": "bar"})
     def test_env(self):
         """Test simple instance"""
         config = ConfigLoader()
-        environ[ENV_PREFIX + "_test__test"] = "bar"
         config.update_from_env()
         self.assertEqual(config.get("test.test"), "bar")
 
```
```diff
@@ -27,12 +39,20 @@ class TestConfig(TestCase):
         self.assertEqual(config.get("foo.bar"), "baz")
         self.assertEqual(config.get("foo.bar"), "bar")
 
+    @mock.patch.dict(environ, {"foo": "bar"})
     def test_uri_env(self):
         """Test URI parsing (environment)"""
         config = ConfigLoader()
-        environ["foo"] = "bar"
-        self.assertEqual(config.parse_uri("env://foo").value, "bar")
-        self.assertEqual(config.parse_uri("env://foo?bar").value, "bar")
+        foo_uri = "env://foo"
+        foo_parsed = config.parse_uri(foo_uri)
+        self.assertEqual(foo_parsed.value, "bar")
+        self.assertEqual(foo_parsed.source_type, Attr.Source.URI)
+        self.assertEqual(foo_parsed.source, foo_uri)
+        foo_bar_uri = "env://foo?bar"
+        foo_bar_parsed = config.parse_uri(foo_bar_uri)
+        self.assertEqual(foo_bar_parsed.value, "bar")
+        self.assertEqual(foo_bar_parsed.source_type, Attr.Source.URI)
+        self.assertEqual(foo_bar_parsed.source, foo_bar_uri)
 
     def test_uri_file(self):
         """Test URI parsing (file load)"""
```
```diff
@@ -91,3 +111,60 @@ class TestConfig(TestCase):
         config = ConfigLoader()
         config.set("foo", "bar")
         self.assertEqual(config.get_int("foo", 1234), 1234)
+
+    def test_get_dict_from_b64_json(self):
+        """Test get_dict_from_b64_json"""
+        config = ConfigLoader()
+        test_value = ' { "foo": "bar" } '.encode("utf-8")
+        b64_value = base64.b64encode(test_value)
+        config.set("foo", b64_value)
+        self.assertEqual(config.get_dict_from_b64_json("foo"), {"foo": "bar"})
+
+    def test_get_dict_from_b64_json_missing_brackets(self):
+        """Test get_dict_from_b64_json with missing brackets"""
+        config = ConfigLoader()
+        test_value = ' "foo": "bar" '.encode("utf-8")
+        b64_value = base64.b64encode(test_value)
+        config.set("foo", b64_value)
+        self.assertEqual(config.get_dict_from_b64_json("foo"), {"foo": "bar"})
+
+    def test_get_dict_from_b64_json_invalid(self):
+        """Test get_dict_from_b64_json with invalid value"""
+        config = ConfigLoader()
+        config.set("foo", "bar")
+        self.assertEqual(config.get_dict_from_b64_json("foo"), {})
+
+    def test_attr_json_encoder(self):
+        """Test AttrEncoder"""
+        test_attr = Attr("foo", Attr.Source.ENV, "AUTHENTIK_REDIS__USERNAME")
+        json_attr = dumps(test_attr, indent=4, cls=AttrEncoder)
+        self.assertEqual(json_attr, '"foo"')
+
+    def test_attr_json_encoder_no_attr(self):
+        """Test AttrEncoder if no Attr is passed"""
+
+        class Test:
+            """Non Attr class"""
+
+        with self.assertRaises(TypeError):
+            test_obj = Test()
+            dumps(test_obj, indent=4, cls=AttrEncoder)
+
+    @mock.patch.dict(environ, check_deprecations_env_vars)
+    def test_check_deprecations(self):
+        """Test config key re-write for deprecated env vars"""
+        config = ConfigLoader()
+        config.update_from_env()
+        config.check_deprecations()
+        self.assertEqual(config.get("redis.broker_url", UNSET), UNSET)
+        self.assertEqual(config.get("redis.broker_transport_options", UNSET), UNSET)
+        self.assertEqual(config.get("redis.cache_timeout", UNSET), UNSET)
+        self.assertEqual(config.get("redis.cache_timeout_flows", UNSET), UNSET)
+        self.assertEqual(config.get("redis.cache_timeout_policies", UNSET), UNSET)
+        self.assertEqual(config.get("redis.cache_timeout_reputation", UNSET), UNSET)
+        self.assertEqual(config.get("broker.url"), "redis://myredis:8327/43")
+        self.assertEqual(config.get("broker.transport_options"), "bWFzdGVybmFtZT1teW1hc3Rlcg==")
+        self.assertEqual(config.get("cache.timeout"), "124s")
+        self.assertEqual(config.get("cache.timeout_flows"), "32m")
+        self.assertEqual(config.get("cache.timeout_policies"), "3920ns")
+        self.assertEqual(config.get("cache.timeout_reputation"), "298382us")
```
```diff
@@ -18,7 +18,7 @@ from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import PassiveSerializer, is_dict
 from authentik.core.models import Provider
 from authentik.outposts.api.service_connections import ServiceConnectionSerializer
-from authentik.outposts.apps import MANAGED_OUTPOST, MANAGED_OUTPOST_NAME
+from authentik.outposts.apps import MANAGED_OUTPOST
 from authentik.outposts.models import (
     Outpost,
     OutpostConfig,
```
```diff
@@ -47,16 +47,6 @@ class OutpostSerializer(ModelSerializer):
         source="service_connection", read_only=True
     )
 
-    def validate_name(self, name: str) -> str:
-        """Validate name (especially for embedded outpost)"""
-        if not self.instance:
-            return name
-        if self.instance.managed == MANAGED_OUTPOST and name != MANAGED_OUTPOST_NAME:
-            raise ValidationError("Embedded outpost's name cannot be changed")
-        if self.instance.name == MANAGED_OUTPOST_NAME:
-            self.instance.managed = MANAGED_OUTPOST
-        return name
-
     def validate_providers(self, providers: list[Provider]) -> list[Provider]:
         """Check that all providers match the type of the outpost"""
         type_map = {
```
```diff
@@ -15,7 +15,6 @@ GAUGE_OUTPOSTS_LAST_UPDATE = Gauge(
     ["outpost", "uid", "version"],
 )
 MANAGED_OUTPOST = "goauthentik.io/outposts/embedded"
-MANAGED_OUTPOST_NAME = "authentik Embedded Outpost"
 
 
 class AuthentikOutpostConfig(ManagedAppConfig):
```
```diff
@@ -36,17 +35,14 @@ class AuthentikOutpostConfig(ManagedAppConfig):
             DockerServiceConnection,
             KubernetesServiceConnection,
             Outpost,
             OutpostConfig,
             OutpostType,
         )
 
-        if outpost := Outpost.objects.filter(name=MANAGED_OUTPOST_NAME, managed="").first():
-            outpost.managed = MANAGED_OUTPOST
-            outpost.save()
-            return
         outpost, updated = Outpost.objects.update_or_create(
             defaults={
+                "name": "authentik Embedded Outpost",
                 "type": OutpostType.PROXY,
-                "name": MANAGED_OUTPOST_NAME,
             },
             managed=MANAGED_OUTPOST,
         )
```
```diff
@@ -55,4 +51,10 @@ class AuthentikOutpostConfig(ManagedAppConfig):
                 outpost.service_connection = KubernetesServiceConnection.objects.first()
             elif DockerServiceConnection.objects.exists():
                 outpost.service_connection = DockerServiceConnection.objects.first()
+            outpost.config = OutpostConfig(
+                kubernetes_disabled_components=[
+                    "deployment",
+                    "secret",
+                ]
+            )
             outpost.save()
```
```diff
@@ -93,7 +93,7 @@ class OutpostConsumer(AuthJsonConsumer):
                 expected=self.outpost.config.kubernetes_replicas,
             ).dec()
 
-    def receive_json(self, content: Data):
+    def receive_json(self, content: Data, **kwargs):
         msg = from_dict(WebsocketMessage, content)
         uid = msg.args.get("uuid", self.channel_name)
         self.last_uid = uid
```
```diff
@@ -43,10 +43,6 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
         self.api = AppsV1Api(controller.client)
         self.outpost = self.controller.outpost
 
-    @property
-    def noop(self) -> bool:
-        return self.is_embedded
-
     @staticmethod
     def reconciler_name() -> str:
         return "deployment"
```
```diff
@@ -24,10 +24,6 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
         super().__init__(controller)
         self.api = CoreV1Api(controller.client)
 
-    @property
-    def noop(self) -> bool:
-        return self.is_embedded
-
     @staticmethod
     def reconciler_name() -> str:
         return "secret"
```
```diff
@@ -77,10 +77,7 @@ class PrometheusServiceMonitorReconciler(KubernetesObjectReconciler[PrometheusSe
 
     @property
     def noop(self) -> bool:
-        if not self._crd_exists():
-            self.logger.debug("CRD doesn't exist")
-            return True
-        return self.is_embedded
+        return (not self._crd_exists()) or (self.is_embedded)
 
     def _crd_exists(self) -> bool:
         """Check if the Prometheus ServiceMonitor exists"""
```
```diff
@@ -344,21 +344,11 @@ class Outpost(SerializerModel, ManagedModel):
         user_created = False
         if not user:
             user: User = User.objects.create(username=self.user_identifier)
-            user_created = True
-        attrs = {
-            "type": UserTypes.INTERNAL_SERVICE_ACCOUNT,
-            "name": f"Outpost {self.name} Service-Account",
-            "path": USER_PATH_OUTPOSTS,
-        }
-        dirty = False
-        for key, value in attrs.items():
-            if getattr(user, key) != value:
-                dirty = True
-                setattr(user, key, value)
-        if user.has_usable_password():
-            user.set_unusable_password()
-            dirty = True
-        if dirty:
+            user_created = True
+        user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
+        user.name = f"Outpost {self.name} Service-Account"
+        user.path = USER_PATH_OUTPOSTS
         user.save()
         if user_created:
             self.build_user_permissions(user)
```
```diff
@@ -2,13 +2,11 @@
 from django.urls import reverse
 from rest_framework.test import APITestCase
 
-from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import PropertyMapping
 from authentik.core.tests.utils import create_test_admin_user, create_test_flow
 from authentik.lib.generators import generate_id
 from authentik.outposts.api.outposts import OutpostSerializer
-from authentik.outposts.apps import MANAGED_OUTPOST
-from authentik.outposts.models import Outpost, OutpostType, default_outpost_config
+from authentik.outposts.models import OutpostType, default_outpost_config
 from authentik.providers.ldap.models import LDAPProvider
 from authentik.providers.proxy.models import ProxyProvider
 
```
```diff
@@ -24,36 +22,7 @@ class TestOutpostServiceConnectionsAPI(APITestCase):
         self.user = create_test_admin_user()
         self.client.force_login(self.user)
 
-    @reconcile_app("authentik_outposts")
-    def test_managed_name_change(self):
-        """Test name change for embedded outpost"""
-        embedded_outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
-        self.assertIsNotNone(embedded_outpost)
-        response = self.client.patch(
-            reverse("authentik_api:outpost-detail", kwargs={"pk": embedded_outpost.pk}),
-            {"name": "foo"},
-        )
-        self.assertEqual(response.status_code, 400)
-        self.assertJSONEqual(
-            response.content, {"name": ["Embedded outpost's name cannot be changed"]}
-        )
-
-    @reconcile_app("authentik_outposts")
-    def test_managed_without_managed(self):
-        """Test name change for embedded outpost"""
-        embedded_outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
-        self.assertIsNotNone(embedded_outpost)
-        embedded_outpost.managed = ""
-        embedded_outpost.save()
-        response = self.client.patch(
-            reverse("authentik_api:outpost-detail", kwargs={"pk": embedded_outpost.pk}),
-            {"name": "foo"},
-        )
-        self.assertEqual(response.status_code, 200)
-        embedded_outpost.refresh_from_db()
-        self.assertEqual(embedded_outpost.managed, MANAGED_OUTPOST)
-
-    def test_outpost_validation(self):
+    def test_outpost_validaton(self):
         """Test Outpost validation"""
         valid = OutpostSerializer(
             data={
```
```diff
@@ -20,7 +20,7 @@ from authentik.policies.types import CACHE_PREFIX, PolicyRequest, PolicyResult
 LOGGER = get_logger()
 
 FORK_CTX = get_context("fork")
-CACHE_TIMEOUT = CONFIG.get_int("redis.cache_timeout_policies")
+CACHE_TIMEOUT = CONFIG.get_int("cache.timeout_policies")
 PROCESS_CLASS = FORK_CTX.Process
 
 
```
```diff
@@ -13,7 +13,7 @@ from authentik.policies.reputation.tasks import save_reputation
 from authentik.stages.identification.signals import identification_failed
 
 LOGGER = get_logger()
-CACHE_TIMEOUT = CONFIG.get_int("redis.cache_timeout_reputation")
+CACHE_TIMEOUT = CONFIG.get_int("cache.timeout_reputation")
 
 
 def update_score(request: HttpRequest, identifier: str, amount: int):
```
```diff
@@ -1,27 +0,0 @@
-# Generated by Django 5.0 on 2023-12-22 23:20
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("authentik_providers_oauth2", "0016_alter_refreshtoken_token"),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name="accesstoken",
-            name="session_id",
-            field=models.CharField(blank=True, default=""),
-        ),
-        migrations.AddField(
-            model_name="authorizationcode",
-            name="session_id",
-            field=models.CharField(blank=True, default=""),
-        ),
-        migrations.AddField(
-            model_name="refreshtoken",
-            name="session_id",
-            field=models.CharField(blank=True, default=""),
-        ),
-    ]
```
```diff
@@ -296,7 +296,6 @@ class BaseGrantModel(models.Model):
     revoked = models.BooleanField(default=False)
     _scope = models.TextField(default="", verbose_name=_("Scopes"))
     auth_time = models.DateTimeField(verbose_name="Authentication time")
-    session_id = models.CharField(default="", blank=True)
 
     @property
     def scope(self) -> list[str]:
```
```diff
@@ -85,25 +85,6 @@ class TestAuthorize(OAuthTestCase):
         )
         OAuthAuthorizationParams.from_request(request)
 
-    def test_blocked_redirect_uri(self):
-        """test missing/invalid redirect URI"""
-        OAuth2Provider.objects.create(
-            name=generate_id(),
-            client_id="test",
-            authorization_flow=create_test_flow(),
-            redirect_uris="data:local.invalid",
-        )
-        with self.assertRaises(RedirectUriError):
-            request = self.factory.get(
-                "/",
-                data={
-                    "response_type": "code",
-                    "client_id": "test",
-                    "redirect_uri": "data:localhost",
-                },
-            )
-            OAuthAuthorizationParams.from_request(request)
-
     def test_invalid_redirect_uri_empty(self):
         """test missing/invalid redirect URI"""
         provider = OAuth2Provider.objects.create(
```
@ -1,187 +0,0 @@
|
|||
"""Test token view"""
|
||||
from base64 import b64encode, urlsafe_b64encode
|
||||
from hashlib import sha256
|
||||
|
||||
from django.test import RequestFactory
|
||||
from django.urls import reverse
|
||||
|
||||
from authentik.core.models import Application
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
|
||||
from authentik.flows.challenge import ChallengeTypes
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.providers.oauth2.constants import GRANT_TYPE_AUTHORIZATION_CODE
|
||||
from authentik.providers.oauth2.models import AuthorizationCode, OAuth2Provider
|
||||
from authentik.providers.oauth2.tests.utils import OAuthTestCase
|
||||
|
||||
|
||||
class TestTokenPKCE(OAuthTestCase):
|
||||
"""Test token view"""
|
||||
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.factory = RequestFactory()
|
||||
self.app = Application.objects.create(name=generate_id(), slug="test")
|
||||
|
||||
def test_pkce_missing_in_token(self):
|
||||
"""Test full with pkce"""
|
||||
flow = create_test_flow()
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(),
|
||||
client_id="test",
|
||||
authorization_flow=flow,
|
||||
redirect_uris="foo://localhost",
|
||||
access_code_validity="seconds=100",
|
||||
)
|
||||
Application.objects.create(name="app", slug="app", provider=provider)
|
||||
state = generate_id()
|
||||
user = create_test_admin_user()
|
||||
self.client.force_login(user)
|
||||
challenge = generate_id()
|
||||
header = b64encode(f"{provider.client_id}:{provider.client_secret}".encode()).decode()
|
||||
# Step 1, initiate params and get redirect to flow
|
||||
self.client.get(
|
||||
reverse("authentik_providers_oauth2:authorize"),
|
||||
data={
|
||||
"response_type": "code",
|
||||
"client_id": "test",
|
||||
"state": state,
|
||||
"redirect_uri": "foo://localhost",
|
||||
"code_challenge": challenge,
|
||||
"code_challenge_method": "S256",
|
||||
},
|
||||
)
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
|
||||
)
|
||||
code: AuthorizationCode = AuthorizationCode.objects.filter(user=user).first()
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
{
|
||||
"component": "xak-flow-redirect",
|
||||
"type": ChallengeTypes.REDIRECT.value,
|
||||
"to": f"foo://localhost?code={code.code}&state={state}",
|
||||
},
|
||||
)
|
||||
response = self.client.post(
|
||||
reverse("authentik_providers_oauth2:token"),
|
||||
data={
|
||||
"grant_type": GRANT_TYPE_AUTHORIZATION_CODE,
|
||||
"code": code.code,
|
||||
# Missing the code_verifier here
|
||||
"redirect_uri": "foo://localhost",
|
||||
},
|
||||
HTTP_AUTHORIZATION=f"Basic {header}",
|
||||
)
|
||||
self.assertJSONEqual(
|
||||
response.content,
|
||||
{"error": "invalid_request", "error_description": "The request is otherwise malformed"},
|
||||
)
|
        self.assertEqual(response.status_code, 400)

    def test_pkce_correct_s256(self):
        """Test full with pkce"""
        flow = create_test_flow()
        provider = OAuth2Provider.objects.create(
            name=generate_id(),
            client_id="test",
            authorization_flow=flow,
            redirect_uris="foo://localhost",
            access_code_validity="seconds=100",
        )
        Application.objects.create(name="app", slug="app", provider=provider)
        state = generate_id()
        user = create_test_admin_user()
        self.client.force_login(user)
        verifier = generate_id()
        challenge = (
            urlsafe_b64encode(sha256(verifier.encode("ascii")).digest())
            .decode("utf-8")
            .replace("=", "")
        )
        header = b64encode(f"{provider.client_id}:{provider.client_secret}".encode()).decode()
        # Step 1, initiate params and get redirect to flow
        self.client.get(
            reverse("authentik_providers_oauth2:authorize"),
            data={
                "response_type": "code",
                "client_id": "test",
                "state": state,
                "redirect_uri": "foo://localhost",
                "code_challenge": challenge,
                "code_challenge_method": "S256",
            },
        )
        response = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
        )
        code: AuthorizationCode = AuthorizationCode.objects.filter(user=user).first()
        self.assertJSONEqual(
            response.content.decode(),
            {
                "component": "xak-flow-redirect",
                "type": ChallengeTypes.REDIRECT.value,
                "to": f"foo://localhost?code={code.code}&state={state}",
            },
        )
        response = self.client.post(
            reverse("authentik_providers_oauth2:token"),
            data={
                "grant_type": GRANT_TYPE_AUTHORIZATION_CODE,
                "code": code.code,
                "code_verifier": verifier,
                "redirect_uri": "foo://localhost",
            },
            HTTP_AUTHORIZATION=f"Basic {header}",
        )
        self.assertEqual(response.status_code, 200)

    def test_pkce_correct_plain(self):
        """Test full with pkce"""
        flow = create_test_flow()
        provider = OAuth2Provider.objects.create(
            name=generate_id(),
            client_id="test",
            authorization_flow=flow,
            redirect_uris="foo://localhost",
            access_code_validity="seconds=100",
        )
        Application.objects.create(name="app", slug="app", provider=provider)
        state = generate_id()
        user = create_test_admin_user()
        self.client.force_login(user)
        verifier = generate_id()
        header = b64encode(f"{provider.client_id}:{provider.client_secret}".encode()).decode()
        # Step 1, initiate params and get redirect to flow
        self.client.get(
            reverse("authentik_providers_oauth2:authorize"),
            data={
                "response_type": "code",
                "client_id": "test",
                "state": state,
                "redirect_uri": "foo://localhost",
                "code_challenge": verifier,
            },
        )
        response = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
        )
        code: AuthorizationCode = AuthorizationCode.objects.filter(user=user).first()
        self.assertJSONEqual(
            response.content.decode(),
            {
                "component": "xak-flow-redirect",
                "type": ChallengeTypes.REDIRECT.value,
                "to": f"foo://localhost?code={code.code}&state={state}",
            },
        )
        response = self.client.post(
            reverse("authentik_providers_oauth2:token"),
            data={
                "grant_type": GRANT_TYPE_AUTHORIZATION_CODE,
                "code": code.code,
                "code_verifier": verifier,
                "redirect_uri": "foo://localhost",
            },
            HTTP_AUTHORIZATION=f"Basic {header}",
        )
        self.assertEqual(response.status_code, 200)
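Note on the two tests above: the S256 variant derives the code_challenge from the code_verifier exactly as RFC 7636 specifies, while the plain variant sends the verifier itself as the challenge. A minimal standalone sketch of the S256 derivation (function name is illustrative, not part of this change):

from base64 import urlsafe_b64encode
from hashlib import sha256


def s256_challenge(verifier: str) -> str:
    """Derive a PKCE S256 code_challenge from a code_verifier (RFC 7636)."""
    digest = sha256(verifier.encode("ascii")).digest()
    # base64url-encode, then strip the "=" padding, matching the test above
    return urlsafe_b64encode(digest).decode("utf-8").replace("=", "")


# The token endpoint recomputes this from the submitted code_verifier and
# compares it to the stored challenge; a 32-byte digest always yields 43 chars.
assert len(s256_challenge("example-verifier")) == 43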
@@ -1,7 +1,6 @@
"""authentik OAuth2 Authorization views"""
from dataclasses import dataclass, field
from datetime import timedelta
from hashlib import sha256
from json import dumps
from re import error as RegexError
from re import fullmatch

@@ -75,7 +74,6 @@ PLAN_CONTEXT_PARAMS = "goauthentik.io/providers/oauth2/params"
SESSION_KEY_LAST_LOGIN_UID = "authentik/providers/oauth2/last_login_uid"

ALLOWED_PROMPT_PARAMS = {PROMPT_NONE, PROMPT_CONSENT, PROMPT_LOGIN}
FORBIDDEN_URI_SCHEMES = {"javascript", "data", "vbscript"}


@dataclass(slots=True)

@@ -176,10 +174,6 @@ class OAuthAuthorizationParams:
        self.check_scope()
        self.check_nonce()
        self.check_code_challenge()
        if self.request:
            raise AuthorizeError(
                self.redirect_uri, "request_not_supported", self.grant_type, self.state
            )

    def check_redirect_uri(self):
        """Redirect URI validation."""

@@ -217,9 +211,10 @@ class OAuthAuthorizationParams:
                expected=allowed_redirect_urls,
            )
            raise RedirectUriError(self.redirect_uri, allowed_redirect_urls)
        # Check against forbidden schemes
        if urlparse(self.redirect_uri).scheme in FORBIDDEN_URI_SCHEMES:
            raise RedirectUriError(self.redirect_uri, allowed_redirect_urls)
        if self.request:
            raise AuthorizeError(
                self.redirect_uri, "request_not_supported", self.grant_type, self.state
            )

    def check_scope(self):
        """Ensure openid scope is set in Hybrid flows, or when requesting an id_token"""

@@ -287,7 +282,6 @@ class OAuthAuthorizationParams:
            expires=now + timedelta_from_string(self.provider.access_code_validity),
            scope=self.scope,
            nonce=self.nonce,
            session_id=sha256(request.session.session_key.encode("ascii")).hexdigest(),
        )

        if self.code_challenge and self.code_challenge_method:

@@ -575,7 +569,6 @@ class OAuthFulfillmentStage(StageView):
            expires=access_token_expiry,
            provider=self.provider,
            auth_time=auth_event.created if auth_event else now,
            session_id=sha256(self.request.session.session_key.encode("ascii")).hexdigest(),
        )

        id_token = IDToken.new(self.provider, token, self.request)
@@ -6,7 +6,6 @@ from hashlib import sha256
from re import error as RegexError
from re import fullmatch
from typing import Any, Optional
from urllib.parse import urlparse

from django.http import HttpRequest, HttpResponse
from django.utils import timezone

@@ -55,7 +54,6 @@ from authentik.providers.oauth2.models import (
    RefreshToken,
)
from authentik.providers.oauth2.utils import TokenResponse, cors_allow, extract_client_auth
from authentik.providers.oauth2.views.authorize import FORBIDDEN_URI_SCHEMES
from authentik.sources.oauth.models import OAuthSource
from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS

@@ -207,10 +205,6 @@ class TokenParams:
                ).from_http(request)
                raise TokenError("invalid_client")

        # Check against forbidden schemes
        if urlparse(self.redirect_uri).scheme in FORBIDDEN_URI_SCHEMES:
            raise TokenError("invalid_request")

        self.authorization_code = AuthorizationCode.objects.filter(code=raw_code).first()
        if not self.authorization_code:
            LOGGER.warning("Code does not exist", code=raw_code)

@@ -228,10 +222,7 @@ class TokenParams:
            raise TokenError("invalid_grant")

        # Validate PKCE parameters.
        if self.authorization_code.code_challenge:
            # Authorization code had PKCE but we didn't get one
            if not self.code_verifier:
                raise TokenError("invalid_request")
        if self.code_verifier:
            if self.authorization_code.code_challenge_method == PKCE_METHOD_S256:
                new_code_challenge = (
                    urlsafe_b64encode(sha256(self.code_verifier.encode("ascii")).digest())

@@ -493,7 +484,6 @@ class TokenView(View):
            # Keep same scopes as previous token
            scope=self.params.authorization_code.scope,
            auth_time=self.params.authorization_code.auth_time,
            session_id=self.params.authorization_code.session_id,
        )
        access_token.id_token = IDToken.new(
            self.provider,

@@ -509,7 +499,6 @@ class TokenView(View):
            expires=refresh_token_expiry,
            provider=self.provider,
            auth_time=self.params.authorization_code.auth_time,
            session_id=self.params.authorization_code.session_id,
        )
        id_token = IDToken.new(
            self.provider,

@@ -547,7 +536,6 @@ class TokenView(View):
            # Keep same scopes as previous token
            scope=self.params.refresh_token.scope,
            auth_time=self.params.refresh_token.auth_time,
            session_id=self.params.refresh_token.session_id,
        )
        access_token.id_token = IDToken.new(
            self.provider,

@@ -563,7 +551,6 @@ class TokenView(View):
            expires=refresh_token_expiry,
            provider=self.provider,
            auth_time=self.params.refresh_token.auth_time,
            session_id=self.params.refresh_token.session_id,
        )
        id_token = IDToken.new(
            self.provider,
@@ -1,6 +1,4 @@
"""proxy provider tasks"""
from hashlib import sha256

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from django.db import DatabaseError, InternalError, ProgrammingError

@@ -25,7 +23,6 @@ def proxy_set_defaults():
def proxy_on_logout(session_id: str):
    """Update outpost instances connected to a single outpost"""
    layer = get_channel_layer()
    hashed_session_id = sha256(session_id.encode("ascii")).hexdigest()
    for outpost in Outpost.objects.filter(type=OutpostType.PROXY):
        group = OUTPOST_GROUP % {"outpost_pk": str(outpost.pk)}
        async_to_sync(layer.group_send)(

@@ -33,6 +30,6 @@ def proxy_on_logout(session_id: str):
            {
                "type": "event.provider.specific",
                "sub_type": "logout",
                "session_id": hashed_session_id,
                "session_id": session_id,
            },
        )
@@ -46,9 +46,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):

    def to_scim(self, obj: Group) -> SCIMGroupSchema:
        """Convert authentik user into SCIM"""
        raw_scim_group = {
            "schemas": ("urn:ietf:params:scim:schemas:core:2.0:Group",),
        }
        raw_scim_group = {}
        for mapping in (
            self.provider.property_mappings_group.all().order_by("name").select_subclasses()
        ):
@@ -15,14 +15,12 @@ from pydanticscim.user import User as BaseUser
class User(BaseUser):
    """Modified User schema with added externalId field"""

    schemas: tuple[str] = ("urn:ietf:params:scim:schemas:core:2.0:User",)
    externalId: Optional[str] = None


class Group(BaseGroup):
    """Modified Group schema with added externalId field"""

    schemas: tuple[str] = ("urn:ietf:params:scim:schemas:core:2.0:Group",)
    externalId: Optional[str] = None

@@ -39,9 +39,7 @@ class SCIMUserClient(SCIMClient[User, SCIMUserSchema]):

    def to_scim(self, obj: User) -> SCIMUserSchema:
        """Convert authentik user into SCIM"""
        raw_scim_user = {
            "schemas": ("urn:ietf:params:scim:schemas:core:2.0:User",),
        }
        raw_scim_user = {}
        for mapping in self.provider.property_mappings.all().order_by("name").select_subclasses():
            if not isinstance(mapping, SCIMMapping):
                continue
@@ -61,11 +61,7 @@ class SCIMGroupTests(TestCase):
        self.assertEqual(mock.request_history[1].method, "POST")
        self.assertJSONEqual(
            mock.request_history[1].body,
            {
                "schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
                "externalId": str(group.pk),
                "displayName": group.name,
            },
            {"externalId": str(group.pk), "displayName": group.name},
        )

    @Mocker()

@@ -100,11 +96,7 @@ class SCIMGroupTests(TestCase):
            validate(body, loads(schema.read()))
        self.assertEqual(
            body,
            {
                "schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
                "externalId": str(group.pk),
                "displayName": group.name,
            },
            {"externalId": str(group.pk), "displayName": group.name},
        )
        group.save()
        self.assertEqual(mock.call_count, 4)

@@ -137,11 +129,7 @@ class SCIMGroupTests(TestCase):
        self.assertEqual(mock.request_history[1].method, "POST")
        self.assertJSONEqual(
            mock.request_history[1].body,
            {
                "schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
                "externalId": str(group.pk),
                "displayName": group.name,
            },
            {"externalId": str(group.pk), "displayName": group.name},
        )
        group.delete()
        self.assertEqual(mock.call_count, 4)
@@ -89,7 +89,6 @@ class SCIMMembershipTests(TestCase):
            self.assertJSONEqual(
                mocker.request_history[3].body,
                {
                    "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
                    "emails": [],
                    "active": True,
                    "externalId": user.uid,

@@ -100,11 +99,7 @@ class SCIMMembershipTests(TestCase):
            )
            self.assertJSONEqual(
                mocker.request_history[5].body,
                {
                    "schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
                    "externalId": str(group.pk),
                    "displayName": group.name,
                },
                {"externalId": str(group.pk), "displayName": group.name},
            )

        with Mocker() as mocker:

@@ -123,7 +118,6 @@ class SCIMMembershipTests(TestCase):
            self.assertJSONEqual(
                mocker.request_history[1].body,
                {
                    "schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
                    "Operations": [
                        {
                            "op": "add",

@@ -131,6 +125,7 @@ class SCIMMembershipTests(TestCase):
                            "value": [{"value": user_scim_id}],
                        }
                    ],
                    "schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
                },
            )

@@ -179,7 +174,6 @@ class SCIMMembershipTests(TestCase):
            self.assertJSONEqual(
                mocker.request_history[3].body,
                {
                    "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
                    "active": True,
                    "displayName": "",
                    "emails": [],

@@ -190,11 +184,7 @@ class SCIMMembershipTests(TestCase):
            )
            self.assertJSONEqual(
                mocker.request_history[5].body,
                {
                    "schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
                    "externalId": str(group.pk),
                    "displayName": group.name,
                },
                {"externalId": str(group.pk), "displayName": group.name},
            )

        with Mocker() as mocker:

@@ -213,7 +203,6 @@ class SCIMMembershipTests(TestCase):
            self.assertJSONEqual(
                mocker.request_history[1].body,
                {
                    "schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
                    "Operations": [
                        {
                            "op": "add",

@@ -221,6 +210,7 @@ class SCIMMembershipTests(TestCase):
                            "value": [{"value": user_scim_id}],
                        }
                    ],
                    "schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
                },
            )

@@ -240,7 +230,6 @@ class SCIMMembershipTests(TestCase):
            self.assertJSONEqual(
                mocker.request_history[1].body,
                {
                    "schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
                    "Operations": [
                        {
                            "op": "remove",

@@ -248,5 +237,6 @@ class SCIMMembershipTests(TestCase):
                            "value": [{"value": user_scim_id}],
                        }
                    ],
                    "schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
                },
            )
@@ -57,7 +57,7 @@ class SCIMUserTests(TestCase):
        uid = generate_id()
        user = User.objects.create(
            username=uid,
            name=f"{uid} {uid}",
            name=uid,
            email=f"{uid}@goauthentik.io",
        )
        self.assertEqual(mock.call_count, 2)

@@ -66,7 +66,6 @@ class SCIMUserTests(TestCase):
        self.assertJSONEqual(
            mock.request_history[1].body,
            {
                "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
                "active": True,
                "emails": [
                    {

@@ -77,11 +76,11 @@ class SCIMUserTests(TestCase):
                ],
                "externalId": user.uid,
                "name": {
                    "familyName": uid,
                    "formatted": f"{uid} {uid}",
                    "familyName": "",
                    "formatted": uid,
                    "givenName": uid,
                },
                "displayName": f"{uid} {uid}",
                "displayName": uid,
                "userName": uid,
            },
        )

@@ -110,7 +109,7 @@ class SCIMUserTests(TestCase):
        uid = generate_id()
        user = User.objects.create(
            username=uid,
            name=f"{uid} {uid}",
            name=uid,
            email=f"{uid}@goauthentik.io",
        )
        self.assertEqual(mock.call_count, 2)

@@ -122,7 +121,6 @@ class SCIMUserTests(TestCase):
        self.assertEqual(
            body,
            {
                "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
                "active": True,
                "emails": [
                    {

@@ -131,11 +129,11 @@ class SCIMUserTests(TestCase):
                        "value": f"{uid}@goauthentik.io",
                    }
                ],
                "displayName": f"{uid} {uid}",
                "displayName": uid,
                "externalId": user.uid,
                "name": {
                    "familyName": uid,
                    "formatted": f"{uid} {uid}",
                    "familyName": "",
                    "formatted": uid,
                    "givenName": uid,
                },
                "userName": uid,

@@ -166,7 +164,7 @@ class SCIMUserTests(TestCase):
        uid = generate_id()
        user = User.objects.create(
            username=uid,
            name=f"{uid} {uid}",
            name=uid,
            email=f"{uid}@goauthentik.io",
        )
        self.assertEqual(mock.call_count, 2)

@@ -175,7 +173,6 @@ class SCIMUserTests(TestCase):
        self.assertJSONEqual(
            mock.request_history[1].body,
            {
                "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
                "active": True,
                "emails": [
                    {

@@ -186,11 +183,11 @@ class SCIMUserTests(TestCase):
                ],
                "externalId": user.uid,
                "name": {
                    "familyName": uid,
                    "formatted": f"{uid} {uid}",
                    "familyName": "",
                    "formatted": uid,
                    "givenName": uid,
                },
                "displayName": f"{uid} {uid}",
                "displayName": uid,
                "userName": uid,
            },
        )

@@ -230,7 +227,7 @@ class SCIMUserTests(TestCase):
        )
        user = User.objects.create(
            username=uid,
            name=f"{uid} {uid}",
            name=uid,
            email=f"{uid}@goauthentik.io",
        )

@@ -243,7 +240,6 @@ class SCIMUserTests(TestCase):
        self.assertJSONEqual(
            mock.request_history[1].body,
            {
                "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
                "active": True,
                "emails": [
                    {

@@ -254,11 +250,11 @@ class SCIMUserTests(TestCase):
                ],
                "externalId": user.uid,
                "name": {
                    "familyName": uid,
                    "formatted": f"{uid} {uid}",
                    "familyName": "",
                    "formatted": uid,
                    "givenName": uid,
                },
                "displayName": f"{uid} {uid}",
                "displayName": uid,
                "userName": uid,
            },
        )
@@ -24,10 +24,7 @@ class ExtraRoleObjectPermissionSerializer(RoleObjectPermissionSerializer):

    def get_app_label_verbose(self, instance: GroupObjectPermission) -> str:
        """Get app label from permission's model"""
        try:
            return apps.get_app_config(instance.content_type.app_label).verbose_name
        except LookupError:
            return instance.content_type.app_label

    def get_model_verbose(self, instance: GroupObjectPermission) -> str:
        """Get model label from permission's model"""
@@ -24,10 +24,7 @@ class ExtraUserObjectPermissionSerializer(UserObjectPermissionSerializer):

    def get_app_label_verbose(self, instance: UserObjectPermission) -> str:
        """Get app label from permission's model"""
        try:
            return apps.get_app_config(instance.content_type.app_label).verbose_name
        except LookupError:
            return instance.content_type.app_label

    def get_model_verbose(self, instance: UserObjectPermission) -> str:
        """Get model label from permission's model"""
@@ -1,5 +1,4 @@
"""root settings for authentik"""

import importlib
import os
from hashlib import sha512

@@ -195,8 +194,8 @@ _redis_url = (
CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": f"{_redis_url}/{CONFIG.get('redis.db')}",
        "TIMEOUT": CONFIG.get_int("redis.cache_timeout", 300),
        "LOCATION": CONFIG.get("cache.url") or f"{_redis_url}/{CONFIG.get('redis.db')}",
        "TIMEOUT": CONFIG.get_int("cache.timeout", 300),
        "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
        "KEY_PREFIX": "authentik_cache",
    }

@@ -256,7 +255,7 @@ CHANNEL_LAYERS = {
    "default": {
        "BACKEND": "channels_redis.pubsub.RedisPubSubChannelLayer",
        "CONFIG": {
            "hosts": [f"{_redis_url}/{CONFIG.get('redis.db')}"],
            "hosts": [CONFIG.get("channel.url", f"{_redis_url}/{CONFIG.get('redis.db')}")],
            "prefix": "authentik_channels_",
        },
    },

@@ -349,8 +348,11 @@ CELERY = {
    },
    "task_create_missing_queues": True,
    "task_default_queue": "authentik",
    "broker_url": f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
    "result_backend": f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
    "broker_url": CONFIG.get("broker.url")
    or f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
    "broker_transport_options": CONFIG.get_dict_from_b64_json("broker.transport_options"),
    "result_backend": CONFIG.get("result_backend.url")
    or f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
}

# Sentry integration
@@ -0,0 +1,40 @@
"""Source API Views"""
from django_filters.filters import AllValuesMultipleFilter
from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework.viewsets import ModelViewSet

from authentik.core.api.propertymappings import PropertyMappingSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.sources.ldap.models import LDAPPropertyMapping


class LDAPPropertyMappingSerializer(PropertyMappingSerializer):
    """LDAP PropertyMapping Serializer"""

    class Meta:
        model = LDAPPropertyMapping
        fields = PropertyMappingSerializer.Meta.fields + [
            "object_field",
        ]


class LDAPPropertyMappingFilter(FilterSet):
    """Filter for LDAPPropertyMapping"""

    managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))

    class Meta:
        model = LDAPPropertyMapping
        fields = "__all__"


class LDAPPropertyMappingViewSet(UsedByMixin, ModelViewSet):
    """LDAP PropertyMapping Viewset"""

    queryset = LDAPPropertyMapping.objects.all()
    serializer_class = LDAPPropertyMappingSerializer
    filterset_class = LDAPPropertyMappingFilter
    search_fields = ["name"]
    ordering = ["name"]
@@ -1,31 +1,30 @@
"""Source API Views"""
from typing import Any
from typing import Any, Optional

from django_filters.filters import AllValuesMultipleFilter
from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema, extend_schema_field, inline_serializer
from django.core.cache import cache
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import DictField, ListField
from rest_framework.fields import BooleanField, DictField, ListField, SerializerMethodField
from rest_framework.relations import PrimaryKeyRelatedField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet

from authentik.admin.api.tasks import TaskSerializer
from authentik.core.api.propertymappings import PropertyMappingSerializer
from authentik.core.api.sources import SourceSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
from authentik.crypto.models import CertificateKeyPair
from authentik.events.monitored_tasks import TaskInfo
from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
from authentik.sources.ldap.tasks import SYNC_CLASSES
from authentik.sources.ldap.models import LDAPSource
from authentik.sources.ldap.tasks import CACHE_KEY_STATUS, SYNC_CLASSES, ldap_sync_single


class LDAPSourceSerializer(SourceSerializer):
    """LDAP Source Serializer"""

    connectivity = SerializerMethodField()
    client_certificate = PrimaryKeyRelatedField(
        allow_null=True,
        help_text="Client certificate to authenticate against the LDAP Server's Certificate.",

@@ -35,6 +34,10 @@ class LDAPSourceSerializer(SourceSerializer):
        required=False,
    )

    def get_connectivity(self, source: LDAPSource) -> Optional[dict[str, dict[str, str]]]:
        """Get cached source connectivity"""
        return cache.get(CACHE_KEY_STATUS + source.slug, None)

    def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
        """Check that only a single source has password_sync on"""
        sync_users_password = attrs.get("sync_users_password", True)

@@ -52,6 +55,20 @@ class LDAPSourceSerializer(SourceSerializer):
            )
        return super().validate(attrs)

    def create(self, validated_data) -> LDAPSource:
        # Create both creates the actual model and assigns m2m fields
        instance: LDAPSource = super().create(validated_data)
        if not instance.enabled:
            return instance
        # Don't sync sources when they don't have any property mappings. This will only happen if:
        # - the user forgets to set them or
        # - the source is newly created, this is the first save event
        #   and the mappings are created with an m2m event
        if not instance.property_mappings.exists() or not instance.property_mappings_group.exists():
            return instance
        ldap_sync_single.delay(instance.pk)
        return instance

    class Meta:
        model = LDAPSource
        fields = SourceSerializer.Meta.fields + [

@@ -75,10 +92,18 @@ class LDAPSourceSerializer(SourceSerializer):
            "sync_parent_group",
            "property_mappings",
            "property_mappings_group",
            "connectivity",
        ]
        extra_kwargs = {"bind_password": {"write_only": True}}


class LDAPSyncStatusSerializer(PassiveSerializer):
    """LDAP Source sync status"""

    is_running = BooleanField(read_only=True)
    tasks = TaskSerializer(many=True, read_only=True)


class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
    """LDAP Source Viewset"""

@@ -113,20 +138,24 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
    ordering = ["name"]

    @extend_schema(
        request=None,
        responses={
            200: TaskSerializer(many=True),
        }
            200: LDAPSyncStatusSerializer(),
        },
    )
    @action(methods=["GET"], detail=True, pagination_class=None, filter_backends=[])
    def sync_status(self, request: Request, slug: str) -> Response:
        """Get source's sync status"""
    @action(methods=["GET", "POST"], detail=True, pagination_class=None, filter_backends=[])
    def sync(self, request: Request, slug: str) -> Response:
        """Get source's sync status or start source sync"""
        source = self.get_object()
        results = []
        tasks = TaskInfo.by_name(f"ldap_sync:{source.slug}:*")
        if tasks:
            for task in tasks:
                results.append(task)
        return Response(TaskSerializer(results, many=True).data)
        if request.method == "POST":
            # We're not waiting for the sync to finish here as it could take multiple hours
            ldap_sync_single.delay(source.pk)
        tasks = TaskInfo.by_name(f"ldap_sync:{source.slug}:*") or []
        status = {
            "tasks": tasks,
            "is_running": source.sync_lock.locked(),
        }
        return Response(LDAPSyncStatusSerializer(status).data)

    @extend_schema(
        responses={

@@ -154,33 +183,3 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
                obj.pop("raw_dn", None)
                all_objects[class_name].append(obj)
        return Response(data=all_objects)


class LDAPPropertyMappingSerializer(PropertyMappingSerializer):
    """LDAP PropertyMapping Serializer"""

    class Meta:
        model = LDAPPropertyMapping
        fields = PropertyMappingSerializer.Meta.fields + [
            "object_field",
        ]


class LDAPPropertyMappingFilter(FilterSet):
    """Filter for LDAPPropertyMapping"""

    managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))

    class Meta:
        model = LDAPPropertyMapping
        fields = "__all__"


class LDAPPropertyMappingViewSet(UsedByMixin, ModelViewSet):
    """LDAP PropertyMapping Viewset"""

    queryset = LDAPPropertyMapping.objects.all()
    serializer_class = LDAPPropertyMappingSerializer
    filterset_class = LDAPPropertyMappingFilter
    search_fields = ["name"]
    ordering = ["name"]
@@ -0,0 +1,24 @@
"""LDAP Connection check"""
from json import dumps

from django.core.management.base import BaseCommand
from structlog.stdlib import get_logger

from authentik.sources.ldap.models import LDAPSource

LOGGER = get_logger()


class Command(BaseCommand):
    """Check connectivity to LDAP servers for a source"""

    def add_arguments(self, parser):
        parser.add_argument("source_slugs", nargs="?", type=str)

    def handle(self, **options):
        sources = LDAPSource.objects.filter(enabled=True)
        if options["source_slugs"]:
            sources = LDAPSource.objects.filter(slug__in=options["source_slugs"])
        for source in sources.order_by("slug"):
            status = source.check_connection()
            self.stdout.write(dumps(status, indent=4))
@@ -4,10 +4,12 @@ from ssl import CERT_REQUIRED
from tempfile import NamedTemporaryFile, mkdtemp
from typing import Optional

from django.core.cache import cache
from django.db import models
from django.utils.translation import gettext_lazy as _
from ldap3 import ALL, NONE, RANDOM, Connection, Server, ServerPool, Tls
from ldap3.core.exceptions import LDAPInsufficientAccessRightsResult, LDAPSchemaError
from ldap3.core.exceptions import LDAPException, LDAPInsufficientAccessRightsResult, LDAPSchemaError
from redis.lock import Lock
from rest_framework.serializers import Serializer

from authentik.core.models import Group, PropertyMapping, Source

@@ -113,11 +115,11 @@ class LDAPSource(Source):

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.sources.ldap.api import LDAPSourceSerializer
        from authentik.sources.ldap.api.sources import LDAPSourceSerializer

        return LDAPSourceSerializer

    def server(self, **kwargs) -> Server:
    def server(self, **kwargs) -> ServerPool:
        """Get LDAP Server/ServerPool"""
        servers = []
        tls_kwargs = {}

@@ -154,7 +156,10 @@ class LDAPSource(Source):
        return ServerPool(servers, RANDOM, active=5, exhaust=True)

    def connection(
        self, server_kwargs: Optional[dict] = None, connection_kwargs: Optional[dict] = None
        self,
        server: Optional[Server] = None,
        server_kwargs: Optional[dict] = None,
        connection_kwargs: Optional[dict] = None,
    ) -> Connection:
        """Get a fully connected and bound LDAP Connection"""
        server_kwargs = server_kwargs or {}

@@ -164,7 +169,7 @@ class LDAPSource(Source):
        if self.bind_password is not None:
            connection_kwargs.setdefault("password", self.bind_password)
        connection = Connection(
            self.server(**server_kwargs),
            server or self.server(**server_kwargs),
            raise_exceptions=True,
            receive_timeout=LDAP_TIMEOUT,
            **connection_kwargs,

@@ -183,9 +188,55 @@ class LDAPSource(Source):
            if server_kwargs.get("get_info", ALL) == NONE:
                raise exc
            server_kwargs["get_info"] = NONE
            return self.connection(server_kwargs, connection_kwargs)
            return self.connection(server, server_kwargs, connection_kwargs)
        return RuntimeError("Failed to bind")

    @property
    def sync_lock(self) -> Lock:
        """Redis lock for syncing LDAP to prevent multiple parallel syncs happening"""
        return Lock(
            cache.client.get_client(),
            name=f"goauthentik.io/sources/ldap/sync-{self.slug}",
            # Convert task timeout hours to seconds, and multiply times 3
            # (see authentik/sources/ldap/tasks.py:54)
            # multiply by 3 to add even more leeway
            timeout=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3,
        )

    def check_connection(self) -> dict[str, dict[str, str]]:
        """Check LDAP Connection"""
        from authentik.sources.ldap.sync.base import flatten

        servers = self.server()
        server_info = {}
        # Check each individual server
        for server in servers.servers:
            server: Server
            try:
                connection = self.connection(server=server)
                server_info[server.host] = {
                    "vendor": str(flatten(connection.server.info.vendor_name)),
                    "version": str(flatten(connection.server.info.vendor_version)),
                    "status": "ok",
                }
            except LDAPException as exc:
                server_info[server.host] = {
                    "status": str(exc),
                }
        # Check server pool
        try:
            connection = self.connection()
            server_info["__all__"] = {
                "vendor": str(flatten(connection.server.info.vendor_name)),
                "version": str(flatten(connection.server.info.vendor_version)),
                "status": "ok",
            }
        except LDAPException as exc:
            server_info["__all__"] = {
                "status": str(exc),
            }
        return server_info

    class Meta:
        verbose_name = _("LDAP Source")
        verbose_name_plural = _("LDAP Sources")

@@ -202,7 +253,7 @@ class LDAPPropertyMapping(PropertyMapping):

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.sources.ldap.api import LDAPPropertyMappingSerializer
        from authentik.sources.ldap.api.property_mappings import LDAPPropertyMappingSerializer

        return LDAPPropertyMappingSerializer

@@ -8,5 +8,10 @@ CELERY_BEAT_SCHEDULE = {
        "task": "authentik.sources.ldap.tasks.ldap_sync_all",
        "schedule": crontab(minute=fqdn_rand("sources_ldap_sync"), hour="*/2"),
        "options": {"queue": "authentik_scheduled"},
    }
    },
    "sources_ldap_connectivity_check": {
        "task": "authentik.sources.ldap.tasks.ldap_connectivity_check",
        "schedule": crontab(minute=fqdn_rand("sources_ldap_connectivity_check"), hour="*"),
        "options": {"queue": "authentik_scheduled"},
    },
}
@@ -14,24 +14,18 @@ from authentik.events.models import Event, EventAction
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
from authentik.sources.ldap.models import LDAPSource
from authentik.sources.ldap.password import LDAPPasswordChanger
from authentik.sources.ldap.tasks import ldap_sync_single
from authentik.sources.ldap.tasks import ldap_connectivity_check
from authentik.stages.prompt.signals import password_validate

LOGGER = get_logger()


@receiver(post_save, sender=LDAPSource)
def sync_ldap_source_on_save(sender, instance: LDAPSource, **_):
    """Ensure that source is synced on save (if enabled)"""
def check_ldap_source_on_save(sender, instance: LDAPSource, **_):
    """Check LDAP source's connectivity on save (if enabled)"""
    if not instance.enabled:
        return
    # Don't sync sources when they don't have any property mappings. This will only happen if:
    # - the user forgets to set them or
    # - the source is newly created, this is the first save event
    #   and the mappings are created with an m2m event
    if not instance.property_mappings.exists() or not instance.property_mappings_group.exists():
        return
    ldap_sync_single.delay(instance.pk)
    ldap_connectivity_check.delay(instance.pk)


@receiver(password_validate)
@@ -17,6 +17,15 @@ from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
LDAP_UNIQUENESS = "ldap_uniq"


def flatten(value: Any) -> Any:
    """Flatten `value` if its a list"""
    if isinstance(value, list):
        if len(value) < 1:
            return None
        return value[0]
    return value


class BaseLDAPSynchronizer:
    """Sync LDAP Users and groups into authentik"""

@@ -122,14 +131,6 @@ class BaseLDAPSynchronizer:
            cookie = None
            yield self._connection.response

    def _flatten(self, value: Any) -> Any:
        """Flatten `value` if its a list"""
        if isinstance(value, list):
            if len(value) < 1:
                return None
            return value[0]
        return value

    def build_user_properties(self, user_dn: str, **kwargs) -> dict[str, Any]:
        """Build attributes for User object based on property mappings."""
        props = self._build_object_properties(user_dn, self._source.property_mappings, **kwargs)

@@ -163,10 +164,10 @@ class BaseLDAPSynchronizer:
                object_field = mapping.object_field
                if object_field.startswith("attributes."):
                    # Because returning a list might desired, we can't
                    # rely on self._flatten here. Instead, just save the result as-is
                    # rely on flatten here. Instead, just save the result as-is
                    set_path_in_dict(properties, object_field, value)
                else:
                    properties[object_field] = self._flatten(value)
                    properties[object_field] = flatten(value)
            except PropertyMappingExpressionException as exc:
                Event.new(
                    EventAction.CONFIGURATION_ERROR,

@@ -177,7 +178,7 @@ class BaseLDAPSynchronizer:
                self._logger.warning("Mapping failed to evaluate", exc=exc, mapping=mapping)
                continue
        if self._source.object_uniqueness_field in kwargs:
            properties["attributes"][LDAP_UNIQUENESS] = self._flatten(
            properties["attributes"][LDAP_UNIQUENESS] = flatten(
                kwargs.get(self._source.object_uniqueness_field)
            )
        properties["attributes"][LDAP_DISTINGUISHED_NAME] = object_dn
@@ -7,7 +7,7 @@ from ldap3 import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, SUBTREE

from authentik.core.models import Group
from authentik.events.models import Event, EventAction
from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer
from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer, flatten


class GroupLDAPSynchronizer(BaseLDAPSynchronizer):

@@ -39,7 +39,7 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
            if "attributes" not in group:
                continue
            attributes = group.get("attributes", {})
            group_dn = self._flatten(self._flatten(group.get("entryDN", group.get("dn"))))
            group_dn = flatten(flatten(group.get("entryDN", group.get("dn"))))
            if self._source.object_uniqueness_field not in attributes:
                self.message(
                    f"Cannot find uniqueness field in attributes: '{group_dn}'",

@@ -47,7 +47,7 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
                    dn=group_dn,
                )
                continue
            uniq = self._flatten(attributes[self._source.object_uniqueness_field])
            uniq = flatten(attributes[self._source.object_uniqueness_field])
            try:
                defaults = self.build_group_properties(group_dn, **attributes)
                defaults["parent"] = self._source.sync_parent_group
@@ -7,7 +7,7 @@ from ldap3 import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, SUBTREE

from authentik.core.models import User
from authentik.events.models import Event, EventAction
from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer
from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer, flatten
from authentik.sources.ldap.sync.vendor.freeipa import FreeIPA
from authentik.sources.ldap.sync.vendor.ms_ad import MicrosoftActiveDirectory

@@ -41,7 +41,7 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
            if "attributes" not in user:
                continue
            attributes = user.get("attributes", {})
            user_dn = self._flatten(user.get("entryDN", user.get("dn")))
            user_dn = flatten(user.get("entryDN", user.get("dn")))
            if self._source.object_uniqueness_field not in attributes:
                self.message(
                    f"Cannot find uniqueness field in attributes: '{user_dn}'",

@@ -49,7 +49,7 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
                    dn=user_dn,
                )
                continue
            uniq = self._flatten(attributes[self._source.object_uniqueness_field])
            uniq = flatten(attributes[self._source.object_uniqueness_field])
            try:
                defaults = self.build_user_properties(user_dn, **attributes)
                self._logger.debug("Writing user with attributes", **defaults)
@@ -5,7 +5,7 @@ from typing import Any, Generator
from pytz import UTC

from authentik.core.models import User
from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer
from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer, flatten


class FreeIPA(BaseLDAPSynchronizer):

@@ -47,7 +47,7 @@ class FreeIPA(BaseLDAPSynchronizer):
            return
        # For some reason, nsaccountlock is not defined properly in the schema as bool
        # hence we get it as a list of strings
        _is_locked = str(self._flatten(attributes.get("nsaccountlock", ["FALSE"])))
        _is_locked = str(flatten(attributes.get("nsaccountlock", ["FALSE"])))
        # So we have to attempt to convert it to a bool
        is_locked = _is_locked.lower() == "true"
        # And then invert it since freeipa saves locked and we save active
@@ -1,13 +1,14 @@
"""LDAP Sync tasks"""
from typing import Optional
from uuid import uuid4

from celery import chain, group
from django.core.cache import cache
from ldap3.core.exceptions import LDAPException
from redis.exceptions import LockError
from redis.lock import Lock
from structlog.stdlib import get_logger

from authentik.events.monitored_tasks import CACHE_KEY_PREFIX as CACHE_KEY_PREFIX_TASKS
from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
from authentik.lib.config import CONFIG
from authentik.lib.utils.errors import exception_to_string

@@ -26,6 +27,7 @@ SYNC_CLASSES = [
    MembershipLDAPSynchronizer,
]
CACHE_KEY_PREFIX = "goauthentik.io/sources/ldap/page/"
CACHE_KEY_STATUS = "goauthentik.io/sources/ldap/status/"


@CELERY_APP.task()

@@ -35,6 +37,19 @@ def ldap_sync_all():
        ldap_sync_single.apply_async(args=[source.pk])


@CELERY_APP.task()
def ldap_connectivity_check(pk: Optional[str] = None):
    """Check connectivity for LDAP Sources"""
    # 2 hour timeout, this task should run every hour
    timeout = 60 * 60 * 2
    sources = LDAPSource.objects.filter(enabled=True)
    if pk:
        sources = sources.filter(pk=pk)
    for source in sources:
        status = source.check_connection()
        cache.set(CACHE_KEY_STATUS + source.slug, status, timeout=timeout)


@CELERY_APP.task(
    # We take the configured hours timeout time by 2.5 as we run user and
    # group in parallel and then membership, so 2x is to cover the serial tasks,

@@ -47,12 +62,15 @@ def ldap_sync_single(source_pk: str):
    source: LDAPSource = LDAPSource.objects.filter(pk=source_pk).first()
    if not source:
        return
    lock = Lock(cache.client.get_client(), name=f"goauthentik.io/sources/ldap/sync-{source.slug}")
    lock = source.sync_lock
    if lock.locked():
        LOGGER.debug("LDAP sync locked, skipping task", source=source.slug)
        return
    try:
        with lock:
            # Delete all sync tasks from the cache
            keys = cache.keys(f"{CACHE_KEY_PREFIX_TASKS}ldap_sync:{source.slug}*")
            cache.delete_many(keys)
            task = chain(
                # User and group sync can happen at once, they have no dependencies on each other
                group(
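The guard in ldap_sync_single above is plain redis-py locking, exposed as LDAPSource.sync_lock. A minimal sketch of the same pattern outside authentik (connection details and lock name are placeholders):

from redis import Redis
from redis.lock import Lock

client = Redis(host="localhost", port=6379)  # placeholder connection
lock = Lock(
    client,
    name="goauthentik.io/sources/ldap/sync-example-slug",
    # generous timeout so a crashed worker cannot hold the lock forever
    timeout=3 * 60 * 60,
)

if lock.locked():
    print("sync already running, skipping")  # mirrors the LOGGER.debug branch above
else:
    with lock:  # acquired on enter, released on exit, as in ldap_sync_single
        print("running sync...")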
@@ -1,5 +1,6 @@
"""API URLs"""
from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
from authentik.sources.ldap.api.property_mappings import LDAPPropertyMappingViewSet
from authentik.sources.ldap.api.sources import LDAPSourceViewSet

api_urlpatterns = [
    ("propertymappings/ldap", LDAPPropertyMappingViewSet),
@@ -4,8 +4,8 @@ from typing import Any
from structlog.stdlib import get_logger

from authentik.sources.oauth.clients.oauth2 import UserprofileHeaderAuthClient
from authentik.sources.oauth.types.oidc import OpenIDConnectOAuth2Callback
from authentik.sources.oauth.types.registry import SourceType, registry
from authentik.sources.oauth.views.callback import OAuthCallback
from authentik.sources.oauth.views.redirect import OAuthRedirect

LOGGER = get_logger()

@@ -20,7 +20,7 @@ class AzureADOAuthRedirect(OAuthRedirect):
    }


class AzureADOAuthCallback(OpenIDConnectOAuth2Callback):
class AzureADOAuthCallback(OAuthCallback):
    """AzureAD OAuth2 Callback"""

    client_class = UserprofileHeaderAuthClient

@@ -50,7 +50,7 @@ class AzureADType(SourceType):

    authorization_url = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize"
    access_token_url = "https://login.microsoftonline.com/common/oauth2/v2.0/token"  # nosec
    profile_url = "https://login.microsoftonline.com/common/openid/userinfo"
    profile_url = "https://graph.microsoft.com/v1.0/me"
    oidc_well_known_url = (
        "https://login.microsoftonline.com/common/.well-known/openid-configuration"
    )
@@ -23,7 +23,7 @@ class OpenIDConnectOAuth2Callback(OAuthCallback):
    client_class = UserprofileHeaderAuthClient

    def get_user_id(self, info: dict[str, str]) -> str:
        return info.get("sub", None)
        return info.get("sub", "")

    def get_user_enroll_context(
        self,
@@ -3,8 +3,8 @@ from typing import Any

from authentik.sources.oauth.clients.oauth2 import UserprofileHeaderAuthClient
from authentik.sources.oauth.models import OAuthSource
from authentik.sources.oauth.types.oidc import OpenIDConnectOAuth2Callback
from authentik.sources.oauth.types.registry import SourceType, registry
from authentik.sources.oauth.views.callback import OAuthCallback
from authentik.sources.oauth.views.redirect import OAuthRedirect


@@ -17,7 +17,7 @@ class OktaOAuthRedirect(OAuthRedirect):
    }


class OktaOAuth2Callback(OpenIDConnectOAuth2Callback):
class OktaOAuth2Callback(OAuthCallback):
    """Okta OAuth2 Callback"""

    # Okta has the same quirk as azure and throws an error if the access token

@@ -25,6 +25,9 @@ class OktaOAuth2Callback(OpenIDConnectOAuth2Callback):
    # see https://github.com/goauthentik/authentik/issues/1910
    client_class = UserprofileHeaderAuthClient

    def get_user_id(self, info: dict[str, str]) -> str:
        return info.get("sub", "")

    def get_user_enroll_context(
        self,
        info: dict[str, Any],
@@ -3,8 +3,8 @@ from json import dumps
from typing import Any, Optional

from authentik.sources.oauth.clients.oauth2 import UserprofileHeaderAuthClient
from authentik.sources.oauth.types.oidc import OpenIDConnectOAuth2Callback
from authentik.sources.oauth.types.registry import SourceType, registry
from authentik.sources.oauth.views.callback import OAuthCallback
from authentik.sources.oauth.views.redirect import OAuthRedirect


@@ -27,11 +27,14 @@ class TwitchOAuthRedirect(OAuthRedirect):
    }


class TwitchOAuth2Callback(OpenIDConnectOAuth2Callback):
class TwitchOAuth2Callback(OAuthCallback):
    """Twitch OAuth2 Callback"""

    client_class = TwitchClient

    def get_user_id(self, info: dict[str, str]) -> str:
        return info.get("sub", "")

    def get_user_enroll_context(
        self,
        info: dict[str, Any],
@@ -69,6 +69,7 @@ class AuthenticatorSMSStageView(ChallengeStageView):
        stage: AuthenticatorSMSStage = self.executor.current_stage
        hashed_number = hash_phone_number(phone_number)
        query = Q(phone_number=hashed_number) | Q(phone_number=phone_number)
        print(SMSDevice.objects.filter(query, stage=stage.pk))
        if SMSDevice.objects.filter(query, stage=stage.pk).exists():
            raise ValidationError(_("Invalid phone number"))
        # No code yet, but we have a phone number, so send a verification message
@@ -199,9 +199,11 @@ class AuthenticatorSMSStageTests(FlowTestCase):
                sms_send_mock,
            ),
        ):
            print(self.client.session[SESSION_KEY_PLAN])
            response = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
            )
            print(response.content.decode())
            self.assertStageResponse(
                response,
                self.flow,
@@ -184,7 +184,6 @@ class AuthenticatorValidateStageDuoTests(FlowTestCase):
                    "args": {},
                    "method": "GET",
                    "path": f"/api/v3/flows/executor/{flow.slug}/",
                    "user_agent": "",
                },
            },
        )
@@ -1,11 +1,9 @@
"""authentik multi-stage authentication engine"""
from datetime import timedelta
from uuid import uuid4

from django.contrib import messages
from django.http import HttpRequest, HttpResponse
from django.http.request import QueryDict
from django.template.exceptions import TemplateSyntaxError
from django.urls import reverse
from django.utils.text import slugify
from django.utils.timezone import now

@@ -13,14 +11,11 @@ from django.utils.translation import gettext as _
from rest_framework.fields import CharField
from rest_framework.serializers import ValidationError

from authentik.events.models import Event, EventAction
from authentik.flows.challenge import Challenge, ChallengeResponse, ChallengeTypes
from authentik.flows.exceptions import StageInvalidException
from authentik.flows.models import FlowDesignation, FlowToken
from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED, PLAN_CONTEXT_PENDING_USER
from authentik.flows.stage import ChallengeStageView
from authentik.flows.views.executor import QS_KEY_TOKEN, QS_QUERY
from authentik.lib.utils.errors import exception_to_string
from authentik.stages.email.models import EmailStage
from authentik.stages.email.tasks import send_mails
from authentik.stages.email.utils import TemplateEmailMessage

@@ -63,6 +58,7 @@ class EmailStageView(ChallengeStageView):
        query_params = QueryDict(self.request.GET.get(QS_QUERY), mutable=True)
        query_params.pop(QS_KEY_TOKEN, None)
        query_params.update(kwargs)
        print(query_params)
        full_url = base_url
        if len(query_params) > 0:
            full_url = f"{full_url}?{query_params.urlencode()}"

@@ -75,7 +71,7 @@ class EmailStageView(ChallengeStageView):
        valid_delta = timedelta(
            minutes=current_stage.token_expiry + 1
        )  # + 1 because django timesince always rounds down
        identifier = slugify(f"ak-email-stage-{current_stage.name}-{str(uuid4())}")
        identifier = slugify(f"ak-email-stage-{current_stage.name}-{pending_user}")
        # Don't check for validity here, we only care if the token exists
        tokens = FlowToken.objects.filter(identifier=identifier)
        if not tokens.exists():

@@ -107,7 +103,6 @@ class EmailStageView(ChallengeStageView):
        current_stage: EmailStage = self.executor.current_stage
        token = self.get_token()
        # Send mail to user
        try:
            message = TemplateEmailMessage(
                subject=_(current_stage.subject),
                to=[email],

@@ -120,14 +115,6 @@ class EmailStageView(ChallengeStageView):
                },
            )
            send_mails(current_stage, message)
        except TemplateSyntaxError as exc:
            Event.new(
                EventAction.CONFIGURATION_ERROR,
                message=_("Exception occurred while rendering E-mail template"),
                error=exception_to_string(exc),
                template=current_stage.template,
            ).from_http(self.request)
            raise StageInvalidException from exc

    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        # Check if the user came back from the email link to verify

@@ -148,11 +135,7 @@ class EmailStageView(ChallengeStageView):
            return self.executor.stage_invalid()
        # Check if we've already sent the initial e-mail
        if PLAN_CONTEXT_EMAIL_SENT not in self.executor.plan.context:
            try:
                self.send_email()
            except StageInvalidException as exc:
                self.logger.debug("Got StageInvalidException", exc=exc)
                return self.executor.stage_invalid()
            self.executor.plan.context[PLAN_CONTEXT_EMAIL_SENT] = True
        return super().get(request, *args, **kwargs)

@@ -4,20 +4,11 @@ from pathlib import Path
from shutil import rmtree
from tempfile import mkdtemp, mkstemp
from typing import Any
from unittest.mock import PropertyMock, patch

from django.conf import settings
from django.core.mail.backends.locmem import EmailBackend
from django.urls import reverse
from django.test import TestCase

from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.events.models import Event, EventAction
from authentik.flows.markers import StageMarker
from authentik.flows.models import FlowDesignation, FlowStageBinding
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan
from authentik.flows.tests import FlowTestCase
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.stages.email.models import EmailStage, get_template_choices
from authentik.stages.email.models import get_template_choices


def get_templates_setting(temp_dir: str) -> dict[str, Any]:

@@ -27,18 +18,11 @@ def get_templates_setting(temp_dir: str) -> dict[str, Any]:
    return templates_setting


class TestEmailStageTemplates(FlowTestCase):
class TestEmailStageTemplates(TestCase):
    """Email tests"""

    def setUp(self) -> None:
        self.dir = Path(mkdtemp())
        self.user = create_test_admin_user()
        self.flow = create_test_flow(FlowDesignation.AUTHENTICATION)
        self.stage = EmailStage.objects.create(
            name="email",
        )
        self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
        self.dir = mkdtemp()

    def tearDown(self) -> None:
        rmtree(self.dir)

@@ -54,37 +38,3 @@ class TestEmailStageTemplates(FlowTestCase):
        self.assertEqual(len(choices), 3)
        unlink(file)
        unlink(file2)

    def test_custom_template_invalid_syntax(self):
        """Test with custom template"""
        with open(self.dir / Path("invalid.html"), "w+", encoding="utf-8") as _invalid:
            _invalid.write("{% blocktranslate %}")
        with self.settings(TEMPLATES=get_templates_setting(self.dir)):
            self.stage.template = "invalid.html"
            plan = FlowPlan(
                flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
            )
            plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
            session = self.client.session
            session[SESSION_KEY_PLAN] = plan
            session.save()

            url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
            with patch(
                "authentik.stages.email.models.EmailStage.backend_class",
                PropertyMock(return_value=EmailBackend),
            ):
                response = self.client.get(url)
                self.assertEqual(response.status_code, 200)
                self.assertStageResponse(
                    response,
                    self.flow,
                    error_message="Unknown error",
                )
                events = Event.objects.filter(action=EventAction.CONFIGURATION_ERROR)
                self.assertEqual(len(events), 1)
                event = events.first()
                self.assertEqual(
                    event.context["message"], "Exception occurred while rendering E-mail template"
                )
                self.assertEqual(event.context["template"], "invalid.html")
@@ -6,7 +6,6 @@ from django.urls import reverse
from authentik.core.models import USER_ATTRIBUTE_SOURCES, Group, Source, User, UserSourceConnection
from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.events.models import Event, EventAction
from authentik.flows.markers import StageMarker
from authentik.flows.models import FlowStageBinding
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan

@@ -59,33 +58,11 @@ class TestUserWriteStage(FlowTestCase):
        self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
        user_qs = User.objects.filter(username=plan.context[PLAN_CONTEXT_PROMPT]["username"])
        self.assertTrue(user_qs.exists())
        user = user_qs.first()
        self.assertTrue(user.check_password(password))
        self.assertEqual(list(user.ak_groups.order_by("name")), [self.other_group, self.group])
        self.assertEqual(user.attributes, {USER_ATTRIBUTE_SOURCES: [self.source.name]})

        self.assertTrue(
            Event.objects.filter(
                action=EventAction.MODEL_CREATED,
                context__model={
                    "app": "authentik_core",
                    "model_name": "user",
                    "pk": user.pk,
                    "name": "name",
                },
            )
        )
        self.assertTrue(
            Event.objects.filter(
                action=EventAction.MODEL_UPDATED,
                context__model={
                    "app": "authentik_core",
                    "model_name": "user",
                    "pk": user.pk,
                    "name": "name",
                },
            )
        self.assertTrue(user_qs.first().check_password(password))
        self.assertEqual(
            list(user_qs.first().ak_groups.order_by("name")), [self.other_group, self.group]
        )
        self.assertEqual(user_qs.first().attributes, {USER_ATTRIBUTE_SOURCES: [self.source.name]})

    def test_user_update(self):
        """Test update of existing user"""
@@ -14,11 +14,8 @@ entries:
       expression: |
         # This mapping is used by the authentik proxy. It passes extra user attributes,
         # which are used for example for the HTTP-Basic Authentication mapping.
-        session_id = None
-        if "token" in request.context:
-            session_id = request.context.get("token").session_id
         return {
-            "sid": session_id,
+            "sid": request.http_request.session.session_key,
            "ak_proxy": {
                "user_attributes": request.user.group_attributes(request),
                "is_superuser": request.user.is_superuser,

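The paired `sid` lines are the two generations of this claim: the removed side derives it from the token's session_id with a None fallback, the added side reads the HTTP session key directly. A sketch of the token-first fallback in Go, with hypothetical request and token types standing in for authentik's expression context:

package main

import "fmt"

// Hypothetical shapes for illustration only.
type Token struct{ SessionID string }

type Request struct {
	Token      *Token // set when the request was authenticated with a token
	SessionKey string // key of the HTTP session
}

// sidClaim mirrors the removed mapping's intent: prefer the token's session
// ID when the request carries a token, otherwise use the HTTP session key.
func sidClaim(r Request) string {
	if r.Token != nil {
		return r.Token.SessionID
	}
	return r.SessionKey
}

func main() {
	fmt.Println(sidClaim(Request{SessionKey: "abc123"}))                                  // abc123
	fmt.Println(sidClaim(Request{Token: &Token{SessionID: "t-1"}, SessionKey: "abc123"})) // t-1
}
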
@@ -12,14 +12,12 @@ entries:
       expression: |
         # Some implementations require givenName and familyName to be set
         givenName, familyName = request.user.name, ""
-        formatted = request.user.name + " "
        # This default sets givenName to the name before the first space
        # and the remainder as family name
        # if the user's name has no space the givenName is the entire name
        # (this might cause issues with some SCIM implementations)
        if " " in request.user.name:
            givenName, _, familyName = request.user.name.partition(" ")
-            formatted = request.user.name

        # photos supports URLs to images, however authentik might return data URIs
        avatar = request.user.avatar

@@ -41,7 +39,7 @@ entries:
        return {
            "userName": request.user.username,
            "name": {
-                "formatted": formatted,
+                "formatted": request.user.name,
                "givenName": givenName,
                "familyName": familyName,
            },

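The splitting rule the comments describe is simple: everything before the first space becomes givenName, the remainder becomes familyName, and a space-less name maps entirely to givenName. The same rule as a self-contained Go sketch (illustrative only, not authentik code):

package main

import (
	"fmt"
	"strings"
)

// splitName applies the mapping's rule: the text before the first space is
// the given name, the rest is the family name; a name without spaces becomes
// the whole given name with an empty family name.
func splitName(name string) (givenName, familyName string) {
	if given, family, found := strings.Cut(name, " "); found {
		return given, family
	}
	return name, ""
}

func main() {
	fmt.Println(splitName("Jane Q. Doe")) // "Jane", "Q. Doe"
	fmt.Println(splitName("Cher"))        // "Cher", ""
}
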
@@ -32,7 +32,7 @@ services:
     volumes:
       - redis:/data
   server:
-    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.10.6}
+    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.10.3}
     restart: unless-stopped
     command: server
     environment:

@@ -53,7 +53,7 @@ services:
       - postgresql
       - redis
   worker:
-    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.10.6}
+    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.10.3}
     restart: unless-stopped
     command: worker
     environment:

go.mod (30 changed lines)

@@ -13,24 +13,24 @@ require (
 	github.com/go-openapi/strfmt v0.21.7
 	github.com/golang-jwt/jwt v3.2.2+incompatible
 	github.com/google/uuid v1.4.0
-	github.com/gorilla/handlers v1.5.1
-	github.com/gorilla/mux v1.8.0
-	github.com/gorilla/securecookie v1.1.1
-	github.com/gorilla/sessions v1.2.1
-	github.com/gorilla/websocket v1.5.0
+	github.com/gorilla/handlers v1.5.2
+	github.com/gorilla/mux v1.8.1
+	github.com/gorilla/securecookie v1.1.2
+	github.com/gorilla/sessions v1.2.2
+	github.com/gorilla/websocket v1.5.1
 	github.com/jellydator/ttlcache/v3 v3.1.0
 	github.com/mitchellh/mapstructure v1.5.0
 	github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484
 	github.com/pires/go-proxyproto v0.7.0
 	github.com/prometheus/client_golang v1.17.0
-	github.com/redis/go-redis/v9 v9.2.1
+	github.com/redis/go-redis/v9 v9.3.0
 	github.com/sirupsen/logrus v1.9.3
-	github.com/spf13/cobra v1.7.0
+	github.com/spf13/cobra v1.8.0
 	github.com/stretchr/testify v1.8.4
-	goauthentik.io/api/v3 v3.2023101.1
+	goauthentik.io/api/v3 v3.2023103.1
 	golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
-	golang.org/x/oauth2 v0.13.0
-	golang.org/x/sync v0.4.0
+	golang.org/x/oauth2 v0.14.0
+	golang.org/x/sync v0.5.0
 	gopkg.in/yaml.v2 v2.4.0
 	layeh.com/radius v0.0.0-20210819152912-ad72663a72ab
 )

@@ -42,7 +42,7 @@ require (
 	github.com/cespare/xxhash/v2 v2.2.0 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
-	github.com/felixge/httpsnoop v1.0.1 // indirect
+	github.com/felixge/httpsnoop v1.0.3 // indirect
 	github.com/go-asn1-ber/asn1-ber v1.5.5 // indirect
 	github.com/go-http-utils/fresh v0.0.0-20161124030543-7231e26a4b27 // indirect
 	github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a // indirect

@@ -72,10 +72,10 @@ require (
 	go.mongodb.org/mongo-driver v1.11.3 // indirect
 	go.opentelemetry.io/otel v1.14.0 // indirect
 	go.opentelemetry.io/otel/trace v1.14.0 // indirect
-	golang.org/x/crypto v0.14.0 // indirect
-	golang.org/x/net v0.17.0 // indirect
-	golang.org/x/sys v0.13.0 // indirect
-	golang.org/x/text v0.13.0 // indirect
+	golang.org/x/crypto v0.15.0 // indirect
+	golang.org/x/net v0.18.0 // indirect
+	golang.org/x/sys v0.14.0 // indirect
+	golang.org/x/text v0.14.0 // indirect
 	google.golang.org/appengine v1.6.7 // indirect
 	google.golang.org/protobuf v1.31.0 // indirect
 	gopkg.in/square/go-jose.v2 v2.5.1 // indirect

go.sum (63 changed lines)

@@ -62,7 +62,7 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk
 github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
 github.com/coreos/go-oidc v2.2.1+incompatible h1:mh48q/BqXqgjVHpy2ZY7WnWAbenxRjsz9N1i1YxjHAk=
 github.com/coreos/go-oidc v2.2.1+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc=
-github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=

@@ -73,8 +73,8 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF
 github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
 github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
 github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
-github.com/felixge/httpsnoop v1.0.1 h1:lvB5Jl89CsZtGIWuTcDM1E/vkVs49/Ml7JJe07l8SPQ=
-github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
+github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk=
+github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
 github.com/getsentry/sentry-go v0.25.0 h1:q6Eo+hS+yoJlTO3uu/azhQadsD8V+jQn2D8VvX1eOyI=
 github.com/getsentry/sentry-go v0.25.0/go.mod h1:lc76E2QywIyW8WuBnwl8Lc4bkmQH4+w1gwTf25trprY=
 github.com/go-asn1-ber/asn1-ber v1.5.5 h1:MNHlNMBDgEKD4TcKr36vQN68BA00aDfjIt3/bD50WnA=

@@ -200,6 +200,8 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
 github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
 github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
+github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
 github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
 github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=

@@ -216,16 +218,16 @@ github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
 github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
 github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
-github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4=
-github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q=
-github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
-github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
-github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=
-github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
-github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI=
-github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
-github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
-github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE=
+github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w=
+github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
+github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
+github.com/gorilla/securecookie v1.1.2 h1:YCIWL56dvtr73r6715mJs5ZvhtnY73hBvEF8kXD8ePA=
+github.com/gorilla/securecookie v1.1.2/go.mod h1:NfCASbcHqRSY+3a8tlWJwsQap2VX5pwzwo4h3eOamfo=
+github.com/gorilla/sessions v1.2.2 h1:lqzMYz6bOfvn2WriPUjNByzeXIlVzURcPmgMczkmTjY=
+github.com/gorilla/sessions v1.2.2/go.mod h1:ePLdVu+jbEgHH+KWw8I1z2wqd0BAdAQh/8LRvBeoNcQ=
+github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
+github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY=
 github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
 github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
 github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=

@@ -295,8 +297,8 @@ github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdO
 github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY=
 github.com/prometheus/procfs v0.11.1 h1:xRC8Iq1yyca5ypa9n1EZnWZkt7dwcoRPQwX/5gwaUuI=
 github.com/prometheus/procfs v0.11.1/go.mod h1:eesXgaPo1q7lBpVMoMy0ZOFTth9hBn4W/y0/p/ScXhY=
-github.com/redis/go-redis/v9 v9.2.1 h1:WlYJg71ODF0dVspZZCpYmoF1+U1Jjk9Rwd7pq6QmlCg=
-github.com/redis/go-redis/v9 v9.2.1/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M=
+github.com/redis/go-redis/v9 v9.3.0 h1:RiVDjmig62jIWp7Kk4XVLs0hzV6pI3PyTnnL0cnn0u0=
+github.com/redis/go-redis/v9 v9.3.0/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M=
 github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
 github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
 github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=

@@ -309,8 +311,8 @@ github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6Mwd
 github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
 github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
 github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
-github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
-github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
+github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0=
+github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho=
 github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
 github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
 github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=

@@ -356,8 +358,8 @@ go.opentelemetry.io/otel/trace v1.14.0 h1:wp2Mmvj41tDsyAJXiWDWpfNsOiIyd38fy85pyK
 go.opentelemetry.io/otel/trace v1.14.0/go.mod h1:8avnQLK+CG77yNLUae4ea2JDQ6iT+gozhnZjy/rw9G8=
 go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A=
 go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4=
-goauthentik.io/api/v3 v3.2023101.1 h1:KIQ4wmxjE+geAVB0wBfmxW9Uzo/tA0dbd2hSUJ7YJ3M=
-goauthentik.io/api/v3 v3.2023101.1/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
+goauthentik.io/api/v3 v3.2023103.1 h1:KqZny4BPDEQ6cIDuZ9pn6/kpvyu+o6o/EekAfujffow=
+goauthentik.io/api/v3 v3.2023103.1/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
 golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=

@@ -370,8 +372,8 @@ golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899/go.mod h1:LzIPMQfyMNhhGPh
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
 golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
-golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
-golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
+golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA=
+golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=

@@ -438,16 +440,16 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
 golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
-golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
-golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
+golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg=
+golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
-golang.org/x/oauth2 v0.13.0 h1:jDDenyj+WgFtmV3zYVoi8aE2BwtXFLWOA67ZfNWftiY=
-golang.org/x/oauth2 v0.13.0/go.mod h1:/JMhi4ZRXAf4HG9LiNmxvk+45+96RUlVThiH8FzNBn0=
+golang.org/x/oauth2 v0.14.0 h1:P0Vrf/2538nmC0H+pEQ3MNFRRnVR7RlqyVw+bvm26z0=
+golang.org/x/oauth2 v0.14.0/go.mod h1:lAtNWgaWfL4cm7j2OV8TxGi9Qb7ECORx8DktCY74OwM=
 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=

@@ -460,8 +462,8 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=
-golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE=
+golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=

@@ -502,8 +504,8 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
-golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q=
+golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=

@@ -519,8 +521,9 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
-golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
-golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
 golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=

@@ -27,14 +27,11 @@ type Config struct {
 type RedisConfig struct {
 	Host string `yaml:"host" env:"AUTHENTIK_REDIS__HOST"`
 	Port int `yaml:"port" env:"AUTHENTIK_REDIS__PORT"`
 	DB int `yaml:"db" env:"AUTHENTIK_REDIS__DB"`
 	Username string `yaml:"username" env:"AUTHENTIK_REDIS__USERNAME"`
 	Password string `yaml:"password" env:"AUTHENTIK_REDIS__PASSWORD"`
 	TLS bool `yaml:"tls" env:"AUTHENTIK_REDIS__TLS"`
 	TLSReqs string `yaml:"tls_reqs" env:"AUTHENTIK_REDIS__TLS_REQS"`
 	DB int `yaml:"cache_db" env:"AUTHENTIK_REDIS__DB"`
 	CacheTimeout int `yaml:"cache_timeout" env:"AUTHENTIK_REDIS__CACHE_TIMEOUT"`
 	CacheTimeoutFlows int `yaml:"cache_timeout_flows" env:"AUTHENTIK_REDIS__CACHE_TIMEOUT_FLOWS"`
 	CacheTimeoutPolicies int `yaml:"cache_timeout_policies" env:"AUTHENTIK_REDIS__CACHE_TIMEOUT_POLICIES"`
 	CacheTimeoutReputation int `yaml:"cache_timeout_reputation" env:"AUTHENTIK_REDIS__CACHE_TIMEOUT_REPUTATION"`
 }

 type ListenConfig struct {

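Each field above pairs a YAML key with an AUTHENTIK_REDIS__* environment override. A hand-rolled sketch of that precedence, assuming the environment wins over the file value; authentik's actual loader is more general than these hypothetical helpers:

package main

import (
	"fmt"
	"os"
	"strconv"
)

// envString returns the environment value when set, otherwise the YAML value.
func envString(key, fromYAML string) string {
	if v, ok := os.LookupEnv(key); ok {
		return v
	}
	return fromYAML
}

// envInt does the same for integer fields, ignoring unparseable values.
func envInt(key string, fromYAML int) int {
	if v, ok := os.LookupEnv(key); ok {
		if n, err := strconv.Atoi(v); err == nil {
			return n
		}
	}
	return fromYAML
}

func main() {
	os.Setenv("AUTHENTIK_REDIS__PORT", "6380")
	host := envString("AUTHENTIK_REDIS__HOST", "localhost") // env unset: YAML value wins
	port := envInt("AUTHENTIK_REDIS__PORT", 6379)           // env set: overrides YAML
	fmt.Printf("redis at %s:%d\n", host, port)
}
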
@@ -29,4 +29,4 @@ func UserAgent() string {
 	return fmt.Sprintf("authentik@%s", FullVersion())
 }

-const VERSION = "2023.10.6"
+const VERSION = "2023.10.3"

@@ -29,16 +29,6 @@ var (
 		Name: "authentik_outpost_flow_timing_post_seconds",
 		Help: "Duration it took to send a challenge in seconds",
 	}, []string{"stage", "flow"})
-
-	// NOTE: the following metrics are kept for compatibility purpose
-	FlowTimingGetLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
-		Name: "authentik_outpost_flow_timing_get",
-		Help: "Duration it took to get a challenge",
-	}, []string{"stage", "flow"})
-	FlowTimingPostLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
-		Name: "authentik_outpost_flow_timing_post",
-		Help: "Duration it took to send a challenge",
-	}, []string{"stage", "flow"})
 )

 type SolverFunction func(*api.ChallengeTypes, api.ApiFlowsExecutorSolveRequest) (api.FlowChallengeResponseRequest, error)

@@ -198,10 +188,6 @@ func (fe *FlowExecutor) getInitialChallenge() (*api.ChallengeTypes, error) {
 		"stage": ch.GetComponent(),
 		"flow":  fe.flowSlug,
 	}).Observe(float64(gcsp.EndTime.Sub(gcsp.StartTime)) / float64(time.Second))
-	FlowTimingGetLegacy.With(prometheus.Labels{
-		"stage": ch.GetComponent(),
-		"flow":  fe.flowSlug,
-	}).Observe(float64(gcsp.EndTime.Sub(gcsp.StartTime)))
 	return challenge, nil
 }

@@ -259,10 +245,6 @@ func (fe *FlowExecutor) solveFlowChallenge(challenge *api.ChallengeTypes, depth
 		"stage": ch.GetComponent(),
 		"flow":  fe.flowSlug,
 	}).Observe(float64(scsp.EndTime.Sub(scsp.StartTime)) / float64(time.Second))
-	FlowTimingPostLegacy.With(prometheus.Labels{
-		"stage": ch.GetComponent(),
-		"flow":  fe.flowSlug,
-	}).Observe(float64(scsp.EndTime.Sub(scsp.StartTime)))

 	if depth >= 10 {
 		return false, errors.New("exceeded stage recursion depth")

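These hunks drop the second Observe call per request. While both metric generations coexisted, every duration was recorded twice: on the `*_seconds` histogram in base units, and on the legacy histogram as a raw time.Duration, i.e. nanoseconds. A self-contained sketch of that transitional double-write, reusing the metric names from the diff (helper names are illustrative):

package main

import (
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
)

var (
	// Current metric: seconds, per Prometheus base-unit naming conventions.
	flowTimingPost = promauto.NewHistogramVec(prometheus.HistogramOpts{
		Name: "authentik_outpost_flow_timing_post_seconds",
		Help: "Duration it took to send a challenge in seconds",
	}, []string{"stage", "flow"})
	// Legacy metric kept during the transition: raw nanosecond values.
	flowTimingPostLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
		Name: "authentik_outpost_flow_timing_post",
		Help: "Duration it took to send a challenge",
	}, []string{"stage", "flow"})
)

func observePost(stage, flow string, elapsed time.Duration) {
	labels := prometheus.Labels{"stage": stage, "flow": flow}
	// elapsed.Seconds() is equivalent to float64(elapsed)/float64(time.Second)
	// as written in the diff above.
	flowTimingPost.With(labels).Observe(elapsed.Seconds())
	flowTimingPostLegacy.With(labels).Observe(float64(elapsed))
}

func main() {
	observePost("identification", "default-authentication-flow", 120*time.Millisecond)
}
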
@@ -22,11 +22,6 @@ func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LD
 		"type": "bind",
 		"app":  selectedApp,
 	}).Observe(float64(span.EndTime.Sub(span.StartTime)) / float64(time.Second))
-	metrics.RequestsLegacy.With(prometheus.Labels{
-		"outpost_name": ls.ac.Outpost.Name,
-		"type":         "bind",
-		"app":          selectedApp,
-	}).Observe(float64(span.EndTime.Sub(span.StartTime)))
 	req.Log().WithField("took-ms", span.EndTime.Sub(span.StartTime).Milliseconds()).Info("Bind request")
 }()

@@ -55,12 +50,6 @@ func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LD
 			"reason": "no_provider",
 			"app":    "",
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": ls.ac.Outpost.Name,
-			"type":         "bind",
-			"reason":       "no_provider",
-			"app":          "",
-		}).Inc()

 		return ldap.LDAPResultInsufficientAccessRights, nil
 	}

@@ -47,12 +47,6 @@ func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResul
 			"reason": "flow_error",
 			"app":    db.si.GetAppSlug(),
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": db.si.GetOutpostName(),
-			"type":         "bind",
-			"reason":       "flow_error",
-			"app":          db.si.GetAppSlug(),
-		}).Inc()
 		req.Log().WithError(err).Warning("failed to execute flow")
 		return ldap.LDAPResultInvalidCredentials, nil
 	}

@@ -63,12 +57,6 @@ func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResul
 			"reason": "invalid_credentials",
 			"app":    db.si.GetAppSlug(),
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": db.si.GetOutpostName(),
-			"type":         "bind",
-			"reason":       "invalid_credentials",
-			"app":          db.si.GetAppSlug(),
-		}).Inc()
 		req.Log().Info("Invalid credentials")
 		return ldap.LDAPResultInvalidCredentials, nil
 	}

@@ -82,12 +70,6 @@ func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResul
 			"reason": "access_denied",
 			"app":    db.si.GetAppSlug(),
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": db.si.GetOutpostName(),
-			"type":         "bind",
-			"reason":       "access_denied",
-			"app":          db.si.GetAppSlug(),
-		}).Inc()
 		return ldap.LDAPResultInsufficientAccessRights, nil
 	}
 	if err != nil {

@@ -97,12 +79,6 @@ func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResul
 			"reason": "access_check_fail",
 			"app":    db.si.GetAppSlug(),
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": db.si.GetOutpostName(),
-			"type":         "bind",
-			"reason":       "access_check_fail",
-			"app":          db.si.GetAppSlug(),
-		}).Inc()
 		req.Log().WithError(err).Warning("failed to check access")
 		return ldap.LDAPResultOperationsError, nil
 	}

@@ -117,12 +93,6 @@ func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResul
 			"reason": "user_info_fail",
 			"app":    db.si.GetAppSlug(),
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": db.si.GetOutpostName(),
-			"type":         "bind",
-			"reason":       "user_info_fail",
-			"app":          db.si.GetAppSlug(),
-		}).Inc()
 		req.Log().WithError(err).Warning("failed to get user info")
 		return ldap.LDAPResultOperationsError, nil
 	}

@@ -22,16 +22,6 @@ var (
 		Name: "authentik_outpost_ldap_requests_rejected_total",
 		Help: "Total number of rejected requests",
 	}, []string{"outpost_name", "type", "reason", "app"})
-
-	// NOTE: the following metrics are kept for compatibility purpose
-	RequestsLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
-		Name: "authentik_outpost_ldap_requests",
-		Help: "The total number of configured providers",
-	}, []string{"outpost_name", "type", "app"})
-	RequestsRejectedLegacy = promauto.NewCounterVec(prometheus.CounterOpts{
-		Name: "authentik_outpost_ldap_requests_rejected",
-		Help: "Total number of rejected requests",
-	}, []string{"outpost_name", "type", "reason", "app"})
 )

 func RunServer() {

@@ -23,11 +23,6 @@ func (ls *LDAPServer) Search(bindDN string, searchReq ldap.SearchRequest, conn n
 		"type": "search",
 		"app":  selectedApp,
 	}).Observe(float64(span.EndTime.Sub(span.StartTime)) / float64(time.Second))
-	metrics.RequestsLegacy.With(prometheus.Labels{
-		"outpost_name": ls.ac.Outpost.Name,
-		"type":         "search",
-		"app":          selectedApp,
-	}).Observe(float64(span.EndTime.Sub(span.StartTime)))
 	req.Log().WithField("attributes", searchReq.Attributes).WithField("took-ms", span.EndTime.Sub(span.StartTime).Milliseconds()).Info("Search request")
 }()

@@ -45,12 +45,6 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
 			"reason": "empty_bind_dn",
 			"app":    ds.si.GetAppSlug(),
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": ds.si.GetOutpostName(),
-			"type":         "search",
-			"reason":       "empty_bind_dn",
-			"app":          ds.si.GetAppSlug(),
-		}).Inc()
 		return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, fmt.Errorf("Search Error: Anonymous BindDN not allowed %s", req.BindDN)
 	}
 	if !utils.HasSuffixNoCase(req.BindDN, ","+baseDN) {

@@ -60,12 +54,6 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
 			"reason": "invalid_bind_dn",
 			"app":    ds.si.GetAppSlug(),
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": ds.si.GetOutpostName(),
-			"type":         "search",
-			"reason":       "invalid_bind_dn",
-			"app":          ds.si.GetAppSlug(),
-		}).Inc()
 		return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, fmt.Errorf("Search Error: BindDN %s not in our BaseDN %s", req.BindDN, ds.si.GetBaseDN())
 	}

@@ -78,12 +66,6 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
 			"reason": "user_info_not_cached",
 			"app":    ds.si.GetAppSlug(),
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": ds.si.GetOutpostName(),
-			"type":         "search",
-			"reason":       "user_info_not_cached",
-			"app":          ds.si.GetAppSlug(),
-		}).Inc()
 		return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, errors.New("access denied")
 	}
 	accsp.Finish()

@@ -96,12 +78,6 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
 			"reason": "filter_parse_fail",
 			"app":    ds.si.GetAppSlug(),
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": ds.si.GetOutpostName(),
-			"type":         "search",
-			"reason":       "filter_parse_fail",
-			"app":          ds.si.GetAppSlug(),
-		}).Inc()
 		return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultOperationsError}, fmt.Errorf("Search Error: error parsing filter: %s", req.Filter)
 	}

@@ -62,12 +62,6 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
 			"reason": "empty_bind_dn",
 			"app":    ms.si.GetAppSlug(),
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": ms.si.GetOutpostName(),
-			"type":         "search",
-			"reason":       "empty_bind_dn",
-			"app":          ms.si.GetAppSlug(),
-		}).Inc()
 		return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, fmt.Errorf("Search Error: Anonymous BindDN not allowed %s", req.BindDN)
 	}
 	if !utils.HasSuffixNoCase(req.BindDN, ","+baseDN) {

@@ -77,12 +71,6 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
 			"reason": "invalid_bind_dn",
 			"app":    ms.si.GetAppSlug(),
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": ms.si.GetOutpostName(),
-			"type":         "search",
-			"reason":       "invalid_bind_dn",
-			"app":          ms.si.GetAppSlug(),
-		}).Inc()
 		return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, fmt.Errorf("Search Error: BindDN %s not in our BaseDN %s", req.BindDN, ms.si.GetBaseDN())
 	}

@@ -95,12 +83,6 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
 			"reason": "user_info_not_cached",
 			"app":    ms.si.GetAppSlug(),
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": ms.si.GetOutpostName(),
-			"type":         "search",
-			"reason":       "user_info_not_cached",
-			"app":          ms.si.GetAppSlug(),
-		}).Inc()
 		return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, errors.New("access denied")
 	}
 	accsp.Finish()

@@ -22,11 +22,6 @@ func (ls *LDAPServer) Unbind(boundDN string, conn net.Conn) (ldap.LDAPResultCode
 		"type": "unbind",
 		"app":  selectedApp,
 	}).Observe(float64(span.EndTime.Sub(span.StartTime)) / float64(time.Second))
-	metrics.RequestsLegacy.With(prometheus.Labels{
-		"outpost_name": ls.ac.Outpost.Name,
-		"type":         "unbind",
-		"app":          selectedApp,
-	}).Observe(float64(span.EndTime.Sub(span.StartTime)))
 	req.Log().WithField("took-ms", span.EndTime.Sub(span.StartTime).Milliseconds()).Info("Unbind request")
 }()

@@ -55,11 +50,5 @@ func (ls *LDAPServer) Unbind(boundDN string, conn net.Conn) (ldap.LDAPResultCode
 			"reason": "no_provider",
 			"app":    "",
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": ls.ac.Outpost.Name,
-			"type":         "unbind",
-			"reason":       "no_provider",
-			"app":          "",
-		}).Inc()
 		return ldap.LDAPResultOperationsError, nil
 	}

@@ -173,12 +173,6 @@ func NewApplication(p api.ProxyOutpostConfig, c *http.Client, server Server) (*A
 			"method": r.Method,
 			"host":   web.GetHost(r),
 		}).Observe(float64(elapsed) / float64(time.Second))
-		metrics.RequestsLegacy.With(prometheus.Labels{
-			"outpost_name": a.outpostName,
-			"type":         "app",
-			"method":       r.Method,
-			"host":         web.GetHost(r),
-		}).Observe(float64(elapsed))
 	})
 })
 if server.API().GlobalConfig.ErrorReporting.Enabled {

@@ -64,13 +64,6 @@ func (a *Application) configureProxy() error {
 		"scheme": r.URL.Scheme,
 		"host":   web.GetHost(r),
 	}).Observe(float64(elapsed) / float64(time.Second))
-	metrics.UpstreamTimingLegacy.With(prometheus.Labels{
-		"outpost_name":  a.outpostName,
-		"upstream_host": r.URL.Host,
-		"method":        r.Method,
-		"scheme":        r.URL.Scheme,
-		"host":          web.GetHost(r),
-	}).Observe(float64(elapsed))
 })
 return nil
 }

@@ -31,11 +31,16 @@ func (a *Application) redeemCallback(savedState string, u *url.URL, c context.Co
 		return nil, err
 	}

-	jwt := oauth2Token.AccessToken
-	a.log.WithField("jwt", jwt).Trace("access_token")
+	// Extract the ID Token from OAuth2 token.
+	rawIDToken, ok := oauth2Token.Extra("id_token").(string)
+	if !ok {
+		return nil, fmt.Errorf("missing id_token")
+	}
+
+	a.log.WithField("id_token", rawIDToken).Trace("id_token")

 	// Parse and verify ID Token payload.
-	idToken, err := a.tokenVerifier.Verify(ctx, jwt)
+	idToken, err := a.tokenVerifier.Verify(ctx, rawIDToken)
 	if err != nil {
 		return nil, err
 	}

@@ -48,6 +53,6 @@ func (a *Application) redeemCallback(savedState string, u *url.URL, c context.Co
 	if claims.Proxy == nil {
 		claims.Proxy = &ProxyClaims{}
 	}
-	claims.RawToken = jwt
+	claims.RawToken = rawIDToken
 	return claims, nil
 }

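The fix above is the important one in this section: go-oidc's verifier expects the ID Token, a JWT carried in the token response's extra fields, while the access token may be opaque. A condensed sketch of the corrected exchange with github.com/coreos/go-oidc (the version pinned in this diff's go.sum) and golang.org/x/oauth2; the surrounding provider setup is assumed:

package example

import (
	"context"
	"fmt"

	"github.com/coreos/go-oidc"
	"golang.org/x/oauth2"
)

// redeem exchanges the authorization code, then verifies the ID Token taken
// from the token response's extra fields. Verifying the access token instead
// (the bug fixed above) fails, because it is not the JWT the verifier expects.
func redeem(ctx context.Context, cfg *oauth2.Config, verifier *oidc.IDTokenVerifier, code string) (*oidc.IDToken, error) {
	oauth2Token, err := cfg.Exchange(ctx, code)
	if err != nil {
		return nil, err
	}
	rawIDToken, ok := oauth2Token.Extra("id_token").(string)
	if !ok {
		return nil, fmt.Errorf("missing id_token")
	}
	// Checks signature, issuer, audience and expiry of the ID Token itself.
	return verifier.Verify(ctx, rawIDToken)
}
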
@@ -71,7 +71,7 @@ func (a *Application) getStore(p api.ProxyOutpostConfig, externalHost *url.URL)
 	cs.Options.Domain = *p.CookieDomain
 	cs.Options.SameSite = http.SameSiteLaxMode
 	cs.Options.MaxAge = maxAge
-	cs.Options.Path = "/"
+	cs.Options.Path = externalHost.Path
 	a.log.WithField("dir", dir).Trace("using filesystem session backend")
 	return cs
 }

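Scoping the cookie Path to the application's external URL keeps sessions of two apps proxied under one domain from clobbering each other. A small runnable sketch with gorilla/sessions, using a CookieStore for brevity where the code above uses a filesystem store; all values are hypothetical:

package main

import (
	"fmt"
	"net/http"
	"net/url"

	"github.com/gorilla/sessions"
)

func main() {
	externalHost, _ := url.Parse("https://apps.example.com/app1")
	store := sessions.NewCookieStore([]byte("signing-key-for-illustration"))
	store.Options = &sessions.Options{
		// "/app1" instead of "/", mirroring the change above: the browser only
		// sends this cookie for requests under the app's own path.
		Path:     externalHost.Path,
		MaxAge:   86400,
		SameSite: http.SameSiteLaxMode,
	}
	fmt.Println(store.Options.Path)
}
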
@@ -26,12 +26,6 @@ func (ps *ProxyServer) HandlePing(rw http.ResponseWriter, r *http.Request) {
 		"host": web.GetHost(r),
 		"type": "ping",
 	}).Observe(float64(elapsed) / float64(time.Second))
-	metrics.RequestsLegacy.With(prometheus.Labels{
-		"outpost_name": ps.akAPI.Outpost.Name,
-		"method":       r.Method,
-		"host":         web.GetHost(r),
-		"type":         "ping",
-	}).Observe(float64(elapsed))
 }

 func (ps *ProxyServer) HandleStatic(rw http.ResponseWriter, r *http.Request) {

@@ -44,12 +38,6 @@ func (ps *ProxyServer) HandleStatic(rw http.ResponseWriter, r *http.Request) {
 		"host": web.GetHost(r),
 		"type": "static",
 	}).Observe(float64(elapsed) / float64(time.Second))
-	metrics.RequestsLegacy.With(prometheus.Labels{
-		"outpost_name": ps.akAPI.Outpost.Name,
-		"method":       r.Method,
-		"host":         web.GetHost(r),
-		"type":         "static",
-	}).Observe(float64(elapsed))
 }

 func (ps *ProxyServer) lookupApp(r *http.Request) (*application.Application, string) {

@@ -22,16 +22,6 @@ var (
 		Name: "authentik_outpost_proxy_upstream_response_duration_seconds",
 		Help: "Proxy upstream response latencies in seconds",
 	}, []string{"outpost_name", "method", "scheme", "host", "upstream_host"})
-
-	// NOTE: the following metric is kept for compatibility purpose
-	RequestsLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
-		Name: "authentik_outpost_proxy_requests",
-		Help: "The total number of configured providers",
-	}, []string{"outpost_name", "method", "host", "type"})
-	UpstreamTimingLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
-		Name: "authentik_outpost_proxy_upstream_time",
-		Help: "A summary of the duration we wait for the upstream reply",
-	}, []string{"outpost_name", "method", "scheme", "host", "upstream_host"})
 )

 func RunServer() {

@@ -36,7 +36,6 @@ func (ps *ProxyServer) handleWSMessage(ctx context.Context, args map[string]inte
 	switch msg.SubType {
 	case WSProviderSubTypeLogout:
 		for _, p := range ps.apps {
-			ps.log.WithField("provider", p.Host).Debug("Logging out")
 			err := p.Logout(ctx, func(c application.Claims) bool {
 				return c.Sid == msg.SessionID
 			})

@@ -35,11 +35,6 @@ func (rs *RadiusServer) Handle_AccessRequest(w radius.ResponseWriter, r *RadiusR
 			"reason": "flow_error",
 			"app":    r.pi.appSlug,
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": rs.ac.Outpost.Name,
-			"reason":       "flow_error",
-			"app":          r.pi.appSlug,
-		}).Inc()
 		_ = w.Write(r.Response(radius.CodeAccessReject))
 		return
 	}

@@ -49,11 +44,6 @@ func (rs *RadiusServer) Handle_AccessRequest(w radius.ResponseWriter, r *RadiusR
 			"reason": "invalid_credentials",
 			"app":    r.pi.appSlug,
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": rs.ac.Outpost.Name,
-			"reason":       "invalid_credentials",
-			"app":          r.pi.appSlug,
-		}).Inc()
 		_ = w.Write(r.Response(radius.CodeAccessReject))
 		return
 	}

@@ -66,11 +56,6 @@ func (rs *RadiusServer) Handle_AccessRequest(w radius.ResponseWriter, r *RadiusR
 			"reason": "access_check_fail",
 			"app":    r.pi.appSlug,
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": rs.ac.Outpost.Name,
-			"reason":       "access_check_fail",
-			"app":          r.pi.appSlug,
-		}).Inc()
 		return
 	}
 	if !access {

@@ -81,11 +66,6 @@ func (rs *RadiusServer) Handle_AccessRequest(w radius.ResponseWriter, r *RadiusR
 			"reason": "access_denied",
 			"app":    r.pi.appSlug,
 		}).Inc()
-		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
-			"outpost_name": rs.ac.Outpost.Name,
-			"reason":       "access_denied",
-			"app":          r.pi.appSlug,
-		}).Inc()
 		return
 	}
 	_ = w.Write(r.Response(radius.CodeAccessAccept))
