root: add support for storing media files in S3
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
parent 6e83b890bd
commit 4ae3c6c6ac
@@ -34,6 +34,7 @@ REDIS_ENV_KEYS = [
     f"{ENV_PREFIX}_REDIS__TLS_REQS",
 ]
 
+# Old key -> new key
 DEPRECATIONS = {
     "geoip": "events.context_processors.geoip",
     "redis.broker_url": "broker.url",
@@ -201,12 +202,13 @@ class ConfigLoader:
             root[key] = value
         return root
 
-    def refresh(self, key: str):
+    def refresh(self, key: str, default=None, sep=".") -> Any:
         """Update a single value"""
-        attr: Attr = get_path_from_dict(self.raw, key)
+        attr: Attr = get_path_from_dict(self.raw, key, sep=sep, default=Attr(default))
         if attr.source_type != Attr.Source.URI:
-            return
+            return attr.value
         attr.value = self.parse_uri(attr.source).value
+        return attr.value
 
     def parse_uri(self, value: str) -> Attr:
         """Parse string values which start with a URI"""
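The reworked `refresh()` above now accepts a default and returns the resolved value, which is what lets the S3 storage backend later in this commit re-read credentials on every access. A minimal sketch of the call pattern, using key names from this commit and illustrative values:

```python
# Sketch only: CONFIG is the ConfigLoader singleton from authentik.lib.config.
from authentik.lib.config import CONFIG

# Previously refresh() only re-parsed URI-sourced values and returned None;
# it now also returns the current value, falling back to `default` when unset.
access_key = CONFIG.refresh("storage.media.s3.access_key", None)
if access_key is None:
    # e.g. rely on AWS SDK authentication through a named profile instead
    profile = CONFIG.refresh("storage.media.s3.session_profile", None)
```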
@@ -37,8 +37,8 @@ redis:
   tls_reqs: "none"
 
 # broker:
 #   url: ""
 #   transport_options: ""
 
 cache:
   # url: ""
@@ -48,13 +48,10 @@ cache:
   timeout_reputation: 300
 
 # channel:
 #   url: ""
 
 # result_backend:
 #   url: ""
 
-paths:
-  media: ./media
-
 debug: false
 remote_debug: false
@@ -133,3 +130,21 @@ web:
 
 worker:
   concurrency: 2
+
+storage:
+  media:
+    backend: file # or s3
+    file:
+      path: ./media
+    s3:
+      # How to talk to s3
+      # region: "us-east-1"
+      # use_ssl: True
+      # endpoint: "https://s3.us-east-1.amazonaws.com"
+      # addressing_style: "path" # or "virtual"
+      # access_key: ""
+      # secret_key: ""
+      # bucket_name: "authentik-media"
+      # How to render file URLs
+      # custom_domain: null
+      secure_urls: True
@@ -75,7 +75,7 @@ class TestConfig(TestCase):
 
         # Update config file
         write(file, "bar".encode())
-        config.refresh("file_test")
+        self.assertEqual(config.refresh("file_test"), "foobar")
         self.assertEqual(config.get("file_test"), "foobar")
 
         unlink(file_name)
@@ -16,8 +16,6 @@ from authentik.lib.utils.reflection import get_env
 from authentik.stages.password import BACKEND_APP_PASSWORD, BACKEND_INBUILT, BACKEND_LDAP
 
 BASE_DIR = Path(__file__).absolute().parent.parent.parent
-STATICFILES_DIRS = [BASE_DIR / Path("web")]
-MEDIA_ROOT = BASE_DIR / Path("media")
 
 DEBUG = CONFIG.get_bool("debug")
 SECRET_KEY = CONFIG.get("secret_key")
@@ -372,8 +370,53 @@ if _ERROR_REPORTING:
 # Static files (CSS, JavaScript, Images)
 # https://docs.djangoproject.com/en/2.1/howto/static-files/
 
+STATICFILES_DIRS = [BASE_DIR / Path("web")]
 STATIC_URL = "/static/"
-MEDIA_URL = "/media/"
+
+STORAGES = {
+    "staticfiles": {
+        "BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
+    },
+}
+
+
+# Media files
+
+if CONFIG.get("storage.media.backend", "file") == "s3":
+    STORAGES["default"] = {
+        "BACKEND": "authentik.root.storages.S3Storage",
+        "OPTIONS": {
+            # How to talk to S3
+            "session_profile": CONFIG.get("storage.media.s3.session_profile", None),
+            "access_key": CONFIG.get("storage.media.s3.access_key", None),
+            "secret_key": CONFIG.get("storage.media.s3.secret_key", None),
+            "security_token": CONFIG.get("storage.media.s3.security_token", None),
+            "region_name": CONFIG.get("storage.media.s3.region", None),
+            "use_ssl": CONFIG.get_bool("storage.media.s3.use_ssl", True),
+            "endpoint_url": CONFIG.get("storage.media.s3.endpoint", None),
+            "addressing_style": CONFIG.get("storage.media.s3.addressing_style", "virtual"),
+            "bucket_name": CONFIG.get("storage.media.s3.bucket_name"),
+            "default_acl": "private",
+            "querystring_auth": True,
+            "signature_version": "s3v4",
+            "file_overwrite": False,
+            "location": "media",
+            # How to render file URLS
+            "url_protocol": "https:"
+            if CONFIG.get("storage.media.s3.secure_urls", True)
+            else "http:",
+            "custom_domain": CONFIG.get("storage.media.s3.custom_domain", None),
+        },
+    }
+# Fallback on file storage backend
+else:
+    STORAGES["default"] = {
+        "BACKEND": "django.core.files.storage.FileSystemStorage",
+        "OPTIONS": {
+            "location": Path(CONFIG.get("storage.media.file.path")),
+            "base_url": "/media/",
+        },
+    }
 
 TEST = False
 TEST_RUNNER = "authentik.root.test_runner.PytestTestRunner"
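For context, a rough sketch of how Django resolves the `STORAGES["default"]` entry configured above at runtime; the icon path and file contents are made up:

```python
# Sketch: assumes a configured Django settings module (e.g. authentik.root.settings).
from django.core.files.base import ContentFile
from django.core.files.storage import default_storage

# With storage.media.backend = "s3" this resolves to authentik.root.storages.S3Storage,
# otherwise to Django's FileSystemStorage rooted at storage.media.file.path.
name = default_storage.save("application-icons/example.png", ContentFile(b"fake-png-bytes"))
print(default_storage.url(name))  # "/media/..." for file, a presigned S3 URL for s3
```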
@@ -0,0 +1,85 @@
+"""authentik storage backends"""
+from storages.backends.s3 import S3Storage as BaseS3Storage
+from storages.utils import clean_name
+
+from authentik.lib.config import CONFIG
+
+
+# pylint: disable=abstract-method
+class S3Storage(BaseS3Storage):
+    """S3 storage backend"""
+
+    # The parent class sets these attributes in its __init__ method, getting them from the Django
+    # settings. To be able to support secrets reloading, we instead make those properties and
+    # refresh them from the CONFIG.
+    # Because of this, we also need to let the parent class do its setting thing, thus we have
+    # empty setters that just ignore whatever is set in those attributes.
+    @property
+    def session_profile(self) -> str | None:
+        """Get session profile"""
+        return CONFIG.refresh("storage.media.s3.session_profile", None)
+
+    @session_profile.setter
+    def session_profile(self, value: str):
+        pass
+
+    @property
+    def access_key(self) -> str | None:
+        """Get access key"""
+        return CONFIG.refresh("storage.media.s3.access_key", None)
+
+    @access_key.setter
+    def access_key(self, value: str):
+        pass
+
+    @property
+    def secret_key(self) -> str | None:
+        """Get secret key"""
+        return CONFIG.refresh("storage.media.s3.secret_key", None)
+
+    @secret_key.setter
+    def secret_key(self, value: str):
+        pass
+
+    @property
+    def security_token(self) -> str | None:
+        """Get security token"""
+        return CONFIG.refresh("storage.media.s3.security_token", None)
+
+    @security_token.setter
+    def security_token(self, value: str):
+        pass
+
+    # This is a fix for https://github.com/jschneier/django-storages/pull/839
+    # pylint: disable=arguments-differ,no-member
+    def url(self, name, parameters=None, expire=None, http_method=None):
+        # Preserve the trailing slash after normalizing the path.
+        name = self._normalize_name(clean_name(name))
+        params = parameters.copy() if parameters else {}
+        if expire is None:
+            expire = self.querystring_expire
+
+        params["Bucket"] = self.bucket.name
+        params["Key"] = name
+        url = self.bucket.meta.client.generate_presigned_url(
+            "get_object",
+            Params=params,
+            ExpiresIn=expire,
+            HttpMethod=http_method,
+        )
+
+        if self.custom_domain:
+            # Key parameter can't be empty. Use "/" and remove it later.
+            params["Key"] = "/"
+            root_url_signed = self.bucket.meta.client.generate_presigned_url(
+                "get_object", Params=params, ExpiresIn=expire
+            )
+            # Remove signing parameter and previously added key "/".
+            root_url = self._strip_signing_parameters(root_url_signed)[:-1]
+            # Replace bucket domain with custom domain.
+            custom_url = "{}//{}/".format(self.url_protocol, self.custom_domain)
+            url = url.replace(root_url, custom_url)
+
+        if self.querystring_auth:
+            return url
+        return self._strip_signing_parameters(url)
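To make the `custom_domain` branch of `url()` above concrete, here is a toy sketch of the string rewrite it performs; the domain, bucket, and object key are invented:

```python
# Toy illustration of the rewrite S3Storage.url() applies when custom_domain is set.
presigned = "https://authentik-media.s3.provider/media/icon.png?X-Amz-Signature=abc"
root_url = "https://authentik-media.s3.provider/"  # presigned bucket root, signing params stripped
custom_url = "https:" + "//" + "s3.provider/authentik-media" + "/"  # url_protocol + custom_domain
print(presigned.replace(root_url, custom_url))
# -> https://s3.provider/authentik-media/media/icon.png?X-Amz-Signature=abc
```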
@@ -2,7 +2,7 @@ package config
 
 type Config struct {
 	// Core specific config
-	Paths          PathsConfig          `yaml:"paths"`
+	Storage        StorageConfig        `yaml:"storage"`
 	LogLevel       string               `yaml:"log_level" env:"AUTHENTIK_LOG_LEVEL"`
 	ErrorReporting ErrorReportingConfig `yaml:"error_reporting"`
 	Redis          RedisConfig          `yaml:"redis"`
@@ -45,8 +45,17 @@ type ListenConfig struct {
 	TrustedProxyCIDRs []string `yaml:"trusted_proxy_cidrs" env:"AUTHENTIK_LISTEN__TRUSTED_PROXY_CIDRS"`
 }
 
-type PathsConfig struct {
-	Media string `yaml:"media"`
+type StorageConfig struct {
+	Media StorageMediaConfig `yaml:"media"`
+}
+
+type StorageMediaConfig struct {
+	Backend string            `yaml:"backend" env:"AUTHENTIK_STORAGE_MEDIA_BACKEND"`
+	File    StorageFileConfig `yaml:"file"`
+}
+
+type StorageFileConfig struct {
+	Path string `yaml:"path" env:"AUTHENTIK_STORAGE_MEDIA_FILE_PATH"`
 }
 
 type ErrorReportingConfig struct {
@@ -6,6 +6,7 @@ import (
 
 	"github.com/go-http-utils/etag"
 	"github.com/gorilla/mux"
 
 	"goauthentik.io/internal/config"
 	"goauthentik.io/internal/constants"
 	"goauthentik.io/internal/utils/web"
@@ -17,8 +18,6 @@ func (ws *WebServer) configureStatic() {
 	statRouter.Use(ws.staticHeaderMiddleware)
 	indexLessRouter := statRouter.NewRoute().Subrouter()
 	indexLessRouter.Use(web.DisableIndex)
-	// Media files, always local
-	fs := http.FileServer(http.Dir(config.Get().Paths.Media))
 	distFs := http.FileServer(http.Dir("./web/dist"))
 	distHandler := http.StripPrefix("/static/dist/", distFs)
 	authentikHandler := http.StripPrefix("/static/authentik/", http.FileServer(http.Dir("./web/authentik")))
@@ -40,7 +39,11 @@ func (ws *WebServer) configureStatic() {
 		web.DisableIndex(http.StripPrefix(fmt.Sprintf("/if/rac/%s", vars["app_slug"]), distFs)).ServeHTTP(rw, r)
 	})
 
-	indexLessRouter.PathPrefix("/media/").Handler(http.StripPrefix("/media", fs))
+	// Media files, if backend is file
+	if config.Get().Storage.Media.Backend == "file" {
+		fsMedia := http.FileServer(http.Dir(config.Get().Storage.Media.File.Path))
+		indexLessRouter.PathPrefix("/media/").Handler(http.StripPrefix("/media", fsMedia))
+	}
 
 	statRouter.PathPrefix("/if/help/").Handler(http.StripPrefix("/if/help/", helpHandler))
 	statRouter.PathPrefix("/help").Handler(http.RedirectHandler("/if/help/", http.StatusMovedPermanently))
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand.
 
 [[package]]
 name = "aiohttp"
@@ -458,6 +458,44 @@ d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
 jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
 uvloop = ["uvloop (>=0.15.2)"]
 
+[[package]]
+name = "boto3"
+version = "1.33.12"
+description = "The AWS SDK for Python"
+optional = false
+python-versions = ">= 3.7"
+files = [
+    {file = "boto3-1.33.12-py3-none-any.whl", hash = "sha256:475efcff30401041e9c348e20613eca90ab14a224e2f978ca80de98ba3499435"},
+    {file = "boto3-1.33.12.tar.gz", hash = "sha256:2225edaea2fa17274f62707c12d9f7803c998af7089fe8a1ec8e4f1ebf47677e"},
+]
+
+[package.dependencies]
+botocore = ">=1.33.12,<1.34.0"
+jmespath = ">=0.7.1,<2.0.0"
+s3transfer = ">=0.8.2,<0.9.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
+
+[[package]]
+name = "botocore"
+version = "1.33.12"
+description = "Low-level, data-driven core of boto 3."
+optional = false
+python-versions = ">= 3.7"
+files = [
+    {file = "botocore-1.33.12-py3-none-any.whl", hash = "sha256:48b9cfb9c5f7f9634a71782f16a324acb522b65856ad46be69efe04c3322b23c"},
+    {file = "botocore-1.33.12.tar.gz", hash = "sha256:067c94fa88583c04ae897d48a11d2be09f280363b8e794b82d78d631d3a3e910"},
+]
+
+[package.dependencies]
+jmespath = ">=0.7.1,<2.0.0"
+python-dateutil = ">=2.1,<3.0.0"
+urllib3 = {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}
+
+[package.extras]
+crt = ["awscrt (==0.19.17)"]
+
 [[package]]
 name = "bump2version"
 version = "1.0.1"
@@ -1218,6 +1256,30 @@ Django = ">=3.2"
 gprof2dot = ">=2017.09.19"
 sqlparse = "*"
 
+[[package]]
+name = "django-storages"
+version = "1.14.2"
+description = "Support for many storage backends in Django"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "django-storages-1.14.2.tar.gz", hash = "sha256:51b36af28cc5813b98d5f3dfe7459af638d84428c8df4a03990c7d74d1bea4e5"},
+    {file = "django_storages-1.14.2-py3-none-any.whl", hash = "sha256:1db759346b52ada6c2efd9f23d8241ecf518813eb31db9e2589207174f58f6ad"},
+]
+
+[package.dependencies]
+boto3 = {version = ">=1.4.4", optional = true, markers = "extra == \"s3\""}
+Django = ">=3.2"
+
+[package.extras]
+azure = ["azure-core (>=1.13)", "azure-storage-blob (>=12)"]
+boto3 = ["boto3 (>=1.4.4)"]
+dropbox = ["dropbox (>=7.2.1)"]
+google = ["google-cloud-storage (>=1.27)"]
+libcloud = ["apache-libcloud"]
+s3 = ["boto3 (>=1.4.4)"]
+sftp = ["paramiko (>=1.15)"]
+
 [[package]]
 name = "djangorestframework"
 version = "3.14.0"
@@ -1801,6 +1863,17 @@ MarkupSafe = ">=2.0"
 [package.extras]
 i18n = ["Babel (>=2.7)"]
 
+[[package]]
+name = "jmespath"
+version = "1.0.1"
+description = "JSON Matching Expressions"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
+    {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
+]
+
 [[package]]
 name = "jsonpatch"
 version = "1.33"
@@ -3550,6 +3623,23 @@ files = [
 {file = "ruff-0.1.9.tar.gz", hash = "sha256:b041dee2734719ddbb4518f762c982f2e912e7f28b8ee4fe1dee0b15d1b6e800"},
 ]
 
+[[package]]
+name = "s3transfer"
+version = "0.8.2"
+description = "An Amazon S3 Transfer Manager"
+optional = false
+python-versions = ">= 3.7"
+files = [
+    {file = "s3transfer-0.8.2-py3-none-any.whl", hash = "sha256:c9e56cbe88b28d8e197cf841f1f0c130f246595e77ae5b5a05b69fe7cb83de76"},
+    {file = "s3transfer-0.8.2.tar.gz", hash = "sha256:368ac6876a9e9ed91f6bc86581e319be08188dc60d50e0d56308ed5765446283"},
+]
+
+[package.dependencies]
+botocore = ">=1.33.2,<2.0a.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
+
 [[package]]
 name = "selenium"
 version = "4.16.0"
@@ -3970,23 +4060,40 @@ files = [
 
 [[package]]
 name = "urllib3"
-version = "2.1.0"
+version = "2.0.7"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.7"
 files = [
-    {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"},
-    {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"},
+    {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"},
+    {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"},
 ]
 
 [package.dependencies]
+certifi = {version = "*", optional = true, markers = "extra == \"secure\""}
+cryptography = {version = ">=1.9", optional = true, markers = "extra == \"secure\""}
+idna = {version = ">=2.0.0", optional = true, markers = "extra == \"secure\""}
+pyopenssl = {version = ">=17.1.0", optional = true, markers = "extra == \"secure\""}
 pysocks = {version = ">=1.5.6,<1.5.7 || >1.5.7,<2.0", optional = true, markers = "extra == \"socks\""}
+urllib3-secure-extra = {version = "*", optional = true, markers = "extra == \"secure\""}
 
 [package.extras]
 brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
 socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
 zstd = ["zstandard (>=0.18.0)"]
 
+[[package]]
+name = "urllib3-secure-extra"
+version = "0.1.0"
+description = "Marker library to detect whether urllib3 was installed with the deprecated [secure] extra"
+optional = false
+python-versions = "*"
+files = [
+    {file = "urllib3-secure-extra-0.1.0.tar.gz", hash = "sha256:ee9409cbfeb4b8609047be4c32fb4317870c602767e53fd8a41005ebe6a41dff"},
+    {file = "urllib3_secure_extra-0.1.0-py2.py3-none-any.whl", hash = "sha256:f7adcb108b4d12a4b26b99eb60e265d087f435052a76aefa396b6ee85e9a6ef9"},
+]
+
 [[package]]
 name = "uvicorn"
 version = "0.25.0"
@@ -4542,4 +4649,4 @@ files = [
 [metadata]
 lock-version = "2.0"
 python-versions = "~3.12"
-content-hash = "9d28b9e79139895839ffcba88e2eaad0f842a15888f3f6f8c0ac8879616ac850"
+content-hash = "c20d193368ab621efad415fb3b05275abc579208e9c269df423859c59abb6bb4"
@@ -133,6 +133,7 @@ django-guardian = "*"
 django-model-utils = "*"
 django-prometheus = "*"
 django-redis = "*"
+django-storages = { extras = ["s3"], version = "*" }
 djangorestframework = "*"
 djangorestframework-guardian = "*"
 docker = "*"
@@ -0,0 +1,16 @@
+#!/usr/bin/env -S bash -e
+
+AWS_ACCESS_KEY_ID=accessKey1 AWS_SECRET_ACCESS_KEY=secretKey1 aws \
+    s3api \
+    --endpoint-url http://localhost:8020 \
+    create-bucket \
+    --acl private \
+    --bucket authentik-media
+
+AWS_ACCESS_KEY_ID=accessKey1 AWS_SECRET_ACCESS_KEY=secretKey1 aws \
+    s3api \
+    --endpoint-url http://localhost:8020 \
+    put-bucket-cors \
+    --bucket authentik-media \
+    --cors-configuration \
+    '{"CORSRules": [{"AllowedOrigins": ["*"], "AllowedHeaders": [], "AllowedMethods": ["GET"], "MaxAgeSeconds": 3000}]}'
@@ -18,7 +18,24 @@ services:
     ports:
       - 127.0.0.1:6379:6379
     restart: always
+  s3:
+    container_name: s3
+    image: docker.io/zenko/cloudserver
+    environment:
+      REMOTE_MANAGEMENT_DISABLE: "1"
+      SCALITY_ACCESS_KEY_ID: accessKey1
+      SCALITY_SECRET_ACCESS_KEY: secretKey1
+    ports:
+      - 8020:8000
+    volumes:
+      - s3-data:/usr/src/app/localData
+      - s3-metadata:/usr/scr/app/localMetadata
+    restart: always
 
 volumes:
   db-data:
     driver: local
+  s3-data:
+    driver: local
+  s3-metadata:
+    driver: local
@@ -21,7 +21,21 @@ with open("local.env.yml", "w", encoding="utf-8") as _config:
                 "processors": {
                     "geoip": "tests/GeoLite2-City-Test.mmdb",
                     "asn": "tests/GeoLite2-ASN-Test.mmdb",
-                }
+                },
+            },
+            "storage": {
+                "media": {
+                    "backend": "file",
+                    "s3": {
+                        "endpoint": "http://localhost:8020",
+                        "access_key": "accessKey1",
+                        "secret_key": "secretKey1",
+                        "addressing_style": "path",
+                        "bucket_name": "authentik-media",
+                        "custom_domain": "localhost:8020/authentik-media",
+                        "secure_urls": False,
+                    },
+                },
             },
         },
         _config,
@@ -13,7 +13,7 @@ title: Full development environment
 
 ## Services Setup
 
-For PostgreSQL and Redis, you can use the `docker-compose.yml` file in `/scripts`.To use these pre-configured database instances, navigate to the `/scripts` directory in your local copy of the authentik git repo, and run `docker compose up -d`.
+For PostgreSQL and Redis, you can use the `docker-compose.yml` file in `/scripts`. To use these pre-configured database instances, navigate to the `/scripts` directory in your local copy of the authentik git repo, and run `docker compose up -d`.
 You can also use a native install, if you prefer.
 
 :::info
@@ -116,7 +116,7 @@ To check if your config has been applied correctly, you can run the following co
 `AUTHENTIK_REDIS__CACHE_TIMEOUT_REPUTATION` only applies to the cache expiry, see [`AUTHENTIK_REPUTATION__EXPIRY`](#authentik_reputation__expiry) to control how long reputation is persisted for.
 :::
 
-## Listen Setting
+## Listen Settings
 
 - `AUTHENTIK_LISTEN__HTTP`: Listening address:port (e.g. `0.0.0.0:9000`) for HTTP (Applies to Server and Proxy outpost)
 - `AUTHENTIK_LISTEN__HTTPS`: Listening address:port (e.g. `0.0.0.0:9443`) for HTTPS (Applies to Server and Proxy outpost)
@@ -130,6 +130,23 @@ To check if your config has been applied correctly, you can run the following co
 
 Requests directly coming from one an address within a CIDR specified here are able to set proxy headers, such as `X-Forwarded-For`. Requests coming from other addresses will not be able to set these headers.
 
+## Media Storage Settings
+
+These settings affect where media files are stored. These files include application and source icons. By default, they are stored on disk in the `/media` directory of the authentik container. S3 storage is also supported.
+
+- `AUTHENTIK_STORAGE__MEDIA__BACKEND`: Where to store files. Valid values are `file` and `s3`. For `file` storage, files are stored in a `/media` directory in the container. For `s3`, see below.
+- `AUTHENTIK_STORAGE__MEDIA__S3__REGION`: S3 region where the bucket has been created. May be omitted depending on which S3 provider you use. No default.
+- `AUTHENTIK_STORAGE__MEDIA__S3__USE_SSL`: Whether to use HTTPS when talking to the S3 storage provider. Defaults to `true`.
+- `AUTHENTIK_STORAGE__MEDIA__S3__ENDPOINT`: Endpoint to use to talk to the S3 storage provider. Overrides the previous region and use_ssl settings. Must be a valid URL in the form of `https://s3.provider`. No default.
+- `AUTHENTIK_STORAGE__MEDIA__S3__ADDRESSING_STYLE`: Whether the S3 bucket will be accessed using a subdomain or a URL path. Valid values are `virtual` and `path`. Defaults to `virtual`.
+- `AUTHENTIK_STORAGE__MEDIA__S3__SESSION_PROFILE`: Profile to use when using AWS SDK authentication. No default. Supports hot-reloading.
+- `AUTHENTIK_STORAGE__MEDIA__S3__ACCESS_KEY`: Access key to authenticate to S3. May be omitted if using AWS SDK authentication. Supports hot-reloading.
+- `AUTHENTIK_STORAGE__MEDIA__S3__SECRET_KEY`: Secret key to authenticate to S3. May be omitted if using AWS SDK authentication. Supports hot-reloading.
+- `AUTHENTIK_STORAGE__MEDIA__S3__SECURITY_TOKEN`: Security token to authenticate to S3. May be omitted. Supports hot-reloading.
+- `AUTHENTIK_STORAGE__MEDIA__S3__BUCKET_NAME`: Name of the bucket to use to store files.
+- `AUTHENTIK_STORAGE__MEDIA__S3__CUSTOM_DOMAIN`: Domain to use to create URLs for users. Mainly useful for non-AWS providers. May include a port. Must include the bucket. Example: `s3.company:8080/authentik-media`.
+- `AUTHENTIK_STORAGE__MEDIA__S3__SECURE_URLS`: Whether URLs created for users use `http` or `https`. Defaults to `true`.
+
 ## authentik Settings
 
 ### `AUTHENTIK_SECRET_KEY`
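The naming rule behind these variables is the usual authentik one: nested configuration keys map to environment variables by upper-casing the path and joining segments with double underscores. A small sketch of that mapping; the helper name is made up:

```python
# Hypothetical helper, shown only to illustrate the naming convention used above.
def to_env_var(config_path: str, prefix: str = "AUTHENTIK") -> str:
    return f"{prefix}_" + config_path.replace(".", "__").upper()

assert to_env_var("storage.media.backend") == "AUTHENTIK_STORAGE__MEDIA__BACKEND"
assert to_env_var("storage.media.s3.secure_urls") == "AUTHENTIK_STORAGE__MEDIA__S3__SECURE_URLS"
```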
@@ -0,0 +1,106 @@
+---
+title: S3 storage setup
+---
+
+### Preparation
+
+First, create a user on your S3 storage provider and get access credentials for S3, hereafter referred to as `access_key` and `secret_key`.
+
+You'll also need to know which endpoint authentik is going to use to access the S3 API, hereafter referred to as `https://s3.provider`.
+
+The bucket in which authentik is going to store files is going to be called `authentik-media`. You may need to change this name depending on your S3 provider's limitations. Also, we're suffixing the bucket name with `-media` as authentik currently only stores media files, but may use other buckets in the future.
+
+The domain used to access authentik is going to be referred to as `authentik.company`.
+
+You will also need the AWS CLI.
+
+### S3 configuration
+
+#### Bucket creation
+
+Let's create the bucket in which authentik is going to store files:
+
+```bash
+AWS_ACCESS_KEY_ID=access_key AWS_SECRET_ACCESS_KEY=secret_key aws s3api --endpoint-url=https://s3.provider create-bucket --bucket=authentik-media --acl=private
+```
+
+If using AWS S3, you can omit the `--endpoint-url` option, but may need to specify the `--region` option. If using Google Cloud Storage, refer to its documentation on how to create buckets.
+
+The bucket ACL is set to private, although that is not strictly necessary, as an ACL associated with each object stored in the bucket will be private as well.
+
+#### CORS policy
+
+Next, let's associate a CORS policy with the bucket, to allow the authentik web interface to show images stored in the bucket.
+
+First, save the following file locally as `cors.json`:
+
+```json
+{
+    "CORSRules": [
+        {
+            "AllowedOrigins": ["authentik.company"],
+            "AllowedHeaders": [],
+            "AllowedMethods": ["GET"],
+            "MaxAgeSeconds": 3000
+        }
+    ]
+}
+```
+
+If authentik is accessed from multiple domains, you can add them to the `AllowedOrigins` list.
+
+Let's apply that policy to the bucket:
+
+```bash
+AWS_ACCESS_KEY_ID=access_key AWS_SECRET_ACCESS_KEY=secret_key aws s3api --endpoint-url=https://s3.provider put-bucket-cors --bucket=authentik-media --cors-configuration=file://cors.json
+```
+
+### Configuring authentik
+
+Add the following to your `.env` file:
+
+```env
+AUTHENTIK_STORAGE__MEDIA__BACKEND=s3
+AUTHENTIK_STORAGE__MEDIA__S3__ACCESS_KEY=access_key
+AUTHENTIK_STORAGE__MEDIA__S3__SECRET_KEY=secret_key
+AUTHENTIK_STORAGE__MEDIA__S3__BUCKET_NAME=authentik-media
+```
+
+If you're using AWS S3 as your S3 provider, add the following:
+
+```env
+AUTHENTIK_STORAGE__MEDIA__S3__REGION=us-east-1 # Use the region of the bucket
+```
+
+If you're not using AWS S3 as your S3 provider, add the following:
+
+```env
+AUTHENTIK_STORAGE__MEDIA__S3__ENDPOINT=https://s3.provider
+AUTHENTIK_STORAGE__MEDIA__S3__CUSTOM_DOMAIN=s3.provider/authentik-media
+```
+
+You may also need to specify `AUTHENTIK_STORAGE__MEDIA__S3__ADDRESSING_STYLE` if your S3 provider does not support subdomain bucket access.
+
+The `ENDPOINT` setting specifies how authentik talks to the S3 provider.
+
+The `CUSTOM_DOMAIN` setting specifies how URLs are constructed to be shown on the web interface. For example, an object stored at `application-icons/application.png` with a `CUSTOM_DOMAIN` setting of `s3.provider/authentik-media` will result in a URL of `https://s3.provider/authentik-media/application-icons/application.png`. You can also use subdomains for your buckets depending on what your S3 provider offers: `authentik-media.s3.provider`. Whether HTTPS is used is controlled by the `AUTHENTIK_STORAGE__MEDIA__S3__SECURE_URLS` setting, which defaults to `true`.
+
+For more control over settings, refer to the [configuration reference](./configuration.mdx#media-storage-settings).
+
+### Migrating between storage backends
+
+The following section assumes that the local storage path is `/media` and the bucket name is `authentik-media`. It also assumes you have a working `aws` CLI that can interact with the bucket.
+
+#### From file to s3
+
+Follow the setup steps above, and then migrate the files from your local directory to s3:
+
+```bash
+aws s3 sync /media s3://authentik-media/media
+```
+
+#### From s3 to file
+
+```bash
+aws s3 sync s3://authentik-media/media /media
+```
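Before pointing authentik at the bucket, it can be worth checking the credentials and addressing style outside of authentik. A small boto3 sketch, reusing the placeholder endpoint and credentials from the guide above:

```python
# Sketch: boto3 is already pulled in via django-storages[s3]; values are placeholders.
import boto3
from botocore.config import Config

client = boto3.client(
    "s3",
    endpoint_url="https://s3.provider",
    aws_access_key_id="access_key",
    aws_secret_access_key="secret_key",
    config=Config(s3={"addressing_style": "virtual"}),  # use "path" if subdomains are unsupported
)
print(client.list_objects_v2(Bucket="authentik-media").get("KeyCount", 0))
```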
@@ -29,6 +29,10 @@ slug: "/releases/2024.1"
 
 Previously the identification stage would only continue if a user matching the user identifier exists. While this was the intended functionality, this release adds an option to continue to the next stage even if no matching user was found. "Pretend" users cannot authenticate nor receive emails, and don't exist in the database. **This feature is enabled by default.**
 
+- S3 file storage
+
+    Media files can now be stored on S3. Follow the [setup guide](../../installation/storage-s3.md) to get started.
+
 ## Upgrading
 
 This release does not introduce any new requirements.
@@ -27,6 +27,7 @@ const docsSidebar = {
             "installation/automated-install",
             "installation/air-gapped",
             "installation/monitoring",
+            "installation/storage-s3",
         ],
     },
     {