"""Wrapper for ldap3 to easily manage users"""
from typing import Any, Dict, Optional

import ldap3
import ldap3.core.exceptions
from django.db.utils import IntegrityError
from structlog import get_logger

from passbook.core.exceptions import PropertyMappingExpressionException
from passbook.core.models import Group, User
from passbook.sources.ldap.models import LDAPPropertyMapping, LDAPSource

LOGGER = get_logger()


class Connector:
    """Wrapper for ldap3 to easily manage user authentication and creation"""

    _server: ldap3.Server
    _connection: ldap3.Connection
    _source: LDAPSource

    def __init__(self, source: LDAPSource):
        self._source = source
        self._server = ldap3.Server(source.server_uri)  # TODO: Implement URI parsing

    def bind(self):
        """Bind using the Source's credentials"""
        self._connection = ldap3.Connection(
            self._server,
            raise_exceptions=True,
            user=self._source.bind_cn,
            password=self._source.bind_password,
        )
        self._connection.bind()
        if self._source.start_tls:
            self._connection.start_tls()

    @staticmethod
    def encode_pass(password: str) -> bytes:
        """Encodes a plain-text password so it can be used by AD"""
        return '"{}"'.format(password).encode("utf-16-le")

    @property
    def base_dn_users(self) -> str:
        """Shortcut to get full base_dn for user lookups"""
        return ",".join([self._source.additional_user_dn, self._source.base_dn])

    @property
    def base_dn_groups(self) -> str:
        """Shortcut to get full base_dn for group lookups"""
        return ",".join([self._source.additional_group_dn, self._source.base_dn])

    def sync_groups(self):
        """Iterate over all LDAP Groups and create passbook_core.Group instances"""
        if not self._source.sync_groups:
            LOGGER.debug("Group syncing is disabled for this Source")
            return
        groups = self._connection.extend.standard.paged_search(
            search_base=self.base_dn_groups,
            search_filter=self._source.group_object_filter,
            search_scope=ldap3.SUBTREE,
            attributes=ldap3.ALL_ATTRIBUTES,
        )
        for group in groups:
            attributes = group.get("attributes", {})
            _, created = Group.objects.update_or_create(
                attributes__ldap_uniq=attributes.get(
                    self._source.object_uniqueness_field, ""
                ),
                parent=self._source.sync_parent_group,
                # defaults=self._build_object_properties(attributes),
                defaults={
                    "name": attributes.get("name", ""),
                    "attributes": {
                        "ldap_uniq": attributes.get(
                            self._source.object_uniqueness_field, ""
                        ),
                        "distinguishedName": attributes.get("distinguishedName"),
                    },
                },
            )
            LOGGER.debug(
                "Synced group", group=attributes.get("name", ""), created=created
            )

    def sync_users(self):
        """Iterate over all LDAP Users and create passbook_core.User instances"""
        users = self._connection.extend.standard.paged_search(
            search_base=self.base_dn_users,
            search_filter=self._source.user_object_filter,
            search_scope=ldap3.SUBTREE,
            attributes=ldap3.ALL_ATTRIBUTES,
        )
        for user in users:
            attributes = user.get("attributes", {})
            try:
                uniq = attributes[self._source.object_uniqueness_field]
            except KeyError:
                LOGGER.warning("Cannot find uniqueness field in attributes")
                continue
            try:
                user, created = User.objects.update_or_create(
                    attributes__ldap_uniq=uniq,
                    defaults=self._build_object_properties(attributes),
                )
            except IntegrityError as exc:
                LOGGER.warning("Failed to create user", exc=exc)
                LOGGER.warning(
                    (
                        "To merge the new User with the existing user, set the "
                        f"existing User's attribute 'ldap_uniq' to '{uniq}'"
                    )
                )
            else:
                if created:
                    user.set_unusable_password()
                    user.save()
                LOGGER.debug(
                    "Synced User", user=attributes.get("name", ""), created=created
                )

    def sync_membership(self):
        """Iterate over all Users and assign Groups using the memberOf field"""
        users = self._connection.extend.standard.paged_search(
            search_base=self.base_dn_users,
            search_filter=self._source.user_object_filter,
            search_scope=ldap3.SUBTREE,
            attributes=[
                self._source.user_group_membership_field,
                self._source.object_uniqueness_field,
            ],
        )
        group_cache: Dict[str, Group] = {}
        for user in users:
            member_of = user.get("attributes", {}).get(
                self._source.user_group_membership_field, []
            )
            uniq = user.get("attributes", {}).get(
                self._source.object_uniqueness_field, []
            )
            for group_dn in member_of:
                # Check if group_dn is within our base_dn_groups, and skip if not
                if not group_dn.endswith(self.base_dn_groups):
                    continue
                # Check if we fetched the group already, and if not cache it for later
                if group_dn not in group_cache:
                    groups = Group.objects.filter(
                        attributes__distinguishedName=group_dn
                    )
                    if not groups.exists():
                        LOGGER.warning(
                            "Group does not exist in our DB yet, run sync_groups first.",
                            group=group_dn,
                        )
                        return
                    group_cache[group_dn] = groups.first()
                group = group_cache[group_dn]
                users = User.objects.filter(attributes__ldap_uniq=uniq)
                group.user_set.add(*list(users))
        # Now that all users are added, let's write everything
        for _, group in group_cache.items():
            group.save()
        LOGGER.debug("Successfully updated group membership")

    def _build_object_properties(
        self, attributes: Dict[str, Any]
    ) -> Dict[str, Dict[Any, Any]]:
        """Build the defaults dict for update_or_create from raw LDAP attributes,
        evaluating the Source's LDAPPropertyMappings."""
        properties = {"attributes": {}}
        for mapping in self._source.property_mappings.all().select_subclasses():
            if not isinstance(mapping, LDAPPropertyMapping):
                continue
            mapping: LDAPPropertyMapping
            try:
                properties[mapping.object_field] = mapping.evaluate(
                    user=None, request=None, ldap=attributes
                )
            except PropertyMappingExpressionException as exc:
                LOGGER.warning("Mapping failed to evaluate", exc=exc, mapping=mapping)
                continue
        if self._source.object_uniqueness_field in attributes:
            properties["attributes"]["ldap_uniq"] = attributes.get(
                self._source.object_uniqueness_field
            )
        properties["attributes"]["distinguishedName"] = attributes.get(
            "distinguishedName"
        )
        return properties

    def auth_user(self, password: str, **filters: Dict[str, str]) -> Optional[User]:
        """Try to bind as either user_dn or mail with password.

        Returns the authenticated User on success, otherwise None"""
        users = User.objects.filter(**filters)
        if not users.exists():
            return None
        user: User = users.first()
        if "distinguishedName" not in user.attributes:
            LOGGER.debug(
                "User doesn't have DN set, assuming not LDAP imported.", user=user
            )
            return None
        # Either has unusable password,
        # or has a password, but couldn't be authenticated by ModelBackend.
        # This means we check with a bind to see if the LDAP password has changed
        if self.auth_user_by_bind(user, password):
            # Password given successfully binds to LDAP, so we save it in our Database
            LOGGER.debug("Updating user's password in DB", user=user)
            user.set_password(password)
            user.save()
            return user
        # Password doesn't match
        LOGGER.debug("Failed to bind, password invalid")
        return None

    def auth_user_by_bind(self, user: User, password: str) -> Optional[User]:
        """Attempt authentication by binding to the LDAP server as `user`. This
        method should be avoided as it is slow to do the bind."""
        # Try to bind as new user
        LOGGER.debug("Attempting to bind as user", user=user)
        try:
            temp_connection = ldap3.Connection(
                self._server,
                user=user.attributes.get("distinguishedName"),
                password=password,
                raise_exceptions=True,
            )
            temp_connection.bind()
            return user
        except ldap3.core.exceptions.LDAPInvalidCredentialsResult as exception:
            LOGGER.debug("LDAPInvalidCredentialsResult", user=user, error=exception)
        except ldap3.core.exceptions.LDAPException as exception:
            LOGGER.warning(exception)
        return None
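

# Illustrative usage sketch (not part of the original module). The variable
# `source` stands for a hypothetical, already-configured LDAPSource instance;
# bind() must be called before syncing, and sync_groups() should run before
# sync_membership() so group DNs can be resolved.
#
#   connector = Connector(source)
#   connector.bind()
#   connector.sync_groups()
#   connector.sync_users()
#   connector.sync_membership()
#   user = connector.auth_user("p4ssw0rd", email="jdoe@example.org")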