2019-06-07 21:43:23 +02:00
|
|
|
# django-mellon - SAML2 authentication for Django
|
|
|
|
# Copyright (C) 2014-2019 Entr'ouvert
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as
|
|
|
|
# published by the Free Software Foundation, either version 3 of the
|
|
|
|
# License, or (at your option) any later version.
|
|
|
|
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Affero General Public License for more details.
|
|
|
|
|
|
|
|
# You should have received a copy of the GNU Affero General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
2019-06-14 15:13:54 +02:00
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
2019-06-07 21:46:07 +02:00
|
|
|
from xml.etree import ElementTree as ET
|
|
|
|
import hashlib
|
2019-06-14 15:13:54 +02:00
|
|
|
|
2014-04-28 14:33:04 +02:00
|
|
|
import logging
|
2019-06-07 21:46:07 +02:00
|
|
|
import os
|
|
|
|
import threading
|
|
|
|
import time
|
2016-02-12 17:19:34 +01:00
|
|
|
import uuid
|
2016-02-26 13:27:32 +01:00
|
|
|
|
|
|
|
import lasso
|
|
|
|
import requests
|
|
|
|
import requests.exceptions
|
2019-06-07 21:46:07 +02:00
|
|
|
from atomicwrites import atomic_write
|
2014-04-28 14:33:04 +02:00
|
|
|
|
2019-06-06 13:52:21 +02:00
|
|
|
from django.core.exceptions import PermissionDenied, FieldDoesNotExist
|
2019-06-07 21:46:07 +02:00
|
|
|
from django.core.files.storage import default_storage
|
2014-04-28 14:33:04 +02:00
|
|
|
from django.contrib import auth
|
|
|
|
from django.contrib.auth.models import Group
|
2018-03-25 10:19:36 +02:00
|
|
|
from django.utils import six
|
2018-05-29 12:21:13 +02:00
|
|
|
from django.utils.encoding import force_text
|
2019-06-07 21:46:07 +02:00
|
|
|
from django.utils.six.moves.urllib.parse import urlparse
|
2014-04-28 14:33:04 +02:00
|
|
|
|
2015-04-29 16:39:14 +02:00
|
|
|
from . import utils, app_settings, models
|
2014-04-28 14:33:04 +02:00
|
|
|
|
2019-06-06 10:59:28 +02:00
|
|
|
User = auth.get_user_model()
|
|
|
|
|
2019-06-07 21:46:07 +02:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2014-04-28 14:33:04 +02:00
|
|
|
|
2016-03-02 15:34:07 +01:00
|
|
|
class UserCreationError(Exception):
|
|
|
|
pass
|
|
|
|
|
|
|
|
|
2019-06-06 13:52:21 +02:00
|
|
|
def display_truncated_list(l, max_length=10):
|
|
|
|
s = '[' + ', '.join(map(six.text_type, l))
|
|
|
|
if len(l) > max_length:
|
|
|
|
s += '..truncated more than %d items (%d)]' % (max_length, len(l))
|
|
|
|
else:
|
|
|
|
s += ']'
|
|
|
|
return s
|
|
|
|
|
|
|
|
|
2014-04-28 14:33:04 +02:00
|
|
|
class DefaultAdapter(object):
|
2014-08-05 18:19:33 +02:00
|
|
|
def get_idp(self, entity_id):
|
|
|
|
'''Find the first IdP definition matching entity_id'''
|
2018-01-09 21:43:25 +01:00
|
|
|
for idp in self.get_idps():
|
|
|
|
if entity_id == idp['ENTITY_ID']:
|
|
|
|
return idp
|
2014-08-05 18:19:33 +02:00
|
|
|
|
2016-02-26 13:27:32 +01:00
|
|
|
def get_identity_providers_setting(self):
|
2018-01-09 21:43:25 +01:00
|
|
|
return app_settings.IDENTITY_PROVIDERS
|
2016-02-26 13:27:32 +01:00
|
|
|
|
2019-06-06 22:52:22 +02:00
|
|
|
def get_users_queryset(self, idp, saml_attributes):
|
|
|
|
return User.objects.all()
|
|
|
|
|
2015-02-13 18:03:47 +01:00
|
|
|
def get_idps(self):
|
2016-02-26 13:27:32 +01:00
|
|
|
for i, idp in enumerate(self.get_identity_providers_setting()):
|
2019-06-07 21:46:07 +02:00
|
|
|
if self.load_idp(idp, i):
|
|
|
|
yield idp
|
|
|
|
|
|
|
|
def load_metadata_path(self, idp, i):
|
|
|
|
path = idp['METADATA_PATH']
|
|
|
|
if not os.path.exists(path):
|
|
|
|
logger.warning('metadata path %s does not exist', path)
|
|
|
|
return
|
|
|
|
last_update = idp.get('METADATA_PATH_LAST_UPDATE', 0)
|
|
|
|
try:
|
|
|
|
mtime = os.stat(path).st_mtime
|
|
|
|
except OSError as e:
|
|
|
|
logger.warning('metadata path %s : stat() call failed, %s', path, e)
|
|
|
|
return
|
|
|
|
if last_update == 0 or mtime >= last_update:
|
|
|
|
idp['METADATA_PATH_LAST_UPDATE'] = time.time()
|
|
|
|
try:
|
|
|
|
with open(path) as fd:
|
|
|
|
metadata = fd.read()
|
|
|
|
except OSError as e:
|
|
|
|
logger.warning('metadata path %s : open()/read() call failed, %s', path, e)
|
|
|
|
return
|
|
|
|
entity_id = self.load_entity_id(metadata, i)
|
|
|
|
if not entity_id:
|
|
|
|
logger.error('invalid metadata file retrieved from %s', path)
|
|
|
|
return
|
|
|
|
if 'ENTITY_ID' in idp and idp['ENTITY_ID'] != entity_id:
|
|
|
|
logger.error('metadata path %s : entityID changed %r != %r', path, entity_id, idp['ENTITY_ID'])
|
|
|
|
del idp['ENTITY_ID']
|
|
|
|
idp['METADATA'] = metadata
|
|
|
|
|
|
|
|
def load_metadata_url(self, idp, i):
|
|
|
|
url = idp['METADATA_URL']
|
|
|
|
metadata_cache_time = utils.get_setting(idp, 'METADATA_CACHE_TIME')
|
|
|
|
timeout = utils.get_setting(idp, 'METADATA_HTTP_TIMEOUT')
|
|
|
|
|
|
|
|
warning = logger.warning
|
|
|
|
if 'METADATA' not in idp:
|
|
|
|
# if we have no metadata in cache, we must emit errors
|
|
|
|
warning = logger.error
|
|
|
|
|
|
|
|
try:
|
|
|
|
hostname = urlparse(url).hostname
|
|
|
|
except (ValueError, TypeError) as e:
|
|
|
|
warning('invalid METADATA_URL %r: %s', url, e)
|
|
|
|
return
|
|
|
|
if not hostname:
|
|
|
|
warning('no hostname in METADATA_URL %r: %s', url)
|
|
|
|
return
|
|
|
|
|
|
|
|
last_update = idp.get('METADATA_URL_LAST_UPDATE', 0)
|
|
|
|
now = time.time()
|
|
|
|
|
|
|
|
try:
|
|
|
|
url_fingerprint = hashlib.md5(url.encode('ascii')).hexdigest()
|
|
|
|
file_cache_key = '%s_%s.xml' % (hostname, url_fingerprint)
|
|
|
|
except (UnicodeError, TypeError, ValueError):
|
|
|
|
warning('unable to compute file_cache_key')
|
|
|
|
return
|
|
|
|
|
|
|
|
cache_directory = default_storage.path('mellon_metadata_cache')
|
|
|
|
file_cache_path = os.path.join(cache_directory, file_cache_key)
|
|
|
|
|
|
|
|
if metadata_cache_time:
|
|
|
|
# METADATA_CACHE_TIME == 0 disable the file cache
|
|
|
|
if not os.path.exists(cache_directory):
|
|
|
|
os.makedirs(cache_directory)
|
|
|
|
|
|
|
|
if os.path.exists(file_cache_path) and 'METADATA' not in idp:
|
|
|
|
try:
|
|
|
|
with open(file_cache_path) as fd:
|
|
|
|
idp['METADATA'] = fd.read()
|
|
|
|
# use file cache mtime as last_update time, prevent too many loading from different workers
|
|
|
|
last_update = max(last_update, os.stat(file_cache_path).st_mtime)
|
2019-12-17 09:04:23 +01:00
|
|
|
except (IOError, OSError):
|
2019-06-07 21:46:07 +02:00
|
|
|
warning('metadata url %s : error when loading the file cache %s', url, file_cache_path)
|
|
|
|
|
|
|
|
# fresh cache, skip loading
|
|
|
|
if last_update and 'METADATA' in idp and (now - last_update) < metadata_cache_time:
|
|
|
|
return
|
|
|
|
|
|
|
|
def __http_get():
|
|
|
|
try:
|
2016-02-26 13:27:32 +01:00
|
|
|
verify_ssl_certificate = utils.get_setting(
|
|
|
|
idp, 'VERIFY_SSL_CERTIFICATE')
|
|
|
|
try:
|
2019-06-07 21:46:07 +02:00
|
|
|
response = requests.get(url, verify=verify_ssl_certificate, timeout=timeout)
|
2016-02-26 13:27:32 +01:00
|
|
|
response.raise_for_status()
|
2017-12-28 17:31:20 +01:00
|
|
|
except requests.exceptions.RequestException as e:
|
2019-06-07 21:46:07 +02:00
|
|
|
warning('metadata url %s : HTTP request failed %s', url, e)
|
|
|
|
return
|
|
|
|
|
|
|
|
entity_id = self.load_entity_id(response.text, i)
|
|
|
|
if not entity_id:
|
|
|
|
warning('invalid metadata file retrieved from %s', url)
|
|
|
|
return
|
|
|
|
|
|
|
|
if 'ENTITY_ID' in idp and idp['ENTITY_ID'] != entity_id:
|
|
|
|
# entityID change is always en error
|
|
|
|
logger.error('metadata url %s : entityID changed %r != %r', url, entity_id, idp['ENTITY_ID'])
|
|
|
|
del idp['ENTITY_ID']
|
|
|
|
|
2018-04-05 14:38:25 +02:00
|
|
|
idp['METADATA'] = response.text
|
2019-06-07 21:46:07 +02:00
|
|
|
idp['METADATA_URL_LAST_UPDATE'] = now
|
|
|
|
if metadata_cache_time:
|
|
|
|
try:
|
|
|
|
with atomic_write(file_cache_path, mode='wb', overwrite=True) as fd:
|
|
|
|
fd.write(response.text.encode('utf-8'))
|
|
|
|
except OSError as e:
|
|
|
|
logger.error('metadata url %s : could not write file cache %s, %s', url, file_cache_path, e)
|
|
|
|
idp['METADATA_PATH'] = file_cache_path
|
|
|
|
# prevent reloading of the file cache immediately
|
|
|
|
idp['METADATA_PATH_LAST_UPDATE'] = time.time() + 1
|
|
|
|
logger.debug('metadata url %s : update throught HTTP', url)
|
|
|
|
finally:
|
|
|
|
# release thread object
|
|
|
|
idp.pop('METADATA_URL_UPDATE_THREAD', None)
|
|
|
|
# emit an error if cache is too old
|
2019-06-25 16:05:15 +02:00
|
|
|
if metadata_cache_time:
|
|
|
|
stale_timeout = 24 * metadata_cache_time
|
|
|
|
if last_update and (now - idp['METADATA_URL_LAST_UPDATE']) > stale_timeout:
|
2019-06-25 17:31:08 +02:00
|
|
|
logger.error('metadata url %s: not updated since %.1f hours',
|
|
|
|
url, stale_timeout / 3600.0)
|
2019-06-07 21:46:07 +02:00
|
|
|
|
|
|
|
# we have cache, update in background
|
|
|
|
if last_update and 'METADATA' in idp:
|
|
|
|
t = threading.Thread(target=__http_get)
|
|
|
|
t.start()
|
|
|
|
# store thread in idp for tests
|
|
|
|
idp['METADATA_URL_UPDATE_THREAD'] = t
|
|
|
|
# suspend updates for HTTP timeout + 5 seconds
|
|
|
|
idp['METADATA_URL_LAST_UPDATE'] = last_update + timeout + 5
|
|
|
|
else:
|
|
|
|
# synchronous update
|
|
|
|
__http_get()
|
|
|
|
|
|
|
|
def load_metadata(self, idp, i):
|
|
|
|
# legacy support
|
|
|
|
if 'METADATA' in idp and idp['METADATA'].startswith('/'):
|
|
|
|
idp['METADATA_PATH'] = idp['METADATA']
|
|
|
|
del idp['METADATA']
|
|
|
|
|
|
|
|
if 'METADATA_PATH' in idp:
|
|
|
|
self.load_metadata_path(idp, i)
|
|
|
|
|
|
|
|
if 'METADATA_URL' in idp:
|
|
|
|
self.load_metadata_url(idp, i)
|
|
|
|
|
|
|
|
if 'METADATA' in idp:
|
2018-01-09 21:43:25 +01:00
|
|
|
if 'ENTITY_ID' not in idp:
|
2019-06-07 21:46:07 +02:00
|
|
|
entity_id = self.load_entity_id(idp['METADATA'], i)
|
|
|
|
if entity_id:
|
|
|
|
idp['ENTITY_ID'] = entity_id
|
2018-01-09 21:43:25 +01:00
|
|
|
|
2019-06-07 21:46:07 +02:00
|
|
|
if 'ENTITY_ID' in idp:
|
|
|
|
return idp['METADATA']
|
|
|
|
|
|
|
|
def load_entity_id(self, metadata, i):
|
|
|
|
try:
|
|
|
|
doc = ET.fromstring(metadata)
|
|
|
|
except (TypeError, ET.ParseError):
|
2019-06-14 15:13:54 +02:00
|
|
|
logger.error('METADATA of %d-th idp is invalid', i)
|
2019-06-07 21:46:07 +02:00
|
|
|
return None
|
|
|
|
if doc.tag != '{%s}EntityDescriptor' % lasso.SAML2_METADATA_HREF:
|
2019-06-14 15:13:54 +02:00
|
|
|
logger.error('METADATA of %d-th idp has no EntityDescriptor root tag', i)
|
2019-06-07 21:46:07 +02:00
|
|
|
return None
|
|
|
|
|
|
|
|
if 'entityID' not in doc.attrib:
|
|
|
|
logger.error(
|
2019-06-14 15:13:54 +02:00
|
|
|
'METADATA of %d-th idp has no entityID attribute on its root tag', i)
|
2019-06-07 21:46:07 +02:00
|
|
|
return None
|
|
|
|
return doc.attrib['entityID']
|
|
|
|
|
|
|
|
def load_idp(self, idp, i):
|
|
|
|
self.load_metadata(idp, i)
|
|
|
|
return 'ENTITY_ID' in idp
|
2015-02-13 18:03:47 +01:00
|
|
|
|
2014-04-28 14:33:04 +02:00
|
|
|
def authorize(self, idp, saml_attributes):
|
|
|
|
if not idp:
|
|
|
|
return False
|
2014-08-05 17:55:29 +02:00
|
|
|
required_classref = utils.get_setting(idp, 'AUTHN_CLASSREF')
|
2014-04-28 14:33:04 +02:00
|
|
|
if required_classref:
|
|
|
|
given_classref = saml_attributes['authn_context_class_ref']
|
|
|
|
if given_classref is None or \
|
|
|
|
given_classref not in required_classref:
|
|
|
|
raise PermissionDenied
|
|
|
|
return True
|
|
|
|
|
|
|
|
def format_username(self, idp, saml_attributes):
|
2014-08-05 17:55:29 +02:00
|
|
|
realm = utils.get_setting(idp, 'REALM')
|
|
|
|
username_template = utils.get_setting(idp, 'USERNAME_TEMPLATE')
|
2014-04-28 14:33:04 +02:00
|
|
|
try:
|
2018-05-29 12:21:13 +02:00
|
|
|
username = force_text(username_template).format(
|
2015-04-29 17:07:14 +02:00
|
|
|
realm=realm, attributes=saml_attributes, idp=idp)[:30]
|
2014-04-28 14:33:04 +02:00
|
|
|
except ValueError:
|
2019-06-14 15:13:54 +02:00
|
|
|
logger.error('invalid username template %r', username_template)
|
2017-12-28 17:31:20 +01:00
|
|
|
except (AttributeError, KeyError, IndexError) as e:
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.error(
|
2019-06-14 15:13:54 +02:00
|
|
|
'invalid reference in username template %r: %s', username_template, e)
|
2019-06-07 21:43:23 +02:00
|
|
|
except Exception:
|
2019-06-14 15:13:54 +02:00
|
|
|
logger.exception('unknown error when formatting username')
|
2014-04-28 14:33:04 +02:00
|
|
|
else:
|
|
|
|
return username
|
|
|
|
|
2016-03-02 15:34:07 +01:00
|
|
|
def create_user(self, user_class):
|
|
|
|
return user_class.objects.create(username=uuid.uuid4().hex[:30])
|
|
|
|
|
|
|
|
def finish_create_user(self, idp, saml_attributes, user):
|
|
|
|
username = self.format_username(idp, saml_attributes)
|
|
|
|
if not username:
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.warning('could not build a username, login refused')
|
2016-03-02 15:34:07 +01:00
|
|
|
raise UserCreationError
|
|
|
|
user.username = username
|
|
|
|
user.save()
|
|
|
|
|
2014-04-28 14:33:04 +02:00
|
|
|
def lookup_user(self, idp, saml_attributes):
|
2016-04-11 19:20:05 +02:00
|
|
|
transient_federation_attribute = utils.get_setting(idp, 'TRANSIENT_FEDERATION_ATTRIBUTE')
|
|
|
|
if saml_attributes['name_id_format'] == lasso.SAML2_NAME_IDENTIFIER_FORMAT_TRANSIENT:
|
|
|
|
if (transient_federation_attribute
|
|
|
|
and saml_attributes.get(transient_federation_attribute)):
|
|
|
|
name_id = saml_attributes[transient_federation_attribute]
|
2018-03-25 10:19:36 +02:00
|
|
|
if not isinstance(name_id, six.string_types):
|
2016-04-11 19:20:05 +02:00
|
|
|
if len(name_id) == 1:
|
|
|
|
name_id = name_id[0]
|
|
|
|
else:
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.warning('more than one value for attribute %r, cannot federate',
|
|
|
|
transient_federation_attribute)
|
2016-04-11 19:20:05 +02:00
|
|
|
return None
|
|
|
|
else:
|
|
|
|
return None
|
|
|
|
else:
|
|
|
|
name_id = saml_attributes['name_id_content']
|
2015-04-29 16:39:14 +02:00
|
|
|
issuer = saml_attributes['issuer']
|
|
|
|
try:
|
2019-06-06 22:52:22 +02:00
|
|
|
user = self.get_users_queryset(idp, saml_attributes).get(
|
|
|
|
saml_identifiers__name_id=name_id,
|
|
|
|
saml_identifiers__issuer=issuer)
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.info('looked up user %s with name_id %s from issuer %s', user, name_id, issuer)
|
2019-06-06 13:52:21 +02:00
|
|
|
return user
|
2015-04-29 16:39:14 +02:00
|
|
|
except User.DoesNotExist:
|
2019-06-06 10:12:14 +02:00
|
|
|
pass
|
|
|
|
|
2019-06-06 13:52:21 +02:00
|
|
|
user = None
|
|
|
|
lookup_by_attributes = utils.get_setting(idp, 'LOOKUP_BY_ATTRIBUTES')
|
|
|
|
if lookup_by_attributes:
|
|
|
|
user = self._lookup_by_attributes(idp, saml_attributes, lookup_by_attributes)
|
|
|
|
|
|
|
|
created = False
|
|
|
|
if not user:
|
|
|
|
if not utils.get_setting(idp, 'PROVISION'):
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.debug('provisionning disabled, login refused')
|
2019-06-06 13:52:21 +02:00
|
|
|
return None
|
|
|
|
created = True
|
|
|
|
user = self.create_user(User)
|
2019-06-06 10:12:14 +02:00
|
|
|
|
2019-06-06 10:59:28 +02:00
|
|
|
nameid_user = self._link_user(idp, saml_attributes, issuer, name_id, user)
|
|
|
|
if user != nameid_user:
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.info('looked up user %s with name_id %s from issuer %s', nameid_user, name_id, issuer)
|
2019-06-06 13:52:21 +02:00
|
|
|
if created:
|
|
|
|
user.delete()
|
2019-06-06 10:59:28 +02:00
|
|
|
return nameid_user
|
|
|
|
|
2019-06-06 13:52:21 +02:00
|
|
|
if created:
|
|
|
|
try:
|
|
|
|
self.finish_create_user(idp, saml_attributes, nameid_user)
|
|
|
|
except UserCreationError:
|
|
|
|
user.delete()
|
|
|
|
return None
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.info('created new user %s with name_id %s from issuer %s', nameid_user, name_id, issuer)
|
2019-06-06 10:59:28 +02:00
|
|
|
return nameid_user
|
|
|
|
|
2019-06-06 13:52:21 +02:00
|
|
|
def _lookup_by_attributes(self, idp, saml_attributes, lookup_by_attributes):
|
|
|
|
if not isinstance(lookup_by_attributes, list):
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r: it must be a list', lookup_by_attributes)
|
2019-06-06 13:52:21 +02:00
|
|
|
return None
|
|
|
|
|
|
|
|
users = set()
|
|
|
|
for line in lookup_by_attributes:
|
|
|
|
if not isinstance(line, dict):
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r: it must be a list of dicts', line)
|
2019-06-06 13:52:21 +02:00
|
|
|
continue
|
|
|
|
user_field = line.get('user_field')
|
|
|
|
if not hasattr(user_field, 'isalpha'):
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r: user_field is missing', line)
|
2019-06-06 13:52:21 +02:00
|
|
|
continue
|
|
|
|
try:
|
|
|
|
User._meta.get_field(user_field)
|
|
|
|
except FieldDoesNotExist:
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r, user field %s does not exist',
|
|
|
|
line, user_field)
|
2019-06-06 13:52:21 +02:00
|
|
|
continue
|
|
|
|
saml_attribute = line.get('saml_attribute')
|
|
|
|
if not hasattr(saml_attribute, 'isalpha'):
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r: saml_attribute is missing', line)
|
2019-06-06 13:52:21 +02:00
|
|
|
continue
|
|
|
|
values = saml_attributes.get(saml_attribute)
|
|
|
|
if not values:
|
2020-01-29 15:12:46 +01:00
|
|
|
logger.warning('looking for user by saml attribute %r and user field %r, skipping because empty',
|
2019-06-07 21:46:07 +02:00
|
|
|
saml_attribute, user_field)
|
2019-06-06 13:52:21 +02:00
|
|
|
continue
|
|
|
|
ignore_case = line.get('ignore-case', False)
|
|
|
|
for value in values:
|
|
|
|
key = user_field
|
|
|
|
if ignore_case:
|
|
|
|
key += '__iexact'
|
2019-06-06 22:52:22 +02:00
|
|
|
users_found = self.get_users_queryset(idp, saml_attributes).filter(
|
|
|
|
saml_identifiers__isnull=True, **{key: value})
|
2019-06-06 13:52:21 +02:00
|
|
|
if not users_found:
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.debug('looking for users by attribute %r and user field %r with value %r: not found',
|
|
|
|
saml_attribute, user_field, value)
|
2019-06-06 13:52:21 +02:00
|
|
|
continue
|
2019-06-14 15:13:54 +02:00
|
|
|
logger.info('looking for user by attribute %r and user field %r with value %r: found %s',
|
2019-06-07 21:46:07 +02:00
|
|
|
saml_attribute, user_field, value, display_truncated_list(users_found))
|
2019-06-06 13:52:21 +02:00
|
|
|
users.update(users_found)
|
|
|
|
if len(users) == 1:
|
|
|
|
user = list(users)[0]
|
2019-06-14 15:13:54 +02:00
|
|
|
logger.info('looking for user by attributes %r: found user %s', lookup_by_attributes, user)
|
2019-06-06 13:52:21 +02:00
|
|
|
return user
|
|
|
|
elif len(users) > 1:
|
2019-06-14 15:13:54 +02:00
|
|
|
logger.warning('looking for user by attributes %r: too many users found(%d), failing',
|
2019-06-07 21:46:07 +02:00
|
|
|
lookup_by_attributes, len(users))
|
2019-06-06 13:52:21 +02:00
|
|
|
return None
|
|
|
|
|
2019-06-06 10:59:28 +02:00
|
|
|
def _link_user(self, idp, saml_attributes, issuer, name_id, user):
|
2019-06-06 10:12:14 +02:00
|
|
|
saml_id, created = models.UserSAMLIdentifier.objects.get_or_create(
|
|
|
|
name_id=name_id, issuer=issuer, defaults={'user': user})
|
|
|
|
if created:
|
2019-06-06 10:59:28 +02:00
|
|
|
return user
|
2019-06-06 10:12:14 +02:00
|
|
|
else:
|
2019-06-06 10:59:28 +02:00
|
|
|
return saml_id.user
|
2014-04-28 14:33:04 +02:00
|
|
|
|
|
|
|
def provision(self, user, idp, saml_attributes):
|
|
|
|
self.provision_attribute(user, idp, saml_attributes)
|
|
|
|
self.provision_superuser(user, idp, saml_attributes)
|
|
|
|
self.provision_groups(user, idp, saml_attributes)
|
|
|
|
|
|
|
|
def provision_attribute(self, user, idp, saml_attributes):
|
2014-08-05 17:55:29 +02:00
|
|
|
realm = utils.get_setting(idp, 'REALM')
|
|
|
|
attribute_mapping = utils.get_setting(idp, 'ATTRIBUTE_MAPPING')
|
2015-03-10 14:08:33 +01:00
|
|
|
attribute_set = False
|
2018-03-25 09:57:42 +02:00
|
|
|
for field, tpl in attribute_mapping.items():
|
2014-04-28 14:33:04 +02:00
|
|
|
try:
|
2018-05-29 12:21:13 +02:00
|
|
|
value = force_text(tpl).format(realm=realm, attributes=saml_attributes, idp=idp)
|
2014-04-28 14:33:04 +02:00
|
|
|
except ValueError:
|
2019-06-14 15:13:54 +02:00
|
|
|
logger.warning('invalid attribute mapping template %r', tpl)
|
2017-12-28 17:31:20 +01:00
|
|
|
except (AttributeError, KeyError, IndexError, ValueError) as e:
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.warning(
|
2019-06-14 15:13:54 +02:00
|
|
|
'invalid reference in attribute mapping template %r: %s', tpl, e)
|
2014-04-28 14:33:04 +02:00
|
|
|
else:
|
2015-07-22 16:22:59 +02:00
|
|
|
model_field = user._meta.get_field(field)
|
|
|
|
if hasattr(model_field, 'max_length'):
|
|
|
|
value = value[:model_field.max_length]
|
2016-02-26 13:23:12 +01:00
|
|
|
if getattr(user, field) != value:
|
|
|
|
old_value = getattr(user, field)
|
|
|
|
setattr(user, field, value)
|
|
|
|
attribute_set = True
|
2019-06-14 15:13:54 +02:00
|
|
|
logger.info('set field %s of user %s to value %r (old value %r)', field, user, value, old_value)
|
2015-03-10 14:08:33 +01:00
|
|
|
if attribute_set:
|
|
|
|
user.save()
|
2014-04-28 14:33:04 +02:00
|
|
|
|
|
|
|
def provision_superuser(self, user, idp, saml_attributes):
|
2014-08-05 17:55:29 +02:00
|
|
|
superuser_mapping = utils.get_setting(idp, 'SUPERUSER_MAPPING')
|
2014-04-28 14:33:04 +02:00
|
|
|
if not superuser_mapping:
|
|
|
|
return
|
2016-02-26 13:23:12 +01:00
|
|
|
attribute_set = False
|
2018-03-25 09:57:42 +02:00
|
|
|
for key, values in superuser_mapping.items():
|
2014-04-28 14:33:04 +02:00
|
|
|
if key in saml_attributes:
|
|
|
|
if not isinstance(values, (tuple, list)):
|
|
|
|
values = [values]
|
|
|
|
values = set(values)
|
|
|
|
attribute_values = saml_attributes[key]
|
|
|
|
if not isinstance(attribute_values, (tuple, list)):
|
|
|
|
attribute_values = [attribute_values]
|
|
|
|
attribute_values = set(attribute_values)
|
|
|
|
if attribute_values & values:
|
2016-02-26 13:23:12 +01:00
|
|
|
if not (user.is_staff and user.is_superuser):
|
|
|
|
user.is_staff = True
|
|
|
|
user.is_superuser = True
|
|
|
|
attribute_set = True
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.info('flag is_staff and is_superuser added to user %s', user)
|
2016-03-04 10:02:32 +01:00
|
|
|
break
|
2014-04-28 14:33:04 +02:00
|
|
|
else:
|
2016-01-21 20:02:34 +01:00
|
|
|
if user.is_superuser or user.is_staff:
|
|
|
|
user.is_staff = False
|
2014-04-28 14:33:04 +02:00
|
|
|
user.is_superuser = False
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.info('flag is_staff and is_superuser removed from user %s', user)
|
2016-02-26 13:23:12 +01:00
|
|
|
attribute_set = True
|
|
|
|
if attribute_set:
|
|
|
|
user.save()
|
2014-04-28 14:33:04 +02:00
|
|
|
|
|
|
|
def provision_groups(self, user, idp, saml_attributes):
|
2014-08-05 17:55:29 +02:00
|
|
|
group_attribute = utils.get_setting(idp, 'GROUP_ATTRIBUTE')
|
|
|
|
create_group = utils.get_setting(idp, 'CREATE_GROUP')
|
2014-04-28 14:33:04 +02:00
|
|
|
if group_attribute in saml_attributes:
|
|
|
|
values = saml_attributes[group_attribute]
|
|
|
|
if not isinstance(values, (list, tuple)):
|
|
|
|
values = [values]
|
|
|
|
groups = []
|
|
|
|
for value in set(values):
|
|
|
|
if create_group:
|
|
|
|
group, created = Group.objects.get_or_create(name=value)
|
|
|
|
else:
|
|
|
|
try:
|
|
|
|
group = Group.objects.get(name=value)
|
|
|
|
except Group.DoesNotExist:
|
|
|
|
continue
|
|
|
|
groups.append(group)
|
2015-12-14 16:39:05 +01:00
|
|
|
for group in Group.objects.filter(pk__in=[g.pk for g in groups]).exclude(user=user):
|
2019-06-07 21:46:07 +02:00
|
|
|
logger.info(
|
2019-06-14 15:13:54 +02:00
|
|
|
'adding group %s (%s) to user %s (%s)', group, group.pk, user, user.pk)
|
2015-12-14 16:39:05 +01:00
|
|
|
User.groups.through.objects.get_or_create(group=group, user=user)
|
2016-02-12 17:22:48 +01:00
|
|
|
qs = User.groups.through.objects.exclude(
|
|
|
|
group__pk__in=[g.pk for g in groups]).filter(user=user)
|
2015-12-14 16:39:05 +01:00
|
|
|
for rel in qs:
|
2019-06-14 15:13:54 +02:00
|
|
|
logger.info('removing group %s (%s) from user %s (%s)', rel.group, rel.group.pk, rel.user, rel.user.pk)
|
2015-12-14 16:39:05 +01:00
|
|
|
qs.delete()
|