run pre-commit hooks (#86510)

Remove isort, which seems incompatible with ruff.
Benjamin Dauvergne 2024-02-07 12:54:24 +01:00
parent 6c703b545c
commit 491fd26df5
12 changed files with 145 additions and 37 deletions


@@ -15,11 +15,6 @@ repos:
hooks:
- id: django-upgrade
args: ['--target-version', '3.2']
- repo: https://github.com/PyCQA/isort
rev: 5.12.0
hooks:
- id: isort
args: ['--profile', 'black', '--line-length', '110']
- repo: https://github.com/rtts/djhtml
rev: '3.0.5'
hooks:
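
The hunk above only drops the isort hook; the diff does not show which ruff hook, if any, takes over import sorting. For orientation, ruff's linter can apply isort-compatible import ordering, so formatting like this commit's could be reproduced roughly as sketched below. The commands and rule selection are an assumption for illustration, not taken from this repository's pre-commit configuration.

# Rough equivalent of running ruff instead of isort over the tree.
# Assumes ruff is installed; '--select I' enables the isort-compatible import rules.
import subprocess

subprocess.run(['ruff', 'check', '--select', 'I', '--fix', '.'], check=True)  # sort imports
subprocess.run(['ruff', 'format', '.'], check=True)  # apply quote style and line wrapping
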

.ruff.toml (new file)

@@ -0,0 +1,4 @@
line-length = 100
[format]
quote-style = "single"


@@ -3,7 +3,7 @@
import os
import subprocess
from setuptools import find_packages, setup
from setuptools import setup
from setuptools.command.sdist import sdist
@@ -45,7 +45,9 @@ def get_version():
version = result.replace('.dirty', '+dirty')
return version
else:
return '0.0.post%s' % len(subprocess.check_output(['git', 'rev-list', 'HEAD']).splitlines())
return '0.0.post%s' % len(
subprocess.check_output(['git', 'rev-list', 'HEAD']).splitlines()
)
return '0.0'
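
For context, the call that gets wrapped above is the version fallback used when no release tag is available: it counts the commits reachable from HEAD. A heavily simplified sketch of that path follows; the function name and error handling are assumptions, not the project's code.

import subprocess

def dev_version():
    """Fallback version when no usable tag exists: count commits on HEAD."""
    try:
        commits = subprocess.check_output(['git', 'rev-list', 'HEAD']).splitlines()
    except (OSError, subprocess.CalledProcessError):
        return '0.0'  # not a git checkout, e.g. building from an sdist tarball
    return '0.0.post%s' % len(commits)
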


@@ -19,7 +19,10 @@ class LDAPSource:
def search(self):
for dn, entry in self.conn.paged_search_ext_s(
self.base_dn, ldap.SCOPE_SUBTREE, filterstr=self.filterstr, attrlist=self.attributes
self.base_dn,
ldap.SCOPE_SUBTREE,
filterstr=self.filterstr,
attrlist=self.attributes,
):
if not dn:
continue


@@ -46,7 +46,8 @@ Base DN of the source is remapped to another DN in the target directory"""
required=True,
type=object_class_pivot,
action='append',
help='an objectClass and an attribute name which is the unique identifier ' 'for this class',
help='an objectClass and an attribute name which is the unique identifier '
'for this class',
)
parser.add_argument(
'--attributes-file',
@@ -71,13 +72,20 @@ Base DN of the source is remapped to another DN in the target directory"""
parser.add_argument('--source-filter', help='filter to apply to a source LDAP directory')
parser.add_argument('--source-objectclasses', help='keep only those object classes')
parser.add_argument(
'--target-uri', type=source_uri, required=True, help='URL of the target LDAP directory'
'--target-uri',
type=source_uri,
required=True,
help='URL of the target LDAP directory',
)
parser.add_argument(
'--target-base-dn', required=True, help='base DN of the target LDAP directory'
)
parser.add_argument('--target-base-dn', required=True, help='base DN of the target LDAP directory')
parser.add_argument('--target-bind-dn', help='bind DN for a target LDAP directory')
parser.add_argument('--target-bind-password', help='bind password for a target LDAP directory')
parser.add_argument(
'--fake', action='store_true', help='compute synchronization actions but do not apply'
'--fake',
action='store_true',
help='compute synchronization actions but do not apply',
)
parser.add_argument('--verbose', action='store_true', help='print all actions to stdout')
@@ -114,7 +122,10 @@ Base DN of the source is remapped to another DN in the target directory"""
conn.simple_bind_s(options.source_bind_dn, options.source_bind_password)
source = ldap_source.LDAPSource(
conn, base_dn=options.source_base_dn, attributes=attributes, filterstr=options.source_filter
conn,
base_dn=options.source_base_dn,
attributes=attributes,
filterstr=options.source_filter,
)
if options.verbose:


@@ -47,7 +47,9 @@ class PagedResultsSearchObject:
rtype, rdata, rmsgid, rctrls = self.result3(msgid)
yield from rdata
# Extract the simple paged results response control
pctrls = [c for c in rctrls if c.controlType == SimplePagedResultsControl.controlType]
pctrls = [
c for c in rctrls if c.controlType == SimplePagedResultsControl.controlType
]
if pctrls and pctrls[0].cookie:
# Copy cookie from response control to request control
req_ctrl.cookie = pctrls[0].cookie
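
The control handling reformatted above implements RFC 2696 simple paged results: the server returns a cookie with each page, and the client copies it back into the request control until the cookie comes back empty. Below is a self-contained sketch of the same loop outside the class, using the python-ldap API; the function name and page size are illustrative.

import ldap
from ldap.controls import SimplePagedResultsControl

def paged_search(conn, base_dn, filterstr='(objectClass=*)', attrlist=None, page_size=500):
    """Yield entries page by page using the simple paged results control."""
    req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='')
    while True:
        msgid = conn.search_ext(
            base_dn, ldap.SCOPE_SUBTREE, filterstr, attrlist, serverctrls=[req_ctrl]
        )
        rtype, rdata, rmsgid, rctrls = conn.result3(msgid)
        yield from rdata
        # Extract the response control carrying the server's paging cookie.
        pctrls = [c for c in rctrls if c.controlType == SimplePagedResultsControl.controlType]
        if pctrls and pctrls[0].cookie:
            req_ctrl.cookie = pctrls[0].cookie  # request the next page
        else:
            break  # empty cookie: last page reached
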


@@ -22,7 +22,14 @@ from ldaptools.paged import PagedLDAPObject
SLAPD_PATH = None
SLAPADD_PATH = None
SLAPD_PATHS = ['/bin', '/usr/bin', '/sbin', '/usr/sbin', '/usr/local/bin', '/usr/local/sbin']
SLAPD_PATHS = [
'/bin',
'/usr/bin',
'/sbin',
'/usr/sbin',
'/usr/local/bin',
'/usr/local/sbin',
]
def has_slapd():
@@ -93,11 +100,20 @@ olcAccess: {{0}}to *
"""
process = None
schemas = ['core', 'cosine', 'inetorgperson', 'nis', 'eduorg-200210-openldap', 'eduperson', 'supann-2009']
schemas = [
'core',
'cosine',
'inetorgperson',
'nis',
'eduorg-200210-openldap',
'eduperson',
'supann-2009',
]
schemas_ldif = []
for schema in schemas:
with codecs.open(
os.path.join(os.path.dirname(__file__), 'schemas', '%s.ldif' % schema), encoding='utf-8'
os.path.join(os.path.dirname(__file__), 'schemas', '%s.ldif' % schema),
encoding='utf-8',
) as fd:
schemas_ldif.append(fd.read())
checkpoints = None
@@ -201,10 +217,18 @@ olcAccess: {{0}}to * by * manage
"""Launch slapd"""
assert not self.process
cmd = [SLAPD_PATH, '-d768', '-F' + self.config_dir, '-h', self.ldap_url] # put slapd in foreground
cmd = [
SLAPD_PATH,
'-d768',
'-F' + self.config_dir,
'-h',
self.ldap_url,
] # put slapd in foreground
out_file = open(os.path.join(self.slapd_dir, 'stdout'), 'w')
dev_null = open(os.devnull)
self.process = subprocess.Popen(cmd, stdin=dev_null, env=os.environ, stdout=out_file, stderr=out_file)
self.process = subprocess.Popen(
cmd, stdin=dev_null, env=os.environ, stdout=out_file, stderr=out_file
)
self.set_close_fds([out_file, dev_null])
atexit.register(self.clean)
@@ -243,7 +267,9 @@ olcAccess: {{0}}to * by * manage
"""Stop slapd and save current data state"""
assert not self.process
self.checkpoints.append(os.path.join(self.slapd_dir, 'checkpoint-%d' % len(self.checkpoints)))
self.checkpoints.append(
os.path.join(self.slapd_dir, 'checkpoint-%d' % len(self.checkpoints))
)
for data_dir in self.data_dirs:
dirname = os.path.basename(data_dir)
target = os.path.join(self.checkpoints[-1], dirname)


@@ -7,7 +7,15 @@ import ldap.dn
import ldap.modlist
from ldap.filter import filter_format
from .utils import batch_generator, bytes2str_entry, idict, istr, str2bytes_entry, str2dn, to_dict_of_set
from .utils import (
batch_generator,
bytes2str_entry,
idict,
istr,
str2bytes_entry,
str2dn,
to_dict_of_set,
)
@functools.total_ordering
@@ -160,7 +168,8 @@ class Synchronize:
"""Find a pivot attribute value for an LDAP entry"""
for objc, attr in self.pivot_attributes:
if istr(objc) in [
istr(oc.decode('utf-8')) if isinstance(oc, bytes) else oc for oc in entry['objectclass']
istr(oc.decode('utf-8')) if isinstance(oc, bytes) else oc
for oc in entry['objectclass']
]:
try:
value = entry[attr]
@@ -208,7 +217,9 @@ class Synchronize:
# Ignore some objectclasses
if self.objectclasses:
for dn, entry in entries:
entry['objectclass'] = [v for v in entry['objectclass'] if istr(v) in self.objectclasses]
entry['objectclass'] = [
v for v in entry['objectclass'] if istr(v) in self.objectclasses
]
# Transform input entries into filters
for dn, entry in entries:
objectclass, attr, value = self.get_pivot_attribute(dn, entry)
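
For readers following the synchronization logic: each pivot pair names the attribute that uniquely identifies entries of a given objectClass, and get_pivot_attribute returns the first pair whose objectClass appears in the entry, together with that attribute's value. An illustrative example, with values mirroring the test data further down rather than anything mandated by the module:

# Hypothetical configuration: people are matched on uid, organizational units on ou.
pivot_attributes = (
    ('inetOrgPerson', 'uid'),
    ('organizationalUnit', 'ou'),
)

entry = {'objectclass': [b'top', b'inetOrgPerson'], 'uid': ['jdoe']}
# get_pivot_attribute() would return ('inetOrgPerson', 'uid', 'jdoe') for this entry,
# so the target directory is searched with the filter (uid=jdoe) to find the match.
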


@@ -81,7 +81,9 @@ class idict(dict):
"""A caseless way of checking if a key exists or not.
It returns None or the correct key."""
if not isinstance(item, str):
raise TypeError('Keywords for this object must be strings. You supplied %s' % type(item))
raise TypeError(
'Keywords for this object must be strings. You supplied %s' % type(item)
)
key = item.lower()
try:
return self._keydict[key]
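
The method touched above belongs to a case-insensitive dictionary; _keydict maps lowercased keys to the keys as originally supplied. A minimal sketch of that pattern follows; only the error message mirrors the module, the rest is an assumption and not the full class.

class idict(dict):
    """Dict with case-insensitive string keys (illustrative sketch)."""

    def __init__(self, *args, **kwargs):
        super().__init__()
        self._keydict = {}  # lowercased key -> key as originally supplied
        for key, value in dict(*args, **kwargs).items():
            self[key] = value

    def _get_key(self, item):
        """A caseless way of checking if a key exists; returns None or the real key."""
        if not isinstance(item, str):
            raise TypeError(
                'Keywords for this object must be strings. You supplied %s' % type(item)
            )
        return self._keydict.get(item.lower())

    def __setitem__(self, key, value):
        self._keydict[key.lower()] = key
        super().__setitem__(key, value)

    def __getitem__(self, key):
        real_key = self._get_key(key)
        if real_key is None:
            raise KeyError(key)
        return super().__getitem__(real_key)

assert idict({'MAIL': 'jd@example.com'})['mail'] == 'jd@example.com'
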


@@ -33,14 +33,20 @@ def slapd_tcp2(request):
@pytest.fixture
def slapd_ssl(request):
port = 6389
with Slapd(ldap_url='ldaps://localhost.entrouvert.org:%s' % port, tls=(key_file, certificate_file)) as s:
with Slapd(
ldap_url='ldaps://localhost.entrouvert.org:%s' % port,
tls=(key_file, certificate_file),
) as s:
yield s
@pytest.fixture
def slapd_tls(request):
port = 7389
with Slapd(ldap_url='ldap://localhost.entrouvert.org:%s' % port, tls=(key_file, certificate_file)) as s:
with Slapd(
ldap_url='ldap://localhost.entrouvert.org:%s' % port,
tls=(key_file, certificate_file),
) as s:
yield s


@@ -25,10 +25,15 @@ def test_ldapsync_ldif_to_ldapi(slapd, ldif_path, attributes, pivot_attributes):
main(args)
conn = slapd.get_connection()
assert len(conn.search_s('o=orga', ldap.SCOPE_SUBTREE)) == 2
assert {dn for dn, entry in conn.search_s('o=orga', ldap.SCOPE_SUBTREE)} == {'o=orga', 'uid=admin,o=orga'}
assert {dn for dn, entry in conn.search_s('o=orga', ldap.SCOPE_SUBTREE)} == {
'o=orga',
'uid=admin,o=orga',
}
def test_ldapsync_ldif_to_ldapi_attributes_file(slapd, ldif_path, attributes_path, pivot_attributes):
def test_ldapsync_ldif_to_ldapi_attributes_file(
slapd, ldif_path, attributes_path, pivot_attributes
):
args = [
'--source-uri',
ldif_path,
@@ -50,7 +55,10 @@ def test_ldapsync_ldif_to_ldapi_attributes_file(slapd, ldif_path, attributes_pat
main(args)
conn = slapd.get_connection()
assert len(conn.search_s('o=orga', ldap.SCOPE_SUBTREE)) == 2
assert {dn for dn, entry in conn.search_s('o=orga', ldap.SCOPE_SUBTREE)} == {'o=orga', 'uid=admin,o=orga'}
assert {dn for dn, entry in conn.search_s('o=orga', ldap.SCOPE_SUBTREE)} == {
'o=orga',
'uid=admin,o=orga',
}
def test_ldapsync_ldap_to_ldap(slapd_tcp1, slapd_tcp2, ldif, attributes, pivot_attributes):
@@ -86,4 +94,7 @@ def test_ldapsync_ldap_to_ldap(slapd_tcp1, slapd_tcp2, ldif, attributes, pivot_a
main(args)
conn = slapd_tcp2.get_connection()
assert len(conn.search_s('o=orga', ldap.SCOPE_SUBTREE)) == 2
assert {dn for dn, entry in conn.search_s('o=orga', ldap.SCOPE_SUBTREE)} == {'o=orga', 'uid=admin,o=orga'}
assert {dn for dn, entry in conn.search_s('o=orga', ldap.SCOPE_SUBTREE)} == {
'o=orga',
'uid=admin,o=orga',
}


@@ -23,7 +23,12 @@ def test_synchronize_ldif(slapd):
parser = ListLDIFParser(StringIO(ldif))
parser.parse()
synchronize = Synchronize(
parser, 'o=orga', conn, 'o=orga', pivot_attributes=pivot_attributes, attributes=attributes
parser,
'o=orga',
conn,
'o=orga',
pivot_attributes=pivot_attributes,
attributes=attributes,
)
synchronize.run()
return synchronize
@@ -70,7 +75,10 @@ objectClass: organizationalUnit
assert isinstance(synchronize.actions[0], Rename)
assert isinstance(synchronize.actions[1], Delete)
assert len(conn.search_s('o=orga', ldap.SCOPE_SUBTREE)) == 2
assert {dn for dn, entry in conn.search_s('o=orga', ldap.SCOPE_SUBTREE)} == {'o=orga', 'uid=admin,o=orga'}
assert {dn for dn, entry in conn.search_s('o=orga', ldap.SCOPE_SUBTREE)} == {
'o=orga',
'uid=admin,o=orga',
}
# Delete one entry
ldif = """dn: o=orga
@@ -116,7 +124,12 @@ mail: john.doe@entrouvert.com
source = LDAPSource(conn, base_dn='dc=orga2', attributes=attributes)
synchronize = Synchronize(
source, 'dc=orga2', conn, 'o=orga', pivot_attributes=pivot_attributes, attributes=attributes
source,
'dc=orga2',
conn,
'o=orga',
pivot_attributes=pivot_attributes,
attributes=attributes,
)
synchronize.run()
@@ -125,7 +138,10 @@ mail: john.doe@entrouvert.com
assert isinstance(synchronize.actions[0], Update)
assert isinstance(synchronize.actions[1], Create)
assert len(conn.search_s('o=orga', ldap.SCOPE_SUBTREE)) == 2
assert {dn for dn, entry in conn.search_s('o=orga', ldap.SCOPE_SUBTREE)} == {'o=orga', 'uid=admin,o=orga'}
assert {dn for dn, entry in conn.search_s('o=orga', ldap.SCOPE_SUBTREE)} == {
'o=orga',
'uid=admin,o=orga',
}
# Rename
slapd.add_ldif(
@@ -148,7 +164,10 @@ objectClass: organizationalUnit
assert isinstance(synchronize.actions[0], Rename)
assert isinstance(synchronize.actions[1], Delete)
assert len(conn.search_s('o=orga', ldap.SCOPE_SUBTREE)) == 2
assert {dn for dn, entry in conn.search_s('o=orga', ldap.SCOPE_SUBTREE)} == {'o=orga', 'uid=admin,o=orga'}
assert {dn for dn, entry in conn.search_s('o=orga', ldap.SCOPE_SUBTREE)} == {
'o=orga',
'uid=admin,o=orga',
}
# Delete one entry
conn.delete_s('uid=admin,dc=orga2')
@@ -166,14 +185,30 @@ def test_synchronize_deep_rename(slapd):
('inetOrgPerson', 'uid'),
('organizationalUnit', 'ou'),
)
attributes = ['o', 'objectClass', 'uid', 'sn', 'givenName', 'mail', 'dc', 'cn', 'description', 'ou']
attributes = [
'o',
'objectClass',
'uid',
'sn',
'givenName',
'mail',
'dc',
'cn',
'description',
'ou',
]
conn = slapd.get_connection_admin()
def syn_ldif(ldif):
parser = ListLDIFParser(StringIO(ldif))
parser.parse()
synchronize = Synchronize(
parser, 'o=orga', conn, 'o=orga', pivot_attributes=pivot_attributes, attributes=attributes
parser,
'o=orga',
conn,
'o=orga',
pivot_attributes=pivot_attributes,
attributes=attributes,
)
synchronize.run()
return synchronize